v8 3.14.5 (node 0.10.28)
V8 is Google's open source JavaScript engine.
full-codegen-ia32.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if defined(V8_TARGET_ARCH_IA32)
31 
32 #include "code-stubs.h"
33 #include "codegen.h"
34 #include "compiler.h"
35 #include "debug.h"
36 #include "full-codegen.h"
37 #include "isolate-inl.h"
38 #include "parser.h"
39 #include "scopes.h"
40 #include "stub-cache.h"
41 
42 namespace v8 {
43 namespace internal {
44 
45 #define __ ACCESS_MASM(masm_)
46 
47 
48 class JumpPatchSite BASE_EMBEDDED {
49  public:
50  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
51 #ifdef DEBUG
52  info_emitted_ = false;
53 #endif
54  }
55 
56  ~JumpPatchSite() {
57  ASSERT(patch_site_.is_bound() == info_emitted_);
58  }
59 
60  void EmitJumpIfNotSmi(Register reg,
61  Label* target,
62  Label::Distance distance = Label::kFar) {
63  __ test(reg, Immediate(kSmiTagMask));
64  EmitJump(not_carry, target, distance); // Always taken before patched.
65  }
66 
67  void EmitJumpIfSmi(Register reg,
68  Label* target,
69  Label::Distance distance = Label::kFar) {
70  __ test(reg, Immediate(kSmiTagMask));
71  EmitJump(carry, target, distance); // Never taken before patched.
72  }
73 
74  void EmitPatchInfo() {
75  if (patch_site_.is_bound()) {
76  int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
77  ASSERT(is_int8(delta_to_patch_site));
78  __ test(eax, Immediate(delta_to_patch_site));
79 #ifdef DEBUG
80  info_emitted_ = true;
81 #endif
82  } else {
83  __ nop(); // Signals no inlined code.
84  }
85  }
86 
87  private:
88  // jc will be patched with jz, jnc will become jnz.
89  void EmitJump(Condition cc, Label* target, Label::Distance distance) {
90  ASSERT(!patch_site_.is_bound() && !info_emitted_);
91  ASSERT(cc == carry || cc == not_carry);
92  __ bind(&patch_site_);
93  __ j(cc, target, distance);
94  }
95 
96  MacroAssembler* masm_;
97  Label patch_site_;
98 #ifdef DEBUG
99  bool info_emitted_;
100 #endif
101 };
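
// Note on the patching scheme above: it relies on two facts -- x86 "test"
// always clears the carry flag, and ia32 smis have tag bit 0 (kSmiTag == 0,
// kSmiTagMask == 1). A rough sketch of the two states of a patch site
// (illustrative only, not the actual patcher code):
//
//   as emitted by EmitJumpIfNotSmi:     after the IC rewrites the opcode:
//     test reg, 1    ; CF := 0            test reg, 1   ; ZF set iff smi
//     jnc target     ; always taken       jnz target    ; taken iff not smi
//
// The "test eax, <delta>" emitted by EmitPatchInfo is never executed for its
// flags; its 8-bit immediate records the distance back to the patch site so
// the inline cache can find and patch the jump.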
102 
103 
104 // Generate code for a JS function. On entry to the function the receiver
105 // and arguments have been pushed on the stack left to right, with the
106 // return address on top of them. The actual argument count matches the
107 // formal parameter count expected by the function.
108 //
109 // The live registers are:
110 // o edi: the JS function object being called (i.e. ourselves)
111 // o esi: our context
112 // o ebp: our caller's frame pointer
113 // o esp: stack pointer (pointing to return address)
114 //
115 // The function builds a JS frame. Please see JavaScriptFrameConstants in
116 // frames-ia32.h for its layout.
117 void FullCodeGenerator::Generate() {
118  CompilationInfo* info = info_;
119  handler_table_ =
120  isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
121  profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell(
122  Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget)));
123  SetFunctionPosition(function());
124  Comment cmnt(masm_, "[ function compiled by full code generator");
125 
126  ProfileEntryHookStub::MaybeCallEntryHook(masm_);
127 
128 #ifdef DEBUG
129  if (strlen(FLAG_stop_at) > 0 &&
130  info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
131  __ int3();
132  }
133 #endif
134 
135  // Strict mode functions and builtins need to replace the receiver
136  // with undefined when called as functions (without an explicit
137  // receiver object). ecx is zero for method calls and non-zero for
138  // function calls.
139  if (!info->is_classic_mode() || info->is_native()) {
140  Label ok;
141  __ test(ecx, ecx);
142  __ j(zero, &ok, Label::kNear);
143  // +1 for return address.
144  int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
145  __ mov(ecx, Operand(esp, receiver_offset));
146  __ JumpIfSmi(ecx, &ok);
147  __ CmpObjectType(ecx, JS_GLOBAL_PROXY_TYPE, ecx);
148  __ j(not_equal, &ok, Label::kNear);
149  __ mov(Operand(esp, receiver_offset),
150  Immediate(isolate()->factory()->undefined_value()));
151  __ bind(&ok);
152  }
153 
154  // Open a frame scope to indicate that there is a frame on the stack. The
155  // MANUAL indicates that the scope shouldn't actually generate code to set up
156  // the frame (that is done below).
157  FrameScope frame_scope(masm_, StackFrame::MANUAL);
158 
159  __ push(ebp); // Caller's frame pointer.
160  __ mov(ebp, esp);
161  __ push(esi); // Callee's context.
162  __ push(edi); // Callee's JS Function.
163 
164  { Comment cmnt(masm_, "[ Allocate locals");
165  int locals_count = info->scope()->num_stack_slots();
166  if (locals_count == 1) {
167  __ push(Immediate(isolate()->factory()->undefined_value()));
168  } else if (locals_count > 1) {
169  __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
170  for (int i = 0; i < locals_count; i++) {
171  __ push(eax);
172  }
173  }
174  }
175 
176  bool function_in_register = true;
177 
178  // Possibly allocate a local context.
179  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
180  if (heap_slots > 0) {
181  Comment cmnt(masm_, "[ Allocate context");
182  // Argument to NewContext is the function, which is still in edi.
183  __ push(edi);
184  if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
185  __ Push(info->scope()->GetScopeInfo());
186  __ CallRuntime(Runtime::kNewGlobalContext, 2);
187  } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
188  FastNewContextStub stub(heap_slots);
189  __ CallStub(&stub);
190  } else {
191  __ CallRuntime(Runtime::kNewFunctionContext, 1);
192  }
193  function_in_register = false;
194  // Context is returned in both eax and esi. It replaces the context
195  // passed to us. It's saved in the stack and kept live in esi.
196  __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
197 
198  // Copy parameters into context if necessary.
199  int num_parameters = info->scope()->num_parameters();
200  for (int i = 0; i < num_parameters; i++) {
201  Variable* var = scope()->parameter(i);
202  if (var->IsContextSlot()) {
203  int parameter_offset = StandardFrameConstants::kCallerSPOffset +
204  (num_parameters - 1 - i) * kPointerSize;
205  // Load parameter from stack.
206  __ mov(eax, Operand(ebp, parameter_offset));
207  // Store it in the context.
208  int context_offset = Context::SlotOffset(var->index());
209  __ mov(Operand(esi, context_offset), eax);
210  // Update the write barrier. This clobbers eax and ebx.
211  __ RecordWriteContextSlot(esi,
212  context_offset,
213  eax,
214  ebx,
215  kDontSaveFPRegs);
216  }
217  }
218  }
219 
220  Variable* arguments = scope()->arguments();
221  if (arguments != NULL) {
222  // Function uses arguments object.
223  Comment cmnt(masm_, "[ Allocate arguments object");
224  if (function_in_register) {
225  __ push(edi);
226  } else {
227  __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
228  }
229  // Receiver is just before the parameters on the caller's stack.
230  int num_parameters = info->scope()->num_parameters();
231  int offset = num_parameters * kPointerSize;
232  __ lea(edx,
233  Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
234  __ push(edx);
235  __ push(Immediate(Smi::FromInt(num_parameters)));
236  // Arguments to ArgumentsAccessStub:
237  // function, receiver address, parameter count.
238  // The stub will rewrite receiver and parameter count if the previous
239  // stack frame was an arguments adapter frame.
240  ArgumentsAccessStub::Type type;
241  if (!is_classic_mode()) {
242  type = ArgumentsAccessStub::NEW_STRICT;
243  } else if (function()->has_duplicate_parameters()) {
244  type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
245  } else {
246  type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
247  }
248  ArgumentsAccessStub stub(type);
249  __ CallStub(&stub);
250 
251  SetVar(arguments, eax, ebx, edx);
252  }
253 
254  if (FLAG_trace) {
255  __ CallRuntime(Runtime::kTraceEnter, 0);
256  }
257 
258  // Visit the declarations and body unless there is an illegal
259  // redeclaration.
260  if (scope()->HasIllegalRedeclaration()) {
261  Comment cmnt(masm_, "[ Declarations");
262  scope()->VisitIllegalRedeclaration(this);
263 
264  } else {
265  PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
266  { Comment cmnt(masm_, "[ Declarations");
267  // For named function expressions, declare the function name as a
268  // constant.
269  if (scope()->is_function_scope() && scope()->function() != NULL) {
270  VariableDeclaration* function = scope()->function();
271  ASSERT(function->proxy()->var()->mode() == CONST ||
272  function->proxy()->var()->mode() == CONST_HARMONY);
273  ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
274  VisitVariableDeclaration(function);
275  }
276  VisitDeclarations(scope()->declarations());
277  }
278 
279  { Comment cmnt(masm_, "[ Stack check");
280  PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
281  Label ok;
282  ExternalReference stack_limit =
283  ExternalReference::address_of_stack_limit(isolate());
284  __ cmp(esp, Operand::StaticVariable(stack_limit));
285  __ j(above_equal, &ok, Label::kNear);
286  StackCheckStub stub;
287  __ CallStub(&stub);
288  __ bind(&ok);
289  }
290 
291  { Comment cmnt(masm_, "[ Body");
292  ASSERT(loop_depth() == 0);
293  VisitStatements(function()->body());
294  ASSERT(loop_depth() == 0);
295  }
296  }
297 
298  // Always emit a 'return undefined' in case control fell off the end of
299  // the body.
300  { Comment cmnt(masm_, "[ return <undefined>;");
301  __ mov(eax, isolate()->factory()->undefined_value());
302  EmitReturnSequence();
303  }
304 }
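
// For orientation, a sketch of the frame Generate() has built at this point,
// assuming the standard ia32 JavaScript frame layout from frames-ia32.h
// (kPointerSize == 4; n = number of formal parameters):
//
//   ebp + 8 + n*4         : receiver
//   ebp + 8 + (n-1-i)*4   : parameter i (pushed left to right by the caller)
//   ebp + 4               : return address
//   ebp + 0               : caller's ebp
//   ebp - 4               : context (also kept live in esi)
//   ebp - 8               : the JSFunction being run (pushed from edi)
//   ebp - 12 and below    : stack locals, pre-filled with undefined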
305 
306 
307 void FullCodeGenerator::ClearAccumulator() {
308  __ Set(eax, Immediate(Smi::FromInt(0)));
309 }
310 
311 
312 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
313  __ mov(ebx, Immediate(profiling_counter_));
314  __ sub(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset),
315  Immediate(Smi::FromInt(delta)));
316 }
317 
318 
319 void FullCodeGenerator::EmitProfilingCounterReset() {
320  int reset_value = FLAG_interrupt_budget;
321  if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
322  // Self-optimization is a one-off thing: if it fails, don't try again.
323  reset_value = Smi::kMaxValue;
324  }
325  __ mov(ebx, Immediate(profiling_counter_));
326  __ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset),
327  Immediate(Smi::FromInt(reset_value)));
328 }
329 
330 
331 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
332  Label* back_edge_target) {
333  Comment cmnt(masm_, "[ Stack check");
334  Label ok;
335 
336  if (FLAG_count_based_interrupts) {
337  int weight = 1;
338  if (FLAG_weighted_back_edges) {
339  ASSERT(back_edge_target->is_bound());
340  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
341  weight = Min(kMaxBackEdgeWeight,
342  Max(1, distance / kBackEdgeDistanceUnit));
343  }
344  EmitProfilingCounterDecrement(weight);
345  __ j(positive, &ok, Label::kNear);
346  InterruptStub stub;
347  __ CallStub(&stub);
348  } else {
349  // Count based interrupts happen often enough when they are enabled
350  // that the additional stack checks are not necessary (they would
351  // only check for interrupts).
352  ExternalReference stack_limit =
353  ExternalReference::address_of_stack_limit(isolate());
354  __ cmp(esp, Operand::StaticVariable(stack_limit));
355  __ j(above_equal, &ok, Label::kNear);
356  StackCheckStub stub;
357  __ CallStub(&stub);
358  }
359 
360  // Record a mapping of this PC offset to the OSR id. This is used to find
361  // the AST id from the unoptimized code in order to use it as a key into
362  // the deoptimization input data found in the optimized code.
363  RecordStackCheck(stmt->OsrEntryId());
364 
365  // Loop stack checks can be patched to perform on-stack replacement. In
366  // order to decide whether or not to perform OSR we embed the loop depth
367  // in a test instruction after the call so we can extract it from the OSR
368  // builtin.
369  ASSERT(loop_depth() > 0);
370  __ test(eax, Immediate(Min(loop_depth(), Code::kMaxLoopNestingMarker)));
371 
372  if (FLAG_count_based_interrupts) {
373  EmitProfilingCounterReset();
374  }
375 
376  __ bind(&ok);
377  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
378  // Record a mapping of the OSR id to this PC. This is used if the OSR
379  // entry becomes the target of a bailout. We don't expect it to be, but
380  // we want it to work if it is.
381  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
382 }
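
// The weight computation above makes loops with large bodies drain the
// profiling counter faster, so they reach the InterruptStub (and thus the
// optimization/OSR machinery) after roughly the same amount of executed code
// as tight loops. For illustration, assuming kMaxBackEdgeWeight == 127 and
// kBackEdgeDistanceUnit == 100 (the values are not shown in this listing):
//
//   back edge spanning 250 bytes of code -> weight = min(127, max(1, 2)) = 2
//   back edge spanning 50 bytes of code  -> weight = min(127, max(1, 0)) = 1
//
// Like the patch-site marker, the "test eax, <depth>" above is dead code at
// run time; its immediate byte carries the loop depth for the OSR builtin.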
383 
384 
385 void FullCodeGenerator::EmitReturnSequence() {
386  Comment cmnt(masm_, "[ Return sequence");
387  if (return_label_.is_bound()) {
388  __ jmp(&return_label_);
389  } else {
390  // Common return label
391  __ bind(&return_label_);
392  if (FLAG_trace) {
393  __ push(eax);
394  __ CallRuntime(Runtime::kTraceExit, 1);
395  }
396  if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
397  // Pretend that the exit is a backwards jump to the entry.
398  int weight = 1;
399  if (info_->ShouldSelfOptimize()) {
400  weight = FLAG_interrupt_budget / FLAG_self_opt_count;
401  } else if (FLAG_weighted_back_edges) {
402  int distance = masm_->pc_offset();
403  weight = Min(kMaxBackEdgeWeight,
404  Max(1, distance / kBackEdgeDistanceUnit));
405  }
406  EmitProfilingCounterDecrement(weight);
407  Label ok;
408  __ j(positive, &ok, Label::kNear);
409  __ push(eax);
410  if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
411  __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
412  __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
413  } else {
414  InterruptStub stub;
415  __ CallStub(&stub);
416  }
417  __ pop(eax);
418  EmitProfilingCounterReset();
419  __ bind(&ok);
420  }
421 #ifdef DEBUG
422  // Add a label for checking the size of the code used for returning.
423  Label check_exit_codesize;
424  masm_->bind(&check_exit_codesize);
425 #endif
426  SetSourcePosition(function()->end_position() - 1);
427  __ RecordJSReturn();
428  // Do not use the leave instruction here because it is too short to
429  // patch with the code required by the debugger.
430  __ mov(esp, ebp);
431  __ pop(ebp);
432 
433  int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize;
434  __ Ret(arguments_bytes, ecx);
435 #ifdef ENABLE_DEBUGGER_SUPPORT
436  // Check that the size of the code used for returning is large enough
437  // for the debugger's requirements.
438  ASSERT(Assembler::kJSReturnSequenceLength <=
439  masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
440 #endif
441  }
442 }
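
// The exit sequence "mov esp, ebp; pop ebp; ret (n + 1) * 4" is kept in a
// fixed shape so the debugger can overwrite it in place with a call to its
// debug-break-at-return stub; that is what the kJSReturnSequenceLength
// assertion above guards. A byte-level sketch (illustrative):
//
//   8b e5      mov esp, ebp   ; 2 bytes
//   5d         pop ebp        ; 1 byte
//   c2 xx xx   ret imm16      ; 3 bytes -> 6 patchable bytes in total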
443 
444 
445 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
446  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
447 }
448 
449 
450 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
451  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
452  codegen()->GetVar(result_register(), var);
453 }
454 
455 
456 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
457  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
458  MemOperand operand = codegen()->VarOperand(var, result_register());
459  // Memory operands can be pushed directly.
460  __ push(operand);
461 }
462 
463 
464 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
465  // For simplicity we always test the accumulator register.
466  codegen()->GetVar(result_register(), var);
467  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
468  codegen()->DoTest(this);
469 }
470 
471 
472 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
473  UNREACHABLE(); // Not used on IA32.
474 }
475 
476 
477 void FullCodeGenerator::AccumulatorValueContext::Plug(
478  Heap::RootListIndex index) const {
479  UNREACHABLE(); // Not used on IA32.
480 }
481 
482 
483 void FullCodeGenerator::StackValueContext::Plug(
484  Heap::RootListIndex index) const {
485  UNREACHABLE(); // Not used on IA32.
486 }
487 
488 
489 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
490  UNREACHABLE(); // Not used on IA32.
491 }
492 
493 
494 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
495 }
496 
497 
498 void FullCodeGenerator::AccumulatorValueContext::Plug(
499  Handle<Object> lit) const {
500  if (lit->IsSmi()) {
501  __ SafeSet(result_register(), Immediate(lit));
502  } else {
503  __ Set(result_register(), Immediate(lit));
504  }
505 }
506 
507 
508 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
509  if (lit->IsSmi()) {
510  __ SafePush(Immediate(lit));
511  } else {
512  __ push(Immediate(lit));
513  }
514 }
515 
516 
517 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
518  codegen()->PrepareForBailoutBeforeSplit(condition(),
519  true,
520  true_label_,
521  false_label_);
522  ASSERT(!lit->IsUndetectableObject()); // There are no undetectable literals.
523  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
524  if (false_label_ != fall_through_) __ jmp(false_label_);
525  } else if (lit->IsTrue() || lit->IsJSObject()) {
526  if (true_label_ != fall_through_) __ jmp(true_label_);
527  } else if (lit->IsString()) {
528  if (String::cast(*lit)->length() == 0) {
529  if (false_label_ != fall_through_) __ jmp(false_label_);
530  } else {
531  if (true_label_ != fall_through_) __ jmp(true_label_);
532  }
533  } else if (lit->IsSmi()) {
534  if (Smi::cast(*lit)->value() == 0) {
535  if (false_label_ != fall_through_) __ jmp(false_label_);
536  } else {
537  if (true_label_ != fall_through_) __ jmp(true_label_);
538  }
539  } else {
540  // For simplicity we always test the accumulator register.
541  __ mov(result_register(), lit);
542  codegen()->DoTest(this);
543  }
544 }
545 
546 
547 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
548  Register reg) const {
549  ASSERT(count > 0);
550  __ Drop(count);
551 }
552 
553 
554 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
555  int count,
556  Register reg) const {
557  ASSERT(count > 0);
558  __ Drop(count);
559  __ Move(result_register(), reg);
560 }
561 
562 
563 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
564  Register reg) const {
565  ASSERT(count > 0);
566  if (count > 1) __ Drop(count - 1);
567  __ mov(Operand(esp, 0), reg);
568 }
569 
570 
571 void FullCodeGenerator::TestContext::DropAndPlug(int count,
572  Register reg) const {
573  ASSERT(count > 0);
574  // For simplicity we always test the accumulator register.
575  __ Drop(count);
576  __ Move(result_register(), reg);
577  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
578  codegen()->DoTest(this);
579 }
580 
581 
582 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
583  Label* materialize_false) const {
584  ASSERT(materialize_true == materialize_false);
585  __ bind(materialize_true);
586 }
587 
588 
589 void FullCodeGenerator::AccumulatorValueContext::Plug(
590  Label* materialize_true,
591  Label* materialize_false) const {
592  Label done;
593  __ bind(materialize_true);
594  __ mov(result_register(), isolate()->factory()->true_value());
595  __ jmp(&done, Label::kNear);
596  __ bind(materialize_false);
597  __ mov(result_register(), isolate()->factory()->false_value());
598  __ bind(&done);
599 }
600 
601 
602 void FullCodeGenerator::StackValueContext::Plug(
603  Label* materialize_true,
604  Label* materialize_false) const {
605  Label done;
606  __ bind(materialize_true);
607  __ push(Immediate(isolate()->factory()->true_value()));
608  __ jmp(&done, Label::kNear);
609  __ bind(materialize_false);
610  __ push(Immediate(isolate()->factory()->false_value()));
611  __ bind(&done);
612 }
613 
614 
615 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
616  Label* materialize_false) const {
617  ASSERT(materialize_true == true_label_);
618  ASSERT(materialize_false == false_label_);
619 }
620 
621 
622 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
623 }
624 
625 
626 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
627  Handle<Object> value = flag
628  ? isolate()->factory()->true_value()
629  : isolate()->factory()->false_value();
630  __ mov(result_register(), value);
631 }
632 
633 
634 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
635  Handle<Object> value = flag
636  ? isolate()->factory()->true_value()
637  : isolate()->factory()->false_value();
638  __ push(Immediate(value));
639 }
640 
641 
642 void FullCodeGenerator::TestContext::Plug(bool flag) const {
643  codegen()->PrepareForBailoutBeforeSplit(condition(),
644  true,
645  true_label_,
646  false_label_);
647  if (flag) {
648  if (true_label_ != fall_through_) __ jmp(true_label_);
649  } else {
650  if (false_label_ != fall_through_) __ jmp(false_label_);
651  }
652 }
653 
654 
655 void FullCodeGenerator::DoTest(Expression* condition,
656  Label* if_true,
657  Label* if_false,
658  Label* fall_through) {
659  ToBooleanStub stub(result_register());
660  __ push(result_register());
661  __ CallStub(&stub, condition->test_id());
662  __ test(result_register(), result_register());
663  // The stub returns nonzero for true.
664  Split(not_zero, if_true, if_false, fall_through);
665 }
666 
667 
668 void FullCodeGenerator::Split(Condition cc,
669  Label* if_true,
670  Label* if_false,
671  Label* fall_through) {
672  if (if_false == fall_through) {
673  __ j(cc, if_true);
674  } else if (if_true == fall_through) {
675  __ j(NegateCondition(cc), if_false);
676  } else {
677  __ j(cc, if_true);
678  __ jmp(if_false);
679  }
680 }
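
// Split() emits at most one branch by exploiting whichever label, if any, is
// the fall-through block:
//
//   Split(equal, t, f, fall_through == f):   je t      ; else fall into f
//   Split(equal, t, f, fall_through == t):   jne f     ; else fall into t
//   Split(equal, t, f, fall_through == -):   je t
//                                            jmp f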
681 
682 
683 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
684  ASSERT(var->IsStackAllocated());
685  // Offset is negative because higher indexes are at lower addresses.
686  int offset = -var->index() * kPointerSize;
687  // Adjust by a (parameter or local) base offset.
688  if (var->IsParameter()) {
689  offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
690  } else {
691  offset += JavaScriptFrameConstants::kLocal0Offset;
692  }
693  return Operand(ebp, offset);
694 }
695 
696 
697 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
698  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
699  if (var->IsContextSlot()) {
700  int context_chain_length = scope()->ContextChainLength(var->scope());
701  __ LoadContext(scratch, context_chain_length);
702  return ContextOperand(scratch, var->index());
703  } else {
704  return StackOperand(var);
705  }
706 }
707 
708 
709 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
710  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
711  MemOperand location = VarOperand(var, dest);
712  __ mov(dest, location);
713 }
714 
715 
716 void FullCodeGenerator::SetVar(Variable* var,
717  Register src,
718  Register scratch0,
719  Register scratch1) {
720  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
721  ASSERT(!scratch0.is(src));
722  ASSERT(!scratch0.is(scratch1));
723  ASSERT(!scratch1.is(src));
724  MemOperand location = VarOperand(var, scratch0);
725  __ mov(location, src);
726 
727  // Emit the write barrier code if the location is in the heap.
728  if (var->IsContextSlot()) {
729  int offset = Context::SlotOffset(var->index());
730  ASSERT(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
731  __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
732  }
733 }
734 
735 
736 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
737  bool should_normalize,
738  Label* if_true,
739  Label* if_false) {
740  // Only prepare for bailouts before splits if we're in a test
741  // context. Otherwise, we let the Visit function deal with the
742  // preparation to avoid preparing with the same AST id twice.
743  if (!context()->IsTest() || !info_->IsOptimizable()) return;
744 
745  Label skip;
746  if (should_normalize) __ jmp(&skip, Label::kNear);
747  PrepareForBailout(expr, TOS_REG);
748  if (should_normalize) {
749  __ cmp(eax, isolate()->factory()->true_value());
750  Split(equal, if_true, if_false, NULL);
751  __ bind(&skip);
752  }
753 }
754 
755 
756 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
757  // The variable in the declaration always resides in the current function
758  // context.
759  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
760  if (generate_debug_code_) {
761  // Check that we're not inside a with or catch context.
762  __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
763  __ cmp(ebx, isolate()->factory()->with_context_map());
764  __ Check(not_equal, "Declaration in with context.");
765  __ cmp(ebx, isolate()->factory()->catch_context_map());
766  __ Check(not_equal, "Declaration in catch context.");
767  }
768 }
769 
770 
771 void FullCodeGenerator::VisitVariableDeclaration(
772  VariableDeclaration* declaration) {
773  // If it was not possible to allocate the variable at compile time, we
774  // need to "declare" it at runtime to make sure it actually exists in the
775  // local context.
776  VariableProxy* proxy = declaration->proxy();
777  VariableMode mode = declaration->mode();
778  Variable* variable = proxy->var();
779  bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
780  switch (variable->location()) {
781  case Variable::UNALLOCATED:
782  globals_->Add(variable->name(), zone());
783  globals_->Add(variable->binding_needs_init()
784  ? isolate()->factory()->the_hole_value()
785  : isolate()->factory()->undefined_value(), zone());
786  break;
787 
788  case Variable::PARAMETER:
789  case Variable::LOCAL:
790  if (hole_init) {
791  Comment cmnt(masm_, "[ VariableDeclaration");
792  __ mov(StackOperand(variable),
793  Immediate(isolate()->factory()->the_hole_value()));
794  }
795  break;
796 
797  case Variable::CONTEXT:
798  if (hole_init) {
799  Comment cmnt(masm_, "[ VariableDeclaration");
800  EmitDebugCheckDeclarationContext(variable);
801  __ mov(ContextOperand(esi, variable->index()),
802  Immediate(isolate()->factory()->the_hole_value()));
803  // No write barrier since the hole value is in old space.
804  PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
805  }
806  break;
807 
808  case Variable::LOOKUP: {
809  Comment cmnt(masm_, "[ VariableDeclaration");
810  __ push(esi);
811  __ push(Immediate(variable->name()));
812  // VariableDeclaration nodes are always introduced in one of four modes.
813  ASSERT(IsDeclaredVariableMode(mode));
814  PropertyAttributes attr =
815  IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
816  __ push(Immediate(Smi::FromInt(attr)));
817  // Push initial value, if any.
818  // Note: For variables we must not push an initial value (such as
819  // 'undefined') because we may have a (legal) redeclaration and we
820  // must not destroy the current value.
821  if (hole_init) {
822  __ push(Immediate(isolate()->factory()->the_hole_value()));
823  } else {
824  __ push(Immediate(Smi::FromInt(0))); // Indicates no initial value.
825  }
826  __ CallRuntime(Runtime::kDeclareContextSlot, 4);
827  break;
828  }
829  }
830 }
831 
832 
833 void FullCodeGenerator::VisitFunctionDeclaration(
834  FunctionDeclaration* declaration) {
835  VariableProxy* proxy = declaration->proxy();
836  Variable* variable = proxy->var();
837  switch (variable->location()) {
838  case Variable::UNALLOCATED: {
839  globals_->Add(variable->name(), zone());
840  Handle<SharedFunctionInfo> function =
841  Compiler::BuildFunctionInfo(declaration->fun(), script());
842  // Check for stack-overflow exception.
843  if (function.is_null()) return SetStackOverflow();
844  globals_->Add(function, zone());
845  break;
846  }
847 
848  case Variable::PARAMETER:
849  case Variable::LOCAL: {
850  Comment cmnt(masm_, "[ FunctionDeclaration");
851  VisitForAccumulatorValue(declaration->fun());
852  __ mov(StackOperand(variable), result_register());
853  break;
854  }
855 
856  case Variable::CONTEXT: {
857  Comment cmnt(masm_, "[ FunctionDeclaration");
858  EmitDebugCheckDeclarationContext(variable);
859  VisitForAccumulatorValue(declaration->fun());
860  __ mov(ContextOperand(esi, variable->index()), result_register());
861  // We know that we have written a function, which is not a smi.
862  __ RecordWriteContextSlot(esi,
863  Context::SlotOffset(variable->index()),
864  result_register(),
865  ecx,
866  kDontSaveFPRegs,
867  EMIT_REMEMBERED_SET,
868  OMIT_SMI_CHECK);
869  PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
870  break;
871  }
872 
873  case Variable::LOOKUP: {
874  Comment cmnt(masm_, "[ FunctionDeclaration");
875  __ push(esi);
876  __ push(Immediate(variable->name()));
877  __ push(Immediate(Smi::FromInt(NONE)));
878  VisitForStackValue(declaration->fun());
879  __ CallRuntime(Runtime::kDeclareContextSlot, 4);
880  break;
881  }
882  }
883 }
884 
885 
886 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
887  VariableProxy* proxy = declaration->proxy();
888  Variable* variable = proxy->var();
889  Handle<JSModule> instance = declaration->module()->interface()->Instance();
890  ASSERT(!instance.is_null());
891 
892  switch (variable->location()) {
893  case Variable::UNALLOCATED: {
894  Comment cmnt(masm_, "[ ModuleDeclaration");
895  globals_->Add(variable->name(), zone());
896  globals_->Add(instance, zone());
897  Visit(declaration->module());
898  break;
899  }
900 
901  case Variable::CONTEXT: {
902  Comment cmnt(masm_, "[ ModuleDeclaration");
903  EmitDebugCheckDeclarationContext(variable);
904  __ mov(ContextOperand(esi, variable->index()), Immediate(instance));
905  Visit(declaration->module());
906  break;
907  }
908 
909  case Variable::PARAMETER:
910  case Variable::LOCAL:
911  case Variable::LOOKUP:
912  UNREACHABLE();
913  }
914 }
915 
916 
917 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
918  VariableProxy* proxy = declaration->proxy();
919  Variable* variable = proxy->var();
920  switch (variable->location()) {
921  case Variable::UNALLOCATED:
922  // TODO(rossberg)
923  break;
924 
925  case Variable::CONTEXT: {
926  Comment cmnt(masm_, "[ ImportDeclaration");
927  EmitDebugCheckDeclarationContext(variable);
928  // TODO(rossberg)
929  break;
930  }
931 
932  case Variable::PARAMETER:
933  case Variable::LOCAL:
934  case Variable::LOOKUP:
935  UNREACHABLE();
936  }
937 }
938 
939 
940 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
941  // TODO(rossberg)
942 }
943 
944 
945 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
946  // Call the runtime to declare the globals.
947  __ push(esi); // The context is the first argument.
948  __ push(Immediate(pairs));
949  __ push(Immediate(Smi::FromInt(DeclareGlobalsFlags())));
950  __ CallRuntime(Runtime::kDeclareGlobals, 3);
951  // Return value is ignored.
952 }
953 
954 
955 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
956  Comment cmnt(masm_, "[ SwitchStatement");
957  Breakable nested_statement(this, stmt);
958  SetStatementPosition(stmt);
959 
960  // Keep the switch value on the stack until a case matches.
961  VisitForStackValue(stmt->tag());
962  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
963 
964  ZoneList<CaseClause*>* clauses = stmt->cases();
965  CaseClause* default_clause = NULL; // Can occur anywhere in the list.
966 
967  Label next_test; // Recycled for each test.
968  // Compile all the tests with branches to their bodies.
969  for (int i = 0; i < clauses->length(); i++) {
970  CaseClause* clause = clauses->at(i);
971  clause->body_target()->Unuse();
972 
973  // The default is not a test, but remember it as final fall through.
974  if (clause->is_default()) {
975  default_clause = clause;
976  continue;
977  }
978 
979  Comment cmnt(masm_, "[ Case comparison");
980  __ bind(&next_test);
981  next_test.Unuse();
982 
983  // Compile the label expression.
984  VisitForAccumulatorValue(clause->label());
985 
986  // Perform the comparison as if via '==='.
987  __ mov(edx, Operand(esp, 0)); // Switch value.
988  bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
989  JumpPatchSite patch_site(masm_);
990  if (inline_smi_code) {
991  Label slow_case;
992  __ mov(ecx, edx);
993  __ or_(ecx, eax);
994  patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
995 
996  __ cmp(edx, eax);
997  __ j(not_equal, &next_test);
998  __ Drop(1); // Switch value is no longer needed.
999  __ jmp(clause->body_target());
1000  __ bind(&slow_case);
1001  }
1002 
1003  // Record position before stub call for type feedback.
1004  SetSourcePosition(clause->position());
1005  Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
1006  CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
1007  patch_site.EmitPatchInfo();
1008  __ test(eax, eax);
1009  __ j(not_equal, &next_test);
1010  __ Drop(1); // Switch value is no longer needed.
1011  __ jmp(clause->body_target());
1012  }
1013 
1014  // Discard the test value and jump to the default if present, otherwise to
1015  // the end of the statement.
1016  __ bind(&next_test);
1017  __ Drop(1); // Switch value is no longer needed.
1018  if (default_clause == NULL) {
1019  __ jmp(nested_statement.break_label());
1020  } else {
1021  __ jmp(default_clause->body_target());
1022  }
1023 
1024  // Compile all the case bodies.
1025  for (int i = 0; i < clauses->length(); i++) {
1026  Comment cmnt(masm_, "[ Case body");
1027  CaseClause* clause = clauses->at(i);
1028  __ bind(clause->body_target());
1029  PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1030  VisitStatements(clause->statements());
1031  }
1032 
1033  __ bind(nested_statement.break_label());
1034  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1035 }
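
// Note on the inlined smi case above: "or ecx, eax" merges the tag bits of
// the switch value (edx) and the clause label (eax), so a single smi check
// covers both operands (ia32 smis have tag bit 0):
//
//   mov ecx, edx
//   or  ecx, eax    ; low bit is 0 only if *both* values are smis
//   <patchable jump to slow_case unless ecx is a smi>
//   cmp edx, eax    ; both smis: strict equality is a plain comparison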
1036 
1037 
1038 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1039  Comment cmnt(masm_, "[ ForInStatement");
1040  SetStatementPosition(stmt);
1041 
1042  Label loop, exit;
1043  ForIn loop_statement(this, stmt);
1044  increment_loop_depth();
1045 
1046  // Get the object to enumerate over. Both SpiderMonkey and JSC
1047  // ignore null and undefined in contrast to the specification; see
1048  // ECMA-262 section 12.6.4.
1049  VisitForAccumulatorValue(stmt->enumerable());
1050  __ cmp(eax, isolate()->factory()->undefined_value());
1051  __ j(equal, &exit);
1052  __ cmp(eax, isolate()->factory()->null_value());
1053  __ j(equal, &exit);
1054 
1055  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1056 
1057  // Convert the object to a JS object.
1058  Label convert, done_convert;
1059  __ JumpIfSmi(eax, &convert, Label::kNear);
1060  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
1061  __ j(above_equal, &done_convert, Label::kNear);
1062  __ bind(&convert);
1063  __ push(eax);
1064  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1065  __ bind(&done_convert);
1066  __ push(eax);
1067 
1068  // Check for proxies.
1069  Label call_runtime, use_cache, fixed_array;
1070  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1071  __ CmpObjectType(eax, LAST_JS_PROXY_TYPE, ecx);
1072  __ j(below_equal, &call_runtime);
1073 
1074  // Check cache validity in generated code. This is a fast case for
1075  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1076  // guarantee cache validity, call the runtime system to check cache
1077  // validity or get the property names in a fixed array.
1078  __ CheckEnumCache(&call_runtime);
1079 
1081  __ jmp(&use_cache, Label::kNear);
1082 
1083  // Get the set of properties to enumerate.
1084  __ bind(&call_runtime);
1085  __ push(eax);
1086  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1087  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
1088  isolate()->factory()->meta_map());
1089  __ j(not_equal, &fixed_array);
1090 
1091 
1092  // We got a map in register eax. Get the enumeration cache from it.
1093  Label no_descriptors;
1094  __ bind(&use_cache);
1095 
1096  __ EnumLength(edx, eax);
1097  __ cmp(edx, Immediate(Smi::FromInt(0)));
1098  __ j(equal, &no_descriptors);
1099 
1100  __ LoadInstanceDescriptors(eax, ecx);
1101  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheOffset));
1102  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));
1103 
1104  // Set up the four remaining stack slots.
1105  __ push(eax); // Map.
1106  __ push(ecx); // Enumeration cache.
1107  __ push(edx); // Number of valid entries for the map in the enum cache.
1108  __ push(Immediate(Smi::FromInt(0))); // Initial index.
1109  __ jmp(&loop);
1110 
1111  __ bind(&no_descriptors);
1112  __ add(esp, Immediate(kPointerSize));
1113  __ jmp(&exit);
1114 
1115  // We got a fixed array in register eax. Iterate through that.
1116  Label non_proxy;
1117  __ bind(&fixed_array);
1118 
1119  Handle<JSGlobalPropertyCell> cell =
1120  isolate()->factory()->NewJSGlobalPropertyCell(
1121  Handle<Object>(
1122  Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker)));
1123  RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell);
1124  __ LoadHeapObject(ebx, cell);
1125  __ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset),
1126  Immediate(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)));
1127 
1128  __ mov(ebx, Immediate(Smi::FromInt(1))); // Smi indicates slow check
1129  __ mov(ecx, Operand(esp, 0 * kPointerSize)); // Get enumerated object
1130  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1131  __ CmpObjectType(ecx, LAST_JS_PROXY_TYPE, ecx);
1132  __ j(above, &non_proxy);
1133  __ mov(ebx, Immediate(Smi::FromInt(0))); // Zero indicates proxy
1134  __ bind(&non_proxy);
1135  __ push(ebx); // Smi
1136  __ push(eax); // Array
1137  __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
1138  __ push(eax); // Fixed array length (as smi).
1139  __ push(Immediate(Smi::FromInt(0))); // Initial index.
1140 
1141  // Generate code for doing the condition check.
1142  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1143  __ bind(&loop);
1144  __ mov(eax, Operand(esp, 0 * kPointerSize)); // Get the current index.
1145  __ cmp(eax, Operand(esp, 1 * kPointerSize)); // Compare to the array length.
1146  __ j(above_equal, loop_statement.break_label());
1147 
1148  // Get the current entry of the array into register ebx.
1149  __ mov(ebx, Operand(esp, 2 * kPointerSize));
1150  __ mov(ebx, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));
1151 
1152  // Get the expected map from the stack or a smi in the
1153  // permanent slow case into register edx.
1154  __ mov(edx, Operand(esp, 3 * kPointerSize));
1155 
1156  // Check if the expected map still matches that of the enumerable.
1157  // If not, we may have to filter the key.
1158  Label update_each;
1159  __ mov(ecx, Operand(esp, 4 * kPointerSize));
1160  __ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset));
1161  __ j(equal, &update_each, Label::kNear);
1162 
1163  // For proxies, no filtering is done.
1164  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1165  ASSERT(Smi::FromInt(0) == 0);
1166  __ test(edx, edx);
1167  __ j(zero, &update_each);
1168 
1169  // Convert the entry to a string or null if it isn't a property
1170  // anymore. If the property has been removed while iterating, we
1171  // just skip it.
1172  __ push(ecx); // Enumerable.
1173  __ push(ebx); // Current entry.
1174  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1175  __ test(eax, eax);
1176  __ j(equal, loop_statement.continue_label());
1177  __ mov(ebx, eax);
1178 
1179  // Update the 'each' property or variable from the possibly filtered
1180  // entry in register ebx.
1181  __ bind(&update_each);
1182  __ mov(result_register(), ebx);
1183  // Perform the assignment as if via '='.
1184  { EffectContext context(this);
1185  EmitAssignment(stmt->each());
1186  }
1187 
1188  // Generate code for the body of the loop.
1189  Visit(stmt->body());
1190 
1191  // Generate code for going to the next element by incrementing the
1192  // index (smi) stored on top of the stack.
1193  __ bind(loop_statement.continue_label());
1194  __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));
1195 
1196  EmitStackCheck(stmt, &loop);
1197  __ jmp(&loop);
1198 
1199  // Remove the pointers stored on the stack.
1200  __ bind(loop_statement.break_label());
1201  __ add(esp, Immediate(5 * kPointerSize));
1202 
1203  // Exit and decrement the loop depth.
1204  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1205  __ bind(&exit);
1206  decrement_loop_depth();
1207 }
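
// While the for-in loop runs, five slots live on the stack (lowest address
// first), which is why the cleanup above pops 5 * kPointerSize:
//
//   esp[0] : current index (smi)
//   esp[1] : number of keys to iterate over (smi)
//   esp[2] : enum cache or fixed array holding the keys
//   esp[3] : map of the enumerable (fast case), or smi 1 (slow) / 0 (proxy)
//   esp[4] : the enumerable object itself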
1208 
1209 
1210 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1211  bool pretenure) {
1212  // Use the fast case closure allocation code that allocates in new
1213  // space for nested functions that don't need literals cloning. If
1214  // we're running with the --always-opt or the --prepare-always-opt
1215  // flag, we need to use the runtime function so that the new function
1216  // we are creating here gets a chance to have its code optimized and
1217  // doesn't just get a copy of the existing unoptimized code.
1218  if (!FLAG_always_opt &&
1219  !FLAG_prepare_always_opt &&
1220  !pretenure &&
1221  scope()->is_function_scope() &&
1222  info->num_literals() == 0) {
1223  FastNewClosureStub stub(info->language_mode());
1224  __ push(Immediate(info));
1225  __ CallStub(&stub);
1226  } else {
1227  __ push(esi);
1228  __ push(Immediate(info));
1229  __ push(Immediate(pretenure
1230  ? isolate()->factory()->true_value()
1231  : isolate()->factory()->false_value()));
1232  __ CallRuntime(Runtime::kNewClosure, 3);
1233  }
1234  context()->Plug(eax);
1235 }
1236 
1237 
1238 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1239  Comment cmnt(masm_, "[ VariableProxy");
1240  EmitVariableLoad(expr);
1241 }
1242 
1243 
1244 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
1245  TypeofState typeof_state,
1246  Label* slow) {
1247  Register context = esi;
1248  Register temp = edx;
1249 
1250  Scope* s = scope();
1251  while (s != NULL) {
1252  if (s->num_heap_slots() > 0) {
1253  if (s->calls_non_strict_eval()) {
1254  // Check that extension is NULL.
1255  __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
1256  Immediate(0));
1257  __ j(not_equal, slow);
1258  }
1259  // Load next context in chain.
1260  __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1261  // Walk the rest of the chain without clobbering esi.
1262  context = temp;
1263  }
1264  // If no outer scope calls eval, we do not need to check more
1265  // context extensions. If we have reached an eval scope, we check
1266  // all extensions from this point.
1267  if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
1268  s = s->outer_scope();
1269  }
1270 
1271  if (s != NULL && s->is_eval_scope()) {
1272  // Loop up the context chain. There is no frame effect so it is
1273  // safe to use raw labels here.
1274  Label next, fast;
1275  if (!context.is(temp)) {
1276  __ mov(temp, context);
1277  }
1278  __ bind(&next);
1279  // Terminate at native context.
1280  __ cmp(FieldOperand(temp, HeapObject::kMapOffset),
1281  Immediate(isolate()->factory()->native_context_map()));
1282  __ j(equal, &fast, Label::kNear);
1283  // Check that extension is NULL.
1284  __ cmp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
1285  __ j(not_equal, slow);
1286  // Load next context in chain.
1287  __ mov(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
1288  __ jmp(&next);
1289  __ bind(&fast);
1290  }
1291 
1292  // All extension objects were empty and it is safe to use a global
1293  // load IC call.
1294  __ mov(edx, GlobalObjectOperand());
1295  __ mov(ecx, var->name());
1296  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1297  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
1298  ? RelocInfo::CODE_TARGET
1299  : RelocInfo::CODE_TARGET_CONTEXT;
1300  CallIC(ic, mode);
1301 }
1302 
1303 
1304 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1305  Label* slow) {
1306  ASSERT(var->IsContextSlot());
1307  Register context = esi;
1308  Register temp = ebx;
1309 
1310  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1311  if (s->num_heap_slots() > 0) {
1312  if (s->calls_non_strict_eval()) {
1313  // Check that extension is NULL.
1314  __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
1315  Immediate(0));
1316  __ j(not_equal, slow);
1317  }
1318  __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1319  // Walk the rest of the chain without clobbering esi.
1320  context = temp;
1321  }
1322  }
1323  // Check that last extension is NULL.
1324  __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
1325  __ j(not_equal, slow);
1326 
1327  // This function is used only for loads, not stores, so it's safe to
1328  // return an esi-based operand (the write barrier cannot be allowed to
1329  // destroy the esi register).
1330  return ContextOperand(context, var->index());
1331 }
1332 
1333 
1334 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
1335  TypeofState typeof_state,
1336  Label* slow,
1337  Label* done) {
1338  // Generate fast-case code for variables that might be shadowed by
1339  // eval-introduced variables. Eval is used a lot without
1340  // introducing variables. In those cases, we do not want to
1341  // perform a runtime call for all variables in the scope
1342  // containing the eval.
1343  if (var->mode() == DYNAMIC_GLOBAL) {
1344  EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
1345  __ jmp(done);
1346  } else if (var->mode() == DYNAMIC_LOCAL) {
1347  Variable* local = var->local_if_not_shadowed();
1348  __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
1349  if (local->mode() == CONST ||
1350  local->mode() == CONST_HARMONY ||
1351  local->mode() == LET) {
1352  __ cmp(eax, isolate()->factory()->the_hole_value());
1353  __ j(not_equal, done);
1354  if (local->mode() == CONST) {
1355  __ mov(eax, isolate()->factory()->undefined_value());
1356  } else { // LET || CONST_HARMONY
1357  __ push(Immediate(var->name()));
1358  __ CallRuntime(Runtime::kThrowReferenceError, 1);
1359  }
1360  }
1361  __ jmp(done);
1362  }
1363 }
1364 
1365 
1366 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1367  // Record position before possible IC call.
1368  SetSourcePosition(proxy->position());
1369  Variable* var = proxy->var();
1370 
1371  // Three cases: global variables, lookup variables, and all other types of
1372  // variables.
1373  switch (var->location()) {
1374  case Variable::UNALLOCATED: {
1375  Comment cmnt(masm_, "Global variable");
1376  // Use inline caching. Variable name is passed in ecx and the global
1377  // object in eax.
1378  __ mov(edx, GlobalObjectOperand());
1379  __ mov(ecx, var->name());
1380  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1381  CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
1382  context()->Plug(eax);
1383  break;
1384  }
1385 
1386  case Variable::PARAMETER:
1387  case Variable::LOCAL:
1388  case Variable::CONTEXT: {
1389  Comment cmnt(masm_, var->IsContextSlot()
1390  ? "Context variable"
1391  : "Stack variable");
1392  if (var->binding_needs_init()) {
1393  // var->scope() may be NULL when the proxy is located in eval code and
1394  // refers to a potential outside binding. Currently those bindings are
1395  // always looked up dynamically, i.e. in that case
1396  // var->location() == LOOKUP.
1397  // always holds.
1398  ASSERT(var->scope() != NULL);
1399 
1400  // Check if the binding really needs an initialization check. The check
1401  // can be skipped in the following situation: we have a LET or CONST
1402  // binding in harmony mode, both the Variable and the VariableProxy have
1403  // the same declaration scope (i.e. they are both in global code, in the
1404  // same function or in the same eval code) and the VariableProxy is in
1405  // the source physically located after the initializer of the variable.
1406  //
1407  // We cannot skip any initialization checks for CONST in non-harmony
1408  // mode because const variables may be declared but never initialized:
1409  // if (false) { const x; }; var y = x;
1410  //
1411  // The condition on the declaration scopes is a conservative check for
1412  // nested functions that access a binding and are called before the
1413  // binding is initialized:
1414  // function() { f(); let x = 1; function f() { x = 2; } }
1415  //
1416  bool skip_init_check;
1417  if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1418  skip_init_check = false;
1419  } else {
1420  // Check that we always have valid source position.
1421  ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
1422  ASSERT(proxy->position() != RelocInfo::kNoPosition);
1423  skip_init_check = var->mode() != CONST &&
1424  var->initializer_position() < proxy->position();
1425  }
1426 
1427  if (!skip_init_check) {
1428  // Let and const need a read barrier.
1429  Label done;
1430  GetVar(eax, var);
1431  __ cmp(eax, isolate()->factory()->the_hole_value());
1432  __ j(not_equal, &done, Label::kNear);
1433  if (var->mode() == LET || var->mode() == CONST_HARMONY) {
1434  // Throw a reference error when using an uninitialized let/const
1435  // binding in harmony mode.
1436  __ push(Immediate(var->name()));
1437  __ CallRuntime(Runtime::kThrowReferenceError, 1);
1438  } else {
1439  // Uninitialized const bindings outside of harmony mode are unholed.
1440  ASSERT(var->mode() == CONST);
1441  __ mov(eax, isolate()->factory()->undefined_value());
1442  }
1443  __ bind(&done);
1444  context()->Plug(eax);
1445  break;
1446  }
1447  }
1448  context()->Plug(var);
1449  break;
1450  }
1451 
1452  case Variable::LOOKUP: {
1453  Label done, slow;
1454  // Generate code for loading from variables potentially shadowed
1455  // by eval-introduced variables.
1456  EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
1457  __ bind(&slow);
1458  Comment cmnt(masm_, "Lookup variable");
1459  __ push(esi); // Context.
1460  __ push(Immediate(var->name()));
1461  __ CallRuntime(Runtime::kLoadContextSlot, 2);
1462  __ bind(&done);
1463  context()->Plug(eax);
1464  break;
1465  }
1466  }
1467 }
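
// The hole check in the PARAMETER/LOCAL/CONTEXT case above is the read
// barrier for lexical bindings: LET and harmony CONST slots start out
// holding the hole value, so a read before the initializer has run either
// throws (harmony) or yields undefined (legacy CONST). Control-flow sketch:
//
//   GetVar(eax, var)
//   cmp eax, the_hole_value
//   jne done
//   LET / CONST_HARMONY: ThrowReferenceError(var->name())
//   legacy CONST:        mov eax, undefined_value
//   done: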
1468 
1469 
1470 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1471  Comment cmnt(masm_, "[ RegExpLiteral");
1472  Label materialized;
1473  // Registers will be used as follows:
1474  // edi = JS function.
1475  // ecx = literals array.
1476  // ebx = regexp literal.
1477  // eax = regexp literal clone.
1478  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1479  __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
1480  int literal_offset =
1481  FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1482  __ mov(ebx, FieldOperand(ecx, literal_offset));
1483  __ cmp(ebx, isolate()->factory()->undefined_value());
1484  __ j(not_equal, &materialized, Label::kNear);
1485 
1486  // Create regexp literal using runtime function
1487  // Result will be in eax.
1488  __ push(ecx);
1489  __ push(Immediate(Smi::FromInt(expr->literal_index())));
1490  __ push(Immediate(expr->pattern()));
1491  __ push(Immediate(expr->flags()));
1492  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1493  __ mov(ebx, eax);
1494 
1495  __ bind(&materialized);
1496  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1497  Label allocated, runtime_allocate;
1498  __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
1499  __ jmp(&allocated);
1500 
1501  __ bind(&runtime_allocate);
1502  __ push(ebx);
1503  __ push(Immediate(Smi::FromInt(size)));
1504  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1505  __ pop(ebx);
1506 
1507  __ bind(&allocated);
1508  // Copy the content into the newly allocated memory.
1509  // (Unroll copy loop once for better throughput).
1510  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
1511  __ mov(edx, FieldOperand(ebx, i));
1512  __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
1513  __ mov(FieldOperand(eax, i), edx);
1514  __ mov(FieldOperand(eax, i + kPointerSize), ecx);
1515  }
1516  if ((size % (2 * kPointerSize)) != 0) {
1517  __ mov(edx, FieldOperand(ebx, size - kPointerSize));
1518  __ mov(FieldOperand(eax, size - kPointerSize), edx);
1519  }
1520  context()->Plug(eax);
1521 }
1522 
1523 
1524 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1525  if (expression == NULL) {
1526  __ push(Immediate(isolate()->factory()->null_value()));
1527  } else {
1528  VisitForStackValue(expression);
1529  }
1530 }
1531 
1532 
1533 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1534  Comment cmnt(masm_, "[ ObjectLiteral");
1535  Handle<FixedArray> constant_properties = expr->constant_properties();
1536  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1537  __ push(FieldOperand(edi, JSFunction::kLiteralsOffset));
1538  __ push(Immediate(Smi::FromInt(expr->literal_index())));
1539  __ push(Immediate(constant_properties));
1540  int flags = expr->fast_elements()
1541  ? ObjectLiteral::kFastElements
1542  : ObjectLiteral::kNoFlags;
1543  flags |= expr->has_function()
1544  ? ObjectLiteral::kHasFunction
1545  : ObjectLiteral::kNoFlags;
1546  __ push(Immediate(Smi::FromInt(flags)));
1547  int properties_count = constant_properties->length() / 2;
1548  if (expr->depth() > 1) {
1549  __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1550  } else if (flags != ObjectLiteral::kFastElements ||
1551  properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
1552  __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
1553  } else {
1554  FastCloneShallowObjectStub stub(properties_count);
1555  __ CallStub(&stub);
1556  }
1557 
1558  // If result_saved is true the result is on top of the stack. If
1559  // result_saved is false the result is in eax.
1560  bool result_saved = false;
1561 
1562  // Mark all computed expressions that are bound to a key that
1563  // is shadowed by a later occurrence of the same key. For the
1564  // marked expressions, no store code is emitted.
1565  expr->CalculateEmitStore(zone());
1566 
1567  AccessorTable accessor_table(zone());
1568  for (int i = 0; i < expr->properties()->length(); i++) {
1569  ObjectLiteral::Property* property = expr->properties()->at(i);
1570  if (property->IsCompileTimeValue()) continue;
1571 
1572  Literal* key = property->key();
1573  Expression* value = property->value();
1574  if (!result_saved) {
1575  __ push(eax); // Save result on the stack
1576  result_saved = true;
1577  }
1578  switch (property->kind()) {
1579  case ObjectLiteral::Property::CONSTANT:
1580  UNREACHABLE();
1581  case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1582  ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
1583  // Fall through.
1584  case ObjectLiteral::Property::COMPUTED:
1585  if (key->handle()->IsSymbol()) {
1586  if (property->emit_store()) {
1587  VisitForAccumulatorValue(value);
1588  __ mov(ecx, Immediate(key->handle()));
1589  __ mov(edx, Operand(esp, 0));
1590  Handle<Code> ic = is_classic_mode()
1591  ? isolate()->builtins()->StoreIC_Initialize()
1592  : isolate()->builtins()->StoreIC_Initialize_Strict();
1593  CallIC(ic, RelocInfo::CODE_TARGET, key->LiteralFeedbackId());
1594  PrepareForBailoutForId(key->id(), NO_REGISTERS);
1595  } else {
1596  VisitForEffect(value);
1597  }
1598  break;
1599  }
1600  // Fall through.
1601  case ObjectLiteral::Property::PROTOTYPE:
1602  __ push(Operand(esp, 0)); // Duplicate receiver.
1603  VisitForStackValue(key);
1604  VisitForStackValue(value);
1605  if (property->emit_store()) {
1606  __ push(Immediate(Smi::FromInt(NONE))); // PropertyAttributes
1607  __ CallRuntime(Runtime::kSetProperty, 4);
1608  } else {
1609  __ Drop(3);
1610  }
1611  break;
1612  case ObjectLiteral::Property::GETTER:
1613  accessor_table.lookup(key)->second->getter = value;
1614  break;
1615  case ObjectLiteral::Property::SETTER:
1616  accessor_table.lookup(key)->second->setter = value;
1617  break;
1618  }
1619  }
1620 
1621  // Emit code to define accessors, using only a single call to the runtime for
1622  // each pair of corresponding getters and setters.
1623  for (AccessorTable::Iterator it = accessor_table.begin();
1624  it != accessor_table.end();
1625  ++it) {
1626  __ push(Operand(esp, 0)); // Duplicate receiver.
1627  VisitForStackValue(it->first);
1628  EmitAccessor(it->second->getter);
1629  EmitAccessor(it->second->setter);
1630  __ push(Immediate(Smi::FromInt(NONE)));
1631  __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
1632  }
1633 
1634  if (expr->has_function()) {
1635  ASSERT(result_saved);
1636  __ push(Operand(esp, 0));
1637  __ CallRuntime(Runtime::kToFastProperties, 1);
1638  }
1639 
1640  if (result_saved) {
1641  context()->PlugTOS();
1642  } else {
1643  context()->Plug(eax);
1644  }
1645 }
1646 
1647 
1648 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1649  Comment cmnt(masm_, "[ ArrayLiteral");
1650 
1651  ZoneList<Expression*>* subexprs = expr->values();
1652  int length = subexprs->length();
1653  Handle<FixedArray> constant_elements = expr->constant_elements();
1654  ASSERT_EQ(2, constant_elements->length());
1655  ElementsKind constant_elements_kind =
1656  static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1657  bool has_constant_fast_elements =
1658  IsFastObjectElementsKind(constant_elements_kind);
1659  Handle<FixedArrayBase> constant_elements_values(
1660  FixedArrayBase::cast(constant_elements->get(1)));
1661 
1662  __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1663  __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
1664  __ push(Immediate(Smi::FromInt(expr->literal_index())));
1665  __ push(Immediate(constant_elements));
1666  Heap* heap = isolate()->heap();
1667  if (has_constant_fast_elements &&
1668  constant_elements_values->map() == heap->fixed_cow_array_map()) {
1669  // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
1670  // change, so it's possible to specialize the stub in advance.
1671  __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
1672  FastCloneShallowArrayStub stub(
1673  FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
1674  length);
1675  __ CallStub(&stub);
1676  } else if (expr->depth() > 1) {
1677  __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
1678  } else if (Serializer::enabled() || length > FastCloneShallowArrayStub::kMaximumClonedLength) {
1679  __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
1680  } else {
1681  ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
1682  FLAG_smi_only_arrays);
1683  // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
1684  // change, so it's possible to specialize the stub in advance.
1685  FastCloneShallowArrayStub::Mode mode = has_constant_fast_elements
1686  ? FastCloneShallowArrayStub::CLONE_ELEMENTS
1687  : FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
1688  FastCloneShallowArrayStub stub(mode, length);
1689  __ CallStub(&stub);
1690  }
1691 
1692  bool result_saved = false; // Is the result saved to the stack?
1693 
1694  // Emit code to evaluate all the non-constant subexpressions and to store
1695  // them into the newly cloned array.
1696  for (int i = 0; i < length; i++) {
1697  Expression* subexpr = subexprs->at(i);
1698  // If the subexpression is a literal or a simple materialized literal it
1699  // is already set in the cloned array.
1700  if (subexpr->AsLiteral() != NULL ||
1701  CompileTimeValue::IsCompileTimeValue(subexpr)) {
1702  continue;
1703  }
1704 
1705  if (!result_saved) {
1706  __ push(eax);
1707  result_saved = true;
1708  }
1709  VisitForAccumulatorValue(subexpr);
1710 
1711  if (IsFastObjectElementsKind(constant_elements_kind)) {
1712  // Fast-case array literal with ElementsKind of FAST_*_ELEMENTS, they
1713  // cannot transition and don't need to call the runtime stub.
1714  int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1715  __ mov(ebx, Operand(esp, 0)); // Copy of array literal.
1716  __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
1717  // Store the subexpression value in the array's elements.
1718  __ mov(FieldOperand(ebx, offset), result_register());
1719  // Update the write barrier for the array store.
1720  __ RecordWriteField(ebx, offset, result_register(), ecx,
1721  kDontSaveFPRegs,
1722  EMIT_REMEMBERED_SET,
1723  INLINE_SMI_CHECK);
1724  } else {
1725  // Store the subexpression value in the array's elements.
1726  __ mov(ebx, Operand(esp, 0)); // Copy of array literal.
1727  __ mov(edi, FieldOperand(ebx, JSObject::kMapOffset));
1728  __ mov(ecx, Immediate(Smi::FromInt(i)));
1729  __ mov(edx, Immediate(Smi::FromInt(expr->literal_index())));
1730  StoreArrayLiteralElementStub stub;
1731  __ CallStub(&stub);
1732  }
1733 
1734  PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1735  }
1736 
1737  if (result_saved) {
1738  context()->PlugTOS();
1739  } else {
1740  context()->Plug(eax);
1741  }
1742 }
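// Illustrative note (not part of the original source): for a literal such as
//   var a = [1, 2, f()];
// the constant prefix comes from the cloned boilerplate, so the loop above
// evaluates and stores only the non-constant subexpression f(), using a
// direct store plus write barrier for fast object elements and the
// StoreArrayLiteralElementStub otherwise.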
1743 
1744 
1745 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1746  Comment cmnt(masm_, "[ Assignment");
1747  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
1748  // on the left-hand side.
1749  if (!expr->target()->IsValidLeftHandSide()) {
1750  VisitForEffect(expr->target());
1751  return;
1752  }
1753 
1754  // Left-hand side can only be a property, a global or a (parameter or local)
1755  // slot.
1756  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1757  LhsKind assign_type = VARIABLE;
1758  Property* property = expr->target()->AsProperty();
1759  if (property != NULL) {
1760  assign_type = (property->key()->IsPropertyName())
1761  ? NAMED_PROPERTY
1762  : KEYED_PROPERTY;
1763  }
1764 
1765  // Evaluate LHS expression.
1766  switch (assign_type) {
1767  case VARIABLE:
1768  // Nothing to do here.
1769  break;
1770  case NAMED_PROPERTY:
1771  if (expr->is_compound()) {
1772  // We need the receiver both on the stack and in edx.
1773  VisitForStackValue(property->obj());
1774  __ mov(edx, Operand(esp, 0));
1775  } else {
1776  VisitForStackValue(property->obj());
1777  }
1778  break;
1779  case KEYED_PROPERTY: {
1780  if (expr->is_compound()) {
1781  VisitForStackValue(property->obj());
1782  VisitForStackValue(property->key());
1783  __ mov(edx, Operand(esp, kPointerSize)); // Object.
1784  __ mov(ecx, Operand(esp, 0)); // Key.
1785  } else {
1786  VisitForStackValue(property->obj());
1787  VisitForStackValue(property->key());
1788  }
1789  break;
1790  }
1791  }
1792 
1793  // For compound assignments we need another deoptimization point after the
1794  // variable/property load.
1795  if (expr->is_compound()) {
1796  AccumulatorValueContext result_context(this);
1797  { AccumulatorValueContext left_operand_context(this);
1798  switch (assign_type) {
1799  case VARIABLE:
1800  EmitVariableLoad(expr->target()->AsVariableProxy());
1801  PrepareForBailout(expr->target(), TOS_REG);
1802  break;
1803  case NAMED_PROPERTY:
1804  EmitNamedPropertyLoad(property);
1805  PrepareForBailoutForId(property->LoadId(), TOS_REG);
1806  break;
1807  case KEYED_PROPERTY:
1808  EmitKeyedPropertyLoad(property);
1809  PrepareForBailoutForId(property->LoadId(), TOS_REG);
1810  break;
1811  }
1812  }
1813 
1814  Token::Value op = expr->binary_op();
1815  __ push(eax); // Left operand goes on the stack.
1816  VisitForAccumulatorValue(expr->value());
1817 
1818  OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1819  ? OVERWRITE_RIGHT
1820  : NO_OVERWRITE;
1821  SetSourcePosition(expr->position() + 1);
1822  if (ShouldInlineSmiCase(op)) {
1823  EmitInlineSmiBinaryOp(expr->binary_operation(),
1824  op,
1825  mode,
1826  expr->target(),
1827  expr->value());
1828  } else {
1829  EmitBinaryOp(expr->binary_operation(), op, mode);
1830  }
1831 
1832  // Deoptimization point in case the binary operation may have side effects.
1833  PrepareForBailout(expr->binary_operation(), TOS_REG);
1834  } else {
1835  VisitForAccumulatorValue(expr->value());
1836  }
1837 
1838  // Record source position before possible IC call.
1839  SetSourcePosition(expr->position());
1840 
1841  // Store the value.
1842  switch (assign_type) {
1843  case VARIABLE:
1844  EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1845  expr->op());
1846  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1847  context()->Plug(eax);
1848  break;
1849  case NAMED_PROPERTY:
1850  EmitNamedPropertyAssignment(expr);
1851  break;
1852  case KEYED_PROPERTY:
1853  EmitKeyedPropertyAssignment(expr);
1854  break;
1855  }
1856 }
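// Illustrative walkthrough (not part of the original source): for the
// compound assignment
//   o.x += 1;
// the receiver is pushed and duplicated into edx, the property is loaded,
// the loaded value is pushed as the left operand, the right-hand side is
// evaluated into eax, the addition runs inline (smi fast path) or through
// BinaryOpStub, and the result is stored back via the named store IC.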
1857 
1858 
1859 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
1860  SetSourcePosition(prop->position());
1861  Literal* key = prop->key()->AsLiteral();
1862  ASSERT(!key->handle()->IsSmi());
1863  __ mov(ecx, Immediate(key->handle()));
1864  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1865  CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
1866 }
1867 
1868 
1869 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
1870  SetSourcePosition(prop->position());
1871  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
1872  CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
1873 }
1874 
1875 
1876 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1877  Token::Value op,
1878  OverwriteMode mode,
1879  Expression* left,
1880  Expression* right) {
1881  // Do combined smi check of the operands. Left operand is on the
1882  // stack. Right operand is in eax.
1883  Label smi_case, done, stub_call;
1884  __ pop(edx);
1885  __ mov(ecx, eax);
1886  __ or_(eax, edx);
1887  JumpPatchSite patch_site(masm_);
1888  patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);
1889 
1890  __ bind(&stub_call);
1891  __ mov(eax, ecx);
1892  BinaryOpStub stub(op, mode);
1893  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
1894  expr->BinaryOperationFeedbackId());
1895  patch_site.EmitPatchInfo();
1896  __ jmp(&done, Label::kNear);
1897 
1898  // Smi case.
1899  __ bind(&smi_case);
1900  __ mov(eax, edx); // Copy left operand in case of a stub call.
1901 
1902  switch (op) {
1903  case Token::SAR:
1904  __ SmiUntag(eax);
1905  __ SmiUntag(ecx);
1906  __ sar_cl(eax); // No checks of result necessary
1907  __ SmiTag(eax);
1908  break;
1909  case Token::SHL: {
1910  Label result_ok;
1911  __ SmiUntag(eax);
1912  __ SmiUntag(ecx);
1913  __ shl_cl(eax);
1914  // Check that the *signed* result fits in a smi.
1915  __ cmp(eax, 0xc0000000);
1916  __ j(positive, &result_ok);
1917  __ SmiTag(ecx);
1918  __ jmp(&stub_call);
1919  __ bind(&result_ok);
1920  __ SmiTag(eax);
1921  break;
1922  }
1923  case Token::SHR: {
1924  Label result_ok;
1925  __ SmiUntag(eax);
1926  __ SmiUntag(ecx);
1927  __ shr_cl(eax);
1928  __ test(eax, Immediate(0xc0000000));
1929  __ j(zero, &result_ok);
1930  __ SmiTag(ecx);
1931  __ jmp(&stub_call);
1932  __ bind(&result_ok);
1933  __ SmiTag(eax);
1934  break;
1935  }
1936  case Token::ADD:
1937  __ add(eax, ecx);
1938  __ j(overflow, &stub_call);
1939  break;
1940  case Token::SUB:
1941  __ sub(eax, ecx);
1942  __ j(overflow, &stub_call);
1943  break;
1944  case Token::MUL: {
1945  __ SmiUntag(eax);
1946  __ imul(eax, ecx);
1947  __ j(overflow, &stub_call);
1948  __ test(eax, eax);
1949  __ j(not_zero, &done, Label::kNear);
1950  __ mov(ebx, edx);
1951  __ or_(ebx, ecx);
1952  __ j(negative, &stub_call);
1953  break;
1954  }
1955  case Token::BIT_OR:
1956  __ or_(eax, ecx);
1957  break;
1958  case Token::BIT_AND:
1959  __ and_(eax, ecx);
1960  break;
1961  case Token::BIT_XOR:
1962  __ xor_(eax, ecx);
1963  break;
1964  default:
1965  UNREACHABLE();
1966  }
1967 
1968  __ bind(&done);
1969  context()->Plug(eax);
1970 }
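// Note on the SHL range check above (editorial illustration): comparing the
// shifted value against 0xc0000000 leaves the sign flag clear exactly when
// the top two bits of the result agree (both 0 or both 1), i.e. when the
// value survives smi retagging. For example, 1 << 30 = 0x40000000 fails the
// check and is redone in the stub, which can allocate a heap number.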
1971 
1972 
1973 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
1974  Token::Value op,
1975  OverwriteMode mode) {
1976  __ pop(edx);
1977  BinaryOpStub stub(op, mode);
1978  JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
1979  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
1980  expr->BinaryOperationFeedbackId());
1981  patch_site.EmitPatchInfo();
1982  context()->Plug(eax);
1983 }
1984 
1985 
1986 void FullCodeGenerator::EmitAssignment(Expression* expr) {
1987  // Invalid left-hand sides are rewritten to have a 'throw
1988  // ReferenceError' on the left-hand side.
1989  if (!expr->IsValidLeftHandSide()) {
1990  VisitForEffect(expr);
1991  return;
1992  }
1993 
1994  // Left-hand side can only be a property, a global or a (parameter or local)
1995  // slot.
1996  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1997  LhsKind assign_type = VARIABLE;
1998  Property* prop = expr->AsProperty();
1999  if (prop != NULL) {
2000  assign_type = (prop->key()->IsPropertyName())
2001  ? NAMED_PROPERTY
2002  : KEYED_PROPERTY;
2003  }
2004 
2005  switch (assign_type) {
2006  case VARIABLE: {
2007  Variable* var = expr->AsVariableProxy()->var();
2008  EffectContext context(this);
2009  EmitVariableAssignment(var, Token::ASSIGN);
2010  break;
2011  }
2012  case NAMED_PROPERTY: {
2013  __ push(eax); // Preserve value.
2014  VisitForAccumulatorValue(prop->obj());
2015  __ mov(edx, eax);
2016  __ pop(eax); // Restore value.
2017  __ mov(ecx, prop->key()->AsLiteral()->handle());
2018  Handle<Code> ic = is_classic_mode()
2019  ? isolate()->builtins()->StoreIC_Initialize()
2020  : isolate()->builtins()->StoreIC_Initialize_Strict();
2021  CallIC(ic);
2022  break;
2023  }
2024  case KEYED_PROPERTY: {
2025  __ push(eax); // Preserve value.
2026  VisitForStackValue(prop->obj());
2027  VisitForAccumulatorValue(prop->key());
2028  __ mov(ecx, eax);
2029  __ pop(edx); // Receiver.
2030  __ pop(eax); // Restore value.
2031  Handle<Code> ic = is_classic_mode()
2032  ? isolate()->builtins()->KeyedStoreIC_Initialize()
2033  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2034  CallIC(ic);
2035  break;
2036  }
2037  }
2038  context()->Plug(eax);
2039 }
2040 
2041 
2042 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2043  Token::Value op) {
2044  if (var->IsUnallocated()) {
2045  // Global var, const, or let.
2046  __ mov(ecx, var->name());
2047  __ mov(edx, GlobalObjectOperand());
2048  Handle<Code> ic = is_classic_mode()
2049  ? isolate()->builtins()->StoreIC_Initialize()
2050  : isolate()->builtins()->StoreIC_Initialize_Strict();
2051  CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
2052 
2053  } else if (op == Token::INIT_CONST) {
2054  // Const initializers need a write barrier.
2055  ASSERT(!var->IsParameter()); // No const parameters.
2056  if (var->IsStackLocal()) {
2057  Label skip;
2058  __ mov(edx, StackOperand(var));
2059  __ cmp(edx, isolate()->factory()->the_hole_value());
2060  __ j(not_equal, &skip);
2061  __ mov(StackOperand(var), eax);
2062  __ bind(&skip);
2063  } else {
2064  ASSERT(var->IsContextSlot() || var->IsLookupSlot());
2065  // Like var declarations, const declarations are hoisted to function
2066  // scope. However, unlike var initializers, const initializers are
2067  // able to drill a hole to that function context, even from inside a
2068  // 'with' context. We thus bypass the normal static scope lookup for
2069  // var->IsContextSlot().
2070  __ push(eax);
2071  __ push(esi);
2072  __ push(Immediate(var->name()));
2073  __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
2074  }
2075 
2076  } else if (var->mode() == LET && op != Token::INIT_LET) {
2077  // Non-initializing assignment to let variable needs a write barrier.
2078  if (var->IsLookupSlot()) {
2079  __ push(eax); // Value.
2080  __ push(esi); // Context.
2081  __ push(Immediate(var->name()));
2082  __ push(Immediate(Smi::FromInt(language_mode())));
2083  __ CallRuntime(Runtime::kStoreContextSlot, 4);
2084  } else {
2085  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2086  Label assign;
2087  MemOperand location = VarOperand(var, ecx);
2088  __ mov(edx, location);
2089  __ cmp(edx, isolate()->factory()->the_hole_value());
2090  __ j(not_equal, &assign, Label::kNear);
2091  __ push(Immediate(var->name()));
2092  __ CallRuntime(Runtime::kThrowReferenceError, 1);
2093  __ bind(&assign);
2094  __ mov(location, eax);
2095  if (var->IsContextSlot()) {
2096  __ mov(edx, eax);
2097  int offset = Context::SlotOffset(var->index());
2098  __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
2099  }
2100  }
2101 
2102  } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
2103  // Assignment to var or initializing assignment to let/const
2104  // in harmony mode.
2105  if (var->IsStackAllocated() || var->IsContextSlot()) {
2106  MemOperand location = VarOperand(var, ecx);
2107  if (generate_debug_code_ && op == Token::INIT_LET) {
2108  // Check for an uninitialized let binding.
2109  __ mov(edx, location);
2110  __ cmp(edx, isolate()->factory()->the_hole_value());
2111  __ Check(equal, "Let binding re-initialization.");
2112  }
2113  // Perform the assignment.
2114  __ mov(location, eax);
2115  if (var->IsContextSlot()) {
2116  __ mov(edx, eax);
2117  int offset = Context::SlotOffset(var->index());
2118  __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
2119  }
2120  } else {
2121  ASSERT(var->IsLookupSlot());
2122  __ push(eax); // Value.
2123  __ push(esi); // Context.
2124  __ push(Immediate(var->name()));
2125  __ push(Immediate(Smi::FromInt(language_mode())));
2126  __ CallRuntime(Runtime::kStoreContextSlot, 4);
2127  }
2128  }
2129  // Non-initializing assignments to consts are ignored.
2130 }
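// Illustrative example (not part of the original source): the hole checks
// above are what make
//   { x = 1; let x; }
// throw a ReferenceError in extended mode: until the INIT_LET store runs,
// the slot still contains the hole value.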
2131 
2132 
2133 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2134  // Assignment to a property, using a named store IC.
2135  // eax : value
2136  // esp[0] : receiver
2137 
2138  Property* prop = expr->target()->AsProperty();
2139  ASSERT(prop != NULL);
2140  ASSERT(prop->key()->AsLiteral() != NULL);
2141 
2142  // Record source code position before IC call.
2143  SetSourcePosition(expr->position());
2144  __ mov(ecx, prop->key()->AsLiteral()->handle());
2145  __ pop(edx);
2146  Handle<Code> ic = is_classic_mode()
2147  ? isolate()->builtins()->StoreIC_Initialize()
2148  : isolate()->builtins()->StoreIC_Initialize_Strict();
2149  CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());
2150 
2151  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2152  context()->Plug(eax);
2153 }
2154 
2155 
2156 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2157  // Assignment to a property, using a keyed store IC.
2158  // eax : value
2159  // esp[0] : key
2160  // esp[kPointerSize] : receiver
2161 
2162  __ pop(ecx); // Key.
2163  __ pop(edx);
2164  // Record source code position before IC call.
2165  SetSourcePosition(expr->position());
2166  Handle<Code> ic = is_classic_mode()
2167  ? isolate()->builtins()->KeyedStoreIC_Initialize()
2168  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2169  CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId());
2170 
2171  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2172  context()->Plug(eax);
2173 }
2174 
2175 
2176 void FullCodeGenerator::VisitProperty(Property* expr) {
2177  Comment cmnt(masm_, "[ Property");
2178  Expression* key = expr->key();
2179 
2180  if (key->IsPropertyName()) {
2181  VisitForAccumulatorValue(expr->obj());
2182  __ mov(edx, result_register());
2183  EmitNamedPropertyLoad(expr);
2184  PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2185  context()->Plug(eax);
2186  } else {
2187  VisitForStackValue(expr->obj());
2188  VisitForAccumulatorValue(expr->key());
2189  __ pop(edx); // Object.
2190  __ mov(ecx, result_register()); // Key.
2191  EmitKeyedPropertyLoad(expr);
2192  context()->Plug(eax);
2193  }
2194 }
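// Register convention recap (derived from the code above): named loads take
// the receiver in edx and the name in ecx; keyed loads take the receiver in
// edx and the key in ecx; both return the result in eax.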
2195 
2196 
2197 void FullCodeGenerator::CallIC(Handle<Code> code,
2198  RelocInfo::Mode rmode,
2199  TypeFeedbackId ast_id) {
2200  ic_total_count_++;
2201  __ call(code, rmode, ast_id);
2202 }
2203 
2204 
2205 
2206 
2207 void FullCodeGenerator::EmitCallWithIC(Call* expr,
2208  Handle<Object> name,
2209  RelocInfo::Mode mode) {
2210  // Code common for calls using the IC.
2211  ZoneList<Expression*>* args = expr->arguments();
2212  int arg_count = args->length();
2213  { PreservePositionScope scope(masm()->positions_recorder());
2214  for (int i = 0; i < arg_count; i++) {
2215  VisitForStackValue(args->at(i));
2216  }
2217  __ Set(ecx, Immediate(name));
2218  }
2219  // Record source position of the IC call.
2220  SetSourcePosition(expr->position());
2221  Handle<Code> ic =
2222  isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
2223  CallIC(ic, mode, expr->CallFeedbackId());
2224  RecordJSReturnSite(expr);
2225  // Restore context register.
2226  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2227  context()->Plug(eax);
2228 }
2229 
2230 
2231 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2232  Expression* key) {
2233  // Load the key.
2234  VisitForAccumulatorValue(key);
2235 
2236  // Swap the name of the function and the receiver on the stack to follow
2237  // the calling convention for call ICs.
2238  __ pop(ecx);
2239  __ push(eax);
2240  __ push(ecx);
2241 
2242  // Load the arguments.
2243  ZoneList<Expression*>* args = expr->arguments();
2244  int arg_count = args->length();
2245  { PreservePositionScope scope(masm()->positions_recorder());
2246  for (int i = 0; i < arg_count; i++) {
2247  VisitForStackValue(args->at(i));
2248  }
2249  }
2250  // Record source position of the IC call.
2251  SetSourcePosition(expr->position());
2252  Handle<Code> ic =
2253  isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
2254  __ mov(ecx, Operand(esp, (arg_count + 1) * kPointerSize)); // Key.
2255  CallIC(ic, RelocInfo::CODE_TARGET, expr->CallFeedbackId());
2256  RecordJSReturnSite(expr);
2257  // Restore context register.
2258  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2259  context()->DropAndPlug(1, eax); // Drop the key still on the stack.
2260 }
2261 
2262 
2263 void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
2264  // Code common for calls using the call stub.
2265  ZoneList<Expression*>* args = expr->arguments();
2266  int arg_count = args->length();
2267  { PreservePositionScope scope(masm()->positions_recorder());
2268  for (int i = 0; i < arg_count; i++) {
2269  VisitForStackValue(args->at(i));
2270  }
2271  }
2272  // Record source position for debugger.
2273  SetSourcePosition(expr->position());
2274 
2275  // Record call targets in unoptimized code.
2276  flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
2277  Handle<Object> uninitialized =
2278  TypeFeedbackCells::UninitializedSentinel(isolate());
2279  Handle<JSGlobalPropertyCell> cell =
2280  isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
2281  RecordTypeFeedbackCell(expr->CallFeedbackId(), cell);
2282  __ mov(ebx, cell);
2283 
2284  CallFunctionStub stub(arg_count, flags);
2285  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
2286  __ CallStub(&stub, expr->CallFeedbackId());
2287 
2288  RecordJSReturnSite(expr);
2289  // Restore context register.
2290  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2291  context()->DropAndPlug(1, eax);
2292 }
2293 
2294 
2295 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2296  // Push copy of the first argument or undefined if it doesn't exist.
2297  if (arg_count > 0) {
2298  __ push(Operand(esp, arg_count * kPointerSize));
2299  } else {
2300  __ push(Immediate(isolate()->factory()->undefined_value()));
2301  }
2302 
2303  // Push the receiver of the enclosing function.
2304  __ push(Operand(ebp, (2 + info_->scope()->num_parameters()) * kPointerSize));
2305  // Push the language mode.
2306  __ push(Immediate(Smi::FromInt(language_mode())));
2307 
2308  // Push the start position of the scope the call resides in.
2309  __ push(Immediate(Smi::FromInt(scope()->start_position())));
2310 
2311  // Do the runtime call.
2312  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2313 }
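// For reference (derived from the pushes above plus the caller's copy of
// the function): %ResolvePossiblyDirectEval receives five arguments: the
// possible eval function, a copy of the first call argument (or undefined),
// the enclosing receiver, the current language mode, and the start position
// of the surrounding scope.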
2314 
2315 
2316 void FullCodeGenerator::VisitCall(Call* expr) {
2317 #ifdef DEBUG
2318  // We want to verify that RecordJSReturnSite gets called on all paths
2319  // through this function. Avoid early returns.
2320  expr->return_is_recorded_ = false;
2321 #endif
2322 
2323  Comment cmnt(masm_, "[ Call");
2324  Expression* callee = expr->expression();
2325  VariableProxy* proxy = callee->AsVariableProxy();
2326  Property* property = callee->AsProperty();
2327 
2328  if (proxy != NULL && proxy->var()->is_possibly_eval()) {
2329  // In a call to eval, we first call %ResolvePossiblyDirectEval to
2330  // resolve the function we need to call and the receiver of the call.
2331  // Then we call the resolved function using the given arguments.
2332  ZoneList<Expression*>* args = expr->arguments();
2333  int arg_count = args->length();
2334  { PreservePositionScope pos_scope(masm()->positions_recorder());
2335  VisitForStackValue(callee);
2336  // Reserved receiver slot.
2337  __ push(Immediate(isolate()->factory()->undefined_value()));
2338  // Push the arguments.
2339  for (int i = 0; i < arg_count; i++) {
2340  VisitForStackValue(args->at(i));
2341  }
2342 
2343  // Push a copy of the function (found below the arguments) and
2344  // resolve eval.
2345  __ push(Operand(esp, (arg_count + 1) * kPointerSize));
2346  EmitResolvePossiblyDirectEval(arg_count);
2347 
2348  // The runtime call returns a pair of values in eax (function) and
2349  // edx (receiver). Touch up the stack with the right values.
2350  __ mov(Operand(esp, (arg_count + 0) * kPointerSize), edx);
2351  __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);
2352  }
2353  // Record source position for debugger.
2354  SetSourcePosition(expr->position());
2355  CallFunctionStub stub(arg_count, RECEIVER_MIGHT_BE_IMPLICIT);
2356  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
2357  __ CallStub(&stub);
2358  RecordJSReturnSite(expr);
2359  // Restore context register.
2360  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2361  context()->DropAndPlug(1, eax);
2362 
2363  } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
2364  // Push global object as receiver for the call IC.
2365  __ push(GlobalObjectOperand());
2366  EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
2367 
2368  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
2369  // Call to a lookup slot (dynamically introduced variable).
2370  Label slow, done;
2371  { PreservePositionScope scope(masm()->positions_recorder());
2372  // Generate code for loading from variables potentially shadowed by
2373  // eval-introduced variables.
2374  EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
2375  }
2376  __ bind(&slow);
2377  // Call the runtime to find the function to call (returned in eax) and
2378  // the object holding it (returned in edx).
2379  __ push(context_register());
2380  __ push(Immediate(proxy->name()));
2381  __ CallRuntime(Runtime::kLoadContextSlot, 2);
2382  __ push(eax); // Function.
2383  __ push(edx); // Receiver.
2384 
2385  // If fast case code has been generated, emit code to push the function
2386  // and receiver and have the slow path jump around this code.
2387  if (done.is_linked()) {
2388  Label call;
2389  __ jmp(&call, Label::kNear);
2390  __ bind(&done);
2391  // Push function.
2392  __ push(eax);
2393  // The receiver is implicitly the global receiver. Indicate this by
2394  // passing the hole to the call function stub.
2395  __ push(Immediate(isolate()->factory()->the_hole_value()));
2396  __ bind(&call);
2397  }
2398 
2399  // The receiver is either the global receiver or an object found by
2400  // LoadContextSlot. That object could be the hole if the receiver is
2401  // implicitly the global object.
2402  EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
2403 
2404  } else if (property != NULL) {
2405  { PreservePositionScope scope(masm()->positions_recorder());
2406  VisitForStackValue(property->obj());
2407  }
2408  if (property->key()->IsPropertyName()) {
2409  EmitCallWithIC(expr,
2410  property->key()->AsLiteral()->handle(),
2411  RelocInfo::CODE_TARGET);
2412  } else {
2413  EmitKeyedCallWithIC(expr, property->key());
2414  }
2415 
2416  } else {
2417  // Call to an arbitrary expression not handled specially above.
2418  { PreservePositionScope scope(masm()->positions_recorder());
2419  VisitForStackValue(callee);
2420  }
2421  // Load global receiver object.
2422  __ mov(ebx, GlobalObjectOperand());
2423  __ push(FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset));
2424  // Emit function call.
2425  EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
2426  }
2427 
2428 #ifdef DEBUG
2429  // RecordJSReturnSite should have been called.
2430  ASSERT(expr->return_is_recorded_);
2431 #endif
2432 }
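// Dispatch summary (editorial): a possible direct eval resolves its target
// at runtime; an unallocated (global) variable uses a call IC with the
// global object as receiver; a lookup slot goes through %LoadContextSlot
// with a fast-case bypass; property callees use named or keyed call ICs;
// anything else is called through CallFunctionStub with the global receiver.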
2433 
2434 
2435 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2436  Comment cmnt(masm_, "[ CallNew");
2437  // According to ECMA-262, section 11.2.2, page 44, the function
2438  // expression in new calls must be evaluated before the
2439  // arguments.
2440 
2441  // Push constructor on the stack. If it's not a function it's used as
2442  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2443  // ignored.
2444  VisitForStackValue(expr->expression());
2445 
2446  // Push the arguments ("left-to-right") on the stack.
2447  ZoneList<Expression*>* args = expr->arguments();
2448  int arg_count = args->length();
2449  for (int i = 0; i < arg_count; i++) {
2450  VisitForStackValue(args->at(i));
2451  }
2452 
2453  // Call the construct call builtin that handles allocation and
2454  // constructor invocation.
2455  SetSourcePosition(expr->position());
2456 
2457  // Load function and argument count into edi and eax.
2458  __ Set(eax, Immediate(arg_count));
2459  __ mov(edi, Operand(esp, arg_count * kPointerSize));
2460 
2461  // Record call targets in unoptimized code.
2462  Handle<Object> uninitialized =
2463  TypeFeedbackCells::UninitializedSentinel(isolate());
2464  Handle<JSGlobalPropertyCell> cell =
2465  isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
2466  RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell);
2467  __ mov(ebx, cell);
2468 
2469  CallConstructStub stub(RECORD_CALL_TARGET);
2470  __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2471  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2472  context()->Plug(eax);
2473 }
2474 
2475 
2476 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2477  ZoneList<Expression*>* args = expr->arguments();
2478  ASSERT(args->length() == 1);
2479 
2480  VisitForAccumulatorValue(args->at(0));
2481 
2482  Label materialize_true, materialize_false;
2483  Label* if_true = NULL;
2484  Label* if_false = NULL;
2485  Label* fall_through = NULL;
2486  context()->PrepareTest(&materialize_true, &materialize_false,
2487  &if_true, &if_false, &fall_through);
2488 
2489  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2490  __ test(eax, Immediate(kSmiTagMask));
2491  Split(zero, if_true, if_false, fall_through);
2492 
2493  context()->Plug(if_true, if_false);
2494 }
2495 
2496 
2497 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2498  ZoneList<Expression*>* args = expr->arguments();
2499  ASSERT(args->length() == 1);
2500 
2501  VisitForAccumulatorValue(args->at(0));
2502 
2503  Label materialize_true, materialize_false;
2504  Label* if_true = NULL;
2505  Label* if_false = NULL;
2506  Label* fall_through = NULL;
2507  context()->PrepareTest(&materialize_true, &materialize_false,
2508  &if_true, &if_false, &fall_through);
2509 
2510  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2511  __ test(eax, Immediate(kSmiTagMask | 0x80000000));
2512  Split(zero, if_true, if_false, fall_through);
2513 
2514  context()->Plug(if_true, if_false);
2515 }
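// Note on the combined mask above (editorial): kSmiTagMask | 0x80000000
// tests the smi tag bit and the sign bit together, so a single test
// instruction accepts exactly the non-negative smis.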
2516 
2517 
2518 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2519  ZoneList<Expression*>* args = expr->arguments();
2520  ASSERT(args->length() == 1);
2521 
2522  VisitForAccumulatorValue(args->at(0));
2523 
2524  Label materialize_true, materialize_false;
2525  Label* if_true = NULL;
2526  Label* if_false = NULL;
2527  Label* fall_through = NULL;
2528  context()->PrepareTest(&materialize_true, &materialize_false,
2529  &if_true, &if_false, &fall_through);
2530 
2531  __ JumpIfSmi(eax, if_false);
2532  __ cmp(eax, isolate()->factory()->null_value());
2533  __ j(equal, if_true);
2534  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
2535  // Undetectable objects behave like undefined when tested with typeof.
2536  __ movzx_b(ecx, FieldOperand(ebx, Map::kBitFieldOffset));
2537  __ test(ecx, Immediate(1 << Map::kIsUndetectable));
2538  __ j(not_zero, if_false);
2539  __ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceTypeOffset));
2540  __ cmp(ecx, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
2541  __ j(below, if_false);
2542  __ cmp(ecx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
2543  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2544  Split(below_equal, if_true, if_false, fall_through);
2545 
2546  context()->Plug(if_true, if_false);
2547 }
2548 
2549 
2550 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2551  ZoneList<Expression*>* args = expr->arguments();
2552  ASSERT(args->length() == 1);
2553 
2554  VisitForAccumulatorValue(args->at(0));
2555 
2556  Label materialize_true, materialize_false;
2557  Label* if_true = NULL;
2558  Label* if_false = NULL;
2559  Label* fall_through = NULL;
2560  context()->PrepareTest(&materialize_true, &materialize_false,
2561  &if_true, &if_false, &fall_through);
2562 
2563  __ JumpIfSmi(eax, if_false);
2564  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ebx);
2565  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2566  Split(above_equal, if_true, if_false, fall_through);
2567 
2568  context()->Plug(if_true, if_false);
2569 }
2570 
2571 
2572 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2573  ZoneList<Expression*>* args = expr->arguments();
2574  ASSERT(args->length() == 1);
2575 
2576  VisitForAccumulatorValue(args->at(0));
2577 
2578  Label materialize_true, materialize_false;
2579  Label* if_true = NULL;
2580  Label* if_false = NULL;
2581  Label* fall_through = NULL;
2582  context()->PrepareTest(&materialize_true, &materialize_false,
2583  &if_true, &if_false, &fall_through);
2584 
2585  __ JumpIfSmi(eax, if_false);
2586  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
2587  __ movzx_b(ebx, FieldOperand(ebx, Map::kBitFieldOffset));
2588  __ test(ebx, Immediate(1 << Map::kIsUndetectable));
2589  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2590  Split(not_zero, if_true, if_false, fall_through);
2591 
2592  context()->Plug(if_true, if_false);
2593 }
2594 
2595 
2596 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2597  CallRuntime* expr) {
2598  ZoneList<Expression*>* args = expr->arguments();
2599  ASSERT(args->length() == 1);
2600 
2601  VisitForAccumulatorValue(args->at(0));
2602 
2603  Label materialize_true, materialize_false;
2604  Label* if_true = NULL;
2605  Label* if_false = NULL;
2606  Label* fall_through = NULL;
2607  context()->PrepareTest(&materialize_true, &materialize_false,
2608  &if_true, &if_false, &fall_through);
2609 
2610  __ AssertNotSmi(eax);
2611 
2612  // Check whether this map has already been checked to be safe for default
2613  // valueOf.
2614  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
2615  __ test_b(FieldOperand(ebx, Map::kBitField2Offset),
2616  1 << Map::kStringWrapperSafeForDefaultValueOf);
2617  __ j(not_zero, if_true);
2618 
2619  // Check for fast case object. Return false for slow case objects.
2620  __ mov(ecx, FieldOperand(eax, JSObject::kPropertiesOffset));
2621  __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
2622  __ cmp(ecx, FACTORY->hash_table_map());
2623  __ j(equal, if_false);
2624 
2625  // Look for valueOf symbol in the descriptor array, and indicate false if
2626  // found. Since we omit an enumeration index check, if it is added via a
2627  // transition that shares its descriptor array, this is a false positive.
2628  Label entry, loop, done;
2629 
2630  // Skip loop if no descriptors are valid.
2631  __ NumberOfOwnDescriptors(ecx, ebx);
2632  __ cmp(ecx, 0);
2633  __ j(equal, &done);
2634 
2635  __ LoadInstanceDescriptors(ebx, ebx);
2636  // ebx: descriptor array.
2637  // ecx: valid entries in the descriptor array.
2638  // Calculate the end of the descriptor array.
2639  STATIC_ASSERT(kSmiTag == 0);
2640  STATIC_ASSERT(kSmiTagSize == 1);
2641  STATIC_ASSERT(kPointerSize == 4);
2642  __ imul(ecx, ecx, DescriptorArray::kDescriptorSize);
2643  __ lea(ecx, Operand(ebx, ecx, times_2, DescriptorArray::kFirstOffset));
2644  // Calculate location of the first key name.
2645  __ add(ebx, Immediate(DescriptorArray::kFirstOffset));
2646  // Loop through all the keys in the descriptor array. If one of these is the
2647  // symbol valueOf, the result is false.
2648  __ jmp(&entry);
2649  __ bind(&loop);
2650  __ mov(edx, FieldOperand(ebx, 0));
2651  __ cmp(edx, FACTORY->value_of_symbol());
2652  __ j(equal, if_false);
2653  __ add(ebx, Immediate(DescriptorArray::kDescriptorSize * kPointerSize));
2654  __ bind(&entry);
2655  __ cmp(ebx, ecx);
2656  __ j(not_equal, &loop);
2657 
2658  __ bind(&done);
2659 
2660  // Reload map as register ebx was used as temporary above.
2661  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
2662 
2663  // If a valueOf property is not found on the object check that its
2664  // prototype is the un-modified String prototype. If not, the result is false.
2665  __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
2666  __ JumpIfSmi(ecx, if_false);
2667  __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
2668  __ mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2669  __ mov(edx,
2670  FieldOperand(edx, GlobalObject::kNativeContextOffset));
2671  __ cmp(ecx,
2672  ContextOperand(edx,
2673  Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
2674  __ j(not_equal, if_false);
2675  // Set the bit in the map to indicate that it has been checked safe for
2676  // default valueOf and set true result.
2677  __ or_(FieldOperand(ebx, Map::kBitField2Offset),
2678  Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
2679  __ jmp(if_true);
2680 
2681  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2682  context()->Plug(if_true, if_false);
2683 }
2684 
2685 
2686 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
2687  ZoneList<Expression*>* args = expr->arguments();
2688  ASSERT(args->length() == 1);
2689 
2690  VisitForAccumulatorValue(args->at(0));
2691 
2692  Label materialize_true, materialize_false;
2693  Label* if_true = NULL;
2694  Label* if_false = NULL;
2695  Label* fall_through = NULL;
2696  context()->PrepareTest(&materialize_true, &materialize_false,
2697  &if_true, &if_false, &fall_through);
2698 
2699  __ JumpIfSmi(eax, if_false);
2700  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
2701  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2702  Split(equal, if_true, if_false, fall_through);
2703 
2704  context()->Plug(if_true, if_false);
2705 }
2706 
2707 
2708 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2709  ZoneList<Expression*>* args = expr->arguments();
2710  ASSERT(args->length() == 1);
2711 
2712  VisitForAccumulatorValue(args->at(0));
2713 
2714  Label materialize_true, materialize_false;
2715  Label* if_true = NULL;
2716  Label* if_false = NULL;
2717  Label* fall_through = NULL;
2718  context()->PrepareTest(&materialize_true, &materialize_false,
2719  &if_true, &if_false, &fall_through);
2720 
2721  __ JumpIfSmi(eax, if_false);
2722  __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
2723  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2724  Split(equal, if_true, if_false, fall_through);
2725 
2726  context()->Plug(if_true, if_false);
2727 }
2728 
2729 
2730 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2731  ZoneList<Expression*>* args = expr->arguments();
2732  ASSERT(args->length() == 1);
2733 
2734  VisitForAccumulatorValue(args->at(0));
2735 
2736  Label materialize_true, materialize_false;
2737  Label* if_true = NULL;
2738  Label* if_false = NULL;
2739  Label* fall_through = NULL;
2740  context()->PrepareTest(&materialize_true, &materialize_false,
2741  &if_true, &if_false, &fall_through);
2742 
2743  __ JumpIfSmi(eax, if_false);
2744  __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
2745  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2746  Split(equal, if_true, if_false, fall_through);
2747 
2748  context()->Plug(if_true, if_false);
2749 }
2750 
2751 
2752 
2753 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
2754  ASSERT(expr->arguments()->length() == 0);
2755 
2756  Label materialize_true, materialize_false;
2757  Label* if_true = NULL;
2758  Label* if_false = NULL;
2759  Label* fall_through = NULL;
2760  context()->PrepareTest(&materialize_true, &materialize_false,
2761  &if_true, &if_false, &fall_through);
2762 
2763  // Get the frame pointer for the calling frame.
2764  __ mov(eax, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2765 
2766  // Skip the arguments adaptor frame if it exists.
2767  Label check_frame_marker;
2768  __ cmp(Operand(eax, StandardFrameConstants::kContextOffset),
2769  Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2770  __ j(not_equal, &check_frame_marker);
2771  __ mov(eax, Operand(eax, StandardFrameConstants::kCallerFPOffset));
2772 
2773  // Check the marker in the calling frame.
2774  __ bind(&check_frame_marker);
2775  __ cmp(Operand(eax, StandardFrameConstants::kMarkerOffset),
2776  Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
2777  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2778  Split(equal, if_true, if_false, fall_through);
2779 
2780  context()->Plug(if_true, if_false);
2781 }
2782 
2783 
2784 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
2785  ZoneList<Expression*>* args = expr->arguments();
2786  ASSERT(args->length() == 2);
2787 
2788  // Load the two objects into registers and perform the comparison.
2789  VisitForStackValue(args->at(0));
2790  VisitForAccumulatorValue(args->at(1));
2791 
2792  Label materialize_true, materialize_false;
2793  Label* if_true = NULL;
2794  Label* if_false = NULL;
2795  Label* fall_through = NULL;
2796  context()->PrepareTest(&materialize_true, &materialize_false,
2797  &if_true, &if_false, &fall_through);
2798 
2799  __ pop(ebx);
2800  __ cmp(eax, ebx);
2801  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2802  Split(equal, if_true, if_false, fall_through);
2803 
2804  context()->Plug(if_true, if_false);
2805 }
2806 
2807 
2808 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
2809  ZoneList<Expression*>* args = expr->arguments();
2810  ASSERT(args->length() == 1);
2811 
2812  // ArgumentsAccessStub expects the key in edx and the formal
2813  // parameter count in eax.
2814  VisitForAccumulatorValue(args->at(0));
2815  __ mov(edx, eax);
2816  __ Set(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
2817  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
2818  __ CallStub(&stub);
2819  context()->Plug(eax);
2820 }
2821 
2822 
2823 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
2824  ASSERT(expr->arguments()->length() == 0);
2825 
2826  Label exit;
2827  // Get the number of formal parameters.
2828  __ Set(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
2829 
2830  // Check if the calling frame is an arguments adaptor frame.
2831  __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2832  __ cmp(Operand(ebx, StandardFrameConstants::kContextOffset),
2833  Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2834  __ j(not_equal, &exit);
2835 
2836  // Arguments adaptor case: Read the arguments length from the
2837  // adaptor frame.
2838  __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2839 
2840  __ bind(&exit);
2841  __ AssertSmi(eax);
2842  context()->Plug(eax);
2843 }
2844 
2845 
2846 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2847  ZoneList<Expression*>* args = expr->arguments();
2848  ASSERT(args->length() == 1);
2849  Label done, null, function, non_function_constructor;
2850 
2851  VisitForAccumulatorValue(args->at(0));
2852 
2853  // If the object is a smi, we return null.
2854  __ JumpIfSmi(eax, &null);
2855 
2856  // Check that the object is a JS object but take special care of JS
2857  // functions to make sure they have 'Function' as their class.
2858  // Assume that there are only two callable types, and one of them is at
2859  // either end of the type range for JS object types. Saves extra comparisons.
2860  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
2861  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, eax);
2862  // Map is now in eax.
2863  __ j(below, &null);
2864  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
2865  FIRST_SPEC_OBJECT_TYPE + 1);
2866  __ j(equal, &function);
2867 
2868  __ CmpInstanceType(eax, LAST_SPEC_OBJECT_TYPE);
2869  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
2870  LAST_SPEC_OBJECT_TYPE - 1);
2871  __ j(equal, &function);
2872  // Assume that there is no larger type.
2873  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
2874 
2875  // Check if the constructor in the map is a JS function.
2876  __ mov(eax, FieldOperand(eax, Map::kConstructorOffset));
2877  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
2878  __ j(not_equal, &non_function_constructor);
2879 
2880  // eax now contains the constructor function. Grab the
2881  // instance class name from there.
2882  __ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
2883  __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kInstanceClassNameOffset));
2884  __ jmp(&done);
2885 
2886  // Functions have class 'Function'.
2887  __ bind(&function);
2888  __ mov(eax, isolate()->factory()->function_class_symbol());
2889  __ jmp(&done);
2890 
2891  // Objects with a non-function constructor have class 'Object'.
2892  __ bind(&non_function_constructor);
2893  __ mov(eax, isolate()->factory()->Object_symbol());
2894  __ jmp(&done);
2895 
2896  // Non-JS objects have class null.
2897  __ bind(&null);
2898  __ mov(eax, isolate()->factory()->null_value());
2899 
2900  // All done.
2901  __ bind(&done);
2902 
2903  context()->Plug(eax);
2904 }
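// Illustrative results of the classification above: smis and non-objects
// map to null, callables at either end of the spec-object type range map to
// the 'Function' class, and other objects report their constructor's
// instance class name, or 'Object' when the constructor is not a function.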
2905 
2906 
2907 void FullCodeGenerator::EmitLog(CallRuntime* expr) {
2908  // Conditionally generate a log call.
2909  // Args:
2910  // 0 (literal string): The type of logging (corresponds to the flags).
2911  // This is used to determine whether or not to generate the log call.
2912  // 1 (string): Format string. Access the string at argument index 2
2913  // with '%2s' (see Logger::LogRuntime for all the formats).
2914  // 2 (array): Arguments to the format string.
2915  ZoneList<Expression*>* args = expr->arguments();
2916  ASSERT_EQ(args->length(), 3);
2917  if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
2918  VisitForStackValue(args->at(1));
2919  VisitForStackValue(args->at(2));
2920  __ CallRuntime(Runtime::kLog, 2);
2921  }
2922  // Finally, we're expected to leave a value on the top of the stack.
2923  __ mov(eax, isolate()->factory()->undefined_value());
2924  context()->Plug(eax);
2925 }
2926 
2927 
2928 void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
2929  ASSERT(expr->arguments()->length() == 0);
2930 
2931  Label slow_allocate_heapnumber;
2932  Label heapnumber_allocated;
2933 
2934  __ AllocateHeapNumber(edi, ebx, ecx, &slow_allocate_heapnumber);
2935  __ jmp(&heapnumber_allocated);
2936 
2937  __ bind(&slow_allocate_heapnumber);
2938  // Allocate a heap number.
2939  __ CallRuntime(Runtime::kNumberAlloc, 0);
2940  __ mov(edi, eax);
2941 
2942  __ bind(&heapnumber_allocated);
2943 
2944  __ PrepareCallCFunction(1, ebx);
2945  __ mov(eax, ContextOperand(context_register(), Context::GLOBAL_OBJECT_INDEX));
2946  __ mov(eax, FieldOperand(eax, GlobalObject::kNativeContextOffset));
2947  __ mov(Operand(esp, 0), eax);
2948  __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
2949 
2950  // Convert 32 random bits in eax to 0.(32 random bits) in a double
2951  // by computing:
2952  // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
2953  // This is implemented on both SSE2 and FPU.
2954  if (CpuFeatures::IsSupported(SSE2)) {
2955  CpuFeatures::Scope fscope(SSE2);
2956  __ mov(ebx, Immediate(0x49800000)); // 1.0 x 2^20 as single.
2957  __ movd(xmm1, ebx);
2958  __ movd(xmm0, eax);
2959  __ cvtss2sd(xmm1, xmm1);
2960  __ xorps(xmm0, xmm1);
2961  __ subsd(xmm0, xmm1);
2962  __ movdbl(FieldOperand(edi, HeapNumber::kValueOffset), xmm0);
2963  } else {
2964  // 0x4130000000000000 is 1.0 x 2^20 as a double.
2965  __ mov(FieldOperand(edi, HeapNumber::kExponentOffset),
2966  Immediate(0x41300000));
2967  __ mov(FieldOperand(edi, HeapNumber::kMantissaOffset), eax);
2968  __ fld_d(FieldOperand(edi, HeapNumber::kValueOffset));
2969  __ mov(FieldOperand(edi, HeapNumber::kMantissaOffset), Immediate(0));
2970  __ fld_d(FieldOperand(edi, HeapNumber::kValueOffset));
2971  __ fsubp(1);
2972  __ fstp_d(FieldOperand(edi, HeapNumber::kValueOffset));
2973  }
2974  __ mov(eax, edi);
2975  context()->Plug(eax);
2976 }
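// The bit trick above, spelled out (editorial): 2^20 as a double has an
// all-zero mantissa, so placing 32 random bits r in its low mantissa word
// yields 2^20 + r * 2^-32; subtracting 2^20 leaves r * 2^-32, a uniform
// value in [0, 1).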
2977 
2978 
2979 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
2980  // Load the arguments on the stack and call the stub.
2981  SubStringStub stub;
2982  ZoneList<Expression*>* args = expr->arguments();
2983  ASSERT(args->length() == 3);
2984  VisitForStackValue(args->at(0));
2985  VisitForStackValue(args->at(1));
2986  VisitForStackValue(args->at(2));
2987  __ CallStub(&stub);
2988  context()->Plug(eax);
2989 }
2990 
2991 
2992 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
2993  // Load the arguments on the stack and call the stub.
2994  RegExpExecStub stub;
2995  ZoneList<Expression*>* args = expr->arguments();
2996  ASSERT(args->length() == 4);
2997  VisitForStackValue(args->at(0));
2998  VisitForStackValue(args->at(1));
2999  VisitForStackValue(args->at(2));
3000  VisitForStackValue(args->at(3));
3001  __ CallStub(&stub);
3002  context()->Plug(eax);
3003 }
3004 
3005 
3006 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3007  ZoneList<Expression*>* args = expr->arguments();
3008  ASSERT(args->length() == 1);
3009 
3010  VisitForAccumulatorValue(args->at(0)); // Load the object.
3011 
3012  Label done;
3013  // If the object is a smi return the object.
3014  __ JumpIfSmi(eax, &done, Label::kNear);
3015  // If the object is not a value type, return the object.
3016  __ CmpObjectType(eax, JS_VALUE_TYPE, ebx);
3017  __ j(not_equal, &done, Label::kNear);
3018  __ mov(eax, FieldOperand(eax, JSValue::kValueOffset));
3019 
3020  __ bind(&done);
3021  context()->Plug(eax);
3022 }
3023 
3024 
3025 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3026  ZoneList<Expression*>* args = expr->arguments();
3027  ASSERT(args->length() == 2);
3028  ASSERT_NE(NULL, args->at(1)->AsLiteral());
3029  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->handle()));
3030 
3031  VisitForAccumulatorValue(args->at(0)); // Load the object.
3032 
3033  Label runtime, done, not_date_object;
3034  Register object = eax;
3035  Register result = eax;
3036  Register scratch = ecx;
3037 
3038  __ JumpIfSmi(object, &not_date_object);
3039  __ CmpObjectType(object, JS_DATE_TYPE, scratch);
3040  __ j(not_equal, &not_date_object);
3041 
3042  if (index->value() == 0) {
3043  __ mov(result, FieldOperand(object, JSDate::kValueOffset));
3044  __ jmp(&done);
3045  } else {
3046  if (index->value() < JSDate::kFirstUncachedField) {
3047  ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3048  __ mov(scratch, Operand::StaticVariable(stamp));
3049  __ cmp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
3050  __ j(not_equal, &runtime, Label::kNear);
3051  __ mov(result, FieldOperand(object, JSDate::kValueOffset +
3052  kPointerSize * index->value()));
3053  __ jmp(&done);
3054  }
3055  __ bind(&runtime);
3056  __ PrepareCallCFunction(2, scratch);
3057  __ mov(Operand(esp, 0), object);
3058  __ mov(Operand(esp, 1 * kPointerSize), Immediate(index));
3059  __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3060  __ jmp(&done);
3061  }
3062 
3063  __ bind(&not_date_object);
3064  __ CallRuntime(Runtime::kThrowNotDateError, 0);
3065  __ bind(&done);
3066  context()->Plug(result);
3067 }
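// Note (editorial): fields below JSDate::kFirstUncachedField are cached on
// the date object itself; the stamp comparison above detects a stale cache
// (for example after a time-zone change) and falls back to the C++
// get_date_field_function.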
3068 
3069 
3070 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3071  // Load the arguments on the stack and call the runtime function.
3072  ZoneList<Expression*>* args = expr->arguments();
3073  ASSERT(args->length() == 2);
3074  VisitForStackValue(args->at(0));
3075  VisitForStackValue(args->at(1));
3076 
3077  if (CpuFeatures::IsSupported(SSE2)) {
3078  MathPowStub stub(MathPowStub::ON_STACK);
3079  __ CallStub(&stub);
3080  } else {
3081  __ CallRuntime(Runtime::kMath_pow, 2);
3082  }
3083  context()->Plug(eax);
3084 }
3085 
3086 
3087 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3088  ZoneList<Expression*>* args = expr->arguments();
3089  ASSERT(args->length() == 2);
3090 
3091  VisitForStackValue(args->at(0)); // Load the object.
3092  VisitForAccumulatorValue(args->at(1)); // Load the value.
3093  __ pop(ebx); // eax = value. ebx = object.
3094 
3095  Label done;
3096  // If the object is a smi, return the value.
3097  __ JumpIfSmi(ebx, &done, Label::kNear);
3098 
3099  // If the object is not a value type, return the value.
3100  __ CmpObjectType(ebx, JS_VALUE_TYPE, ecx);
3101  __ j(not_equal, &done, Label::kNear);
3102 
3103  // Store the value.
3104  __ mov(FieldOperand(ebx, JSValue::kValueOffset), eax);
3105 
3106  // Update the write barrier. Save the value as it will be
3107  // overwritten by the write barrier code and is needed afterward.
3108  __ mov(edx, eax);
3109  __ RecordWriteField(ebx, JSValue::kValueOffset, edx, ecx, kDontSaveFPRegs);
3110 
3111  __ bind(&done);
3112  context()->Plug(eax);
3113 }
3114 
3115 
3116 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3117  ZoneList<Expression*>* args = expr->arguments();
3118  ASSERT_EQ(args->length(), 1);
3119 
3120  // Load the argument on the stack and call the stub.
3121  VisitForStackValue(args->at(0));
3122 
3123  NumberToStringStub stub;
3124  __ CallStub(&stub);
3125  context()->Plug(eax);
3126 }
3127 
3128 
3129 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3130  ZoneList<Expression*>* args = expr->arguments();
3131  ASSERT(args->length() == 1);
3132 
3133  VisitForAccumulatorValue(args->at(0));
3134 
3135  Label done;
3136  StringCharFromCodeGenerator generator(eax, ebx);
3137  generator.GenerateFast(masm_);
3138  __ jmp(&done);
3139 
3140  NopRuntimeCallHelper call_helper;
3141  generator.GenerateSlow(masm_, call_helper);
3142 
3143  __ bind(&done);
3144  context()->Plug(ebx);
3145 }
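// Note (editorial): both the fast and slow paths of the generator above
// leave the one-character string in ebx, which is why this function plugs
// ebx instead of the usual eax.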
3146 
3147 
3148 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3149  ZoneList<Expression*>* args = expr->arguments();
3150  ASSERT(args->length() == 2);
3151 
3152  VisitForStackValue(args->at(0));
3153  VisitForAccumulatorValue(args->at(1));
3154 
3155  Register object = ebx;
3156  Register index = eax;
3157  Register result = edx;
3158 
3159  __ pop(object);
3160 
3161  Label need_conversion;
3162  Label index_out_of_range;
3163  Label done;
3164  StringCharCodeAtGenerator generator(object,
3165  index,
3166  result,
3167  &need_conversion,
3168  &need_conversion,
3169  &index_out_of_range,
3170  STRING_INDEX_IS_NUMBER);
3171  generator.GenerateFast(masm_);
3172  __ jmp(&done);
3173 
3174  __ bind(&index_out_of_range);
3175  // When the index is out of range, the spec requires us to return
3176  // NaN.
3177  __ Set(result, Immediate(isolate()->factory()->nan_value()));
3178  __ jmp(&done);
3179 
3180  __ bind(&need_conversion);
3181  // Move the undefined value into the result register, which will
3182  // trigger conversion.
3183  __ Set(result, Immediate(isolate()->factory()->undefined_value()));
3184  __ jmp(&done);
3185 
3186  NopRuntimeCallHelper call_helper;
3187  generator.GenerateSlow(masm_, call_helper);
3188 
3189  __ bind(&done);
3190  context()->Plug(result);
3191 }
3192 
3193 
3194 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3195  ZoneList<Expression*>* args = expr->arguments();
3196  ASSERT(args->length() == 2);
3197 
3198  VisitForStackValue(args->at(0));
3199  VisitForAccumulatorValue(args->at(1));
3200 
3201  Register object = ebx;
3202  Register index = eax;
3203  Register scratch = edx;
3204  Register result = eax;
3205 
3206  __ pop(object);
3207 
3208  Label need_conversion;
3209  Label index_out_of_range;
3210  Label done;
3211  StringCharAtGenerator generator(object,
3212  index,
3213  scratch,
3214  result,
3215  &need_conversion,
3216  &need_conversion,
3217  &index_out_of_range,
3218  STRING_INDEX_IS_NUMBER);
3219  generator.GenerateFast(masm_);
3220  __ jmp(&done);
3221 
3222  __ bind(&index_out_of_range);
3223  // When the index is out of range, the spec requires us to return
3224  // the empty string.
3225  __ Set(result, Immediate(isolate()->factory()->empty_string()));
3226  __ jmp(&done);
3227 
3228  __ bind(&need_conversion);
3229  // Move smi zero into the result register, which will trigger
3230  // conversion.
3231  __ Set(result, Immediate(Smi::FromInt(0)));
3232  __ jmp(&done);
3233 
3234  NopRuntimeCallHelper call_helper;
3235  generator.GenerateSlow(masm_, call_helper);
3236 
3237  __ bind(&done);
3238  context()->Plug(result);
3239 }
3240 
3241 
3242 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3243  ZoneList<Expression*>* args = expr->arguments();
3244  ASSERT_EQ(2, args->length());
3245 
3246  VisitForStackValue(args->at(0));
3247  VisitForStackValue(args->at(1));
3248 
3249  StringAddStub stub(NO_STRING_ADD_FLAGS);
3250  __ CallStub(&stub);
3251  context()->Plug(eax);
3252 }
3253 
3254 
3255 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3256  ZoneList<Expression*>* args = expr->arguments();
3257  ASSERT_EQ(2, args->length());
3258 
3259  VisitForStackValue(args->at(0));
3260  VisitForStackValue(args->at(1));
3261 
3262  StringCompareStub stub;
3263  __ CallStub(&stub);
3264  context()->Plug(eax);
3265 }
3266 
3267 
3268 void FullCodeGenerator::EmitMathSin(CallRuntime* expr) {
3269  // Load the argument on the stack and call the stub.
3270  TranscendentalCacheStub stub(TranscendentalCache::SIN,
3271  TranscendentalCacheStub::TAGGED);
3272  ZoneList<Expression*>* args = expr->arguments();
3273  ASSERT(args->length() == 1);
3274  VisitForStackValue(args->at(0));
3275  __ CallStub(&stub);
3276  context()->Plug(eax);
3277 }
3278 
3279 
3280 void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
3281  // Load the argument on the stack and call the stub.
3282  TranscendentalCacheStub stub(TranscendentalCache::COS,
3283  TranscendentalCacheStub::TAGGED);
3284  ZoneList<Expression*>* args = expr->arguments();
3285  ASSERT(args->length() == 1);
3286  VisitForStackValue(args->at(0));
3287  __ CallStub(&stub);
3288  context()->Plug(eax);
3289 }
3290 
3291 
3292 void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
3293  // Load the argument on the stack and call the stub.
3294  TranscendentalCacheStub stub(TranscendentalCache::TAN,
3295  TranscendentalCacheStub::TAGGED);
3296  ZoneList<Expression*>* args = expr->arguments();
3297  ASSERT(args->length() == 1);
3298  VisitForStackValue(args->at(0));
3299  __ CallStub(&stub);
3300  context()->Plug(eax);
3301 }
3302 
3303 
3304 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
3305  // Load the argument on the stack and call the stub.
3306  TranscendentalCacheStub stub(TranscendentalCache::LOG,
3307  TranscendentalCacheStub::TAGGED);
3308  ZoneList<Expression*>* args = expr->arguments();
3309  ASSERT(args->length() == 1);
3310  VisitForStackValue(args->at(0));
3311  __ CallStub(&stub);
3312  context()->Plug(eax);
3313 }
3314 
3315 
3316 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
3317  // Load the argument on the stack and call the runtime function.
3318  ZoneList<Expression*>* args = expr->arguments();
3319  ASSERT(args->length() == 1);
3320  VisitForStackValue(args->at(0));
3321  __ CallRuntime(Runtime::kMath_sqrt, 1);
3322  context()->Plug(eax);
3323 }
3324 
3325 
3326 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3327  ZoneList<Expression*>* args = expr->arguments();
3328  ASSERT(args->length() >= 2);
3329 
3330  int arg_count = args->length() - 2; // 2 ~ receiver and function.
3331  for (int i = 0; i < arg_count + 1; ++i) {
3332  VisitForStackValue(args->at(i));
3333  }
3334  VisitForAccumulatorValue(args->last()); // Function.
3335 
3336  Label runtime, done;
3337  // Check for non-function argument (including proxy).
3338  __ JumpIfSmi(eax, &runtime);
3339  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
3340  __ j(not_equal, &runtime);
3341 
3342  // InvokeFunction requires the function in edi. Move it in there.
3343  __ mov(edi, result_register());
3344  ParameterCount count(arg_count);
3345  __ InvokeFunction(edi, count, CALL_FUNCTION,
3346  NullCallWrapper(), CALL_AS_METHOD);
3347  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
3348  __ jmp(&done);
3349 
3350  __ bind(&runtime);
3351  __ push(eax);
3352  __ CallRuntime(Runtime::kCall, args->length());
3353  __ bind(&done);
3354 
3355  context()->Plug(eax);
3356 }
3357 
3358 
3359 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3360  // Load the arguments on the stack and call the stub.
3361  RegExpConstructResultStub stub;
3362  ZoneList<Expression*>* args = expr->arguments();
3363  ASSERT(args->length() == 3);
3364  VisitForStackValue(args->at(0));
3365  VisitForStackValue(args->at(1));
3366  VisitForStackValue(args->at(2));
3367  __ CallStub(&stub);
3368  context()->Plug(eax);
3369 }
3370 
3371 
3372 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3373  ZoneList<Expression*>* args = expr->arguments();
3374  ASSERT_EQ(2, args->length());
3375 
3376  ASSERT_NE(NULL, args->at(0)->AsLiteral());
3377  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
3378 
3379  Handle<FixedArray> jsfunction_result_caches(
3380  isolate()->native_context()->jsfunction_result_caches());
3381  if (jsfunction_result_caches->length() <= cache_id) {
3382  __ Abort("Attempt to use undefined cache.");
3383  __ mov(eax, isolate()->factory()->undefined_value());
3384  context()->Plug(eax);
3385  return;
3386  }
3387 
3388  VisitForAccumulatorValue(args->at(1));
3389 
3390  Register key = eax;
3391  Register cache = ebx;
3392  Register tmp = ecx;
3393  __ mov(cache, ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX));
3394  __ mov(cache,
3395  FieldOperand(cache, GlobalObject::kNativeContextOffset));
3396  __ mov(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3397  __ mov(cache,
3398  FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3399 
3400  Label done, not_found;
3401  // tmp now holds finger offset as a smi.
3402  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3403  __ mov(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
3404  __ cmp(key, CodeGenerator::FixedArrayElementOperand(cache, tmp));
3405  __ j(not_equal, &not_found);
3406 
3407  __ mov(eax, CodeGenerator::FixedArrayElementOperand(cache, tmp, 1));
3408  __ jmp(&done);
3409 
3410  __ bind(&not_found);
3411  // Call runtime to perform the lookup.
3412  __ push(cache);
3413  __ push(key);
3414  __ CallRuntime(Runtime::kGetFromCache, 2);
3415 
3416  __ bind(&done);
3417  context()->Plug(eax);
3418 }
3419 
3420 
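
The probe emitted above targets a JSFunctionResultCache: a FixedArray holding flattened (key, value) pairs plus a finger pointing at the most recently hit key. Only the fingered entry is checked inline; any miss defers to Runtime::kGetFromCache. A plain C++ sketch under those assumptions (hypothetical types, not V8's):

#include <cstddef>
#include <vector>

struct ResultCacheSketch {
  std::vector<const void*> entries;  // key0, value0, key1, value1, ...
  size_t finger;                     // Index of the last key that hit.
};

// Returns the cached value, or NULL meaning "take the runtime path".
inline const void* Probe(const ResultCacheSketch& cache, const void* key) {
  if (cache.entries[cache.finger] == key) {
    return cache.entries[cache.finger + 1];  // Value sits after its key.
  }
  return NULL;
}
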
3421 void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
3422  ZoneList<Expression*>* args = expr->arguments();
3423  ASSERT_EQ(2, args->length());
3424 
3425  Register right = eax;
3426  Register left = ebx;
3427  Register tmp = ecx;
3428 
3429  VisitForStackValue(args->at(0));
3430  VisitForAccumulatorValue(args->at(1));
3431  __ pop(left);
3432 
3433  Label done, fail, ok;
3434  __ cmp(left, right);
3435  __ j(equal, &ok);
3436  // Fail if either is a non-HeapObject.
3437  __ mov(tmp, left);
3438  __ and_(tmp, right);
3439  __ JumpIfSmi(tmp, &fail);
3440  __ mov(tmp, FieldOperand(left, HeapObject::kMapOffset));
3441  __ CmpInstanceType(tmp, JS_REGEXP_TYPE);
3442  __ j(not_equal, &fail);
3443  __ cmp(tmp, FieldOperand(right, HeapObject::kMapOffset));
3444  __ j(not_equal, &fail);
3445  __ mov(tmp, FieldOperand(left, JSRegExp::kDataOffset));
3446  __ cmp(tmp, FieldOperand(right, JSRegExp::kDataOffset));
3447  __ j(equal, &ok);
3448  __ bind(&fail);
3449  __ mov(eax, Immediate(isolate()->factory()->false_value()));
3450  __ jmp(&done);
3451  __ bind(&ok);
3452  __ mov(eax, Immediate(isolate()->factory()->true_value()));
3453  __ bind(&done);
3454 
3455  context()->Plug(eax);
3456 }
3457 
3458 
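
The ladder above treats two values as RegExp-equivalent when they are the same object, or when both are JSRegExps sharing one map and one compiled data array. A boolean sketch with hypothetical fields standing in for the tag, map and data checks:

struct RegExpLikeSketch {
  const void* value;     // The tagged word itself.
  bool is_heap_object;   // Low tag bit set (not a smi).
  bool is_js_regexp;
  const void* map;
  const void* data;      // JSRegExp::kDataOffset contents.
};

inline bool IsRegExpEquivalent(const RegExpLikeSketch& l,
                               const RegExpLikeSketch& r) {
  if (l.value == r.value) return true;                       // Same object.
  if (!l.is_heap_object || !r.is_heap_object) return false;  // A smi: fail.
  if (!l.is_js_regexp) return false;  // Left must be a JSRegExp...
  if (l.map != r.map) return false;   // ...and right must share its map.
  return l.data == r.data;            // Same compiled regexp data.
}
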
3459 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3460  ZoneList<Expression*>* args = expr->arguments();
3461  ASSERT(args->length() == 1);
3462 
3463  VisitForAccumulatorValue(args->at(0));
3464 
3465  __ AssertString(eax);
3466 
3467  Label materialize_true, materialize_false;
3468  Label* if_true = NULL;
3469  Label* if_false = NULL;
3470  Label* fall_through = NULL;
3471  context()->PrepareTest(&materialize_true, &materialize_false,
3472  &if_true, &if_false, &fall_through);
3473 
3474  __ test(FieldOperand(eax, String::kHashFieldOffset),
3475  Immediate(String::kContainsCachedArrayIndexMask));
3476  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3477  Split(zero, if_true, if_false, fall_through);
3478 
3479  context()->Plug(if_true, if_false);
3480 }
3481 
3482 
3483 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3484  ZoneList<Expression*>* args = expr->arguments();
3485  ASSERT(args->length() == 1);
3486  VisitForAccumulatorValue(args->at(0));
3487 
3488  __ AssertString(eax);
3489 
3490  __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
3491  __ IndexFromHash(eax, eax);
3492 
3493  context()->Plug(eax);
3494 }
3495 
3496 
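
Both array-index helpers lean on the string hash field caching a numeric array index in some of its bits; IndexFromHash merely shifts and masks it out. A sketch of that extraction, with illustrative bit positions rather than V8's exact hash-field layout:

#include <cstdint>

const uint32_t kIndexShiftSketch = 2;   // Skip the "contains index" flag bits.
const uint32_t kIndexWidthSketch = 24;  // Width of the cached index.

inline uint32_t IndexFromHashSketch(uint32_t hash_field) {
  return (hash_field >> kIndexShiftSketch) &
         ((1u << kIndexWidthSketch) - 1);
}
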
3497 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3498  Label bailout, done, one_char_separator, long_separator,
3499  non_trivial_array, not_size_one_array, loop,
3500  loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
3501 
3502  ZoneList<Expression*>* args = expr->arguments();
3503  ASSERT(args->length() == 2);
3504  // We will leave the separator on the stack until the end of the function.
3505  VisitForStackValue(args->at(1));
3506  // Load this to eax (= array)
3507  VisitForAccumulatorValue(args->at(0));
3508  // All aliases of the same register have disjoint lifetimes.
3509  Register array = eax;
3510  Register elements = no_reg; // Will be eax.
3511 
3512  Register index = edx;
3513 
3514  Register string_length = ecx;
3515 
3516  Register string = esi;
3517 
3518  Register scratch = ebx;
3519 
3520  Register array_length = edi;
3521  Register result_pos = no_reg; // Will be edi.
3522 
3523  // Separator operand is already pushed.
3524  Operand separator_operand = Operand(esp, 2 * kPointerSize);
3525  Operand result_operand = Operand(esp, 1 * kPointerSize);
3526  Operand array_length_operand = Operand(esp, 0);
3527  __ sub(esp, Immediate(2 * kPointerSize));
3528  __ cld();
3529  // Check that the array is a JSArray
3530  __ JumpIfSmi(array, &bailout);
3531  __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
3532  __ j(not_equal, &bailout);
3533 
3534  // Check that the array has fast elements.
3535  __ CheckFastElements(scratch, &bailout);
3536 
3537  // If the array has length zero, return the empty string.
3538  __ mov(array_length, FieldOperand(array, JSArray::kLengthOffset));
3539  __ SmiUntag(array_length);
3540  __ j(not_zero, &non_trivial_array);
3541  __ mov(result_operand, isolate()->factory()->empty_string());
3542  __ jmp(&done);
3543 
3544  // Save the array length.
3545  __ bind(&non_trivial_array);
3546  __ mov(array_length_operand, array_length);
3547 
3548  // Save the FixedArray containing array's elements.
3549  // End of array's live range.
3550  elements = array;
3551  __ mov(elements, FieldOperand(array, JSArray::kElementsOffset));
3552  array = no_reg;
3553 
3554 
3555  // Check that all array elements are sequential ASCII strings, and
3556  // accumulate the sum of their lengths, as a smi-encoded value.
3557  __ Set(index, Immediate(0));
3558  __ Set(string_length, Immediate(0));
3559  // Loop condition: while (index < length).
3560  // Live loop registers: index, array_length, string,
3561  // scratch, string_length, elements.
3562  if (generate_debug_code_) {
3563  __ cmp(index, array_length);
3564  __ Assert(less, "No empty arrays here in EmitFastAsciiArrayJoin");
3565  }
3566  __ bind(&loop);
3567  __ mov(string, FieldOperand(elements,
3568  index,
3569  times_pointer_size,
3570  FixedArray::kHeaderSize));
3571  __ JumpIfSmi(string, &bailout);
3572  __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
3573  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3574  __ and_(scratch, Immediate(
3575  kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3576  __ cmp(scratch, kStringTag | kAsciiStringTag | kSeqStringTag);
3577  __ j(not_equal, &bailout);
3578  __ add(string_length,
3579  FieldOperand(string, SeqAsciiString::kLengthOffset));
3580  __ j(overflow, &bailout);
3581  __ add(index, Immediate(1));
3582  __ cmp(index, array_length);
3583  __ j(less, &loop);
3584 
3585  // If array_length is 1, return elements[0], a string.
3586  __ cmp(array_length, 1);
3587  __ j(not_equal, &not_size_one_array);
3588  __ mov(scratch, FieldOperand(elements, FixedArray::kHeaderSize));
3589  __ mov(result_operand, scratch);
3590  __ jmp(&done);
3591 
3592  __ bind(&not_size_one_array);
3593 
3594  // End of array_length live range.
3595  result_pos = array_length;
3596  array_length = no_reg;
3597 
3598  // Live registers:
3599  // string_length: Sum of string lengths, as a smi.
3600  // elements: FixedArray of strings.
3601 
3602  // Check that the separator is a flat ASCII string.
3603  __ mov(string, separator_operand);
3604  __ JumpIfSmi(string, &bailout);
3605  __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
3606  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3607  __ and_(scratch, Immediate(
3608  kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3609  __ cmp(scratch, ASCII_STRING_TYPE);
3610  __ j(not_equal, &bailout);
3611 
3612  // Add (separator length times array_length) - separator length
3613  // to string_length.
3614  __ mov(scratch, separator_operand);
3615  __ mov(scratch, FieldOperand(scratch, SeqAsciiString::kLengthOffset));
3616  __ sub(string_length, scratch); // May be negative, temporarily.
3617  __ imul(scratch, array_length_operand);
3618  __ j(overflow, &bailout);
3619  __ add(string_length, scratch);
3620  __ j(overflow, &bailout);
3621 
3622  __ shr(string_length, 1);
3623  // Live registers and stack values:
3624  // string_length
3625  // elements
3626  __ AllocateAsciiString(result_pos, string_length, scratch,
3627  index, string, &bailout);
3628  __ mov(result_operand, result_pos);
3629  __ lea(result_pos, FieldOperand(result_pos, SeqAsciiString::kHeaderSize));
3630 
3631 
3632  __ mov(string, separator_operand);
3633  __ cmp(FieldOperand(string, SeqAsciiString::kLengthOffset),
3634  Immediate(Smi::FromInt(1)));
3635  __ j(equal, &one_char_separator);
3636  __ j(greater, &long_separator);
3637 
3638 
3639  // Empty separator case
3640  __ mov(index, Immediate(0));
3641  __ jmp(&loop_1_condition);
3642  // Loop condition: while (index < length).
3643  __ bind(&loop_1);
3644  // Each iteration of the loop concatenates one string to the result.
3645  // Live values in registers:
3646  // index: which element of the elements array we are adding to the result.
3647  // result_pos: the position to which we are currently copying characters.
3648  // elements: the FixedArray of strings we are joining.
3649 
3650  // Get string = array[index].
3651  __ mov(string, FieldOperand(elements, index,
3652  times_pointer_size,
3653  FixedArray::kHeaderSize));
3654  __ mov(string_length,
3655  FieldOperand(string, String::kLengthOffset));
3656  __ shr(string_length, 1);
3657  __ lea(string,
3658  FieldOperand(string, SeqAsciiString::kHeaderSize));
3659  __ CopyBytes(string, result_pos, string_length, scratch);
3660  __ add(index, Immediate(1));
3661  __ bind(&loop_1_condition);
3662  __ cmp(index, array_length_operand);
3663  __ j(less, &loop_1); // End while (index < length).
3664  __ jmp(&done);
3665 
3666 
3667 
3668  // One-character separator case
3669  __ bind(&one_char_separator);
3670  // Replace separator with its ASCII character value.
3671  __ mov_b(scratch, FieldOperand(string, SeqAsciiString::kHeaderSize));
3672  __ mov_b(separator_operand, scratch);
3673 
3674  __ Set(index, Immediate(0));
3675  // Jump into the loop after the code that copies the separator, so the first
3676  // element is not preceded by a separator
3677  __ jmp(&loop_2_entry);
3678  // Loop condition: while (index < length).
3679  __ bind(&loop_2);
3680  // Each iteration of the loop concatenates one string to the result.
3681  // Live values in registers:
3682  // index: which element of the elements array we are adding to the result.
3683  // result_pos: the position to which we are currently copying characters.
3684 
3685  // Copy the separator character to the result.
3686  __ mov_b(scratch, separator_operand);
3687  __ mov_b(Operand(result_pos, 0), scratch);
3688  __ inc(result_pos);
3689 
3690  __ bind(&loop_2_entry);
3691  // Get string = array[index].
3692  __ mov(string, FieldOperand(elements, index,
3693  times_pointer_size,
3694  FixedArray::kHeaderSize));
3695  __ mov(string_length,
3696  FieldOperand(string, String::kLengthOffset));
3697  __ shr(string_length, 1);
3698  __ lea(string,
3699  FieldOperand(string, SeqAsciiString::kHeaderSize));
3700  __ CopyBytes(string, result_pos, string_length, scratch);
3701  __ add(index, Immediate(1));
3702 
3703  __ cmp(index, array_length_operand);
3704  __ j(less, &loop_2); // End while (index < length).
3705  __ jmp(&done);
3706 
3707 
3708  // Long separator case (separator is more than one character).
3709  __ bind(&long_separator);
3710 
3711  __ Set(index, Immediate(0));
3712  // Jump into the loop after the code that copies the separator, so the first
3713  // element is not preceded by a separator
3714  __ jmp(&loop_3_entry);
3715  // Loop condition: while (index < length).
3716  __ bind(&loop_3);
3717  // Each iteration of the loop concatenates one string to the result.
3718  // Live values in registers:
3719  // index: which element of the elements array we are adding to the result.
3720  // result_pos: the position to which we are currently copying characters.
3721 
3722  // Copy the separator to the result.
3723  __ mov(string, separator_operand);
3724  __ mov(string_length,
3725  FieldOperand(string, String::kLengthOffset));
3726  __ shr(string_length, 1);
3727  __ lea(string,
3728  FieldOperand(string, SeqAsciiString::kHeaderSize));
3729  __ CopyBytes(string, result_pos, string_length, scratch);
3730 
3731  __ bind(&loop_3_entry);
3732  // Get string = array[index].
3733  __ mov(string, FieldOperand(elements, index,
3734  times_pointer_size,
3735  FixedArray::kHeaderSize));
3736  __ mov(string_length,
3737  FieldOperand(string, String::kLengthOffset));
3738  __ shr(string_length, 1);
3739  __ lea(string,
3740  FieldOperand(string, SeqAsciiString::kHeaderSize));
3741  __ CopyBytes(string, result_pos, string_length, scratch);
3742  __ add(index, Immediate(1));
3743 
3744  __ cmp(index, array_length_operand);
3745  __ j(less, &loop_3); // End while (index < length).
3746  __ jmp(&done);
3747 
3748 
3749  __ bind(&bailout);
3750  __ mov(result_operand, isolate()->factory()->undefined_value());
3751  __ bind(&done);
3752  __ mov(eax, result_operand);
3753  // Drop temp values from the stack, and restore context register.
3754  __ add(esp, Immediate(3 * kPointerSize));
3755 
3756  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
3757  context()->Plug(eax);
3758 }
3759 
3760 
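
The three loops above specialize Array.prototype.join on the separator length: no separator copy at all, a single byte store, or a full CopyBytes per element. The same strategy in a plain C++ sketch, with std::string standing in for the sequential ASCII strings:

#include <string>
#include <vector>

std::string JoinSketch(const std::vector<std::string>& parts,
                       const std::string& sep) {
  if (parts.empty()) return "";             // Length zero: empty string.
  if (parts.size() == 1) return parts[0];   // Length one: elements[0].
  std::string result = parts[0];
  for (size_t i = 1; i < parts.size(); ++i) {
    if (sep.size() == 1) {
      result += sep[0];                     // loop_2: one byte per gap.
    } else if (!sep.empty()) {
      result += sep;                        // loop_3: copy the whole separator.
    }                                       // loop_1: empty sep, nothing to copy.
    result += parts[i];
  }
  return result;
}
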
3761 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3762  Handle<String> name = expr->name();
3763  if (name->length() > 0 && name->Get(0) == '_') {
3764  Comment cmnt(masm_, "[ InlineRuntimeCall");
3765  EmitInlineRuntimeCall(expr);
3766  return;
3767  }
3768 
3769  Comment cmnt(masm_, "[ CallRuntime");
3770  ZoneList<Expression*>* args = expr->arguments();
3771 
3772  if (expr->is_jsruntime()) {
3773  // Prepare for calling JS runtime function.
3774  __ mov(eax, GlobalObjectOperand());
3775  __ push(FieldOperand(eax, GlobalObject::kBuiltinsOffset));
3776  }
3777 
3778  // Push the arguments ("left-to-right").
3779  int arg_count = args->length();
3780  for (int i = 0; i < arg_count; i++) {
3781  VisitForStackValue(args->at(i));
3782  }
3783 
3784  if (expr->is_jsruntime()) {
3785  // Call the JS runtime function via a call IC.
3786  __ Set(ecx, Immediate(expr->name()));
3787  RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
3788  Handle<Code> ic =
3789  isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
3790  CallIC(ic, mode, expr->CallRuntimeFeedbackId());
3791  // Restore context register.
3792  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
3793  } else {
3794  // Call the C runtime function.
3795  __ CallRuntime(expr->function(), arg_count);
3796  }
3797  context()->Plug(eax);
3798 }
3799 
3800 
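
VisitCallRuntime above splits three ways: names starting with '_' (the %_Foo intrinsics) are expanded inline, and the rest go either through a call IC into a JS builtin or directly into a C++ runtime function. A sketch of that dispatch rule (hypothetical helper, for illustration):

#include <string>

enum RuntimeCallKind { INLINE_INTRINSIC, JS_RUNTIME, C_RUNTIME };

inline RuntimeCallKind Classify(const std::string& name, bool is_jsruntime) {
  if (!name.empty() && name[0] == '_') return INLINE_INTRINSIC;  // %_Foo(...)
  return is_jsruntime ? JS_RUNTIME : C_RUNTIME;                  // %Foo(...)
}
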
3801 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3802  switch (expr->op()) {
3803  case Token::DELETE: {
3804  Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3805  Property* property = expr->expression()->AsProperty();
3806  VariableProxy* proxy = expr->expression()->AsVariableProxy();
3807 
3808  if (property != NULL) {
3809  VisitForStackValue(property->obj());
3810  VisitForStackValue(property->key());
3811  StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE)
3812  ? kNonStrictMode : kStrictMode;
3813  __ push(Immediate(Smi::FromInt(strict_mode_flag)));
3814  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3815  context()->Plug(eax);
3816  } else if (proxy != NULL) {
3817  Variable* var = proxy->var();
3818  // Delete of an unqualified identifier is disallowed in strict mode
3819  // but "delete this" is allowed.
3820  ASSERT(language_mode() == CLASSIC_MODE || var->is_this());
3821  if (var->IsUnallocated()) {
3822  __ push(GlobalObjectOperand());
3823  __ push(Immediate(var->name()));
3824  __ push(Immediate(Smi::FromInt(kNonStrictMode)));
3825  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3826  context()->Plug(eax);
3827  } else if (var->IsStackAllocated() || var->IsContextSlot()) {
3828  // Result of deleting non-global variables is false. 'this' is
3829  // not really a variable, though we implement it as one. The
3830  // subexpression does not have side effects.
3831  context()->Plug(var->is_this());
3832  } else {
3833  // Non-global variable. Call the runtime to try to delete from the
3834  // context where the variable was introduced.
3835  __ push(context_register());
3836  __ push(Immediate(var->name()));
3837  __ CallRuntime(Runtime::kDeleteContextSlot, 2);
3838  context()->Plug(eax);
3839  }
3840  } else {
3841  // Result of deleting non-property, non-variable reference is true.
3842  // The subexpression may have side effects.
3843  VisitForEffect(expr->expression());
3844  context()->Plug(true);
3845  }
3846  break;
3847  }
3848 
3849  case Token::VOID: {
3850  Comment cmnt(masm_, "[ UnaryOperation (VOID)");
3851  VisitForEffect(expr->expression());
3852  context()->Plug(isolate()->factory()->undefined_value());
3853  break;
3854  }
3855 
3856  case Token::NOT: {
3857  Comment cmnt(masm_, "[ UnaryOperation (NOT)");
3858  if (context()->IsEffect()) {
3859  // Unary NOT has no side effects so it's only necessary to visit the
3860  // subexpression. Match the optimizing compiler by not branching.
3861  VisitForEffect(expr->expression());
3862  } else if (context()->IsTest()) {
3863  const TestContext* test = TestContext::cast(context());
3864  // The labels are swapped for the recursive call.
3865  VisitForControl(expr->expression(),
3866  test->false_label(),
3867  test->true_label(),
3868  test->fall_through());
3869  context()->Plug(test->true_label(), test->false_label());
3870  } else {
3871  // We handle value contexts explicitly rather than simply visiting
3872  // for control and plugging the control flow into the context,
3873  // because we need to prepare a pair of extra administrative AST ids
3874  // for the optimizing compiler.
3875  ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
3876  Label materialize_true, materialize_false, done;
3877  VisitForControl(expr->expression(),
3878  &materialize_false,
3879  &materialize_true,
3880  &materialize_true);
3881  __ bind(&materialize_true);
3882  PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
3883  if (context()->IsAccumulatorValue()) {
3884  __ mov(eax, isolate()->factory()->true_value());
3885  } else {
3886  __ Push(isolate()->factory()->true_value());
3887  }
3888  __ jmp(&done, Label::kNear);
3889  __ bind(&materialize_false);
3890  PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
3891  if (context()->IsAccumulatorValue()) {
3892  __ mov(eax, isolate()->factory()->false_value());
3893  } else {
3894  __ Push(isolate()->factory()->false_value());
3895  }
3896  __ bind(&done);
3897  }
3898  break;
3899  }
3900 
3901  case Token::TYPEOF: {
3902  Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
3903  { StackValueContext context(this);
3904  VisitForTypeofValue(expr->expression());
3905  }
3906  __ CallRuntime(Runtime::kTypeof, 1);
3907  context()->Plug(eax);
3908  break;
3909  }
3910 
3911  case Token::ADD: {
3912  Comment cmt(masm_, "[ UnaryOperation (ADD)");
3913  VisitForAccumulatorValue(expr->expression());
3914  Label no_conversion;
3915  __ JumpIfSmi(result_register(), &no_conversion);
3916  ToNumberStub convert_stub;
3917  __ CallStub(&convert_stub);
3918  __ bind(&no_conversion);
3919  context()->Plug(result_register());
3920  break;
3921  }
3922 
3923  case Token::SUB:
3924  EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
3925  break;
3926 
3927  case Token::BIT_NOT:
3928  EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
3929  break;
3930 
3931  default:
3932  UNREACHABLE();
3933  }
3934 }
3935 
3936 
3937 void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
3938  const char* comment) {
3939  Comment cmt(masm_, comment);
3940  bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
3941  UnaryOverwriteMode overwrite =
3942  can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
3943  UnaryOpStub stub(expr->op(), overwrite);
3944  // UnaryOpStub expects the argument to be in the
3945  // accumulator register eax.
3946  VisitForAccumulatorValue(expr->expression());
3947  SetSourcePosition(expr->position());
3948  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
3949  expr->UnaryOperationFeedbackId());
3950  context()->Plug(eax);
3951 }
3952 
3953 
3954 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3955  Comment cmnt(masm_, "[ CountOperation");
3956  SetSourcePosition(expr->position());
3957 
3958  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
3959  // as the left-hand side.
3960  if (!expr->expression()->IsValidLeftHandSide()) {
3961  VisitForEffect(expr->expression());
3962  return;
3963  }
3964 
3965  // Expression can only be a property, a global or a (parameter or local)
3966  // slot.
3967  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
3968  LhsKind assign_type = VARIABLE;
3969  Property* prop = expr->expression()->AsProperty();
3970  // In case of a property we use the uninitialized expression context
3971  // of the key to detect a named property.
3972  if (prop != NULL) {
3973  assign_type =
3974  (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
3975  }
3976 
3977  // Evaluate expression and get value.
3978  if (assign_type == VARIABLE) {
3979  ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
3980  AccumulatorValueContext context(this);
3981  EmitVariableLoad(expr->expression()->AsVariableProxy());
3982  } else {
3983  // Reserve space for result of postfix operation.
3984  if (expr->is_postfix() && !context()->IsEffect()) {
3985  __ push(Immediate(Smi::FromInt(0)));
3986  }
3987  if (assign_type == NAMED_PROPERTY) {
3988  // Put the object both on the stack and in edx.
3989  VisitForAccumulatorValue(prop->obj());
3990  __ push(eax);
3991  __ mov(edx, eax);
3992  EmitNamedPropertyLoad(prop);
3993  } else {
3994  VisitForStackValue(prop->obj());
3995  VisitForStackValue(prop->key());
3996  __ mov(edx, Operand(esp, kPointerSize)); // Object.
3997  __ mov(ecx, Operand(esp, 0)); // Key.
3998  EmitKeyedPropertyLoad(prop);
3999  }
4000  }
4001 
4002  // We need a second deoptimization point after loading the value
4003  // in case evaluating the property load may have a side effect.
4004  if (assign_type == VARIABLE) {
4005  PrepareForBailout(expr->expression(), TOS_REG);
4006  } else {
4007  PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4008  }
4009 
4010  // Call ToNumber only if operand is not a smi.
4011  Label no_conversion;
4012  if (ShouldInlineSmiCase(expr->op())) {
4013  __ JumpIfSmi(eax, &no_conversion, Label::kNear);
4014  }
4015  ToNumberStub convert_stub;
4016  __ CallStub(&convert_stub);
4017  __ bind(&no_conversion);
4018 
4019  // Save result for postfix expressions.
4020  if (expr->is_postfix()) {
4021  if (!context()->IsEffect()) {
4022  // Save the result on the stack. If we have a named or keyed property
4023  // we store the result under the receiver that is currently on top
4024  // of the stack.
4025  switch (assign_type) {
4026  case VARIABLE:
4027  __ push(eax);
4028  break;
4029  case NAMED_PROPERTY:
4030  __ mov(Operand(esp, kPointerSize), eax);
4031  break;
4032  case KEYED_PROPERTY:
4033  __ mov(Operand(esp, 2 * kPointerSize), eax);
4034  break;
4035  }
4036  }
4037  }
4038 
4039  // Inline smi case if we are in a loop.
4040  Label done, stub_call;
4041  JumpPatchSite patch_site(masm_);
4042 
4043  if (ShouldInlineSmiCase(expr->op())) {
4044  if (expr->op() == Token::INC) {
4045  __ add(eax, Immediate(Smi::FromInt(1)));
4046  } else {
4047  __ sub(eax, Immediate(Smi::FromInt(1)));
4048  }
4049  __ j(overflow, &stub_call, Label::kNear);
4050  // We could eliminate this smi check if we split the code at
4051  // the first smi check before calling ToNumber.
4052  patch_site.EmitJumpIfSmi(eax, &done, Label::kNear);
4053 
4054  __ bind(&stub_call);
4055  // Call stub. Undo operation first.
4056  if (expr->op() == Token::INC) {
4057  __ sub(eax, Immediate(Smi::FromInt(1)));
4058  } else {
4059  __ add(eax, Immediate(Smi::FromInt(1)));
4060  }
4061  }
4062 
4063  // Record position before stub call.
4064  SetSourcePosition(expr->position());
4065 
4066  // Call stub for +1/-1.
4067  __ mov(edx, eax);
4068  __ mov(eax, Immediate(Smi::FromInt(1)));
4069  BinaryOpStub stub(expr->binary_op(), NO_OVERWRITE);
4070  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountBinOpFeedbackId());
4071  patch_site.EmitPatchInfo();
4072  __ bind(&done);
4073 
4074  // Store the value returned in eax.
4075  switch (assign_type) {
4076  case VARIABLE:
4077  if (expr->is_postfix()) {
4078  // Perform the assignment as if via '='.
4079  { EffectContext context(this);
4080  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4081  Token::ASSIGN);
4082  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4083  context.Plug(eax);
4084  }
4085  // For all contexts except EffectContext we have the result on
4086  // top of the stack.
4087  if (!context()->IsEffect()) {
4088  context()->PlugTOS();
4089  }
4090  } else {
4091  // Perform the assignment as if via '='.
4092  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4093  Token::ASSIGN);
4094  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4095  context()->Plug(eax);
4096  }
4097  break;
4098  case NAMED_PROPERTY: {
4099  __ mov(ecx, prop->key()->AsLiteral()->handle());
4100  __ pop(edx);
4101  Handle<Code> ic = is_classic_mode()
4102  ? isolate()->builtins()->StoreIC_Initialize()
4103  : isolate()->builtins()->StoreIC_Initialize_Strict();
4104  CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
4105  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4106  if (expr->is_postfix()) {
4107  if (!context()->IsEffect()) {
4108  context()->PlugTOS();
4109  }
4110  } else {
4111  context()->Plug(eax);
4112  }
4113  break;
4114  }
4115  case KEYED_PROPERTY: {
4116  __ pop(ecx);
4117  __ pop(edx);
4118  Handle<Code> ic = is_classic_mode()
4119  ? isolate()->builtins()->KeyedStoreIC_Initialize()
4120  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4121  CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
4122  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4123  if (expr->is_postfix()) {
4124  // Result is on the stack
4125  if (!context()->IsEffect()) {
4126  context()->PlugTOS();
4127  }
4128  } else {
4129  context()->Plug(eax);
4130  }
4131  break;
4132  }
4133  }
4134 }
4135 
4136 
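
The inline smi case above adds Immediate(Smi::FromInt(1)), i.e. the raw word 2, since ia32 smis keep a 31-bit payload above a zero tag bit; signed overflow of that add is precisely the stub_call bailout (which first undoes the add). A sketch of the arithmetic, assuming a GCC/Clang overflow builtin:

#include <cstdint>

// Returns false when the increment overflows, i.e. when the generated
// code would undo the add and call the BinaryOpStub instead.
inline bool SmiIncrementSketch(int32_t tagged_smi, int32_t* out) {
  return !__builtin_add_overflow(tagged_smi, 2, out);  // Tagged 1 == 2.
}
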
4137 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4138  VariableProxy* proxy = expr->AsVariableProxy();
4139  ASSERT(!context()->IsEffect());
4140  ASSERT(!context()->IsTest());
4141 
4142  if (proxy != NULL && proxy->var()->IsUnallocated()) {
4143  Comment cmnt(masm_, "Global variable");
4144  __ mov(edx, GlobalObjectOperand());
4145  __ mov(ecx, Immediate(proxy->name()));
4146  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
4147  // Use a regular load, not a contextual load, to avoid a reference
4148  // error.
4149  CallIC(ic);
4150  PrepareForBailout(expr, TOS_REG);
4151  context()->Plug(eax);
4152  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4153  Label done, slow;
4154 
4155  // Generate code for loading from variables potentially shadowed
4156  // by eval-introduced variables.
4157  EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
4158 
4159  __ bind(&slow);
4160  __ push(esi);
4161  __ push(Immediate(proxy->name()));
4162  __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
4163  PrepareForBailout(expr, TOS_REG);
4164  __ bind(&done);
4165 
4166  context()->Plug(eax);
4167  } else {
4168  // This expression cannot throw a reference error at the top level.
4169  VisitInDuplicateContext(expr);
4170  }
4171 }
4172 
4173 
4174 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4175  Expression* sub_expr,
4176  Handle<String> check) {
4177  Label materialize_true, materialize_false;
4178  Label* if_true = NULL;
4179  Label* if_false = NULL;
4180  Label* fall_through = NULL;
4181  context()->PrepareTest(&materialize_true, &materialize_false,
4182  &if_true, &if_false, &fall_through);
4183 
4184  { AccumulatorValueContext context(this);
4185  VisitForTypeofValue(sub_expr);
4186  }
4187  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4188 
4189  if (check->Equals(isolate()->heap()->number_symbol())) {
4190  __ JumpIfSmi(eax, if_true);
4191  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
4192  isolate()->factory()->heap_number_map());
4193  Split(equal, if_true, if_false, fall_through);
4194  } else if (check->Equals(isolate()->heap()->string_symbol())) {
4195  __ JumpIfSmi(eax, if_false);
4196  __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
4197  __ j(above_equal, if_false);
4198  // Check for undetectable objects => false.
4199  __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
4200  1 << Map::kIsUndetectable);
4201  Split(zero, if_true, if_false, fall_through);
4202  } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
4203  __ cmp(eax, isolate()->factory()->true_value());
4204  __ j(equal, if_true);
4205  __ cmp(eax, isolate()->factory()->false_value());
4206  Split(equal, if_true, if_false, fall_through);
4207  } else if (FLAG_harmony_typeof &&
4208  check->Equals(isolate()->heap()->null_symbol())) {
4209  __ cmp(eax, isolate()->factory()->null_value());
4210  Split(equal, if_true, if_false, fall_through);
4211  } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
4212  __ cmp(eax, isolate()->factory()->undefined_value());
4213  __ j(equal, if_true);
4214  __ JumpIfSmi(eax, if_false);
4215  // Check for undetectable objects => true.
4216  __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
4217  __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
4218  __ test(ecx, Immediate(1 << Map::kIsUndetectable));
4219  Split(not_zero, if_true, if_false, fall_through);
4220  } else if (check->Equals(isolate()->heap()->function_symbol())) {
4221  __ JumpIfSmi(eax, if_false);
4222  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4223  __ CmpObjectType(eax, JS_FUNCTION_TYPE, edx);
4224  __ j(equal, if_true);
4225  __ CmpInstanceType(edx, JS_FUNCTION_PROXY_TYPE);
4226  Split(equal, if_true, if_false, fall_through);
4227  } else if (check->Equals(isolate()->heap()->object_symbol())) {
4228  __ JumpIfSmi(eax, if_false);
4229  if (!FLAG_harmony_typeof) {
4230  __ cmp(eax, isolate()->factory()->null_value());
4231  __ j(equal, if_true);
4232  }
4233  __ CmpObjectType(eax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, edx);
4234  __ j(below, if_false);
4235  __ CmpInstanceType(edx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4236  __ j(above, if_false);
4237  // Check for undetectable objects => false.
4238  __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
4239  1 << Map::kIsUndetectable);
4240  Split(zero, if_true, if_false, fall_through);
4241  } else {
4242  if (if_false != fall_through) __ jmp(if_false);
4243  }
4244  context()->Plug(if_true, if_false);
4245 }
4246 
4247 
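
Each branch above resolves `typeof x == "literal"` from the map and instance type alone, never materializing the result string. A condensed C++ sketch of the decision ladder, with a hypothetical object model standing in for the emitted checks:

#include <string>

struct TypeofOperandSketch {
  bool is_smi, is_heap_number, is_string, is_undetectable, is_true, is_false,
       is_undefined, is_null, is_function_or_proxy, is_noncallable_object;
};

inline bool TypeofEquals(const TypeofOperandSketch& o,
                         const std::string& check, bool harmony_typeof) {
  if (check == "number")    return o.is_smi || o.is_heap_number;
  if (check == "string")    return o.is_string && !o.is_undetectable;
  if (check == "boolean")   return o.is_true || o.is_false;
  if (check == "null")      return harmony_typeof && o.is_null;
  if (check == "undefined") return o.is_undefined ||
                                   (!o.is_smi && o.is_undetectable);
  if (check == "function")  return !o.is_smi && o.is_function_or_proxy;
  if (check == "object")    return (!harmony_typeof && o.is_null) ||
                                   (o.is_noncallable_object &&
                                    !o.is_undetectable);
  return false;  // Unknown literal: jump straight to if_false.
}
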
4248 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4249  Comment cmnt(masm_, "[ CompareOperation");
4250  SetSourcePosition(expr->position());
4251 
4252  // First we try a fast inlined version of the compare when one of
4253  // the operands is a literal.
4254  if (TryLiteralCompare(expr)) return;
4255 
4256  // Always perform the comparison for its control flow. Pack the result
4257  // into the expression's context after the comparison is performed.
4258  Label materialize_true, materialize_false;
4259  Label* if_true = NULL;
4260  Label* if_false = NULL;
4261  Label* fall_through = NULL;
4262  context()->PrepareTest(&materialize_true, &materialize_false,
4263  &if_true, &if_false, &fall_through);
4264 
4265  Token::Value op = expr->op();
4266  VisitForStackValue(expr->left());
4267  switch (op) {
4268  case Token::IN:
4269  VisitForStackValue(expr->right());
4270  __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4271  PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4272  __ cmp(eax, isolate()->factory()->true_value());
4273  Split(equal, if_true, if_false, fall_through);
4274  break;
4275 
4276  case Token::INSTANCEOF: {
4277  VisitForStackValue(expr->right());
4278  InstanceofStub stub(InstanceofStub::kNoFlags);
4279  __ CallStub(&stub);
4280  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4281  __ test(eax, eax);
4282  // The stub returns 0 for true.
4283  Split(zero, if_true, if_false, fall_through);
4284  break;
4285  }
4286 
4287  default: {
4288  VisitForAccumulatorValue(expr->right());
4289  Condition cc = no_condition;
4290  switch (op) {
4291  case Token::EQ_STRICT:
4292  case Token::EQ:
4293  cc = equal;
4294  break;
4295  case Token::LT:
4296  cc = less;
4297  break;
4298  case Token::GT:
4299  cc = greater;
4300  break;
4301  case Token::LTE:
4302  cc = less_equal;
4303  break;
4304  case Token::GTE:
4305  cc = greater_equal;
4306  break;
4307  case Token::IN:
4308  case Token::INSTANCEOF:
4309  default:
4310  UNREACHABLE();
4311  }
4312  __ pop(edx);
4313 
4314  bool inline_smi_code = ShouldInlineSmiCase(op);
4315  JumpPatchSite patch_site(masm_);
4316  if (inline_smi_code) {
4317  Label slow_case;
4318  __ mov(ecx, edx);
4319  __ or_(ecx, eax);
4320  patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
4321  __ cmp(edx, eax);
4322  Split(cc, if_true, if_false, NULL);
4323  __ bind(&slow_case);
4324  }
4325 
4326  // Record position and call the compare IC.
4327  SetSourcePosition(expr->position());
4328  Handle<Code> ic = CompareIC::GetUninitialized(op);
4329  CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
4330  patch_site.EmitPatchInfo();
4331 
4332  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4333  __ test(eax, eax);
4334  Split(cc, if_true, if_false, fall_through);
4335  }
4336  }
4337 
4338  // Convert the result of the comparison into one expected for this
4339  // expression's context.
4340  context()->Plug(if_true, if_false);
4341 }
4342 
4343 
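
The inline fast path above relies on the ia32 smi representation: OR-ing the two tagged words leaves the low bit clear only when both are smis, in which case a plain register compare settles the whole comparison. A sketch of that tag test:

#include <cstdint>

// Low bit 0 marks a smi on ia32 (kSmiTag == 0, kSmiTagSize == 1).
inline bool BothSmiSketch(int32_t left_tagged, int32_t right_tagged) {
  return ((left_tagged | right_tagged) & 1) == 0;
}
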
4344 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4345  Expression* sub_expr,
4346  NilValue nil) {
4347  Label materialize_true, materialize_false;
4348  Label* if_true = NULL;
4349  Label* if_false = NULL;
4350  Label* fall_through = NULL;
4351  context()->PrepareTest(&materialize_true, &materialize_false,
4352  &if_true, &if_false, &fall_through);
4353 
4354  VisitForAccumulatorValue(sub_expr);
4355  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4356  Handle<Object> nil_value = nil == kNullValue ?
4357  isolate()->factory()->null_value() :
4358  isolate()->factory()->undefined_value();
4359  __ cmp(eax, nil_value);
4360  if (expr->op() == Token::EQ_STRICT) {
4361  Split(equal, if_true, if_false, fall_through);
4362  } else {
4363  Handle<Object> other_nil_value = nil == kNullValue ?
4364  isolate()->factory()->undefined_value() :
4365  isolate()->factory()->null_value();
4366  __ j(equal, if_true);
4367  __ cmp(eax, other_nil_value);
4368  __ j(equal, if_true);
4369  __ JumpIfSmi(eax, if_false);
4370  // It can be an undetectable object.
4371  __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
4372  __ movzx_b(edx, FieldOperand(edx, Map::kBitFieldOffset));
4373  __ test(edx, Immediate(1 << Map::kIsUndetectable));
4374  Split(not_zero, if_true, if_false, fall_through);
4375  }
4376  context()->Plug(if_true, if_false);
4377 }
4378 
4379 
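
The code above encodes the two flavors of nil comparison: `x === null` is a single identity check, while `x == null` also accepts the other nil value and undetectable host objects. A boolean sketch with hypothetical fields:

struct NilOperandSketch {
  bool is_null, is_undefined, is_smi, is_undetectable;
};

inline bool CompareNilSketch(const NilOperandSketch& x, bool strict,
                             bool nil_is_null) {
  bool hits_nil = nil_is_null ? x.is_null : x.is_undefined;
  if (strict) return hits_nil;              // EQ_STRICT: one value only.
  bool hits_other = nil_is_null ? x.is_undefined : x.is_null;
  if (hits_nil || hits_other) return true;  // == matches both nils...
  return !x.is_smi && x.is_undetectable;    // ...and undetectable objects.
}
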
4380 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4381  __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
4382  context()->Plug(eax);
4383 }
4384 
4385 
4386 Register FullCodeGenerator::result_register() {
4387  return eax;
4388 }
4389 
4390 
4391 Register FullCodeGenerator::context_register() {
4392  return esi;
4393 }
4394 
4395 
4396 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4397  ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
4398  __ mov(Operand(ebp, frame_offset), value);
4399 }
4400 
4401 
4402 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4403  __ mov(dst, ContextOperand(esi, context_index));
4404 }
4405 
4406 
4407 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4408  Scope* declaration_scope = scope()->DeclarationScope();
4409  if (declaration_scope->is_global_scope() ||
4410  declaration_scope->is_module_scope()) {
4411  // Contexts nested in the native context have a canonical empty function
4412  // as their closure, not the anonymous closure containing the global
4413  // code. Pass a smi sentinel and let the runtime look up the empty
4414  // function.
4415  __ push(Immediate(Smi::FromInt(0)));
4416  } else if (declaration_scope->is_eval_scope()) {
4417  // Contexts nested inside eval code have the same closure as the context
4418  // calling eval, not the anonymous closure containing the eval code.
4419  // Fetch it from the context.
4420  __ push(ContextOperand(esi, Context::CLOSURE_INDEX));
4421  } else {
4422  ASSERT(declaration_scope->is_function_scope());
4423  __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
4424  }
4425 }
4426 
4427 
4428 // ----------------------------------------------------------------------------
4429 // Non-local control flow support.
4430 
4431 void FullCodeGenerator::EnterFinallyBlock() {
4432  // Cook return address on top of stack (smi encoded Code* delta)
4433  ASSERT(!result_register().is(edx));
4434  __ pop(edx);
4435  __ sub(edx, Immediate(masm_->CodeObject()));
4436  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
4437  STATIC_ASSERT(kSmiTag == 0);
4438  __ SmiTag(edx);
4439  __ push(edx);
4440 
4441  // Store result register while executing finally block.
4442  __ push(result_register());
4443 
4444  // Store pending message while executing finally block.
4445  ExternalReference pending_message_obj =
4446  ExternalReference::address_of_pending_message_obj(isolate());
4447  __ mov(edx, Operand::StaticVariable(pending_message_obj));
4448  __ push(edx);
4449 
4450  ExternalReference has_pending_message =
4451  ExternalReference::address_of_has_pending_message(isolate());
4452  __ mov(edx, Operand::StaticVariable(has_pending_message));
4453  __ SmiTag(edx);
4454  __ push(edx);
4455 
4456  ExternalReference pending_message_script =
4457  ExternalReference::address_of_pending_message_script(isolate());
4458  __ mov(edx, Operand::StaticVariable(pending_message_script));
4459  __ push(edx);
4460 }
4461 
4462 
4463 void FullCodeGenerator::ExitFinallyBlock() {
4464  ASSERT(!result_register().is(edx));
4465  // Restore pending message from stack.
4466  __ pop(edx);
4467  ExternalReference pending_message_script =
4468  ExternalReference::address_of_pending_message_script(isolate());
4469  __ mov(Operand::StaticVariable(pending_message_script), edx);
4470 
4471  __ pop(edx);
4472  __ SmiUntag(edx);
4473  ExternalReference has_pending_message =
4474  ExternalReference::address_of_has_pending_message(isolate());
4475  __ mov(Operand::StaticVariable(has_pending_message), edx);
4476 
4477  __ pop(edx);
4478  ExternalReference pending_message_obj =
4479  ExternalReference::address_of_pending_message_obj(isolate());
4480  __ mov(Operand::StaticVariable(pending_message_obj), edx);
4481 
4482  // Restore result register from stack.
4483  __ pop(result_register());
4484 
4485  // Uncook return address.
4486  __ pop(edx);
4487  __ SmiUntag(edx);
4488  __ add(edx, Immediate(masm_->CodeObject()));
4489  __ jmp(edx);
4490 }
4491 
4492 
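
EnterFinallyBlock cooks the return address, storing it as a smi-tagged offset from the code object so a moving GC can relocate the code while the finally block runs; ExitFinallyBlock reverses the transformation. A sketch of the round trip, assuming a 32-bit build where SmiTag is a left shift by one:

#include <cstdint>

inline int32_t CookReturnAddress(uintptr_t ret, uintptr_t code_start) {
  return static_cast<int32_t>(ret - code_start) << 1;  // sub, then SmiTag.
}

inline uintptr_t UncookReturnAddress(int32_t cooked, uintptr_t code_start) {
  return code_start + (cooked >> 1);                   // SmiUntag, then add.
}
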
4493 #undef __
4494 
4495 #define __ ACCESS_MASM(masm())
4496 
4497 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4498  int* stack_depth,
4499  int* context_length) {
4500  // The macros used here must preserve the result register.
4501 
4502  // Because the handler block contains the context of the finally
4503  // code, we can restore it directly from there for the finally code
4504  // rather than iteratively unwinding contexts via their previous
4505  // links.
4506  __ Drop(*stack_depth); // Down to the handler block.
4507  if (*context_length > 0) {
4508  // Restore the context to its dedicated register and the stack.
4509  __ mov(esi, Operand(esp, StackHandlerConstants::kContextOffset));
4510  __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
4511  }
4512  __ PopTryHandler();
4513  __ call(finally_entry_);
4514 
4515  *stack_depth = 0;
4516  *context_length = 0;
4517  return previous_;
4518 }
4519 
4520 #undef __
4521 
4522 } } // namespace v8::internal
4523 
4524 #endif // V8_TARGET_ARCH_IA32