V8 3.11.10 (as shipped with Node.js 0.8.26) — Google's open source JavaScript engine.
Generated documentation listing for full-codegen-ia32.cc.
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if defined(V8_TARGET_ARCH_IA32)
31 
32 #include "code-stubs.h"
33 #include "codegen.h"
34 #include "compiler.h"
35 #include "debug.h"
36 #include "full-codegen.h"
37 #include "isolate-inl.h"
38 #include "parser.h"
39 #include "scopes.h"
40 #include "stub-cache.h"
41 
42 namespace v8 {
43 namespace internal {
44 
45 #define __ ACCESS_MASM(masm_)
46 
47 
48 class JumpPatchSite BASE_EMBEDDED {
49  public:
50  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
51 #ifdef DEBUG
52  info_emitted_ = false;
53 #endif
54  }
55 
56  ~JumpPatchSite() {
57  ASSERT(patch_site_.is_bound() == info_emitted_);
58  }
59 
60  void EmitJumpIfNotSmi(Register reg,
61  Label* target,
62  Label::Distance distance = Label::kFar) {
63  __ test(reg, Immediate(kSmiTagMask));
64  EmitJump(not_carry, target, distance); // Always taken before patched.
65  }
66 
67  void EmitJumpIfSmi(Register reg,
68  Label* target,
69  Label::Distance distance = Label::kFar) {
70  __ test(reg, Immediate(kSmiTagMask));
71  EmitJump(carry, target, distance); // Never taken before patched.
72  }
73 
74  void EmitPatchInfo() {
75  if (patch_site_.is_bound()) {
76  int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
77  ASSERT(is_int8(delta_to_patch_site));
78  __ test(eax, Immediate(delta_to_patch_site));
79 #ifdef DEBUG
80  info_emitted_ = true;
81 #endif
82  } else {
83  __ nop(); // Signals no inlined code.
84  }
85  }
86 
87  private:
88  // jc will be patched with jz, jnc will become jnz.
89  void EmitJump(Condition cc, Label* target, Label::Distance distance) {
90  ASSERT(!patch_site_.is_bound() && !info_emitted_);
91  ASSERT(cc == carry || cc == not_carry);
92  __ bind(&patch_site_);
93  __ j(cc, target, distance);
94  }
95 
96  MacroAssembler* masm_;
97  Label patch_site_;
98 #ifdef DEBUG
99  bool info_emitted_;
100 #endif
101 };
102 
103 
104 // Generate code for a JS function. On entry to the function the receiver
105 // and arguments have been pushed on the stack left to right, with the
106 // return address on top of them. The actual argument count matches the
107 // formal parameter count expected by the function.
108 //
109 // The live registers are:
110 // o edi: the JS function object being called (i.e. ourselves)
111 // o esi: our context
112 // o ebp: our caller's frame pointer
113 // o esp: stack pointer (pointing to return address)
114 //
115 // The function builds a JS frame. Please see JavaScriptFrameConstants in
116 // frames-ia32.h for its layout.
117 void FullCodeGenerator::Generate() {
118  CompilationInfo* info = info_;
119  handler_table_ =
120  isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
121  profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell(
122  Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget)));
123  SetFunctionPosition(function());
124  Comment cmnt(masm_, "[ function compiled by full code generator");
125 
126 #ifdef DEBUG
127  if (strlen(FLAG_stop_at) > 0 &&
128  info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
129  __ int3();
130  }
131 #endif
132 
133  // Strict mode functions and builtins need to replace the receiver
134  // with undefined when called as functions (without an explicit
135  // receiver object). ecx is zero for method calls and non-zero for
136  // function calls.
137  if (!info->is_classic_mode() || info->is_native()) {
138  Label ok;
139  __ test(ecx, ecx);
140  __ j(zero, &ok, Label::kNear);
141  // +1 for return address.
142  int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
143  __ mov(ecx, Operand(esp, receiver_offset));
144  __ JumpIfSmi(ecx, &ok);
145  __ CmpObjectType(ecx, JS_GLOBAL_PROXY_TYPE, ecx);
146  __ j(not_equal, &ok, Label::kNear);
147  __ mov(Operand(esp, receiver_offset),
148  Immediate(isolate()->factory()->undefined_value()));
149  __ bind(&ok);
150  }
151 
152  // Open a frame scope to indicate that there is a frame on the stack. The
153  // MANUAL indicates that the scope shouldn't actually generate code to set up
154  // the frame (that is done below).
155  FrameScope frame_scope(masm_, StackFrame::MANUAL);
156 
157  __ push(ebp); // Caller's frame pointer.
158  __ mov(ebp, esp);
159  __ push(esi); // Callee's context.
160  __ push(edi); // Callee's JS Function.
161 
162  { Comment cmnt(masm_, "[ Allocate locals");
163  int locals_count = info->scope()->num_stack_slots();
164  if (locals_count == 1) {
165  __ push(Immediate(isolate()->factory()->undefined_value()));
166  } else if (locals_count > 1) {
167  __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
168  for (int i = 0; i < locals_count; i++) {
169  __ push(eax);
170  }
171  }
172  }
173 
174  bool function_in_register = true;
175 
176  // Possibly allocate a local context.
177  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
178  if (heap_slots > 0) {
179  Comment cmnt(masm_, "[ Allocate local context");
180  // Argument to NewContext is the function, which is still in edi.
181  __ push(edi);
182  if (heap_slots <= FastNewContextStub::kMaximumSlots) {
183  FastNewContextStub stub(heap_slots);
184  __ CallStub(&stub);
185  } else {
186  __ CallRuntime(Runtime::kNewFunctionContext, 1);
187  }
188  function_in_register = false;
189  // Context is returned in both eax and esi. It replaces the context
190  // passed to us. It's saved in the stack and kept live in esi.
192 
193  // Copy parameters into context if necessary.
194  int num_parameters = info->scope()->num_parameters();
195  for (int i = 0; i < num_parameters; i++) {
196  Variable* var = scope()->parameter(i);
197  if (var->IsContextSlot()) {
198  int parameter_offset = StandardFrameConstants::kCallerSPOffset +
199  (num_parameters - 1 - i) * kPointerSize;
200  // Load parameter from stack.
201  __ mov(eax, Operand(ebp, parameter_offset));
202  // Store it in the context.
203  int context_offset = Context::SlotOffset(var->index());
204  __ mov(Operand(esi, context_offset), eax);
205  // Update the write barrier. This clobbers eax and ebx.
206  __ RecordWriteContextSlot(esi,
207  context_offset,
208  eax,
209  ebx,
211  }
212  }
213  }
214 
215  Variable* arguments = scope()->arguments();
216  if (arguments != NULL) {
217  // Function uses arguments object.
218  Comment cmnt(masm_, "[ Allocate arguments object");
219  if (function_in_register) {
220  __ push(edi);
221  } else {
223  }
224  // Receiver is just before the parameters on the caller's stack.
225  int num_parameters = info->scope()->num_parameters();
226  int offset = num_parameters * kPointerSize;
227  __ lea(edx,
228  Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
229  __ push(edx);
230  __ SafePush(Immediate(Smi::FromInt(num_parameters)));
231  // Arguments to ArgumentsAccessStub:
232  // function, receiver address, parameter count.
233  // The stub will rewrite receiver and parameter count if the previous
234  // stack frame was an arguments adapter frame.
236  if (!is_classic_mode()) {
238  } else if (function()->has_duplicate_parameters()) {
240  } else {
242  }
243  ArgumentsAccessStub stub(type);
244  __ CallStub(&stub);
245 
246  SetVar(arguments, eax, ebx, edx);
247  }
248 
249  if (FLAG_trace) {
250  __ CallRuntime(Runtime::kTraceEnter, 0);
251  }
252 
253  // Visit the declarations and body unless there is an illegal
254  // redeclaration.
255  if (scope()->HasIllegalRedeclaration()) {
256  Comment cmnt(masm_, "[ Declarations");
257  scope()->VisitIllegalRedeclaration(this);
258 
259  } else {
260  PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS);
261  { Comment cmnt(masm_, "[ Declarations");
262  // For named function expressions, declare the function name as a
263  // constant.
264  if (scope()->is_function_scope() && scope()->function() != NULL) {
265  VariableDeclaration* function = scope()->function();
266  ASSERT(function->proxy()->var()->mode() == CONST ||
267  function->proxy()->var()->mode() == CONST_HARMONY);
268  ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
269  VisitVariableDeclaration(function);
270  }
271  VisitDeclarations(scope()->declarations());
272  }
273 
274  { Comment cmnt(masm_, "[ Stack check");
275  PrepareForBailoutForId(AstNode::kDeclarationsId, NO_REGISTERS);
276  Label ok;
277  ExternalReference stack_limit =
278  ExternalReference::address_of_stack_limit(isolate());
279  __ cmp(esp, Operand::StaticVariable(stack_limit));
280  __ j(above_equal, &ok, Label::kNear);
281  StackCheckStub stub;
282  __ CallStub(&stub);
283  __ bind(&ok);
284  }
285 
286  { Comment cmnt(masm_, "[ Body");
287  ASSERT(loop_depth() == 0);
288  VisitStatements(function()->body());
289  ASSERT(loop_depth() == 0);
290  }
291  }
292 
293  // Always emit a 'return undefined' in case control fell off the end of
294  // the body.
295  { Comment cmnt(masm_, "[ return <undefined>;");
296  __ mov(eax, isolate()->factory()->undefined_value());
297  EmitReturnSequence();
298  }
299 }
300 
301 
302 void FullCodeGenerator::ClearAccumulator() {
303  __ Set(eax, Immediate(Smi::FromInt(0)));
304 }
305 
306 
307 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
308  __ mov(ebx, Immediate(profiling_counter_));
310  Immediate(Smi::FromInt(delta)));
311 }
312 
313 
314 void FullCodeGenerator::EmitProfilingCounterReset() {
315  int reset_value = FLAG_interrupt_budget;
316  if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
317  // Self-optimization is a one-off thing: if it fails, don't try again.
318  reset_value = Smi::kMaxValue;
319  }
320  if (isolate()->IsDebuggerActive()) {
321  // Detect debug break requests as soon as possible.
322  reset_value = 10;
323  }
324  __ mov(ebx, Immediate(profiling_counter_));
326  Immediate(Smi::FromInt(reset_value)));
327 }
328 
329 
330 static const int kMaxBackEdgeWeight = 127;
331 static const int kBackEdgeDistanceDivisor = 100;
332 
333 
334 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
335  Label* back_edge_target) {
336  Comment cmnt(masm_, "[ Stack check");
337  Label ok;
338 
339  if (FLAG_count_based_interrupts) {
340  int weight = 1;
341  if (FLAG_weighted_back_edges) {
342  ASSERT(back_edge_target->is_bound());
343  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
344  weight = Min(kMaxBackEdgeWeight,
345  Max(1, distance / kBackEdgeDistanceDivisor));
346  }
347  EmitProfilingCounterDecrement(weight);
348  __ j(positive, &ok, Label::kNear);
349  InterruptStub stub;
350  __ CallStub(&stub);
351  } else {
352  // Count based interrupts happen often enough when they are enabled
353  // that the additional stack checks are not necessary (they would
354  // only check for interrupts).
355  ExternalReference stack_limit =
356  ExternalReference::address_of_stack_limit(isolate());
357  __ cmp(esp, Operand::StaticVariable(stack_limit));
358  __ j(above_equal, &ok, Label::kNear);
359  StackCheckStub stub;
360  __ CallStub(&stub);
361  }
362 
363  // Record a mapping of this PC offset to the OSR id. This is used to find
364  // the AST id from the unoptimized code in order to use it as a key into
365  // the deoptimization input data found in the optimized code.
366  RecordStackCheck(stmt->OsrEntryId());
367 
368  // Loop stack checks can be patched to perform on-stack replacement. In
369  // order to decide whether or not to perform OSR we embed the loop depth
370  // in a test instruction after the call so we can extract it from the OSR
371  // builtin.
372  ASSERT(loop_depth() > 0);
373  __ test(eax, Immediate(Min(loop_depth(), Code::kMaxLoopNestingMarker)));
374 
375  if (FLAG_count_based_interrupts) {
376  EmitProfilingCounterReset();
377  }
378 
379  __ bind(&ok);
380  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
381  // Record a mapping of the OSR id to this PC. This is used if the OSR
382  // entry becomes the target of a bailout. We don't expect it to be, but
383  // we want it to work if it is.
384  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
385 }
386 
387 
388 void FullCodeGenerator::EmitReturnSequence() {
389  Comment cmnt(masm_, "[ Return sequence");
390  if (return_label_.is_bound()) {
391  __ jmp(&return_label_);
392  } else {
393  // Common return label
394  __ bind(&return_label_);
395  if (FLAG_trace) {
396  __ push(eax);
397  __ CallRuntime(Runtime::kTraceExit, 1);
398  }
399  if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
400  // Pretend that the exit is a backwards jump to the entry.
401  int weight = 1;
402  if (info_->ShouldSelfOptimize()) {
403  weight = FLAG_interrupt_budget / FLAG_self_opt_count;
404  } else if (FLAG_weighted_back_edges) {
405  int distance = masm_->pc_offset();
406  weight = Min(kMaxBackEdgeWeight,
407  Max(1, distance / kBackEdgeDistanceDivisor));
408  }
409  EmitProfilingCounterDecrement(weight);
410  Label ok;
411  __ j(positive, &ok, Label::kNear);
412  __ push(eax);
413  if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
415  __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
416  } else {
417  InterruptStub stub;
418  __ CallStub(&stub);
419  }
420  __ pop(eax);
421  EmitProfilingCounterReset();
422  __ bind(&ok);
423  }
424 #ifdef DEBUG
425  // Add a label for checking the size of the code used for returning.
426  Label check_exit_codesize;
427  masm_->bind(&check_exit_codesize);
428 #endif
429  SetSourcePosition(function()->end_position() - 1);
430  __ RecordJSReturn();
431  // Do not use the leave instruction here because it is too short to
432  // patch with the code required by the debugger.
433  __ mov(esp, ebp);
434  __ pop(ebp);
435 
436  int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize;
437  __ Ret(arguments_bytes, ecx);
438 #ifdef ENABLE_DEBUGGER_SUPPORT
439  // Check that the size of the code used for returning is large enough
440  // for the debugger's requirements.
442  masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
443 #endif
444  }
445 }
446 
447 
448 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
449  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
450 }
451 
452 
453 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
454  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
455  codegen()->GetVar(result_register(), var);
456 }
457 
458 
459 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
460  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
461  MemOperand operand = codegen()->VarOperand(var, result_register());
462  // Memory operands can be pushed directly.
463  __ push(operand);
464 }
465 
466 
467 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
468  // For simplicity we always test the accumulator register.
469  codegen()->GetVar(result_register(), var);
470  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
471  codegen()->DoTest(this);
472 }
473 
474 
475 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
476  UNREACHABLE(); // Not used on IA32.
477 }
478 
479 
480 void FullCodeGenerator::AccumulatorValueContext::Plug(
481  Heap::RootListIndex index) const {
482  UNREACHABLE(); // Not used on IA32.
483 }
484 
485 
486 void FullCodeGenerator::StackValueContext::Plug(
487  Heap::RootListIndex index) const {
488  UNREACHABLE(); // Not used on IA32.
489 }
490 
491 
492 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
493  UNREACHABLE(); // Not used on IA32.
494 }
495 
496 
497 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
498 }
499 
500 
501 void FullCodeGenerator::AccumulatorValueContext::Plug(
502  Handle<Object> lit) const {
503  if (lit->IsSmi()) {
504  __ SafeSet(result_register(), Immediate(lit));
505  } else {
506  __ Set(result_register(), Immediate(lit));
507  }
508 }
509 
510 
511 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
512  if (lit->IsSmi()) {
513  __ SafePush(Immediate(lit));
514  } else {
515  __ push(Immediate(lit));
516  }
517 }
518 
519 
520 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
521  codegen()->PrepareForBailoutBeforeSplit(condition(),
522  true,
523  true_label_,
524  false_label_);
525  ASSERT(!lit->IsUndetectableObject()); // There are no undetectable literals.
526  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
527  if (false_label_ != fall_through_) __ jmp(false_label_);
528  } else if (lit->IsTrue() || lit->IsJSObject()) {
529  if (true_label_ != fall_through_) __ jmp(true_label_);
530  } else if (lit->IsString()) {
531  if (String::cast(*lit)->length() == 0) {
532  if (false_label_ != fall_through_) __ jmp(false_label_);
533  } else {
534  if (true_label_ != fall_through_) __ jmp(true_label_);
535  }
536  } else if (lit->IsSmi()) {
537  if (Smi::cast(*lit)->value() == 0) {
538  if (false_label_ != fall_through_) __ jmp(false_label_);
539  } else {
540  if (true_label_ != fall_through_) __ jmp(true_label_);
541  }
542  } else {
543  // For simplicity we always test the accumulator register.
544  __ mov(result_register(), lit);
545  codegen()->DoTest(this);
546  }
547 }
548 
549 
550 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
551  Register reg) const {
552  ASSERT(count > 0);
553  __ Drop(count);
554 }
555 
556 
557 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
558  int count,
559  Register reg) const {
560  ASSERT(count > 0);
561  __ Drop(count);
562  __ Move(result_register(), reg);
563 }
564 
565 
566 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
567  Register reg) const {
568  ASSERT(count > 0);
569  if (count > 1) __ Drop(count - 1);
570  __ mov(Operand(esp, 0), reg);
571 }
572 
573 
574 void FullCodeGenerator::TestContext::DropAndPlug(int count,
575  Register reg) const {
576  ASSERT(count > 0);
577  // For simplicity we always test the accumulator register.
578  __ Drop(count);
579  __ Move(result_register(), reg);
580  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
581  codegen()->DoTest(this);
582 }
583 
584 
585 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
586  Label* materialize_false) const {
587  ASSERT(materialize_true == materialize_false);
588  __ bind(materialize_true);
589 }
590 
591 
592 void FullCodeGenerator::AccumulatorValueContext::Plug(
593  Label* materialize_true,
594  Label* materialize_false) const {
595  Label done;
596  __ bind(materialize_true);
597  __ mov(result_register(), isolate()->factory()->true_value());
598  __ jmp(&done, Label::kNear);
599  __ bind(materialize_false);
600  __ mov(result_register(), isolate()->factory()->false_value());
601  __ bind(&done);
602 }
603 
604 
605 void FullCodeGenerator::StackValueContext::Plug(
606  Label* materialize_true,
607  Label* materialize_false) const {
608  Label done;
609  __ bind(materialize_true);
610  __ push(Immediate(isolate()->factory()->true_value()));
611  __ jmp(&done, Label::kNear);
612  __ bind(materialize_false);
613  __ push(Immediate(isolate()->factory()->false_value()));
614  __ bind(&done);
615 }
616 
617 
618 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
619  Label* materialize_false) const {
620  ASSERT(materialize_true == true_label_);
621  ASSERT(materialize_false == false_label_);
622 }
623 
624 
625 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
626 }
627 
628 
629 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
630  Handle<Object> value = flag
631  ? isolate()->factory()->true_value()
632  : isolate()->factory()->false_value();
633  __ mov(result_register(), value);
634 }
635 
636 
637 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
638  Handle<Object> value = flag
639  ? isolate()->factory()->true_value()
640  : isolate()->factory()->false_value();
641  __ push(Immediate(value));
642 }
643 
644 
645 void FullCodeGenerator::TestContext::Plug(bool flag) const {
646  codegen()->PrepareForBailoutBeforeSplit(condition(),
647  true,
648  true_label_,
649  false_label_);
650  if (flag) {
651  if (true_label_ != fall_through_) __ jmp(true_label_);
652  } else {
653  if (false_label_ != fall_through_) __ jmp(false_label_);
654  }
655 }
656 
657 
658 void FullCodeGenerator::DoTest(Expression* condition,
659  Label* if_true,
660  Label* if_false,
661  Label* fall_through) {
662  ToBooleanStub stub(result_register());
663  __ push(result_register());
664  __ CallStub(&stub, condition->test_id());
665  __ test(result_register(), result_register());
666  // The stub returns nonzero for true.
667  Split(not_zero, if_true, if_false, fall_through);
668 }
669 
670 
671 void FullCodeGenerator::Split(Condition cc,
672  Label* if_true,
673  Label* if_false,
674  Label* fall_through) {
675  if (if_false == fall_through) {
676  __ j(cc, if_true);
677  } else if (if_true == fall_through) {
678  __ j(NegateCondition(cc), if_false);
679  } else {
680  __ j(cc, if_true);
681  __ jmp(if_false);
682  }
683 }
684 
685 
686 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
687  ASSERT(var->IsStackAllocated());
688  // Offset is negative because higher indexes are at lower addresses.
689  int offset = -var->index() * kPointerSize;
690  // Adjust by a (parameter or local) base offset.
691  if (var->IsParameter()) {
692  offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
693  } else {
695  }
696  return Operand(ebp, offset);
697 }
698 
699 
700 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
701  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
702  if (var->IsContextSlot()) {
703  int context_chain_length = scope()->ContextChainLength(var->scope());
704  __ LoadContext(scratch, context_chain_length);
705  return ContextOperand(scratch, var->index());
706  } else {
707  return StackOperand(var);
708  }
709 }
710 
711 
712 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
713  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
714  MemOperand location = VarOperand(var, dest);
715  __ mov(dest, location);
716 }
717 
718 
719 void FullCodeGenerator::SetVar(Variable* var,
720  Register src,
721  Register scratch0,
722  Register scratch1) {
723  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
724  ASSERT(!scratch0.is(src));
725  ASSERT(!scratch0.is(scratch1));
726  ASSERT(!scratch1.is(src));
727  MemOperand location = VarOperand(var, scratch0);
728  __ mov(location, src);
729 
730  // Emit the write barrier code if the location is in the heap.
731  if (var->IsContextSlot()) {
732  int offset = Context::SlotOffset(var->index());
733  ASSERT(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
734  __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
735  }
736 }
737 
738 
739 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
740  bool should_normalize,
741  Label* if_true,
742  Label* if_false) {
743  // Only prepare for bailouts before splits if we're in a test
744  // context. Otherwise, we let the Visit function deal with the
745  // preparation to avoid preparing with the same AST id twice.
746  if (!context()->IsTest() || !info_->IsOptimizable()) return;
747 
748  Label skip;
749  if (should_normalize) __ jmp(&skip, Label::kNear);
750  PrepareForBailout(expr, TOS_REG);
751  if (should_normalize) {
752  __ cmp(eax, isolate()->factory()->true_value());
753  Split(equal, if_true, if_false, NULL);
754  __ bind(&skip);
755  }
756 }
757 
758 
759 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
760  // The variable in the declaration always resides in the current function
761  // context.
762  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
763  if (FLAG_debug_code) {
764  // Check that we're not inside a with or catch context.
766  __ cmp(ebx, isolate()->factory()->with_context_map());
767  __ Check(not_equal, "Declaration in with context.");
768  __ cmp(ebx, isolate()->factory()->catch_context_map());
769  __ Check(not_equal, "Declaration in catch context.");
770  }
771 }
772 
773 
774 void FullCodeGenerator::VisitVariableDeclaration(
775  VariableDeclaration* declaration) {
776  // If it was not possible to allocate the variable at compile time, we
777  // need to "declare" it at runtime to make sure it actually exists in the
778  // local context.
779  VariableProxy* proxy = declaration->proxy();
780  VariableMode mode = declaration->mode();
781  Variable* variable = proxy->var();
782  bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
783  switch (variable->location()) {
785  globals_->Add(variable->name(), zone());
786  globals_->Add(variable->binding_needs_init()
787  ? isolate()->factory()->the_hole_value()
788  : isolate()->factory()->undefined_value(), zone());
789  break;
790 
791  case Variable::PARAMETER:
792  case Variable::LOCAL:
793  if (hole_init) {
794  Comment cmnt(masm_, "[ VariableDeclaration");
795  __ mov(StackOperand(variable),
796  Immediate(isolate()->factory()->the_hole_value()));
797  }
798  break;
799 
800  case Variable::CONTEXT:
801  if (hole_init) {
802  Comment cmnt(masm_, "[ VariableDeclaration");
803  EmitDebugCheckDeclarationContext(variable);
804  __ mov(ContextOperand(esi, variable->index()),
805  Immediate(isolate()->factory()->the_hole_value()));
806  // No write barrier since the hole value is in old space.
807  PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
808  }
809  break;
810 
811  case Variable::LOOKUP: {
812  Comment cmnt(masm_, "[ VariableDeclaration");
813  __ push(esi);
814  __ push(Immediate(variable->name()));
815  // VariableDeclaration nodes are always introduced in one of four modes.
816  ASSERT(mode == VAR || mode == LET ||
817  mode == CONST || mode == CONST_HARMONY);
818  PropertyAttributes attr = (mode == CONST || mode == CONST_HARMONY)
819  ? READ_ONLY : NONE;
820  __ push(Immediate(Smi::FromInt(attr)));
821  // Push initial value, if any.
822  // Note: For variables we must not push an initial value (such as
823  // 'undefined') because we may have a (legal) redeclaration and we
824  // must not destroy the current value.
825  if (hole_init) {
826  __ push(Immediate(isolate()->factory()->the_hole_value()));
827  } else {
828  __ push(Immediate(Smi::FromInt(0))); // Indicates no initial value.
829  }
830  __ CallRuntime(Runtime::kDeclareContextSlot, 4);
831  break;
832  }
833  }
834 }
835 
836 
837 void FullCodeGenerator::VisitFunctionDeclaration(
838  FunctionDeclaration* declaration) {
839  VariableProxy* proxy = declaration->proxy();
840  Variable* variable = proxy->var();
841  switch (variable->location()) {
842  case Variable::UNALLOCATED: {
843  globals_->Add(variable->name(), zone());
844  Handle<SharedFunctionInfo> function =
845  Compiler::BuildFunctionInfo(declaration->fun(), script());
846  // Check for stack-overflow exception.
847  if (function.is_null()) return SetStackOverflow();
848  globals_->Add(function, zone());
849  break;
850  }
851 
852  case Variable::PARAMETER:
853  case Variable::LOCAL: {
854  Comment cmnt(masm_, "[ FunctionDeclaration");
855  VisitForAccumulatorValue(declaration->fun());
856  __ mov(StackOperand(variable), result_register());
857  break;
858  }
859 
860  case Variable::CONTEXT: {
861  Comment cmnt(masm_, "[ FunctionDeclaration");
862  EmitDebugCheckDeclarationContext(variable);
863  VisitForAccumulatorValue(declaration->fun());
864  __ mov(ContextOperand(esi, variable->index()), result_register());
865  // We know that we have written a function, which is not a smi.
866  __ RecordWriteContextSlot(esi,
867  Context::SlotOffset(variable->index()),
868  result_register(),
869  ecx,
873  PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
874  break;
875  }
876 
877  case Variable::LOOKUP: {
878  Comment cmnt(masm_, "[ FunctionDeclaration");
879  __ push(esi);
880  __ push(Immediate(variable->name()));
881  __ push(Immediate(Smi::FromInt(NONE)));
882  VisitForStackValue(declaration->fun());
883  __ CallRuntime(Runtime::kDeclareContextSlot, 4);
884  break;
885  }
886  }
887 }
888 
889 
890 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
891  VariableProxy* proxy = declaration->proxy();
892  Variable* variable = proxy->var();
893  Handle<JSModule> instance = declaration->module()->interface()->Instance();
894  ASSERT(!instance.is_null());
895 
896  switch (variable->location()) {
897  case Variable::UNALLOCATED: {
898  Comment cmnt(masm_, "[ ModuleDeclaration");
899  globals_->Add(variable->name(), zone());
900  globals_->Add(instance, zone());
901  Visit(declaration->module());
902  break;
903  }
904 
905  case Variable::CONTEXT: {
906  Comment cmnt(masm_, "[ ModuleDeclaration");
907  EmitDebugCheckDeclarationContext(variable);
908  __ mov(ContextOperand(esi, variable->index()), Immediate(instance));
909  Visit(declaration->module());
910  break;
911  }
912 
913  case Variable::PARAMETER:
914  case Variable::LOCAL:
915  case Variable::LOOKUP:
916  UNREACHABLE();
917  }
918 }
919 
920 
921 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
922  VariableProxy* proxy = declaration->proxy();
923  Variable* variable = proxy->var();
924  switch (variable->location()) {
926  // TODO(rossberg)
927  break;
928 
929  case Variable::CONTEXT: {
930  Comment cmnt(masm_, "[ ImportDeclaration");
931  EmitDebugCheckDeclarationContext(variable);
932  // TODO(rossberg)
933  break;
934  }
935 
936  case Variable::PARAMETER:
937  case Variable::LOCAL:
938  case Variable::LOOKUP:
939  UNREACHABLE();
940  }
941 }
942 
943 
944 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
945  // TODO(rossberg)
946 }
947 
948 
// Declares all global bindings collected in globals_ (interleaved name/value
// pairs, see e.g. VisitModuleDeclaration) in one runtime call.
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ push(esi);  // The context is the first argument.
  __ push(Immediate(pairs));
  __ push(Immediate(Smi::FromInt(DeclareGlobalsFlags())));
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}
957 
958 
// Compiles a switch statement in two passes: first all the clause label
// comparisons (each performed with '===' semantics, with an inline smi fast
// case), then all the clause bodies.  Emitting bodies in a separate pass
// lets execution fall through from one clause body to the next.  The switch
// tag value stays on the stack until a clause matches.
void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ mov(edx, Operand(esp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      // Fast case: if both operands are smis, compare them directly and
      // skip the compare IC below entirely.
      Label slow_case;
      __ mov(ecx, edx);
      __ or_(ecx, eax);
      patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);

      __ cmp(edx, eax);
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
    CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
    patch_site.EmitPatchInfo();
    // The compare IC leaves zero in eax on equality (a non-zero result
    // branches back to try the next clause).
    __ test(eax, eax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
  } else {
    __ jmp(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}
1040 
1041 
// Compiles a for-in loop.  A five-slot frame is built on the stack
// (enumerable object, map or smi flag, cache/fixed array, length, index),
// the loop then walks the index, filtering out properties removed during
// iteration via the FILTER_KEY builtin.
//
// NOTE(review): this copy of the file appears to have lost a number of
// lines during extraction (the embedded source numbering skips in several
// places); each gap is marked below.  Do not treat this block as a
// compilable unit until it is diffed against the pristine v8 3.11 source.
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. Both SpiderMonkey and JSC
  // ignore null and undefined in contrast to the specification; see
  // ECMA-262 section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ j(equal, &exit);
  __ cmp(eax, isolate()->factory()->null_value());
  __ j(equal, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(eax, &convert, Label::kNear);
  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
  __ j(above_equal, &done_convert, Label::kNear);
  __ bind(&convert);
  __ push(eax);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  __ push(eax);

  // Check for proxies.
  Label call_runtime, use_cache, fixed_array;
  // NOTE(review): one line missing here (numbering skips 1073 -> 1075).
  __ CmpObjectType(eax, LAST_JS_PROXY_TYPE, ecx);
  __ j(below_equal, &call_runtime);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(&call_runtime);

  // NOTE(review): one line missing here (numbering skips 1083 -> 1085).
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(eax);
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
  // NOTE(review): the first half of a __ cmp(...) is missing here
  // (numbering skips 1090 -> 1092); the next line is its dangling argument.
         isolate()->factory()->meta_map());
  __ j(not_equal, &fixed_array);


  // We got a map in register eax. Get the enumeration cache from it.
  __ bind(&use_cache);
  __ LoadInstanceDescriptors(eax, ecx);
  // NOTE(review): two lines missing here (numbering skips 1098 -> 1101);
  // presumably the loads that leave the enumeration cache in edx, used by
  // the pushes below -- TODO confirm against pristine source.

  // Set up the four remaining stack slots.
  __ push(eax);  // Map.
  __ push(edx);  // Enumeration cache.
  // NOTE(review): one line missing here (numbering skips 1104 -> 1106);
  // presumably the load of the cache length into eax -- TODO confirm.
  __ push(eax);  // Enumeration cache length (as smi).
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.
  __ jmp(&loop);

  // We got a fixed array in register eax. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  Handle<JSGlobalPropertyCell> cell =
      isolate()->factory()->NewJSGlobalPropertyCell(
          Handle<Object>(
  // NOTE(review): the closing argument line of this expression is missing
  // (numbering skips 1116 -> 1118).
  RecordTypeFeedbackCell(stmt->PrepareId(), cell);
  __ LoadHeapObject(ebx, cell);
  // NOTE(review): two lines missing here (numbering skips 1119 -> 1122).

  __ mov(ebx, Immediate(Smi::FromInt(1)));  // Smi indicates slow check
  __ mov(ecx, Operand(esp, 0 * kPointerSize));  // Get enumerated object
  // NOTE(review): one line missing here (numbering skips 1124 -> 1126).
  __ CmpObjectType(ecx, LAST_JS_PROXY_TYPE, ecx);
  __ j(above, &non_proxy);
  __ mov(ebx, Immediate(Smi::FromInt(0)));  // Zero indicates proxy
  __ bind(&non_proxy);
  __ push(ebx);  // Smi
  __ push(eax);  // Array
  // NOTE(review): one line missing here (numbering skips 1131 -> 1133);
  // presumably the load of the fixed array length into eax -- TODO confirm.
  __ push(eax);  // Fixed array length (as smi).
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  __ mov(eax, Operand(esp, 0 * kPointerSize));  // Get the current index.
  __ cmp(eax, Operand(esp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_label());

  // Get the current entry of the array into register ebx.
  __ mov(ebx, Operand(esp, 2 * kPointerSize));
  // NOTE(review): one line missing here (numbering skips 1144 -> 1146);
  // presumably the indexed element load into ebx -- TODO confirm.

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register edx.
  __ mov(edx, Operand(esp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ mov(ecx, Operand(esp, 4 * kPointerSize));
  // NOTE(review): one line missing here (numbering skips 1154 -> 1156);
  // the comparison feeding this j(equal, ...) -- TODO confirm.
  __ j(equal, &update_each, Label::kNear);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  ASSERT(Smi::FromInt(0) == 0);
  __ test(edx, edx);
  __ j(zero, &update_each);

  // Convert the entry to a string or null if it isn't a property
  // anymore. If the property has been removed while iterating, we
  // just skip it.
  __ push(ecx);  // Enumerable.
  __ push(ebx);  // Current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ test(eax, eax);
  __ j(equal, loop_statement.continue_label());
  __ mov(ebx, eax);

  // Update the 'each' property or variable from the possibly filtered
  // entry in register ebx.
  __ bind(&update_each);
  __ mov(result_register(), ebx);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));

  EmitStackCheck(stmt, &loop);
  __ jmp(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ add(esp, Immediate(5 * kPointerSize));

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}
1203 
1204 
1205 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1206  bool pretenure) {
1207  // Use the fast case closure allocation code that allocates in new
1208  // space for nested functions that don't need literals cloning. If
1209  // we're running with the --always-opt or the --prepare-always-opt
1210  // flag, we need to use the runtime function so that the new function
1211  // we are creating here gets a chance to have its code optimized and
1212  // doesn't just get a copy of the existing unoptimized code.
1213  if (!FLAG_always_opt &&
1214  !FLAG_prepare_always_opt &&
1215  !pretenure &&
1216  scope()->is_function_scope() &&
1217  info->num_literals() == 0) {
1218  FastNewClosureStub stub(info->language_mode());
1219  __ push(Immediate(info));
1220  __ CallStub(&stub);
1221  } else {
1222  __ push(esi);
1223  __ push(Immediate(info));
1224  __ push(Immediate(pretenure
1225  ? isolate()->factory()->true_value()
1226  : isolate()->factory()->false_value()));
1227  __ CallRuntime(Runtime::kNewClosure, 3);
1228  }
1229  context()->Plug(eax);
1230 }
1231 
1232 
// A variable reference compiles to a plain variable load; all the real work
// happens in EmitVariableLoad below.
void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}
1237 
1238 
// Emits the fast path for loading global |var| when intervening scopes may
// carry eval-introduced context extensions: walk the context chain, jumping
// to |slow| whenever an extension object is found, and finally perform a
// global load IC.  |typeof_state| selects the relocation mode of the IC
// call (typeof loads must not be treated as contextual).
//
// NOTE(review): this copy of the file lost two lines (the embedded
// numbering skips 1249 -> 1251 and 1274 -> 1276); the dangling argument
// lines below are the remains of two __ cmp(...) instructions.
void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register context = esi;
  Register temp = edx;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {
        // Check that extension is NULL.
        // NOTE(review): first half of this comparison is missing here.
               Immediate(0));
        __ j(not_equal, slow);
      }
      // Load next context in chain.
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions. If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s != NULL && s->is_eval_scope()) {
    // Loop up the context chain. There is no frame effect so it is
    // safe to use raw labels here.
    Label next, fast;
    if (!context.is(temp)) {
      __ mov(temp, context);
    }
    __ bind(&next);
    // Terminate at global context.
    // NOTE(review): first half of this comparison is missing here.
           Immediate(isolate()->factory()->global_context_map()));
    __ j(equal, &fast, Label::kNear);
    // Check that extension is NULL.
    __ cmp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
    __ j(not_equal, slow);
    // Load next context in chain.
    __ mov(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
    __ jmp(&next);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a global
  // load IC call.
  __ mov(edx, GlobalObjectOperand());
  __ mov(ecx, var->name());
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
      ? RelocInfo::CODE_TARGET
      : RelocInfo::CODE_TARGET_CONTEXT;
  CallIC(ic, mode);
}
1297 
1298 
// Returns a MemOperand addressing context-allocated variable |var|, emitting
// checks along the context chain that no intervening context carries an
// eval-introduced extension object (otherwise jump to |slow|).  Load-only:
// the returned operand may be esi-relative (see comment at the return).
//
// NOTE(review): this copy of the file lost one line (numbering skips
// 1308 -> 1310); the dangling `Immediate(0));` below is the tail of a
// __ cmp(...) instruction.
MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  ASSERT(var->IsContextSlot());
  Register context = esi;
  Register temp = ebx;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {
        // Check that extension is NULL.
        // NOTE(review): first half of this comparison is missing here.
               Immediate(0));
        __ j(not_equal, slow);
      }
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
  }
  // Check that last extension is NULL.
  __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
  __ j(not_equal, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return an esi-based operand (the write barrier cannot be allowed to
  // destroy the esi register).
  return ContextOperand(context, var->index());
}
1327 
1328 
1329 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
1330  TypeofState typeof_state,
1331  Label* slow,
1332  Label* done) {
1333  // Generate fast-case code for variables that might be shadowed by
1334  // eval-introduced variables. Eval is used a lot without
1335  // introducing variables. In those cases, we do not want to
1336  // perform a runtime call for all variables in the scope
1337  // containing the eval.
1338  if (var->mode() == DYNAMIC_GLOBAL) {
1339  EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
1340  __ jmp(done);
1341  } else if (var->mode() == DYNAMIC_LOCAL) {
1342  Variable* local = var->local_if_not_shadowed();
1343  __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
1344  if (local->mode() == CONST ||
1345  local->mode() == CONST_HARMONY ||
1346  local->mode() == LET) {
1347  __ cmp(eax, isolate()->factory()->the_hole_value());
1348  __ j(not_equal, done);
1349  if (local->mode() == CONST) {
1350  __ mov(eax, isolate()->factory()->undefined_value());
1351  } else { // LET || CONST_HARMONY
1352  __ push(Immediate(var->name()));
1353  __ CallRuntime(Runtime::kThrowReferenceError, 1);
1354  }
1355  }
1356  __ jmp(done);
1357  }
1358 }
1359 
1360 
// Loads the value of |proxy|'s variable into the current expression context.
// Three cases: global variables (load IC), stack/context-allocated variables
// (direct slot access, with a hole check for let/const where required), and
// lookup variables (runtime call, preceded by a dynamic fast path).
void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "Global variable");
      // Use inline caching. Variable name is passed in ecx and the global
      // object in eax.
      __ mov(edx, GlobalObjectOperand());
      __ mov(ecx, var->name());
      Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
      CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
      context()->Plug(eax);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot()
                              ? "Context variable"
                              : "Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        ASSERT(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
          ASSERT(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          Label done;
          GetVar(eax, var);
          // The hole marks a binding that has not been initialized yet.
          __ cmp(eax, isolate()->factory()->the_hole_value());
          __ j(not_equal, &done, Label::kNear);
          if (var->mode() == LET || var->mode() == CONST_HARMONY) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            __ push(Immediate(var->name()));
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
          } else {
            // Uninitalized const bindings outside of harmony mode are unholed.
            ASSERT(var->mode() == CONST);
            __ mov(eax, isolate()->factory()->undefined_value());
          }
          __ bind(&done);
          context()->Plug(eax);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      Comment cmnt(masm_, "Lookup variable");
      __ push(esi);  // Context.
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kLoadContextSlot, 2);
      __ bind(&done);
      context()->Plug(eax);
      break;
    }
  }
}
1463 
1464 
// Compiles a regexp literal: reuse the materialized regexp from the literals
// array if present, otherwise materialize it via the runtime; then allocate
// a fresh clone and copy the regexp object's fields into it word by word.
//
// NOTE(review): this copy of the file lost lines during extraction (the
// embedded numbering skips 1472 -> 1475 and 1490 -> 1492); each gap is
// marked below.
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // edi = JS function.
  // ecx = literals array.
  // ebx = regexp literal.
  // eax = regexp literal clone.
  // NOTE(review): two setup lines missing here -- presumably the loads of
  // edi and ecx described by the register comments above; TODO confirm
  // against pristine source.
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ mov(ebx, FieldOperand(ecx, literal_offset));
  __ cmp(ebx, isolate()->factory()->undefined_value());
  __ j(not_equal, &materialized, Label::kNear);

  // Create regexp literal using runtime function
  // Result will be in eax.
  __ push(ecx);
  __ push(Immediate(Smi::FromInt(expr->literal_index())));
  __ push(Immediate(expr->pattern()));
  __ push(Immediate(expr->flags()));
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ mov(ebx, eax);

  __ bind(&materialized);
  // NOTE(review): the declaration of |size| is missing here (numbering
  // skips 1490 -> 1492); it is used below as the allocation/copy size.
  Label allocated, runtime_allocate;
  __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ push(ebx);
  __ push(Immediate(Smi::FromInt(size)));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(ebx);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ mov(edx, FieldOperand(ebx, i));
    __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
    __ mov(FieldOperand(eax, i), edx);
    __ mov(FieldOperand(eax, i + kPointerSize), ecx);
  }
  // Copy the trailing word when the object size is an odd number of words.
  if ((size % (2 * kPointerSize)) != 0) {
    __ mov(edx, FieldOperand(ebx, size - kPointerSize));
    __ mov(FieldOperand(eax, size - kPointerSize), edx);
  }
  context()->Plug(eax);
}
1517 
1518 
1519 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1520  if (expression == NULL) {
1521  __ push(Immediate(isolate()->factory()->null_value()));
1522  } else {
1523  VisitForStackValue(expression);
1524  }
1525 }
1526 
1527 
// Compiles an object literal: clone the constant boilerplate (stub, shallow
// runtime call, or deep runtime call depending on depth/flags), then emit
// stores for each non-compile-time property, batching getter/setter pairs
// into single DefineOrRedefineAccessorProperty calls.
//
// NOTE(review): this copy of the file lost lines during extraction -- the
// embedded numbering skips over the boilerplate pushes, both arms of the
// |flags| ternaries, half of one condition, and every `case
// ObjectLiteral::Property::...:` label in the switch below.  The gaps are
// marked inline; diff against the pristine v8 3.11 source before relying
// on this block.
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");
  Handle<FixedArray> constant_properties = expr->constant_properties();
  // NOTE(review): two lines missing here (numbering skips 1530 -> 1533).
  __ push(Immediate(Smi::FromInt(expr->literal_index())));
  __ push(Immediate(constant_properties));
  int flags = expr->fast_elements()
  // NOTE(review): ternary arms missing here (numbering skips 1535 -> 1538).
  flags |= expr->has_function()
  // NOTE(review): ternary arms missing here (numbering skips 1538 -> 1541).
  __ push(Immediate(Smi::FromInt(flags)));
  int properties_count = constant_properties->length() / 2;
  if (expr->depth() > 1) {
    // Nested literals must be created by the (deep) runtime path.
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else if (flags != ObjectLiteral::kFastElements ||
  // NOTE(review): second half of this condition missing (skips 1545 -> 1547).
    __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
  } else {
    FastCloneShallowObjectStub stub(properties_count);
    __ CallStub(&stub);
  }

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in eax.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(isolate()->zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    // Compile-time values are already part of the boilerplate.
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(eax);  // Save result on the stack
      result_saved = true;
    }
    switch (property->kind()) {
      // NOTE(review): case label missing here (numbering skips 1573 -> 1575).
        UNREACHABLE();
      // NOTE(review): case label(s) missing here (skips 1575 -> 1578).
        // Fall through.
      // NOTE(review): case label missing here (skips 1578 -> 1580).
        if (key->handle()->IsSymbol()) {
          if (property->emit_store()) {
            // Named store via an uninitialized store IC: value in eax,
            // receiver in edx, name in ecx.
            VisitForAccumulatorValue(value);
            __ mov(ecx, Immediate(key->handle()));
            __ mov(edx, Operand(esp, 0));
            Handle<Code> ic = is_classic_mode()
                ? isolate()->builtins()->StoreIC_Initialize()
                : isolate()->builtins()->StoreIC_Initialize_Strict();
            CallIC(ic, RelocInfo::CODE_TARGET, key->id());
            PrepareForBailoutForId(key->id(), NO_REGISTERS);
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Fall through.
      // NOTE(review): case label missing here (skips 1595 -> 1597).
        __ push(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ push(Immediate(Smi::FromInt(NONE)));  // PropertyAttributes
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          // Shadowed property: evaluate but discard receiver/key/value.
          __ Drop(3);
        }
        break;
      // NOTE(review): case label missing here (skips 1606 -> 1608).
        accessor_table.lookup(key)->second->getter = value;
        break;
      // NOTE(review): case label missing here (skips 1609 -> 1611).
        accessor_table.lookup(key)->second->setter = value;
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ push(Operand(esp, 0));  // Duplicate receiver.
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ push(Immediate(Smi::FromInt(NONE)));
    __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
  }

  if (expr->has_function()) {
    ASSERT(result_saved);
    __ push(Operand(esp, 0));
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}
1641 
1642 
// Compiles an array literal: clone the constant boilerplate (specialized
// stub for COW fast elements, runtime call for deep or large literals),
// then evaluate and store each non-constant subexpression.
//
// NOTE(review): this copy of the file lost lines during extraction (the
// embedded numbering skips in several places: boilerplate pushes, stub
// constructor arguments, condition halves, and two element loads).  Gaps
// are marked inline; diff against the pristine v8 3.11 source before
// relying on this block.
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  Handle<FixedArray> constant_elements = expr->constant_elements();
  ASSERT_EQ(2, constant_elements->length());
  // Slot 0 holds the elements kind (as a smi), slot 1 the constant values.
  ElementsKind constant_elements_kind =
      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
  bool has_constant_fast_elements =
      IsFastObjectElementsKind(constant_elements_kind);
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));

  // NOTE(review): two lines missing here (numbering skips 1656 -> 1659).
  __ push(Immediate(Smi::FromInt(expr->literal_index())));
  __ push(Immediate(constant_elements));
  Heap* heap = isolate()->heap();
  if (has_constant_fast_elements &&
      constant_elements_values->map() == heap->fixed_cow_array_map()) {
    // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
    // change, so it's possible to specialize the stub in advance.
    __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
    FastCloneShallowArrayStub stub(
    // NOTE(review): stub mode argument missing here (skips 1667 -> 1669).
        length);
    __ CallStub(&stub);
  } else if (expr->depth() > 1) {
    __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
  // NOTE(review): an `} else if (...) {` line is missing here (numbering
  // skips 1672 -> 1674), so the next call's guarding condition is lost.
    __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
  } else {
    ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
           FLAG_smi_only_arrays);
    // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
    // change, so it's possible to specialize the stub in advance.
    FastCloneShallowArrayStub::Mode mode = has_constant_fast_elements
    // NOTE(review): ternary arms missing here (skips 1680 -> 1683).
    FastCloneShallowArrayStub stub(mode, length);
    __ CallStub(&stub);
  }

  bool result_saved = false;  // Is the result saved to the stack?

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (subexpr->AsLiteral() != NULL ||
    // NOTE(review): second half of this condition missing (skips 1695 -> 1697).
      continue;
    }

    if (!result_saved) {
      __ push(eax);
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    if (IsFastObjectElementsKind(constant_elements_kind)) {
      // Fast-case array literal with ElementsKind of FAST_*_ELEMENTS, they
      // cannot transition and don't need to call the runtime stub.
      int offset = FixedArray::kHeaderSize + (i * kPointerSize);
      __ mov(ebx, Operand(esp, 0));  // Copy of array literal.
      // NOTE(review): one line missing here (skips 1710 -> 1712); presumably
      // the load of the elements backing store into ebx -- TODO confirm.
      // Store the subexpression value in the array's elements.
      __ mov(FieldOperand(ebx, offset), result_register());
      // Update the write barrier for the array store.
      __ RecordWriteField(ebx, offset, result_register(), ecx,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
      // NOTE(review): final RecordWriteField argument missing here
      // (numbering skips 1717 -> 1719).
    } else {
      // Store the subexpression value in the array's elements.
      __ mov(ebx, Operand(esp, 0));  // Copy of array literal.
      // NOTE(review): one line missing here (skips 1721 -> 1723).
      __ mov(ecx, Immediate(Smi::FromInt(i)));
      __ mov(edx, Immediate(Smi::FromInt(expr->literal_index())));
      StoreArrayLiteralElementStub stub;
      __ CallStub(&stub);
    }

    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}
1738 
1739 
// Compiles an assignment expression.  Classifies the target as a variable,
// named property, or keyed property; for compound assignments (e.g. '+=')
// it additionally loads the old value, applies the binary operation, and
// records bailout points after the load and after the operation.
void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  Comment cmnt(masm_, "[ Assignment");
  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
  // on the left-hand side.
  if (!expr->target()->IsValidLeftHandSide()) {
    VisitForEffect(expr->target());
    return;
  }

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* property = expr->target()->AsProperty();
  if (property != NULL) {
    assign_type = (property->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in edx.
        VisitForStackValue(property->obj());
        __ mov(edx, Operand(esp, 0));
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case KEYED_PROPERTY: {
      if (expr->is_compound()) {
        // Compound keyed assignment needs receiver/key both on the stack
        // (for the later store) and in edx/ecx (for the load below).
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ mov(edx, Operand(esp, kPointerSize));  // Object.
        __ mov(ecx, Operand(esp, 0));             // Key.
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
    }
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
  if (expr->is_compound()) {
    AccumulatorValueContext result_context(this);
    { AccumulatorValueContext left_operand_context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    __ push(eax);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
        ? OVERWRITE_RIGHT
        : NO_OVERWRITE;
    SetSourcePosition(expr->position() + 1);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            mode,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op, mode);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  // Record source position before possible IC call.
  SetSourcePosition(expr->position());

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(eax);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}
1852 
1853 
// Loads the named property |prop| via an uninitialized load IC.  The caller
// has placed the receiver in edx (see VisitAssignment); the property name is
// passed in ecx and the result arrives in eax.
void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  // A named key is never a smi.
  ASSERT(!key->handle()->IsSmi());
  __ mov(ecx, Immediate(key->handle()));
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
}
1862 
1863 
// Loads a keyed property via an uninitialized keyed-load IC.  The caller has
// placed the receiver in edx and the key in ecx (see VisitAssignment); the
// result arrives in eax.
void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
}
1869 
1870 
// Emit a binary operation with an inlined fast path for smi operands.
// On entry the left operand is on the stack and the right operand is in
// eax. If the combined smi check fails (or a smi-case overflow occurs),
// control falls through to a patchable BinaryOpStub call; JumpPatchSite
// records the jump so the IC system can later repatch it. Result is
// plugged into the current expression context from eax.
1871 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1872  Token::Value op,
1873  OverwriteMode mode,
1874  Expression* left,
1875  Expression* right) {
1876  // Do combined smi check of the operands. Left operand is on the
1877  // stack. Right operand is in eax.
1878  Label smi_case, done, stub_call;
1879  __ pop(edx)
1880  __ mov(ecx, eax);
     // or-ing the two tagged values lets one test check both smi tags at
     // once (a smi has its low tag bit clear).
1881  __ or_(eax, edx);
1882  JumpPatchSite patch_site(masm_);
1883  patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);
1884 
1885  __ bind(&stub_call);
     // Restore the (untouched) right operand for the stub: the or_ above
     // destroyed eax, but ecx still holds the original right value.
1886  __ mov(eax, ecx);
1887  BinaryOpStub stub(op, mode);
1888  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
1889  patch_site.EmitPatchInfo();
1890  __ jmp(&done, Label::kNear);
1891 
1892  // Smi case.
1893  __ bind(&smi_case);
1894  __ mov(eax, edx); // Copy left operand in case of a stub call.
1895 
1896  switch (op) {
1897  case Token::SAR:
1898  __ SmiUntag(eax);
1899  __ SmiUntag(ecx);
     // Arithmetic right shift of a 31-bit value always fits in a smi.
1900  __ sar_cl(eax); // No checks of result necessary
1901  __ SmiTag(eax);
1902  break;
1903  case Token::SHL: {
1904  Label result_ok;
1905  __ SmiUntag(eax);
1906  __ SmiUntag(ecx);
1907  __ shl_cl(eax);
1908  // Check that the *signed* result fits in a smi.
     // Adding 0x40000000 via this compare trick: values in the smi range
     // [-2^30, 2^30) compare "positive" against 0xc0000000 here.
1909  __ cmp(eax, 0xc0000000);
1910  __ j(positive, &result_ok);
     // Re-tag the shift count before falling back to the stub, which
     // expects tagged operands.
1911  __ SmiTag(ecx);
1912  __ jmp(&stub_call);
1913  __ bind(&result_ok);
1914  __ SmiTag(eax);
1915  break;
1916  }
1917  case Token::SHR: {
1918  Label result_ok;
1919  __ SmiUntag(eax);
1920  __ SmiUntag(ecx);
1921  __ shr_cl(eax);
     // The unsigned result must fit in a *non-negative* smi, i.e. the top
     // two bits must be clear.
1922  __ test(eax, Immediate(0xc0000000));
1923  __ j(zero, &result_ok);
1924  __ SmiTag(ecx);
1925  __ jmp(&stub_call);
1926  __ bind(&result_ok);
1927  __ SmiTag(eax);
1928  break;
1929  }
     // ADD/SUB operate directly on the tagged values; smi tagging is
     // preserved by addition/subtraction, and overflow falls back to the
     // stub.
1930  case Token::ADD:
1931  __ add(eax, ecx);
1932  __ j(overflow, &stub_call);
1933  break;
1934  case Token::SUB:
1935  __ sub(eax, ecx);
1936  __ j(overflow, &stub_call);
1937  break;
1938  case Token::MUL: {
     // Untag one operand so the product of (untagged * tagged) is the
     // correctly tagged result.
1939  __ SmiUntag(eax);
1940  __ imul(eax, ecx);
1941  __ j(overflow, &stub_call);
1942  __ test(eax, eax);
1943  __ j(not_zero, &done, Label::kNear);
     // Result is zero: if either operand was negative the correct result
     // is -0, which a smi cannot represent, so defer to the stub.
1944  __ mov(ebx, edx);
1945  __ or_(ebx, ecx);
1946  __ j(negative, &stub_call);
1947  break;
1948  }
     // Bitwise ops on tagged smis are tag-preserving; no checks needed.
1949  case Token::BIT_OR:
1950  __ or_(eax, ecx);
1951  break;
1952  case Token::BIT_AND:
1953  __ and_(eax, ecx);
1954  break;
1955  case Token::BIT_XOR:
1956  __ xor_(eax, ecx);
1957  break;
1958  default:
1959  UNREACHABLE();
1960  }
1961 
1962  __ bind(&done);
1963  context()->Plug(eax);
1964 }
1965 
1966 
// Emit a binary operation with no inlined smi fast path: pop the left
// operand into edx (right is already in eax) and call the BinaryOpStub.
// The unbound patch site tells the IC machinery there is no inlined smi
// code at this call site.
1967 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
1968  Token::Value op,
1969  OverwriteMode mode) {
1970  __ pop(edx);
1971  BinaryOpStub stub(op, mode);
1972  JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
1973  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
1974  patch_site.EmitPatchInfo();
1975  context()->Plug(eax);
1976 }
1977 
1978 
// Store the value currently in eax into the target expression. Used for
// compound targets (e.g. destructuring in for-in) where the value is
// produced before the target is classified. The value is re-plugged into
// the context in eax afterwards.
1979 void FullCodeGenerator::EmitAssignment(Expression* expr) {
1980  // Invalid left-hand sides are rewritten to have a 'throw
1981  // ReferenceError' on the left-hand side.
1982  if (!expr->IsValidLeftHandSide()) {
1983  VisitForEffect(expr);
1984  return;
1985  }
1986 
1987  // Left-hand side can only be a property, a global or a (parameter or local)
1988  // slot.
1989  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1990  LhsKind assign_type = VARIABLE;
1991  Property* prop = expr->AsProperty();
1992  if (prop != NULL) {
1993  assign_type = (prop->key()->IsPropertyName())
1994  ? NAMED_PROPERTY
1995  : KEYED_PROPERTY;
1996  }
1997 
1998  switch (assign_type) {
1999  case VARIABLE: {
2000  Variable* var = expr->AsVariableProxy()->var();
     // EffectContext: the variable store itself should not plug a value;
     // the outer context()->Plug(eax) below provides the result.
2001  EffectContext context(this);
2002  EmitVariableAssignment(var, Token::ASSIGN);
2003  break;
2004  }
2005  case NAMED_PROPERTY: {
2006  __ push(eax); // Preserve value.
2007  VisitForAccumulatorValue(prop->obj());
     // StoreIC calling convention: receiver in edx, name in ecx,
     // value in eax.
2008  __ mov(edx, eax);
2009  __ pop(eax); // Restore value.
2010  __ mov(ecx, prop->key()->AsLiteral()->handle());
2011  Handle<Code> ic = is_classic_mode()
2012  ? isolate()->builtins()->StoreIC_Initialize()
2013  : isolate()->builtins()->StoreIC_Initialize_Strict();
2014  CallIC(ic);
2015  break;
2016  }
2017  case KEYED_PROPERTY: {
2018  __ push(eax); // Preserve value.
2019  VisitForStackValue(prop->obj());
2020  VisitForAccumulatorValue(prop->key());
     // KeyedStoreIC calling convention: receiver in edx, key in ecx,
     // value in eax.
2021  __ mov(ecx, eax);
2022  __ pop(edx); // Receiver.
2023  __ pop(eax); // Restore value.
2024  Handle<Code> ic = is_classic_mode()
2025  ? isolate()->builtins()->KeyedStoreIC_Initialize()
2026  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2027  CallIC(ic);
2028  break;
2029  }
2030  }
2031  context()->Plug(eax);
2032 }
2033 
2034 
// Store eax into the given variable, honoring the language-mode and
// declaration-kind rules: globals go through the StoreIC, const
// initializers only write the hole-initialized slot, non-initializing
// 'let' assignments check for the hole (temporal dead zone) first, and
// ordinary var/let/const-harmony stores write the slot directly with a
// write barrier for context slots. Non-initializing assignments to
// classic-mode consts are silently ignored.
2035 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2036  Token::Value op) {
2037  if (var->IsUnallocated()) {
2038  // Global var, const, or let.
2039  __ mov(ecx, var->name());
2040  __ mov(edx, GlobalObjectOperand());
2041  Handle<Code> ic = is_classic_mode()
2042  ? isolate()->builtins()->StoreIC_Initialize()
2043  : isolate()->builtins()->StoreIC_Initialize_Strict();
     // CODE_TARGET_CONTEXT marks this as a store to the global context.
2044  CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
2045 
2046  } else if (op == Token::INIT_CONST) {
2047  // Const initializers need a write barrier.
2048  ASSERT(!var->IsParameter()); // No const parameters.
2049  if (var->IsStackLocal()) {
     // Only initialize if the slot still holds the hole; a const may
     // only be written once.
2050  Label skip;
2051  __ mov(edx, StackOperand(var));
2052  __ cmp(edx, isolate()->factory()->the_hole_value());
2053  __ j(not_equal, &skip);
2054  __ mov(StackOperand(var), eax);
2055  __ bind(&skip);
2056  } else {
2057  ASSERT(var->IsContextSlot() || var->IsLookupSlot());
2058  // Like var declarations, const declarations are hoisted to function
2059  // scope. However, unlike var initializers, const initializers are
2060  // able to drill a hole to that function context, even from inside a
2061  // 'with' context. We thus bypass the normal static scope lookup for
2062  // var->IsContextSlot().
2063  __ push(eax);
2064  __ push(esi);
2065  __ push(Immediate(var->name()));
2066  __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
2067  }
2068 
2069  } else if (var->mode() == LET && op != Token::INIT_LET) {
2070  // Non-initializing assignment to let variable needs a write barrier.
2071  if (var->IsLookupSlot()) {
2072  __ push(eax); // Value.
2073  __ push(esi); // Context.
2074  __ push(Immediate(var->name()));
2075  __ push(Immediate(Smi::FromInt(language_mode())));
2076  __ CallRuntime(Runtime::kStoreContextSlot, 4);
2077  } else {
2078  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2079  Label assign;
2080  MemOperand location = VarOperand(var, ecx);
2081  __ mov(edx, location);
     // A hole in the slot means the 'let' binding is not yet
     // initialized: assigning before initialization throws.
2082  __ cmp(edx, isolate()->factory()->the_hole_value());
2083  __ j(not_equal, &assign, Label::kNear);
2084  __ push(Immediate(var->name()));
2085  __ CallRuntime(Runtime::kThrowReferenceError, 1);
2086  __ bind(&assign);
2087  __ mov(location, eax);
2088  if (var->IsContextSlot()) {
     // Context objects are in the heap; keep the written value in edx
     // and record the write for the GC.
2089  __ mov(edx, eax);
2090  int offset = Context::SlotOffset(var->index());
2091  __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
2092  }
2093  }
2094 
2095  } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
2096  // Assignment to var or initializing assignment to let/const
2097  // in harmony mode.
2098  if (var->IsStackAllocated() || var->IsContextSlot()) {
2099  MemOperand location = VarOperand(var, ecx);
2100  if (FLAG_debug_code && op == Token::INIT_LET) {
2101  // Check for an uninitialized let binding.
2102  __ mov(edx, location);
2103  __ cmp(edx, isolate()->factory()->the_hole_value());
2104  __ Check(equal, "Let binding re-initialization.");
2105  }
2106  // Perform the assignment.
2107  __ mov(location, eax);
2108  if (var->IsContextSlot()) {
2109  __ mov(edx, eax);
2110  int offset = Context::SlotOffset(var->index());
2111  __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
2112  }
2113  } else {
2114  ASSERT(var->IsLookupSlot());
2115  __ push(eax); // Value.
2116  __ push(esi); // Context.
2117  __ push(Immediate(var->name()));
2118  __ push(Immediate(Smi::FromInt(language_mode())));
2119  __ CallRuntime(Runtime::kStoreContextSlot, 4);
2120  }
2121  }
2122  // Non-initializing assignments to consts are ignored.
2123 }
2124 
2125 
// Complete a named-property assignment using the StoreIC.
// On entry: value in eax, receiver at esp[0]. The ToSlowProperties /
// ToFastProperties bracketing avoids quadratic fast-property growth when
// a run of assignments initializes the same object.
2126 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2127  // Assignment to a property, using a named store IC.
2128  // eax : value
2129  // esp[0] : receiver
2130 
2131  Property* prop = expr->target()->AsProperty();
2132  ASSERT(prop != NULL);
2133  ASSERT(prop->key()->AsLiteral() != NULL);
2134 
2135  // If the assignment starts a block of assignments to the same object,
2136  // change to slow case to avoid the quadratic behavior of repeatedly
2137  // adding fast properties.
2138  if (expr->starts_initialization_block()) {
2139  __ push(result_register());
2140  __ push(Operand(esp, kPointerSize)); // Receiver is now under value.
2141  __ CallRuntime(Runtime::kToSlowProperties, 1);
2142  __ pop(result_register());
2143  }
2144 
2145  // Record source code position before IC call.
2146  SetSourcePosition(expr->position());
2147  __ mov(ecx, prop->key()->AsLiteral()->handle());
2148  if (expr->ends_initialization_block()) {
     // Keep the receiver on the stack; it is needed again for the
     // ToFastProperties call below.
2149  __ mov(edx, Operand(esp, 0));
2150  } else {
2151  __ pop(edx);
2152  }
2153  Handle<Code> ic = is_classic_mode()
2154  ? isolate()->builtins()->StoreIC_Initialize()
2155  : isolate()->builtins()->StoreIC_Initialize_Strict();
2156  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
2157 
2158  // If the assignment ends an initialization block, revert to fast case.
2159  if (expr->ends_initialization_block()) {
2160  __ push(eax); // Result of assignment, saved even if not needed.
2161  __ push(Operand(esp, kPointerSize)); // Receiver is under value.
2162  __ CallRuntime(Runtime::kToFastProperties, 1);
2163  __ pop(eax);
     // Drop the receiver that was left on the stack.
2164  __ Drop(1);
2165  }
2166  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2167  context()->Plug(eax);
2168 }
2169 
2170 
// Complete a keyed-property assignment using the KeyedStoreIC.
// On entry: value in eax, key at esp[0], receiver at esp[kPointerSize].
// Mirrors EmitNamedPropertyAssignment, including the slow/fast property
// bracketing for initialization blocks.
2171 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2172  // Assignment to a property, using a keyed store IC.
2173  // eax : value
2174  // esp[0] : key
2175  // esp[kPointerSize] : receiver
2176 
2177  // If the assignment starts a block of assignments to the same object,
2178  // change to slow case to avoid the quadratic behavior of repeatedly
2179  // adding fast properties.
2180  if (expr->starts_initialization_block()) {
2181  __ push(result_register());
2182  // Receiver is now under the key and value.
2183  __ push(Operand(esp, 2 * kPointerSize));
2184  __ CallRuntime(Runtime::kToSlowProperties, 1);
2185  __ pop(result_register());
2186  }
2187 
2188  __ pop(ecx); // Key.
2189  if (expr->ends_initialization_block()) {
2190  __ mov(edx, Operand(esp, 0)); // Leave receiver on the stack for later.
2191  } else {
2192  __ pop(edx);
2193  }
2194  // Record source code position before IC call.
2195  SetSourcePosition(expr->position());
2196  Handle<Code> ic = is_classic_mode()
2197  ? isolate()->builtins()->KeyedStoreIC_Initialize()
2198  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2199  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
2200 
2201  // If the assignment ends an initialization block, revert to fast case.
2202  if (expr->ends_initialization_block()) {
     // Shuffle so the receiver (edx) sits above the saved result for the
     // runtime call, then restore the result into eax.
2203  __ pop(edx);
2204  __ push(eax); // Result of assignment, saved even if not needed.
2205  __ push(edx);
2206  __ CallRuntime(Runtime::kToFastProperties, 1);
2207  __ pop(eax);
2208  }
2209 
2210  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2211  context()->Plug(eax);
2212 }
2213 
2214 
// Generate code for a property load expression. Named accesses place the
// receiver in edx and call the LoadIC; keyed accesses place the receiver
// in edx and the key in ecx and call the KeyedLoadIC. The loaded value
// (in eax) is plugged into the current context.
2215 void FullCodeGenerator::VisitProperty(Property* expr) {
2216  Comment cmnt(masm_, "[ Property");
2217  Expression* key = expr->key();
2218 
2219  if (key->IsPropertyName()) {
2220  VisitForAccumulatorValue(expr->obj());
2221  __ mov(edx, result_register());
2222  EmitNamedPropertyLoad(expr);
2223  context()->Plug(eax);
2224  } else {
2225  VisitForStackValue(expr->obj());
2226  VisitForAccumulatorValue(expr->key());
2227  __ pop(edx); // Object.
2228  __ mov(ecx, result_register()); // Key.
2229  EmitKeyedPropertyLoad(expr);
2230  context()->Plug(eax);
2231  }
2232 }
2233 
2234 
// Central helper for all IC calls: bumps the per-function IC counter and
// emits the call with the given reloc mode and AST id (used to map the
// call site back to the AST for deoptimization).
2235 void FullCodeGenerator::CallIC(Handle<Code> code,
2236  RelocInfo::Mode rmode,
2237  unsigned ast_id) {
2238  ic_total_count_++;
2239  __ call(code, rmode, ast_id);
2240 }
2241 
2242 
2243 
2244 
// Emit a function call through a call IC: push the arguments, load the
// callee name into ecx, and call the computed CallInitialize stub.
// The receiver was pushed by the caller (VisitCall). Result lands in eax.
2245 void FullCodeGenerator::EmitCallWithIC(Call* expr,
2246  Handle<Object> name,
2247  RelocInfo::Mode mode) {
2248  // Code common for calls using the IC.
2249  ZoneList<Expression*>* args = expr->arguments();
2250  int arg_count = args->length();
     // PreservePositionScope: argument evaluation must not disturb the
     // recorded source position of the call itself.
2251  { PreservePositionScope scope(masm()->positions_recorder());
2252  for (int i = 0; i < arg_count; i++) {
2253  VisitForStackValue(args->at(i));
2254  }
2255  __ Set(ecx, Immediate(name));
2256  }
2257  // Record source position of the IC call.
2258  SetSourcePosition(expr->position());
2259  Handle<Code> ic =
2260  isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
2261  CallIC(ic, mode, expr->id());
2262  RecordJSReturnSite(expr);
2263  // Restore context register.
     // NOTE(review): original line 2264 is missing from this listing
     // (presumably the mov that reloads esi from the frame's context
     // slot) — confirm against the upstream source.
2265  context()->Plug(eax);
2266 }
2267 
2268 
// Emit a call where the callee is a keyed property (obj[key](...)):
// evaluate the key, swap it under the receiver to match the call IC
// convention, push the arguments, and call the KeyedCallInitialize stub
// with the key in ecx. Drops the key left on the stack when plugging the
// result.
2269 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2270  Expression* key) {
2271  // Load the key.
2272  VisitForAccumulatorValue(key);
2273 
2274  // Swap the name of the function and the receiver on the stack to follow
2275  // the calling convention for call ICs.
2276  __ pop(ecx);
2277  __ push(eax);
2278  __ push(ecx);
2279 
2280  // Load the arguments.
2281  ZoneList<Expression*>* args = expr->arguments();
2282  int arg_count = args->length();
2283  { PreservePositionScope scope(masm()->positions_recorder());
2284  for (int i = 0; i < arg_count; i++) {
2285  VisitForStackValue(args->at(i));
2286  }
2287  }
2288  // Record source position of the IC call.
2289  SetSourcePosition(expr->position());
2290  Handle<Code> ic =
2291  isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
2292  __ mov(ecx, Operand(esp, (arg_count + 1) * kPointerSize)); // Key.
2293  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
2294  RecordJSReturnSite(expr);
2295  // Restore context register.
     // NOTE(review): original line 2296 is missing from this listing
     // (presumably the esi context reload) — confirm against upstream.
2297  context()->DropAndPlug(1, eax); // Drop the key still on the stack.
2298 }
2299 
2300 
// Emit a function call through the CallFunctionStub. Pushes the
// arguments, optionally records call-target type feedback in a global
// property cell (ebx), loads the callee into edi, and calls the stub.
// The function and receiver were pushed by the caller.
2301 void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
2302  // Code common for calls using the call stub.
2303  ZoneList<Expression*>* args = expr->arguments();
2304  int arg_count = args->length();
2305  { PreservePositionScope scope(masm()->positions_recorder());
2306  for (int i = 0; i < arg_count; i++) {
2307  VisitForStackValue(args->at(i));
2308  }
2309  }
2310  // Record source position for debugger.
2311  SetSourcePosition(expr->position());
2312 
2313  // Record call targets in unoptimized code, but not in the snapshot.
2314  if (!Serializer::enabled()) {
2315  flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
2316  Handle<Object> uninitialized =
     // NOTE(review): original line 2317 (the initializer expression,
     // presumably the uninitialized type-feedback sentinel) is missing
     // from this listing — confirm against upstream.
2318  Handle<JSGlobalPropertyCell> cell =
2319  isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
2320  RecordTypeFeedbackCell(expr->id(), cell);
     // The stub reads the feedback cell from ebx.
2321  __ mov(ebx, cell);
2322  }
2323 
2324  CallFunctionStub stub(arg_count, flags);
     // Callee sits below the receiver and the arguments.
2325  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
2326  __ CallStub(&stub, expr->id());
2327 
2328  RecordJSReturnSite(expr);
2329  // Restore context register.
     // NOTE(review): original line 2330 is missing from this listing
     // (presumably the esi context reload) — confirm against upstream.
2331  context()->DropAndPlug(1, eax);
2332 }
2333 
2334 
// Set up and perform the %ResolvePossiblyDirectEval runtime call used for
// direct 'eval' calls. The caller has already pushed a copy of the
// function; this pushes the remaining four runtime arguments (first user
// argument or undefined, enclosing receiver, language mode, scope start
// position) and calls the runtime.
2335 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2336  // Push copy of the first argument or undefined if it doesn't exist.
2337  if (arg_count > 0) {
2338  __ push(Operand(esp, arg_count * kPointerSize));
2339  } else {
2340  __ push(Immediate(isolate()->factory()->undefined_value()));
2341  }
2342 
2343  // Push the receiver of the enclosing function.
     // +2 skips the saved frame pointer and return address to reach the
     // receiver above the parameters.
2344  __ push(Operand(ebp, (2 + info_->scope()->num_parameters()) * kPointerSize));
2345  // Push the language mode.
2346  __ push(Immediate(Smi::FromInt(language_mode())));
2347 
2348  // Push the start position of the scope the calls resides in.
2349  __ push(Immediate(Smi::FromInt(scope()->start_position())));
2350 
2351  // Do the runtime call.
2352  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2353 }
2354 
2355 
// Generate code for a call expression, dispatching on the callee's form:
// possible direct eval, global variable, dynamically-scoped lookup slot,
// property access (named or keyed), or an arbitrary expression. Each path
// ends in exactly one RecordJSReturnSite, which the DEBUG bookkeeping
// verifies.
2356 void FullCodeGenerator::VisitCall(Call* expr) {
2357 #ifdef DEBUG
2358  // We want to verify that RecordJSReturnSite gets called on all paths
2359  // through this function. Avoid early returns.
2360  expr->return_is_recorded_ = false;
2361 #endif
2362 
2363  Comment cmnt(masm_, "[ Call");
2364  Expression* callee = expr->expression();
2365  VariableProxy* proxy = callee->AsVariableProxy();
2366  Property* property = callee->AsProperty();
2367 
2368  if (proxy != NULL && proxy->var()->is_possibly_eval()) {
2369  // In a call to eval, we first call %ResolvePossiblyDirectEval to
2370  // resolve the function we need to call and the receiver of the call.
2371  // Then we call the resolved function using the given arguments.
2372  ZoneList<Expression*>* args = expr->arguments();
2373  int arg_count = args->length();
2374  { PreservePositionScope pos_scope(masm()->positions_recorder());
2375  VisitForStackValue(callee);
2376  // Reserved receiver slot.
2377  __ push(Immediate(isolate()->factory()->undefined_value()));
2378  // Push the arguments.
2379  for (int i = 0; i < arg_count; i++) {
2380  VisitForStackValue(args->at(i));
2381  }
2382 
2383  // Push a copy of the function (found below the arguments) and
2384  // resolve eval.
2385  __ push(Operand(esp, (arg_count + 1) * kPointerSize));
2386  EmitResolvePossiblyDirectEval(arg_count);
2387 
2388  // The runtime call returns a pair of values in eax (function) and
2389  // edx (receiver). Touch up the stack with the right values.
2390  __ mov(Operand(esp, (arg_count + 0) * kPointerSize), edx);
2391  __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);
2392  }
2393  // Record source position for debugger.
2394  SetSourcePosition(expr->position());
2395  CallFunctionStub stub(arg_count, RECEIVER_MIGHT_BE_IMPLICIT);
2396  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
2397  __ CallStub(&stub);
2398  RecordJSReturnSite(expr);
2399  // Restore context register.
     // NOTE(review): original line 2400 is missing from this listing
     // (presumably the esi context reload) — confirm against upstream.
2401  context()->DropAndPlug(1, eax);
2402 
2403  } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
2404  // Push global object as receiver for the call IC.
2405  __ push(GlobalObjectOperand());
2406  EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
2407 
2408  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
2409  // Call to a lookup slot (dynamically introduced variable).
2410  Label slow, done;
2411  { PreservePositionScope scope(masm()->positions_recorder());
2412  // Generate code for loading from variables potentially shadowed by
2413  // eval-introduced variables.
2414  EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
2415  }
2416  __ bind(&slow);
2417  // Call the runtime to find the function to call (returned in eax) and
2418  // the object holding it (returned in edx).
2419  __ push(context_register());
2420  __ push(Immediate(proxy->name()));
2421  __ CallRuntime(Runtime::kLoadContextSlot, 2);
2422  __ push(eax); // Function.
2423  __ push(edx); // Receiver.
2424 
2425  // If fast case code has been generated, emit code to push the function
2426  // and receiver and have the slow path jump around this code.
2427  if (done.is_linked()) {
2428  Label call;
2429  __ jmp(&call, Label::kNear);
2430  __ bind(&done);
2431  // Push function.
2432  __ push(eax);
2433  // The receiver is implicitly the global receiver. Indicate this by
2434  // passing the hole to the call function stub.
2435  __ push(Immediate(isolate()->factory()->the_hole_value()));
2436  __ bind(&call);
2437  }
2438 
2439  // The receiver is either the global receiver or an object found by
2440  // LoadContextSlot. That object could be the hole if the receiver is
2441  // implicitly the global object.
2442  EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
2443 
2444  } else if (property != NULL) {
2445  { PreservePositionScope scope(masm()->positions_recorder());
2446  VisitForStackValue(property->obj());
2447  }
2448  if (property->key()->IsPropertyName()) {
2449  EmitCallWithIC(expr,
2450  property->key()->AsLiteral()->handle(),
2451  RelocInfo::CODE_TARGET);
2452  } else {
2453  EmitKeyedCallWithIC(expr, property->key());
2454  }
2455 
2456  } else {
2457  // Call to an arbitrary expression not handled specially above.
2458  { PreservePositionScope scope(masm()->positions_recorder());
2459  VisitForStackValue(callee);
2460  }
2461  // Load global receiver object.
2462  __ mov(ebx, GlobalObjectOperand());
     // NOTE(review): original line 2463 is missing from this listing
     // (presumably the push of the global receiver loaded via ebx) —
     // confirm against upstream.
2464  // Emit function call.
2465  EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
2466  }
2467 
2468 #ifdef DEBUG
2469  // RecordJSReturnSite should have been called.
2470  ASSERT(expr->return_is_recorded_);
2471 #endif
2472 }
2473 
2474 
// Generate code for a 'new' expression: evaluate the constructor and the
// arguments left-to-right, optionally record call-target feedback (cell
// in ebx), then invoke the CallConstructStub with the argument count in
// eax and the constructor in edi.
2475 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2476  Comment cmnt(masm_, "[ CallNew");
2477  // According to ECMA-262, section 11.2.2, page 44, the function
2478  // expression in new calls must be evaluated before the
2479  // arguments.
2480 
2481  // Push constructor on the stack. If it's not a function it's used as
2482  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2483  // ignored.
2484  VisitForStackValue(expr->expression());
2485 
2486  // Push the arguments ("left-to-right") on the stack.
2487  ZoneList<Expression*>* args = expr->arguments();
2488  int arg_count = args->length();
2489  for (int i = 0; i < arg_count; i++) {
2490  VisitForStackValue(args->at(i));
2491  }
2492 
2493  // Call the construct call builtin that handles allocation and
2494  // constructor invocation.
2495  SetSourcePosition(expr->position());
2496 
2497  // Load function and argument count into edi and eax.
     // SafeSet avoids embedding a raw untagged immediate that could be
     // misread as a pointer during GC.
2498  __ SafeSet(eax, Immediate(arg_count));
2499  __ mov(edi, Operand(esp, arg_count * kPointerSize));
2500 
2501  // Record call targets in unoptimized code, but not in the snapshot.
     // NOTE(review): original line 2502 is missing from this listing
     // (presumably the declaration of `flags`, assigned below) — confirm
     // against upstream.
2503  if (!Serializer::enabled()) {
2504  flags = RECORD_CALL_TARGET;
2505  Handle<Object> uninitialized =
     // NOTE(review): original line 2506 (the initializer expression for
     // `uninitialized`) is missing from this listing — confirm against
     // upstream.
2507  Handle<JSGlobalPropertyCell> cell =
2508  isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
2509  RecordTypeFeedbackCell(expr->id(), cell);
2510  __ mov(ebx, cell);
2511  } else {
2512  flags = NO_CALL_FUNCTION_FLAGS;
2513  }
2514 
2515  CallConstructStub stub(flags);
2516  __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2517  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2518  context()->Plug(eax);
2519 }
2520 
2521 
// Inline implementation of the %_IsSmi intrinsic: tests the smi tag bit
// of the single argument and splits control to the true/false targets of
// the current test context.
2522 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2523  ZoneList<Expression*>* args = expr->arguments();
2524  ASSERT(args->length() == 1);
2525 
2526  VisitForAccumulatorValue(args->at(0));
2527 
2528  Label materialize_true, materialize_false;
2529  Label* if_true = NULL;
2530  Label* if_false = NULL;
2531  Label* fall_through = NULL;
2532  context()->PrepareTest(&materialize_true, &materialize_false,
2533  &if_true, &if_false, &fall_through);
2534 
2535  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
     // A smi has the tag bit clear, so 'zero' means "is a smi".
2536  __ test(eax, Immediate(kSmiTagMask));
2537  Split(zero, if_true, if_false, fall_through);
2538 
2539  context()->Plug(if_true, if_false);
2540 }
2541 
2542 
// Inline implementation of the %_IsNonNegativeSmi intrinsic: true iff the
// argument is a smi AND its sign bit (bit 31 of the tagged value) is
// clear — both checked with a single test.
2543 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2544  ZoneList<Expression*>* args = expr->arguments();
2545  ASSERT(args->length() == 1);
2546 
2547  VisitForAccumulatorValue(args->at(0));
2548 
2549  Label materialize_true, materialize_false;
2550  Label* if_true = NULL;
2551  Label* if_false = NULL;
2552  Label* fall_through = NULL;
2553  context()->PrepareTest(&materialize_true, &materialize_false,
2554  &if_true, &if_false, &fall_through);
2555 
2556  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2557  __ test(eax, Immediate(kSmiTagMask | 0x80000000));
2558  Split(zero, if_true, if_false, fall_through);
2559 
2560  context()->Plug(if_true, if_false);
2561 }
2562 
2563 
// Inline implementation of the %_IsObject intrinsic: true for null and
// for non-undetectable heap objects within the object instance-type
// range; false for smis and undetectable objects.
2564 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2565  ZoneList<Expression*>* args = expr->arguments();
2566  ASSERT(args->length() == 1);
2567 
2568  VisitForAccumulatorValue(args->at(0));
2569 
2570  Label materialize_true, materialize_false;
2571  Label* if_true = NULL;
2572  Label* if_false = NULL;
2573  Label* fall_through = NULL;
2574  context()->PrepareTest(&materialize_true, &materialize_false,
2575  &if_true, &if_false, &fall_through);
2576 
2577  __ JumpIfSmi(eax, if_false);
2578  __ cmp(eax, isolate()->factory()->null_value());
2579  __ j(equal, if_true);
     // NOTE(review): original line 2580 is missing from this listing —
     // the code below reads the map from ebx, so line 2580 presumably
     // loaded eax's map into ebx. Confirm against upstream.
2581  // Undetectable objects behave like undefined when tested with typeof.
2582  __ movzx_b(ecx, FieldOperand(ebx, Map::kBitFieldOffset));
2583  __ test(ecx, Immediate(1 << Map::kIsUndetectable));
2584  __ j(not_zero, if_false);
2585  __ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceTypeOffset));
     // NOTE(review): original line 2586 is missing from this listing —
     // presumably a cmp of ecx against the lower bound of the object
     // instance-type range that the below/below_equal jumps test.
2587  __ j(below, if_false);
     // NOTE(review): original line 2588 (presumably the cmp against the
     // upper bound of the range) is also missing from this listing.
2589  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2590  Split(below_equal, if_true, if_false, fall_through);
2591 
2592  context()->Plug(if_true, if_false);
2593 }
2594 
2595 
// Inline implementation of the %_IsSpecObject intrinsic: true iff the
// argument is a heap object whose instance type is at or above
// FIRST_SPEC_OBJECT_TYPE.
2596 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2597  ZoneList<Expression*>* args = expr->arguments();
2598  ASSERT(args->length() == 1);
2599 
2600  VisitForAccumulatorValue(args->at(0));
2601 
2602  Label materialize_true, materialize_false;
2603  Label* if_true = NULL;
2604  Label* if_false = NULL;
2605  Label* fall_through = NULL;
2606  context()->PrepareTest(&materialize_true, &materialize_false,
2607  &if_true, &if_false, &fall_through);
2608 
2609  __ JumpIfSmi(eax, if_false);
2610  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ebx);
2611  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2612  Split(above_equal, if_true, if_false, fall_through);
2613 
2614  context()->Plug(if_true, if_false);
2615 }
2616 
2617 
// Inline implementation of the %_IsUndetectableObject intrinsic: true iff
// the argument is a heap object whose map has the undetectable bit set.
2618 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2619  ZoneList<Expression*>* args = expr->arguments();
2620  ASSERT(args->length() == 1);
2621 
2622  VisitForAccumulatorValue(args->at(0));
2623 
2624  Label materialize_true, materialize_false;
2625  Label* if_true = NULL;
2626  Label* if_false = NULL;
2627  Label* fall_through = NULL;
2628  context()->PrepareTest(&materialize_true, &materialize_false,
2629  &if_true, &if_false, &fall_through);
2630 
2631  __ JumpIfSmi(eax, if_false);
     // NOTE(review): original lines 2632-2633 are missing from this
     // listing — the test below reads ebx, so they presumably loaded
     // eax's map and its bit-field byte into ebx. Confirm against
     // upstream.
2634  __ test(ebx, Immediate(1 << Map::kIsUndetectable));
2635  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2636  Split(not_zero, if_true, if_false, fall_through);
2637 
2638  context()->Plug(if_true, if_false);
2639 }
2640 
2641 
// Inline implementation of the
// %_IsStringWrapperSafeForDefaultValueOf intrinsic: checks whether a
// String wrapper object still uses the unmodified default valueOf —
// fast-case properties, no own 'valueOf' key in the descriptor array, and
// the unmodified String prototype — caching a positive answer in a map
// bit.
// NOTE(review): this listing is missing many original lines (2660-2662,
// 2666, 2675, 2701, 2705, 2708-2713, 2717-2718), including the map load,
// the cached-bit test operand, the descriptor-length load, the map
// reload, the prototype comparison operands, and the bit-set store. The
// surviving lines are kept verbatim; confirm the full body against the
// upstream source before relying on any detail here.
2642 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2643  CallRuntime* expr) {
2644  ZoneList<Expression*>* args = expr->arguments();
2645  ASSERT(args->length() == 1);
2646 
2647  VisitForAccumulatorValue(args->at(0));
2648 
2649  Label materialize_true, materialize_false;
2650  Label* if_true = NULL;
2651  Label* if_false = NULL;
2652  Label* fall_through = NULL;
2653  context()->PrepareTest(&materialize_true, &materialize_false,
2654  &if_true, &if_false, &fall_through);
2655 
2656  if (FLAG_debug_code) __ AbortIfSmi(eax);
2657 
2658  // Check whether this map has already been checked to be safe for default
2659  // valueOf.
2663  __ j(not_zero, if_true);
2664 
2665  // Check for fast case object. Return false for slow case objects.
2667  __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
2668  __ cmp(ecx, FACTORY->hash_table_map());
2669  __ j(equal, if_false);
2670 
2671  // Look for valueOf symbol in the descriptor array, and indicate false if
2672  // found. The type is not checked, so if it is a transition it is a false
2673  // negative.
2674  __ LoadInstanceDescriptors(ebx, ebx)
2676  // ebx: descriptor array
2677  // ecx: length of descriptor array
2678  // Calculate the end of the descriptor array.
2679  STATIC_ASSERT(kSmiTag == 0);
2680  STATIC_ASSERT(kSmiTagSize == 1);
2681  STATIC_ASSERT(kPointerSize == 4);
2682  __ lea(ecx, Operand(ebx, ecx, times_2, FixedArray::kHeaderSize));
2683  // Calculate location of the first key name.
2684  __ add(ebx,
2685  Immediate(FixedArray::kHeaderSize +
2686  DescriptorArray::kFirstIndex * kPointerSize));
2687  // Loop through all the keys in the descriptor array. If one of these is the
2688  // symbol valueOf the result is false.
2689  Label entry, loop;
2690  __ jmp(&entry);
2691  __ bind(&loop);
2692  __ mov(edx, FieldOperand(ebx, 0));
2693  __ cmp(edx, FACTORY->value_of_symbol());
2694  __ j(equal, if_false);
2695  __ add(ebx, Immediate(kPointerSize));
2696  __ bind(&entry);
2697  __ cmp(ebx, ecx);
2698  __ j(not_equal, &loop);
2699 
2700  // Reload map as register ebx was used as temporary above.
2702 
2703  // If a valueOf property is not found on the object check that it's
2704  // prototype is the un-modified String prototype. If not result is false.
2706  __ JumpIfSmi(ecx, if_false);
2707  __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
2709  __ mov(edx,
2711  __ cmp(ecx,
2714  __ j(not_equal, if_false);
2715  // Set the bit in the map to indicate that it has been checked safe for
2716  // default valueOf and set true result.
2719  __ jmp(if_true);
2720 
2721  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2722  context()->Plug(if_true, if_false);
2723 }
2724 
2725 
// Inline implementation of the %_IsFunction intrinsic: true iff the
// argument is a heap object with instance type JS_FUNCTION_TYPE.
2726 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
2727  ZoneList<Expression*>* args = expr->arguments();
2728  ASSERT(args->length() == 1);
2729 
2730  VisitForAccumulatorValue(args->at(0));
2731 
2732  Label materialize_true, materialize_false;
2733  Label* if_true = NULL;
2734  Label* if_false = NULL;
2735  Label* fall_through = NULL;
2736  context()->PrepareTest(&materialize_true, &materialize_false,
2737  &if_true, &if_false, &fall_through);
2738 
2739  __ JumpIfSmi(eax, if_false);
2740  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
2741  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2742  Split(equal, if_true, if_false, fall_through);
2743 
2744  context()->Plug(if_true, if_false);
2745 }
2746 
2747 
// Inline implementation of the %_IsArray intrinsic: true iff the argument
// is a heap object with instance type JS_ARRAY_TYPE.
2748 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2749  ZoneList<Expression*>* args = expr->arguments();
2750  ASSERT(args->length() == 1);
2751 
2752  VisitForAccumulatorValue(args->at(0));
2753 
2754  Label materialize_true, materialize_false;
2755  Label* if_true = NULL;
2756  Label* if_false = NULL;
2757  Label* fall_through = NULL;
2758  context()->PrepareTest(&materialize_true, &materialize_false,
2759  &if_true, &if_false, &fall_through);
2760 
2761  __ JumpIfSmi(eax, if_false);
2762  __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
2763  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2764  Split(equal, if_true, if_false, fall_through);
2765 
2766  context()->Plug(if_true, if_false);
2767 }
2768 
2769 
// Inline implementation of the %_IsRegExp intrinsic: true iff the
// argument is a heap object with instance type JS_REGEXP_TYPE.
2770 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2771  ZoneList<Expression*>* args = expr->arguments();
2772  ASSERT(args->length() == 1);
2773 
2774  VisitForAccumulatorValue(args->at(0));
2775 
2776  Label materialize_true, materialize_false;
2777  Label* if_true = NULL;
2778  Label* if_false = NULL;
2779  Label* fall_through = NULL;
2780  context()->PrepareTest(&materialize_true, &materialize_false,
2781  &if_true, &if_false, &fall_through);
2782 
2783  __ JumpIfSmi(eax, if_false);
2784  __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
2785  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2786  Split(equal, if_true, if_false, fall_through);
2787 
2788  context()->Plug(if_true, if_false);
2789 }
2790 
2791 
2792 
// Inline implementation of the %_IsConstructCall intrinsic: walks to the
// calling frame (skipping an arguments-adaptor frame if present) and
// tests whether its frame marker is StackFrame::CONSTRUCT.
// NOTE(review): this listing is missing several original lines (2804,
// 2808-2809, 2811, 2815) — presumably the frame-pointer loads and the
// adaptor-frame marker comparison. The surviving lines are kept verbatim;
// confirm the full body against the upstream source.
2793 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
2794  ASSERT(expr->arguments()->length() == 0);
2795 
2796  Label materialize_true, materialize_false;
2797  Label* if_true = NULL;
2798  Label* if_false = NULL;
2799  Label* fall_through = NULL;
2800  context()->PrepareTest(&materialize_true, &materialize_false,
2801  &if_true, &if_false, &fall_through);
2802 
2803  // Get the frame pointer for the calling frame.
2805 
2806  // Skip the arguments adaptor frame if it exists.
2807  Label check_frame_marker;
2810  __ j(not_equal, &check_frame_marker);
2812 
2813  // Check the marker in the calling frame.
2814  __ bind(&check_frame_marker);
2816  Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
2817  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2818  Split(equal, if_true, if_false, fall_through);
2819 
2820  context()->Plug(if_true, if_false);
2821 }
2822 
2823 
2824 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
2825  ZoneList<Expression*>* args = expr->arguments();
2826  ASSERT(args->length() == 2);
2827 
2828  // Load the two objects into registers and perform the comparison.
2829  VisitForStackValue(args->at(0));
2830  VisitForAccumulatorValue(args->at(1));
2831 
2832  Label materialize_true, materialize_false;
2833  Label* if_true = NULL;
2834  Label* if_false = NULL;
2835  Label* fall_through = NULL;
2836  context()->PrepareTest(&materialize_true, &materialize_false,
2837  &if_true, &if_false, &fall_through);
2838 
2839  __ pop(ebx);
2840  __ cmp(eax, ebx);
2841  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2842  Split(equal, if_true, if_false, fall_through);
2843 
2844  context()->Plug(if_true, if_false);
2845 }
2846 
2847 
2848 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
2849  ZoneList<Expression*>* args = expr->arguments();
2850  ASSERT(args->length() == 1);
2851 
2852  // ArgumentsAccessStub expects the key in edx and the formal
2853  // parameter count in eax.
2854  VisitForAccumulatorValue(args->at(0));
2855  __ mov(edx, eax);
2856  __ SafeSet(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
2857  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
2858  __ CallStub(&stub);
2859  context()->Plug(eax);
2860 }
2861 
2862 
// Inline code for the ArgumentsLength intrinsic: yields the formal
// parameter count, or the actual count when called through an
// arguments adaptor frame.
// NOTE(review): this listing dropped original source lines 2871-2873
// (frame walk and marker compare) and 2878 (length load from the
// adaptor frame) — verify against upstream full-codegen-ia32.cc.
2863 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
2864  ASSERT(expr->arguments()->length() == 0);
2865 
2866  Label exit;
2867  // Get the number of formal parameters.
2868  __ SafeSet(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
2869 
2870  // Check if the calling frame is an arguments adaptor frame.
2874  __ j(not_equal, &exit);
2875 
2876  // Arguments adaptor case: Read the arguments length from the
2877  // adaptor frame.
2879 
2880  __ bind(&exit);
2881  if (FLAG_debug_code) __ AbortIfNotSmi(eax);
2882  context()->Plug(eax);
2883 }
2884 
2885 
// Inline code for the ClassOf intrinsic: computes the class name of
// the argument (null for non-objects, 'Function' for functions,
// 'Object' for objects with a non-function constructor, otherwise the
// constructor's instance class name).
// NOTE(review): this listing dropped original source lines 2900,
// 2904-2905, 2909, 2913, 2916 and 2922-2923 (STATIC_ASSERTs, an
// instance-type compare, the constructor load and the class-name
// loads) — verify against upstream full-codegen-ia32.cc before reuse.
2886 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2887  ZoneList<Expression*>* args = expr->arguments();
2888  ASSERT(args->length() == 1);
2889  Label done, null, function, non_function_constructor;
2890 
2891  VisitForAccumulatorValue(args->at(0));
2892 
2893  // If the object is a smi, we return null.
2894  __ JumpIfSmi(eax, &null);
2895 
2896  // Check that the object is a JS object but take special care of JS
2897  // functions to make sure they have 'Function' as their class.
2898  // Assume that there are only two callable types, and one of them is at
2899  // either end of the type range for JS object types. Saves extra comparisons.
2901  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, eax);
2902  // Map is now in eax.
2903  __ j(below, &null);
2906  __ j(equal, &function);
2907 
2908  __ CmpInstanceType(eax, LAST_SPEC_OBJECT_TYPE);
2910  LAST_SPEC_OBJECT_TYPE - 1);
2911  __ j(equal, &function);
2912  // Assume that there is no larger type.
2914 
2915  // Check if the constructor in the map is a JS function.
2917  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
2918  __ j(not_equal, &non_function_constructor);
2919 
2920  // eax now contains the constructor function. Grab the
2921  // instance class name from there.
2924  __ jmp(&done);
2925 
2926  // Functions have class 'Function'.
2927  __ bind(&function);
2928  __ mov(eax, isolate()->factory()->function_class_symbol());
2929  __ jmp(&done);
2930 
2931  // Objects with a non-function constructor have class 'Object'.
2932  __ bind(&non_function_constructor);
2933  __ mov(eax, isolate()->factory()->Object_symbol());
2934  __ jmp(&done);
2935 
2936  // Non-JS objects have class null.
2937  __ bind(&null);
2938  __ mov(eax, isolate()->factory()->null_value());
2939 
2940  // All done.
2941  __ bind(&done);
2942 
2943  context()->Plug(eax);
2944 }
2945 
2946 
2947 void FullCodeGenerator::EmitLog(CallRuntime* expr) {
2948  // Conditionally generate a log call.
2949  // Args:
2950  // 0 (literal string): The type of logging (corresponds to the flags).
2951  // This is used to determine whether or not to generate the log call.
2952  // 1 (string): Format string. Access the string at argument index 2
2953  // with '%2s' (see Logger::LogRuntime for all the formats).
2954  // 2 (array): Arguments to the format string.
2955  ZoneList<Expression*>* args = expr->arguments();
2956  ASSERT_EQ(args->length(), 3);
2957  if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
2958  VisitForStackValue(args->at(1));
2959  VisitForStackValue(args->at(2));
2960  __ CallRuntime(Runtime::kLog, 2);
2961  }
2962  // Finally, we're expected to leave a value on the top of the stack.
2963  __ mov(eax, isolate()->factory()->undefined_value());
2964  context()->Plug(eax);
2965 }
2966 
2967 
// Inline code for the RandomHeapNumber intrinsic: allocates a heap
// number and fills it with 32 random bits converted to a double in
// [0, 1), using SSE2 when available and x87 FPU code otherwise.
// NOTE(review): this listing dropped original source lines 2986,
// 2994, 3002, 3005, 3007-3008, 3010 and 3012 (global-context load,
// the SSE2 feature test, and several store/FPU lines) — verify
// against upstream full-codegen-ia32.cc before reuse.
2968 void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
2969  ASSERT(expr->arguments()->length() == 0);
2970 
2971  Label slow_allocate_heapnumber;
2972  Label heapnumber_allocated;
2973 
2974  __ AllocateHeapNumber(edi, ebx, ecx, &slow_allocate_heapnumber);
2975  __ jmp(&heapnumber_allocated);
2976 
2977  __ bind(&slow_allocate_heapnumber);
2978  // Allocate a heap number.
2979  __ CallRuntime(Runtime::kNumberAlloc, 0);
2980  __ mov(edi, eax);
2981 
2982  __ bind(&heapnumber_allocated);
2983 
2984  __ PrepareCallCFunction(1, ebx);
2985  __ mov(eax, ContextOperand(context_register(), Context::GLOBAL_INDEX));
2987  __ mov(Operand(esp, 0), eax);
2988  __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
2989 
2990  // Convert 32 random bits in eax to 0.(32 random bits) in a double
2991  // by computing:
2992  // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
2993  // This is implemented on both SSE2 and FPU.
2995  CpuFeatures::Scope fscope(SSE2);
2996  __ mov(ebx, Immediate(0x49800000)); // 1.0 x 2^20 as single.
2997  __ movd(xmm1, ebx);
2998  __ movd(xmm0, eax);
2999  __ cvtss2sd(xmm1, xmm1);
3000  __ xorps(xmm0, xmm1);
3001  __ subsd(xmm0, xmm1);
3003  } else {
3004  // 0x4130000000000000 is 1.0 x 2^20 as a double.
3006  Immediate(0x41300000));
3009  __ mov(FieldOperand(edi, HeapNumber::kMantissaOffset), Immediate(0));
3011  __ fsubp(1);
3013  }
3014  __ mov(eax, edi);
3015  context()->Plug(eax);
3016 }
3017 
3018 
3019 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3020  // Load the arguments on the stack and call the stub.
3021  SubStringStub stub;
3022  ZoneList<Expression*>* args = expr->arguments();
3023  ASSERT(args->length() == 3);
3024  VisitForStackValue(args->at(0));
3025  VisitForStackValue(args->at(1));
3026  VisitForStackValue(args->at(2));
3027  __ CallStub(&stub);
3028  context()->Plug(eax);
3029 }
3030 
3031 
3032 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3033  // Load the arguments on the stack and call the stub.
3034  RegExpExecStub stub;
3035  ZoneList<Expression*>* args = expr->arguments();
3036  ASSERT(args->length() == 4);
3037  VisitForStackValue(args->at(0));
3038  VisitForStackValue(args->at(1));
3039  VisitForStackValue(args->at(2));
3040  VisitForStackValue(args->at(3));
3041  __ CallStub(&stub);
3042  context()->Plug(eax);
3043 }
3044 
3045 
// Inline code for the ValueOf intrinsic: for a JSValue wrapper,
// returns the wrapped value; otherwise returns the object unchanged.
// NOTE(review): this listing dropped original source line 3058 (the
// load of the wrapped value from JSValue::kValueOffset) — verify
// against upstream full-codegen-ia32.cc before reuse.
3046 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3047  ZoneList<Expression*>* args = expr->arguments();
3048  ASSERT(args->length() == 1);
3049 
3050  VisitForAccumulatorValue(args->at(0)); // Load the object.
3051 
3052  Label done;
3053  // If the object is a smi return the object.
3054  __ JumpIfSmi(eax, &done, Label::kNear);
3055  // If the object is not a value type, return the object.
3056  __ CmpObjectType(eax, JS_VALUE_TYPE, ebx);
3057  __ j(not_equal, &done, Label::kNear);
3059 
3060  __ bind(&done);
3061  context()->Plug(eax);
3062 }
3063 
3064 
3065 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3066  ZoneList<Expression*>* args = expr->arguments();
3067  ASSERT(args->length() == 2);
3068  ASSERT_NE(NULL, args->at(1)->AsLiteral());
3069  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->handle()));
3070 
3071  VisitForAccumulatorValue(args->at(0)); // Load the object.
3072 
3073  Label runtime, done;
3074  Register object = eax;
3075  Register result = eax;
3076  Register scratch = ecx;
3077 
3078 #ifdef DEBUG
3079  __ AbortIfSmi(object);
3080  __ CmpObjectType(object, JS_DATE_TYPE, scratch);
3081  __ Assert(equal, "Trying to get date field from non-date.");
3082 #endif
3083 
3084  if (index->value() == 0) {
3085  __ mov(result, FieldOperand(object, JSDate::kValueOffset));
3086  } else {
3087  if (index->value() < JSDate::kFirstUncachedField) {
3088  ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3089  __ mov(scratch, Operand::StaticVariable(stamp));
3090  __ cmp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
3091  __ j(not_equal, &runtime, Label::kNear);
3092  __ mov(result, FieldOperand(object, JSDate::kValueOffset +
3093  kPointerSize * index->value()));
3094  __ jmp(&done);
3095  }
3096  __ bind(&runtime);
3097  __ PrepareCallCFunction(2, scratch);
3098  __ mov(Operand(esp, 0), object);
3099  __ mov(Operand(esp, 1 * kPointerSize), Immediate(index));
3100  __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3101  __ bind(&done);
3102  }
3103  context()->Plug(result);
3104 }
3105 
3106 
// Inline code for the MathPow intrinsic: pushes both arguments and
// calls either MathPowStub or the runtime.
// NOTE(review): this listing dropped original source line 3114 (the
// condition guarding the stub path, leaving a dangling `} else {`) —
// verify against upstream full-codegen-ia32.cc before reuse.
3107 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3108  // Load the arguments on the stack and call the runtime function.
3109  ZoneList<Expression*>* args = expr->arguments();
3110  ASSERT(args->length() == 2);
3111  VisitForStackValue(args->at(0));
3112  VisitForStackValue(args->at(1));
3113 
3115  MathPowStub stub(MathPowStub::ON_STACK);
3116  __ CallStub(&stub);
3117  } else {
3118  __ CallRuntime(Runtime::kMath_pow, 2);
3119  }
3120  context()->Plug(eax);
3121 }
3122 
3123 
// Inline code for the SetValueOf intrinsic: stores a value into a
// JSValue wrapper (with write barrier) and yields the value; for
// non-JSValue objects the value is returned unchanged.
// NOTE(review): this listing dropped original source line 3141 (the
// store of eax into the JSValue's value slot) — verify against
// upstream full-codegen-ia32.cc before reuse.
3124 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3125  ZoneList<Expression*>* args = expr->arguments();
3126  ASSERT(args->length() == 2);
3127 
3128  VisitForStackValue(args->at(0)); // Load the object.
3129  VisitForAccumulatorValue(args->at(1)); // Load the value.
3130  __ pop(ebx); // eax = value. ebx = object.
3131 
3132  Label done;
3133  // If the object is a smi, return the value.
3134  __ JumpIfSmi(ebx, &done, Label::kNear);
3135 
3136  // If the object is not a value type, return the value.
3137  __ CmpObjectType(ebx, JS_VALUE_TYPE, ecx);
3138  __ j(not_equal, &done, Label::kNear);
3139 
3140  // Store the value.
3142 
3143  // Update the write barrier. Save the value as it will be
3144  // overwritten by the write barrier code and is needed afterward.
3145  __ mov(edx, eax);
3146  __ RecordWriteField(ebx, JSValue::kValueOffset, edx, ecx, kDontSaveFPRegs);
3147 
3148  __ bind(&done);
3149  context()->Plug(eax);
3150 }
3151 
3152 
3153 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3154  ZoneList<Expression*>* args = expr->arguments();
3155  ASSERT_EQ(args->length(), 1);
3156 
3157  // Load the argument on the stack and call the stub.
3158  VisitForStackValue(args->at(0));
3159 
3160  NumberToStringStub stub;
3161  __ CallStub(&stub);
3162  context()->Plug(eax);
3163 }
3164 
3165 
3166 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3167  ZoneList<Expression*>* args = expr->arguments();
3168  ASSERT(args->length() == 1);
3169 
3170  VisitForAccumulatorValue(args->at(0));
3171 
3172  Label done;
3173  StringCharFromCodeGenerator generator(eax, ebx);
3174  generator.GenerateFast(masm_);
3175  __ jmp(&done);
3176 
3177  NopRuntimeCallHelper call_helper;
3178  generator.GenerateSlow(masm_, call_helper);
3179 
3180  __ bind(&done);
3181  context()->Plug(ebx);
3182 }
3183 
3184 
// Inline code for the StringCharCodeAt intrinsic: fast inline lookup
// with out-of-line handling for non-smi indices (undefined triggers
// conversion) and out-of-range indices (NaN per spec).
// NOTE(review): this listing dropped original source line 3207 (the
// final constructor argument closing the StringCharCodeAtGenerator
// call) — verify against upstream full-codegen-ia32.cc before reuse.
3185 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3186  ZoneList<Expression*>* args = expr->arguments();
3187  ASSERT(args->length() == 2);
3188 
3189  VisitForStackValue(args->at(0));
3190  VisitForAccumulatorValue(args->at(1));
3191 
3192  Register object = ebx;
3193  Register index = eax;
3194  Register result = edx;
3195 
3196  __ pop(object);
3197 
3198  Label need_conversion;
3199  Label index_out_of_range;
3200  Label done;
3201  StringCharCodeAtGenerator generator(object,
3202  index,
3203  result,
3204  &need_conversion,
3205  &need_conversion,
3206  &index_out_of_range,
3208  generator.GenerateFast(masm_);
3209  __ jmp(&done);
3210 
3211  __ bind(&index_out_of_range);
3212  // When the index is out of range, the spec requires us to return
3213  // NaN.
3214  __ Set(result, Immediate(isolate()->factory()->nan_value()));
3215  __ jmp(&done);
3216 
3217  __ bind(&need_conversion);
3218  // Move the undefined value into the result register, which will
3219  // trigger conversion.
3220  __ Set(result, Immediate(isolate()->factory()->undefined_value()));
3221  __ jmp(&done);
3222 
3223  NopRuntimeCallHelper call_helper;
3224  generator.GenerateSlow(masm_, call_helper);
3225 
3226  __ bind(&done);
3227  context()->Plug(result);
3228 }
3229 
3230 
// Inline code for the StringCharAt intrinsic: like StringCharCodeAt
// but returns a one-character string; out-of-range indices yield the
// empty string per spec.
// NOTE(review): this listing dropped original source line 3255 (the
// final constructor argument closing the StringCharAtGenerator call)
// — verify against upstream full-codegen-ia32.cc before reuse.
3231 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3232  ZoneList<Expression*>* args = expr->arguments();
3233  ASSERT(args->length() == 2);
3234 
3235  VisitForStackValue(args->at(0));
3236  VisitForAccumulatorValue(args->at(1));
3237 
3238  Register object = ebx;
3239  Register index = eax;
3240  Register scratch = edx;
3241  Register result = eax;
3242 
3243  __ pop(object);
3244 
3245  Label need_conversion;
3246  Label index_out_of_range;
3247  Label done;
3248  StringCharAtGenerator generator(object,
3249  index,
3250  scratch,
3251  result,
3252  &need_conversion,
3253  &need_conversion,
3254  &index_out_of_range,
3256  generator.GenerateFast(masm_);
3257  __ jmp(&done);
3258 
3259  __ bind(&index_out_of_range);
3260  // When the index is out of range, the spec requires us to return
3261  // the empty string.
3262  __ Set(result, Immediate(isolate()->factory()->empty_string()));
3263  __ jmp(&done);
3264 
3265  __ bind(&need_conversion);
3266  // Move smi zero into the result register, which will trigger
3267  // conversion.
3268  __ Set(result, Immediate(Smi::FromInt(0)));
3269  __ jmp(&done);
3270 
3271  NopRuntimeCallHelper call_helper;
3272  generator.GenerateSlow(masm_, call_helper);
3273 
3274  __ bind(&done);
3275  context()->Plug(result);
3276 }
3277 
3278 
3279 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3280  ZoneList<Expression*>* args = expr->arguments();
3281  ASSERT_EQ(2, args->length());
3282 
3283  VisitForStackValue(args->at(0));
3284  VisitForStackValue(args->at(1));
3285 
3286  StringAddStub stub(NO_STRING_ADD_FLAGS);
3287  __ CallStub(&stub);
3288  context()->Plug(eax);
3289 }
3290 
3291 
3292 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3293  ZoneList<Expression*>* args = expr->arguments();
3294  ASSERT_EQ(2, args->length());
3295 
3296  VisitForStackValue(args->at(0));
3297  VisitForStackValue(args->at(1));
3298 
3299  StringCompareStub stub;
3300  __ CallStub(&stub);
3301  context()->Plug(eax);
3302 }
3303 
3304 
// Inline code for the MathSin intrinsic via the transcendental cache.
// NOTE(review): this listing dropped original source line 3308 (the
// stub constructor's second argument) — verify against upstream.
3305 void FullCodeGenerator::EmitMathSin(CallRuntime* expr) {
3306  // Load the argument on the stack and call the stub.
3307  TranscendentalCacheStub stub(TranscendentalCache::SIN,
3309  ZoneList<Expression*>* args = expr->arguments();
3310  ASSERT(args->length() == 1);
3311  VisitForStackValue(args->at(0));
3312  __ CallStub(&stub);
3313  context()->Plug(eax);
3314 }
3315 
3316 
// Inline code for the MathCos intrinsic via the transcendental cache.
// NOTE(review): this listing dropped original source line 3320 (the
// stub constructor's second argument) — verify against upstream.
3317 void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
3318  // Load the argument on the stack and call the stub.
3319  TranscendentalCacheStub stub(TranscendentalCache::COS,
3321  ZoneList<Expression*>* args = expr->arguments();
3322  ASSERT(args->length() == 1);
3323  VisitForStackValue(args->at(0));
3324  __ CallStub(&stub);
3325  context()->Plug(eax);
3326 }
3327 
3328 
// Inline code for the MathTan intrinsic via the transcendental cache.
// NOTE(review): this listing dropped original source line 3332 (the
// stub constructor's second argument) — verify against upstream.
3329 void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
3330  // Load the argument on the stack and call the stub.
3331  TranscendentalCacheStub stub(TranscendentalCache::TAN,
3333  ZoneList<Expression*>* args = expr->arguments();
3334  ASSERT(args->length() == 1);
3335  VisitForStackValue(args->at(0));
3336  __ CallStub(&stub);
3337  context()->Plug(eax);
3338 }
3339 
3340 
// Inline code for the MathLog intrinsic via the transcendental cache.
// NOTE(review): this listing dropped original source line 3344 (the
// stub constructor's second argument) — verify against upstream.
3341 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
3342  // Load the argument on the stack and call the stub.
3343  TranscendentalCacheStub stub(TranscendentalCache::LOG,
3345  ZoneList<Expression*>* args = expr->arguments();
3346  ASSERT(args->length() == 1);
3347  VisitForStackValue(args->at(0));
3348  __ CallStub(&stub);
3349  context()->Plug(eax);
3350 }
3351 
3352 
3353 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
3354  // Load the argument on the stack and call the runtime function.
3355  ZoneList<Expression*>* args = expr->arguments();
3356  ASSERT(args->length() == 1);
3357  VisitForStackValue(args->at(0));
3358  __ CallRuntime(Runtime::kMath_sqrt, 1);
3359  context()->Plug(eax);
3360 }
3361 
3362 
// Inline code for the CallFunction intrinsic: pushes receiver and
// arguments, then either invokes the function directly or, for a
// JSFunctionProxy, falls back to Runtime::kCall.
// NOTE(review): this listing dropped original source line 3383 (the
// context-register restore after InvokeFunction) — verify against
// upstream full-codegen-ia32.cc before reuse.
3363 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3364  ZoneList<Expression*>* args = expr->arguments();
3365  ASSERT(args->length() >= 2);
3366 
3367  int arg_count = args->length() - 2; // 2 ~ receiver and function.
3368  for (int i = 0; i < arg_count + 1; ++i) {
3369  VisitForStackValue(args->at(i));
3370  }
3371  VisitForAccumulatorValue(args->last()); // Function.
3372 
3373  // Check for proxy.
3374  Label proxy, done;
3375  __ CmpObjectType(eax, JS_FUNCTION_PROXY_TYPE, ebx);
3376  __ j(equal, &proxy);
3377 
3378  // InvokeFunction requires the function in edi. Move it in there.
3379  __ mov(edi, result_register());
3380  ParameterCount count(arg_count);
3381  __ InvokeFunction(edi, count, CALL_FUNCTION,
3382  NullCallWrapper(), CALL_AS_METHOD);
3384  __ jmp(&done);
3385 
3386  __ bind(&proxy);
3387  __ push(eax);
3388  __ CallRuntime(Runtime::kCall, args->length());
3389  __ bind(&done);
3390 
3391  context()->Plug(eax);
3392 }
3393 
3394 
3395 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3396  // Load the arguments on the stack and call the stub.
3397  RegExpConstructResultStub stub;
3398  ZoneList<Expression*>* args = expr->arguments();
3399  ASSERT(args->length() == 3);
3400  VisitForStackValue(args->at(0));
3401  VisitForStackValue(args->at(1));
3402  VisitForStackValue(args->at(2));
3403  __ CallStub(&stub);
3404  context()->Plug(eax);
3405 }
3406 
3407 
// Inline code for the GetFromCache intrinsic: probes one of the
// JS-function result caches at the finger position, falling back to
// Runtime::kGetFromCache on a miss.
// NOTE(review): this listing dropped original source lines 3429,
// 3431-3432 and 3439 (cache/finger loads between the register
// declarations and the compare) — verify against upstream
// full-codegen-ia32.cc before reuse.
3408 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3409  ZoneList<Expression*>* args = expr->arguments();
3410  ASSERT_EQ(2, args->length());
3411 
3412  ASSERT_NE(NULL, args->at(0)->AsLiteral());
3413  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
3414 
3415  Handle<FixedArray> jsfunction_result_caches(
3416  isolate()->global_context()->jsfunction_result_caches());
3417  if (jsfunction_result_caches->length() <= cache_id) {
3418  __ Abort("Attempt to use undefined cache.");
3419  __ mov(eax, isolate()->factory()->undefined_value());
3420  context()->Plug(eax);
3421  return;
3422  }
3423 
3424  VisitForAccumulatorValue(args->at(1));
3425 
3426  Register key = eax;
3427  Register cache = ebx;
3428  Register tmp = ecx;
3430  __ mov(cache,
3433  __ mov(cache,
3434  FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3435 
3436  Label done, not_found;
3437  // tmp now holds finger offset as a smi.
3438  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3440  __ cmp(key, CodeGenerator::FixedArrayElementOperand(cache, tmp));
3441  __ j(not_equal, &not_found);
3442 
3443  __ mov(eax, CodeGenerator::FixedArrayElementOperand(cache, tmp, 1));
3444  __ jmp(&done);
3445 
3446  __ bind(&not_found);
3447  // Call runtime to perform the lookup.
3448  __ push(cache);
3449  __ push(key);
3450  __ CallRuntime(Runtime::kGetFromCache, 2);
3451 
3452  __ bind(&done);
3453  context()->Plug(eax);
3454 }
3455 
3456 
3457 void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
3458  ZoneList<Expression*>* args = expr->arguments();
3459  ASSERT_EQ(2, args->length());
3460 
3461  Register right = eax;
3462  Register left = ebx;
3463  Register tmp = ecx;
3464 
3465  VisitForStackValue(args->at(0));
3466  VisitForAccumulatorValue(args->at(1));
3467  __ pop(left);
3468 
3469  Label done, fail, ok;
3470  __ cmp(left, right);
3471  __ j(equal, &ok);
3472  // Fail if either is a non-HeapObject.
3473  __ mov(tmp, left);
3474  __ and_(tmp, right);
3475  __ JumpIfSmi(tmp, &fail);
3476  __ mov(tmp, FieldOperand(left, HeapObject::kMapOffset));
3477  __ CmpInstanceType(tmp, JS_REGEXP_TYPE);
3478  __ j(not_equal, &fail);
3479  __ cmp(tmp, FieldOperand(right, HeapObject::kMapOffset));
3480  __ j(not_equal, &fail);
3481  __ mov(tmp, FieldOperand(left, JSRegExp::kDataOffset));
3482  __ cmp(tmp, FieldOperand(right, JSRegExp::kDataOffset));
3483  __ j(equal, &ok);
3484  __ bind(&fail);
3485  __ mov(eax, Immediate(isolate()->factory()->false_value()));
3486  __ jmp(&done);
3487  __ bind(&ok);
3488  __ mov(eax, Immediate(isolate()->factory()->true_value()));
3489  __ bind(&done);
3490 
3491  context()->Plug(eax);
3492 }
3493 
3494 
// Inline code for the HasCachedArrayIndex intrinsic: tests the
// string's hash field for a cached array index.
// NOTE(review): this listing dropped original source lines 3512-3513
// (the test of the hash field that the following Split consumes) —
// verify against upstream full-codegen-ia32.cc before reuse.
3495 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3496  ZoneList<Expression*>* args = expr->arguments();
3497  ASSERT(args->length() == 1);
3498 
3499  VisitForAccumulatorValue(args->at(0));
3500 
3501  if (FLAG_debug_code) {
3502  __ AbortIfNotString(eax);
3503  }
3504 
3505  Label materialize_true, materialize_false;
3506  Label* if_true = NULL;
3507  Label* if_false = NULL;
3508  Label* fall_through = NULL;
3509  context()->PrepareTest(&materialize_true, &materialize_false,
3510  &if_true, &if_false, &fall_through);
3511 
3514  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3515  Split(zero, if_true, if_false, fall_through);
3516 
3517  context()->Plug(if_true, if_false);
3518 }
3519 
3520 
// Inline code for the GetCachedArrayIndex intrinsic: extracts the
// array index cached in a string's hash field.
// NOTE(review): this listing dropped original source line 3530 (the
// hash-field load feeding IndexFromHash) — verify against upstream
// full-codegen-ia32.cc before reuse.
3521 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3522  ZoneList<Expression*>* args = expr->arguments();
3523  ASSERT(args->length() == 1);
3524  VisitForAccumulatorValue(args->at(0));
3525 
3526  if (FLAG_debug_code) {
3527  __ AbortIfNotString(eax);
3528  }
3529 
3531  __ IndexFromHash(eax, eax);
3532 
3533  context()->Plug(eax);
3534 }
3535 
3536 
// Inline fast path for Array.prototype.join on arrays of flat ASCII
// strings: validates the array and separator, sums string lengths,
// allocates one sequential ASCII result and copies strings (with the
// separator between elements) using three specialized loops for the
// empty, one-character and long separator cases. Any check failure
// jumps to `bailout`, which yields undefined (the caller falls back
// to the generic join).
// NOTE(review): this doxygen listing dropped numerous original source
// lines (e.g. 3609-3610, 3615, 3619, 3648, 3673, 3692-3698 in part,
// 3733-3780 in part, 3796) — mostly operand continuation lines and
// the context-register restore. Verify against upstream
// full-codegen-ia32.cc before reuse; the code below is incomplete.
3537 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3538  Label bailout, done, one_char_separator, long_separator,
3539  non_trivial_array, not_size_one_array, loop,
3540  loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
3541 
3542  ZoneList<Expression*>* args = expr->arguments();
3543  ASSERT(args->length() == 2);
3544  // We will leave the separator on the stack until the end of the function.
3545  VisitForStackValue(args->at(1));
3546  // Load this to eax (= array)
3547  VisitForAccumulatorValue(args->at(0));
3548  // All aliases of the same register have disjoint lifetimes.
3549  Register array = eax;
3550  Register elements = no_reg; // Will be eax.
3551 
3552  Register index = edx;
3553 
3554  Register string_length = ecx;
3555 
3556  Register string = esi;
3557 
3558  Register scratch = ebx;
3559 
3560  Register array_length = edi;
3561  Register result_pos = no_reg; // Will be edi.
3562 
3563  // Separator operand is already pushed.
3564  Operand separator_operand = Operand(esp, 2 * kPointerSize);
3565  Operand result_operand = Operand(esp, 1 * kPointerSize);
3566  Operand array_length_operand = Operand(esp, 0);
3567  __ sub(esp, Immediate(2 * kPointerSize));
3568  __ cld();
3569  // Check that the array is a JSArray
3570  __ JumpIfSmi(array, &bailout);
3571  __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
3572  __ j(not_equal, &bailout);
3573 
3574  // Check that the array has fast elements.
3575  __ CheckFastElements(scratch, &bailout);
3576 
3577  // If the array has length zero, return the empty string.
3578  __ mov(array_length, FieldOperand(array, JSArray::kLengthOffset));
3579  __ SmiUntag(array_length);
3580  __ j(not_zero, &non_trivial_array);
3581  __ mov(result_operand, isolate()->factory()->empty_string());
3582  __ jmp(&done);
3583 
3584  // Save the array length.
3585  __ bind(&non_trivial_array);
3586  __ mov(array_length_operand, array_length);
3587 
3588  // Save the FixedArray containing array's elements.
3589  // End of array's live range.
3590  elements = array;
3591  __ mov(elements, FieldOperand(array, JSArray::kElementsOffset));
3592  array = no_reg;
3593 
3594 
3595  // Check that all array elements are sequential ASCII strings, and
3596  // accumulate the sum of their lengths, as a smi-encoded value.
3597  __ Set(index, Immediate(0));
3598  __ Set(string_length, Immediate(0));
3599  // Loop condition: while (index < length).
3600  // Live loop registers: index, array_length, string,
3601  // scratch, string_length, elements.
3602  if (FLAG_debug_code) {
3603  __ cmp(index, array_length);
3604  __ Assert(less, "No empty arrays here in EmitFastAsciiArrayJoin");
3605  }
3606  __ bind(&loop);
3607  __ mov(string, FieldOperand(elements,
3608  index,
3611  __ JumpIfSmi(string, &bailout);
3612  __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
3613  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3614  __ and_(scratch, Immediate(
3616  __ cmp(scratch, kStringTag | kAsciiStringTag | kSeqStringTag);
3617  __ j(not_equal, &bailout);
3618  __ add(string_length,
3620  __ j(overflow, &bailout);
3621  __ add(index, Immediate(1));
3622  __ cmp(index, array_length);
3623  __ j(less, &loop);
3624 
3625  // If array_length is 1, return elements[0], a string.
3626  __ cmp(array_length, 1);
3627  __ j(not_equal, &not_size_one_array);
3628  __ mov(scratch, FieldOperand(elements, FixedArray::kHeaderSize));
3629  __ mov(result_operand, scratch);
3630  __ jmp(&done);
3631 
3632  __ bind(&not_size_one_array);
3633 
3634  // End of array_length live range.
3635  result_pos = array_length;
3636  array_length = no_reg;
3637 
3638  // Live registers:
3639  // string_length: Sum of string lengths, as a smi.
3640  // elements: FixedArray of strings.
3641 
3642  // Check that the separator is a flat ASCII string.
3643  __ mov(string, separator_operand);
3644  __ JumpIfSmi(string, &bailout);
3645  __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
3646  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3647  __ and_(scratch, Immediate(
3649  __ cmp(scratch, ASCII_STRING_TYPE);
3650  __ j(not_equal, &bailout);
3651 
3652  // Add (separator length times array_length) - separator length
3653  // to string_length.
3654  __ mov(scratch, separator_operand);
3655  __ mov(scratch, FieldOperand(scratch, SeqAsciiString::kLengthOffset));
3656  __ sub(string_length, scratch); // May be negative, temporarily.
3657  __ imul(scratch, array_length_operand);
3658  __ j(overflow, &bailout);
3659  __ add(string_length, scratch);
3660  __ j(overflow, &bailout);
3661 
3662  __ shr(string_length, 1);
3663  // Live registers and stack values:
3664  // string_length
3665  // elements
3666  __ AllocateAsciiString(result_pos, string_length, scratch,
3667  index, string, &bailout);
3668  __ mov(result_operand, result_pos);
3669  __ lea(result_pos, FieldOperand(result_pos, SeqAsciiString::kHeaderSize));
3670 
3671 
3672  __ mov(string, separator_operand);
3674  Immediate(Smi::FromInt(1)));
3675  __ j(equal, &one_char_separator);
3676  __ j(greater, &long_separator);
3677 
3678 
3679  // Empty separator case
3680  __ mov(index, Immediate(0));
3681  __ jmp(&loop_1_condition);
3682  // Loop condition: while (index < length).
3683  __ bind(&loop_1);
3684  // Each iteration of the loop concatenates one string to the result.
3685  // Live values in registers:
3686  // index: which element of the elements array we are adding to the result.
3687  // result_pos: the position to which we are currently copying characters.
3688  // elements: the FixedArray of strings we are joining.
3689 
3690  // Get string = array[index].
3691  __ mov(string, FieldOperand(elements, index,
3694  __ mov(string_length,
3696  __ shr(string_length, 1);
3697  __ lea(string,
3699  __ CopyBytes(string, result_pos, string_length, scratch);
3700  __ add(index, Immediate(1));
3701  __ bind(&loop_1_condition);
3702  __ cmp(index, array_length_operand);
3703  __ j(less, &loop_1); // End while (index < length).
3704  __ jmp(&done);
3705 
3706 
3707 
3708  // One-character separator case
3709  __ bind(&one_char_separator);
3710  // Replace separator with its ASCII character value.
3711  __ mov_b(scratch, FieldOperand(string, SeqAsciiString::kHeaderSize));
3712  __ mov_b(separator_operand, scratch);
3713 
3714  __ Set(index, Immediate(0));
3715  // Jump into the loop after the code that copies the separator, so the first
3716  // element is not preceded by a separator
3717  __ jmp(&loop_2_entry);
3718  // Loop condition: while (index < length).
3719  __ bind(&loop_2);
3720  // Each iteration of the loop concatenates one string to the result.
3721  // Live values in registers:
3722  // index: which element of the elements array we are adding to the result.
3723  // result_pos: the position to which we are currently copying characters.
3724 
3725  // Copy the separator character to the result.
3726  __ mov_b(scratch, separator_operand);
3727  __ mov_b(Operand(result_pos, 0), scratch);
3728  __ inc(result_pos);
3729 
3730  __ bind(&loop_2_entry);
3731  // Get string = array[index].
3732  __ mov(string, FieldOperand(elements, index,
3735  __ mov(string_length,
3737  __ shr(string_length, 1);
3738  __ lea(string,
3740  __ CopyBytes(string, result_pos, string_length, scratch);
3741  __ add(index, Immediate(1));
3742 
3743  __ cmp(index, array_length_operand);
3744  __ j(less, &loop_2); // End while (index < length).
3745  __ jmp(&done);
3746 
3747 
3748  // Long separator case (separator is more than one character).
3749  __ bind(&long_separator);
3750 
3751  __ Set(index, Immediate(0));
3752  // Jump into the loop after the code that copies the separator, so the first
3753  // element is not preceded by a separator
3754  __ jmp(&loop_3_entry);
3755  // Loop condition: while (index < length).
3756  __ bind(&loop_3);
3757  // Each iteration of the loop concatenates one string to the result.
3758  // Live values in registers:
3759  // index: which element of the elements array we are adding to the result.
3760  // result_pos: the position to which we are currently copying characters.
3761 
3762  // Copy the separator to the result.
3763  __ mov(string, separator_operand);
3764  __ mov(string_length,
3766  __ shr(string_length, 1);
3767  __ lea(string,
3769  __ CopyBytes(string, result_pos, string_length, scratch);
3770 
3771  __ bind(&loop_3_entry);
3772  // Get string = array[index].
3773  __ mov(string, FieldOperand(elements, index,
3776  __ mov(string_length,
3778  __ shr(string_length, 1);
3779  __ lea(string,
3781  __ CopyBytes(string, result_pos, string_length, scratch);
3782  __ add(index, Immediate(1));
3783 
3784  __ cmp(index, array_length_operand);
3785  __ j(less, &loop_3); // End while (index < length).
3786  __ jmp(&done);
3787 
3788 
3789  __ bind(&bailout);
3790  __ mov(result_operand, isolate()->factory()->undefined_value());
3791  __ bind(&done);
3792  __ mov(eax, result_operand);
3793  // Drop temp values from the stack, and restore context register.
3794  __ add(esp, Immediate(3 * kPointerSize));
3795 
3797  context()->Plug(eax);
3798 }
3799 
3800 
// Dispatches a runtime call: names starting with '_' are inlined
// intrinsics (EmitInlineRuntimeCall); otherwise the arguments are
// pushed and either a JS builtin is called through a call IC or the
// C runtime function is invoked directly.
// NOTE(review): this listing dropped original source lines 3815 (the
// builtins push for the JS-runtime path) and 3832 (the context
// restore after the IC call) — verify against upstream
// full-codegen-ia32.cc before reuse.
3801 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3802  Handle<String> name = expr->name();
3803  if (name->length() > 0 && name->Get(0) == '_') {
3804  Comment cmnt(masm_, "[ InlineRuntimeCall");
3805  EmitInlineRuntimeCall(expr);
3806  return;
3807  }
3808 
3809  Comment cmnt(masm_, "[ CallRuntime");
3810  ZoneList<Expression*>* args = expr->arguments();
3811 
3812  if (expr->is_jsruntime()) {
3813  // Prepare for calling JS runtime function.
3814  __ mov(eax, GlobalObjectOperand());
3816  }
3817 
3818  // Push the arguments ("left-to-right").
3819  int arg_count = args->length();
3820  for (int i = 0; i < arg_count; i++) {
3821  VisitForStackValue(args->at(i));
3822  }
3823 
3824  if (expr->is_jsruntime()) {
3825  // Call the JS runtime function via a call IC.
3826  __ Set(ecx, Immediate(expr->name()));
3827  RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
3828  Handle<Code> ic =
3829  isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
3830  CallIC(ic, mode, expr->id());
3831  // Restore context register.
3833  } else {
3834  // Call the C runtime function.
3835  __ CallRuntime(expr->function(), arg_count);
3836  }
3837  context()->Plug(eax);
3838 }
3839 
3840 
// Emits code for the unary operators: delete, void, !, typeof, unary
// +/-, and ~.  Each case plugs its result into the current expression
// context.
3842 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3843  switch (expr->op()) {
3844  case Token::DELETE: {
3845  Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3846  Property* property = expr->expression()->AsProperty();
3847  VariableProxy* proxy = expr->expression()->AsVariableProxy();
3848 
3849  if (property != NULL) {
3850  VisitForStackValue(property->obj());
3851  VisitForStackValue(property->key());
3852  StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE)
// NOTE(review): original line 3852 of the underlying file is absent
// from this capture — presumably the "? kNonStrictMode : kStrictMode"
// half of the conditional; confirm against the full source.
3853  __ push(Immediate(Smi::FromInt(strict_mode_flag)));
3854  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3855  context()->Plug(eax);
3856  } else if (proxy != NULL) {
3857  Variable* var = proxy->var();
3858  // Delete of an unqualified identifier is disallowed in strict mode
3859  // but "delete this" is allowed.
3860  ASSERT(language_mode() == CLASSIC_MODE || var->is_this());
3861  if (var->IsUnallocated()) {
// Global variable: defer to the DELETE builtin on the global object.
3862  __ push(GlobalObjectOperand());
3863  __ push(Immediate(var->name()));
3864  __ push(Immediate(Smi::FromInt(kNonStrictMode)));
3865  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3866  context()->Plug(eax);
3867  } else if (var->IsStackAllocated() || var->IsContextSlot()) {
3868  // Result of deleting non-global variables is false. 'this' is
3869  // not really a variable, though we implement it as one. The
3870  // subexpression does not have side effects.
3871  context()->Plug(var->is_this());
3872  } else {
3873  // Non-global variable. Call the runtime to try to delete from the
3874  // context where the variable was introduced.
3875  __ push(context_register());
3876  __ push(Immediate(var->name()));
3877  __ CallRuntime(Runtime::kDeleteContextSlot, 2);
3878  context()->Plug(eax);
3879  }
3880  } else {
3881  // Result of deleting non-property, non-variable reference is true.
3882  // The subexpression may have side effects.
3883  VisitForEffect(expr->expression());
3884  context()->Plug(true);
3885  }
3886  break;
3887  }
3888 
3889  case Token::VOID: {
3890  Comment cmnt(masm_, "[ UnaryOperation (VOID)");
// void e: evaluate e for effect only, result is always undefined.
3891  VisitForEffect(expr->expression());
3892  context()->Plug(isolate()->factory()->undefined_value());
3893  break;
3894  }
3895 
3896  case Token::NOT: {
3897  Comment cmnt(masm_, "[ UnaryOperation (NOT)");
3898  if (context()->IsEffect()) {
3899  // Unary NOT has no side effects so it's only necessary to visit the
3900  // subexpression. Match the optimizing compiler by not branching.
3901  VisitForEffect(expr->expression());
3902  } else if (context()->IsTest()) {
3903  const TestContext* test = TestContext::cast(context());
3904  // The labels are swapped for the recursive call.
3905  VisitForControl(expr->expression(),
3906  test->false_label(),
3907  test->true_label(),
3908  test->fall_through());
3909  context()->Plug(test->true_label(), test->false_label());
3910  } else {
3911  // We handle value contexts explicitly rather than simply visiting
3912  // for control and plugging the control flow into the context,
3913  // because we need to prepare a pair of extra administrative AST ids
3914  // for the optimizing compiler.
3915  ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
3916  Label materialize_true, materialize_false, done;
// Note the swapped labels: the subexpression's TRUE branch lands on
// materialize_false (the NOT of true), and vice versa.
3917  VisitForControl(expr->expression(),
3918  &materialize_false,
3919  &materialize_true,
3920  &materialize_true);
3921  __ bind(&materialize_true);
3922  PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
3923  if (context()->IsAccumulatorValue()) {
3924  __ mov(eax, isolate()->factory()->true_value());
3925  } else {
3926  __ Push(isolate()->factory()->true_value());
3927  }
3928  __ jmp(&done, Label::kNear);
3929  __ bind(&materialize_false);
3930  PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
3931  if (context()->IsAccumulatorValue()) {
3932  __ mov(eax, isolate()->factory()->false_value());
3933  } else {
3934  __ Push(isolate()->factory()->false_value());
3935  }
3936  __ bind(&done);
3937  }
3938  break;
3939  }
3940 
3941  case Token::TYPEOF: {
3942  Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
3943  { StackValueContext context(this);
3944  VisitForTypeofValue(expr->expression());
3945  }
3946  __ CallRuntime(Runtime::kTypeof, 1);
3947  context()->Plug(eax);
3948  break;
3949  }
3950 
3951  case Token::ADD: {
3952  Comment cmt(masm_, "[ UnaryOperation (ADD)");
// Unary plus is ToNumber; smis are already numbers so the stub call
// is skipped for them.
3953  VisitForAccumulatorValue(expr->expression());
3954  Label no_conversion;
3955  __ JumpIfSmi(result_register(), &no_conversion);
3956  ToNumberStub convert_stub;
3957  __ CallStub(&convert_stub);
3958  __ bind(&no_conversion);
3959  context()->Plug(result_register());
3960  break;
3961  }
3962 
3963  case Token::SUB:
3964  EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
3965  break;
3966 
3967  case Token::BIT_NOT:
3968  EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
3969  break;
3970 
3971  default:
3972  UNREACHABLE();
3973  }
3974 }
3975 
3976 
// Shared emitter for unary SUB and BIT_NOT: evaluates the operand into
// eax and dispatches to a UnaryOpStub via a patchable call IC.
// The overwrite mode lets the stub reuse the operand's heap number when
// the operand is a temporary.
3977 void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
3978  const char* comment) {
3979  Comment cmt(masm_, comment);
3980  bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
3981  UnaryOverwriteMode overwrite =
3982  can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
3983  UnaryOpStub stub(expr->op(), overwrite);
3984  // UnaryOpStub expects the argument to be in the
3985  // accumulator register eax.
3986  VisitForAccumulatorValue(expr->expression());
3987  SetSourcePosition(expr->position());
3988  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
3989  context()->Plug(eax);
3990 }
3991 
3992 
// Emits code for ++/-- (prefix and postfix) on a variable, named
// property, or keyed property.  Postfix in a value context reserves an
// extra stack slot up front so the pre-increment value can be returned
// after the store.
3993 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3994  Comment cmnt(masm_, "[ CountOperation");
3995  SetSourcePosition(expr->position());
3996 
3997  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
3998  // as the left-hand side.
3999  if (!expr->expression()->IsValidLeftHandSide()) {
4000  VisitForEffect(expr->expression());
4001  return;
4002  }
4003 
4004  // Expression can only be a property, a global or a (parameter or local)
4005  // slot.
4006  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
4007  LhsKind assign_type = VARIABLE;
4008  Property* prop = expr->expression()->AsProperty();
4009  // In case of a property we use the uninitialized expression context
4010  // of the key to detect a named property.
4011  if (prop != NULL) {
4012  assign_type =
4013  (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
4014  }
4015 
4016  // Evaluate expression and get value.
4017  if (assign_type == VARIABLE) {
4018  ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
4019  AccumulatorValueContext context(this);
4020  EmitVariableLoad(expr->expression()->AsVariableProxy());
4021  } else {
4022  // Reserve space for result of postfix operation.
4023  if (expr->is_postfix() && !context()->IsEffect()) {
4024  __ push(Immediate(Smi::FromInt(0)));
4025  }
4026  if (assign_type == NAMED_PROPERTY) {
4027  // Put the object both on the stack and in edx.
4028  VisitForAccumulatorValue(prop->obj());
4029  __ push(eax);
4030  __ mov(edx, eax);
4031  EmitNamedPropertyLoad(prop);
4032  } else {
4033  VisitForStackValue(prop->obj());
4034  VisitForStackValue(prop->key());
4035  __ mov(edx, Operand(esp, kPointerSize)); // Object.
4036  __ mov(ecx, Operand(esp, 0)); // Key.
4037  EmitKeyedPropertyLoad(prop);
4038  }
4039  }
4040 
4041  // We need a second deoptimization point after loading the value
4042  // in case evaluating the property load my have a side effect.
4043  if (assign_type == VARIABLE) {
4044  PrepareForBailout(expr->expression(), TOS_REG);
4045  } else {
4046  PrepareForBailoutForId(expr->CountId(), TOS_REG);
4047  }
4048 
4049  // Call ToNumber only if operand is not a smi.
4050  Label no_conversion;
4051  if (ShouldInlineSmiCase(expr->op())) {
4052  __ JumpIfSmi(eax, &no_conversion, Label::kNear);
4053  }
4054  ToNumberStub convert_stub;
4055  __ CallStub(&convert_stub);
4056  __ bind(&no_conversion);
4057 
4058  // Save result for postfix expressions.
4059  if (expr->is_postfix()) {
4060  if (!context()->IsEffect()) {
4061  // Save the result on the stack. If we have a named or keyed property
4062  // we store the result under the receiver that is currently on top
4063  // of the stack.
4064  switch (assign_type) {
4065  case VARIABLE:
4066  __ push(eax);
4067  break;
4068  case NAMED_PROPERTY:
4069  __ mov(Operand(esp, kPointerSize), eax);
4070  break;
4071  case KEYED_PROPERTY:
4072  __ mov(Operand(esp, 2 * kPointerSize), eax);
4073  break;
4074  }
4075  }
4076  }
4077 
4078  // Inline smi case if we are in a loop.
4079  Label done, stub_call;
4080  JumpPatchSite patch_site(masm_);
4081 
4082  if (ShouldInlineSmiCase(expr->op())) {
// Fast path: smi +/- 1 with overflow check.  Smi::FromInt(1) is the
// tagged representation, so a plain add/sub performs the smi add.
4083  if (expr->op() == Token::INC) {
4084  __ add(eax, Immediate(Smi::FromInt(1)));
4085  } else {
4086  __ sub(eax, Immediate(Smi::FromInt(1)));
4087  }
4088  __ j(overflow, &stub_call, Label::kNear);
4089  // We could eliminate this smi check if we split the code at
4090  // the first smi check before calling ToNumber.
4091  patch_site.EmitJumpIfSmi(eax, &done, Label::kNear);
4092 
4093  __ bind(&stub_call);
4094  // Call stub. Undo operation first.
4095  if (expr->op() == Token::INC) {
4096  __ sub(eax, Immediate(Smi::FromInt(1)));
4097  } else {
4098  __ add(eax, Immediate(Smi::FromInt(1)));
4099  }
4100  }
4101 
4102  // Record position before stub call.
4103  SetSourcePosition(expr->position());
4104 
4105  // Call stub for +1/-1.
4106  __ mov(edx, eax);
4107  __ mov(eax, Immediate(Smi::FromInt(1)));
4108  BinaryOpStub stub(expr->binary_op(), NO_OVERWRITE);
4109  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
4110  patch_site.EmitPatchInfo();
4111  __ bind(&done);
4112 
4113  // Store the value returned in eax.
4114  switch (assign_type) {
4115  case VARIABLE:
4116  if (expr->is_postfix()) {
4117  // Perform the assignment as if via '='.
4118  { EffectContext context(this);
4119  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4120  Token::ASSIGN);
4121  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4122  context.Plug(eax);
4123  }
4124  // For all contexts except EffectContext We have the result on
4125  // top of the stack.
4126  if (!context()->IsEffect()) {
4127  context()->PlugTOS();
4128  }
4129  } else {
4130  // Perform the assignment as if via '='.
4131  EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4132  Token::ASSIGN);
4133  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4134  context()->Plug(eax);
4135  }
4136  break;
4137  case NAMED_PROPERTY: {
4138  __ mov(ecx, prop->key()->AsLiteral()->handle());
4139  __ pop(edx);
4140  Handle<Code> ic = is_classic_mode()
4141  ? isolate()->builtins()->StoreIC_Initialize()
4142  : isolate()->builtins()->StoreIC_Initialize_Strict();
4143  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
4144  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4145  if (expr->is_postfix()) {
4146  if (!context()->IsEffect()) {
4147  context()->PlugTOS();
4148  }
4149  } else {
4150  context()->Plug(eax);
4151  }
4152  break;
4153  }
4154  case KEYED_PROPERTY: {
4155  __ pop(ecx);
4156  __ pop(edx);
4157  Handle<Code> ic = is_classic_mode()
4158  ? isolate()->builtins()->KeyedStoreIC_Initialize()
4159  : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4160  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
4161  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4162  if (expr->is_postfix()) {
4163  // Result is on the stack
4164  if (!context()->IsEffect()) {
4165  context()->PlugTOS();
4166  }
4167  } else {
4168  context()->Plug(eax);
4169  }
4170  break;
4171  }
4172  }
4173 }
4174 
4175 
// Loads the operand of a typeof expression.  Unlike a normal variable
// load, an unresolved global or lookup-slot reference must not throw a
// ReferenceError, so special non-throwing load paths are used.
4176 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4177  VariableProxy* proxy = expr->AsVariableProxy();
4178  ASSERT(!context()->IsEffect());
4179  ASSERT(!context()->IsTest());
4180 
4181  if (proxy != NULL && proxy->var()->IsUnallocated()) {
4182  Comment cmnt(masm_, "Global variable");
4183  __ mov(edx, GlobalObjectOperand());
4184  __ mov(ecx, Immediate(proxy->name()));
4185  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
4186  // Use a regular load, not a contextual load, to avoid a reference
4187  // error.
4188  CallIC(ic);
4189  PrepareForBailout(expr, TOS_REG);
4190  context()->Plug(eax);
4191  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4192  Label done, slow;
4193 
4194  // Generate code for loading from variables potentially shadowed
4195  // by eval-introduced variables.
4196  EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
4197 
// Slow path: resolve the name in the runtime without raising a
// reference error for missing bindings.
4198  __ bind(&slow);
4199  __ push(esi);
4200  __ push(Immediate(proxy->name()));
4201  __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
4202  PrepareForBailout(expr, TOS_REG);
4203  __ bind(&done);
4204 
4205  context()->Plug(eax);
4206  } else {
4207  // This expression cannot throw a reference error at the top level.
4208  VisitInDuplicateContext(expr);
4209  }
4210 }
4211 
4212 
// Emits an inlined comparison of (typeof sub_expr) against a literal
// string such as "number" or "function", branching directly to
// if_true/if_false instead of materializing the typeof string.
// NOTE(review): original lines 4230, 4238, 4255, 4261 and 4277 are
// absent from this capture — they appear to be map loads / test_b
// instructions feeding the conditions split below; confirm against the
// full source before relying on this listing.
4213 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4214  Expression* sub_expr,
4215  Handle<String> check) {
4216  Label materialize_true, materialize_false;
4217  Label* if_true = NULL;
4218  Label* if_false = NULL;
4219  Label* fall_through = NULL;
4220  context()->PrepareTest(&materialize_true, &materialize_false,
4221  &if_true, &if_false, &fall_through);
4222 
4223  { AccumulatorValueContext context(this);
4224  VisitForTypeofValue(sub_expr);
4225  }
4226  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4227 
4228  if (check->Equals(isolate()->heap()->number_symbol())) {
4229  __ JumpIfSmi(eax, if_true);
4231  isolate()->factory()->heap_number_map());
4232  Split(equal, if_true, if_false, fall_through);
4233  } else if (check->Equals(isolate()->heap()->string_symbol())) {
4234  __ JumpIfSmi(eax, if_false);
4235  __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
4236  __ j(above_equal, if_false);
4237  // Check for undetectable objects => false.
4239  1 << Map::kIsUndetectable);
4240  Split(zero, if_true, if_false, fall_through);
4241  } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
4242  __ cmp(eax, isolate()->factory()->true_value());
4243  __ j(equal, if_true);
4244  __ cmp(eax, isolate()->factory()->false_value());
4245  Split(equal, if_true, if_false, fall_through);
4246  } else if (FLAG_harmony_typeof &&
4247  check->Equals(isolate()->heap()->null_symbol())) {
4248  __ cmp(eax, isolate()->factory()->null_value());
4249  Split(equal, if_true, if_false, fall_through);
4250  } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
4251  __ cmp(eax, isolate()->factory()->undefined_value());
4252  __ j(equal, if_true);
4253  __ JumpIfSmi(eax, if_false);
4254  // Check for undetectable objects => true.
4256  __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
4257  __ test(ecx, Immediate(1 << Map::kIsUndetectable));
4258  Split(not_zero, if_true, if_false, fall_through);
4259  } else if (check->Equals(isolate()->heap()->function_symbol())) {
4260  __ JumpIfSmi(eax, if_false);
4262  __ CmpObjectType(eax, JS_FUNCTION_TYPE, edx);
4263  __ j(equal, if_true);
4264  __ CmpInstanceType(edx, JS_FUNCTION_PROXY_TYPE);
4265  Split(equal, if_true, if_false, fall_through);
4266  } else if (check->Equals(isolate()->heap()->object_symbol())) {
4267  __ JumpIfSmi(eax, if_false);
4268  if (!FLAG_harmony_typeof) {
// Classic semantics: typeof null is "object".
4269  __ cmp(eax, isolate()->factory()->null_value());
4270  __ j(equal, if_true);
4271  }
4272  __ CmpObjectType(eax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, edx);
4273  __ j(below, if_false);
4274  __ CmpInstanceType(edx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4275  __ j(above, if_false);
4276  // Check for undetectable objects => false.
4278  1 << Map::kIsUndetectable);
4279  Split(zero, if_true, if_false, fall_through);
4280  } else {
// Unknown literal: never matches any typeof result.
4281  if (if_false != fall_through) __ jmp(if_false);
4282  }
4283  context()->Plug(if_true, if_false);
4284 }
4285 
4286 
// Emits code for binary comparisons (==, ===, <, >, in, instanceof,
// ...).  Literal comparisons against typeof/null/undefined are handled
// by specialized inline emitters; everything else goes through builtins,
// stubs, or a patchable compare IC with an inline smi fast path.
4287 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4288  Comment cmnt(masm_, "[ CompareOperation");
4289  SetSourcePosition(expr->position());
4290 
4291  // First we try a fast inlined version of the compare when one of
4292  // the operands is a literal.
4293  if (TryLiteralCompare(expr)) return;
4294 
4295  // Always perform the comparison for its control flow. Pack the result
4296  // into the expression's context after the comparison is performed.
4297  Label materialize_true, materialize_false;
4298  Label* if_true = NULL;
4299  Label* if_false = NULL;
4300  Label* fall_through = NULL;
4301  context()->PrepareTest(&materialize_true, &materialize_false,
4302  &if_true, &if_false, &fall_through);
4303 
4304  Token::Value op = expr->op();
4305  VisitForStackValue(expr->left());
4306  switch (op) {
4307  case Token::IN:
4308  VisitForStackValue(expr->right());
4309  __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4310  PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4311  __ cmp(eax, isolate()->factory()->true_value());
4312  Split(equal, if_true, if_false, fall_through);
4313  break;
4314 
4315  case Token::INSTANCEOF: {
4316  VisitForStackValue(expr->right());
4317  InstanceofStub stub(InstanceofStub::kNoFlags);
4318  __ CallStub(&stub);
4319  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4320  __ test(eax, eax);
4321  // The stub returns 0 for true.
4322  Split(zero, if_true, if_false, fall_through);
4323  break;
4324  }
4325 
4326  default: {
4327  VisitForAccumulatorValue(expr->right());
// Map the token to the x86 condition used after the compare.
4328  Condition cc = no_condition;
4329  switch (op) {
4330  case Token::EQ_STRICT:
4331  case Token::EQ:
4332  cc = equal;
4333  break;
4334  case Token::LT:
4335  cc = less;
4336  break;
4337  case Token::GT:
4338  cc = greater;
4339  break;
4340  case Token::LTE:
4341  cc = less_equal;
4342  break;
4343  case Token::GTE:
4344  cc = greater_equal;
4345  break;
4346  case Token::IN:
4347  case Token::INSTANCEOF:
4348  default:
4349  UNREACHABLE();
4350  }
4351  __ pop(edx);
4352 
4353  bool inline_smi_code = ShouldInlineSmiCase(op);
4354  JumpPatchSite patch_site(masm_);
4355  if (inline_smi_code) {
// Fast path: if both operands are smis, compare them directly and
// split without calling the IC.
4356  Label slow_case;
4357  __ mov(ecx, edx);
4358  __ or_(ecx, eax);
4359  patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
4360  __ cmp(edx, eax);
4361  Split(cc, if_true, if_false, NULL);
4362  __ bind(&slow_case);
4363  }
4364 
4365  // Record position and call the compare IC.
4366  SetSourcePosition(expr->position());
4367  Handle<Code> ic = CompareIC::GetUninitialized(op);
4368  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
4369  patch_site.EmitPatchInfo();
4370 
4371  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4372  __ test(eax, eax);
4373  Split(cc, if_true, if_false, fall_through);
4374  }
4375  }
4376 
4377  // Convert the result of the comparison into one expected for this
4378  // expression's context.
4379  context()->Plug(if_true, if_false);
4380 }
4381 
4382 
// Emits an inlined comparison of sub_expr against null or undefined.
// Strict equality compares against the single nil value; loose equality
// also accepts the other nil value and undetectable objects.
4383 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4384  Expression* sub_expr,
4385  NilValue nil) {
4386  Label materialize_true, materialize_false;
4387  Label* if_true = NULL;
4388  Label* if_false = NULL;
4389  Label* fall_through = NULL;
4390  context()->PrepareTest(&materialize_true, &materialize_false,
4391  &if_true, &if_false, &fall_through);
4392 
4393  VisitForAccumulatorValue(sub_expr);
4394  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4395  Handle<Object> nil_value = nil == kNullValue ?
4396  isolate()->factory()->null_value() :
4397  isolate()->factory()->undefined_value();
4398  __ cmp(eax, nil_value);
4399  if (expr->op() == Token::EQ_STRICT) {
4400  Split(equal, if_true, if_false, fall_through);
4401  } else {
4402  Handle<Object> other_nil_value = nil == kNullValue ?
4403  isolate()->factory()->undefined_value() :
4404  isolate()->factory()->null_value();
4405  __ j(equal, if_true);
4406  __ cmp(eax, other_nil_value);
4407  __ j(equal, if_true);
4408  __ JumpIfSmi(eax, if_false);
4409  // It can be an undetectable object.
// NOTE(review): original lines 4410-4411 are absent from this capture —
// presumably the map load and bit-field read that feed the test below;
// confirm against the full source.
4412  __ test(edx, Immediate(1 << Map::kIsUndetectable));
4413  Split(not_zero, if_true, if_false, fall_through);
4414  }
4415  context()->Plug(if_true, if_false);
4416 }
4417 
4418 
// Loads the current function into eax and plugs it into the context.
// NOTE(review): original line 4420 is absent from this capture —
// presumably the mov loading eax from the frame's function slot;
// confirm against the full source.
4419 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4421  context()->Plug(eax);
4422 }
4423 
4424 
// The accumulator register used for expression results on ia32.
4425 Register FullCodeGenerator::result_register() {
4426  return eax;
4427 }
4428 
4429 
// The register holding the current context on ia32.
4430 Register FullCodeGenerator::context_register() {
4431  return esi;
4432 }
4433 
4434 
// Stores 'value' into the stack frame at the given byte offset from ebp.
// The offset must be pointer-size aligned.
4435 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4436  ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
4437  __ mov(Operand(ebp, frame_offset), value);
4438 }
4439 
4440 
// Loads the given slot of the current context (esi) into dst.
4441 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4442  __ mov(dst, ContextOperand(esi, context_index));
4443 }
4444 
4445 
// Pushes the closure argument used when allocating a new context:
// a smi sentinel for global/module scopes, the enclosing closure for
// eval scopes, or the current function for function scopes.
4446 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4447  Scope* declaration_scope = scope()->DeclarationScope();
4448  if (declaration_scope->is_global_scope() ||
4449  declaration_scope->is_module_scope()) {
4450  // Contexts nested in the global context have a canonical empty function
4451  // as their closure, not the anonymous closure containing the global
4452  // code. Pass a smi sentinel and let the runtime look up the empty
4453  // function.
4454  __ push(Immediate(Smi::FromInt(0)));
4455  } else if (declaration_scope->is_eval_scope()) {
4456  // Contexts nested inside eval code have the same closure as the context
4457  // calling eval, not the anonymous closure containing the eval code.
4458  // Fetch it from the context.
// NOTE(review): original line 4459 is absent from this capture —
// presumably the push of the closure slot from the context; confirm
// against the full source.
4460  } else {
4461  ASSERT(declaration_scope->is_function_scope());
// NOTE(review): original line 4462 is absent from this capture —
// presumably the push of the current function from the frame; confirm
// against the full source.
4463  }
4464 }
4465 
4466 
4467 // ----------------------------------------------------------------------------
4468 // Non-local control flow support.
4469 
// Saves state before entering a finally block: "cooks" the return
// address into a code-relative smi (so GC can move the code object),
// then stacks the result register and the isolate's pending-message
// state.  ExitFinallyBlock pops these in the reverse order.
4470 void FullCodeGenerator::EnterFinallyBlock() {
4471  // Cook return address on top of stack (smi encoded Code* delta)
4472  ASSERT(!result_register().is(edx));
4473  __ pop(edx);
4474  __ sub(edx, Immediate(masm_->CodeObject()));
// NOTE(review): original line 4475 is absent from this capture —
// presumably a STATIC_ASSERT about the smi tag size; confirm against
// the full source.
4476  STATIC_ASSERT(kSmiTag == 0);
4477  __ SmiTag(edx);
4478  __ push(edx);
4479 
4480  // Store result register while executing finally block.
4481  __ push(result_register());
4482 
4483  // Store pending message while executing finally block.
4484  ExternalReference pending_message_obj =
4485  ExternalReference::address_of_pending_message_obj(isolate());
4486  __ mov(edx, Operand::StaticVariable(pending_message_obj));
4487  __ push(edx);
4488 
4489  ExternalReference has_pending_message =
4490  ExternalReference::address_of_has_pending_message(isolate());
4491  __ mov(edx, Operand::StaticVariable(has_pending_message));
4492  __ push(edx);
4493 
4494  ExternalReference pending_message_script =
4495  ExternalReference::address_of_pending_message_script(isolate());
4496  __ mov(edx, Operand::StaticVariable(pending_message_script));
4497  __ push(edx);
4498 }
4499 
4500 
// Restores the state saved by EnterFinallyBlock, popping in exact
// reverse order: pending-message state, result register, and finally
// the cooked return address, which is untagged, rebased on the code
// object, and jumped to.
4501 void FullCodeGenerator::ExitFinallyBlock() {
4502  ASSERT(!result_register().is(edx));
4503  // Restore pending message from stack.
4504  __ pop(edx);
4505  ExternalReference pending_message_script =
4506  ExternalReference::address_of_pending_message_script(isolate());
4507  __ mov(Operand::StaticVariable(pending_message_script), edx);
4508 
4509  __ pop(edx);
4510  ExternalReference has_pending_message =
4511  ExternalReference::address_of_has_pending_message(isolate());
4512  __ mov(Operand::StaticVariable(has_pending_message), edx);
4513 
4514  __ pop(edx);
4515  ExternalReference pending_message_obj =
4516  ExternalReference::address_of_pending_message_obj(isolate());
4517  __ mov(Operand::StaticVariable(pending_message_obj), edx);
4518 
4519  // Restore result register from stack.
4520  __ pop(result_register());
4521 
4522  // Uncook return address.
4523  __ pop(edx);
4524  __ SmiUntag(edx);
4525  __ add(edx, Immediate(masm_->CodeObject()));
4526  __ jmp(edx);
4527 }
4528 
4529 
4530 #undef __
4531 
4532 #define __ ACCESS_MASM(masm())
4533 
// Unwinds a try-finally on a non-local exit (break/continue/return):
// drops down to the handler block, restores the context if needed, pops
// the try handler, and calls the finally code before continuing the
// unwind.  Resets *stack_depth and *context_length because everything
// up to the handler has been consumed.
4534 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4535  int* stack_depth,
4536  int* context_length) {
4537  // The macros used here must preserve the result register.
4538 
4539  // Because the handler block contains the context of the finally
4540  // code, we can restore it directly from there for the finally code
4541  // rather than iteratively unwinding contexts via their previous
4542  // links.
4543  __ Drop(*stack_depth); // Down to the handler block.
4544  if (*context_length > 0) {
4545  // Restore the context to its dedicated register and the stack.
// NOTE(review): original lines 4546-4547 are absent from this capture —
// presumably the loads restoring esi and the frame's context slot from
// the handler; confirm against the full source.
4548  }
4549  __ PopTryHandler();
4550  __ call(finally_entry_);
4551 
4552  *stack_depth = 0;
4553  *context_length = 0;
4554  return previous_;
4555 }
4556 
4557 
4558 #undef __
4559 
4560 } } // namespace v8::internal
4561 
4562 #endif // V8_TARGET_ARCH_IA32
static const int kBitFieldOffset
Definition: objects.h:4994
Scope * DeclarationScope()
Definition: scopes.cc:699
const intptr_t kSmiTagMask
Definition: v8.h:3855
VariableDeclaration * function() const
Definition: scopes.h:323
static int SlotOffset(int index)
Definition: contexts.h:408
static const int kBuiltinsOffset
Definition: objects.h:6083
static String * cast(Object *obj)
static const int kDeclarationsId
Definition: ast.h:202
static Smi * FromInt(int value)
Definition: objects-inl.h:973
bool IsFastObjectElementsKind(ElementsKind kind)
value format" "after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false, "print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false, "print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false, "report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true, "garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true, "flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true, "use incremental marking") DEFINE_bool(incremental_marking_steps, true, "do incremental marking steps") DEFINE_bool(trace_incremental_marking, false, "trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true, "Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false, "Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true, "use inline caching") DEFINE_bool(native_code_counters, false, "generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false, "Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true, "Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false, "Never perform compaction on full GC-testing only") DEFINE_bool(compact_code_space, true, "Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true, "Flush inline caches prior to mark compact collection and" "flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0, "Default seed for initializing random generator" "(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true, "allows verbose printing") DEFINE_bool(allow_natives_syntax, false, "allow natives syntax") DEFINE_bool(trace_sim, false, "Trace simulator execution") DEFINE_bool(check_icache, false, "Check icache flushes in ARM and MIPS simulator") 
DEFINE_int(stop_sim_at, 0, "Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8, "Stack alingment in bytes in simulator(4 or 8, 8 is default)") DEFINE_bool(trace_exception, false, "print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false, "preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true, "randomize hashes to avoid predictable hash collisions" "(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0, "Fixed seed to use to hash property keys(0 means random)" "(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false, "activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true, "generate optimized regexp code") DEFINE_bool(testing_bool_flag, true, "testing_bool_flag") DEFINE_int(testing_int_flag, 13, "testing_int_flag") DEFINE_float(testing_float_flag, 2.5, "float-flag") DEFINE_string(testing_string_flag, "Hello, world!", "string-flag") DEFINE_int(testing_prng_seed, 42, "Seed used for threading test randomness") DEFINE_string(testing_serialization_file, "/tmp/serdes", "file in which to serialize heap") DEFINE_bool(help, false, "Print usage message, including flags, on console") DEFINE_bool(dump_counters, false, "Dump counters on exit") DEFINE_string(map_counters, "", "Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT, "Pass all remaining arguments to the script.Alias for\"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this 
substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") 
DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") 
DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#43"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2#define FLAG_MODE_DEFINE_DEFAULTS#1"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flag-definitions.h"1#define FLAG_FULL(ftype, ctype, nam, def, cmt)#define FLAG_READONLY(ftype, ctype, nam, def, cmt)#define DEFINE_implication(whenflag, thenflag)#define DEFINE_bool(nam, def, cmt)#define DEFINE_int(nam, def, cmt)#define DEFINE_float(nam, def, cmt)#define DEFINE_string(nam, def, cmt)#define DEFINE_args(nam, def, cmt)#define FLAG DEFINE_bool(use_strict, false,"enforce strict mode") DEFINE_bool(es5_readonly, false,"activate correct semantics for inheriting readonliness") DEFINE_bool(es52_globals, false,"activate new semantics for global var declarations") DEFINE_bool(harmony_typeof, false,"enable harmony semantics for typeof") DEFINE_bool(harmony_scoping, false,"enable harmony block scoping") DEFINE_bool(harmony_modules, false,"enable harmony modules (implies block scoping)") DEFINE_bool(harmony_proxies, false,"enable harmony proxies") DEFINE_bool(harmony_collections, false,"enable harmony collections (sets, maps, and weak maps)") DEFINE_bool(harmony, false,"enable all harmony features (except typeof)") DEFINE_implication(harmony, harmony_scoping) DEFINE_implication(harmony, harmony_modules) DEFINE_implication(harmony, harmony_proxies) DEFINE_implication(harmony, harmony_collections) DEFINE_implication(harmony_modules, harmony_scoping) DEFINE_bool(packed_arrays, false,"optimizes arrays that have no holes") DEFINE_bool(smi_only_arrays, true,"tracks arrays 
with only smi values") DEFINE_bool(clever_optimizations, true,"Optimize object size, Array shift, DOM strings and string +") DEFINE_bool(unbox_double_arrays, true,"automatically unbox arrays of doubles") DEFINE_bool(string_slices, true,"use string slices") DEFINE_bool(crankshaft, true,"use crankshaft") DEFINE_string(hydrogen_filter,"","optimization filter") DEFINE_bool(use_range, true,"use hydrogen range analysis") DEFINE_bool(eliminate_dead_phis, true,"eliminate dead phis") DEFINE_bool(use_gvn, true,"use hydrogen global value numbering") DEFINE_bool(use_canonicalizing, true,"use hydrogen instruction canonicalizing") DEFINE_bool(use_inlining, true,"use function inlining") DEFINE_int(max_inlined_source_size, 600,"maximum source size in bytes considered for a single inlining") DEFINE_int(max_inlined_nodes, 196,"maximum number of AST nodes considered for a single inlining") DEFINE_int(max_inlined_nodes_cumulative, 196,"maximum cumulative number of AST nodes considered for inlining") DEFINE_bool(loop_invariant_code_motion, true,"loop invariant code motion") DEFINE_bool(collect_megamorphic_maps_from_stub_cache, true,"crankshaft harvests type feedback from stub cache") DEFINE_bool(hydrogen_stats, false,"print statistics for hydrogen") DEFINE_bool(trace_hydrogen, false,"trace generated hydrogen to file") DEFINE_string(trace_phase,"Z","trace generated IR for specified phases") DEFINE_bool(trace_inlining, false,"trace inlining decisions") DEFINE_bool(trace_alloc, false,"trace register allocator") DEFINE_bool(trace_all_uses, false,"trace all use positions") DEFINE_bool(trace_range, false,"trace range analysis") DEFINE_bool(trace_gvn, false,"trace global value numbering") DEFINE_bool(trace_representation, false,"trace representation types") DEFINE_bool(stress_pointer_maps, false,"pointer map for every instruction") DEFINE_bool(stress_environments, false,"environment for every instruction") DEFINE_int(deopt_every_n_times, 0,"deoptimize every n times a deopt point is passed") 
DEFINE_bool(trap_on_deopt, false,"put a break point before deoptimizing") DEFINE_bool(deoptimize_uncommon_cases, true,"deoptimize uncommon cases") DEFINE_bool(polymorphic_inlining, true,"polymorphic inlining") DEFINE_bool(use_osr, true,"use on-stack replacement") DEFINE_bool(array_bounds_checks_elimination, false,"perform array bounds checks elimination") DEFINE_bool(array_index_dehoisting, false,"perform array index dehoisting") DEFINE_bool(trace_osr, false,"trace on-stack replacement") DEFINE_int(stress_runs, 0,"number of stress runs") DEFINE_bool(optimize_closures, true,"optimize closures") DEFINE_bool(inline_construct, true,"inline constructor calls") DEFINE_bool(inline_arguments, true,"inline functions with arguments object") DEFINE_int(loop_weight, 1,"loop weight for representation inference") DEFINE_bool(optimize_for_in, true,"optimize functions containing for-in loops") DEFINE_bool(experimental_profiler, true,"enable all profiler experiments") DEFINE_bool(watch_ic_patching, false,"profiler considers IC stability") DEFINE_int(frame_count, 1,"number of stack frames inspected by the profiler") DEFINE_bool(self_optimization, false,"primitive functions trigger their own optimization") DEFINE_bool(direct_self_opt, false,"call recompile stub directly when self-optimizing") DEFINE_bool(retry_self_opt, false,"re-try self-optimization if it failed") DEFINE_bool(count_based_interrupts, false,"trigger profiler ticks based on counting instead of timing") DEFINE_bool(interrupt_at_exit, false,"insert an interrupt check at function exit") DEFINE_bool(weighted_back_edges, false,"weight back edges by jump distance for interrupt triggering") DEFINE_int(interrupt_budget, 5900,"execution budget before interrupt is triggered") DEFINE_int(type_info_threshold, 15,"percentage of ICs that must have type info to allow optimization") DEFINE_int(self_opt_count, 130,"call count before self-optimization") DEFINE_implication(experimental_profiler, watch_ic_patching) 
DEFINE_implication(experimental_profiler, self_optimization) DEFINE_implication(experimental_profiler, retry_self_opt) DEFINE_implication(experimental_profiler, count_based_interrupts) DEFINE_implication(experimental_profiler, interrupt_at_exit) DEFINE_implication(experimental_profiler, weighted_back_edges) DEFINE_bool(trace_opt_verbose, false,"extra verbose compilation tracing") DEFINE_implication(trace_opt_verbose, trace_opt) DEFINE_bool(debug_code, false,"generate extra code (assertions) for debugging") DEFINE_bool(code_comments, false,"emit comments in code disassembly") DEFINE_bool(enable_sse2, true,"enable use of SSE2 instructions if available") DEFINE_bool(enable_sse3, true,"enable use of SSE3 instructions if available") DEFINE_bool(enable_sse4_1, true,"enable use of SSE4.1 instructions if available") DEFINE_bool(enable_cmov, true,"enable use of CMOV instruction if available") DEFINE_bool(enable_rdtsc, true,"enable use of RDTSC instruction if available") DEFINE_bool(enable_sahf, true,"enable use of SAHF instruction if available (X64 only)") DEFINE_bool(enable_vfp3, true,"enable use of VFP3 instructions if available - this implies ""enabling ARMv7 instructions (ARM only)") DEFINE_bool(enable_armv7, true,"enable use of ARMv7 instructions if available (ARM only)") DEFINE_bool(enable_fpu, true,"enable use of MIPS FPU instructions if available (MIPS only)") DEFINE_string(expose_natives_as, NULL,"expose natives in global object") DEFINE_string(expose_debug_as, NULL,"expose debug in global object") DEFINE_bool(expose_gc, false,"expose gc extension") DEFINE_bool(expose_externalize_string, false,"expose externalize string extension") DEFINE_int(stack_trace_limit, 10,"number of stack frames to capture") DEFINE_bool(builtins_in_stack_traces, false,"show built-in functions in stack traces") DEFINE_bool(disable_native_files, false,"disable builtin natives files") DEFINE_bool(inline_new, true,"use fast inline allocation") DEFINE_bool(stack_trace_on_abort, true,"print a 
stack trace if an assertion failure occurs") DEFINE_bool(trace, false,"trace function calls") DEFINE_bool(mask_constants_with_cookie, true,"use random jit cookie to mask large constants") DEFINE_bool(lazy, true,"use lazy compilation") DEFINE_bool(trace_opt, false,"trace lazy optimization") DEFINE_bool(trace_opt_stats, false,"trace lazy optimization statistics") DEFINE_bool(opt, true,"use adaptive optimizations") DEFINE_bool(always_opt, false,"always try to optimize functions") DEFINE_bool(prepare_always_opt, false,"prepare for turning on always opt") DEFINE_bool(trace_deopt, false,"trace deoptimization") DEFINE_int(min_preparse_length, 1024,"minimum length for automatic enable preparsing") DEFINE_bool(always_full_compiler, false,"try to use the dedicated run-once backend for all code") DEFINE_bool(trace_bailout, false,"print reasons for falling back to using the classic V8 backend") DEFINE_bool(compilation_cache, true,"enable compilation cache") DEFINE_bool(cache_prototype_transitions, true,"cache prototype transitions") DEFINE_bool(trace_debug_json, false,"trace debugging JSON request/response") DEFINE_bool(debugger_auto_break, true,"automatically set the debug break flag when debugger commands are ""in the queue") DEFINE_bool(enable_liveedit, true,"enable liveedit experimental feature") DEFINE_bool(break_on_abort, true,"always cause a debug break before aborting") DEFINE_int(stack_size, kPointerSize *123,"default size of stack region v8 is allowed to use (in kBytes)") DEFINE_int(max_stack_trace_source_length, 300,"maximum length of function source code printed in a stack trace.") DEFINE_bool(always_inline_smi_code, false,"always inline smi code in non-opt code") DEFINE_int(max_new_space_size, 0,"max size of the new generation (in kBytes)") DEFINE_int(max_old_space_size, 0,"max size of the old generation (in Mbytes)") DEFINE_int(max_executable_size, 0,"max size of executable memory (in Mbytes)") DEFINE_bool(gc_global, false,"always perform global GCs") 
DEFINE_int(gc_interval,-1,"garbage collect after <n> allocations") DEFINE_bool(trace_gc, false,"print one trace line following each garbage collection") DEFINE_bool(trace_gc_nvp, false,"print one detailed trace line in name=value format ""after each garbage collection") DEFINE_bool(print_cumulative_gc_stat, false,"print cumulative GC statistics in name=value format on exit") DEFINE_bool(trace_gc_verbose, false,"print more details following each garbage collection") DEFINE_bool(trace_fragmentation, false,"report fragmentation for old pointer and data pages") DEFINE_bool(collect_maps, true,"garbage collect maps from which no objects can be reached") DEFINE_bool(flush_code, true,"flush code that we expect not to use again before full gc") DEFINE_bool(incremental_marking, true,"use incremental marking") DEFINE_bool(incremental_marking_steps, true,"do incremental marking steps") DEFINE_bool(trace_incremental_marking, false,"trace progress of the incremental marking") DEFINE_bool(use_idle_notification, true,"Use idle notification to reduce memory footprint.") DEFINE_bool(send_idle_notification, false,"Send idle notifcation between stress runs.") DEFINE_bool(use_ic, true,"use inline caching") DEFINE_bool(native_code_counters, false,"generate extra code for manipulating stats counters") DEFINE_bool(always_compact, false,"Perform compaction on every full GC") DEFINE_bool(lazy_sweeping, true,"Use lazy sweeping for old pointer and data spaces") DEFINE_bool(never_compact, false,"Never perform compaction on full GC - testing only") DEFINE_bool(compact_code_space, true,"Compact code space on full non-incremental collections") DEFINE_bool(cleanup_code_caches_at_gc, true,"Flush inline caches prior to mark compact collection and ""flush code caches in maps during mark compact cycle.") DEFINE_int(random_seed, 0,"Default seed for initializing random generator ""(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer, true,"allows verbose printing") 
DEFINE_bool(allow_natives_syntax, false,"allow natives syntax") DEFINE_bool(trace_sim, false,"Trace simulator execution") DEFINE_bool(check_icache, false,"Check icache flushes in ARM and MIPS simulator") DEFINE_int(stop_sim_at, 0,"Simulator stop after x number of instructions") DEFINE_int(sim_stack_alignment, 8,"Stack alingment in bytes in simulator (4 or 8, 8 is default)") DEFINE_bool(trace_exception, false,"print stack trace when throwing exceptions") DEFINE_bool(preallocate_message_memory, false,"preallocate some memory to build stack traces.") DEFINE_bool(randomize_hashes, true,"randomize hashes to avoid predictable hash collisions ""(with snapshots this option cannot override the baked-in seed)") DEFINE_int(hash_seed, 0,"Fixed seed to use to hash property keys (0 means random)""(with snapshots this option cannot override the baked-in seed)") DEFINE_bool(preemption, false,"activate a 100ms timer that switches between V8 threads") DEFINE_bool(regexp_optimization, true,"generate optimized regexp code") DEFINE_bool(testing_bool_flag, true,"testing_bool_flag") DEFINE_int(testing_int_flag, 13,"testing_int_flag") DEFINE_float(testing_float_flag, 2.5,"float-flag") DEFINE_string(testing_string_flag,"Hello, world!","string-flag") DEFINE_int(testing_prng_seed, 42,"Seed used for threading test randomness") DEFINE_string(testing_serialization_file,"/tmp/serdes","file in which to serialize heap") DEFINE_bool(help, false,"Print usage message, including flags, on console") DEFINE_bool(dump_counters, false,"Dump counters on exit") DEFINE_string(map_counters,"","Map counters to a file") DEFINE_args(js_arguments, JSARGUMENTS_INIT,"Pass all remaining arguments to the script. 
Alias for \"--\".") DEFINE_bool(debug_compile_events, true,"Enable debugger compile events") DEFINE_bool(debug_script_collected_events, true,"Enable debugger script collected events") DEFINE_bool(gdbjit, false,"enable GDBJIT interface (disables compacting GC)") DEFINE_bool(gdbjit_full, false,"enable GDBJIT interface for all code objects") DEFINE_bool(gdbjit_dump, false,"dump elf objects with debug info to disk") DEFINE_string(gdbjit_dump_filter,"","dump only objects containing this substring") DEFINE_bool(force_marking_deque_overflows, false,"force overflows of marking deque by reducing it's size ""to 64 words") DEFINE_bool(stress_compaction, false,"stress the GC compactor to flush out bugs (implies ""--force_marking_deque_overflows)")#define FLAG DEFINE_bool(enable_slow_asserts, false,"enable asserts that are slow to execute") DEFINE_bool(trace_codegen, false,"print name of functions for which code is generated") DEFINE_bool(print_source, false,"pretty print source code") DEFINE_bool(print_builtin_source, false,"pretty print source code for builtins") DEFINE_bool(print_ast, false,"print source AST") DEFINE_bool(print_builtin_ast, false,"print source AST for builtins") DEFINE_string(stop_at,"","function name where to insert a breakpoint") DEFINE_bool(print_builtin_scopes, false,"print scopes for builtins") DEFINE_bool(print_scopes, false,"print scopes") DEFINE_bool(trace_contexts, false,"trace contexts operations") DEFINE_bool(gc_greedy, false,"perform GC prior to some allocations") DEFINE_bool(gc_verbose, false,"print stuff during garbage collection") DEFINE_bool(heap_stats, false,"report heap statistics before and after GC") DEFINE_bool(code_stats, false,"report code statistics after GC") DEFINE_bool(verify_heap, false,"verify heap pointers before and after GC") DEFINE_bool(print_handles, false,"report handles after GC") DEFINE_bool(print_global_handles, false,"report global handles after GC") DEFINE_bool(trace_ic, false,"trace inline cache state transitions") 
DEFINE_bool(print_interfaces, false,"print interfaces") DEFINE_bool(print_interface_details, false,"print interface inference details") DEFINE_int(print_interface_depth, 5,"depth for printing interfaces") DEFINE_bool(trace_normalization, false,"prints when objects are turned into dictionaries.") DEFINE_bool(trace_lazy, false,"trace lazy compilation") DEFINE_bool(collect_heap_spill_statistics, false,"report heap spill statistics along with heap_stats ""(requires heap_stats)") DEFINE_bool(trace_isolates, false,"trace isolate state changes") DEFINE_bool(log_state_changes, false,"Log state changes.") DEFINE_bool(regexp_possessive_quantifier, false,"enable possessive quantifier syntax for testing") DEFINE_bool(trace_regexp_bytecodes, false,"trace regexp bytecode execution") DEFINE_bool(trace_regexp_assembler, false,"trace regexp macro assembler calls.")#define FLAG DEFINE_bool(log, false,"Minimal logging (no API, code, GC, suspect, or handles samples).") DEFINE_bool(log_all, false,"Log all events to the log file.") DEFINE_bool(log_runtime, false,"Activate runtime system %Log call.") DEFINE_bool(log_api, false,"Log API events to the log file.") DEFINE_bool(log_code, false,"Log code events to the log file without profiling.") DEFINE_bool(log_gc, false,"Log heap samples on garbage collection for the hp2ps tool.") DEFINE_bool(log_handles, false,"Log global handle events.") DEFINE_bool(log_snapshot_positions, false,"log positions of (de)serialized objects in the snapshot.") DEFINE_bool(log_suspect, false,"Log suspect operations.") DEFINE_bool(prof, false,"Log statistical profiling information (implies --log-code).") DEFINE_bool(prof_auto, true,"Used with --prof, starts profiling automatically") DEFINE_bool(prof_lazy, false,"Used with --prof, only does sampling and logging"" when profiler is active (implies --noprof_auto).") DEFINE_bool(prof_browser_mode, true,"Used with --prof, turns on browser-compatible mode for profiling.") DEFINE_bool(log_regexp, false,"Log regular 
expression execution.") DEFINE_bool(sliding_state_window, false,"Update sliding state window counters.") DEFINE_string(logfile,"v8.log","Specify the name of the log file.") DEFINE_bool(ll_prof, false,"Enable low-level linux profiler.")#define FLAG DEFINE_bool(trace_elements_transitions, false,"trace elements transitions") DEFINE_bool(print_code_stubs, false,"print code stubs") DEFINE_bool(test_secondary_stub_cache, false,"test secondary stub cache by disabling the primary one") DEFINE_bool(test_primary_stub_cache, false,"test primary stub cache by disabling the secondary one") DEFINE_bool(print_code, false,"print generated code") DEFINE_bool(print_opt_code, false,"print optimized code") DEFINE_bool(print_unopt_code, false,"print unoptimized code before ""printing optimized code based on it") DEFINE_bool(print_code_verbose, false,"print more information for code") DEFINE_bool(print_builtin_code, false,"print generated code for builtins")#47"/Users/thlorenz/dev/dx/v8-perf/build/v8/src/flags.cc"2 namespace{struct Flag{enum FlagType{TYPE_BOOL, TYPE_INT, TYPE_FLOAT, TYPE_STRING, TYPE_ARGS} name
Definition: flags.cc:1349
const Register esp
static const int kDataOffset
Definition: objects.h:6432
static const int kGlobalReceiverOffset
Definition: objects.h:6085
int SizeOfCodeGeneratedSince(Label *label)
T Max(T a, T b)
Definition: utils.h:222
Scope * outer_scope() const
Definition: scopes.h:347
Flag flags[]
Definition: flags.cc:1467
static Handle< Object > UninitializedSentinel(Isolate *isolate)
Definition: objects-inl.h:5052
static bool IsSupported(CpuFeature f)
static bool enabled()
Definition: serialize.h:480
bool is_int8(int x)
Definition: assembler.h:830
static const int kSize
Definition: objects.h:6433
#define ASSERT(condition)
Definition: checks.h:270
static const int kInObjectFieldCount
Definition: objects.h:6487
const char * comment() const
Definition: flags.cc:1362
static bool IsCompileTimeValue(Expression *expression)
Definition: parser.cc:3902
#define POINTER_SIZE_ALIGN(value)
Definition: v8globals.h:401
const uint32_t kStringRepresentationMask
Definition: objects.h:455
static const int kMaximumSlots
Definition: code-stubs.h:343
MemOperand GlobalObjectOperand()
static const int kInstanceClassNameOffset
Definition: objects.h:5609
static const int kGlobalContextOffset
Definition: objects.h:6084
Variable * parameter(int index) const
Definition: scopes.h:330
PropertyAttributes
MemOperand ContextOperand(Register context, int index)
static const int kFunctionEntryId
Definition: ast.h:198
static Smi * cast(Object *object)
const Register edi
int ContextChainLength(Scope *scope)
Definition: scopes.cc:689
static const int kHashFieldOffset
Definition: objects.h:7099
#define IN
static const int kLiteralsOffset
Definition: objects.h:5987
const Register ebp
#define UNREACHABLE()
Definition: checks.h:50
STATIC_ASSERT((FixedDoubleArray::kHeaderSize &kDoubleAlignmentMask)==0)
static const int kLengthOffset
Definition: objects.h:7098
const Register eax
static const int kValueOffset
Definition: objects.h:1307
Variable * arguments() const
Definition: scopes.h:338
static const int kForInSlowCaseMarker
Definition: objects.h:4149
NilValue
Definition: v8.h:141
const XMMRegister xmm1
const int kPointerSize
Definition: globals.h:234
static const int kJSReturnSequenceLength
static const int kForInFastCaseMarker
Definition: objects.h:4148
static const int kStringWrapperSafeForDefaultValueOf
Definition: objects.h:5011
Operand FieldOperand(Register object, int offset)
const Register ecx
#define __
static const int kCacheStampOffset
Definition: objects.h:6280
static TestContext * cast(AstContext *context)
Definition: hydrogen.h:690
static const int kPropertiesOffset
Definition: objects.h:2113
static const int kHeaderSize
Definition: objects.h:7282
static const int kElementsOffset
Definition: objects.h:2114
static const int kContainsCachedArrayIndexMask
Definition: objects.h:7154
const uint32_t kStringTag
Definition: objects.h:437
#define BASE_EMBEDDED
Definition: allocation.h:68
Vector< const char > CStrVector(const char *data)
Definition: utils.h:525
static int OffsetOfElementAt(int index)
Definition: objects.h:2291
static const int kLengthOffset
Definition: objects.h:8111
static const int kMaxLoopNestingMarker
Definition: objects.h:4491
bool IsFastSmiOrObjectElementsKind(ElementsKind kind)
static const int kHeaderSize
Definition: objects.h:2233
static const int kEnumerationIndexOffset
Definition: objects.h:2622
static const int kMapOffset
Definition: objects.h:1219
static const int kValueOffset
Definition: objects.h:6272
static const int kEnumCacheBridgeCacheOffset
Definition: objects.h:2627
const uint32_t kIsNotStringMask
Definition: objects.h:436
void VisitIllegalRedeclaration(AstVisitor *visitor)
Definition: scopes.cc:536
static const int kLengthOffset
Definition: objects.h:2232
const Register ebx
const int kSmiShiftSize
Definition: v8.h:3899
const int kSmiTagSize
Definition: v8.h:3854
kPropertyAccessorsOffset kNamedPropertyHandlerOffset kInstanceTemplateOffset kAccessCheckInfoOffset kEvalFrominstructionsOffsetOffset kThisPropertyAssignmentsOffset flag
Definition: objects-inl.h:3682
Condition NegateCondition(Condition cond)
#define ASSERT_EQ(v1, v2)
Definition: checks.h:271
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination trace on stack replacement optimize closures functions with arguments object optimize functions containing for in loops profiler considers IC stability primitive functions trigger their own optimization re try self optimization if it failed insert an interrupt check at function exit execution budget before interrupt is triggered call count before self optimization self_optimization count_based_interrupts weighted_back_edges trace_opt emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 enable use of ARMv7 instructions if enable use of MIPS FPU instructions if NULL
Definition: flags.cc:274
const Register esi
static Handle< SharedFunctionInfo > BuildFunctionInfo(FunctionLiteral *node, Handle< Script > script)
Definition: compiler.cc:708
static const int kConstructorOffset
Definition: objects.h:4954
const int kSmiTag
Definition: v8.h:3853
#define ASSERT_NE(v1, v2)
Definition: checks.h:272
static Operand FixedArrayElementOperand(Register array, Register index_as_smi, int additional_offset=0)
Definition: codegen-ia32.h:63
static const int kIsUndetectable
Definition: objects.h:5005
static bool ShouldGenerateLog(Expression *type)
Definition: codegen.cc:153
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
Definition: list-inl.h:38
#define FACTORY
Definition: isolate.h:1409
static const int kPrototypeOffset
Definition: objects.h:4953
const Register no_reg
static const int kValueOffset
Definition: objects.h:6188
const uint32_t kAsciiStringTag
Definition: objects.h:451
const Register edx
T Min(T a, T b)
Definition: utils.h:229
static const int kSharedFunctionInfoOffset
Definition: objects.h:5984
static FixedArrayBase * cast(Object *object)
Definition: objects-inl.h:1669
static const int kMaxValue
Definition: objects.h:1006
static const int kBitField2Offset
Definition: objects.h:4995
#define VOID
static Handle< Code > GetUninitialized(Token::Value op)
Definition: ic.cc:2544
void check(i::Vector< const char > string)
static const int kExponentOffset
Definition: objects.h:1313
FlagType type() const
Definition: flags.cc:1358
static const int kFirstIndex
Definition: objects.h:2611
const uint32_t kStringEncodingMask
Definition: objects.h:449
static const int kInstanceTypeOffset
Definition: objects.h:4992
static const int kMantissaOffset
Definition: objects.h:1312
TypeofState
Definition: codegen.h:70
const XMMRegister xmm0