v8  3.25.30(node0.11.13)
V8 is Google's open source JavaScript engine
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
builtins-ia32.cc
Go to the documentation of this file.
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if V8_TARGET_ARCH_IA32
31 
32 #include "codegen.h"
33 #include "deoptimizer.h"
34 #include "full-codegen.h"
35 #include "stub-cache.h"
36 
37 namespace v8 {
38 namespace internal {
39 
40 
41 #define __ ACCESS_MASM(masm)
42 
43 
44 void Builtins::Generate_Adaptor(MacroAssembler* masm,
45  CFunctionId id,
46  BuiltinExtraArguments extra_args) {
47  // ----------- S t a t e -------------
48  // -- eax : number of arguments excluding receiver
49  // -- edi : called function (only guaranteed when
50  // extra_args requires it)
51  // -- esi : context
52  // -- esp[0] : return address
53  // -- esp[4] : last argument
54  // -- ...
55  // -- esp[4 * argc] : first argument (argc == eax)
56  // -- esp[4 * (argc +1)] : receiver
57  // -----------------------------------
58 
59  // Insert extra arguments.
60  int num_extra_args = 0;
61  if (extra_args == NEEDS_CALLED_FUNCTION) {
62  num_extra_args = 1;
63  Register scratch = ebx;
64  __ pop(scratch); // Save return address.
65  __ push(edi);
66  __ push(scratch); // Restore return address.
67  } else {
68  ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
69  }
70 
71  // JumpToExternalReference expects eax to contain the number of arguments
72  // including the receiver and the extra arguments.
73  __ add(eax, Immediate(num_extra_args + 1));
74  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
75 }
76 
77 
78 static void CallRuntimePassFunction(
79  MacroAssembler* masm, Runtime::FunctionId function_id) {
80  FrameScope scope(masm, StackFrame::INTERNAL);
81  // Push a copy of the function.
82  __ push(edi);
83  // Function is also the parameter to the runtime call.
84  __ push(edi);
85 
86  __ CallRuntime(function_id, 1);
87  // Restore receiver.
88  __ pop(edi);
89 }
90 
91 
92 static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
96  __ jmp(eax);
97 }
98 
99 
100 static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
102  __ jmp(eax);
103 }
104 
105 
106 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
107  // Checking whether the queued function is ready for install is optional,
108  // since we come across interrupts and stack checks elsewhere. However,
109  // not checking may delay installing ready functions, and always checking
110  // would be quite expensive. A good compromise is to first check against
111  // stack limit as a cue for an interrupt signal.
112  Label ok;
113  ExternalReference stack_limit =
114  ExternalReference::address_of_stack_limit(masm->isolate());
115  __ cmp(esp, Operand::StaticVariable(stack_limit));
116  __ j(above_equal, &ok, Label::kNear);
117 
118  CallRuntimePassFunction(masm, Runtime::kHiddenTryInstallOptimizedCode);
119  GenerateTailCallToReturnedCode(masm);
120 
121  __ bind(&ok);
122  GenerateTailCallToSharedCode(masm);
123 }
124 
125 
// NOTE(review): this listing is an extraction of generated documentation and
// several source lines are missing (visible as gaps in the embedded line
// numbers, e.g. the dec_b/movzx_b operands and parts of the Allocate calls).
// Restore the dropped lines from the original v8 3.25 tree before building.
126 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
127  bool is_api_function,
128  bool count_constructions,
129  bool create_memento) {
130  // ----------- S t a t e -------------
131  // -- eax: number of arguments
132  // -- edi: constructor function
133  // -- ebx: allocation site or undefined
134  // -----------------------------------
135 
136  // Should never count constructions for api objects.
137  ASSERT(!is_api_function || !count_constructions);
138 
139  // Should never create mementos for api functions.
140  ASSERT(!is_api_function || !create_memento);
141 
142  // Should never create mementos before slack tracking is finished.
143  ASSERT(!count_constructions || !create_memento);
144 
145  // Enter a construct frame.
146  {
147  FrameScope scope(masm, StackFrame::CONSTRUCT);
148 
149  if (create_memento) {
150  __ AssertUndefinedOrAllocationSite(ebx);
151  __ push(ebx);
152  }
153 
154  // Store a smi-tagged arguments count on the stack.
155  __ SmiTag(eax);
156  __ push(eax);
157 
158  // Push the function to invoke on the stack.
159  __ push(edi);
160 
161  // Try to allocate the object without transitioning into C code. If any of
162  // the preconditions is not met, the code bails out to the runtime call.
163  Label rt_call, allocated;
164  if (FLAG_inline_new) {
165  Label undo_allocation;
166 #ifdef ENABLE_DEBUGGER_SUPPORT
167  ExternalReference debug_step_in_fp =
168  ExternalReference::debug_step_in_fp_address(masm->isolate());
169  __ cmp(Operand::StaticVariable(debug_step_in_fp), Immediate(0));
170  __ j(not_equal, &rt_call);
171 #endif
172 
173  // Verified that the constructor is a JSFunction.
174  // Load the initial map and verify that it is in fact a map.
175  // edi: constructor
// NOTE(review): embedded line 176 (the load of the prototype-or-initial-map
// field into eax) is missing from this listing.
177  // Will both indicate a NULL and a Smi
178  __ JumpIfSmi(eax, &rt_call);
179  // edi: constructor
180  // eax: initial map (if proven valid below)
181  __ CmpObjectType(eax, MAP_TYPE, ebx);
182  __ j(not_equal, &rt_call);
183 
184  // Check that the constructor is not constructing a JSFunction (see
185  // comments in Runtime_NewObject in runtime.cc). In which case the
186  // initial map's instance type would be JS_FUNCTION_TYPE.
187  // edi: constructor
188  // eax: initial map
189  __ CmpInstanceType(eax, JS_FUNCTION_TYPE);
190  __ j(equal, &rt_call);
191 
192  if (count_constructions) {
193  Label allocate;
194  // Decrease generous allocation count.
// NOTE(review): embedded lines 195 and 197 are missing; the dec_b operand
// below is truncated mid-expression in this listing.
196  __ dec_b(FieldOperand(ecx,
198  __ j(not_zero, &allocate);
199 
200  __ push(eax);
201  __ push(edi);
202 
203  __ push(edi); // constructor
204  // The call will replace the stub, so the countdown is only done once.
205  __ CallRuntime(Runtime::kHiddenFinalizeInstanceSize, 1);
206 
207  __ pop(edi);
208  __ pop(eax);
209 
210  __ bind(&allocate);
211  }
212 
213  // Now allocate the JSObject on the heap.
214  // edi: constructor
215  // eax: initial map
// NOTE(review): embedded line 216 (load of the instance size into edi) is
// missing before the shift below.
217  __ shl(edi, kPointerSizeLog2);
218  if (create_memento) {
219  __ add(edi, Immediate(AllocationMemento::kSize));
220  }
221 
222  __ Allocate(edi, ebx, edi, no_reg, &rt_call, NO_ALLOCATION_FLAGS);
223 
224  Factory* factory = masm->isolate()->factory();
225 
226  // Allocated the JSObject, now initialize the fields.
227  // eax: initial map
228  // ebx: JSObject
229  // edi: start of next object (including memento if create_memento)
230  __ mov(Operand(ebx, JSObject::kMapOffset), eax);
231  __ mov(ecx, factory->empty_fixed_array());
232  __ mov(Operand(ebx, JSObject::kPropertiesOffset), ecx);
233  __ mov(Operand(ebx, JSObject::kElementsOffset), ecx);
234  // Set extra fields in the newly allocated object.
235  // eax: initial map
236  // ebx: JSObject
237  // edi: start of next object (including memento if create_memento)
238  __ lea(ecx, Operand(ebx, JSObject::kHeaderSize));
239  __ mov(edx, factory->undefined_value());
240  if (count_constructions) {
// NOTE(review): embedded lines 242 and 244 are missing; both masm calls
// below are truncated mid-argument-list in this listing.
241  __ movzx_b(esi,
243  __ lea(esi,
245  // esi: offset of first field after pre-allocated fields
246  if (FLAG_debug_code) {
247  __ cmp(esi, edi);
248  __ Assert(less_equal,
249  kUnexpectedNumberOfPreAllocatedPropertyFields);
250  }
251  __ InitializeFieldsWithFiller(ecx, esi, edx);
252  __ mov(edx, factory->one_pointer_filler_map());
253  __ InitializeFieldsWithFiller(ecx, edi, edx);
254  } else if (create_memento) {
255  __ lea(esi, Operand(edi, -AllocationMemento::kSize));
256  __ InitializeFieldsWithFiller(ecx, esi, edx);
257 
258  // Fill in memento fields if necessary.
259  // esi: points to the allocated but uninitialized memento.
260  Handle<Map> allocation_memento_map = factory->allocation_memento_map();
261  __ mov(Operand(esi, AllocationMemento::kMapOffset),
262  allocation_memento_map);
263  // Get the cell or undefined.
264  __ mov(edx, Operand(esp, kPointerSize*2));
// NOTE(review): embedded line 265 (the store into the memento's allocation
// site slot) is missing before the continuation line below.
266  edx);
267  } else {
268  __ InitializeFieldsWithFiller(ecx, edi, edx);
269  }
270 
271  // Add the object tag to make the JSObject real, so that we can continue
272  // and jump into the continuation code at any time from now on. Any
273  // failures need to undo the allocation, so that the heap is in a
274  // consistent state and verifiable.
275  // eax: initial map
276  // ebx: JSObject
277  // edi: start of next object
278  __ or_(ebx, Immediate(kHeapObjectTag));
279 
280  // Check if a non-empty properties array is needed.
281  // Allocate and initialize a FixedArray if it is.
282  // eax: initial map
283  // ebx: JSObject
284  // edi: start of next object
285  // Calculate the total number of properties described by the map.
// NOTE(review): embedded lines 286, 288 and 291 are missing around the
// property-count arithmetic below.
287  __ movzx_b(ecx,
289  __ add(edx, ecx);
290  // Calculate unused properties past the end of the in-object properties.
292  __ sub(edx, ecx);
293  // Done if no extra properties are to be allocated.
294  __ j(zero, &allocated);
295  __ Assert(positive, kPropertyAllocationCountFailed);
296 
297  // Scale the number of elements by pointer size and add the header for
298  // FixedArrays to the start of the next object calculation from above.
299  // ebx: JSObject
300  // edi: start of next object (will be start of FixedArray)
301  // edx: number of elements in properties array
// NOTE(review): embedded lines 303, 305 and 310 are missing from this
// Allocate argument list.
302  __ Allocate(FixedArray::kHeaderSize,
304  edx,
306  edi,
307  ecx,
308  no_reg,
309  &undo_allocation,
311 
312  // Initialize the FixedArray.
313  // ebx: JSObject
314  // edi: FixedArray
315  // edx: number of elements
316  // ecx: start of next object
317  __ mov(eax, factory->fixed_array_map());
318  __ mov(Operand(edi, FixedArray::kMapOffset), eax); // setup the map
319  __ SmiTag(edx);
320  __ mov(Operand(edi, FixedArray::kLengthOffset), edx); // and length
321 
322  // Initialize the fields to undefined.
323  // ebx: JSObject
324  // edi: FixedArray
325  // ecx: start of next object
326  { Label loop, entry;
327  __ mov(edx, factory->undefined_value());
328  __ lea(eax, Operand(edi, FixedArray::kHeaderSize));
329  __ jmp(&entry);
330  __ bind(&loop);
331  __ mov(Operand(eax, 0), edx);
332  __ add(eax, Immediate(kPointerSize));
333  __ bind(&entry);
334  __ cmp(eax, ecx);
335  __ j(below, &loop);
336  }
337 
338  // Store the initialized FixedArray into the properties field of
339  // the JSObject
340  // ebx: JSObject
341  // edi: FixedArray
342  __ or_(edi, Immediate(kHeapObjectTag)); // add the heap tag
// NOTE(review): embedded line 343 (the store of edi into the JSObject's
// properties field) is missing here.
344 
345 
346  // Continue with JSObject being successfully allocated
347  // ebx: JSObject
348  __ jmp(&allocated);
349 
350  // Undo the setting of the new top so that the heap is verifiable. For
351  // example, the map's unused properties potentially do not match the
352  // allocated objects unused properties.
353  // ebx: JSObject (previous new top)
354  __ bind(&undo_allocation);
355  __ UndoAllocationInNewSpace(ebx);
356  }
357 
358  // Allocate the new receiver object using the runtime call.
359  __ bind(&rt_call);
360  int offset = 0;
361  if (create_memento) {
362  // Get the cell or allocation site.
363  __ mov(edi, Operand(esp, kPointerSize * 2));
364  __ push(edi);
365  offset = kPointerSize;
366  }
367 
368  // Must restore edi (constructor) before calling runtime.
369  __ mov(edi, Operand(esp, offset));
370  // edi: function (constructor)
371  __ push(edi);
372  if (create_memento) {
373  __ CallRuntime(Runtime::kHiddenNewObjectWithAllocationSite, 2);
374  } else {
375  __ CallRuntime(Runtime::kHiddenNewObject, 1);
376  }
377  __ mov(ebx, eax); // store result in ebx
378 
379  // If we ended up using the runtime, and we want a memento, then the
380  // runtime call made it for us, and we shouldn't do create count
381  // increment.
382  Label count_incremented;
383  if (create_memento) {
384  __ jmp(&count_incremented);
385  }
386 
387  // New object allocated.
388  // ebx: newly allocated object
389  __ bind(&allocated);
390 
391  if (create_memento) {
392  __ mov(ecx, Operand(esp, kPointerSize * 2));
393  __ cmp(ecx, masm->isolate()->factory()->undefined_value());
394  __ j(equal, &count_incremented);
395  // ecx is an AllocationSite. We are creating a memento from it, so we
396  // need to increment the memento create count.
// NOTE(review): embedded line 397 (the add to the AllocationSite's memento
// create count field) is missing before the Immediate operand below.
398  Immediate(Smi::FromInt(1)));
399  __ bind(&count_incremented);
400  }
401 
402  // Retrieve the function from the stack.
403  __ pop(edi);
404 
405  // Retrieve smi-tagged arguments count from the stack.
406  __ mov(eax, Operand(esp, 0));
407  __ SmiUntag(eax);
408 
409  // Push the allocated receiver to the stack. We need two copies
410  // because we may have to return the original one and the calling
411  // conventions dictate that the called function pops the receiver.
412  __ push(ebx);
413  __ push(ebx);
414 
415  // Set up pointer to last argument.
// NOTE(review): embedded line 416 (the lea that computes the last-argument
// pointer into ebx) is missing here.
417 
418  // Copy arguments and receiver to the expression stack.
419  Label loop, entry;
420  __ mov(ecx, eax);
421  __ jmp(&entry);
422  __ bind(&loop);
423  __ push(Operand(ebx, ecx, times_4, 0));
424  __ bind(&entry);
425  __ dec(ecx);
426  __ j(greater_equal, &loop);
427 
428  // Call the function.
429  if (is_api_function) {
// NOTE(review): embedded line 430 (the context load for the api call) is
// missing here.
431  Handle<Code> code =
432  masm->isolate()->builtins()->HandleApiCallConstruct();
433  __ call(code, RelocInfo::CODE_TARGET);
434  } else {
435  ParameterCount actual(eax);
436  __ InvokeFunction(edi, actual, CALL_FUNCTION,
437  NullCallWrapper());
438  }
439 
440  // Store offset of return address for deoptimizer.
441  if (!is_api_function && !count_constructions) {
442  masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
443  }
444 
445  // Restore context from the frame.
// NOTE(review): embedded line 446 (the mov that reloads esi from the frame)
// is missing here.
447 
448  // If the result is an object (in the ECMA sense), we should get rid
449  // of the receiver and use the result; see ECMA-262 section 13.2.2-7
450  // on page 74.
451  Label use_receiver, exit;
452 
453  // If the result is a smi, it is *not* an object in the ECMA sense.
454  __ JumpIfSmi(eax, &use_receiver);
455 
456  // If the type of the result (stored in its map) is less than
457  // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
458  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
459  __ j(above_equal, &exit);
460 
461  // Throw away the result of the constructor invocation and use the
462  // on-stack receiver as the result.
463  __ bind(&use_receiver);
464  __ mov(eax, Operand(esp, 0));
465 
466  // Restore the arguments count and leave the construct frame.
467  __ bind(&exit);
468  __ mov(ebx, Operand(esp, kPointerSize)); // Get arguments count.
469 
470  // Leave construct frame.
471  }
472 
473  // Remove caller arguments from the stack and return.
474  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
475  __ pop(ecx);
476  __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize)); // 1 ~ receiver
477  __ push(ecx);
478  __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1);
479  __ ret(0);
480 }
481 
482 
483 void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
484  Generate_JSConstructStubHelper(masm, false, true, false);
485 }
486 
487 
488 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
489  Generate_JSConstructStubHelper(masm, false, false, FLAG_pretenuring_call_new);
490 }
491 
492 
493 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
494  Generate_JSConstructStubHelper(masm, true, false, false);
495 }
496 
497 
// Shared body of the C++-to-JS entry trampolines: copies the C arguments
// onto the JS stack and invokes the function (as call or construct).
// NOTE(review): this listing is extraction-truncated — embedded lines 500,
// 512-513 and 517 are missing; restore them from the original tree.
498 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
499  bool is_construct) {
501 
502  // Clear the context before we push it when entering the internal frame.
503  __ Move(esi, Immediate(0));
504 
505  {
506  FrameScope scope(masm, StackFrame::INTERNAL);
507 
508  // Load the previous frame pointer (ebx) to access C arguments
509  __ mov(ebx, Operand(ebp, 0));
510 
511  // Get the function from the frame and setup the context.
// NOTE(review): embedded lines 512-513 (the loads of the function and its
// context) are missing here.
514 
515  // Push the function and the receiver onto the stack.
516  __ push(ecx);
// NOTE(review): embedded line 517 (the push of the receiver) is missing.
518 
519  // Load the number of arguments and setup pointer to the arguments.
520  __ mov(eax, Operand(ebx, EntryFrameConstants::kArgcOffset));
521  __ mov(ebx, Operand(ebx, EntryFrameConstants::kArgvOffset));
522 
523  // Copy arguments to the stack in a loop.
524  Label loop, entry;
525  __ Move(ecx, Immediate(0));
526  __ jmp(&entry);
527  __ bind(&loop);
528  __ mov(edx, Operand(ebx, ecx, times_4, 0)); // push parameter from argv
529  __ push(Operand(edx, 0)); // dereference handle
530  __ inc(ecx);
531  __ bind(&entry);
532  __ cmp(ecx, eax);
533  __ j(not_equal, &loop);
534 
535  // Get the function from the stack and call it.
536  // kPointerSize for the receiver.
537  __ mov(edi, Operand(esp, eax, times_4, kPointerSize));
538 
539  // Invoke the code.
540  if (is_construct) {
541  // No type feedback cell is available
542  __ mov(ebx, masm->isolate()->factory()->undefined_value());
543  CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
544  __ CallStub(&stub);
545  } else {
546  ParameterCount actual(eax);
547  __ InvokeFunction(edi, actual, CALL_FUNCTION,
548  NullCallWrapper());
549  }
550 
551  // Exit the internal frame. Notice that this also removes the empty.
552  // context and the function left on the stack by the code
553  // invocation.
554  }
555  __ ret(kPointerSize); // Remove receiver.
556 }
557 
558 
559 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
560  Generate_JSEntryTrampolineHelper(masm, false);
561 }
562 
563 
564 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
565  Generate_JSEntryTrampolineHelper(masm, true);
566 }
567 
568 
569 void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) {
570  CallRuntimePassFunction(masm, Runtime::kHiddenCompileUnoptimized);
571  GenerateTailCallToReturnedCode(masm);
572 }
573 
574 
575 
576 static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
577  FrameScope scope(masm, StackFrame::INTERNAL);
578  // Push a copy of the function.
579  __ push(edi);
580  // Function is also the parameter to the runtime call.
581  __ push(edi);
582  // Whether to compile in a background thread.
583  __ Push(masm->isolate()->factory()->ToBoolean(concurrent));
584 
585  __ CallRuntime(Runtime::kHiddenCompileOptimized, 2);
586  // Restore receiver.
587  __ pop(edi);
588 }
589 
590 
591 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
592  CallCompileOptimized(masm, false);
593  GenerateTailCallToReturnedCode(masm);
594 }
595 
596 
597 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
598  CallCompileOptimized(masm, true);
599  GenerateTailCallToReturnedCode(masm);
600 }
601 
602 
603 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
604  // For now, we are relying on the fact that make_code_young doesn't do any
605  // garbage collection which allows us to save/restore the registers without
606  // worrying about which of them contain pointers. We also don't build an
607  // internal frame to make the code faster, since we shouldn't have to do stack
608  // crawls in MakeCodeYoung. This seems a bit fragile.
609 
610  // Re-execute the code that was patched back to the young age when
611  // the stub returns.
612  __ sub(Operand(esp, 0), Immediate(5));
613  __ pushad();
614  __ mov(eax, Operand(esp, 8 * kPointerSize));
615  {
616  FrameScope scope(masm, StackFrame::MANUAL);
617  __ PrepareCallCFunction(2, ebx);
618  __ mov(Operand(esp, 1 * kPointerSize),
619  Immediate(ExternalReference::isolate_address(masm->isolate())));
620  __ mov(Operand(esp, 0), eax);
621  __ CallCFunction(
622  ExternalReference::get_make_code_young_function(masm->isolate()), 2);
623  }
624  __ popad();
625  __ ret(0);
626 }
627 
628 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
629 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
630  MacroAssembler* masm) { \
631  GenerateMakeCodeYoungAgainCommon(masm); \
632 } \
633 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
634  MacroAssembler* masm) { \
635  GenerateMakeCodeYoungAgainCommon(masm); \
636 }
637 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
638 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
639 
640 
641 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
642  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
643  // that make_code_young doesn't do any garbage collection which allows us to
644  // save/restore the registers without worrying about which of them contain
645  // pointers.
646  __ pushad();
647  __ mov(eax, Operand(esp, 8 * kPointerSize));
648  __ sub(eax, Immediate(Assembler::kCallInstructionLength));
649  { // NOLINT
650  FrameScope scope(masm, StackFrame::MANUAL);
651  __ PrepareCallCFunction(2, ebx);
652  __ mov(Operand(esp, 1 * kPointerSize),
653  Immediate(ExternalReference::isolate_address(masm->isolate())));
654  __ mov(Operand(esp, 0), eax);
655  __ CallCFunction(
656  ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
657  2);
658  }
659  __ popad();
660 
661  // Perform prologue operations usually performed by the young code stub.
662  __ pop(eax); // Pop return address into scratch register.
663  __ push(ebp); // Caller's frame pointer.
664  __ mov(ebp, esp);
665  __ push(esi); // Callee's context.
666  __ push(edi); // Callee's JS Function.
667  __ push(eax); // Push return address after frame prologue.
668 
669  // Jump to point after the code-age stub.
670  __ ret(0);
671 }
672 
673 
674 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
675  GenerateMakeCodeYoungAgainCommon(masm);
676 }
677 
678 
679 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
680  SaveFPRegsMode save_doubles) {
681  // Enter an internal frame.
682  {
683  FrameScope scope(masm, StackFrame::INTERNAL);
684 
685  // Preserve registers across notification, this is important for compiled
686  // stubs that tail call the runtime on deopts passing their parameters in
687  // registers.
688  __ pushad();
689  __ CallRuntime(Runtime::kHiddenNotifyStubFailure, 0, save_doubles);
690  __ popad();
691  // Tear down internal frame.
692  }
693 
694  __ pop(MemOperand(esp, 0)); // Ignore state offset
695  __ ret(0); // Return to IC Miss stub, continuation still on stack.
696 }
697 
698 
699 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
700  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
701 }
702 
703 
704 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
705  if (Serializer::enabled()) {
706  PlatformFeatureScope sse2(SSE2);
707  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
708  } else {
709  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
710  }
711 }
712 
713 
// Notifies the runtime that a deoptimization of the given type happened,
// then dispatches on the full-codegen state left on the stack.
// NOTE(review): this listing is extraction-truncated — embedded lines 715
// (the 'Deoptimizer::BailoutType type' parameter), 732 and 738 (the cmp
// instructions before each conditional jump) are missing; restore them
// from the original tree.
714 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
716  {
717  FrameScope scope(masm, StackFrame::INTERNAL);
718 
719  // Pass deoptimization type to the runtime system.
720  __ push(Immediate(Smi::FromInt(static_cast<int>(type))));
721  __ CallRuntime(Runtime::kHiddenNotifyDeoptimized, 1);
722 
723  // Tear down internal frame.
724  }
725 
726  // Get the full codegen state from the stack and untag it.
727  __ mov(ecx, Operand(esp, 1 * kPointerSize));
728  __ SmiUntag(ecx);
729 
730  // Switch on the state.
731  Label not_no_registers, not_tos_eax;
733  __ j(not_equal, &not_no_registers, Label::kNear);
734  __ ret(1 * kPointerSize); // Remove state.
735 
736  __ bind(&not_no_registers);
737  __ mov(eax, Operand(esp, 2 * kPointerSize));
739  __ j(not_equal, &not_tos_eax, Label::kNear);
740  __ ret(2 * kPointerSize); // Remove state, eax.
741 
742  __ bind(&not_tos_eax);
743  __ Abort(kNoCasesLeft);
744 }
745 
746 
747 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
748  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
749 }
750 
751 
752 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
753  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
754 }
755 
756 
757 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
758  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
759 }
760 
761 
// Implements Function.prototype.call: ensures at least one argument,
// patches the receiver, shifts arguments down one slot, and dispatches to
// the function, a function proxy, or the CALL_NON_FUNCTION builtin.
// NOTE(review): this listing is extraction-truncated — embedded lines 784,
// 791, 794-796, 800-801, 814, 840-841, 905 and 907-908 are missing; restore
// them from the original tree before building.
762 void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
763  Factory* factory = masm->isolate()->factory();
764 
765  // 1. Make sure we have at least one argument.
766  { Label done;
767  __ test(eax, eax);
768  __ j(not_zero, &done);
769  __ pop(ebx);
770  __ push(Immediate(factory->undefined_value()));
771  __ push(ebx);
772  __ inc(eax);
773  __ bind(&done);
774  }
775 
776  // 2. Get the function to call (passed as receiver) from the stack, check
777  // if it is a function.
778  Label slow, non_function;
779  // 1 ~ return address.
780  __ mov(edi, Operand(esp, eax, times_4, 1 * kPointerSize));
781  __ JumpIfSmi(edi, &non_function);
782  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
783  __ j(not_equal, &slow);
785 
786  // 3a. Patch the first argument if necessary when calling a function.
787  Label shift_arguments;
788  __ Move(edx, Immediate(0)); // indicate regular JS_FUNCTION
789  { Label convert_to_object, use_global_receiver, patch_receiver;
790  // Change context eagerly in case we need the global receiver.
// NOTE(review): embedded line 791 (the context load) is missing here.
792 
793  // Do not transform the receiver for strict mode functions.
// NOTE(review): embedded lines 794-796 (the SharedFunctionInfo load and
// strict-mode bit test) are missing before the jump below.
797  __ j(not_equal, &shift_arguments);
798 
799  // Do not transform the receiver for natives (shared already in ebx).
// NOTE(review): embedded lines 800-801 (the native bit test) are missing
// before the jump below.
802  __ j(not_equal, &shift_arguments);
803 
804  // Compute the receiver in sloppy mode.
805  __ mov(ebx, Operand(esp, eax, times_4, 0)); // First argument.
806 
807  // Call ToObject on the receiver if it is not an object, or use the
808  // global object if it is null or undefined.
809  __ JumpIfSmi(ebx, &convert_to_object);
810  __ cmp(ebx, factory->null_value());
811  __ j(equal, &use_global_receiver);
812  __ cmp(ebx, factory->undefined_value());
813  __ j(equal, &use_global_receiver);
815  __ CmpObjectType(ebx, FIRST_SPEC_OBJECT_TYPE, ecx);
816  __ j(above_equal, &shift_arguments);
817 
818  __ bind(&convert_to_object);
819 
820  { // In order to preserve argument count.
821  FrameScope scope(masm, StackFrame::INTERNAL);
822  __ SmiTag(eax);
823  __ push(eax);
824 
825  __ push(ebx);
826  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
827  __ mov(ebx, eax);
828  __ Move(edx, Immediate(0)); // restore
829 
830  __ pop(eax);
831  __ SmiUntag(eax);
832  }
833 
834  // Restore the function to edi.
835  __ mov(edi, Operand(esp, eax, times_4, 1 * kPointerSize));
836  __ jmp(&patch_receiver);
837 
838  __ bind(&use_global_receiver);
839  __ mov(ebx,
// NOTE(review): embedded lines 840-841 (the global receiver operand of the
// mov above) are missing here.
842 
843  __ bind(&patch_receiver);
844  __ mov(Operand(esp, eax, times_4, 0), ebx);
845 
846  __ jmp(&shift_arguments);
847  }
848 
849  // 3b. Check for function proxy.
850  __ bind(&slow);
851  __ Move(edx, Immediate(1)); // indicate function proxy
852  __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
853  __ j(equal, &shift_arguments);
854  __ bind(&non_function);
855  __ Move(edx, Immediate(2)); // indicate non-function
856 
857  // 3c. Patch the first argument when calling a non-function. The
858  // CALL_NON_FUNCTION builtin expects the non-function callee as
859  // receiver, so overwrite the first argument which will ultimately
860  // become the receiver.
861  __ mov(Operand(esp, eax, times_4, 0), edi);
862 
863  // 4. Shift arguments and return address one slot down on the stack
864  // (overwriting the original receiver). Adjust argument count to make
865  // the original first argument the new receiver.
866  __ bind(&shift_arguments);
867  { Label loop;
868  __ mov(ecx, eax);
869  __ bind(&loop);
870  __ mov(ebx, Operand(esp, ecx, times_4, 0));
871  __ mov(Operand(esp, ecx, times_4, kPointerSize), ebx);
872  __ dec(ecx);
873  __ j(not_sign, &loop); // While non-negative (to copy return address).
874  __ pop(ebx); // Discard copy of return address.
875  __ dec(eax); // One fewer argument (first argument is new receiver).
876  }
877 
878  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
879  // or a function proxy via CALL_FUNCTION_PROXY.
880  { Label function, non_proxy;
881  __ test(edx, edx);
882  __ j(zero, &function);
883  __ Move(ebx, Immediate(0));
884  __ cmp(edx, Immediate(1));
885  __ j(not_equal, &non_proxy);
886 
887  __ pop(edx); // return address
888  __ push(edi); // re-add proxy object as additional argument
889  __ push(edx);
890  __ inc(eax);
891  __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
892  __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
893  RelocInfo::CODE_TARGET);
894 
895  __ bind(&non_proxy);
896  __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
897  __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
898  RelocInfo::CODE_TARGET);
899  __ bind(&function);
900  }
901 
902  // 5b. Get the code to call from the function and check that the number of
903  // expected arguments matches what we're providing. If so, jump
904  // (tail-call) to the code in register edx without checking arguments.
// NOTE(review): embedded lines 905 and 907-908 (the SharedFunctionInfo load
// feeding the mov below, and its operand continuation) are missing here.
906  __ mov(ebx,
909  __ SmiUntag(ebx);
910  __ cmp(eax, ebx);
911  __ j(not_equal,
912  masm->isolate()->builtins()->ArgumentsAdaptorTrampoline());
913 
914  ParameterCount expected(0);
915  __ InvokeCode(edx, expected, expected, JUMP_FUNCTION, NullCallWrapper());
916 }
917 
918 
// Generates the Function.prototype.apply builtin for ia32: validates the
// arguments object, checks for stack overflow, computes the receiver
// (applying the sloppy-mode receiver transformation where required), unrolls
// the arguments array onto the stack one element at a time via a keyed load
// IC, and finally invokes the function (or the proxy path for non-functions).
//
// Stack on entry, relative to ebp inside the internal frame:
//   ebp + kArgumentsOffset : arguments array
//   ebp + kReceiverOffset  : receiver
//   ebp + kFunctionOffset  : function to call
//
// NOTE(review): this listing was extracted from generated documentation and
// several original source lines are missing (numbering gaps below); verify
// any gap against the upstream builtins-ia32.cc before relying on it.
919 void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
920  static const int kArgumentsOffset = 2 * kPointerSize;
921  static const int kReceiverOffset = 3 * kPointerSize;
922  static const int kFunctionOffset = 4 * kPointerSize;
923  {
 // Internal frame so the GC can walk the spilled values; left via
 // GenerateLeaveFrame() on the fast path below.
924  FrameScope frame_scope(masm, StackFrame::INTERNAL);
925 
926  __ push(Operand(ebp, kFunctionOffset)); // push this
927  __ push(Operand(ebp, kArgumentsOffset)); // push arguments
 // APPLY_PREPARE validates the arguments object; eax is used as the
 // (smi) argument count afterwards -- inferred from the uses of eax
 // below; confirm against the APPLY_PREPARE definition.
928  __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
929 
930  // Check the stack for overflow. We are not trying to catch
931  // interruptions (e.g. debug break and preemption) here, so the "real stack
932  // limit" is checked.
933  Label okay;
934  ExternalReference real_stack_limit =
935  ExternalReference::address_of_real_stack_limit(masm->isolate());
936  __ mov(edi, Operand::StaticVariable(real_stack_limit));
937  // Make ecx the space we have left. The stack might already be overflowed
938  // here which will cause ecx to become negative.
939  __ mov(ecx, esp);
940  __ sub(ecx, edi);
941  // Make edx the space we need for the array when it is unrolled onto the
942  // stack.
943  __ mov(edx, eax);
 // NOTE(review): source line 944 is missing from this listing (most
 // likely the shift converting the smi count in edx into a byte size);
 // verify against upstream.
945  // Check if the arguments will overflow the stack.
946  __ cmp(ecx, edx);
947  __ j(greater, &okay); // Signed comparison.
948 
949  // Out of stack space.
950  __ push(Operand(ebp, 4 * kPointerSize)); // push this
951  __ push(eax);
952  __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
953  __ bind(&okay);
954  // End of stack check.
955 
956  // Push current index and limit.
957  const int kLimitOffset =
 // NOTE(review): the initializer of kLimitOffset (source line 958) is
 // missing from this listing.
959  const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
960  __ push(eax); // limit
961  __ push(Immediate(0)); // index
962 
963  // Get the receiver.
964  __ mov(ebx, Operand(ebp, kReceiverOffset));
965 
966  // Check that the function is a JS function (otherwise it must be a proxy).
967  Label push_receiver, use_global_receiver;
968  __ mov(edi, Operand(ebp, kFunctionOffset));
969  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
970  __ j(not_equal, &push_receiver);
971 
972  // Change context eagerly to get the right global object if necessary.
 // NOTE(review): the context load (source line 973) is missing from this
 // listing.
974 
975  // Compute the receiver.
976  // Do not transform the receiver for strict mode functions.
977  Label call_to_object;
 // NOTE(review): the strict-mode test (source lines 978-980) is missing
 // from this listing; the j(not_equal, ...) below consumes its flags.
981  __ j(not_equal, &push_receiver);
982 
983  Factory* factory = masm->isolate()->factory();
984 
985  // Do not transform the receiver for natives (shared already in ecx).
 // NOTE(review): the native-bit test (source lines 986-987) is missing
 // from this listing; the j(not_equal, ...) below consumes its flags.
988  __ j(not_equal, &push_receiver);
989 
990  // Compute the receiver in sloppy mode.
991  // Call ToObject on the receiver if it is not an object, or use the
992  // global object if it is null or undefined.
993  __ JumpIfSmi(ebx, &call_to_object);
994  __ cmp(ebx, factory->null_value());
995  __ j(equal, &use_global_receiver);
996  __ cmp(ebx, factory->undefined_value());
997  __ j(equal, &use_global_receiver);
 // Anything that is already a spec object can be used as-is.
999  __ CmpObjectType(ebx, FIRST_SPEC_OBJECT_TYPE, ecx);
1000  __ j(above_equal, &push_receiver);
1001 
 // Slow path: box the primitive receiver via the TO_OBJECT builtin.
1002  __ bind(&call_to_object);
1003  __ push(ebx);
1004  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1005  __ mov(ebx, eax);
1006  __ jmp(&push_receiver);
1007 
1008  __ bind(&use_global_receiver);
1009  __ mov(ebx,
 // NOTE(review): the operand of this mov (source lines 1010-1011,
 // presumably loading the global receiver from the context) is missing
 // from this listing.
1012 
1013  // Push the receiver.
1014  __ bind(&push_receiver);
1015  __ push(ebx);
1016 
1017  // Copy all arguments from the array to the stack.
1018  Label entry, loop;
1019  __ mov(ecx, Operand(ebp, kIndexOffset));
1020  __ jmp(&entry);
1021  __ bind(&loop);
1022  __ mov(edx, Operand(ebp, kArgumentsOffset)); // load arguments
1023 
1024  // Use inline caching to speed up access to arguments.
1025  Handle<Code> ic = masm->isolate()->builtins()->KeyedLoadIC_Initialize();
1026  __ call(ic, RelocInfo::CODE_TARGET);
1027  // It is important that we do not have a test instruction after the
1028  // call. A test instruction after the call is used to indicate that
1029  // we have generated an inline version of the keyed load. In this
1030  // case, we know that we are not generating a test instruction next.
1031 
1032  // Push the nth argument.
1033  __ push(eax);
1034 
1035  // Update the index on the stack and in register eax.
 // The index is kept as a smi, hence the 1 << kSmiTagSize increment.
1036  __ mov(ecx, Operand(ebp, kIndexOffset));
1037  __ add(ecx, Immediate(1 << kSmiTagSize));
1038  __ mov(Operand(ebp, kIndexOffset), ecx);
1039 
1040  __ bind(&entry);
1041  __ cmp(ecx, Operand(ebp, kLimitOffset));
1042  __ j(not_equal, &loop);
1043 
1044  // Call the function.
1045  Label call_proxy;
1046  __ mov(eax, ecx);
1047  ParameterCount actual(eax);
1048  __ SmiUntag(eax);
1049  __ mov(edi, Operand(ebp, kFunctionOffset));
1050  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
1051  __ j(not_equal, &call_proxy);
1052  __ InvokeFunction(edi, actual, CALL_FUNCTION, NullCallWrapper());
1053 
 // Fast path: tear down the internal frame here so the ret below removes
 // only the three original stack arguments.
1054  frame_scope.GenerateLeaveFrame();
1055  __ ret(3 * kPointerSize); // remove this, receiver, and arguments
1056 
1057  // Call the function proxy.
1058  __ bind(&call_proxy);
1059  __ push(edi); // add function proxy as last argument
1060  __ inc(eax);
1061  __ Move(ebx, Immediate(0));
1062  __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
1063  __ call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1064  RelocInfo::CODE_TARGET);
1065 
1066  // Leave internal frame.
1067  }
1068  __ ret(3 * kPointerSize); // remove this, receiver, and arguments
1069 }
1070 
1071 
// Generates the entry code for the InternalArray constructor called as a
// normal function: loads the InternalArray function into edi, optionally
// sanity-checks its initial map under --debug-code, then tail-calls
// InternalArrayConstructorStub to do the actual allocation.
1072 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
1073  // ----------- S t a t e -------------
1074  // -- eax : argc
1075  // -- esp[0] : return address
1076  // -- esp[4] : last argument
1077  // -----------------------------------
1078  Label generic_array_code;
1079 
1080  // Get the InternalArray function.
1081  __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, edi);
1082 
1083  if (FLAG_debug_code) {
1084  // Initial map for the builtin InternalArray function should be a map.
 // NOTE(review): the load of ebx (source line 1085, presumably from the
 // function's prototype-or-initial-map field) is missing from this
 // listing; verify against upstream.
1086  // Will both indicate a NULL and a Smi.
1087  __ test(ebx, Immediate(kSmiTagMask));
1088  __ Assert(not_zero, kUnexpectedInitialMapForInternalArrayFunction);
1089  __ CmpObjectType(ebx, MAP_TYPE, ecx);
1090  __ Assert(equal, kUnexpectedInitialMapForInternalArrayFunction);
1091  }
1092 
1093  // Run the native code for the InternalArray function called as a normal
1094  // function.
1095  // tail call a stub
1096  InternalArrayConstructorStub stub(masm->isolate());
1097  __ TailCallStub(&stub);
1098 }
1099 
1100 
// Generates the entry code for the Array constructor called as a normal
// function: loads the Array function into edi, optionally sanity-checks its
// initial map under --debug-code, loads undefined into ebx (no AllocationSite
// feedback), and tail-calls ArrayConstructorStub.
1101 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
1102  // ----------- S t a t e -------------
1103  // -- eax : argc
1104  // -- esp[0] : return address
1105  // -- esp[4] : last argument
1106  // -----------------------------------
1107  Label generic_array_code;
1108 
1109  // Get the Array function.
1110  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, edi);
1111 
1112  if (FLAG_debug_code) {
1113  // Initial map for the builtin Array function should be a map.
 // NOTE(review): the load of ebx (source line 1114, presumably from the
 // function's prototype-or-initial-map field) is missing from this
 // listing; verify against upstream.
1115  // Will both indicate a NULL and a Smi.
1116  __ test(ebx, Immediate(kSmiTagMask));
1117  __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
1118  __ CmpObjectType(ebx, MAP_TYPE, ecx);
1119  __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
1120  }
1121 
1122  // Run the native code for the Array function called as a normal function.
1123  // tail call a stub
 // ebx = undefined signals "no allocation site" to the stub.
1124  __ mov(ebx, masm->isolate()->factory()->undefined_value());
1125  ArrayConstructorStub stub(masm->isolate());
1126  __ TailCallStub(&stub);
1127 }
1128 
1129 
// Generates the String constructor called as a function (not as 'new'):
// converts the first argument to a string (via the number-to-string cache,
// a direct string check, or the TO_STRING builtin), then allocates a JSValue
// wrapper for it. With no arguments, the empty string is used. Falls back to
// Runtime::kNewStringWrapper when inline allocation fails.
//
// NOTE(review): several source lines (1185-1201) are missing from this
// listing in the JSValue initialization section; verify any gap against the
// upstream builtins-ia32.cc.
1130 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
1131  // ----------- S t a t e -------------
1132  // -- eax : number of arguments
1133  // -- edi : constructor function
1134  // -- esp[0] : return address
1135  // -- esp[(argc - n) * 4] : arg[n] (zero-based)
1136  // -- esp[(argc + 1) * 4] : receiver
1137  // -----------------------------------
1138  Counters* counters = masm->isolate()->counters();
1139  __ IncrementCounter(counters->string_ctor_calls(), 1);
1140 
1141  if (FLAG_debug_code) {
 // Verify we were really called as the String function.
1142  __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, ecx);
1143  __ cmp(edi, ecx);
1144  __ Assert(equal, kUnexpectedStringFunction);
1145  }
1146 
1147  // Load the first argument into eax and get rid of the rest
1148  // (including the receiver).
1149  Label no_arguments;
1150  __ test(eax, eax);
1151  __ j(zero, &no_arguments);
1152  __ mov(ebx, Operand(esp, eax, times_pointer_size, 0));
 // Pop the return address, drop all arguments plus the receiver, and
 // push the return address back so ret works normally.
1153  __ pop(ecx);
1154  __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
1155  __ push(ecx);
1156  __ mov(eax, ebx);
1157 
1158  // Lookup the argument in the number to string cache.
1159  Label not_cached, argument_is_string;
1160  __ LookupNumberStringCache(eax, // Input.
1161  ebx, // Result.
1162  ecx, // Scratch 1.
1163  edx, // Scratch 2.
1164  &not_cached);
1165  __ IncrementCounter(counters->string_ctor_cached_number(), 1);
1166  __ bind(&argument_is_string);
1167  // ----------- S t a t e -------------
1168  // -- ebx : argument converted to string
1169  // -- edi : constructor function
1170  // -- esp[0] : return address
1171  // -----------------------------------
1172 
1173  // Allocate a JSValue and put the tagged pointer into eax.
1174  Label gc_required;
1175  __ Allocate(JSValue::kSize,
1176  eax, // Result.
1177  ecx, // New allocation top (we ignore it).
1178  no_reg,
1179  &gc_required,
1180  TAG_OBJECT);
1181 
1182  // Set the map.
1183  __ LoadGlobalFunctionInitialMap(edi, ecx);
1184  if (FLAG_debug_code) {
 // NOTE(review): the cmp instructions feeding these Asserts (source
 // lines 1185-1186 and 1188, checking instance size and unused property
 // fields of the map) are missing from this listing.
1187  __ Assert(equal, kUnexpectedStringWrapperInstanceSize);
1189  __ Assert(equal, kUnexpectedUnusedPropertiesOfStringWrapper);
1190  }
 // NOTE(review): the store of the map into the new object (source line
 // 1191) is missing from this listing.
1192 
1193  // Set properties and elements.
1194  Factory* factory = masm->isolate()->factory();
1195  __ Move(ecx, Immediate(factory->empty_fixed_array()));
 // NOTE(review): the stores of ecx into the properties and elements
 // fields (source lines 1196-1197) are missing from this listing.
1198 
1199  // Set the value.
 // NOTE(review): the store of ebx into the JSValue's value field (source
 // line 1200) is missing from this listing.
1201 
1202  // Ensure the object is fully initialized.
1203  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
1204 
1205  // We're done. Return.
1206  __ ret(0);
1207 
1208  // The argument was not found in the number to string cache. Check
1209  // if it's a string already before calling the conversion builtin.
1210  Label convert_argument;
1211  __ bind(&not_cached);
1212  STATIC_ASSERT(kSmiTag == 0);
1213  __ JumpIfSmi(eax, &convert_argument);
1214  Condition is_string = masm->IsObjectStringType(eax, ebx, ecx);
1215  __ j(NegateCondition(is_string), &convert_argument);
1216  __ mov(ebx, eax);
1217  __ IncrementCounter(counters->string_ctor_string_value(), 1);
1218  __ jmp(&argument_is_string);
1219 
1220  // Invoke the conversion builtin and put the result into ebx.
1221  __ bind(&convert_argument);
1222  __ IncrementCounter(counters->string_ctor_conversions(), 1);
1223  {
1224  FrameScope scope(masm, StackFrame::INTERNAL);
1225  __ push(edi); // Preserve the function.
1226  __ push(eax);
1227  __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
1228  __ pop(edi);
1229  }
1230  __ mov(ebx, eax);
1231  __ jmp(&argument_is_string);
1232 
1233  // Load the empty string into ebx, remove the receiver from the
1234  // stack, and jump back to the case where the argument is a string.
1235  __ bind(&no_arguments);
1236  __ Move(ebx, Immediate(factory->empty_string()));
1237  __ pop(ecx);
1238  __ lea(esp, Operand(esp, kPointerSize));
1239  __ push(ecx);
1240  __ jmp(&argument_is_string);
1241 
1242  // At this point the argument is already a string. Call runtime to
1243  // create a string wrapper.
1244  __ bind(&gc_required);
1245  __ IncrementCounter(counters->string_ctor_gc_required(), 1);
1246  {
1247  FrameScope scope(masm, StackFrame::INTERNAL);
1248  __ push(ebx);
1249  __ CallRuntime(Runtime::kNewStringWrapper, 1);
1250  }
1251  __ ret(0);
1252 }
1253 
1254 
// Builds an arguments adaptor frame on the stack:
//   [ebp] saved ebp | ARGUMENTS_ADAPTOR sentinel smi | function (edi) |
//   smi-tagged argument count.
// On entry: eax = actual argc (untagged), edi = function.
// Clobbers edi (used as scratch for the smi-tagged count); preserves eax,
// ebx and ecx as required by the copy loops in the adaptor trampoline.
1255 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1256  __ push(ebp);
1257  __ mov(ebp, esp);
1258 
1259  // Store the arguments adaptor context sentinel.
1260  __ push(Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
1261 
1262  // Push the function on the stack.
1263  __ push(edi);
1264 
1265  // Preserve the number of arguments on the stack. Must preserve eax,
1266  // ebx and ecx because these registers are used when copying the
1267  // arguments and the receiver.
 // lea edi = eax*2 + kSmiTag is a flag-free way to smi-tag the count.
1268  STATIC_ASSERT(kSmiTagSize == 1);
1269  __ lea(edi, Operand(eax, eax, times_1, kSmiTag));
1270  __ push(edi);
1271 }
1272 
1273 
// Tears down an arguments adaptor frame and removes the caller-pushed
// arguments (plus the receiver) from the stack, keeping the return address
// on top so the following ret returns to the original caller.
1274 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1275  // Retrieve the number of arguments from the stack.
 // NOTE(review): the load of the smi-tagged count into ebx (source line
 // 1276) is missing from this listing; the lea below scales ebx by 2,
 // consistent with an smi-tagged count. Verify against upstream.
1277 
1278  // Leave the frame.
1279  __ leave();
1280 
1281  // Remove caller arguments from the stack.
 // Save the return address, pop (argc + 1) slots, push it back.
1282  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
1283  __ pop(ecx);
1284  __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize)); // 1 ~ receiver
1285  __ push(ecx);
1286 }
1287 
1288 
// Generates the arguments adaptor trampoline: when a function is called with
// an actual argument count that differs from its formal parameter count, this
// builds an adaptor frame, re-pushes the receiver and arguments (truncating
// extras or padding with undefined), calls the function's code, and tears the
// adaptor frame down on return. If actual == expected, it jumps straight to
// the code with no adaptation.
1289 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1290  // ----------- S t a t e -------------
1291  // -- eax : actual number of arguments
1292  // -- ebx : expected number of arguments
1293  // -- edi : function (passed through to callee)
1294  // -----------------------------------
1295 
1296  Label invoke, dont_adapt_arguments;
1297  __ IncrementCounter(masm->isolate()->counters()->arguments_adaptors(), 1);
1298 
1299  Label enough, too_few;
 // NOTE(review): source lines 1300 and 1303 are missing from this
 // listing (likely a kDontAdaptArgumentsSentinel comparison); verify
 // against upstream.
1301  __ cmp(eax, ebx);
1302  __ j(less, &too_few);
1304  __ j(equal, &dont_adapt_arguments);
1305 
1306  { // Enough parameters: Actual >= expected.
1307  __ bind(&enough);
1308  EnterArgumentsAdaptorFrame(masm);
1309 
1310  // Copy receiver and all expected arguments.
 // eax walks downward from the receiver slot in the caller's frame,
 // edi counts copied slots (-1 accounts for the receiver).
1311  const int offset = StandardFrameConstants::kCallerSPOffset;
1312  __ lea(eax, Operand(ebp, eax, times_4, offset));
1313  __ mov(edi, -1); // account for receiver
1314 
1315  Label copy;
1316  __ bind(&copy);
1317  __ inc(edi);
1318  __ push(Operand(eax, 0));
1319  __ sub(eax, Immediate(kPointerSize));
1320  __ cmp(edi, ebx);
1321  __ j(less, &copy);
1322  __ jmp(&invoke);
1323  }
1324 
1325  { // Too few parameters: Actual < expected.
1326  __ bind(&too_few);
1327  EnterArgumentsAdaptorFrame(masm);
1328 
1329  // Copy receiver and all actual arguments.
1330  const int offset = StandardFrameConstants::kCallerSPOffset;
1331  __ lea(edi, Operand(ebp, eax, times_4, offset));
1332  // ebx = expected - actual.
1333  __ sub(ebx, eax);
1334  // eax = -actual - 1
 // Counting up from -actual-1 to 0 lets the loop test with a cheap
 // test eax, eax instead of an extra compare.
1335  __ neg(eax);
1336  __ sub(eax, Immediate(1));
1337 
1338  Label copy;
1339  __ bind(&copy);
1340  __ inc(eax);
1341  __ push(Operand(edi, 0));
1342  __ sub(edi, Immediate(kPointerSize));
1343  __ test(eax, eax);
1344  __ j(not_zero, &copy);
1345 
1346  // Fill remaining expected arguments with undefined values.
1347  Label fill;
1348  __ bind(&fill);
1349  __ inc(eax);
1350  __ push(Immediate(masm->isolate()->factory()->undefined_value()));
1351  __ cmp(eax, ebx);
1352  __ j(less, &fill);
1353  }
1354 
1355  // Call the entry point.
1356  __ bind(&invoke);
1357  // Restore function pointer.
 // NOTE(review): source line 1358 is missing from this listing (likely
 // reloading edi from the adaptor frame's function slot before calling
 // edx); verify against upstream.
1359  __ call(edx);
1360 
1361  // Store offset of return address for deoptimizer.
1362  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
1363 
1364  // Leave frame and return.
1365  LeaveArgumentsAdaptorFrame(masm);
1366  __ ret(0);
1367 
1368  // -------------------------------------------
1369  // Dont adapt arguments.
1370  // -------------------------------------------
1371  __ bind(&dont_adapt_arguments);
1372  __ jmp(edx);
1373 }
1374 
1375 
// Generates the on-stack-replacement (OSR) entry builtin: asks the runtime to
// compile optimized code for the function in the current frame, and if code
// was produced, computes its OSR entry address from the deoptimization data
// and "returns" into it by overwriting the return address on the stack.
1376 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1377  // Lookup the function in the JavaScript frame.
 // NOTE(review): the load of the function into eax (source line 1378) is
 // missing from this listing; verify against upstream.
1379  {
1380  FrameScope scope(masm, StackFrame::INTERNAL);
1381  // Pass function as argument.
1382  __ push(eax);
1383  __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
1384  }
1385 
1386  Label skip;
1387  // If the code object is null, just return to the unoptimized code.
1388  __ cmp(eax, Immediate(0));
1389  __ j(not_equal, &skip, Label::kNear);
1390  __ ret(0);
1391 
1392  __ bind(&skip);
1393 
1394  // Load deoptimization data from the code object.
 // NOTE(review): the load into ebx (source line 1395, presumably from
 // the code object's deoptimization-data field) is missing from this
 // listing.
1396 
1397  // Load the OSR entrypoint offset from the deoptimization data.
1398  __ mov(ebx, Operand(ebx, FixedArray::OffsetOfElementAt(
 // NOTE(review): the continuation of this operand (source line 1399,
 // presumably the kOsrPcOffsetIndex constant) is missing from this
 // listing.
1400  __ SmiUntag(ebx);
1401 
1402  // Compute the target address = code_obj + header_size + osr_offset
1403  __ lea(eax, Operand(eax, ebx, times_1, Code::kHeaderSize - kHeapObjectTag));
1404 
1405  // Overwrite the return address on the stack.
1406  __ mov(Operand(esp, 0), eax);
1407 
1408  // And "return" to the OSR entry point of the function.
1409  __ ret(0);
1410 }
1411 
1412 
// Generates the post-stack-check OSR builtin: if the stack limit has been
// lowered (signalling an interrupt/recompilation request), calls the stack
// guard runtime function and then tail-jumps to the OnStackReplacement
// builtin; otherwise returns immediately to the unoptimized code.
1413 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
1414  // We check the stack limit as indicator that recompilation might be done.
1415  Label ok;
1416  ExternalReference stack_limit =
1417  ExternalReference::address_of_stack_limit(masm->isolate());
1418  __ cmp(esp, Operand::StaticVariable(stack_limit));
1419  __ j(above_equal, &ok, Label::kNear);
1420  {
1421  FrameScope scope(masm, StackFrame::INTERNAL);
1422  __ CallRuntime(Runtime::kHiddenStackGuard, 0);
1423  }
 // Tail-jump: OnStackReplacement takes over this frame's return path.
1424  __ jmp(masm->isolate()->builtins()->OnStackReplacement(),
1425  RelocInfo::CODE_TARGET);
1426 
1427  __ bind(&ok);
1428  __ ret(0);
1429 }
1430 
1431 #undef __
1432 }
1433 } // namespace v8::internal
1434 
1435 #endif // V8_TARGET_ARCH_IA32
const intptr_t kSmiTagMask
Definition: v8.h:5480
static const int kCodeOffset
Definition: objects.h:7103
static const int kCodeEntryOffset
Definition: objects.h:7518
static const int kPrototypeOrInitialMapOffset
Definition: objects.h:7519
static int SlotOffset(int index)
Definition: contexts.h:498
static Smi * FromInt(int value)
Definition: objects-inl.h:1209
const Register esp
static const int kGlobalReceiverOffset
Definition: objects.h:7613
static const int kConstructionCountOffset
Definition: objects.h:7189
static const int kDeoptimizationDataOffset
Definition: objects.h:5584
static const int kNativeByteOffset
Definition: objects.h:7267
static const int kStrictModeBitWithinByte
Definition: objects.h:7258
static bool enabled()
Definition: serialize.h:485
#define ASSERT(condition)
Definition: checks.h:329
static const int kContextOffset
Definition: frames.h:185
const int kPointerSizeLog2
Definition: globals.h:281
static const int kInstanceSizeOffset
Definition: objects.h:6448
static const int kUnusedPropertyFieldsOffset
Definition: objects.h:6460
static const int kContextOffset
Definition: objects.h:7523
const Register edi
static const int kPretenureCreateCountOffset
Definition: objects.h:8414
const Register ebp
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
const Register eax
BuiltinExtraArguments
Definition: builtins.h:35
static const int kDontAdaptArgumentsSentinel
Definition: objects.h:7098
const int kPointerSize
Definition: globals.h:268
static void MaybeCallEntryHook(MacroAssembler *masm)
Operand FieldOperand(Register object, int offset)
const Register ecx
const int kHeapObjectTag
Definition: v8.h:5473
enable upcoming ES6 features enable harmony block scoping enable harmony enable harmony proxies enable harmony generators enable harmony numeric enable harmony string enable harmony math functions harmony_scoping harmony_symbols harmony_collections harmony_iteration harmony_strings harmony_scoping harmony_maths tracks arrays with only smi values Optimize object Array DOM strings and string pretenure call new trace pretenuring decisions of HAllocate instructions track fields with only smi values track fields with heap values track_fields track_fields Enables optimizations which favor memory size over execution speed use string slices optimization filter maximum number of GVN fix point iterations use function inlining use allocation folding eliminate write barriers targeting allocations in optimized code maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining crankshaft harvests type feedback from stub cache trace check elimination phase hydrogen tracing filter trace hydrogen to given file name trace inlining decisions trace store elimination trace all use positions trace global value numbering trace hydrogen escape analysis trace the tracking of allocation sites trace map generalization environment for every instruction deoptimize every n garbage collections put a break point before deoptimizing deoptimize uncommon cases use on stack replacement trace array bounds check elimination perform array index dehoisting use load elimination use store elimination use constant folding eliminate unreachable code number of stress runs when picking a function to watch for shared function not JSFunction itself flushes the cache of optimized code for closures on every GC functions with arguments object maximum number of escape analysis fix point iterations allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms concurrent on 
stack replacement do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes number of stack frames inspected by the profiler percentage of ICs that must have type info to allow optimization extra verbose compilation tracing generate extra code(assertions) for debugging") DEFINE_bool(code_comments
#define __
static const int kCallerSPOffset
Definition: frames.h:190
static const int kPropertiesOffset
Definition: objects.h:2755
static const int kInObjectPropertiesOffset
Definition: objects.h:6450
static const int kExpressionsOffset
Definition: frames.h:183
static const int kElementsOffset
Definition: objects.h:2756
static const int kNativeBitWithinByte
Definition: objects.h:7261
static const int kArgcOffset
Definition: frames-ia32.h:69
static const int kFunctionArgOffset
Definition: frames-ia32.h:67
static int OffsetOfElementAt(int index)
Definition: objects.h:3070
static const int kHeaderSize
Definition: objects.h:3016
static const int kSize
Definition: objects.h:7702
static const int kMapOffset
Definition: objects.h:1890
static const int kCallInstructionLength
static const int kLengthOffset
Definition: objects.h:3015
const Register ebx
static const int kReceiverArgOffset
Definition: frames-ia32.h:68
static const int kFormalParameterCountOffset
Definition: objects.h:7156
static const int kStrictModeByteOffset
Definition: objects.h:7265
const int kSmiTagSize
Definition: v8.h:5479
static const int kHeaderSize
Definition: objects.h:5604
static const int kAllocationSiteOffset
Definition: objects.h:8443
Condition NegateCondition(Condition cond)
const Register esi
const int kSmiTag
Definition: v8.h:5478
static const int kArgvOffset
Definition: frames-ia32.h:70
static const int kHeaderSize
Definition: objects.h:2757
const Register no_reg
static const int kValueOffset
Definition: objects.h:7701
#define CODE_AGE_LIST(V)
Definition: builtins.h:50
const Register edx
static const int kSharedFunctionInfoOffset
Definition: objects.h:7521
static const int kPreAllocatedPropertyFieldsOffset
Definition: objects.h:6453