v8 3.25.30 (node 0.11.13)
V8 is Google's open source JavaScript engine
builtins-x64.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if V8_TARGET_ARCH_X64
31 
32 #include "codegen.h"
33 #include "deoptimizer.h"
34 #include "full-codegen.h"
35 #include "stub-cache.h"
36 
37 namespace v8 {
38 namespace internal {
39 
40 
41 #define __ ACCESS_MASM(masm)
42 
43 
44 void Builtins::Generate_Adaptor(MacroAssembler* masm,
45  CFunctionId id,
46  BuiltinExtraArguments extra_args) {
47  // ----------- S t a t e -------------
48  // -- rax : number of arguments excluding receiver
49  // -- rdi : called function (only guaranteed when
50  // extra_args requires it)
51  // -- rsi : context
52  // -- rsp[0] : return address
53  // -- rsp[8] : last argument
54  // -- ...
55  // -- rsp[8 * argc] : first argument (argc == rax)
56  // -- rsp[8 * (argc + 1)] : receiver
57  // -----------------------------------
58 
59  // Insert extra arguments.
60  int num_extra_args = 0;
61  if (extra_args == NEEDS_CALLED_FUNCTION) {
62  num_extra_args = 1;
63  __ PopReturnAddressTo(kScratchRegister);
64  __ Push(rdi);
65  __ PushReturnAddressFrom(kScratchRegister);
66  } else {
67  ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
68  }
69 
70  // JumpToExternalReference expects rax to contain the number of arguments
71  // including the receiver and the extra arguments.
72  __ addp(rax, Immediate(num_extra_args + 1));
73  __ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1);
74 }
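
// Editor's sketch: when extra arguments are needed, the three instructions
// above splice rdi in beneath the return address. A minimal stand-alone
// illustration of that shuffle (plain C++; the Stack model and names are
// illustrative, not V8's):

#include <cassert>
#include <cstdint>
#include <vector>

// Model the x64 stack as a vector whose back() is rsp[0].
using Stack = std::vector<uint64_t>;

// Insert `extra` directly below the return address, as
// PopReturnAddressTo / Push / PushReturnAddressFrom does.
void InsertBelowReturnAddress(Stack* stack, uint64_t extra) {
  uint64_t return_address = stack->back();  // PopReturnAddressTo(kScratchRegister)
  stack->pop_back();
  stack->push_back(extra);           // Push(rdi)
  stack->push_back(return_address);  // PushReturnAddressFrom(kScratchRegister)
}

int main() {
  Stack stack = {/* receiver */ 1, /* arg */ 2, /* return address */ 0xBEEF};
  InsertBelowReturnAddress(&stack, /* called function */ 42);
  assert(stack.back() == 0xBEEF);         // return address back on top
  assert(stack[stack.size() - 2] == 42);  // extra argument beneath it
}
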
75 
76 
77 static void CallRuntimePassFunction(
78  MacroAssembler* masm, Runtime::FunctionId function_id) {
79  FrameScope scope(masm, StackFrame::INTERNAL);
80  // Push a copy of the function onto the stack.
81  __ Push(rdi);
82  // Function is also the parameter to the runtime call.
83  __ Push(rdi);
84 
85  __ CallRuntime(function_id, 1);
86  // Restore receiver.
87  __ Pop(rdi);
88 }
89 
90 
91 static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
92  __ movp(kScratchRegister,
93  FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
94  __ movp(kScratchRegister,
95  FieldOperand(kScratchRegister, SharedFunctionInfo::kCodeOffset));
96  __ leap(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize));
97  __ jmp(kScratchRegister);
98 }
99 
100 
101 static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
102  __ leap(rax, FieldOperand(rax, Code::kHeaderSize));
103  __ jmp(rax);
104 }
105 
106 
107 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
108  // Checking whether the queued function is ready for install is optional,
109  // since we come across interrupts and stack checks elsewhere. However,
110  // not checking may delay installing ready functions, and always checking
111  // would be quite expensive. A good compromise is to first check against
112  // stack limit as a cue for an interrupt signal.
113  Label ok;
114  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
115  __ j(above_equal, &ok);
116 
117  CallRuntimePassFunction(masm, Runtime::kHiddenTryInstallOptimizedCode);
118  GenerateTailCallToReturnedCode(masm);
119 
120  __ bind(&ok);
121  GenerateTailCallToSharedCode(masm);
122 }
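
// Editor's sketch: the stack-limit check above is the cheap cue the comment
// describes. A hedged stand-alone illustration of the decision, assuming only
// that rsp dropping below the limit signals a pending interrupt:

#include <cassert>
#include <cstdint>

// Take the slow path (which may install optimized code) only when rsp has
// crossed the stack limit; otherwise fall through to the shared code.
bool ShouldTryInstallOptimizedCode(uintptr_t rsp, uintptr_t stack_limit) {
  return rsp < stack_limit;  // mirrors j(above_equal, &ok) above
}

int main() {
  assert(ShouldTryInstallOptimizedCode(0x1000, 0x2000));   // below the limit
  assert(!ShouldTryInstallOptimizedCode(0x3000, 0x2000));  // still above it
}
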
123 
124 
125 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
126  bool is_api_function,
127  bool count_constructions,
128  bool create_memento) {
129  // ----------- S t a t e -------------
130  // -- rax: number of arguments
131  // -- rdi: constructor function
132  // -- rbx: allocation site or undefined
133  // -----------------------------------
134 
135  // Should never count constructions for api objects.
136  ASSERT(!is_api_function || !count_constructions);
137 
138  // Should never create mementos for api functions.
139  ASSERT(!is_api_function || !create_memento);
140 
141  // Should never create mementos before slack tracking is finished.
142  ASSERT(!count_constructions || !create_memento);
143 
144  // Enter a construct frame.
145  {
146  FrameScope scope(masm, StackFrame::CONSTRUCT);
147 
148  if (create_memento) {
149  __ AssertUndefinedOrAllocationSite(rbx);
150  __ Push(rbx);
151  }
152 
153  // Store a smi-tagged arguments count on the stack.
154  __ Integer32ToSmi(rax, rax);
155  __ Push(rax);
156 
157  // Push the function to invoke on the stack.
158  __ Push(rdi);
159 
160  // Try to allocate the object without transitioning into C code. If any of
161  // the preconditions is not met, the code bails out to the runtime call.
162  Label rt_call, allocated;
163  if (FLAG_inline_new) {
164  Label undo_allocation;
165 
166 #ifdef ENABLE_DEBUGGER_SUPPORT
167  ExternalReference debug_step_in_fp =
168  ExternalReference::debug_step_in_fp_address(masm->isolate());
169  __ Move(kScratchRegister, debug_step_in_fp);
170  __ cmpp(Operand(kScratchRegister, 0), Immediate(0));
171  __ j(not_equal, &rt_call);
172 #endif
173 
174  // Verified that the constructor is a JSFunction.
175  // Load the initial map and verify that it is in fact a map.
176  // rdi: constructor
177  __ movp(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
178  // Will both indicate a NULL and a Smi
179  ASSERT(kSmiTag == 0);
180  __ JumpIfSmi(rax, &rt_call);
181  // rdi: constructor
182  // rax: initial map (if proven valid below)
183  __ CmpObjectType(rax, MAP_TYPE, rbx);
184  __ j(not_equal, &rt_call);
185 
186  // Check that the constructor is not constructing a JSFunction (see
187  // comments in Runtime_NewObject in runtime.cc). In which case the
188  // initial map's instance type would be JS_FUNCTION_TYPE.
189  // rdi: constructor
190  // rax: initial map
191  __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
192  __ j(equal, &rt_call);
193 
194  if (count_constructions) {
195  Label allocate;
196  // Decrease generous allocation count.
197  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
198  __ decb(FieldOperand(rcx,
199  SharedFunctionInfo::kConstructionCountOffset));
200  __ j(not_zero, &allocate);
201 
202  __ Push(rax);
203  __ Push(rdi);
204 
205  __ Push(rdi); // constructor
206  // The call will replace the stub, so the countdown is only done once.
207  __ CallRuntime(Runtime::kHiddenFinalizeInstanceSize, 1);
208 
209  __ Pop(rdi);
210  __ Pop(rax);
211 
212  __ bind(&allocate);
213  }
214 
215  // Now allocate the JSObject on the heap.
216  __ movzxbp(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
217  __ shl(rdi, Immediate(kPointerSizeLog2));
218  if (create_memento) {
219  __ addp(rdi, Immediate(AllocationMemento::kSize));
220  }
221  // rdi: size of new object
222  __ Allocate(rdi,
223  rbx,
224  rdi,
225  no_reg,
226  &rt_call,
227  NO_ALLOCATION_FLAGS);
228  Factory* factory = masm->isolate()->factory();
229  // Allocated the JSObject, now initialize the fields.
230  // rax: initial map
231  // rbx: JSObject (not HeapObject tagged - the actual address).
232  // rdi: start of next object (including memento if create_memento)
233  __ movp(Operand(rbx, JSObject::kMapOffset), rax);
234  __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
235  __ movp(Operand(rbx, JSObject::kPropertiesOffset), rcx);
236  __ movp(Operand(rbx, JSObject::kElementsOffset), rcx);
237  // Set extra fields in the newly allocated object.
238  // rax: initial map
239  // rbx: JSObject
240  // rdi: start of next object (including memento if create_memento)
241  __ leap(rcx, Operand(rbx, JSObject::kHeaderSize));
242  __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
243  if (count_constructions) {
244  __ movzxbp(rsi,
245  FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
246  __ leap(rsi,
247  Operand(rbx, rsi, times_pointer_size, JSObject::kHeaderSize));
248  // rsi: offset of first field after pre-allocated fields
249  if (FLAG_debug_code) {
250  __ cmpp(rsi, rdi);
251  __ Assert(less_equal,
252  kUnexpectedNumberOfPreAllocatedPropertyFields);
253  }
254  __ InitializeFieldsWithFiller(rcx, rsi, rdx);
255  __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
256  __ InitializeFieldsWithFiller(rcx, rdi, rdx);
257  } else if (create_memento) {
258  __ leap(rsi, Operand(rdi, -AllocationMemento::kSize));
259  __ InitializeFieldsWithFiller(rcx, rsi, rdx);
260 
261  // Fill in memento fields if necessary.
262  // rsi: points to the allocated but uninitialized memento.
263  Handle<Map> allocation_memento_map = factory->allocation_memento_map();
264  __ Move(Operand(rsi, AllocationMemento::kMapOffset),
265  allocation_memento_map);
266  // Get the cell or undefined.
267  __ movp(rdx, Operand(rsp, kPointerSize*2));
268  __ movp(Operand(rsi, AllocationMemento::kAllocationSiteOffset),
269  rdx);
270  } else {
271  __ InitializeFieldsWithFiller(rcx, rdi, rdx);
272  }
273 
274  // Add the object tag to make the JSObject real, so that we can continue
275  // and jump into the continuation code at any time from now on. Any
276  // failures need to undo the allocation, so that the heap is in a
277  // consistent state and verifiable.
278  // rax: initial map
279  // rbx: JSObject
280  // rdi: start of next object
281  __ orp(rbx, Immediate(kHeapObjectTag));
282 
283  // Check if a non-empty properties array is needed.
284  // Allocate and initialize a FixedArray if it is.
285  // rax: initial map
286  // rbx: JSObject
287  // rdi: start of next object
288  // Calculate the total number of properties described by the map.
289  __ movzxbp(rdx, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset));
290  __ movzxbp(rcx,
291  FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
292  __ addp(rdx, rcx);
293  // Calculate unused properties past the end of the in-object properties.
294  __ movzxbp(rcx, FieldOperand(rax, Map::kInObjectPropertiesOffset));
295  __ subp(rdx, rcx);
296  // Done if no extra properties are to be allocated.
297  __ j(zero, &allocated);
298  __ Assert(positive, kPropertyAllocationCountFailed);
299 
300  // Scale the number of elements by pointer size and add the header for
301  // FixedArrays to the start of the next object calculation from above.
302  // rbx: JSObject
303  // rdi: start of next object (will be start of FixedArray)
304  // rdx: number of elements in properties array
305  __ Allocate(FixedArray::kHeaderSize,
306  times_pointer_size,
307  rdx,
308  rdi,
309  rax,
310  no_reg,
311  &undo_allocation,
312  RESULT_CONTAINS_TOP);
313 
314  // Initialize the FixedArray.
315  // rbx: JSObject
316  // rdi: FixedArray
317  // rdx: number of elements
318  // rax: start of next object
319  __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
320  __ movp(Operand(rdi, HeapObject::kMapOffset), rcx); // setup the map
321  __ Integer32ToSmi(rdx, rdx);
322  __ movp(Operand(rdi, FixedArray::kLengthOffset), rdx); // and length
323 
324  // Initialize the fields to undefined.
325  // rbx: JSObject
326  // rdi: FixedArray
327  // rax: start of next object
328  // rdx: number of elements
329  { Label loop, entry;
330  __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
331  __ leap(rcx, Operand(rdi, FixedArray::kHeaderSize));
332  __ jmp(&entry);
333  __ bind(&loop);
334  __ movp(Operand(rcx, 0), rdx);
335  __ addp(rcx, Immediate(kPointerSize));
336  __ bind(&entry);
337  __ cmpp(rcx, rax);
338  __ j(below, &loop);
339  }
340 
341  // Store the initialized FixedArray into the properties field of
342  // the JSObject
343  // rbx: JSObject
344  // rdi: FixedArray
345  __ orp(rdi, Immediate(kHeapObjectTag)); // add the heap tag
346  __ movp(FieldOperand(rbx, JSObject::kPropertiesOffset), rdi);
347 
348 
349  // Continue with JSObject being successfully allocated
350  // rbx: JSObject
351  __ jmp(&allocated);
352 
353  // Undo the setting of the new top so that the heap is verifiable. For
354  // example, the map's unused properties potentially do not match the
355  // allocated objects unused properties.
356  // rbx: JSObject (previous new top)
357  __ bind(&undo_allocation);
358  __ UndoAllocationInNewSpace(rbx);
359  }
360 
361  // Allocate the new receiver object using the runtime call.
362  // rdi: function (constructor)
363  __ bind(&rt_call);
364  int offset = 0;
365  if (create_memento) {
366  // Get the cell or allocation site.
367  __ movp(rdi, Operand(rsp, kPointerSize*2));
368  __ Push(rdi);
369  offset = kPointerSize;
370  }
371 
372  // Must restore rdi (constructor) before calling runtime.
373  __ movp(rdi, Operand(rsp, offset));
374  __ Push(rdi);
375  if (create_memento) {
376  __ CallRuntime(Runtime::kHiddenNewObjectWithAllocationSite, 2);
377  } else {
378  __ CallRuntime(Runtime::kHiddenNewObject, 1);
379  }
380  __ movp(rbx, rax); // store result in rbx
381 
382  // If we ended up using the runtime, and we want a memento, then the
383  // runtime call made it for us, and we shouldn't do create count
384  // increment.
385  Label count_incremented;
386  if (create_memento) {
387  __ jmp(&count_incremented);
388  }
389 
390  // New object allocated.
391  // rbx: newly allocated object
392  __ bind(&allocated);
393 
394  if (create_memento) {
395  __ movp(rcx, Operand(rsp, kPointerSize*2));
396  __ Cmp(rcx, masm->isolate()->factory()->undefined_value());
397  __ j(equal, &count_incremented);
398  // rcx is an AllocationSite. We are creating a memento from it, so we
399  // need to increment the memento create count.
400  __ SmiAddConstant(
401  FieldOperand(rcx, AllocationSite::kPretenureCreateCountOffset),
402  Smi::FromInt(1));
403  __ bind(&count_incremented);
404  }
405 
406  // Retrieve the function from the stack.
407  __ Pop(rdi);
408 
409  // Retrieve smi-tagged arguments count from the stack.
410  __ movp(rax, Operand(rsp, 0));
411  __ SmiToInteger32(rax, rax);
412 
413  // Push the allocated receiver to the stack. We need two copies
414  // because we may have to return the original one and the calling
415  // conventions dictate that the called function pops the receiver.
416  __ Push(rbx);
417  __ Push(rbx);
418 
419  // Set up pointer to last argument.
420  __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));
421 
422  // Copy arguments and receiver to the expression stack.
423  Label loop, entry;
424  __ movp(rcx, rax);
425  __ jmp(&entry);
426  __ bind(&loop);
427  __ Push(Operand(rbx, rcx, times_pointer_size, 0));
428  __ bind(&entry);
429  __ decp(rcx);
430  __ j(greater_equal, &loop);
431 
432  // Call the function.
433  if (is_api_function) {
434  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
435  Handle<Code> code =
436  masm->isolate()->builtins()->HandleApiCallConstruct();
437  __ Call(code, RelocInfo::CODE_TARGET);
438  } else {
439  ParameterCount actual(rax);
440  __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());
441  }
442 
443  // Store offset of return address for deoptimizer.
444  if (!is_api_function && !count_constructions) {
445  masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
446  }
447 
448  // Restore context from the frame.
449  __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
450 
451  // If the result is an object (in the ECMA sense), we should get rid
452  // of the receiver and use the result; see ECMA-262 section 13.2.2-7
453  // on page 74.
454  Label use_receiver, exit;
455  // If the result is a smi, it is *not* an object in the ECMA sense.
456  __ JumpIfSmi(rax, &use_receiver);
457 
458  // If the type of the result (stored in its map) is less than
459  // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
460  STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
461  __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
462  __ j(above_equal, &exit);
463 
464  // Throw away the result of the constructor invocation and use the
465  // on-stack receiver as the result.
466  __ bind(&use_receiver);
467  __ movp(rax, Operand(rsp, 0));
468 
469  // Restore the arguments count and leave the construct frame.
470  __ bind(&exit);
471  __ movp(rbx, Operand(rsp, kPointerSize)); // Get arguments count.
472 
473  // Leave construct frame.
474  }
475 
476  // Remove caller arguments from the stack and return.
477  __ PopReturnAddressTo(rcx);
478  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
479  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
480  __ PushReturnAddressFrom(rcx);
481  Counters* counters = masm->isolate()->counters();
482  __ IncrementCounter(counters->constructed_objects(), 1);
483  __ ret(0);
484 }
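
// Editor's sketch: the stub repeatedly converts between raw 32-bit integers
// and smis (Integer32ToSmi, SmiToInteger32). A stand-alone illustration of
// the x64 smi encoding these helpers rely on, where the payload occupies the
// upper 32 bits and the low tag bits stay zero; the function names mirror the
// macro-assembler's, but the code below is illustrative, not V8's:

#include <cassert>
#include <cstdint>

int64_t Integer32ToSmi(int32_t value) {
  return static_cast<int64_t>(value) << 32;  // payload high, tag bits zero
}

int32_t SmiToInteger32(int64_t smi) {
  return static_cast<int32_t>(smi >> 32);
}

int main() {
  assert(SmiToInteger32(Integer32ToSmi(-7)) == -7);
  assert((Integer32ToSmi(42) & 1) == 0);  // clear low bit marks a smi (kSmiTag == 0)
}
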
485 
486 
487 void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
488  Generate_JSConstructStubHelper(masm, false, true, false);
489 }
490 
491 
492 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
493  Generate_JSConstructStubHelper(masm, false, false, FLAG_pretenuring_call_new);
494 }
495 
496 
497 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
498  Generate_JSConstructStubHelper(masm, true, false, false);
499 }
500 
501 
502 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
503  bool is_construct) {
504  ProfileEntryHookStub::MaybeCallEntryHook(masm);
505 
506  // Expects five C++ function parameters.
507  // - Address entry (ignored)
508  // - JSFunction* function
509  // - Object* receiver
510  // - int argc
511  // - Object*** argv
512  // (see Handle::Invoke in execution.cc).
513 
514  // Open a C++ scope for the FrameScope.
515  {
516  // Platform specific argument handling. After this, the stack contains
517  // an internal frame and the pushed function and receiver, and
518  // register rax and rbx holds the argument count and argument array,
519  // while rdi holds the function pointer and rsi the context.
520 
521 #ifdef _WIN64
522  // MSVC parameters in:
523  // rcx : entry (ignored)
524  // rdx : function
525  // r8 : receiver
526  // r9 : argc
527  // [rsp+0x20] : argv
528 
529  // Clear the context before we push it when entering the internal frame.
530  __ Set(rsi, 0);
531  // Enter an internal frame.
532  FrameScope scope(masm, StackFrame::INTERNAL);
533 
534  // Load the function context into rsi.
535  __ movp(rsi, FieldOperand(rdx, JSFunction::kContextOffset));
536 
537  // Push the function and the receiver onto the stack.
538  __ Push(rdx);
539  __ Push(r8);
540 
541  // Load the number of arguments and setup pointer to the arguments.
542  __ movp(rax, r9);
543  // Load the previous frame pointer to access C argument on stack
544  __ movp(kScratchRegister, Operand(rbp, 0));
545  __ movp(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
546  // Load the function pointer into rdi.
547  __ movp(rdi, rdx);
548 #else // _WIN64
549  // GCC parameters in:
550  // rdi : entry (ignored)
551  // rsi : function
552  // rdx : receiver
553  // rcx : argc
554  // r8 : argv
555 
556  __ movp(rdi, rsi);
557  // rdi : function
558 
559  // Clear the context before we push it when entering the internal frame.
560  __ Set(rsi, 0);
561  // Enter an internal frame.
562  FrameScope scope(masm, StackFrame::INTERNAL);
563 
564  // Push the function and receiver and setup the context.
565  __ Push(rdi);
566  __ Push(rdx);
567  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
568 
569  // Load the number of arguments and setup pointer to the arguments.
570  __ movp(rax, rcx);
571  __ movp(rbx, r8);
572 #endif // _WIN64
573 
574  // Current stack contents:
575  // [rsp + 2 * kPointerSize ... ] : Internal frame
576  // [rsp + kPointerSize] : function
577  // [rsp] : receiver
578  // Current register contents:
579  // rax : argc
580  // rbx : argv
581  // rsi : context
582  // rdi : function
583 
584  // Copy arguments to the stack in a loop.
585  // Register rbx points to array of pointers to handle locations.
586  // Push the values of these handles.
587  Label loop, entry;
588  __ Set(rcx, 0); // Set loop variable to 0.
589  __ jmp(&entry);
590  __ bind(&loop);
591  __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
592  __ Push(Operand(kScratchRegister, 0)); // dereference handle
593  __ addp(rcx, Immediate(1));
594  __ bind(&entry);
595  __ cmpp(rcx, rax);
596  __ j(not_equal, &loop);
597 
598  // Invoke the code.
599  if (is_construct) {
600  // No type feedback cell is available
601  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
602  // Expects rdi to hold function pointer.
603  CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
604  __ CallStub(&stub);
605  } else {
606  ParameterCount actual(rax);
607  // Function must be in rdi.
608  __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());
609  }
610  // Exit the internal frame. Notice that this also removes the empty
611  // context and the function left on the stack by the code
612  // invocation.
613  }
614 
615  // TODO(X64): Is argument correct? Is there a receiver to remove?
616  __ ret(1 * kPointerSize); // Remove receiver.
617 }
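
// Editor's sketch: the copy loop above dereferences each argv slot once
// before pushing, because argv holds handle locations rather than the objects
// themselves. A hypothetical C++ rendering (the Object type and PushArguments
// are illustrative, not V8's):

#include <cassert>
#include <vector>

struct Object { int value; };  // stand-in for a heap object

// argv is an array of handle locations (Object**); each slot is dereferenced
// once to obtain the Object* that gets pushed onto the JS stack.
void PushArguments(Object*** argv, int argc, std::vector<Object*>* js_stack) {
  for (int i = 0; i < argc; i++) {
    js_stack->push_back(*argv[i]);  // "dereference handle"
  }
}

int main() {
  Object a{1}, b{2};
  Object* slot_a = &a;  // a handle points at a slot holding the object pointer
  Object* slot_b = &b;
  Object** handles[] = {&slot_a, &slot_b};
  std::vector<Object*> js_stack;
  PushArguments(handles, 2, &js_stack);
  assert(js_stack[0]->value == 1 && js_stack[1]->value == 2);
}
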
618 
619 
620 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
621  Generate_JSEntryTrampolineHelper(masm, false);
622 }
623 
624 
625 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
626  Generate_JSEntryTrampolineHelper(masm, true);
627 }
628 
629 
630 void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) {
631  CallRuntimePassFunction(masm, Runtime::kHiddenCompileUnoptimized);
632  GenerateTailCallToReturnedCode(masm);
633 }
634 
635 
636 static void CallCompileOptimized(MacroAssembler* masm,
637  bool concurrent) {
638  FrameScope scope(masm, StackFrame::INTERNAL);
639  // Push a copy of the function onto the stack.
640  __ Push(rdi);
641  // Function is also the parameter to the runtime call.
642  __ Push(rdi);
643  // Whether to compile in a background thread.
644  __ Push(masm->isolate()->factory()->ToBoolean(concurrent));
645 
646  __ CallRuntime(Runtime::kHiddenCompileOptimized, 2);
647  // Restore receiver.
648  __ Pop(rdi);
649 }
650 
651 
652 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
653  CallCompileOptimized(masm, false);
654  GenerateTailCallToReturnedCode(masm);
655 }
656 
657 
658 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
659  CallCompileOptimized(masm, true);
660  GenerateTailCallToReturnedCode(masm);
661 }
662 
663 
664 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
665  // For now, we are relying on the fact that make_code_young doesn't do any
666  // garbage collection which allows us to save/restore the registers without
667  // worrying about which of them contain pointers. We also don't build an
668  // internal frame to make the code faster, since we shouldn't have to do stack
669  // crawls in MakeCodeYoung. This seems a bit fragile.
670 
671  // Re-execute the code that was patched back to the young age when
672  // the stub returns.
673  __ subp(Operand(rsp, 0), Immediate(5));
674  __ Pushad();
675  __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
676  __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
677  { // NOLINT
678  FrameScope scope(masm, StackFrame::MANUAL);
679  __ PrepareCallCFunction(2);
680  __ CallCFunction(
681  ExternalReference::get_make_code_young_function(masm->isolate()), 2);
682  }
683  __ Popad();
684  __ ret(0);
685 }
686 
687 
688 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
689 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
690  MacroAssembler* masm) { \
691  GenerateMakeCodeYoungAgainCommon(masm); \
692 } \
693 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
694  MacroAssembler* masm) { \
695  GenerateMakeCodeYoungAgainCommon(masm); \
696 }
697 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
698 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
699 
700 
701 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
702  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
703  // that make_code_young doesn't do any garbage collection which allows us to
704  // save/restore the registers without worrying about which of them contain
705  // pointers.
706  __ Pushad();
707  __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
708  __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
709  __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));
710  { // NOLINT
711  FrameScope scope(masm, StackFrame::MANUAL);
712  __ PrepareCallCFunction(2);
713  __ CallCFunction(
714  ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
715  2);
716  }
717  __ Popad();
718 
719  // Perform prologue operations usually performed by the young code stub.
720  __ PopReturnAddressTo(kScratchRegister);
721  __ pushq(rbp); // Caller's frame pointer.
722  __ movp(rbp, rsp);
723  __ Push(rsi); // Callee's context.
724  __ Push(rdi); // Callee's JS Function.
725  __ PushReturnAddressFrom(kScratchRegister);
726 
727  // Jump to point after the code-age stub.
728  __ ret(0);
729 }
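
// Editor's sketch: both code-age builtins recover the start of the patched
// sequence from their own return address; GenerateMakeCodeYoungAgainCommon
// rewinds rsp[0] by 5 bytes, and this builtin subtracts
// Assembler::kShortCallInstructionLength. A small illustration of that
// arithmetic (assuming the 5-byte x64 short call; not V8 code):

#include <cassert>
#include <cstdint>

const int kShortCallInstructionLength = 5;  // x64 E8 rel32 call

// The return address points just past the short call that was patched over
// the young sequence, so subtracting the call length yields its start.
uintptr_t SequenceStartFromReturnAddress(uintptr_t return_address) {
  return return_address - kShortCallInstructionLength;
}

int main() {
  assert(SequenceStartFromReturnAddress(0x1005) == 0x1000);
}
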
730 
731 
732 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
733  GenerateMakeCodeYoungAgainCommon(masm);
734 }
735 
736 
737 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
738  SaveFPRegsMode save_doubles) {
739  // Enter an internal frame.
740  {
741  FrameScope scope(masm, StackFrame::INTERNAL);
742 
743  // Preserve registers across notification, this is important for compiled
744  // stubs that tail call the runtime on deopts passing their parameters in
745  // registers.
746  __ Pushad();
747  __ CallRuntime(Runtime::kHiddenNotifyStubFailure, 0, save_doubles);
748  __ Popad();
749  // Tear down internal frame.
750  }
751 
752  __ Pop(MemOperand(rsp, 0)); // Ignore state offset
753  __ ret(0); // Return to IC Miss stub, continuation still on stack.
754 }
755 
756 
757 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
758  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
759 }
760 
761 
762 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
763  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
764 }
765 
766 
767 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
768  Deoptimizer::BailoutType type) {
769  // Enter an internal frame.
770  {
771  FrameScope scope(masm, StackFrame::INTERNAL);
772 
773  // Pass the deoptimization type to the runtime system.
774  __ Push(Smi::FromInt(static_cast<int>(type)));
775 
776  __ CallRuntime(Runtime::kHiddenNotifyDeoptimized, 1);
777  // Tear down internal frame.
778  }
779 
780  // Get the full codegen state from the stack and untag it.
781  __ SmiToInteger32(kScratchRegister, Operand(rsp, kPCOnStackSize));
782 
783  // Switch on the state.
784  Label not_no_registers, not_tos_rax;
785  __ cmpp(kScratchRegister, Immediate(FullCodeGenerator::NO_REGISTERS));
786  __ j(not_equal, &not_no_registers, Label::kNear);
787  __ ret(1 * kPointerSize); // Remove state.
788 
789  __ bind(&not_no_registers);
790  __ movp(rax, Operand(rsp, kPCOnStackSize + kPointerSize));
791  __ cmpp(kScratchRegister, Immediate(FullCodeGenerator::TOS_REG));
792  __ j(not_equal, &not_tos_rax, Label::kNear);
793  __ ret(2 * kPointerSize); // Remove state, rax.
794 
795  __ bind(&not_tos_rax);
796  __ Abort(kNoCasesLeft);
797 }
798 
799 
800 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
801  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
802 }
803 
804 
805 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
806  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
807 }
808 
809 
810 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
811  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
812 }
813 
814 
815 void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
816  // Stack Layout:
817  // rsp[0] : Return address
818  // rsp[8] : Argument n
819  // rsp[16] : Argument n-1
820  // ...
821  // rsp[8 * n] : Argument 1
822  // rsp[8 * (n + 1)] : Receiver (function to call)
823  //
824  // rax contains the number of arguments, n, not counting the receiver.
825  //
826  // 1. Make sure we have at least one argument.
827  { Label done;
828  __ testp(rax, rax);
829  __ j(not_zero, &done);
830  __ PopReturnAddressTo(rbx);
831  __ Push(masm->isolate()->factory()->undefined_value());
832  __ PushReturnAddressFrom(rbx);
833  __ incp(rax);
834  __ bind(&done);
835  }
836 
837  // 2. Get the function to call (passed as receiver) from the stack, check
838  // if it is a function.
839  Label slow, non_function;
840  StackArgumentsAccessor args(rsp, rax);
841  __ movp(rdi, args.GetReceiverOperand());
842  __ JumpIfSmi(rdi, &non_function);
843  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
844  __ j(not_equal, &slow);
845 
846  // 3a. Patch the first argument if necessary when calling a function.
847  Label shift_arguments;
848  __ Set(rdx, 0); // indicate regular JS_FUNCTION
849  { Label convert_to_object, use_global_receiver, patch_receiver;
850  // Change context eagerly in case we need the global receiver.
851  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
852 
853  // Do not transform the receiver for strict mode functions.
854  __ movp(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
855  __ testb(FieldOperand(rbx, SharedFunctionInfo::kStrictModeByteOffset),
856  Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
857  __ j(not_equal, &shift_arguments);
858 
859  // Do not transform the receiver for natives.
860  // SharedFunctionInfo is already loaded into rbx.
861  __ testb(FieldOperand(rbx, SharedFunctionInfo::kNativeByteOffset),
862  Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
863  __ j(not_zero, &shift_arguments);
864 
865  // Compute the receiver in sloppy mode.
866  __ movp(rbx, args.GetArgumentOperand(1));
867  __ JumpIfSmi(rbx, &convert_to_object, Label::kNear);
868 
869  __ CompareRoot(rbx, Heap::kNullValueRootIndex);
870  __ j(equal, &use_global_receiver);
871  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
872  __ j(equal, &use_global_receiver);
873 
874  STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
875  __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
876  __ j(above_equal, &shift_arguments);
877 
878  __ bind(&convert_to_object);
879  {
880  // Enter an internal frame in order to preserve argument count.
881  FrameScope scope(masm, StackFrame::INTERNAL);
882  __ Integer32ToSmi(rax, rax);
883  __ Push(rax);
884 
885  __ Push(rbx);
886  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
887  __ movp(rbx, rax);
888  __ Set(rdx, 0); // indicate regular JS_FUNCTION
889 
890  __ Pop(rax);
891  __ SmiToInteger32(rax, rax);
892  }
893 
894  // Restore the function to rdi.
895  __ movp(rdi, args.GetReceiverOperand());
896  __ jmp(&patch_receiver, Label::kNear);
897 
898  __ bind(&use_global_receiver);
899  __ movp(rbx,
900  Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
901  __ movp(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
902 
903  __ bind(&patch_receiver);
904  __ movp(args.GetArgumentOperand(1), rbx);
905 
906  __ jmp(&shift_arguments);
907  }
908 
909  // 3b. Check for function proxy.
910  __ bind(&slow);
911  __ Set(rdx, 1); // indicate function proxy
912  __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
913  __ j(equal, &shift_arguments);
914  __ bind(&non_function);
915  __ Set(rdx, 2); // indicate non-function
916 
917  // 3c. Patch the first argument when calling a non-function. The
918  // CALL_NON_FUNCTION builtin expects the non-function callee as
919  // receiver, so overwrite the first argument which will ultimately
920  // become the receiver.
921  __ movp(args.GetArgumentOperand(1), rdi);
922 
923  // 4. Shift arguments and return address one slot down on the stack
924  // (overwriting the original receiver). Adjust argument count to make
925  // the original first argument the new receiver.
926  __ bind(&shift_arguments);
927  { Label loop;
928  __ movp(rcx, rax);
929  __ bind(&loop);
930  __ movp(rbx, Operand(rsp, rcx, times_pointer_size, 0));
931  __ movp(Operand(rsp, rcx, times_pointer_size, 1 * kPointerSize), rbx);
932  __ decp(rcx);
933  __ j(not_sign, &loop); // While non-negative (to copy return address).
934  __ popq(rbx); // Discard copy of return address.
935  __ decp(rax); // One fewer argument (first argument is new receiver).
936  }
937 
938  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
939  // or a function proxy via CALL_FUNCTION_PROXY.
940  { Label function, non_proxy;
941  __ testp(rdx, rdx);
942  __ j(zero, &function);
943  __ Set(rbx, 0);
944  __ cmpp(rdx, Immediate(1));
945  __ j(not_equal, &non_proxy);
946 
947  __ PopReturnAddressTo(rdx);
948  __ Push(rdi); // re-add proxy object as additional argument
949  __ PushReturnAddressFrom(rdx);
950  __ incp(rax);
951  __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
952  __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
953  RelocInfo::CODE_TARGET);
954 
955  __ bind(&non_proxy);
956  __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
957  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
958  RelocInfo::CODE_TARGET);
959  __ bind(&function);
960  }
961 
962  // 5b. Get the code to call from the function and check that the number of
963  // expected arguments matches what we're providing. If so, jump
964  // (tail-call) to the code in register rdx without checking arguments.
965  __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
966  __ movsxlq(rbx,
967  FieldOperand(rdx,
968  SharedFunctionInfo::kFormalParameterCountOffset));
969  __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
970  __ cmpp(rax, rbx);
971  __ j(not_equal,
972  masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
973  RelocInfo::CODE_TARGET);
974 
975  ParameterCount expected(0);
976  __ InvokeCode(rdx, expected, expected, JUMP_FUNCTION, NullCallWrapper());
977 }
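
// Editor's sketch: step 4 above slides every slot, including the return
// address, one position down the stack so the original first argument becomes
// the receiver. A hypothetical model of the same shuffle (deque front() plays
// rsp[0]; names are illustrative, not V8's):

#include <cassert>
#include <cstdint>
#include <deque>

// Layout before: [ret, argN, ..., arg1, receiver]; after: [ret, argN, ..., arg1].
void ShiftArguments(std::deque<uint64_t>* stack, int* argc) {
  // Copy each slot one position deeper, overwriting the receiver slot.
  for (int i = *argc; i >= 0; i--) {
    (*stack)[i + 1] = (*stack)[i];
  }
  stack->pop_front();  // discard the duplicated copy of the return address
  (*argc)--;           // one fewer argument; old arg1 is now the receiver
}

int main() {
  std::deque<uint64_t> stack = {0xBEEF /* ret */, 22 /* arg2 */, 11 /* arg1 */,
                                99 /* receiver */};
  int argc = 2;
  ShiftArguments(&stack, &argc);
  assert(stack.front() == 0xBEEF && argc == 1);
  assert(stack[1] == 22 && stack[2] == 11);  // arg1 now sits in the receiver slot
}
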
978 
979 
980 void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
981  // Stack at entry:
982  // rsp : return address
983  // rsp[8] : arguments
984  // rsp[16] : receiver ("this")
985  // rsp[24] : function
986  {
987  FrameScope frame_scope(masm, StackFrame::INTERNAL);
988  // Stack frame:
989  // rbp : Old base pointer
990  // rbp[8] : return address
991  // rbp[16] : function arguments
992  // rbp[24] : receiver
993  // rbp[32] : function
994  static const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize;
995  static const int kReceiverOffset = kArgumentsOffset + kPointerSize;
996  static const int kFunctionOffset = kReceiverOffset + kPointerSize;
997 
998  __ Push(Operand(rbp, kFunctionOffset));
999  __ Push(Operand(rbp, kArgumentsOffset));
1000  __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
1001 
1002  // Check the stack for overflow. We are not trying to catch
1003  // interruptions (e.g. debug break and preemption) here, so the "real stack
1004  // limit" is checked.
1005  Label okay;
1006  __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
1007  __ movp(rcx, rsp);
1008  // Make rcx the space we have left. The stack might already be overflowed
1009  // here which will cause rcx to become negative.
1010  __ subp(rcx, kScratchRegister);
1011  // Make rdx the space we need for the array when it is unrolled onto the
1012  // stack.
1013  __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
1014  // Check if the arguments will overflow the stack.
1015  __ cmpp(rcx, rdx);
1016  __ j(greater, &okay); // Signed comparison.
1017 
1018  // Out of stack space.
1019  __ Push(Operand(rbp, kFunctionOffset));
1020  __ Push(rax);
1021  __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
1022  __ bind(&okay);
1023  // End of stack check.
1024 
1025  // Push current index and limit.
1026  const int kLimitOffset =
1027  StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
1028  const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
1029  __ Push(rax); // limit
1030  __ Push(Immediate(0)); // index
1031 
1032  // Get the receiver.
1033  __ movp(rbx, Operand(rbp, kReceiverOffset));
1034 
1035  // Check that the function is a JS function (otherwise it must be a proxy).
1036  Label push_receiver;
1037  __ movp(rdi, Operand(rbp, kFunctionOffset));
1038  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
1039  __ j(not_equal, &push_receiver);
1040 
1041  // Change context eagerly to get the right global object if necessary.
1042  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
1043 
1044  // Do not transform the receiver for strict mode functions.
1045  Label call_to_object, use_global_receiver;
1046  __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
1047  __ testb(FieldOperand(rdx, SharedFunctionInfo::kStrictModeByteOffset),
1048  Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
1049  __ j(not_equal, &push_receiver);
1050 
1051  // Do not transform the receiver for natives.
1052  __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset),
1053  Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
1054  __ j(not_equal, &push_receiver);
1055 
1056  // Compute the receiver in sloppy mode.
1057  __ JumpIfSmi(rbx, &call_to_object, Label::kNear);
1058  __ CompareRoot(rbx, Heap::kNullValueRootIndex);
1059  __ j(equal, &use_global_receiver);
1060  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
1061  __ j(equal, &use_global_receiver);
1062 
1063  // If given receiver is already a JavaScript object then there's no
1064  // reason for converting it.
1065  STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1066  __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
1067  __ j(above_equal, &push_receiver);
1068 
1069  // Convert the receiver to an object.
1070  __ bind(&call_to_object);
1071  __ Push(rbx);
1072  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1073  __ movp(rbx, rax);
1074  __ jmp(&push_receiver, Label::kNear);
1075 
1076  __ bind(&use_global_receiver);
1077  __ movp(rbx,
1078  Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
1079  __ movp(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
1080 
1081  // Push the receiver.
1082  __ bind(&push_receiver);
1083  __ Push(rbx);
1084 
1085  // Copy all arguments from the array to the stack.
1086  Label entry, loop;
1087  __ movp(rax, Operand(rbp, kIndexOffset));
1088  __ jmp(&entry);
1089  __ bind(&loop);
1090  __ movp(rdx, Operand(rbp, kArgumentsOffset)); // load arguments
1091 
1092  // Use inline caching to speed up access to arguments.
1093  Handle<Code> ic =
1094  masm->isolate()->builtins()->KeyedLoadIC_Initialize();
1095  __ Call(ic, RelocInfo::CODE_TARGET);
1096  // It is important that we do not have a test instruction after the
1097  // call. A test instruction after the call is used to indicate that
1098  // we have generated an inline version of the keyed load. In this
1099  // case, we know that we are not generating a test instruction next.
1100 
1101  // Push the nth argument.
1102  __ Push(rax);
1103 
1104  // Update the index on the stack and in register rax.
1105  __ movp(rax, Operand(rbp, kIndexOffset));
1106  __ SmiAddConstant(rax, rax, Smi::FromInt(1));
1107  __ movp(Operand(rbp, kIndexOffset), rax);
1108 
1109  __ bind(&entry);
1110  __ cmpp(rax, Operand(rbp, kLimitOffset));
1111  __ j(not_equal, &loop);
1112 
1113  // Call the function.
1114  Label call_proxy;
1115  ParameterCount actual(rax);
1116  __ SmiToInteger32(rax, rax);
1117  __ movp(rdi, Operand(rbp, kFunctionOffset));
1118  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
1119  __ j(not_equal, &call_proxy);
1120  __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());
1121 
1122  frame_scope.GenerateLeaveFrame();
1123  __ ret(3 * kPointerSize); // remove this, receiver, and arguments
1124 
1125  // Call the function proxy.
1126  __ bind(&call_proxy);
1127  __ Push(rdi); // add function proxy as last argument
1128  __ incp(rax);
1129  __ Set(rbx, 0);
1130  __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
1131  __ call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1132  RelocInfo::CODE_TARGET);
1133 
1134  // Leave internal frame.
1135  }
1136  __ ret(3 * kPointerSize); // remove this, receiver, and arguments
1137 }
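
// Editor's sketch: the overflow check near the top of this builtin compares
// the space left above the *real* stack limit with the space the unrolled
// arguments will need, using a signed comparison since rsp may already be
// past the limit. A hedged scalar illustration (names and the x64 pointer
// size are assumptions, not V8's code):

#include <cassert>
#include <cstdint>

bool ArgumentsFitOnStack(uintptr_t rsp, uintptr_t real_stack_limit,
                         int64_t argc) {
  const int kPointerSize = 8;  // x64
  // May be negative if the stack is already overflowed, hence signed math.
  int64_t space_left = static_cast<int64_t>(rsp) -
                       static_cast<int64_t>(real_stack_limit);
  return space_left > argc * kPointerSize;  // mirrors j(greater, &okay)
}

int main() {
  assert(ArgumentsFitOnStack(0x9000, 0x8000, 16));  // 4096 left, 128 needed
  assert(!ArgumentsFitOnStack(0x8000, 0x9000, 1));  // already overflowed
}
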
1138 
1139 
1140 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
1141  // ----------- S t a t e -------------
1142  // -- rax : argc
1143  // -- rsp[0] : return address
1144  // -- rsp[8] : last argument
1145  // -----------------------------------
1146  Label generic_array_code;
1147 
1148  // Get the InternalArray function.
1149  __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi);
1150 
1151  if (FLAG_debug_code) {
1152  // Initial map for the builtin InternalArray functions should be maps.
1153  __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
1154  // Will both indicate a NULL and a Smi.
1155  STATIC_ASSERT(kSmiTag == 0);
1156  Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
1157  __ Check(not_smi, kUnexpectedInitialMapForInternalArrayFunction);
1158  __ CmpObjectType(rbx, MAP_TYPE, rcx);
1159  __ Check(equal, kUnexpectedInitialMapForInternalArrayFunction);
1160  }
1161 
1162  // Run the native code for the InternalArray function called as a normal
1163  // function.
1164  // tail call a stub
1165  InternalArrayConstructorStub stub(masm->isolate());
1166  __ TailCallStub(&stub);
1167 }
1168 
1169 
1170 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
1171  // ----------- S t a t e -------------
1172  // -- rax : argc
1173  // -- rsp[0] : return address
1174  // -- rsp[8] : last argument
1175  // -----------------------------------
1176  Label generic_array_code;
1177 
1178  // Get the Array function.
1179  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rdi);
1180 
1181  if (FLAG_debug_code) {
1182  // Initial map for the builtin Array functions should be maps.
1183  __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
1184  // Will both indicate a NULL and a Smi.
1185  STATIC_ASSERT(kSmiTag == 0);
1186  Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
1187  __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
1188  __ CmpObjectType(rbx, MAP_TYPE, rcx);
1189  __ Check(equal, kUnexpectedInitialMapForArrayFunction);
1190  }
1191 
1192  // Run the native code for the Array function called as a normal function.
1193  // tail call a stub
1194  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
1195  ArrayConstructorStub stub(masm->isolate());
1196  __ TailCallStub(&stub);
1197 }
1198 
1199 
1200 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
1201  // ----------- S t a t e -------------
1202  // -- rax : number of arguments
1203  // -- rdi : constructor function
1204  // -- rsp[0] : return address
1205  // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1206  // -- rsp[(argc + 1) * 8] : receiver
1207  // -----------------------------------
1208  Counters* counters = masm->isolate()->counters();
1209  __ IncrementCounter(counters->string_ctor_calls(), 1);
1210 
1211  if (FLAG_debug_code) {
1212  __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, rcx);
1213  __ cmpp(rdi, rcx);
1214  __ Assert(equal, kUnexpectedStringFunction);
1215  }
1216 
1217  // Load the first argument into rax and get rid of the rest
1218  // (including the receiver).
1219  StackArgumentsAccessor args(rsp, rax);
1220  Label no_arguments;
1221  __ testp(rax, rax);
1222  __ j(zero, &no_arguments);
1223  __ movp(rbx, args.GetArgumentOperand(1));
1224  __ PopReturnAddressTo(rcx);
1225  __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
1226  __ PushReturnAddressFrom(rcx);
1227  __ movp(rax, rbx);
1228 
1229  // Lookup the argument in the number to string cache.
1230  Label not_cached, argument_is_string;
1231  __ LookupNumberStringCache(rax, // Input.
1232  rbx, // Result.
1233  rcx, // Scratch 1.
1234  rdx, // Scratch 2.
1235  &not_cached);
1236  __ IncrementCounter(counters->string_ctor_cached_number(), 1);
1237  __ bind(&argument_is_string);
1238 
1239  // ----------- S t a t e -------------
1240  // -- rbx : argument converted to string
1241  // -- rdi : constructor function
1242  // -- rsp[0] : return address
1243  // -----------------------------------
1244 
1245  // Allocate a JSValue and put the tagged pointer into rax.
1246  Label gc_required;
1247  __ Allocate(JSValue::kSize,
1248  rax, // Result.
1249  rcx, // New allocation top (we ignore it).
1250  no_reg,
1251  &gc_required,
1252  TAG_OBJECT);
1253 
1254  // Set the map.
1255  __ LoadGlobalFunctionInitialMap(rdi, rcx);
1256  if (FLAG_debug_code) {
1257  __ cmpb(FieldOperand(rcx, Map::kInstanceSizeOffset),
1258  Immediate(JSValue::kSize >> kPointerSizeLog2));
1259  __ Assert(equal, kUnexpectedStringWrapperInstanceSize);
1260  __ cmpb(FieldOperand(rcx, Map::kUnusedPropertyFieldsOffset), Immediate(0));
1261  __ Assert(equal, kUnexpectedUnusedPropertiesOfStringWrapper);
1262  }
1263  __ movp(FieldOperand(rax, HeapObject::kMapOffset), rcx);
1264 
1265  // Set properties and elements.
1266  __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
1267  __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), rcx);
1268  __ movp(FieldOperand(rax, JSObject::kElementsOffset), rcx);
1269 
1270  // Set the value.
1271  __ movp(FieldOperand(rax, JSValue::kValueOffset), rbx);
1272 
1273  // Ensure the object is fully initialized.
1274  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
1275 
1276  // We're done. Return.
1277  __ ret(0);
1278 
1279  // The argument was not found in the number to string cache. Check
1280  // if it's a string already before calling the conversion builtin.
1281  Label convert_argument;
1282  __ bind(&not_cached);
1283  STATIC_ASSERT(kSmiTag == 0);
1284  __ JumpIfSmi(rax, &convert_argument);
1285  Condition is_string = masm->IsObjectStringType(rax, rbx, rcx);
1286  __ j(NegateCondition(is_string), &convert_argument);
1287  __ movp(rbx, rax);
1288  __ IncrementCounter(counters->string_ctor_string_value(), 1);
1289  __ jmp(&argument_is_string);
1290 
1291  // Invoke the conversion builtin and put the result into rbx.
1292  __ bind(&convert_argument);
1293  __ IncrementCounter(counters->string_ctor_conversions(), 1);
1294  {
1295  FrameScope scope(masm, StackFrame::INTERNAL);
1296  __ Push(rdi); // Preserve the function.
1297  __ Push(rax);
1298  __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
1299  __ Pop(rdi);
1300  }
1301  __ movp(rbx, rax);
1302  __ jmp(&argument_is_string);
1303 
1304  // Load the empty string into rbx, remove the receiver from the
1305  // stack, and jump back to the case where the argument is a string.
1306  __ bind(&no_arguments);
1307  __ LoadRoot(rbx, Heap::kempty_stringRootIndex);
1308  __ PopReturnAddressTo(rcx);
1309  __ leap(rsp, Operand(rsp, kPointerSize));
1310  __ PushReturnAddressFrom(rcx);
1311  __ jmp(&argument_is_string);
1312 
1313  // At this point the argument is already a string. Call runtime to
1314  // create a string wrapper.
1315  __ bind(&gc_required);
1316  __ IncrementCounter(counters->string_ctor_gc_required(), 1);
1317  {
1318  FrameScope scope(masm, StackFrame::INTERNAL);
1319  __ Push(rbx);
1320  __ CallRuntime(Runtime::kNewStringWrapper, 1);
1321  }
1322  __ ret(0);
1323 }
1324 
1325 
1326 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1327  __ pushq(rbp);
1328  __ movp(rbp, rsp);
1329 
1330  // Store the arguments adaptor context sentinel.
1331  __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
1332 
1333  // Push the function on the stack.
1334  __ Push(rdi);
1335 
1336  // Preserve the number of arguments on the stack. Must preserve rax,
1337  // rbx and rcx because these registers are used when copying the
1338  // arguments and the receiver.
1339  __ Integer32ToSmi(r8, rax);
1340  __ Push(r8);
1341 }
1342 
1343 
1344 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1345  // Retrieve the number of arguments from the stack. Number is a Smi.
1346  __ movp(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));
1347 
1348  // Leave the frame.
1349  __ movp(rsp, rbp);
1350  __ popq(rbp);
1351 
1352  // Remove caller arguments from the stack.
1353  __ PopReturnAddressTo(rcx);
1354  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
1355  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
1356  __ PushReturnAddressFrom(rcx);
1357 }
1358 
1359 
1360 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1361  // ----------- S t a t e -------------
1362  // -- rax : actual number of arguments
1363  // -- rbx : expected number of arguments
1364  // -- rdi: function (passed through to callee)
1365  // -----------------------------------
1366 
1367  Label invoke, dont_adapt_arguments;
1368  Counters* counters = masm->isolate()->counters();
1369  __ IncrementCounter(counters->arguments_adaptors(), 1);
1370 
1371  Label enough, too_few;
1372  __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
1373  __ cmpp(rax, rbx);
1374  __ j(less, &too_few);
1375  __ cmpp(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
1376  __ j(equal, &dont_adapt_arguments);
1377 
1378  { // Enough parameters: Actual >= expected.
1379  __ bind(&enough);
1380  EnterArgumentsAdaptorFrame(masm);
1381 
1382  // Copy receiver and all expected arguments.
1383  const int offset = StandardFrameConstants::kCallerSPOffset;
1384  __ leap(rax, Operand(rbp, rax, times_pointer_size, offset));
1385  __ Set(r8, -1); // account for receiver
1386 
1387  Label copy;
1388  __ bind(&copy);
1389  __ incp(r8);
1390  __ Push(Operand(rax, 0));
1391  __ subp(rax, Immediate(kPointerSize));
1392  __ cmpp(r8, rbx);
1393  __ j(less, &copy);
1394  __ jmp(&invoke);
1395  }
1396 
1397  { // Too few parameters: Actual < expected.
1398  __ bind(&too_few);
1399  EnterArgumentsAdaptorFrame(masm);
1400 
1401  // Copy receiver and all actual arguments.
1402  const int offset = StandardFrameConstants::kCallerSPOffset;
1403  __ leap(rdi, Operand(rbp, rax, times_pointer_size, offset));
1404  __ Set(r8, -1); // account for receiver
1405 
1406  Label copy;
1407  __ bind(&copy);
1408  __ incp(r8);
1409  __ Push(Operand(rdi, 0));
1410  __ subp(rdi, Immediate(kPointerSize));
1411  __ cmpp(r8, rax);
1412  __ j(less, &copy);
1413 
1414  // Fill remaining expected arguments with undefined values.
1415  Label fill;
1416  __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
1417  __ bind(&fill);
1418  __ incp(r8);
1419  __ Push(kScratchRegister);
1420  __ cmpp(r8, rbx);
1421  __ j(less, &fill);
1422 
1423  // Restore function pointer.
1424  __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1425  }
1426 
1427  // Call the entry point.
1428  __ bind(&invoke);
1429  __ call(rdx);
1430 
1431  // Store offset of return address for deoptimizer.
1432  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
1433 
1434  // Leave frame and return.
1435  LeaveArgumentsAdaptorFrame(masm);
1436  __ ret(0);
1437 
1438  // -------------------------------------------
1439  // Don't adapt arguments.
1440  // -------------------------------------------
1441  __ bind(&dont_adapt_arguments);
1442  __ jmp(rdx);
1443 }
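
// Editor's sketch: the two copy loops plus the fill loop above implement the
// adaptor contract: pass through what was provided, pad the rest with
// undefined. A compact illustrative summary (plain C++, not V8's types):

#include <cassert>
#include <vector>

std::vector<int> AdaptArguments(const std::vector<int>& actual_args,
                                size_t expected, int undefined_marker) {
  std::vector<int> adapted;
  // "enough"/"too few": copy the arguments that actually exist.
  for (size_t i = 0; i < actual_args.size() && i < expected; i++) {
    adapted.push_back(actual_args[i]);
  }
  // Fill loop: pad with undefined until the expected count is reached.
  while (adapted.size() < expected) {
    adapted.push_back(undefined_marker);
  }
  return adapted;
}

int main() {
  const int kUndefined = -1;
  std::vector<int> adapted = AdaptArguments({7, 8}, 4, kUndefined);
  assert(adapted.size() == 4 && adapted[2] == kUndefined);
}
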
1444 
1445 
1446 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1447  // Lookup the function in the JavaScript frame.
1448  __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1449  {
1450  FrameScope scope(masm, StackFrame::INTERNAL);
1451  // Pass function as argument.
1452  __ Push(rax);
1453  __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
1454  }
1455 
1456  Label skip;
1457  // If the code object is null, just return to the unoptimized code.
1458  __ cmpp(rax, Immediate(0));
1459  __ j(not_equal, &skip, Label::kNear);
1460  __ ret(0);
1461 
1462  __ bind(&skip);
1463 
1464  // Load deoptimization data from the code object.
1465  __ movp(rbx, Operand(rax, Code::kDeoptimizationDataOffset - kHeapObjectTag));
1466 
1467  // Load the OSR entrypoint offset from the deoptimization data.
1468  __ SmiToInteger32(rbx, Operand(rbx, FixedArray::OffsetOfElementAt(
1469  DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
1470 
1471  // Compute the target address = code_obj + header_size + osr_offset
1472  __ leap(rax, Operand(rax, rbx, times_1, Code::kHeaderSize - kHeapObjectTag));
1473 
1474  // Overwrite the return address on the stack.
1475  __ movq(StackOperandForReturnAddress(0), rax);
1476 
1477  // And "return" to the OSR entry point of the function.
1478  __ ret(0);
1479 }
1480 
1481 
1482 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
1483  // We check the stack limit as indicator that recompilation might be done.
1484  Label ok;
1485  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
1486  __ j(above_equal, &ok);
1487  {
1488  FrameScope scope(masm, StackFrame::INTERNAL);
1489  __ CallRuntime(Runtime::kHiddenStackGuard, 0);
1490  }
1491  __ jmp(masm->isolate()->builtins()->OnStackReplacement(),
1492  RelocInfo::CODE_TARGET);
1493 
1494  __ bind(&ok);
1495  __ ret(0);
1496 }
1497 
1498 
1499 #undef __
1500 
1501 } } // namespace v8::internal
1502 
1503 #endif // V8_TARGET_ARCH_X64