v8 3.11.10 (node 0.8.26)
V8 is Google's open source JavaScript engine
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
builtins-x64.cc
Go to the documentation of this file.
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if defined(V8_TARGET_ARCH_X64)
31 
32 #include "codegen.h"
33 #include "deoptimizer.h"
34 #include "full-codegen.h"
35 
36 namespace v8 {
37 namespace internal {
38 
39 
40 #define __ ACCESS_MASM(masm)
41 
42 
43 void Builtins::Generate_Adaptor(MacroAssembler* masm,
44  CFunctionId id,
45  BuiltinExtraArguments extra_args) {
46  // ----------- S t a t e -------------
47  // -- rax : number of arguments excluding receiver
48  // -- rdi : called function (only guaranteed when
49  // extra_args requires it)
50  // -- rsi : context
51  // -- rsp[0] : return address
52  // -- rsp[8] : last argument
53  // -- ...
54  // -- rsp[8 * argc] : first argument (argc == rax)
55  // -- rsp[8 * (argc +1)] : receiver
56  // -----------------------------------
57 
58  // Insert extra arguments.
59  int num_extra_args = 0;
60  if (extra_args == NEEDS_CALLED_FUNCTION) {
61  num_extra_args = 1;
62  __ pop(kScratchRegister); // Save return address.
63  __ push(rdi);
64  __ push(kScratchRegister); // Restore return address.
65  } else {
66  ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
67  }
68 
69  // JumpToExternalReference expects rax to contain the number of arguments
70  // including the receiver and the extra arguments.
71  __ addq(rax, Immediate(num_extra_args + 1));
72  __ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1);
73 }
74 
75 
// Shared body of the JSConstructStub builtins: allocates the receiver
// (inline when FLAG_inline_new allows it, otherwise via Runtime::kNewObject),
// copies the call arguments onto the expression stack, invokes the
// constructor, and applies the ECMA-262 13.2.2 rule for choosing between
// the constructor's return value and the allocated receiver.
// NOTE(extraction): this listing is a documentation dump; original source
// lines 115, 134-135, 137, 154, 162, 179, 181, 208, 210, 213, 225, 231,
// 265, 306, 338 and 349 are missing here, so several statements below are
// visibly truncated (e.g. the decb/movzxbq operands and the
// AllocateInNewSpace argument lists). Consult the pristine
// builtins-x64.cc before editing this code.
76 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
77  bool is_api_function,
78  bool count_constructions) {
79  // ----------- S t a t e -------------
80  // -- rax: number of arguments
81  // -- rdi: constructor function
82  // -----------------------------------
83 
84  // Should never count constructions for api objects.
85  ASSERT(!is_api_function || !count_constructions);
86 
87  // Enter a construct frame.
88  {
89  FrameScope scope(masm, StackFrame::CONSTRUCT);
90 
91  // Store a smi-tagged arguments count on the stack.
92  __ Integer32ToSmi(rax, rax);
93  __ push(rax);
94 
95  // Push the function to invoke on the stack.
96  __ push(rdi);
97 
98  // Try to allocate the object without transitioning into C code. If any of
99  // the preconditions is not met, the code bails out to the runtime call.
100  Label rt_call, allocated;
101  if (FLAG_inline_new) {
102  Label undo_allocation;
103 
104 #ifdef ENABLE_DEBUGGER_SUPPORT
105  ExternalReference debug_step_in_fp =
106  ExternalReference::debug_step_in_fp_address(masm->isolate());
107  __ movq(kScratchRegister, debug_step_in_fp);
108  __ cmpq(Operand(kScratchRegister, 0), Immediate(0));
109  __ j(not_equal, &rt_call);
110 #endif
111 
112  // Verified that the constructor is a JSFunction.
113  // Load the initial map and verify that it is in fact a map.
114  // rdi: constructor
116  // Will both indicate a NULL and a Smi
117  ASSERT(kSmiTag == 0);
118  __ JumpIfSmi(rax, &rt_call);
119  // rdi: constructor
120  // rax: initial map (if proven valid below)
121  __ CmpObjectType(rax, MAP_TYPE, rbx);
122  __ j(not_equal, &rt_call);
123 
124  // Check that the constructor is not constructing a JSFunction (see
125  // comments in Runtime_NewObject in runtime.cc). In which case the
126  // initial map's instance type would be JS_FUNCTION_TYPE.
127  // rdi: constructor
128  // rax: initial map
129  __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
130  __ j(equal, &rt_call);
131 
132  if (count_constructions) {
133  Label allocate;
134  // Decrease generous allocation count.
136  __ decb(FieldOperand(rcx,
138  __ j(not_zero, &allocate);
139 
140  __ push(rax);
141  __ push(rdi);
142 
143  __ push(rdi); // constructor
144  // The call will replace the stub, so the countdown is only done once.
145  __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
146 
147  __ pop(rdi);
148  __ pop(rax);
149 
150  __ bind(&allocate);
151  }
152 
153  // Now allocate the JSObject on the heap.
155  __ shl(rdi, Immediate(kPointerSizeLog2));
156  // rdi: size of new object
157  __ AllocateInNewSpace(rdi,
158  rbx,
159  rdi,
160  no_reg,
161  &rt_call,
163  // Allocated the JSObject, now initialize the fields.
164  // rax: initial map
165  // rbx: JSObject (not HeapObject tagged - the actual address).
166  // rdi: start of next object
167  __ movq(Operand(rbx, JSObject::kMapOffset), rax);
168  __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
169  __ movq(Operand(rbx, JSObject::kPropertiesOffset), rcx);
170  __ movq(Operand(rbx, JSObject::kElementsOffset), rcx);
171  // Set extra fields in the newly allocated object.
172  // rax: initial map
173  // rbx: JSObject
174  // rdi: start of next object
175  __ lea(rcx, Operand(rbx, JSObject::kHeaderSize));
176  __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
177  if (count_constructions) {
178  __ movzxbq(rsi,
180  __ lea(rsi,
182  // rsi: offset of first field after pre-allocated fields
183  if (FLAG_debug_code) {
184  __ cmpq(rsi, rdi);
185  __ Assert(less_equal,
186  "Unexpected number of pre-allocated property fields.");
187  }
188  __ InitializeFieldsWithFiller(rcx, rsi, rdx);
189  __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
190  }
191  __ InitializeFieldsWithFiller(rcx, rdi, rdx);
192 
193  // Add the object tag to make the JSObject real, so that we can continue
194  // and jump into the continuation code at any time from now on. Any
195  // failures need to undo the allocation, so that the heap is in a
196  // consistent state and verifiable.
197  // rax: initial map
198  // rbx: JSObject
199  // rdi: start of next object
200  __ or_(rbx, Immediate(kHeapObjectTag));
201 
202  // Check if a non-empty properties array is needed.
203  // Allocate and initialize a FixedArray if it is.
204  // rax: initial map
205  // rbx: JSObject
206  // rdi: start of next object
207  // Calculate total properties described map.
209  __ movzxbq(rcx,
211  __ addq(rdx, rcx);
212  // Calculate unused properties past the end of the in-object properties.
214  __ subq(rdx, rcx);
215  // Done if no extra properties are to be allocated.
216  __ j(zero, &allocated);
217  __ Assert(positive, "Property allocation count failed.");
218 
219  // Scale the number of elements by pointer size and add the header for
220  // FixedArrays to the start of the next object calculation from above.
221  // rbx: JSObject
222  // rdi: start of next object (will be start of FixedArray)
223  // rdx: number of elements in properties array
224  __ AllocateInNewSpace(FixedArray::kHeaderSize,
226  rdx,
227  rdi,
228  rax,
229  no_reg,
230  &undo_allocation,
232 
233  // Initialize the FixedArray.
234  // rbx: JSObject
235  // rdi: FixedArray
236  // rdx: number of elements
237  // rax: start of next object
238  __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
239  __ movq(Operand(rdi, HeapObject::kMapOffset), rcx); // setup the map
240  __ Integer32ToSmi(rdx, rdx);
241  __ movq(Operand(rdi, FixedArray::kLengthOffset), rdx); // and length
242 
243  // Initialize the fields to undefined.
244  // rbx: JSObject
245  // rdi: FixedArray
246  // rax: start of next object
247  // rdx: number of elements
248  { Label loop, entry;
249  __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
250  __ lea(rcx, Operand(rdi, FixedArray::kHeaderSize));
251  __ jmp(&entry);
252  __ bind(&loop);
253  __ movq(Operand(rcx, 0), rdx);
254  __ addq(rcx, Immediate(kPointerSize));
255  __ bind(&entry);
256  __ cmpq(rcx, rax);
257  __ j(below, &loop);
258  }
259 
260  // Store the initialized FixedArray into the properties field of
261  // the JSObject
262  // rbx: JSObject
263  // rdi: FixedArray
264  __ or_(rdi, Immediate(kHeapObjectTag)); // add the heap tag
266 
267 
268  // Continue with JSObject being successfully allocated
269  // rbx: JSObject
270  __ jmp(&allocated);
271 
272  // Undo the setting of the new top so that the heap is verifiable. For
273  // example, the map's unused properties potentially do not match the
274  // allocated objects unused properties.
275  // rbx: JSObject (previous new top)
276  __ bind(&undo_allocation);
277  __ UndoAllocationInNewSpace(rbx);
278  }
279 
280  // Allocate the new receiver object using the runtime call.
281  // rdi: function (constructor)
282  __ bind(&rt_call);
283  // Must restore rdi (constructor) before calling runtime.
284  __ movq(rdi, Operand(rsp, 0));
285  __ push(rdi);
286  __ CallRuntime(Runtime::kNewObject, 1);
287  __ movq(rbx, rax); // store result in rbx
288 
289  // New object allocated.
290  // rbx: newly allocated object
291  __ bind(&allocated);
292  // Retrieve the function from the stack.
293  __ pop(rdi);
294 
295  // Retrieve smi-tagged arguments count from the stack.
296  __ movq(rax, Operand(rsp, 0));
297  __ SmiToInteger32(rax, rax);
298 
299  // Push the allocated receiver to the stack. We need two copies
300  // because we may have to return the original one and the calling
301  // conventions dictate that the called function pops the receiver.
302  __ push(rbx);
303  __ push(rbx);
304 
305  // Set up pointer to last argument.
307 
308  // Copy arguments and receiver to the expression stack.
309  Label loop, entry;
310  __ movq(rcx, rax);
311  __ jmp(&entry);
312  __ bind(&loop);
313  __ push(Operand(rbx, rcx, times_pointer_size, 0));
314  __ bind(&entry);
315  __ decq(rcx);
316  __ j(greater_equal, &loop);
317 
318  // Call the function.
319  if (is_api_function) {
321  Handle<Code> code =
322  masm->isolate()->builtins()->HandleApiCallConstruct();
323  ParameterCount expected(0);
324  __ InvokeCode(code, expected, expected, RelocInfo::CODE_TARGET,
325  CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
326  } else {
327  ParameterCount actual(rax);
328  __ InvokeFunction(rdi, actual, CALL_FUNCTION,
329  NullCallWrapper(), CALL_AS_METHOD);
330  }
331 
332  // Store offset of return address for deoptimizer.
333  if (!is_api_function && !count_constructions) {
334  masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
335  }
336 
337  // Restore context from the frame.
339 
340  // If the result is an object (in the ECMA sense), we should get rid
341  // of the receiver and use the result; see ECMA-262 section 13.2.2-7
342  // on page 74.
343  Label use_receiver, exit;
344  // If the result is a smi, it is *not* an object in the ECMA sense.
345  __ JumpIfSmi(rax, &use_receiver);
346 
347  // If the type of the result (stored in its map) is less than
348  // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
350  __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
351  __ j(above_equal, &exit);
352 
353  // Throw away the result of the constructor invocation and use the
354  // on-stack receiver as the result.
355  __ bind(&use_receiver);
356  __ movq(rax, Operand(rsp, 0));
357 
358  // Restore the arguments count and leave the construct frame.
359  __ bind(&exit);
360  __ movq(rbx, Operand(rsp, kPointerSize)); // Get arguments count.
361 
362  // Leave construct frame.
363  }
364 
365  // Remove caller arguments from the stack and return.
366  __ pop(rcx);
367  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
368  __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
369  __ push(rcx);
370  Counters* counters = masm->isolate()->counters();
371  __ IncrementCounter(counters->constructed_objects(), 1);
372  __ ret(0);
373 }
374 
375 
376 void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
377  Generate_JSConstructStubHelper(masm, false, true);
378 }
379 
380 
381 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
382  Generate_JSConstructStubHelper(masm, false, false);
383 }
384 
385 
386 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
387  Generate_JSConstructStubHelper(masm, true, false);
388 }
389 
390 
// Shared body for JSEntryTrampoline / JSConstructEntryTrampoline: marshals
// the five C++ entry parameters (platform-specific calling convention),
// pushes the arguments from the handle array onto the stack, and invokes
// the function (through the construct stub when is_construct).
// NOTE(extraction): this listing is a documentation dump; original source
// lines 422, 432 and 454 are missing here (presumably the loads of the
// function context into rsi and of argv into rbx — verify against the
// pristine builtins-x64.cc before editing).
391 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
392  bool is_construct) {
393  // Expects five C++ function parameters.
394  // - Address entry (ignored)
395  // - JSFunction* function (
396  // - Object* receiver
397  // - int argc
398  // - Object*** argv
399  // (see Handle::Invoke in execution.cc).
400 
401  // Open a C++ scope for the FrameScope.
402  {
403  // Platform specific argument handling. After this, the stack contains
404  // an internal frame and the pushed function and receiver, and
405  // register rax and rbx holds the argument count and argument array,
406  // while rdi holds the function pointer and rsi the context.
407 
408 #ifdef _WIN64
409  // MSVC parameters in:
410  // rcx : entry (ignored)
411  // rdx : function
412  // r8 : receiver
413  // r9 : argc
414  // [rsp+0x20] : argv
415 
416  // Clear the context before we push it when entering the internal frame.
417  __ Set(rsi, 0);
418  // Enter an internal frame.
419  FrameScope scope(masm, StackFrame::INTERNAL);
420 
421  // Load the function context into rsi.
423 
424  // Push the function and the receiver onto the stack.
425  __ push(rdx);
426  __ push(r8);
427 
428  // Load the number of arguments and setup pointer to the arguments.
429  __ movq(rax, r9);
430  // Load the previous frame pointer to access C argument on stack
431  __ movq(kScratchRegister, Operand(rbp, 0));
433  // Load the function pointer into rdi.
434  __ movq(rdi, rdx);
435 #else // _WIN64
436  // GCC parameters in:
437  // rdi : entry (ignored)
438  // rsi : function
439  // rdx : receiver
440  // rcx : argc
441  // r8 : argv
442 
443  __ movq(rdi, rsi);
444  // rdi : function
445 
446  // Clear the context before we push it when entering the internal frame.
447  __ Set(rsi, 0);
448  // Enter an internal frame.
449  FrameScope scope(masm, StackFrame::INTERNAL);
450 
451  // Push the function and receiver and setup the context.
452  __ push(rdi);
453  __ push(rdx);
455 
456  // Load the number of arguments and setup pointer to the arguments.
457  __ movq(rax, rcx);
458  __ movq(rbx, r8);
459 #endif // _WIN64
460 
461  // Current stack contents:
462  // [rsp + 2 * kPointerSize ... ]: Internal frame
463  // [rsp + kPointerSize] : function
464  // [rsp] : receiver
465  // Current register contents:
466  // rax : argc
467  // rbx : argv
468  // rsi : context
469  // rdi : function
470 
471  // Copy arguments to the stack in a loop.
472  // Register rbx points to array of pointers to handle locations.
473  // Push the values of these handles.
474  Label loop, entry;
475  __ Set(rcx, 0); // Set loop variable to 0.
476  __ jmp(&entry);
477  __ bind(&loop);
478  __ movq(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
479  __ push(Operand(kScratchRegister, 0)); // dereference handle
480  __ addq(rcx, Immediate(1));
481  __ bind(&entry);
482  __ cmpq(rcx, rax);
483  __ j(not_equal, &loop);
484 
485  // Invoke the code.
486  if (is_construct) {
487  // Expects rdi to hold function pointer.
488  CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
489  __ CallStub(&stub);
490  } else {
491  ParameterCount actual(rax);
492  // Function must be in rdi.
493  __ InvokeFunction(rdi, actual, CALL_FUNCTION,
494  NullCallWrapper(), CALL_AS_METHOD);
495  }
496  // Exit the internal frame. Notice that this also removes the empty
497  // context and the function left on the stack by the code
498  // invocation.
499  }
500 
501  // TODO(X64): Is argument correct? Is there a receiver to remove?
502  __ ret(1 * kPointerSize); // Remove receiver.
503 }
504 
505 
506 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
507  Generate_JSEntryTrampolineHelper(masm, false);
508 }
509 
510 
511 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
512  Generate_JSEntryTrampolineHelper(masm, true);
513 }
514 
515 
// Builtin entry for a function whose code has not been compiled yet:
// calls Runtime::kLazyCompile for the function in rdi (preserving the
// call-kind value in rcx across the call) and tail-calls the result.
516 void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
517  // Enter an internal frame.
518  {
519  FrameScope scope(masm, StackFrame::INTERNAL);
520 
521  // Push a copy of the function onto the stack.
522  __ push(rdi);
523  // Push call kind information.
524  __ push(rcx);
525 
526  __ push(rdi); // Function is also the parameter to the runtime call.
527  __ CallRuntime(Runtime::kLazyCompile, 1);
528 
529  // Restore call kind information.
530  __ pop(rcx);
531  // Restore function (this pops the copy of rdi pushed above).
532  __ pop(rdi);
533 
534  // Tear down internal frame.
535  }
536 
537  // Do a tail-call of the compiled function.
// NOTE(extraction): original line 538 is missing from this dump — it
// presumably loads the compiled code's entry address into rax; verify
// against the pristine builtins-x64.cc.
539  __ jmp(rax);
540 }
541 
542 
// Builtin entry for re-optimizing a function: calls
// Runtime::kLazyRecompile for the function in rdi (preserving the
// call-kind value in rcx across the call) and tail-calls the result.
543 void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
544  // Enter an internal frame.
545  {
546  FrameScope scope(masm, StackFrame::INTERNAL);
547 
548  // Push a copy of the function onto the stack.
549  __ push(rdi);
550  // Push call kind information.
551  __ push(rcx);
552 
553  __ push(rdi); // Function is also the parameter to the runtime call.
554  __ CallRuntime(Runtime::kLazyRecompile, 1);
555 
556  // Restore call kind information.
557  __ pop(rcx);
558  // Restore function.
559  __ pop(rdi);
560 
561  // Tear down internal frame.
562  }
563 
564  // Do a tail-call of the compiled function.
// NOTE(extraction): original line 565 is missing from this dump — it
// presumably loads the recompiled code's entry address into rax; verify
// against the pristine builtins-x64.cc.
566  __ jmp(rax);
567 }
568 
569 
// Shared body for the NotifyDeoptimized builtins: reports the bailout
// type to Runtime::kNotifyDeoptimized, then returns according to the
// full-codegen state (NO_REGISTERS or TOS_REG) left on the stack by the
// deoptimizer; any other state aborts.
// NOTE(extraction): original line 571 — the helper's second parameter,
// presumably "Deoptimizer::BailoutType type) {" given the use of `type`
// below — is missing from this dump; verify against the pristine
// builtins-x64.cc.
570 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
572  // Enter an internal frame.
573  {
574  FrameScope scope(masm, StackFrame::INTERNAL);
575 
576  // Pass the deoptimization type to the runtime system.
577  __ Push(Smi::FromInt(static_cast<int>(type)));
578 
579  __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
580  // Tear down internal frame.
581  }
582 
583  // Get the full codegen state from the stack and untag it.
584  __ SmiToInteger32(rcx, Operand(rsp, 1 * kPointerSize));
585 
586  // Switch on the state.
587  Label not_no_registers, not_tos_rax;
588  __ cmpq(rcx, Immediate(FullCodeGenerator::NO_REGISTERS));
589  __ j(not_equal, &not_no_registers, Label::kNear);
590  __ ret(1 * kPointerSize); // Remove state.
591 
592  __ bind(&not_no_registers);
593  __ movq(rax, Operand(rsp, 2 * kPointerSize));
594  __ cmpq(rcx, Immediate(FullCodeGenerator::TOS_REG));
595  __ j(not_equal, &not_tos_rax, Label::kNear);
596  __ ret(2 * kPointerSize); // Remove state, rax.
597 
598  __ bind(&not_tos_rax);
599  __ Abort("no cases left");
600 }
601 
602 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
603  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
604 }
605 
606 
607 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
608  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
609 }
610 
611 
612 void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
613  // For now, we are relying on the fact that Runtime::NotifyOSR
614  // doesn't do any garbage collection which allows us to save/restore
615  // the registers without worrying about which of them contain
616  // pointers. This seems a bit fragile.
617  __ Pushad();
618  {
619  FrameScope scope(masm, StackFrame::INTERNAL);
620  __ CallRuntime(Runtime::kNotifyOSR, 0);
621  }
622  __ Popad();
623  __ ret(0);
624 }
625 
626 
// Function.prototype.call-style invocation builtin: ensures at least one
// argument exists, classifies the callee (JS function, function proxy, or
// non-function; class recorded in rdx as 0/1/2), patches the receiver for
// non-strict calls, shifts all arguments one slot down (dropping the
// callee slot), and invokes the callee — through the arguments adaptor
// trampoline when the provided and expected argument counts differ.
// NOTE(extraction): this listing is a documentation dump; original source
// lines 663, 666-668, 673-674, 686, 714, 716, 718, 783 and 785-787 are
// missing here (the eager context load, the strict-mode/native checks on
// the SharedFunctionInfo, the kGlobalIndex initializers, and the
// expected-argument-count load), so several statements below are
// truncated. Consult the pristine builtins-x64.cc before editing.
627 void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
628  // Stack Layout:
629  // rsp[0]: Return address
630  // rsp[1]: Argument n
631  // rsp[2]: Argument n-1
632  // ...
633  // rsp[n]: Argument 1
634  // rsp[n+1]: Receiver (function to call)
635  //
636  // rax contains the number of arguments, n, not counting the receiver.
637  //
638  // 1. Make sure we have at least one argument.
639  { Label done;
640  __ testq(rax, rax);
641  __ j(not_zero, &done);
642  __ pop(rbx);
643  __ Push(masm->isolate()->factory()->undefined_value());
644  __ push(rbx);
645  __ incq(rax);
646  __ bind(&done);
647  }
648 
649  // 2. Get the function to call (passed as receiver) from the stack, check
650  // if it is a function.
651  Label slow, non_function;
652  // The function to call is at position n+1 on the stack.
653  __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
654  __ JumpIfSmi(rdi, &non_function);
655  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
656  __ j(not_equal, &slow);
657 
658  // 3a. Patch the first argument if necessary when calling a function.
659  Label shift_arguments;
660  __ Set(rdx, 0); // indicate regular JS_FUNCTION
661  { Label convert_to_object, use_global_receiver, patch_receiver;
662  // Change context eagerly in case we need the global receiver.
664 
665  // Do not transform the receiver for strict mode functions.
669  __ j(not_equal, &shift_arguments);
670 
671  // Do not transform the receiver for natives.
672  // SharedFunctionInfo is already loaded into rbx.
675  __ j(not_zero, &shift_arguments);
676 
677  // Compute the receiver in non-strict mode.
678  __ movq(rbx, Operand(rsp, rax, times_pointer_size, 0));
679  __ JumpIfSmi(rbx, &convert_to_object, Label::kNear);
680 
681  __ CompareRoot(rbx, Heap::kNullValueRootIndex);
682  __ j(equal, &use_global_receiver);
683  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
684  __ j(equal, &use_global_receiver);
685 
687  __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
688  __ j(above_equal, &shift_arguments);
689 
690  __ bind(&convert_to_object);
691  {
692  // Enter an internal frame in order to preserve argument count.
693  FrameScope scope(masm, StackFrame::INTERNAL);
694  __ Integer32ToSmi(rax, rax);
695  __ push(rax);
696 
697  __ push(rbx);
698  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
699  __ movq(rbx, rax);
700  __ Set(rdx, 0); // indicate regular JS_FUNCTION
701 
702  __ pop(rax);
703  __ SmiToInteger32(rax, rax);
704  }
705 
706  // Restore the function to rdi.
707  __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
708  __ jmp(&patch_receiver, Label::kNear);
709 
710  // Use the global receiver object from the called function as the
711  // receiver.
712  __ bind(&use_global_receiver);
713  const int kGlobalIndex =
715  __ movq(rbx, FieldOperand(rsi, kGlobalIndex));
717  __ movq(rbx, FieldOperand(rbx, kGlobalIndex));
719 
720  __ bind(&patch_receiver);
721  __ movq(Operand(rsp, rax, times_pointer_size, 0), rbx);
722 
723  __ jmp(&shift_arguments);
724  }
725 
726  // 3b. Check for function proxy.
727  __ bind(&slow);
728  __ Set(rdx, 1); // indicate function proxy
729  __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
730  __ j(equal, &shift_arguments);
731  __ bind(&non_function);
732  __ Set(rdx, 2); // indicate non-function
733 
734  // 3c. Patch the first argument when calling a non-function. The
735  // CALL_NON_FUNCTION builtin expects the non-function callee as
736  // receiver, so overwrite the first argument which will ultimately
737  // become the receiver.
738  __ movq(Operand(rsp, rax, times_pointer_size, 0), rdi);
739 
740  // 4. Shift arguments and return address one slot down on the stack
741  // (overwriting the original receiver). Adjust argument count to make
742  // the original first argument the new receiver.
743  __ bind(&shift_arguments);
744  { Label loop;
745  __ movq(rcx, rax);
746  __ bind(&loop);
747  __ movq(rbx, Operand(rsp, rcx, times_pointer_size, 0));
748  __ movq(Operand(rsp, rcx, times_pointer_size, 1 * kPointerSize), rbx);
749  __ decq(rcx);
750  __ j(not_sign, &loop); // While non-negative (to copy return address).
751  __ pop(rbx); // Discard copy of return address.
752  __ decq(rax); // One fewer argument (first argument is new receiver).
753  }
754 
755  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
756  // or a function proxy via CALL_FUNCTION_PROXY.
757  { Label function, non_proxy;
758  __ testq(rdx, rdx);
759  __ j(zero, &function);
760  __ Set(rbx, 0);
761  __ SetCallKind(rcx, CALL_AS_METHOD);
762  __ cmpq(rdx, Immediate(1));
763  __ j(not_equal, &non_proxy);
764 
765  __ pop(rdx); // return address
766  __ push(rdi); // re-add proxy object as additional argument
767  __ push(rdx);
768  __ incq(rax);
769  __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
770  __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
771  RelocInfo::CODE_TARGET);
772 
773  __ bind(&non_proxy);
774  __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
775  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
776  RelocInfo::CODE_TARGET);
777  __ bind(&function);
778  }
779 
780  // 5b. Get the code to call from the function and check that the number of
781  // expected arguments matches what we're providing. If so, jump
782  // (tail-call) to the code in register edx without checking arguments.
784  __ movsxlq(rbx,
788  __ SetCallKind(rcx, CALL_AS_METHOD);
789  __ cmpq(rax, rbx);
790  __ j(not_equal,
791  masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
792  RelocInfo::CODE_TARGET);
793 
794  ParameterCount expected(0);
795  __ InvokeCode(rdx, expected, expected, JUMP_FUNCTION,
796  NullCallWrapper(), CALL_AS_METHOD);
797 }
798 
799 
// Function.prototype.apply builtin: validates the arguments object via
// APPLY_PREPARE, checks for stack overflow against the real stack limit,
// computes the receiver per non-strict-mode rules, unrolls the arguments
// array onto the stack element by element (using the KeyedLoad IC), and
// invokes the target function (or a function proxy through
// CALL_FUNCTION_PROXY and the arguments adaptor).
// NOTE(extraction): this listing is a documentation dump; original source
// lines 847, 862, 866-868, 872-873, 885, 899, 901 and 903 are missing
// here (the kLimitOffset/kGlobalOffset initializers, the context load,
// and the strict-mode/native checks), so several statements below are
// truncated. Consult the pristine builtins-x64.cc before editing.
800 void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
801  // Stack at entry:
802  // rsp: return address
803  // rsp+8: arguments
804  // rsp+16: receiver ("this")
805  // rsp+24: function
806  {
807  FrameScope frame_scope(masm, StackFrame::INTERNAL);
808  // Stack frame:
809  // rbp: Old base pointer
810  // rbp[1]: return address
811  // rbp[2]: function arguments
812  // rbp[3]: receiver
813  // rbp[4]: function
814  static const int kArgumentsOffset = 2 * kPointerSize;
815  static const int kReceiverOffset = 3 * kPointerSize;
816  static const int kFunctionOffset = 4 * kPointerSize;
817 
818  __ push(Operand(rbp, kFunctionOffset));
819  __ push(Operand(rbp, kArgumentsOffset));
820  __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
821 
822  // Check the stack for overflow. We are not trying to catch
823  // interruptions (e.g. debug break and preemption) here, so the "real stack
824  // limit" is checked.
825  Label okay;
826  __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
827  __ movq(rcx, rsp);
828  // Make rcx the space we have left. The stack might already be overflowed
829  // here which will cause rcx to become negative.
830  __ subq(rcx, kScratchRegister);
831  // Make rdx the space we need for the array when it is unrolled onto the
832  // stack.
833  __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
834  // Check if the arguments will overflow the stack.
835  __ cmpq(rcx, rdx);
836  __ j(greater, &okay); // Signed comparison.
837 
838  // Out of stack space.
839  __ push(Operand(rbp, kFunctionOffset));
840  __ push(rax);
841  __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
842  __ bind(&okay);
843  // End of stack check.
844 
845  // Push current index and limit.
846  const int kLimitOffset =
848  const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
849  __ push(rax); // limit
850  __ push(Immediate(0)); // index
851 
852  // Get the receiver.
853  __ movq(rbx, Operand(rbp, kReceiverOffset));
854 
855  // Check that the function is a JS function (otherwise it must be a proxy).
856  Label push_receiver;
857  __ movq(rdi, Operand(rbp, kFunctionOffset));
858  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
859  __ j(not_equal, &push_receiver);
860 
861  // Change context eagerly to get the right global object if necessary.
863 
864  // Do not transform the receiver for strict mode functions.
865  Label call_to_object, use_global_receiver;
869  __ j(not_equal, &push_receiver);
870 
871  // Do not transform the receiver for natives.
874  __ j(not_equal, &push_receiver);
875 
876  // Compute the receiver in non-strict mode.
877  __ JumpIfSmi(rbx, &call_to_object, Label::kNear);
878  __ CompareRoot(rbx, Heap::kNullValueRootIndex);
879  __ j(equal, &use_global_receiver);
880  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
881  __ j(equal, &use_global_receiver);
882 
883  // If given receiver is already a JavaScript object then there's no
884  // reason for converting it.
886  __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
887  __ j(above_equal, &push_receiver);
888 
889  // Convert the receiver to an object.
890  __ bind(&call_to_object);
891  __ push(rbx);
892  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
893  __ movq(rbx, rax);
894  __ jmp(&push_receiver, Label::kNear);
895 
896  // Use the current global receiver object as the receiver.
897  __ bind(&use_global_receiver);
898  const int kGlobalOffset =
900  __ movq(rbx, FieldOperand(rsi, kGlobalOffset));
902  __ movq(rbx, FieldOperand(rbx, kGlobalOffset));
904 
905  // Push the receiver.
906  __ bind(&push_receiver);
907  __ push(rbx);
908 
909  // Copy all arguments from the array to the stack.
910  Label entry, loop;
911  __ movq(rax, Operand(rbp, kIndexOffset));
912  __ jmp(&entry);
913  __ bind(&loop);
914  __ movq(rdx, Operand(rbp, kArgumentsOffset)); // load arguments
915 
916  // Use inline caching to speed up access to arguments.
917  Handle<Code> ic =
918  masm->isolate()->builtins()->KeyedLoadIC_Initialize();
919  __ Call(ic, RelocInfo::CODE_TARGET);
920  // It is important that we do not have a test instruction after the
921  // call. A test instruction after the call is used to indicate that
922  // we have generated an inline version of the keyed load. In this
923  // case, we know that we are not generating a test instruction next.
924 
925  // Push the nth argument.
926  __ push(rax);
927 
928  // Update the index on the stack and in register rax.
929  __ movq(rax, Operand(rbp, kIndexOffset));
930  __ SmiAddConstant(rax, rax, Smi::FromInt(1));
931  __ movq(Operand(rbp, kIndexOffset), rax);
932 
933  __ bind(&entry);
934  __ cmpq(rax, Operand(rbp, kLimitOffset));
935  __ j(not_equal, &loop);
936 
937  // Invoke the function.
938  Label call_proxy;
939  ParameterCount actual(rax);
940  __ SmiToInteger32(rax, rax);
941  __ movq(rdi, Operand(rbp, kFunctionOffset));
942  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
943  __ j(not_equal, &call_proxy);
944  __ InvokeFunction(rdi, actual, CALL_FUNCTION,
945  NullCallWrapper(), CALL_AS_METHOD);
946 
947  frame_scope.GenerateLeaveFrame();
948  __ ret(3 * kPointerSize); // remove this, receiver, and arguments
949 
950  // Invoke the function proxy.
951  __ bind(&call_proxy);
952  __ push(rdi); // add function proxy as last argument
953  __ incq(rax);
954  __ Set(rbx, 0);
955  __ SetCallKind(rcx, CALL_AS_METHOD);
956  __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
957  __ call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
958  RelocInfo::CODE_TARGET);
959 
960  // Leave internal frame.
961  }
962  __ ret(3 * kPointerSize); // remove this, receiver, and arguments
963 }
964 
965 
966 // Allocate an empty JSArray. The allocated array is put into the result
967 // register. If the parameter initial_capacity is larger than zero an elements
968 // backing store is allocated with this size and filled with the hole values.
969 // Otherwise the elements backing store is set to the empty FixedArray.
970 static void AllocateEmptyJSArray(MacroAssembler* masm,
971  Register array_function,
972  Register result,
973  Register scratch1,
974  Register scratch2,
975  Register scratch3,
976  Label* gc_required) {
977  const int initial_capacity = JSArray::kPreallocatedArrayElements;
978  STATIC_ASSERT(initial_capacity >= 0);
979 
980  __ LoadInitialArrayMap(array_function, scratch2, scratch1, false);
981 
982  // Allocate the JSArray object together with space for a fixed array with the
983  // requested elements.
984  int size = JSArray::kSize;
985  if (initial_capacity > 0) {
986  size += FixedArray::SizeFor(initial_capacity);
987  }
988  __ AllocateInNewSpace(size,
989  result,
990  scratch2,
991  scratch3,
992  gc_required,
993  TAG_OBJECT);
994 
995  // Allocated the JSArray. Now initialize the fields except for the elements
996  // array.
997  // result: JSObject
998  // scratch1: initial map
999  // scratch2: start of next object
1000  Factory* factory = masm->isolate()->factory();
1001  __ movq(FieldOperand(result, JSObject::kMapOffset), scratch1);
1003  factory->empty_fixed_array());
1004  // Field JSArray::kElementsOffset is initialized later.
1006 
1007  // If no storage is requested for the elements array just set the empty
1008  // fixed array.
1009  if (initial_capacity == 0) {
1011  factory->empty_fixed_array());
1012  return;
1013  }
1014 
1015  // Calculate the location of the elements array and set elements array member
1016  // of the JSArray.
1017  // result: JSObject
1018  // scratch2: start of next object
1019  __ lea(scratch1, Operand(result, JSArray::kSize));
1020  __ movq(FieldOperand(result, JSArray::kElementsOffset), scratch1);
1021 
1022  // Initialize the FixedArray and fill it with holes. FixedArray length is
1023  // stored as a smi.
1024  // result: JSObject
1025  // scratch1: elements array
1026  // scratch2: start of next object
1027  __ Move(FieldOperand(scratch1, HeapObject::kMapOffset),
1028  factory->fixed_array_map());
1029  __ Move(FieldOperand(scratch1, FixedArray::kLengthOffset),
1030  Smi::FromInt(initial_capacity));
1031 
1032  // Fill the FixedArray with the hole value. Inline the code if short.
1033  // Reconsider loop unfolding if kPreallocatedArrayElements gets changed.
1034  static const int kLoopUnfoldLimit = 4;
1035  __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
1036  if (initial_capacity <= kLoopUnfoldLimit) {
1037  // Use a scratch register here to have only one reloc info when unfolding
1038  // the loop.
1039  for (int i = 0; i < initial_capacity; i++) {
1040  __ movq(FieldOperand(scratch1,
1041  FixedArray::kHeaderSize + i * kPointerSize),
1042  scratch3);
1043  }
1044  } else {
1045  Label loop, entry;
1046  __ movq(scratch2, Immediate(initial_capacity));
1047  __ jmp(&entry);
1048  __ bind(&loop);
1049  __ movq(FieldOperand(scratch1,
1050  scratch2,
1053  scratch3);
1054  __ bind(&entry);
1055  __ decq(scratch2);
1056  __ j(not_sign, &loop);
1057  }
1058 }
1059 
1060 
1061 // Allocate a JSArray with the number of elements stored in a register. The
1062 // register array_function holds the built-in Array function and the register
1063 // array_size holds the size of the array as a smi. The allocated array is put
1064 // into the result register and beginning and end of the FixedArray elements
1065 // storage is put into registers elements_array and elements_array_end (see
1066 // below for when that is not the case). If the parameter fill_with_holes is
1067 // true the allocated elements backing store is filled with the hole values
1068 // otherwise it is left uninitialized. When the backing store is filled the
1069 // register elements_array is scratched.
1070 static void AllocateJSArray(MacroAssembler* masm,
1071  Register array_function, // Array function.
1072  Register array_size, // As a smi, cannot be 0.
1073  Register result,
1074  Register elements_array,
1075  Register elements_array_end,
1076  Register scratch,
1077  bool fill_with_hole,
1078  Label* gc_required) {
1079  __ LoadInitialArrayMap(array_function, scratch,
1080  elements_array, fill_with_hole);
1081 
1082  if (FLAG_debug_code) { // Assert that array size is not zero.
1083  __ testq(array_size, array_size);
1084  __ Assert(not_zero, "array size is unexpectedly 0");
1085  }
1086 
1087  // Allocate the JSArray object together with space for a FixedArray with the
1088  // requested elements.
1089  SmiIndex index =
1090  masm->SmiToIndex(kScratchRegister, array_size, kPointerSizeLog2);
1091  __ AllocateInNewSpace(JSArray::kSize + FixedArray::kHeaderSize,
1092  index.scale,
1093  index.reg,
1094  result,
1095  elements_array_end,
1096  scratch,
1097  gc_required,
1098  TAG_OBJECT);
1099 
1100  // Allocated the JSArray. Now initialize the fields except for the elements
1101  // array.
1102  // result: JSObject
1103  // elements_array: initial map
1104  // elements_array_end: start of next object
1105  // array_size: size of array (smi)
1106  Factory* factory = masm->isolate()->factory();
1107  __ movq(FieldOperand(result, JSObject::kMapOffset), elements_array);
1108  __ Move(elements_array, factory->empty_fixed_array());
1109  __ movq(FieldOperand(result, JSArray::kPropertiesOffset), elements_array);
1110  // Field JSArray::kElementsOffset is initialized later.
1111  __ movq(FieldOperand(result, JSArray::kLengthOffset), array_size);
1112 
1113  // Calculate the location of the elements array and set elements array member
1114  // of the JSArray.
1115  // result: JSObject
1116  // elements_array_end: start of next object
1117  // array_size: size of array (smi)
1118  __ lea(elements_array, Operand(result, JSArray::kSize));
1119  __ movq(FieldOperand(result, JSArray::kElementsOffset), elements_array);
1120 
1121  // Initialize the fixed array. FixedArray length is stored as a smi.
1122  // result: JSObject
1123  // elements_array: elements array
1124  // elements_array_end: start of next object
1125  // array_size: size of array (smi)
1126  __ Move(FieldOperand(elements_array, JSObject::kMapOffset),
1127  factory->fixed_array_map());
1128  // For non-empty JSArrays the length of the FixedArray and the JSArray is the
1129  // same.
1130  __ movq(FieldOperand(elements_array, FixedArray::kLengthOffset), array_size);
1131 
1132  // Fill the allocated FixedArray with the hole value if requested.
1133  // result: JSObject
1134  // elements_array: elements array
1135  // elements_array_end: start of next object
1136  if (fill_with_hole) {
1137  Label loop, entry;
1138  __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
1139  __ lea(elements_array, Operand(elements_array,
1141  __ jmp(&entry);
1142  __ bind(&loop);
1143  __ movq(Operand(elements_array, 0), scratch);
1144  __ addq(elements_array, Immediate(kPointerSize));
1145  __ bind(&entry);
1146  __ cmpq(elements_array, elements_array_end);
1147  __ j(below, &loop);
1148  }
1149 }
1150 
1151 
1152 // Create a new array for the built-in Array function. This function allocates
1153 // the JSArray object and the FixedArray elements array and initializes these.
1154 // If the Array cannot be constructed in native code the runtime is called. This
1155 // function assumes the following state:
1156 // rdi: constructor (built-in Array function)
1157 // rax: argc
1158 // rsp[0]: return address
1159 // rsp[8]: last argument
1160 // This function is used for both construct and normal calls of Array. The only
1161 // difference between handling a construct call and a normal call is that for a
1162 // construct call the constructor function in rdi needs to be preserved for
1163 // entering the generic code. In both cases argc in rax needs to be preserved.
1164 // Both registers are preserved by this code so no need to differentiate between
1165 // a construct call and a normal call.
1166 static void ArrayNativeCode(MacroAssembler* masm,
1167  Label* call_generic_code) {
1168  Label argc_one_or_more, argc_two_or_more, empty_array, not_empty_array,
1169  has_non_smi_element, finish, cant_transition_map, not_double;
1170 
1171  // Check for array construction with zero arguments.
1172  __ testq(rax, rax);
1173  __ j(not_zero, &argc_one_or_more);
1174 
1175  __ bind(&empty_array);
1176  // Handle construction of an empty array.
1177  AllocateEmptyJSArray(masm,
1178  rdi,
1179  rbx,
1180  rcx,
1181  rdx,
1182  r8,
1183  call_generic_code);
1184  Counters* counters = masm->isolate()->counters();
1185  __ IncrementCounter(counters->array_function_native(), 1);
1186  __ movq(rax, rbx);
1187  __ ret(kPointerSize);
1188 
1189  // Check for one argument. Bail out if argument is not smi or if it is
1190  // negative.
1191  __ bind(&argc_one_or_more);
1192  __ cmpq(rax, Immediate(1));
1193  __ j(not_equal, &argc_two_or_more);
1194  __ movq(rdx, Operand(rsp, kPointerSize)); // Get the argument from the stack.
1195 
1196  __ SmiTest(rdx);
1197  __ j(not_zero, &not_empty_array);
1198  __ pop(r8); // Adjust stack.
1199  __ Drop(1);
1200  __ push(r8);
1201  __ movq(rax, Immediate(0)); // Treat this as a call with argc of zero.
1202  __ jmp(&empty_array);
1203 
1204  __ bind(&not_empty_array);
1205  __ JumpUnlessNonNegativeSmi(rdx, call_generic_code);
1206 
1207  // Handle construction of an empty array of a certain size. Bail out if size
1208  // is to large to actually allocate an elements array.
1210  __ j(greater_equal, call_generic_code);
1211 
1212  // rax: argc
1213  // rdx: array_size (smi)
1214  // rdi: constructor
1215  // esp[0]: return address
1216  // esp[8]: argument
1217  AllocateJSArray(masm,
1218  rdi,
1219  rdx,
1220  rbx,
1221  rcx,
1222  r8,
1223  r9,
1224  true,
1225  call_generic_code);
1226  __ IncrementCounter(counters->array_function_native(), 1);
1227  __ movq(rax, rbx);
1228  __ ret(2 * kPointerSize);
1229 
1230  // Handle construction of an array from a list of arguments.
1231  __ bind(&argc_two_or_more);
1232  __ movq(rdx, rax);
1233  __ Integer32ToSmi(rdx, rdx); // Convet argc to a smi.
1234  // rax: argc
1235  // rdx: array_size (smi)
1236  // rdi: constructor
1237  // esp[0] : return address
1238  // esp[8] : last argument
1239  AllocateJSArray(masm,
1240  rdi,
1241  rdx,
1242  rbx,
1243  rcx,
1244  r8,
1245  r9,
1246  false,
1247  call_generic_code);
1248  __ IncrementCounter(counters->array_function_native(), 1);
1249 
1250  // rax: argc
1251  // rbx: JSArray
1252  // rcx: elements_array
1253  // r8: elements_array_end (untagged)
1254  // esp[0]: return address
1255  // esp[8]: last argument
1256 
1257  // Location of the last argument
1258  __ lea(r9, Operand(rsp, kPointerSize));
1259 
1260  // Location of the first array element (Parameter fill_with_holes to
1261  // AllocateJSArrayis false, so the FixedArray is returned in rcx).
1262  __ lea(rdx, Operand(rcx, FixedArray::kHeaderSize - kHeapObjectTag));
1263 
1264  // rax: argc
1265  // rbx: JSArray
1266  // rdx: location of the first array element
1267  // r9: location of the last argument
1268  // esp[0]: return address
1269  // esp[8]: last argument
1270  Label loop, entry;
1271  __ movq(rcx, rax);
1272  __ jmp(&entry);
1273  __ bind(&loop);
1274  __ movq(r8, Operand(r9, rcx, times_pointer_size, 0));
1275  if (FLAG_smi_only_arrays) {
1276  __ JumpIfNotSmi(r8, &has_non_smi_element);
1277  }
1278  __ movq(Operand(rdx, 0), r8);
1279  __ addq(rdx, Immediate(kPointerSize));
1280  __ bind(&entry);
1281  __ decq(rcx);
1282  __ j(greater_equal, &loop);
1283 
1284  // Remove caller arguments from the stack and return.
1285  // rax: argc
1286  // rbx: JSArray
1287  // esp[0]: return address
1288  // esp[8]: last argument
1289  __ bind(&finish);
1290  __ pop(rcx);
1291  __ lea(rsp, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
1292  __ push(rcx);
1293  __ movq(rax, rbx);
1294  __ ret(0);
1295 
1296  __ bind(&has_non_smi_element);
1297  // Double values are handled by the runtime.
1298  __ CheckMap(r8,
1299  masm->isolate()->factory()->heap_number_map(),
1300  &not_double,
1302  __ bind(&cant_transition_map);
1303  __ UndoAllocationInNewSpace(rbx);
1304  __ jmp(call_generic_code);
1305 
1306  __ bind(&not_double);
1307  // Transition FAST_SMI_ELEMENTS to FAST_ELEMENTS.
1308  // rbx: JSArray
1310  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
1311  FAST_ELEMENTS,
1312  r11,
1314  &cant_transition_map);
1315 
1317  __ RecordWriteField(rbx, HeapObject::kMapOffset, r11, r8,
1319 
1320  // Finish the array initialization loop.
1321  Label loop2;
1322  __ bind(&loop2);
1323  __ movq(r8, Operand(r9, rcx, times_pointer_size, 0));
1324  __ movq(Operand(rdx, 0), r8);
1325  __ addq(rdx, Immediate(kPointerSize));
1326  __ decq(rcx);
1327  __ j(greater_equal, &loop2);
1328  __ jmp(&finish);
1329 }
1330 
1331 
1332 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
1333  // ----------- S t a t e -------------
1334  // -- rax : argc
1335  // -- rsp[0] : return address
1336  // -- rsp[8] : last argument
1337  // -----------------------------------
1338  Label generic_array_code;
1339 
1340  // Get the InternalArray function.
1341  __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi);
1342 
1343  if (FLAG_debug_code) {
1344  // Initial map for the builtin InternalArray functions should be maps.
1346  // Will both indicate a NULL and a Smi.
1347  STATIC_ASSERT(kSmiTag == 0);
1348  Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
1349  __ Check(not_smi, "Unexpected initial map for InternalArray function");
1350  __ CmpObjectType(rbx, MAP_TYPE, rcx);
1351  __ Check(equal, "Unexpected initial map for InternalArray function");
1352  }
1353 
1354  // Run the native code for the InternalArray function called as a normal
1355  // function.
1356  ArrayNativeCode(masm, &generic_array_code);
1357 
1358  // Jump to the generic array code in case the specialized code cannot handle
1359  // the construction.
1360  __ bind(&generic_array_code);
1361  Handle<Code> array_code =
1362  masm->isolate()->builtins()->InternalArrayCodeGeneric();
1363  __ Jump(array_code, RelocInfo::CODE_TARGET);
1364 }
1365 
1366 
1367 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
1368  // ----------- S t a t e -------------
1369  // -- rax : argc
1370  // -- rsp[0] : return address
1371  // -- rsp[8] : last argument
1372  // -----------------------------------
1373  Label generic_array_code;
1374 
1375  // Get the Array function.
1376  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rdi);
1377 
1378  if (FLAG_debug_code) {
1379  // Initial map for the builtin Array functions should be maps.
1381  // Will both indicate a NULL and a Smi.
1382  STATIC_ASSERT(kSmiTag == 0);
1383  Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
1384  __ Check(not_smi, "Unexpected initial map for Array function");
1385  __ CmpObjectType(rbx, MAP_TYPE, rcx);
1386  __ Check(equal, "Unexpected initial map for Array function");
1387  }
1388 
1389  // Run the native code for the Array function called as a normal function.
1390  ArrayNativeCode(masm, &generic_array_code);
1391 
1392  // Jump to the generic array code in case the specialized code cannot handle
1393  // the construction.
1394  __ bind(&generic_array_code);
1395  Handle<Code> array_code =
1396  masm->isolate()->builtins()->ArrayCodeGeneric();
1397  __ Jump(array_code, RelocInfo::CODE_TARGET);
1398 }
1399 
1400 
1401 void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
1402  // ----------- S t a t e -------------
1403  // -- rax : argc
1404  // -- rdi : constructor
1405  // -- rsp[0] : return address
1406  // -- rsp[8] : last argument
1407  // -----------------------------------
1408  Label generic_constructor;
1409 
1410  if (FLAG_debug_code) {
1411  // The array construct code is only set for the builtin and internal
1412  // Array functions which always have a map.
1413  // Initial map for the builtin Array function should be a map.
1415  // Will both indicate a NULL and a Smi.
1416  STATIC_ASSERT(kSmiTag == 0);
1417  Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
1418  __ Check(not_smi, "Unexpected initial map for Array function");
1419  __ CmpObjectType(rbx, MAP_TYPE, rcx);
1420  __ Check(equal, "Unexpected initial map for Array function");
1421  }
1422 
1423  // Run the native code for the Array function called as constructor.
1424  ArrayNativeCode(masm, &generic_constructor);
1425 
1426  // Jump to the generic construct code in case the specialized code cannot
1427  // handle the construction.
1428  __ bind(&generic_constructor);
1429  Handle<Code> generic_construct_stub =
1430  masm->isolate()->builtins()->JSConstructStubGeneric();
1431  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
1432 }
1433 
1434 
1435 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
1436  // ----------- S t a t e -------------
1437  // -- rax : number of arguments
1438  // -- rdi : constructor function
1439  // -- rsp[0] : return address
1440  // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1441  // -- rsp[(argc + 1) * 8] : receiver
1442  // -----------------------------------
1443  Counters* counters = masm->isolate()->counters();
1444  __ IncrementCounter(counters->string_ctor_calls(), 1);
1445 
1446  if (FLAG_debug_code) {
1447  __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, rcx);
1448  __ cmpq(rdi, rcx);
1449  __ Assert(equal, "Unexpected String function");
1450  }
1451 
1452  // Load the first argument into rax and get rid of the rest
1453  // (including the receiver).
1454  Label no_arguments;
1455  __ testq(rax, rax);
1456  __ j(zero, &no_arguments);
1457  __ movq(rbx, Operand(rsp, rax, times_pointer_size, 0));
1458  __ pop(rcx);
1459  __ lea(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
1460  __ push(rcx);
1461  __ movq(rax, rbx);
1462 
1463  // Lookup the argument in the number to string cache.
1464  Label not_cached, argument_is_string;
1466  masm,
1467  rax, // Input.
1468  rbx, // Result.
1469  rcx, // Scratch 1.
1470  rdx, // Scratch 2.
1471  false, // Input is known to be smi?
1472  &not_cached);
1473  __ IncrementCounter(counters->string_ctor_cached_number(), 1);
1474  __ bind(&argument_is_string);
1475 
1476  // ----------- S t a t e -------------
1477  // -- rbx : argument converted to string
1478  // -- rdi : constructor function
1479  // -- rsp[0] : return address
1480  // -----------------------------------
1481 
1482  // Allocate a JSValue and put the tagged pointer into rax.
1483  Label gc_required;
1484  __ AllocateInNewSpace(JSValue::kSize,
1485  rax, // Result.
1486  rcx, // New allocation top (we ignore it).
1487  no_reg,
1488  &gc_required,
1489  TAG_OBJECT);
1490 
1491  // Set the map.
1492  __ LoadGlobalFunctionInitialMap(rdi, rcx);
1493  if (FLAG_debug_code) {
1495  Immediate(JSValue::kSize >> kPointerSizeLog2));
1496  __ Assert(equal, "Unexpected string wrapper instance size");
1497  __ cmpb(FieldOperand(rcx, Map::kUnusedPropertyFieldsOffset), Immediate(0));
1498  __ Assert(equal, "Unexpected unused properties of string wrapper");
1499  }
1501 
1502  // Set properties and elements.
1503  __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
1506 
1507  // Set the value.
1509 
1510  // Ensure the object is fully initialized.
1511  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
1512 
1513  // We're done. Return.
1514  __ ret(0);
1515 
1516  // The argument was not found in the number to string cache. Check
1517  // if it's a string already before calling the conversion builtin.
1518  Label convert_argument;
1519  __ bind(&not_cached);
1520  STATIC_ASSERT(kSmiTag == 0);
1521  __ JumpIfSmi(rax, &convert_argument);
1522  Condition is_string = masm->IsObjectStringType(rax, rbx, rcx);
1523  __ j(NegateCondition(is_string), &convert_argument);
1524  __ movq(rbx, rax);
1525  __ IncrementCounter(counters->string_ctor_string_value(), 1);
1526  __ jmp(&argument_is_string);
1527 
1528  // Invoke the conversion builtin and put the result into rbx.
1529  __ bind(&convert_argument);
1530  __ IncrementCounter(counters->string_ctor_conversions(), 1);
1531  {
1532  FrameScope scope(masm, StackFrame::INTERNAL);
1533  __ push(rdi); // Preserve the function.
1534  __ push(rax);
1535  __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
1536  __ pop(rdi);
1537  }
1538  __ movq(rbx, rax);
1539  __ jmp(&argument_is_string);
1540 
1541  // Load the empty string into rbx, remove the receiver from the
1542  // stack, and jump back to the case where the argument is a string.
1543  __ bind(&no_arguments);
1544  __ LoadRoot(rbx, Heap::kEmptyStringRootIndex);
1545  __ pop(rcx);
1546  __ lea(rsp, Operand(rsp, kPointerSize));
1547  __ push(rcx);
1548  __ jmp(&argument_is_string);
1549 
1550  // At this point the argument is already a string. Call runtime to
1551  // create a string wrapper.
1552  __ bind(&gc_required);
1553  __ IncrementCounter(counters->string_ctor_gc_required(), 1);
1554  {
1555  FrameScope scope(masm, StackFrame::INTERNAL);
1556  __ push(rbx);
1557  __ CallRuntime(Runtime::kNewStringWrapper, 1);
1558  }
1559  __ ret(0);
1560 }
1561 
1562 
1563 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1564  __ push(rbp);
1565  __ movq(rbp, rsp);
1566 
1567  // Store the arguments adaptor context sentinel.
1569 
1570  // Push the function on the stack.
1571  __ push(rdi);
1572 
1573  // Preserve the number of arguments on the stack. Must preserve rax,
1574  // rbx and rcx because these registers are used when copying the
1575  // arguments and the receiver.
1576  __ Integer32ToSmi(r8, rax);
1577  __ push(r8);
1578 }
1579 
1580 
1581 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1582  // Retrieve the number of arguments from the stack. Number is a Smi.
1584 
1585  // Leave the frame.
1586  __ movq(rsp, rbp);
1587  __ pop(rbp);
1588 
1589  // Remove caller arguments from the stack.
1590  __ pop(rcx);
1591  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
1592  __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
1593  __ push(rcx);
1594 }
1595 
1596 
1597 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1598  // ----------- S t a t e -------------
1599  // -- rax : actual number of arguments
1600  // -- rbx : expected number of arguments
1601  // -- rcx : call kind information
1602  // -- rdx : code entry to call
1603  // -----------------------------------
1604 
1605  Label invoke, dont_adapt_arguments;
1606  Counters* counters = masm->isolate()->counters();
1607  __ IncrementCounter(counters->arguments_adaptors(), 1);
1608 
1609  Label enough, too_few;
1610  __ cmpq(rax, rbx);
1611  __ j(less, &too_few);
1613  __ j(equal, &dont_adapt_arguments);
1614 
1615  { // Enough parameters: Actual >= expected.
1616  __ bind(&enough);
1617  EnterArgumentsAdaptorFrame(masm);
1618 
1619  // Copy receiver and all expected arguments.
1620  const int offset = StandardFrameConstants::kCallerSPOffset;
1621  __ lea(rax, Operand(rbp, rax, times_pointer_size, offset));
1622  __ Set(r8, -1); // account for receiver
1623 
1624  Label copy;
1625  __ bind(&copy);
1626  __ incq(r8);
1627  __ push(Operand(rax, 0));
1628  __ subq(rax, Immediate(kPointerSize));
1629  __ cmpq(r8, rbx);
1630  __ j(less, &copy);
1631  __ jmp(&invoke);
1632  }
1633 
1634  { // Too few parameters: Actual < expected.
1635  __ bind(&too_few);
1636  EnterArgumentsAdaptorFrame(masm);
1637 
1638  // Copy receiver and all actual arguments.
1639  const int offset = StandardFrameConstants::kCallerSPOffset;
1640  __ lea(rdi, Operand(rbp, rax, times_pointer_size, offset));
1641  __ Set(r8, -1); // account for receiver
1642 
1643  Label copy;
1644  __ bind(&copy);
1645  __ incq(r8);
1646  __ push(Operand(rdi, 0));
1647  __ subq(rdi, Immediate(kPointerSize));
1648  __ cmpq(r8, rax);
1649  __ j(less, &copy);
1650 
1651  // Fill remaining expected arguments with undefined values.
1652  Label fill;
1653  __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
1654  __ bind(&fill);
1655  __ incq(r8);
1656  __ push(kScratchRegister);
1657  __ cmpq(r8, rbx);
1658  __ j(less, &fill);
1659 
1660  // Restore function pointer.
1662  }
1663 
1664  // Call the entry point.
1665  __ bind(&invoke);
1666  __ call(rdx);
1667 
1668  // Store offset of return address for deoptimizer.
1669  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
1670 
1671  // Leave frame and return.
1672  LeaveArgumentsAdaptorFrame(masm);
1673  __ ret(0);
1674 
1675  // -------------------------------------------
1676  // Dont adapt arguments.
1677  // -------------------------------------------
1678  __ bind(&dont_adapt_arguments);
1679  __ jmp(rdx);
1680 }
1681 
1682 
1683 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1684  // Get the loop depth of the stack guard check. This is recorded in
1685  // a test(rax, depth) instruction right after the call.
1686  Label stack_check;
1687  __ movq(rbx, Operand(rsp, 0)); // return address
1688  __ movzxbq(rbx, Operand(rbx, 1)); // depth
1689 
1690  // Get the loop nesting level at which we allow OSR from the
1691  // unoptimized code and check if we want to do OSR yet. If not we
1692  // should perform a stack guard check so we can get interrupts while
1693  // waiting for on-stack replacement.
1698  __ j(greater, &stack_check);
1699 
1700  // Pass the function to optimize as the argument to the on-stack
1701  // replacement runtime function.
1702  {
1703  FrameScope scope(masm, StackFrame::INTERNAL);
1704  __ push(rax);
1705  __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
1706  }
1707 
1708  // If the result was -1 it means that we couldn't optimize the
1709  // function. Just return and continue in the unoptimized version.
1710  Label skip;
1711  __ SmiCompare(rax, Smi::FromInt(-1));
1712  __ j(not_equal, &skip, Label::kNear);
1713  __ ret(0);
1714 
1715  // If we decide not to perform on-stack replacement we perform a
1716  // stack guard check to enable interrupts.
1717  __ bind(&stack_check);
1718  Label ok;
1719  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
1720  __ j(above_equal, &ok, Label::kNear);
1721 
1722  StackCheckStub stub;
1723  __ TailCallStub(&stub);
1724  if (FLAG_debug_code) {
1725  __ Abort("Unreachable code: returned from tail call.");
1726  }
1727  __ bind(&ok);
1728  __ ret(0);
1729 
1730  __ bind(&skip);
1731  // Untag the AST id and push it on the stack.
1732  __ SmiToInteger32(rax, rax);
1733  __ push(rax);
1734 
1735  // Generate the code for doing the frame-to-frame translation using
1736  // the deoptimizer infrastructure.
1737  Deoptimizer::EntryGenerator generator(masm, Deoptimizer::OSR);
1738  generator.Generate();
1739 }
1740 
1741 
1742 #undef __
1743 
1744 } } // namespace v8::internal
1745 
1746 #endif // V8_TARGET_ARCH_X64
const Register rdx
static const int kCodeOffset
Definition: objects.h:5606
static const int kCodeEntryOffset
Definition: objects.h:5981
static const int kPrototypeOrInitialMapOffset
Definition: objects.h:5982
const Register r11
static const int kAllowOSRAtLoopNestingLevelOffset
Definition: objects.h:4538
static Smi * FromInt(int value)
Definition: objects-inl.h:973
const Register rbp
static const int kGlobalReceiverOffset
Definition: objects.h:6085
static const int kConstructionCountOffset
Definition: objects.h:5697
const Register rsi
static const int kNativeByteOffset
Definition: objects.h:5778
static const int kStrictModeBitWithinByte
Definition: objects.h:5764
#define ASSERT(condition)
Definition: checks.h:270
const int kPointerSizeLog2
Definition: globals.h:246
static const int kInstanceSizeOffset
Definition: objects.h:4981
static const int kUnusedPropertyFieldsOffset
Definition: objects.h:4993
static const int kGlobalContextOffset
Definition: objects.h:6084
static const int kContextOffset
Definition: objects.h:5986
static const int kSize
Definition: objects.h:8112
STATIC_ASSERT((FixedDoubleArray::kHeaderSize &kDoubleAlignmentMask)==0)
BuiltinExtraArguments
Definition: builtins.h:35
static const int kDontAdaptArgumentsSentinel
Definition: objects.h:5601
const Register r9
const int kPointerSize
Definition: globals.h:234
Operand FieldOperand(Register object, int offset)
const int kHeapObjectTag
Definition: v8.h:3848
const Register rbx
const Register rsp
#define __
static const int kPropertiesOffset
Definition: objects.h:2113
const Register rax
static const int kInObjectPropertiesOffset
Definition: objects.h:4983
const Register rdi
static const int kElementsOffset
Definition: objects.h:2114
static const int kNativeBitWithinByte
Definition: objects.h:5770
static const int kLengthOffset
Definition: objects.h:8111
static int SizeFor(int length)
Definition: objects.h:2288
static const int kHeaderSize
Definition: objects.h:2233
static const int kSize
Definition: objects.h:6189
static const int kMapOffset
Definition: objects.h:1219
static const int kLengthOffset
Definition: objects.h:2232
const Register kScratchRegister
static const int kFormalParameterCountOffset
Definition: objects.h:5662
static const int kStrictModeByteOffset
Definition: objects.h:5774
static const int kHeaderSize
Definition: objects.h:4513
const Register r8
const Register rcx
Condition NegateCondition(Condition cond)
const int kSmiTag
Definition: v8.h:3853
static const int kArgvOffset
Definition: frames-ia32.h:82
static const int kHeaderSize
Definition: objects.h:2115
static void GenerateLookupNumberStringCache(MacroAssembler *masm, Register object, Register result, Register scratch1, Register scratch2, Register scratch3, bool object_is_smi, Label *not_found)
static const int kPreallocatedArrayElements
Definition: objects.h:8108
const Register no_reg
static const int kValueOffset
Definition: objects.h:6188
static const int kSharedFunctionInfoOffset
Definition: objects.h:5984
static const int kInitialMaxFastElementArray
Definition: objects.h:2103
FlagType type() const
Definition: flags.cc:1358
static const int kPreAllocatedPropertyFieldsOffset
Definition: objects.h:4986