v8 3.14.5 (node 0.10.28)
V8 is Google's open source JavaScript engine
builtins-x64.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if defined(V8_TARGET_ARCH_X64)
31 
32 #include "codegen.h"
33 #include "deoptimizer.h"
34 #include "full-codegen.h"
35 
36 namespace v8 {
37 namespace internal {
38 
39 
40 #define __ ACCESS_MASM(masm)
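// The `__` shorthand is the usual V8 idiom in hand-written code generators:
// `__ push(rdi)` expands to `masm->push(rdi)` (ACCESS_MASM can add extra
// bookkeeping in debug builds), so each `__` line emits one instruction into
// the builtin's code object.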
41 
42 
43 void Builtins::Generate_Adaptor(MacroAssembler* masm,
44  CFunctionId id,
45  BuiltinExtraArguments extra_args) {
46  // ----------- S t a t e -------------
47  // -- rax : number of arguments excluding receiver
48  // -- rdi : called function (only guaranteed when
49  // extra_args requires it)
50  // -- rsi : context
51  // -- rsp[0] : return address
52  // -- rsp[8] : last argument
53  // -- ...
54  // -- rsp[8 * argc] : first argument (argc == rax)
55  // -- rsp[8 * (argc + 1)] : receiver
56  // -----------------------------------
57 
58  // Insert extra arguments.
59  int num_extra_args = 0;
60  if (extra_args == NEEDS_CALLED_FUNCTION) {
61  num_extra_args = 1;
62  __ pop(kScratchRegister); // Save return address.
63  __ push(rdi);
64  __ push(kScratchRegister); // Restore return address.
65  } else {
66  ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
67  }
68 
69  // JumpToExternalReference expects rax to contain the number of arguments
70  // including the receiver and the extra arguments.
71  __ addq(rax, Immediate(num_extra_args + 1));
72  __ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1);
73 }
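// Generate_Adaptor thus simply forwards into C++: it optionally appends the
// called function as an extra trailing argument, fixes rax up to the
// receiver-inclusive argument count, and tail-calls the external reference
// for the builtin identified by `id`.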
74 
75 
76 static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
77  __ movq(kScratchRegister,
78  FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
79  __ movq(kScratchRegister,
80  FieldOperand(kScratchRegister, SharedFunctionInfo::kCodeOffset));
81  __ lea(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize));
82  __ jmp(kScratchRegister);
83 }
84 
85 
86 void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
87  GenerateTailCallToSharedCode(masm);
88 }
89 
90 
91 void Builtins::Generate_ParallelRecompile(MacroAssembler* masm) {
92  {
93  FrameScope scope(masm, StackFrame::INTERNAL);
94 
95  // Push a copy of the function onto the stack.
96  __ push(rdi);
97  // Push call kind information.
98  __ push(rcx);
99 
100  __ push(rdi); // Function is also the parameter to the runtime call.
101  __ CallRuntime(Runtime::kParallelRecompile, 1);
102 
103  // Restore call kind information.
104  __ pop(rcx);
105  // Restore receiver.
106  __ pop(rdi);
107 
108  // Tear down internal frame.
109  }
110 
111  GenerateTailCallToSharedCode(masm);
112 }
113 
114 
115 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
116  bool is_api_function,
117  bool count_constructions) {
118  // ----------- S t a t e -------------
119  // -- rax: number of arguments
120  // -- rdi: constructor function
121  // -----------------------------------
122 
123  // Should never count constructions for api objects.
124  ASSERT(!is_api_function || !count_constructions);
125 
126  // Enter a construct frame.
127  {
128  FrameScope scope(masm, StackFrame::CONSTRUCT);
129 
130  // Store a smi-tagged arguments count on the stack.
131  __ Integer32ToSmi(rax, rax);
132  __ push(rax);
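// (On x64, smi-tagging a 32-bit integer is a plain shift: the payload lives
// in the upper half of the 64-bit word and the low bits carry tag 0, which
// is why Integer32ToSmi/SmiToInteger32 show up as matched pairs around
// values that travel via the stack.)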
133 
134  // Push the function to invoke on the stack.
135  __ push(rdi);
136 
137  // Try to allocate the object without transitioning into C code. If any of
138  // the preconditions is not met, the code bails out to the runtime call.
139  Label rt_call, allocated;
140  if (FLAG_inline_new) {
141  Label undo_allocation;
142 
143 #ifdef ENABLE_DEBUGGER_SUPPORT
144  ExternalReference debug_step_in_fp =
145  ExternalReference::debug_step_in_fp_address(masm->isolate());
146  __ movq(kScratchRegister, debug_step_in_fp);
147  __ cmpq(Operand(kScratchRegister, 0), Immediate(0));
148  __ j(not_equal, &rt_call);
149 #endif
150 
151  // Verified that the constructor is a JSFunction.
152  // Load the initial map and verify that it is in fact a map.
153  // rdi: constructor
154  __ movq(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
155  // Will both indicate a NULL and a Smi
156  ASSERT(kSmiTag == 0);
157  __ JumpIfSmi(rax, &rt_call);
158  // rdi: constructor
159  // rax: initial map (if proven valid below)
160  __ CmpObjectType(rax, MAP_TYPE, rbx);
161  __ j(not_equal, &rt_call);
162 
163  // Check that the constructor is not constructing a JSFunction (see
164  // comments in Runtime_NewObject in runtime.cc). In which case the
165  // initial map's instance type would be JS_FUNCTION_TYPE.
166  // rdi: constructor
167  // rax: initial map
168  __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
169  __ j(equal, &rt_call);
170 
171  if (count_constructions) {
172  Label allocate;
173  // Decrease generous allocation count.
174  __ movq(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
175  __ decb(FieldOperand(rcx,
176  SharedFunctionInfo::kConstructionCountOffset));
177  __ j(not_zero, &allocate);
178 
179  __ push(rax);
180  __ push(rdi);
181 
182  __ push(rdi); // constructor
183  // The call will replace the stub, so the countdown is only done once.
184  __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
185 
186  __ pop(rdi);
187  __ pop(rax);
188 
189  __ bind(&allocate);
190  }
191 
192  // Now allocate the JSObject on the heap.
193  __ movzxbq(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
194  __ shl(rdi, Immediate(kPointerSizeLog2));
195  // rdi: size of new object
196  __ AllocateInNewSpace(rdi,
197  rbx,
198  rdi,
199  no_reg,
200  &rt_call,
201  NO_ALLOCATION_FLAGS);
202  // Allocated the JSObject, now initialize the fields.
203  // rax: initial map
204  // rbx: JSObject (not HeapObject tagged - the actual address).
205  // rdi: start of next object
206  __ movq(Operand(rbx, JSObject::kMapOffset), rax);
207  __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
208  __ movq(Operand(rbx, JSObject::kPropertiesOffset), rcx);
209  __ movq(Operand(rbx, JSObject::kElementsOffset), rcx);
210  // Set extra fields in the newly allocated object.
211  // rax: initial map
212  // rbx: JSObject
213  // rdi: start of next object
214  __ lea(rcx, Operand(rbx, JSObject::kHeaderSize));
215  __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
216  if (count_constructions) {
217  __ movzxbq(rsi,
218  FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
219  __ lea(rsi,
220  Operand(rbx, rsi, times_pointer_size, JSObject::kHeaderSize));
221  // rsi: offset of first field after pre-allocated fields
222  if (FLAG_debug_code) {
223  __ cmpq(rsi, rdi);
224  __ Assert(less_equal,
225  "Unexpected number of pre-allocated property fields.");
226  }
227  __ InitializeFieldsWithFiller(rcx, rsi, rdx);
228  __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
229  }
230  __ InitializeFieldsWithFiller(rcx, rdi, rdx);
231 
232  // Add the object tag to make the JSObject real, so that we can continue
233  // and jump into the continuation code at any time from now on. Any
234  // failures need to undo the allocation, so that the heap is in a
235  // consistent state and verifiable.
236  // rax: initial map
237  // rbx: JSObject
238  // rdi: start of next object
239  __ or_(rbx, Immediate(kHeapObjectTag));
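// (kHeapObjectTag is 1: tagged heap pointers carry their low bit set, so
// or-ing the tag into the raw allocation address turns it into a valid
// tagged JSObject reference.)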
240 
241  // Check if a non-empty properties array is needed.
242  // Allocate and initialize a FixedArray if it is.
243  // rax: initial map
244  // rbx: JSObject
245  // rdi: start of next object
246  // Calculate the total number of properties described by the map.
247  __ movzxbq(rdx, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset));
248  __ movzxbq(rcx,
249  FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
250  __ addq(rdx, rcx);
251  // Calculate unused properties past the end of the in-object properties.
252  __ movzxbq(rcx, FieldOperand(rax, Map::kInObjectPropertiesOffset));
253  __ subq(rdx, rcx);
254  // Done if no extra properties are to be allocated.
255  __ j(zero, &allocated);
256  __ Assert(positive, "Property allocation count failed.");
257 
258  // Scale the number of elements by pointer size and add the header for
259  // FixedArrays to the start of the next object calculation from above.
260  // rbx: JSObject
261  // rdi: start of next object (will be start of FixedArray)
262  // rdx: number of elements in properties array
263  __ AllocateInNewSpace(FixedArray::kHeaderSize,
264  times_pointer_size,
265  rdx,
266  rdi,
267  rax,
268  no_reg,
269  &undo_allocation,
270  RESULT_CONTAINS_TOP);
271 
272  // Initialize the FixedArray.
273  // rbx: JSObject
274  // rdi: FixedArray
275  // rdx: number of elements
276  // rax: start of next object
277  __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
278  __ movq(Operand(rdi, HeapObject::kMapOffset), rcx); // setup the map
279  __ Integer32ToSmi(rdx, rdx);
280  __ movq(Operand(rdi, FixedArray::kLengthOffset), rdx); // and length
281 
282  // Initialize the fields to undefined.
283  // rbx: JSObject
284  // rdi: FixedArray
285  // rax: start of next object
286  // rdx: number of elements
287  { Label loop, entry;
288  __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
289  __ lea(rcx, Operand(rdi, FixedArray::kHeaderSize));
290  __ jmp(&entry);
291  __ bind(&loop);
292  __ movq(Operand(rcx, 0), rdx);
293  __ addq(rcx, Immediate(kPointerSize));
294  __ bind(&entry);
295  __ cmpq(rcx, rax);
296  __ j(below, &loop);
297  }
298 
299  // Store the initialized FixedArray into the properties field of
300  // the JSObject
301  // rbx: JSObject
302  // rdi: FixedArray
303  __ or_(rdi, Immediate(kHeapObjectTag)); // add the heap tag
304  __ movq(FieldOperand(rbx, JSObject::kPropertiesOffset), rdi);
305 
306 
307  // Continue with JSObject being successfully allocated
308  // rbx: JSObject
309  __ jmp(&allocated);
310 
311  // Undo the setting of the new top so that the heap is verifiable. For
312  // example, the map's unused properties potentially do not match the
313  // allocated objects unused properties.
314  // rbx: JSObject (previous new top)
315  __ bind(&undo_allocation);
316  __ UndoAllocationInNewSpace(rbx);
317  }
318 
319  // Allocate the new receiver object using the runtime call.
320  // rdi: function (constructor)
321  __ bind(&rt_call);
322  // Must restore rdi (constructor) before calling runtime.
323  __ movq(rdi, Operand(rsp, 0));
324  __ push(rdi);
325  __ CallRuntime(Runtime::kNewObject, 1);
326  __ movq(rbx, rax); // store result in rbx
327 
328  // New object allocated.
329  // rbx: newly allocated object
330  __ bind(&allocated);
331  // Retrieve the function from the stack.
332  __ pop(rdi);
333 
334  // Retrieve smi-tagged arguments count from the stack.
335  __ movq(rax, Operand(rsp, 0));
336  __ SmiToInteger32(rax, rax);
337 
338  // Push the allocated receiver to the stack. We need two copies
339  // because we may have to return the original one and the calling
340  // conventions dictate that the called function pops the receiver.
341  __ push(rbx);
342  __ push(rbx);
343 
344  // Set up pointer to last argument.
345  __ lea(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));
346 
347  // Copy arguments and receiver to the expression stack.
348  Label loop, entry;
349  __ movq(rcx, rax);
350  __ jmp(&entry);
351  __ bind(&loop);
352  __ push(Operand(rbx, rcx, times_pointer_size, 0));
353  __ bind(&entry);
354  __ decq(rcx);
355  __ j(greater_equal, &loop);
356 
357  // Call the function.
358  if (is_api_function) {
359  __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
360  Handle<Code> code =
361  masm->isolate()->builtins()->HandleApiCallConstruct();
362  ParameterCount expected(0);
363  __ InvokeCode(code, expected, expected, RelocInfo::CODE_TARGET,
364  CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
365  } else {
366  ParameterCount actual(rax);
367  __ InvokeFunction(rdi, actual, CALL_FUNCTION,
368  NullCallWrapper(), CALL_AS_METHOD);
369  }
370 
371  // Store offset of return address for deoptimizer.
372  if (!is_api_function && !count_constructions) {
373  masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
374  }
375 
376  // Restore context from the frame.
377  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
378 
379  // If the result is an object (in the ECMA sense), we should get rid
380  // of the receiver and use the result; see ECMA-262 section 13.2.2-7
381  // on page 74.
382  Label use_receiver, exit;
383  // If the result is a smi, it is *not* an object in the ECMA sense.
384  __ JumpIfSmi(rax, &use_receiver);
385 
386  // If the type of the result (stored in its map) is less than
387  // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
388  STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
389  __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
390  __ j(above_equal, &exit);
391 
392  // Throw away the result of the constructor invocation and use the
393  // on-stack receiver as the result.
394  __ bind(&use_receiver);
395  __ movq(rax, Operand(rsp, 0));
396 
397  // Restore the arguments count and leave the construct frame.
398  __ bind(&exit);
399  __ movq(rbx, Operand(rsp, kPointerSize)); // Get arguments count.
400 
401  // Leave construct frame.
402  }
403 
404  // Remove caller arguments from the stack and return.
405  __ pop(rcx);
406  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
407  __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
408  __ push(rcx);
409  Counters* counters = masm->isolate()->counters();
410  __ IncrementCounter(counters->constructed_objects(), 1);
411  __ ret(0);
412 }
413 
414 
415 void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
416  Generate_JSConstructStubHelper(masm, false, true);
417 }
418 
419 
420 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
421  Generate_JSConstructStubHelper(masm, false, false);
422 }
423 
424 
425 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
426  Generate_JSConstructStubHelper(masm, true, false);
427 }
428 
429 
430 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
431  bool is_construct) {
432  // Expects five C++ function parameters.
433  // - Address entry (ignored)
434  // - JSFunction* function
435  // - Object* receiver
436  // - int argc
437  // - Object*** argv
438  // (see Handle::Invoke in execution.cc).
439 
440  // Open a C++ scope for the FrameScope.
441  {
442  // Platform specific argument handling. After this, the stack contains
443  // an internal frame and the pushed function and receiver, and
444  // register rax and rbx holds the argument count and argument array,
445  // while rdi holds the function pointer and rsi the context.
446 
447 #ifdef _WIN64
448  // MSVC parameters in:
449  // rcx : entry (ignored)
450  // rdx : function
451  // r8 : receiver
452  // r9 : argc
453  // [rsp+0x20] : argv
454 
455  // Clear the context before we push it when entering the internal frame.
456  __ Set(rsi, 0);
457  // Enter an internal frame.
458  FrameScope scope(masm, StackFrame::INTERNAL);
459 
460  // Load the function context into rsi.
461  __ movq(rsi, FieldOperand(rdx, JSFunction::kContextOffset));
462 
463  // Push the function and the receiver onto the stack.
464  __ push(rdx);
465  __ push(r8);
466 
467  // Load the number of arguments and setup pointer to the arguments.
468  __ movq(rax, r9);
469  // Load the previous frame pointer to access C argument on stack
470  __ movq(kScratchRegister, Operand(rbp, 0));
471  __ movq(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
472  // Load the function pointer into rdi.
473  __ movq(rdi, rdx);
474 #else // _WIN64
475  // GCC parameters in:
476  // rdi : entry (ignored)
477  // rsi : function
478  // rdx : receiver
479  // rcx : argc
480  // r8 : argv
481 
482  __ movq(rdi, rsi);
483  // rdi : function
484 
485  // Clear the context before we push it when entering the internal frame.
486  __ Set(rsi, 0);
487  // Enter an internal frame.
488  FrameScope scope(masm, StackFrame::INTERNAL);
489 
490  // Push the function and receiver and setup the context.
491  __ push(rdi);
492  __ push(rdx);
493  __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
494 
495  // Load the number of arguments and setup pointer to the arguments.
496  __ movq(rax, rcx);
497  __ movq(rbx, r8);
498 #endif // _WIN64
499 
500  // Current stack contents:
501  // [rsp + 2 * kPointerSize ... ]: Internal frame
502  // [rsp + kPointerSize] : function
503  // [rsp] : receiver
504  // Current register contents:
505  // rax : argc
506  // rbx : argv
507  // rsi : context
508  // rdi : function
509 
510  // Copy arguments to the stack in a loop.
511  // Register rbx points to array of pointers to handle locations.
512  // Push the values of these handles.
513  Label loop, entry;
514  __ Set(rcx, 0); // Set loop variable to 0.
515  __ jmp(&entry);
516  __ bind(&loop);
517  __ movq(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
518  __ push(Operand(kScratchRegister, 0)); // dereference handle
519  __ addq(rcx, Immediate(1));
520  __ bind(&entry);
521  __ cmpq(rcx, rax);
522  __ j(not_equal, &loop);
523 
524  // Invoke the code.
525  if (is_construct) {
526  // Expects rdi to hold function pointer.
527  CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
528  __ CallStub(&stub);
529  } else {
530  ParameterCount actual(rax);
531  // Function must be in rdi.
532  __ InvokeFunction(rdi, actual, CALL_FUNCTION,
533  NullCallWrapper(), CALL_AS_METHOD);
534  }
535  // Exit the internal frame. Notice that this also removes the empty
536  // context and the function left on the stack by the code
537  // invocation.
538  }
539 
540  // TODO(X64): Is argument correct? Is there a receiver to remove?
541  __ ret(1 * kPointerSize); // Remove receiver.
542 }
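// The two trampolines below are the first generated-code frames entered from
// C++ (via the JS entry stub). The helper above translates the platform C
// calling convention (MSVC or System V) into V8's register-based JS
// convention before invoking the target function.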
543 
544 
545 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
546  Generate_JSEntryTrampolineHelper(masm, false);
547 }
548 
549 
550 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
551  Generate_JSEntryTrampolineHelper(masm, true);
552 }
553 
554 
555 void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
556  // Enter an internal frame.
557  {
558  FrameScope scope(masm, StackFrame::INTERNAL);
559 
560  // Push a copy of the function onto the stack.
561  __ push(rdi);
562  // Push call kind information.
563  __ push(rcx);
564 
565  __ push(rdi); // Function is also the parameter to the runtime call.
566  __ CallRuntime(Runtime::kLazyCompile, 1);
567 
568  // Restore call kind information.
569  __ pop(rcx);
570  // Restore receiver.
571  __ pop(rdi);
572 
573  // Tear down internal frame.
574  }
575 
576  // Do a tail-call of the compiled function.
577  __ lea(rax, FieldOperand(rax, Code::kHeaderSize));
578  __ jmp(rax);
579 }
580 
581 
582 void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
583  // Enter an internal frame.
584  {
585  FrameScope scope(masm, StackFrame::INTERNAL);
586 
587  // Push a copy of the function onto the stack.
588  __ push(rdi);
589  // Push call kind information.
590  __ push(rcx);
591 
592  __ push(rdi); // Function is also the parameter to the runtime call.
593  __ CallRuntime(Runtime::kLazyRecompile, 1);
594 
595  // Restore call kind information.
596  __ pop(rcx);
597  // Restore function.
598  __ pop(rdi);
599 
600  // Tear down internal frame.
601  }
602 
603  // Do a tail-call of the compiled function.
604  __ lea(rax, FieldOperand(rax, Code::kHeaderSize));
605  __ jmp(rax);
606 }
607 
608 
609 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
610  Deoptimizer::BailoutType type) {
611  // Enter an internal frame.
612  {
613  FrameScope scope(masm, StackFrame::INTERNAL);
614 
615  // Pass the deoptimization type to the runtime system.
616  __ Push(Smi::FromInt(static_cast<int>(type)));
617 
618  __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
619  // Tear down internal frame.
620  }
621 
622  // Get the full codegen state from the stack and untag it.
623  __ SmiToInteger32(rcx, Operand(rsp, 1 * kPointerSize));
624 
625  // Switch on the state.
626  Label not_no_registers, not_tos_rax;
627  __ cmpq(rcx, Immediate(FullCodeGenerator::NO_REGISTERS));
628  __ j(not_equal, &not_no_registers, Label::kNear);
629  __ ret(1 * kPointerSize); // Remove state.
630 
631  __ bind(&not_no_registers);
632  __ movq(rax, Operand(rsp, 2 * kPointerSize));
633  __ cmpq(rcx, Immediate(FullCodeGenerator::TOS_REG));
634  __ j(not_equal, &not_tos_rax, Label::kNear);
635  __ ret(2 * kPointerSize); // Remove state, rax.
636 
637  __ bind(&not_tos_rax);
638  __ Abort("no cases left");
639 }
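// The two entry points below differ only in the bailout type passed to the
// runtime: EAGER deopts arrive straight from optimized code, LAZY ones via a
// patched return site. Either way the helper above pops the full-codegen
// state marker (plus, in the TOS_REG case, the saved rax) before resuming
// unoptimized code.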
640 
641 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
642  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
643 }
644 
645 
646 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
647  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
648 }
649 
650 
651 void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
652  // For now, we are relying on the fact that Runtime::NotifyOSR
653  // doesn't do any garbage collection which allows us to save/restore
654  // the registers without worrying about which of them contain
655  // pointers. This seems a bit fragile.
656  __ Pushad();
657  {
658  FrameScope scope(masm, StackFrame::INTERNAL);
659  __ CallRuntime(Runtime::kNotifyOSR, 0);
660  }
661  __ Popad();
662  __ ret(0);
663 }
664 
665 
666 void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
667  // Stack Layout:
668  // rsp[0]: Return address
669  // rsp[1]: Argument n
670  // rsp[2]: Argument n-1
671  // ...
672  // rsp[n]: Argument 1
673  // rsp[n+1]: Receiver (function to call)
674  //
675  // rax contains the number of arguments, n, not counting the receiver.
676  //
677  // 1. Make sure we have at least one argument.
678  { Label done;
679  __ testq(rax, rax);
680  __ j(not_zero, &done);
681  __ pop(rbx);
682  __ Push(masm->isolate()->factory()->undefined_value());
683  __ push(rbx);
684  __ incq(rax);
685  __ bind(&done);
686  }
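// (If Function.prototype.call was invoked with no arguments at all, the
// block above slips an undefined receiver underneath the return address so
// the rest of the stub can assume argc >= 1.)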
687 
688  // 2. Get the function to call (passed as receiver) from the stack, check
689  // if it is a function.
690  Label slow, non_function;
691  // The function to call is at position n+1 on the stack.
692  __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
693  __ JumpIfSmi(rdi, &non_function);
694  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
695  __ j(not_equal, &slow);
696 
697  // 3a. Patch the first argument if necessary when calling a function.
698  Label shift_arguments;
699  __ Set(rdx, 0); // indicate regular JS_FUNCTION
700  { Label convert_to_object, use_global_receiver, patch_receiver;
701  // Change context eagerly in case we need the global receiver.
702  __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
703 
704  // Do not transform the receiver for strict mode functions.
705  __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
706  __ testb(FieldOperand(rbx, SharedFunctionInfo::kStrictModeByteOffset),
707  Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
708  __ j(not_equal, &shift_arguments);
709 
710  // Do not transform the receiver for natives.
711  // SharedFunctionInfo is already loaded into rbx.
712  __ testb(FieldOperand(rbx, SharedFunctionInfo::kNativeByteOffset),
713  Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
714  __ j(not_zero, &shift_arguments);
715 
716  // Compute the receiver in non-strict mode.
717  __ movq(rbx, Operand(rsp, rax, times_pointer_size, 0));
718  __ JumpIfSmi(rbx, &convert_to_object, Label::kNear);
719 
720  __ CompareRoot(rbx, Heap::kNullValueRootIndex);
721  __ j(equal, &use_global_receiver);
722  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
723  __ j(equal, &use_global_receiver);
724 
725  STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
726  __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
727  __ j(above_equal, &shift_arguments);
728 
729  __ bind(&convert_to_object);
730  {
731  // Enter an internal frame in order to preserve argument count.
732  FrameScope scope(masm, StackFrame::INTERNAL);
733  __ Integer32ToSmi(rax, rax);
734  __ push(rax);
735 
736  __ push(rbx);
737  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
738  __ movq(rbx, rax);
739  __ Set(rdx, 0); // indicate regular JS_FUNCTION
740 
741  __ pop(rax);
742  __ SmiToInteger32(rax, rax);
743  }
744 
745  // Restore the function to rdi.
746  __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
747  __ jmp(&patch_receiver, Label::kNear);
748 
749  // Use the global receiver object from the called function as the
750  // receiver.
751  __ bind(&use_global_receiver);
752  const int kGlobalIndex =
753  Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
754  __ movq(rbx, FieldOperand(rsi, kGlobalIndex));
755  __ movq(rbx, FieldOperand(rbx, GlobalObject::kNativeContextOffset));
756  __ movq(rbx, FieldOperand(rbx, kGlobalIndex));
757  __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
758 
759  __ bind(&patch_receiver);
760  __ movq(Operand(rsp, rax, times_pointer_size, 0), rbx);
761 
762  __ jmp(&shift_arguments);
763  }
764 
765  // 3b. Check for function proxy.
766  __ bind(&slow);
767  __ Set(rdx, 1); // indicate function proxy
768  __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
769  __ j(equal, &shift_arguments);
770  __ bind(&non_function);
771  __ Set(rdx, 2); // indicate non-function
772 
773  // 3c. Patch the first argument when calling a non-function. The
774  // CALL_NON_FUNCTION builtin expects the non-function callee as
775  // receiver, so overwrite the first argument which will ultimately
776  // become the receiver.
777  __ movq(Operand(rsp, rax, times_pointer_size, 0), rdi);
778 
779  // 4. Shift arguments and return address one slot down on the stack
780  // (overwriting the original receiver). Adjust argument count to make
781  // the original first argument the new receiver.
782  __ bind(&shift_arguments);
783  { Label loop;
784  __ movq(rcx, rax);
785  __ bind(&loop);
786  __ movq(rbx, Operand(rsp, rcx, times_pointer_size, 0));
787  __ movq(Operand(rsp, rcx, times_pointer_size, 1 * kPointerSize), rbx);
788  __ decq(rcx);
789  __ j(not_sign, &loop); // While non-negative (to copy return address).
790  __ pop(rbx); // Discard copy of return address.
791  __ decq(rax); // One fewer argument (first argument is new receiver).
792  }
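// (The loop above walks from the last argument down to the return address,
// copying each slot one word toward higher addresses, and then pops the now
// duplicated return address: the documented shift. The old first argument
// has become the receiver and rax has dropped by one.)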
793 
794  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
795  // or a function proxy via CALL_FUNCTION_PROXY.
796  { Label function, non_proxy;
797  __ testq(rdx, rdx);
798  __ j(zero, &function);
799  __ Set(rbx, 0);
800  __ SetCallKind(rcx, CALL_AS_METHOD);
801  __ cmpq(rdx, Immediate(1));
802  __ j(not_equal, &non_proxy);
803 
804  __ pop(rdx); // return address
805  __ push(rdi); // re-add proxy object as additional argument
806  __ push(rdx);
807  __ incq(rax);
808  __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
809  __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
810  RelocInfo::CODE_TARGET);
811 
812  __ bind(&non_proxy);
813  __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
814  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
815  RelocInfo::CODE_TARGET);
816  __ bind(&function);
817  }
818 
819  // 5b. Get the code to call from the function and check that the number of
820  // expected arguments matches what we're providing. If so, jump
821  // (tail-call) to the code in register edx without checking arguments.
822  __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
823  __ movsxlq(rbx,
824  FieldOperand(rdx,
825  SharedFunctionInfo::kFormalParameterCountOffset));
826  __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
827  __ SetCallKind(rcx, CALL_AS_METHOD);
828  __ cmpq(rax, rbx);
829  __ j(not_equal,
830  masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
831  RelocInfo::CODE_TARGET);
832 
833  ParameterCount expected(0);
834  __ InvokeCode(rdx, expected, expected, JUMP_FUNCTION,
835  NullCallWrapper(), CALL_AS_METHOD);
836 }
837 
838 
839 void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
840  // Stack at entry:
841  // rsp: return address
842  // rsp+8: arguments
843  // rsp+16: receiver ("this")
844  // rsp+24: function
845  {
846  FrameScope frame_scope(masm, StackFrame::INTERNAL);
847  // Stack frame:
848  // rbp: Old base pointer
849  // rbp[1]: return address
850  // rbp[2]: function arguments
851  // rbp[3]: receiver
852  // rbp[4]: function
853  static const int kArgumentsOffset = 2 * kPointerSize;
854  static const int kReceiverOffset = 3 * kPointerSize;
855  static const int kFunctionOffset = 4 * kPointerSize;
856 
857  __ push(Operand(rbp, kFunctionOffset));
858  __ push(Operand(rbp, kArgumentsOffset));
859  __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
860 
861  // Check the stack for overflow. We are not trying to catch
862  // interruptions (e.g. debug break and preemption) here, so the "real stack
863  // limit" is checked.
864  Label okay;
865  __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
866  __ movq(rcx, rsp);
867  // Make rcx the space we have left. The stack might already be overflowed
868  // here which will cause rcx to become negative.
869  __ subq(rcx, kScratchRegister);
870  // Make rdx the space we need for the array when it is unrolled onto the
871  // stack.
872  __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
873  // Check if the arguments will overflow the stack.
874  __ cmpq(rcx, rdx);
875  __ j(greater, &okay); // Signed comparison.
876 
877  // Out of stack space.
878  __ push(Operand(rbp, kFunctionOffset));
879  __ push(rax);
880  __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
881  __ bind(&okay);
882  // End of stack check.
883 
884  // Push current index and limit.
885  const int kLimitOffset =
886  StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
887  const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
888  __ push(rax); // limit
889  __ push(Immediate(0)); // index
890 
891  // Get the receiver.
892  __ movq(rbx, Operand(rbp, kReceiverOffset));
893 
894  // Check that the function is a JS function (otherwise it must be a proxy).
895  Label push_receiver;
896  __ movq(rdi, Operand(rbp, kFunctionOffset));
897  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
898  __ j(not_equal, &push_receiver);
899 
900  // Change context eagerly to get the right global object if necessary.
901  __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
902 
903  // Do not transform the receiver for strict mode functions.
904  Label call_to_object, use_global_receiver;
905  __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
906  __ testb(FieldOperand(rdx, SharedFunctionInfo::kStrictModeByteOffset),
907  Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
908  __ j(not_equal, &push_receiver);
909 
910  // Do not transform the receiver for natives.
911  __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset),
912  Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
913  __ j(not_equal, &push_receiver);
914 
915  // Compute the receiver in non-strict mode.
916  __ JumpIfSmi(rbx, &call_to_object, Label::kNear);
917  __ CompareRoot(rbx, Heap::kNullValueRootIndex);
918  __ j(equal, &use_global_receiver);
919  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
920  __ j(equal, &use_global_receiver);
921 
922  // If given receiver is already a JavaScript object then there's no
923  // reason for converting it.
924  STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
925  __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
926  __ j(above_equal, &push_receiver);
927 
928  // Convert the receiver to an object.
929  __ bind(&call_to_object);
930  __ push(rbx);
931  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
932  __ movq(rbx, rax);
933  __ jmp(&push_receiver, Label::kNear);
934 
935  // Use the current global receiver object as the receiver.
936  __ bind(&use_global_receiver);
937  const int kGlobalOffset =
938  Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
939  __ movq(rbx, FieldOperand(rsi, kGlobalOffset));
940  __ movq(rbx, FieldOperand(rbx, GlobalObject::kNativeContextOffset));
941  __ movq(rbx, FieldOperand(rbx, kGlobalOffset));
942  __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
943 
944  // Push the receiver.
945  __ bind(&push_receiver);
946  __ push(rbx);
947 
948  // Copy all arguments from the array to the stack.
949  Label entry, loop;
950  __ movq(rax, Operand(rbp, kIndexOffset));
951  __ jmp(&entry);
952  __ bind(&loop);
953  __ movq(rdx, Operand(rbp, kArgumentsOffset)); // load arguments
954 
955  // Use inline caching to speed up access to arguments.
956  Handle<Code> ic =
957  masm->isolate()->builtins()->KeyedLoadIC_Initialize();
958  __ Call(ic, RelocInfo::CODE_TARGET);
959  // It is important that we do not have a test instruction after the
960  // call. A test instruction after the call is used to indicate that
961  // we have generated an inline version of the keyed load. In this
962  // case, we know that we are not generating a test instruction next.
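// (The unoptimized-code IC machinery inspects the instruction following a
// KeyedLoadIC call site to decide whether an inlined fast case was emitted;
// the comment above is the promise that no such marker test instruction is
// generated at this call site.)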
963 
964  // Push the nth argument.
965  __ push(rax);
966 
967  // Update the index on the stack and in register rax.
968  __ movq(rax, Operand(rbp, kIndexOffset));
969  __ SmiAddConstant(rax, rax, Smi::FromInt(1));
970  __ movq(Operand(rbp, kIndexOffset), rax);
971 
972  __ bind(&entry);
973  __ cmpq(rax, Operand(rbp, kLimitOffset));
974  __ j(not_equal, &loop);
975 
976  // Invoke the function.
977  Label call_proxy;
978  ParameterCount actual(rax);
979  __ SmiToInteger32(rax, rax);
980  __ movq(rdi, Operand(rbp, kFunctionOffset));
981  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
982  __ j(not_equal, &call_proxy);
983  __ InvokeFunction(rdi, actual, CALL_FUNCTION,
984  NullCallWrapper(), CALL_AS_METHOD);
985 
986  frame_scope.GenerateLeaveFrame();
987  __ ret(3 * kPointerSize); // remove this, receiver, and arguments
988 
989  // Invoke the function proxy.
990  __ bind(&call_proxy);
991  __ push(rdi); // add function proxy as last argument
992  __ incq(rax);
993  __ Set(rbx, 0);
994  __ SetCallKind(rcx, CALL_AS_METHOD);
995  __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
996  __ call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
997  RelocInfo::CODE_TARGET);
998 
999  // Leave internal frame.
1000  }
1001  __ ret(3 * kPointerSize); // remove this, receiver, and arguments
1002 }
1003 
1004 
1005 // Allocate an empty JSArray. The allocated array is put into the result
1006 // register. If the parameter initial_capacity is larger than zero an elements
1007 // backing store is allocated with this size and filled with the hole values.
1008 // Otherwise the elements backing store is set to the empty FixedArray.
1009 static void AllocateEmptyJSArray(MacroAssembler* masm,
1010  Register array_function,
1011  Register result,
1012  Register scratch1,
1013  Register scratch2,
1014  Register scratch3,
1015  Label* gc_required) {
1016  const int initial_capacity = JSArray::kPreallocatedArrayElements;
1017  STATIC_ASSERT(initial_capacity >= 0);
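// (JSArray::kPreallocatedArrayElements is a small compile-time constant, so
// the hole-filling code further down normally takes the fully unrolled path
// guarded by kLoopUnfoldLimit.)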
1018 
1019  __ LoadInitialArrayMap(array_function, scratch2, scratch1, false);
1020 
1021  // Allocate the JSArray object together with space for a fixed array with the
1022  // requested elements.
1023  int size = JSArray::kSize;
1024  if (initial_capacity > 0) {
1025  size += FixedArray::SizeFor(initial_capacity);
1026  }
1027  __ AllocateInNewSpace(size,
1028  result,
1029  scratch2,
1030  scratch3,
1031  gc_required,
1032  TAG_OBJECT);
1033 
1034  // Allocated the JSArray. Now initialize the fields except for the elements
1035  // array.
1036  // result: JSObject
1037  // scratch1: initial map
1038  // scratch2: start of next object
1039  Factory* factory = masm->isolate()->factory();
1040  __ movq(FieldOperand(result, JSObject::kMapOffset), scratch1);
1041  __ Move(FieldOperand(result, JSArray::kPropertiesOffset),
1042  factory->empty_fixed_array());
1043  // Field JSArray::kElementsOffset is initialized later.
1044  __ Move(FieldOperand(result, JSArray::kLengthOffset), Smi::FromInt(0));
1045 
1046  // If no storage is requested for the elements array just set the empty
1047  // fixed array.
1048  if (initial_capacity == 0) {
1049  __ Move(FieldOperand(result, JSArray::kElementsOffset),
1050  factory->empty_fixed_array());
1051  return;
1052  }
1053 
1054  // Calculate the location of the elements array and set elements array member
1055  // of the JSArray.
1056  // result: JSObject
1057  // scratch2: start of next object
1058  __ lea(scratch1, Operand(result, JSArray::kSize));
1059  __ movq(FieldOperand(result, JSArray::kElementsOffset), scratch1);
1060 
1061  // Initialize the FixedArray and fill it with holes. FixedArray length is
1062  // stored as a smi.
1063  // result: JSObject
1064  // scratch1: elements array
1065  // scratch2: start of next object
1066  __ Move(FieldOperand(scratch1, HeapObject::kMapOffset),
1067  factory->fixed_array_map());
1068  __ Move(FieldOperand(scratch1, FixedArray::kLengthOffset),
1069  Smi::FromInt(initial_capacity));
1070 
1071  // Fill the FixedArray with the hole value. Inline the code if short.
1072  // Reconsider loop unfolding if kPreallocatedArrayElements gets changed.
1073  static const int kLoopUnfoldLimit = 4;
1074  __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
1075  if (initial_capacity <= kLoopUnfoldLimit) {
1076  // Use a scratch register here to have only one reloc info when unfolding
1077  // the loop.
1078  for (int i = 0; i < initial_capacity; i++) {
1079  __ movq(FieldOperand(scratch1,
1080  FixedArray::kHeaderSize + i * kPointerSize),
1081  scratch3);
1082  }
1083  } else {
1084  Label loop, entry;
1085  __ movq(scratch2, Immediate(initial_capacity));
1086  __ jmp(&entry);
1087  __ bind(&loop);
1088  __ movq(FieldOperand(scratch1,
1089  scratch2,
1090  times_pointer_size,
1091  FixedArray::kHeaderSize),
1092  scratch3);
1093  __ bind(&entry);
1094  __ decq(scratch2);
1095  __ j(not_sign, &loop);
1096  }
1097 }
1098 
1099 
1100 // Allocate a JSArray with the number of elements stored in a register. The
1101 // register array_function holds the built-in Array function and the register
1102 // array_size holds the size of the array as a smi. The allocated array is put
1103 // into the result register and beginning and end of the FixedArray elements
1104 // storage is put into registers elements_array and elements_array_end (see
1105 // below for when that is not the case). If the parameter fill_with_holes is
1106 // true the allocated elements backing store is filled with the hole values
1107 // otherwise it is left uninitialized. When the backing store is filled the
1108 // register elements_array is scratched.
1109 static void AllocateJSArray(MacroAssembler* masm,
1110  Register array_function, // Array function.
1111  Register array_size, // As a smi, cannot be 0.
1112  Register result,
1113  Register elements_array,
1114  Register elements_array_end,
1115  Register scratch,
1116  bool fill_with_hole,
1117  Label* gc_required) {
1118  __ LoadInitialArrayMap(array_function, scratch,
1119  elements_array, fill_with_hole);
1120 
1121  if (FLAG_debug_code) { // Assert that array size is not zero.
1122  __ testq(array_size, array_size);
1123  __ Assert(not_zero, "array size is unexpectedly 0");
1124  }
1125 
1126  // Allocate the JSArray object together with space for a FixedArray with the
1127  // requested elements.
1128  SmiIndex index =
1129  masm->SmiToIndex(kScratchRegister, array_size, kPointerSizeLog2);
1130  __ AllocateInNewSpace(JSArray::kSize + FixedArray::kHeaderSize,
1131  index.scale,
1132  index.reg,
1133  result,
1134  elements_array_end,
1135  scratch,
1136  gc_required,
1137  TAG_OBJECT);
1138 
1139  // Allocated the JSArray. Now initialize the fields except for the elements
1140  // array.
1141  // result: JSObject
1142  // elements_array: initial map
1143  // elements_array_end: start of next object
1144  // array_size: size of array (smi)
1145  Factory* factory = masm->isolate()->factory();
1146  __ movq(FieldOperand(result, JSObject::kMapOffset), elements_array);
1147  __ Move(elements_array, factory->empty_fixed_array());
1148  __ movq(FieldOperand(result, JSArray::kPropertiesOffset), elements_array);
1149  // Field JSArray::kElementsOffset is initialized later.
1150  __ movq(FieldOperand(result, JSArray::kLengthOffset), array_size);
1151 
1152  // Calculate the location of the elements array and set elements array member
1153  // of the JSArray.
1154  // result: JSObject
1155  // elements_array_end: start of next object
1156  // array_size: size of array (smi)
1157  __ lea(elements_array, Operand(result, JSArray::kSize));
1158  __ movq(FieldOperand(result, JSArray::kElementsOffset), elements_array);
1159 
1160  // Initialize the fixed array. FixedArray length is stored as a smi.
1161  // result: JSObject
1162  // elements_array: elements array
1163  // elements_array_end: start of next object
1164  // array_size: size of array (smi)
1165  __ Move(FieldOperand(elements_array, JSObject::kMapOffset),
1166  factory->fixed_array_map());
1167  // For non-empty JSArrays the length of the FixedArray and the JSArray is the
1168  // same.
1169  __ movq(FieldOperand(elements_array, FixedArray::kLengthOffset), array_size);
1170 
1171  // Fill the allocated FixedArray with the hole value if requested.
1172  // result: JSObject
1173  // elements_array: elements array
1174  // elements_array_end: start of next object
1175  if (fill_with_hole) {
1176  Label loop, entry;
1177  __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
1178  __ lea(elements_array, Operand(elements_array,
1179  FixedArray::kHeaderSize - kHeapObjectTag));
1180  __ jmp(&entry);
1181  __ bind(&loop);
1182  __ movq(Operand(elements_array, 0), scratch);
1183  __ addq(elements_array, Immediate(kPointerSize));
1184  __ bind(&entry);
1185  __ cmpq(elements_array, elements_array_end);
1186  __ j(below, &loop);
1187  }
1188 }
1189 
1190 
1191 // Create a new array for the built-in Array function. This function allocates
1192 // the JSArray object and the FixedArray elements array and initializes these.
1193 // If the Array cannot be constructed in native code the runtime is called. This
1194 // function assumes the following state:
1195 // rdi: constructor (built-in Array function)
1196 // rax: argc
1197 // rsp[0]: return address
1198 // rsp[8]: last argument
1199 // This function is used for both construct and normal calls of Array. The only
1200 // difference between handling a construct call and a normal call is that for a
1201 // construct call the constructor function in rdi needs to be preserved for
1202 // entering the generic code. In both cases argc in rax needs to be preserved.
1203 // Both registers are preserved by this code so no need to differentiate between
1204 // a construct call and a normal call.
1205 static void ArrayNativeCode(MacroAssembler* masm,
1206  Label* call_generic_code) {
1207  Label argc_one_or_more, argc_two_or_more, empty_array, not_empty_array,
1208  has_non_smi_element, finish, cant_transition_map, not_double;
1209 
1210  // Check for array construction with zero arguments.
1211  __ testq(rax, rax);
1212  __ j(not_zero, &argc_one_or_more);
1213 
1214  __ bind(&empty_array);
1215  // Handle construction of an empty array.
1216  AllocateEmptyJSArray(masm,
1217  rdi,
1218  rbx,
1219  rcx,
1220  rdx,
1221  r8,
1222  call_generic_code);
1223  Counters* counters = masm->isolate()->counters();
1224  __ IncrementCounter(counters->array_function_native(), 1);
1225  __ movq(rax, rbx);
1226  __ ret(kPointerSize);
1227 
1228  // Check for one argument. Bail out if argument is not smi or if it is
1229  // negative.
1230  __ bind(&argc_one_or_more);
1231  __ cmpq(rax, Immediate(1));
1232  __ j(not_equal, &argc_two_or_more);
1233  __ movq(rdx, Operand(rsp, kPointerSize)); // Get the argument from the stack.
1234 
1235  __ SmiTest(rdx);
1236  __ j(not_zero, &not_empty_array);
1237  __ pop(r8); // Adjust stack.
1238  __ Drop(1);
1239  __ push(r8);
1240  __ movq(rax, Immediate(0)); // Treat this as a call with argc of zero.
1241  __ jmp(&empty_array);
1242 
1243  __ bind(&not_empty_array);
1244  __ JumpUnlessNonNegativeSmi(rdx, call_generic_code);
1245 
1246  // Handle construction of an empty array of a certain size. Bail out if size
1247  // is too large to actually allocate an elements array.
1248  __ SmiCompare(rdx, Smi::FromInt(JSObject::kInitialMaxFastElementArray));
1249  __ j(greater_equal, call_generic_code);
1250 
1251  // rax: argc
1252  // rdx: array_size (smi)
1253  // rdi: constructor
1254  // esp[0]: return address
1255  // esp[8]: argument
1256  AllocateJSArray(masm,
1257  rdi,
1258  rdx,
1259  rbx,
1260  rcx,
1261  r8,
1262  r9,
1263  true,
1264  call_generic_code);
1265  __ IncrementCounter(counters->array_function_native(), 1);
1266  __ movq(rax, rbx);
1267  __ ret(2 * kPointerSize);
1268 
1269  // Handle construction of an array from a list of arguments.
1270  __ bind(&argc_two_or_more);
1271  __ movq(rdx, rax);
1272  __ Integer32ToSmi(rdx, rdx); // Convert argc to a smi.
1273  // rax: argc
1274  // rdx: array_size (smi)
1275  // rdi: constructor
1276  // esp[0] : return address
1277  // esp[8] : last argument
1278  AllocateJSArray(masm,
1279  rdi,
1280  rdx,
1281  rbx,
1282  rcx,
1283  r8,
1284  r9,
1285  false,
1286  call_generic_code);
1287  __ IncrementCounter(counters->array_function_native(), 1);
1288 
1289  // rax: argc
1290  // rbx: JSArray
1291  // rcx: elements_array
1292  // r8: elements_array_end (untagged)
1293  // esp[0]: return address
1294  // esp[8]: last argument
1295 
1296  // Location of the last argument
1297  __ lea(r9, Operand(rsp, kPointerSize));
1298 
1299  // Location of the first array element (the parameter fill_with_hole to
1300  // AllocateJSArray is false, so the FixedArray is returned in rcx).
1301  __ lea(rdx, Operand(rcx, FixedArray::kHeaderSize - kHeapObjectTag));
1302 
1303  // rax: argc
1304  // rbx: JSArray
1305  // rdx: location of the first array element
1306  // r9: location of the last argument
1307  // esp[0]: return address
1308  // esp[8]: last argument
1309  Label loop, entry;
1310  __ movq(rcx, rax);
1311  __ jmp(&entry);
1312  __ bind(&loop);
1313  __ movq(r8, Operand(r9, rcx, times_pointer_size, 0));
1314  if (FLAG_smi_only_arrays) {
1315  __ JumpIfNotSmi(r8, &has_non_smi_element);
1316  }
1317  __ movq(Operand(rdx, 0), r8);
1318  __ addq(rdx, Immediate(kPointerSize));
1319  __ bind(&entry);
1320  __ decq(rcx);
1321  __ j(greater_equal, &loop);
1322 
1323  // Remove caller arguments from the stack and return.
1324  // rax: argc
1325  // rbx: JSArray
1326  // esp[0]: return address
1327  // esp[8]: last argument
1328  __ bind(&finish);
1329  __ pop(rcx);
1330  __ lea(rsp, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
1331  __ push(rcx);
1332  __ movq(rax, rbx);
1333  __ ret(0);
1334 
1335  __ bind(&has_non_smi_element);
1336  // Double values are handled by the runtime.
1337  __ CheckMap(r8,
1338  masm->isolate()->factory()->heap_number_map(),
1339  &not_double,
1340  DONT_DO_SMI_CHECK);
1341  __ bind(&cant_transition_map);
1342  __ UndoAllocationInNewSpace(rbx);
1343  __ jmp(call_generic_code);
1344 
1345  __ bind(&not_double);
1346  // Transition FAST_SMI_ELEMENTS to FAST_ELEMENTS.
1347  // rbx: JSArray
1348  __ movq(r11, FieldOperand(rbx, HeapObject::kMapOffset));
1349  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
1350  FAST_ELEMENTS,
1351  r11,
1352  kScratchRegister,
1353  &cant_transition_map);
1354 
1355  __ movq(FieldOperand(rbx, HeapObject::kMapOffset), r11);
1356  __ RecordWriteField(rbx, HeapObject::kMapOffset, r11, r8,
1357  kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
1358 
1359  // Finish the array initialization loop.
1360  Label loop2;
1361  __ bind(&loop2);
1362  __ movq(r8, Operand(r9, rcx, times_pointer_size, 0));
1363  __ movq(Operand(rdx, 0), r8);
1364  __ addq(rdx, Immediate(kPointerSize));
1365  __ decq(rcx);
1366  __ j(greater_equal, &loop2);
1367  __ jmp(&finish);
1368 }
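// Note on the smi-only path above: freshly allocated arrays start out with
// FAST_SMI_ELEMENTS, so the first non-smi store either transitions the map
// to FAST_ELEMENTS in place or, for heap numbers, undoes the allocation and
// falls back to the generic code, which may pick a double-backed
// representation instead.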
1369 
1370 
1371 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
1372  // ----------- S t a t e -------------
1373  // -- rax : argc
1374  // -- rsp[0] : return address
1375  // -- rsp[8] : last argument
1376  // -----------------------------------
1377  Label generic_array_code;
1378 
1379  // Get the InternalArray function.
1380  __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi);
1381 
1382  if (FLAG_debug_code) {
1383  // Initial map for the builtin InternalArray functions should be maps.
1384  __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
1385  // Will both indicate a NULL and a Smi.
1386  STATIC_ASSERT(kSmiTag == 0);
1387  Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
1388  __ Check(not_smi, "Unexpected initial map for InternalArray function");
1389  __ CmpObjectType(rbx, MAP_TYPE, rcx);
1390  __ Check(equal, "Unexpected initial map for InternalArray function");
1391  }
1392 
1393  // Run the native code for the InternalArray function called as a normal
1394  // function.
1395  ArrayNativeCode(masm, &generic_array_code);
1396 
1397  // Jump to the generic array code in case the specialized code cannot handle
1398  // the construction.
1399  __ bind(&generic_array_code);
1400  Handle<Code> array_code =
1401  masm->isolate()->builtins()->InternalArrayCodeGeneric();
1402  __ Jump(array_code, RelocInfo::CODE_TARGET);
1403 }
1404 
1405 
1406 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
1407  // ----------- S t a t e -------------
1408  // -- rax : argc
1409  // -- rsp[0] : return address
1410  // -- rsp[8] : last argument
1411  // -----------------------------------
1412  Label generic_array_code;
1413 
1414  // Get the Array function.
1415  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rdi);
1416 
1417  if (FLAG_debug_code) {
1418  // Initial map for the builtin Array functions should be maps.
1419  __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
1420  // Will both indicate a NULL and a Smi.
1421  STATIC_ASSERT(kSmiTag == 0);
1422  Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
1423  __ Check(not_smi, "Unexpected initial map for Array function");
1424  __ CmpObjectType(rbx, MAP_TYPE, rcx);
1425  __ Check(equal, "Unexpected initial map for Array function");
1426  }
1427 
1428  // Run the native code for the Array function called as a normal function.
1429  ArrayNativeCode(masm, &generic_array_code);
1430 
1431  // Jump to the generic array code in case the specialized code cannot handle
1432  // the construction.
1433  __ bind(&generic_array_code);
1434  Handle<Code> array_code =
1435  masm->isolate()->builtins()->ArrayCodeGeneric();
1436  __ Jump(array_code, RelocInfo::CODE_TARGET);
1437 }
1438 
1439 
1440 void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
1441  // ----------- S t a t e -------------
1442  // -- rax : argc
1443  // -- rdi : constructor
1444  // -- rsp[0] : return address
1445  // -- rsp[8] : last argument
1446  // -----------------------------------
1447  Label generic_constructor;
1448 
1449  if (FLAG_debug_code) {
1450  // The array construct code is only set for the builtin and internal
1451  // Array functions which always have a map.
1452  // Initial map for the builtin Array function should be a map.
1453  __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
1454  // Will both indicate a NULL and a Smi.
1455  STATIC_ASSERT(kSmiTag == 0);
1456  Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
1457  __ Check(not_smi, "Unexpected initial map for Array function");
1458  __ CmpObjectType(rbx, MAP_TYPE, rcx);
1459  __ Check(equal, "Unexpected initial map for Array function");
1460  }
1461 
1462  // Run the native code for the Array function called as constructor.
1463  ArrayNativeCode(masm, &generic_constructor);
1464 
1465  // Jump to the generic construct code in case the specialized code cannot
1466  // handle the construction.
1467  __ bind(&generic_constructor);
1468  Handle<Code> generic_construct_stub =
1469  masm->isolate()->builtins()->JSConstructStubGeneric();
1470  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
1471 }
1472 
1473 
1474 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
1475  // ----------- S t a t e -------------
1476  // -- rax : number of arguments
1477  // -- rdi : constructor function
1478  // -- rsp[0] : return address
1479  // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1480  // -- rsp[(argc + 1) * 8] : receiver
1481  // -----------------------------------
1482  Counters* counters = masm->isolate()->counters();
1483  __ IncrementCounter(counters->string_ctor_calls(), 1);
1484 
1485  if (FLAG_debug_code) {
1486  __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, rcx);
1487  __ cmpq(rdi, rcx);
1488  __ Assert(equal, "Unexpected String function");
1489  }
1490 
1491  // Load the first argument into rax and get rid of the rest
1492  // (including the receiver).
1493  Label no_arguments;
1494  __ testq(rax, rax);
1495  __ j(zero, &no_arguments);
1496  __ movq(rbx, Operand(rsp, rax, times_pointer_size, 0));
1497  __ pop(rcx);
1498  __ lea(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
1499  __ push(rcx);
1500  __ movq(rax, rbx);
1501 
1502  // Lookup the argument in the number to string cache.
1503  Label not_cached, argument_is_string;
1504  NumberToStringStub::GenerateLookupNumberStringCache(
1505  masm,
1506  rax, // Input.
1507  rbx, // Result.
1508  rcx, // Scratch 1.
1509  rdx, // Scratch 2.
1510  false, // Input is known to be smi?
1511  &not_cached);
1512  __ IncrementCounter(counters->string_ctor_cached_number(), 1);
1513  __ bind(&argument_is_string);
1514 
1515  // ----------- S t a t e -------------
1516  // -- rbx : argument converted to string
1517  // -- rdi : constructor function
1518  // -- rsp[0] : return address
1519  // -----------------------------------
1520 
1521  // Allocate a JSValue and put the tagged pointer into rax.
1522  Label gc_required;
1523  __ AllocateInNewSpace(JSValue::kSize,
1524  rax, // Result.
1525  rcx, // New allocation top (we ignore it).
1526  no_reg,
1527  &gc_required,
1528  TAG_OBJECT);
1529 
1530  // Set the map.
1531  __ LoadGlobalFunctionInitialMap(rdi, rcx);
1532  if (FLAG_debug_code) {
1533  __ cmpb(FieldOperand(rcx, Map::kInstanceSizeOffset),
1534  Immediate(JSValue::kSize >> kPointerSizeLog2));
1535  __ Assert(equal, "Unexpected string wrapper instance size");
1536  __ cmpb(FieldOperand(rcx, Map::kUnusedPropertyFieldsOffset), Immediate(0));
1537  __ Assert(equal, "Unexpected unused properties of string wrapper");
1538  }
1539  __ movq(FieldOperand(rax, HeapObject::kMapOffset), rcx);
1540 
1541  // Set properties and elements.
1542  __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
1543  __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), rcx);
1544  __ movq(FieldOperand(rax, JSObject::kElementsOffset), rcx);
1545 
1546  // Set the value.
1547  __ movq(FieldOperand(rax, JSValue::kValueOffset), rbx);
1548 
1549  // Ensure the object is fully initialized.
1550  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
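// (The four words initialized above are exactly map, properties, elements
// and value, so the assert guarantees that no field of the JSValue wrapper
// is left uninitialized.)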
1551 
1552  // We're done. Return.
1553  __ ret(0);
1554 
1555  // The argument was not found in the number to string cache. Check
1556  // if it's a string already before calling the conversion builtin.
1557  Label convert_argument;
1558  __ bind(&not_cached);
1559  STATIC_ASSERT(kSmiTag == 0);
1560  __ JumpIfSmi(rax, &convert_argument);
1561  Condition is_string = masm->IsObjectStringType(rax, rbx, rcx);
1562  __ j(NegateCondition(is_string), &convert_argument);
1563  __ movq(rbx, rax);
1564  __ IncrementCounter(counters->string_ctor_string_value(), 1);
1565  __ jmp(&argument_is_string);
1566 
1567  // Invoke the conversion builtin and put the result into rbx.
1568  __ bind(&convert_argument);
1569  __ IncrementCounter(counters->string_ctor_conversions(), 1);
1570  {
1571  FrameScope scope(masm, StackFrame::INTERNAL);
1572  __ push(rdi); // Preserve the function.
1573  __ push(rax);
1574  __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
1575  __ pop(rdi);
1576  }
1577  __ movq(rbx, rax);
1578  __ jmp(&argument_is_string);
1579 
1580  // Load the empty string into rbx, remove the receiver from the
1581  // stack, and jump back to the case where the argument is a string.
1582  __ bind(&no_arguments);
1583  __ LoadRoot(rbx, Heap::kEmptyStringRootIndex);
1584  __ pop(rcx);
1585  __ lea(rsp, Operand(rsp, kPointerSize));
1586  __ push(rcx);
1587  __ jmp(&argument_is_string);
1588 
1589  // At this point the argument is already a string. Call runtime to
1590  // create a string wrapper.
1591  __ bind(&gc_required);
1592  __ IncrementCounter(counters->string_ctor_gc_required(), 1);
1593  {
1594  FrameScope scope(masm, StackFrame::INTERNAL);
1595  __ push(rbx);
1596  __ CallRuntime(Runtime::kNewStringWrapper, 1);
1597  }
1598  __ ret(0);
1599 }
1600 
1601 
1602 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1603  __ push(rbp);
1604  __ movq(rbp, rsp);
1605 
1606  // Store the arguments adaptor context sentinel.
1607  __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
1608 
1609  // Push the function on the stack.
1610  __ push(rdi);
1611 
1612  // Preserve the number of arguments on the stack. Must preserve rax,
1613  // rbx and rcx because these registers are used when copying the
1614  // arguments and the receiver.
1615  __ Integer32ToSmi(r8, rax);
1616  __ push(r8);
1617 }
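// After EnterArgumentsAdaptorFrame the layout matches a standard frame
// except that the context slot holds the ARGUMENTS_ADAPTOR sentinel rather
// than a context; that sentinel is how stack walkers recognize adaptor
// frames when reconstructing actual argument counts.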
1618 
1619 
1620 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1621  // Retrieve the number of arguments from the stack. Number is a Smi.
1622  __ movq(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));
1623 
1624  // Leave the frame.
1625  __ movq(rsp, rbp);
1626  __ pop(rbp);
1627 
1628  // Remove caller arguments from the stack.
1629  __ pop(rcx);
1630  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
1631  __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
1632  __ push(rcx);
1633 }
1634 
1635 
1636 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1637  // ----------- S t a t e -------------
1638  // -- rax : actual number of arguments
1639  // -- rbx : expected number of arguments
1640  // -- rcx : call kind information
1641  // -- rdx : code entry to call
1642  // -----------------------------------
1643 
1644  Label invoke, dont_adapt_arguments;
1645  Counters* counters = masm->isolate()->counters();
1646  __ IncrementCounter(counters->arguments_adaptors(), 1);
1647 
1648  Label enough, too_few;
1649  __ cmpq(rax, rbx);
1650  __ j(less, &too_few);
1651  __ cmpq(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
1652  __ j(equal, &dont_adapt_arguments);
1653 
1654  { // Enough parameters: Actual >= expected.
1655  __ bind(&enough);
1656  EnterArgumentsAdaptorFrame(masm);
1657 
1658  // Copy receiver and all expected arguments.
1659  const int offset = StandardFrameConstants::kCallerSPOffset;
1660  __ lea(rax, Operand(rbp, rax, times_pointer_size, offset));
1661  __ Set(r8, -1); // account for receiver
1662 
1663  Label copy;
1664  __ bind(&copy);
1665  __ incq(r8);
1666  __ push(Operand(rax, 0));
1667  __ subq(rax, Immediate(kPointerSize));
1668  __ cmpq(r8, rbx);
1669  __ j(less, &copy);
1670  __ jmp(&invoke);
1671  }
1672 
1673  { // Too few parameters: Actual < expected.
1674  __ bind(&too_few);
1675  EnterArgumentsAdaptorFrame(masm);
1676 
1677  // Copy receiver and all actual arguments.
1678  const int offset = StandardFrameConstants::kCallerSPOffset;
1679  __ lea(rdi, Operand(rbp, rax, times_pointer_size, offset));
1680  __ Set(r8, -1); // account for receiver
1681 
1682  Label copy;
1683  __ bind(&copy);
1684  __ incq(r8);
1685  __ push(Operand(rdi, 0));
1686  __ subq(rdi, Immediate(kPointerSize));
1687  __ cmpq(r8, rax);
1688  __ j(less, &copy);
1689 
1690  // Fill remaining expected arguments with undefined values.
1691  Label fill;
1692  __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
1693  __ bind(&fill);
1694  __ incq(r8);
1695  __ push(kScratchRegister);
1696  __ cmpq(r8, rbx);
1697  __ j(less, &fill);
1698 
1699  // Restore function pointer.
1700  __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1701  }
1702 
1703  // Call the entry point.
1704  __ bind(&invoke);
1705  __ call(rdx);
1706 
1707  // Store offset of return address for deoptimizer.
1708  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
1709 
1710  // Leave frame and return.
1711  LeaveArgumentsAdaptorFrame(masm);
1712  __ ret(0);
1713 
1714  // -------------------------------------------
1715  // Don't adapt arguments.
1716  // -------------------------------------------
1717  __ bind(&dont_adapt_arguments);
1718  __ jmp(rdx);
1719 }
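// A JavaScript-level illustration of when this trampoline runs, using a
// hypothetical function f (argument-count mismatches trigger adaptation
// unless the callee is marked with kDontAdaptArgumentsSentinel):
//
//   function f(a, b, c) { return [a, b, c]; }
//   f(1);           // too few: b and c are filled with undefined
//   f(1, 2, 3, 4);  // enough: a..c are copied; arguments[3] still
//                   // reaches 4 through the adaptor frame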
1720 
1721 
1722 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1723  // Get the loop depth of the stack guard check. This is recorded in
1724  // a test(rax, depth) instruction right after the call.
1725  Label stack_check;
1726  __ movq(rbx, Operand(rsp, 0)); // return address
1727  __ movzxbq(rbx, Operand(rbx, 1)); // depth
1728 
1729  // Get the loop nesting level at which we allow OSR from the
1730  // unoptimized code and check if we want to do OSR yet. If not we
1731  // should perform a stack guard check so we can get interrupts while
1732  // waiting for on-stack replacement.
1733  __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1734  __ movq(rcx, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
1735  __ movq(rcx, FieldOperand(rcx, SharedFunctionInfo::kCodeOffset));
1736  __ cmpb(rbx, FieldOperand(rcx, Code::kAllowOSRAtLoopNestingLevelOffset));
1737  __ j(greater, &stack_check);
1738 
1739  // Pass the function to optimize as the argument to the on-stack
1740  // replacement runtime function.
1741  {
1742  FrameScope scope(masm, StackFrame::INTERNAL);
1743  __ push(rax);
1744  __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
1745  }
1746 
1747  // If the result was -1 it means that we couldn't optimize the
1748  // function. Just return and continue in the unoptimized version.
1749  Label skip;
1750  __ SmiCompare(rax, Smi::FromInt(-1));
1751  __ j(not_equal, &skip, Label::kNear);
1752  __ ret(0);
1753 
1754  // If we decide not to perform on-stack replacement we perform a
1755  // stack guard check to enable interrupts.
1756  __ bind(&stack_check);
1757  Label ok;
1758  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
1759  __ j(above_equal, &ok, Label::kNear);
1760 
1761  StackCheckStub stub;
1762  __ TailCallStub(&stub);
1763  if (FLAG_debug_code) {
1764  __ Abort("Unreachable code: returned from tail call.");
1765  }
1766  __ bind(&ok);
1767  __ ret(0);
1768 
1769  __ bind(&skip);
1770  // Untag the AST id and push it on the stack.
1771  __ SmiToInteger32(rax, rax);
1772  __ push(rax);
1773 
1774  // Generate the code for doing the frame-to-frame translation using
1775  // the deoptimizer infrastructure.
1776  Deoptimizer::EntryGenerator generator(masm, Deoptimizer::OSR);
1777  generator.Generate();
1778 }
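// In outline: read the loop depth encoded in the test instruction after
// the call site; if OSR is not yet allowed at that nesting level, fall
// back to a plain stack-guard check. Otherwise ask the runtime to
// compile an optimized version: a Smi -1 result means optimization
// failed and the unoptimized code simply continues, while any other
// result is the AST id of the OSR entry point, handed to the
// deoptimizer to translate the unoptimized frame into an optimized one.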
1779 
1780 
1781 #undef __
1782 
1783 } } // namespace v8::internal
1784 
1785 #endif // V8_TARGET_ARCH_X64