V8 3.11.10 (Node.js 0.8.26)
V8 is Google's open source JavaScript engine
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
builtins-ia32.cc
Go to the documentation of this file.
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if defined(V8_TARGET_ARCH_IA32)
31 
32 #include "codegen.h"
33 #include "deoptimizer.h"
34 #include "full-codegen.h"
35 
36 namespace v8 {
37 namespace internal {
38 
39 
40 #define __ ACCESS_MASM(masm)
41 
42 
// Builds the adaptor stub that forwards a builtin invocation to the C++
// runtime function identified by |id|.  When |extra_args| is
// NEEDS_CALLED_FUNCTION, the called function (edi) is inserted below the
// return address as one extra argument.  eax is then bumped to the count
// including the receiver and extras before tail-calling out through
// JumpToExternalReference.  (Doxygen scrape note: embedded line numbers
// 43-74 are consecutive, so this listing appears complete.)
43 void Builtins::Generate_Adaptor(MacroAssembler* masm,
44  CFunctionId id,
45  BuiltinExtraArguments extra_args) {
46  // ----------- S t a t e -------------
47  // -- eax : number of arguments excluding receiver
48  // -- edi : called function (only guaranteed when
49  // extra_args requires it)
50  // -- esi : context
51  // -- esp[0] : return address
52  // -- esp[4] : last argument
53  // -- ...
54  // -- esp[4 * argc] : first argument (argc == eax)
55  // -- esp[4 * (argc +1)] : receiver
56  // -----------------------------------
57 
58  // Insert extra arguments.
59  int num_extra_args = 0;
60  if (extra_args == NEEDS_CALLED_FUNCTION) {
// The extra argument must go below the return address, so the return
// address is popped into a scratch register, edi pushed, then restored.
61  num_extra_args = 1;
62  Register scratch = ebx;
63  __ pop(scratch); // Save return address.
64  __ push(edi);
65  __ push(scratch); // Restore return address.
66  } else {
67  ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
68  }
69 
70  // JumpToExternalReference expects eax to contain the number of arguments
71  // including the receiver and the extra arguments.
72  __ add(eax, Immediate(num_extra_args + 1));
73  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
74 }
75 
76 
// Shared body of the JSConstructStub variants (generic / countdown / api).
// Attempts an inline new-space allocation of the receiver object (fast
// path), falling back to Runtime::kNewObject, then copies the arguments
// and invokes the constructor, applying the ECMA-262 13.2.2 rule that a
// non-object return value is replaced by the receiver.
//
// NOTE(review): this Doxygen listing is LOSSY — several original source
// lines are missing (embedded numbers skip 114, 133, 135, 154, 175, 177,
// 204, 206, 209, 221, 227, 260, 301, 315, 333), so some statements below
// are truncated mid-expression.  Do not treat this listing as compilable;
// consult the real builtins-ia32.cc before editing.
77 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
78  bool is_api_function,
79  bool count_constructions) {
80  // ----------- S t a t e -------------
81  // -- eax: number of arguments
82  // -- edi: constructor function
83  // -----------------------------------
84 
85  // Should never count constructions for api objects.
86  ASSERT(!is_api_function || !count_constructions);
87 
88  // Enter a construct frame.
89  {
90  FrameScope scope(masm, StackFrame::CONSTRUCT);
91 
92  // Store a smi-tagged arguments count on the stack.
93  __ SmiTag(eax);
94  __ push(eax);
95 
96  // Push the function to invoke on the stack.
97  __ push(edi);
98 
99  // Try to allocate the object without transitioning into C code. If any of
100  // the preconditions is not met, the code bails out to the runtime call.
101  Label rt_call, allocated;
102  if (FLAG_inline_new) {
103  Label undo_allocation;
104 #ifdef ENABLE_DEBUGGER_SUPPORT
// While the debugger is stepping into constructors, always take the
// runtime path so the debugger can intercept the call.
105  ExternalReference debug_step_in_fp =
106  ExternalReference::debug_step_in_fp_address(masm->isolate());
107  __ cmp(Operand::StaticVariable(debug_step_in_fp), Immediate(0));
108  __ j(not_equal, &rt_call);
109 #endif
110 
111  // Verified that the constructor is a JSFunction.
112  // Load the initial map and verify that it is in fact a map.
113  // edi: constructor
// NOTE(review): original line 114 (the load of the initial map into eax)
// is missing from this scrape.
115  // Will both indicate a NULL and a Smi
116  __ JumpIfSmi(eax, &rt_call);
117  // edi: constructor
118  // eax: initial map (if proven valid below)
119  __ CmpObjectType(eax, MAP_TYPE, ebx);
120  __ j(not_equal, &rt_call);
121 
122  // Check that the constructor is not constructing a JSFunction (see
123  // comments in Runtime_NewObject in runtime.cc). In which case the
124  // initial map's instance type would be JS_FUNCTION_TYPE.
125  // edi: constructor
126  // eax: initial map
127  __ CmpInstanceType(eax, JS_FUNCTION_TYPE);
128  __ j(equal, &rt_call);
129 
130  if (count_constructions) {
131  Label allocate;
132  // Decrease generous allocation count.
// NOTE(review): original lines 133 and 135 are missing; the dec_b
// statement below is truncated (its FieldOperand offset is lost).
134  __ dec_b(FieldOperand(ecx,
136  __ j(not_zero, &allocate);
137 
138  __ push(eax);
139  __ push(edi);
140 
141  __ push(edi); // constructor
142  // The call will replace the stub, so the countdown is only done once.
143  __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
144 
145  __ pop(edi);
146  __ pop(eax);
147 
148  __ bind(&allocate);
149  }
150 
151  // Now allocate the JSObject on the heap.
152  // edi: constructor
153  // eax: initial map
// NOTE(review): original line 154 (presumably the load of the instance
// size into edi before scaling — TODO confirm) is missing here.
155  __ shl(edi, kPointerSizeLog2);
156  __ AllocateInNewSpace(
157  edi, ebx, edi, no_reg, &rt_call, NO_ALLOCATION_FLAGS);
158  // Allocated the JSObject, now initialize the fields.
159  // eax: initial map
160  // ebx: JSObject
161  // edi: start of next object
162  __ mov(Operand(ebx, JSObject::kMapOffset), eax);
163  Factory* factory = masm->isolate()->factory();
164  __ mov(ecx, factory->empty_fixed_array());
165  __ mov(Operand(ebx, JSObject::kPropertiesOffset), ecx);
166  __ mov(Operand(ebx, JSObject::kElementsOffset), ecx);
167  // Set extra fields in the newly allocated object.
168  // eax: initial map
169  // ebx: JSObject
170  // edi: start of next object
171  __ lea(ecx, Operand(ebx, JSObject::kHeaderSize));
172  __ mov(edx, factory->undefined_value());
173  if (count_constructions) {
// NOTE(review): original lines 175 and 177 are missing; both the
// movzx_b and lea statements below are truncated.
174  __ movzx_b(esi,
176  __ lea(esi,
178  // esi: offset of first field after pre-allocated fields
179  if (FLAG_debug_code) {
180  __ cmp(esi, edi);
181  __ Assert(less_equal,
182  "Unexpected number of pre-allocated property fields.");
183  }
// Pre-allocated fields get undefined; the remainder gets the
// one-pointer filler map (set below) while counting constructions.
184  __ InitializeFieldsWithFiller(ecx, esi, edx);
185  __ mov(edx, factory->one_pointer_filler_map());
186  }
187  __ InitializeFieldsWithFiller(ecx, edi, edx);
188 
189  // Add the object tag to make the JSObject real, so that we can continue
190  // and jump into the continuation code at any time from now on. Any
191  // failures need to undo the allocation, so that the heap is in a
192  // consistent state and verifiable.
193  // eax: initial map
194  // ebx: JSObject
195  // edi: start of next object
196  __ or_(ebx, Immediate(kHeapObjectTag));
197 
198  // Check if a non-empty properties array is needed.
199  // Allocate and initialize a FixedArray if it is.
200  // eax: initial map
201  // ebx: JSObject
202  // edi: start of next object
203  // Calculate the total number of properties described by the map.
// NOTE(review): original lines 204, 206 and 209 are missing; the
// movzx_b below is truncated and the loads feeding add/sub are lost.
205  __ movzx_b(ecx,
207  __ add(edx, ecx);
208  // Calculate unused properties past the end of the in-object properties.
210  __ sub(edx, ecx);
211  // Done if no extra properties are to be allocated.
212  __ j(zero, &allocated);
213  __ Assert(positive, "Property allocation count failed.");
214 
215  // Scale the number of elements by pointer size and add the header for
216  // FixedArrays to the start of the next object calculation from above.
217  // ebx: JSObject
218  // edi: start of next object (will be start of FixedArray)
219  // edx: number of elements in properties array
// NOTE(review): original lines 221 and 227 are missing; this
// AllocateInNewSpace call is truncated (an argument and the trailing
// allocation-flags line are lost).
220  __ AllocateInNewSpace(FixedArray::kHeaderSize,
222  edx,
223  edi,
224  ecx,
225  no_reg,
226  &undo_allocation,
228 
229  // Initialize the FixedArray.
230  // ebx: JSObject
231  // edi: FixedArray
232  // edx: number of elements
233  // ecx: start of next object
234  __ mov(eax, factory->fixed_array_map());
235  __ mov(Operand(edi, FixedArray::kMapOffset), eax); // setup the map
236  __ SmiTag(edx);
237  __ mov(Operand(edi, FixedArray::kLengthOffset), edx); // and length
238 
239  // Initialize the fields to undefined.
240  // ebx: JSObject
241  // edi: FixedArray
242  // ecx: start of next object
243  { Label loop, entry;
244  __ mov(edx, factory->undefined_value());
245  __ lea(eax, Operand(edi, FixedArray::kHeaderSize));
246  __ jmp(&entry);
247  __ bind(&loop);
248  __ mov(Operand(eax, 0), edx);
249  __ add(eax, Immediate(kPointerSize));
250  __ bind(&entry);
251  __ cmp(eax, ecx);
252  __ j(below, &loop);
253  }
254 
255  // Store the initialized FixedArray into the properties field of
256  // the JSObject
257  // ebx: JSObject
258  // edi: FixedArray
259  __ or_(edi, Immediate(kHeapObjectTag)); // add the heap tag
// NOTE(review): original line 260 (the store of edi into the object's
// properties field) is missing from this scrape.
261 
262 
263  // Continue with JSObject being successfully allocated
264  // ebx: JSObject
265  __ jmp(&allocated);
266 
267  // Undo the setting of the new top so that the heap is verifiable. For
268  // example, the map's unused properties potentially do not match the
269  // allocated objects unused properties.
270  // ebx: JSObject (previous new top)
271  __ bind(&undo_allocation);
272  __ UndoAllocationInNewSpace(ebx);
273  }
274 
275  // Allocate the new receiver object using the runtime call.
276  __ bind(&rt_call);
277  // Must restore edi (constructor) before calling runtime.
278  __ mov(edi, Operand(esp, 0));
279  // edi: function (constructor)
280  __ push(edi);
281  __ CallRuntime(Runtime::kNewObject, 1);
282  __ mov(ebx, eax); // store result in ebx
283 
284  // New object allocated.
285  // ebx: newly allocated object
286  __ bind(&allocated);
287  // Retrieve the function from the stack.
288  __ pop(edi);
289 
290  // Retrieve smi-tagged arguments count from the stack.
291  __ mov(eax, Operand(esp, 0));
292  __ SmiUntag(eax);
293 
294  // Push the allocated receiver to the stack. We need two copies
295  // because we may have to return the original one and the calling
296  // conventions dictate that the called function pops the receiver.
297  __ push(ebx);
298  __ push(ebx);
299 
300  // Set up pointer to last argument.
// NOTE(review): original line 301 (the lea that computes the last-
// argument pointer into ebx) is missing from this scrape.
302 
303  // Copy arguments and receiver to the expression stack.
304  Label loop, entry;
305  __ mov(ecx, eax);
306  __ jmp(&entry);
307  __ bind(&loop);
308  __ push(Operand(ebx, ecx, times_4, 0));
309  __ bind(&entry);
310  __ dec(ecx);
311  __ j(greater_equal, &loop);
312 
313  // Call the function.
314  if (is_api_function) {
// NOTE(review): original line 315 is missing here (likely the context
// restore before the API call — TODO confirm).
316  Handle<Code> code =
317  masm->isolate()->builtins()->HandleApiCallConstruct();
318  ParameterCount expected(0);
319  __ InvokeCode(code, expected, expected, RelocInfo::CODE_TARGET,
320  CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
321  } else {
322  ParameterCount actual(eax);
323  __ InvokeFunction(edi, actual, CALL_FUNCTION,
324  NullCallWrapper(), CALL_AS_METHOD);
325  }
326 
327  // Store offset of return address for deoptimizer.
328  if (!is_api_function && !count_constructions) {
329  masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
330  }
331 
332  // Restore context from the frame.
// NOTE(review): original line 333 (the mov restoring esi from the
// frame) is missing from this scrape.
334 
335  // If the result is an object (in the ECMA sense), we should get rid
336  // of the receiver and use the result; see ECMA-262 section 13.2.2-7
337  // on page 74.
338  Label use_receiver, exit;
339 
340  // If the result is a smi, it is *not* an object in the ECMA sense.
341  __ JumpIfSmi(eax, &use_receiver);
342 
343  // If the type of the result (stored in its map) is less than
344  // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
345  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
346  __ j(above_equal, &exit);
347 
348  // Throw away the result of the constructor invocation and use the
349  // on-stack receiver as the result.
350  __ bind(&use_receiver);
351  __ mov(eax, Operand(esp, 0));
352 
353  // Restore the arguments count and leave the construct frame.
354  __ bind(&exit);
355  __ mov(ebx, Operand(esp, kPointerSize)); // Get arguments count.
356 
357  // Leave construct frame.
358  }
359 
360  // Remove caller arguments from the stack and return.
// ebx holds the smi-tagged argc; times_2 undoes the smi tag (tag size 1)
// while scaling by the 4-byte word, and the +1 word drops the receiver.
361  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
362  __ pop(ecx);
363  __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize)); // 1 ~ receiver
364  __ push(ecx);
365  __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1);
366  __ ret(0);
367 }
368 
369 
// Construct stub variant that decrements the generous-allocation countdown
// (count_constructions = true) on each construction; not an API function.
370 void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
371  Generate_JSConstructStubHelper(masm, false, true);
372 }
373 
374 
// Generic construct stub: no API-function handling, no construction count.
375 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
376  Generate_JSConstructStubHelper(masm, false, false);
377 }
378 
379 
// Construct stub for API (embedder-provided) functions; never counts
// constructions (asserted inside the helper).
380 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
381  Generate_JSConstructStubHelper(masm, true, false);
382 }
383 
384 
// Shared body of the JS entry trampolines: called on transition from C++
// into JS.  Reads argc/argv from the C entry frame, copies the arguments
// (dereferencing handles) onto the JS stack inside an internal frame, and
// invokes the function either as a construct call or a normal call.
// NOTE(review): this Doxygen listing is lossy — original lines 397-398
// (function/context setup) and 402 (receiver push) are missing below.
385 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
386  bool is_construct) {
387  // Clear the context before we push it when entering the internal frame.
388  __ Set(esi, Immediate(0));
389 
390  {
391  FrameScope scope(masm, StackFrame::INTERNAL);
392 
393  // Load the previous frame pointer (ebx) to access C arguments
394  __ mov(ebx, Operand(ebp, 0));
395 
396  // Get the function from the frame and setup the context.
// NOTE(review): original lines 397-398 (the loads implementing the
// comment above) are missing from this scrape.
399 
400  // Push the function and the receiver onto the stack.
401  __ push(ecx);
// NOTE(review): original line 402 (the receiver push) is missing.
403 
404  // Load the number of arguments and setup pointer to the arguments.
405  __ mov(eax, Operand(ebx, EntryFrameConstants::kArgcOffset));
406  __ mov(ebx, Operand(ebx, EntryFrameConstants::kArgvOffset));
407 
408  // Copy arguments to the stack in a loop.
// argv entries are handles (pointers to slots), hence the extra
// dereference before each push.
409  Label loop, entry;
410  __ Set(ecx, Immediate(0));
411  __ jmp(&entry);
412  __ bind(&loop);
413  __ mov(edx, Operand(ebx, ecx, times_4, 0)); // push parameter from argv
414  __ push(Operand(edx, 0)); // dereference handle
415  __ inc(ecx);
416  __ bind(&entry);
417  __ cmp(ecx, eax);
418  __ j(not_equal, &loop);
419 
420  // Get the function from the stack and call it.
421  // kPointerSize for the receiver.
422  __ mov(edi, Operand(esp, eax, times_4, kPointerSize));
423 
424  // Invoke the code.
425  if (is_construct) {
426  CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
427  __ CallStub(&stub);
428  } else {
429  ParameterCount actual(eax);
430  __ InvokeFunction(edi, actual, CALL_FUNCTION,
431  NullCallWrapper(), CALL_AS_METHOD);
432  }
433 
434  // Exit the internal frame. Notice that this also removes the empty.
435  // context and the function left on the stack by the code
436  // invocation.
437  }
438  __ ret(kPointerSize); // Remove receiver.
439 }
440 
441 
// Entry trampoline for a normal (non-construct) call from C++ into JS.
442 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
443  Generate_JSEntryTrampolineHelper(masm, false);
444 }
445 
446 
// Entry trampoline for a construct call ('new') from C++ into JS.
447 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
448  Generate_JSEntryTrampolineHelper(masm, true);
449 }
450 
451 
// Stub installed as the code of not-yet-compiled functions.  Preserves the
// function (edi) and call-kind info (ecx) across a Runtime::kLazyCompile
// call, then tail-jumps into the freshly compiled code.
// NOTE(review): original line 473 — the instruction that turns the Code
// object returned in eax into its entry address before the jmp — is
// missing from this scrape.
452 void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
453  {
454  FrameScope scope(masm, StackFrame::INTERNAL);
455 
456  // Push a copy of the function.
457  __ push(edi);
458  // Push call kind information.
459  __ push(ecx);
460 
461  __ push(edi); // Function is also the parameter to the runtime call.
462  __ CallRuntime(Runtime::kLazyCompile, 1);
463 
464  // Restore call kind information.
465  __ pop(ecx);
466  // Restore receiver.
467  __ pop(edi);
468 
469  // Tear down internal frame.
470  }
471 
472  // Do a tail-call of the compiled function.
474  __ jmp(eax);
475 }
476 
477 
// Stub used when a function is marked for optimizing recompilation.
// Structurally identical to Generate_LazyCompile but calls
// Runtime::kLazyRecompile.
// NOTE(review): original line 499 — the instruction converting the Code
// object in eax to its entry address before the jmp — is missing from
// this scrape.
478 void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
479  {
480  FrameScope scope(masm, StackFrame::INTERNAL);
481 
482  // Push a copy of the function onto the stack.
483  __ push(edi);
484  // Push call kind information.
485  __ push(ecx);
486 
487  __ push(edi); // Function is also the parameter to the runtime call.
488  __ CallRuntime(Runtime::kLazyRecompile, 1);
489 
490  // Restore call kind information.
491  __ pop(ecx);
492  // Restore receiver.
493  __ pop(edi);
494 
495  // Tear down internal frame.
496  }
497 
498  // Do a tail-call of the compiled function.
500  __ jmp(eax);
501 }
502 
503 
// Shared body of the NotifyDeoptimized builtins: informs the runtime that
// deoptimization happened, then returns, dropping the saved full-codegen
// state (and possibly a saved eax) from the stack.
// NOTE(review): this scrape is lossy — original line 505 (the second
// parameter, presumably 'Deoptimizer::BailoutType type' given the use of
// 'type' below — TODO confirm) and lines 522/528 (the cmp instructions
// feeding the two conditional jumps) are missing.
504 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
506  {
507  FrameScope scope(masm, StackFrame::INTERNAL);
508 
509  // Pass deoptimization type to the runtime system.
510  __ push(Immediate(Smi::FromInt(static_cast<int>(type))));
511  __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
512 
513  // Tear down internal frame.
514  }
515 
516  // Get the full codegen state from the stack and untag it.
517  __ mov(ecx, Operand(esp, 1 * kPointerSize));
518  __ SmiUntag(ecx);
519 
520  // Switch on the state.
521  Label not_no_registers, not_tos_eax;
// NOTE(review): original line 522 (the cmp against the no-registers
// state) is missing from this scrape.
523  __ j(not_equal, &not_no_registers, Label::kNear);
524  __ ret(1 * kPointerSize); // Remove state.
525 
526  __ bind(&not_no_registers);
527  __ mov(eax, Operand(esp, 2 * kPointerSize));
// NOTE(review): original line 528 (the cmp against the TOS-in-eax state)
// is missing from this scrape.
529  __ j(not_equal, &not_tos_eax, Label::kNear);
530  __ ret(2 * kPointerSize); // Remove state, eax.
531 
532  __ bind(&not_tos_eax);
533  __ Abort("no cases left");
534 }
535 
536 
// Notify the runtime of an eager deoptimization.
537 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
538  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
539 }
540 
541 
// Notify the runtime of a lazy deoptimization.
542 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
543  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
544 }
545 
546 
// Notify the runtime of an on-stack-replacement event.  All GP registers
// are saved with pushad/popad around the runtime call; as the original
// comment notes, this relies on kNotifyOSR not triggering GC.
// (Doxygen scrape note: embedded numbers 547-561 are consecutive, so
// this listing appears complete.)
547 void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
548  // TODO(kasperl): Do we need to save/restore the XMM registers too?
549 
550  // For now, we are relying on the fact that Runtime::NotifyOSR
551  // doesn't do any garbage collection which allows us to save/restore
552  // the registers without worrying about which of them contain
553  // pointers. This seems a bit fragile.
554  __ pushad();
555  {
556  FrameScope scope(masm, StackFrame::INTERNAL);
557  __ CallRuntime(Runtime::kNotifyOSR, 0);
558  }
559  __ popad();
560  __ ret(0);
561 }
562 
563 
// Implements Function.prototype.call: ensures at least one argument,
// classifies the callee (JS function / proxy / non-function), patches the
// receiver for non-strict calls (ToObject / global receiver), shifts all
// arguments down one slot so the first argument becomes the receiver, and
// dispatches either through the arguments adaptor (proxy / non-function)
// or directly into the function's code.
// NOTE(review): this Doxygen listing is LOSSY — original lines 593,
// 596-598, 602-603, 616, 644, 646, 648, 714-717 and 726 are missing, so
// several statements below are truncated mid-expression.  Consult the
// real builtins-ia32.cc before editing.
564 void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
565  Factory* factory = masm->isolate()->factory();
566 
567  // 1. Make sure we have at least one argument.
// If eax == 0 the receiver slot is synthesized as undefined, inserted
// below the return address.
568  { Label done;
569  __ test(eax, eax);
570  __ j(not_zero, &done);
571  __ pop(ebx);
572  __ push(Immediate(factory->undefined_value()));
573  __ push(ebx);
574  __ inc(eax);
575  __ bind(&done);
576  }
577 
578  // 2. Get the function to call (passed as receiver) from the stack, check
579  // if it is a function.
580  Label slow, non_function;
581  // 1 ~ return address.
582  __ mov(edi, Operand(esp, eax, times_4, 1 * kPointerSize));
583  __ JumpIfSmi(edi, &non_function);
584  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
585  __ j(not_equal, &slow);
586 
587 
588  // 3a. Patch the first argument if necessary when calling a function.
589  Label shift_arguments;
590  __ Set(edx, Immediate(0)); // indicate regular JS_FUNCTION
591  { Label convert_to_object, use_global_receiver, patch_receiver;
592  // Change context eagerly in case we need the global receiver.
// NOTE(review): original line 593 (the context load implementing the
// comment above) is missing from this scrape.
594 
595  // Do not transform the receiver for strict mode functions.
// NOTE(review): original lines 596-598 (the shared-function-info load
// and strict-mode bit test) are missing from this scrape.
599  __ j(not_equal, &shift_arguments);
600 
601  // Do not transform the receiver for natives (shared already in ebx).
// NOTE(review): original lines 602-603 (the native-bit test) are
// missing from this scrape.
604  __ j(not_equal, &shift_arguments);
605 
606  // Compute the receiver in non-strict mode.
607  __ mov(ebx, Operand(esp, eax, times_4, 0)); // First argument.
608 
609  // Call ToObject on the receiver if it is not an object, or use the
610  // global object if it is null or undefined.
611  __ JumpIfSmi(ebx, &convert_to_object);
612  __ cmp(ebx, factory->null_value());
613  __ j(equal, &use_global_receiver);
614  __ cmp(ebx, factory->undefined_value());
615  __ j(equal, &use_global_receiver);
// NOTE(review): original line 616 is missing here (likely a
// STATIC_ASSERT about instance-type ordering — TODO confirm).
617  __ CmpObjectType(ebx, FIRST_SPEC_OBJECT_TYPE, ecx);
618  __ j(above_equal, &shift_arguments);
619 
620  __ bind(&convert_to_object);
621 
622  { // In order to preserve argument count.
// eax (argc) is smi-tagged and saved on the stack across the
// TO_OBJECT builtin call, then restored and untagged.
623  FrameScope scope(masm, StackFrame::INTERNAL);
624  __ SmiTag(eax);
625  __ push(eax);
626 
627  __ push(ebx);
628  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
629  __ mov(ebx, eax);
630  __ Set(edx, Immediate(0)); // restore
631 
632  __ pop(eax);
633  __ SmiUntag(eax);
634  }
635 
636  // Restore the function to edi.
637  __ mov(edi, Operand(esp, eax, times_4, 1 * kPointerSize));
638  __ jmp(&patch_receiver);
639 
640  // Use the global receiver object from the called function as the
641  // receiver.
642  __ bind(&use_global_receiver);
// NOTE(review): original lines 644, 646 and 648 are missing; the
// kGlobalIndex initializer and part of this load chain are lost.
643  const int kGlobalIndex =
645  __ mov(ebx, FieldOperand(esi, kGlobalIndex));
647  __ mov(ebx, FieldOperand(ebx, kGlobalIndex));
649 
650  __ bind(&patch_receiver);
651  __ mov(Operand(esp, eax, times_4, 0), ebx);
652 
653  __ jmp(&shift_arguments);
654  }
655 
656  // 3b. Check for function proxy.
657  __ bind(&slow);
658  __ Set(edx, Immediate(1)); // indicate function proxy
659  __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
660  __ j(equal, &shift_arguments);
661  __ bind(&non_function);
662  __ Set(edx, Immediate(2)); // indicate non-function
663 
664  // 3c. Patch the first argument when calling a non-function. The
665  // CALL_NON_FUNCTION builtin expects the non-function callee as
666  // receiver, so overwrite the first argument which will ultimately
667  // become the receiver.
668  __ mov(Operand(esp, eax, times_4, 0), edi);
669 
670  // 4. Shift arguments and return address one slot down on the stack
671  // (overwriting the original receiver). Adjust argument count to make
672  // the original first argument the new receiver.
673  __ bind(&shift_arguments);
674  { Label loop;
675  __ mov(ecx, eax);
676  __ bind(&loop);
677  __ mov(ebx, Operand(esp, ecx, times_4, 0));
678  __ mov(Operand(esp, ecx, times_4, kPointerSize), ebx);
679  __ dec(ecx);
680  __ j(not_sign, &loop); // While non-negative (to copy return address).
681  __ pop(ebx); // Discard copy of return address.
682  __ dec(eax); // One fewer argument (first argument is new receiver).
683  }
684 
685  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
686  // or a function proxy via CALL_FUNCTION_PROXY.
// edx still carries the classification set above: 0 = JS function,
// 1 = proxy, 2 = non-function.
687  { Label function, non_proxy;
688  __ test(edx, edx);
689  __ j(zero, &function);
690  __ Set(ebx, Immediate(0));
691  __ cmp(edx, Immediate(1));
692  __ j(not_equal, &non_proxy);
693 
694  __ pop(edx); // return address
695  __ push(edi); // re-add proxy object as additional argument
696  __ push(edx);
697  __ inc(eax);
698  __ SetCallKind(ecx, CALL_AS_FUNCTION);
699  __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
700  __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
701  RelocInfo::CODE_TARGET);
702 
703  __ bind(&non_proxy);
704  __ SetCallKind(ecx, CALL_AS_METHOD);
705  __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
706  __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
707  RelocInfo::CODE_TARGET);
708  __ bind(&function);
709  }
710 
711  // 5b. Get the code to call from the function and check that the number of
712  // expected arguments matches what we're providing. If so, jump
713  // (tail-call) to the code in register edx without checking arguments.
// NOTE(review): original lines 714-717 are missing; the mov below is
// truncated (the shared-function-info load chain is lost), and line
// 726 (the final InvokeCode argument) is missing at the end.
715  __ mov(ebx,
718  __ SmiUntag(ebx);
719  __ SetCallKind(ecx, CALL_AS_METHOD);
720  __ cmp(eax, ebx);
721  __ j(not_equal,
722  masm->isolate()->builtins()->ArgumentsAdaptorTrampoline());
723 
724  ParameterCount expected(0);
725  __ InvokeCode(edx, expected, expected, JUMP_FUNCTION, NullCallWrapper(),
727 }
728 
729 
// Implements Function.prototype.apply: validates the arguments object via
// APPLY_PREPARE, checks for stack overflow against the real stack limit,
// patches the receiver for non-strict calls, unrolls the arguments array
// onto the stack one element at a time via the KeyedLoad IC, and invokes
// the function (or the proxy path through the arguments adaptor).
// NOTE(review): this Doxygen listing is LOSSY — original lines 755, 769,
// 784, 789-791, 796-798, 809, 822, 824 and 826 are missing, so several
// statements below are truncated mid-expression.  Consult the real
// builtins-ia32.cc before editing.
730 void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
731  static const int kArgumentsOffset = 2 * kPointerSize;
732  static const int kReceiverOffset = 3 * kPointerSize;
733  static const int kFunctionOffset = 4 * kPointerSize;
734  {
735  FrameScope frame_scope(masm, StackFrame::INTERNAL);
736 
737  __ push(Operand(ebp, kFunctionOffset)); // push this
738  __ push(Operand(ebp, kArgumentsOffset)); // push arguments
739  __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
740 
741  // Check the stack for overflow. We are not trying to catch
742  // interruptions (e.g. debug break and preemption) here, so the "real stack
743  // limit" is checked.
744  Label okay;
745  ExternalReference real_stack_limit =
746  ExternalReference::address_of_real_stack_limit(masm->isolate());
747  __ mov(edi, Operand::StaticVariable(real_stack_limit));
748  // Make ecx the space we have left. The stack might already be overflowed
749  // here which will cause ecx to become negative.
750  __ mov(ecx, esp);
751  __ sub(ecx, edi);
752  // Make edx the space we need for the array when it is unrolled onto the
753  // stack.
754  __ mov(edx, eax);
// NOTE(review): original line 755 (the scaling of edx from a smi count
// to a byte size) is missing from this scrape.
756  // Check if the arguments will overflow the stack.
757  __ cmp(ecx, edx);
758  __ j(greater, &okay); // Signed comparison.
759 
760  // Out of stack space.
761  __ push(Operand(ebp, 4 * kPointerSize)); // push this
762  __ push(eax);
763  __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
764  __ bind(&okay);
765  // End of stack check.
766 
767  // Push current index and limit.
// NOTE(review): original line 769 (the kLimitOffset initializer value)
// is missing from this scrape.
768  const int kLimitOffset =
770  const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
771  __ push(eax); // limit
772  __ push(Immediate(0)); // index
773 
774  // Get the receiver.
775  __ mov(ebx, Operand(ebp, kReceiverOffset));
776 
777  // Check that the function is a JS function (otherwise it must be a proxy).
778  Label push_receiver;
779  __ mov(edi, Operand(ebp, kFunctionOffset));
780  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
781  __ j(not_equal, &push_receiver);
782 
783  // Change context eagerly to get the right global object if necessary.
// NOTE(review): original line 784 (the context load implementing the
// comment above) is missing from this scrape.
785 
786  // Compute the receiver.
787  // Do not transform the receiver for strict mode functions.
// NOTE(review): original lines 789-791 (the shared-function-info load
// and strict-mode bit test) are missing from this scrape.
788  Label call_to_object, use_global_receiver;
792  __ j(not_equal, &push_receiver);
793 
794  Factory* factory = masm->isolate()->factory();
795 
796  // Do not transform the receiver for natives (shared already in ecx).
// NOTE(review): original lines 797-798 (the native-bit test) are
// missing from this scrape.
799  __ j(not_equal, &push_receiver);
800 
801  // Compute the receiver in non-strict mode.
802  // Call ToObject on the receiver if it is not an object, or use the
803  // global object if it is null or undefined.
804  __ JumpIfSmi(ebx, &call_to_object);
805  __ cmp(ebx, factory->null_value());
806  __ j(equal, &use_global_receiver);
807  __ cmp(ebx, factory->undefined_value());
808  __ j(equal, &use_global_receiver);
// NOTE(review): original line 809 is missing here (likely a
// STATIC_ASSERT about instance-type ordering — TODO confirm).
810  __ CmpObjectType(ebx, FIRST_SPEC_OBJECT_TYPE, ecx);
811  __ j(above_equal, &push_receiver);
812 
813  __ bind(&call_to_object);
814  __ push(ebx);
815  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
816  __ mov(ebx, eax);
817  __ jmp(&push_receiver);
818 
819  // Use the current global receiver object as the receiver.
820  __ bind(&use_global_receiver);
// NOTE(review): original lines 822, 824 and 826 are missing; the
// kGlobalOffset initializer and part of this load chain are lost.
821  const int kGlobalOffset =
823  __ mov(ebx, FieldOperand(esi, kGlobalOffset));
825  __ mov(ebx, FieldOperand(ebx, kGlobalOffset));
827 
828  // Push the receiver.
829  __ bind(&push_receiver);
830  __ push(ebx);
831 
832  // Copy all arguments from the array to the stack.
833  Label entry, loop;
834  __ mov(ecx, Operand(ebp, kIndexOffset));
835  __ jmp(&entry);
836  __ bind(&loop);
837  __ mov(edx, Operand(ebp, kArgumentsOffset)); // load arguments
838 
839  // Use inline caching to speed up access to arguments.
840  Handle<Code> ic = masm->isolate()->builtins()->KeyedLoadIC_Initialize();
841  __ call(ic, RelocInfo::CODE_TARGET);
842  // It is important that we do not have a test instruction after the
843  // call. A test instruction after the call is used to indicate that
844  // we have generated an inline version of the keyed load. In this
845  // case, we know that we are not generating a test instruction next.
846 
847  // Push the nth argument.
848  __ push(eax);
849 
850  // Update the index on the stack and in register eax.
// The index on the stack is a smi, hence the increment by
// 1 << kSmiTagSize rather than 1.
851  __ mov(ecx, Operand(ebp, kIndexOffset));
852  __ add(ecx, Immediate(1 << kSmiTagSize));
853  __ mov(Operand(ebp, kIndexOffset), ecx);
854 
855  __ bind(&entry);
856  __ cmp(ecx, Operand(ebp, kLimitOffset));
857  __ j(not_equal, &loop);
858 
859  // Invoke the function.
860  Label call_proxy;
861  __ mov(eax, ecx);
862  ParameterCount actual(eax);
863  __ SmiUntag(eax);
864  __ mov(edi, Operand(ebp, kFunctionOffset));
865  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
866  __ j(not_equal, &call_proxy);
867  __ InvokeFunction(edi, actual, CALL_FUNCTION,
868  NullCallWrapper(), CALL_AS_METHOD);
869 
870  frame_scope.GenerateLeaveFrame();
871  __ ret(3 * kPointerSize); // remove this, receiver, and arguments
872 
873  // Invoke the function proxy.
874  __ bind(&call_proxy);
875  __ push(edi); // add function proxy as last argument
876  __ inc(eax);
877  __ Set(ebx, Immediate(0));
878  __ SetCallKind(ecx, CALL_AS_METHOD);
879  __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
880  __ call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
881  RelocInfo::CODE_TARGET);
882 
883  // Leave internal frame.
884  }
885  __ ret(3 * kPointerSize); // remove this, receiver, and arguments
886 }
887 
888 
// NOTE(review): this Doxygen listing is lossy — original lines 925, 933,
// 952 and 974-975 are missing, so several FieldOperand stores below are
// truncated mid-expression.  Consult the real builtins-ia32.cc before
// editing.
889 // Allocate an empty JSArray. The allocated array is put into the result
890 // register. If the parameter initial_capacity is larger than zero an elements
891 // backing store is allocated with this size and filled with the hole values.
892 // Otherwise the elements backing store is set to the empty FixedArray.
893 static void AllocateEmptyJSArray(MacroAssembler* masm,
894  Register array_function,
895  Register result,
896  Register scratch1,
897  Register scratch2,
898  Register scratch3,
899  Label* gc_required) {
900  const int initial_capacity = JSArray::kPreallocatedArrayElements;
901  STATIC_ASSERT(initial_capacity >= 0);
902 
903  __ LoadInitialArrayMap(array_function, scratch2, scratch1, false);
904 
905  // Allocate the JSArray object together with space for a fixed array with the
906  // requested elements.
907  int size = JSArray::kSize;
908  if (initial_capacity > 0) {
909  size += FixedArray::SizeFor(initial_capacity);
910  }
911  __ AllocateInNewSpace(size,
912  result,
913  scratch2,
914  scratch3,
915  gc_required,
916  TAG_OBJECT);
917 
918  // Allocated the JSArray. Now initialize the fields except for the elements
919  // array.
920  // result: JSObject
921  // scratch1: initial map
922  // scratch2: start of next object
923  __ mov(FieldOperand(result, JSObject::kMapOffset), scratch1);
924  Factory* factory = masm->isolate()->factory();
// NOTE(review): original line 925 (the mov of the properties field whose
// value continues on the next line) is missing from this scrape.
926  factory->empty_fixed_array());
927  // Field JSArray::kElementsOffset is initialized later.
928  __ mov(FieldOperand(result, JSArray::kLengthOffset), Immediate(0));
929 
930  // If no storage is requested for the elements array just set the empty
931  // fixed array.
932  if (initial_capacity == 0) {
// NOTE(review): original line 933 (the mov of the elements field whose
// value continues on the next line) is missing from this scrape.
934  factory->empty_fixed_array());
935  return;
936  }
937 
938  // Calculate the location of the elements array and set elements array member
939  // of the JSArray.
940  // result: JSObject
941  // scratch2: start of next object
942  __ lea(scratch1, Operand(result, JSArray::kSize));
943  __ mov(FieldOperand(result, JSArray::kElementsOffset), scratch1);
944 
945  // Initialize the FixedArray and fill it with holes. FixedArray length is
946  // stored as a smi.
947  // result: JSObject
948  // scratch1: elements array
949  // scratch2: start of next object
950  __ mov(FieldOperand(scratch1, FixedArray::kMapOffset),
951  factory->fixed_array_map());
// NOTE(review): original line 952 (the mov of the length field whose
// smi value continues on the next line) is missing from this scrape.
953  Immediate(Smi::FromInt(initial_capacity)));
954 
955  // Fill the FixedArray with the hole value. Inline the code if short.
956  // Reconsider loop unfolding if kPreallocatedArrayElements gets changed.
957  static const int kLoopUnfoldLimit = 4;
958  if (initial_capacity <= kLoopUnfoldLimit) {
959  // Use a scratch register here to have only one reloc info when unfolding
960  // the loop.
961  __ mov(scratch3, factory->the_hole_value());
962  for (int i = 0; i < initial_capacity; i++) {
963  __ mov(FieldOperand(scratch1,
964  FixedArray::kHeaderSize + i * kPointerSize),
965  scratch3);
966  }
967  } else {
968  Label loop, entry;
969  __ mov(scratch2, Immediate(initial_capacity));
970  __ jmp(&entry);
971  __ bind(&loop);
// NOTE(review): original lines 974-975 are missing; this FieldOperand
// expression (scale/offset arguments) is truncated.
972  __ mov(FieldOperand(scratch1,
973  scratch2,
976  factory->the_hole_value());
977  __ bind(&entry);
978  __ dec(scratch2);
979  __ j(not_sign, &loop);
980  }
981 }
982 
983 
984 // Allocate a JSArray with the number of elements stored in a register. The
985 // register array_function holds the built-in Array function and the register
986 // array_size holds the size of the array as a smi. The allocated array is put
987 // into the result register and beginning and end of the FixedArray elements
988 // storage is put into registers elements_array and elements_array_end (see
989 // below for when that is not the case). If the parameter fill_with_holes is
990 // true the allocated elements backing store is filled with the hole values
991 // otherwise it is left uninitialized. When the backing store is filled the
992 // register elements_array is scratched.
993 static void AllocateJSArray(MacroAssembler* masm,
994  Register array_function, // Array function.
995  Register array_size, // As a smi, cannot be 0.
996  Register result,
997  Register elements_array,
998  Register elements_array_end,
999  Register scratch,
1000  bool fill_with_hole,
1001  Label* gc_required) {
  // The register pinning below exists because the hole-filling path uses the
  // ia32 string-store instructions: rep stos writes through edi with the
  // count in ecx and the fill value in eax.
1002  ASSERT(scratch.is(edi)); // rep stos destination
1003  ASSERT(!fill_with_hole || array_size.is(ecx)); // rep stos count
1004  ASSERT(!fill_with_hole || !result.is(eax)); // result is never eax
1005 
  // Load the initial map for the array into elements_array (reused as a
  // temporary here); bails out via gc_required on failure paths inside.
1006  __ LoadInitialArrayMap(array_function, scratch,
1007  elements_array, fill_with_hole);
1008 
1009  // Allocate the JSArray object together with space for a FixedArray with the
1010  // requested elements.
1011  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  // array_size is a smi (value << 1), so scaling it by times_half_pointer_size
  // yields array_size * kPointerSize bytes of element storage.
1012  __ AllocateInNewSpace(JSArray::kSize + FixedArray::kHeaderSize,
1013  times_half_pointer_size, // array_size is a smi.
1014  array_size,
1015  result,
1016  elements_array_end,
1017  scratch,
1018  gc_required,
1019  TAG_OBJECT);
1020 
1021  // Allocated the JSArray. Now initialize the fields except for the elements
1022  // array.
1023  // result: JSObject
1024  // elements_array: initial map
1025  // elements_array_end: start of next object
1026  // array_size: size of array (smi)
1027  __ mov(FieldOperand(result, JSObject::kMapOffset), elements_array);
1028  Factory* factory = masm->isolate()->factory();
1029  __ mov(elements_array, factory->empty_fixed_array());
1030  __ mov(FieldOperand(result, JSArray::kPropertiesOffset), elements_array);
1031  // Field JSArray::kElementsOffset is initialized later.
1032  __ mov(FieldOperand(result, JSArray::kLengthOffset), array_size);
1033 
1034  // Calculate the location of the elements array and set elements array member
1035  // of the JSArray.
1036  // result: JSObject
1037  // elements_array_end: start of next object
1038  // array_size: size of array (smi)
1039  __ lea(elements_array, Operand(result, JSArray::kSize));
1040  __ mov(FieldOperand(result, JSArray::kElementsOffset), elements_array);
1041 
1042  // Initialize the fixed array. FixedArray length is stored as a smi.
1043  // result: JSObject
1044  // elements_array: elements array
1045  // elements_array_end: start of next object
1046  // array_size: size of array (smi)
1047  __ mov(FieldOperand(elements_array, FixedArray::kMapOffset),
1048  factory->fixed_array_map());
1049  // For non-empty JSArrays the length of the FixedArray and the JSArray is the
1050  // same.
1051  __ mov(FieldOperand(elements_array, FixedArray::kLengthOffset), array_size);
1052 
1053  // Fill the allocated FixedArray with the hole value if requested.
1054  // result: JSObject
1055  // elements_array: elements array
1056  if (fill_with_hole) {
1057  __ SmiUntag(array_size);
  // NOTE(review): the statement below is truncated in this listing (rendered
  // source line 1059 is missing — presumably the second Operand argument and
  // closing parenthesis). Verify against the original builtins-ia32.cc; edi
  // should end up pointing at the first element slot.
1058  __ lea(edi, Operand(elements_array,
1060  __ mov(eax, factory->the_hole_value());
1061  __ cld();
1062  // Do not use rep stos when filling less than kRepStosThreshold
1063  // words.
1064  const int kRepStosThreshold = 16;
1065  Label loop, entry, done;
1066  __ cmp(ecx, kRepStosThreshold);
  // Small arrays: fall into the one-word-at-a-time stos loop. Large arrays:
  // use a single rep stos (edi = destination, ecx = count, eax = hole).
1067  __ j(below, &loop); // Note: ecx > 0.
1068  __ rep_stos();
1069  __ jmp(&done);
1070  __ bind(&loop);
1071  __ stos();
1072  __ bind(&entry);
1073  __ cmp(edi, elements_array_end);
1074  __ j(below, &loop);
1075  __ bind(&done);
1076  }
1077 }
1078 
1079 
1080 // Create a new array for the built-in Array function. This function allocates
1081 // the JSArray object and the FixedArray elements array and initializes these.
1082 // If the Array cannot be constructed in native code the runtime is called. This
1083 // function assumes the following state:
1084 // edi: constructor (built-in Array function)
1085 // eax: argc
1086 // esp[0]: return address
1087 // esp[4]: last argument
1088 // This function is used for both construct and normal calls of Array. Whether
1089 // it is a construct call or not is indicated by the construct_call parameter.
1090 // The only difference between handling a construct call and a normal call is
1091 // that for a construct call the constructor function in edi needs to be
1092 // preserved for entering the generic code. In both cases argc in eax needs to
1093 // be preserved.
1094 static void ArrayNativeCode(MacroAssembler* masm,
1095  bool construct_call,
1096  Label* call_generic_code) {
1097  Label argc_one_or_more, argc_two_or_more, prepare_generic_code_call,
1098  empty_array, not_empty_array, finish, cant_transition_map, not_double;
1099 
1100  // Push the constructor and argc. No need to tag argc as a smi, as there will
1101  // be no garbage collection with this on the stack.
  // push_count tracks how many extra words sit on the stack above the return
  // address; it is used below to compute argument offsets.
1102  int push_count = 0;
1103  if (construct_call) {
1104  push_count++;
1105  __ push(edi);
1106  }
1107  push_count++;
1108  __ push(eax);
1109 
1110  // Check for array construction with zero arguments.
1111  __ test(eax, eax);
1112  __ j(not_zero, &argc_one_or_more);
1113 
1114  __ bind(&empty_array);
1115  // Handle construction of an empty array.
1116  AllocateEmptyJSArray(masm,
1117  edi,
1118  eax,
1119  ebx,
1120  ecx,
1121  edi,
1122  &prepare_generic_code_call);
1123  __ IncrementCounter(masm->isolate()->counters()->array_function_native(), 1);
1124  __ pop(ebx);
1125  if (construct_call) {
1126  __ pop(edi);
1127  }
1128  __ ret(kPointerSize);
1129 
1130  // Check for one argument. Bail out if argument is not smi or if it is
1131  // negative.
1132  __ bind(&argc_one_or_more);
1133  __ cmp(eax, 1);
1134  __ j(not_equal, &argc_two_or_more);
1135  STATIC_ASSERT(kSmiTag == 0);
1136  __ mov(ecx, Operand(esp, (push_count + 1) * kPointerSize));
1137  __ test(ecx, ecx);
1138  __ j(not_zero, &not_empty_array);
1139 
1140  // The single argument passed is zero, so we jump to the code above used to
1141  // handle the case of no arguments passed. To adapt the stack for that we move
1142  // the return address and the pushed constructor (if pushed) one stack slot up
1143  // thereby removing the passed argument. Argc is also on the stack - at the
1144  // bottom - and it needs to be changed from 1 to 0 to have the call into the
1145  // runtime system work in case a GC is required.
1146  for (int i = push_count; i > 0; i--) {
1147  __ mov(eax, Operand(esp, i * kPointerSize));
1148  __ mov(Operand(esp, (i + 1) * kPointerSize), eax);
1149  }
1150  __ Drop(2); // Drop two stack slots.
1151  __ push(Immediate(0)); // Treat this as a call with argc of zero.
1152  __ jmp(&empty_array);
1153 
1154  __ bind(&not_empty_array);
  // Bail out to the generic code if the argument is negative or not a smi
  // (sign bit or smi-tag bit set).
1155  __ test(ecx, Immediate(kIntptrSignBit | kSmiTagMask));
1156  __ j(not_zero, &prepare_generic_code_call);
1157 
1158  // Handle construction of an empty array of a certain size. Get the size from
1159  // the stack and bail out if size is too large to actually allocate an elements
1160  // array.
  // NOTE(review): the comparison instruction belonging to this jump is missing
  // from this listing (rendered source line 1161) — presumably a cmp of the
  // requested size against JSObject::kInitialMaxFastElementArray. Verify
  // against the original builtins-ia32.cc.
1162  __ j(greater_equal, &prepare_generic_code_call);
1163 
1164  // edx: array_size (smi)
1165  // edi: constructor
1166  // esp[0]: argc (cannot be 0 here)
1167  // esp[4]: constructor (only if construct_call)
1168  // esp[8]: return address
1169  // esp[C]: argument
1170  AllocateJSArray(masm,
1171  edi,
1172  ecx,
1173  ebx,
1174  eax,
1175  edx,
1176  edi,
1177  true,
1178  &prepare_generic_code_call);
1179  Counters* counters = masm->isolate()->counters();
1180  __ IncrementCounter(counters->array_function_native(), 1);
1181  __ mov(eax, ebx);
1182  __ pop(ebx);
1183  if (construct_call) {
1184  __ pop(edi);
1185  }
1186  __ ret(2 * kPointerSize);
1187 
1188  // Handle construction of an array from a list of arguments.
1189  __ bind(&argc_two_or_more);
1190  STATIC_ASSERT(kSmiTag == 0);
1191  __ SmiTag(eax); // Convert argc to a smi.
1192  // eax: array_size (smi)
1193  // edi: constructor
1194  // esp[0] : argc
1195  // esp[4]: constructor (only if construct_call)
1196  // esp[8] : return address
1197  // esp[C] : last argument
1198  AllocateJSArray(masm,
1199  edi,
1200  eax,
1201  ebx,
1202  ecx,
1203  edx,
1204  edi,
1205  false,
1206  &prepare_generic_code_call);
1207  __ IncrementCounter(counters->array_function_native(), 1);
1208  __ push(ebx);
1209  __ mov(ebx, Operand(esp, kPointerSize));
1210  // ebx: argc
1211  // edx: elements_array_end (untagged)
1212  // esp[0]: JSArray
1213  // esp[4]: argc
1214  // esp[8]: constructor (only if construct_call)
1215  // esp[12]: return address
1216  // esp[16]: last argument
1217 
1218  // Location of the last argument
1219  int last_arg_offset = (construct_call ? 4 : 3) * kPointerSize;
1220  __ lea(edi, Operand(esp, last_arg_offset));
1221 
1222  // Location of the first array element (Parameter fill_with_holes to
1223  // AllocateJSArray is false, so the FixedArray is returned in ecx).
1224  __ lea(edx, Operand(ecx, FixedArray::kHeaderSize - kHeapObjectTag));
1225 
1226  Label has_non_smi_element;
1227 
1228  // ebx: argc
1229  // edx: location of the first array element
1230  // edi: location of the last argument
1231  // esp[0]: JSArray
1232  // esp[4]: argc
1233  // esp[8]: constructor (only if construct_call)
1234  // esp[12]: return address
1235  // esp[16]: last argument
  // Copy arguments (iterated from last to first via the decrementing ecx
  // index) into the elements backing store, bailing out on the first non-smi
  // when smi-only arrays are enabled.
1236  Label loop, entry;
1237  __ mov(ecx, ebx);
1238  __ jmp(&entry);
1239  __ bind(&loop);
1240  __ mov(eax, Operand(edi, ecx, times_pointer_size, 0));
1241  if (FLAG_smi_only_arrays) {
1242  __ JumpIfNotSmi(eax, &has_non_smi_element);
1243  }
1244  __ mov(Operand(edx, 0), eax);
1245  __ add(edx, Immediate(kPointerSize));
1246  __ bind(&entry);
1247  __ dec(ecx);
1248  __ j(greater_equal, &loop);
1249 
1250  // Remove caller arguments from the stack and return.
1251  // ebx: argc
1252  // esp[0]: JSArray
1253  // esp[4]: argc
1254  // esp[8]: constructor (only if construct_call)
1255  // esp[12]: return address
1256  // esp[16]: last argument
1257  __ bind(&finish);
1258  __ mov(ecx, Operand(esp, last_arg_offset - kPointerSize));
1259  __ pop(eax);
1260  __ pop(ebx);
1261  __ lea(esp, Operand(esp, ebx, times_pointer_size,
1262  last_arg_offset - kPointerSize));
  // ecx holds the saved return address; jump rather than ret because the
  // return address was already consumed by the lea above.
1263  __ jmp(ecx);
1264 
1265  __ bind(&has_non_smi_element);
1266  // Double values are handled by the runtime.
  // NOTE(review): this CheckMap call is truncated in this listing (rendered
  // source line 1270 is missing — presumably the final smi-check-mode
  // argument and closing parenthesis). Verify against the original source.
1267  __ CheckMap(eax,
1268  masm->isolate()->factory()->heap_number_map(),
1269  &not_double,
1271  __ bind(&cant_transition_map);
1272  // Throw away the array that's only been partially constructed.
1273  __ pop(eax);
1274  __ UndoAllocationInNewSpace(eax);
1275  __ jmp(&prepare_generic_code_call);
1276 
1277  __ bind(&not_double);
1278  // Transition FAST_SMI_ELEMENTS to FAST_ELEMENTS.
1279  __ mov(ebx, Operand(esp, 0));
  // NOTE(review): rendered source lines 1280 and 1282 are missing from this
  // listing — likely a load of the array's current map and the source
  // elements kind argument (FAST_SMI_ELEMENTS). Verify against the original.
1281  __ LoadTransitionedArrayMapConditional(
1283  FAST_ELEMENTS,
1284  edi,
1285  eax,
1286  &cant_transition_map);
  // NOTE(review): rendered source lines 1287 and 1289 are missing — likely
  // the store of the transitioned map into the array and the trailing
  // RecordWriteField arguments/closing parenthesis. Verify against the
  // original source.
1288  __ RecordWriteField(ebx, HeapObject::kMapOffset, edi, eax,
1290 
1291  // Prepare to re-enter the loop
1292  __ lea(edi, Operand(esp, last_arg_offset));
1293 
1294  // Finish the array initialization loop.
  // Same copy loop as above but without the smi check: the elements kind has
  // already been transitioned to FAST_ELEMENTS.
1295  Label loop2;
1296  __ bind(&loop2);
1297  __ mov(eax, Operand(edi, ecx, times_pointer_size, 0));
1298  __ mov(Operand(edx, 0), eax);
1299  __ add(edx, Immediate(kPointerSize));
1300  __ dec(ecx);
1301  __ j(greater_equal, &loop2);
1302  __ jmp(&finish);
1303 
1304  // Restore argc and constructor before running the generic code.
1305  __ bind(&prepare_generic_code_call);
1306  __ pop(eax);
1307  if (construct_call) {
1308  __ pop(edi);
1309  }
1310  __ jmp(call_generic_code);
1311 }
1312 
1313 
1314 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
1315  // ----------- S t a t e -------------
1316  // -- eax : argc
1317  // -- esp[0] : return address
1318  // -- esp[4] : last argument
1319  // -----------------------------------
1320  Label generic_array_code;
1321 
1322  // Get the InternalArray function.
1323  __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, edi);
1324 
1325  if (FLAG_debug_code) {
1326  // Initial map for the builtin InternalArray function should be a map.
  // NOTE(review): the instruction that loads ebx is missing from this
  // listing (rendered source line 1327) — presumably a load of the
  // function's prototype-or-initial-map field into ebx. Verify against the
  // original builtins-ia32.cc.
1328  // Will both indicate a NULL and a Smi.
1329  __ test(ebx, Immediate(kSmiTagMask));
1330  __ Assert(not_zero, "Unexpected initial map for InternalArray function");
1331  __ CmpObjectType(ebx, MAP_TYPE, ecx);
1332  __ Assert(equal, "Unexpected initial map for InternalArray function");
1333  }
1334 
1335  // Run the native code for the InternalArray function called as a normal
1336  // function.
1337  ArrayNativeCode(masm, false, &generic_array_code);
1338 
1339  // Jump to the generic internal array code in case the specialized code cannot
1340  // handle the construction.
1341  __ bind(&generic_array_code);
1342  Handle<Code> array_code =
1343  masm->isolate()->builtins()->InternalArrayCodeGeneric();
1344  __ jmp(array_code, RelocInfo::CODE_TARGET);
1345 }
1346 
1347 
1348 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
1349  // ----------- S t a t e -------------
1350  // -- eax : argc
1351  // -- esp[0] : return address
1352  // -- esp[4] : last argument
1353  // -----------------------------------
1354  Label generic_array_code;
1355 
1356  // Get the Array function.
1357  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, edi);
1358 
1359  if (FLAG_debug_code) {
1360  // Initial map for the builtin Array function should be a map.
  // NOTE(review): the instruction that loads ebx is missing from this
  // listing (rendered source line 1361) — presumably a load of the
  // function's prototype-or-initial-map field into ebx. Verify against the
  // original builtins-ia32.cc.
1362  // Will both indicate a NULL and a Smi.
1363  __ test(ebx, Immediate(kSmiTagMask));
1364  __ Assert(not_zero, "Unexpected initial map for Array function");
1365  __ CmpObjectType(ebx, MAP_TYPE, ecx);
1366  __ Assert(equal, "Unexpected initial map for Array function");
1367  }
1368 
1369  // Run the native code for the Array function called as a normal function.
1370  ArrayNativeCode(masm, false, &generic_array_code);
1371 
1372  // Jump to the generic array code in case the specialized code cannot handle
1373  // the construction.
1374  __ bind(&generic_array_code);
1375  Handle<Code> array_code =
1376  masm->isolate()->builtins()->ArrayCodeGeneric();
1377  __ jmp(array_code, RelocInfo::CODE_TARGET);
1378 }
1379 
1380 
1381 void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
1382  // ----------- S t a t e -------------
1383  // -- eax : argc
1384  // -- edi : constructor
1385  // -- esp[0] : return address
1386  // -- esp[4] : last argument
1387  // -----------------------------------
1388  Label generic_constructor;
1389 
1390  if (FLAG_debug_code) {
1391  // The array construct code is only set for the global and natives
1392  // builtin Array functions which always have maps.
1393 
1394  // Initial map for the builtin Array function should be a map.
  // NOTE(review): the instruction that loads ebx is missing from this
  // listing (rendered source line 1395) — presumably a load of the
  // function's prototype-or-initial-map field into ebx. Verify against the
  // original builtins-ia32.cc.
1396  // Will both indicate a NULL and a Smi.
1397  __ test(ebx, Immediate(kSmiTagMask));
1398  __ Assert(not_zero, "Unexpected initial map for Array function");
1399  __ CmpObjectType(ebx, MAP_TYPE, ecx);
1400  __ Assert(equal, "Unexpected initial map for Array function");
1401  }
1402 
1403  // Run the native code for the Array function called as constructor.
1404  ArrayNativeCode(masm, true, &generic_constructor);
1405 
1406  // Jump to the generic construct code in case the specialized code cannot
1407  // handle the construction.
1408  __ bind(&generic_constructor);
1409  Handle<Code> generic_construct_stub =
1410  masm->isolate()->builtins()->JSConstructStubGeneric();
1411  __ jmp(generic_construct_stub, RelocInfo::CODE_TARGET);
1412 }
1413 
1414 
1415 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
1416  // ----------- S t a t e -------------
1417  // -- eax : number of arguments
1418  // -- edi : constructor function
1419  // -- esp[0] : return address
1420  // -- esp[(argc - n) * 4] : arg[n] (zero-based)
1421  // -- esp[(argc + 1) * 4] : receiver
1422  // -----------------------------------
1423  Counters* counters = masm->isolate()->counters();
1424  __ IncrementCounter(counters->string_ctor_calls(), 1);
1425 
1426  if (FLAG_debug_code) {
1427  __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, ecx);
1428  __ cmp(edi, ecx);
1429  __ Assert(equal, "Unexpected String function");
1430  }
1431 
1432  // Load the first argument into eax and get rid of the rest
1433  // (including the receiver).
1434  Label no_arguments;
1435  __ test(eax, eax);
1436  __ j(zero, &no_arguments);
1437  __ mov(ebx, Operand(esp, eax, times_pointer_size, 0));
1438  __ pop(ecx);
1439  __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
1440  __ push(ecx);
1441  __ mov(eax, ebx);
1442 
1443  // Lookup the argument in the number to string cache.
1444  Label not_cached, argument_is_string;
  // NOTE(review): the opening of this call is missing from this listing
  // (rendered source line 1445) — by the parameter list below it appears to
  // be NumberToStringStub::GenerateLookupNumberStringCache(. Verify against
  // the original builtins-ia32.cc.
1446  masm,
1447  eax, // Input.
1448  ebx, // Result.
1449  ecx, // Scratch 1.
1450  edx, // Scratch 2.
1451  false, // Input is known to be smi?
1452  &not_cached);
1453  __ IncrementCounter(counters->string_ctor_cached_number(), 1);
1454  __ bind(&argument_is_string);
1455  // ----------- S t a t e -------------
1456  // -- ebx : argument converted to string
1457  // -- edi : constructor function
1458  // -- esp[0] : return address
1459  // -----------------------------------
1460 
1461  // Allocate a JSValue and put the tagged pointer into eax.
1462  Label gc_required;
1463  __ AllocateInNewSpace(JSValue::kSize,
1464  eax, // Result.
1465  ecx, // New allocation top (we ignore it).
1466  no_reg,
1467  &gc_required,
1468  TAG_OBJECT);
1469 
1470  // Set the map.
1471  __ LoadGlobalFunctionInitialMap(edi, ecx);
1472  if (FLAG_debug_code) {
  // NOTE(review): the comparison instructions backing these two asserts are
  // missing from this listing (rendered source lines 1473-1474 and 1476) —
  // presumably cmpb checks of the map's instance size and unused property
  // fields. Verify against the original source.
1475  __ Assert(equal, "Unexpected string wrapper instance size");
1477  __ Assert(equal, "Unexpected unused properties of string wrapper");
1478  }
  // NOTE(review): rendered source line 1479 is missing — presumably the
  // store of the initial map (in ecx) into the new object's map field.
1480 
1481  // Set properties and elements.
1482  Factory* factory = masm->isolate()->factory();
1483  __ Set(ecx, Immediate(factory->empty_fixed_array()));
  // NOTE(review): rendered source lines 1484-1485 are missing — presumably
  // the stores of ecx into the properties and elements fields.
1486 
1487  // Set the value.
  // NOTE(review): rendered source line 1488 is missing — presumably the
  // store of ebx (the string) into JSValue::kValueOffset.
1489 
1490  // Ensure the object is fully initialized.
1491  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
1492 
1493  // We're done. Return.
1494  __ ret(0);
1495 
1496  // The argument was not found in the number to string cache. Check
1497  // if it's a string already before calling the conversion builtin.
1498  Label convert_argument;
1499  __ bind(&not_cached);
1500  STATIC_ASSERT(kSmiTag == 0);
1501  __ JumpIfSmi(eax, &convert_argument);
1502  Condition is_string = masm->IsObjectStringType(eax, ebx, ecx);
1503  __ j(NegateCondition(is_string), &convert_argument);
1504  __ mov(ebx, eax);
1505  __ IncrementCounter(counters->string_ctor_string_value(), 1);
1506  __ jmp(&argument_is_string);
1507 
1508  // Invoke the conversion builtin and put the result into ebx.
1509  __ bind(&convert_argument);
1510  __ IncrementCounter(counters->string_ctor_conversions(), 1);
1511  {
1512  FrameScope scope(masm, StackFrame::INTERNAL);
1513  __ push(edi); // Preserve the function.
1514  __ push(eax);
1515  __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
1516  __ pop(edi);
1517  }
1518  __ mov(ebx, eax);
1519  __ jmp(&argument_is_string);
1520 
1521  // Load the empty string into ebx, remove the receiver from the
1522  // stack, and jump back to the case where the argument is a string.
1523  __ bind(&no_arguments);
1524  __ Set(ebx, Immediate(factory->empty_string()));
1525  __ pop(ecx);
1526  __ lea(esp, Operand(esp, kPointerSize));
1527  __ push(ecx);
1528  __ jmp(&argument_is_string);
1529 
1530  // At this point the argument is already a string. Call runtime to
1531  // create a string wrapper.
1532  __ bind(&gc_required);
1533  __ IncrementCounter(counters->string_ctor_gc_required(), 1);
1534  {
1535  FrameScope scope(masm, StackFrame::INTERNAL);
1536  __ push(ebx);
1537  __ CallRuntime(Runtime::kNewStringWrapper, 1);
1538  }
1539  __ ret(0);
1540 }
1541 
1542 
1543 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Build an arguments-adaptor frame: saved ebp, sentinel, function, and the
  // actual argument count (as a smi). Expects edi = function, eax = argc.
1544  __ push(ebp);
1545  __ mov(ebp, esp);
1546 
1547  // Store the arguments adaptor context sentinel.
1548  __ push(Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
1549 
1550  // Push the function on the stack.
1551  __ push(edi);
1552 
1553  // Preserve the number of arguments on the stack. Must preserve eax,
1554  // ebx and ecx because these registers are used when copying the
1555  // arguments and the receiver.
1556  STATIC_ASSERT(kSmiTagSize == 1);
  // lea computes edi = eax + eax + kSmiTag = argc << 1, i.e. argc as a smi,
  // without clobbering eax (edi was already saved above).
1557  __ lea(edi, Operand(eax, eax, times_1, kSmiTag));
1558  __ push(edi);
1559 }
1560 
1561 
1562 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1563  // Retrieve the number of arguments from the stack.
  // NOTE(review): the load instruction is missing from this listing
  // (rendered source line 1564) — presumably a mov of the smi argument count
  // from the adaptor frame into ebx. The times_2 scaling below relies on ebx
  // holding the count as a smi. Verify against the original source.
1565 
1566  // Leave the frame.
1567  __ leave();
1568 
1569  // Remove caller arguments from the stack.
1570  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  // Pop the return address, drop (argc + 1) words (arguments plus receiver;
  // ebx is a smi so times_2 yields argc * kPointerSize), then re-push the
  // return address.
1571  __ pop(ecx);
1572  __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize)); // 1 ~ receiver
1573  __ push(ecx);
1574 }
1575 
1576 
1577 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1578  // ----------- S t a t e -------------
1579  // -- eax : actual number of arguments
1580  // -- ebx : expected number of arguments
1581  // -- ecx : call kind information
1582  // -- edx : code entry to call
1583  // -----------------------------------
1584 
1585  Label invoke, dont_adapt_arguments;
1586  __ IncrementCounter(masm->isolate()->counters()->arguments_adaptors(), 1);
1587 
1588  Label enough, too_few;
1589  __ cmp(eax, ebx);
1590  __ j(less, &too_few);
  // NOTE(review): the comparison belonging to this jump is missing from this
  // listing (rendered source line 1591) — presumably a cmp of ebx against
  // SharedFunctionInfo::kDontAdaptArgumentsSentinel. Verify against the
  // original builtins-ia32.cc.
1592  __ j(equal, &dont_adapt_arguments);
1593 
1594  { // Enough parameters: Actual >= expected.
1595  __ bind(&enough);
1596  EnterArgumentsAdaptorFrame(masm);
1597 
1598  // Copy receiver and all expected arguments.
  // eax is repurposed as a cursor over the caller's arguments (walking down
  // from the last argument); edi counts how many words have been pushed.
1599  const int offset = StandardFrameConstants::kCallerSPOffset;
1600  __ lea(eax, Operand(ebp, eax, times_4, offset));
1601  __ mov(edi, -1); // account for receiver
1602 
1603  Label copy;
1604  __ bind(&copy);
1605  __ inc(edi);
1606  __ push(Operand(eax, 0));
1607  __ sub(eax, Immediate(kPointerSize));
1608  __ cmp(edi, ebx);
1609  __ j(less, &copy);
1610  __ jmp(&invoke);
1611  }
1612 
1613  { // Too few parameters: Actual < expected.
1614  __ bind(&too_few);
1615  EnterArgumentsAdaptorFrame(masm);
1616 
1617  // Copy receiver and all actual arguments.
1618  const int offset = StandardFrameConstants::kCallerSPOffset;
1619  __ lea(edi, Operand(ebp, eax, times_4, offset));
1620  // ebx = expected - actual.
1621  __ sub(ebx, eax);
1622  // eax = -actual - 1
1623  __ neg(eax);
1624  __ sub(eax, Immediate(1));
1625 
1626  Label copy;
1627  __ bind(&copy);
1628  __ inc(eax);
1629  __ push(Operand(edi, 0));
1630  __ sub(edi, Immediate(kPointerSize));
1631  __ test(eax, eax);
1632  __ j(not_zero, &copy);
1633 
1634  // Fill remaining expected arguments with undefined values.
  // After the copy loop eax is 0; it now counts up to ebx
  // (= expected - actual) padding slots.
1635  Label fill;
1636  __ bind(&fill);
1637  __ inc(eax);
1638  __ push(Immediate(masm->isolate()->factory()->undefined_value()));
1639  __ cmp(eax, ebx);
1640  __ j(less, &fill);
1641  }
1642 
1643  // Call the entry point.
1644  __ bind(&invoke);
1645  // Restore function pointer.
  // NOTE(review): the restore instruction is missing from this listing
  // (rendered source line 1646) — presumably a mov of the function from the
  // adaptor frame back into edi before the call. Verify against the
  // original source.
1647  __ call(edx);
1648 
1649  // Store offset of return address for deoptimizer.
1650  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
1651 
1652  // Leave frame and return.
1653  LeaveArgumentsAdaptorFrame(masm);
1654  __ ret(0);
1655 
1656  // -------------------------------------------
1657  // Don't adapt arguments.
1658  // -------------------------------------------
1659  __ bind(&dont_adapt_arguments);
1660  __ jmp(edx);
1661 }
1662 
1663 
1664 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1665  CpuFeatures::TryForceFeatureScope scope(SSE2);
1666  if (!CpuFeatures::IsSupported(SSE2) && FLAG_debug_code) {
1667  __ Abort("Unreachable code: Cannot optimize without SSE2 support.");
1668  return;
1669  }
1670 
1671  // Get the loop depth of the stack guard check. This is recorded in
1672  // a test(eax, depth) instruction right after the call.
1673  Label stack_check;
1674  __ mov(ebx, Operand(esp, 0)); // return address
1675  if (FLAG_debug_code) {
  // Sanity-check that the byte at the return address is the opcode of
  // "test al, imm8", as expected by the depth-extraction below.
1676  __ cmpb(Operand(ebx, 0), Assembler::kTestAlByte);
1677  __ Assert(equal, "test eax instruction not found after loop stack check");
1678  }
  // The imm8 operand of the test instruction encodes the loop depth.
1679  __ movzx_b(ebx, Operand(ebx, 1)); // depth
1680 
1681  // Get the loop nesting level at which we allow OSR from the
1682  // unoptimized code and check if we want to do OSR yet. If not we
1683  // should perform a stack guard check so we can get interrupts while
1684  // waiting for on-stack replacement.
  // NOTE(review): the instructions backing this jump are missing from this
  // listing (rendered source lines 1685-1688) — presumably loads of the
  // function/code object and a cmpb of the recorded depth against the
  // code's allow-OSR-at-loop-nesting-level byte. Verify against the
  // original builtins-ia32.cc.
1689  __ j(greater, &stack_check);
1690 
1691  // Pass the function to optimize as the argument to the on-stack
1692  // replacement runtime function.
1693  {
1694  FrameScope scope(masm, StackFrame::INTERNAL);
1695  __ push(eax);
1696  __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
1697  }
1698 
1699  // If the result was -1 it means that we couldn't optimize the
1700  // function. Just return and continue in the unoptimized version.
1701  Label skip;
1702  __ cmp(eax, Immediate(Smi::FromInt(-1)));
1703  __ j(not_equal, &skip, Label::kNear);
1704  __ ret(0);
1705 
1706  // Insert a stack guard check so that if we decide not to perform
1707  // on-stack replacement right away, the function calling this stub can
1708  // still be interrupted.
1709  __ bind(&stack_check);
1710  Label ok;
1711  ExternalReference stack_limit =
1712  ExternalReference::address_of_stack_limit(masm->isolate());
1713  __ cmp(esp, Operand::StaticVariable(stack_limit));
1714  __ j(above_equal, &ok, Label::kNear);
1715  StackCheckStub stub;
1716  __ TailCallStub(&stub);
1717  if (FLAG_debug_code) {
1718  __ Abort("Unreachable code: returned from tail call.");
1719  }
1720  __ bind(&ok);
1721  __ ret(0);
1722 
1723  __ bind(&skip);
1724  // Untag the AST id and push it on the stack.
1725  __ SmiUntag(eax);
1726  __ push(eax);
1727 
1728  // Generate the code for doing the frame-to-frame translation using
1729  // the deoptimizer infrastructure.
1730  Deoptimizer::EntryGenerator generator(masm, Deoptimizer::OSR);
1731  generator.Generate();
1732 }
1733 
1734 
1735 #undef __
1736 }
1737 } // namespace v8::internal
1738 
1739 #endif // V8_TARGET_ARCH_IA32
const intptr_t kSmiTagMask
Definition: v8.h:3855
static const int kCodeOffset
Definition: objects.h:5606
static const int kCodeEntryOffset
Definition: objects.h:5981
static const int kPrototypeOrInitialMapOffset
Definition: objects.h:5982
static const int kAllowOSRAtLoopNestingLevelOffset
Definition: objects.h:4538
static Smi * FromInt(int value)
Definition: objects-inl.h:973
const intptr_t kIntptrSignBit
Definition: globals.h:247
const Register esp
static const int kGlobalReceiverOffset
Definition: objects.h:6085
static const int kConstructionCountOffset
Definition: objects.h:5697
static const int kNativeByteOffset
Definition: objects.h:5778
static bool IsSupported(CpuFeature f)
static const int kStrictModeBitWithinByte
Definition: objects.h:5764
#define ASSERT(condition)
Definition: checks.h:270
const int kPointerSizeLog2
Definition: globals.h:246
static const int kInstanceSizeOffset
Definition: objects.h:4981
static const int kUnusedPropertyFieldsOffset
Definition: objects.h:4993
static const int kGlobalContextOffset
Definition: objects.h:6084
static const byte kTestAlByte
static const int kContextOffset
Definition: objects.h:5986
const Register edi
static const int kSize
Definition: objects.h:8112
const Register ebp
STATIC_ASSERT((FixedDoubleArray::kHeaderSize &kDoubleAlignmentMask)==0)
const Register eax
BuiltinExtraArguments
Definition: builtins.h:35
static const int kDontAdaptArgumentsSentinel
Definition: objects.h:5601
const int kPointerSize
Definition: globals.h:234
Operand FieldOperand(Register object, int offset)
const Register ecx
const int kHeapObjectTag
Definition: v8.h:3848
#define __
static const int kPropertiesOffset
Definition: objects.h:2113
static const int kInObjectPropertiesOffset
Definition: objects.h:4983
static const int kElementsOffset
Definition: objects.h:2114
static const int kNativeBitWithinByte
Definition: objects.h:5770
static const int kArgcOffset
Definition: frames-ia32.h:81
static const int kFunctionArgOffset
Definition: frames-ia32.h:79
static const int kLengthOffset
Definition: objects.h:8111
static int SizeFor(int length)
Definition: objects.h:2288
static const int kHeaderSize
Definition: objects.h:2233
static const int kSize
Definition: objects.h:6189
static const int kMapOffset
Definition: objects.h:1219
static const int kLengthOffset
Definition: objects.h:2232
const Register ebx
static const int kReceiverArgOffset
Definition: frames-ia32.h:80
static const int kFormalParameterCountOffset
Definition: objects.h:5662
static const int kStrictModeByteOffset
Definition: objects.h:5774
const int kSmiTagSize
Definition: v8.h:3854
static const int kHeaderSize
Definition: objects.h:4513
Condition NegateCondition(Condition cond)
const Register esi
const int kSmiTag
Definition: v8.h:3853
static const int kArgvOffset
Definition: frames-ia32.h:82
static const int kHeaderSize
Definition: objects.h:2115
static void GenerateLookupNumberStringCache(MacroAssembler *masm, Register object, Register result, Register scratch1, Register scratch2, Register scratch3, bool object_is_smi, Label *not_found)
static const int kPreallocatedArrayElements
Definition: objects.h:8108
const Register no_reg
static const int kValueOffset
Definition: objects.h:6188
const Register edx
static const int kSharedFunctionInfoOffset
Definition: objects.h:5984
static const int kInitialMaxFastElementArray
Definition: objects.h:2103
FlagType type() const
Definition: flags.cc:1358
static const int kPreAllocatedPropertyFieldsOffset
Definition: objects.h:4986