v8 3.25.30 (node 0.11.13)
V8 is Google's open source JavaScript engine
builtins-mips.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 
29 
30 #include "v8.h"
31 
32 #if V8_TARGET_ARCH_MIPS
33 
34 #include "codegen.h"
35 #include "debug.h"
36 #include "deoptimizer.h"
37 #include "full-codegen.h"
38 #include "runtime.h"
39 #include "stub-cache.h"
40 
41 namespace v8 {
42 namespace internal {
43 
44 
45 #define __ ACCESS_MASM(masm)
46 
47 
48 void Builtins::Generate_Adaptor(MacroAssembler* masm,
49  CFunctionId id,
50  BuiltinExtraArguments extra_args) {
51  // ----------- S t a t e -------------
52  // -- a0 : number of arguments excluding receiver
53  // -- a1 : called function (only guaranteed when
54  // -- extra_args requires it)
55  // -- cp : context
56  // -- sp[0] : last argument
57  // -- ...
58  // -- sp[4 * (argc - 1)] : first argument
59  // -- sp[4 * argc] : receiver
60  // -----------------------------------
61 
62  // Insert extra arguments.
63  int num_extra_args = 0;
64  if (extra_args == NEEDS_CALLED_FUNCTION) {
65  num_extra_args = 1;
66  __ push(a1);
67  } else {
68  ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
69  }
70 
71  // JumpToExternalReference expects s0 to contain the number of arguments
72  // including the receiver and the extra arguments.
73  __ Addu(s0, a0, num_extra_args + 1);
74  __ sll(s1, s0, kPointerSizeLog2);
75  __ Subu(s1, s1, kPointerSize);
76  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
77 }
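The arithmetic above sizes the outgoing runtime call: s0 becomes the argument count including the receiver and any extra argument, and s1 becomes the byte offset of the receiver slot. A standalone sketch of the same computation (the argument count is a made-up example value; 4-byte pointers are assumed, as on MIPS32):

  #include <cstdio>

  int main() {
    const int kPointerSize = 4, kPointerSizeLog2 = 2;   // MIPS32 assumptions
    int a0 = 3;              // arguments excluding the receiver (example value)
    int num_extra_args = 1;  // NEEDS_CALLED_FUNCTION pushed the function (a1)
    int s0 = a0 + num_extra_args + 1;                    // include the receiver
    int s1 = (s0 << kPointerSizeLog2) - kPointerSize;    // bytes below the receiver slot
    std::printf("s0 = %d arguments, s1 = %d bytes\n", s0, s1);  // 5 and 16
    return 0;
  }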
78 
79 
80 // Load the built-in InternalArray function from the current context.
81 static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
82  Register result) {
83  // Load the native context.
84 
85  __ lw(result,
86        MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
87  __ lw(result,
88        FieldMemOperand(result, GlobalObject::kNativeContextOffset));
89  // Load the InternalArray function from the native context.
90  __ lw(result,
91        MemOperand(result,
92                   Context::SlotOffset(
93                       Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
94 }
95 
96 
97 // Load the built-in Array function from the current context.
98 static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
99  // Load the native context.
100 
101  __ lw(result,
102        MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
103  __ lw(result,
104        FieldMemOperand(result, GlobalObject::kNativeContextOffset));
105  // Load the Array function from the native context.
106  __ lw(result,
107        MemOperand(result,
108                   Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
109 }
110 
111 
112 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
113  // ----------- S t a t e -------------
114  // -- a0 : number of arguments
115  // -- ra : return address
116  // -- sp[...]: constructor arguments
117  // -----------------------------------
118  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
119 
120  // Get the InternalArray function.
121  GenerateLoadInternalArrayFunction(masm, a1);
122 
123  if (FLAG_debug_code) {
124  // Initial map for the builtin InternalArray functions should be maps.
125  __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
126  __ SmiTst(a2, t0);
127  __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction,
128  t0, Operand(zero_reg));
129  __ GetObjectType(a2, a3, t0);
130  __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction,
131  t0, Operand(MAP_TYPE));
132  }
133 
134  // Run the native code for the InternalArray function called as a normal
135  // function.
136  // Tail call a stub.
137  InternalArrayConstructorStub stub(masm->isolate());
138  __ TailCallStub(&stub);
139 }
140 
141 
142 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
143  // ----------- S t a t e -------------
144  // -- a0 : number of arguments
145  // -- ra : return address
146  // -- sp[...]: constructor arguments
147  // -----------------------------------
148  Label generic_array_code;
149 
150  // Get the Array function.
151  GenerateLoadArrayFunction(masm, a1);
152 
153  if (FLAG_debug_code) {
154  // Initial map for the builtin Array functions should be maps.
155  __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
156  __ SmiTst(a2, t0);
157  __ Assert(ne, kUnexpectedInitialMapForArrayFunction1,
158  t0, Operand(zero_reg));
159  __ GetObjectType(a2, a3, t0);
160  __ Assert(eq, kUnexpectedInitialMapForArrayFunction2,
161  t0, Operand(MAP_TYPE));
162  }
163 
164  // Run the native code for the Array function called as a normal function.
165  // Tail call a stub.
166  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
167  ArrayConstructorStub stub(masm->isolate());
168  __ TailCallStub(&stub);
169 }
170 
171 
172 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
173  // ----------- S t a t e -------------
174  // -- a0 : number of arguments
175  // -- a1 : constructor function
176  // -- ra : return address
177  // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
178  // -- sp[argc * 4] : receiver
179  // -----------------------------------
180  Counters* counters = masm->isolate()->counters();
181  __ IncrementCounter(counters->string_ctor_calls(), 1, a2, a3);
182 
183  Register function = a1;
184  if (FLAG_debug_code) {
185  __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, a2);
186  __ Assert(eq, kUnexpectedStringFunction, function, Operand(a2));
187  }
188 
189  // Load the first argument in a0 and get rid of the rest.
190  Label no_arguments;
191  __ Branch(&no_arguments, eq, a0, Operand(zero_reg));
192  // First argument is at sp[(argc - 1) * 4].
193  __ Subu(a0, a0, Operand(1));
194  __ sll(a0, a0, kPointerSizeLog2);
195  __ Addu(sp, a0, sp);
196  __ lw(a0, MemOperand(sp));
197  // sp now points to args[0]; drop args[0] and the receiver.
198  __ Drop(2);
199 
200  Register argument = a2;
201  Label not_cached, argument_is_string;
202  __ LookupNumberStringCache(a0, // Input.
203  argument, // Result.
204  a3, // Scratch.
205  t0, // Scratch.
206  t1, // Scratch.
207  &not_cached);
208  __ IncrementCounter(counters->string_ctor_cached_number(), 1, a3, t0);
209  __ bind(&argument_is_string);
210 
211  // ----------- S t a t e -------------
212  // -- a2 : argument converted to string
213  // -- a1 : constructor function
214  // -- ra : return address
215  // -----------------------------------
216 
217  Label gc_required;
218  __ Allocate(JSValue::kSize,
219  v0, // Result.
220  a3, // Scratch.
221  t0, // Scratch.
222  &gc_required,
223  TAG_OBJECT);
224 
225  // Initialising the String Object.
226  Register map = a3;
227  __ LoadGlobalFunctionInitialMap(function, map, t0);
228  if (FLAG_debug_code) {
230  __ Assert(eq, kUnexpectedStringWrapperInstanceSize,
231  t0, Operand(JSValue::kSize >> kPointerSizeLog2));
233  __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper,
234  t0, Operand(zero_reg));
235  }
237 
238  __ LoadRoot(a3, Heap::kEmptyFixedArrayRootIndex);
241 
242  __ sw(argument, FieldMemOperand(v0, JSValue::kValueOffset));
243 
244  // Ensure the object is fully initialized.
246 
247  __ Ret();
248 
249  // The argument was not found in the number to string cache. Check
250  // if it's a string already before calling the conversion builtin.
251  Label convert_argument;
252  __ bind(&not_cached);
253  __ JumpIfSmi(a0, &convert_argument);
254 
255  // Is it a String?
259  __ And(t0, a3, Operand(kIsNotStringMask));
260  __ Branch(&convert_argument, ne, t0, Operand(zero_reg));
261  __ mov(argument, a0);
262  __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0);
263  __ Branch(&argument_is_string);
264 
265  // Invoke the conversion builtin and put the result into a2.
266  __ bind(&convert_argument);
267  __ push(function); // Preserve the function.
268  __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0);
269  {
270  FrameScope scope(masm, StackFrame::INTERNAL);
271  __ push(a0);
272  __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
273  }
274  __ pop(function);
275  __ mov(argument, v0);
276  __ Branch(&argument_is_string);
277 
278  // Load the empty string into a2, remove the receiver from the
279  // stack, and jump back to the case where the argument is a string.
280  __ bind(&no_arguments);
281  __ LoadRoot(argument, Heap::kempty_stringRootIndex);
282  __ Drop(1);
283  __ Branch(&argument_is_string);
284 
285  // At this point the argument is already a string. Call runtime to
286  // create a string wrapper.
287  __ bind(&gc_required);
288  __ IncrementCounter(counters->string_ctor_gc_required(), 1, a3, t0);
289  {
290  FrameScope scope(masm, StackFrame::INTERNAL);
291  __ push(argument);
292  __ CallRuntime(Runtime::kNewStringWrapper, 1);
293  }
294  __ Ret();
295 }
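The wrapper allocated above is a JSValue: the standard JSObject header (map, properties, elements) followed by the string payload, which is why the debug check expects an instance size of JSValue::kSize >> kPointerSizeLog2 words. A minimal layout sketch under the same 32-bit assumptions (the struct name, field names, and offsets are illustrative, not the V8 declarations):

  #include <cstdint>
  #include <cstdio>

  // Hypothetical mirror of the object the stub fills in:
  // map, properties, elements (the JSObject header), then the wrapped string.
  struct StringWrapperSketch {
    uint32_t map;         // initial map of the String function
    uint32_t properties;  // empty_fixed_array
    uint32_t elements;    // empty_fixed_array
    uint32_t value;       // the argument converted to a string
  };

  int main() {
    // 16 bytes == 4 words, matching the kUnexpectedStringWrapperInstanceSize check.
    std::printf("size = %zu bytes (%zu words)\n",
                sizeof(StringWrapperSketch), sizeof(StringWrapperSketch) / 4);
    return 0;
  }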
296 
297 
298 static void CallRuntimePassFunction(
299  MacroAssembler* masm, Runtime::FunctionId function_id) {
300  FrameScope scope(masm, StackFrame::INTERNAL);
301  // Push a copy of the function onto the stack.
302  // Push the function as the parameter to the runtime call.
303  __ Push(a1, a1);
304 
305  __ CallRuntime(function_id, 1);
306  // Restore the function.
307  __ Pop(a1);
308 }
309 
310 
311 static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
312  __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
313  __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
314  __ Addu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
315  __ Jump(at);
316 }
317 
318 
319 static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
320  __ Addu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
321  __ Jump(at);
322 }
323 
324 
325 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
326  // Checking whether the queued function is ready for install is optional,
327  // since we come across interrupts and stack checks elsewhere. However,
328  // not checking may delay installing ready functions, and always checking
329  // would be quite expensive. A good compromise is to first check against
330  // stack limit as a cue for an interrupt signal.
331  Label ok;
332  __ LoadRoot(t0, Heap::kStackLimitRootIndex);
333  __ Branch(&ok, hs, sp, Operand(t0));
334 
335  CallRuntimePassFunction(masm, Runtime::kHiddenTryInstallOptimizedCode);
336  GenerateTailCallToReturnedCode(masm);
337 
338  __ bind(&ok);
339  GenerateTailCallToSharedCode(masm);
340 }
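Restated as ordinary control flow, the builtin above is just a cheap guard: it only asks the runtime for optimized code when the stack limit has been hit, which is how V8 signals pending interrupts. A plain C++ sketch of that decision (the function name and the addresses are illustrative, not V8 API):

  #include <cstdio>

  // True when sp has dropped below the limit, i.e. an interrupt is pending.
  static bool StackLimitHit(unsigned long sp, unsigned long limit) {
    return sp < limit;
  }

  int main() {
    unsigned long sp = 0x7fff0000ul, limit = 0x7fff8000ul;  // hypothetical values
    if (StackLimitHit(sp, limit))
      std::printf("call TryInstallOptimizedCode, then jump to the returned code\n");
    else
      std::printf("skip the check; jump straight to the shared (unoptimized) code\n");
    return 0;
  }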
341 
342 
343 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
344  bool is_api_function,
345  bool count_constructions,
346  bool create_memento) {
347  // ----------- S t a t e -------------
348  // -- a0 : number of arguments
349  // -- a1 : constructor function
350  // -- a2 : allocation site or undefined
351  // -- ra : return address
352  // -- sp[...]: constructor arguments
353  // -----------------------------------
354 
355  // Should never count constructions for api objects.
356  ASSERT(!is_api_function || !count_constructions);
357 
358  // Should never create mementos for api functions.
359  ASSERT(!is_api_function || !create_memento);
360 
361  // Should never create mementos before slack tracking is finished.
362  ASSERT(!count_constructions || !create_memento);
363 
364  Isolate* isolate = masm->isolate();
365 
366  // ----------- S t a t e -------------
367  // -- a0 : number of arguments
368  // -- a1 : constructor function
369  // -- ra : return address
370  // -- sp[...]: constructor arguments
371  // -----------------------------------
372 
373  // Enter a construct frame.
374  {
375  FrameScope scope(masm, StackFrame::CONSTRUCT);
376 
377  if (create_memento) {
378  __ AssertUndefinedOrAllocationSite(a2, a3);
379  __ push(a2);
380  }
381 
382  // Preserve the two incoming parameters on the stack.
383  __ sll(a0, a0, kSmiTagSize); // Tag arguments count.
384  __ MultiPushReversed(a0.bit() | a1.bit());
385 
386  // Use t7 to hold undefined, which is used in several places below.
387  __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
388 
389  Label rt_call, allocated;
390  // Try to allocate the object without transitioning into C code. If any of
391  // the preconditions is not met, the code bails out to the runtime call.
392  if (FLAG_inline_new) {
393  Label undo_allocation;
394 #ifdef ENABLE_DEBUGGER_SUPPORT
395  ExternalReference debug_step_in_fp =
396  ExternalReference::debug_step_in_fp_address(isolate);
397  __ li(a2, Operand(debug_step_in_fp));
398  __ lw(a2, MemOperand(a2));
399  __ Branch(&rt_call, ne, a2, Operand(zero_reg));
400 #endif
401 
402  // Load the initial map and verify that it is in fact a map.
403  // a1: constructor function
405  __ JumpIfSmi(a2, &rt_call);
406  __ GetObjectType(a2, a3, t4);
407  __ Branch(&rt_call, ne, t4, Operand(MAP_TYPE));
408 
409  // Check that the constructor is not constructing a JSFunction (see
410  // comments in Runtime_NewObject in runtime.cc), in which case the
411  // initial map's instance type would be JS_FUNCTION_TYPE.
412  // a1: constructor function
413  // a2: initial map
415  __ Branch(&rt_call, eq, a3, Operand(JS_FUNCTION_TYPE));
416 
417  if (count_constructions) {
418  Label allocate;
419  // Decrease generous allocation count.
421  MemOperand constructor_count =
423  __ lbu(t0, constructor_count);
424  __ Subu(t0, t0, Operand(1));
425  __ sb(t0, constructor_count);
426  __ Branch(&allocate, ne, t0, Operand(zero_reg));
427 
428  __ Push(a1, a2, a1); // a1 = Constructor.
429  // The call will replace the stub, so the countdown is only done once.
430  __ CallRuntime(Runtime::kHiddenFinalizeInstanceSize, 1);
431 
432  __ Pop(a1, a2);
433 
434  __ bind(&allocate);
435  }
436 
437  // Now allocate the JSObject on the heap.
438  // a1: constructor function
439  // a2: initial map
441  if (create_memento) {
442  __ Addu(a3, a3, Operand(AllocationMemento::kSize / kPointerSize));
443  }
444 
445  __ Allocate(a3, t4, t5, t6, &rt_call, SIZE_IN_WORDS);
446 
447  // Allocated the JSObject, now initialize the fields. Map is set to
448  // initial map and properties and elements are set to empty fixed array.
449  // a1: constructor function
450  // a2: initial map
451  // a3: object size (not including memento if create_memento)
452  // t4: JSObject (not tagged)
453  __ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex);
454  __ mov(t5, t4);
455  __ sw(a2, MemOperand(t5, JSObject::kMapOffset));
458  __ Addu(t5, t5, Operand(3*kPointerSize));
462 
463  // Fill all the in-object properties with appropriate filler.
464  // a1: constructor function
465  // a2: initial map
466  // a3: object size (in words, including memento if create_memento)
467  // t4: JSObject (not tagged)
468  // t5: First in-object property of JSObject (not tagged)
470 
471  if (count_constructions) {
472  __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
475  kBitsPerByte);
476  __ sll(at, a0, kPointerSizeLog2);
477  __ addu(a0, t5, at);
478  __ sll(at, a3, kPointerSizeLog2);
479  __ Addu(t6, t4, Operand(at)); // End of object.
480  // a0: offset of first field after pre-allocated fields
481  if (FLAG_debug_code) {
482  __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields,
483  a0, Operand(t6));
484  }
485  __ InitializeFieldsWithFiller(t5, a0, t7);
486  // To allow for truncation.
487  __ LoadRoot(t7, Heap::kOnePointerFillerMapRootIndex);
488  __ InitializeFieldsWithFiller(t5, t6, t7);
489  } else if (create_memento) {
490  __ Subu(t7, a3, Operand(AllocationMemento::kSize / kPointerSize));
491  __ sll(at, t7, kPointerSizeLog2);
492  __ Addu(a0, t4, Operand(at)); // End of object.
493  __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
494  __ InitializeFieldsWithFiller(t5, a0, t7);
495 
496  // Fill in memento fields.
497  // t5: points to the allocated but uninitialized memento.
498  __ LoadRoot(t7, Heap::kAllocationMementoMapRootIndex);
500  __ sw(t7, MemOperand(t5));
501  __ Addu(t5, t5, kPointerSize);
502  // Load the AllocationSite.
503  __ lw(t7, MemOperand(sp, 2 * kPointerSize));
505  __ sw(t7, MemOperand(t5));
506  __ Addu(t5, t5, kPointerSize);
507  } else {
508  __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
509  __ sll(at, a3, kPointerSizeLog2);
510  __ Addu(a0, t4, Operand(at)); // End of object.
511  __ InitializeFieldsWithFiller(t5, a0, t7);
512  }
513 
514  // Add the object tag to make the JSObject real, so that we can continue
515  // and jump into the continuation code at any time from now on. Any
516  // failures need to undo the allocation, so that the heap is in a
517  // consistent state and verifiable.
518  __ Addu(t4, t4, Operand(kHeapObjectTag));
519 
520  // Check if a non-empty properties array is needed. Continue with the
521  // allocated object if not; fall through to the runtime call if it is.
522  // a1: constructor function
523  // t4: JSObject
524  // t5: start of next object (not tagged)
526  // The instance-sizes field contains both the pre-allocated property fields
527  // and the in-object properties.
530  kBitsPerByte);
531  __ Addu(a3, a3, Operand(t6));
533  kBitsPerByte);
534  __ subu(a3, a3, t6);
535 
536  // Done if no extra properties are to be allocated.
537  __ Branch(&allocated, eq, a3, Operand(zero_reg));
538  __ Assert(greater_equal, kPropertyAllocationCountFailed,
539  a3, Operand(zero_reg));
540 
541  // Scale the number of elements by pointer size and add the header for
542  // FixedArrays to the start of the next object calculation from above.
543  // a1: constructor
544  // a3: number of elements in properties array
545  // t4: JSObject
546  // t5: start of next object
547  __ Addu(a0, a3, Operand(FixedArray::kHeaderSize / kPointerSize));
548  __ Allocate(
549  a0,
550  t5,
551  t6,
552  a2,
553  &undo_allocation,
554  static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));
555 
556  // Initialize the FixedArray.
557  // a1: constructor
558  // a3: number of elements in properties array (untagged)
559  // t4: JSObject
560  // t5: start of next object
561  __ LoadRoot(t6, Heap::kFixedArrayMapRootIndex);
562  __ mov(a2, t5);
563  __ sw(t6, MemOperand(a2, JSObject::kMapOffset));
564  __ sll(a0, a3, kSmiTagSize);
566  __ Addu(a2, a2, Operand(2 * kPointerSize));
567 
570 
571  // Initialize the fields to undefined.
572  // a1: constructor
573  // a2: First element of FixedArray (not tagged)
574  // a3: number of elements in properties array
575  // t4: JSObject
576  // t5: FixedArray (not tagged)
577  __ sll(t3, a3, kPointerSizeLog2);
578  __ addu(t6, a2, t3); // End of object.
580  { Label loop, entry;
581  if (count_constructions) {
582  __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
583  } else if (FLAG_debug_code) {
584  __ LoadRoot(t8, Heap::kUndefinedValueRootIndex);
585  __ Assert(eq, kUndefinedValueNotLoaded, t7, Operand(t8));
586  }
587  __ jmp(&entry);
588  __ bind(&loop);
589  __ sw(t7, MemOperand(a2));
590  __ addiu(a2, a2, kPointerSize);
591  __ bind(&entry);
592  __ Branch(&loop, less, a2, Operand(t6));
593  }
594 
595  // Store the initialized FixedArray into the properties field of
596  // the JSObject.
597  // a1: constructor function
598  // t4: JSObject
599  // t5: FixedArray (not tagged)
600  __ Addu(t5, t5, Operand(kHeapObjectTag)); // Add the heap tag.
602 
603  // Continue with JSObject being successfully allocated.
604  // a1: constructor function
605  // t4: JSObject
606  __ jmp(&allocated);
607 
608  // Undo the setting of the new top so that the heap is verifiable. For
609  // example, the map's unused properties potentially do not match the
610  // allocated object's unused properties.
611  // t4: JSObject (previous new top)
612  __ bind(&undo_allocation);
613  __ UndoAllocationInNewSpace(t4, t5);
614  }
615 
616  // Allocate the new receiver object using the runtime call.
617  // a1: constructor function
618  __ bind(&rt_call);
619  if (create_memento) {
620  // Get the cell or allocation site.
621  __ lw(a2, MemOperand(sp, 2 * kPointerSize));
622  __ push(a2);
623  }
624 
625  __ push(a1); // Argument for Runtime_NewObject.
626  if (create_memento) {
627  __ CallRuntime(Runtime::kHiddenNewObjectWithAllocationSite, 2);
628  } else {
629  __ CallRuntime(Runtime::kHiddenNewObject, 1);
630  }
631  __ mov(t4, v0);
632 
633  // If we ended up using the runtime, and we want a memento, then the
634  // runtime call made it for us, and we shouldn't do the create-count
635  // increment here.
636  Label count_incremented;
637  if (create_memento) {
638  __ jmp(&count_incremented);
639  }
640 
641  // Receiver for constructor call allocated.
642  // t4: JSObject
643 
644  if (create_memento) {
645  __ lw(a2, MemOperand(sp, kPointerSize * 2));
646  __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
647  __ Branch(&count_incremented, eq, a2, Operand(t5));
648  // a2 is an AllocationSite. We are creating a memento from it, so we
649  // need to increment the memento create count.
650  __ lw(a3, FieldMemOperand(a2,
652  __ Addu(a3, a3, Operand(Smi::FromInt(1)));
653  __ sw(a3, FieldMemOperand(a2,
655  __ bind(&count_incremented);
656  }
657 
658  __ bind(&allocated);
659  __ Push(t4, t4);
660 
661  // Reload the number of arguments from the stack.
662  // sp[0]: receiver
663  // sp[1]: receiver
664  // sp[2]: constructor function
665  // sp[3]: number of arguments (smi-tagged)
666  __ lw(a1, MemOperand(sp, 2 * kPointerSize));
667  __ lw(a3, MemOperand(sp, 3 * kPointerSize));
668 
669  // Set up pointer to last argument.
670  __ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
671 
672  // Set up number of arguments for function call below.
673  __ srl(a0, a3, kSmiTagSize);
674 
675  // Copy arguments and receiver to the expression stack.
676  // a0: number of arguments
677  // a1: constructor function
678  // a2: address of last argument (caller sp)
679  // a3: number of arguments (smi-tagged)
680  // sp[0]: receiver
681  // sp[1]: receiver
682  // sp[2]: constructor function
683  // sp[3]: number of arguments (smi-tagged)
684  Label loop, entry;
685  __ jmp(&entry);
686  __ bind(&loop);
687  __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
688  __ Addu(t0, a2, Operand(t0));
689  __ lw(t1, MemOperand(t0));
690  __ push(t1);
691  __ bind(&entry);
692  __ Addu(a3, a3, Operand(-2));
693  __ Branch(&loop, greater_equal, a3, Operand(zero_reg));
694 
695  // Call the function.
696  // a0: number of arguments
697  // a1: constructor function
698  if (is_api_function) {
700  Handle<Code> code =
701  masm->isolate()->builtins()->HandleApiCallConstruct();
702  __ Call(code, RelocInfo::CODE_TARGET);
703  } else {
704  ParameterCount actual(a0);
705  __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());
706  }
707 
708  // Store offset of return address for deoptimizer.
709  if (!is_api_function && !count_constructions) {
710  masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
711  }
712 
713  // Restore context from the frame.
715 
716  // If the result is an object (in the ECMA sense), we should get rid
717  // of the receiver and use the result; see ECMA-262 section 13.2.2-7
718  // on page 74.
719  Label use_receiver, exit;
720 
721  // If the result is a smi, it is *not* an object in the ECMA sense.
722  // v0: result
723  // sp[0]: receiver (newly allocated object)
724  // sp[1]: constructor function
725  // sp[2]: number of arguments (smi-tagged)
726  __ JumpIfSmi(v0, &use_receiver);
727 
728  // If the type of the result (stored in its map) is less than
729  // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
730  __ GetObjectType(v0, a1, a3);
731  __ Branch(&exit, greater_equal, a3, Operand(FIRST_SPEC_OBJECT_TYPE));
732 
733  // Throw away the result of the constructor invocation and use the
734  // on-stack receiver as the result.
735  __ bind(&use_receiver);
736  __ lw(v0, MemOperand(sp));
737 
738  // Remove receiver from the stack, remove caller arguments, and
739  // return.
740  __ bind(&exit);
741  // v0: result
742  // sp[0]: receiver (newly allocated object)
743  // sp[1]: constructor function
744  // sp[2]: number of arguments (smi-tagged)
745  __ lw(a1, MemOperand(sp, 2 * kPointerSize));
746 
747  // Leave construct frame.
748  }
749 
750  __ sll(t0, a1, kPointerSizeLog2 - 1);
751  __ Addu(sp, sp, t0);
752  __ Addu(sp, sp, kPointerSize);
753  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2);
754  __ Ret();
755 }
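The epilogue above pops the caller's arguments using the smi-tagged count in a1: shifting a smi (already value << 1 on 32-bit) left by kPointerSizeLog2 - 1 yields value * kPointerSize bytes, and one more kPointerSize is added for the receiver slot. A worked example with an assumed argument count:

  #include <cstdio>

  int main() {
    const int kPointerSize = 4, kPointerSizeLog2 = 2, kSmiTagSize = 1;
    int argc = 3;                                      // example value
    int smi_argc = argc << kSmiTagSize;                // smi-tagged count: 6
    int bytes = smi_argc << (kPointerSizeLog2 - 1);    // 12 == argc * kPointerSize
    bytes += kPointerSize;                             // plus the receiver slot
    std::printf("sp is bumped by %d bytes\n", bytes);  // 16
    return 0;
  }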
756 
757 
758 void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
759  Generate_JSConstructStubHelper(masm, false, true, false);
760 }
761 
762 
763 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
764  Generate_JSConstructStubHelper(masm, false, false, FLAG_pretenuring_call_new);
765 }
766 
767 
768 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
769  Generate_JSConstructStubHelper(masm, true, false, false);
770 }
771 
772 
773 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
774  bool is_construct) {
775  // Called from JSEntryStub::GenerateBody
776 
777  // ----------- S t a t e -------------
778  // -- a0: code entry
779  // -- a1: function
780  // -- a2: receiver_pointer
781  // -- a3: argc
782  // -- s0: argv
783  // -----------------------------------
785 
786  // Clear the context before we push it when entering the JS frame.
787  __ mov(cp, zero_reg);
788 
789  // Enter an internal frame.
790  {
791  FrameScope scope(masm, StackFrame::INTERNAL);
792 
793  // Set up the context from the function argument.
795 
796  // Push the function and the receiver onto the stack.
797  __ Push(a1, a2);
798 
799  // Copy arguments to the stack in a loop.
800  // a3: argc
801  // s0: argv, i.e. points to first arg
802  Label loop, entry;
803  __ sll(t0, a3, kPointerSizeLog2);
804  __ addu(t2, s0, t0);
805  __ b(&entry);
806  __ nop(); // Branch delay slot nop.
807  // t2 points past last arg.
808  __ bind(&loop);
809  __ lw(t0, MemOperand(s0)); // Read next parameter.
810  __ addiu(s0, s0, kPointerSize);
811  __ lw(t0, MemOperand(t0)); // Dereference handle.
812  __ push(t0); // Push parameter.
813  __ bind(&entry);
814  __ Branch(&loop, ne, s0, Operand(t2));
815 
816  // Initialize all JavaScript callee-saved registers, since they will be seen
817  // by the garbage collector as part of handlers.
818  __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
819  __ mov(s1, t0);
820  __ mov(s2, t0);
821  __ mov(s3, t0);
822  __ mov(s4, t0);
823  __ mov(s5, t0);
824  // s6 holds the root address. Do not clobber.
825  // s7 is cp. Do not init.
826 
827  // Invoke the code and pass argc as a0.
828  __ mov(a0, a3);
829  if (is_construct) {
830  // No type feedback cell is available
831  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
832  CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
833  __ CallStub(&stub);
834  } else {
835  ParameterCount actual(a0);
836  __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());
837  }
838 
839  // Leave internal frame.
840  }
841 
842  __ Jump(ra);
843 }
844 
845 
846 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
847  Generate_JSEntryTrampolineHelper(masm, false);
848 }
849 
850 
851 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
852  Generate_JSEntryTrampolineHelper(masm, true);
853 }
854 
855 
856 void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) {
857  CallRuntimePassFunction(masm, Runtime::kHiddenCompileUnoptimized);
858  GenerateTailCallToReturnedCode(masm);
859 }
860 
861 
862 static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
863  FrameScope scope(masm, StackFrame::INTERNAL);
864  // Push a copy of the function onto the stack.
865  // Push function as parameter to the runtime call.
866  __ Push(a1, a1);
867  // Whether to compile in a background thread.
868  __ Push(masm->isolate()->factory()->ToBoolean(concurrent));
869 
870  __ CallRuntime(Runtime::kHiddenCompileOptimized, 2);
871  // Restore the function.
872  __ Pop(a1);
873 }
874 
875 
876 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
877  CallCompileOptimized(masm, false);
878  GenerateTailCallToReturnedCode(masm);
879 }
880 
881 
882 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
883  CallCompileOptimized(masm, true);
884  GenerateTailCallToReturnedCode(masm);
885 }
886 
887 
888 
889 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
890  // For now, we are relying on the fact that make_code_young doesn't do any
891  // garbage collection which allows us to save/restore the registers without
892  // worrying about which of them contain pointers. We also don't build an
893  // internal frame to make the code faster, since we shouldn't have to do stack
894  // crawls in MakeCodeYoung. This seems a bit fragile.
895 
896  // Set a0 to point to the head of the PlatformCodeAge sequence.
897  __ Subu(a0, a0,
898  Operand((kNoCodeAgeSequenceLength - 1) * Assembler::kInstrSize));
899 
900  // The following registers must be saved and restored when calling through to
901  // the runtime:
902  // a0 - contains return address (beginning of patch sequence)
903  // a1 - isolate
904  RegList saved_regs =
905  (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit();
906  FrameScope scope(masm, StackFrame::MANUAL);
907  __ MultiPush(saved_regs);
908  __ PrepareCallCFunction(2, 0, a2);
909  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
910  __ CallCFunction(
911  ExternalReference::get_make_code_young_function(masm->isolate()), 2);
912  __ MultiPop(saved_regs);
913  __ Jump(a0);
914 }
915 
916 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
917 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
918  MacroAssembler* masm) { \
919  GenerateMakeCodeYoungAgainCommon(masm); \
920 } \
921 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
922  MacroAssembler* masm) { \
923  GenerateMakeCodeYoungAgainCommon(masm); \
924 }
925 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
926 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
927 
928 
929 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
930  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
931  // that make_code_young doesn't do any garbage collection which allows us to
932  // save/restore the registers without worrying about which of them contain
933  // pointers.
934 
935  // Set a0 to point to the head of the PlatformCodeAge sequence.
936  __ Subu(a0, a0,
937  Operand((kNoCodeAgeSequenceLength - 1) * Assembler::kInstrSize));
938 
939  // The following registers must be saved and restored when calling through to
940  // the runtime:
941  // a0 - contains return address (beginning of patch sequence)
942  // a1 - isolate
943  RegList saved_regs =
944  (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit();
945  FrameScope scope(masm, StackFrame::MANUAL);
946  __ MultiPush(saved_regs);
947  __ PrepareCallCFunction(2, 0, a2);
948  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
949  __ CallCFunction(
950  ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
951  2);
952  __ MultiPop(saved_regs);
953 
954  // Perform prologue operations usually performed by the young code stub.
955  __ Push(ra, fp, cp, a1);
957 
958  // Jump to point after the code-age stub.
959  __ Addu(a0, a0, Operand((kNoCodeAgeSequenceLength) * Assembler::kInstrSize));
960  __ Jump(a0);
961 }
962 
963 
964 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
965  GenerateMakeCodeYoungAgainCommon(masm);
966 }
967 
968 
969 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
970  SaveFPRegsMode save_doubles) {
971  {
972  FrameScope scope(masm, StackFrame::INTERNAL);
973 
974  // Preserve registers across the notification; this is important for compiled
975  // stubs that tail-call the runtime on deopts, passing their parameters in
976  // registers.
977  __ MultiPush(kJSCallerSaved | kCalleeSaved);
978  // Pass the function and deoptimization type to the runtime system.
979  __ CallRuntime(Runtime::kHiddenNotifyStubFailure, 0, save_doubles);
980  __ MultiPop(kJSCallerSaved | kCalleeSaved);
981  }
982 
983  __ Addu(sp, sp, Operand(kPointerSize)); // Ignore state
984  __ Jump(ra); // Jump to miss handler
985 }
986 
987 
988 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
989  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
990 }
991 
992 
993 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
994  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
995 }
996 
997 
998 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
1000  {
1001  FrameScope scope(masm, StackFrame::INTERNAL);
1002  // Pass the function and deoptimization type to the runtime system.
1003  __ li(a0, Operand(Smi::FromInt(static_cast<int>(type))));
1004  __ push(a0);
1005  __ CallRuntime(Runtime::kHiddenNotifyDeoptimized, 1);
1006  }
1007 
1008  // Get the full codegen state from the stack and untag it -> t2.
1009  __ lw(t2, MemOperand(sp, 0 * kPointerSize));
1010  __ SmiUntag(t2);
1011  // Switch on the state.
1012  Label with_tos_register, unknown_state;
1013  __ Branch(&with_tos_register,
1014  ne, t2, Operand(FullCodeGenerator::NO_REGISTERS));
1015  __ Ret(USE_DELAY_SLOT);
1016  // Safe to fill the delay slot; Addu will emit one instruction.
1017  __ Addu(sp, sp, Operand(1 * kPointerSize)); // Remove state.
1018 
1019  __ bind(&with_tos_register);
1020  __ lw(v0, MemOperand(sp, 1 * kPointerSize));
1021  __ Branch(&unknown_state, ne, t2, Operand(FullCodeGenerator::TOS_REG));
1022 
1023  __ Ret(USE_DELAY_SLOT);
1024  // Safe to fill the delay slot; Addu will emit one instruction.
1025  __ Addu(sp, sp, Operand(2 * kPointerSize)); // Remove state.
1026 
1027  __ bind(&unknown_state);
1028  __ stop("no cases left");
1029 }
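The dispatch above has only two live states: NO_REGISTERS drops the saved state word, and TOS_REG additionally reloads the accumulator (v0) from the next slot before dropping both. A plain C++ restatement (the enum mirrors FullCodeGenerator's two states; everything else is illustrative):

  #include <cstdio>

  enum State { NO_REGISTERS, TOS_REG };

  // Returns how many stack slots the helper removes; sets *reload_v0 when the
  // top-of-stack value has to be put back into the return register first.
  static int SlotsToDrop(State state, bool* reload_v0) {
    switch (state) {
      case NO_REGISTERS: *reload_v0 = false; return 1;
      case TOS_REG:      *reload_v0 = true;  return 2;
    }
    return -1;  // corresponds to the "no cases left" stop above
  }

  int main() {
    bool reload = false;
    int drop = SlotsToDrop(TOS_REG, &reload);
    std::printf("drop %d slots, reload v0: %s\n", drop, reload ? "yes" : "no");
    return 0;
  }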
1030 
1031 
1032 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1033  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
1034 }
1035 
1036 
1037 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
1038  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
1039 }
1040 
1041 
1042 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
1043  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
1044 }
1045 
1046 
1047 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1048  // Lookup the function in the JavaScript frame.
1050  {
1051  FrameScope scope(masm, StackFrame::INTERNAL);
1052  // Pass function as argument.
1053  __ push(a0);
1054  __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
1055  }
1056 
1057  // If the code object is null, just return to the unoptimized code.
1058  __ Ret(eq, v0, Operand(Smi::FromInt(0)));
1059 
1060  // Load deoptimization data from the code object.
1061  // <deopt_data> = <code>[#deoptimization_data_offset]
1063 
1064  // Load the OSR entrypoint offset from the deoptimization data.
1065  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
1068  __ SmiUntag(a1);
1069 
1070  // Compute the target address = code_obj + header_size + osr_offset
1071  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
1072  __ addu(v0, v0, a1);
1073  __ addiu(ra, v0, Code::kHeaderSize - kHeapObjectTag);
1074 
1075  // And "return" to the OSR entry point of the function.
1076  __ Ret();
1077 }
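The "return" at the end lands in the middle of the optimized code object: entry = code object + Code::kHeaderSize - kHeapObjectTag + osr_pc_offset. A worked example with made-up numbers (only kHeapObjectTag == 1 is a real V8 constant here; the other values are placeholders):

  #include <cstdio>

  int main() {
    const unsigned kHeapObjectTag = 1;
    unsigned code_obj   = 0x40001001u;  // tagged pointer to the code object
    unsigned header     = 0x80u;        // stand-in for Code::kHeaderSize
    unsigned osr_offset = 0x1c4u;       // untagged osr_pc_offset from the deopt data
    unsigned entry = code_obj + osr_offset + header - kHeapObjectTag;
    std::printf("OSR entry = 0x%x\n", entry);  // 0x40001244
    return 0;
  }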
1078 
1079 
1080 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
1081  // We check the stack limit as an indicator that recompilation might be done.
1082  Label ok;
1083  __ LoadRoot(at, Heap::kStackLimitRootIndex);
1084  __ Branch(&ok, hs, sp, Operand(at));
1085  {
1086  FrameScope scope(masm, StackFrame::INTERNAL);
1087  __ CallRuntime(Runtime::kHiddenStackGuard, 0);
1088  }
1089  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
1090  RelocInfo::CODE_TARGET);
1091 
1092  __ bind(&ok);
1093  __ Ret();
1094 }
1095 
1096 
1097 void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
1098  // 1. Make sure we have at least one argument.
1099  // a0: actual number of arguments
1100  { Label done;
1101  __ Branch(&done, ne, a0, Operand(zero_reg));
1102  __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
1103  __ push(t2);
1104  __ Addu(a0, a0, Operand(1));
1105  __ bind(&done);
1106  }
1107 
1108  // 2. Get the function to call (passed as receiver) from the stack, check
1109  // if it is a function.
1110  // a0: actual number of arguments
1111  Label slow, non_function;
1112  __ sll(at, a0, kPointerSizeLog2);
1113  __ addu(at, sp, at);
1114  __ lw(a1, MemOperand(at));
1115  __ JumpIfSmi(a1, &non_function);
1116  __ GetObjectType(a1, a2, a2);
1117  __ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE));
1118 
1119  // 3a. Patch the first argument if necessary when calling a function.
1120  // a0: actual number of arguments
1121  // a1: function
1122  Label shift_arguments;
1123  __ li(t0, Operand(0, RelocInfo::NONE32)); // Indicate regular JS_FUNCTION.
1124  { Label convert_to_object, use_global_receiver, patch_receiver;
1125  // Change context eagerly in case we need the global receiver.
1127 
1128  // Do not transform the receiver for strict mode functions.
1131  __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
1132  kSmiTagSize)));
1133  __ Branch(&shift_arguments, ne, t3, Operand(zero_reg));
1134 
1135  // Do not transform the receiver for native (Compilerhints already in a3).
1136  __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
1137  __ Branch(&shift_arguments, ne, t3, Operand(zero_reg));
1138 
1139  // Compute the receiver in sloppy mode.
1140  // Load first argument in a2. a2 = -kPointerSize(sp + n_args << 2).
1141  __ sll(at, a0, kPointerSizeLog2);
1142  __ addu(a2, sp, at);
1143  __ lw(a2, MemOperand(a2, -kPointerSize));
1144  // a0: actual number of arguments
1145  // a1: function
1146  // a2: first argument
1147  __ JumpIfSmi(a2, &convert_to_object, t2);
1148 
1149  __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
1150  __ Branch(&use_global_receiver, eq, a2, Operand(a3));
1151  __ LoadRoot(a3, Heap::kNullValueRootIndex);
1152  __ Branch(&use_global_receiver, eq, a2, Operand(a3));
1153 
1155  __ GetObjectType(a2, a3, a3);
1156  __ Branch(&shift_arguments, ge, a3, Operand(FIRST_SPEC_OBJECT_TYPE));
1157 
1158  __ bind(&convert_to_object);
1159  // Enter an internal frame in order to preserve argument count.
1160  {
1161  FrameScope scope(masm, StackFrame::INTERNAL);
1162  __ sll(a0, a0, kSmiTagSize); // Smi tagged.
1163  __ Push(a0, a2);
1164  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1165  __ mov(a2, v0);
1166 
1167  __ pop(a0);
1168  __ sra(a0, a0, kSmiTagSize); // Un-tag.
1169  // Leave internal frame.
1170  }
1171  // Restore the function to a1, and the flag to t0.
1172  __ sll(at, a0, kPointerSizeLog2);
1173  __ addu(at, sp, at);
1174  __ lw(a1, MemOperand(at));
1175  __ li(t0, Operand(0, RelocInfo::NONE32));
1176  __ Branch(&patch_receiver);
1177 
1178  __ bind(&use_global_receiver);
1181 
1182  __ bind(&patch_receiver);
1183  __ sll(at, a0, kPointerSizeLog2);
1184  __ addu(a3, sp, at);
1185  __ sw(a2, MemOperand(a3, -kPointerSize));
1186 
1187  __ Branch(&shift_arguments);
1188  }
1189 
1190  // 3b. Check for function proxy.
1191  __ bind(&slow);
1192  __ li(t0, Operand(1, RelocInfo::NONE32)); // Indicate function proxy.
1193  __ Branch(&shift_arguments, eq, a2, Operand(JS_FUNCTION_PROXY_TYPE));
1194 
1195  __ bind(&non_function);
1196  __ li(t0, Operand(2, RelocInfo::NONE32)); // Indicate non-function.
1197 
1198  // 3c. Patch the first argument when calling a non-function. The
1199  // CALL_NON_FUNCTION builtin expects the non-function callee as
1200  // receiver, so overwrite the first argument which will ultimately
1201  // become the receiver.
1202  // a0: actual number of arguments
1203  // a1: function
1204  // t0: call type (0: JS function, 1: function proxy, 2: non-function)
1205  __ sll(at, a0, kPointerSizeLog2);
1206  __ addu(a2, sp, at);
1207  __ sw(a1, MemOperand(a2, -kPointerSize));
1208 
1209  // 4. Shift arguments and return address one slot down on the stack
1210  // (overwriting the original receiver). Adjust argument count to make
1211  // the original first argument the new receiver.
1212  // a0: actual number of arguments
1213  // a1: function
1214  // t0: call type (0: JS function, 1: function proxy, 2: non-function)
1215  __ bind(&shift_arguments);
1216  { Label loop;
1217  // Calculate the copy start address (destination). Copy end address is sp.
1218  __ sll(at, a0, kPointerSizeLog2);
1219  __ addu(a2, sp, at);
1220 
1221  __ bind(&loop);
1222  __ lw(at, MemOperand(a2, -kPointerSize));
1223  __ sw(at, MemOperand(a2));
1224  __ Subu(a2, a2, Operand(kPointerSize));
1225  __ Branch(&loop, ne, a2, Operand(sp));
1226  // Adjust the actual number of arguments and remove the top element
1227  // (which is a copy of the last argument).
1228  __ Subu(a0, a0, Operand(1));
1229  __ Pop();
1230  }
1231 
1232  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
1233  // or a function proxy via CALL_FUNCTION_PROXY.
1234  // a0: actual number of arguments
1235  // a1: function
1236  // t0: call type (0: JS function, 1: function proxy, 2: non-function)
1237  { Label function, non_proxy;
1238  __ Branch(&function, eq, t0, Operand(zero_reg));
1239  // Expected number of arguments is 0 for CALL_NON_FUNCTION.
1240  __ mov(a2, zero_reg);
1241  __ Branch(&non_proxy, ne, t0, Operand(1));
1242 
1243  __ push(a1); // Re-add proxy object as additional argument.
1244  __ Addu(a0, a0, Operand(1));
1245  __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
1246  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1247  RelocInfo::CODE_TARGET);
1248 
1249  __ bind(&non_proxy);
1250  __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION);
1251  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1252  RelocInfo::CODE_TARGET);
1253  __ bind(&function);
1254  }
1255 
1256  // 5b. Get the code to call from the function and check that the number of
1257  // expected arguments matches what we're providing. If so, jump
1258  // (tail-call) to the code in register a3 without checking arguments.
1259  // a0: actual number of arguments
1260  // a1: function
1262  __ lw(a2,
1264  __ sra(a2, a2, kSmiTagSize);
1265  // Check formal and actual parameter counts.
1266  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1267  RelocInfo::CODE_TARGET, ne, a2, Operand(a0));
1268 
1270  ParameterCount expected(0);
1271  __ InvokeCode(a3, expected, expected, JUMP_FUNCTION, NullCallWrapper());
1272 }
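The shift_arguments loop above slides every stack slot one position toward the receiver, so the original first argument becomes the new receiver and the now-duplicated top slot is popped. A host-side sketch with example values, where index 0 models sp (the last argument) and the highest index models the receiver slot:

  #include <cstdio>

  int main() {
    // argc = 3: arg2 (last, at sp), arg1, arg0 (first), receiver.
    int stack[4] = {33, 22, 11, 99};  // example values; 99 is the receiver slot
    int argc = 3;
    // Walk from the receiver slot down toward sp, copying each slot from the
    // one just above it, mirroring the a2/sp loop in the listing.
    for (int i = 3; i > 0; --i) stack[i] = stack[i - 1];
    --argc;  // the original first argument (11) is now the receiver
    // The loop left a duplicate in stack[0]; the __ Pop() drops it.
    std::printf("argc=%d receiver=%d args=[%d, %d]\n",
                argc, stack[3], stack[2], stack[1]);  // argc=2 receiver=11 [22, 33]
    return 0;
  }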
1273 
1274 
1275 void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
1276  const int kIndexOffset =
1277      StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
1278  const int kLimitOffset =
1279      StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
1280  const int kArgsOffset = 2 * kPointerSize;
1281  const int kRecvOffset = 3 * kPointerSize;
1282  const int kFunctionOffset = 4 * kPointerSize;
1283 
1284  {
1285  FrameScope frame_scope(masm, StackFrame::INTERNAL);
1286  __ lw(a0, MemOperand(fp, kFunctionOffset)); // Get the function.
1287  __ push(a0);
1288  __ lw(a0, MemOperand(fp, kArgsOffset)); // Get the args array.
1289  __ push(a0);
1290  // Returns (in v0) number of arguments to copy to stack as Smi.
1291  __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
1292 
1293  // Check the stack for overflow. We are not trying to catch
1294  // interruptions (e.g. debug break and preemption) here, so the "real stack
1295  // limit" is checked.
1296  Label okay;
1297  __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
1298  // Make a2 the space we have left. The stack might already be overflowed
1299  // here which will cause a2 to become negative.
1300  __ subu(a2, sp, a2);
1301  // Check if the arguments will overflow the stack.
1302  __ sll(t3, v0, kPointerSizeLog2 - kSmiTagSize);
1303  __ Branch(&okay, gt, a2, Operand(t3)); // Signed comparison.
1304 
1305  // Out of stack space.
1306  __ lw(a1, MemOperand(fp, kFunctionOffset));
1307  __ Push(a1, v0);
1308  __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
1309  // End of stack check.
1310 
1311  // Push current limit and index.
1312  __ bind(&okay);
1313  __ mov(a1, zero_reg);
1314  __ Push(v0, a1); // Limit and initial index.
1315 
1316  // Get the receiver.
1317  __ lw(a0, MemOperand(fp, kRecvOffset));
1318 
1319  // Check that the function is a JS function (otherwise it must be a proxy).
1320  Label push_receiver;
1321  __ lw(a1, MemOperand(fp, kFunctionOffset));
1322  __ GetObjectType(a1, a2, a2);
1323  __ Branch(&push_receiver, ne, a2, Operand(JS_FUNCTION_TYPE));
1324 
1325  // Change context eagerly to get the right global object if necessary.
1327  // Load the shared function info while the function is still in a1.
1329 
1330  // Compute the receiver.
1331  // Do not transform the receiver for strict mode functions.
1332  Label call_to_object, use_global_receiver;
1334  __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
1335  kSmiTagSize)));
1336  __ Branch(&push_receiver, ne, t3, Operand(zero_reg));
1337 
1338  // Do not transform the receiver for native (Compilerhints already in a2).
1339  __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
1340  __ Branch(&push_receiver, ne, t3, Operand(zero_reg));
1341 
1342  // Compute the receiver in sloppy mode.
1343  __ JumpIfSmi(a0, &call_to_object);
1344  __ LoadRoot(a1, Heap::kNullValueRootIndex);
1345  __ Branch(&use_global_receiver, eq, a0, Operand(a1));
1346  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
1347  __ Branch(&use_global_receiver, eq, a0, Operand(a2));
1348 
1349  // Check if the receiver is already a JavaScript object.
1350  // a0: receiver
1352  __ GetObjectType(a0, a1, a1);
1353  __ Branch(&push_receiver, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
1354 
1355  // Convert the receiver to a regular object.
1356  // a0: receiver
1357  __ bind(&call_to_object);
1358  __ push(a0);
1359  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1360  __ mov(a0, v0); // Put object in a0 to match other paths to push_receiver.
1361  __ Branch(&push_receiver);
1362 
1363  __ bind(&use_global_receiver);
1366 
1367  // Push the receiver.
1368  // a0: receiver
1369  __ bind(&push_receiver);
1370  __ push(a0);
1371 
1372  // Copy all arguments from the array to the stack.
1373  Label entry, loop;
1374  __ lw(a0, MemOperand(fp, kIndexOffset));
1375  __ Branch(&entry);
1376 
1377  // Load the current argument from the arguments array and push it to the
1378  // stack.
1379  // a0: current argument index
1380  __ bind(&loop);
1381  __ lw(a1, MemOperand(fp, kArgsOffset));
1382  __ Push(a1, a0);
1383 
1384  // Call the runtime to access the property in the arguments array.
1385  __ CallRuntime(Runtime::kGetProperty, 2);
1386  __ push(v0);
1387 
1388  // Use inline caching to access the arguments.
1389  __ lw(a0, MemOperand(fp, kIndexOffset));
1390  __ Addu(a0, a0, Operand(1 << kSmiTagSize));
1391  __ sw(a0, MemOperand(fp, kIndexOffset));
1392 
1393  // Test if the copy loop has finished copying all the elements from the
1394  // arguments object.
1395  __ bind(&entry);
1396  __ lw(a1, MemOperand(fp, kLimitOffset));
1397  __ Branch(&loop, ne, a0, Operand(a1));
1398 
1399  // Call the function.
1400  Label call_proxy;
1401  ParameterCount actual(a0);
1402  __ sra(a0, a0, kSmiTagSize);
1403  __ lw(a1, MemOperand(fp, kFunctionOffset));
1404  __ GetObjectType(a1, a2, a2);
1405  __ Branch(&call_proxy, ne, a2, Operand(JS_FUNCTION_TYPE));
1406 
1407  __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());
1408 
1409  frame_scope.GenerateLeaveFrame();
1410  __ Ret(USE_DELAY_SLOT);
1411  __ Addu(sp, sp, Operand(3 * kPointerSize)); // In delay slot.
1412 
1413  // Call the function proxy.
1414  __ bind(&call_proxy);
1415  __ push(a1); // Add function proxy as last argument.
1416  __ Addu(a0, a0, Operand(1));
1417  __ li(a2, Operand(0, RelocInfo::NONE32));
1418  __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
1419  __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1420  RelocInfo::CODE_TARGET);
1421  // Tear down the internal frame and remove function, receiver and args.
1422  }
1423 
1424  __ Ret(USE_DELAY_SLOT);
1425  __ Addu(sp, sp, Operand(3 * kPointerSize)); // In delay slot.
1426 }
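The stack check near the top of Generate_FunctionApply compares the remaining stack space against the bytes needed for the spread arguments; the shift turns the smi-tagged count directly into a byte size. A standalone sketch with assumed addresses and counts:

  #include <cstdio>

  int main() {
    const int kPointerSizeLog2 = 2, kSmiTagSize = 1;              // MIPS32 assumptions
    unsigned long sp = 0x7ffe0000ul, real_limit = 0x7ffd0000ul;   // hypothetical
    long space_left = (long)(sp - real_limit);                    // 64 KiB in this example
    int smi_argc = 5000 << kSmiTagSize;                           // smi-tagged arg count
    long bytes_needed = (long)smi_argc << (kPointerSizeLog2 - kSmiTagSize);
    std::printf("%s\n", space_left > bytes_needed
                            ? "okay: push the arguments"
                            : "APPLY_OVERFLOW: report stack overflow");
    return 0;
  }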
1427 
1428 
1429 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1430  __ sll(a0, a0, kSmiTagSize);
1431  __ li(t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
1432  __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit());
1433  __ Addu(fp, sp,
1434          Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
1435 }
1436 
1437 
1438 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1439  // ----------- S t a t e -------------
1440  // -- v0 : result being passed through
1441  // -----------------------------------
1442  // Get the number of arguments passed (as a smi), tear down the frame and
1443  // then tear down the parameters.
1444  __ lw(a1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
1445                             kPointerSize)));
1446  __ mov(sp, fp);
1447  __ MultiPop(fp.bit() | ra.bit());
1448  __ sll(t0, a1, kPointerSizeLog2 - kSmiTagSize);
1449  __ Addu(sp, sp, t0);
1450  // Adjust for the receiver.
1451  __ Addu(sp, sp, Operand(kPointerSize));
1452 }
1453 
1454 
1455 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1456  // State setup as expected by MacroAssembler::InvokePrologue.
1457  // ----------- S t a t e -------------
1458  // -- a0: actual arguments count
1459  // -- a1: function (passed through to callee)
1460  // -- a2: expected arguments count
1461  // -----------------------------------
1462 
1463  Label invoke, dont_adapt_arguments;
1464 
1465  Label enough, too_few;
1467  __ Branch(&dont_adapt_arguments, eq,
1469  // We use Uless since the number of arguments should always be greater than 0.
1470  __ Branch(&too_few, Uless, a0, Operand(a2));
1471 
1472  { // Enough parameters: actual >= expected.
1473  // a0: actual number of arguments as a smi
1474  // a1: function
1475  // a2: expected number of arguments
1476  // a3: code entry to call
1477  __ bind(&enough);
1478  EnterArgumentsAdaptorFrame(masm);
1479 
1480  // Calculate copy start address into a0 and copy end address into a2.
1481  __ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize);
1482  __ Addu(a0, fp, a0);
1483  // Adjust for return address and receiver.
1484  __ Addu(a0, a0, Operand(2 * kPointerSize));
1485  // Compute copy end address.
1486  __ sll(a2, a2, kPointerSizeLog2);
1487  __ subu(a2, a0, a2);
1488 
1489  // Copy the arguments (including the receiver) to the new stack frame.
1490  // a0: copy start address
1491  // a1: function
1492  // a2: copy end address
1493  // a3: code entry to call
1494 
1495  Label copy;
1496  __ bind(&copy);
1497  __ lw(t0, MemOperand(a0));
1498  __ push(t0);
1499  __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a2));
1500  __ addiu(a0, a0, -kPointerSize); // In delay slot.
1501 
1502  __ jmp(&invoke);
1503  }
1504 
1505  { // Too few parameters: Actual < expected.
1506  __ bind(&too_few);
1507  EnterArgumentsAdaptorFrame(masm);
1508 
1509  // Calculate the copy start address into a0; the copy end address is fp.
1510  // a0: actual number of arguments as a smi
1511  // a1: function
1512  // a2: expected number of arguments
1513  // a3: code entry to call
1514  __ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize);
1515  __ Addu(a0, fp, a0);
1516  // Adjust for return address and receiver.
1517  __ Addu(a0, a0, Operand(2 * kPointerSize));
1518  // Compute copy end address. Also adjust for return address.
1519  __ Addu(t3, fp, kPointerSize);
1520 
1521  // Copy the arguments (including the receiver) to the new stack frame.
1522  // a0: copy start address
1523  // a1: function
1524  // a2: expected number of arguments
1525  // a3: code entry to call
1526  // t3: copy end address
1527  Label copy;
1528  __ bind(&copy);
1529  __ lw(t0, MemOperand(a0)); // Adjusted above for return addr and receiver.
1530  __ Subu(sp, sp, kPointerSize);
1531  __ Subu(a0, a0, kPointerSize);
1532  __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t3));
1533  __ sw(t0, MemOperand(sp)); // In the delay slot.
1534 
1535  // Fill the remaining expected arguments with undefined.
1536  // a1: function
1537  // a2: expected number of arguments
1538  // a3: code entry to call
1539  __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
1540  __ sll(t2, a2, kPointerSizeLog2);
1541  __ Subu(a2, fp, Operand(t2));
1542  // Adjust for frame.
1543  __ Subu(a2, a2, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
1544  2 * kPointerSize));
1545 
1546  Label fill;
1547  __ bind(&fill);
1548  __ Subu(sp, sp, kPointerSize);
1549  __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a2));
1550  __ sw(t0, MemOperand(sp));
1551  }
1552 
1553  // Call the entry point.
1554  __ bind(&invoke);
1555 
1556  __ Call(a3);
1557 
1558  // Store offset of return address for deoptimizer.
1559  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
1560 
1561  // Exit frame and return.
1562  LeaveArgumentsAdaptorFrame(masm);
1563  __ Ret();
1564 
1565 
1566  // -------------------------------------------
1567  // Don't adapt arguments.
1568  // -------------------------------------------
1569  __ bind(&dont_adapt_arguments);
1570  __ Jump(a3);
1571 }
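The trampoline above has three outcomes: when the callee does not want adaptation (the sentinel case) it jumps straight to the code entry, when actual >= expected it copies the receiver and the expected number of arguments, and when actual < expected it copies everything it has and pads the rest with undefined. A small sketch of that case split (counts and messages are illustrative):

  #include <cstdio>

  static void Adapt(int actual, int expected, bool dont_adapt) {
    if (dont_adapt) {
      std::printf("no adaptation: jump straight to the code entry\n");
    } else if (actual >= expected) {
      std::printf("enough: copy the receiver and the first %d arguments\n", expected);
    } else {
      std::printf("too few: copy all %d arguments, fill %d slots with undefined\n",
                  actual, expected - actual);
    }
  }

  int main() {
    Adapt(5, 3, false);  // enough parameters
    Adapt(1, 3, false);  // too few: two undefined fillers
    Adapt(4, 0, true);   // kDontAdaptArgumentsSentinel-style bypass
    return 0;
  }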
1572 
1573 
1574 #undef __
1575 
1576 } } // namespace v8::internal
1577 
1578 #endif // V8_TARGET_ARCH_MIPS