v8 3.25.30 (node 0.11.13)
V8 is Google's open source JavaScript engine
builtins-arm.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if V8_TARGET_ARCH_ARM
31 
32 #include "codegen.h"
33 #include "debug.h"
34 #include "deoptimizer.h"
35 #include "full-codegen.h"
36 #include "runtime.h"
37 #include "stub-cache.h"
38 
39 namespace v8 {
40 namespace internal {
41 
42 
43 #define __ ACCESS_MASM(masm)
44 
45 
46 void Builtins::Generate_Adaptor(MacroAssembler* masm,
47  CFunctionId id,
48  BuiltinExtraArguments extra_args) {
49  // ----------- S t a t e -------------
50  // -- r0 : number of arguments excluding receiver
51  // -- r1 : called function (only guaranteed when
52  // extra_args requires it)
53  // -- cp : context
54  // -- sp[0] : last argument
55  // -- ...
56  // -- sp[4 * (argc - 1)] : first argument (argc == r0)
57  // -- sp[4 * argc] : receiver
58  // -----------------------------------
59 
60  // Insert extra arguments.
61  int num_extra_args = 0;
62  if (extra_args == NEEDS_CALLED_FUNCTION) {
63  num_extra_args = 1;
64  __ push(r1);
65  } else {
66  ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
67  }
68 
69  // JumpToExternalReference expects r0 to contain the number of arguments
70  // including the receiver and the extra arguments.
71  __ add(r0, r0, Operand(num_extra_args + 1));
72  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
73 }
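The adaptor receives the argument count in r0 excluding the receiver, while JumpToExternalReference wants it including the receiver and any extra arguments. A minimal C++ sketch of just that count adjustment (the enum mirrors the one above; the function name is made up for illustration):

// Sketch only: the argument-count math Generate_Adaptor performs in r0,
// written as plain C++. NEEDS_CALLED_FUNCTION pushes r1 and adds one slot.
#include <cassert>

enum BuiltinExtraArguments { NO_EXTRA_ARGUMENTS, NEEDS_CALLED_FUNCTION };

// 'argc_excluding_receiver' mirrors the incoming value of r0.
static int ArgcForExternalReference(int argc_excluding_receiver,
                                    BuiltinExtraArguments extra_args) {
  int num_extra_args = (extra_args == NEEDS_CALLED_FUNCTION) ? 1 : 0;
  // JumpToExternalReference expects the count to include the receiver
  // and the extra arguments, hence "+ 1".
  return argc_excluding_receiver + num_extra_args + 1;
}

int main() {
  assert(ArgcForExternalReference(2, NO_EXTRA_ARGUMENTS) == 3);
  assert(ArgcForExternalReference(2, NEEDS_CALLED_FUNCTION) == 4);
}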
74 
75 
76 // Load the built-in InternalArray function from the current context.
77 static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
78  Register result) {
79  // Load the native context.
80 
81  __ ldr(result,
82  MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
83  __ ldr(result,
84  FieldMemOperand(result, GlobalObject::kNativeContextOffset));
85  // Load the InternalArray function from the native context.
86  __ ldr(result,
87  MemOperand(result,
88  Context::SlotOffset(
89  Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
90 }
91 
92 
93 // Load the built-in Array function from the current context.
94 static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
95  // Load the native context.
96 
97  __ ldr(result,
98  MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
99  __ ldr(result,
100  FieldMemOperand(result, GlobalObject::kNativeContextOffset));
101  // Load the Array function from the native context.
102  __ ldr(result,
103  MemOperand(result,
104  Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
105 }
106 
107 
108 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
109  // ----------- S t a t e -------------
110  // -- r0 : number of arguments
111  // -- lr : return address
112  // -- sp[...]: constructor arguments
113  // -----------------------------------
114  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
115 
116  // Get the InternalArray function.
117  GenerateLoadInternalArrayFunction(masm, r1);
118 
119  if (FLAG_debug_code) {
120  // Initial map for the builtin InternalArray functions should be maps.
121  __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
122  __ SmiTst(r2);
123  __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
124  __ CompareObjectType(r2, r3, r4, MAP_TYPE);
125  __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
126  }
127 
128  // Run the native code for the InternalArray function called as a normal
129  // function.
130  // tail call a stub
131  InternalArrayConstructorStub stub(masm->isolate());
132  __ TailCallStub(&stub);
133 }
134 
135 
136 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
137  // ----------- S t a t e -------------
138  // -- r0 : number of arguments
139  // -- lr : return address
140  // -- sp[...]: constructor arguments
141  // -----------------------------------
142  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
143 
144  // Get the Array function.
145  GenerateLoadArrayFunction(masm, r1);
146 
147  if (FLAG_debug_code) {
148  // Initial map for the builtin Array functions should be maps.
149  __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
150  __ SmiTst(r2);
151  __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
152  __ CompareObjectType(r2, r3, r4, MAP_TYPE);
153  __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
154  }
155 
156  // Run the native code for the Array function called as a normal function.
157  // tail call a stub
158  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
159  ArrayConstructorStub stub(masm->isolate());
160  __ TailCallStub(&stub);
161 }
162 
163 
164 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
165  // ----------- S t a t e -------------
166  // -- r0 : number of arguments
167  // -- r1 : constructor function
168  // -- lr : return address
169  // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
170  // -- sp[argc * 4] : receiver
171  // -----------------------------------
172  Counters* counters = masm->isolate()->counters();
173  __ IncrementCounter(counters->string_ctor_calls(), 1, r2, r3);
174 
175  Register function = r1;
176  if (FLAG_debug_code) {
177  __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, r2);
178  __ cmp(function, Operand(r2));
179  __ Assert(eq, kUnexpectedStringFunction);
180  }
181 
182  // Load the first argument into r0 and get rid of the rest.
183  Label no_arguments;
184  __ cmp(r0, Operand::Zero());
185  __ b(eq, &no_arguments);
186  // First arg = sp[(argc - 1) * 4].
187  __ sub(r0, r0, Operand(1));
188  __ ldr(r0, MemOperand(sp, r0, LSL, kPointerSizeLog2, PreIndex));
189  // sp now points to args[0], drop args[0] + receiver.
190  __ Drop(2);
191 
192  Register argument = r2;
193  Label not_cached, argument_is_string;
194  __ LookupNumberStringCache(r0, // Input.
195  argument, // Result.
196  r3, // Scratch.
197  r4, // Scratch.
198  r5, // Scratch.
199  &not_cached);
200  __ IncrementCounter(counters->string_ctor_cached_number(), 1, r3, r4);
201  __ bind(&argument_is_string);
202 
203  // ----------- S t a t e -------------
204  // -- r2 : argument converted to string
205  // -- r1 : constructor function
206  // -- lr : return address
207  // -----------------------------------
208 
209  Label gc_required;
210  __ Allocate(JSValue::kSize,
211  r0, // Result.
212  r3, // Scratch.
213  r4, // Scratch.
214  &gc_required,
215  TAG_OBJECT);
216 
217  // Initialising the String Object.
218  Register map = r3;
219  __ LoadGlobalFunctionInitialMap(function, map, r4);
220  if (FLAG_debug_code) {
221  __ ldrb(r4, FieldMemOperand(map, Map::kInstanceSizeOffset));
222  __ cmp(r4, Operand(JSValue::kSize >> kPointerSizeLog2));
223  __ Assert(eq, kUnexpectedStringWrapperInstanceSize);
224  __ ldrb(r4, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
225  __ cmp(r4, Operand::Zero());
226  __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper);
227  }
228  __ str(map, FieldMemOperand(r0, HeapObject::kMapOffset));
229 
230  __ LoadRoot(r3, Heap::kEmptyFixedArrayRootIndex);
231  __ str(r3, FieldMemOperand(r0, JSObject::kPropertiesOffset));
232  __ str(r3, FieldMemOperand(r0, JSObject::kElementsOffset));
233 
234  __ str(argument, FieldMemOperand(r0, JSValue::kValueOffset));
235 
236  // Ensure the object is fully initialized.
237  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
238 
239  __ Ret();
240 
241  // The argument was not found in the number to string cache. Check
242  // if it's a string already before calling the conversion builtin.
243  Label convert_argument;
244  __ bind(&not_cached);
245  __ JumpIfSmi(r0, &convert_argument);
246 
247  // Is it a String?
248  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
249  __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceTypeOffset));
250  STATIC_ASSERT(kNotStringTag != 0);
251  __ tst(r3, Operand(kIsNotStringMask));
252  __ b(ne, &convert_argument);
253  __ mov(argument, r0);
254  __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
255  __ b(&argument_is_string);
256 
257  // Invoke the conversion builtin and put the result into r2.
258  __ bind(&convert_argument);
259  __ push(function); // Preserve the function.
260  __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
261  {
262  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
263  __ push(r0);
264  __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
265  }
266  __ pop(function);
267  __ mov(argument, r0);
268  __ b(&argument_is_string);
269 
270  // Load the empty string into r2, remove the receiver from the
271  // stack, and jump back to the case where the argument is a string.
272  __ bind(&no_arguments);
273  __ LoadRoot(argument, Heap::kempty_stringRootIndex);
274  __ Drop(1);
275  __ b(&argument_is_string);
276 
277  // At this point the argument is already a string. Call runtime to
278  // create a string wrapper.
279  __ bind(&gc_required);
280  __ IncrementCounter(counters->string_ctor_gc_required(), 1, r3, r4);
281  {
282  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
283  __ push(argument);
284  __ CallRuntime(Runtime::kNewStringWrapper, 1);
285  }
286  __ Ret();
287 }
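The wrapper allocated above is exactly four pointer-sized fields, which is why the debug code compares the instance size against JSValue::kSize >> kPointerSizeLog2 and why four stores fully initialize it. A sketch of that layout as a plain struct, assuming a 32-bit target (the real object is GC-managed, not a C struct; the offsets shown are the 32-bit values):

// Sketch only: the JSValue wrapper layout that Generate_StringConstructCode
// initializes, modelled as a plain struct on a 32-bit target. Field order
// mirrors the stores above.
#include <cstdint>

struct JSValueLayout {
  uint32_t map;         // HeapObject::kMapOffset      == 0
  uint32_t properties;  // JSObject::kPropertiesOffset == 4
  uint32_t elements;    // JSObject::kElementsOffset   == 8
  uint32_t value;       // JSValue::kValueOffset       == 12, the wrapped string
};

// Matches "STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize)" above.
static_assert(sizeof(JSValueLayout) == 4 * sizeof(uint32_t),
              "JSValue is exactly four pointer-sized fields");

int main() {}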
288 
289 
290 static void CallRuntimePassFunction(
291  MacroAssembler* masm, Runtime::FunctionId function_id) {
292  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
293  // Push a copy of the function onto the stack.
294  __ push(r1);
295  // Push function as parameter to the runtime call.
296  __ Push(r1);
297 
298  __ CallRuntime(function_id, 1);
299  // Restore receiver.
300  __ pop(r1);
301 }
302 
303 
304 static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
305  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
306  __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset));
307  __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
308  __ Jump(r2);
309 }
310 
311 
312 static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
313  __ add(r0, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
314  __ Jump(r0);
315 }
316 
317 
318 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
319  // Checking whether the queued function is ready for install is optional,
320  // since we come across interrupts and stack checks elsewhere. However,
321  // not checking may delay installing ready functions, and always checking
322  // would be quite expensive. A good compromise is to first check against
323  // stack limit as a cue for an interrupt signal.
324  Label ok;
325  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
326  __ cmp(sp, Operand(ip));
327  __ b(hs, &ok);
328 
329  CallRuntimePassFunction(masm, Runtime::kHiddenTryInstallOptimizedCode);
330  GenerateTailCallToReturnedCode(masm);
331 
332  __ bind(&ok);
333  GenerateTailCallToSharedCode(masm);
334 }
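A plain C++ sketch of the compromise the comment above describes: the stack-limit comparison stands in for an explicit interrupt check, so the expensive readiness check only runs when an interrupt is likely pending (the function name is illustrative, not V8 API):

// Sketch only: the "stack limit as interrupt cue" compromise, in plain C++.
// If sp is above the limit there is probably no pending interrupt, so the
// expensive readiness check is skipped and the unoptimized code runs.
#include <cstdint>

static bool ShouldCheckForReadyOptimizedCode(uintptr_t sp,
                                             uintptr_t stack_limit) {
  // Mirrors "__ cmp(sp, Operand(ip)); __ b(hs, &ok);": hs means
  // unsigned higher-or-same, i.e. no interrupt is likely pending.
  return sp < stack_limit;
}

int main() {
  // With sp below the limit, fall into the TryInstallOptimizedCode path.
  return ShouldCheckForReadyOptimizedCode(0x1000, 0x2000) ? 0 : 1;
}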
335 
336 
337 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
338  bool is_api_function,
339  bool count_constructions,
340  bool create_memento) {
341  // ----------- S t a t e -------------
342  // -- r0 : number of arguments
343  // -- r1 : constructor function
344  // -- r2 : allocation site or undefined
345  // -- lr : return address
346  // -- sp[...]: constructor arguments
347  // -----------------------------------
348 
349  // Should never count constructions for api objects.
350  ASSERT(!is_api_function || !count_constructions);
351 
352  // Should never create mementos for api functions.
353  ASSERT(!is_api_function || !create_memento);
354 
355  // Should never create mementos before slack tracking is finished.
356  ASSERT(!count_constructions || !create_memento);
357 
358  Isolate* isolate = masm->isolate();
359 
360  // Enter a construct frame.
361  {
362  FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);
363 
364  if (create_memento) {
365  __ AssertUndefinedOrAllocationSite(r2, r3);
366  __ push(r2);
367  }
368 
369  // Preserve the two incoming parameters on the stack.
370  __ SmiTag(r0);
371  __ push(r0); // Smi-tagged arguments count.
372  __ push(r1); // Constructor function.
373 
374  // Try to allocate the object without transitioning into C code. If any of
375  // the preconditions is not met, the code bails out to the runtime call.
376  Label rt_call, allocated;
377  if (FLAG_inline_new) {
378  Label undo_allocation;
379 #ifdef ENABLE_DEBUGGER_SUPPORT
380  ExternalReference debug_step_in_fp =
381  ExternalReference::debug_step_in_fp_address(isolate);
382  __ mov(r2, Operand(debug_step_in_fp));
383  __ ldr(r2, MemOperand(r2));
384  __ tst(r2, r2);
385  __ b(ne, &rt_call);
386 #endif
387 
388  // Load the initial map and verify that it is in fact a map.
389  // r1: constructor function
390  __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
391  __ JumpIfSmi(r2, &rt_call);
392  __ CompareObjectType(r2, r3, r4, MAP_TYPE);
393  __ b(ne, &rt_call);
394 
395  // Check that the constructor is not constructing a JSFunction (see
396  // comments in Runtime_NewObject in runtime.cc). In which case the
397  // initial map's instance type would be JS_FUNCTION_TYPE.
398  // r1: constructor function
399  // r2: initial map
400  __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
401  __ b(eq, &rt_call);
402 
403  if (count_constructions) {
404  Label allocate;
405  // Decrease generous allocation count.
406  __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
407  MemOperand constructor_count =
408  FieldMemOperand(r3, SharedFunctionInfo::kConstructionCountOffset);
409  __ ldrb(r4, constructor_count);
410  __ sub(r4, r4, Operand(1), SetCC);
411  __ strb(r4, constructor_count);
412  __ b(ne, &allocate);
413 
414  __ push(r1);
415 
416  __ Push(r2, r1); // r1 = constructor
417  // The call will replace the stub, so the countdown is only done once.
418  __ CallRuntime(Runtime::kHiddenFinalizeInstanceSize, 1);
419 
420  __ pop(r2);
421  __ pop(r1);
422 
423  __ bind(&allocate);
424  }
425 
426  // Now allocate the JSObject on the heap.
427  // r1: constructor function
428  // r2: initial map
429  __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
430  if (create_memento) {
431  __ add(r3, r3, Operand(AllocationMemento::kSize / kPointerSize));
432  }
433 
434  __ Allocate(r3, r4, r5, r6, &rt_call, SIZE_IN_WORDS);
435 
436  // Allocated the JSObject, now initialize the fields. Map is set to
437  // initial map and properties and elements are set to empty fixed array.
438  // r1: constructor function
439  // r2: initial map
440  // r3: object size (not including memento if create_memento)
441  // r4: JSObject (not tagged)
442  __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
443  __ mov(r5, r4);
444  ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
445  __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
446  ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
447  __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
448  ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
449  __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
450
451  // Fill all the in-object properties with the appropriate filler.
452  // r1: constructor function
453  // r2: initial map
454  // r3: object size (in words, including memento if create_memento)
455  // r4: JSObject (not tagged)
456  // r5: First in-object property of JSObject (not tagged)
457  ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize);
458
459  if (count_constructions) {
460  __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
461  __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
462  __ Ubfx(r0, r0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
463  kBitsPerByte);
464  __ add(r0, r5, Operand(r0, LSL, kPointerSizeLog2));
465  // r0: offset of first field after pre-allocated fields
466  if (FLAG_debug_code) {
467  __ add(ip, r4, Operand(r3, LSL, kPointerSizeLog2)); // End of object.
468  __ cmp(r0, ip);
469  __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields);
470  }
471  __ InitializeFieldsWithFiller(r5, r0, r6);
472  // To allow for truncation.
473  __ LoadRoot(r6, Heap::kOnePointerFillerMapRootIndex);
474  __ add(r0, r4, Operand(r3, LSL, kPointerSizeLog2)); // End of object.
475  __ InitializeFieldsWithFiller(r5, r0, r6);
476  } else if (create_memento) {
477  __ sub(r6, r3, Operand(AllocationMemento::kSize / kPointerSize));
478  __ add(r0, r4, Operand(r6, LSL, kPointerSizeLog2)); // End of object.
479  __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
480  __ InitializeFieldsWithFiller(r5, r0, r6);
481 
482  // Fill in memento fields.
483  // r5: points to the allocated but uninitialized memento.
484  __ LoadRoot(r6, Heap::kAllocationMementoMapRootIndex);
485  ASSERT_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
486  __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
487  // Load the AllocationSite
488  __ ldr(r6, MemOperand(sp, 2 * kPointerSize));
489  ASSERT_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
490  __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
491  } else {
492  __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
493  __ add(r0, r4, Operand(r3, LSL, kPointerSizeLog2)); // End of object.
494  __ InitializeFieldsWithFiller(r5, r0, r6);
495  }
496 
497  // Add the object tag to make the JSObject real, so that we can continue
498  // and jump into the continuation code at any time from now on. Any
499  // failures need to undo the allocation, so that the heap is in a
500  // consistent state and verifiable.
501  __ add(r4, r4, Operand(kHeapObjectTag));
502 
503  // Check if a non-empty properties array is needed. Continue with
504  // allocated object if not; fall through to the runtime call if it is.
505  // r1: constructor function
506  // r4: JSObject
507  // r5: start of next object (not tagged)
508  __ ldrb(r3, FieldMemOperand(r2, Map::kUnusedPropertyFieldsOffset));
509  // The field instance sizes contains both pre-allocated property fields
510  // and in-object properties.
511  __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
512  __ Ubfx(r6, r0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
513  kBitsPerByte);
514  __ add(r3, r3, Operand(r6));
515  __ Ubfx(r6, r0, Map::kInObjectPropertiesByte * kBitsPerByte,
516  kBitsPerByte);
517  __ sub(r3, r3, Operand(r6), SetCC);
518 
519  // Done if no extra properties are to be allocated.
520  __ b(eq, &allocated);
521  __ Assert(pl, kPropertyAllocationCountFailed);
522 
523  // Scale the number of elements by pointer size and add the header for
524  // FixedArrays to the start of the next object calculation from above.
525  // r1: constructor
526  // r3: number of elements in properties array
527  // r4: JSObject
528  // r5: start of next object
529  __ add(r0, r3, Operand(FixedArray::kHeaderSize / kPointerSize));
530  __ Allocate(
531  r0,
532  r5,
533  r6,
534  r2,
535  &undo_allocation,
536  static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));
537 
538  // Initialize the FixedArray.
539  // r1: constructor
540  // r3: number of elements in properties array
541  // r4: JSObject
542  // r5: FixedArray (not tagged)
543  __ LoadRoot(r6, Heap::kFixedArrayMapRootIndex);
544  __ mov(r2, r5);
545  ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
546  __ str(r6, MemOperand(r2, kPointerSize, PostIndex));
547  ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
548  __ SmiTag(r0, r3);
549  __ str(r0, MemOperand(r2, kPointerSize, PostIndex));
550
551  // Initialize the fields to undefined.
552  // r1: constructor function
553  // r2: First element of FixedArray (not tagged)
554  // r3: number of elements in properties array
555  // r4: JSObject
556  // r5: FixedArray (not tagged)
557  __ add(r6, r2, Operand(r3, LSL, kPointerSizeLog2)); // End of object.
558  ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
559  { Label loop, entry;
560  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
561  __ b(&entry);
562  __ bind(&loop);
563  __ str(r0, MemOperand(r2, kPointerSize, PostIndex));
564  __ bind(&entry);
565  __ cmp(r2, r6);
566  __ b(lt, &loop);
567  }
568 
569  // Store the initialized FixedArray into the properties field of
570  // the JSObject
571  // r1: constructor function
572  // r4: JSObject
573  // r5: FixedArray (not tagged)
574  __ add(r5, r5, Operand(kHeapObjectTag)); // Add the heap tag.
575  __ str(r5, FieldMemOperand(r4, JSObject::kPropertiesOffset));
576
577  // Continue with JSObject being successfully allocated
578  // r1: constructor function
579  // r4: JSObject
580  __ jmp(&allocated);
581 
582  // Undo the setting of the new top so that the heap is verifiable. For
583  // example, the map's unused properties potentially do not match the
584  // allocated object's unused properties.
585  // r4: JSObject (previous new top)
586  __ bind(&undo_allocation);
587  __ UndoAllocationInNewSpace(r4, r5);
588  }
589 
590  // Allocate the new receiver object using the runtime call.
591  // r1: constructor function
592  __ bind(&rt_call);
593  if (create_memento) {
594  // Get the cell or allocation site.
595  __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
596  __ push(r2);
597  }
598 
599  __ push(r1); // argument for Runtime_NewObject
600  if (create_memento) {
601  __ CallRuntime(Runtime::kHiddenNewObjectWithAllocationSite, 2);
602  } else {
603  __ CallRuntime(Runtime::kHiddenNewObject, 1);
604  }
605  __ mov(r4, r0);
606 
607  // If we ended up using the runtime, and we want a memento, then the
608  // runtime call made it for us, and we shouldn't do create count
609  // increment.
610  Label count_incremented;
611  if (create_memento) {
612  __ jmp(&count_incremented);
613  }
614 
615  // Receiver for constructor call allocated.
616  // r4: JSObject
617  __ bind(&allocated);
618 
619  if (create_memento) {
620  __ ldr(r2, MemOperand(sp, kPointerSize * 2));
621  __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
622  __ cmp(r2, r5);
623  __ b(eq, &count_incremented);
624  // r2 is an AllocationSite. We are creating a memento from it, so we
625  // need to increment the memento create count.
626  __ ldr(r3, FieldMemOperand(r2,
627  AllocationSite::kPretenureCreateCountOffset));
628  __ add(r3, r3, Operand(Smi::FromInt(1)));
629  __ str(r3, FieldMemOperand(r2,
630  AllocationSite::kPretenureCreateCountOffset));
631  __ bind(&count_incremented);
632  }
633 
634  __ push(r4);
635  __ push(r4);
636 
637  // Reload the number of arguments and the constructor from the stack.
638  // sp[0]: receiver
639  // sp[1]: receiver
640  // sp[2]: constructor function
641  // sp[3]: number of arguments (smi-tagged)
642  __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
643  __ ldr(r3, MemOperand(sp, 3 * kPointerSize));
644 
645  // Set up pointer to last argument.
646  __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
647
648  // Set up number of arguments for function call below
649  __ SmiUntag(r0, r3);
650 
651  // Copy arguments and receiver to the expression stack.
652  // r0: number of arguments
653  // r1: constructor function
654  // r2: address of last argument (caller sp)
655  // r3: number of arguments (smi-tagged)
656  // sp[0]: receiver
657  // sp[1]: receiver
658  // sp[2]: constructor function
659  // sp[3]: number of arguments (smi-tagged)
660  Label loop, entry;
661  __ b(&entry);
662  __ bind(&loop);
663  __ ldr(ip, MemOperand(r2, r3, LSL, kPointerSizeLog2 - 1));
664  __ push(ip);
665  __ bind(&entry);
666  __ sub(r3, r3, Operand(2), SetCC);
667  __ b(ge, &loop);
668 
669  // Call the function.
670  // r0: number of arguments
671  // r1: constructor function
672  if (is_api_function) {
673  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
674  Handle<Code> code =
675  masm->isolate()->builtins()->HandleApiCallConstruct();
676  __ Call(code, RelocInfo::CODE_TARGET);
677  } else {
678  ParameterCount actual(r0);
679  __ InvokeFunction(r1, actual, CALL_FUNCTION, NullCallWrapper());
680  }
681 
682  // Store offset of return address for deoptimizer.
683  if (!is_api_function && !count_constructions) {
684  masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
685  }
686 
687  // Restore context from the frame.
688  // r0: result
689  // sp[0]: receiver
690  // sp[1]: constructor function
691  // sp[2]: number of arguments (smi-tagged)
692  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
693
694  // If the result is an object (in the ECMA sense), we should get rid
695  // of the receiver and use the result; see ECMA-262 section 13.2.2-7
696  // on page 74.
697  Label use_receiver, exit;
698 
699  // If the result is a smi, it is *not* an object in the ECMA sense.
700  // r0: result
701  // sp[0]: receiver (newly allocated object)
702  // sp[1]: constructor function
703  // sp[2]: number of arguments (smi-tagged)
704  __ JumpIfSmi(r0, &use_receiver);
705 
706  // If the type of the result (stored in its map) is less than
707  // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
708  __ CompareObjectType(r0, r1, r3, FIRST_SPEC_OBJECT_TYPE);
709  __ b(ge, &exit);
710 
711  // Throw away the result of the constructor invocation and use the
712  // on-stack receiver as the result.
713  __ bind(&use_receiver);
714  __ ldr(r0, MemOperand(sp));
715 
716  // Remove receiver from the stack, remove caller arguments, and
717  // return.
718  __ bind(&exit);
719  // r0: result
720  // sp[0]: receiver (newly allocated object)
721  // sp[1]: constructor function
722  // sp[2]: number of arguments (smi-tagged)
723  __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
724 
725  // Leave construct frame.
726  }
727 
728  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
729  __ add(sp, sp, Operand(kPointerSize));
730  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r1, r2);
731  __ Jump(lr);
732 }
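The epilogue above drops the caller's arguments with add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1)), where r1 holds the smi-tagged argument count. On this 32-bit port a smi is the value shifted left by one tag bit, so shifting the smi by kPointerSizeLog2 - kSmiTagSize equals scaling the untagged count by the pointer size. A self-contained sketch of that arithmetic:

// Sketch only: why "LSL, kPointerSizeLog2 - 1" turns a smi-tagged argument
// count straight into a byte offset on the 32-bit ARM port.
#include <cassert>
#include <cstdint>

const int kSmiTagSize = 1;       // one tag bit on 32-bit V8
const int kPointerSizeLog2 = 2;  // 4-byte pointers

static int32_t SmiTag(int32_t value) { return value << kSmiTagSize; }

int main() {
  int32_t argc = 3;
  int32_t smi_argc = SmiTag(argc);  // what the stub keeps on the stack
  // Shifting the smi by (kPointerSizeLog2 - kSmiTagSize) equals shifting the
  // untagged value by kPointerSizeLog2, i.e. argc * kPointerSize bytes.
  assert((smi_argc << (kPointerSizeLog2 - kSmiTagSize)) ==
         argc * (1 << kPointerSizeLog2));
}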
733 
734 
735 void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
736  Generate_JSConstructStubHelper(masm, false, true, false);
737 }
738 
739 
740 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
741  Generate_JSConstructStubHelper(masm, false, false, FLAG_pretenuring_call_new);
742 }
743 
744 
745 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
746  Generate_JSConstructStubHelper(masm, true, false, false);
747 }
748 
749 
750 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
751  bool is_construct) {
752  // Called from Generate_JS_Entry
753  // r0: code entry
754  // r1: function
755  // r2: receiver
756  // r3: argc
757  // r4: argv
758  // r5-r6, r8 (if not FLAG_enable_ool_constant_pool) and cp may be clobbered
759  ProfileEntryHookStub::MaybeCallEntryHook(masm);
760
761  // Clear the context before we push it when entering the internal frame.
762  __ mov(cp, Operand::Zero());
763 
764  // Enter an internal frame.
765  {
766  FrameScope scope(masm, StackFrame::INTERNAL);
767 
768  // Set up the context from the function argument.
769  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
770
771  __ InitializeRootRegister();
772 
773  // Push the function and the receiver onto the stack.
774  __ push(r1);
775  __ push(r2);
776 
777  // Copy arguments to the stack in a loop.
778  // r1: function
779  // r3: argc
780  // r4: argv, i.e. points to first arg
781  Label loop, entry;
782  __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2));
783  // r2 points past last arg.
784  __ b(&entry);
785  __ bind(&loop);
786  __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex)); // read next parameter
787  __ ldr(r0, MemOperand(r0)); // dereference handle
788  __ push(r0); // push parameter
789  __ bind(&entry);
790  __ cmp(r4, r2);
791  __ b(ne, &loop);
792 
793  // Initialize all JavaScript callee-saved registers, since they will be seen
794  // by the garbage collector as part of handlers.
795  __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
796  __ mov(r5, Operand(r4));
797  __ mov(r6, Operand(r4));
798  if (!FLAG_enable_ool_constant_pool) {
799  __ mov(r8, Operand(r4));
800  }
801  if (kR9Available == 1) {
802  __ mov(r9, Operand(r4));
803  }
804 
805  // Invoke the code and pass argc as r0.
806  __ mov(r0, Operand(r3));
807  if (is_construct) {
808  // No type feedback cell is available
809  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
810  CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
811  __ CallStub(&stub);
812  } else {
813  ParameterCount actual(r0);
814  __ InvokeFunction(r1, actual, CALL_FUNCTION, NullCallWrapper());
815  }
816  // Exit the JS frame and remove the parameters (except function), and
817  // return.
818  // Respect ABI stack constraint.
819  }
820  __ Jump(lr);
821 
822  // r0: result
823 }
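Each argv slot passed to the trampoline is a handle location, i.e. a pointer to a slot holding the object pointer, which is why the copy loop above loads the next parameter and then dereferences it once more before pushing. A sketch in plain C++ (the types are stand-ins):

// Sketch only: the argv copy loop above in plain C++. Each argv slot is a
// handle location, so the loop reads the slot and then dereferences it once
// ("__ ldr(r0, MemOperand(r0))") before pushing the raw object pointer.
#include <vector>

typedef void* Object;            // stand-in for a raw V8 object pointer
typedef Object* HandleLocation;  // argv entries point at handle slots

static std::vector<Object> CopyArguments(HandleLocation* argv, int argc) {
  std::vector<Object> stack;
  for (HandleLocation* p = argv; p != argv + argc; ++p) {
    stack.push_back(**p);  // read next parameter, dereference handle, push
  }
  return stack;
}

int main() {
  Object a = nullptr, b = nullptr;
  HandleLocation slots[2] = { &a, &b };
  return CopyArguments(slots, 2).size() == 2 ? 0 : 1;
}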
824 
825 
826 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
827  Generate_JSEntryTrampolineHelper(masm, false);
828 }
829 
830 
831 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
832  Generate_JSEntryTrampolineHelper(masm, true);
833 }
834 
835 
836 void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) {
837  CallRuntimePassFunction(masm, Runtime::kHiddenCompileUnoptimized);
838  GenerateTailCallToReturnedCode(masm);
839 }
840 
841 
842 static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
843  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
844  // Push a copy of the function onto the stack.
845  __ push(r1);
846  // Push function as parameter to the runtime call.
847  __ Push(r1);
848  // Whether to compile in a background thread.
849  __ Push(masm->isolate()->factory()->ToBoolean(concurrent));
850 
851  __ CallRuntime(Runtime::kHiddenCompileOptimized, 2);
852  // Restore receiver.
853  __ pop(r1);
854 }
855 
856 
857 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
858  CallCompileOptimized(masm, false);
859  GenerateTailCallToReturnedCode(masm);
860 }
861 
862 
863 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
864  CallCompileOptimized(masm, true);
865  GenerateTailCallToReturnedCode(masm);
866 }
867 
868 
869 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
870  // For now, we are relying on the fact that make_code_young doesn't do any
871  // garbage collection which allows us to save/restore the registers without
872  // worrying about which of them contain pointers. We also don't build an
873  // internal frame to make the code faster, since we shouldn't have to do stack
874  // crawls in MakeCodeYoung. This seems a bit fragile.
875 
876  // The following registers must be saved and restored when calling through to
877  // the runtime:
878  // r0 - contains return address (beginning of patch sequence)
879  // r1 - isolate
880  FrameScope scope(masm, StackFrame::MANUAL);
881  __ stm(db_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());
882  __ PrepareCallCFunction(2, 0, r2);
883  __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate())));
884  __ CallCFunction(
885  ExternalReference::get_make_code_young_function(masm->isolate()), 2);
886  __ ldm(ia_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());
887  __ mov(pc, r0);
888 }
889 
890 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
891 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
892  MacroAssembler* masm) { \
893  GenerateMakeCodeYoungAgainCommon(masm); \
894 } \
895 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
896  MacroAssembler* masm) { \
897  GenerateMakeCodeYoungAgainCommon(masm); \
898 }
899 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
900 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
901 
902 
903 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
904  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
905  // that make_code_young doesn't do any garbage collection which allows us to
906  // save/restore the registers without worrying about which of them contain
907  // pointers.
908 
909  // The following registers must be saved and restored when calling through to
910  // the runtime:
911  // r0 - contains return address (beginning of patch sequence)
912  // r1 - isolate
913  FrameScope scope(masm, StackFrame::MANUAL);
914  __ stm(db_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());
915  __ PrepareCallCFunction(2, 0, r2);
916  __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate())));
917  __ CallCFunction(ExternalReference::get_mark_code_as_executed_function(
918  masm->isolate()), 2);
919  __ ldm(ia_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());
920 
921  // Perform prologue operations usually performed by the young code stub.
922  __ PushFixedFrame(r1);
923  __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
924
925  // Jump to point after the code-age stub.
926  __ add(r0, r0, Operand(kNoCodeAgeSequenceLength * Assembler::kInstrSize));
927  __ mov(pc, r0);
928 }
929 
930 
931 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
932  GenerateMakeCodeYoungAgainCommon(masm);
933 }
934 
935 
936 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
937  SaveFPRegsMode save_doubles) {
938  {
939  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
940 
941  // Preserve registers across notification, this is important for compiled
942  // stubs that tail call the runtime on deopts passing their parameters in
943  // registers.
944  __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved);
945  // Pass the function and deoptimization type to the runtime system.
946  __ CallRuntime(Runtime::kHiddenNotifyStubFailure, 0, save_doubles);
947  __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved);
948  }
949 
950  __ add(sp, sp, Operand(kPointerSize)); // Ignore state
951  __ mov(pc, lr); // Jump to miss handler
952 }
953 
954 
955 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
956  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
957 }
958 
959 
960 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
961  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
962 }
963 
964 
965 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
966  Deoptimizer::BailoutType type) {
967  {
968  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
969  // Pass the function and deoptimization type to the runtime system.
970  __ mov(r0, Operand(Smi::FromInt(static_cast<int>(type))));
971  __ push(r0);
972  __ CallRuntime(Runtime::kHiddenNotifyDeoptimized, 1);
973  }
974 
975  // Get the full codegen state from the stack and untag it -> r6.
976  __ ldr(r6, MemOperand(sp, 0 * kPointerSize));
977  __ SmiUntag(r6);
978  // Switch on the state.
979  Label with_tos_register, unknown_state;
980  __ cmp(r6, Operand(FullCodeGenerator::NO_REGISTERS));
981  __ b(ne, &with_tos_register);
982  __ add(sp, sp, Operand(1 * kPointerSize)); // Remove state.
983  __ Ret();
984 
985  __ bind(&with_tos_register);
986  __ ldr(r0, MemOperand(sp, 1 * kPointerSize));
987  __ cmp(r6, Operand(FullCodeGenerator::TOS_REG));
988  __ b(ne, &unknown_state);
989  __ add(sp, sp, Operand(2 * kPointerSize)); // Remove state.
990  __ Ret();
991 
992  __ bind(&unknown_state);
993  __ stop("no cases left");
994 }
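The tail of the helper dispatches on the full-codegen state word the deoptimizer left on the stack: NO_REGISTERS means only that word has to go, TOS_REG means a saved top-of-stack value sits above it and must be reloaded into r0. A sketch of the same dispatch over a toy stack:

// Sketch only: the state dispatch at the end of the helper, in plain C++.
// NO_REGISTERS: pop just the state word. TOS_REG: also reload the saved
// top-of-stack value into the return register before popping both words.
#include <cstdint>
#include <vector>

enum State { NO_REGISTERS, TOS_REG };

static intptr_t ResumeAfterNotify(std::vector<intptr_t>* stack,
                                  intptr_t current_r0) {
  State state = static_cast<State>(stack->back());  // sp[0]: state (untagged)
  stack->pop_back();                                // remove state
  if (state == TOS_REG) {
    current_r0 = stack->back();  // sp[1] held the saved accumulator
    stack->pop_back();
  }
  return current_r0;
}

int main() {
  std::vector<intptr_t> stack = { 42, TOS_REG };  // bottom..top
  return ResumeAfterNotify(&stack, 0) == 42 ? 0 : 1;
}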
995 
996 
997 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
998  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
999 }
1000 
1001 
1002 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
1003  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
1004 }
1005 
1006 
1007 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
1008  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
1009 }
1010 
1011 
1012 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1013  // Lookup the function in the JavaScript frame.
1014  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1015  {
1016  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1017  // Pass function as argument.
1018  __ push(r0);
1019  __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
1020  }
1021 
1022  // If the code object is null, just return to the unoptimized code.
1023  Label skip;
1024  __ cmp(r0, Operand(Smi::FromInt(0)));
1025  __ b(ne, &skip);
1026  __ Ret();
1027 
1028  __ bind(&skip);
1029 
1030  // Load deoptimization data from the code object.
1031  // <deopt_data> = <code>[#deoptimization_data_offset]
1032  __ ldr(r1, FieldMemOperand(r0, Code::kDeoptimizationDataOffset));
1033
1034  { ConstantPoolUnavailableScope constant_pool_unavailable(masm);
1035  if (FLAG_enable_ool_constant_pool) {
1036  __ ldr(pp, FieldMemOperand(r0, Code::kConstantPoolOffset));
1037  }
1038
1039  // Load the OSR entrypoint offset from the deoptimization data.
1040  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
1041  __ ldr(r1, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(
1042  DeoptimizationInputData::kOsrPcOffsetIndex)));
1043
1044  // Compute the target address = code_obj + header_size + osr_offset
1045  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
1046  __ add(r0, r0, Operand::SmiUntag(r1));
1047  __ add(lr, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
1048 
1049  // And "return" to the OSR entry point of the function.
1050  __ Ret();
1051  }
1052 }
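The entry address computed above is code object plus header size plus the OSR offset, where the offset arrives smi-tagged in the deoptimization data and the code pointer carries the heap-object tag. A sketch of that address arithmetic with stand-in constants (Code::kHeaderSize's real value is not reproduced here):

// Sketch only: the OSR entry address computation above, in plain C++.
// The offset arrives as a smi in the deopt data, so it is untagged first,
// then added to the code object together with the header size.
#include <cassert>
#include <cstdint>

const int kSmiTagSize = 1;       // 32-bit V8: smi = value << 1
const int kHeapObjectTag = 1;    // heap pointers are tagged with 1
const int kCodeHeaderSize = 32;  // hypothetical Code::kHeaderSize stand-in

static uintptr_t OsrEntry(uintptr_t tagged_code_obj, int32_t smi_osr_offset) {
  uintptr_t entry = tagged_code_obj + (smi_osr_offset >> kSmiTagSize);
  return entry + kCodeHeaderSize - kHeapObjectTag;  // skip header, drop tag
}

int main() {
  // A code object "pointer" 0x1000|tag with OSR offset 8 (smi 16).
  assert(OsrEntry(0x1001, 16) == 0x1000 + 8 + kCodeHeaderSize);
}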
1053 
1054 
1055 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
1056  // We check the stack limit as indicator that recompilation might be done.
1057  Label ok;
1058  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
1059  __ cmp(sp, Operand(ip));
1060  __ b(hs, &ok);
1061  {
1062  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1063  __ CallRuntime(Runtime::kHiddenStackGuard, 0);
1064  }
1065  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
1066  RelocInfo::CODE_TARGET);
1067 
1068  __ bind(&ok);
1069  __ Ret();
1070 }
1071 
1072 
1073 void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
1074  // 1. Make sure we have at least one argument.
1075  // r0: actual number of arguments
1076  { Label done;
1077  __ cmp(r0, Operand::Zero());
1078  __ b(ne, &done);
1079  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
1080  __ push(r2);
1081  __ add(r0, r0, Operand(1));
1082  __ bind(&done);
1083  }
1084 
1085  // 2. Get the function to call (passed as receiver) from the stack, check
1086  // if it is a function.
1087  // r0: actual number of arguments
1088  Label slow, non_function;
1089  __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
1090  __ JumpIfSmi(r1, &non_function);
1091  __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
1092  __ b(ne, &slow);
1093 
1094  // 3a. Patch the first argument if necessary when calling a function.
1095  // r0: actual number of arguments
1096  // r1: function
1097  Label shift_arguments;
1098  __ mov(r4, Operand::Zero()); // indicate regular JS_FUNCTION
1099  { Label convert_to_object, use_global_receiver, patch_receiver;
1100  // Change context eagerly in case we need the global receiver.
1101  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
1102
1103  // Do not transform the receiver for strict mode functions.
1104  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
1105  __ ldr(r3, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset));
1106  __ tst(r3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
1107  kSmiTagSize)));
1108  __ b(ne, &shift_arguments);
1109 
1110  // Do not transform the receiver for native (Compilerhints already in r3).
1111  __ tst(r3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
1112  __ b(ne, &shift_arguments);
1113 
1114  // Compute the receiver in sloppy mode.
1115  __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
1116  __ ldr(r2, MemOperand(r2, -kPointerSize));
1117  // r0: actual number of arguments
1118  // r1: function
1119  // r2: first argument
1120  __ JumpIfSmi(r2, &convert_to_object);
1121 
1122  __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
1123  __ cmp(r2, r3);
1124  __ b(eq, &use_global_receiver);
1125  __ LoadRoot(r3, Heap::kNullValueRootIndex);
1126  __ cmp(r2, r3);
1127  __ b(eq, &use_global_receiver);
1128 
1130  __ CompareObjectType(r2, r3, r3, FIRST_SPEC_OBJECT_TYPE);
1131  __ b(ge, &shift_arguments);
1132 
1133  __ bind(&convert_to_object);
1134 
1135  {
1136  // Enter an internal frame in order to preserve argument count.
1137  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1138  __ SmiTag(r0);
1139  __ push(r0);
1140 
1141  __ push(r2);
1142  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1143  __ mov(r2, r0);
1144 
1145  __ pop(r0);
1146  __ SmiUntag(r0);
1147 
1148  // Exit the internal frame.
1149  }
1150 
1151  // Restore the function to r1, and the flag to r4.
1152  __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
1153  __ mov(r4, Operand::Zero());
1154  __ jmp(&patch_receiver);
1155 
1156  __ bind(&use_global_receiver);
1157  __ ldr(r2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
1158  __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));
1159
1160  __ bind(&patch_receiver);
1161  __ add(r3, sp, Operand(r0, LSL, kPointerSizeLog2));
1162  __ str(r2, MemOperand(r3, -kPointerSize));
1163 
1164  __ jmp(&shift_arguments);
1165  }
1166 
1167  // 3b. Check for function proxy.
1168  __ bind(&slow);
1169  __ mov(r4, Operand(1, RelocInfo::NONE32)); // indicate function proxy
1170  __ cmp(r2, Operand(JS_FUNCTION_PROXY_TYPE));
1171  __ b(eq, &shift_arguments);
1172  __ bind(&non_function);
1173  __ mov(r4, Operand(2, RelocInfo::NONE32)); // indicate non-function
1174 
1175  // 3c. Patch the first argument when calling a non-function. The
1176  // CALL_NON_FUNCTION builtin expects the non-function callee as
1177  // receiver, so overwrite the first argument which will ultimately
1178  // become the receiver.
1179  // r0: actual number of arguments
1180  // r1: function
1181  // r4: call type (0: JS function, 1: function proxy, 2: non-function)
1182  __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
1183  __ str(r1, MemOperand(r2, -kPointerSize));
1184 
1185  // 4. Shift arguments and return address one slot down on the stack
1186  // (overwriting the original receiver). Adjust argument count to make
1187  // the original first argument the new receiver.
1188  // r0: actual number of arguments
1189  // r1: function
1190  // r4: call type (0: JS function, 1: function proxy, 2: non-function)
1191  __ bind(&shift_arguments);
1192  { Label loop;
1193  // Calculate the copy start address (destination). Copy end address is sp.
1194  __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
1195 
1196  __ bind(&loop);
1197  __ ldr(ip, MemOperand(r2, -kPointerSize));
1198  __ str(ip, MemOperand(r2));
1199  __ sub(r2, r2, Operand(kPointerSize));
1200  __ cmp(r2, sp);
1201  __ b(ne, &loop);
1202  // Adjust the actual number of arguments and remove the top element
1203  // (which is a copy of the last argument).
1204  __ sub(r0, r0, Operand(1));
1205  __ pop();
1206  }
1207 
1208  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
1209  // or a function proxy via CALL_FUNCTION_PROXY.
1210  // r0: actual number of arguments
1211  // r1: function
1212  // r4: call type (0: JS function, 1: function proxy, 2: non-function)
1213  { Label function, non_proxy;
1214  __ tst(r4, r4);
1215  __ b(eq, &function);
1216  // Expected number of arguments is 0 for CALL_NON_FUNCTION.
1217  __ mov(r2, Operand::Zero());
1218  __ cmp(r4, Operand(1));
1219  __ b(ne, &non_proxy);
1220 
1221  __ push(r1); // re-add proxy object as additional argument
1222  __ add(r0, r0, Operand(1));
1223  __ GetBuiltinFunction(r1, Builtins::CALL_FUNCTION_PROXY);
1224  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1225  RelocInfo::CODE_TARGET);
1226 
1227  __ bind(&non_proxy);
1228  __ GetBuiltinFunction(r1, Builtins::CALL_NON_FUNCTION);
1229  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1230  RelocInfo::CODE_TARGET);
1231  __ bind(&function);
1232  }
1233 
1234  // 5b. Get the code to call from the function and check that the number of
1235  // expected arguments matches what we're providing. If so, jump
1236  // (tail-call) to the code in register r3 without checking arguments.
1237  // r0: actual number of arguments
1238  // r1: function
1239  __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
1240  __ ldr(r2,
1241  FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
1242  __ SmiUntag(r2);
1243  __ cmp(r2, r0); // Check formal and actual parameter counts.
1244  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1245  RelocInfo::CODE_TARGET,
1246  ne);
1247 
1248  __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
1249  ParameterCount expected(0);
1250  __ InvokeCode(r3, expected, expected, JUMP_FUNCTION, NullCallWrapper());
1251 }
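Steps 3a-3c above implement the sloppy-mode receiver rules: strict and native functions keep their receiver untouched, null and undefined are replaced by the global receiver, and remaining primitives are boxed via TO_OBJECT. A sketch of the same decision over a toy value model:

// Sketch only: the sloppy-mode receiver patching implemented above, as plain
// C++ over a toy value model. Strict and native functions keep the receiver
// as-is; null/undefined become the global receiver; other primitives go
// through ToObject (TO_OBJECT above).
#include <cassert>

enum Kind { UNDEFINED_V, NULL_V, PRIMITIVE, OBJECT };
struct Value { Kind kind; };

static Value GlobalReceiver() { Value v = { OBJECT }; return v; }
static Value ToObject(Value v) { v.kind = OBJECT; return v; }  // boxes primitives

static Value ComputeReceiver(Value receiver, bool is_strict, bool is_native) {
  if (is_strict || is_native) return receiver;
  if (receiver.kind == UNDEFINED_V || receiver.kind == NULL_V) {
    return GlobalReceiver();
  }
  if (receiver.kind == PRIMITIVE) return ToObject(receiver);
  return receiver;  // already a spec object
}

int main() {
  Value undef = { UNDEFINED_V };
  assert(ComputeReceiver(undef, false, false).kind == OBJECT);
  assert(ComputeReceiver(undef, true, false).kind == UNDEFINED_V);
}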
1252 
1253 
1254 void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
1255  const int kIndexOffset =
1256  StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
1257  const int kLimitOffset =
1258  StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
1259  const int kArgsOffset = 2 * kPointerSize;
1260  const int kRecvOffset = 3 * kPointerSize;
1261  const int kFunctionOffset = 4 * kPointerSize;
1262 
1263  {
1264  FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL);
1265 
1266  __ ldr(r0, MemOperand(fp, kFunctionOffset)); // get the function
1267  __ push(r0);
1268  __ ldr(r0, MemOperand(fp, kArgsOffset)); // get the args array
1269  __ push(r0);
1270  __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
1271 
1272  // Check the stack for overflow. We are not trying to catch
1273  // interruptions (e.g. debug break and preemption) here, so the "real stack
1274  // limit" is checked.
1275  Label okay;
1276  __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
1277  // Make r2 the space we have left. The stack might already be overflowed
1278  // here which will cause r2 to become negative.
1279  __ sub(r2, sp, r2);
1280  // Check if the arguments will overflow the stack.
1281  __ cmp(r2, Operand::PointerOffsetFromSmiKey(r0));
1282  __ b(gt, &okay); // Signed comparison.
1283 
1284  // Out of stack space.
1285  __ ldr(r1, MemOperand(fp, kFunctionOffset));
1286  __ Push(r1, r0);
1287  __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
1288  // End of stack check.
1289 
1290  // Push current limit and index.
1291  __ bind(&okay);
1292  __ push(r0); // limit
1293  __ mov(r1, Operand::Zero()); // initial index
1294  __ push(r1);
1295 
1296  // Get the receiver.
1297  __ ldr(r0, MemOperand(fp, kRecvOffset));
1298 
1299  // Check that the function is a JS function (otherwise it must be a proxy).
1300  Label push_receiver;
1301  __ ldr(r1, MemOperand(fp, kFunctionOffset));
1302  __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
1303  __ b(ne, &push_receiver);
1304 
1305  // Change context eagerly to get the right global object if necessary.
1306  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
1307  // Load the shared function info while the function is still in r1.
1308  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
1309
1310  // Compute the receiver.
1311  // Do not transform the receiver for strict mode functions.
1312  Label call_to_object, use_global_receiver;
1313  __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset));
1314  __ tst(r2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
1315  kSmiTagSize)));
1316  __ b(ne, &push_receiver);
1317 
1318  // Do not transform the receiver for native (Compilerhints already in r2).
1319  __ tst(r2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
1320  __ b(ne, &push_receiver);
1321 
1322  // Compute the receiver in sloppy mode.
1323  __ JumpIfSmi(r0, &call_to_object);
1324  __ LoadRoot(r1, Heap::kNullValueRootIndex);
1325  __ cmp(r0, r1);
1326  __ b(eq, &use_global_receiver);
1327  __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
1328  __ cmp(r0, r1);
1329  __ b(eq, &use_global_receiver);
1330 
1331  // Check if the receiver is already a JavaScript object.
1332  // r0: receiver
1333  STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1334  __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
1335  __ b(ge, &push_receiver);
1336 
1337  // Convert the receiver to a regular object.
1338  // r0: receiver
1339  __ bind(&call_to_object);
1340  __ push(r0);
1341  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1342  __ b(&push_receiver);
1343 
1344  __ bind(&use_global_receiver);
1345  __ ldr(r0, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
1346  __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
1347
1348  // Push the receiver.
1349  // r0: receiver
1350  __ bind(&push_receiver);
1351  __ push(r0);
1352 
1353  // Copy all arguments from the array to the stack.
1354  Label entry, loop;
1355  __ ldr(r0, MemOperand(fp, kIndexOffset));
1356  __ b(&entry);
1357 
1358  // Load the current argument from the arguments array and push it to the
1359  // stack.
1360  // r0: current argument index
1361  __ bind(&loop);
1362  __ ldr(r1, MemOperand(fp, kArgsOffset));
1363  __ Push(r1, r0);
1364 
1365  // Call the runtime to access the property in the arguments array.
1366  __ CallRuntime(Runtime::kGetProperty, 2);
1367  __ push(r0);
1368 
1369  // Advance the smi index in the frame slot.
1370  __ ldr(r0, MemOperand(fp, kIndexOffset));
1371  __ add(r0, r0, Operand(1 << kSmiTagSize));
1372  __ str(r0, MemOperand(fp, kIndexOffset));
1373 
1374  // Test if the copy loop has finished copying all the elements from the
1375  // arguments object.
1376  __ bind(&entry);
1377  __ ldr(r1, MemOperand(fp, kLimitOffset));
1378  __ cmp(r0, r1);
1379  __ b(ne, &loop);
1380 
1381  // Call the function.
1382  Label call_proxy;
1383  ParameterCount actual(r0);
1384  __ SmiUntag(r0);
1385  __ ldr(r1, MemOperand(fp, kFunctionOffset));
1386  __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
1387  __ b(ne, &call_proxy);
1388  __ InvokeFunction(r1, actual, CALL_FUNCTION, NullCallWrapper());
1389 
1390  frame_scope.GenerateLeaveFrame();
1391  __ add(sp, sp, Operand(3 * kPointerSize));
1392  __ Jump(lr);
1393 
1394  // Call the function proxy.
1395  __ bind(&call_proxy);
1396  __ push(r1); // add function proxy as last argument
1397  __ add(r0, r0, Operand(1));
1398  __ mov(r2, Operand::Zero());
1399  __ GetBuiltinFunction(r1, Builtins::CALL_FUNCTION_PROXY);
1400  __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1401  RelocInfo::CODE_TARGET);
1402 
1403  // Tear down the internal frame and remove function, receiver and args.
1404  }
1405  __ add(sp, sp, Operand(3 * kPointerSize));
1406  __ Jump(lr);
1407 }
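The core of apply is the loop above: the current index and the limit live in frame slots, and every iteration fetches args[index] with a runtime property load and pushes the result. A sketch of the loop's effect (Get stands in for the Runtime::kGetProperty call):

// Sketch only: the push loop of Function.prototype.apply as plain C++.
// 'Get' stands in for the Runtime::kGetProperty call; index and limit are
// kept in frame slots (kIndexOffset/kLimitOffset) in the real code.
#include <vector>

struct ArgsObject {
  std::vector<int> elements;
  int Get(int index) const { return elements[index]; }  // runtime GetProperty
};

static std::vector<int> PushApplyArguments(const ArgsObject& args, int limit) {
  std::vector<int> stack;
  for (int index = 0; index < limit; ++index) {  // b(&entry) ... b(ne, &loop)
    stack.push_back(args.Get(index));
  }
  return stack;
}

int main() {
  ArgsObject args = { { 10, 20, 30 } };
  return PushApplyArguments(args, 3).size() == 3 ? 0 : 1;
}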
1408 
1409 
1410 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1411  __ SmiTag(r0);
1412  __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
1413  __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() |
1414  (FLAG_enable_ool_constant_pool ? pp.bit() : 0) |
1415  fp.bit() | lr.bit());
1416  __ add(fp, sp,
1417  Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
1418 }
1419 
1420 
1421 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1422  // ----------- S t a t e -------------
1423  // -- r0 : result being passed through
1424  // -----------------------------------
1425  // Get the number of arguments passed (as a smi), tear down the frame and
1426  // then tear down the parameters.
1427  __ ldr(r1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
1428  kPointerSize)));
1429 
1430  if (FLAG_enable_ool_constant_pool) {
1431  __ add(sp, fp, Operand(StandardFrameConstants::kConstantPoolOffset));
1432  __ ldm(ia_w, sp, pp.bit() | fp.bit() | lr.bit());
1433  } else {
1434  __ mov(sp, fp);
1435  __ ldm(ia_w, sp, fp.bit() | lr.bit());
1436  }
1437  __ add(sp, sp, Operand::PointerOffsetFromSmiKey(r1));
1438  __ add(sp, sp, Operand(kPointerSize)); // adjust for receiver
1439 }
1440 
1441 
1442 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1443  // ----------- S t a t e -------------
1444  // -- r0 : actual number of arguments
1445  // -- r1 : function (passed through to callee)
1446  // -- r2 : expected number of arguments
1447  // -----------------------------------
1448 
1449  Label invoke, dont_adapt_arguments;
1450 
1451  Label enough, too_few;
1452  __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
1453  __ cmp(r0, r2);
1454  __ b(lt, &too_few);
1455  __ cmp(r2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
1456  __ b(eq, &dont_adapt_arguments);
1457 
1458  { // Enough parameters: actual >= expected
1459  __ bind(&enough);
1460  EnterArgumentsAdaptorFrame(masm);
1461 
1462  // Calculate copy start address into r0 and copy end address into r2.
1463  // r0: actual number of arguments as a smi
1464  // r1: function
1465  // r2: expected number of arguments
1466  // r3: code entry to call
1467  __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));
1468  // adjust for return address and receiver
1469  __ add(r0, r0, Operand(2 * kPointerSize));
1470  __ sub(r2, r0, Operand(r2, LSL, kPointerSizeLog2));
1471 
1472  // Copy the arguments (including the receiver) to the new stack frame.
1473  // r0: copy start address
1474  // r1: function
1475  // r2: copy end address
1476  // r3: code entry to call
1477 
1478  Label copy;
1479  __ bind(&copy);
1480  __ ldr(ip, MemOperand(r0, 0));
1481  __ push(ip);
1482  __ cmp(r0, r2); // Compare before moving to next argument.
1483  __ sub(r0, r0, Operand(kPointerSize));
1484  __ b(ne, &copy);
1485 
1486  __ b(&invoke);
1487  }
1488 
1489  { // Too few parameters: Actual < expected
1490  __ bind(&too_few);
1491  EnterArgumentsAdaptorFrame(masm);
1492 
1493  // Calculate copy start address into r0 and copy end address is fp.
1494  // r0: actual number of arguments as a smi
1495  // r1: function
1496  // r2: expected number of arguments
1497  // r3: code entry to call
1498  __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));
1499 
1500  // Copy the arguments (including the receiver) to the new stack frame.
1501  // r0: copy start address
1502  // r1: function
1503  // r2: expected number of arguments
1504  // r3: code entry to call
1505  Label copy;
1506  __ bind(&copy);
1507  // Adjust load for return address and receiver.
1508  __ ldr(ip, MemOperand(r0, 2 * kPointerSize));
1509  __ push(ip);
1510  __ cmp(r0, fp); // Compare before moving to next argument.
1511  __ sub(r0, r0, Operand(kPointerSize));
1512  __ b(ne, &copy);
1513 
1514  // Fill the remaining expected arguments with undefined.
1515  // r1: function
1516  // r2: expected number of arguments
1517  // r3: code entry to call
1518  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1519  __ sub(r2, fp, Operand(r2, LSL, kPointerSizeLog2));
1520  // Adjust for frame.
1521  __ sub(r2, r2, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
1522  2 * kPointerSize));
1523 
1524  Label fill;
1525  __ bind(&fill);
1526  __ push(ip);
1527  __ cmp(sp, r2);
1528  __ b(ne, &fill);
1529  }
1530 
1531  // Call the entry point.
1532  __ bind(&invoke);
1533  __ Call(r3);
1534 
1535  // Store offset of return address for deoptimizer.
1536  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
1537 
1538  // Exit frame and return.
1539  LeaveArgumentsAdaptorFrame(masm);
1540  __ Jump(lr);
1541 
1542 
1543  // -------------------------------------------
1544  // Don't adapt arguments.
1545  // -------------------------------------------
1546  __ bind(&dont_adapt_arguments);
1547  __ Jump(r3);
1548 }
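What the adaptor frame achieves, stripped of the register choreography: with too many actual arguments the callee still sees only the expected ones, with too few the missing slots are filled with undefined, and kDontAdaptArgumentsSentinel (checked before &dont_adapt_arguments) bypasses adaptation entirely. A sketch with ints standing in for tagged values:

// Sketch only: the adaptor's copy/fill semantics as plain C++. With too many
// actual arguments the callee sees 'expected' of them; with too few, the
// missing slots are filled with undefined.
#include <cassert>
#include <vector>

static const int kUndefined = -1;  // stand-in for the undefined value

static std::vector<int> AdaptArguments(const std::vector<int>& actual,
                                       int expected) {
  std::vector<int> adapted;
  for (int i = 0; i < expected; ++i) {
    adapted.push_back(i < static_cast<int>(actual.size()) ? actual[i]
                                                          : kUndefined);
  }
  return adapted;
}

int main() {
  std::vector<int> actual = { 1, 2, 3 };
  assert(AdaptArguments(actual, 2).size() == 2);       // enough: extras ignored
  assert(AdaptArguments(actual, 5)[4] == kUndefined);  // too few: filled
}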
1549 
1550 
1551 #undef __
1552 
1553 } } // namespace v8::internal
1554 
1555 #endif // V8_TARGET_ARCH_ARM