v8 3.14.5 (node 0.10.28)
V8 is Google's open source JavaScript engine.
builtins-arm.cc
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_ARM)

#include "codegen.h"
#include "debug.h"
#include "deoptimizer.h"
#include "full-codegen.h"
#include "runtime.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)

void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- r0                 : number of arguments excluding receiver
  //  -- r1                 : called function (only guaranteed when
  //                          extra_args requires it)
  //  -- cp                 : context
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument (argc == r0)
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ push(r1);
  } else {
    ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects r0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ add(r0, r0, Operand(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}

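// Illustrative note (not part of the original source): a worked example of
// the argument-count adjustment above. For a builtin invoked as f(a, b)
// with extra_args == NEEDS_CALLED_FUNCTION, r0 arrives as 2 (receiver
// excluded), one extra argument (the function in r1) is pushed, and r0
// becomes 2 + 1 + 1 = 4: the two arguments, the extra argument, and the
// receiver, which is what the C entry code expects.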

// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the native context.

  __ ldr(result,
         MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ ldr(result,
         FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the InternalArray function from the native context.
  __ ldr(result,
         MemOperand(result,
                    Context::SlotOffset(
                        Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
}


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the native context.

  __ ldr(result,
         MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ ldr(result,
         FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the Array function from the native context.
  __ ldr(result,
         MemOperand(result,
                    Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}


// Allocate an empty JSArray. The allocated array is put into the result
// register. An elements backing store is allocated with size initial_capacity
// and filled with the hole value.
static void AllocateEmptyJSArray(MacroAssembler* masm,
                                 Register array_function,
                                 Register result,
                                 Register scratch1,
                                 Register scratch2,
                                 Register scratch3,
                                 Label* gc_required) {
  const int initial_capacity = JSArray::kPreallocatedArrayElements;
  STATIC_ASSERT(initial_capacity >= 0);
  __ LoadInitialArrayMap(array_function, scratch2, scratch1, false);

  // Allocate the JSArray object together with space for a fixed array with the
  // requested elements.
  int size = JSArray::kSize;
  if (initial_capacity > 0) {
    size += FixedArray::SizeFor(initial_capacity);
  }
  __ AllocateInNewSpace(size,
                        result,
                        scratch2,
                        scratch3,
                        gc_required,
                        TAG_OBJECT);

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // scratch1: initial map
  // scratch2: start of next object
  __ str(scratch1, FieldMemOperand(result, JSObject::kMapOffset));
  __ LoadRoot(scratch1, Heap::kEmptyFixedArrayRootIndex);
  __ str(scratch1, FieldMemOperand(result, JSArray::kPropertiesOffset));
  // Field JSArray::kElementsOffset is initialized later.
  __ mov(scratch3, Operand(0, RelocInfo::NONE));
  __ str(scratch3, FieldMemOperand(result, JSArray::kLengthOffset));

  if (initial_capacity == 0) {
    __ str(scratch1, FieldMemOperand(result, JSArray::kElementsOffset));
    return;
  }

  // Calculate the location of the elements array and set elements array member
  // of the JSArray.
  // result: JSObject
  // scratch2: start of next object
  __ add(scratch1, result, Operand(JSArray::kSize));
  __ str(scratch1, FieldMemOperand(result, JSArray::kElementsOffset));

  // Clear the heap tag on the elements array.
  __ sub(scratch1, scratch1, Operand(kHeapObjectTag));

  // Initialize the FixedArray and fill it with holes. FixedArray length is
  // stored as a smi.
  // result: JSObject
  // scratch1: elements array (untagged)
  // scratch2: start of next object
  __ LoadRoot(scratch3, Heap::kFixedArrayMapRootIndex);
  STATIC_ASSERT(0 * kPointerSize == FixedArray::kMapOffset);
  __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));
  __ mov(scratch3, Operand(Smi::FromInt(initial_capacity)));
  STATIC_ASSERT(1 * kPointerSize == FixedArray::kLengthOffset);
  __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));

  // Fill the FixedArray with the hole value. Inline the code if short.
  STATIC_ASSERT(2 * kPointerSize == FixedArray::kHeaderSize);
  __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
  static const int kLoopUnfoldLimit = 4;
  if (initial_capacity <= kLoopUnfoldLimit) {
    for (int i = 0; i < initial_capacity; i++) {
      __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));
    }
  } else {
    Label loop, entry;
    __ add(scratch2, scratch1, Operand(initial_capacity * kPointerSize));
    __ b(&entry);
    __ bind(&loop);
    __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));
    __ bind(&entry);
    __ cmp(scratch1, scratch2);
    __ b(lt, &loop);
  }
}
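// Illustrative sketch (not in the original source) of the object produced
// above for initial_capacity == 4, assuming 4-byte pointers:
//
//   JSArray (tagged pointer in result):
//     +0   map        (initial array map)
//     +4   properties (empty fixed array)
//     +8   length     (smi 0)
//     +12  elements ----> FixedArray:
//                           +0  map     (fixed array map)
//                           +4  length  (smi 4)
//                           +8  the_hole x 4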

// Allocate a JSArray with the number of elements stored in a register. The
// register array_function holds the built-in Array function and the register
// array_size holds the size of the array as a smi. The allocated array is put
// into the result register and the beginning and end of the FixedArray
// elements storage are put into registers elements_array_storage and
// elements_array_end (see below for when that is not the case). If the
// parameter fill_with_hole is true the allocated elements backing store is
// filled with the hole value, otherwise it is left uninitialized. When the
// backing store is filled the register elements_array_storage is scratched.
static void AllocateJSArray(MacroAssembler* masm,
                            Register array_function,  // Array function.
                            Register array_size,  // As a smi, cannot be 0.
                            Register result,
                            Register elements_array_storage,
                            Register elements_array_end,
                            Register scratch1,
                            Register scratch2,
                            bool fill_with_hole,
                            Label* gc_required) {
  // Load the initial map from the array function.
  __ LoadInitialArrayMap(array_function, scratch2,
                         elements_array_storage, fill_with_hole);

  if (FLAG_debug_code) {  // Assert that array size is not zero.
    __ tst(array_size, array_size);
    __ Assert(ne, "array size is unexpectedly 0");
  }

  // Allocate the JSArray object together with space for a FixedArray with the
  // requested number of elements.
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ mov(elements_array_end,
         Operand((JSArray::kSize + FixedArray::kHeaderSize) / kPointerSize));
  __ add(elements_array_end,
         elements_array_end,
         Operand(array_size, ASR, kSmiTagSize));
  __ AllocateInNewSpace(
      elements_array_end,
      result,
      scratch1,
      scratch2,
      gc_required,
      static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS));

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // elements_array_storage: initial map
  // array_size: size of array (smi)
  __ str(elements_array_storage, FieldMemOperand(result, JSObject::kMapOffset));
  __ LoadRoot(elements_array_storage, Heap::kEmptyFixedArrayRootIndex);
  __ str(elements_array_storage,
         FieldMemOperand(result, JSArray::kPropertiesOffset));
  // Field JSArray::kElementsOffset is initialized later.
  __ str(array_size, FieldMemOperand(result, JSArray::kLengthOffset));

  // Calculate the location of the elements array and set elements array member
  // of the JSArray.
  // result: JSObject
  // array_size: size of array (smi)
  __ add(elements_array_storage, result, Operand(JSArray::kSize));
  __ str(elements_array_storage,
         FieldMemOperand(result, JSArray::kElementsOffset));

  // Clear the heap tag on the elements array.
  STATIC_ASSERT(kSmiTag == 0);
  __ sub(elements_array_storage,
         elements_array_storage,
         Operand(kHeapObjectTag));
  // Initialize the fixed array and fill it with holes. FixedArray length is
  // stored as a smi.
  // result: JSObject
  // elements_array_storage: elements array (untagged)
  // array_size: size of array (smi)
  __ LoadRoot(scratch1, Heap::kFixedArrayMapRootIndex);
  ASSERT_EQ(0 * kPointerSize, FixedArray::kMapOffset);
  __ str(scratch1, MemOperand(elements_array_storage, kPointerSize, PostIndex));
  STATIC_ASSERT(kSmiTag == 0);
  ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
  __ str(array_size,
         MemOperand(elements_array_storage, kPointerSize, PostIndex));

  // Calculate elements array and elements array end.
  // result: JSObject
  // elements_array_storage: elements array element storage
  // array_size: smi-tagged size of elements array
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
  __ add(elements_array_end,
         elements_array_storage,
         Operand(array_size, LSL, kPointerSizeLog2 - kSmiTagSize));

  // Fill the allocated FixedArray with the hole value if requested.
  // result: JSObject
  // elements_array_storage: elements array element storage
  // elements_array_end: start of next object
  if (fill_with_hole) {
    Label loop, entry;
    __ LoadRoot(scratch1, Heap::kTheHoleValueRootIndex);
    __ jmp(&entry);
    __ bind(&loop);
    __ str(scratch1,
           MemOperand(elements_array_storage, kPointerSize, PostIndex));
    __ bind(&entry);
    __ cmp(elements_array_storage, elements_array_end);
    __ b(lt, &loop);
  }
}
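// Illustrative note (not in the original source) on the size computation
// above: smis store the value shifted left by kSmiTagSize, so for
// new Array(4) the array_size register holds 4 << 1 == 8 on ARM. The size
// in words is (JSArray::kSize + FixedArray::kHeaderSize) / kPointerSize for
// the headers plus array_size >> kSmiTagSize == 4 element slots, and the
// SIZE_IN_WORDS flag tells AllocateInNewSpace to scale that word count by
// kPointerSize.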

// Create a new array for the built-in Array function. This function allocates
// the JSArray object and the FixedArray elements array and initializes these.
// If the Array cannot be constructed in native code the runtime is called.
// This function assumes the following state:
// r0: argc
// r1: constructor (built-in Array function)
// lr: return address
// sp[0]: last argument
// This function is used for both construct and normal calls of Array. The only
// difference between handling a construct call and a normal call is that for a
// construct call the constructor function in r1 needs to be preserved for
// entering the generic code. In both cases argc in r0 needs to be preserved.
// Both registers are preserved by this code so no need to differentiate
// between construct call and normal call.
static void ArrayNativeCode(MacroAssembler* masm,
                            Label* call_generic_code) {
  Counters* counters = masm->isolate()->counters();
  Label argc_one_or_more, argc_two_or_more, not_empty_array, empty_array,
      has_non_smi_element, finish, cant_transition_map, not_double;

  // Check for array construction with zero arguments or one.
  __ cmp(r0, Operand(0, RelocInfo::NONE));
  __ b(ne, &argc_one_or_more);

  // Handle construction of an empty array.
  __ bind(&empty_array);
  AllocateEmptyJSArray(masm,
                       r1,
                       r2,
                       r3,
                       r4,
                       r5,
                       call_generic_code);
  __ IncrementCounter(counters->array_function_native(), 1, r3, r4);
  // Set up return value, remove receiver from stack and return.
  __ mov(r0, r2);
  __ add(sp, sp, Operand(kPointerSize));
  __ Jump(lr);

  // Check for one argument. Bail out if argument is not smi or if it is
  // negative.
  __ bind(&argc_one_or_more);
  __ cmp(r0, Operand(1));
  __ b(ne, &argc_two_or_more);
  STATIC_ASSERT(kSmiTag == 0);
  __ ldr(r2, MemOperand(sp));  // Get the argument from the stack.
  __ tst(r2, r2);
  __ b(ne, &not_empty_array);
  __ Drop(1);  // Adjust stack.
  __ mov(r0, Operand(0));  // Treat this as a call with argc of zero.
  __ b(&empty_array);

  __ bind(&not_empty_array);
  __ and_(r3, r2, Operand(kIntptrSignBit | kSmiTagMask), SetCC);
  __ b(ne, call_generic_code);

  // Handle construction of an empty array of a certain size. Bail out if size
  // is too large to actually allocate an elements array.
  STATIC_ASSERT(kSmiTag == 0);
  __ cmp(r2, Operand(JSObject::kInitialMaxFastElementArray << kSmiTagSize));
  __ b(ge, call_generic_code);

  // r0: argc
  // r1: constructor
  // r2: array_size (smi)
  // sp[0]: argument
  AllocateJSArray(masm,
                  r1,
                  r2,
                  r3,
                  r4,
                  r5,
                  r6,
                  r7,
                  true,
                  call_generic_code);
  __ IncrementCounter(counters->array_function_native(), 1, r2, r4);
  // Set up return value, remove receiver and argument from stack and return.
  __ mov(r0, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ Jump(lr);

  // Handle construction of an array from a list of arguments.
  __ bind(&argc_two_or_more);
  __ mov(r2, Operand(r0, LSL, kSmiTagSize));  // Convert argc to a smi.

  // r0: argc
  // r1: constructor
  // r2: array_size (smi)
  // sp[0]: last argument
  AllocateJSArray(masm,
                  r1,
                  r2,
                  r3,
                  r4,
                  r5,
                  r6,
                  r7,
                  false,
                  call_generic_code);
  __ IncrementCounter(counters->array_function_native(), 1, r2, r6);

  // Fill arguments as array elements. Copy from the top of the stack (last
  // element) to the array backing store filling it backwards. Note:
  // elements_array_end points after the backing store therefore PreIndex is
  // used when filling the backing store.
  // r0: argc
  // r3: JSArray
  // r4: elements_array storage start (untagged)
  // r5: elements_array_end (untagged)
  // sp[0]: last argument
  Label loop, entry;
  __ mov(r7, sp);
  __ jmp(&entry);
  __ bind(&loop);
  __ ldr(r2, MemOperand(r7, kPointerSize, PostIndex));
  if (FLAG_smi_only_arrays) {
    __ JumpIfNotSmi(r2, &has_non_smi_element);
  }
  __ str(r2, MemOperand(r5, -kPointerSize, PreIndex));
  __ bind(&entry);
  __ cmp(r4, r5);
  __ b(lt, &loop);

  __ bind(&finish);
  __ mov(sp, r7);

  // Remove caller arguments and receiver from the stack, setup return value
  // and return.
  // r0: argc
  // r3: JSArray
  // sp[0]: receiver
  __ add(sp, sp, Operand(kPointerSize));
  __ mov(r0, r3);
  __ Jump(lr);

  __ bind(&has_non_smi_element);
  // Double values are handled by the runtime.
  __ CheckMap(
      r2, r9, Heap::kHeapNumberMapRootIndex, &not_double, DONT_DO_SMI_CHECK);
  __ bind(&cant_transition_map);
  __ UndoAllocationInNewSpace(r3, r4);
  __ b(call_generic_code);

  __ bind(&not_double);
  // Transition FAST_SMI_ELEMENTS to FAST_ELEMENTS.
  // r3: JSArray
  __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                         FAST_ELEMENTS,
                                         r2,
                                         r9,
                                         &cant_transition_map);
  __ str(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ RecordWriteField(r3,
                      HeapObject::kMapOffset,
                      r2,
                      r9,
                      kLRHasNotBeenSaved,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  Label loop2;
  __ sub(r7, r7, Operand(kPointerSize));
  __ bind(&loop2);
  __ ldr(r2, MemOperand(r7, kPointerSize, PostIndex));
  __ str(r2, MemOperand(r5, -kPointerSize, PreIndex));
  __ cmp(r4, r5);
  __ b(lt, &loop2);
  __ b(&finish);
}
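// Illustrative mapping (not in the original source) of the three paths
// above to JavaScript-level calls:
//   new Array()        -> empty_array: backing store of the preallocated
//                         capacity, length 0.
//   new Array(7)       -> argc_one_or_more: a hole-filled backing store of
//                         length 7, provided 7 is a non-negative smi small
//                         enough for a fast elements array.
//   new Array(1, 2, 3) -> argc_two_or_more: a backing store of length 3
//                         filled from the stack; a non-smi element either
//                         transitions the map to FAST_ELEMENTS or bails
//                         out to the generic code.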


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, r1);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ tst(r2, Operand(kSmiTagMask));
    __ Assert(ne, "Unexpected initial map for InternalArray function");
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, "Unexpected initial map for InternalArray function");
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  ArrayNativeCode(masm, &generic_array_code);

  // Jump to the generic array code if the specialized code cannot handle the
  // construction.
  __ bind(&generic_array_code);

  Handle<Code> array_code =
      masm->isolate()->builtins()->InternalArrayCodeGeneric();
  __ Jump(array_code, RelocInfo::CODE_TARGET);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, r1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ tst(r2, Operand(kSmiTagMask));
    __ Assert(ne, "Unexpected initial map for Array function");
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as a normal function.
  ArrayNativeCode(masm, &generic_array_code);

  // Jump to the generic array code if the specialized code cannot handle
  // the construction.
  __ bind(&generic_array_code);

  Handle<Code> array_code =
      masm->isolate()->builtins()->ArrayCodeGeneric();
  __ Jump(array_code, RelocInfo::CODE_TARGET);
}


void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- r1     : constructor function
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_constructor;

  if (FLAG_debug_code) {
    // The array construct code is only set for the builtin and internal
    // Array functions which always have a map.
    // Initial map for the builtin Array function should be a map.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ tst(r2, Operand(kSmiTagMask));
    __ Assert(ne, "Unexpected initial map for Array function");
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as a constructor.
  ArrayNativeCode(masm, &generic_constructor);

  // Jump to the generic construct code in case the specialized code cannot
  // handle the construction.
  __ bind(&generic_constructor);
  Handle<Code> generic_construct_stub =
      masm->isolate()->builtins()->JSConstructStubGeneric();
  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
}


void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- r1                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1, r2, r3);

  Register function = r1;
  if (FLAG_debug_code) {
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, r2);
    __ cmp(function, Operand(r2));
    __ Assert(eq, "Unexpected String function");
  }

  // Load the first argument in r0 and get rid of the rest.
  Label no_arguments;
  __ cmp(r0, Operand(0, RelocInfo::NONE));
  __ b(eq, &no_arguments);
  // First args = sp[(argc - 1) * 4].
  __ sub(r0, r0, Operand(1));
  __ ldr(r0, MemOperand(sp, r0, LSL, kPointerSizeLog2));
  // sp now points to args[0]; drop args[0] and the receiver.
  __ Drop(2);

  Register argument = r2;
  Label not_cached, argument_is_string;
  NumberToStringStub::GenerateLookupNumberStringCache(
      masm,
      r0,        // Input.
      argument,  // Result.
      r3,        // Scratch.
      r4,        // Scratch.
      r5,        // Scratch.
      false,     // Is it a Smi?
      &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1, r3, r4);
  __ bind(&argument_is_string);

  // ----------- S t a t e -------------
  //  -- r2     : argument converted to string
  //  -- r1     : constructor function
  //  -- lr     : return address
  // -----------------------------------

  Label gc_required;
  __ AllocateInNewSpace(JSValue::kSize,
                        r0,  // Result.
                        r3,  // Scratch.
                        r4,  // Scratch.
                        &gc_required,
                        TAG_OBJECT);

  // Initialize the String object.
  Register map = r3;
  __ LoadGlobalFunctionInitialMap(function, map, r4);
  if (FLAG_debug_code) {
    __ ldrb(r4, FieldMemOperand(map, Map::kInstanceSizeOffset));
    __ cmp(r4, Operand(JSValue::kSize >> kPointerSizeLog2));
    __ Assert(eq, "Unexpected string wrapper instance size");
    __ ldrb(r4, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
    __ cmp(r4, Operand(0, RelocInfo::NONE));
    __ Assert(eq, "Unexpected unused properties of string wrapper");
  }
  __ str(map, FieldMemOperand(r0, HeapObject::kMapOffset));

  __ LoadRoot(r3, Heap::kEmptyFixedArrayRootIndex);
  __ str(r3, FieldMemOperand(r0, JSObject::kPropertiesOffset));
  __ str(r3, FieldMemOperand(r0, JSObject::kElementsOffset));

  __ str(argument, FieldMemOperand(r0, JSValue::kValueOffset));

  // Ensure the object is fully initialized.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);

  __ Ret();

  // The argument was not found in the number to string cache. Check
  // if it's a string already before calling the conversion builtin.
  Label convert_argument;
  __ bind(&not_cached);
  __ JumpIfSmi(r0, &convert_argument);

  // Is it a String?
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  __ tst(r3, Operand(kIsNotStringMask));
  __ b(ne, &convert_argument);
  __ mov(argument, r0);
  __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
  __ b(&argument_is_string);

  // Invoke the conversion builtin and put the result into r2.
  __ bind(&convert_argument);
  __ push(function);  // Preserve the function.
  __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(r0);
    __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
  }
  __ pop(function);
  __ mov(argument, r0);
  __ b(&argument_is_string);

  // Load the empty string into r2, remove the receiver from the
  // stack, and jump back to the case where the argument is a string.
  __ bind(&no_arguments);
  __ LoadRoot(argument, Heap::kEmptyStringRootIndex);
  __ Drop(1);
  __ b(&argument_is_string);

  // At this point the argument is already a string. Call runtime to
  // create a string wrapper.
  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1, r3, r4);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(argument);
    __ CallRuntime(Runtime::kNewStringWrapper, 1);
  }
  __ Ret();
}
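// Illustrative note (not in the original source): this builtin implements
// new String(x). The result is a JSValue wrapper whose layout matches the
// STATIC_ASSERT above; with 4-byte pointers:
//   +0   map         (initial map of the String function)
//   +4   properties  (empty fixed array)
//   +8   elements    (empty fixed array)
//   +12  value       (the argument converted to a string)
// giving JSValue::kSize == 4 * kPointerSize == 16 bytes.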


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset));
  __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ mov(pc, r2);
}


void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
  GenerateTailCallToSharedCode(masm);
}


void Builtins::Generate_ParallelRecompile(MacroAssembler* masm) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Push a copy of the function onto the stack.
    __ push(r1);
    // Push call kind information.
    __ push(r5);

    __ push(r1);  // Function is also the parameter to the runtime call.
    __ CallRuntime(Runtime::kParallelRecompile, 1);

    // Restore call kind information.
    __ pop(r5);
    // Restore the function.
    __ pop(r1);

    // Tear down internal frame.
  }

  GenerateTailCallToSharedCode(masm);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool count_constructions) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- r1     : constructor function
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // Should never count constructions for api objects.
  ASSERT(!is_api_function || !count_constructions);

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the two incoming parameters on the stack.
    __ mov(r0, Operand(r0, LSL, kSmiTagSize));
    __ push(r0);  // Smi-tagged arguments count.
    __ push(r1);  // Constructor function.

    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    Label rt_call, allocated;
    if (FLAG_inline_new) {
      Label undo_allocation;
#ifdef ENABLE_DEBUGGER_SUPPORT
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(isolate);
      __ mov(r2, Operand(debug_step_in_fp));
      __ ldr(r2, MemOperand(r2));
      __ tst(r2, r2);
      __ b(ne, &rt_call);
#endif

      // Load the initial map and verify that it is in fact a map.
      // r1: constructor function
      __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
      __ JumpIfSmi(r2, &rt_call);
      __ CompareObjectType(r2, r3, r4, MAP_TYPE);
      __ b(ne, &rt_call);

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc). In which case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      // r1: constructor function
      // r2: initial map
      __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
      __ b(eq, &rt_call);

      if (count_constructions) {
        Label allocate;
        // Decrease generous allocation count.
        __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
        MemOperand constructor_count =
            FieldMemOperand(r3, SharedFunctionInfo::kConstructionCountOffset);
        __ ldrb(r4, constructor_count);
        __ sub(r4, r4, Operand(1), SetCC);
        __ strb(r4, constructor_count);
        __ b(ne, &allocate);

        __ Push(r1, r2);

        __ push(r1);  // constructor
        // The call will replace the stub, so the countdown is only done once.
        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

        __ pop(r2);
        __ pop(r1);

        __ bind(&allocate);
      }

      // Now allocate the JSObject on the heap.
      // r1: constructor function
      // r2: initial map
      __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
      __ AllocateInNewSpace(r3, r4, r5, r6, &rt_call, SIZE_IN_WORDS);

      // Allocated the JSObject, now initialize the fields. Map is set to
      // initial map and properties and elements are set to empty fixed array.
      // r1: constructor function
      // r2: initial map
      // r3: object size
      // r4: JSObject (not tagged)
      __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
      __ mov(r5, r4);
      ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
      __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
      ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
      __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
      ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
      __ str(r6, MemOperand(r5, kPointerSize, PostIndex));

      // Fill all the in-object properties with the appropriate filler.
      // r1: constructor function
      // r2: initial map
      // r3: object size (in words)
      // r4: JSObject (not tagged)
      // r5: First in-object property of JSObject (not tagged)
      __ add(r6, r4, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
      ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize);
      __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
      if (count_constructions) {
        __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
        __ Ubfx(r0, r0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
                kBitsPerByte);
        __ add(r0, r5, Operand(r0, LSL, kPointerSizeLog2));
        // r0: offset of first field after pre-allocated fields
        if (FLAG_debug_code) {
          __ cmp(r0, r6);
          __ Assert(le, "Unexpected number of pre-allocated property fields.");
        }
        __ InitializeFieldsWithFiller(r5, r0, r7);
        // To allow for truncation.
        __ LoadRoot(r7, Heap::kOnePointerFillerMapRootIndex);
      }
      __ InitializeFieldsWithFiller(r5, r6, r7);

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on. Any
      // failures need to undo the allocation, so that the heap is in a
      // consistent state and verifiable.
      __ add(r4, r4, Operand(kHeapObjectTag));

      // Check if a non-empty properties array is needed. Continue with
      // allocated object if not; fall through to runtime call if it is.
      // r1: constructor function
      // r4: JSObject
      // r5: start of next object (not tagged)
      __ ldrb(r3, FieldMemOperand(r2, Map::kUnusedPropertyFieldsOffset));
      // The field instance sizes contains both pre-allocated property fields
      // and in-object properties.
      __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
      __ Ubfx(r6, r0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
              kBitsPerByte);
      __ add(r3, r3, Operand(r6));
      __ Ubfx(r6, r0, Map::kInObjectPropertiesByte * kBitsPerByte,
              kBitsPerByte);
      __ sub(r3, r3, Operand(r6), SetCC);

      // Done if no extra properties are to be allocated.
      __ b(eq, &allocated);
      __ Assert(pl, "Property allocation count failed.");

      // Scale the number of elements by pointer size and add the header for
      // FixedArrays to the start of the next object calculation from above.
      // r1: constructor
      // r3: number of elements in properties array
      // r4: JSObject
      // r5: start of next object
      __ add(r0, r3, Operand(FixedArray::kHeaderSize / kPointerSize));
      __ AllocateInNewSpace(
          r0,
          r5,
          r6,
          r2,
          &undo_allocation,
          static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));

      // Initialize the FixedArray.
      // r1: constructor
      // r3: number of elements in properties array
      // r4: JSObject
      // r5: FixedArray (not tagged)
      __ LoadRoot(r6, Heap::kFixedArrayMapRootIndex);
      __ mov(r2, r5);
      ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
      __ str(r6, MemOperand(r2, kPointerSize, PostIndex));
      ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
      __ mov(r0, Operand(r3, LSL, kSmiTagSize));
      __ str(r0, MemOperand(r2, kPointerSize, PostIndex));

      // Initialize the fields to undefined.
      // r1: constructor function
      // r2: First element of FixedArray (not tagged)
      // r3: number of elements in properties array
      // r4: JSObject
      // r5: FixedArray (not tagged)
      __ add(r6, r2, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
      ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
      { Label loop, entry;
        if (count_constructions) {
          __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
        } else if (FLAG_debug_code) {
          __ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
          __ cmp(r7, r8);
          __ Assert(eq, "Undefined value not loaded.");
        }
        __ b(&entry);
        __ bind(&loop);
        __ str(r7, MemOperand(r2, kPointerSize, PostIndex));
        __ bind(&entry);
        __ cmp(r2, r6);
        __ b(lt, &loop);
      }

      // Store the initialized FixedArray into the properties field of
      // the JSObject.
      // r1: constructor function
      // r4: JSObject
      // r5: FixedArray (not tagged)
      __ add(r5, r5, Operand(kHeapObjectTag));  // Add the heap tag.
      __ str(r5, FieldMemOperand(r4, JSObject::kPropertiesOffset));

      // Continue with JSObject being successfully allocated.
      // r1: constructor function
      // r4: JSObject
      __ jmp(&allocated);

      // Undo the setting of the new top so that the heap is verifiable. For
      // example, the map's unused properties potentially do not match the
      // allocated objects unused properties.
      // r4: JSObject (previous new top)
      __ bind(&undo_allocation);
      __ UndoAllocationInNewSpace(r4, r5);
    }

    // Allocate the new receiver object using the runtime call.
    // r1: constructor function
    __ bind(&rt_call);
    __ push(r1);  // argument for Runtime_NewObject
    __ CallRuntime(Runtime::kNewObject, 1);
    __ mov(r4, r0);

    // Receiver for constructor call allocated.
    // r4: JSObject
    __ bind(&allocated);
    __ push(r4);
    __ push(r4);

    // Reload the number of arguments and the constructor from the stack.
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: constructor function
    // sp[3]: number of arguments (smi-tagged)
    __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
    __ ldr(r3, MemOperand(sp, 3 * kPointerSize));

    // Set up pointer to last argument.
    __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Set up number of arguments for function call below.
    __ mov(r0, Operand(r3, LSR, kSmiTagSize));

    // Copy arguments and receiver to the expression stack.
    // r0: number of arguments
    // r1: constructor function
    // r2: address of last argument (caller sp)
    // r3: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: constructor function
    // sp[3]: number of arguments (smi-tagged)
    Label loop, entry;
    __ b(&entry);
    __ bind(&loop);
    __ ldr(ip, MemOperand(r2, r3, LSL, kPointerSizeLog2 - 1));
    __ push(ip);
    __ bind(&entry);
    __ sub(r3, r3, Operand(2), SetCC);
    __ b(ge, &loop);

    // Call the function.
    // r0: number of arguments
    // r1: constructor function
    if (is_api_function) {
      __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      ParameterCount expected(0);
      __ InvokeCode(code, expected, expected,
                    RelocInfo::CODE_TARGET, CALL_FUNCTION, CALL_AS_METHOD);
    } else {
      ParameterCount actual(r0);
      __ InvokeFunction(r1, actual, CALL_FUNCTION,
                        NullCallWrapper(), CALL_AS_METHOD);
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function && !count_constructions) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    // r0: result
    // sp[0]: receiver
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, exit;

    // If the result is a smi, it is *not* an object in the ECMA sense.
    // r0: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ JumpIfSmi(r0, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    __ CompareObjectType(r0, r3, r3, FIRST_SPEC_OBJECT_TYPE);
    __ b(ge, &exit);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ ldr(r0, MemOperand(sp));

    // Remove receiver from the stack, remove caller arguments, and
    // return.
    __ bind(&exit);
    // r0: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ ldr(r1, MemOperand(sp, 2 * kPointerSize));

    // Leave construct frame.
  }

  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
  __ add(sp, sp, Operand(kPointerSize));
  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r1, r2);
  __ Jump(lr);
}
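// Illustrative note (not in the original source) on the use_receiver/exit
// logic above, following ECMA-262 section 13.2.2:
//   function F() { return {a: 1}; }  // new F() is the returned object.
//   function G() { return 42; }      // 42 is not a spec object, so new G()
//                                    // is the freshly allocated receiver.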


void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true);
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r0: code entry
  // r1: function
  // r2: receiver
  // r3: argc
  // r4: argv
  // r5-r7, cp may be clobbered

  // Clear the context before we push it when entering the internal frame.
  __ mov(cp, Operand(0, RelocInfo::NONE));

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context from the function argument.
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ push(r1);
    __ push(r2);

    // Copy arguments to the stack in a loop.
    // r1: function
    // r3: argc
    // r4: argv, i.e. points to first arg
    Label loop, entry;
    __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2));
    // r2 points past last arg.
    __ b(&entry);
    __ bind(&loop);
    __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex));  // read next parameter
    __ ldr(r0, MemOperand(r0));  // dereference handle
    __ push(r0);  // push parameter
    __ bind(&entry);
    __ cmp(r4, r2);
    __ b(ne, &loop);

    // Initialize all JavaScript callee-saved registers, since they will be
    // seen by the garbage collector as part of handlers.
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
    __ mov(r5, Operand(r4));
    __ mov(r6, Operand(r4));
    __ mov(r7, Operand(r4));
    if (kR9Available == 1) {
      __ mov(r9, Operand(r4));
    }

    // Invoke the code and pass argc as r0.
    __ mov(r0, Operand(r3));
    if (is_construct) {
      CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(r0);
      __ InvokeFunction(r1, actual, CALL_FUNCTION,
                        NullCallWrapper(), CALL_AS_METHOD);
    }
    // Exit the JS frame and remove the parameters (except function), and
    // return.
    // Respect ABI stack constraint.
  }
  __ Jump(lr);

  // r0: result
}
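// Illustrative note (not in the original source) on the double load in the
// copy loop above: each argv slot holds a Handle, i.e. a pointer to a cell
// that in turn points at the heap object, so the first ldr fetches the
// handle's location and the second dereferences it to obtain the actual
// argument before it is pushed.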


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve the function.
    __ push(r1);
    // Push call kind information.
    __ push(r5);

    // Push the function on the stack as the argument to the runtime function.
    __ push(r1);
    __ CallRuntime(Runtime::kLazyCompile, 1);
    // Calculate the entry point.
    __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));

    // Restore call kind information.
    __ pop(r5);
    // Restore saved function.
    __ pop(r1);

    // Tear down internal frame.
  }

  // Do a tail-call of the compiled function.
  __ Jump(r2);
}


void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve the function.
    __ push(r1);
    // Push call kind information.
    __ push(r5);

    // Push the function on the stack as the argument to the runtime function.
    __ push(r1);
    __ CallRuntime(Runtime::kLazyRecompile, 1);
    // Calculate the entry point.
    __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));

    // Restore call kind information.
    __ pop(r5);
    // Restore saved function.
    __ pop(r1);

    // Tear down internal frame.
  }

  // Do a tail-call of the compiled function.
  __ Jump(r2);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the deoptimization type to the runtime system.
    __ mov(r0, Operand(Smi::FromInt(static_cast<int>(type))));
    __ push(r0);
    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
  }

  // Get the full codegen state from the stack and untag it -> r6.
  __ ldr(r6, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(r6);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ cmp(r6, Operand(FullCodeGenerator::NO_REGISTERS));
  __ b(ne, &with_tos_register);
  __ add(sp, sp, Operand(1 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&with_tos_register);
  __ ldr(r0, MemOperand(sp, 1 * kPointerSize));
  __ cmp(r6, Operand(FullCodeGenerator::TOS_REG));
  __ b(ne, &unknown_state);
  __ add(sp, sp, Operand(2 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&unknown_state);
  __ stop("no cases left");
}
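// Illustrative note (not in the original source): the full-codegen state
// word consumed above is either NO_REGISTERS (nothing live, so only the
// state word itself is popped) or TOS_REG (the top-of-stack value in r0
// was live across the deoptimization, so one extra slot holds it and two
// words are popped in total).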


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
  // For now, we are relying on the fact that Runtime::NotifyOSR
  // doesn't do any garbage collection which allows us to save/restore
  // the registers without worrying about which of them contain
  // pointers. This seems a bit fragile.
  __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved | lr.bit() | fp.bit());
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kNotifyOSR, 0);
  }
  __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved | lr.bit() | fp.bit());
  __ Ret();
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  CpuFeatures::TryForceFeatureScope scope(VFP3);
  if (!CPU::SupportsCrankshaft()) {
    __ Abort("Unreachable code: Cannot optimize without VFP3 support.");
    return;
  }

  // Lookup the function in the JavaScript frame and push it as an
  // argument to the on-stack replacement function.
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(r0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  }

  // If the result was -1 it means that we couldn't optimize the
  // function. Just return and continue in the unoptimized version.
  Label skip;
  __ cmp(r0, Operand(Smi::FromInt(-1)));
  __ b(ne, &skip);
  __ Ret();

  __ bind(&skip);
  // Untag the AST id and push it on the stack.
  __ SmiUntag(r0);
  __ push(r0);

  // Generate the code for doing the frame-to-frame translation using
  // the deoptimizer infrastructure.
  Deoptimizer::EntryGenerator generator(masm, Deoptimizer::OSR);
  generator.Generate();
}


void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // r0: actual number of arguments
  { Label done;
    __ cmp(r0, Operand(0));
    __ b(ne, &done);
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
    __ push(r2);
    __ add(r0, r0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  // r0: actual number of arguments
  Label slow, non_function;
  __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
  __ JumpIfSmi(r1, &non_function);
  __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
  __ b(ne, &slow);

  // 3a. Patch the first argument if necessary when calling a function.
  // r0: actual number of arguments
  // r1: function
  Label shift_arguments;
  __ mov(r4, Operand(0, RelocInfo::NONE));  // indicate regular JS_FUNCTION
  { Label convert_to_object, use_global_receiver, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
    __ ldr(r3, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset));
    __ tst(r3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                             kSmiTagSize)));
    __ b(ne, &shift_arguments);

    // Do not transform the receiver for native (Compilerhints already in r3).
    __ tst(r3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ b(ne, &shift_arguments);

    // Compute the receiver in non-strict mode.
    __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ ldr(r2, MemOperand(r2, -kPointerSize));
    // r0: actual number of arguments
    // r1: function
    // r2: first argument
    __ JumpIfSmi(r2, &convert_to_object);

    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
    __ cmp(r2, r3);
    __ b(eq, &use_global_receiver);
    __ LoadRoot(r3, Heap::kNullValueRootIndex);
    __ cmp(r2, r3);
    __ b(eq, &use_global_receiver);

    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CompareObjectType(r2, r3, r3, FIRST_SPEC_OBJECT_TYPE);
    __ b(ge, &shift_arguments);

    __ bind(&convert_to_object);

    {
      // Enter an internal frame in order to preserve argument count.
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ mov(r0, Operand(r0, LSL, kSmiTagSize));  // Smi-tagged.
      __ push(r0);

      __ push(r2);
      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
      __ mov(r2, r0);

      __ pop(r0);
      __ mov(r0, Operand(r0, ASR, kSmiTagSize));

      // Exit the internal frame.
    }

    // Restore the function to r1, and the flag to r4.
    __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
    __ mov(r4, Operand(0, RelocInfo::NONE));
    __ jmp(&patch_receiver);

    // Use the global receiver object from the called function as the
    // receiver.
    __ bind(&use_global_receiver);
    const int kGlobalIndex =
        Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
    __ ldr(r2, FieldMemOperand(cp, kGlobalIndex));
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kNativeContextOffset));
    __ ldr(r2, FieldMemOperand(r2, kGlobalIndex));
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));

    __ bind(&patch_receiver);
    __ add(r3, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ str(r2, MemOperand(r3, -kPointerSize));

    __ jmp(&shift_arguments);
  }

  // 3b. Check for function proxy.
  __ bind(&slow);
  __ mov(r4, Operand(1, RelocInfo::NONE));  // indicate function proxy
  __ cmp(r2, Operand(JS_FUNCTION_PROXY_TYPE));
  __ b(eq, &shift_arguments);
  __ bind(&non_function);
  __ mov(r4, Operand(2, RelocInfo::NONE));  // indicate non-function

  // 3c. Patch the first argument when calling a non-function. The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  // r0: actual number of arguments
  // r1: function
  // r4: call type (0: JS function, 1: function proxy, 2: non-function)
  __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
  __ str(r1, MemOperand(r2, -kPointerSize));

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver). Adjust argument count to make
  //    the original first argument the new receiver.
  // r0: actual number of arguments
  // r1: function
  // r4: call type (0: JS function, 1: function proxy, 2: non-function)
  __ bind(&shift_arguments);
  { Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));

    __ bind(&loop);
    __ ldr(ip, MemOperand(r2, -kPointerSize));
    __ str(ip, MemOperand(r2));
    __ sub(r2, r2, Operand(kPointerSize));
    __ cmp(r2, sp);
    __ b(ne, &loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ sub(r0, r0, Operand(1));
    __ pop();
  }

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
  //     or a function proxy via CALL_FUNCTION_PROXY.
  // r0: actual number of arguments
  // r1: function
  // r4: call type (0: JS function, 1: function proxy, 2: non-function)
  { Label function, non_proxy;
    __ tst(r4, r4);
    __ b(eq, &function);
    // Expected number of arguments is 0 for CALL_NON_FUNCTION.
    __ mov(r2, Operand(0, RelocInfo::NONE));
    __ SetCallKind(r5, CALL_AS_METHOD);
    __ cmp(r4, Operand(1));
    __ b(ne, &non_proxy);

    __ push(r1);  // re-add proxy object as additional argument
    __ add(r0, r0, Operand(1));
    __ GetBuiltinEntry(r3, Builtins::CALL_FUNCTION_PROXY);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    __ bind(&non_proxy);
    __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing. If so, jump
  //     (tail-call) to the code in register r3 without checking arguments.
  // r0: actual number of arguments
  // r1: function
  __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2,
         FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
  __ mov(r2, Operand(r2, ASR, kSmiTagSize));
  __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  __ SetCallKind(r5, CALL_AS_METHOD);
  __ cmp(r2, r0);  // Check formal and actual parameter counts.
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET,
          ne);

  ParameterCount expected(0);
  __ InvokeCode(r3, expected, expected, JUMP_FUNCTION,
                NullCallWrapper(), CALL_AS_METHOD);
}
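// Illustrative note (not in the original source) on the receiver patching
// above: for a non-strict, non-native callee, calling f(1, 2) with an
// undefined or null receiver replaces the receiver slot with the global
// receiver object, and a primitive receiver such as "abc" is boxed via the
// TO_OBJECT builtin first; this roughly follows the entry semantics of
// ES5 section 10.4.3.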


void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  const int kIndexOffset    = -5 * kPointerSize;
  const int kLimitOffset    = -4 * kPointerSize;
  const int kArgsOffset     =  2 * kPointerSize;
  const int kRecvOffset     =  3 * kPointerSize;
  const int kFunctionOffset =  4 * kPointerSize;

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);

    __ ldr(r0, MemOperand(fp, kFunctionOffset));  // get the function
    __ push(r0);
    __ ldr(r0, MemOperand(fp, kArgsOffset));  // get the args array
    __ push(r0);
    __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);

    // Check the stack for overflow. We are not trying to catch
    // interruptions (e.g. debug break and preemption) here, so the "real
    // stack limit" is checked.
    Label okay;
    __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
    // Make r2 the space we have left. The stack might already be overflowed
    // here which will cause r2 to become negative.
    __ sub(r2, sp, r2);
    // Check if the arguments will overflow the stack.
    __ cmp(r2, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
    __ b(gt, &okay);  // Signed comparison.

    // Out of stack space.
    __ ldr(r1, MemOperand(fp, kFunctionOffset));
    __ push(r1);
    __ push(r0);
    __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
    // End of stack check.

    // Push current limit and index.
    __ bind(&okay);
    __ push(r0);  // limit
    __ mov(r1, Operand(0, RelocInfo::NONE));  // initial index
    __ push(r1);

    // Get the receiver.
    __ ldr(r0, MemOperand(fp, kRecvOffset));

    // Check that the function is a JS function (otherwise it must be a
    // proxy).
    Label push_receiver;
    __ ldr(r1, MemOperand(fp, kFunctionOffset));
    __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
    __ b(ne, &push_receiver);

    // Change context eagerly to get the right global object if necessary.
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
    // Load the shared function info while the function is still in r1.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));

    // Compute the receiver.
    // Do not transform the receiver for strict mode functions.
    Label call_to_object, use_global_receiver;
    __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset));
    __ tst(r2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                             kSmiTagSize)));
    __ b(ne, &push_receiver);

    // Do not transform the receiver for native (Compilerhints already in r2).
    __ tst(r2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ b(ne, &push_receiver);

    // Compute the receiver in non-strict mode.
    __ JumpIfSmi(r0, &call_to_object);
    __ LoadRoot(r1, Heap::kNullValueRootIndex);
    __ cmp(r0, r1);
    __ b(eq, &use_global_receiver);
    __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
    __ cmp(r0, r1);
    __ b(eq, &use_global_receiver);

    // Check if the receiver is already a JavaScript object.
    // r0: receiver
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
    __ b(ge, &push_receiver);

    // Convert the receiver to a regular object.
    // r0: receiver
    __ bind(&call_to_object);
    __ push(r0);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ b(&push_receiver);

    // Use the current global receiver object as the receiver.
    __ bind(&use_global_receiver);
    const int kGlobalOffset =
        Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
    __ ldr(r0, FieldMemOperand(cp, kGlobalOffset));
    __ ldr(r0, FieldMemOperand(r0, GlobalObject::kNativeContextOffset));
    __ ldr(r0, FieldMemOperand(r0, kGlobalOffset));
    __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));

    // Push the receiver.
    // r0: receiver
    __ bind(&push_receiver);
    __ push(r0);

    // Copy all arguments from the array to the stack.
    Label entry, loop;
    __ ldr(r0, MemOperand(fp, kIndexOffset));
    __ b(&entry);

    // Load the current argument from the arguments array and push it to the
    // stack.
    // r0: current argument index
    __ bind(&loop);
    __ ldr(r1, MemOperand(fp, kArgsOffset));
    __ push(r1);
    __ push(r0);

    // Call the runtime to access the property in the arguments array.
    __ CallRuntime(Runtime::kGetProperty, 2);
    __ push(r0);

    // Advance the smi-tagged index on the stack.
    __ ldr(r0, MemOperand(fp, kIndexOffset));
    __ add(r0, r0, Operand(1 << kSmiTagSize));
    __ str(r0, MemOperand(fp, kIndexOffset));

    // Test if the copy loop has finished copying all the elements from the
    // arguments object.
    __ bind(&entry);
    __ ldr(r1, MemOperand(fp, kLimitOffset));
    __ cmp(r0, r1);
    __ b(ne, &loop);

    // Invoke the function.
    Label call_proxy;
    ParameterCount actual(r0);
    __ mov(r0, Operand(r0, ASR, kSmiTagSize));
    __ ldr(r1, MemOperand(fp, kFunctionOffset));
    __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
    __ b(ne, &call_proxy);
    __ InvokeFunction(r1, actual, CALL_FUNCTION,
                      NullCallWrapper(), CALL_AS_METHOD);

    frame_scope.GenerateLeaveFrame();
    __ add(sp, sp, Operand(3 * kPointerSize));
    __ Jump(lr);

    // Invoke the function proxy.
    __ bind(&call_proxy);
    __ push(r1);  // add function proxy as last argument
    __ add(r0, r0, Operand(1));
    __ mov(r2, Operand(0, RelocInfo::NONE));
    __ SetCallKind(r5, CALL_AS_METHOD);
    __ GetBuiltinEntry(r3, Builtins::CALL_FUNCTION_PROXY);
    __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    // Tear down the internal frame and remove function, receiver and args.
  }
  __ add(sp, sp, Operand(3 * kPointerSize));
  __ Jump(lr);
}
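// Illustrative note (not in the original source): Function.prototype.apply
// is implemented above by looping a smi-tagged index (at fp + kIndexOffset)
// up to the limit (at fp + kLimitOffset), fetching args[i] with
// Runtime::kGetProperty and pushing each value, so f.apply(obj, [1, 2, 3])
// ends up as an ordinary three-argument invocation of f with obj as the
// receiver.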
1672 
1673 
1674 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1675  __ mov(r0, Operand(r0, LSL, kSmiTagSize));
1677  __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() | fp.bit() | lr.bit());
1678  __ add(fp, sp, Operand(3 * kPointerSize));
1679 }
1680 
1681 
1682 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1683  // ----------- S t a t e -------------
1684  // -- r0 : result being passed through
1685  // -----------------------------------
1686  // Get the number of arguments passed (as a smi), tear down the frame and
1687  // then tear down the parameters.
1688  __ ldr(r1, MemOperand(fp, -3 * kPointerSize));
1689  __ mov(sp, fp);
1690  __ ldm(ia_w, sp, fp.bit() | lr.bit());
1691  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
1692  __ add(sp, sp, Operand(kPointerSize)); // adjust for receiver
1693 }
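
The two adds above pop the parameters without ever untagging the count: shifting the smi-tagged count left by kPointerSizeLog2 - kSmiTagSize yields argc * kPointerSize directly. A small sketch of that arithmetic, assuming the 32-bit values kPointerSizeLog2 == 2 and kSmiTagSize == 1 (illustration only):

    #include <cassert>

    int BytesPopped(int argc) {
      int argc_smi = argc << 1;        // smi-tagged count, as loaded into r1
      int args = argc_smi << (2 - 1);  // LSL #(kPointerSizeLog2 - kSmiTagSize)
      return args + 4;                 // one extra kPointerSize slot for the receiver
    }

    int main() {
      assert(BytesPopped(3) == 16);  // 3 arguments plus the receiver, 4 bytes each
      return 0;
    }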
1694 
1695 
1696 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1697  // ----------- S t a t e -------------
1698  // -- r0 : actual number of arguments
1699  // -- r1 : function (passed through to callee)
1700  // -- r2 : expected number of arguments
1701  // -- r3 : code entry to call
1702  // -- r5 : call kind information
1703  // -----------------------------------
1704 
1705  Label invoke, dont_adapt_arguments;
1706 
1707  Label enough, too_few;
1708  __ cmp(r0, r2);
1709  __ b(lt, &too_few);
1710  __ cmp(r2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
1711  __ b(eq, &dont_adapt_arguments);
1712 
1713  { // Enough parameters: actual >= expected
1714  __ bind(&enough);
1715  EnterArgumentsAdaptorFrame(masm);
1716 
1717  // Calculate copy start address into r0 and copy end address into r2.
1718  // r0: actual number of arguments as a smi
1719  // r1: function
1720  // r2: expected number of arguments
1721  // r3: code entry to call
1722  __ add(r0, fp, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
1723  // adjust for return address and receiver
1724  __ add(r0, r0, Operand(2 * kPointerSize));
1725  __ sub(r2, r0, Operand(r2, LSL, kPointerSizeLog2));
1726 
1727  // Copy the arguments (including the receiver) to the new stack frame.
1728  // r0: copy start address
1729  // r1: function
1730  // r2: copy end address
1731  // r3: code entry to call
1732 
1733  Label copy;
1734  __ bind(&copy);
1735  __ ldr(ip, MemOperand(r0, 0));
1736  __ push(ip);
1737  __ cmp(r0, r2); // Compare before moving to next argument.
1738  __ sub(r0, r0, Operand(kPointerSize));
1739  __ b(ne, &copy);
1740 
1741  __ b(&invoke);
1742  }
1743 
1744  { // Too few parameters: actual < expected
1745  __ bind(&too_few);
1746  EnterArgumentsAdaptorFrame(masm);
1747 
1748  // Calculate the copy start address into r0; the copy end address is fp.
1749  // r0: actual number of arguments as a smi
1750  // r1: function
1751  // r2: expected number of arguments
1752  // r3: code entry to call
1753  __ add(r0, fp, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
1754 
1755  // Copy the arguments (including the receiver) to the new stack frame.
1756  // r0: copy start address
1757  // r1: function
1758  // r2: expected number of arguments
1759  // r3: code entry to call
1760  Label copy;
1761  __ bind(&copy);
1762  // Adjust load for return address and receiver.
1763  __ ldr(ip, MemOperand(r0, 2 * kPointerSize));
1764  __ push(ip);
1765  __ cmp(r0, fp); // Compare before moving to next argument.
1766  __ sub(r0, r0, Operand(kPointerSize));
1767  __ b(ne, &copy);
1768 
1769  // Fill the remaining expected arguments with undefined.
1770  // r1: function
1771  // r2: expected number of arguments
1772  // r3: code entry to call
1773  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1774  __ sub(r2, fp, Operand(r2, LSL, kPointerSizeLog2));
1775  __ sub(r2, r2, Operand(4 * kPointerSize)); // Adjust for the three frame
1776  // slots below fp and the receiver.
1776 
1777  Label fill;
1778  __ bind(&fill);
1779  __ push(ip);
1780  __ cmp(sp, r2);
1781  __ b(ne, &fill);
1782  }
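  // Taken together, the copy loop pushes actual + 1 words (the receiver plus
  // all actual arguments, stopping once r0 reaches fp) and the fill loop
  // pushes expected - actual undefined values, so the callee always finds
  // the receiver plus exactly expected arguments in the new frame.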
1783 
1784  // Call the entry point.
1785  __ bind(&invoke);
1786  __ Call(r3);
1787 
1788  // Store offset of return address for deoptimizer.
1789  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
1790 
1791  // Exit frame and return.
1792  LeaveArgumentsAdaptorFrame(masm);
1793  __ Jump(lr);
1794 
1795 
1796  // -------------------------------------------
1797  // Don't adapt arguments.
1798  // -------------------------------------------
1799  __ bind(&dont_adapt_arguments);
1800  __ Jump(r3);
1801 }
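
Taken as a whole, the trampoline gives the callee a formal-parameter view of exactly the expected count: missing arguments read as undefined, and surplus arguments stay behind without being copied into the new frame. A behavioural sketch under those assumptions (it models the observable effect only; it is not v8 code):

    #include <cstddef>
    #include <vector>

    template <typename Value>
    std::vector<Value> AdaptArguments(const std::vector<Value>& actual,
                                      std::size_t expected, Value undefined) {
      std::vector<Value> adapted(expected, undefined);  // "too few": pad
      for (std::size_t i = 0; i < expected && i < actual.size(); ++i) {
        adapted[i] = actual[i];  // "enough": only the first expected are copied
      }
      return adapted;
    }

Functions whose expected count is SharedFunctionInfo::kDontAdaptArgumentsSentinel bypass all of this through the dont_adapt_arguments exit above.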
1802 
1803 
1804 #undef __
1805 
1806 } } // namespace v8::internal
1807 
1808 #endif // V8_TARGET_ARCH_ARM