v8 3.14.5 (node 0.10.28): V8 is Google's open source JavaScript engine.
builtins-mips.cc
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.



#include "v8.h"

#if defined(V8_TARGET_ARCH_MIPS)

#include "codegen.h"
#include "debug.h"
#include "deoptimizer.h"
#include "full-codegen.h"
#include "runtime.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)
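// Note: the __ macro above expands to ACCESS_MASM(masm), so every
// "__ op(...)" line below emits code through the MacroAssembler.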


void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- a0                 : number of arguments excluding receiver
  //  -- a1                 : called function (only guaranteed when
  //  --                      extra_args requires it)
  //  -- cp                 : context
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ push(a1);
  } else {
    ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects s0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ Addu(s0, a0, num_extra_args + 1);
  __ sll(s1, s0, kPointerSizeLog2);
  __ Subu(s1, s1, kPointerSize);
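  // (s0 now holds the full argument count including the receiver and extras,
  // and s1 the same count in bytes minus one slot, which the C entry stub
  // uses to locate the arguments on the stack.)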
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}


// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the native context.

  __ lw(result,
        MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ lw(result,
        FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the InternalArray function from the native context.
  __ lw(result,
        MemOperand(result,
                   Context::SlotOffset(
                       Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
}


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the native context.

  __ lw(result,
        MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ lw(result,
        FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the Array function from the native context.
  __ lw(result,
        MemOperand(result,
                   Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}


// Allocate an empty JSArray. The allocated array is put into the result
// register. An elements backing store is allocated with size initial_capacity
// and filled with the hole values.
static void AllocateEmptyJSArray(MacroAssembler* masm,
                                 Register array_function,
                                 Register result,
                                 Register scratch1,
                                 Register scratch2,
                                 Register scratch3,
                                 Label* gc_required) {
  const int initial_capacity = JSArray::kPreallocatedArrayElements;
  STATIC_ASSERT(initial_capacity >= 0);
  __ LoadInitialArrayMap(array_function, scratch2, scratch1, false);

  // Allocate the JSArray object together with space for a fixed array with the
  // requested elements.
  int size = JSArray::kSize;
  if (initial_capacity > 0) {
    size += FixedArray::SizeFor(initial_capacity);
  }
  __ AllocateInNewSpace(size,
                        result,
                        scratch2,
                        scratch3,
                        gc_required,
                        TAG_OBJECT);
  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // scratch1: initial map
  // scratch2: start of next object
  __ sw(scratch1, FieldMemOperand(result, JSObject::kMapOffset));
  __ LoadRoot(scratch1, Heap::kEmptyFixedArrayRootIndex);
  __ sw(scratch1, FieldMemOperand(result, JSArray::kPropertiesOffset));
  // Field JSArray::kElementsOffset is initialized later.
  __ mov(scratch3, zero_reg);
  __ sw(scratch3, FieldMemOperand(result, JSArray::kLengthOffset));

  if (initial_capacity == 0) {
    __ sw(scratch1, FieldMemOperand(result, JSArray::kElementsOffset));
    return;
  }

  // Calculate the location of the elements array and set elements array member
  // of the JSArray.
  // result: JSObject
  // scratch2: start of next object
  __ Addu(scratch1, result, Operand(JSArray::kSize));
  __ sw(scratch1, FieldMemOperand(result, JSArray::kElementsOffset));

  // Clear the heap tag on the elements array.
  __ And(scratch1, scratch1, Operand(~kHeapObjectTagMask));
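  // (Masking off the low tag bits that mark heap object pointers leaves the
  // raw untagged address used by the word stores below.)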

  // Initialize the FixedArray and fill it with holes. FixedArray length is
  // stored as a smi.
  // result: JSObject
  // scratch1: elements array (untagged)
  // scratch2: start of next object
  __ LoadRoot(scratch3, Heap::kFixedArrayMapRootIndex);
  STATIC_ASSERT(0 * kPointerSize == FixedArray::kMapOffset);
  __ sw(scratch3, MemOperand(scratch1));
  __ Addu(scratch1, scratch1, kPointerSize);
  __ li(scratch3, Operand(Smi::FromInt(initial_capacity)));
  STATIC_ASSERT(1 * kPointerSize == FixedArray::kLengthOffset);
  __ sw(scratch3, MemOperand(scratch1));
  __ Addu(scratch1, scratch1, kPointerSize);

  // Fill the FixedArray with the hole value. Inline the code if short.
  STATIC_ASSERT(2 * kPointerSize == FixedArray::kHeaderSize);
  __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
  static const int kLoopUnfoldLimit = 4;
  if (initial_capacity <= kLoopUnfoldLimit) {
    for (int i = 0; i < initial_capacity; i++) {
      __ sw(scratch3, MemOperand(scratch1, i * kPointerSize));
    }
  } else {
    Label loop, entry;
    __ Addu(scratch2, scratch1, Operand(initial_capacity * kPointerSize));
    __ Branch(&entry);
    __ bind(&loop);
    __ sw(scratch3, MemOperand(scratch1));
    __ Addu(scratch1, scratch1, kPointerSize);
    __ bind(&entry);
    __ Branch(&loop, lt, scratch1, Operand(scratch2));
  }
}


// Allocate a JSArray with the number of elements stored in a register. The
// register array_function holds the built-in Array function and the register
// array_size holds the size of the array as a smi. The allocated array is put
// into the result register and the beginning and end of the FixedArray
// elements storage are put into registers elements_array_storage and
// elements_array_end (see below for when that is not the case). If the
// parameter fill_with_hole is true the allocated elements backing store is
// filled with the hole values, otherwise it is left uninitialized. When the
// backing store is filled the register elements_array_storage is scratched.
static void AllocateJSArray(MacroAssembler* masm,
                            Register array_function,  // Array function.
                            Register array_size,  // As a smi, cannot be 0.
                            Register result,
                            Register elements_array_storage,
                            Register elements_array_end,
                            Register scratch1,
                            Register scratch2,
                            bool fill_with_hole,
                            Label* gc_required) {
  // Load the initial map from the array function.
  __ LoadInitialArrayMap(array_function, scratch2,
                         elements_array_storage, fill_with_hole);

  if (FLAG_debug_code) {  // Assert that array size is not zero.
    __ Assert(
        ne, "array size is unexpectedly 0", array_size, Operand(zero_reg));
  }

  // Allocate the JSArray object together with space for a FixedArray with the
  // requested number of elements.
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ li(elements_array_end,
        (JSArray::kSize + FixedArray::kHeaderSize) / kPointerSize);
  __ sra(scratch1, array_size, kSmiTagSize);
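  // (array_size is a smi, i.e. the length shifted left by one bit; the
  // arithmetic shift right recovers the plain element count.)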
  __ Addu(elements_array_end, elements_array_end, scratch1);
  __ AllocateInNewSpace(
      elements_array_end,
      result,
      scratch1,
      scratch2,
      gc_required,
      static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS));

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // elements_array_storage: initial map
  // array_size: size of array (smi)
  __ sw(elements_array_storage, FieldMemOperand(result, JSObject::kMapOffset));
  __ LoadRoot(elements_array_storage, Heap::kEmptyFixedArrayRootIndex);
  __ sw(elements_array_storage,
        FieldMemOperand(result, JSArray::kPropertiesOffset));
  // Field JSArray::kElementsOffset is initialized later.
  __ sw(array_size, FieldMemOperand(result, JSArray::kLengthOffset));

  // Calculate the location of the elements array and set elements array member
  // of the JSArray.
  // result: JSObject
  // array_size: size of array (smi)
  __ Addu(elements_array_storage, result, Operand(JSArray::kSize));
  __ sw(elements_array_storage,
        FieldMemOperand(result, JSArray::kElementsOffset));

  // Clear the heap tag on the elements array.
  __ And(elements_array_storage,
         elements_array_storage,
         Operand(~kHeapObjectTagMask));
  // Initialize the fixed array and fill it with holes. FixedArray length is
  // stored as a smi.
  // result: JSObject
  // elements_array_storage: elements array (untagged)
  // array_size: size of array (smi)
  __ LoadRoot(scratch1, Heap::kFixedArrayMapRootIndex);
  STATIC_ASSERT(0 * kPointerSize == FixedArray::kMapOffset);
  __ sw(scratch1, MemOperand(elements_array_storage));
  __ Addu(elements_array_storage, elements_array_storage, kPointerSize);

  // Length of the FixedArray is the number of pre-allocated elements if
  // the actual JSArray has length 0 and the size of the JSArray for non-empty
  // JSArrays. The length of a FixedArray is stored as a smi.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(1 * kPointerSize == FixedArray::kLengthOffset);
  __ sw(array_size, MemOperand(elements_array_storage));
  __ Addu(elements_array_storage, elements_array_storage, kPointerSize);

  // Calculate elements array and elements array end.
  // result: JSObject
  // elements_array_storage: elements array element storage
  // array_size: smi-tagged size of elements array
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
  __ sll(elements_array_end, array_size, kPointerSizeLog2 - kSmiTagSize);
  __ Addu(elements_array_end, elements_array_storage, elements_array_end);

  // Fill the allocated FixedArray with the hole value if requested.
  // result: JSObject
  // elements_array_storage: elements array element storage
  // elements_array_end: start of next object
  if (fill_with_hole) {
    Label loop, entry;
    __ LoadRoot(scratch1, Heap::kTheHoleValueRootIndex);
    __ Branch(&entry);
    __ bind(&loop);
    __ sw(scratch1, MemOperand(elements_array_storage));
    __ Addu(elements_array_storage, elements_array_storage, kPointerSize);

    __ bind(&entry);
    __ Branch(&loop, lt, elements_array_storage, Operand(elements_array_end));
  }
}


// Create a new array for the built-in Array function. This function allocates
// the JSArray object and the FixedArray elements array and initializes these.
// If the Array cannot be constructed in native code the runtime is called.
// This function assumes the following state:
//   a0: argc
//   a1: constructor (built-in Array function)
//   ra: return address
//   sp[0]: last argument
// This function is used for both construct and normal calls of Array. The
// only difference between handling a construct call and a normal call is
// that for a construct call the constructor function in a1 needs to be
// preserved for entering the generic code. In both cases argc in a0 needs to
// be preserved. Both registers are preserved by this code so no need to
// differentiate between construct call and normal call.
static void ArrayNativeCode(MacroAssembler* masm,
                            Label* call_generic_code) {
  Counters* counters = masm->isolate()->counters();
  Label argc_one_or_more, argc_two_or_more, not_empty_array, empty_array,
      has_non_smi_element, finish, cant_transition_map, not_double;

  // Check for array construction with zero arguments or one.
  __ Branch(&argc_one_or_more, ne, a0, Operand(zero_reg));
  // Handle construction of an empty array.
  __ bind(&empty_array);
  AllocateEmptyJSArray(masm,
                       a1,
                       a2,
                       a3,
                       t0,
                       t1,
                       call_generic_code);
  __ IncrementCounter(counters->array_function_native(), 1, a3, t0);
  // Set up return value, remove receiver from stack and return.
  __ mov(v0, a2);
  __ Addu(sp, sp, Operand(kPointerSize));
  __ Ret();

  // Check for one argument. Bail out if argument is not smi or if it is
  // negative.
  __ bind(&argc_one_or_more);
  __ Branch(&argc_two_or_more, ne, a0, Operand(1));

  STATIC_ASSERT(kSmiTag == 0);
  __ lw(a2, MemOperand(sp));  // Get the argument from the stack.
  __ Branch(&not_empty_array, ne, a2, Operand(zero_reg));
  __ Drop(1);  // Adjust stack.
  __ mov(a0, zero_reg);  // Treat this as a call with argc of zero.
  __ Branch(&empty_array);

  __ bind(&not_empty_array);
  __ And(a3, a2, Operand(kIntptrSignBit | kSmiTagMask));
  __ Branch(call_generic_code, eq, a3, Operand(zero_reg));

  // Handle construction of an empty array of a certain size. Bail out if size
  // is too large to actually allocate an elements array.
  STATIC_ASSERT(kSmiTag == 0);
  __ Branch(call_generic_code, Ugreater_equal, a2,
            Operand(JSObject::kInitialMaxFastElementArray << kSmiTagSize));

  // a0: argc
  // a1: constructor
  // a2: array_size (smi)
  // sp[0]: argument
  AllocateJSArray(masm,
                  a1,
                  a2,
                  a3,
                  t0,
                  t1,
                  t2,
                  t3,
                  true,
                  call_generic_code);
  __ IncrementCounter(counters->array_function_native(), 1, a2, t0);

  // Set up return value, remove receiver and argument from stack and return.
  __ mov(v0, a3);
  __ Addu(sp, sp, Operand(2 * kPointerSize));
  __ Ret();

  // Handle construction of an array from a list of arguments.
  __ bind(&argc_two_or_more);
  __ sll(a2, a0, kSmiTagSize);  // Convert argc to a smi.

  // a0: argc
  // a1: constructor
  // a2: array_size (smi)
  // sp[0]: last argument
  AllocateJSArray(masm,
                  a1,
                  a2,
                  a3,
                  t0,
                  t1,
                  t2,
                  t3,
                  false,
                  call_generic_code);
  __ IncrementCounter(counters->array_function_native(), 1, a2, t2);

  // Fill arguments as array elements. Copy from the top of the stack (last
  // element) to the array backing store filling it backwards. Note:
  // elements_array_end points after the backing store.
  // a0: argc
  // a3: JSArray
  // t0: elements_array storage start (untagged)
  // t1: elements_array_end (untagged)
  // sp[0]: last argument

  Label loop, entry;
  __ Branch(USE_DELAY_SLOT, &entry);
  __ mov(t3, sp);
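  // (The mov above executes in the branch delay slot of the preceding Branch,
  // so t3 already points at the last argument when &entry is reached.)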
  __ bind(&loop);
  __ lw(a2, MemOperand(t3));
  if (FLAG_smi_only_arrays) {
    __ JumpIfNotSmi(a2, &has_non_smi_element);
  }
  __ Addu(t3, t3, kPointerSize);
  __ Addu(t1, t1, -kPointerSize);
  __ sw(a2, MemOperand(t1));
  __ bind(&entry);
  __ Branch(&loop, lt, t0, Operand(t1));

  __ bind(&finish);
  __ mov(sp, t3);

  // Remove caller arguments and receiver from the stack, set up return value
  // and return.
  // a0: argc
  // a3: JSArray
  // sp[0]: receiver
  __ Addu(sp, sp, Operand(kPointerSize));
  __ mov(v0, a3);
  __ Ret();

  __ bind(&has_non_smi_element);
  // Double values are handled by the runtime.
  __ CheckMap(
      a2, t5, Heap::kHeapNumberMapRootIndex, &not_double, DONT_DO_SMI_CHECK);
  __ bind(&cant_transition_map);
  __ UndoAllocationInNewSpace(a3, t0);
  __ Branch(call_generic_code);

  __ bind(&not_double);
  // Transition FAST_SMI_ELEMENTS to FAST_ELEMENTS.
  // a3: JSArray
  __ lw(a2, FieldMemOperand(a3, HeapObject::kMapOffset));
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                         FAST_ELEMENTS,
                                         a2,
                                         t5,
                                         &cant_transition_map);
  __ sw(a2, FieldMemOperand(a3, HeapObject::kMapOffset));
  __ RecordWriteField(a3,
                      HeapObject::kMapOffset,
                      a2,
                      t5,
                      kRAHasNotBeenSaved,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  Label loop2;
  __ bind(&loop2);
  __ lw(a2, MemOperand(t3));
  __ Addu(t3, t3, kPointerSize);
  __ Subu(t1, t1, kPointerSize);
  __ sw(a2, MemOperand(t1));
  __ Branch(&loop2, lt, t0, Operand(t1));
  __ Branch(&finish);
}


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ And(t0, a2, Operand(kSmiTagMask));
    __ Assert(ne, "Unexpected initial map for InternalArray function",
              t0, Operand(zero_reg));
    __ GetObjectType(a2, a3, t0);
    __ Assert(eq, "Unexpected initial map for InternalArray function",
              t0, Operand(MAP_TYPE));
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  ArrayNativeCode(masm, &generic_array_code);

  // Jump to the generic array code if the specialized code cannot handle the
  // construction.
  __ bind(&generic_array_code);

  Handle<Code> array_code =
      masm->isolate()->builtins()->InternalArrayCodeGeneric();
  __ Jump(array_code, RelocInfo::CODE_TARGET);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ And(t0, a2, Operand(kSmiTagMask));
    __ Assert(ne, "Unexpected initial map for Array function (1)",
              t0, Operand(zero_reg));
    __ GetObjectType(a2, a3, t0);
    __ Assert(eq, "Unexpected initial map for Array function (2)",
              t0, Operand(MAP_TYPE));
  }

  // Run the native code for the Array function called as a normal function.
  ArrayNativeCode(masm, &generic_array_code);

  // Jump to the generic array code if the specialized code cannot handle
  // the construction.
  __ bind(&generic_array_code);

  Handle<Code> array_code =
      masm->isolate()->builtins()->ArrayCodeGeneric();
  __ Jump(array_code, RelocInfo::CODE_TARGET);
}


void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- a1     : constructor function
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_constructor;

  if (FLAG_debug_code) {
    // The array construct code is only set for the builtin and internal
    // Array functions which always have a map.
    // Initial map for the builtin Array function should be a map.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ And(t0, a2, Operand(kSmiTagMask));
    __ Assert(ne, "Unexpected initial map for Array function (3)",
              t0, Operand(zero_reg));
    __ GetObjectType(a2, a3, t0);
    __ Assert(eq, "Unexpected initial map for Array function (4)",
              t0, Operand(MAP_TYPE));
  }

  // Run the native code for the Array function called as a constructor.
  ArrayNativeCode(masm, &generic_constructor);

  // Jump to the generic construct code in case the specialized code cannot
  // handle the construction.
  __ bind(&generic_constructor);

  Handle<Code> generic_construct_stub =
      masm->isolate()->builtins()->JSConstructStubGeneric();
  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
}


void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1, a2, a3);

  Register function = a1;
  if (FLAG_debug_code) {
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, a2);
    __ Assert(eq, "Unexpected String function", function, Operand(a2));
  }

  // Load the first argument in a0 and get rid of the rest.
  Label no_arguments;
  __ Branch(&no_arguments, eq, a0, Operand(zero_reg));
  // First argument = sp[(argc - 1) * 4].
  __ Subu(a0, a0, Operand(1));
  __ sll(a0, a0, kPointerSizeLog2);
  __ Addu(sp, a0, sp);
  __ lw(a0, MemOperand(sp));
  // sp now points to args[0], drop args[0] + receiver.
  __ Drop(2);
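  // (The preceding Addu moved sp to the slot of args[0]; Drop(2) then pops
  // args[0] and the receiver, leaving the raw argument value in a0.)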

  Register argument = a2;
  Label not_cached, argument_is_string;
  NumberToStringStub::GenerateLookupNumberStringCache(
      masm,
      a0,        // Input.
      argument,  // Result.
      a3,        // Scratch.
      t0,        // Scratch.
      t1,        // Scratch.
      false,     // Is it a Smi?
      &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1, a3, t0);
  __ bind(&argument_is_string);

  // ----------- S t a t e -------------
  //  -- a2     : argument converted to string
  //  -- a1     : constructor function
  //  -- ra     : return address
  // -----------------------------------

  Label gc_required;
  __ AllocateInNewSpace(JSValue::kSize,
                        v0,  // Result.
                        a3,  // Scratch.
                        t0,  // Scratch.
                        &gc_required,
                        TAG_OBJECT);

  // Initialising the String Object.
  Register map = a3;
  __ LoadGlobalFunctionInitialMap(function, map, t0);
  if (FLAG_debug_code) {
    __ lbu(t0, FieldMemOperand(map, Map::kInstanceSizeOffset));
    __ Assert(eq, "Unexpected string wrapper instance size",
        t0, Operand(JSValue::kSize >> kPointerSizeLog2));
    __ lbu(t0, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
    __ Assert(eq, "Unexpected unused properties of string wrapper",
        t0, Operand(zero_reg));
  }
  __ sw(map, FieldMemOperand(v0, HeapObject::kMapOffset));

  __ LoadRoot(a3, Heap::kEmptyFixedArrayRootIndex);
  __ sw(a3, FieldMemOperand(v0, JSObject::kPropertiesOffset));
  __ sw(a3, FieldMemOperand(v0, JSObject::kElementsOffset));

  __ sw(argument, FieldMemOperand(v0, JSValue::kValueOffset));

  // Ensure the object is fully initialized.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);

  __ Ret();

  // The argument was not found in the number to string cache. Check
  // if it's a string already before calling the conversion builtin.
  Label convert_argument;
  __ bind(&not_cached);
  __ JumpIfSmi(a0, &convert_argument);

  // Is it a String?
  __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  __ And(t0, a3, Operand(kIsNotStringMask));
  __ Branch(&convert_argument, ne, t0, Operand(zero_reg));
  __ mov(argument, a0);
  __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0);
  __ Branch(&argument_is_string);

  // Invoke the conversion builtin and put the result into a2.
  __ bind(&convert_argument);
  __ push(function);  // Preserve the function.
  __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(v0);
    __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
  }
  __ pop(function);
  __ mov(argument, v0);
  __ Branch(&argument_is_string);

  // Load the empty string into a2, remove the receiver from the
  // stack, and jump back to the case where the argument is a string.
  __ bind(&no_arguments);
  __ LoadRoot(argument, Heap::kEmptyStringRootIndex);
  __ Drop(1);
  __ Branch(&argument_is_string);

  // At this point the argument is already a string. Call runtime to
  // create a string wrapper.
  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1, a3, t0);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(argument);
    __ CallRuntime(Runtime::kNewStringWrapper, 1);
  }
  __ Ret();
}


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
  __ Addu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}


void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
  GenerateTailCallToSharedCode(masm);
}


void Builtins::Generate_ParallelRecompile(MacroAssembler* masm) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Push a copy of the function onto the stack.
    __ push(a1);
    // Push call kind information.
    __ push(t1);

    __ push(a1);  // Function is also the parameter to the runtime call.
    __ CallRuntime(Runtime::kParallelRecompile, 1);

    // Restore call kind information.
    __ pop(t1);
    // Restore receiver.
    __ pop(a1);

    // Tear down internal frame.
  }

  GenerateTailCallToSharedCode(masm);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool count_constructions) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- a1     : constructor function
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // Should never count constructions for api objects.
  ASSERT(!is_api_function || !count_constructions);

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the two incoming parameters on the stack.
    __ sll(a0, a0, kSmiTagSize);  // Tag arguments count.
    __ MultiPushReversed(a0.bit() | a1.bit());

    // Use t7 to hold undefined, which is used in several places below.
    __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);

    Label rt_call, allocated;
    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    if (FLAG_inline_new) {
      Label undo_allocation;
#ifdef ENABLE_DEBUGGER_SUPPORT
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(isolate);
      __ li(a2, Operand(debug_step_in_fp));
      __ lw(a2, MemOperand(a2));
      __ Branch(&rt_call, ne, a2, Operand(zero_reg));
#endif

      // Load the initial map and verify that it is in fact a map.
      // a1: constructor function
      __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
      __ JumpIfSmi(a2, &rt_call);
      __ GetObjectType(a2, a3, t4);
      __ Branch(&rt_call, ne, t4, Operand(MAP_TYPE));

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc). In which case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      // a1: constructor function
      // a2: initial map
      __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
      __ Branch(&rt_call, eq, a3, Operand(JS_FUNCTION_TYPE));

      if (count_constructions) {
        Label allocate;
        // Decrease generous allocation count.
        __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
        MemOperand constructor_count =
            FieldMemOperand(a3, SharedFunctionInfo::kConstructionCountOffset);
        __ lbu(t0, constructor_count);
        __ Subu(t0, t0, Operand(1));
        __ sb(t0, constructor_count);
        __ Branch(&allocate, ne, t0, Operand(zero_reg));

        __ Push(a1, a2);

        __ push(a1);  // Constructor.
        // The call will replace the stub, so the countdown is only done once.
        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

        __ pop(a2);
        __ pop(a1);

        __ bind(&allocate);
      }

      // Now allocate the JSObject on the heap.
      // a1: constructor function
      // a2: initial map
      __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset));
      __ AllocateInNewSpace(a3, t4, t5, t6, &rt_call, SIZE_IN_WORDS);

      // Allocated the JSObject, now initialize the fields. Map is set to
      // initial map and properties and elements are set to empty fixed array.
      // a1: constructor function
      // a2: initial map
      // a3: object size
      // t4: JSObject (not tagged)
      __ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex);
      __ mov(t5, t4);
      __ sw(a2, MemOperand(t5, JSObject::kMapOffset));
      __ sw(t6, MemOperand(t5, JSObject::kPropertiesOffset));
      __ sw(t6, MemOperand(t5, JSObject::kElementsOffset));
      __ Addu(t5, t5, Operand(3*kPointerSize));
      ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
      ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
      ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);

      // Fill all the in-object properties with appropriate filler.
      // a1: constructor function
      // a2: initial map
      // a3: object size (in words)
      // t4: JSObject (not tagged)
      // t5: First in-object property of JSObject (not tagged)
      __ sll(t0, a3, kPointerSizeLog2);
      __ addu(t6, t4, t0);  // End of object.
      ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize);
      __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
      if (count_constructions) {
        __ lw(a0, FieldMemOperand(a2, Map::kInstanceSizesOffset));
        __ Ext(a0, a0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
               kBitsPerByte);
        __ sll(t0, a0, kPointerSizeLog2);
        __ addu(a0, t5, t0);
        // a0: offset of first field after pre-allocated fields
        if (FLAG_debug_code) {
          __ Assert(le, "Unexpected number of pre-allocated property fields.",
              a0, Operand(t6));
        }
        __ InitializeFieldsWithFiller(t5, a0, t7);
        // To allow for truncation.
        __ LoadRoot(t7, Heap::kOnePointerFillerMapRootIndex);
      }
      __ InitializeFieldsWithFiller(t5, t6, t7);

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on. Any
      // failures need to undo the allocation, so that the heap is in a
      // consistent state and verifiable.
      __ Addu(t4, t4, Operand(kHeapObjectTag));
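      // (From here on t4 is a tagged heap pointer: kHeapObjectTag is the low
      // bit that distinguishes heap object pointers from smis.)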

      // Check if a non-empty properties array is needed. Continue with
      // allocated object if not; fall through to runtime call if it is.
      // a1: constructor function
      // t4: JSObject
      // t5: start of next object (not tagged)
      __ lbu(a3, FieldMemOperand(a2, Map::kUnusedPropertyFieldsOffset));
      // The field instance sizes contains both pre-allocated property fields
      // and in-object properties.
      __ lw(a0, FieldMemOperand(a2, Map::kInstanceSizesOffset));
      __ Ext(t6, a0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
             kBitsPerByte);
      __ Addu(a3, a3, Operand(t6));
      __ Ext(t6, a0, Map::kInObjectPropertiesByte * kBitsPerByte,
             kBitsPerByte);
      __ subu(a3, a3, t6);

      // Done if no extra properties are to be allocated.
      __ Branch(&allocated, eq, a3, Operand(zero_reg));
      __ Assert(greater_equal, "Property allocation count failed.",
          a3, Operand(zero_reg));

      // Scale the number of elements by pointer size and add the header for
      // FixedArrays to the start of the next object calculation from above.
      // a1: constructor
      // a3: number of elements in properties array
      // t4: JSObject
      // t5: start of next object
      __ Addu(a0, a3, Operand(FixedArray::kHeaderSize / kPointerSize));
      __ AllocateInNewSpace(
          a0,
          t5,
          t6,
          a2,
          &undo_allocation,
          static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));

      // Initialize the FixedArray.
      // a1: constructor
      // a3: number of elements in properties array (untagged)
      // t4: JSObject
      // t5: start of next object
      __ LoadRoot(t6, Heap::kFixedArrayMapRootIndex);
      __ mov(a2, t5);
      __ sw(t6, MemOperand(a2, JSObject::kMapOffset));
      __ sll(a0, a3, kSmiTagSize);
      __ sw(a0, MemOperand(a2, FixedArray::kLengthOffset));
      __ Addu(a2, a2, Operand(2 * kPointerSize));

      ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
      ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);

      // Initialize the fields to undefined.
      // a1: constructor
      // a2: First element of FixedArray (not tagged)
      // a3: number of elements in properties array
      // t4: JSObject
      // t5: FixedArray (not tagged)
      __ sll(t3, a3, kPointerSizeLog2);
      __ addu(t6, a2, t3);  // End of object.
      ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
      { Label loop, entry;
        if (count_constructions) {
          __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
        } else if (FLAG_debug_code) {
          __ LoadRoot(t8, Heap::kUndefinedValueRootIndex);
          __ Assert(eq, "Undefined value not loaded.", t7, Operand(t8));
        }
        __ jmp(&entry);
        __ bind(&loop);
        __ sw(t7, MemOperand(a2));
        __ addiu(a2, a2, kPointerSize);
        __ bind(&entry);
        __ Branch(&loop, less, a2, Operand(t6));
      }

      // Store the initialized FixedArray into the properties field of
      // the JSObject.
      // a1: constructor function
      // t4: JSObject
      // t5: FixedArray (not tagged)
      __ Addu(t5, t5, Operand(kHeapObjectTag));  // Add the heap tag.
      __ sw(t5, FieldMemOperand(t4, JSObject::kPropertiesOffset));

      // Continue with JSObject being successfully allocated.
      // a1: constructor function
      // t4: JSObject
      __ jmp(&allocated);

      // Undo the setting of the new top so that the heap is verifiable. For
      // example, the map's unused properties potentially do not match the
      // allocated objects unused properties.
      // t4: JSObject (previous new top)
      __ bind(&undo_allocation);
      __ UndoAllocationInNewSpace(t4, t5);
    }

    __ bind(&rt_call);
    // Allocate the new receiver object using the runtime call.
    // a1: constructor function
    __ push(a1);  // Argument for Runtime_NewObject.
    __ CallRuntime(Runtime::kNewObject, 1);
    __ mov(t4, v0);

    // Receiver for constructor call allocated.
    // t4: JSObject
    __ bind(&allocated);
    __ push(t4);
    __ push(t4);

    // Reload the number of arguments from the stack.
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: constructor function
    // sp[3]: number of arguments (smi-tagged)
    __ lw(a1, MemOperand(sp, 2 * kPointerSize));
    __ lw(a3, MemOperand(sp, 3 * kPointerSize));

    // Set up pointer to last argument.
    __ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Set up number of arguments for function call below.
    __ srl(a0, a3, kSmiTagSize);

    // Copy arguments and receiver to the expression stack.
    // a0: number of arguments
    // a1: constructor function
    // a2: address of last argument (caller sp)
    // a3: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: constructor function
    // sp[3]: number of arguments (smi-tagged)
    Label loop, entry;
    __ jmp(&entry);
    __ bind(&loop);
    __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
    __ Addu(t0, a2, Operand(t0));
    __ lw(t1, MemOperand(t0));
    __ push(t1);
    __ bind(&entry);
    __ Addu(a3, a3, Operand(-2));
    __ Branch(&loop, greater_equal, a3, Operand(zero_reg));

    // Call the function.
    // a0: number of arguments
    // a1: constructor function
    if (is_api_function) {
      __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      ParameterCount expected(0);
      __ InvokeCode(code, expected, expected,
                    RelocInfo::CODE_TARGET, CALL_FUNCTION, CALL_AS_METHOD);
    } else {
      ParameterCount actual(a0);
      __ InvokeFunction(a1, actual, CALL_FUNCTION,
                        NullCallWrapper(), CALL_AS_METHOD);
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function && !count_constructions) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, exit;

    // If the result is a smi, it is *not* an object in the ECMA sense.
    // v0: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ JumpIfSmi(v0, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    __ GetObjectType(v0, a3, a3);
    __ Branch(&exit, greater_equal, a3, Operand(FIRST_SPEC_OBJECT_TYPE));

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ lw(v0, MemOperand(sp));

    // Remove receiver from the stack, remove caller arguments, and
    // return.
    __ bind(&exit);
    // v0: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ lw(a1, MemOperand(sp, 2 * kPointerSize));

    // Leave construct frame.
  }

  __ sll(t0, a1, kPointerSizeLog2 - 1);
  __ Addu(sp, sp, t0);
  __ Addu(sp, sp, kPointerSize);
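  // (a1 holds the smi-tagged argument count; a smi is the value shifted left
  // by one bit, so shifting left by kPointerSizeLog2 - 1 yields the argument
  // size in bytes. The extra kPointerSize drops the receiver.)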
  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2);
  __ Ret();
}


void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true);
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from JSEntryStub::GenerateBody

  // ----------- S t a t e -------------
  //  -- a0: code entry
  //  -- a1: function
  //  -- a2: receiver_pointer
  //  -- a3: argc
  //  -- s0: argv
  // -----------------------------------

  // Clear the context before we push it when entering the JS frame.
  __ mov(cp, zero_reg);

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context from the function argument.
    __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

    // Push the function and the receiver onto the stack.
    __ Push(a1, a2);

    // Copy arguments to the stack in a loop.
    // a3: argc
    // s0: argv, i.e. points to first arg
    Label loop, entry;
    __ sll(t0, a3, kPointerSizeLog2);
    __ addu(t2, s0, t0);
    __ b(&entry);
    __ nop();  // Branch delay slot nop.
    // t2 points past last arg.
    __ bind(&loop);
    __ lw(t0, MemOperand(s0));  // Read next parameter.
    __ addiu(s0, s0, kPointerSize);
    __ lw(t0, MemOperand(t0));  // Dereference handle.
    __ push(t0);  // Push parameter.
    __ bind(&entry);
    __ Branch(&loop, ne, s0, Operand(t2));

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
    __ mov(s1, t0);
    __ mov(s2, t0);
    __ mov(s3, t0);
    __ mov(s4, t0);
    __ mov(s5, t0);
    // s6 holds the root address. Do not clobber.
    // s7 is cp. Do not init.

    // Invoke the code and pass argc as a0.
    __ mov(a0, a3);
    if (is_construct) {
      CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(a0);
      __ InvokeFunction(a1, actual, CALL_FUNCTION,
                        NullCallWrapper(), CALL_AS_METHOD);
    }

    // Leave internal frame.
  }

  __ Jump(ra);
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve the function.
    __ push(a1);
    // Push call kind information.
    __ push(t1);

    // Push the function on the stack as the argument to the runtime function.
    __ push(a1);
    // Call the runtime function.
    __ CallRuntime(Runtime::kLazyCompile, 1);
    // Calculate the entry point.
    __ addiu(t9, v0, Code::kHeaderSize - kHeapObjectTag);

    // Restore call kind information.
    __ pop(t1);
    // Restore saved function.
    __ pop(a1);

    // Tear down temporary frame.
  }

  // Do a tail-call of the compiled function.
  __ Jump(t9);
}


void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve the function.
    __ push(a1);
    // Push call kind information.
    __ push(t1);

    // Push the function on the stack as the argument to the runtime function.
    __ push(a1);
    __ CallRuntime(Runtime::kLazyRecompile, 1);
    // Calculate the entry point.
    __ Addu(t9, v0, Operand(Code::kHeaderSize - kHeapObjectTag));

    // Restore call kind information.
    __ pop(t1);
    // Restore saved function.
    __ pop(a1);

    // Tear down temporary frame.
  }

  // Do a tail-call of the compiled function.
  __ Jump(t9);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the function and deoptimization type to the runtime system.
    __ li(a0, Operand(Smi::FromInt(static_cast<int>(type))));
    __ push(a0);
    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
  }

  // Get the full codegen state from the stack and untag it -> t2.
  __ lw(t2, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(t2);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ Branch(&with_tos_register,
            ne, t2, Operand(FullCodeGenerator::NO_REGISTERS));
  __ Addu(sp, sp, Operand(1 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&with_tos_register);
  __ lw(v0, MemOperand(sp, 1 * kPointerSize));
  __ Branch(&unknown_state, ne, t2, Operand(FullCodeGenerator::TOS_REG));

  __ Addu(sp, sp, Operand(2 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&unknown_state);
  __ stop("no cases left");
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
  // For now, we are relying on the fact that Runtime::NotifyOSR
  // doesn't do any garbage collection which allows us to save/restore
  // the registers without worrying about which of them contain
  // pointers. This seems a bit fragile.
  RegList saved_regs =
      (kJSCallerSaved | kCalleeSaved | ra.bit() | fp.bit()) & ~sp.bit();
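  // (sp is excluded from the list because MultiPush/MultiPop adjust it
  // themselves while storing and reloading the registers.)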
  __ MultiPush(saved_regs);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kNotifyOSR, 0);
  }
  __ MultiPop(saved_regs);
  __ Ret();
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  CpuFeatures::TryForceFeatureScope scope(VFP3);
  if (!CpuFeatures::IsSupported(FPU)) {
    __ Abort("Unreachable code: Cannot optimize without FPU support.");
    return;
  }

  // Lookup the function in the JavaScript frame and push it as an
  // argument to the on-stack replacement function.
  __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(a0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  }

  // If the result was -1 it means that we couldn't optimize the
  // function. Just return and continue in the unoptimized version.
  __ Ret(eq, v0, Operand(Smi::FromInt(-1)));
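  // (This Ret is conditional: it returns only when v0 equals Smi(-1);
  // otherwise execution falls through to the OSR entry generation below.)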

  // Untag the AST id and push it on the stack.
  __ SmiUntag(v0);
  __ push(v0);

  // Generate the code for doing the frame-to-frame translation using
  // the deoptimizer infrastructure.
  Deoptimizer::EntryGenerator generator(masm, Deoptimizer::OSR);
  generator.Generate();
}


void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // a0: actual number of arguments
  { Label done;
    __ Branch(&done, ne, a0, Operand(zero_reg));
    __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
    __ push(t2);
    __ Addu(a0, a0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  // a0: actual number of arguments
  Label slow, non_function;
  __ sll(at, a0, kPointerSizeLog2);
  __ addu(at, sp, at);
  __ lw(a1, MemOperand(at));
  __ JumpIfSmi(a1, &non_function);
  __ GetObjectType(a1, a2, a2);
  __ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE));

  // 3a. Patch the first argument if necessary when calling a function.
  // a0: actual number of arguments
  // a1: function
  Label shift_arguments;
  __ li(t0, Operand(0, RelocInfo::NONE));  // Indicate regular JS_FUNCTION.
  { Label convert_to_object, use_global_receiver, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
    __ lw(a3, FieldMemOperand(a3, SharedFunctionInfo::kCompilerHintsOffset));
    __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                                 kSmiTagSize)));
    __ Branch(&shift_arguments, ne, t3, Operand(zero_reg));

    // Do not transform the receiver for native (Compilerhints already in a3).
    __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ Branch(&shift_arguments, ne, t3, Operand(zero_reg));

    // Compute the receiver in non-strict mode.
    // Load first argument in a2. a2 = -kPointerSize(sp + n_args << 2).
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(a2, sp, at);
    __ lw(a2, MemOperand(a2, -kPointerSize));
    // a0: actual number of arguments
    // a1: function
    // a2: first argument
    __ JumpIfSmi(a2, &convert_to_object, t2);

    __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
    __ Branch(&use_global_receiver, eq, a2, Operand(a3));
    __ LoadRoot(a3, Heap::kNullValueRootIndex);
    __ Branch(&use_global_receiver, eq, a2, Operand(a3));

    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ GetObjectType(a2, a3, a3);
    __ Branch(&shift_arguments, ge, a3, Operand(FIRST_SPEC_OBJECT_TYPE));

    __ bind(&convert_to_object);
    // Enter an internal frame in order to preserve argument count.
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ sll(a0, a0, kSmiTagSize);  // Smi tagged.
      __ push(a0);

      __ push(a2);
      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
      __ mov(a2, v0);

      __ pop(a0);
      __ sra(a0, a0, kSmiTagSize);  // Un-tag.
      // Leave internal frame.
    }
    // Restore the function to a1, and the flag to t0.
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(at, sp, at);
    __ lw(a1, MemOperand(at));
    __ li(t0, Operand(0, RelocInfo::NONE));
    __ Branch(&patch_receiver);

    // Use the global receiver object from the called function as the
    // receiver.
    __ bind(&use_global_receiver);
    const int kGlobalIndex =
        Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
    __ lw(a2, FieldMemOperand(cp, kGlobalIndex));
    __ lw(a2, FieldMemOperand(a2, GlobalObject::kNativeContextOffset));
    __ lw(a2, FieldMemOperand(a2, kGlobalIndex));
    __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalReceiverOffset));

    __ bind(&patch_receiver);
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(a3, sp, at);
    __ sw(a2, MemOperand(a3, -kPointerSize));

    __ Branch(&shift_arguments);
  }

  // 3b. Check for function proxy.
  __ bind(&slow);
  __ li(t0, Operand(1, RelocInfo::NONE));  // Indicate function proxy.
  __ Branch(&shift_arguments, eq, a2, Operand(JS_FUNCTION_PROXY_TYPE));

  __ bind(&non_function);
  __ li(t0, Operand(2, RelocInfo::NONE));  // Indicate non-function.

  // 3c. Patch the first argument when calling a non-function. The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  // a0: actual number of arguments
  // a1: function
  // t0: call type (0: JS function, 1: function proxy, 2: non-function)
  __ sll(at, a0, kPointerSizeLog2);
  __ addu(a2, sp, at);
  __ sw(a1, MemOperand(a2, -kPointerSize));

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver). Adjust argument count to make
  //    the original first argument the new receiver.
  // a0: actual number of arguments
  // a1: function
  // t0: call type (0: JS function, 1: function proxy, 2: non-function)
  __ bind(&shift_arguments);
  { Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(a2, sp, at);

    __ bind(&loop);
    __ lw(at, MemOperand(a2, -kPointerSize));
    __ sw(at, MemOperand(a2));
    __ Subu(a2, a2, Operand(kPointerSize));
    __ Branch(&loop, ne, a2, Operand(sp));
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ Subu(a0, a0, Operand(1));
    __ Pop();
  }

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
  //     or a function proxy via CALL_FUNCTION_PROXY.
  // a0: actual number of arguments
  // a1: function
  // t0: call type (0: JS function, 1: function proxy, 2: non-function)
  { Label function, non_proxy;
    __ Branch(&function, eq, t0, Operand(zero_reg));
    // Expected number of arguments is 0 for CALL_NON_FUNCTION.
    __ mov(a2, zero_reg);
    __ SetCallKind(t1, CALL_AS_METHOD);
    __ Branch(&non_proxy, ne, t0, Operand(1));

    __ push(a1);  // Re-add proxy object as additional argument.
    __ Addu(a0, a0, Operand(1));
    __ GetBuiltinEntry(a3, Builtins::CALL_FUNCTION_PROXY);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    __ bind(&non_proxy);
    __ GetBuiltinEntry(a3, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing. If so, jump
  //     (tail-call) to the code in register a3 without checking arguments.
  // a0: actual number of arguments
  // a1: function
  __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a2,
         FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
  __ sra(a2, a2, kSmiTagSize);
  __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ SetCallKind(t1, CALL_AS_METHOD);
  // Check formal and actual parameter counts.
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET, ne, a2, Operand(a0));

  ParameterCount expected(0);
  __ InvokeCode(a3, expected, expected, JUMP_FUNCTION,
                NullCallWrapper(), CALL_AS_METHOD);
}


void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  const int kIndexOffset    = -5 * kPointerSize;
  const int kLimitOffset    = -4 * kPointerSize;
  const int kArgsOffset     =  2 * kPointerSize;
  const int kRecvOffset     =  3 * kPointerSize;
  const int kFunctionOffset =  4 * kPointerSize;

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    __ lw(a0, MemOperand(fp, kFunctionOffset));  // Get the function.
    __ push(a0);
    __ lw(a0, MemOperand(fp, kArgsOffset));  // Get the args array.
    __ push(a0);
    // Returns (in v0) number of arguments to copy to stack as Smi.
    __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);

    // Check the stack for overflow. We are not trying to catch
    // interruptions (e.g. debug break and preemption) here, so the "real stack
    // limit" is checked.
    Label okay;
    __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
    // Make a2 the space we have left. The stack might already be overflowed
    // here which will cause a2 to become negative.
    __ subu(a2, sp, a2);
    // Check if the arguments will overflow the stack.
    __ sll(t3, v0, kPointerSizeLog2 - kSmiTagSize);
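    // (v0 is a smi, i.e. argc << 1, so one further left shift by
    // kPointerSizeLog2 - kSmiTagSize turns it into the size in bytes.)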
1563  __ Branch(&okay, gt, a2, Operand(t3)); // Signed comparison.
1564 
1565  // Out of stack space.
1566  __ lw(a1, MemOperand(fp, kFunctionOffset));
1567  __ push(a1);
1568  __ push(v0);
1569  __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
1570  // End of stack check.
1571 
1572  // Push current limit and index.
1573  __ bind(&okay);
1574  __ push(v0); // Limit.
1575  __ mov(a1, zero_reg); // Initial index.
1576  __ push(a1);
1577 
1578  // Get the receiver.
1579  __ lw(a0, MemOperand(fp, kRecvOffset));
1580 
1581  // Check that the function is a JS function (otherwise it must be a proxy).
1582  Label push_receiver;
1583  __ lw(a1, MemOperand(fp, kFunctionOffset));
1584  __ GetObjectType(a1, a2, a2);
1585  __ Branch(&push_receiver, ne, a2, Operand(JS_FUNCTION_TYPE));
1586 
1587  // Change context eagerly to get the right global object if necessary.
1589  // Load the shared function info while the function is still in a1.
1591 
1592  // Compute the receiver.
1593  // Do not transform the receiver for strict mode functions.
1594  Label call_to_object, use_global_receiver;
1596  __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
1597  kSmiTagSize)));
1598  __ Branch(&push_receiver, ne, t3, Operand(zero_reg));
1599 
1600  // Do not transform the receiver for native (Compilerhints already in a2).
1601  __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
1602  __ Branch(&push_receiver, ne, t3, Operand(zero_reg));
1603 
1604  // Compute the receiver in non-strict mode.
1605  __ JumpIfSmi(a0, &call_to_object);
1606  __ LoadRoot(a1, Heap::kNullValueRootIndex);
1607  __ Branch(&use_global_receiver, eq, a0, Operand(a1));
1608  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
1609  __ Branch(&use_global_receiver, eq, a0, Operand(a2));
1610 
1611  // Check if the receiver is already a JavaScript object.
1612  // a0: receiver
1614  __ GetObjectType(a0, a1, a1);
1615  __ Branch(&push_receiver, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
1616 
1617  // Convert the receiver to a regular object.
1618  // a0: receiver
1619  __ bind(&call_to_object);
1620  __ push(a0);
1621  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1622  __ mov(a0, v0); // Put object in a0 to match other paths to push_receiver.
1623  __ Branch(&push_receiver);
1624 
1625  // Use the current global receiver object as the receiver.
1626  __ bind(&use_global_receiver);
1627  const int kGlobalOffset =
1629  __ lw(a0, FieldMemOperand(cp, kGlobalOffset));
1631  __ lw(a0, FieldMemOperand(a0, kGlobalOffset));
1633 
1634  // Push the receiver.
1635  // a0: receiver
1636  __ bind(&push_receiver);
1637  __ push(a0);
1638 
1639  // Copy all arguments from the array to the stack.
1640  Label entry, loop;
1641  __ lw(a0, MemOperand(fp, kIndexOffset));
1642  __ Branch(&entry);
1643 
1644  // Load the current argument from the arguments array and push it to the
1645  // stack.
1646  // a0: current argument index
1647  __ bind(&loop);
1648  __ lw(a1, MemOperand(fp, kArgsOffset));
1649  __ push(a1);
1650  __ push(a0);
1651 
1652  // Call the runtime to access the property in the arguments array.
1653  __ CallRuntime(Runtime::kGetProperty, 2);
1654  __ push(v0);
1655 
1656  // Use inline caching to access the arguments.
1657  __ lw(a0, MemOperand(fp, kIndexOffset));
1658  __ Addu(a0, a0, Operand(1 << kSmiTagSize));
1659  __ sw(a0, MemOperand(fp, kIndexOffset));
1660 
1661  // Test if the copy loop has finished copying all the elements from the
1662  // arguments object.
1663  __ bind(&entry);
1664  __ lw(a1, MemOperand(fp, kLimitOffset));
1665  __ Branch(&loop, ne, a0, Operand(a1));
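 // (Note that every iteration of this loop performs a full call to
 // Runtime::kGetProperty, i.e. Function.prototype.apply pays one runtime
 // call per element of the arguments array on this path.)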
1666 
1667  // Invoke the function.
1668  Label call_proxy;
1669  ParameterCount actual(a0);
1670  __ sra(a0, a0, kSmiTagSize);
1671  __ lw(a1, MemOperand(fp, kFunctionOffset));
1672  __ GetObjectType(a1, a2, a2);
1673  __ Branch(&call_proxy, ne, a2, Operand(JS_FUNCTION_TYPE));
1674 
1675  __ InvokeFunction(a1, actual, CALL_FUNCTION,
1676  NullCallWrapper(), CALL_AS_METHOD);
1677 
1678  frame_scope.GenerateLeaveFrame();
1679  __ Ret(USE_DELAY_SLOT);
1680  __ Addu(sp, sp, Operand(3 * kPointerSize)); // In delay slot.
1681 
1682  // Invoke the function proxy.
1683  __ bind(&call_proxy);
1684  __ push(a1); // Add function proxy as last argument.
1685  __ Addu(a0, a0, Operand(1));
1686  __ li(a2, Operand(0, RelocInfo::NONE));
1687  __ SetCallKind(t1, CALL_AS_METHOD);
1688  __ GetBuiltinEntry(a3, Builtins::CALL_FUNCTION_PROXY);
1689  __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1690  RelocInfo::CODE_TARGET);
1691  // Tear down the internal frame and remove function, receiver and args.
1692  }
1693 
1694  __ Ret(USE_DELAY_SLOT);
1695  __ Addu(sp, sp, Operand(3 * kPointerSize)); // In delay slot.
1696 }
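// The smi arithmetic used throughout the builtin above recurs in the rest of
// this file. A minimal standalone sketch of it (illustrative only, not part
// of the V8 sources; it assumes the 32-bit smi encoding with kSmiTag == 0,
// kSmiTagSize == 1 and kPointerSizeLog2 == 2, as defined elsewhere):

static inline int32_t SmiTag(int32_t value) { return value << kSmiTagSize; }
static inline int32_t SmiUntag(int32_t smi) { return smi >> kSmiTagSize; }
static inline int32_t SmiCountToByteOffset(int32_t smi_count) {
  // Equivalent of "sll(reg, smi_count, kPointerSizeLog2 - kSmiTagSize)":
  // the smi already encodes value * 2, so one more left shift yields
  // value * kPointerSize.
  return smi_count << (kPointerSizeLog2 - kSmiTagSize);
}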
1697 
1698 
1699 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1700  __ sll(a0, a0, kSmiTagSize);
1701  __ li(t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
1702  __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit());
1703  __ Addu(fp, sp, Operand(3 * kPointerSize));
1704 }
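 // After EnterArgumentsAdaptorFrame the frame looks like this (one word per
 // slot; MultiPush stores higher-numbered registers at higher addresses, and
 // fp is then set to sp + 3 * kPointerSize):
 //
 //   fp + 4  : saved ra
 //   fp + 0  : caller's fp
 //   fp - 4  : Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR) frame marker (t0)
 //   fp - 8  : function (a1)
 //   fp - 12 : actual argument count, as a smi (a0)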
1705 
1706 
1707 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1708  // ----------- S t a t e -------------
1709  // -- v0 : result being passed through
1710  // -----------------------------------
1711  // Get the number of arguments passed (as a smi), tear down the frame,
1712  // and then remove the parameters from the stack.
1713  __ lw(a1, MemOperand(fp, -3 * kPointerSize));
1714  __ mov(sp, fp);
1715  __ MultiPop(fp.bit() | ra.bit());
1716  __ sll(t0, a1, kPointerSizeLog2 - kSmiTagSize);
1717  __ Addu(sp, sp, t0);
1718  // Adjust for the receiver.
1719  __ Addu(sp, sp, Operand(kPointerSize));
1720 }
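// A small sketch of the address arithmetic performed above (hypothetical
// helper for illustration, not in the V8 sources; assumes 32-bit pointers):

static inline uint32_t AdaptorFrameExitSp(uint32_t fp, int32_t argc_smi) {
  uint32_t sp = fp;                                    // mov(sp, fp)
  sp += 2 * kPointerSize;                              // MultiPop of fp and ra
  sp += argc_smi << (kPointerSizeLog2 - kSmiTagSize);  // drop the arguments
  sp += kPointerSize;                                  // drop the receiver
  return sp;
}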
1721 
1722 
1723 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1724  // State setup as expected by MacroAssembler::InvokePrologue.
1725  // ----------- S t a t e -------------
1726  // -- a0: actual arguments count
1727  // -- a1: function (passed through to callee)
1728  // -- a2: expected arguments count
1729  // -- a3: callee code entry
1730  // -- t1: call kind information
1731  // -----------------------------------
1732 
1733  Label invoke, dont_adapt_arguments;
1734 
1735  Label enough, too_few;
1736  __ Branch(&dont_adapt_arguments, eq,
1737  a2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
1738  // We use Uless as the number of arguments should always be greater than 0.
1739  __ Branch(&too_few, Uless, a0, Operand(a2));
1740 
1741  { // Enough parameters: actual >= expected.
1742  // a0: actual number of arguments as a smi
1743  // a1: function
1744  // a2: expected number of arguments
1745  // a3: code entry to call
1746  __ bind(&enough);
1747  EnterArgumentsAdaptorFrame(masm);
1748 
1749  // Calculate copy start address into a0 and copy end address into a2.
1750  __ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize);
1751  __ Addu(a0, fp, a0);
1752  // Adjust for return address and receiver.
1753  __ Addu(a0, a0, Operand(2 * kPointerSize));
1754  // Compute copy end address.
1755  __ sll(a2, a2, kPointerSizeLog2);
1756  __ subu(a2, a0, a2);
1757 
1758  // Copy the arguments (including the receiver) to the new stack frame.
1759  // a0: copy start address
1760  // a1: function
1761  // a2: copy end address
1762  // a3: code entry to call
1763 
1764  Label copy;
1765  __ bind(&copy);
1766  __ lw(t0, MemOperand(a0));
1767  __ push(t0);
1768  __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a2));
1769  __ addiu(a0, a0, -kPointerSize); // In delay slot.
1770 
1771  __ jmp(&invoke);
1772  }
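 // (The copy loop above transfers expected + 1 words, the receiver plus the
 // first "expected" arguments, walking downwards from the receiver's slot.
 // The delay-slot decrement of a0 executes on every iteration, including the
 // final one where the branch is not taken.)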
1773 
1774  { // Too few parameters: Actual < expected.
1775  __ bind(&too_few);
1776  EnterArgumentsAdaptorFrame(masm);
1777 
1778  // Calculate copy start address into a0 and copy end address into t3.
1779  // a0: actual number of arguments as a smi
1780  // a1: function
1781  // a2: expected number of arguments
1782  // a3: code entry to call
1783  __ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize);
1784  __ Addu(a0, fp, a0);
1785  // Adjust for return address and receiver.
1786  __ Addu(a0, a0, Operand(2 * kPointerSize));
1787  // Compute copy end address. Also adjust for return address.
1788  __ Addu(t3, fp, kPointerSize);
1789 
1790  // Copy the arguments (including the receiver) to the new stack frame.
1791  // a0: copy start address
1792  // a1: function
1793  // a2: expected number of arguments
1794  // a3: code entry to call
1795  // t3: copy end address
1796  Label copy;
1797  __ bind(&copy);
1798  __ lw(t0, MemOperand(a0)); // Adjusted above for return addr and receiver.
1799  __ Subu(sp, sp, kPointerSize);
1800  __ Subu(a0, a0, kPointerSize);
1801  __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t3));
1802  __ sw(t0, MemOperand(sp)); // In the delay slot.
1803 
1804  // Fill the remaining expected arguments with undefined.
1805  // a1: function
1806  // a2: expected number of arguments
1807  // a3: code entry to call
1808  __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
1809  __ sll(t2, a2, kPointerSizeLog2);
1810  __ Subu(a2, fp, Operand(t2));
1811  __ Addu(a2, a2, Operand(-4 * kPointerSize)); // Adjust for frame.
1812 
1813  Label fill;
1814  __ bind(&fill);
1815  __ Subu(sp, sp, kPointerSize);
1816  __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a2));
1817  __ sw(t0, MemOperand(sp));
1818  }
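 // (End state of this path: the receiver and all "actual" arguments have
 // been copied below the adaptor frame, and the fill loop has pushed
 // undefined until sp reached fp - (4 + expected) * kPointerSize, so the
 // callee sees exactly "expected" arguments plus the receiver.)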
1819 
1820  // Call the entry point.
1821  __ bind(&invoke);
1822 
1823  __ Call(a3);
1824 
1825  // Store offset of return address for deoptimizer.
1826  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
1827 
1828  // Exit frame and return.
1829  LeaveArgumentsAdaptorFrame(masm);
1830  __ Ret();
1831 
1832 
1833  // -------------------------------------------
1834  // Don't adapt arguments.
1835  // -------------------------------------------
1836  __ bind(&dont_adapt_arguments);
1837  __ Jump(a3);
1838 }
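// The adaptation policy implemented by the trampoline, as a C-level sketch
// (hypothetical helper, illustrative only; int32_t stands in for tagged
// values):

static void AdaptArguments(int actual, int expected,
                           const int32_t* args,      // caller's args, in order
                           int32_t undefined_value,  // the undefined sentinel
                           int32_t* adapted) {       // size >= expected
  for (int i = 0; i < expected; i++) {
    // Surplus actual arguments are not copied into the callee's frame;
    // missing ones are filled with undefined.
    adapted[i] = (i < actual) ? args[i] : undefined_value;
  }
}

// The dont_adapt_arguments fast path corresponds to expected ==
// SharedFunctionInfo::kDontAdaptArgumentsSentinel: the callee is invoked on
// the caller's frame as-is and no copying happens at all.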
1839 
1840 
1841 #undef __
1842 
1843 } } // namespace v8::internal
1844 
1845 #endif // V8_TARGET_ARCH_MIPS