V8 3.14.5 (Node.js 0.10.28)
V8 is Google's open source JavaScript engine
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
builtins-ia32.cc
Go to the documentation of this file.
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if defined(V8_TARGET_ARCH_IA32)
31 
32 #include "codegen.h"
33 #include "deoptimizer.h"
34 #include "full-codegen.h"
35 
36 namespace v8 {
37 namespace internal {
38 
39 
40 #define __ ACCESS_MASM(masm)
41 
42 
// Builds the adaptor stub that forwards a builtin call into C++ via
// JumpToExternalReference, optionally pushing the called function (edi)
// as an extra argument when the builtin needs it.
void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- eax                : number of arguments excluding receiver
  //  -- edi                : called function (only guaranteed when
  //                          extra_args requires it)
  //  -- esi                : context
  //  -- esp[0]             : return address
  //  -- esp[4]             : last argument
  //  -- ...
  //  -- esp[4 * argc]      : first argument (argc == eax)
  //  -- esp[4 * (argc +1)] : receiver
  // -----------------------------------

  // Insert extra arguments under the return address.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    Register scratch = ebx;
    __ pop(scratch);   // Save return address.
    __ push(edi);      // The called function becomes an extra argument.
    __ push(scratch);  // Restore return address.
  } else {
    ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects eax to contain the number of arguments
  // including the receiver and the extra arguments.
  __ add(eax, Immediate(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}
75 
76 
// Tail-calls through eax; callers use this to jump to a function's
// (unoptimized) shared code after a runtime call.
// NOTE(review): the statements that load the code entry into eax
// (doxygen source lines 78-80) were dropped by this extraction — verify
// against upstream src/ia32/builtins-ia32.cc before relying on this listing.
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ jmp(eax);
}
83 
84 
// Stub installed while a function sits in the recompile queue: simply
// tail-calls the function's shared (unoptimized) code.
void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
  GenerateTailCallToSharedCode(masm);
}
88 
89 
// Calls Runtime::kParallelRecompile for the function in edi, preserving the
// function (edi) and the call kind information (ecx) across the runtime
// call, then tail-calls the function's shared (unoptimized) code.
void Builtins::Generate_ParallelRecompile(MacroAssembler* masm) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Push a copy of the function onto the stack.
    __ push(edi);
    // Push call kind information.
    __ push(ecx);

    __ push(edi);  // Function is also the parameter to the runtime call.
    __ CallRuntime(Runtime::kParallelRecompile, 1);

    // Restore call kind information.
    __ pop(ecx);
    // Restore receiver.
    __ pop(edi);

    // Tear down internal frame.
  }

  GenerateTailCallToSharedCode(masm);
}
112 
113 
// Shared body of the JSConstructStub variants: allocates the receiver
// object (inline when FLAG_inline_new allows, otherwise via
// Runtime::kNewObject), copies the caller's arguments onto the expression
// stack, invokes the constructor, and applies the ECMA-262 13.2.2 rule for
// choosing between the returned value and the receiver.
// NOTE(review): several statements in this listing were dropped by the
// doxygen extraction (flagged inline below) — verify against upstream
// src/ia32/builtins-ia32.cc before relying on it.
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool count_constructions) {
  // ----------- S t a t e -------------
  //  -- eax: number of arguments
  //  -- edi: constructor function
  // -----------------------------------

  // Should never count constructions for api objects.
  ASSERT(!is_api_function || !count_constructions);

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Store a smi-tagged arguments count on the stack.
    __ SmiTag(eax);
    __ push(eax);

    // Push the function to invoke on the stack.
    __ push(edi);

    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    Label rt_call, allocated;
    if (FLAG_inline_new) {
      Label undo_allocation;
#ifdef ENABLE_DEBUGGER_SUPPORT
      // Bail out to the runtime while the debugger is stepping into calls so
      // the step-in machinery sees the constructor invocation.
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(masm->isolate());
      __ cmp(Operand::StaticVariable(debug_step_in_fp), Immediate(0));
      __ j(not_equal, &rt_call);
#endif

      // Verified that the constructor is a JSFunction.
      // Load the initial map and verify that it is in fact a map.
      // edi: constructor
      // NOTE(review): the load of the initial map into eax was dropped by
      // the extraction here.
      // Will both indicate a NULL and a Smi
      __ JumpIfSmi(eax, &rt_call);
      // edi: constructor
      // eax: initial map (if proven valid below)
      __ CmpObjectType(eax, MAP_TYPE, ebx);
      __ j(not_equal, &rt_call);

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc). In which case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      // edi: constructor
      // eax: initial map
      __ CmpInstanceType(eax, JS_FUNCTION_TYPE);
      __ j(equal, &rt_call);

      if (count_constructions) {
        Label allocate;
        // Decrease generous allocation count.
        // NOTE(review): the load of ecx and the operand of dec_b were
        // truncated by the extraction.
        __ dec_b(FieldOperand(ecx,
        __ j(not_zero, &allocate);

        __ push(eax);
        __ push(edi);

        __ push(edi);  // constructor
        // The call will replace the stub, so the countdown is only done once.
        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

        __ pop(edi);
        __ pop(eax);

        __ bind(&allocate);
      }

      // Now allocate the JSObject on the heap.
      // edi: constructor
      // eax: initial map
      // NOTE(review): the load of the instance size into edi was dropped by
      // the extraction here.
      __ shl(edi, kPointerSizeLog2);
      __ AllocateInNewSpace(
          edi, ebx, edi, no_reg, &rt_call, NO_ALLOCATION_FLAGS);
      // Allocated the JSObject, now initialize the fields.
      // eax: initial map
      // ebx: JSObject
      // edi: start of next object
      __ mov(Operand(ebx, JSObject::kMapOffset), eax);
      Factory* factory = masm->isolate()->factory();
      __ mov(ecx, factory->empty_fixed_array());
      __ mov(Operand(ebx, JSObject::kPropertiesOffset), ecx);
      __ mov(Operand(ebx, JSObject::kElementsOffset), ecx);
      // Set extra fields in the newly allocated object.
      // eax: initial map
      // ebx: JSObject
      // edi: start of next object
      __ lea(ecx, Operand(ebx, JSObject::kHeaderSize));
      __ mov(edx, factory->undefined_value());
      if (count_constructions) {
        // NOTE(review): the operands of the movzx_b and lea below were
        // truncated by the extraction.
        __ movzx_b(esi,
        __ lea(esi,
        // esi: offset of first field after pre-allocated fields
        if (FLAG_debug_code) {
          __ cmp(esi, edi);
          __ Assert(less_equal,
                    "Unexpected number of pre-allocated property fields.");
        }
        // Pre-allocated fields get undefined; the rest get the
        // one-pointer filler so slack tracking can shrink the object later.
        __ InitializeFieldsWithFiller(ecx, esi, edx);
        __ mov(edx, factory->one_pointer_filler_map());
      }
      __ InitializeFieldsWithFiller(ecx, edi, edx);

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on. Any
      // failures need to undo the allocation, so that the heap is in a
      // consistent state and verifiable.
      // eax: initial map
      // ebx: JSObject
      // edi: start of next object
      __ or_(ebx, Immediate(kHeapObjectTag));

      // Check if a non-empty properties array is needed.
      // Allocate and initialize a FixedArray if it is.
      // eax: initial map
      // ebx: JSObject
      // edi: start of next object
      // Calculate the total number of properties described by the map.
      // NOTE(review): the operand of the movzx_b and an adjacent load were
      // truncated by the extraction.
      __ movzx_b(ecx,
      __ add(edx, ecx);
      // Calculate unused properties past the end of the in-object properties.
      __ sub(edx, ecx);
      // Done if no extra properties are to be allocated.
      __ j(zero, &allocated);
      __ Assert(positive, "Property allocation count failed.");

      // Scale the number of elements by pointer size and add the header for
      // FixedArrays to the start of the next object calculation from above.
      // ebx: JSObject
      // edi: start of next object (will be start of FixedArray)
      // edx: number of elements in properties array
      // NOTE(review): the scale argument and the final flags argument of
      // this call were dropped by the extraction.
      __ AllocateInNewSpace(FixedArray::kHeaderSize,
                            edx,
                            edi,
                            ecx,
                            no_reg,
                            &undo_allocation,

      // Initialize the FixedArray.
      // ebx: JSObject
      // edi: FixedArray
      // edx: number of elements
      // ecx: start of next object
      __ mov(eax, factory->fixed_array_map());
      __ mov(Operand(edi, FixedArray::kMapOffset), eax);  // setup the map
      __ SmiTag(edx);
      __ mov(Operand(edi, FixedArray::kLengthOffset), edx);  // and length

      // Initialize the fields to undefined.
      // ebx: JSObject
      // edi: FixedArray
      // ecx: start of next object
      { Label loop, entry;
        __ mov(edx, factory->undefined_value());
        __ lea(eax, Operand(edi, FixedArray::kHeaderSize));
        __ jmp(&entry);
        __ bind(&loop);
        __ mov(Operand(eax, 0), edx);
        __ add(eax, Immediate(kPointerSize));
        __ bind(&entry);
        __ cmp(eax, ecx);
        __ j(below, &loop);
      }

      // Store the initialized FixedArray into the properties field of
      // the JSObject
      // ebx: JSObject
      // edi: FixedArray
      __ or_(edi, Immediate(kHeapObjectTag));  // add the heap tag
      // NOTE(review): the store of edi into the object's properties field
      // was dropped by the extraction here.

      // Continue with JSObject being successfully allocated
      // ebx: JSObject
      __ jmp(&allocated);

      // Undo the setting of the new top so that the heap is verifiable. For
      // example, the map's unused properties potentially do not match the
      // allocated objects unused properties.
      // ebx: JSObject (previous new top)
      __ bind(&undo_allocation);
      __ UndoAllocationInNewSpace(ebx);
    }

    // Allocate the new receiver object using the runtime call.
    __ bind(&rt_call);
    // Must restore edi (constructor) before calling runtime.
    __ mov(edi, Operand(esp, 0));
    // edi: function (constructor)
    __ push(edi);
    __ CallRuntime(Runtime::kNewObject, 1);
    __ mov(ebx, eax);  // store result in ebx

    // New object allocated.
    // ebx: newly allocated object
    __ bind(&allocated);
    // Retrieve the function from the stack.
    __ pop(edi);

    // Retrieve smi-tagged arguments count from the stack.
    __ mov(eax, Operand(esp, 0));
    __ SmiUntag(eax);

    // Push the allocated receiver to the stack. We need two copies
    // because we may have to return the original one and the calling
    // conventions dictate that the called function pops the receiver.
    __ push(ebx);
    __ push(ebx);

    // Set up pointer to last argument.
    // NOTE(review): the lea that sets up ebx as the argument base pointer
    // was dropped by the extraction here.

    // Copy arguments and receiver to the expression stack.
    Label loop, entry;
    __ mov(ecx, eax);
    __ jmp(&entry);
    __ bind(&loop);
    __ push(Operand(ebx, ecx, times_4, 0));
    __ bind(&entry);
    __ dec(ecx);
    __ j(greater_equal, &loop);

    // Call the function.
    if (is_api_function) {
      // NOTE(review): the context load for the api call was dropped by the
      // extraction here.
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      ParameterCount expected(0);
      __ InvokeCode(code, expected, expected, RelocInfo::CODE_TARGET,
                    CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
    } else {
      ParameterCount actual(eax);
      __ InvokeFunction(edi, actual, CALL_FUNCTION,
                        NullCallWrapper(), CALL_AS_METHOD);
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function && !count_constructions) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    // NOTE(review): the context-restoring mov was dropped by the extraction
    // here.

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, exit;

    // If the result is a smi, it is *not* an object in the ECMA sense.
    __ JumpIfSmi(eax, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
    __ j(above_equal, &exit);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ mov(eax, Operand(esp, 0));

    // Restore the arguments count and leave the construct frame.
    __ bind(&exit);
    __ mov(ebx, Operand(esp, kPointerSize));  // Get arguments count.

    // Leave construct frame.
  }

  // Remove caller arguments from the stack and return.
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ pop(ecx);
  __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize));  // 1 ~ receiver
  __ push(ecx);
  __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1);
  __ ret(0);
}
405 
406 
// Construct stub that still counts constructions (slack tracking active).
void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true);
}
410 
411 
// Generic construct stub: no api call, no construction counting.
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false);
}
415 
416 
// Construct stub for api functions (dispatches through HandleApiCallConstruct).
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}
420 
421 
// Shared body of the JS entry trampolines: copies the C-level argv onto the
// stack (dereferencing each handle) and invokes the function — as a
// construct call via CallConstructStub when is_construct, otherwise as a
// normal call.
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Clear the context before we push it when entering the internal frame.
  __ Set(esi, Immediate(0));

  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Load the previous frame pointer (ebx) to access C arguments
    __ mov(ebx, Operand(ebp, 0));

    // Get the function from the frame and setup the context.
    // NOTE(review): the loads of the function into ecx and of the context
    // were dropped by this doxygen extraction — verify against upstream
    // src/ia32/builtins-ia32.cc.

    // Push the function and the receiver onto the stack.
    __ push(ecx);
    // NOTE(review): the push of the receiver was dropped by the extraction
    // here.

    // Load the number of arguments and setup pointer to the arguments.
    __ mov(eax, Operand(ebx, EntryFrameConstants::kArgcOffset));
    __ mov(ebx, Operand(ebx, EntryFrameConstants::kArgvOffset));

    // Copy arguments to the stack in a loop.
    Label loop, entry;
    __ Set(ecx, Immediate(0));
    __ jmp(&entry);
    __ bind(&loop);
    __ mov(edx, Operand(ebx, ecx, times_4, 0));  // push parameter from argv
    __ push(Operand(edx, 0));  // dereference handle
    __ inc(ecx);
    __ bind(&entry);
    __ cmp(ecx, eax);
    __ j(not_equal, &loop);

    // Get the function from the stack and call it.
    // kPointerSize for the receiver.
    __ mov(edi, Operand(esp, eax, times_4, kPointerSize));

    // Invoke the code.
    if (is_construct) {
      CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(eax);
      __ InvokeFunction(edi, actual, CALL_FUNCTION,
                        NullCallWrapper(), CALL_AS_METHOD);
    }

    // Exit the internal frame. Notice that this also removes the empty
    // context and the function left on the stack by the code invocation.
  }
  __ ret(kPointerSize);  // Remove receiver.
}
477 
478 
// Entry trampoline for a normal (non-construct) call from C++ into JS.
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}
482 
483 
// Entry trampoline for a construct call from C++ into JS.
void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}
487 
488 
// Stub installed on not-yet-compiled functions: calls Runtime::kLazyCompile
// for edi (preserving edi and the call kind in ecx), then tail-calls the
// freshly compiled code returned in eax.
void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Push a copy of the function.
    __ push(edi);
    // Push call kind information.
    __ push(ecx);

    __ push(edi);  // Function is also the parameter to the runtime call.
    __ CallRuntime(Runtime::kLazyCompile, 1);

    // Restore call kind information.
    __ pop(ecx);
    // Restore receiver.
    __ pop(edi);

    // Tear down internal frame.
  }

  // Do a tail-call of the compiled function.
  // NOTE(review): a statement adjusting eax to the code entry point
  // (doxygen source line 510) was dropped by this extraction — verify
  // against upstream src/ia32/builtins-ia32.cc.
  __ jmp(eax);
}
513 
514 
// Stub that triggers optimizing recompilation: calls
// Runtime::kLazyRecompile for edi (preserving edi and the call kind in
// ecx), then tail-calls the resulting code returned in eax.
void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Push a copy of the function onto the stack.
    __ push(edi);
    // Push call kind information.
    __ push(ecx);

    __ push(edi);  // Function is also the parameter to the runtime call.
    __ CallRuntime(Runtime::kLazyRecompile, 1);

    // Restore call kind information.
    __ pop(ecx);
    // Restore receiver.
    __ pop(edi);

    // Tear down internal frame.
  }

  // Do a tail-call of the compiled function.
  // NOTE(review): a statement adjusting eax to the code entry point
  // (doxygen source line 536) was dropped by this extraction — verify
  // against upstream src/ia32/builtins-ia32.cc.
  __ jmp(eax);
}
539 
540 
// Shared body of the deoptimization-notification builtins: calls
// Runtime::kNotifyDeoptimized with the (smi-tagged) bailout type, then
// returns according to the full-codegen state found on the stack.
// NOTE(review): the second parameter line (the Deoptimizer bailout type
// used as `type` below, doxygen source line 542) was dropped by this
// extraction — verify against upstream src/ia32/builtins-ia32.cc.
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Pass deoptimization type to the runtime system.
    __ push(Immediate(Smi::FromInt(static_cast<int>(type))));
    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);

    // Tear down internal frame.
  }

  // Get the full codegen state from the stack and untag it.
  __ mov(ecx, Operand(esp, 1 * kPointerSize));
  __ SmiUntag(ecx);

  // Switch on the state.
  Label not_no_registers, not_tos_eax;
  // NOTE(review): the cmp of ecx against the no-registers state was dropped
  // by the extraction here.
  __ j(not_equal, &not_no_registers, Label::kNear);
  __ ret(1 * kPointerSize);  // Remove state.

  __ bind(&not_no_registers);
  __ mov(eax, Operand(esp, 2 * kPointerSize));
  // NOTE(review): the cmp of ecx against the TOS-in-eax state was dropped
  // by the extraction here.
  __ j(not_equal, &not_tos_eax, Label::kNear);
  __ ret(2 * kPointerSize);  // Remove state, eax.

  __ bind(&not_tos_eax);
  __ Abort("no cases left");
}
572 
573 
// Notification builtin for eager deoptimization.
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
577 
578 
// Notification builtin for lazy deoptimization.
void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
582 
583 
// Notifies the runtime of an on-stack replacement. All general-purpose
// registers are saved around the runtime call with pushad/popad.
void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
  // TODO(kasperl): Do we need to save/restore the XMM registers too?

  // For now, we are relying on the fact that Runtime::NotifyOSR
  // doesn't do any garbage collection which allows us to save/restore
  // the registers without worrying about which of them contain
  // pointers. This seems a bit fragile.
  __ pushad();
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kNotifyOSR, 0);
  }
  __ popad();
  __ ret(0);
}
599 
600 
// Implements Function.prototype.call: normalizes the argument count,
// resolves the callee (function, proxy, or non-function — tracked in edx),
// patches/shifts the receiver, and tail-calls either the function's code or
// the appropriate CALL_* builtin through the arguments adaptor.
// NOTE(review): several statements in this listing were dropped by the
// doxygen extraction (flagged inline below) — verify against upstream
// src/ia32/builtins-ia32.cc.
void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  Factory* factory = masm->isolate()->factory();

  // 1. Make sure we have at least one argument.
  { Label done;
    __ test(eax, eax);
    __ j(not_zero, &done);
    __ pop(ebx);
    __ push(Immediate(factory->undefined_value()));
    __ push(ebx);
    __ inc(eax);
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  Label slow, non_function;
  // 1 ~ return address.
  __ mov(edi, Operand(esp, eax, times_4, 1 * kPointerSize));
  __ JumpIfSmi(edi, &non_function);
  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
  __ j(not_equal, &slow);


  // 3a. Patch the first argument if necessary when calling a function.
  Label shift_arguments;
  __ Set(edx, Immediate(0));  // indicate regular JS_FUNCTION
  { Label convert_to_object, use_global_receiver, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    // NOTE(review): the context load was dropped by the extraction here.

    // Do not transform the receiver for strict mode functions.
    // NOTE(review): the shared-info load and strict-mode test were dropped
    // by the extraction here.
    __ j(not_equal, &shift_arguments);

    // Do not transform the receiver for natives (shared already in ebx).
    // NOTE(review): the native test was dropped by the extraction here.
    __ j(not_equal, &shift_arguments);

    // Compute the receiver in non-strict mode.
    __ mov(ebx, Operand(esp, eax, times_4, 0));  // First argument.

    // Call ToObject on the receiver if it is not an object, or use the
    // global object if it is null or undefined.
    __ JumpIfSmi(ebx, &convert_to_object);
    __ cmp(ebx, factory->null_value());
    __ j(equal, &use_global_receiver);
    __ cmp(ebx, factory->undefined_value());
    __ j(equal, &use_global_receiver);
    __ CmpObjectType(ebx, FIRST_SPEC_OBJECT_TYPE, ecx);
    __ j(above_equal, &shift_arguments);

    __ bind(&convert_to_object);

    { // In order to preserve argument count.
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ SmiTag(eax);
      __ push(eax);

      __ push(ebx);
      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
      __ mov(ebx, eax);
      __ Set(edx, Immediate(0));  // restore

      __ pop(eax);
      __ SmiUntag(eax);
    }

    // Restore the function to edi.
    __ mov(edi, Operand(esp, eax, times_4, 1 * kPointerSize));
    __ jmp(&patch_receiver);

    // Use the global receiver object from the called function as the
    // receiver.
    __ bind(&use_global_receiver);
    // NOTE(review): the initializer of kGlobalIndex and two intermediate
    // loads were truncated/dropped by the extraction below.
    const int kGlobalIndex =
    __ mov(ebx, FieldOperand(esi, kGlobalIndex));
    __ mov(ebx, FieldOperand(ebx, kGlobalIndex));

    __ bind(&patch_receiver);
    __ mov(Operand(esp, eax, times_4, 0), ebx);

    __ jmp(&shift_arguments);
  }

  // 3b. Check for function proxy.
  __ bind(&slow);
  __ Set(edx, Immediate(1));  // indicate function proxy
  __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
  __ j(equal, &shift_arguments);
  __ bind(&non_function);
  __ Set(edx, Immediate(2));  // indicate non-function

  // 3c. Patch the first argument when calling a non-function. The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  __ mov(Operand(esp, eax, times_4, 0), edi);

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver).  Adjust argument count to make
  //    the original first argument the new receiver.
  __ bind(&shift_arguments);
  { Label loop;
    __ mov(ecx, eax);
    __ bind(&loop);
    __ mov(ebx, Operand(esp, ecx, times_4, 0));
    __ mov(Operand(esp, ecx, times_4, kPointerSize), ebx);
    __ dec(ecx);
    __ j(not_sign, &loop);  // While non-negative (to copy return address).
    __ pop(ebx);  // Discard copy of return address.
    __ dec(eax);  // One fewer argument (first argument is new receiver).
  }

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
  //     or a function proxy via CALL_FUNCTION_PROXY.
  { Label function, non_proxy;
    __ test(edx, edx);
    __ j(zero, &function);
    __ Set(ebx, Immediate(0));
    __ cmp(edx, Immediate(1));
    __ j(not_equal, &non_proxy);

    __ pop(edx);   // return address
    __ push(edi);  // re-add proxy object as additional argument
    __ push(edx);
    __ inc(eax);
    __ SetCallKind(ecx, CALL_AS_FUNCTION);
    __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
    __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
           RelocInfo::CODE_TARGET);

    __ bind(&non_proxy);
    __ SetCallKind(ecx, CALL_AS_METHOD);
    __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
    __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
           RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing.  If so, jump
  //     (tail-call) to the code in register edx without checking arguments.
  // NOTE(review): the shared-info load and the operand of the mov below
  // were truncated/dropped by the extraction.
  __ mov(ebx,
  __ SmiUntag(ebx);
  __ SetCallKind(ecx, CALL_AS_METHOD);
  __ cmp(eax, ebx);
  __ j(not_equal,
       masm->isolate()->builtins()->ArgumentsAdaptorTrampoline());

  ParameterCount expected(0);
  // NOTE(review): the final call-kind argument of InvokeCode was dropped by
  // the extraction.
  __ InvokeCode(edx, expected, expected, JUMP_FUNCTION, NullCallWrapper(),
}
765 
766 
// Implements Function.prototype.apply: validates the arguments object via
// APPLY_PREPARE, checks for stack overflow, computes the receiver, unrolls
// the arguments array onto the stack with KeyedLoadIC, and invokes the
// function (or a function proxy through the arguments adaptor).
// NOTE(review): several statements in this listing were dropped by the
// doxygen extraction (flagged inline below) — verify against upstream
// src/ia32/builtins-ia32.cc.
void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  static const int kArgumentsOffset = 2 * kPointerSize;
  static const int kReceiverOffset = 3 * kPointerSize;
  static const int kFunctionOffset = 4 * kPointerSize;
  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);

    __ push(Operand(ebp, kFunctionOffset));   // push this
    __ push(Operand(ebp, kArgumentsOffset));  // push arguments
    __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);

    // Check the stack for overflow. We are not trying to catch
    // interruptions (e.g. debug break and preemption) here, so the "real stack
    // limit" is checked.
    Label okay;
    ExternalReference real_stack_limit =
        ExternalReference::address_of_real_stack_limit(masm->isolate());
    __ mov(edi, Operand::StaticVariable(real_stack_limit));
    // Make ecx the space we have left. The stack might already be overflowed
    // here which will cause ecx to become negative.
    __ mov(ecx, esp);
    __ sub(ecx, edi);
    // Make edx the space we need for the array when it is unrolled onto the
    // stack.
    __ mov(edx, eax);
    // NOTE(review): the shift scaling edx to bytes was dropped by the
    // extraction here.
    // Check if the arguments will overflow the stack.
    __ cmp(ecx, edx);
    __ j(greater, &okay);  // Signed comparison.

    // Out of stack space.
    __ push(Operand(ebp, 4 * kPointerSize));  // push this
    __ push(eax);
    __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
    __ bind(&okay);
    // End of stack check.

    // Push current index and limit.
    // NOTE(review): the initializer of kLimitOffset was truncated by the
    // extraction below.
    const int kLimitOffset =
    const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
    __ push(eax);  // limit
    __ push(Immediate(0));  // index

    // Get the receiver.
    __ mov(ebx, Operand(ebp, kReceiverOffset));

    // Check that the function is a JS function (otherwise it must be a proxy).
    Label push_receiver;
    __ mov(edi, Operand(ebp, kFunctionOffset));
    __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
    __ j(not_equal, &push_receiver);

    // Change context eagerly to get the right global object if necessary.
    // NOTE(review): the context load was dropped by the extraction here.

    // Compute the receiver.
    // Do not transform the receiver for strict mode functions.
    Label call_to_object, use_global_receiver;
    // NOTE(review): the shared-info load and strict-mode test were dropped
    // by the extraction here.
    __ j(not_equal, &push_receiver);

    Factory* factory = masm->isolate()->factory();

    // Do not transform the receiver for natives (shared already in ecx).
    // NOTE(review): the native test was dropped by the extraction here.
    __ j(not_equal, &push_receiver);

    // Compute the receiver in non-strict mode.
    // Call ToObject on the receiver if it is not an object, or use the
    // global object if it is null or undefined.
    __ JumpIfSmi(ebx, &call_to_object);
    __ cmp(ebx, factory->null_value());
    __ j(equal, &use_global_receiver);
    __ cmp(ebx, factory->undefined_value());
    __ j(equal, &use_global_receiver);
    __ CmpObjectType(ebx, FIRST_SPEC_OBJECT_TYPE, ecx);
    __ j(above_equal, &push_receiver);

    __ bind(&call_to_object);
    __ push(ebx);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ mov(ebx, eax);
    __ jmp(&push_receiver);

    // Use the current global receiver object as the receiver.
    __ bind(&use_global_receiver);
    // NOTE(review): the initializer of kGlobalOffset and two intermediate
    // loads were truncated/dropped by the extraction below.
    const int kGlobalOffset =
    __ mov(ebx, FieldOperand(esi, kGlobalOffset));
    __ mov(ebx, FieldOperand(ebx, kGlobalOffset));

    // Push the receiver.
    __ bind(&push_receiver);
    __ push(ebx);

    // Copy all arguments from the array to the stack.
    Label entry, loop;
    __ mov(ecx, Operand(ebp, kIndexOffset));
    __ jmp(&entry);
    __ bind(&loop);
    __ mov(edx, Operand(ebp, kArgumentsOffset));  // load arguments

    // Use inline caching to speed up access to arguments.
    Handle<Code> ic = masm->isolate()->builtins()->KeyedLoadIC_Initialize();
    __ call(ic, RelocInfo::CODE_TARGET);
    // It is important that we do not have a test instruction after the
    // call. A test instruction after the call is used to indicate that
    // we have generated an inline version of the keyed load. In this
    // case, we know that we are not generating a test instruction next.

    // Push the nth argument.
    __ push(eax);

    // Update the index on the stack and in register eax.
    __ mov(ecx, Operand(ebp, kIndexOffset));
    __ add(ecx, Immediate(1 << kSmiTagSize));
    __ mov(Operand(ebp, kIndexOffset), ecx);

    __ bind(&entry);
    __ cmp(ecx, Operand(ebp, kLimitOffset));
    __ j(not_equal, &loop);

    // Invoke the function.
    Label call_proxy;
    __ mov(eax, ecx);
    ParameterCount actual(eax);
    __ SmiUntag(eax);
    __ mov(edi, Operand(ebp, kFunctionOffset));
    __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
    __ j(not_equal, &call_proxy);
    __ InvokeFunction(edi, actual, CALL_FUNCTION,
                      NullCallWrapper(), CALL_AS_METHOD);

    frame_scope.GenerateLeaveFrame();
    __ ret(3 * kPointerSize);  // remove this, receiver, and arguments

    // Invoke the function proxy.
    __ bind(&call_proxy);
    __ push(edi);  // add function proxy as last argument
    __ inc(eax);
    __ Set(ebx, Immediate(0));
    __ SetCallKind(ecx, CALL_AS_METHOD);
    __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
    __ call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    // Leave internal frame.
  }
  __ ret(3 * kPointerSize);  // remove this, receiver, and arguments
}
924 
925 
926 // Allocate an empty JSArray. The allocated array is put into the result
927 // register. If the parameter initial_capacity is larger than zero an elements
928 // backing store is allocated with this size and filled with the hole values.
929 // Otherwise the elements backing store is set to the empty FixedArray.
// Allocate an empty JSArray. The allocated array is put into the result
// register. If the parameter initial_capacity is larger than zero an elements
// backing store is allocated with this size and filled with the hole values.
// Otherwise the elements backing store is set to the empty FixedArray.
// NOTE(review): a few statements in this listing were dropped by the
// doxygen extraction (flagged inline below) — verify against upstream
// src/ia32/builtins-ia32.cc.
static void AllocateEmptyJSArray(MacroAssembler* masm,
                                 Register array_function,
                                 Register result,
                                 Register scratch1,
                                 Register scratch2,
                                 Register scratch3,
                                 Label* gc_required) {
  const int initial_capacity = JSArray::kPreallocatedArrayElements;
  STATIC_ASSERT(initial_capacity >= 0);

  __ LoadInitialArrayMap(array_function, scratch2, scratch1, false);

  // Allocate the JSArray object together with space for a fixed array with the
  // requested elements.
  int size = JSArray::kSize;
  if (initial_capacity > 0) {
    size += FixedArray::SizeFor(initial_capacity);
  }
  __ AllocateInNewSpace(size,
                        result,
                        scratch2,
                        scratch3,
                        gc_required,
                        TAG_OBJECT);

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // scratch1: initial map
  // scratch2: start of next object
  __ mov(FieldOperand(result, JSObject::kMapOffset), scratch1);
  Factory* factory = masm->isolate()->factory();
  // NOTE(review): the first half of the store of the empty fixed array into
  // the properties field was dropped by the extraction here.
                        factory->empty_fixed_array());
  // Field JSArray::kElementsOffset is initialized later.
  __ mov(FieldOperand(result, JSArray::kLengthOffset), Immediate(0));

  // If no storage is requested for the elements array just set the empty
  // fixed array.
  if (initial_capacity == 0) {
    // NOTE(review): the first half of the store into the elements field was
    // dropped by the extraction here.
                        factory->empty_fixed_array());
    return;
  }

  // Calculate the location of the elements array and set elements array member
  // of the JSArray.
  // result: JSObject
  // scratch2: start of next object
  __ lea(scratch1, Operand(result, JSArray::kSize));
  __ mov(FieldOperand(result, JSArray::kElementsOffset), scratch1);

  // Initialize the FixedArray and fill it with holes. FixedArray length is
  // stored as a smi.
  // result: JSObject
  // scratch1: elements array
  // scratch2: start of next object
  __ mov(FieldOperand(scratch1, FixedArray::kMapOffset),
         factory->fixed_array_map());
  // NOTE(review): the first half of the length store was dropped by the
  // extraction here.
         Immediate(Smi::FromInt(initial_capacity)));

  // Fill the FixedArray with the hole value. Inline the code if short.
  // Reconsider loop unfolding if kPreallocatedArrayElements gets changed.
  static const int kLoopUnfoldLimit = 4;
  if (initial_capacity <= kLoopUnfoldLimit) {
    // Use a scratch register here to have only one reloc info when unfolding
    // the loop.
    __ mov(scratch3, factory->the_hole_value());
    for (int i = 0; i < initial_capacity; i++) {
      __ mov(FieldOperand(scratch1,
                          FixedArray::kHeaderSize + i * kPointerSize),
             scratch3);
    }
  } else {
    Label loop, entry;
    __ mov(scratch2, Immediate(initial_capacity));
    __ jmp(&entry);
    __ bind(&loop);
    // NOTE(review): the scale/offset arguments of this FieldOperand were
    // dropped by the extraction.
    __ mov(FieldOperand(scratch1,
                        scratch2,
           factory->the_hole_value());
    __ bind(&entry);
    __ dec(scratch2);
    __ j(not_sign, &loop);
  }
}
1019 
1020 
1021 // Allocate a JSArray with the number of elements stored in a register. The
1022 // register array_function holds the built-in Array function and the register
1023 // array_size holds the size of the array as a smi. The allocated array is put
1024 // into the result register and beginning and end of the FixedArray elements
1025 // storage is put into registers elements_array and elements_array_end (see
1026 // below for when that is not the case). If the parameter fill_with_holes is
1027 // true the allocated elements backing store is filled with the hole values
1028 // otherwise it is left uninitialized. When the backing store is filled the
1029 // register elements_array is scratched.
1030 static void AllocateJSArray(MacroAssembler* masm,
1031  Register array_function, // Array function.
1032  Register array_size, // As a smi, cannot be 0.
1033  Register result,
1034  Register elements_array,
1035  Register elements_array_end,
1036  Register scratch,
1037  bool fill_with_hole,
1038  Label* gc_required) {
1039  ASSERT(scratch.is(edi)); // rep stos destination
1040  ASSERT(!fill_with_hole || array_size.is(ecx)); // rep stos count
1041  ASSERT(!fill_with_hole || !result.is(eax)); // result is never eax
1042 
1043  __ LoadInitialArrayMap(array_function, scratch,
1044  elements_array, fill_with_hole);
1045 
1046  // Allocate the JSArray object together with space for a FixedArray with the
1047  // requested elements.
1048  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
1049  __ AllocateInNewSpace(JSArray::kSize + FixedArray::kHeaderSize,
1050  times_half_pointer_size, // array_size is a smi.
1051  array_size,
1052  result,
1053  elements_array_end,
1054  scratch,
1055  gc_required,
1056  TAG_OBJECT);
1057 
1058  // Allocated the JSArray. Now initialize the fields except for the elements
1059  // array.
1060  // result: JSObject
1061  // elements_array: initial map
1062  // elements_array_end: start of next object
1063  // array_size: size of array (smi)
1064  __ mov(FieldOperand(result, JSObject::kMapOffset), elements_array);
1065  Factory* factory = masm->isolate()->factory();
1066  __ mov(elements_array, factory->empty_fixed_array());
1067  __ mov(FieldOperand(result, JSArray::kPropertiesOffset), elements_array);
1068  // Field JSArray::kElementsOffset is initialized later.
1069  __ mov(FieldOperand(result, JSArray::kLengthOffset), array_size);
1070 
1071  // Calculate the location of the elements array and set elements array member
1072  // of the JSArray.
1073  // result: JSObject
1074  // elements_array_end: start of next object
1075  // array_size: size of array (smi)
1076  __ lea(elements_array, Operand(result, JSArray::kSize));
1077  __ mov(FieldOperand(result, JSArray::kElementsOffset), elements_array);
1078 
1079  // Initialize the fixed array. FixedArray length is stored as a smi.
1080  // result: JSObject
1081  // elements_array: elements array
1082  // elements_array_end: start of next object
1083  // array_size: size of array (smi)
1084  __ mov(FieldOperand(elements_array, FixedArray::kMapOffset),
1085  factory->fixed_array_map());
1086  // For non-empty JSArrays the length of the FixedArray and the JSArray is the
1087  // same.
1088  __ mov(FieldOperand(elements_array, FixedArray::kLengthOffset), array_size);
1089 
1090  // Fill the allocated FixedArray with the hole value if requested.
1091  // result: JSObject
1092  // elements_array: elements array
1093  if (fill_with_hole) {
1094  __ SmiUntag(array_size);
1095  __ lea(edi, Operand(elements_array,
1097  __ mov(eax, factory->the_hole_value());
1098  __ cld();
1099  // Do not use rep stos when filling less than kRepStosThreshold
1100  // words.
1101  const int kRepStosThreshold = 16;
1102  Label loop, entry, done;
1103  __ cmp(ecx, kRepStosThreshold);
1104  __ j(below, &loop); // Note: ecx > 0.
1105  __ rep_stos();
1106  __ jmp(&done);
1107  __ bind(&loop);
1108  __ stos();
1109  __ bind(&entry);
1110  __ cmp(edi, elements_array_end);
1111  __ j(below, &loop);
1112  __ bind(&done);
1113  }
1114 }
1115 
1116 
1117 // Create a new array for the built-in Array function. This function allocates
1118 // the JSArray object and the FixedArray elements array and initializes these.
1119 // If the Array cannot be constructed in native code the runtime is called. This
1120 // function assumes the following state:
1121 // edi: constructor (built-in Array function)
1122 // eax: argc
1123 // esp[0]: return address
1124 // esp[4]: last argument
1125 // This function is used for both construct and normal calls of Array. Whether
1126 // it is a construct call or not is indicated by the construct_call parameter.
1127 // The only difference between handling a construct call and a normal call is
1128 // that for a construct call the constructor function in edi needs to be
1129 // preserved for entering the generic code. In both cases argc in eax needs to
1130 // be preserved.
1131 static void ArrayNativeCode(MacroAssembler* masm,
1132  bool construct_call,
1133  Label* call_generic_code) {
1134  Label argc_one_or_more, argc_two_or_more, prepare_generic_code_call,
1135  empty_array, not_empty_array, finish, cant_transition_map, not_double;
1136 
1137  // Push the constructor and argc. No need to tag argc as a smi, as there will
1138  // be no garbage collection with this on the stack.
1139  int push_count = 0;
1140  if (construct_call) {
1141  push_count++;
1142  __ push(edi);
1143  }
1144  push_count++;
1145  __ push(eax);
1146 
1147  // Check for array construction with zero arguments.
1148  __ test(eax, eax);
1149  __ j(not_zero, &argc_one_or_more);
1150 
1151  __ bind(&empty_array);
1152  // Handle construction of an empty array.
1153  AllocateEmptyJSArray(masm,
1154  edi,
1155  eax,
1156  ebx,
1157  ecx,
1158  edi,
1159  &prepare_generic_code_call);
1160  __ IncrementCounter(masm->isolate()->counters()->array_function_native(), 1);
1161  __ pop(ebx);
1162  if (construct_call) {
1163  __ pop(edi);
1164  }
1165  __ ret(kPointerSize);
1166 
1167  // Check for one argument. Bail out if argument is not smi or if it is
1168  // negative.
1169  __ bind(&argc_one_or_more);
1170  __ cmp(eax, 1);
1171  __ j(not_equal, &argc_two_or_more);
1172  STATIC_ASSERT(kSmiTag == 0);
1173  __ mov(ecx, Operand(esp, (push_count + 1) * kPointerSize));
1174  __ test(ecx, ecx);
1175  __ j(not_zero, &not_empty_array);
1176 
1177  // The single argument passed is zero, so we jump to the code above used to
1178  // handle the case of no arguments passed. To adapt the stack for that we move
1179  // the return address and the pushed constructor (if pushed) one stack slot up
1180  // thereby removing the passed argument. Argc is also on the stack - at the
1181  // bottom - and it needs to be changed from 1 to 0 to have the call into the
1182  // runtime system work in case a GC is required.
1183  for (int i = push_count; i > 0; i--) {
1184  __ mov(eax, Operand(esp, i * kPointerSize));
1185  __ mov(Operand(esp, (i + 1) * kPointerSize), eax);
1186  }
1187  __ Drop(2); // Drop two stack slots.
1188  __ push(Immediate(0)); // Treat this as a call with argc of zero.
1189  __ jmp(&empty_array);
1190 
1191  __ bind(&not_empty_array);
1192  __ test(ecx, Immediate(kIntptrSignBit | kSmiTagMask));
1193  __ j(not_zero, &prepare_generic_code_call);
1194 
1195  // Handle construction of an empty array of a certain size. Get the size from
1196  // the stack and bail out if size is to large to actually allocate an elements
1197  // array.
1199  __ j(greater_equal, &prepare_generic_code_call);
1200 
1201  // edx: array_size (smi)
1202  // edi: constructor
1203  // esp[0]: argc (cannot be 0 here)
1204  // esp[4]: constructor (only if construct_call)
1205  // esp[8]: return address
1206  // esp[C]: argument
1207  AllocateJSArray(masm,
1208  edi,
1209  ecx,
1210  ebx,
1211  eax,
1212  edx,
1213  edi,
1214  true,
1215  &prepare_generic_code_call);
1216  Counters* counters = masm->isolate()->counters();
1217  __ IncrementCounter(counters->array_function_native(), 1);
1218  __ mov(eax, ebx);
1219  __ pop(ebx);
1220  if (construct_call) {
1221  __ pop(edi);
1222  }
1223  __ ret(2 * kPointerSize);
1224 
1225  // Handle construction of an array from a list of arguments.
1226  __ bind(&argc_two_or_more);
1227  STATIC_ASSERT(kSmiTag == 0);
1228  __ SmiTag(eax); // Convet argc to a smi.
1229  // eax: array_size (smi)
1230  // edi: constructor
1231  // esp[0] : argc
1232  // esp[4]: constructor (only if construct_call)
1233  // esp[8] : return address
1234  // esp[C] : last argument
1235  AllocateJSArray(masm,
1236  edi,
1237  eax,
1238  ebx,
1239  ecx,
1240  edx,
1241  edi,
1242  false,
1243  &prepare_generic_code_call);
1244  __ IncrementCounter(counters->array_function_native(), 1);
1245  __ push(ebx);
1246  __ mov(ebx, Operand(esp, kPointerSize));
1247  // ebx: argc
1248  // edx: elements_array_end (untagged)
1249  // esp[0]: JSArray
1250  // esp[4]: argc
1251  // esp[8]: constructor (only if construct_call)
1252  // esp[12]: return address
1253  // esp[16]: last argument
1254 
1255  // Location of the last argument
1256  int last_arg_offset = (construct_call ? 4 : 3) * kPointerSize;
1257  __ lea(edi, Operand(esp, last_arg_offset));
1258 
1259  // Location of the first array element (Parameter fill_with_holes to
1260  // AllocateJSArray is false, so the FixedArray is returned in ecx).
1261  __ lea(edx, Operand(ecx, FixedArray::kHeaderSize - kHeapObjectTag));
1262 
1263  Label has_non_smi_element;
1264 
1265  // ebx: argc
1266  // edx: location of the first array element
1267  // edi: location of the last argument
1268  // esp[0]: JSArray
1269  // esp[4]: argc
1270  // esp[8]: constructor (only if construct_call)
1271  // esp[12]: return address
1272  // esp[16]: last argument
1273  Label loop, entry;
1274  __ mov(ecx, ebx);
1275  __ jmp(&entry);
1276  __ bind(&loop);
1277  __ mov(eax, Operand(edi, ecx, times_pointer_size, 0));
1278  if (FLAG_smi_only_arrays) {
1279  __ JumpIfNotSmi(eax, &has_non_smi_element);
1280  }
1281  __ mov(Operand(edx, 0), eax);
1282  __ add(edx, Immediate(kPointerSize));
1283  __ bind(&entry);
1284  __ dec(ecx);
1285  __ j(greater_equal, &loop);
1286 
1287  // Remove caller arguments from the stack and return.
1288  // ebx: argc
1289  // esp[0]: JSArray
1290  // esp[4]: argc
1291  // esp[8]: constructor (only if construct_call)
1292  // esp[12]: return address
1293  // esp[16]: last argument
1294  __ bind(&finish);
1295  __ mov(ecx, Operand(esp, last_arg_offset - kPointerSize));
1296  __ pop(eax);
1297  __ pop(ebx);
1298  __ lea(esp, Operand(esp, ebx, times_pointer_size,
1299  last_arg_offset - kPointerSize));
1300  __ jmp(ecx);
1301 
1302  __ bind(&has_non_smi_element);
1303  // Double values are handled by the runtime.
1304  __ CheckMap(eax,
1305  masm->isolate()->factory()->heap_number_map(),
1306  &not_double,
1308  __ bind(&cant_transition_map);
1309  // Throw away the array that's only been partially constructed.
1310  __ pop(eax);
1311  __ UndoAllocationInNewSpace(eax);
1312  __ jmp(&prepare_generic_code_call);
1313 
1314  __ bind(&not_double);
1315  // Transition FAST_SMI_ELEMENTS to FAST_ELEMENTS.
1316  __ mov(ebx, Operand(esp, 0));
1318  __ LoadTransitionedArrayMapConditional(
1320  FAST_ELEMENTS,
1321  edi,
1322  eax,
1323  &cant_transition_map);
1325  __ RecordWriteField(ebx, HeapObject::kMapOffset, edi, eax,
1327 
1328  // Prepare to re-enter the loop
1329  __ lea(edi, Operand(esp, last_arg_offset));
1330 
1331  // Finish the array initialization loop.
1332  Label loop2;
1333  __ bind(&loop2);
1334  __ mov(eax, Operand(edi, ecx, times_pointer_size, 0));
1335  __ mov(Operand(edx, 0), eax);
1336  __ add(edx, Immediate(kPointerSize));
1337  __ dec(ecx);
1338  __ j(greater_equal, &loop2);
1339  __ jmp(&finish);
1340 
1341  // Restore argc and constructor before running the generic code.
1342  __ bind(&prepare_generic_code_call);
1343  __ pop(eax);
1344  if (construct_call) {
1345  __ pop(edi);
1346  }
1347  __ jmp(call_generic_code);
1348 }
1349 
1350 
1351 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
1352  // ----------- S t a t e -------------
1353  // -- eax : argc
1354  // -- esp[0] : return address
1355  // -- esp[4] : last argument
1356  // -----------------------------------
1357  Label generic_array_code;
1358 
1359  // Get the InternalArray function.
1360  __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, edi);
1361 
1362  if (FLAG_debug_code) {
1363  // Initial map for the builtin InternalArray function should be a map.
1365  // Will both indicate a NULL and a Smi.
1366  __ test(ebx, Immediate(kSmiTagMask));
1367  __ Assert(not_zero, "Unexpected initial map for InternalArray function");
1368  __ CmpObjectType(ebx, MAP_TYPE, ecx);
1369  __ Assert(equal, "Unexpected initial map for InternalArray function");
1370  }
1371 
1372  // Run the native code for the InternalArray function called as a normal
1373  // function.
1374  ArrayNativeCode(masm, false, &generic_array_code);
1375 
1376  // Jump to the generic internal array code in case the specialized code cannot
1377  // handle the construction.
1378  __ bind(&generic_array_code);
1379  Handle<Code> array_code =
1380  masm->isolate()->builtins()->InternalArrayCodeGeneric();
1381  __ jmp(array_code, RelocInfo::CODE_TARGET);
1382 }
1383 
1384 
1385 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
1386  // ----------- S t a t e -------------
1387  // -- eax : argc
1388  // -- esp[0] : return address
1389  // -- esp[4] : last argument
1390  // -----------------------------------
1391  Label generic_array_code;
1392 
1393  // Get the Array function.
1394  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, edi);
1395 
1396  if (FLAG_debug_code) {
1397  // Initial map for the builtin Array function should be a map.
1399  // Will both indicate a NULL and a Smi.
1400  __ test(ebx, Immediate(kSmiTagMask));
1401  __ Assert(not_zero, "Unexpected initial map for Array function");
1402  __ CmpObjectType(ebx, MAP_TYPE, ecx);
1403  __ Assert(equal, "Unexpected initial map for Array function");
1404  }
1405 
1406  // Run the native code for the Array function called as a normal function.
1407  ArrayNativeCode(masm, false, &generic_array_code);
1408 
1409  // Jump to the generic array code in case the specialized code cannot handle
1410  // the construction.
1411  __ bind(&generic_array_code);
1412  Handle<Code> array_code =
1413  masm->isolate()->builtins()->ArrayCodeGeneric();
1414  __ jmp(array_code, RelocInfo::CODE_TARGET);
1415 }
1416 
1417 
1418 void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
1419  // ----------- S t a t e -------------
1420  // -- eax : argc
1421  // -- edi : constructor
1422  // -- esp[0] : return address
1423  // -- esp[4] : last argument
1424  // -----------------------------------
1425  Label generic_constructor;
1426 
1427  if (FLAG_debug_code) {
1428  // The array construct code is only set for the global and natives
1429  // builtin Array functions which always have maps.
1430 
1431  // Initial map for the builtin Array function should be a map.
1433  // Will both indicate a NULL and a Smi.
1434  __ test(ebx, Immediate(kSmiTagMask));
1435  __ Assert(not_zero, "Unexpected initial map for Array function");
1436  __ CmpObjectType(ebx, MAP_TYPE, ecx);
1437  __ Assert(equal, "Unexpected initial map for Array function");
1438  }
1439 
1440  // Run the native code for the Array function called as constructor.
1441  ArrayNativeCode(masm, true, &generic_constructor);
1442 
1443  // Jump to the generic construct code in case the specialized code cannot
1444  // handle the construction.
1445  __ bind(&generic_constructor);
1446  Handle<Code> generic_construct_stub =
1447  masm->isolate()->builtins()->JSConstructStubGeneric();
1448  __ jmp(generic_construct_stub, RelocInfo::CODE_TARGET);
1449 }
1450 
1451 
1452 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
1453  // ----------- S t a t e -------------
1454  // -- eax : number of arguments
1455  // -- edi : constructor function
1456  // -- esp[0] : return address
1457  // -- esp[(argc - n) * 4] : arg[n] (zero-based)
1458  // -- esp[(argc + 1) * 4] : receiver
1459  // -----------------------------------
1460  Counters* counters = masm->isolate()->counters();
1461  __ IncrementCounter(counters->string_ctor_calls(), 1);
1462 
1463  if (FLAG_debug_code) {
1464  __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, ecx);
1465  __ cmp(edi, ecx);
1466  __ Assert(equal, "Unexpected String function");
1467  }
1468 
1469  // Load the first argument into eax and get rid of the rest
1470  // (including the receiver).
1471  Label no_arguments;
1472  __ test(eax, eax);
1473  __ j(zero, &no_arguments);
1474  __ mov(ebx, Operand(esp, eax, times_pointer_size, 0));
1475  __ pop(ecx);
1476  __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
1477  __ push(ecx);
1478  __ mov(eax, ebx);
1479 
1480  // Lookup the argument in the number to string cache.
1481  Label not_cached, argument_is_string;
1483  masm,
1484  eax, // Input.
1485  ebx, // Result.
1486  ecx, // Scratch 1.
1487  edx, // Scratch 2.
1488  false, // Input is known to be smi?
1489  &not_cached);
1490  __ IncrementCounter(counters->string_ctor_cached_number(), 1);
1491  __ bind(&argument_is_string);
1492  // ----------- S t a t e -------------
1493  // -- ebx : argument converted to string
1494  // -- edi : constructor function
1495  // -- esp[0] : return address
1496  // -----------------------------------
1497 
1498  // Allocate a JSValue and put the tagged pointer into eax.
1499  Label gc_required;
1500  __ AllocateInNewSpace(JSValue::kSize,
1501  eax, // Result.
1502  ecx, // New allocation top (we ignore it).
1503  no_reg,
1504  &gc_required,
1505  TAG_OBJECT);
1506 
1507  // Set the map.
1508  __ LoadGlobalFunctionInitialMap(edi, ecx);
1509  if (FLAG_debug_code) {
1512  __ Assert(equal, "Unexpected string wrapper instance size");
1514  __ Assert(equal, "Unexpected unused properties of string wrapper");
1515  }
1517 
1518  // Set properties and elements.
1519  Factory* factory = masm->isolate()->factory();
1520  __ Set(ecx, Immediate(factory->empty_fixed_array()));
1523 
1524  // Set the value.
1526 
1527  // Ensure the object is fully initialized.
1528  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
1529 
1530  // We're done. Return.
1531  __ ret(0);
1532 
1533  // The argument was not found in the number to string cache. Check
1534  // if it's a string already before calling the conversion builtin.
1535  Label convert_argument;
1536  __ bind(&not_cached);
1537  STATIC_ASSERT(kSmiTag == 0);
1538  __ JumpIfSmi(eax, &convert_argument);
1539  Condition is_string = masm->IsObjectStringType(eax, ebx, ecx);
1540  __ j(NegateCondition(is_string), &convert_argument);
1541  __ mov(ebx, eax);
1542  __ IncrementCounter(counters->string_ctor_string_value(), 1);
1543  __ jmp(&argument_is_string);
1544 
1545  // Invoke the conversion builtin and put the result into ebx.
1546  __ bind(&convert_argument);
1547  __ IncrementCounter(counters->string_ctor_conversions(), 1);
1548  {
1549  FrameScope scope(masm, StackFrame::INTERNAL);
1550  __ push(edi); // Preserve the function.
1551  __ push(eax);
1552  __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
1553  __ pop(edi);
1554  }
1555  __ mov(ebx, eax);
1556  __ jmp(&argument_is_string);
1557 
1558  // Load the empty string into ebx, remove the receiver from the
1559  // stack, and jump back to the case where the argument is a string.
1560  __ bind(&no_arguments);
1561  __ Set(ebx, Immediate(factory->empty_string()));
1562  __ pop(ecx);
1563  __ lea(esp, Operand(esp, kPointerSize));
1564  __ push(ecx);
1565  __ jmp(&argument_is_string);
1566 
1567  // At this point the argument is already a string. Call runtime to
1568  // create a string wrapper.
1569  __ bind(&gc_required);
1570  __ IncrementCounter(counters->string_ctor_gc_required(), 1);
1571  {
1572  FrameScope scope(masm, StackFrame::INTERNAL);
1573  __ push(ebx);
1574  __ CallRuntime(Runtime::kNewStringWrapper, 1);
1575  }
1576  __ ret(0);
1577 }
1578 
1579 
// Build an arguments adaptor frame on top of the current stack:
// caller ebp, the ARGUMENTS_ADAPTOR sentinel, the function, and the
// (smi-tagged) argument count. Expects eax = argc, edi = function.
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ push(ebp);
  __ mov(ebp, esp);

  // Store the arguments adaptor context sentinel.
  __ push(Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Push the function on the stack.
  __ push(edi);

  // Preserve the number of arguments on the stack. Must preserve eax,
  // ebx and ecx because these registers are used when copying the
  // arguments and the receiver.
  STATIC_ASSERT(kSmiTagSize == 1);
  // lea computes eax * 2 + kSmiTag, i.e. the smi-tagged argc, without
  // clobbering eax itself.
  __ lea(edi, Operand(eax, eax, times_1, kSmiTag));
  __ push(edi);
}
1597 
1598 
1599 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1600  // Retrieve the number of arguments from the stack.
1602 
1603  // Leave the frame.
1604  __ leave();
1605 
1606  // Remove caller arguments from the stack.
1607  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
1608  __ pop(ecx);
1609  __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize)); // 1 ~ receiver
1610  __ push(ecx);
1611 }
1612 
1613 
1614 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1615  // ----------- S t a t e -------------
1616  // -- eax : actual number of arguments
1617  // -- ebx : expected number of arguments
1618  // -- ecx : call kind information
1619  // -- edx : code entry to call
1620  // -----------------------------------
1621 
1622  Label invoke, dont_adapt_arguments;
1623  __ IncrementCounter(masm->isolate()->counters()->arguments_adaptors(), 1);
1624 
1625  Label enough, too_few;
1626  __ cmp(eax, ebx);
1627  __ j(less, &too_few);
1629  __ j(equal, &dont_adapt_arguments);
1630 
1631  { // Enough parameters: Actual >= expected.
1632  __ bind(&enough);
1633  EnterArgumentsAdaptorFrame(masm);
1634 
1635  // Copy receiver and all expected arguments.
1636  const int offset = StandardFrameConstants::kCallerSPOffset;
1637  __ lea(eax, Operand(ebp, eax, times_4, offset));
1638  __ mov(edi, -1); // account for receiver
1639 
1640  Label copy;
1641  __ bind(&copy);
1642  __ inc(edi);
1643  __ push(Operand(eax, 0));
1644  __ sub(eax, Immediate(kPointerSize));
1645  __ cmp(edi, ebx);
1646  __ j(less, &copy);
1647  __ jmp(&invoke);
1648  }
1649 
1650  { // Too few parameters: Actual < expected.
1651  __ bind(&too_few);
1652  EnterArgumentsAdaptorFrame(masm);
1653 
1654  // Copy receiver and all actual arguments.
1655  const int offset = StandardFrameConstants::kCallerSPOffset;
1656  __ lea(edi, Operand(ebp, eax, times_4, offset));
1657  // ebx = expected - actual.
1658  __ sub(ebx, eax);
1659  // eax = -actual - 1
1660  __ neg(eax);
1661  __ sub(eax, Immediate(1));
1662 
1663  Label copy;
1664  __ bind(&copy);
1665  __ inc(eax);
1666  __ push(Operand(edi, 0));
1667  __ sub(edi, Immediate(kPointerSize));
1668  __ test(eax, eax);
1669  __ j(not_zero, &copy);
1670 
1671  // Fill remaining expected arguments with undefined values.
1672  Label fill;
1673  __ bind(&fill);
1674  __ inc(eax);
1675  __ push(Immediate(masm->isolate()->factory()->undefined_value()));
1676  __ cmp(eax, ebx);
1677  __ j(less, &fill);
1678  }
1679 
1680  // Call the entry point.
1681  __ bind(&invoke);
1682  // Restore function pointer.
1684  __ call(edx);
1685 
1686  // Store offset of return address for deoptimizer.
1687  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
1688 
1689  // Leave frame and return.
1690  LeaveArgumentsAdaptorFrame(masm);
1691  __ ret(0);
1692 
1693  // -------------------------------------------
1694  // Dont adapt arguments.
1695  // -------------------------------------------
1696  __ bind(&dont_adapt_arguments);
1697  __ jmp(edx);
1698 }
1699 
1700 
1701 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1702  CpuFeatures::TryForceFeatureScope scope(SSE2);
1703  if (!CpuFeatures::IsSupported(SSE2) && FLAG_debug_code) {
1704  __ Abort("Unreachable code: Cannot optimize without SSE2 support.");
1705  return;
1706  }
1707 
1708  // Get the loop depth of the stack guard check. This is recorded in
1709  // a test(eax, depth) instruction right after the call.
1710  Label stack_check;
1711  __ mov(ebx, Operand(esp, 0)); // return address
1712  if (FLAG_debug_code) {
1713  __ cmpb(Operand(ebx, 0), Assembler::kTestAlByte);
1714  __ Assert(equal, "test eax instruction not found after loop stack check");
1715  }
1716  __ movzx_b(ebx, Operand(ebx, 1)); // depth
1717 
1718  // Get the loop nesting level at which we allow OSR from the
1719  // unoptimized code and check if we want to do OSR yet. If not we
1720  // should perform a stack guard check so we can get interrupts while
1721  // waiting for on-stack replacement.
1726  __ j(greater, &stack_check);
1727 
1728  // Pass the function to optimize as the argument to the on-stack
1729  // replacement runtime function.
1730  {
1731  FrameScope scope(masm, StackFrame::INTERNAL);
1732  __ push(eax);
1733  __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
1734  }
1735 
1736  // If the result was -1 it means that we couldn't optimize the
1737  // function. Just return and continue in the unoptimized version.
1738  Label skip;
1739  __ cmp(eax, Immediate(Smi::FromInt(-1)));
1740  __ j(not_equal, &skip, Label::kNear);
1741  __ ret(0);
1742 
1743  // Insert a stack guard check so that if we decide not to perform
1744  // on-stack replacement right away, the function calling this stub can
1745  // still be interrupted.
1746  __ bind(&stack_check);
1747  Label ok;
1748  ExternalReference stack_limit =
1749  ExternalReference::address_of_stack_limit(masm->isolate());
1750  __ cmp(esp, Operand::StaticVariable(stack_limit));
1751  __ j(above_equal, &ok, Label::kNear);
1752  StackCheckStub stub;
1753  __ TailCallStub(&stub);
1754  if (FLAG_debug_code) {
1755  __ Abort("Unreachable code: returned from tail call.");
1756  }
1757  __ bind(&ok);
1758  __ ret(0);
1759 
1760  __ bind(&skip);
1761  // Untag the AST id and push it on the stack.
1762  __ SmiUntag(eax);
1763  __ push(eax);
1764 
1765  // Generate the code for doing the frame-to-frame translation using
1766  // the deoptimizer infrastructure.
1767  Deoptimizer::EntryGenerator generator(masm, Deoptimizer::OSR);
1768  generator.Generate();
1769 }
1770 
1771 
1772 #undef __
1773 }
1774 } // namespace v8::internal
1775 
1776 #endif // V8_TARGET_ARCH_IA32
const intptr_t kSmiTagMask
Definition: v8.h:4016
static const int kCodeOffset
Definition: objects.h:5796
static const int kCodeEntryOffset
Definition: objects.h:6182
static const int kPrototypeOrInitialMapOffset
Definition: objects.h:6183
static const int kAllowOSRAtLoopNestingLevelOffset
Definition: objects.h:4562
static Smi * FromInt(int value)
Definition: objects-inl.h:981
const intptr_t kIntptrSignBit
Definition: globals.h:233
const Register esp
static const int kGlobalReceiverOffset
Definition: objects.h:6288
static const int kConstructionCountOffset
Definition: objects.h:5888
static const int kNativeByteOffset
Definition: objects.h:5971
static bool IsSupported(CpuFeature f)
static const int kStrictModeBitWithinByte
Definition: objects.h:5957
#define ASSERT(condition)
Definition: checks.h:270
const int kPointerSizeLog2
Definition: globals.h:232
static const int kInstanceSizeOffset
Definition: objects.h:5147
static const int kUnusedPropertyFieldsOffset
Definition: objects.h:5159
static const byte kTestAlByte
static const int kContextOffset
Definition: objects.h:6187
const Register edi
static const int kSize
Definition: objects.h:8333
const Register ebp
STATIC_ASSERT((FixedDoubleArray::kHeaderSize &kDoubleAlignmentMask)==0)
const Register eax
BuiltinExtraArguments
Definition: builtins.h:35
static const int kDontAdaptArgumentsSentinel
Definition: objects.h:5791
const int kPointerSize
Definition: globals.h:220
Operand FieldOperand(Register object, int offset)
const Register ecx
const int kHeapObjectTag
Definition: v8.h:4009
#define __
static const int kPropertiesOffset
Definition: objects.h:2171
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if expose natives in global object expose gc extension number of stack frames to capture disable builtin 
natives files print a stack trace if an assertion failure occurs use random jit cookie to mask large constants trace lazy optimization use adaptive optimizations prepare for turning on always opt minimum length for automatic enable preparsing maximum number of optimization attempts before giving up cache prototype transitions automatically set the debug break flag when debugger commands are in the queue always cause a debug break before aborting maximum length of function source code printed in a stack trace max size of the new max size of the old max size of executable always perform global GCs print one trace line following each garbage collection do not print trace line after scavenger collection print more details following each garbage collection print amount of external allocated memory after each time it is adjusted flush code that we expect not to use again before full gc do incremental marking steps track object counts and memory usage use caching Perform compaction on every full GC Never perform compaction on full GC testing only Compact code space on full incremental collections Default seed for initializing random generator(0, the default, means to use system random).") DEFINE_bool(use_verbose_printer
static const int kInObjectPropertiesOffset
Definition: objects.h:5149
static const int kElementsOffset
Definition: objects.h:2172
static const int kNativeBitWithinByte
Definition: objects.h:5963
static const int kArgcOffset
Definition: frames-ia32.h:81
static const int kFunctionArgOffset
Definition: frames-ia32.h:79
static const int kLengthOffset
Definition: objects.h:8332
static int SizeFor(int length)
Definition: objects.h:2353
static const int kHeaderSize
Definition: objects.h:2296
static const int kSize
Definition: objects.h:6386
static const int kMapOffset
Definition: objects.h:1261
static const int kLengthOffset
Definition: objects.h:2295
const Register ebx
static const int kReceiverArgOffset
Definition: frames-ia32.h:80
static const int kFormalParameterCountOffset
Definition: objects.h:5853
static const int kStrictModeByteOffset
Definition: objects.h:5967
const int kSmiTagSize
Definition: v8.h:4015
static const int kHeaderSize
Definition: objects.h:4549
Condition NegateCondition(Condition cond)
const Register esi
const int kSmiTag
Definition: v8.h:4014
static const int kArgvOffset
Definition: frames-ia32.h:82
static const int kHeaderSize
Definition: objects.h:2173
static void GenerateLookupNumberStringCache(MacroAssembler *masm, Register object, Register result, Register scratch1, Register scratch2, Register scratch3, bool object_is_smi, Label *not_found)
static const int kPreallocatedArrayElements
Definition: objects.h:8329
const Register no_reg
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra code(assertions) for debugging") DEFINE_bool(code_comments
static const int kValueOffset
Definition: objects.h:6385
static const int kNativeContextOffset
Definition: objects.h:6286
const Register edx
static const int kSharedFunctionInfoOffset
Definition: objects.h:6185
static const int kInitialMaxFastElementArray
Definition: objects.h:2161
static const int kPreAllocatedPropertyFieldsOffset
Definition: objects.h:5152