v8 3.14.5 (node 0.10.28)
V8 is Google's open source JavaScript engine
builtins.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #include "api.h"
31 #include "arguments.h"
32 #include "bootstrapper.h"
33 #include "builtins.h"
34 #include "gdb-jit.h"
35 #include "ic-inl.h"
36 #include "heap-profiler.h"
37 #include "mark-compact.h"
38 #include "stub-cache.h"
39 #include "vm-state-inl.h"
40 
41 namespace v8 {
42 namespace internal {
43 
44 namespace {
45 
46 // Arguments object passed to C++ builtins.
47 template <BuiltinExtraArguments extra_args>
48 class BuiltinArguments : public Arguments {
49  public:
50  BuiltinArguments(int length, Object** arguments)
51  : Arguments(length, arguments) { }
52 
53  Object*& operator[] (int index) {
54  ASSERT(index < length());
55  return Arguments::operator[](index);
56  }
57 
58  template <class S> Handle<S> at(int index) {
59  ASSERT(index < length());
60  return Arguments::at<S>(index);
61  }
62 
63  Handle<Object> receiver() {
64  return Arguments::at<Object>(0);
65  }
66 
67  Handle<JSFunction> called_function() {
68  STATIC_ASSERT(extra_args == NEEDS_CALLED_FUNCTION);
69  return Arguments::at<JSFunction>(Arguments::length() - 1);
70  }
71 
72  // Gets the total number of arguments including the receiver (but
73  // excluding extra arguments).
74  int length() const {
75  STATIC_ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
76  return Arguments::length();
77  }
78 
79 #ifdef DEBUG
80  void Verify() {
81  // Check we have at least the receiver.
82  ASSERT(Arguments::length() >= 1);
83  }
84 #endif
85 };
86 
87 
88 // Specialize BuiltinArguments for the called function extra argument.
89 
90 template <>
91 int BuiltinArguments<NEEDS_CALLED_FUNCTION>::length() const {
92  return Arguments::length() - 1;
93 }
94 
95 #ifdef DEBUG
96 template <>
97 void BuiltinArguments<NEEDS_CALLED_FUNCTION>::Verify() {
98  // Check we have at least the receiver and the called function.
99  ASSERT(Arguments::length() >= 2);
100  // Make sure cast to JSFunction succeeds.
101  called_function();
102 }
103 #endif
104 
105 
106 #define DEF_ARG_TYPE(name, spec) \
107  typedef BuiltinArguments<spec> name##ArgumentsType;
108 BUILTIN_LIST_C(DEF_ARG_TYPE)
109 #undef DEF_ARG_TYPE
110 
111 } // namespace
112 
113 // ----------------------------------------------------------------------------
114 // Support macro for defining builtins in C++.
115 // ----------------------------------------------------------------------------
116 //
117 // A builtin function is defined by writing:
118 //
119 // BUILTIN(name) {
120 // ...
121 // }
122 //
123 // In the body of the builtin function the arguments can be accessed
124 // through the BuiltinArguments object args.
125 
126 #ifdef DEBUG
127 
128 #define BUILTIN(name) \
129  MUST_USE_RESULT static MaybeObject* Builtin_Impl_##name( \
130  name##ArgumentsType args, Isolate* isolate); \
131  MUST_USE_RESULT static MaybeObject* Builtin_##name( \
132  name##ArgumentsType args, Isolate* isolate) { \
133  ASSERT(isolate == Isolate::Current()); \
134  args.Verify(); \
135  return Builtin_Impl_##name(args, isolate); \
136  } \
137  MUST_USE_RESULT static MaybeObject* Builtin_Impl_##name( \
138  name##ArgumentsType args, Isolate* isolate)
139 
140 #else // For release mode.
141 
142 #define BUILTIN(name) \
143  static MaybeObject* Builtin_##name(name##ArgumentsType args, Isolate* isolate)
144 
145 #endif
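// Illustrative note (added for clarity, not part of builtins.cc): applying
// the release-mode BUILTIN macro above to a name such as ArrayPush simply
// declares a C++ function that receives the packaged arguments object and
// the current isolate, roughly:
//
//   static MaybeObject* Builtin_ArrayPush(ArrayPushArgumentsType args,
//                                         Isolate* isolate) {
//     // body supplied by the BUILTIN(ArrayPush) { ... } definition below
//   }
//
// In debug mode the macro additionally routes the body through a
// Builtin_Impl_ helper so that args.Verify() runs before the implementation.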
146 
147 
148 static inline bool CalledAsConstructor(Isolate* isolate) {
149 #ifdef DEBUG
150  // Calculate the result using a full stack frame iterator and check
151  // that the state of the stack is as we assume it to be in the
152  // code below.
153  StackFrameIterator it;
154  ASSERT(it.frame()->is_exit());
155  it.Advance();
156  StackFrame* frame = it.frame();
157  bool reference_result = frame->is_construct();
158 #endif
159  Address fp = Isolate::c_entry_fp(isolate->thread_local_top());
160  // Because we know fp points to an exit frame we can use the relevant
161  // part of ExitFrame::ComputeCallerState directly.
162  const int kCallerOffset = ExitFrameConstants::kCallerFPOffset;
163  Address caller_fp = Memory::Address_at(fp + kCallerOffset);
164  // This inlines the part of StackFrame::ComputeType that grabs the
165  // type of the current frame. Note that StackFrame::ComputeType
166  // has been specialized for each architecture so if any one of them
167  // changes this code has to be changed as well.
168  const int kMarkerOffset = StandardFrameConstants::kMarkerOffset;
169  const Smi* kConstructMarker = Smi::FromInt(StackFrame::CONSTRUCT);
170  Object* marker = Memory::Object_at(caller_fp + kMarkerOffset);
171  bool result = (marker == kConstructMarker);
172  ASSERT_EQ(result, reference_result);
173  return result;
174 }
175 
176 // ----------------------------------------------------------------------------
177 
178 BUILTIN(Illegal) {
179  UNREACHABLE();
180  return isolate->heap()->undefined_value(); // Make compiler happy.
181 }
182 
183 
184 BUILTIN(EmptyFunction) {
185  return isolate->heap()->undefined_value();
186 }
187 
188 
189 static MaybeObject* ArrayCodeGenericCommon(Arguments* args,
190  Isolate* isolate,
191  JSFunction* constructor) {
192  Heap* heap = isolate->heap();
193  isolate->counters()->array_function_runtime()->Increment();
194 
195  JSArray* array;
196  if (CalledAsConstructor(isolate)) {
197  array = JSArray::cast((*args)[0]);
198  // Initialize elements and length in case later allocations fail so that the
199  // array object is initialized in a valid state.
200  array->set_length(Smi::FromInt(0));
201  array->set_elements(heap->empty_fixed_array());
202  if (!FLAG_smi_only_arrays) {
203  Context* native_context = isolate->context()->native_context();
204  if (array->GetElementsKind() == GetInitialFastElementsKind() &&
205  !native_context->js_array_maps()->IsUndefined()) {
206  FixedArray* map_array =
207  FixedArray::cast(native_context->js_array_maps());
208  array->set_map(Map::cast(map_array->
209  get(TERMINAL_FAST_ELEMENTS_KIND)));
210  }
211  }
212  } else {
213  // Allocate the JS Array
214  MaybeObject* maybe_obj = heap->AllocateJSObject(constructor);
215  if (!maybe_obj->To(&array)) return maybe_obj;
216  }
217 
218  // Optimize the case where there is one argument and the argument is a
219  // small smi.
220  if (args->length() == 2) {
221  Object* obj = (*args)[1];
222  if (obj->IsSmi()) {
223  int len = Smi::cast(obj)->value();
224  if (len >= 0 && len < JSObject::kInitialMaxFastElementArray) {
225  Object* fixed_array;
226  { MaybeObject* maybe_obj = heap->AllocateFixedArrayWithHoles(len);
227  if (!maybe_obj->ToObject(&fixed_array)) return maybe_obj;
228  }
229  ElementsKind elements_kind = array->GetElementsKind();
230  if (!IsFastHoleyElementsKind(elements_kind)) {
231  elements_kind = GetHoleyElementsKind(elements_kind);
232  MaybeObject* maybe_array =
233  array->TransitionElementsKind(elements_kind);
234  if (maybe_array->IsFailure()) return maybe_array;
235  }
236  // We do not use SetContent to skip the unnecessary elements type check.
237  array->set_elements(FixedArray::cast(fixed_array));
238  array->set_length(Smi::cast(obj));
239  return array;
240  }
241  }
242  // Take the argument as the length.
243  { MaybeObject* maybe_obj = array->Initialize(0);
244  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
245  }
246  return array->SetElementsLength((*args)[1]);
247  }
248 
249  // Optimize the case where there are no parameters passed.
250  if (args->length() == 1) {
251  return array->Initialize(JSArray::kPreallocatedArrayElements);
252  }
253 
254  // Set length and elements on the array.
255  int number_of_elements = args->length() - 1;
256  MaybeObject* maybe_object =
257  array->EnsureCanContainElements(args, 1, number_of_elements,
258  ALLOW_CONVERTED_DOUBLE_ELEMENTS);
259  if (maybe_object->IsFailure()) return maybe_object;
260 
261  // Allocate an appropriately typed elements array.
262  MaybeObject* maybe_elms;
263  ElementsKind elements_kind = array->GetElementsKind();
264  if (IsFastDoubleElementsKind(elements_kind)) {
265  maybe_elms = heap->AllocateUninitializedFixedDoubleArray(
266  number_of_elements);
267  } else {
268  maybe_elms = heap->AllocateFixedArrayWithHoles(number_of_elements);
269  }
270  FixedArrayBase* elms;
271  if (!maybe_elms->To<FixedArrayBase>(&elms)) return maybe_elms;
272 
273  // Fill in the content
274  switch (array->GetElementsKind()) {
275  case FAST_HOLEY_SMI_ELEMENTS:
276  case FAST_SMI_ELEMENTS: {
277  FixedArray* smi_elms = FixedArray::cast(elms);
278  for (int index = 0; index < number_of_elements; index++) {
279  smi_elms->set(index, (*args)[index+1], SKIP_WRITE_BARRIER);
280  }
281  break;
282  }
283  case FAST_HOLEY_ELEMENTS:
284  case FAST_ELEMENTS: {
285  AssertNoAllocation no_gc;
286  WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
287  FixedArray* object_elms = FixedArray::cast(elms);
288  for (int index = 0; index < number_of_elements; index++) {
289  object_elms->set(index, (*args)[index+1], mode);
290  }
291  break;
292  }
293  case FAST_HOLEY_DOUBLE_ELEMENTS:
294  case FAST_DOUBLE_ELEMENTS: {
295  FixedDoubleArray* double_elms = FixedDoubleArray::cast(elms);
296  for (int index = 0; index < number_of_elements; index++) {
297  double_elms->set(index, (*args)[index+1]->Number());
298  }
299  break;
300  }
301  default:
302  UNREACHABLE();
303  break;
304  }
305 
306  array->set_elements(elms);
307  array->set_length(Smi::FromInt(number_of_elements));
308  return array;
309 }
310 
311 
312 BUILTIN(InternalArrayCodeGeneric) {
313  return ArrayCodeGenericCommon(
314  &args,
315  isolate,
316  isolate->context()->native_context()->internal_array_function());
317 }
318 
319 
320 BUILTIN(ArrayCodeGeneric) {
321  return ArrayCodeGenericCommon(
322  &args,
323  isolate,
324  isolate->context()->native_context()->array_function());
325 }
326 
327 
328 static void MoveElements(Heap* heap,
329  AssertNoAllocation* no_gc,
330  FixedArray* dst,
331  int dst_index,
332  FixedArray* src,
333  int src_index,
334  int len) {
335  if (len == 0) return;
336  ASSERT(dst->map() != HEAP->fixed_cow_array_map());
337  memmove(dst->data_start() + dst_index,
338  src->data_start() + src_index,
339  len * kPointerSize);
340  WriteBarrierMode mode = dst->GetWriteBarrierMode(*no_gc);
341  if (mode == UPDATE_WRITE_BARRIER) {
342  heap->RecordWrites(dst->address(), dst->OffsetOfElementAt(dst_index), len);
343  }
344  heap->incremental_marking()->RecordWrites(dst);
345 }
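// Note (added for clarity, not part of builtins.cc): MoveElements uses
// memmove because the source and destination ranges may overlap, which is
// exactly the situation in ArrayShift, ArrayUnshift and ArraySplice below.
// After the raw copy it tells the heap which destination slots changed so
// that the write barrier and the incremental marker stay consistent.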
346 
347 
348 static void FillWithHoles(Heap* heap, FixedArray* dst, int from, int to) {
349  ASSERT(dst->map() != heap->fixed_cow_array_map());
350  MemsetPointer(dst->data_start() + from, heap->the_hole_value(), to - from);
351 }
352 
353 
354 static FixedArray* LeftTrimFixedArray(Heap* heap,
355  FixedArray* elms,
356  int to_trim) {
357  ASSERT(elms->map() != HEAP->fixed_cow_array_map());
358  // For now this trick is only applied to fixed arrays in new and paged space.
359  // In large object space the object's start must coincide with chunk
360  // and thus the trick is just not applicable.
361  ASSERT(!HEAP->lo_space()->Contains(elms));
362 
366 
367  Object** former_start = HeapObject::RawField(elms, 0);
368 
369  const int len = elms->length();
370 
371  if (to_trim > FixedArray::kHeaderSize / kPointerSize &&
372  !heap->new_space()->Contains(elms)) {
373  // If we are doing a big trim in old space then we zap the space that was
374  // formerly part of the array so that the GC (aided by the card-based
375  // remembered set) won't find pointers to new-space there.
376  Object** zap = reinterpret_cast<Object**>(elms->address());
377  zap++; // Header of filler must be at least one word so skip that.
378  for (int i = 1; i < to_trim; i++) {
379  *zap++ = Smi::FromInt(0);
380  }
381  }
382  // Technically in new space this write might be omitted (except for
383  // debug mode which iterates through the heap), but to play safer
384  // we still do it.
385  heap->CreateFillerObjectAt(elms->address(), to_trim * kPointerSize);
386 
387  former_start[to_trim] = heap->fixed_array_map();
388  former_start[to_trim + 1] = Smi::FromInt(len - to_trim);
389 
390  // Maintain marking consistency for HeapObjectIterator and
391  // IncrementalMarking.
392  int size_delta = to_trim * kPointerSize;
393  if (heap->marking()->TransferMark(elms->address(),
394  elms->address() + size_delta)) {
395  MemoryChunk::IncrementLiveBytesFromMutator(elms->address(), -size_delta);
396  }
397 
398  HEAP_PROFILE(heap, ObjectMoveEvent(elms->address(),
399  elms->address() + size_delta));
400  return FixedArray::cast(HeapObject::FromAddress(
401  elms->address() + to_trim * kPointerSize));
402 }
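// Illustrative sketch (added for clarity, not part of builtins.cc): the
// left trim above avoids copying element data. Assuming the usual
// FixedArray layout, trimming with to_trim == 1 turns
//
//   [map][length = len][e0][e1]...[e(len-1)]
//
// into a one-word filler, with the new map written over the old length
// word and the new length written over e0's slot:
//
//   [filler][map][length = len - 1][e1]...[e(len-1)]
//
// so the surviving FixedArray simply begins to_trim * kPointerSize bytes
// further into the same allocation.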
403 
404 
405 static bool ArrayPrototypeHasNoElements(Heap* heap,
406  Context* native_context,
407  JSObject* array_proto) {
408  // This method depends on non writability of Object and Array prototype
409  // fields.
410  if (array_proto->elements() != heap->empty_fixed_array()) return false;
411  // Object.prototype
412  Object* proto = array_proto->GetPrototype();
413  if (proto == heap->null_value()) return false;
414  array_proto = JSObject::cast(proto);
415  if (array_proto != native_context->initial_object_prototype()) return false;
416  if (array_proto->elements() != heap->empty_fixed_array()) return false;
417  return array_proto->GetPrototype()->IsNull();
418 }
419 
420 
421 MUST_USE_RESULT
422 static inline MaybeObject* EnsureJSArrayWithWritableFastElements(
423  Heap* heap, Object* receiver, Arguments* args, int first_added_arg) {
424  if (!receiver->IsJSArray()) return NULL;
425  JSArray* array = JSArray::cast(receiver);
426  HeapObject* elms = array->elements();
427  Map* map = elms->map();
428  if (map == heap->fixed_array_map()) {
429  if (args == NULL || array->HasFastObjectElements()) return elms;
430  if (array->HasFastDoubleElements()) {
431  ASSERT(elms == heap->empty_fixed_array());
432  MaybeObject* maybe_transition =
433  array->TransitionElementsKind(FAST_ELEMENTS);
434  if (maybe_transition->IsFailure()) return maybe_transition;
435  return elms;
436  }
437  } else if (map == heap->fixed_cow_array_map()) {
438  MaybeObject* maybe_writable_result = array->EnsureWritableFastElements();
439  if (args == NULL || array->HasFastObjectElements() ||
440  maybe_writable_result->IsFailure()) {
441  return maybe_writable_result;
442  }
443  } else {
444  return NULL;
445  }
446 
447  // Need to ensure that the arguments passed in args can be contained in
448  // the array.
449  int args_length = args->length();
450  if (first_added_arg >= args_length) return array->elements();
451 
452  MaybeObject* maybe_array = array->EnsureCanContainElements(
453  args,
454  first_added_arg,
455  args_length - first_added_arg,
456  DONT_ALLOW_DOUBLE_ELEMENTS);
457  if (maybe_array->IsFailure()) return maybe_array;
458  return array->elements();
459 }
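// Note (added for clarity, not part of builtins.cc): the helper above uses
// a three-way return convention. NULL means "this receiver cannot be
// handled here, fall back to the JavaScript builtin"; a Failure propagates
// an allocation problem; otherwise the (possibly newly writable) elements
// FixedArray is returned. The callers below check for NULL before casting.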
460 
461 
462 static inline bool IsJSArrayFastElementMovingAllowed(Heap* heap,
463  JSArray* receiver) {
464  if (!FLAG_clever_optimizations) return false;
465  Context* native_context = heap->isolate()->context()->native_context();
466  JSObject* array_proto =
467  JSObject::cast(native_context->array_function()->prototype());
468  return receiver->GetPrototype() == array_proto &&
469  ArrayPrototypeHasNoElements(heap, native_context, array_proto);
470 }
471 
472 
473 MUST_USE_RESULT static MaybeObject* CallJsBuiltin(
474  Isolate* isolate,
475  const char* name,
476  BuiltinArguments<NO_EXTRA_ARGUMENTS> args) {
477  HandleScope handleScope(isolate);
478 
479  Handle<Object> js_builtin =
480  GetProperty(Handle<JSObject>(isolate->native_context()->builtins()),
481  name);
482  Handle<JSFunction> function = Handle<JSFunction>::cast(js_builtin);
483  int argc = args.length() - 1;
484  ScopedVector<Handle<Object> > argv(argc);
485  for (int i = 0; i < argc; ++i) {
486  argv[i] = args.at<Object>(i + 1);
487  }
488  bool pending_exception;
489  Handle<Object> result = Execution::Call(function,
490  args.receiver(),
491  argc,
492  argv.start(),
493  &pending_exception);
494  if (pending_exception) return Failure::Exception();
495  return *result;
496 }
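// Note (added for clarity, not part of builtins.cc): CallJsBuiltin is the
// shared slow path for the array builtins below. Whenever a receiver does
// not have writable fast elements, or an argument is not a simple Smi, the
// C++ code bails out by name, e.g. CallJsBuiltin(isolate, "ArrayPush", args),
// and the equivalent JavaScript implementation registered in the native
// context's builtins object finishes the job.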
497 
498 
499 BUILTIN(ArrayPush) {
500  Heap* heap = isolate->heap();
501  Object* receiver = *args.receiver();
502  Object* elms_obj;
503  { MaybeObject* maybe_elms_obj =
504  EnsureJSArrayWithWritableFastElements(heap, receiver, &args, 1);
505  if (maybe_elms_obj == NULL) {
506  return CallJsBuiltin(isolate, "ArrayPush", args);
507  }
508  if (!maybe_elms_obj->ToObject(&elms_obj)) return maybe_elms_obj;
509  }
510  FixedArray* elms = FixedArray::cast(elms_obj);
511  JSArray* array = JSArray::cast(receiver);
512 
513  int len = Smi::cast(array->length())->value();
514  int to_add = args.length() - 1;
515  if (to_add == 0) {
516  return Smi::FromInt(len);
517  }
518  // Currently fixed arrays cannot grow too big, so
519  // we should never hit this case.
520  ASSERT(to_add <= (Smi::kMaxValue - len));
521 
522  int new_length = len + to_add;
523 
524  if (new_length > elms->length()) {
525  // New backing storage is needed.
526  int capacity = new_length + (new_length >> 1) + 16;
527  Object* obj;
528  { MaybeObject* maybe_obj = heap->AllocateUninitializedFixedArray(capacity);
529  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
530  }
531  FixedArray* new_elms = FixedArray::cast(obj);
532 
533  ElementsKind kind = array->GetElementsKind();
534  CopyObjectToObjectElements(elms, kind, 0, new_elms, kind, 0, len);
535  FillWithHoles(heap, new_elms, new_length, capacity);
536 
537  elms = new_elms;
538  }
539 
540  // Add the provided values.
541  AssertNoAllocation no_gc;
542  WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
543  for (int index = 0; index < to_add; index++) {
544  elms->set(index + len, args[index + 1], mode);
545  }
546 
547  if (elms != array->elements()) {
548  array->set_elements(elms);
549  }
550 
551  // Set the length.
552  array->set_length(Smi::FromInt(new_length));
553  return Smi::FromInt(new_length);
554 }
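// Worked example (added for clarity, not part of builtins.cc): the growth
// formula above, capacity = new_length + (new_length >> 1) + 16, grows the
// backing store by about 1.5x plus a constant. Pushing one element onto an
// array whose 100-slot FixedArray is full gives new_length = 101 and
// capacity = 101 + 50 + 16 = 167, so the next 66 pushes reuse the new store
// without another allocation.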
555 
556 
557 BUILTIN(ArrayPop) {
558  Heap* heap = isolate->heap();
559  Object* receiver = *args.receiver();
560  Object* elms_obj;
561  { MaybeObject* maybe_elms_obj =
562  EnsureJSArrayWithWritableFastElements(heap, receiver, NULL, 0);
563  if (maybe_elms_obj == NULL) return CallJsBuiltin(isolate, "ArrayPop", args);
564  if (!maybe_elms_obj->ToObject(&elms_obj)) return maybe_elms_obj;
565  }
566  FixedArray* elms = FixedArray::cast(elms_obj);
567  JSArray* array = JSArray::cast(receiver);
568 
569  int len = Smi::cast(array->length())->value();
570  if (len == 0) return heap->undefined_value();
571 
572  // Get top element
573  MaybeObject* top = elms->get(len - 1);
574 
575  // Set the length.
576  array->set_length(Smi::FromInt(len - 1));
577 
578  if (!top->IsTheHole()) {
579  // Delete the top element.
580  elms->set_the_hole(len - 1);
581  return top;
582  }
583 
584  top = array->GetPrototype()->GetElement(len - 1);
585 
586  return top;
587 }
588 
589 
590 BUILTIN(ArrayShift) {
591  Heap* heap = isolate->heap();
592  Object* receiver = *args.receiver();
593  Object* elms_obj;
594  { MaybeObject* maybe_elms_obj =
595  EnsureJSArrayWithWritableFastElements(heap, receiver, NULL, 0);
596  if (maybe_elms_obj == NULL)
597  return CallJsBuiltin(isolate, "ArrayShift", args);
598  if (!maybe_elms_obj->ToObject(&elms_obj)) return maybe_elms_obj;
599  }
600  if (!IsJSArrayFastElementMovingAllowed(heap, JSArray::cast(receiver))) {
601  return CallJsBuiltin(isolate, "ArrayShift", args);
602  }
603  FixedArray* elms = FixedArray::cast(elms_obj);
604  JSArray* array = JSArray::cast(receiver);
605  ASSERT(array->HasFastSmiOrObjectElements());
606 
607  int len = Smi::cast(array->length())->value();
608  if (len == 0) return heap->undefined_value();
609 
610  // Get first element
611  Object* first = elms->get(0);
612  if (first->IsTheHole()) {
613  first = heap->undefined_value();
614  }
615 
616  if (!heap->lo_space()->Contains(elms)) {
617  array->set_elements(LeftTrimFixedArray(heap, elms, 1));
618  } else {
619  // Shift the elements.
620  AssertNoAllocation no_gc;
621  MoveElements(heap, &no_gc, elms, 0, elms, 1, len - 1);
622  elms->set(len - 1, heap->the_hole_value());
623  }
624 
625  // Set the length.
626  array->set_length(Smi::FromInt(len - 1));
627 
628  return first;
629 }
630 
631 
632 BUILTIN(ArrayUnshift) {
633  Heap* heap = isolate->heap();
634  Object* receiver = *args.receiver();
635  Object* elms_obj;
636  { MaybeObject* maybe_elms_obj =
637  EnsureJSArrayWithWritableFastElements(heap, receiver, NULL, 0);
638  if (maybe_elms_obj == NULL)
639  return CallJsBuiltin(isolate, "ArrayUnshift", args);
640  if (!maybe_elms_obj->ToObject(&elms_obj)) return maybe_elms_obj;
641  }
642  if (!IsJSArrayFastElementMovingAllowed(heap, JSArray::cast(receiver))) {
643  return CallJsBuiltin(isolate, "ArrayUnshift", args);
644  }
645  FixedArray* elms = FixedArray::cast(elms_obj);
646  JSArray* array = JSArray::cast(receiver);
647  ASSERT(array->HasFastSmiOrObjectElements());
648 
649  int len = Smi::cast(array->length())->value();
650  int to_add = args.length() - 1;
651  int new_length = len + to_add;
652  // Currently fixed arrays cannot grow too big, so
653  // we should never hit this case.
654  ASSERT(to_add <= (Smi::kMaxValue - len));
655 
656  MaybeObject* maybe_object =
657  array->EnsureCanContainElements(&args, 1, to_add,
658  DONT_ALLOW_DOUBLE_ELEMENTS);
659  if (maybe_object->IsFailure()) return maybe_object;
660 
661  if (new_length > elms->length()) {
662  // New backing storage is needed.
663  int capacity = new_length + (new_length >> 1) + 16;
664  Object* obj;
665  { MaybeObject* maybe_obj = heap->AllocateUninitializedFixedArray(capacity);
666  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
667  }
668  FixedArray* new_elms = FixedArray::cast(obj);
669  ElementsKind kind = array->GetElementsKind();
670  CopyObjectToObjectElements(elms, kind, 0, new_elms, kind, to_add, len);
671  FillWithHoles(heap, new_elms, new_length, capacity);
672  elms = new_elms;
673  array->set_elements(elms);
674  } else {
675  AssertNoAllocation no_gc;
676  MoveElements(heap, &no_gc, elms, to_add, elms, 0, len);
677  }
678 
679  // Add the provided values.
680  AssertNoAllocation no_gc;
681  WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
682  for (int i = 0; i < to_add; i++) {
683  elms->set(i, args[i + 1], mode);
684  }
685 
686  // Set the length.
687  array->set_length(Smi::FromInt(new_length));
688  return Smi::FromInt(new_length);
689 }
690 
691 
692 BUILTIN(ArraySlice) {
693  Heap* heap = isolate->heap();
694  Object* receiver = *args.receiver();
695  FixedArray* elms;
696  int len = -1;
697  if (receiver->IsJSArray()) {
698  JSArray* array = JSArray::cast(receiver);
699  if (!array->HasFastSmiOrObjectElements() ||
700  !IsJSArrayFastElementMovingAllowed(heap, array)) {
701  return CallJsBuiltin(isolate, "ArraySlice", args);
702  }
703 
704  elms = FixedArray::cast(array->elements());
705  len = Smi::cast(array->length())->value();
706  } else {
707  // Array.slice(arguments, ...) is quite a common idiom (notably more
708  // than 50% of invocations in Web apps). Treat it in C++ as well.
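 // (Added for clarity, not part of builtins.cc: the idiom referred to here
 // is JavaScript code such as Array.prototype.slice.call(arguments), used
 // to turn an arguments object into a real array, which is why a plain
 // arguments object with fast elements is handled on this C++ fast path.)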
709  Map* arguments_map =
710  isolate->context()->native_context()->arguments_boilerplate()->map();
711 
712  bool is_arguments_object_with_fast_elements =
713  receiver->IsJSObject()
714  && JSObject::cast(receiver)->map() == arguments_map
715  && JSObject::cast(receiver)->HasFastSmiOrObjectElements();
716  if (!is_arguments_object_with_fast_elements) {
717  return CallJsBuiltin(isolate, "ArraySlice", args);
718  }
719  elms = FixedArray::cast(JSObject::cast(receiver)->elements());
720  Object* len_obj = JSObject::cast(receiver)
721  ->InObjectPropertyAt(Heap::kArgumentsLengthIndex);
722  if (!len_obj->IsSmi()) {
723  return CallJsBuiltin(isolate, "ArraySlice", args);
724  }
725  len = Smi::cast(len_obj)->value();
726  if (len > elms->length()) {
727  return CallJsBuiltin(isolate, "ArraySlice", args);
728  }
729  for (int i = 0; i < len; i++) {
730  if (elms->get(i) == heap->the_hole_value()) {
731  return CallJsBuiltin(isolate, "ArraySlice", args);
732  }
733  }
734  }
735  ASSERT(len >= 0);
736  int n_arguments = args.length() - 1;
737 
738  // Note carefully chosen defaults---if an argument is missing,
739  // it's undefined which gets converted to 0 for relative_start
740  // and to len for relative_end.
741  int relative_start = 0;
742  int relative_end = len;
743  if (n_arguments > 0) {
744  Object* arg1 = args[1];
745  if (arg1->IsSmi()) {
746  relative_start = Smi::cast(arg1)->value();
747  } else if (!arg1->IsUndefined()) {
748  return CallJsBuiltin(isolate, "ArraySlice", args);
749  }
750  if (n_arguments > 1) {
751  Object* arg2 = args[2];
752  if (arg2->IsSmi()) {
753  relative_end = Smi::cast(arg2)->value();
754  } else if (!arg2->IsUndefined()) {
755  return CallJsBuiltin(isolate, "ArraySlice", args);
756  }
757  }
758  }
759 
760  // ECMA-262, 3rd Edition, Section 15.4.4.10, step 6.
761  int k = (relative_start < 0) ? Max(len + relative_start, 0)
762  : Min(relative_start, len);
763 
764  // ECMA-262, 3rd Edition, Section 15.4.4.10, step 8.
765  int final = (relative_end < 0) ? Max(len + relative_end, 0)
766  : Min(relative_end, len);
767 
768  ElementsKind elements_kind = JSObject::cast(receiver)->GetElementsKind();
769 
770  // Calculate the length of result array.
771  int result_len = Max(final - k, 0);
772 
773  MaybeObject* maybe_array =
774  heap->AllocateJSArrayAndStorage(elements_kind,
775  result_len,
776  result_len);
777  JSArray* result_array;
778  if (!maybe_array->To(&result_array)) return maybe_array;
779 
780  CopyObjectToObjectElements(elms, elements_kind, k,
781  FixedArray::cast(result_array->elements()),
782  elements_kind, 0, result_len);
783 
784  return result_array;
785 }
786 
787 
788 BUILTIN(ArraySplice) {
789  Heap* heap = isolate->heap();
790  Object* receiver = *args.receiver();
791  Object* elms_obj;
792  { MaybeObject* maybe_elms_obj =
793  EnsureJSArrayWithWritableFastElements(heap, receiver, &args, 3);
794  if (maybe_elms_obj == NULL)
795  return CallJsBuiltin(isolate, "ArraySplice", args);
796  if (!maybe_elms_obj->ToObject(&elms_obj)) return maybe_elms_obj;
797  }
798  if (!IsJSArrayFastElementMovingAllowed(heap, JSArray::cast(receiver))) {
799  return CallJsBuiltin(isolate, "ArraySplice", args);
800  }
801  FixedArray* elms = FixedArray::cast(elms_obj);
802  JSArray* array = JSArray::cast(receiver);
803  ASSERT(array->HasFastSmiOrObjectElements());
804 
805  int len = Smi::cast(array->length())->value();
806 
807  int n_arguments = args.length() - 1;
808 
809  int relative_start = 0;
810  if (n_arguments > 0) {
811  Object* arg1 = args[1];
812  if (arg1->IsSmi()) {
813  relative_start = Smi::cast(arg1)->value();
814  } else if (!arg1->IsUndefined()) {
815  return CallJsBuiltin(isolate, "ArraySplice", args);
816  }
817  }
818  int actual_start = (relative_start < 0) ? Max(len + relative_start, 0)
819  : Min(relative_start, len);
820 
821  // SpiderMonkey, TraceMonkey and JSC treat the case where no delete count is
822  // given as a request to delete all the elements from the start.
823  // And it differs from the case of undefined delete count.
824  // This does not follow ECMA-262, but we do the same for
825  // compatibility.
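 // (Added for clarity, not part of builtins.cc: concretely, in JavaScript
 // [1, 2, 3].splice(1) removes and returns [2, 3], while
 // [1, 2, 3].splice(1, undefined) removes nothing, because the undefined
 // delete count converts to 0; the explicit-undefined case is handled by
 // the JavaScript builtin via CallJsBuiltin below.)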
826  int actual_delete_count;
827  if (n_arguments == 1) {
828  ASSERT(len - actual_start >= 0);
829  actual_delete_count = len - actual_start;
830  } else {
831  int value = 0; // ToInteger(undefined) == 0
832  if (n_arguments > 1) {
833  Object* arg2 = args[2];
834  if (arg2->IsSmi()) {
835  value = Smi::cast(arg2)->value();
836  } else {
837  return CallJsBuiltin(isolate, "ArraySplice", args);
838  }
839  }
840  actual_delete_count = Min(Max(value, 0), len - actual_start);
841  }
842 
843  JSArray* result_array = NULL;
844  ElementsKind elements_kind =
845  JSObject::cast(receiver)->GetElementsKind();
846  MaybeObject* maybe_array =
847  heap->AllocateJSArrayAndStorage(elements_kind,
848  actual_delete_count,
849  actual_delete_count);
850  if (!maybe_array->To(&result_array)) return maybe_array;
851 
852  {
853  // Fill newly created array.
854  CopyObjectToObjectElements(elms, elements_kind, actual_start,
855  FixedArray::cast(result_array->elements()),
856  elements_kind, 0, actual_delete_count);
857  }
858 
859  int item_count = (n_arguments > 1) ? (n_arguments - 2) : 0;
860  int new_length = len - actual_delete_count + item_count;
861 
862  bool elms_changed = false;
863  if (item_count < actual_delete_count) {
864  // Shrink the array.
865  const bool trim_array = !heap->lo_space()->Contains(elms) &&
866  ((actual_start + item_count) <
867  (len - actual_delete_count - actual_start));
868  if (trim_array) {
869  const int delta = actual_delete_count - item_count;
870 
871  {
872  AssertNoAllocation no_gc;
873  MoveElements(heap, &no_gc, elms, delta, elms, 0, actual_start);
874  }
875 
876  elms = LeftTrimFixedArray(heap, elms, delta);
877 
878  elms_changed = true;
879  } else {
880  AssertNoAllocation no_gc;
881  MoveElements(heap, &no_gc,
882  elms, actual_start + item_count,
883  elms, actual_start + actual_delete_count,
884  (len - actual_delete_count - actual_start));
885  FillWithHoles(heap, elms, new_length, len);
886  }
887  } else if (item_count > actual_delete_count) {
888  // Currently fixed arrays cannot grow too big, so
889  // we should never hit this case.
890  ASSERT((item_count - actual_delete_count) <= (Smi::kMaxValue - len));
891 
892  // Check if the array needs to grow.
893  if (new_length > elms->length()) {
894  // New backing storage is needed.
895  int capacity = new_length + (new_length >> 1) + 16;
896  Object* obj;
897  { MaybeObject* maybe_obj =
898  heap->AllocateUninitializedFixedArray(capacity);
899  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
900  }
901  FixedArray* new_elms = FixedArray::cast(obj);
902 
903  {
904  // Copy the part before actual_start as is.
905  ElementsKind kind = array->GetElementsKind();
906  CopyObjectToObjectElements(elms, kind, 0,
907  new_elms, kind, 0, actual_start);
908  const int to_copy = len - actual_delete_count - actual_start;
909  CopyObjectToObjectElements(elms, kind,
910  actual_start + actual_delete_count,
911  new_elms, kind,
912  actual_start + item_count, to_copy);
913  }
914 
915  FillWithHoles(heap, new_elms, new_length, capacity);
916 
917  elms = new_elms;
918  elms_changed = true;
919  } else {
920  AssertNoAllocation no_gc;
921  MoveElements(heap, &no_gc,
922  elms, actual_start + item_count,
923  elms, actual_start + actual_delete_count,
924  (len - actual_delete_count - actual_start));
925  }
926  }
927 
928  AssertNoAllocation no_gc;
929  WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
930  for (int k = actual_start; k < actual_start + item_count; k++) {
931  elms->set(k, args[3 + k - actual_start], mode);
932  }
933 
934  if (elms_changed) {
935  array->set_elements(elms);
936  }
937 
938  // Set the length.
939  array->set_length(Smi::FromInt(new_length));
940 
941  return result_array;
942 }
943 
944 
945 BUILTIN(ArrayConcat) {
946  Heap* heap = isolate->heap();
947  Context* native_context = isolate->context()->native_context();
948  JSObject* array_proto =
949  JSObject::cast(native_context->array_function()->prototype());
950  if (!ArrayPrototypeHasNoElements(heap, native_context, array_proto)) {
951  return CallJsBuiltin(isolate, "ArrayConcat", args);
952  }
953 
954  // Iterate through all the arguments performing checks
955  // and calculating total length.
956  int n_arguments = args.length();
957  int result_len = 0;
958  ElementsKind elements_kind = GetInitialFastElementsKind();
959  for (int i = 0; i < n_arguments; i++) {
960  Object* arg = args[i];
961  if (!arg->IsJSArray() ||
962  !JSArray::cast(arg)->HasFastSmiOrObjectElements() ||
963  JSArray::cast(arg)->GetPrototype() != array_proto) {
964  return CallJsBuiltin(isolate, "ArrayConcat", args);
965  }
966 
967  int len = Smi::cast(JSArray::cast(arg)->length())->value();
968 
969  // We shouldn't overflow when adding another len.
970  const int kHalfOfMaxInt = 1 << (kBitsPerInt - 2);
971  STATIC_ASSERT(FixedArray::kMaxLength < kHalfOfMaxInt);
972  USE(kHalfOfMaxInt);
973  result_len += len;
974  ASSERT(result_len >= 0);
975 
976  if (result_len > FixedArray::kMaxLength) {
977  return CallJsBuiltin(isolate, "ArrayConcat", args);
978  }
979 
980  if (!JSArray::cast(arg)->HasFastSmiElements()) {
981  if (IsFastSmiElementsKind(elements_kind)) {
982  if (IsFastHoleyElementsKind(elements_kind)) {
983  elements_kind = FAST_HOLEY_ELEMENTS;
984  } else {
985  elements_kind = FAST_ELEMENTS;
986  }
987  }
988  }
989 
990  if (JSArray::cast(arg)->HasFastHoleyElements()) {
991  elements_kind = GetHoleyElementsKind(elements_kind);
992  }
993  }
994 
995  // Allocate result.
996  JSArray* result_array;
997  MaybeObject* maybe_array =
998  heap->AllocateJSArrayAndStorage(elements_kind,
999  result_len,
1000  result_len);
1001  if (!maybe_array->To(&result_array)) return maybe_array;
1002  if (result_len == 0) return result_array;
1003 
1004  // Copy data.
1005  int start_pos = 0;
1006  FixedArray* result_elms(FixedArray::cast(result_array->elements()));
1007  for (int i = 0; i < n_arguments; i++) {
1008  JSArray* array = JSArray::cast(args[i]);
1009  int len = Smi::cast(array->length())->value();
1010  FixedArray* elms = FixedArray::cast(array->elements());
1011  CopyObjectToObjectElements(elms, elements_kind, 0,
1012  result_elms, elements_kind,
1013  start_pos, len);
1014  start_pos += len;
1015  }
1016  ASSERT(start_pos == result_len);
1017 
1018  return result_array;
1019 }
1020 
1021 
1022 // -----------------------------------------------------------------------------
1023 // Strict mode poison pills
1024 
1025 
1026 BUILTIN(StrictModePoisonPill) {
1027  HandleScope scope;
1028  return isolate->Throw(*isolate->factory()->NewTypeError(
1029  "strict_poison_pill", HandleVector<Object>(NULL, 0)));
1030 }
1031 
1032 // -----------------------------------------------------------------------------
1033 //
1034 
1035 
1036 // Returns the holder JSObject if the function can legally be called
1037 // with this receiver. Returns Heap::null_value() if the call is
1038 // illegal. Any arguments that don't fit the expected type are
1039 // overwritten with undefined. Arguments that do fit the expected
1040 // type are overwritten with the object in the prototype chain that
1041 // actually has that type.
1042 static inline Object* TypeCheck(Heap* heap,
1043  int argc,
1044  Object** argv,
1045  FunctionTemplateInfo* info) {
1046  Object* recv = argv[0];
1047  // API calls are only supported with JSObject receivers.
1048  if (!recv->IsJSObject()) return heap->null_value();
1049  Object* sig_obj = info->signature();
1050  if (sig_obj->IsUndefined()) return recv;
1051  SignatureInfo* sig = SignatureInfo::cast(sig_obj);
1052  // If necessary, check the receiver
1053  Object* recv_type = sig->receiver();
1054 
1055  Object* holder = recv;
1056  if (!recv_type->IsUndefined()) {
1057  for (; holder != heap->null_value(); holder = holder->GetPrototype()) {
1058  if (holder->IsInstanceOf(FunctionTemplateInfo::cast(recv_type))) {
1059  break;
1060  }
1061  }
1062  if (holder == heap->null_value()) return holder;
1063  }
1064  Object* args_obj = sig->args();
1065  // If there is no argument signature we're done
1066  if (args_obj->IsUndefined()) return holder;
1067  FixedArray* args = FixedArray::cast(args_obj);
1068  int length = args->length();
1069  if (argc <= length) length = argc - 1;
1070  for (int i = 0; i < length; i++) {
1071  Object* argtype = args->get(i);
1072  if (argtype->IsUndefined()) continue;
1073  Object** arg = &argv[-1 - i];
1074  Object* current = *arg;
1075  for (; current != heap->null_value(); current = current->GetPrototype()) {
1076  if (current->IsInstanceOf(FunctionTemplateInfo::cast(argtype))) {
1077  *arg = current;
1078  break;
1079  }
1080  }
1081  if (current == heap->null_value()) *arg = heap->undefined_value();
1082  }
1083  return holder;
1084 }
1085 
1086 
1087 template <bool is_construct>
1088 MUST_USE_RESULT static MaybeObject* HandleApiCallHelper(
1089  BuiltinArguments<NEEDS_CALLED_FUNCTION> args, Isolate* isolate) {
1090  ASSERT(is_construct == CalledAsConstructor(isolate));
1091  Heap* heap = isolate->heap();
1092 
1093  HandleScope scope(isolate);
1094  Handle<JSFunction> function = args.called_function();
1095  ASSERT(function->shared()->IsApiFunction());
1096 
1097  FunctionTemplateInfo* fun_data = function->shared()->get_api_func_data();
1098  if (is_construct) {
1099  Handle<FunctionTemplateInfo> desc(fun_data, isolate);
1100  bool pending_exception = false;
1101  isolate->factory()->ConfigureInstance(
1102  desc, Handle<JSObject>::cast(args.receiver()), &pending_exception);
1103  ASSERT(isolate->has_pending_exception() == pending_exception);
1104  if (pending_exception) return Failure::Exception();
1105  fun_data = *desc;
1106  }
1107 
1108  Object* raw_holder = TypeCheck(heap, args.length(), &args[0], fun_data);
1109 
1110  if (raw_holder->IsNull()) {
1111  // This function cannot be called with the given receiver. Abort!
1112  Handle<Object> obj =
1113  isolate->factory()->NewTypeError(
1114  "illegal_invocation", HandleVector(&function, 1));
1115  return isolate->Throw(*obj);
1116  }
1117 
1118  Object* raw_call_data = fun_data->call_code();
1119  if (!raw_call_data->IsUndefined()) {
1120  CallHandlerInfo* call_data = CallHandlerInfo::cast(raw_call_data);
1121  Object* callback_obj = call_data->callback();
1122  v8::InvocationCallback callback =
1123  v8::ToCData<v8::InvocationCallback>(callback_obj);
1124  Object* data_obj = call_data->data();
1125  Object* result;
1126 
1127  LOG(isolate, ApiObjectAccess("call", JSObject::cast(*args.receiver())));
1128  ASSERT(raw_holder->IsJSObject());
1129 
1130  CustomArguments custom(isolate);
1131  v8::ImplementationUtilities::PrepareArgumentsData(custom.end(),
1132  isolate, data_obj, *function, raw_holder);
1133 
1134  v8::Arguments new_args = v8::ImplementationUtilities::NewArguments(
1135  custom.end(),
1136  &args[0] - 1,
1137  args.length() - 1,
1138  is_construct);
1139 
1140  v8::Handle<v8::Value> value;
1141  {
1142  // Leaving JavaScript.
1143  VMState state(isolate, EXTERNAL);
1144  ExternalCallbackScope call_scope(isolate,
1145  v8::ToCData<Address>(callback_obj));
1146  value = callback(new_args);
1147  }
1148  if (value.IsEmpty()) {
1149  result = heap->undefined_value();
1150  } else {
1151  result = *reinterpret_cast<Object**>(*value);
1152  result->VerifyApiCallResultType();
1153  }
1154 
1155  RETURN_IF_SCHEDULED_EXCEPTION(isolate);
1156  if (!is_construct || result->IsJSObject()) return result;
1157  }
1158 
1159  return *args.receiver();
1160 }
1161 
1162 
1163 BUILTIN(HandleApiCall) {
1164  return HandleApiCallHelper<false>(args, isolate);
1165 }
1166 
1167 
1168 BUILTIN(HandleApiCallConstruct) {
1169  return HandleApiCallHelper<true>(args, isolate);
1170 }
1171 
1172 
1173 // Helper function to handle calls to non-function objects created through the
1174 // API. The object can be called as either a constructor (using new) or just as
1175 // a function (without new).
1176 MUST_USE_RESULT static MaybeObject* HandleApiCallAsFunctionOrConstructor(
1177  Isolate* isolate,
1178  bool is_construct_call,
1179  BuiltinArguments<NO_EXTRA_ARGUMENTS> args) {
1180  // Non-functions are never called as constructors. Even if this is an object
1181  // called as a constructor the delegate call is not a construct call.
1182  ASSERT(!CalledAsConstructor(isolate));
1183  Heap* heap = isolate->heap();
1184 
1185  Handle<Object> receiver = args.receiver();
1186 
1187  // Get the object called.
1188  JSObject* obj = JSObject::cast(*receiver);
1189 
1190  // Get the invocation callback from the function descriptor that was
1191  // used to create the called object.
1192  ASSERT(obj->map()->has_instance_call_handler());
1193  JSFunction* constructor = JSFunction::cast(obj->map()->constructor());
1194  ASSERT(constructor->shared()->IsApiFunction());
1195  Object* handler =
1196  constructor->shared()->get_api_func_data()->instance_call_handler();
1197  ASSERT(!handler->IsUndefined());
1198  CallHandlerInfo* call_data = CallHandlerInfo::cast(handler);
1199  Object* callback_obj = call_data->callback();
1200  v8::InvocationCallback callback =
1201  v8::ToCData<v8::InvocationCallback>(callback_obj);
1202 
1203  // Get the data for the call and perform the callback.
1204  Object* result;
1205  {
1206  HandleScope scope(isolate);
1207  LOG(isolate, ApiObjectAccess("call non-function", obj));
1208 
1209  CustomArguments custom(isolate);
1210  v8::ImplementationUtilities::PrepareArgumentsData(custom.end(),
1211  isolate, call_data->data(), constructor, obj);
1212  v8::Arguments new_args = v8::ImplementationUtilities::NewArguments(
1213  custom.end(),
1214  &args[0] - 1,
1215  args.length() - 1,
1216  is_construct_call);
1217  v8::Handle<v8::Value> value;
1218  {
1219  // Leaving JavaScript.
1220  VMState state(isolate, EXTERNAL);
1221  ExternalCallbackScope call_scope(isolate,
1222  v8::ToCData<Address>(callback_obj));
1223  value = callback(new_args);
1224  }
1225  if (value.IsEmpty()) {
1226  result = heap->undefined_value();
1227  } else {
1228  result = *reinterpret_cast<Object**>(*value);
1229  result->VerifyApiCallResultType();
1230  }
1231  }
1232  // Check for exceptions and return result.
1233  RETURN_IF_SCHEDULED_EXCEPTION(isolate);
1234  return result;
1235 }
1236 
1237 
1238 // Handle calls to non-function objects created through the API. This delegate
1239 // function is used when the call is a normal function call.
1240 BUILTIN(HandleApiCallAsFunction) {
1241  return HandleApiCallAsFunctionOrConstructor(isolate, false, args);
1242 }
1243 
1244 
1245 // Handle calls to non-function objects created through the API. This delegate
1246 // function is used when the call is a construct call.
1247 BUILTIN(HandleApiCallAsConstructor) {
1248  return HandleApiCallAsFunctionOrConstructor(isolate, true, args);
1249 }
1250 
1251 
1252 static void Generate_LoadIC_ArrayLength(MacroAssembler* masm) {
1253  LoadIC::GenerateArrayLength(masm);
1254 }
1255 
1256 
1257 static void Generate_LoadIC_StringLength(MacroAssembler* masm) {
1258  LoadIC::GenerateStringLength(masm, false);
1259 }
1260 
1261 
1262 static void Generate_LoadIC_StringWrapperLength(MacroAssembler* masm) {
1263  LoadIC::GenerateStringLength(masm, true);
1264 }
1265 
1266 
1267 static void Generate_LoadIC_FunctionPrototype(MacroAssembler* masm) {
1268  LoadIC::GenerateFunctionPrototype(masm);
1269 }
1270 
1271 
1272 static void Generate_LoadIC_Initialize(MacroAssembler* masm) {
1273  LoadIC::GenerateInitialize(masm);
1274 }
1275 
1276 
1277 static void Generate_LoadIC_PreMonomorphic(MacroAssembler* masm) {
1278  LoadIC::GeneratePreMonomorphic(masm);
1279 }
1280 
1281 
1282 static void Generate_LoadIC_Miss(MacroAssembler* masm) {
1283  LoadIC::GenerateMiss(masm);
1284 }
1285 
1286 
1287 static void Generate_LoadIC_Megamorphic(MacroAssembler* masm) {
1288  LoadIC::GenerateMegamorphic(masm);
1289 }
1290 
1291 
1292 static void Generate_LoadIC_Normal(MacroAssembler* masm) {
1293  LoadIC::GenerateNormal(masm);
1294 }
1295 
1296 
1297 static void Generate_LoadIC_Getter_ForDeopt(MacroAssembler* masm) {
1298  LoadStubCompiler::GenerateLoadViaGetter(masm, Handle<JSFunction>());
1299 }
1300 
1301 
1302 static void Generate_KeyedLoadIC_Initialize(MacroAssembler* masm) {
1303  KeyedLoadIC::GenerateInitialize(masm);
1304 }
1305 
1306 
1307 static void Generate_KeyedLoadIC_Slow(MacroAssembler* masm) {
1308  KeyedLoadIC::GenerateRuntimeGetProperty(masm);
1309 }
1310 
1311 
1312 static void Generate_KeyedLoadIC_Miss(MacroAssembler* masm) {
1313  KeyedLoadIC::GenerateMiss(masm, false);
1314 }
1315 
1316 
1317 static void Generate_KeyedLoadIC_MissForceGeneric(MacroAssembler* masm) {
1318  KeyedLoadIC::GenerateMiss(masm, true);
1319 }
1320 
1321 
1322 static void Generate_KeyedLoadIC_Generic(MacroAssembler* masm) {
1323  KeyedLoadIC::GenerateGeneric(masm);
1324 }
1325 
1326 
1327 static void Generate_KeyedLoadIC_String(MacroAssembler* masm) {
1328  KeyedLoadIC::GenerateString(masm);
1329 }
1330 
1331 
1332 static void Generate_KeyedLoadIC_PreMonomorphic(MacroAssembler* masm) {
1333  KeyedLoadIC::GeneratePreMonomorphic(masm);
1334 }
1335 
1336 static void Generate_KeyedLoadIC_IndexedInterceptor(MacroAssembler* masm) {
1337  KeyedLoadIC::GenerateIndexedInterceptor(masm);
1338 }
1339 
1340 static void Generate_KeyedLoadIC_NonStrictArguments(MacroAssembler* masm) {
1341  KeyedLoadIC::GenerateNonStrictArguments(masm);
1342 }
1343 
1344 static void Generate_StoreIC_Initialize(MacroAssembler* masm) {
1345  StoreIC::GenerateInitialize(masm);
1346 }
1347 
1348 
1349 static void Generate_StoreIC_Initialize_Strict(MacroAssembler* masm) {
1350  StoreIC::GenerateInitialize(masm);
1351 }
1352 
1353 
1354 static void Generate_StoreIC_Miss(MacroAssembler* masm) {
1355  StoreIC::GenerateMiss(masm);
1356 }
1357 
1358 
1359 static void Generate_StoreIC_Normal(MacroAssembler* masm) {
1360  StoreIC::GenerateNormal(masm);
1361 }
1362 
1363 
1364 static void Generate_StoreIC_Normal_Strict(MacroAssembler* masm) {
1365  StoreIC::GenerateNormal(masm);
1366 }
1367 
1368 
1369 static void Generate_StoreIC_Megamorphic(MacroAssembler* masm) {
1370  StoreIC::GenerateMegamorphic(masm, kNonStrictMode);
1371 }
1372 
1373 
1374 static void Generate_StoreIC_Megamorphic_Strict(MacroAssembler* masm) {
1375  StoreIC::GenerateMegamorphic(masm, kStrictMode);
1376 }
1377 
1378 
1379 static void Generate_StoreIC_ArrayLength(MacroAssembler* masm) {
1380  StoreIC::GenerateArrayLength(masm);
1381 }
1382 
1383 
1384 static void Generate_StoreIC_ArrayLength_Strict(MacroAssembler* masm) {
1385  StoreIC::GenerateArrayLength(masm);
1386 }
1387 
1388 
1389 static void Generate_StoreIC_GlobalProxy(MacroAssembler* masm) {
1390  StoreIC::GenerateGlobalProxy(masm, kNonStrictMode);
1391 }
1392 
1393 
1394 static void Generate_StoreIC_GlobalProxy_Strict(MacroAssembler* masm) {
1395  StoreIC::GenerateGlobalProxy(masm, kStrictMode);
1396 }
1397 
1398 
1399 static void Generate_StoreIC_Setter_ForDeopt(MacroAssembler* masm) {
1400  StoreStubCompiler::GenerateStoreViaSetter(masm, Handle<JSFunction>());
1401 }
1402 
1403 
1404 static void Generate_KeyedStoreIC_Generic(MacroAssembler* masm) {
1405  KeyedStoreIC::GenerateGeneric(masm, kNonStrictMode);
1406 }
1407 
1408 
1409 static void Generate_KeyedStoreIC_Generic_Strict(MacroAssembler* masm) {
1410  KeyedStoreIC::GenerateGeneric(masm, kStrictMode);
1411 }
1412 
1413 
1414 static void Generate_KeyedStoreIC_Miss(MacroAssembler* masm) {
1415  KeyedStoreIC::GenerateMiss(masm, false);
1416 }
1417 
1418 
1419 static void Generate_KeyedStoreIC_MissForceGeneric(MacroAssembler* masm) {
1420  KeyedStoreIC::GenerateMiss(masm, true);
1421 }
1422 
1423 
1424 static void Generate_KeyedStoreIC_Slow(MacroAssembler* masm) {
1425  KeyedStoreIC::GenerateSlow(masm);
1426 }
1427 
1428 
1429 static void Generate_KeyedStoreIC_Initialize(MacroAssembler* masm) {
1430  KeyedStoreIC::GenerateInitialize(masm);
1431 }
1432 
1433 
1434 static void Generate_KeyedStoreIC_Initialize_Strict(MacroAssembler* masm) {
1435  KeyedStoreIC::GenerateInitialize(masm);
1436 }
1437 
1438 static void Generate_KeyedStoreIC_NonStrictArguments(MacroAssembler* masm) {
1439  KeyedStoreIC::GenerateNonStrictArguments(masm);
1440 }
1441 
1442 static void Generate_TransitionElementsSmiToDouble(MacroAssembler* masm) {
1443  KeyedStoreIC::GenerateTransitionElementsSmiToDouble(masm);
1444 }
1445 
1446 static void Generate_TransitionElementsDoubleToObject(MacroAssembler* masm) {
1447  KeyedStoreIC::GenerateTransitionElementsDoubleToObject(masm);
1448 }
1449 
1450 #ifdef ENABLE_DEBUGGER_SUPPORT
1451 static void Generate_LoadIC_DebugBreak(MacroAssembler* masm) {
1452  Debug::GenerateLoadICDebugBreak(masm);
1453 }
1454 
1455 
1456 static void Generate_StoreIC_DebugBreak(MacroAssembler* masm) {
1457  Debug::GenerateStoreICDebugBreak(masm);
1458 }
1459 
1460 
1461 static void Generate_KeyedLoadIC_DebugBreak(MacroAssembler* masm) {
1462  Debug::GenerateKeyedLoadICDebugBreak(masm);
1463 }
1464 
1465 
1466 static void Generate_KeyedStoreIC_DebugBreak(MacroAssembler* masm) {
1467  Debug::GenerateKeyedStoreICDebugBreak(masm);
1468 }
1469 
1470 
1471 static void Generate_Return_DebugBreak(MacroAssembler* masm) {
1472  Debug::GenerateReturnDebugBreak(masm);
1473 }
1474 
1475 
1476 static void Generate_CallFunctionStub_DebugBreak(MacroAssembler* masm) {
1477  Debug::GenerateCallFunctionStubDebugBreak(masm);
1478 }
1479 
1480 
1481 static void Generate_CallFunctionStub_Recording_DebugBreak(
1482  MacroAssembler* masm) {
1483  Debug::GenerateCallFunctionStubRecordDebugBreak(masm);
1484 }
1485 
1486 
1487 static void Generate_CallConstructStub_DebugBreak(MacroAssembler* masm) {
1488  Debug::GenerateCallConstructStubDebugBreak(masm);
1489 }
1490 
1491 
1492 static void Generate_CallConstructStub_Recording_DebugBreak(
1493  MacroAssembler* masm) {
1494  Debug::GenerateCallConstructStubRecordDebugBreak(masm);
1495 }
1496 
1497 
1498 static void Generate_Slot_DebugBreak(MacroAssembler* masm) {
1499  Debug::GenerateSlotDebugBreak(masm);
1500 }
1501 
1502 
1503 static void Generate_PlainReturn_LiveEdit(MacroAssembler* masm) {
1504  Debug::GeneratePlainReturnLiveEdit(masm);
1505 }
1506 
1507 
1508 static void Generate_FrameDropper_LiveEdit(MacroAssembler* masm) {
1509  Debug::GenerateFrameDropperLiveEdit(masm);
1510 }
1511 #endif
1512 
1513 
1514 Builtins::Builtins() : initialized_(false) {
1515  memset(builtins_, 0, sizeof(builtins_[0]) * builtin_count);
1516  memset(names_, 0, sizeof(names_[0]) * builtin_count);
1517 }
1518 
1519 
1520 Builtins::~Builtins() {
1521 }
1522 
1523 
1524 #define DEF_ENUM_C(name, ignore) FUNCTION_ADDR(Builtin_##name),
1525 Address const Builtins::c_functions_[cfunction_count] = {
1526  BUILTIN_LIST_C(DEF_ENUM_C)
1527 };
1528 #undef DEF_ENUM_C
1529 
1530 #define DEF_JS_NAME(name, ignore) #name,
1531 #define DEF_JS_ARGC(ignore, argc) argc,
1532 const char* const Builtins::javascript_names_[id_count] = {
1533  BUILTINS_LIST_JS(DEF_JS_NAME)
1534 };
1535 
1536 int const Builtins::javascript_argc_[id_count] = {
1537  BUILTINS_LIST_JS(DEF_JS_ARGC)
1538 };
1539 #undef DEF_JS_NAME
1540 #undef DEF_JS_ARGC
1541 
1542 struct BuiltinDesc {
1543  byte* generator;
1544  byte* c_code;
1545  const char* s_name; // name is only used for generating log information.
1546  int name;
1547  Code::Flags flags;
1548  BuiltinExtraArguments extra_args;
1549 };
1550 
1551 #define BUILTIN_FUNCTION_TABLE_INIT { V8_ONCE_INIT, {} }
1552 
1553 class BuiltinFunctionTable {
1554  public:
1555  BuiltinDesc* functions() {
1556  CallOnce(&once_, &Builtins::InitBuiltinFunctionTable);
1557  return functions_;
1558  }
1559 
1560  OnceType once_;
1561  BuiltinDesc functions_[Builtins::builtin_count + 1];
1562 
1563  friend class Builtins;
1564 };
1565 
1566 static BuiltinFunctionTable builtin_function_table =
1567  BUILTIN_FUNCTION_TABLE_INIT;
1568 
1569 // Define array of pointers to generators and C builtin functions.
1570 // We do this in a sort of roundabout way so that we can do the initialization
1571 // within the lexical scope of Builtins:: and within a context where
1572 // Code::Flags names a non-abstract type.
1573 void Builtins::InitBuiltinFunctionTable() {
1574  BuiltinDesc* functions = builtin_function_table.functions_;
1575  functions[builtin_count].generator = NULL;
1576  functions[builtin_count].c_code = NULL;
1577  functions[builtin_count].s_name = NULL;
1578  functions[builtin_count].name = builtin_count;
1579  functions[builtin_count].flags = static_cast<Code::Flags>(0);
1580  functions[builtin_count].extra_args = NO_EXTRA_ARGUMENTS;
1581 
1582 #define DEF_FUNCTION_PTR_C(aname, aextra_args) \
1583  functions->generator = FUNCTION_ADDR(Generate_Adaptor); \
1584  functions->c_code = FUNCTION_ADDR(Builtin_##aname); \
1585  functions->s_name = #aname; \
1586  functions->name = c_##aname; \
1587  functions->flags = Code::ComputeFlags(Code::BUILTIN); \
1588  functions->extra_args = aextra_args; \
1589  ++functions;
1590 
1591 #define DEF_FUNCTION_PTR_A(aname, kind, state, extra) \
1592  functions->generator = FUNCTION_ADDR(Generate_##aname); \
1593  functions->c_code = NULL; \
1594  functions->s_name = #aname; \
1595  functions->name = k##aname; \
1596  functions->flags = Code::ComputeFlags(Code::kind, \
1597  state, \
1598  extra); \
1599  functions->extra_args = NO_EXTRA_ARGUMENTS; \
1600  ++functions;
1601 
1602  BUILTIN_LIST_C(DEF_FUNCTION_PTR_C)
1603  BUILTIN_LIST_A(DEF_FUNCTION_PTR_A)
1604  BUILTIN_LIST_DEBUG_A(DEF_FUNCTION_PTR_A)
1605 
1606 #undef DEF_FUNCTION_PTR_C
1607 #undef DEF_FUNCTION_PTR_A
1608 }
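// Illustrative note (added for clarity, not part of builtins.cc): for a C++
// builtin such as ArrayPush, one expansion of DEF_FUNCTION_PTR_C above fills
// the next BuiltinDesc slot roughly as
//
//   functions->generator = FUNCTION_ADDR(Generate_Adaptor);
//   functions->c_code = FUNCTION_ADDR(Builtin_ArrayPush);
//   functions->s_name = "ArrayPush";
//   functions->name = c_ArrayPush;
//   functions->flags = Code::ComputeFlags(Code::BUILTIN);
//   functions->extra_args = NO_EXTRA_ARGUMENTS;
//   ++functions;
//
// Builtins::SetUp below then walks this table and lets each generator emit
// the adaptor or stub code for its entry.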
1609 
1610 void Builtins::SetUp(bool create_heap_objects) {
1611  ASSERT(!initialized_);
1612  Isolate* isolate = Isolate::Current();
1613  Heap* heap = isolate->heap();
1614 
1615  // Create a scope for the handles in the builtins.
1616  HandleScope scope(isolate);
1617 
1618  const BuiltinDesc* functions = builtin_function_table.functions();
1619 
1620  // For now we generate builtin adaptor code into a stack-allocated
1621  // buffer, before copying it into individual code objects. Be careful
1622  // with alignment, some platforms don't like unaligned code.
1623  union { int force_alignment; byte buffer[8*KB]; } u;
1624 
1625  // Traverse the list of builtins and generate an adaptor in a
1626  // separate code object for each one.
1627  for (int i = 0; i < builtin_count; i++) {
1628  if (create_heap_objects) {
1629  MacroAssembler masm(isolate, u.buffer, sizeof u.buffer);
1630  // Generate the code/adaptor.
1631  typedef void (*Generator)(MacroAssembler*, int, BuiltinExtraArguments);
1632  Generator g = FUNCTION_CAST<Generator>(functions[i].generator);
1633  // We pass all arguments to the generator, but it may not use all of
1634  // them. This works because the first arguments are on top of the
1635  // stack.
1636  ASSERT(!masm.has_frame());
1637  g(&masm, functions[i].name, functions[i].extra_args);
1638  // Move the code into the object heap.
1639  CodeDesc desc;
1640  masm.GetCode(&desc);
1641  Code::Flags flags = functions[i].flags;
1642  Object* code = NULL;
1643  {
1644  // During startup it's OK to always allocate and defer GC to later.
1645  // This simplifies things because we don't need to retry.
1646  AlwaysAllocateScope __scope__;
1647  { MaybeObject* maybe_code =
1648  heap->CreateCode(desc, flags, masm.CodeObject());
1649  if (!maybe_code->ToObject(&code)) {
1650  v8::internal::V8::FatalProcessOutOfMemory("CreateCode");
1651  }
1652  }
1653  }
1654  // Log the event and add the code to the builtins array.
1655  PROFILE(isolate,
1656  CodeCreateEvent(Logger::BUILTIN_TAG,
1657  Code::cast(code),
1658  functions[i].s_name));
1659  GDBJIT(AddCode(GDBJITInterface::BUILTIN,
1660  functions[i].s_name,
1661  Code::cast(code)));
1662  builtins_[i] = code;
1663 #ifdef ENABLE_DISASSEMBLER
1664  if (FLAG_print_builtin_code) {
1665  PrintF("Builtin: %s\n", functions[i].s_name);
1666  Code::cast(code)->Disassemble(functions[i].s_name);
1667  PrintF("\n");
1668  }
1669 #endif
1670  } else {
1671  // Deserializing. The values will be filled in during IterateBuiltins.
1672  builtins_[i] = NULL;
1673  }
1674  names_[i] = functions[i].s_name;
1675  }
1676 
1677  // Mark as initialized.
1678  initialized_ = true;
1679 }
1680 
1681 
1682 void Builtins::TearDown() {
1683  initialized_ = false;
1684 }
1685 
1686 
1687 void Builtins::IterateBuiltins(ObjectVisitor* v) {
1688  v->VisitPointers(&builtins_[0], &builtins_[0] + builtin_count);
1689 }
1690 
1691 
1692 const char* Builtins::Lookup(byte* pc) {
1693  // may be called during initialization (disassembler!)
1694  if (initialized_) {
1695  for (int i = 0; i < builtin_count; i++) {
1696  Code* entry = Code::cast(builtins_[i]);
1697  if (entry->contains(pc)) {
1698  return names_[i];
1699  }
1700  }
1701  }
1702  return NULL;
1703 }
1704 
1705 
1706 #define DEFINE_BUILTIN_ACCESSOR_C(name, ignore) \
1707 Handle<Code> Builtins::name() { \
1708  Code** code_address = \
1709  reinterpret_cast<Code**>(builtin_address(k##name)); \
1710  return Handle<Code>(code_address); \
1711 }
1712 #define DEFINE_BUILTIN_ACCESSOR_A(name, kind, state, extra) \
1713 Handle<Code> Builtins::name() { \
1714  Code** code_address = \
1715  reinterpret_cast<Code**>(builtin_address(k##name)); \
1716  return Handle<Code>(code_address); \
1717 }
1718 BUILTIN_LIST_C(DEFINE_BUILTIN_ACCESSOR_C)
1719 BUILTIN_LIST_A(DEFINE_BUILTIN_ACCESSOR_A)
1720 BUILTIN_LIST_DEBUG_A(DEFINE_BUILTIN_ACCESSOR_A)
1721 #undef DEFINE_BUILTIN_ACCESSOR_C
1722 #undef DEFINE_BUILTIN_ACCESSOR_A
1723 
1724 
1725 } } // namespace v8::internal
the Print usage including flags
void set_the_hole(int index)
Definition: objects-inl.h:1900
static Object ** RawField(HeapObject *obj, int offset)
Definition: objects-inl.h:971
static Smi * cast(Object *object)
static void GenerateArrayLength(MacroAssembler *masm)
bool contains(byte *pc)
Definition: objects-inl.h:4690
uint8_t byte
Definition: globals.h:156
HANDLE HANDLE LPSTACKFRAME64 StackFrame
#define UNREACHABLE()
Definition: checks.h:50
Handle< Value >(* InvocationCallback)(const Arguments &args)
Definition: v8.h:2047
static v8::Arguments NewArguments(internal::Object **implicit_args, internal::Object **argv, int argc, bool is_construct_call)
Definition: apiutils.h:56
STATIC_ASSERT((FixedDoubleArray::kHeaderSize &kDoubleAlignmentMask)==0)
static void GenerateInitialize(MacroAssembler *masm)
Definition: ic.h:493
static Address c_entry_fp(ThreadLocalTop *thread)
Definition: isolate.h:624
#define MUST_USE_RESULT
Definition: globals.h:346
static void GenerateLoadViaGetter(MacroAssembler *masm, Handle< JSFunction > getter)
BuiltinExtraArguments
Definition: builtins.h:35
#define HEAP_PROFILE(heap, call)
Definition: heap-profiler.h:39
static const int kCallerFPOffset
Definition: frames-arm.h:127
Context * native_context()
Definition: contexts.cc:58
V8EXPORT Local< Value > GetPrototype()
Definition: api.cc:2900
#define BUILTIN_FUNCTION_TABLE_INIT
Definition: builtins.cc:1551
ElementsKind GetElementsKind()
Definition: objects-inl.h:4776
void GetCode(CodeDesc *desc)
const int kPointerSize
Definition: globals.h:220
static void GeneratePreMonomorphic(MacroAssembler *masm)
Definition: ic.h:334
static void GenerateGeneric(MacroAssembler *masm)
static Address & Address_at(Address addr)
Definition: v8memory.h:71
static void GenerateInitialize(MacroAssembler *masm)
Definition: ic.h:333
static void GenerateMiss(MacroAssembler *masm)
const Register pc
static FunctionTemplateInfo * cast(Object *obj)
static FixedDoubleArray * cast(Object *obj)
#define DEF_FUNCTION_PTR_C(aname, aextra_args)
bool IsFastSmiElementsKind(ElementsKind kind)
MUST_USE_RESULT MaybeObject * AllocateJSArrayAndStorage(ElementsKind elements_kind, int length, int capacity, ArrayStorageAllocationMode mode=DONT_INITIALIZE_ARRAY_ELEMENTS, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4079
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra code(assertions) for debugging") DEFINE_bool(code_comments
WriteBarrierMode GetWriteBarrierMode(const AssertNoAllocation &)
Definition: objects-inl.h:1831
LargeObjectSpace * lo_space()
Definition: heap.h:511
bool IsUndefined() const
Definition: v8.h:4472
MUST_USE_RESULT MaybeObject * CreateCode(const CodeDesc &desc, Code::Flags flags, Handle< Object > self_reference, bool immovable=false)
Definition: heap.cc:3594
const int kBitsPerInt
Definition: globals.h:240
activate correct semantics for inheriting readonliness false
Definition: flags.cc:141
static Handle< Object > Call(Handle< Object > callable, Handle< Object > receiver, int argc, Handle< Object > argv[], bool *pending_exception, bool convert_receiver=false)
Definition: execution.cc:150
static JSArray * cast(Object *obj)
#define GDBJIT(action)
Definition: gdb-jit.h:141
static const int kHeaderSize
Definition: objects.h:2296
#define DEF_ENUM_C(name, ignore)
Definition: builtins.cc:1524
void CopyObjectToObjectElements(FixedArray *from, ElementsKind from_kind, uint32_t from_start, FixedArray *to, ElementsKind to_kind, uint32_t to_start, int raw_copy_size)
Definition: elements.cc:149
static void GenerateSlow(MacroAssembler *masm)
bool Contains(HeapObject *obj)
Definition: spaces.cc:2781
static const int kMapOffset
Definition: objects.h:1261
#define BUILTIN_LIST_DEBUG_A(V)
Definition: builtins.h:229
static const int kLengthOffset
Definition: objects.h:2295
Local< Value > operator[](int i) const
Definition: v8.h:4280
static const int kArgumentsLengthIndex
Definition: heap.h:901
ElementsKind GetInitialFastElementsKind()
static void GenerateString(MacroAssembler *masm)
MUST_USE_RESULT MaybeObject * AllocateUninitializedFixedArray(int length)
Definition: heap.cc:4836
#define DEF_JS_ARGC(ignore, argc)
Definition: builtins.cc:1531
void MemsetPointer(T **dest, U *value, int counter)
Definition: v8utils.h:149
static void IncrementLiveBytesFromMutator(Address address, int by)
Definition: spaces.cc:769
#define HEAP
Definition: isolate.h:1433
#define ASSERT_EQ(v1, v2)
Definition: checks.h:271
static void GenerateNonStrictArguments(MacroAssembler *masm)
static HeapObject * FromAddress(Address address)
Definition: objects-inl.h:1171
void USE(T)
Definition: globals.h:289
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if NULL
static FixedArray * cast(Object *obj)
static void GenerateNormal(MacroAssembler *masm)
static void GenerateFunctionPrototype(MacroAssembler *masm)
static void GenerateIndexedInterceptor(MacroAssembler *masm)
static Handle< Object > GetElement(Handle< Object > object, uint32_t index)
Definition: objects.cc:250
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if NULL
Definition: flags.cc:301
Object * get(int index)
Definition: objects-inl.h:1737
static const int kPreallocatedArrayElements
Definition: objects.h:8329
bool IsFastHoleyElementsKind(ElementsKind kind)
#define BUILTIN(name)
Definition: builtins.cc:142
static void GenerateInitialize(MacroAssembler *masm)
Definition: ic.h:578
#define DEF_FUNCTION_PTR_A(aname, kind, state, extra)
const Register fp
Vector< Handle< Object > > HandleVector(v8::internal::Handle< T > *elms, int length)
Definition: v8utils.h:117
static void GenerateNonStrictArguments(MacroAssembler *masm)
static void GenerateMiss(MacroAssembler *masm, bool force_generic)
#define DEFINE_BUILTIN_ACCESSOR_A(name, kind, state, extra)
Definition: builtins.cc:1712
static void GenerateMiss(MacroAssembler *masm)
static void GenerateTransitionElementsSmiToDouble(MacroAssembler *masm)
#define DEF_ARG_TYPE(name, spec)
Definition: builtins.cc:106
T Min(T a, T b)
Definition: utils.h:229
static SignatureInfo * cast(Object *obj)
static void GenerateArrayLength(MacroAssembler *masm)
static const int kInitialMaxFastElementArray
Definition: objects.h:2161
static const int kMaxValue
Definition: objects.h:1050
static void FatalProcessOutOfMemory(const char *location, bool take_snapshot=false)
BuiltinExtraArguments extra_args
Definition: builtins.cc:1548
#define BUILTIN_LIST_C(V)
Definition: builtins.h:42
ElementsKind GetHoleyElementsKind(ElementsKind packed_kind)
static void GenerateInitialize(MacroAssembler *masm)
Definition: ic.h:660
#define DEF_JS_NAME(name, ignore)
Definition: builtins.cc:1530
static JSObject * cast(Object *obj)
static void GeneratePreMonomorphic(MacroAssembler *masm)
Definition: ic.h:496
#define BUILTIN_LIST_A(V)
Definition: builtins.h:66
bool IsFastDoubleElementsKind(ElementsKind kind)
MUST_USE_RESULT MaybeObject * EnsureCanContainElements(Object **elements, uint32_t count, EnsureElementsMode mode)
Definition: objects-inl.h:1268
static void GenerateNormal(MacroAssembler *masm)
static void GenerateMiss(MacroAssembler *masm, bool force_generic)
static JSFunction * cast(Object *obj)