template <BuiltinExtraArguments extra_args>
class BuiltinArguments : public Arguments {
 public:
  BuiltinArguments(int length, Object** arguments)
      : Arguments(length, arguments) { }

  Object*& operator[] (int index) {
    ASSERT(index < length());
    return Arguments::operator[](index);
  }

  template <class S> Handle<S> at(int index) {
    ASSERT(index < length());
    return Arguments::at<S>(index);
  }

  Handle<Object> receiver() {
    return Arguments::at<Object>(0);
  }

  Handle<JSFunction> called_function() {
    return Arguments::at<JSFunction>(Arguments::length() - 1);
  }

  // Gets the total number of arguments including the receiver (but
  // excluding extra arguments).
  int length() const {
    return Arguments::length();
  }

#ifdef DEBUG
  void Verify() {
    // Check we have at least the receiver.
    ASSERT(Arguments::length() >= 1);
  }
#endif
};


// Specialize BuiltinArguments for the extra called-function argument.
template <>
int BuiltinArguments<NEEDS_CALLED_FUNCTION>::length() const {
  return Arguments::length() - 1;
}

#ifdef DEBUG
template <>
void BuiltinArguments<NEEDS_CALLED_FUNCTION>::Verify() {
  // Check we have at least the receiver and the called function.
  ASSERT(Arguments::length() >= 2);
}
#endif


#define DEF_ARG_TYPE(name, spec) \
  typedef BuiltinArguments<spec> name##ArgumentsType;
BUILTIN_LIST_C(DEF_ARG_TYPE)
#undef DEF_ARG_TYPE
// Support macro for defining builtins in C++: a builtin is written as
// BUILTIN(name) { ... } and accesses its arguments through `args`.

#ifdef DEBUG

#define BUILTIN(name)                                      \
  MUST_USE_RESULT static MaybeObject* Builtin_Impl_##name( \
      name##ArgumentsType args, Isolate* isolate);         \
  MUST_USE_RESULT static MaybeObject* Builtin_##name(      \
      name##ArgumentsType args, Isolate* isolate) {        \
    ASSERT(isolate == Isolate::Current());                 \
    args.Verify();                                         \
    return Builtin_Impl_##name(args, isolate);             \
  }                                                        \
  MUST_USE_RESULT static MaybeObject* Builtin_Impl_##name( \
      name##ArgumentsType args, Isolate* isolate)

#else  // For release mode.

#define BUILTIN(name)                                      \
  static MaybeObject* Builtin_##name(name##ArgumentsType args, Isolate* isolate)

#endif  // DEBUG
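// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original V8 source). The BUILTIN macro
// above uses token pasting to emit a forward declaration, a checked wrapper,
// and an open implementation header, so the `{ ... }` body written after the
// macro use becomes the Impl function. A minimal standalone analog of the
// pattern, with hypothetical names (CHECKED_FN, Square):
//
//   #include <cassert>
//
//   #define CHECKED_FN(name)                              \
//     static int name##_Impl(int arg);                    \
//     static int name(int arg) {                          \
//       assert(arg >= 0);  /* wrapper-side debug check */ \
//       return name##_Impl(arg);                          \
//     }                                                   \
//     static int name##_Impl(int arg)  /* body follows */
//
//   CHECKED_FN(Square) {
//     return arg * arg;  // expands into Square_Impl's body
//   }
// ---------------------------------------------------------------------------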
static inline bool CalledAsConstructor(Isolate* isolate) {
#ifdef DEBUG
  // Compute the expected result with a full stack frame iterator and
  // check the fast path below against it.
  StackFrameIterator it;
  ASSERT(it.frame()->is_exit());
  it.Advance();
  StackFrame* frame = it.frame();
  bool reference_result = frame->is_construct();
#endif
  // Fast path: read the construct marker directly from the caller frame.
  Address fp = Isolate::c_entry_fp(isolate->thread_local_top());
  Address caller_fp = Memory::Address_at(fp + ExitFrameConstants::kCallerFPOffset);
  Object* marker = Memory::Object_at(caller_fp + StandardFrameConstants::kMarkerOffset);
  const Smi* kConstructMarker = Smi::FromInt(StackFrame::CONSTRUCT);
  bool result = (marker == kConstructMarker);
  ASSERT_EQ(result, reference_result);
  return result;
}


BUILTIN(Illegal) {
  UNREACHABLE();
  return isolate->heap()->undefined_value();  // Make the compiler happy.
}


BUILTIN(EmptyFunction) {
  return isolate->heap()->undefined_value();
}
static MaybeObject* ArrayCodeGenericCommon(Arguments* args,
                                           Isolate* isolate,
                                           JSFunction* constructor) {
  Heap* heap = isolate->heap();
  isolate->counters()->array_function_runtime()->Increment();

  JSArray* array;
  if (CalledAsConstructor(isolate)) {
    array = JSArray::cast((*args)[0]);
    // Initialize elements and length in case later allocations fail.
    array->set_length(Smi::FromInt(0));
    array->set_elements(heap->empty_fixed_array());
    if (!FLAG_smi_only_arrays) {
      Context* native_context = isolate->context()->native_context();
      if (array->GetElementsKind() == GetInitialFastElementsKind() &&
          !native_context->js_array_maps()->IsUndefined()) {
        FixedArray* map_array =
            FixedArray::cast(native_context->js_array_maps());
        array->set_map(
            Map::cast(map_array->get(TERMINAL_FAST_ELEMENTS_KIND)));
      }
    }
  } else {
    // Allocate the JS Array.
    MaybeObject* maybe_obj = heap->AllocateJSObject(constructor);
    if (!maybe_obj->To(&array)) return maybe_obj;
  }

  // Optimize the case where there is one argument and the argument is a
  // small smi: treat it as the requested length.
  if (args->length() == 2) {
    Object* obj = (*args)[1];
    if (obj->IsSmi()) {
      int len = Smi::cast(obj)->value();
      if (len >= 0 && len < JSObject::kInitialMaxFastElementArray) {
        Object* fixed_array;
        { MaybeObject* maybe_obj = heap->AllocateFixedArrayWithHoles(len);
          if (!maybe_obj->ToObject(&fixed_array)) return maybe_obj;
        }
        ElementsKind elements_kind = array->GetElementsKind();
        if (!IsFastHoleyElementsKind(elements_kind)) {
          elements_kind = GetHoleyElementsKind(elements_kind);
          MaybeObject* maybe_array =
              array->TransitionElementsKind(elements_kind);
          if (maybe_array->IsFailure()) return maybe_array;
        }
        array->set_elements(FixedArray::cast(fixed_array));
        array->set_length(Smi::cast(obj));
        return array;
      }
    }
    // Take the argument as the length.
    { MaybeObject* maybe_obj = array->Initialize(0);
      if (!maybe_obj->ToObject(&obj)) return maybe_obj;
    }
    return array->SetElementsLength((*args)[1]);
  }

  // Optimize the case where there are no parameters passed.
  if (args->length() == 1) {
    return array->Initialize(JSArray::kPreallocatedArrayElements);
  }

  // Set length and elements on the array.
  int number_of_elements = args->length() - 1;
  MaybeObject* maybe_object =
      array->EnsureCanContainElements(args, 1, number_of_elements,
                                      ALLOW_CONVERTED_DOUBLE_ELEMENTS);
  if (maybe_object->IsFailure()) return maybe_object;

  // Allocate an appropriately typed elements array.
  MaybeObject* maybe_elms;
  ElementsKind elements_kind = array->GetElementsKind();
  if (IsFastDoubleElementsKind(elements_kind)) {
    maybe_elms =
        heap->AllocateUninitializedFixedDoubleArray(number_of_elements);
  } else {
    maybe_elms = heap->AllocateFixedArrayWithHoles(number_of_elements);
  }
  FixedArrayBase* elms;
  if (!maybe_elms->To<FixedArrayBase>(&elms)) return maybe_elms;

  // Fill in the content.
  switch (array->GetElementsKind()) {
    case FAST_HOLEY_SMI_ELEMENTS:
    case FAST_SMI_ELEMENTS: {
      FixedArray* smi_elms = FixedArray::cast(elms);
      for (int index = 0; index < number_of_elements; index++) {
        smi_elms->set(index, (*args)[index + 1], SKIP_WRITE_BARRIER);
      }
      break;
    }
    case FAST_HOLEY_ELEMENTS:
    case FAST_ELEMENTS: {
      AssertNoAllocation no_gc;
      WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
      FixedArray* object_elms = FixedArray::cast(elms);
      for (int index = 0; index < number_of_elements; index++) {
        object_elms->set(index, (*args)[index + 1], mode);
      }
      break;
    }
    case FAST_HOLEY_DOUBLE_ELEMENTS:
    case FAST_DOUBLE_ELEMENTS: {
      FixedDoubleArray* double_elms = FixedDoubleArray::cast(elms);
      for (int index = 0; index < number_of_elements; index++) {
        double_elms->set(index, (*args)[index + 1]->Number());
      }
      break;
    }
    default:
      UNREACHABLE();
      break;
  }

  array->set_elements(elms);
  array->set_length(Smi::FromInt(number_of_elements));
  return array;
}


BUILTIN(InternalArrayCodeGeneric) {
  return ArrayCodeGenericCommon(
      &args,
      isolate,
      isolate->context()->native_context()->internal_array_function());
}


BUILTIN(ArrayCodeGeneric) {
  return ArrayCodeGenericCommon(
      &args,
      isolate,
      isolate->context()->native_context()->array_function());
}
static void MoveElements(Heap* heap,
                         AssertNoAllocation* no_gc,
                         FixedArray* dst,
                         int dst_index,
                         FixedArray* src,
                         int src_index,
                         int len) {
  if (len == 0) return;
  ASSERT(dst->map() != HEAP->fixed_cow_array_map());
  memmove(dst->data_start() + dst_index,
          src->data_start() + src_index,
          len * kPointerSize);
  WriteBarrierMode mode = dst->GetWriteBarrierMode(*no_gc);
  if (mode == UPDATE_WRITE_BARRIER) {
    heap->RecordWrites(dst->address(), dst->OffsetOfElementAt(dst_index), len);
  }
  heap->incremental_marking()->RecordWrites(dst);
}
static void FillWithHoles(Heap* heap, FixedArray* dst, int from, int to) {
  ASSERT(dst->map() != heap->fixed_cow_array_map());
  MemsetPointer(dst->data_start() + from, heap->the_hole_value(), to - from);
}
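// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original V8 source): MoveElements
// relies on memmove, which is safe for overlapping source/destination ranges,
// and FillWithHoles back-fills vacated slots with a sentinel. A standalone
// analog on a raw pointer array, with hypothetical helpers and no write
// barriers:
//
//   #include <algorithm>
//   #include <cstring>
//
//   static void* const kHoleSentinel = NULL;  // stand-in for the hole value
//
//   static void MoveSlots(void** slots, int dst, int src, int len) {
//     if (len == 0) return;
//     std::memmove(slots + dst, slots + src, len * sizeof(*slots));
//   }
//
//   static void FillSlotsWithHole(void** slots, int from, int to) {
//     std::fill(slots + from, slots + to, kHoleSentinel);
//   }
//
// In the real code the RecordWrites calls additionally tell the GC's
// remembered set and the incremental marker about the pointers that moved.
// ---------------------------------------------------------------------------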
static FixedArray* LeftTrimFixedArray(Heap* heap,
                                      FixedArray* elms,
                                      int to_trim) {
  ASSERT(elms->map() != HEAP->fixed_cow_array_map());
  // This trick is only applied to fixed arrays in new and paged space; in
  // large object space the object's start must coincide with the chunk.
  ASSERT(!HEAP->lo_space()->Contains(elms));

  STATIC_ASSERT(FixedArray::kMapOffset == 0);
  STATIC_ASSERT(FixedArray::kLengthOffset == kPointerSize);
  STATIC_ASSERT(FixedArray::kHeaderSize == 2 * kPointerSize);

  Object** former_start = HeapObject::RawField(elms, 0);
  const int len = elms->length();

  if (to_trim > FixedArray::kHeaderSize / kPointerSize &&
      !heap->new_space()->Contains(elms)) {
    // Zap the space that was formerly part of the array so the GC does not
    // find stale pointers to new space there.
    Object** zap = reinterpret_cast<Object**>(elms->address());
    zap++;  // Header of the filler must be at least one word, so skip that.
    for (int i = 1; i < to_trim; i++) {
      *zap++ = Smi::FromInt(0);
    }
  }
  heap->CreateFillerObjectAt(elms->address(), to_trim * kPointerSize);

  former_start[to_trim] = heap->fixed_array_map();
  former_start[to_trim + 1] = Smi::FromInt(len - to_trim);

  // Maintain marking consistency for incremental marking.
  int size_delta = to_trim * kPointerSize;
  if (heap->marking()->TransferMark(elms->address(),
                                    elms->address() + size_delta)) {
    MemoryChunk::IncrementLiveBytesFromMutator(elms->address(), -size_delta);
  }

  HEAP_PROFILE(heap, ObjectMoveEvent(elms->address(),
                                     elms->address() + size_delta));
  return FixedArray::cast(HeapObject::FromAddress(
      elms->address() + to_trim * kPointerSize));
}
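// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original V8 source): LeftTrimFixedArray
// removes the first to_trim slots without copying the tail. It overwrites the
// cut-off prefix and rebuilds the array header (map + length) to_trim words
// further in, so the "new" array simply starts at a higher address. A
// simplified standalone model on a plain length-prefixed buffer (hypothetical
// layout: word 0 holds the element count, elements follow):
//
//   #include <cstddef>
//
//   static size_t* LeftTrim(size_t* header, int to_trim) {
//     size_t len = header[0];
//     size_t* new_header = header + to_trim;  // shift the start of the array
//     new_header[0] = len - to_trim;          // rebuild the length header
//     return new_header;                      // the old prefix is now dead
//   }
//
// The real implementation additionally zaps the abandoned prefix, installs a
// filler object so the heap stays iterable, and keeps incremental-marking
// bookkeeping consistent.
// ---------------------------------------------------------------------------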
static bool ArrayPrototypeHasNoElements(Heap* heap,
                                        Context* native_context,
                                        JSObject* array_proto) {
  // This method depends on non-writability of Object and Array prototype
  // fields.
  if (array_proto->elements() != heap->empty_fixed_array()) return false;
  // Object.prototype
  Object* proto = array_proto->GetPrototype();
  if (proto == heap->null_value()) return false;
  array_proto = JSObject::cast(proto);
  if (array_proto != native_context->initial_object_prototype()) return false;
  if (array_proto->elements() != heap->empty_fixed_array()) return false;
  return array_proto->GetPrototype()->IsNull();
}
MUST_USE_RESULT
static inline MaybeObject* EnsureJSArrayWithWritableFastElements(
    Heap* heap, Object* receiver, Arguments* args, int first_added_arg) {
  if (!receiver->IsJSArray()) return NULL;
  JSArray* array = JSArray::cast(receiver);
  HeapObject* elms = array->elements();
  Map* map = elms->map();
  if (map == heap->fixed_array_map()) {
    if (args == NULL || array->HasFastObjectElements()) return elms;
    if (array->HasFastDoubleElements()) {
      ASSERT(elms == heap->empty_fixed_array());
      MaybeObject* maybe_transition =
          array->TransitionElementsKind(FAST_ELEMENTS);
      if (maybe_transition->IsFailure()) return maybe_transition;
      return elms;
    }
  } else if (map == heap->fixed_cow_array_map()) {
    MaybeObject* maybe_writable_result = array->EnsureWritableFastElements();
    if (args == NULL || array->HasFastObjectElements() ||
        maybe_writable_result->IsFailure()) {
      return maybe_writable_result;
    }
    elms = HeapObject::cast(maybe_writable_result->ToObjectUnchecked());
  } else {
    return NULL;
  }

  // Ensure that the arguments to be added can be contained in the array.
  int args_length = args->length();
  if (first_added_arg >= args_length) return array->elements();

  MaybeObject* maybe_array = array->EnsureCanContainElements(
      args,
      first_added_arg,
      args_length - first_added_arg,
      DONT_ALLOW_DOUBLE_ELEMENTS);
  if (maybe_array->IsFailure()) return maybe_array;
  return array->elements();
}
static inline bool IsJSArrayFastElementMovingAllowed(Heap* heap,
                                                     JSArray* receiver) {
  if (!FLAG_clever_optimizations) return false;
  Context* native_context = heap->isolate()->context()->native_context();
  JSObject* array_proto =
      JSObject::cast(native_context->array_function()->prototype());
  return receiver->GetPrototype() == array_proto &&
      ArrayPrototypeHasNoElements(heap, native_context, array_proto);
}
MUST_USE_RESULT static MaybeObject* CallJsBuiltin(
    Isolate* isolate,
    const char* name,
    BuiltinArguments<NO_EXTRA_ARGUMENTS> args) {
  HandleScope handleScope(isolate);

  Handle<Object> js_builtin =
      GetProperty(Handle<JSObject>(isolate->native_context()->builtins()),
                  name);
  Handle<JSFunction> function = Handle<JSFunction>::cast(js_builtin);
  int argc = args.length() - 1;
  ScopedVector<Handle<Object> > argv(argc);
  for (int i = 0; i < argc; ++i) {
    argv[i] = args.at<Object>(i + 1);
  }
  bool pending_exception;
  Handle<Object> result = Execution::Call(function, args.receiver(), argc,
                                          argv.start(), &pending_exception);
  if (pending_exception) return Failure::Exception();
  return *result;
}
BUILTIN(ArrayPush) {
  Heap* heap = isolate->heap();
  Object* receiver = *args.receiver();
  Object* elms_obj;
  { MaybeObject* maybe_elms_obj =
        EnsureJSArrayWithWritableFastElements(heap, receiver, &args, 1);
    if (maybe_elms_obj == NULL) {
      return CallJsBuiltin(isolate, "ArrayPush", args);
    }
    if (!maybe_elms_obj->ToObject(&elms_obj)) return maybe_elms_obj;
  }
  FixedArray* elms = FixedArray::cast(elms_obj);
  JSArray* array = JSArray::cast(receiver);

  int len = Smi::cast(array->length())->value();
  int to_add = args.length() - 1;
  if (to_add == 0) {
    return Smi::FromInt(len);
  }
  // Currently fixed arrays cannot grow too big, so we should never hit this.
  ASSERT(to_add <= (Smi::kMaxValue - len));

  int new_length = len + to_add;

  if (new_length > elms->length()) {
    // New backing storage is needed.
    int capacity = new_length + (new_length >> 1) + 16;
    Object* obj;
    { MaybeObject* maybe_obj = heap->AllocateUninitializedFixedArray(capacity);
      if (!maybe_obj->ToObject(&obj)) return maybe_obj;
    }
    FixedArray* new_elms = FixedArray::cast(obj);

    ElementsKind kind = array->GetElementsKind();
    CopyObjectToObjectElements(elms, kind, 0, new_elms, kind, 0, len);
    FillWithHoles(heap, new_elms, new_length, capacity);

    elms = new_elms;
  }

  // Add the provided values.
  AssertNoAllocation no_gc;
  WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
  for (int index = 0; index < to_add; index++) {
    elms->set(index + len, args[index + 1], mode);
  }

  if (elms != array->elements()) {
    array->set_elements(elms);
  }

  // Set the length.
  array->set_length(Smi::FromInt(new_length));
  return Smi::FromInt(new_length);
}
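// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original V8 source): when the backing
// store is too small, the builtins above grow it to
// new_length + new_length/2 + 16, i.e. roughly 1.5x plus a constant, which
// keeps repeated pushes amortized O(1). Just that policy, standalone:
//
//   static int GrownCapacity(int new_length) {
//     return new_length + (new_length >> 1) + 16;
//   }
//
//   // GrownCapacity(0) == 16, GrownCapacity(100) == 166,
//   // GrownCapacity(1000) == 1516.
// ---------------------------------------------------------------------------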
BUILTIN(ArrayPop) {
  Heap* heap = isolate->heap();
  Object* receiver = *args.receiver();
  Object* elms_obj;
  { MaybeObject* maybe_elms_obj =
        EnsureJSArrayWithWritableFastElements(heap, receiver, NULL, 0);
    if (maybe_elms_obj == NULL)
      return CallJsBuiltin(isolate, "ArrayPop", args);
    if (!maybe_elms_obj->ToObject(&elms_obj)) return maybe_elms_obj;
  }
  FixedArray* elms = FixedArray::cast(elms_obj);
  JSArray* array = JSArray::cast(receiver);

  int len = Smi::cast(array->length())->value();
  if (len == 0) return heap->undefined_value();

  // Get the top element.
  MaybeObject* top = elms->get(len - 1);

  // Set the length.
  array->set_length(Smi::FromInt(len - 1));

  if (!top->IsTheHole()) {
    // Delete the top element.
    elms->set_the_hole(len - 1);
    return top;
  }

  // The top element is a hole: look it up in the prototype chain.
  top = array->GetPrototype()->GetElement(len - 1);
  return top;
}
BUILTIN(ArrayShift) {
  Heap* heap = isolate->heap();
  Object* receiver = *args.receiver();
  Object* elms_obj;
  { MaybeObject* maybe_elms_obj =
        EnsureJSArrayWithWritableFastElements(heap, receiver, NULL, 0);
    if (maybe_elms_obj == NULL)
      return CallJsBuiltin(isolate, "ArrayShift", args);
    if (!maybe_elms_obj->ToObject(&elms_obj)) return maybe_elms_obj;
  }
  if (!IsJSArrayFastElementMovingAllowed(heap, JSArray::cast(receiver))) {
    return CallJsBuiltin(isolate, "ArrayShift", args);
  }
  FixedArray* elms = FixedArray::cast(elms_obj);
  JSArray* array = JSArray::cast(receiver);
  ASSERT(array->HasFastSmiOrObjectElements());

  int len = Smi::cast(array->length())->value();
  if (len == 0) return heap->undefined_value();

  // Get the first element.
  Object* first = elms->get(0);
  if (first->IsTheHole()) {
    first = heap->undefined_value();
  }

  if (!heap->lo_space()->Contains(elms)) {
    array->set_elements(LeftTrimFixedArray(heap, elms, 1));
  } else {
    // Shift the elements.
    AssertNoAllocation no_gc;
    MoveElements(heap, &no_gc, elms, 0, elms, 1, len - 1);
    elms->set(len - 1, heap->the_hole_value());
  }

  // Set the length.
  array->set_length(Smi::FromInt(len - 1));

  return first;
}
BUILTIN(ArrayUnshift) {
  Heap* heap = isolate->heap();
  Object* receiver = *args.receiver();
  Object* elms_obj;
  { MaybeObject* maybe_elms_obj =
        EnsureJSArrayWithWritableFastElements(heap, receiver, NULL, 0);
    if (maybe_elms_obj == NULL)
      return CallJsBuiltin(isolate, "ArrayUnshift", args);
    if (!maybe_elms_obj->ToObject(&elms_obj)) return maybe_elms_obj;
  }
  if (!IsJSArrayFastElementMovingAllowed(heap, JSArray::cast(receiver))) {
    return CallJsBuiltin(isolate, "ArrayUnshift", args);
  }
  FixedArray* elms = FixedArray::cast(elms_obj);
  JSArray* array = JSArray::cast(receiver);
  ASSERT(array->HasFastSmiOrObjectElements());

  int len = Smi::cast(array->length())->value();
  int to_add = args.length() - 1;
  int new_length = len + to_add;
  // Currently fixed arrays cannot grow too big, so we should never hit this.
  ASSERT(to_add <= (Smi::kMaxValue - len));

  MaybeObject* maybe_object =
      array->EnsureCanContainElements(&args, 1, to_add,
                                      DONT_ALLOW_DOUBLE_ELEMENTS);
  if (maybe_object->IsFailure()) return maybe_object;

  if (new_length > elms->length()) {
    // New backing storage is needed.
    int capacity = new_length + (new_length >> 1) + 16;
    Object* obj;
    { MaybeObject* maybe_obj = heap->AllocateUninitializedFixedArray(capacity);
      if (!maybe_obj->ToObject(&obj)) return maybe_obj;
    }
    FixedArray* new_elms = FixedArray::cast(obj);
    ElementsKind kind = array->GetElementsKind();
    CopyObjectToObjectElements(elms, kind, 0, new_elms, kind, to_add, len);
    FillWithHoles(heap, new_elms, new_length, capacity);
    elms = new_elms;
    array->set_elements(elms);
  } else {
    AssertNoAllocation no_gc;
    MoveElements(heap, &no_gc, elms, to_add, elms, 0, len);
  }

  // Add the provided values.
  AssertNoAllocation no_gc;
  WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
  for (int i = 0; i < to_add; i++) {
    elms->set(i, args[i + 1], mode);
  }

  // Set the length.
  array->set_length(Smi::FromInt(new_length));
  return Smi::FromInt(new_length);
}
BUILTIN(ArraySlice) {
  Heap* heap = isolate->heap();
  Object* receiver = *args.receiver();
  FixedArray* elms;
  int len = -1;
  if (receiver->IsJSArray()) {
    JSArray* array = JSArray::cast(receiver);
    if (!array->HasFastSmiOrObjectElements() ||
        !IsJSArrayFastElementMovingAllowed(heap, array)) {
      return CallJsBuiltin(isolate, "ArraySlice", args);
    }

    elms = FixedArray::cast(array->elements());
    len = Smi::cast(array->length())->value();
  } else {
    // Array.prototype.slice(arguments, ...) is quite a common idiom
    // (notably more than 50% of invocations in Web apps), so treat it
    // in C++ as well.
    Map* arguments_map =
        isolate->context()->native_context()->arguments_boilerplate()->map();

    bool is_arguments_object_with_fast_elements =
        receiver->IsJSObject()
        && JSObject::cast(receiver)->map() == arguments_map
        && JSObject::cast(receiver)->HasFastSmiOrObjectElements();
    if (!is_arguments_object_with_fast_elements) {
      return CallJsBuiltin(isolate, "ArraySlice", args);
    }
    elms = FixedArray::cast(JSObject::cast(receiver)->elements());
    Object* len_obj = JSObject::cast(receiver)
        ->InObjectPropertyAt(Heap::kArgumentsLengthIndex);
    if (!len_obj->IsSmi()) {
      return CallJsBuiltin(isolate, "ArraySlice", args);
    }
    len = Smi::cast(len_obj)->value();
    if (len > elms->length()) {
      return CallJsBuiltin(isolate, "ArraySlice", args);
    }
    for (int i = 0; i < len; i++) {
      if (elms->get(i) == heap->the_hole_value()) {
        return CallJsBuiltin(isolate, "ArraySlice", args);
      }
    }
  }
  ASSERT(len >= 0 && len <= elms->length());

  // Get relative start & end.
  int n_arguments = args.length() - 1;

  // Note the carefully chosen defaults: a missing argument is undefined,
  // which converts to 0 for relative_start and to len for relative_end.
  int relative_start = 0;
  int relative_end = len;
  if (n_arguments > 0) {
    Object* arg1 = args[1];
    if (arg1->IsSmi()) {
      relative_start = Smi::cast(arg1)->value();
    } else if (!arg1->IsUndefined()) {
      return CallJsBuiltin(isolate, "ArraySlice", args);
    }
    if (n_arguments > 1) {
      Object* arg2 = args[2];
      if (arg2->IsSmi()) {
        relative_end = Smi::cast(arg2)->value();
      } else if (!arg2->IsUndefined()) {
        return CallJsBuiltin(isolate, "ArraySlice", args);
      }
    }
  }

  // ECMA-262, 3rd edition, section 15.4.4.10, step 6.
  int k = (relative_start < 0) ? Max(len + relative_start, 0)
                               : Min(relative_start, len);

  // ECMA-262, 3rd edition, section 15.4.4.10, step 8.
  int final = (relative_end < 0) ? Max(len + relative_end, 0)
                                 : Min(relative_end, len);

  ElementsKind elements_kind = JSObject::cast(receiver)->GetElementsKind();

  // Calculate the length of the result array.
  int result_len = Max(final - k, 0);

  JSArray* result_array;
  MaybeObject* maybe_array =
      heap->AllocateJSArrayAndStorage(elements_kind,
                                      result_len,
                                      result_len);
  if (!maybe_array->To(&result_array)) return maybe_array;

  CopyObjectToObjectElements(elms, elements_kind, k,
                             FixedArray::cast(result_array->elements()),
                             elements_kind, 0, result_len);

  return result_array;
}
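// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original V8 source): the k/final
// computation above is the ECMAScript relative-index clamping used by
// Array.prototype.slice -- negative indices count from the end, and the
// result is clamped to [0, len]. As a standalone helper:
//
//   #include <algorithm>
//
//   static int ClampRelativeIndex(int relative, int len) {
//     return (relative < 0) ? std::max(len + relative, 0)
//                           : std::min(relative, len);
//   }
//
//   // With len == 5: ClampRelativeIndex(-2, 5) == 3 and
//   // ClampRelativeIndex(7, 5) == 5, so slice(-2) copies indices [3, 5).
// ---------------------------------------------------------------------------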
BUILTIN(ArraySplice) {
  Heap* heap = isolate->heap();
  Object* receiver = *args.receiver();
  Object* elms_obj;
  { MaybeObject* maybe_elms_obj =
        EnsureJSArrayWithWritableFastElements(heap, receiver, &args, 3);
    if (maybe_elms_obj == NULL)
      return CallJsBuiltin(isolate, "ArraySplice", args);
    if (!maybe_elms_obj->ToObject(&elms_obj)) return maybe_elms_obj;
  }
  if (!IsJSArrayFastElementMovingAllowed(heap, JSArray::cast(receiver))) {
    return CallJsBuiltin(isolate, "ArraySplice", args);
  }
  FixedArray* elms = FixedArray::cast(elms_obj);
  JSArray* array = JSArray::cast(receiver);
  ASSERT(array->HasFastSmiOrObjectElements());

  int len = Smi::cast(array->length())->value();

  int n_arguments = args.length() - 1;

  int relative_start = 0;
  if (n_arguments > 0) {
    Object* arg1 = args[1];
    if (arg1->IsSmi()) {
      relative_start = Smi::cast(arg1)->value();
    } else if (!arg1->IsUndefined()) {
      return CallJsBuiltin(isolate, "ArraySplice", args);
    }
  }
  int actual_start = (relative_start < 0) ? Max(len + relative_start, 0)
                                          : Min(relative_start, len);

  // SpiderMonkey, TraceMonkey and JSC treat the case where no delete count is
  // given as a request to delete all the elements from the start, which
  // differs from an undefined delete count. This does not follow ECMA-262,
  // but we do the same for compatibility.
  int actual_delete_count;
  if (n_arguments == 1) {
    ASSERT(len - actual_start >= 0);
    actual_delete_count = len - actual_start;
  } else {
    int value = 0;  // ToInteger(undefined) == 0
    if (n_arguments > 1) {
      Object* arg2 = args[2];
      if (arg2->IsSmi()) {
        value = Smi::cast(arg2)->value();
      } else {
        return CallJsBuiltin(isolate, "ArraySplice", args);
      }
    }
    actual_delete_count = Min(Max(value, 0), len - actual_start);
  }

  JSArray* result_array = NULL;
  ElementsKind elements_kind = JSObject::cast(receiver)->GetElementsKind();
  MaybeObject* maybe_array =
      heap->AllocateJSArrayAndStorage(elements_kind,
                                      actual_delete_count,
                                      actual_delete_count);
  if (!maybe_array->To(&result_array)) return maybe_array;

  // Fill the newly created array with the deleted elements.
  CopyObjectToObjectElements(elms, elements_kind, actual_start,
                             FixedArray::cast(result_array->elements()),
                             elements_kind, 0, actual_delete_count);

  int item_count = (n_arguments > 1) ? (n_arguments - 2) : 0;
  int new_length = len - actual_delete_count + item_count;

  bool elms_changed = false;
  if (item_count < actual_delete_count) {
    // Shrink the array.
    const bool trim_array = !heap->lo_space()->Contains(elms) &&
        ((actual_start + item_count) <
            (len - actual_delete_count - actual_start));
    if (trim_array) {
      const int delta = actual_delete_count - item_count;

      {
        AssertNoAllocation no_gc;
        MoveElements(heap, &no_gc, elms, delta, elms, 0, actual_start);
      }

      elms = LeftTrimFixedArray(heap, elms, delta);

      elms_changed = true;
    } else {
      AssertNoAllocation no_gc;
      MoveElements(heap, &no_gc,
                   elms, actual_start + item_count,
                   elms, actual_start + actual_delete_count,
                   (len - actual_delete_count - actual_start));
      FillWithHoles(heap, elms, new_length, len);
    }
  } else if (item_count > actual_delete_count) {
    // Currently fixed arrays cannot grow too big, so we should never hit this.
    ASSERT((item_count - actual_delete_count) <= (Smi::kMaxValue - len));

    // Check if the array needs to grow.
    if (new_length > elms->length()) {
      // New backing storage is needed.
      int capacity = new_length + (new_length >> 1) + 16;
      Object* obj;
      { MaybeObject* maybe_obj =
            heap->AllocateUninitializedFixedArray(capacity);
        if (!maybe_obj->ToObject(&obj)) return maybe_obj;
      }
      FixedArray* new_elms = FixedArray::cast(obj);

      // Copy the part before actual_start as is.
      ElementsKind kind = array->GetElementsKind();
      CopyObjectToObjectElements(elms, kind, 0,
                                 new_elms, kind, 0, actual_start);
      const int to_copy = len - actual_delete_count - actual_start;
      CopyObjectToObjectElements(elms, kind,
                                 actual_start + actual_delete_count,
                                 new_elms, kind,
                                 actual_start + item_count, to_copy);

      FillWithHoles(heap, new_elms, new_length, capacity);

      elms = new_elms;
      elms_changed = true;
    } else {
      AssertNoAllocation no_gc;
      MoveElements(heap, &no_gc,
                   elms, actual_start + item_count,
                   elms, actual_start + actual_delete_count,
                   (len - actual_delete_count - actual_start));
    }
  }

  AssertNoAllocation no_gc;
  WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
  for (int k = actual_start; k < actual_start + item_count; k++) {
    elms->set(k, args[3 + k - actual_start], mode);
  }

  if (elms_changed) {
    array->set_elements(elms);
  }

  // Set the length.
  array->set_length(Smi::FromInt(new_length));

  return result_array;
}
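// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original V8 source): when a splice
// deletes more elements than it inserts, the code above picks the cheaper of
// two repairs -- move the short prefix right and left-trim the array, or move
// the tail left and hole-fill the end. The decision reduces to comparing the
// sizes of the two regions that would have to move; a standalone model:
//
//   static bool ShouldTrimFront(int start, int item_count,
//                               int delete_count, int len) {
//     int prefix_len = start + item_count;          // moved if we left-trim
//     int suffix_len = len - delete_count - start;  // moved otherwise
//     return prefix_len < suffix_len;               // move fewer words
//   }
//
// (The real code additionally refuses to trim arrays in large-object space,
// where the left-trim trick cannot be applied.)
// ---------------------------------------------------------------------------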
BUILTIN(ArrayConcat) {
  Heap* heap = isolate->heap();
  Context* native_context = isolate->context()->native_context();
  JSObject* array_proto =
      JSObject::cast(native_context->array_function()->prototype());
  if (!ArrayPrototypeHasNoElements(heap, native_context, array_proto)) {
    return CallJsBuiltin(isolate, "ArrayConcat", args);
  }

  // Iterate through all the arguments performing checks and calculating
  // the total length.
  int n_arguments = args.length();
  int result_len = 0;
  ElementsKind elements_kind = GetInitialFastElementsKind();
  for (int i = 0; i < n_arguments; i++) {
    Object* arg = args[i];
    if (!arg->IsJSArray() ||
        !JSArray::cast(arg)->HasFastSmiOrObjectElements() ||
        JSArray::cast(arg)->GetPrototype() != array_proto) {
      return CallJsBuiltin(isolate, "ArrayConcat", args);
    }
    int len = Smi::cast(JSArray::cast(arg)->length())->value();
    result_len += len;
    if (result_len > FixedDoubleArray::kMaxLength) {
      return CallJsBuiltin(isolate, "ArrayConcat", args);
    }
    if (!JSArray::cast(arg)->HasFastSmiElements()) {
      elements_kind = FAST_HOLEY_ELEMENTS;
    }
  }

  // Allocate the result.
  JSArray* result_array;
  MaybeObject* maybe_array =
      heap->AllocateJSArrayAndStorage(elements_kind,
                                      result_len,
                                      result_len);
  if (!maybe_array->To(&result_array)) return maybe_array;
  if (result_len == 0) return result_array;

  // Copy the data.
  int start_pos = 0;
  FixedArray* result_elms(FixedArray::cast(result_array->elements()));
  for (int i = 0; i < n_arguments; i++) {
    JSArray* array = JSArray::cast(args[i]);
    int len = Smi::cast(array->length())->value();
    FixedArray* elms = FixedArray::cast(array->elements());
    CopyObjectToObjectElements(elms, elements_kind, 0,
                               result_elms, elements_kind,
                               start_pos, len);
    start_pos += len;
  }
  ASSERT(start_pos == result_len);

  return result_array;
}
BUILTIN(StrictModePoisonPill) {
  HandleScope scope;
  return isolate->Throw(*isolate->factory()->NewTypeError(
      "strict_poison_pill", HandleVector<Object>(NULL, 0)));
}
// Returns the holder JSObject if the function can legally be called with this
// receiver, or the heap's null value if the call is illegal. Arguments that
// do not fit the expected signature type are overwritten with undefined.
static inline Object* TypeCheck(Heap* heap,
                                int argc,
                                Object** argv,
                                FunctionTemplateInfo* info) {
  Object* recv = argv[0];
  // API calls are only supported with JSObject receivers.
  if (!recv->IsJSObject()) return heap->null_value();
  Object* sig_obj = info->signature();
  if (sig_obj->IsUndefined()) return recv;
  SignatureInfo* sig = SignatureInfo::cast(sig_obj);
  // If necessary, check the receiver.
  Object* recv_type = sig->receiver();

  Object* holder = recv;
  if (!recv_type->IsUndefined()) {
    for (; holder != heap->null_value(); holder = holder->GetPrototype()) {
      if (holder->IsInstanceOf(FunctionTemplateInfo::cast(recv_type))) {
        break;
      }
    }
    if (holder == heap->null_value()) return holder;
  }
  Object* args_obj = sig->args();
  // If there is no argument signature we're done.
  if (args_obj->IsUndefined()) return holder;
  FixedArray* args = FixedArray::cast(args_obj);
  int length = args->length();
  if (argc <= length) length = argc - 1;
  for (int i = 0; i < length; i++) {
    Object* argtype = args->get(i);
    if (argtype->IsUndefined()) continue;
    Object** arg = &argv[-1 - i];
    Object* current = *arg;
    for (; current != heap->null_value(); current = current->GetPrototype()) {
      if (current->IsInstanceOf(FunctionTemplateInfo::cast(argtype))) {
        *arg = current;
        break;
      }
    }
    if (current == heap->null_value()) *arg = heap->undefined_value();
  }
  return holder;
}
template <bool is_construct>
MUST_USE_RESULT static MaybeObject* HandleApiCallHelper(
    BuiltinArguments<NEEDS_CALLED_FUNCTION> args, Isolate* isolate) {
  ASSERT(is_construct == CalledAsConstructor(isolate));
  Heap* heap = isolate->heap();

  HandleScope scope(isolate);
  Handle<JSFunction> function = args.called_function();
  ASSERT(function->shared()->IsApiFunction());

  FunctionTemplateInfo* fun_data = function->shared()->get_api_func_data();
  if (is_construct) {
    Handle<FunctionTemplateInfo> desc(fun_data, isolate);
    bool pending_exception = false;
    isolate->factory()->ConfigureInstance(
        desc, Handle<JSObject>::cast(args.receiver()), &pending_exception);
    ASSERT(isolate->has_pending_exception() == pending_exception);
    if (pending_exception) return Failure::Exception();
    fun_data = *desc;
  }

  Object* raw_holder = TypeCheck(heap, args.length(), &args[0], fun_data);

  if (raw_holder->IsNull()) {
    // This function cannot be called with the given receiver.
    Handle<Object> obj =
        isolate->factory()->NewTypeError(
            "illegal_invocation", HandleVector(&function, 1));
    return isolate->Throw(*obj);
  }

  Object* raw_call_data = fun_data->call_code();
  if (!raw_call_data->IsUndefined()) {
    CallHandlerInfo* call_data = CallHandlerInfo::cast(raw_call_data);
    Object* callback_obj = call_data->callback();
    v8::InvocationCallback callback =
        v8::ToCData<v8::InvocationCallback>(callback_obj);
    Object* data_obj = call_data->data();
    Object* result;

    LOG(isolate, ApiObjectAccess("call", JSObject::cast(*args.receiver())));
    ASSERT(raw_holder->IsJSObject());

    CustomArguments custom(isolate);
    v8::ImplementationUtilities::PrepareArgumentsData(custom.end(),
        isolate, data_obj, *function, raw_holder);

    v8::Arguments new_args = v8::ImplementationUtilities::NewArguments(
        custom.end(),
        &args[0] - 1,
        args.length() - 1,
        is_construct);

    v8::Handle<v8::Value> value;
    {
      // Leaving JavaScript.
      VMState state(isolate, EXTERNAL);
      ExternalCallbackScope call_scope(isolate,
                                       v8::ToCData<Address>(callback_obj));
      value = callback(new_args);
    }
    if (value.IsEmpty()) {
      result = heap->undefined_value();
    } else {
      result = *reinterpret_cast<Object**>(*value);
      result->VerifyApiCallResultType();
    }

    RETURN_IF_SCHEDULED_EXCEPTION(isolate);
    if (!is_construct || result->IsJSObject()) return result;
  }

  return *args.receiver();
}


BUILTIN(HandleApiCall) {
  return HandleApiCallHelper<false>(args, isolate);
}


BUILTIN(HandleApiCallConstruct) {
  return HandleApiCallHelper<true>(args, isolate);
}
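// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original V8 source):
// HandleApiCallHelper is parameterized on a compile-time bool so the
// construct-call and plain-call paths share one body while the
// `if (is_construct)` branches fold away at compile time. The same pattern in
// standalone form, with hypothetical names:
//
//   template <bool kIsConstruct>
//   static int InvokeHelper(int value) {
//     if (kIsConstruct) {
//       return value + 1;  // construct-only work, dead code when false
//     }
//     return value;
//   }
//
//   static int InvokeAsCall(int v)      { return InvokeHelper<false>(v); }
//   static int InvokeAsConstruct(int v) { return InvokeHelper<true>(v); }
// ---------------------------------------------------------------------------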
// Helper function to handle calls to non-function objects created through the
// API. The object can be called either as a constructor (using new) or just
// as a function (without new).
MUST_USE_RESULT static MaybeObject* HandleApiCallAsFunctionOrConstructor(
    Isolate* isolate,
    bool is_construct_call,
    BuiltinArguments<NO_EXTRA_ARGUMENTS> args) {
  // Non-functions are never called as constructors; even if this object is
  // called as a constructor, the delegate call is not a construct call.
  ASSERT(!CalledAsConstructor(isolate));
  Heap* heap = isolate->heap();

  Handle<Object> receiver = args.receiver();

  // Get the object called.
  JSObject* obj = JSObject::cast(*receiver);

  // Get the invocation callback from the function descriptor that was used
  // to create the called object.
  ASSERT(obj->map()->has_instance_call_handler());
  JSFunction* constructor = JSFunction::cast(obj->map()->constructor());
  ASSERT(constructor->shared()->IsApiFunction());
  Object* handler =
      constructor->shared()->get_api_func_data()->instance_call_handler();
  ASSERT(!handler->IsUndefined());
  CallHandlerInfo* call_data = CallHandlerInfo::cast(handler);
  Object* callback_obj = call_data->callback();
  v8::InvocationCallback callback =
      v8::ToCData<v8::InvocationCallback>(callback_obj);

  // Get the data for the call and perform the callback.
  Object* result;
  {
    HandleScope scope(isolate);
    LOG(isolate, ApiObjectAccess("call non-function", obj));

    CustomArguments custom(isolate);
    v8::ImplementationUtilities::PrepareArgumentsData(custom.end(),
        isolate, call_data->data(), constructor, obj);
    v8::Arguments new_args = v8::ImplementationUtilities::NewArguments(
        custom.end(),
        &args[0] - 1,
        args.length() - 1,
        is_construct_call);
    v8::Handle<v8::Value> value;
    {
      // Leaving JavaScript.
      VMState state(isolate, EXTERNAL);
      ExternalCallbackScope call_scope(isolate,
                                       v8::ToCData<Address>(callback_obj));
      value = callback(new_args);
    }
    if (value.IsEmpty()) {
      result = heap->undefined_value();
    } else {
      result = *reinterpret_cast<Object**>(*value);
      result->VerifyApiCallResultType();
    }
  }
  // Check for exceptions and return the result.
  RETURN_IF_SCHEDULED_EXCEPTION(isolate);
  return result;
}


BUILTIN(HandleApiCallAsFunction) {
  return HandleApiCallAsFunctionOrConstructor(isolate, false, args);
}


BUILTIN(HandleApiCallAsConstructor) {
  return HandleApiCallAsFunctionOrConstructor(isolate, true, args);
}
static void Generate_LoadIC_ArrayLength(MacroAssembler* masm) { LoadIC::GenerateArrayLength(masm); }
static void Generate_LoadIC_StringLength(MacroAssembler* masm) { LoadIC::GenerateStringLength(masm, false); }
static void Generate_LoadIC_StringWrapperLength(MacroAssembler* masm) { LoadIC::GenerateStringLength(masm, true); }
static void Generate_LoadIC_FunctionPrototype(MacroAssembler* masm) { LoadIC::GenerateFunctionPrototype(masm); }
static void Generate_LoadIC_Initialize(MacroAssembler* masm) { LoadIC::GenerateInitialize(masm); }
static void Generate_LoadIC_PreMonomorphic(MacroAssembler* masm) { LoadIC::GeneratePreMonomorphic(masm); }
static void Generate_LoadIC_Miss(MacroAssembler* masm) { LoadIC::GenerateMiss(masm); }
static void Generate_LoadIC_Megamorphic(MacroAssembler* masm) { LoadIC::GenerateMegamorphic(masm); }
static void Generate_LoadIC_Normal(MacroAssembler* masm) { LoadIC::GenerateNormal(masm); }
static void Generate_LoadIC_Getter_ForDeopt(MacroAssembler* masm) { LoadStubCompiler::GenerateLoadViaGetter(masm, Handle<JSFunction>()); }

static void Generate_KeyedLoadIC_Initialize(MacroAssembler* masm) { KeyedLoadIC::GenerateInitialize(masm); }
static void Generate_KeyedLoadIC_Slow(MacroAssembler* masm) { KeyedLoadIC::GenerateRuntimeGetProperty(masm); }
static void Generate_KeyedLoadIC_Miss(MacroAssembler* masm) { KeyedLoadIC::GenerateMiss(masm, false); }
static void Generate_KeyedLoadIC_MissForceGeneric(MacroAssembler* masm) { KeyedLoadIC::GenerateMiss(masm, true); }
static void Generate_KeyedLoadIC_Generic(MacroAssembler* masm) { KeyedLoadIC::GenerateGeneric(masm); }
static void Generate_KeyedLoadIC_String(MacroAssembler* masm) { KeyedLoadIC::GenerateString(masm); }
static void Generate_KeyedLoadIC_PreMonomorphic(MacroAssembler* masm) { KeyedLoadIC::GeneratePreMonomorphic(masm); }
static void Generate_KeyedLoadIC_IndexedInterceptor(MacroAssembler* masm) { KeyedLoadIC::GenerateIndexedInterceptor(masm); }
static void Generate_KeyedLoadIC_NonStrictArguments(MacroAssembler* masm) { KeyedLoadIC::GenerateNonStrictArguments(masm); }

static void Generate_StoreIC_Initialize(MacroAssembler* masm) { StoreIC::GenerateInitialize(masm); }
static void Generate_StoreIC_Initialize_Strict(MacroAssembler* masm) { StoreIC::GenerateInitialize(masm); }
static void Generate_StoreIC_Miss(MacroAssembler* masm) { StoreIC::GenerateMiss(masm); }
static void Generate_StoreIC_Normal(MacroAssembler* masm) { StoreIC::GenerateNormal(masm); }
static void Generate_StoreIC_Normal_Strict(MacroAssembler* masm) { StoreIC::GenerateNormal(masm); }
static void Generate_StoreIC_Megamorphic(MacroAssembler* masm) { StoreIC::GenerateMegamorphic(masm, kNonStrictMode); }
static void Generate_StoreIC_Megamorphic_Strict(MacroAssembler* masm) { StoreIC::GenerateMegamorphic(masm, kStrictMode); }
static void Generate_StoreIC_ArrayLength(MacroAssembler* masm) { StoreIC::GenerateArrayLength(masm); }
static void Generate_StoreIC_ArrayLength_Strict(MacroAssembler* masm) { StoreIC::GenerateArrayLength(masm); }
static void Generate_StoreIC_GlobalProxy(MacroAssembler* masm) { StoreIC::GenerateGlobalProxy(masm, kNonStrictMode); }
static void Generate_StoreIC_GlobalProxy_Strict(MacroAssembler* masm) { StoreIC::GenerateGlobalProxy(masm, kStrictMode); }
static void Generate_StoreIC_Setter_ForDeopt(MacroAssembler* masm) { StoreStubCompiler::GenerateStoreViaSetter(masm, Handle<JSFunction>()); }

static void Generate_KeyedStoreIC_Generic(MacroAssembler* masm) { KeyedStoreIC::GenerateGeneric(masm, kNonStrictMode); }
static void Generate_KeyedStoreIC_Generic_Strict(MacroAssembler* masm) { KeyedStoreIC::GenerateGeneric(masm, kStrictMode); }
static void Generate_KeyedStoreIC_Miss(MacroAssembler* masm) { KeyedStoreIC::GenerateMiss(masm, false); }
static void Generate_KeyedStoreIC_MissForceGeneric(MacroAssembler* masm) { KeyedStoreIC::GenerateMiss(masm, true); }
static void Generate_KeyedStoreIC_Slow(MacroAssembler* masm) { KeyedStoreIC::GenerateSlow(masm); }
static void Generate_KeyedStoreIC_Initialize(MacroAssembler* masm) { KeyedStoreIC::GenerateInitialize(masm); }
static void Generate_KeyedStoreIC_Initialize_Strict(MacroAssembler* masm) { KeyedStoreIC::GenerateInitialize(masm); }
static void Generate_KeyedStoreIC_NonStrictArguments(MacroAssembler* masm) { KeyedStoreIC::GenerateNonStrictArguments(masm); }
static void Generate_TransitionElementsSmiToDouble(MacroAssembler* masm) { KeyedStoreIC::GenerateTransitionElementsSmiToDouble(masm); }
static void Generate_TransitionElementsDoubleToObject(MacroAssembler* masm) { KeyedStoreIC::GenerateTransitionElementsDoubleToObject(masm); }
#ifdef ENABLE_DEBUGGER_SUPPORT
static void Generate_LoadIC_DebugBreak(MacroAssembler* masm) {
  Debug::GenerateLoadICDebugBreak(masm);
}

static void Generate_StoreIC_DebugBreak(MacroAssembler* masm) {
  Debug::GenerateStoreICDebugBreak(masm);
}

static void Generate_KeyedLoadIC_DebugBreak(MacroAssembler* masm) {
  Debug::GenerateKeyedLoadICDebugBreak(masm);
}

static void Generate_KeyedStoreIC_DebugBreak(MacroAssembler* masm) {
  Debug::GenerateKeyedStoreICDebugBreak(masm);
}

static void Generate_Return_DebugBreak(MacroAssembler* masm) {
  Debug::GenerateReturnDebugBreak(masm);
}

static void Generate_CallFunctionStub_DebugBreak(MacroAssembler* masm) {
  Debug::GenerateCallFunctionStubDebugBreak(masm);
}

static void Generate_CallFunctionStub_Recording_DebugBreak(
    MacroAssembler* masm) {
  Debug::GenerateCallFunctionStubRecordDebugBreak(masm);
}

static void Generate_CallConstructStub_DebugBreak(MacroAssembler* masm) {
  Debug::GenerateCallConstructStubDebugBreak(masm);
}

static void Generate_CallConstructStub_Recording_DebugBreak(
    MacroAssembler* masm) {
  Debug::GenerateCallConstructStubRecordDebugBreak(masm);
}

static void Generate_Slot_DebugBreak(MacroAssembler* masm) {
  Debug::GenerateSlotDebugBreak(masm);
}

static void Generate_PlainReturn_LiveEdit(MacroAssembler* masm) {
  Debug::GeneratePlainReturnLiveEdit(masm);
}

static void Generate_FrameDropper_LiveEdit(MacroAssembler* masm) {
  Debug::GenerateFrameDropperLiveEdit(masm);
}
#endif  // ENABLE_DEBUGGER_SUPPORT
Builtins::Builtins() : initialized_(false) {
  memset(builtins_, 0, sizeof(builtins_[0]) * builtin_count);
  memset(names_, 0, sizeof(names_[0]) * builtin_count);
}


Builtins::~Builtins() {
}


#define DEF_ENUM_C(name, ignore) FUNCTION_ADDR(Builtin_##name),
Address const Builtins::c_functions_[cfunction_count] = {
  BUILTIN_LIST_C(DEF_ENUM_C)
};
#undef DEF_ENUM_C

#define DEF_JS_NAME(name, ignore) #name,
#define DEF_JS_ARGC(ignore, argc) argc,
const char* const Builtins::javascript_names_[id_count] = {
  BUILTINS_LIST_JS(DEF_JS_NAME)
};
int const Builtins::javascript_argc_[id_count] = {
  BUILTINS_LIST_JS(DEF_JS_ARGC)
};
#undef DEF_JS_NAME
#undef DEF_JS_ARGC

#define BUILTIN_FUNCTION_TABLE_INIT { V8_ONCE_INIT, {} }

class BuiltinFunctionTable {
 public:
  BuiltinDesc* functions() {
    CallOnce(&once_, &Builtins::InitBuiltinFunctionTable);
    return functions_;
  }

  OnceType once_;
  BuiltinDesc functions_[Builtins::builtin_count + 1];

  friend class Builtins;
};

static BuiltinFunctionTable builtin_function_table =
    BUILTIN_FUNCTION_TABLE_INIT;
void Builtins::InitBuiltinFunctionTable() {
  BuiltinDesc* functions = builtin_function_table.functions_;
  functions[builtin_count].generator = NULL;
  functions[builtin_count].c_code = NULL;
  functions[builtin_count].s_name = NULL;
  functions[builtin_count].name = builtin_count;
  functions[builtin_count].flags = static_cast<Code::Flags>(0);
  functions[builtin_count].extra_args = NO_EXTRA_ARGUMENTS;

#define DEF_FUNCTION_PTR_C(aname, aextra_args)                  \
    functions->generator = FUNCTION_ADDR(Generate_Adaptor);     \
    functions->c_code = FUNCTION_ADDR(Builtin_##aname);         \
    functions->s_name = #aname;                                 \
    functions->name = c_##aname;                                \
    functions->flags = Code::ComputeFlags(Code::BUILTIN);       \
    functions->extra_args = aextra_args;                        \
    ++functions;

#define DEF_FUNCTION_PTR_A(aname, kind, state, extra)           \
    functions->generator = FUNCTION_ADDR(Generate_##aname);     \
    functions->c_code = NULL;                                   \
    functions->s_name = #aname;                                 \
    functions->name = k##aname;                                 \
    functions->flags = Code::ComputeFlags(Code::kind,           \
                                          state,                \
                                          extra);               \
    functions->extra_args = NO_EXTRA_ARGUMENTS;                 \
    ++functions;

  BUILTIN_LIST_C(DEF_FUNCTION_PTR_C)
  BUILTIN_LIST_A(DEF_FUNCTION_PTR_A)
  BUILTIN_LIST_DEBUG_A(DEF_FUNCTION_PTR_A)

#undef DEF_FUNCTION_PTR_C
#undef DEF_FUNCTION_PTR_A
}
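// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original V8 source):
// InitBuiltinFunctionTable is an "X macro" pattern -- one central list macro
// (BUILTIN_LIST_C / BUILTIN_LIST_A) is expanded against different per-entry
// macros to fill the descriptor table, the enum, and the name arrays from a
// single source of truth. A minimal standalone analog with a hypothetical
// list:
//
//   #define MY_BUILTIN_LIST(V) \
//     V(Foo)                   \
//     V(Bar)
//
//   #define DEF_ENUM(name) k##name,
//   enum MyBuiltinId { MY_BUILTIN_LIST(DEF_ENUM) kCount };
//   #undef DEF_ENUM
//
//   #define DEF_NAME(name) #name,
//   static const char* const kMyBuiltinNames[] = { MY_BUILTIN_LIST(DEF_NAME) };
//   #undef DEF_NAME
//
// Adding an entry to MY_BUILTIN_LIST updates every table consistently.
// ---------------------------------------------------------------------------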
void Builtins::SetUp(bool create_heap_objects) {
  ASSERT(!initialized_);
  Isolate* isolate = Isolate::Current();
  Heap* heap = isolate->heap();

  // Create a scope for the handles in the builtins.
  HandleScope scope(isolate);

  const BuiltinDesc* functions = builtin_function_table.functions();

  // For now we generate builtin adaptor code into a stack-allocated buffer
  // before copying it into individual code objects. Be careful with
  // alignment; some platforms don't like unaligned code.
  union { int force_alignment; byte buffer[8*KB]; } u;

  // Traverse the list of builtins and generate an adaptor in a separate code
  // object for each one.
  for (int i = 0; i < builtin_count; i++) {
    if (create_heap_objects) {
      MacroAssembler masm(isolate, u.buffer, sizeof u.buffer);
      // Generate the code/adaptor.
      typedef void (*Generator)(MacroAssembler*, int, BuiltinExtraArguments);
      Generator g = FUNCTION_CAST<Generator>(functions[i].generator);
      // We pass all arguments to the generator, but it may not use all of
      // them; the first arguments are on top of the stack.
      g(&masm, functions[i].name, functions[i].extra_args);
      // Move the code into the object heap.
      CodeDesc desc;
      masm.GetCode(&desc);
      Code::Flags flags = functions[i].flags;
      Object* code = NULL;
      {
        // During startup it is OK to always allocate and defer GC to later.
        AlwaysAllocateScope __scope__;
        { MaybeObject* maybe_code =
              heap->CreateCode(desc, flags, masm.CodeObject());
          if (!maybe_code->ToObject(&code)) {
            v8::internal::V8::FatalProcessOutOfMemory("CreateCode");
          }
        }
      }
      // Log the event and add the code to the builtins array.
      PROFILE(isolate,
              CodeCreateEvent(Logger::BUILTIN_TAG,
                              Code::cast(code),
                              functions[i].s_name));
      GDBJIT(AddCode(GDBJITInterface::BUILTIN,
                     functions[i].s_name,
                     Code::cast(code)));
      builtins_[i] = code;
#ifdef ENABLE_DISASSEMBLER
      if (FLAG_print_builtin_code) {
        PrintF("Builtin: %s\n", functions[i].s_name);
        Code::cast(code)->Disassemble(functions[i].s_name);
        PrintF("\n");
      }
#endif
    } else {
      // Deserializing; the values will be filled in during IterateBuiltins.
      builtins_[i] = NULL;
    }
    names_[i] = functions[i].s_name;
  }

  // Mark as initialized.
  initialized_ = true;
}
void Builtins::TearDown() {
  initialized_ = false;
}


void Builtins::IterateBuiltins(ObjectVisitor* v) {
  v->VisitPointers(&builtins_[0], &builtins_[0] + builtin_count);
}


const char* Builtins::Lookup(byte* pc) {
  // May be called during initialization (disassembler).
  if (initialized_) {
    for (int i = 0; i < builtin_count; i++) {
      Code* entry = Code::cast(builtins_[i]);
      if (entry->contains(pc)) return names_[i];
    }
  }
  return NULL;
}
#define DEFINE_BUILTIN_ACCESSOR_C(name, ignore)               \
Handle<Code> Builtins::name() {                               \
  Code** code_address =                                       \
      reinterpret_cast<Code**>(builtin_address(k##name));     \
  return Handle<Code>(code_address);                          \
}
#define DEFINE_BUILTIN_ACCESSOR_A(name, kind, state, extra)   \
Handle<Code> Builtins::name() {                               \
  Code** code_address =                                       \
      reinterpret_cast<Code**>(builtin_address(k##name));     \
  return Handle<Code>(code_address);                          \
}
BUILTIN_LIST_C(DEFINE_BUILTIN_ACCESSOR_C)
BUILTIN_LIST_A(DEFINE_BUILTIN_ACCESSOR_A)
BUILTIN_LIST_DEBUG_A(DEFINE_BUILTIN_ACCESSOR_A)
#undef DEFINE_BUILTIN_ACCESSOR_C
#undef DEFINE_BUILTIN_ACCESSOR_A