31 #include <sys/types.h>
44 using namespace v8::internal;
// Presumably creates and enters a V8 context so the tests below can
// allocate on the heap — the body is not visible in this excerpt;
// confirm against the full file. (The leading "48" is extraction
// residue, as throughout this chunk.)
48 static void InitializeVM() {
// NOTE(review): garbled excerpt — stray leading numbers ("83", "87", ...)
// are extraction residue and many original lines are missing. Code is
// kept byte-identical; comments only.
// Skip this test when compaction is disabled on the command line.
83 if (FLAG_never_compact)
return;
// Force global GCs and compaction on every collection for this test.
87 FLAG_gc_global =
true;
88 FLAG_always_compact =
true;
// Allocate a fixed array; ToObjectChecked() asserts the allocation
// succeeded (i.e. was not a retry-after-GC failure).
99 Object* obj =
HEAP->AllocateFixedArray(array_size)->ToObjectChecked();
// Heap allocation whose continuation line is missing from this excerpt —
// presumably ends in ->ToObjectChecked(); confirm against the full file.
131 Object* obj = i::Isolate::Current()->heap()->AllocateFixedArray(length)->
// Allocate tenured (old-space) fixed arrays until allocation fails;
// a failed MaybeObject makes ToObject() return false, ending the loop.
143 { MaybeObject* maybe_obj =
HEAP->AllocateFixedArray(100,
TENURED);
144 if (!maybe_obj->ToObject(&obj))
break;
// Fill new space with fixed arrays: do-while loops until an allocation
// fails (ToObject() returns false), which triggers a GC.
166 MaybeObject* maybe_array;
168 maybe_array =
HEAP->AllocateFixedArray(ARRAY_SIZE);
169 }
while (maybe_array->ToObject(&array));
// After the GC caused by the failed allocation, this one must succeed.
172 array =
HEAP->AllocateFixedArray(ARRAY_SIZE)->ToObjectChecked();
// Same exhaust-then-collect pattern for maps.
176 MaybeObject* maybe_mapp;
179 }
while (maybe_mapp->ToObject(&mapp));
// Create a JSFunction (shared-function-info + function + initial map)
// and install it as a property on the global object. Several argument
// lines are missing from this excerpt.
188 HEAP->AllocateSharedFunctionInfo(func_name)->ToObjectChecked());
190 HEAP->AllocateFunction(*Isolate::Current()->function_map(),
192 HEAP->undefined_value())->ToObjectChecked());
196 function->set_initial_map(initial_map);
197 Isolate::Current()->context()->global_object()->SetProperty(
// Allocate a JSObject from the function as well.
201 HEAP->AllocateJSObject(
function)->ToObjectChecked());
// Check the function is still reachable through the global object
// (presumably after a GC — the collection call is not visible here).
206 CHECK(Isolate::Current()->context()->global_object()->
207 HasLocalProperty(func_name));
208 Object* func_value = Isolate::Current()->context()->global_object()->
210 CHECK(func_value->IsJSFunction());
216 Isolate::Current()->context()->global_object()->SetProperty(
// Check the object survived and is still a JSObject.
229 CHECK(Isolate::Current()->context()->global_object()->
230 HasLocalProperty(obj_name));
231 CHECK(Isolate::Current()->context()->global_object()->
232 GetProperty(obj_name)->ToObjectChecked()->IsJSObject());
233 obj =
JSObject::cast(Isolate::Current()->context()->global_object()->
// Keep map space small so map-pointer encodability can be exhausted.
249 FLAG_max_map_space_pages = 16;
// Build objects/maps (each new root's map chains to the previous one)
// until map pointers are no longer encodable.
259 map->set_prototype(*root);
260 root =
FACTORY->NewJSObjectFromMap(map);
261 }
while (
HEAP->map_space()->MapPointersEncodable());
// Force compaction; afterwards map pointers must be encodable again.
267 HEAP->CollectAllGarbage(Heap::kForceCompactionMask);
269 CHECK(
HEAP->map_space()->MapPointersEncodable());
// Counters used by the GC prologue/epilogue callbacks below to verify
// that the callbacks fire in properly nested start/end pairs.
273 static int gc_starts = 0;
274 static int gc_ends = 0;
// GC prologue: every previously started GC must already have ended.
// (The counter increment and closing brace are missing from this excerpt.)
276 static void GCPrologueCallbackFunc() {
277 CHECK(gc_starts == gc_ends);
// GC epilogue: exactly one GC (the current one) must have started but
// not yet ended. (Increment and closing brace not visible in this excerpt.)
282 static void GCEpilogueCallbackFunc() {
283 CHECK(gc_starts == gc_ends + 1);
// Register the paired callbacks, then trigger a scavenge to fire them.
291 HEAP->SetGlobalGCPrologueCallback(&GCPrologueCallbackFunc);
292 HEAP->SetGlobalGCEpilogueCallback(&GCEpilogueCallbackFunc);
295 HEAP->PerformScavenge();
// Counts invocations of the weak-handle callback below.
306 static int NumberOfWeakCalls = 0;
// Weak callback fragment: the parameter must be the sentinel 1234 that
// is passed to MakeWeak throughout this file. (The signature and the
// counter increment are missing from this excerpt.)
308 ASSERT(
id == reinterpret_cast<void*>(1234));
315 GlobalHandles* global_handles = Isolate::Current()->global_handles();
317 NumberOfWeakCalls = 0;
// First group: three global handles to freshly allocated fixed arrays.
321 global_handles->
Create(
HEAP->AllocateFixedArray(1)->ToObjectChecked());
323 global_handles->
Create(
HEAP->AllocateFixedArray(1)->ToObjectChecked());
325 global_handles->
Create(
HEAP->AllocateFixedArray(1)->ToObjectChecked());
// Make them weak with sentinel parameter 1234 and the shared callback.
// (The MakeWeak(...) call heads are missing from this excerpt — only
// the argument lines survive.)
327 reinterpret_cast<void*
>(1234),
328 &WeakPointerCallback);
330 reinterpret_cast<void*
>(1234),
331 &WeakPointerCallback);
333 reinterpret_cast<void*
>(1234),
334 &WeakPointerCallback);
// Second group of three handles, same create-then-weaken pattern.
337 global_handles->
Create(
HEAP->AllocateFixedArray(1)->ToObjectChecked());
339 global_handles->
Create(
HEAP->AllocateFixedArray(1)->ToObjectChecked());
341 global_handles->
Create(
HEAP->AllocateFixedArray(1)->ToObjectChecked());
343 reinterpret_cast<void*
>(1234),
344 &WeakPointerCallback);
346 reinterpret_cast<void*
>(1234),
347 &WeakPointerCallback);
349 reinterpret_cast<void*
>(1234),
350 &WeakPointerCallback);
// Later rounds presumably re-weaken surviving handles between GCs; the
// surrounding object-group setup and the CHECKs on NumberOfWeakCalls
// are missing from this excerpt — confirm against the full file.
378 reinterpret_cast<void*
>(1234),
379 &WeakPointerCallback);
406 reinterpret_cast<void*
>(1234),
407 &WeakPointerCallback);
409 reinterpret_cast<void*
>(1234),
410 &WeakPointerCallback);
// Fragments of a RetainedObjectInfo test helper. Dispose() may run at
// most once; the guard flag is checked then set.
424 ASSERT(!has_been_disposed_);
425 has_been_disposed_ =
true;
// Equivalence is pointer identity: only the same instance matches.
429 return other ==
this;
// Constant label for this retained-object info.
434 virtual const char*
GetLabel() {
return "whatever"; }
// True once Dispose() has been called; guards against double-disposal.
437 bool has_been_disposed_;
// Single global handle used to exercise an (empty) object group; the
// rest of the test is missing from this excerpt.
443 GlobalHandles* global_handles = Isolate::Current()->global_handles();
448 global_handles->
Create(
HEAP->AllocateFixedArray(1)->ToObjectChecked());
462 #if defined(__linux__) && !defined(USE_SIMULATOR)
// Parse an unsigned long from buffer at *position in the given base,
// advancing *position past the consumed digits. CHECKs that strtoul
// did not overflow and that at least one character was consumed.
// (The trailing "return result;" and closing brace are missing from
// this excerpt.)
465 static uintptr_t ReadLong(
char* buffer, intptr_t* position,
int base) {
466 char* end_address = buffer + *position;
467 uintptr_t result = strtoul(buffer + *position, &end_address, base);
// ULONG_MAX together with errno == ERANGE signals strtoul overflow.
468 CHECK(result != ULONG_MAX || errno != ERANGE);
469 CHECK(end_address > buffer + *position);
470 *position = end_address - buffer;
// Sum the sizes of accessible, private, anonymous (inode == 0) mappings
// by parsing /proc/self/maps. Returns -1 if the file cannot be opened.
// (Several lines, including the return and close(fd), are missing from
// this excerpt.)
475 static intptr_t MemoryInUse() {
476 intptr_t memory_use = 0;
478 int fd = open(
"/proc/self/maps", O_RDONLY);
479 if (fd < 0)
return -1;
// NOTE(review): a single read of up to 10000 bytes — presumably enough
// for the test's mapping count; confirm against the full file.
481 const int kBufSize = 10000;
482 char buffer[kBufSize];
483 int length = read(fd, buffer, kBufSize);
484 intptr_t line_start = 0;
// Parse one maps line per iteration:
//   start-end perms offset major:minor inode [path]
487 while (line_start < length) {
488 if (buffer[line_start] ==
'\n') {
492 intptr_t position = line_start;
493 uintptr_t start = ReadLong(buffer, &position, 16);
495 uintptr_t end = ReadLong(buffer, &position, 16);
// Each permission flag is either '-' or its letter, in r/w/x/p order.
497 CHECK(buffer[position] ==
'-' || buffer[position] ==
'r');
498 bool read_permission = (buffer[position++] ==
'r');
499 CHECK(buffer[position] ==
'-' || buffer[position] ==
'w');
500 bool write_permission = (buffer[position++] ==
'w');
501 CHECK(buffer[position] ==
'-' || buffer[position] ==
'x');
502 bool execute_permission = (buffer[position++] ==
'x');
503 CHECK(buffer[position] ==
'-' || buffer[position] ==
'p');
504 bool private_mapping = (buffer[position++] ==
'p');
506 uintptr_t offset = ReadLong(buffer, &position, 16);
509 uintptr_t major = ReadLong(buffer, &position, 16);
512 uintptr_t minor = ReadLong(buffer, &position, 16);
515 uintptr_t inode = ReadLong(buffer, &position, 10);
// Skip the remainder of the line (mapped file name, if any).
516 while (position < length && buffer[position] !=
'\n') position++;
// Count only accessible, private, anonymous mappings.
517 if ((read_permission || write_permission || execute_permission) &&
518 private_mapping && inode == 0) {
519 memory_use += (end - start);
522 line_start = position;
// Measures memory growth during V8 boot-up; only meaningful where
// MemoryInUse() works (initial_memory >= 0). The expected-delta CHECKs
// are missing from this excerpt.
529 TEST(BootUpMemoryUse) {
530 intptr_t initial_memory = MemoryInUse();
// Disable optimizing-compiler paths to keep the measurement stable.
532 FLAG_crankshaft =
false;
533 FLAG_parallel_recompilation =
false;
537 if (initial_memory >= 0) {
539 intptr_t delta = MemoryInUse() - initial_memory;
// Different memory limits for 64-bit vs 32-bit builds, distinguished
// by sizeof(intptr_t).
540 if (
sizeof(initial_memory) == 8) {
556 #endif // __linux__ and !USE_SIMULATOR
static Handle< Object > SetProperty(Handle< JSReceiver > object, Handle< String > key, Handle< Object > value, PropertyAttributes attributes, StrictModeFlag strict_mode)
#define CHECK_EQ(expected, value)
static String * cast(Object *obj)
static Smi * FromInt(int value)
static Handle< T > cast(Handle< S > that)
void AddImplicitReferences(HeapObject **parent, Object ***children, size_t length)
static Map * cast(Object *obj)
#define ASSERT(condition)
void ClearWeakness(Object **location)
static SharedFunctionInfo * cast(Object *obj)
Handle< Object > GetProperty(Handle< JSReceiver > obj, const char *name)
void AddObjectGroup(Object ***handles, size_t length, v8::RetainedObjectInfo *info)
virtual bool IsEquivalent(v8::RetainedObjectInfo *other)
void PushBlack(HeapObject *object)
Handle< Object > Create(Object *value)
void Initialize(Address low, Address high)
static const int kMaxNonCodeHeapObjectSize
activate correct semantics for inheriting readonliness false
virtual intptr_t GetHash()
static const int kHeaderSize
virtual const char * GetLabel()
MUST_USE_RESULT MaybeObject * GetProperty(String *key)
static HeapObject * FromAddress(Address address)
static FixedArray * cast(Object *obj)
static const int kHeaderSize
activate correct semantics for inheriting readonliness enable harmony semantics for typeof enable harmony enable harmony proxies enable all harmony harmony_scoping harmony_proxies harmony_scoping tracks arrays with only smi values automatically unbox arrays of doubles use crankshaft use hydrogen range analysis use hydrogen global value numbering use function inlining maximum number of AST nodes considered for a single inlining loop invariant code motion print statistics for hydrogen trace generated IR for specified phases trace register allocator trace range analysis trace representation types environment for every instruction put a break point before deoptimizing polymorphic inlining perform array bounds checks elimination use dead code elimination trace on stack replacement optimize closures cache optimized code for closures functions with arguments object loop weight for representation inference allow uint32 values on optimize frames if they are used only in safe operations track parallel recompilation enable all profiler experiments number of stack frames inspected by the profiler call recompile stub directly when self optimizing trigger profiler ticks based on counting instead of timing weight back edges by jump distance for interrupt triggering percentage of ICs that must have type info to allow optimization watch_ic_patching retry_self_opt interrupt_at_exit extra verbose compilation tracing generate extra emit comments in code disassembly enable use of SSE3 instructions if available enable use of CMOV instruction if available enable use of SAHF instruction if enable use of VFP3 instructions if available this implies enabling ARMv7 and VFP2 enable use of VFP2 instructions if available enable use of SDIV and UDIV instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of MIPS FPU instructions if NULL
static Persistent< Context > New(ExtensionConfiguration *extensions=NULL, Handle< ObjectTemplate > global_template=Handle< ObjectTemplate >(), Handle< Value > global_object=Handle< Value >())
void MakeWeak(Object **location, void *parameter, WeakReferenceCallback callback)
void DeleteArray(T *array)
static JSObject * cast(Object *obj)
static JSFunction * cast(Object *obj)