v8 3.14.5 (node 0.10.28)
V8 is Google's open source JavaScript engine.
test-mark-compact.cc
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include <stdlib.h>

#ifdef __linux__
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <unistd.h>
#include <errno.h>
#endif

#include "v8.h"

#include "global-handles.h"
#include "snapshot.h"
#include "cctest.h"

using namespace v8::internal;

static v8::Persistent<v8::Context> env;

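// Creates the shared test context on first use and enters it.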
static void InitializeVM() {
  if (env.IsEmpty()) env = v8::Context::New();
  v8::HandleScope scope;
  env->Enter();
}


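// Fills the marking deque to capacity with fake object addresses, then pops
// them all and checks they come back in LIFO order.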
TEST(MarkingDeque) {
  int mem_size = 20 * kPointerSize;
  byte* mem = NewArray<byte>(20*kPointerSize);
  Address low = reinterpret_cast<Address>(mem);
  Address high = low + mem_size;
  MarkingDeque s;
  s.Initialize(low, high);

  Address address = NULL;
  while (!s.IsFull()) {
    s.PushBlack(HeapObject::FromAddress(address));
    address += kPointerSize;
  }

  while (!s.IsEmpty()) {
    Address value = s.Pop()->address();
    address -= kPointerSize;
    CHECK_EQ(address, value);
  }

  CHECK_EQ(NULL, address);
  DeleteArray(mem);
}


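// Checks that a FixedArray allocated in new space ends up in old pointer
// space after a compacting full GC.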
TEST(Promotion) {
  // This test requires compaction. If compaction is turned off, we
  // skip the entire test.
  if (FLAG_never_compact) return;

  // Ensure that we get a compacting collection so that objects are promoted
  // from new space.
  FLAG_gc_global = true;
  FLAG_always_compact = true;
  HEAP->ConfigureHeap(2*256*KB, 8*MB, 8*MB);

  InitializeVM();

  v8::HandleScope sc;

  // Allocate a fixed array in the new space.
  int array_size =
      (Page::kMaxNonCodeHeapObjectSize - FixedArray::kHeaderSize) /
      (kPointerSize * 4);
  Object* obj = HEAP->AllocateFixedArray(array_size)->ToObjectChecked();

  Handle<FixedArray> array(FixedArray::cast(obj));

  // Array should be in the new space.
  CHECK(HEAP->InSpace(*array, NEW_SPACE));

  // Call the m-c collector, so array becomes an old object.
  HEAP->CollectGarbage(OLD_POINTER_SPACE);

  // Array now sits in the old space.
  CHECK(HEAP->InSpace(*array, OLD_POINTER_SPACE));
}


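// Checks that a full mark-compact GC still succeeds when old space has been
// filled up and the big new-space array cannot be promoted.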
TEST(NoPromotion) {
  HEAP->ConfigureHeap(2*256*KB, 8*MB, 8*MB);

  // Test the situation that some objects in new space are promoted to
  // the old space.
  InitializeVM();

  v8::HandleScope sc;

  // Do a mark compact GC to shrink the heap.
  HEAP->CollectGarbage(OLD_POINTER_SPACE);

  // Allocate a big fixed array in the new space.
  int max_size =
      Min(Page::kMaxNonCodeHeapObjectSize, HEAP->MaxObjectSizeInNewSpace());

  int length = (max_size - FixedArray::kHeaderSize) / (2*kPointerSize);
  Object* obj = i::Isolate::Current()->heap()->AllocateFixedArray(length)->
      ToObjectChecked();

  Handle<FixedArray> array(FixedArray::cast(obj));

  // Array still stays in the new space.
  CHECK(HEAP->InSpace(*array, NEW_SPACE));

  // Allocate objects in the old space until out of memory.
  FixedArray* host = *array;
  while (true) {
    Object* obj;
    { MaybeObject* maybe_obj = HEAP->AllocateFixedArray(100, TENURED);
      if (!maybe_obj->ToObject(&obj)) break;
    }

    host->set(0, obj);
    host = FixedArray::cast(obj);
  }

  // Call mark compact GC, and it should pass.
  HEAP->CollectGarbage(OLD_POINTER_SPACE);
}


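// Exercises the collector end to end: allocates until new space, map space,
// and old space overflow, then checks that a function and an object installed
// on the global object survive repeated full GCs.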
TEST(MarkCompactCollector) {
  InitializeVM();

  v8::HandleScope sc;
  // call mark-compact when heap is empty
  HEAP->CollectGarbage(OLD_POINTER_SPACE);

  // keep allocating garbage in new space until it fails
  const int ARRAY_SIZE = 100;
  Object* array;
  MaybeObject* maybe_array;
  do {
    maybe_array = HEAP->AllocateFixedArray(ARRAY_SIZE);
  } while (maybe_array->ToObject(&array));
  HEAP->CollectGarbage(NEW_SPACE);

  array = HEAP->AllocateFixedArray(ARRAY_SIZE)->ToObjectChecked();

  // keep allocating maps until it fails
  Object* mapp;
  MaybeObject* maybe_mapp;
  do {
    maybe_mapp = HEAP->AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
  } while (maybe_mapp->ToObject(&mapp));
  HEAP->CollectGarbage(MAP_SPACE);
  mapp = HEAP->AllocateMap(JS_OBJECT_TYPE,
                           JSObject::kHeaderSize)->ToObjectChecked();

  // allocate garbage
  String* func_name =
      String::cast(HEAP->LookupAsciiSymbol("theFunction")->ToObjectChecked());
  SharedFunctionInfo* function_share = SharedFunctionInfo::cast(
      HEAP->AllocateSharedFunctionInfo(func_name)->ToObjectChecked());
  JSFunction* function = JSFunction::cast(
      HEAP->AllocateFunction(*Isolate::Current()->function_map(),
                             function_share,
                             HEAP->undefined_value())->ToObjectChecked());
  Map* initial_map =
      Map::cast(HEAP->AllocateMap(JS_OBJECT_TYPE,
                                  JSObject::kHeaderSize)->ToObjectChecked());
  function->set_initial_map(initial_map);
  Isolate::Current()->context()->global_object()->SetProperty(
      func_name, function, NONE, kNonStrictMode)->ToObjectChecked();

  JSObject* obj = JSObject::cast(
      HEAP->AllocateJSObject(function)->ToObjectChecked());
  HEAP->CollectGarbage(OLD_POINTER_SPACE);

  func_name =
      String::cast(HEAP->LookupAsciiSymbol("theFunction")->ToObjectChecked());
  CHECK(Isolate::Current()->context()->global_object()->
        HasLocalProperty(func_name));
  Object* func_value = Isolate::Current()->context()->global_object()->
      GetProperty(func_name)->ToObjectChecked();
  CHECK(func_value->IsJSFunction());
  function = JSFunction::cast(func_value);

  obj = JSObject::cast(HEAP->AllocateJSObject(function)->ToObjectChecked());
  String* obj_name =
      String::cast(HEAP->LookupAsciiSymbol("theObject")->ToObjectChecked());
  Isolate::Current()->context()->global_object()->SetProperty(
      obj_name, obj, NONE, kNonStrictMode)->ToObjectChecked();
  String* prop_name =
      String::cast(HEAP->LookupAsciiSymbol("theSlot")->ToObjectChecked());
  obj->SetProperty(prop_name,
                   Smi::FromInt(23),
                   NONE,
                   kNonStrictMode)->ToObjectChecked();

  HEAP->CollectGarbage(OLD_POINTER_SPACE);

  obj_name =
      String::cast(HEAP->LookupAsciiSymbol("theObject")->ToObjectChecked());
  CHECK(Isolate::Current()->context()->global_object()->
        HasLocalProperty(obj_name));
  CHECK(Isolate::Current()->context()->global_object()->
        GetProperty(obj_name)->ToObjectChecked()->IsJSObject());
  obj = JSObject::cast(Isolate::Current()->context()->global_object()->
      GetProperty(obj_name)->ToObjectChecked());
  prop_name =
      String::cast(HEAP->LookupAsciiSymbol("theSlot")->ToObjectChecked());
  CHECK(obj->GetProperty(prop_name) == Smi::FromInt(23));
}


// TODO(1600): compaction of map space is temporarily removed from GC.
#if 0
static Handle<Map> CreateMap() {
  return FACTORY->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
}


TEST(MapCompact) {
  FLAG_max_map_space_pages = 16;
  InitializeVM();

  {
    v8::HandleScope sc;
    // keep allocating maps while pointers are still encodable and thus
    // mark compact is permitted.
    Handle<JSObject> root = FACTORY->NewJSObjectFromMap(CreateMap());
    do {
      Handle<Map> map = CreateMap();
      map->set_prototype(*root);
      root = FACTORY->NewJSObjectFromMap(map);
    } while (HEAP->map_space()->MapPointersEncodable());
  }
  // Now, as we don't have any handles to just allocated maps, we should
  // be able to trigger map compaction.
  // To give an additional chance to fail, try to force compaction which
  // should be impossible right now.
  HEAP->CollectAllGarbage(Heap::kForceCompactionMask);
  // And now map pointers should be encodable again.
  CHECK(HEAP->map_space()->MapPointersEncodable());
}
#endif

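// Counters for the GC callback test below: the prologue and epilogue
// callbacks must alternate, starting with the prologue, once per full GC.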
static int gc_starts = 0;
static int gc_ends = 0;

static void GCPrologueCallbackFunc() {
  CHECK(gc_starts == gc_ends);
  gc_starts++;
}


static void GCEpilogueCallbackFunc() {
  CHECK(gc_starts == gc_ends + 1);
  gc_ends++;
}


TEST(GCCallback) {
  InitializeVM();

  HEAP->SetGlobalGCPrologueCallback(&GCPrologueCallbackFunc);
  HEAP->SetGlobalGCEpilogueCallback(&GCEpilogueCallbackFunc);

  // Scavenge does not call GC callback functions.
  HEAP->PerformScavenge();

  CHECK_EQ(0, gc_starts);
  CHECK_EQ(gc_ends, gc_starts);

  HEAP->CollectGarbage(OLD_POINTER_SPACE);
  CHECK_EQ(1, gc_starts);
  CHECK_EQ(gc_ends, gc_starts);
}


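// Weak-handle callback shared by the object group tests: verifies the
// callback parameter, counts the invocation, and disposes the handle.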
static int NumberOfWeakCalls = 0;
static void WeakPointerCallback(v8::Persistent<v8::Value> handle, void* id) {
  ASSERT(id == reinterpret_cast<void*>(1234));
  NumberOfWeakCalls++;
  handle.Dispose();
}

TEST(ObjectGroups) {
  InitializeVM();
  GlobalHandles* global_handles = Isolate::Current()->global_handles();

  NumberOfWeakCalls = 0;
  v8::HandleScope handle_scope;

  Handle<Object> g1s1 =
      global_handles->Create(HEAP->AllocateFixedArray(1)->ToObjectChecked());
  Handle<Object> g1s2 =
      global_handles->Create(HEAP->AllocateFixedArray(1)->ToObjectChecked());
  Handle<Object> g1c1 =
      global_handles->Create(HEAP->AllocateFixedArray(1)->ToObjectChecked());
  global_handles->MakeWeak(g1s1.location(),
                           reinterpret_cast<void*>(1234),
                           &WeakPointerCallback);
  global_handles->MakeWeak(g1s2.location(),
                           reinterpret_cast<void*>(1234),
                           &WeakPointerCallback);
  global_handles->MakeWeak(g1c1.location(),
                           reinterpret_cast<void*>(1234),
                           &WeakPointerCallback);

  Handle<Object> g2s1 =
      global_handles->Create(HEAP->AllocateFixedArray(1)->ToObjectChecked());
  Handle<Object> g2s2 =
      global_handles->Create(HEAP->AllocateFixedArray(1)->ToObjectChecked());
  Handle<Object> g2c1 =
      global_handles->Create(HEAP->AllocateFixedArray(1)->ToObjectChecked());
  global_handles->MakeWeak(g2s1.location(),
                           reinterpret_cast<void*>(1234),
                           &WeakPointerCallback);
  global_handles->MakeWeak(g2s2.location(),
                           reinterpret_cast<void*>(1234),
                           &WeakPointerCallback);
  global_handles->MakeWeak(g2c1.location(),
                           reinterpret_cast<void*>(1234),
                           &WeakPointerCallback);

  Handle<Object> root = global_handles->Create(*g1s1);  // make a root.

  // Connect group 1 and 2, make a cycle.
  Handle<FixedArray>::cast(g1s2)->set(0, *g2s2);
  Handle<FixedArray>::cast(g2s1)->set(0, *g1s1);

  {
    Object** g1_objects[] = { g1s1.location(), g1s2.location() };
    Object** g1_children[] = { g1c1.location() };
    Object** g2_objects[] = { g2s1.location(), g2s2.location() };
    Object** g2_children[] = { g2c1.location() };
    global_handles->AddObjectGroup(g1_objects, 2, NULL);
    global_handles->AddImplicitReferences(
        Handle<HeapObject>::cast(g1s1).location(), g1_children, 1);
    global_handles->AddObjectGroup(g2_objects, 2, NULL);
    global_handles->AddImplicitReferences(
        Handle<HeapObject>::cast(g2s2).location(), g2_children, 1);
  }
  // Do a full GC
  HEAP->CollectGarbage(OLD_POINTER_SPACE);

  // All objects should be alive.
  CHECK_EQ(0, NumberOfWeakCalls);

  // Weaken the root.
  global_handles->MakeWeak(root.location(),
                           reinterpret_cast<void*>(1234),
                           &WeakPointerCallback);
  // But make children strong roots---all the objects (except for children)
  // should be collectable now.
  global_handles->ClearWeakness(g1c1.location());
  global_handles->ClearWeakness(g2c1.location());

  // Groups are deleted, rebuild groups.
  {
    Object** g1_objects[] = { g1s1.location(), g1s2.location() };
    Object** g1_children[] = { g1c1.location() };
    Object** g2_objects[] = { g2s1.location(), g2s2.location() };
    Object** g2_children[] = { g2c1.location() };
    global_handles->AddObjectGroup(g1_objects, 2, NULL);
    global_handles->AddImplicitReferences(
        Handle<HeapObject>::cast(g1s1).location(), g1_children, 1);
    global_handles->AddObjectGroup(g2_objects, 2, NULL);
    global_handles->AddImplicitReferences(
        Handle<HeapObject>::cast(g2s2).location(), g2_children, 1);
  }

  HEAP->CollectGarbage(OLD_POINTER_SPACE);

  // All objects should be gone. 5 global handles in total.
  CHECK_EQ(5, NumberOfWeakCalls);

  // And now make children weak again and collect them.
  global_handles->MakeWeak(g1c1.location(),
                           reinterpret_cast<void*>(1234),
                           &WeakPointerCallback);
  global_handles->MakeWeak(g2c1.location(),
                           reinterpret_cast<void*>(1234),
                           &WeakPointerCallback);

  HEAP->CollectGarbage(OLD_POINTER_SPACE);
  CHECK_EQ(7, NumberOfWeakCalls);
}


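// Minimal v8::RetainedObjectInfo implementation that records whether
// Dispose() has been called.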
class TestRetainedObjectInfo : public v8::RetainedObjectInfo {
 public:
  TestRetainedObjectInfo() : has_been_disposed_(false) {}

  bool has_been_disposed() { return has_been_disposed_; }

  virtual void Dispose() {
    ASSERT(!has_been_disposed_);
    has_been_disposed_ = true;
  }

  virtual bool IsEquivalent(v8::RetainedObjectInfo* other) {
    return other == this;
  }

  virtual intptr_t GetHash() { return 0; }

  virtual const char* GetLabel() { return "whatever"; }

 private:
  bool has_been_disposed_;
};


TEST(EmptyObjectGroups) {
  InitializeVM();
  GlobalHandles* global_handles = Isolate::Current()->global_handles();

  v8::HandleScope handle_scope;

  Handle<Object> object =
      global_handles->Create(HEAP->AllocateFixedArray(1)->ToObjectChecked());

  TestRetainedObjectInfo info;
  global_handles->AddObjectGroup(NULL, 0, &info);
  ASSERT(info.has_been_disposed());

  global_handles->AddImplicitReferences(
      Handle<HeapObject>::cast(object).location(), NULL, 0);
}


// Here is a memory use test that uses /proc, and is therefore Linux-only. We
// do not care how much memory the simulator uses, since it is only there for
// debugging purposes.
#if defined(__linux__) && !defined(USE_SIMULATOR)


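// Parses an unsigned integer of the given base at buffer[*position] and
// advances *position past the digits consumed.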
static uintptr_t ReadLong(char* buffer, intptr_t* position, int base) {
  char* end_address = buffer + *position;
  uintptr_t result = strtoul(buffer + *position, &end_address, base);
  CHECK(result != ULONG_MAX || errno != ERANGE);
  CHECK(end_address > buffer + *position);
  *position = end_address - buffer;
  return result;
}


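// Parses /proc/self/maps and sums the sizes of private, anonymous (inode 0)
// mappings with read, write, or execute permission. Returns -1 if the file
// cannot be opened.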
static intptr_t MemoryInUse() {
  intptr_t memory_use = 0;

  int fd = open("/proc/self/maps", O_RDONLY);
  if (fd < 0) return -1;

  const int kBufSize = 10000;
  char buffer[kBufSize];
  int length = read(fd, buffer, kBufSize);
  intptr_t line_start = 0;
  CHECK_LT(length, kBufSize);  // Make the buffer bigger.
  CHECK_GT(length, 0);  // We have to find some data in the file.
  while (line_start < length) {
    if (buffer[line_start] == '\n') {
      line_start++;
      continue;
    }
    intptr_t position = line_start;
    uintptr_t start = ReadLong(buffer, &position, 16);
    CHECK_EQ(buffer[position++], '-');
    uintptr_t end = ReadLong(buffer, &position, 16);
    CHECK_EQ(buffer[position++], ' ');
    CHECK(buffer[position] == '-' || buffer[position] == 'r');
    bool read_permission = (buffer[position++] == 'r');
    CHECK(buffer[position] == '-' || buffer[position] == 'w');
    bool write_permission = (buffer[position++] == 'w');
    CHECK(buffer[position] == '-' || buffer[position] == 'x');
    bool execute_permission = (buffer[position++] == 'x');
    CHECK(buffer[position] == '-' || buffer[position] == 'p');
    bool private_mapping = (buffer[position++] == 'p');
    CHECK_EQ(buffer[position++], ' ');
    uintptr_t offset = ReadLong(buffer, &position, 16);
    USE(offset);
    CHECK_EQ(buffer[position++], ' ');
    uintptr_t major = ReadLong(buffer, &position, 16);
    USE(major);
    CHECK_EQ(buffer[position++], ':');
    uintptr_t minor = ReadLong(buffer, &position, 16);
    USE(minor);
    CHECK_EQ(buffer[position++], ' ');
    uintptr_t inode = ReadLong(buffer, &position, 10);
    while (position < length && buffer[position] != '\n') position++;
    if ((read_permission || write_permission || execute_permission) &&
        private_mapping && inode == 0) {
      memory_use += (end - start);
    }

    line_start = position;
  }
  close(fd);
  return memory_use;
}


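// Checks that booting the VM stays within a memory budget; the limits depend
// on word size and on whether a snapshot build is used.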
TEST(BootUpMemoryUse) {
  intptr_t initial_memory = MemoryInUse();
  // Avoid flakiness.
  FLAG_crankshaft = false;
  FLAG_parallel_recompilation = false;

  // Only Linux has the proc filesystem and only if it is mapped. If it's not
  // there we just skip the test.
  if (initial_memory >= 0) {
    InitializeVM();
    intptr_t delta = MemoryInUse() - initial_memory;
    if (sizeof(initial_memory) == 8) {
      if (Snapshot::IsEnabled()) {
        CHECK_LE(delta, 3600 * 1024);  // 3396.
      } else {
        CHECK_LE(delta, 4000 * 1024);  // 3948.
      }
    } else {
      if (Snapshot::IsEnabled()) {
        CHECK_LE(delta, 2500 * 1024);  // 2400.
      } else {
        CHECK_LE(delta, 2860 * 1024);  // 2760.
      }
    }
  }
}

#endif  // __linux__ and !USE_SIMULATOR