v8 3.14.5 (node 0.10.28)
V8 is Google's open source JavaScript engine
test-heap.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 
3 #include <stdlib.h>
4 
5 #include "v8.h"
6 
7 #include "compilation-cache.h"
8 #include "execution.h"
9 #include "factory.h"
10 #include "macro-assembler.h"
11 #include "global-handles.h"
12 #include "stub-cache.h"
13 #include "cctest.h"
14 
15 using namespace v8::internal;
16 
17 static v8::Persistent<v8::Context> env;
18 
19 static void InitializeVM() {
20  if (env.IsEmpty()) env = v8::Context::New();
21  v8::HandleScope scope;
22  env->Enter();
23 }
24 
25 
26 static void CheckMap(Map* map, int type, int instance_size) {
27  CHECK(map->IsHeapObject());
28 #ifdef DEBUG
29  CHECK(HEAP->Contains(map));
30 #endif
31  CHECK_EQ(HEAP->meta_map(), map->map());
32  CHECK_EQ(type, map->instance_type());
33  CHECK_EQ(instance_size, map->instance_size());
34 }
35 
36 
37 TEST(HeapMaps) {
38  InitializeVM();
39  CheckMap(HEAP->meta_map(), MAP_TYPE, Map::kSize);
40  CheckMap(HEAP->heap_number_map(), HEAP_NUMBER_TYPE, HeapNumber::kSize);
41  CheckMap(HEAP->fixed_array_map(), FIXED_ARRAY_TYPE, kVariableSizeSentinel);
42  CheckMap(HEAP->string_map(), STRING_TYPE, kVariableSizeSentinel);
43 }
44 
45 
46 static void CheckOddball(Object* obj, const char* string) {
47  CHECK(obj->IsOddball());
48  bool exc;
49  Object* print_string = *Execution::ToString(Handle<Object>(obj), &exc);
50  CHECK(String::cast(print_string)->IsEqualTo(CStrVector(string)));
51 }
52 
53 
54 static void CheckSmi(int value, const char* string) {
55  bool exc;
56  Object* print_string =
57  *Execution::ToString(Handle<Object>(Smi::FromInt(value)), &exc);
58  CHECK(String::cast(print_string)->IsEqualTo(CStrVector(string)));
59 }
60 
61 
62 static void CheckNumber(double value, const char* string) {
63  Object* obj = HEAP->NumberFromDouble(value)->ToObjectChecked();
64  CHECK(obj->IsNumber());
65  bool exc;
66  Object* print_string = *Execution::ToString(Handle<Object>(obj), &exc);
67  CHECK(String::cast(print_string)->IsEqualTo(CStrVector(string)));
68 }
69 
70 
71 static void CheckFindCodeObject() {
72  // Test FindCodeObject
73 #define __ assm.
74 
75  Assembler assm(Isolate::Current(), NULL, 0);
76 
77  __ nop(); // supported on all architectures
78 
79  CodeDesc desc;
80  assm.GetCode(&desc);
81  Object* code = HEAP->CreateCode(
82  desc,
83  Code::ComputeFlags(Code::STUB),
84  Handle<Object>(HEAP->undefined_value()))->ToObjectChecked();
85  CHECK(code->IsCode());
86 
87  HeapObject* obj = HeapObject::cast(code);
88  Address obj_addr = obj->address();
89 
90  for (int i = 0; i < obj->Size(); i += kPointerSize) {
91  Object* found = HEAP->FindCodeObject(obj_addr + i);
92  CHECK_EQ(code, found);
93  }
94 
95  Object* copy = HEAP->CreateCode(
96  desc,
97  Code::ComputeFlags(Code::STUB),
98  Handle<Object>(HEAP->undefined_value()))->ToObjectChecked();
99  CHECK(copy->IsCode());
100  HeapObject* obj_copy = HeapObject::cast(copy);
101  Object* not_right = HEAP->FindCodeObject(obj_copy->address() +
102  obj_copy->Size() / 2);
103  CHECK(not_right != code);
104 }
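// ---------------------------------------------------------------------------
// Editorial sketch (not part of test-heap.cc). CheckFindCodeObject() above
// relies on the property that any interior address of a code object resolves
// back to that object's start. The same containment query over a flat table
// of (start, size) records, in plain C++ and independent of V8's heap layout;
// the IllustrativeRange type and FindContaining() helper are hypothetical.
#include <cassert>
#include <cstddef>
#include <stdint.h>

struct IllustrativeRange {
  uintptr_t start;
  size_t size;
};

static const IllustrativeRange* FindContaining(const IllustrativeRange* ranges,
                                               int count,
                                               uintptr_t addr) {
  for (int i = 0; i < count; i++) {
    if (addr >= ranges[i].start && addr < ranges[i].start + ranges[i].size) {
      return &ranges[i];
    }
  }
  return NULL;  // no range contains addr
}

static void IllustrateFindContaining() {
  IllustrativeRange ranges[] = { { 0x1000, 0x40 }, { 0x2000, 0x80 } };
  assert(FindContaining(ranges, 2, 0x1020) == &ranges[0]);  // interior address
  assert(FindContaining(ranges, 2, 0x2000) == &ranges[1]);  // start address
  assert(FindContaining(ranges, 2, 0x3000) == NULL);        // outside both
}
// ---------------------------------------------------------------------------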
105 
106 
107 TEST(HeapObjects) {
108  InitializeVM();
109 
110  v8::HandleScope sc;
111  Object* value = HEAP->NumberFromDouble(1.000123)->ToObjectChecked();
112  CHECK(value->IsHeapNumber());
113  CHECK(value->IsNumber());
114  CHECK_EQ(1.000123, value->Number());
115 
116  value = HEAP->NumberFromDouble(1.0)->ToObjectChecked();
117  CHECK(value->IsSmi());
118  CHECK(value->IsNumber());
119  CHECK_EQ(1.0, value->Number());
120 
121  value = HEAP->NumberFromInt32(1024)->ToObjectChecked();
122  CHECK(value->IsSmi());
123  CHECK(value->IsNumber());
124  CHECK_EQ(1024.0, value->Number());
125 
126  value = HEAP->NumberFromInt32(Smi::kMinValue)->ToObjectChecked();
127  CHECK(value->IsSmi());
128  CHECK(value->IsNumber());
129  CHECK_EQ(Smi::kMinValue, Smi::cast(value)->value());
130 
131  value = HEAP->NumberFromInt32(Smi::kMaxValue)->ToObjectChecked();
132  CHECK(value->IsSmi());
133  CHECK(value->IsNumber());
134  CHECK_EQ(Smi::kMaxValue, Smi::cast(value)->value());
135 
136 #ifndef V8_TARGET_ARCH_X64
137  // TODO(lrn): We need a NumberFromIntptr function in order to test this.
138  value = HEAP->NumberFromInt32(Smi::kMinValue - 1)->ToObjectChecked();
139  CHECK(value->IsHeapNumber());
140  CHECK(value->IsNumber());
141  CHECK_EQ(static_cast<double>(Smi::kMinValue - 1), value->Number());
142 #endif
143 
144  MaybeObject* maybe_value =
145  HEAP->NumberFromUint32(static_cast<uint32_t>(Smi::kMaxValue) + 1);
146  value = maybe_value->ToObjectChecked();
147  CHECK(value->IsHeapNumber());
148  CHECK(value->IsNumber());
149  CHECK_EQ(static_cast<double>(static_cast<uint32_t>(Smi::kMaxValue) + 1),
150  value->Number());
151 
152  // nan oddball checks
153  CHECK(HEAP->nan_value()->IsNumber());
154  CHECK(isnan(HEAP->nan_value()->Number()));
155 
156  Handle<String> s = FACTORY->NewStringFromAscii(CStrVector("fisk hest "));
157  CHECK(s->IsString());
158  CHECK_EQ(10, s->length());
159 
160  String* object_symbol = String::cast(HEAP->Object_symbol());
161  CHECK(
162  Isolate::Current()->context()->global_object()->HasLocalProperty(
163  object_symbol));
164 
165  // Check ToString for oddballs
166  CheckOddball(HEAP->true_value(), "true");
167  CheckOddball(HEAP->false_value(), "false");
168  CheckOddball(HEAP->null_value(), "null");
169  CheckOddball(HEAP->undefined_value(), "undefined");
170 
171  // Check ToString for Smis
172  CheckSmi(0, "0");
173  CheckSmi(42, "42");
174  CheckSmi(-42, "-42");
175 
176  // Check ToString for Numbers
177  CheckNumber(1.1, "1.1");
178 
179  CheckFindCodeObject();
180 }
181 
182 
183 TEST(Tagging) {
184  InitializeVM();
185  int request = 24;
186  CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request)));
187  CHECK(Smi::FromInt(42)->IsSmi());
188  CHECK(Failure::RetryAfterGC(NEW_SPACE)->IsFailure());
189  CHECK_EQ(NEW_SPACE,
190  Failure::RetryAfterGC(NEW_SPACE)->allocation_space());
191  CHECK_EQ(OLD_POINTER_SPACE,
192  Failure::RetryAfterGC(OLD_POINTER_SPACE)->allocation_space());
193  CHECK(Failure::Exception()->IsFailure());
194  CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi());
195  CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi());
196 }
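// ---------------------------------------------------------------------------
// Editorial sketch (not part of test-heap.cc). TEST(Tagging) above exercises
// V8's pointer tagging: a Smi stores its payload in the upper bits of a word
// and is distinguished from heap pointers by the low tag bits. The snippet
// below shows the idea for an assumed layout with a one-bit tag whose Smi
// value is 0; the real tag and shift widths are platform-dependent.
#include <cassert>
#include <stdint.h>

static const intptr_t kSketchSmiTag = 0;  // assumed Smi tag value

static intptr_t SketchEncodeSmi(int value) {
  // Shift the payload up by one bit and place the tag in the low bit (done
  // with * and + so the operation stays well-defined for negative payloads).
  return static_cast<intptr_t>(value) * 2 + kSketchSmiTag;
}

static int SketchDecodeSmi(intptr_t tagged) {
  return static_cast<int>((tagged - kSketchSmiTag) / 2);
}

static void IllustrateSmiTagging() {
  assert(SketchDecodeSmi(SketchEncodeSmi(42)) == 42);
  assert(SketchDecodeSmi(SketchEncodeSmi(-42)) == -42);
  assert((SketchEncodeSmi(42) & 1) == kSketchSmiTag);  // low bit marks a Smi
}
// ---------------------------------------------------------------------------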
197 
198 
199 TEST(GarbageCollection) {
200  InitializeVM();
201 
202  v8::HandleScope sc;
203  // Check GC.
204  HEAP->CollectGarbage(NEW_SPACE);
205 
206  Handle<String> name = FACTORY->LookupAsciiSymbol("theFunction");
207  Handle<String> prop_name = FACTORY->LookupAsciiSymbol("theSlot");
208  Handle<String> prop_namex = FACTORY->LookupAsciiSymbol("theSlotx");
209  Handle<String> obj_name = FACTORY->LookupAsciiSymbol("theObject");
210 
211  {
212  v8::HandleScope inner_scope;
213  // Allocate a function and keep it in global object's property.
214  Handle<JSFunction> function =
215  FACTORY->NewFunction(name, FACTORY->undefined_value());
216  Handle<Map> initial_map =
217  FACTORY->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
218  function->set_initial_map(*initial_map);
219  Isolate::Current()->context()->global_object()->SetProperty(
220  *name, *function, NONE, kNonStrictMode)->ToObjectChecked();
221  // Allocate an object. Unrooted after leaving the scope.
222  Handle<JSObject> obj = FACTORY->NewJSObject(function);
223  obj->SetProperty(
224  *prop_name, Smi::FromInt(23), NONE, kNonStrictMode)->ToObjectChecked();
225  obj->SetProperty(
226  *prop_namex, Smi::FromInt(24), NONE, kNonStrictMode)->ToObjectChecked();
227 
228  CHECK_EQ(Smi::FromInt(23), obj->GetProperty(*prop_name));
229  CHECK_EQ(Smi::FromInt(24), obj->GetProperty(*prop_namex));
230  }
231 
232  HEAP->CollectGarbage(NEW_SPACE);
233 
234  // Function should be alive.
235  CHECK(Isolate::Current()->context()->global_object()->
236  HasLocalProperty(*name));
237  // Check function is retained.
238  Object* func_value = Isolate::Current()->context()->global_object()->
239  GetProperty(*name)->ToObjectChecked();
240  CHECK(func_value->IsJSFunction());
241  Handle<JSFunction> function(JSFunction::cast(func_value));
242 
243  {
244  HandleScope inner_scope;
245  // Allocate another object, make it reachable from global.
246  Handle<JSObject> obj = FACTORY->NewJSObject(function);
247  Isolate::Current()->context()->global_object()->SetProperty(
248  *obj_name, *obj, NONE, kNonStrictMode)->ToObjectChecked();
249  obj->SetProperty(
250  *prop_name, Smi::FromInt(23), NONE, kNonStrictMode)->ToObjectChecked();
251  }
252 
253  // After gc, it should survive.
254  HEAP->CollectGarbage(NEW_SPACE);
255 
256  CHECK(Isolate::Current()->context()->global_object()->
257  HasLocalProperty(*obj_name));
258  CHECK(Isolate::Current()->context()->global_object()->
259  GetProperty(*obj_name)->ToObjectChecked()->IsJSObject());
260  Object* obj = Isolate::Current()->context()->global_object()->
261  GetProperty(*obj_name)->ToObjectChecked();
262  JSObject* js_obj = JSObject::cast(obj);
263  CHECK_EQ(Smi::FromInt(23), js_obj->GetProperty(*prop_name));
264 }
265 
266 
267 static void VerifyStringAllocation(const char* string) {
268  v8::HandleScope scope;
269  Handle<String> s = FACTORY->NewStringFromUtf8(CStrVector(string));
270  CHECK_EQ(StrLength(string), s->length());
271  for (int index = 0; index < s->length(); index++) {
272  CHECK_EQ(static_cast<uint16_t>(string[index]), s->Get(index));
273  }
274 }
275 
276 
277 TEST(String) {
278  InitializeVM();
279 
280  VerifyStringAllocation("a");
281  VerifyStringAllocation("ab");
282  VerifyStringAllocation("abc");
283  VerifyStringAllocation("abcd");
284  VerifyStringAllocation("fiskerdrengen er paa havet");
285 }
286 
287 
288 TEST(LocalHandles) {
289  InitializeVM();
290 
291  v8::HandleScope scope;
292  const char* name = "Kasper the spunky";
293  Handle<String> string = FACTORY->NewStringFromAscii(CStrVector(name));
294  CHECK_EQ(StrLength(name), string->length());
295 }
296 
297 
298 TEST(GlobalHandles) {
299  InitializeVM();
300  GlobalHandles* global_handles = Isolate::Current()->global_handles();
301 
302  Handle<Object> h1;
303  Handle<Object> h2;
304  Handle<Object> h3;
305  Handle<Object> h4;
306 
307  {
308  HandleScope scope;
309 
310  Handle<Object> i = FACTORY->NewStringFromAscii(CStrVector("fisk"));
311  Handle<Object> u = FACTORY->NewNumber(1.12344);
312 
313  h1 = global_handles->Create(*i);
314  h2 = global_handles->Create(*u);
315  h3 = global_handles->Create(*i);
316  h4 = global_handles->Create(*u);
317  }
318 
319  // after gc, it should survive
320  HEAP->CollectGarbage(NEW_SPACE);
321 
322  CHECK((*h1)->IsString());
323  CHECK((*h2)->IsHeapNumber());
324  CHECK((*h3)->IsString());
325  CHECK((*h4)->IsHeapNumber());
326 
327  CHECK_EQ(*h3, *h1);
328  global_handles->Destroy(h1.location());
329  global_handles->Destroy(h3.location());
330 
331  CHECK_EQ(*h4, *h2);
332  global_handles->Destroy(h2.location());
333  global_handles->Destroy(h4.location());
334 }
335 
336 
337 static bool WeakPointerCleared = false;
338 
339 static void TestWeakGlobalHandleCallback(v8::Persistent<v8::Value> handle,
340  void* id) {
341  if (1234 == reinterpret_cast<intptr_t>(id)) WeakPointerCleared = true;
342  handle.Dispose();
343 }
344 
345 
346 TEST(WeakGlobalHandlesScavenge) {
347  InitializeVM();
348  GlobalHandles* global_handles = Isolate::Current()->global_handles();
349 
350  WeakPointerCleared = false;
351 
352  Handle<Object> h1;
353  Handle<Object> h2;
354 
355  {
356  HandleScope scope;
357 
358  Handle<Object> i = FACTORY->NewStringFromAscii(CStrVector("fisk"));
359  Handle<Object> u = FACTORY->NewNumber(1.12344);
360 
361  h1 = global_handles->Create(*i);
362  h2 = global_handles->Create(*u);
363  }
364 
365  global_handles->MakeWeak(h2.location(),
366  reinterpret_cast<void*>(1234),
367  &TestWeakGlobalHandleCallback);
368 
369  // Scavenge treats weak pointers as normal roots.
370  HEAP->PerformScavenge();
371 
372  CHECK((*h1)->IsString());
373  CHECK((*h2)->IsHeapNumber());
374 
375  CHECK(!WeakPointerCleared);
376  CHECK(!global_handles->IsNearDeath(h2.location()));
377  CHECK(!global_handles->IsNearDeath(h1.location()));
378 
379  global_handles->Destroy(h1.location());
380  global_handles->Destroy(h2.location());
381 }
382 
383 
384 TEST(WeakGlobalHandlesMark) {
385  InitializeVM();
386  GlobalHandles* global_handles = Isolate::Current()->global_handles();
387 
388  WeakPointerCleared = false;
389 
390  Handle<Object> h1;
391  Handle<Object> h2;
392 
393  {
394  HandleScope scope;
395 
396  Handle<Object> i = FACTORY->NewStringFromAscii(CStrVector("fisk"));
397  Handle<Object> u = FACTORY->NewNumber(1.12344);
398 
399  h1 = global_handles->Create(*i);
400  h2 = global_handles->Create(*u);
401  }
402 
403  HEAP->CollectGarbage(OLD_POINTER_SPACE);
404  HEAP->CollectGarbage(NEW_SPACE);
405  // Make sure the object is promoted.
406 
407  global_handles->MakeWeak(h2.location(),
408  reinterpret_cast<void*>(1234),
409  &TestWeakGlobalHandleCallback);
412 
413  HEAP->CollectGarbage(OLD_POINTER_SPACE);
414 
415  CHECK((*h1)->IsString());
416 
417  CHECK(WeakPointerCleared);
419 
420  global_handles->Destroy(h1.location());
421 }
422 
423 
424 TEST(DeleteWeakGlobalHandle) {
425  InitializeVM();
426  GlobalHandles* global_handles = Isolate::Current()->global_handles();
427 
428  WeakPointerCleared = false;
429 
430  Handle<Object> h;
431 
432  {
433  HandleScope scope;
434 
435  Handle<Object> i = FACTORY->NewStringFromAscii(CStrVector("fisk"));
436  h = global_handles->Create(*i);
437  }
438 
439  global_handles->MakeWeak(h.location(),
440  reinterpret_cast<void*>(1234),
441  &TestWeakGlobalHandleCallback);
442 
443  // Scavenge does not recognize the weak reference.
444  HEAP->PerformScavenge();
445 
446  CHECK(!WeakPointerCleared);
447 
448  // Mark-compact treats weak reference properly.
449  HEAP->CollectGarbage(OLD_POINTER_SPACE);
450 
451  CHECK(WeakPointerCleared);
452 }
453 
454 
455 static const char* not_so_random_string_table[] = {
456  "abstract",
457  "boolean",
458  "break",
459  "byte",
460  "case",
461  "catch",
462  "char",
463  "class",
464  "const",
465  "continue",
466  "debugger",
467  "default",
468  "delete",
469  "do",
470  "double",
471  "else",
472  "enum",
473  "export",
474  "extends",
475  "false",
476  "final",
477  "finally",
478  "float",
479  "for",
480  "function",
481  "goto",
482  "if",
483  "implements",
484  "import",
485  "in",
486  "instanceof",
487  "int",
488  "interface",
489  "long",
490  "native",
491  "new",
492  "null",
493  "package",
494  "private",
495  "protected",
496  "public",
497  "return",
498  "short",
499  "static",
500  "super",
501  "switch",
502  "synchronized",
503  "this",
504  "throw",
505  "throws",
506  "transient",
507  "true",
508  "try",
509  "typeof",
510  "var",
511  "void",
512  "volatile",
513  "while",
514  "with",
515  0
516 };
517 
518 
519 static void CheckSymbols(const char** strings) {
520  for (const char* string = *strings; *strings != 0; string = *strings++) {
521  Object* a;
522  MaybeObject* maybe_a = HEAP->LookupAsciiSymbol(string);
523  // LookupAsciiSymbol may return a failure if a GC is needed.
524  if (!maybe_a->ToObject(&a)) continue;
525  CHECK(a->IsSymbol());
526  Object* b;
527  MaybeObject* maybe_b = HEAP->LookupAsciiSymbol(string);
528  if (!maybe_b->ToObject(&b)) continue;
529  CHECK_EQ(b, a);
530  CHECK(String::cast(b)->IsEqualTo(CStrVector(string)));
531  }
532 }
533 
534 
535 TEST(SymbolTable) {
536  InitializeVM();
537 
538  CheckSymbols(not_so_random_string_table);
539  CheckSymbols(not_so_random_string_table);
540 }
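// ---------------------------------------------------------------------------
// Editorial sketch (not part of test-heap.cc). CheckSymbols() above verifies
// the interning invariant of the symbol table: looking up the same ASCII
// string twice must return the very same object (CHECK_EQ(b, a) compares
// identity, not contents). A minimal illustration of that invariant with
// std::set; this is not how V8's SymbolTable is actually implemented.
#include <cassert>
#include <set>
#include <string>

static const std::string* SketchIntern(std::set<std::string>* table,
                                       const char* chars) {
  // insert() hands back the already-stored element if an equal one exists.
  return &*table->insert(std::string(chars)).first;
}

static void IllustrateInterning() {
  std::set<std::string> table;
  const std::string* a = SketchIntern(&table, "abstract");
  const std::string* b = SketchIntern(&table, "abstract");
  assert(a == b);  // same identity, mirroring CHECK_EQ(b, a) above
}
// ---------------------------------------------------------------------------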
541 
542 
543 TEST(FunctionAllocation) {
544  InitializeVM();
545 
546  v8::HandleScope sc;
547  Handle<String> name = FACTORY->LookupAsciiSymbol("theFunction");
548  Handle<JSFunction> function =
549  FACTORY->NewFunction(name, FACTORY->undefined_value());
550  Handle<Map> initial_map =
551  FACTORY->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
552  function->set_initial_map(*initial_map);
553 
554  Handle<String> prop_name = FACTORY->LookupAsciiSymbol("theSlot");
555  Handle<JSObject> obj = FACTORY->NewJSObject(function);
556  obj->SetProperty(
557  *prop_name, Smi::FromInt(23), NONE, kNonStrictMode)->ToObjectChecked();
558  CHECK_EQ(Smi::FromInt(23), obj->GetProperty(*prop_name));
559  // Check that we can add properties to function objects.
560  function->SetProperty(
561  *prop_name, Smi::FromInt(24), NONE, kNonStrictMode)->ToObjectChecked();
562  CHECK_EQ(Smi::FromInt(24), function->GetProperty(*prop_name));
563 }
564 
565 
566 TEST(ObjectProperties) {
567  InitializeVM();
568 
569  v8::HandleScope sc;
570  String* object_symbol = String::cast(HEAP->Object_symbol());
571  Object* raw_object = Isolate::Current()->context()->global_object()->
572  GetProperty(object_symbol)->ToObjectChecked();
573  JSFunction* object_function = JSFunction::cast(raw_object);
574  Handle<JSFunction> constructor(object_function);
575  Handle<JSObject> obj = FACTORY->NewJSObject(constructor);
576  Handle<String> first = FACTORY->LookupAsciiSymbol("first");
577  Handle<String> second = FACTORY->LookupAsciiSymbol("second");
578 
579  // check for empty
580  CHECK(!obj->HasLocalProperty(*first));
581 
582  // add first
583  obj->SetProperty(
584  *first, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
585  CHECK(obj->HasLocalProperty(*first));
586 
587  // delete first
588  CHECK(obj->DeleteProperty(*first, JSObject::NORMAL_DELETION));
589  CHECK(!obj->HasLocalProperty(*first));
590 
591  // add first and then second
592  obj->SetProperty(
593  *first, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
594  obj->SetProperty(
595  *second, Smi::FromInt(2), NONE, kNonStrictMode)->ToObjectChecked();
596  CHECK(obj->HasLocalProperty(*first));
597  CHECK(obj->HasLocalProperty(*second));
598 
599  // delete first and then second
600  CHECK(obj->DeleteProperty(*first, JSObject::NORMAL_DELETION));
601  CHECK(obj->HasLocalProperty(*second));
602  CHECK(obj->DeleteProperty(*second, JSObject::NORMAL_DELETION));
603  CHECK(!obj->HasLocalProperty(*first));
604  CHECK(!obj->HasLocalProperty(*second));
605 
606  // add first and then second
607  obj->SetProperty(
608  *first, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
609  obj->SetProperty(
610  *second, Smi::FromInt(2), NONE, kNonStrictMode)->ToObjectChecked();
611  CHECK(obj->HasLocalProperty(*first));
612  CHECK(obj->HasLocalProperty(*second));
613 
614  // delete second and then first
615  CHECK(obj->DeleteProperty(*second, JSObject::NORMAL_DELETION));
616  CHECK(obj->HasLocalProperty(*first));
617  CHECK(obj->DeleteProperty(*first, JSObject::NORMAL_DELETION));
618  CHECK(!obj->HasLocalProperty(*first));
619  CHECK(!obj->HasLocalProperty(*second));
620 
621  // check string and symbol match
622  const char* string1 = "fisk";
623  Handle<String> s1 = FACTORY->NewStringFromAscii(CStrVector(string1));
624  obj->SetProperty(
625  *s1, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
626  Handle<String> s1_symbol = FACTORY->LookupAsciiSymbol(string1);
627  CHECK(obj->HasLocalProperty(*s1_symbol));
628 
629  // check symbol and string match
630  const char* string2 = "fugl";
631  Handle<String> s2_symbol = FACTORY->LookupAsciiSymbol(string2);
632  obj->SetProperty(
633  *s2_symbol, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
634  Handle<String> s2 = FACTORY->NewStringFromAscii(CStrVector(string2));
635  CHECK(obj->HasLocalProperty(*s2));
636 }
637 
638 
639 TEST(JSObjectMaps) {
640  InitializeVM();
641 
642  v8::HandleScope sc;
643  Handle<String> name = FACTORY->LookupAsciiSymbol("theFunction");
644  Handle<JSFunction> function =
645  FACTORY->NewFunction(name, FACTORY->undefined_value());
646  Handle<Map> initial_map =
647  FACTORY->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
648  function->set_initial_map(*initial_map);
649 
650  Handle<String> prop_name = FACTORY->LookupAsciiSymbol("theSlot");
651  Handle<JSObject> obj = FACTORY->NewJSObject(function);
652 
653  // Set a property
654  obj->SetProperty(
655  *prop_name, Smi::FromInt(23), NONE, kNonStrictMode)->ToObjectChecked();
656  CHECK_EQ(Smi::FromInt(23), obj->GetProperty(*prop_name));
657 
658  // Check the map has changed
659  CHECK(*initial_map != obj->map());
660 }
661 
662 
663 TEST(JSArray) {
664  InitializeVM();
665 
666  v8::HandleScope sc;
667  Handle<String> name = FACTORY->LookupAsciiSymbol("Array");
668  Object* raw_object = Isolate::Current()->context()->global_object()->
669  GetProperty(*name)->ToObjectChecked();
670  Handle<JSFunction> function = Handle<JSFunction>(
671  JSFunction::cast(raw_object));
672 
673  // Allocate the object.
674  Handle<JSObject> object = FACTORY->NewJSObject(function);
675  Handle<JSArray> array = Handle<JSArray>::cast(object);
676  // We just initialized the VM, no heap allocation failure yet.
677  array->Initialize(0)->ToObjectChecked();
678 
679  // Set array length to 0.
680  array->SetElementsLength(Smi::FromInt(0))->ToObjectChecked();
681  CHECK_EQ(Smi::FromInt(0), array->length());
682  // Must be in fast mode.
683  CHECK(array->HasFastSmiOrObjectElements());
684 
685  // array[length] = name.
686  array->SetElement(0, *name, NONE, kNonStrictMode)->ToObjectChecked();
687  CHECK_EQ(Smi::FromInt(1), array->length());
688  CHECK_EQ(array->GetElement(0), *name);
689 
690  // Set array length with larger than smi value.
691  Handle<Object> length =
692  FACTORY->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
693  array->SetElementsLength(*length)->ToObjectChecked();
694 
695  uint32_t int_length = 0;
696  CHECK(length->ToArrayIndex(&int_length));
697  CHECK_EQ(*length, array->length());
698  CHECK(array->HasDictionaryElements()); // Must be in slow mode.
699 
700  // array[length] = name.
701  array->SetElement(int_length, *name, NONE, kNonStrictMode)->ToObjectChecked();
702  uint32_t new_int_length = 0;
703  CHECK(array->length()->ToArrayIndex(&new_int_length));
704  CHECK_EQ(static_cast<double>(int_length), new_int_length - 1);
705  CHECK_EQ(array->GetElement(int_length), *name);
706  CHECK_EQ(array->GetElement(0), *name);
707 }
708 
709 
710 TEST(JSObjectCopy) {
711  InitializeVM();
712 
713  v8::HandleScope sc;
714  String* object_symbol = String::cast(HEAP->Object_symbol());
715  Object* raw_object = Isolate::Current()->context()->global_object()->
716  GetProperty(object_symbol)->ToObjectChecked();
717  JSFunction* object_function = JSFunction::cast(raw_object);
718  Handle<JSFunction> constructor(object_function);
719  Handle<JSObject> obj = FACTORY->NewJSObject(constructor);
720  Handle<String> first = FACTORY->LookupAsciiSymbol("first");
721  Handle<String> second = FACTORY->LookupAsciiSymbol("second");
722 
723  obj->SetProperty(
724  *first, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
725  obj->SetProperty(
726  *second, Smi::FromInt(2), NONE, kNonStrictMode)->ToObjectChecked();
727 
728  obj->SetElement(0, *first, NONE, kNonStrictMode)->ToObjectChecked();
729  obj->SetElement(1, *second, NONE, kNonStrictMode)->ToObjectChecked();
730 
731  // Make the clone.
732  Handle<JSObject> clone = Copy(obj);
733  CHECK(!clone.is_identical_to(obj));
734 
735  CHECK_EQ(obj->GetElement(0), clone->GetElement(0));
736  CHECK_EQ(obj->GetElement(1), clone->GetElement(1));
737 
738  CHECK_EQ(obj->GetProperty(*first), clone->GetProperty(*first));
739  CHECK_EQ(obj->GetProperty(*second), clone->GetProperty(*second));
740 
741  // Flip the values.
742  clone->SetProperty(
743  *first, Smi::FromInt(2), NONE, kNonStrictMode)->ToObjectChecked();
744  clone->SetProperty(
745  *second, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
746 
747  clone->SetElement(0, *second, NONE, kNonStrictMode)->ToObjectChecked();
748  clone->SetElement(1, *first, NONE, kNonStrictMode)->ToObjectChecked();
749 
750  CHECK_EQ(obj->GetElement(1), clone->GetElement(0));
751  CHECK_EQ(obj->GetElement(0), clone->GetElement(1));
752 
753  CHECK_EQ(obj->GetProperty(*second), clone->GetProperty(*first));
754  CHECK_EQ(obj->GetProperty(*first), clone->GetProperty(*second));
755 }
756 
757 
758 TEST(StringAllocation) {
759  InitializeVM();
760 
761 
762  const unsigned char chars[] = { 0xe5, 0xa4, 0xa7 };
763  for (int length = 0; length < 100; length++) {
764  v8::HandleScope scope;
765  char* non_ascii = NewArray<char>(3 * length + 1);
766  char* ascii = NewArray<char>(length + 1);
767  non_ascii[3 * length] = 0;
768  ascii[length] = 0;
769  for (int i = 0; i < length; i++) {
770  ascii[i] = 'a';
771  non_ascii[3 * i] = chars[0];
772  non_ascii[3 * i + 1] = chars[1];
773  non_ascii[3 * i + 2] = chars[2];
774  }
775  Handle<String> non_ascii_sym =
776  FACTORY->LookupSymbol(Vector<const char>(non_ascii, 3 * length));
777  CHECK_EQ(length, non_ascii_sym->length());
778  Handle<String> ascii_sym =
779  FACTORY->LookupSymbol(Vector<const char>(ascii, length));
780  CHECK_EQ(length, ascii_sym->length());
781  Handle<String> non_ascii_str =
782  FACTORY->NewStringFromUtf8(Vector<const char>(non_ascii, 3 * length));
783  non_ascii_str->Hash();
784  CHECK_EQ(length, non_ascii_str->length());
785  Handle<String> ascii_str =
786  FACTORY->NewStringFromUtf8(Vector<const char>(ascii, length));
787  ascii_str->Hash();
788  CHECK_EQ(length, ascii_str->length());
789  DeleteArray(non_ascii);
790  DeleteArray(ascii);
791  }
792 }
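// ---------------------------------------------------------------------------
// Editorial sketch (not part of test-heap.cc). TEST(StringAllocation) above
// builds its non-ASCII input from the three-byte UTF-8 sequence 0xe5 0xa4 0xa7
// and expects `length` characters from 3 * length bytes. Counting code points
// by skipping UTF-8 continuation bytes (of the form 10xxxxxx) shows why; the
// helper below is a generic illustration, not V8's UTF-8 decoder.
#include <cassert>

static int SketchCountUtf8CodePoints(const unsigned char* bytes,
                                     int byte_length) {
  int count = 0;
  for (int i = 0; i < byte_length; i++) {
    if ((bytes[i] & 0xC0) != 0x80) count++;  // not a continuation byte
  }
  return count;
}

static void IllustrateUtf8Length() {
  const unsigned char two_chars[] = { 0xe5, 0xa4, 0xa7, 0xe5, 0xa4, 0xa7 };
  assert(SketchCountUtf8CodePoints(two_chars, 6) == 2);  // two 3-byte chars
}
// ---------------------------------------------------------------------------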
793 
794 
795 static int ObjectsFoundInHeap(Handle<Object> objs[], int size) {
796  // Count the number of objects found in the heap.
797  int found_count = 0;
798  HeapIterator iterator;
799  for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
800  for (int i = 0; i < size; i++) {
801  if (*objs[i] == obj) {
802  found_count++;
803  }
804  }
805  }
806  return found_count;
807 }
808 
809 
810 TEST(Iteration) {
811  InitializeVM();
812  v8::HandleScope scope;
813 
814  // Array of objects to scan the heap for.
815  const int objs_count = 6;
816  Handle<Object> objs[objs_count];
817  int next_objs_index = 0;
818 
819  // Allocate a JS array to OLD_POINTER_SPACE and NEW_SPACE
820  objs[next_objs_index++] = FACTORY->NewJSArray(10);
821  objs[next_objs_index++] = FACTORY->NewJSArray(10,
822  FAST_HOLEY_ELEMENTS,
823  TENURED);
824 
825  // Allocate a small string to OLD_DATA_SPACE and NEW_SPACE
826  objs[next_objs_index++] =
827  FACTORY->NewStringFromAscii(CStrVector("abcdefghij"));
828  objs[next_objs_index++] =
829  FACTORY->NewStringFromAscii(CStrVector("abcdefghij"), TENURED);
830 
831  // Allocate a large string (for large object space).
832  int large_size = Page::kMaxNonCodeHeapObjectSize + 1;
833  char* str = new char[large_size];
834  for (int i = 0; i < large_size - 1; ++i) str[i] = 'a';
835  str[large_size - 1] = '\0';
836  objs[next_objs_index++] =
837  FACTORY->NewStringFromAscii(CStrVector(str), TENURED);
838  delete[] str;
839 
840  // Add a Map object to look for.
841  objs[next_objs_index++] = Handle<Map>(HeapObject::cast(*objs[0])->map());
842 
843  CHECK_EQ(objs_count, next_objs_index);
844  CHECK_EQ(objs_count, ObjectsFoundInHeap(objs, objs_count));
845 }
846 
847 
848 TEST(EmptyHandleEscapeFrom) {
849  InitializeVM();
850 
851  v8::HandleScope scope;
852  Handle<JSObject> runaway;
853 
854  {
855  v8::HandleScope nested;
856  Handle<JSObject> empty;
857  runaway = empty.EscapeFrom(&nested);
858  }
859 
860  CHECK(runaway.is_null());
861 }
862 
863 
864 static int LenFromSize(int size) {
865  return (size - FixedArray::kHeaderSize) / kPointerSize;
866 }
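// ---------------------------------------------------------------------------
// Editorial sketch (not part of test-heap.cc). LenFromSize() above inverts
// FixedArray::SizeFor(length), i.e. size = kHeaderSize + length * kPointerSize
// for this heap layout. The round trip below uses assumed 32-bit constants
// (8-byte header, 4-byte pointers); the real values depend on the build.
#include <cassert>

static const int kSketchHeaderSize = 8;   // assumed FixedArray header size
static const int kSketchPointerSize = 4;  // assumed pointer size

static int SketchSizeFor(int length) {
  return kSketchHeaderSize + length * kSketchPointerSize;
}

static int SketchLenFromSize(int size) {
  return (size - kSketchHeaderSize) / kSketchPointerSize;
}

static void IllustrateLenFromSize() {
  for (int len = 0; len < 1000; len++) {
    assert(SketchLenFromSize(SketchSizeFor(len)) == len);
  }
}
// ---------------------------------------------------------------------------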
867 
868 
869 TEST(Regression39128) {
870  // Test case for crbug.com/39128.
871  InitializeVM();
872 
873  // Increase the chance of 'bump-the-pointer' allocation in old space.
874  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
875 
876  v8::HandleScope scope;
877 
878  // The plan: create JSObject which references objects in new space.
879  // Then clone this object (forcing it to go into old space) and check
880  // that region dirty marks are updated correctly.
881 
882  // Step 1: prepare a map for the object. We add 1 inobject property to it.
883  Handle<JSFunction> object_ctor(
884  Isolate::Current()->native_context()->object_function());
885  CHECK(object_ctor->has_initial_map());
886  Handle<Map> object_map(object_ctor->initial_map());
887  // Create a map with single inobject property.
888  Handle<Map> my_map = FACTORY->CopyMap(object_map, 1);
889  int n_properties = my_map->inobject_properties();
890  CHECK_GT(n_properties, 0);
891 
892  int object_size = my_map->instance_size();
893 
894  // Step 2: allocate a lot of objects so as to almost fill new space: we need
895  // just enough room to allocate JSObject and thus fill the new space.
896 
897  int allocation_amount = Min(FixedArray::kMaxSize,
898  HEAP->MaxObjectSizeInNewSpace());
899  int allocation_len = LenFromSize(allocation_amount);
900  NewSpace* new_space = HEAP->new_space();
901  Address* top_addr = new_space->allocation_top_address();
902  Address* limit_addr = new_space->allocation_limit_address();
903  while ((*limit_addr - *top_addr) > allocation_amount) {
904  CHECK(!HEAP->always_allocate());
905  Object* array = HEAP->AllocateFixedArray(allocation_len)->ToObjectChecked();
906  CHECK(!array->IsFailure());
907  CHECK(new_space->Contains(array));
908  }
909 
910  // Step 3: now allocate fixed array and JSObject to fill the whole new space.
911  int to_fill = static_cast<int>(*limit_addr - *top_addr - object_size);
912  int fixed_array_len = LenFromSize(to_fill);
913  CHECK(fixed_array_len < FixedArray::kMaxLength);
914 
915  CHECK(!HEAP->always_allocate());
916  Object* array = HEAP->AllocateFixedArray(fixed_array_len)->ToObjectChecked();
917  CHECK(!array->IsFailure());
918  CHECK(new_space->Contains(array));
919 
920  Object* object = HEAP->AllocateJSObjectFromMap(*my_map)->ToObjectChecked();
921  CHECK(new_space->Contains(object));
922  JSObject* jsobject = JSObject::cast(object);
923  CHECK_EQ(0, FixedArray::cast(jsobject->elements())->length());
924  CHECK_EQ(0, jsobject->properties()->length());
925  // Create a reference to object in new space in jsobject.
926  jsobject->FastPropertyAtPut(-1, array);
927 
928  CHECK_EQ(0, static_cast<int>(*limit_addr - *top_addr));
929 
930  // Step 4: clone jsobject, but force always allocate first to create a clone
931  // in old pointer space.
932  Address old_pointer_space_top = HEAP->old_pointer_space()->top();
933  AlwaysAllocateScope aa_scope;
934  Object* clone_obj = HEAP->CopyJSObject(jsobject)->ToObjectChecked();
935  JSObject* clone = JSObject::cast(clone_obj);
936  if (clone->address() != old_pointer_space_top) {
937  // Alas, got allocated from free list, we cannot do checks.
938  return;
939  }
940  CHECK(HEAP->old_pointer_space()->Contains(clone->address()));
941 }
942 
943 
944 TEST(TestCodeFlushing) {
945  i::FLAG_allow_natives_syntax = true;
946  // If we do not flush code this test is invalid.
947  if (!FLAG_flush_code) return;
948  InitializeVM();
949  v8::HandleScope scope;
950  const char* source = "function foo() {"
951  " var x = 42;"
952  " var y = 42;"
953  " var z = x + y;"
954  "};"
955  "foo()";
956  Handle<String> foo_name = FACTORY->LookupAsciiSymbol("foo");
957 
958  // This compile will add the code to the compilation cache.
959  { v8::HandleScope scope;
960  CompileRun(source);
961  }
962 
963  // Check function is compiled.
964  Object* func_value = Isolate::Current()->context()->global_object()->
965  GetProperty(*foo_name)->ToObjectChecked();
966  CHECK(func_value->IsJSFunction());
967  Handle<JSFunction> function(JSFunction::cast(func_value));
968  CHECK(function->shared()->is_compiled());
969 
970  // TODO(1609) Currently incremental marker does not support code flushing.
971  HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
972  HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
973 
974  CHECK(function->shared()->is_compiled());
975 
976  HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
977  HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
978  HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
979  HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
980  HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
981  HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
982 
983  // foo should no longer be in the compilation cache
984  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
985  CHECK(!function->is_compiled() || function->IsOptimized());
986  // Call foo to get it recompiled.
987  CompileRun("foo()");
988  CHECK(function->shared()->is_compiled());
989  CHECK(function->is_compiled());
990 }
991 
992 
993 // Count the number of native contexts in the weak list of native contexts.
994 static int CountNativeContexts() {
995  int count = 0;
996  Object* object = HEAP->native_contexts_list();
997  while (!object->IsUndefined()) {
998  count++;
999  object = Context::cast(object)->get(Context::NEXT_CONTEXT_LINK);
1000  }
1001  return count;
1002 }
1003 
1004 
1005 // Count the number of user functions in the weak list of optimized
1006 // functions attached to a native context.
1007 static int CountOptimizedUserFunctions(v8::Handle<v8::Context> context) {
1008  int count = 0;
1009  Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1010  Object* object = icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST);
1011  while (object->IsJSFunction() && !JSFunction::cast(object)->IsBuiltin()) {
1012  count++;
1013  object = JSFunction::cast(object)->next_function_link();
1014  }
1015  return count;
1016 }
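// ---------------------------------------------------------------------------
// Editorial sketch (not part of test-heap.cc). Both counters above walk an
// intrusive weak list: each element stores the link to its successor
// (NEXT_CONTEXT_LINK / next_function_link) and the walk stops at a sentinel.
// The same traversal pattern in plain C++, with a hypothetical node type:
#include <cassert>
#include <cstddef>

struct SketchNode {
  SketchNode* next;  // intrusive link; NULL plays the role of the sentinel
};

static int SketchCountNodes(SketchNode* head) {
  int count = 0;
  for (SketchNode* node = head; node != NULL; node = node->next) {
    count++;
  }
  return count;
}

static void IllustrateWeakListWalk() {
  SketchNode c = { NULL };
  SketchNode b = { &c };
  SketchNode a = { &b };
  assert(SketchCountNodes(&a) == 3);
}
// ---------------------------------------------------------------------------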
1017 
1018 
1019 TEST(TestInternalWeakLists) {
1020  v8::V8::Initialize();
1021 
1022  static const int kNumTestContexts = 10;
1023 
1024  v8::HandleScope scope;
1025  v8::Persistent<v8::Context> ctx[kNumTestContexts];
1026 
1027  CHECK_EQ(0, CountNativeContexts());
1028 
1029  // Create a number of global contexts which get linked together.
1030  for (int i = 0; i < kNumTestContexts; i++) {
1031  ctx[i] = v8::Context::New();
1032 
1033  bool opt = (FLAG_always_opt && i::V8::UseCrankshaft());
1034 
1035  CHECK_EQ(i + 1, CountNativeContexts());
1036 
1037  ctx[i]->Enter();
1038 
1039  // Create a handle scope so no function objects get stuck in the outer
1040  // handle scope
1041  v8::HandleScope scope;
1042  const char* source = "function f1() { };"
1043  "function f2() { };"
1044  "function f3() { };"
1045  "function f4() { };"
1046  "function f5() { };";
1047  CompileRun(source);
1048  CHECK_EQ(0, CountOptimizedUserFunctions(ctx[i]));
1049  CompileRun("f1()");
1050  CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctions(ctx[i]));
1051  CompileRun("f2()");
1052  CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[i]));
1053  CompileRun("f3()");
1054  CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
1055  CompileRun("f4()");
1056  CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
1057  CompileRun("f5()");
1058  CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[i]));
1059 
1060  // Remove function f1, and
1061  CompileRun("f1=null");
1062 
1063  // Scavenge treats these references as strong.
1064  for (int j = 0; j < 10; j++) {
1065  HEAP->PerformScavenge();
1066  CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[i]));
1067  }
1068 
1069  // Mark compact handles the weak references.
1070  ISOLATE->compilation_cache()->Clear();
1071  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1072  CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
1073 
1074  // Get rid of f3 and f5 in the same way.
1075  CompileRun("f3=null");
1076  for (int j = 0; j < 10; j++) {
1077  HEAP->PerformScavenge();
1078  CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
1079  }
1080  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1081  CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
1082  CompileRun("f5=null");
1083  for (int j = 0; j < 10; j++) {
1084  HEAP->PerformScavenge();
1085  CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
1086  }
1087  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1088  CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[i]));
1089 
1090  ctx[i]->Exit();
1091  }
1092 
1093  // Force compilation cache cleanup.
1094  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1095 
1096  // Dispose the native contexts one by one.
1097  for (int i = 0; i < kNumTestContexts; i++) {
1098  ctx[i].Dispose();
1099  ctx[i].Clear();
1100 
1101  // Scavenge treats these references as strong.
1102  for (int j = 0; j < 10; j++) {
1103  HEAP->PerformScavenge();
1104  CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
1105  }
1106 
1107  // Mark compact handles the weak references.
1108  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1109  CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
1110  }
1111 
1112  CHECK_EQ(0, CountNativeContexts());
1113 }
1114 
1115 
1116 // Count the number of native contexts in the weak list of native contexts
1117 // causing a GC after the specified number of elements.
1118 static int CountNativeContextsWithGC(int n) {
1119  int count = 0;
1120  Handle<Object> object(HEAP->native_contexts_list());
1121  while (!object->IsUndefined()) {
1122  count++;
1123  if (count == n) HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1124  object =
1125  Handle<Object>(Context::cast(*object)->get(Context::NEXT_CONTEXT_LINK));
1126  }
1127  return count;
1128 }
1129 
1130 
1131 // Count the number of user functions in the weak list of optimized
1132 // functions attached to a native context causing a GC after the
1133 // specified number of elements.
1134 static int CountOptimizedUserFunctionsWithGC(v8::Handle<v8::Context> context,
1135  int n) {
1136  int count = 0;
1137  Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1138  Handle<Object> object(icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST));
1139  while (object->IsJSFunction() &&
1140  !Handle<JSFunction>::cast(object)->IsBuiltin()) {
1141  count++;
1142  if (count == n) HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1143  object = Handle<Object>(
1144  Object::cast(JSFunction::cast(*object)->next_function_link()));
1145  }
1146  return count;
1147 }
1148 
1149 
1150 TEST(TestInternalWeakListsTraverseWithGC) {
1151  v8::V8::Initialize();
1152 
1153  static const int kNumTestContexts = 10;
1154 
1155  v8::HandleScope scope;
1156  v8::Persistent<v8::Context> ctx[kNumTestContexts];
1157 
1158  CHECK_EQ(0, CountNativeContexts());
1159 
1160  // Create a number of contexts and check the length of the weak list both
1161  // with and without GCs while iterating the list.
1162  for (int i = 0; i < kNumTestContexts; i++) {
1163  ctx[i] = v8::Context::New();
1164  CHECK_EQ(i + 1, CountNativeContexts());
1165  CHECK_EQ(i + 1, CountNativeContextsWithGC(i / 2 + 1));
1166  }
1167 
1168  bool opt = (FLAG_always_opt && i::V8::UseCrankshaft());
1169 
1170  // Compile a number of functions and check the length of the weak list of
1171  // optimized functions both with and without GCs while iterating the list.
1172  ctx[0]->Enter();
1173  const char* source = "function f1() { };"
1174  "function f2() { };"
1175  "function f3() { };"
1176  "function f4() { };"
1177  "function f5() { };";
1178  CompileRun(source);
1179  CHECK_EQ(0, CountOptimizedUserFunctions(ctx[0]));
1180  CompileRun("f1()");
1181  CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctions(ctx[0]));
1182  CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1183  CompileRun("f2()");
1184  CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[0]));
1185  CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1186  CompileRun("f3()");
1187  CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[0]));
1188  CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
1189  CompileRun("f4()");
1190  CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[0]));
1191  CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 2));
1192  CompileRun("f5()");
1193  CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[0]));
1194  CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 4));
1195 
1196  ctx[0]->Exit();
1197 }
1198 
1199 
1200 TEST(TestSizeOfObjects) {
1201  v8::V8::Initialize();
1202 
1203  // Get initial heap size after several full GCs, which will stabilize
1204  // the heap size and return with sweeping finished completely.
1205  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1206  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1207  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1208  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1209  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1210  CHECK(HEAP->old_pointer_space()->IsSweepingComplete());
1211  int initial_size = static_cast<int>(HEAP->SizeOfObjects());
1212 
1213  {
1214  // Allocate objects on several different old-space pages so that
1215  // lazy sweeping kicks in for subsequent GC runs.
1216  AlwaysAllocateScope always_allocate;
1217  int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
1218  for (int i = 1; i <= 100; i++) {
1219  HEAP->AllocateFixedArray(8192, TENURED)->ToObjectChecked();
1220  CHECK_EQ(initial_size + i * filler_size,
1221  static_cast<int>(HEAP->SizeOfObjects()));
1222  }
1223  }
1224 
1225  // The heap size should go back to initial size after a full GC, even
1226  // though sweeping didn't finish yet.
1227  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1228 
1229  // Normally sweeping would not be complete here, but no guarantees.
1230 
1231  CHECK_EQ(initial_size, static_cast<int>(HEAP->SizeOfObjects()));
1232 
1233  // Advancing the sweeper step-wise should not change the heap size.
1234  while (!HEAP->old_pointer_space()->IsSweepingComplete()) {
1235  HEAP->old_pointer_space()->AdvanceSweeper(KB);
1236  CHECK_EQ(initial_size, static_cast<int>(HEAP->SizeOfObjects()));
1237  }
1238 }
1239 
1240 
1241 TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
1242  InitializeVM();
1243  HEAP->EnsureHeapIsIterable();
1244  intptr_t size_of_objects_1 = HEAP->SizeOfObjects();
1245  HeapIterator iterator;
1246  intptr_t size_of_objects_2 = 0;
1247  for (HeapObject* obj = iterator.next();
1248  obj != NULL;
1249  obj = iterator.next()) {
1250  if (!obj->IsFreeSpace()) {
1251  size_of_objects_2 += obj->Size();
1252  }
1253  }
1254  // Delta must be within 5% of the larger result.
1255  // TODO(gc): Tighten this up by distinguishing between byte
1256  // arrays that are real and those that merely mark free space
1257  // on the heap.
1258  if (size_of_objects_1 > size_of_objects_2) {
1259  intptr_t delta = size_of_objects_1 - size_of_objects_2;
1260  PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
1261  "Iterator: %" V8_PTR_PREFIX "d, "
1262  "delta: %" V8_PTR_PREFIX "d\n",
1263  size_of_objects_1, size_of_objects_2, delta);
1264  CHECK_GT(size_of_objects_1 / 20, delta);
1265  } else {
1266  intptr_t delta = size_of_objects_2 - size_of_objects_1;
1267  PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
1268  "Iterator: %" V8_PTR_PREFIX "d, "
1269  "delta: %" V8_PTR_PREFIX "d\n",
1270  size_of_objects_1, size_of_objects_2, delta);
1271  CHECK_GT(size_of_objects_2 / 20, delta);
1272  }
1273 }
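// ---------------------------------------------------------------------------
// Editorial sketch (not part of test-heap.cc). The precision test above
// accepts the two measurements as consistent when their difference stays
// below one twentieth (5%) of the larger value, which is what
// CHECK_GT(larger / 20, delta) expresses. The same tolerance rule as a
// standalone helper:
#include <cassert>
#include <stdint.h>

static bool SketchWithinFivePercent(intptr_t a, intptr_t b) {
  intptr_t larger = a > b ? a : b;
  intptr_t delta = a > b ? a - b : b - a;
  return larger / 20 > delta;  // strict comparison, matching CHECK_GT above
}

static void IllustrateTolerance() {
  assert(SketchWithinFivePercent(1000, 960));    // 4% apart: accepted
  assert(!SketchWithinFivePercent(1000, 900));   // 10% apart: rejected
}
// ---------------------------------------------------------------------------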
1274 
1275 
1276 static void FillUpNewSpace(NewSpace* new_space) {
1277  // Fill up new space to the point that it is completely full. Make sure
1278  // that the scavenger does not undo the filling.
1279  v8::HandleScope scope;
1280  AlwaysAllocateScope always_allocate;
1281  intptr_t available = new_space->EffectiveCapacity() - new_space->Size();
1282  intptr_t number_of_fillers = (available / FixedArray::SizeFor(32)) - 1;
1283  for (intptr_t i = 0; i < number_of_fillers; i++) {
1284  CHECK(HEAP->InNewSpace(*FACTORY->NewFixedArray(32, NOT_TENURED)));
1285  }
1286 }
1287 
1288 
1289 TEST(GrowAndShrinkNewSpace) {
1290  InitializeVM();
1291  NewSpace* new_space = HEAP->new_space();
1292 
1293  if (HEAP->ReservedSemiSpaceSize() == HEAP->InitialSemiSpaceSize() ||
1294  HEAP->MaxSemiSpaceSize() == HEAP->InitialSemiSpaceSize()) {
1295  // The max size cannot exceed the reserved size, since semispaces must be
1296  // always within the reserved space. We can't test new space growing and
1297  // shrinking if the reserved size is the same as the minimum (initial) size.
1298  return;
1299  }
1300 
1301  // Explicitly growing should double the space capacity.
1302  intptr_t old_capacity, new_capacity;
1303  old_capacity = new_space->Capacity();
1304  new_space->Grow();
1305  new_capacity = new_space->Capacity();
1306  CHECK(2 * old_capacity == new_capacity);
1307 
1308  old_capacity = new_space->Capacity();
1309  FillUpNewSpace(new_space);
1310  new_capacity = new_space->Capacity();
1311  CHECK(old_capacity == new_capacity);
1312 
1313  // Explicitly shrinking should not affect space capacity.
1314  old_capacity = new_space->Capacity();
1315  new_space->Shrink();
1316  new_capacity = new_space->Capacity();
1317  CHECK(old_capacity == new_capacity);
1318 
1319  // Let the scavenger empty the new space.
1320  HEAP->CollectGarbage(NEW_SPACE);
1321  CHECK_LE(new_space->Size(), old_capacity);
1322 
1323  // Explicitly shrinking should halve the space capacity.
1324  old_capacity = new_space->Capacity();
1325  new_space->Shrink();
1326  new_capacity = new_space->Capacity();
1327  CHECK(old_capacity == 2 * new_capacity);
1328 
1329  // Consecutive shrinking should not affect space capacity.
1330  old_capacity = new_space->Capacity();
1331  new_space->Shrink();
1332  new_space->Shrink();
1333  new_space->Shrink();
1334  new_capacity = new_space->Capacity();
1335  CHECK(old_capacity == new_capacity);
1336 }
1337 
1338 
1339 TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
1340  InitializeVM();
1341 
1342  if (HEAP->ReservedSemiSpaceSize() == HEAP->InitialSemiSpaceSize() ||
1343  HEAP->MaxSemiSpaceSize() == HEAP->InitialSemiSpaceSize()) {
1344  // The max size cannot exceed the reserved size, since semispaces must be
1345  // always within the reserved space. We can't test new space growing and
1346  // shrinking if the reserved size is the same as the minimum (initial) size.
1347  return;
1348  }
1349 
1350  v8::HandleScope scope;
1351  NewSpace* new_space = HEAP->new_space();
1352  intptr_t old_capacity, new_capacity;
1353  old_capacity = new_space->Capacity();
1354  new_space->Grow();
1355  new_capacity = new_space->Capacity();
1356  CHECK(2 * old_capacity == new_capacity);
1357  FillUpNewSpace(new_space);
1358  HEAP->CollectAllAvailableGarbage();
1359  new_capacity = new_space->Capacity();
1360  CHECK(old_capacity == new_capacity);
1361 }
1362 
1363 
1364 static int NumberOfGlobalObjects() {
1365  int count = 0;
1366  HeapIterator iterator;
1367  for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
1368  if (obj->IsGlobalObject()) count++;
1369  }
1370  return count;
1371 }
1372 
1373 
1374 // Test that we don't embed maps from foreign contexts into
1375 // optimized code.
1376 TEST(LeakNativeContextViaMap) {
1377  i::FLAG_allow_natives_syntax = true;
1378  v8::HandleScope outer_scope;
1379  v8::Persistent<v8::Context> ctx1 = v8::Context::New();
1380  v8::Persistent<v8::Context> ctx2 = v8::Context::New();
1381  ctx1->Enter();
1382 
1383  HEAP->CollectAllAvailableGarbage();
1384  CHECK_EQ(4, NumberOfGlobalObjects());
1385 
1386  {
1387  v8::HandleScope inner_scope;
1388  CompileRun("var v = {x: 42}");
1389  v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
1390  ctx2->Enter();
1391  ctx2->Global()->Set(v8_str("o"), v);
1392  v8::Local<v8::Value> res = CompileRun(
1393  "function f() { return o.x; }"
1394  "for (var i = 0; i < 10; ++i) f();"
1395  "%OptimizeFunctionOnNextCall(f);"
1396  "f();");
1397  CHECK_EQ(42, res->Int32Value());
1398  ctx2->Global()->Set(v8_str("o"), v8::Int32::New(0));
1399  ctx2->Exit();
1400  ctx1->Exit();
1401  ctx1.Dispose();
1403  }
1404  HEAP->CollectAllAvailableGarbage();
1405  CHECK_EQ(2, NumberOfGlobalObjects());
1406  ctx2.Dispose();
1407  HEAP->CollectAllAvailableGarbage();
1408  CHECK_EQ(0, NumberOfGlobalObjects());
1409 }
1410 
1411 
1412 // Test that we don't embed functions from foreign contexts into
1413 // optimized code.
1414 TEST(LeakNativeContextViaFunction) {
1415  i::FLAG_allow_natives_syntax = true;
1416  v8::HandleScope outer_scope;
1417  v8::Persistent<v8::Context> ctx1 = v8::Context::New();
1418  v8::Persistent<v8::Context> ctx2 = v8::Context::New();
1419  ctx1->Enter();
1420 
1421  HEAP->CollectAllAvailableGarbage();
1422  CHECK_EQ(4, NumberOfGlobalObjects());
1423 
1424  {
1425  v8::HandleScope inner_scope;
1426  CompileRun("var v = function() { return 42; }");
1427  v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
1428  ctx2->Enter();
1429  ctx2->Global()->Set(v8_str("o"), v);
1430  v8::Local<v8::Value> res = CompileRun(
1431  "function f(x) { return x(); }"
1432  "for (var i = 0; i < 10; ++i) f(o);"
1433  "%OptimizeFunctionOnNextCall(f);"
1434  "f(o);");
1435  CHECK_EQ(42, res->Int32Value());
1436  ctx2->Global()->Set(v8_str("o"), v8::Int32::New(0));
1437  ctx2->Exit();
1438  ctx1->Exit();
1439  ctx1.Dispose();
1441  }
1442  HEAP->CollectAllAvailableGarbage();
1443  CHECK_EQ(2, NumberOfGlobalObjects());
1444  ctx2.Dispose();
1445  HEAP->CollectAllAvailableGarbage();
1446  CHECK_EQ(0, NumberOfGlobalObjects());
1447 }
1448 
1449 
1450 TEST(LeakNativeContextViaMapKeyed) {
1451  i::FLAG_allow_natives_syntax = true;
1452  v8::HandleScope outer_scope;
1453  v8::Persistent<v8::Context> ctx1 = v8::Context::New();
1454  v8::Persistent<v8::Context> ctx2 = v8::Context::New();
1455  ctx1->Enter();
1456 
1457  HEAP->CollectAllAvailableGarbage();
1458  CHECK_EQ(4, NumberOfGlobalObjects());
1459 
1460  {
1461  v8::HandleScope inner_scope;
1462  CompileRun("var v = [42, 43]");
1463  v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
1464  ctx2->Enter();
1465  ctx2->Global()->Set(v8_str("o"), v);
1466  v8::Local<v8::Value> res = CompileRun(
1467  "function f() { return o[0]; }"
1468  "for (var i = 0; i < 10; ++i) f();"
1469  "%OptimizeFunctionOnNextCall(f);"
1470  "f();");
1471  CHECK_EQ(42, res->Int32Value());
1472  ctx2->Global()->Set(v8_str("o"), v8::Int32::New(0));
1473  ctx2->Exit();
1474  ctx1->Exit();
1475  ctx1.Dispose();
1477  }
1478  HEAP->CollectAllAvailableGarbage();
1479  CHECK_EQ(2, NumberOfGlobalObjects());
1480  ctx2.Dispose();
1481  HEAP->CollectAllAvailableGarbage();
1482  CHECK_EQ(0, NumberOfGlobalObjects());
1483 }
1484 
1485 
1486 TEST(LeakNativeContextViaMapProto) {
1487  i::FLAG_allow_natives_syntax = true;
1488  v8::HandleScope outer_scope;
1489  v8::Persistent<v8::Context> ctx1 = v8::Context::New();
1490  v8::Persistent<v8::Context> ctx2 = v8::Context::New();
1491  ctx1->Enter();
1492 
1493  HEAP->CollectAllAvailableGarbage();
1494  CHECK_EQ(4, NumberOfGlobalObjects());
1495 
1496  {
1497  v8::HandleScope inner_scope;
1498  CompileRun("var v = { y: 42}");
1499  v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
1500  ctx2->Enter();
1501  ctx2->Global()->Set(v8_str("o"), v);
1502  v8::Local<v8::Value> res = CompileRun(
1503  "function f() {"
1504  " var p = {x: 42};"
1505  " p.__proto__ = o;"
1506  " return p.x;"
1507  "}"
1508  "for (var i = 0; i < 10; ++i) f();"
1509  "%OptimizeFunctionOnNextCall(f);"
1510  "f();");
1511  CHECK_EQ(42, res->Int32Value());
1512  ctx2->Global()->Set(v8_str("o"), v8::Int32::New(0));
1513  ctx2->Exit();
1514  ctx1->Exit();
1515  ctx1.Dispose();
1517  }
1518  HEAP->CollectAllAvailableGarbage();
1519  CHECK_EQ(2, NumberOfGlobalObjects());
1520  ctx2.Dispose();
1521  HEAP->CollectAllAvailableGarbage();
1522  CHECK_EQ(0, NumberOfGlobalObjects());
1523 }
1524 
1525 
1526 TEST(InstanceOfStubWriteBarrier) {
1527  i::FLAG_allow_natives_syntax = true;
1528 #ifdef VERIFY_HEAP
1529  i::FLAG_verify_heap = true;
1530 #endif
1531 
1532  InitializeVM();
1533  if (!i::V8::UseCrankshaft()) return;
1534  v8::HandleScope outer_scope;
1535 
1536  {
1537  v8::HandleScope scope;
1538  CompileRun(
1539  "function foo () { }"
1540  "function mkbar () { return new (new Function(\"\")) (); }"
1541  "function f (x) { return (x instanceof foo); }"
1542  "function g () { f(mkbar()); }"
1543  "f(new foo()); f(new foo());"
1544  "%OptimizeFunctionOnNextCall(f);"
1545  "f(new foo()); g();");
1546  }
1547 
1548  IncrementalMarking* marking = HEAP->incremental_marking();
1549  marking->Abort();
1550  marking->Start();
1551 
1552  Handle<JSFunction> f =
1553  v8::Utils::OpenHandle(
1554  *v8::Handle<v8::Function>::Cast(
1555  v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
1556 
1557  CHECK(f->IsOptimized());
1558 
1559  while (!Marking::IsBlack(Marking::MarkBitFrom(f->code())) &&
1560  !marking->IsStopped()) {
1561  // Discard any pending GC requests otherwise we will get GC when we enter
1562  // code below.
1563  marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
1564  }
1565 
1566  CHECK(marking->IsMarking());
1567 
1568  {
1569  v8::HandleScope scope;
1570  v8::Handle<v8::Object> global = v8::Context::GetCurrent()->Global();
1571  v8::Handle<v8::Function> g =
1572  v8::Handle<v8::Function>::Cast(global->Get(v8_str("g")));
1573  g->Call(global, 0, NULL);
1574  }
1575 
1576  HEAP->incremental_marking()->set_should_hurry(true);
1577  HEAP->CollectGarbage(OLD_POINTER_SPACE);
1578 }
1579 
1580 
1581 TEST(PrototypeTransitionClearing) {
1582  InitializeVM();
1583  v8::HandleScope scope;
1584 
1585  CompileRun(
1586  "var base = {};"
1587  "var live = [];"
1588  "for (var i = 0; i < 10; i++) {"
1589  " var object = {};"
1590  " var prototype = {};"
1591  " object.__proto__ = prototype;"
1592  " if (i >= 3) live.push(object, prototype);"
1593  "}");
1594 
1595  Handle<JSObject> baseObject =
1596  v8::Utils::OpenHandle(
1597  *v8::Handle<v8::Object>::Cast(
1598  v8::Context::GetCurrent()->Global()->Get(v8_str("base"))));
1599 
1600  // Verify that only dead prototype transitions are cleared.
1601  CHECK_EQ(10, baseObject->map()->NumberOfProtoTransitions());
1602  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1603  const int transitions = 10 - 3;
1604  CHECK_EQ(transitions, baseObject->map()->NumberOfProtoTransitions());
1605 
1606  // Verify that prototype transitions array was compacted.
1607  FixedArray* trans = baseObject->map()->GetPrototypeTransitions();
1608  for (int i = 0; i < transitions; i++) {
1609  int j = Map::kProtoTransitionHeaderSize +
1610  i * Map::kProtoTransitionElementsPerEntry;
1611  CHECK(trans->get(j + Map::kProtoTransitionMapOffset)->IsMap());
1612  Object* proto = trans->get(j + Map::kProtoTransitionPrototypeOffset);
1613  CHECK(proto->IsTheHole() || proto->IsJSObject());
1614  }
1615 
1616  // Make sure next prototype is placed on an old-space evacuation candidate.
1617  Handle<JSObject> prototype;
1618  PagedSpace* space = HEAP->old_pointer_space();
1619  do {
1620  prototype = FACTORY->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS, TENURED);
1621  } while (space->FirstPage() == space->LastPage() ||
1622  !space->LastPage()->Contains(prototype->address()));
1623 
1624  // Add a prototype on an evacuation candidate and verify that transition
1625  // clearing correctly records slots in prototype transition array.
1626  i::FLAG_always_compact = true;
1627  Handle<Map> map(baseObject->map());
1628  CHECK(!space->LastPage()->Contains(
1629  map->GetPrototypeTransitions()->address()));
1630  CHECK(space->LastPage()->Contains(prototype->address()));
1631  baseObject->SetPrototype(*prototype, false)->ToObjectChecked();
1632  CHECK(map->GetPrototypeTransition(*prototype)->IsMap());
1633  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1634  CHECK(map->GetPrototypeTransition(*prototype)->IsMap());
1635 }
1636 
1637 
1638 TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
1639  i::FLAG_allow_natives_syntax = true;
1640 #ifdef VERIFY_HEAP
1641  i::FLAG_verify_heap = true;
1642 #endif
1643 
1644  InitializeVM();
1645  if (!i::V8::UseCrankshaft()) return;
1646  v8::HandleScope outer_scope;
1647 
1648  {
1649  v8::HandleScope scope;
1650  CompileRun(
1651  "function f () {"
1652  " var s = 0;"
1653  " for (var i = 0; i < 100; i++) s += i;"
1654  " return s;"
1655  "}"
1656  "f(); f();"
1657  "%OptimizeFunctionOnNextCall(f);"
1658  "f();");
1659  }
1660  Handle<JSFunction> f =
1661  v8::Utils::OpenHandle(
1662  *v8::Handle<v8::Function>::Cast(
1663  v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
1664  CHECK(f->IsOptimized());
1665 
1666  IncrementalMarking* marking = HEAP->incremental_marking();
1667  marking->Abort();
1668  marking->Start();
1669 
1670  // The following two calls will increment HEAP->global_ic_age().
1671  const int kLongIdlePauseInMs = 1000;
1672  v8::V8::ContextDisposedNotification();
1673  v8::V8::IdleNotification(kLongIdlePauseInMs);
1674 
1675  while (!marking->IsStopped() && !marking->IsComplete()) {
1676  marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
1677  }
1678  if (!marking->IsStopped() || marking->should_hurry()) {
1679  // We don't normally finish a GC via Step(), we normally finish by
1680  // setting the stack guard and then do the final steps in the stack
1681  // guard interrupt. But here we didn't ask for that, and there is no
1682  // JS code running to trigger the interrupt, so we explicitly finalize
1683  // here.
1684  HEAP->CollectAllGarbage(Heap::kNoGCFlags,
1685  "Test finalizing incremental mark-sweep");
1686  }
1687 
1688  CHECK_EQ(HEAP->global_ic_age(), f->shared()->ic_age());
1689  CHECK_EQ(0, f->shared()->opt_count());
1690  CHECK_EQ(0, f->shared()->code()->profiler_ticks());
1691 }
1692 
1693 
1694 TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
1695  i::FLAG_allow_natives_syntax = true;
1696 #ifdef VERIFY_HEAP
1697  i::FLAG_verify_heap = true;
1698 #endif
1699 
1700  InitializeVM();
1701  if (!i::V8::UseCrankshaft()) return;
1702  v8::HandleScope outer_scope;
1703 
1704  {
1705  v8::HandleScope scope;
1706  CompileRun(
1707  "function f () {"
1708  " var s = 0;"
1709  " for (var i = 0; i < 100; i++) s += i;"
1710  " return s;"
1711  "}"
1712  "f(); f();"
1713  "%OptimizeFunctionOnNextCall(f);"
1714  "f();");
1715  }
1716  Handle<JSFunction> f =
1717  v8::Utils::OpenHandle(
1718  *v8::Handle<v8::Function>::Cast(
1719  v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
1720  CHECK(f->IsOptimized());
1721 
1722  HEAP->incremental_marking()->Abort();
1723 
1724  // The following two calls will increment HEAP->global_ic_age().
1725  // Since incremental marking is off, IdleNotification will do full GC.
1726  const int kLongIdlePauseInMs = 1000;
1727  v8::V8::ContextDisposedNotification();
1728  v8::V8::IdleNotification(kLongIdlePauseInMs);
1729 
1730  CHECK_EQ(HEAP->global_ic_age(), f->shared()->ic_age());
1731  CHECK_EQ(0, f->shared()->opt_count());
1732  CHECK_EQ(0, f->shared()->code()->profiler_ticks());
1733 }
1734 
1735 
1736 // Test that HAllocateObject will always return an object in new-space.
1737 TEST(OptimizedAllocationAlwaysInNewSpace) {
1738  i::FLAG_allow_natives_syntax = true;
1739  InitializeVM();
1740  if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
1741  v8::HandleScope scope;
1742 
1743  FillUpNewSpace(HEAP->new_space());
1744  AlwaysAllocateScope always_allocate;
1745  v8::Local<v8::Value> res = CompileRun(
1746  "function c(x) {"
1747  " this.x = x;"
1748  " for (var i = 0; i < 32; i++) {"
1749  " this['x' + i] = x;"
1750  " }"
1751  "}"
1752  "function f(x) { return new c(x); };"
1753  "f(1); f(2); f(3);"
1754  "%OptimizeFunctionOnNextCall(f);"
1755  "f(4);");
1756  CHECK_EQ(4, res->ToObject()->GetRealNamedProperty(v8_str("x"))->Int32Value());
1757 
1758  Handle<JSObject> o =
1759  v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
1760 
1761  CHECK(HEAP->InNewSpace(*o));
1762 }
1763 
1764 
1765 static int CountMapTransitions(Map* map) {
1766  return map->transitions()->number_of_transitions();
1767 }
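// Illustrative sketch (not part of the original test-heap.cc): each new named
// property added to a plain object introduces one map transition, which is
// what CountMapTransitions() observes on the root map in Regress1465 below.
// The test name and property name are hypothetical.
TEST(MapTransitionCountSketch) {
  InitializeVM();
  v8::HandleScope scope;
  // 'o' transitions away from the initial object map; 'root' keeps it.
  CompileRun("var o = new Object; o.someProp = 0;"
             "var root = new Object;");
  Handle<JSObject> root =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Object>::Cast(
              v8::Context::GetCurrent()->Global()->Get(v8_str("root"))));
  // At least the transition for 'someProp' should be visible on root's map.
  CHECK_LE(1, CountMapTransitions(root->map()));
}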
1768 
1769 
1770 // Go through all incremental marking steps in one swoop.
1771 static void SimulateIncrementalMarking() {
1772  IncrementalMarking* marking = HEAP->incremental_marking();
1773  CHECK(marking->IsStopped());
1774  marking->Start();
1775  CHECK(marking->IsMarking());
1776  while (!marking->IsComplete()) {
1777  marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
1778  }
1779  CHECK(marking->IsComplete());
1780 }
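// Illustrative sketch (not part of the original test-heap.cc): the pattern used
// by the tests below is to drive incremental marking to completion with the
// helper above and then let a full GC finalize the cycle. The test name is
// hypothetical.
TEST(SimulateIncrementalMarkingSketch) {
  InitializeVM();
  v8::HandleScope scope;
  SimulateIncrementalMarking();               // all marking steps in one swoop
  HEAP->CollectAllGarbage(Heap::kNoGCFlags);  // finalize the mark-sweep cycle
}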
1781 
1782 
1783 // Test that map transitions are cleared and maps are collected with
1784 // incremental marking as well.
1785 TEST(Regress1465) {
1786  i::FLAG_allow_natives_syntax = true;
1787  i::FLAG_trace_incremental_marking = true;
1788  InitializeVM();
1789  v8::HandleScope scope;
1790  static const int transitions_count = 256;
1791 
1792  {
1793  AlwaysAllocateScope always_allocate;
1794  for (int i = 0; i < transitions_count; i++) {
1795  EmbeddedVector<char, 64> buffer;
1796  OS::SNPrintF(buffer, "var o = new Object; o.prop%d = %d;", i, i);
1797  CompileRun(buffer.start());
1798  }
1799  CompileRun("var root = new Object;");
1800  }
1801 
1802  Handle<JSObject> root =
1803  v8::Utils::OpenHandle(
1804  *v8::Handle<v8::Object>::Cast(
1805  v8::Context::GetCurrent()->Global()->Get(v8_str("root"))));
1806 
1807  // Count number of live transitions before marking.
1808  int transitions_before = CountMapTransitions(root->map());
1809  CompileRun("%DebugPrint(root);");
1810  CHECK_EQ(transitions_count, transitions_before);
1811 
1812  SimulateIncrementalMarking();
1813  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1814 
1815  // Count number of live transitions after marking. Note that one transition
1816  // is left, because 'o' still holds an instance of one transition target.
1817  int transitions_after = CountMapTransitions(root->map());
1818  CompileRun("%DebugPrint(root);");
1819  CHECK_EQ(1, transitions_after);
1820 }
1821 
1822 
1823 TEST(Regress2143a) {
1824  i::FLAG_collect_maps = true;
1825  i::FLAG_incremental_marking = true;
1826  InitializeVM();
1827  v8::HandleScope scope;
1828 
1829  // Prepare a map transition from the root object together with a yet
1830  // untransitioned root object.
1831  CompileRun("var root = new Object;"
1832  "root.foo = 0;"
1833  "root = new Object;");
1834 
1835  SimulateIncrementalMarking();
1836 
1837  // Compile a StoreIC that performs the prepared map transition. This
1838  // will restart incremental marking and should make sure the root is
1839  // marked grey again.
1840  CompileRun("function f(o) {"
1841  " o.foo = 0;"
1842  "}"
1843  "f(new Object);"
1844  "f(root);");
1845 
1846  // This bug only triggers with aggressive IC clearing.
1847  HEAP->AgeInlineCaches();
1848 
1849  // Explicitly request GC to perform final marking step and sweeping.
1850  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1851 
1852  Handle<JSObject> root =
1853  v8::Utils::OpenHandle(
1854  *v8::Handle<v8::Object>::Cast(
1855  v8::Context::GetCurrent()->Global()->Get(v8_str("root"))));
1856 
1857  // The root object should be in a sane state.
1858  CHECK(root->IsJSObject());
1859  CHECK(root->map()->IsMap());
1860 }
1861 
1862 
1863 TEST(Regress2143b) {
1864  i::FLAG_collect_maps = true;
1865  i::FLAG_incremental_marking = true;
1866  i::FLAG_allow_natives_syntax = true;
1867  InitializeVM();
1868  v8::HandleScope scope;
1869 
1870  // Prepare a map transition from the root object together with a yet
1871  // untransitioned root object.
1872  CompileRun("var root = new Object;"
1873  "root.foo = 0;"
1874  "root = new Object;");
1875 
1876  SimulateIncrementalMarking();
1877 
1878  // Compile an optimized LStoreNamedField that performs the prepared
1879  // map transition. This will restart incremental marking and should
1880  // make sure the root is marked grey again.
1881  CompileRun("function f(o) {"
1882  " o.foo = 0;"
1883  "}"
1884  "f(new Object);"
1885  "f(new Object);"
1886  "%OptimizeFunctionOnNextCall(f);"
1887  "f(root);"
1888  "%DeoptimizeFunction(f);");
1889 
1890  // This bug only triggers with aggressive IC clearing.
1891  HEAP->AgeInlineCaches();
1892 
1893  // Explicitly request GC to perform final marking step and sweeping.
1894  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1895 
1896  Handle<JSObject> root =
1897  v8::Utils::OpenHandle(
1898  *v8::Handle<v8::Object>::Cast(
1899  v8::Context::GetCurrent()->Global()->Get(v8_str("root"))));
1900 
1901  // The root object should be in a sane state.
1902  CHECK(root->IsJSObject());
1903  CHECK(root->map()->IsMap());
1904 }
1905 
1906 
1907 // Implemented in the test-alloc.cc test suite.
1908 void SimulateFullSpace(PagedSpace* space);
1909 
1910 
1911 TEST(ReleaseOverReservedPages) {
1912  i::FLAG_trace_gc = true;
1913  // The optimizer can allocate stuff, messing up the test.
1914  i::FLAG_crankshaft = false;
1915  i::FLAG_always_opt = false;
1916  InitializeVM();
1917  v8::HandleScope scope;
1918  static const int number_of_test_pages = 20;
1919 
1920  // Prepare many pages with low live-bytes count.
1921  PagedSpace* old_pointer_space = HEAP->old_pointer_space();
1922  CHECK_EQ(1, old_pointer_space->CountTotalPages());
1923  for (int i = 0; i < number_of_test_pages; i++) {
1924  AlwaysAllocateScope always_allocate;
1925  SimulateFullSpace(old_pointer_space);
1926  FACTORY->NewFixedArray(1, TENURED);
1927  }
1928  CHECK_EQ(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
1929 
1930  // Triggering one GC will cause a lot of garbage to be discovered but
1931  // evenly spread across all allocated pages.
1932  HEAP->CollectAllGarbage(Heap::kNoGCFlags, "triggered for preparation");
1933  CHECK_EQ(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
1934 
1935  // Triggering subsequent GCs should cause at least half of the pages
1936  // to be released to the OS after at most two cycles.
1937  HEAP->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 1");
1938  CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
1939  HEAP->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 2");
1940  CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages() * 2);
1941 
1942  // Triggering a last-resort GC should cause all pages to be released to the
1943  // OS so that other processes can seize the memory. If we get a failure here
1944  // where there are 2 pages left instead of 1, then we should increase the
1945  // size of the first page a little in SizeOfFirstPage in spaces.cc. The
1946  // first page should be small in order to reduce memory used when the VM
1947  // boots, but if the 20 small arrays don't fit on the first page then that's
1948  // an indication that it is too small.
1949  HEAP->CollectAllAvailableGarbage("triggered really hard");
1950  CHECK_EQ(1, old_pointer_space->CountTotalPages());
1951 }
1952 
1953 
1954 TEST(Regress2237) {
1955  InitializeVM();
1956  v8::HandleScope scope;
1957  Handle<String> slice(HEAP->empty_string());
1958 
1959  {
1960  // Generate a parent that lives in new-space.
1961  v8::HandleScope inner_scope;
1962  const char* c = "This text is long enough to trigger sliced strings.";
1963  Handle<String> s = FACTORY->NewStringFromAscii(CStrVector(c));
1964  CHECK(s->IsSeqAsciiString());
1965  CHECK(HEAP->InNewSpace(*s));
1966 
1967  // Generate a sliced string that is based on the above parent and
1968  // lives in old-space.
1969  FillUpNewSpace(HEAP->new_space());
1970  AlwaysAllocateScope always_allocate;
1971  Handle<String> t;
1972  // TODO(mstarzinger): Unfortunately FillUpNewSpace() still leaves
1973  // some slack, so we need to allocate a few sliced strings.
1974  for (int i = 0; i < 16; i++) {
1975  t = FACTORY->NewProperSubString(s, 5, 35);
1976  }
1977  CHECK(t->IsSlicedString());
1978  CHECK(!HEAP->InNewSpace(*t));
1979  *slice.location() = *t.location();
1980  }
1981 
1982  CHECK(SlicedString::cast(*slice)->parent()->IsSeqAsciiString());
1983  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
1984  CHECK(SlicedString::cast(*slice)->parent()->IsSeqAsciiString());
1985 }
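// Illustrative sketch (not part of the original test-heap.cc): the minimal
// slicing step from Regress2237 above, without the new-space eviction. A
// proper substring of a long enough sequential parent yields a SlicedString
// that keeps the parent alive. The test name is hypothetical.
TEST(SlicedStringSketch) {
  InitializeVM();
  v8::HandleScope scope;
  const char* c = "This text is long enough to trigger sliced strings.";
  Handle<String> s = FACTORY->NewStringFromAscii(CStrVector(c));
  Handle<String> t = FACTORY->NewProperSubString(s, 5, 35);
  CHECK(t->IsSlicedString());
  CHECK(SlicedString::cast(*t)->parent()->IsSeqAsciiString());
}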
1986 
1987 
1988 #ifdef OBJECT_PRINT
1989 TEST(PrintSharedFunctionInfo) {
1990  InitializeVM();
1991  v8::HandleScope scope;
1992  const char* source = "f = function() { return 987654321; }\n"
1993  "g = function() { return 123456789; }\n";
1994  CompileRun(source);
1995  Handle<JSFunction> g =
1996  v8::Utils::OpenHandle(
1997  *v8::Handle<v8::Function>::Cast(
1998  v8::Context::GetCurrent()->Global()->Get(v8_str("g"))));
1999 
2000  AssertNoAllocation no_alloc;
2001  g->shared()->PrintLn();
2002 }
2003 #endif // OBJECT_PRINT
2004 
2005 
2006 TEST(Regress2211) {
2007  InitializeVM();
2008  v8::HandleScope scope;
2009 
2010  v8::Handle<v8::String> value = v8_str("val string");
2011  Smi* hash = Smi::FromInt(321);
2012  Heap* heap = Isolate::Current()->heap();
2013 
2014  for (int i = 0; i < 2; i++) {
2015  // Store identity hash first and common hidden property second.
2016  v8::Handle<v8::Object> obj = v8::Object::New();
2017  Handle<JSObject> internal_obj = v8::Utils::OpenHandle(*obj);
2018  CHECK(internal_obj->HasFastProperties());
2019 
2020  // In the first iteration, set hidden value first and identity hash second.
2021  // In the second iteration, reverse the order.
2022  if (i == 0) obj->SetHiddenValue(v8_str("key string"), value);
2023  MaybeObject* maybe_obj = internal_obj->SetIdentityHash(hash,
2024  ALLOW_CREATION);
2025  CHECK(!maybe_obj->IsFailure());
2026  if (i == 1) obj->SetHiddenValue(v8_str("key string"), value);
2027 
2028  // Check values.
2029  CHECK_EQ(hash,
2030  internal_obj->GetHiddenProperty(heap->identity_hash_symbol()));
2031  CHECK(value->Equals(obj->GetHiddenValue(v8_str("key string"))));
2032 
2033  // Check size.
2034  DescriptorArray* descriptors = internal_obj->map()->instance_descriptors();
2035  ObjectHashTable* hashtable = ObjectHashTable::cast(
2036  internal_obj->FastPropertyAt(descriptors->GetFieldIndex(0)));
2037  // HashTable header (5) and 4 initial entries (8).
2038  CHECK_LE(hashtable->SizeFor(hashtable->length()), 13 * kPointerSize);
2039  }
2040 }
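// Illustrative sketch (not part of the original test-heap.cc): the public-API
// counterpart of the state set up in Regress2211 above, assuming
// v8::Object::GetIdentityHash() is available in this API version. Hidden
// values and the identity hash share the object's hidden backing store. The
// test name is hypothetical.
TEST(HiddenPropertySketch) {
  InitializeVM();
  v8::HandleScope scope;
  v8::Handle<v8::Object> obj = v8::Object::New();
  obj->SetHiddenValue(v8_str("key string"), v8_str("val string"));
  int hash = obj->GetIdentityHash();        // forces creation of the hash
  CHECK_EQ(hash, obj->GetIdentityHash());   // the identity hash is stable
  CHECK(v8_str("val string")->Equals(obj->GetHiddenValue(v8_str("key string"))));
}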
2041 
2042 
2043 TEST(IncrementalMarkingClearsTypeFeedbackCells) {
2044  if (i::FLAG_always_opt) return;
2045  InitializeVM();
2046  v8::HandleScope scope;
2047  v8::Local<v8::Value> fun1, fun2;
2048 
2049  {
2050  LocalContext env;
2051  CompileRun("function fun() {};");
2052  fun1 = env->Global()->Get(v8_str("fun"));
2053  }
2054 
2055  {
2056  LocalContext env;
2057  CompileRun("function fun() {};");
2058  fun2 = env->Global()->Get(v8_str("fun"));
2059  }
2060 
2061  // Prepare function f that contains type feedback for closures
2062  // originating from two different native contexts.
2063  v8::Context::GetCurrent()->Global()->Set(v8_str("fun1"), fun1);
2064  v8::Context::GetCurrent()->Global()->Set(v8_str("fun2"), fun2);
2065  CompileRun("function f(a, b) { a(); b(); } f(fun1, fun2);");
2066  Handle<JSFunction> f =
2067  v8::Utils::OpenHandle(
2068  *v8::Handle<v8::Function>::Cast(
2069  v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
2070  Handle<TypeFeedbackCells> cells(TypeFeedbackInfo::cast(
2071  f->shared()->code()->type_feedback_info())->type_feedback_cells());
2072 
2073  CHECK_EQ(2, cells->CellCount());
2074  CHECK(cells->Cell(0)->value()->IsJSFunction());
2075  CHECK(cells->Cell(1)->value()->IsJSFunction());
2076 
2077  SimulateIncrementalMarking();
2078  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
2079 
2080  CHECK_EQ(2, cells->CellCount());
2081  CHECK(cells->Cell(0)->value()->IsTheHole());
2082  CHECK(cells->Cell(1)->value()->IsTheHole());
2083 }
2084 
2085 
2086 static Code* FindFirstIC(Code* code, Code::Kind kind) {
2087  int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
2088  RelocInfo::ModeMask(RelocInfo::CONSTRUCT_CALL) |
2089  RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID) |
2090  RelocInfo::ModeMask(RelocInfo::CODE_TARGET_CONTEXT);
2091  for (RelocIterator it(code, mask); !it.done(); it.next()) {
2092  RelocInfo* info = it.rinfo();
2093  Code* target = Code::GetCodeFromTargetAddress(info->target_address());
2094  if (target->is_inline_cache_stub() && target->kind() == kind) {
2095  return target;
2096  }
2097  }
2098  return NULL;
2099 }
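// Illustrative sketch (not part of the original test-heap.cc): FindFirstIC()
// is used by the tests below to probe the first matching inline cache in a
// function's unoptimized code, for example a LOAD_IC. The test name is
// hypothetical.
TEST(FindFirstICSketch) {
  InitializeVM();
  v8::HandleScope scope;
  CompileRun("function f(o) { return o.x; } f({x: 1}); f({x: 2});");
  Handle<JSFunction> f =
      v8::Utils::OpenHandle(
          *v8::Handle<v8::Function>::Cast(
              v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
  Code* ic = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
  CHECK(ic != NULL);
  CHECK(ic->is_inline_cache_stub());
}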
2100 
2101 
2102 TEST(IncrementalMarkingPreservesMonomorhpicIC) {
2103  if (i::FLAG_always_opt) return;
2104  InitializeVM();
2105  v8::HandleScope scope;
2106 
2107  // Prepare function f that contains a monomorphic IC for an object
2108  // originating from the same native context.
2109  CompileRun("function fun() { this.x = 1; }; var obj = new fun();"
2110  "function f(o) { return o.x; } f(obj); f(obj);");
2111  Handle<JSFunction> f =
2112  v8::Utils::OpenHandle(
2113  *v8::Handle<v8::Function>::Cast(
2114  v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
2115 
2116  Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
2117  CHECK(ic_before->ic_state() == MONOMORPHIC);
2118 
2119  SimulateIncrementalMarking();
2120  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
2121 
2122  Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
2123  CHECK(ic_after->ic_state() == MONOMORPHIC);
2124 }
2125 
2126 
2127 TEST(IncrementalMarkingClearsMonomorhpicIC) {
2128  if (i::FLAG_always_opt) return;
2129  InitializeVM();
2130  v8::HandleScope scope;
2131  v8::Local<v8::Value> obj1;
2132 
2133  {
2134  LocalContext env;
2135  CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
2136  obj1 = env->Global()->Get(v8_str("obj"));
2137  }
2138 
2139  // Prepare function f that contains a monomorphic IC for an object
2140  // originating from a different native context.
2141  v8::Context::GetCurrent()->Global()->Set(v8_str("obj1"), obj1);
2142  CompileRun("function f(o) { return o.x; } f(obj1); f(obj1);");
2143  Handle<JSFunction> f =
2144  v8::Utils::OpenHandle(
2145  *v8::Handle<v8::Function>::Cast(
2146  v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
2147 
2148  Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
2149  CHECK(ic_before->ic_state() == MONOMORPHIC);
2150 
2151  // Fire context dispose notification.
2152  v8::V8::ContextDisposedNotification();
2153  SimulateIncrementalMarking();
2154  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
2155 
2156  Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
2157  CHECK(ic_after->ic_state() == UNINITIALIZED);
2158 }
2159 
2160 
2161 TEST(IncrementalMarkingClearsPolymorhpicIC) {
2162  if (i::FLAG_always_opt) return;
2163  InitializeVM();
2164  v8::HandleScope scope;
2165  v8::Local<v8::Value> obj1, obj2;
2166 
2167  {
2168  LocalContext env;
2169  CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
2170  obj1 = env->Global()->Get(v8_str("obj"));
2171  }
2172 
2173  {
2174  LocalContext env;
2175  CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
2176  obj2 = env->Global()->Get(v8_str("obj"));
2177  }
2178 
2179  // Prepare function f that contains a polymorphic IC for objects
2180  // originating from two different native contexts.
2181  v8::Context::GetCurrent()->Global()->Set(v8_str("obj1"), obj1);
2182  v8::Context::GetCurrent()->Global()->Set(v8_str("obj2"), obj2);
2183  CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
2184  Handle<JSFunction> f =
2185  v8::Utils::OpenHandle(
2186  *v8::Handle<v8::Function>::Cast(
2187  v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
2188 
2189  Code* ic_before = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
2190  CHECK(ic_before->ic_state() == MEGAMORPHIC);
2191 
2192  // Fire context dispose notification.
2193  v8::V8::ContextDisposedNotification();
2194  SimulateIncrementalMarking();
2195  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
2196 
2197  Code* ic_after = FindFirstIC(f->shared()->code(), Code::LOAD_IC);
2198  CHECK(ic_after->ic_state() == UNINITIALIZED);
2199 }
2200 
2201 
2202 class SourceResource: public v8::String::ExternalAsciiStringResource {
2203  public:
2204  explicit SourceResource(const char* data)
2205  : data_(data), length_(strlen(data)) { }
2206 
2207  virtual void Dispose() {
2208  i::DeleteArray(data_);
2209  data_ = NULL;
2210  }
2211 
2212  const char* data() const { return data_; }
2213 
2214  size_t length() const { return length_; }
2215 
2216  bool IsDisposed() { return data_ == NULL; }
2217 
2218  private:
2219  const char* data_;
2220  size_t length_;
2221 };
2222 
2223 
2224 TEST(ReleaseStackTraceData) {
2225  // Test that the data retained by the Error.stack accessor is released
2226  // after the first time the accessor is fired. We use an external string
2227  // to check whether the data is being released since the external string
2228  // resource's callback is fired when the external string is GC'ed.
2229  InitializeVM();
2230  v8::HandleScope scope;
2231  static const char* source = "var error = 1; "
2232  "try { "
2233  " throw new Error(); "
2234  "} catch (e) { "
2235  " error = e; "
2236  "} ";
2237  SourceResource* resource = new SourceResource(i::StrDup(source));
2238  {
2239  v8::HandleScope scope;
2240  v8::Handle<v8::String> source_string = v8::String::NewExternal(resource);
2241  v8::Script::Compile(source_string)->Run();
2242  CHECK(!resource->IsDisposed());
2243  }
2244  HEAP->CollectAllAvailableGarbage();
2245  // External source is being retained by the stack trace.
2246  CHECK(!resource->IsDisposed());
2247 
2248  CompileRun("error.stack; error.stack;");
2249  HEAP->CollectAllAvailableGarbage();
2250  // External source has been released.
2251  CHECK(resource->IsDisposed());
2252 
2253  delete resource;
2254 }
2255 
2256 
2257 TEST(Regression144230) {
2258  InitializeVM();
2259  v8::HandleScope scope;
2260 
2261  // First make sure that the uninitialized CallIC stub is on a single page
2262  // that will later be selected as an evacuation candidate.
2263  {
2264  v8::HandleScope inner_scope;
2265  AlwaysAllocateScope always_allocate;
2266  SimulateFullSpace(HEAP->code_space());
2267  ISOLATE->stub_cache()->ComputeCallInitialize(9, RelocInfo::CODE_TARGET);
2268  }
2269 
2270  // Second compile a CallIC and execute it once so that it gets patched to
2271  // the pre-monomorphic stub. These code objects are on yet another page.
2272  {
2273  v8::HandleScope inner_scope;
2274  AlwaysAllocateScope always_allocate;
2275  SimulateFullSpace(HEAP->code_space());
2276  CompileRun("var o = { f:function(a,b,c,d,e,f,g,h,i) {}};"
2277  "function call() { o.f(1,2,3,4,5,6,7,8,9); };"
2278  "call();");
2279  }
2280 
2281  // Third we fill up the last page of the code space so that it does not get
2282  // chosen as an evacuation candidate.
2283  {
2284  v8::HandleScope inner_scope;
2285  AlwaysAllocateScope always_allocate;
2286  CompileRun("for (var i = 0; i < 2000; i++) {"
2287  " eval('function f' + i + '() { return ' + i +'; };' +"
2288  " 'f' + i + '();');"
2289  "}");
2290  }
2291  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
2292 
2293  // Fourth is the tricky part. Make sure the code containing the CallIC is
2294  // visited first without clearing the IC. The shared function info is then
2295  // visited later, causing the CallIC to be cleared.
2296  Handle<String> name = FACTORY->LookupAsciiSymbol("call");
2297  Handle<GlobalObject> global(ISOLATE->context()->global_object());
2298  MaybeObject* maybe_call = global->GetProperty(*name);
2299  JSFunction* call = JSFunction::cast(maybe_call->ToObjectChecked());
2300  USE(global->SetProperty(*name, Smi::FromInt(0), NONE, kNonStrictMode));
2301  ISOLATE->compilation_cache()->Clear();
2302  call->shared()->set_ic_age(HEAP->global_ic_age() + 1);
2303  Handle<Object> call_code(call->code());
2304  Handle<Object> call_function(call);
2305 
2306  // Now we are ready to mess up the heap.
2307  HEAP->CollectAllGarbage(Heap::kReduceMemoryFootprintMask);
2308 
2309  // Either heap verification caught the problem already or we go kaboom once
2310  // the CallIC is executed the next time.
2311  USE(global->SetProperty(*name, *call_function, NONE, kNonStrictMode));
2312  CompileRun("call();");
2313 }