v8 3.14.5 (node 0.10.28)
V8 is Google's open source JavaScript engine
test-weakmaps.cc
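Internal cctest suite for V8's weak maps: it allocates a JSWeakMap directly through the factory, verifies that entries are weak with respect to their keys under full GC, verifies that the backing ObjectHashTable grows and shrinks with the number of entries, and covers two compaction regressions (2060a/b) in which weak-map values and keys sitting on evacuation candidates must be recorded in the slots buffer.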
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "global-handles.h"
#include "snapshot.h"
#include "cctest.h"

using namespace v8::internal;


static Handle<JSWeakMap> AllocateJSWeakMap() {
  Handle<Map> map = FACTORY->NewMap(JS_WEAK_MAP_TYPE, JSWeakMap::kSize);
  Handle<JSObject> weakmap_obj = FACTORY->NewJSObjectFromMap(map);
  Handle<JSWeakMap> weakmap(JSWeakMap::cast(*weakmap_obj));
  // Do not use handles for the hash table, it would make entries strong.
  Object* table_obj = ObjectHashTable::Allocate(1)->ToObjectChecked();
  ObjectHashTable* table = ObjectHashTable::cast(table_obj);
  weakmap->set_table(table);
  weakmap->set_next(Smi::FromInt(0));
  return weakmap;
}

static void PutIntoWeakMap(Handle<JSWeakMap> weakmap,
                           Handle<JSObject> key,
                           Handle<Object> value) {
  Handle<ObjectHashTable> table = PutIntoObjectHashTable(
      Handle<ObjectHashTable>(ObjectHashTable::cast(weakmap->table())),
      Handle<JSObject>(JSObject::cast(*key)),
      value);
  weakmap->set_table(*table);
}
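
// Illustrative sketch only (not in the original file): a hypothetical
// read-side counterpart to PutIntoWeakMap, assuming ObjectHashTable::Lookup,
// which returns the hole value when a key has no entry (e.g. after its weak
// reference was cleared by the GC).
static Handle<Object> GetFromWeakMap(Handle<JSWeakMap> weakmap,
                                     Handle<JSObject> key) {
  // Look up the raw value in the backing hash table.
  Object* value = ObjectHashTable::cast(weakmap->table())->Lookup(*key);
  return Handle<Object>(value);  // The hole value if the key is absent.
}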

static int NumberOfWeakCalls = 0;
// Weak-handle callback: the GC invokes this when the handle's target is no
// longer otherwise reachable, passing back the parameter registered with
// MakeWeak(); the callback is responsible for disposing the handle.
static void WeakPointerCallback(v8::Persistent<v8::Value> handle, void* id) {
  ASSERT(id == reinterpret_cast<void*>(1234));
  NumberOfWeakCalls++;
  handle.Dispose();
}


TEST(Weakness) {
  FLAG_incremental_marking = false;
  LocalContext context;
  v8::HandleScope scope;
  Handle<JSWeakMap> weakmap = AllocateJSWeakMap();
  GlobalHandles* global_handles = Isolate::Current()->global_handles();

  // Keep global reference to the key.
  Handle<Object> key;
  {
    v8::HandleScope scope;
    Handle<Map> map = FACTORY->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
    Handle<JSObject> object = FACTORY->NewJSObjectFromMap(map);
    key = global_handles->Create(*object);
  }
  CHECK(!global_handles->IsWeak(key.location()));

  // Put entry into weak map.
  {
    v8::HandleScope scope;
    PutIntoWeakMap(weakmap,
                   Handle<JSObject>(JSObject::cast(*key)),
                   Handle<Smi>(Smi::FromInt(23)));
  }
  CHECK_EQ(1, ObjectHashTable::cast(weakmap->table())->NumberOfElements());

  // Force a full GC.
  HEAP->CollectAllGarbage(false);
  CHECK_EQ(0, NumberOfWeakCalls);
  CHECK_EQ(1, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
  CHECK_EQ(
      0, ObjectHashTable::cast(weakmap->table())->NumberOfDeletedElements());
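  // The key is still strongly reachable through the global handle created
  // above, so the entry survives the collection and no weak callback fires.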

  // Make the global reference to the key weak.
  {
    v8::HandleScope scope;
    global_handles->MakeWeak(key.location(),
                             reinterpret_cast<void*>(1234),
                             &WeakPointerCallback);
  }
  CHECK(global_handles->IsWeak(key.location()));

  // Force a full GC.
  // Perform two consecutive GCs because the first one will only clear
  // weak references whereas the second one will also clear weak maps.
  HEAP->CollectAllGarbage(false);
  CHECK_EQ(1, NumberOfWeakCalls);
  CHECK_EQ(1, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
  CHECK_EQ(
      0, ObjectHashTable::cast(weakmap->table())->NumberOfDeletedElements());
  HEAP->CollectAllGarbage(false);
  CHECK_EQ(1, NumberOfWeakCalls);
  CHECK_EQ(0, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
  CHECK_EQ(
      1, ObjectHashTable::cast(weakmap->table())->NumberOfDeletedElements());
}


TEST(Shrinking) {
  LocalContext context;
  v8::HandleScope scope;
  Handle<JSWeakMap> weakmap = AllocateJSWeakMap();

  // Check initial capacity.
  CHECK_EQ(32, ObjectHashTable::cast(weakmap->table())->Capacity());
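  // The table was allocated with room for just one entry, so 32 is
  // presumably the hash table's minimum capacity.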

  // Fill up weak map to trigger capacity change.
  {
    v8::HandleScope scope;
    Handle<Map> map = FACTORY->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
    for (int i = 0; i < 32; i++) {
      Handle<JSObject> object = FACTORY->NewJSObjectFromMap(map);
      PutIntoWeakMap(weakmap, object, Handle<Smi>(Smi::FromInt(i)));
    }
  }

  // Check increased capacity.
  CHECK_EQ(128, ObjectHashTable::cast(weakmap->table())->Capacity());
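  // Presumably the usual power-of-two doubling: 32 live entries exceed the
  // load limit of a 32-slot table, so it ends up with capacity 128.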

  // Force a full GC.
  CHECK_EQ(32, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
  CHECK_EQ(
      0, ObjectHashTable::cast(weakmap->table())->NumberOfDeletedElements());
  HEAP->CollectAllGarbage(false);
  CHECK_EQ(0, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
  CHECK_EQ(
      32, ObjectHashTable::cast(weakmap->table())->NumberOfDeletedElements());

  // Check shrunk capacity.
  CHECK_EQ(32, ObjectHashTable::cast(weakmap->table())->Capacity());
}


// Test that weak map values on an evacuation candidate which are not reachable
// by other paths are correctly recorded in the slots buffer.
TEST(Regress2060a) {
  FLAG_always_compact = true;
  LocalContext context;
  v8::HandleScope scope;
  Handle<JSFunction> function =
      FACTORY->NewFunction(FACTORY->function_symbol(), FACTORY->null_value());
  Handle<JSObject> key = FACTORY->NewJSObject(function);
  Handle<JSWeakMap> weakmap = AllocateJSWeakMap();

  // Start second old-space page so that values land on evacuation candidate.
  Page* first_page = HEAP->old_pointer_space()->anchor()->next_page();
  FACTORY->NewFixedArray(900 * KB / kPointerSize, TENURED);
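  // The ~900 KB filler presumably exhausts the current old-space page
  // (pages are 1 MB in this V8 version), so the tenured objects allocated
  // below land on a fresh page that becomes an evacuation candidate.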

  // Fill up weak map with values on an evacuation candidate.
  {
    v8::HandleScope scope;
    for (int i = 0; i < 32; i++) {
      Handle<JSObject> object = FACTORY->NewJSObject(function, TENURED);
      CHECK(!HEAP->InNewSpace(object->address()));
      CHECK(!first_page->Contains(object->address()));
      PutIntoWeakMap(weakmap, key, object);
    }
  }

  // Force compacting garbage collection.
  CHECK(FLAG_always_compact);
  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
}


// Test that weak map keys on an evacuation candidate which are reachable by
// other strong paths are correctly recorded in the slots buffer.
TEST(Regress2060b) {
  FLAG_always_compact = true;
#ifdef VERIFY_HEAP
  FLAG_verify_heap = true;
#endif

  LocalContext context;
  v8::HandleScope scope;
  Handle<JSFunction> function =
      FACTORY->NewFunction(FACTORY->function_symbol(), FACTORY->null_value());

  // Start second old-space page so that keys land on evacuation candidate.
  Page* first_page = HEAP->old_pointer_space()->anchor()->next_page();
  FACTORY->NewFixedArray(900 * KB / kPointerSize, TENURED);

  // Fill up weak map with keys on an evacuation candidate.
  Handle<JSObject> keys[32];
  for (int i = 0; i < 32; i++) {
    keys[i] = FACTORY->NewJSObject(function, TENURED);
    CHECK(!HEAP->InNewSpace(keys[i]->address()));
    CHECK(!first_page->Contains(keys[i]->address()));
  }
  Handle<JSWeakMap> weakmap = AllocateJSWeakMap();
  for (int i = 0; i < 32; i++) {
    PutIntoWeakMap(weakmap, keys[i], Handle<Smi>(Smi::FromInt(i)));
  }

  // Force compacting garbage collection. The subsequent collections are used
  // to verify that key references were actually updated.
  CHECK(FLAG_always_compact);
  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
}
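
These tests run under V8's cctest harness; following the harness's usual naming convention, an individual case can be invoked from a built cctest binary as, for example, cctest test-weakmaps/Weakness.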