• Home
  • History
  • Annotate
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 //     * Redistributions of source code must retain the above copyright
7 //       notice, this list of conditions and the following disclaimer.
8 //     * Redistributions in binary form must reproduce the above
9 //       copyright notice, this list of conditions and the following
10 //       disclaimer in the documentation and/or other materials provided
11 //       with the distribution.
12 //     * Neither the name of Google Inc. nor the names of its
13 //       contributors may be used to endorse or promote products derived
14 //       from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include <utility>
29 
30 #include "src/v8.h"
31 
32 #include "src/global-handles.h"
33 #include "test/cctest/cctest.h"
34 #include "test/cctest/heap/utils-inl.h"
35 
36 using namespace v8::internal;
37 
GetIsolateFrom(LocalContext * context)38 static Isolate* GetIsolateFrom(LocalContext* context) {
39   return reinterpret_cast<Isolate*>((*context)->GetIsolate());
40 }
41 
42 
AllocateJSWeakSet(Isolate * isolate)43 static Handle<JSWeakSet> AllocateJSWeakSet(Isolate* isolate) {
44   Factory* factory = isolate->factory();
45   Handle<Map> map = factory->NewMap(JS_WEAK_SET_TYPE, JSWeakSet::kSize);
46   Handle<JSObject> weakset_obj = factory->NewJSObjectFromMap(map);
47   Handle<JSWeakSet> weakset(JSWeakSet::cast(*weakset_obj));
48   // Do not leak handles for the hash table, it would make entries strong.
49   {
50     HandleScope scope(isolate);
51     Handle<ObjectHashTable> table = ObjectHashTable::New(isolate, 1);
52     weakset->set_table(*table);
53   }
54   return weakset;
55 }
56 
57 static int NumberOfWeakCalls = 0;
WeakPointerCallback(const v8::WeakCallbackData<v8::Value,void> & data)58 static void WeakPointerCallback(
59     const v8::WeakCallbackData<v8::Value, void>& data) {
60   std::pair<v8::Persistent<v8::Value>*, int>* p =
61       reinterpret_cast<std::pair<v8::Persistent<v8::Value>*, int>*>(
62           data.GetParameter());
63   CHECK_EQ(1234, p->second);
64   NumberOfWeakCalls++;
65   p->first->Reset();
66 }
67 
68 
// Checks the core weakness invariant of JSWeakSet: an entry survives GC
// while its key is strongly referenced, and is cleared (firing the weak
// callback exactly once) after the key's last reference is made weak.
TEST(WeakSet_Weakness) {
  // Incremental marking would interleave with the explicit GCs below.
  FLAG_incremental_marking = false;
  LocalContext context;
  Isolate* isolate = GetIsolateFrom(&context);
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);
  Handle<JSWeakSet> weakset = AllocateJSWeakSet(isolate);
  GlobalHandles* global_handles = isolate->global_handles();

  // Keep global reference to the key.
  Handle<Object> key;
  {
    HandleScope scope(isolate);
    Handle<Map> map = factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
    Handle<JSObject> object = factory->NewJSObjectFromMap(map);
    key = global_handles->Create(*object);
  }
  CHECK(!global_handles->IsWeak(key.location()));

  // Put entry into weak set.
  {
    HandleScope scope(isolate);
    Handle<Smi> smi(Smi::FromInt(23), isolate);
    int32_t hash = Object::GetOrCreateHash(isolate, key)->value();
    JSWeakCollection::Set(weakset, key, smi, hash);
  }
  CHECK_EQ(1, ObjectHashTable::cast(weakset->table())->NumberOfElements());

  // Force a full GC.
  // The key is still strongly held, so the entry must survive untouched.
  heap->CollectAllGarbage(false);
  CHECK_EQ(0, NumberOfWeakCalls);
  CHECK_EQ(1, ObjectHashTable::cast(weakset->table())->NumberOfElements());
  CHECK_EQ(
      0, ObjectHashTable::cast(weakset->table())->NumberOfDeletedElements());

  // Make the global reference to the key weak.
  {
    HandleScope scope(isolate);
    std::pair<Handle<Object>*, int> handle_and_id(&key, 1234);
    GlobalHandles::MakeWeak(key.location(),
                            reinterpret_cast<void*>(&handle_and_id),
                            &WeakPointerCallback);
  }
  CHECK(global_handles->IsWeak(key.location()));

  // Force a full GC.
  // Perform two consecutive GCs because the first one will only clear
  // weak references whereas the second one will also clear weak sets.
  heap->CollectAllGarbage(false);
  CHECK_EQ(1, NumberOfWeakCalls);
  CHECK_EQ(1, ObjectHashTable::cast(weakset->table())->NumberOfElements());
  CHECK_EQ(
      0, ObjectHashTable::cast(weakset->table())->NumberOfDeletedElements());
  heap->CollectAllGarbage(false);
  CHECK_EQ(1, NumberOfWeakCalls);
  // After the second GC the entry has moved to the deleted bucket.
  CHECK_EQ(0, ObjectHashTable::cast(weakset->table())->NumberOfElements());
  CHECK_EQ(
      1, ObjectHashTable::cast(weakset->table())->NumberOfDeletedElements());
}
129 
130 
// Checks that the weak set's backing hash table grows when filled and
// shrinks back to its original capacity after GC clears all entries.
TEST(WeakSet_Shrinking) {
  LocalContext context;
  Isolate* isolate = GetIsolateFrom(&context);
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);
  Handle<JSWeakSet> weakset = AllocateJSWeakSet(isolate);

  // Check initial capacity.
  CHECK_EQ(32, ObjectHashTable::cast(weakset->table())->Capacity());

  // Fill up weak set to trigger capacity change.
  // The inner scope drops the only strong references to the keys, so the
  // entries are collectible afterwards.
  {
    HandleScope scope(isolate);
    Handle<Map> map = factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
    for (int i = 0; i < 32; i++) {
      Handle<JSObject> object = factory->NewJSObjectFromMap(map);
      Handle<Smi> smi(Smi::FromInt(i), isolate);
      int32_t hash = Object::GetOrCreateHash(isolate, object)->value();
      JSWeakCollection::Set(weakset, object, smi, hash);
    }
  }

  // Check increased capacity.
  CHECK_EQ(128, ObjectHashTable::cast(weakset->table())->Capacity());

  // Force a full GC.
  CHECK_EQ(32, ObjectHashTable::cast(weakset->table())->NumberOfElements());
  CHECK_EQ(
      0, ObjectHashTable::cast(weakset->table())->NumberOfDeletedElements());
  heap->CollectAllGarbage(false);
  // All keys were unreachable, so every entry must have been cleared.
  CHECK_EQ(0, ObjectHashTable::cast(weakset->table())->NumberOfElements());
  CHECK_EQ(
      32, ObjectHashTable::cast(weakset->table())->NumberOfDeletedElements());

  // Check shrunk capacity.
  CHECK_EQ(32, ObjectHashTable::cast(weakset->table())->Capacity());
}
169 
170 
// Test that weak set values on an evacuation candidate which are not reachable
// by other paths are correctly recorded in the slots buffer.
TEST(WeakSet_Regress2060a) {
  if (i::FLAG_never_compact) return;
  FLAG_always_compact = true;
  LocalContext context;
  Isolate* isolate = GetIsolateFrom(&context);
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);
  Handle<JSFunction> function = factory->NewFunction(
      factory->function_string());
  Handle<JSObject> key = factory->NewJSObject(function);
  Handle<JSWeakSet> weakset = AllocateJSWeakSet(isolate);

  // Start second old-space page so that values land on evacuation candidate.
  Page* first_page = heap->old_space()->anchor()->next_page();
  SimulateFullSpace(heap->old_space());

  // Fill up weak set with values on an evacuation candidate.
  // Note: the same key is set repeatedly on purpose; only the values need
  // to live on the evacuation candidate for this regression.
  {
    HandleScope scope(isolate);
    for (int i = 0; i < 32; i++) {
      Handle<JSObject> object = factory->NewJSObject(function, TENURED);
      CHECK(!heap->InNewSpace(object->address()));
      CHECK(!first_page->Contains(object->address()));
      int32_t hash = Object::GetOrCreateHash(isolate, key)->value();
      JSWeakCollection::Set(weakset, key, object, hash);
    }
  }

  // Force compacting garbage collection.
  CHECK(FLAG_always_compact);
  heap->CollectAllGarbage();
}
206 
207 
// Test that weak set keys on an evacuation candidate which are reachable by
// other strong paths are correctly recorded in the slots buffer.
TEST(WeakSet_Regress2060b) {
  if (i::FLAG_never_compact) return;
  FLAG_always_compact = true;
#ifdef VERIFY_HEAP
  FLAG_verify_heap = true;
#endif

  LocalContext context;
  Isolate* isolate = GetIsolateFrom(&context);
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);
  Handle<JSFunction> function = factory->NewFunction(
      factory->function_string());

  // Start second old-space page so that keys land on evacuation candidate.
  Page* first_page = heap->old_space()->anchor()->next_page();
  SimulateFullSpace(heap->old_space());

  // Fill up weak set with keys on an evacuation candidate.
  // The keys[] handles keep every key strongly reachable across the GCs.
  Handle<JSObject> keys[32];
  for (int i = 0; i < 32; i++) {
    keys[i] = factory->NewJSObject(function, TENURED);
    CHECK(!heap->InNewSpace(keys[i]->address()));
    CHECK(!first_page->Contains(keys[i]->address()));
  }
  Handle<JSWeakSet> weakset = AllocateJSWeakSet(isolate);
  for (int i = 0; i < 32; i++) {
    Handle<Smi> smi(Smi::FromInt(i), isolate);
    int32_t hash = Object::GetOrCreateHash(isolate, keys[i])->value();
    JSWeakCollection::Set(weakset, keys[i], smi, hash);
  }

  // Force compacting garbage collection. The subsequent collections are used
  // to verify that key references were actually updated.
  CHECK(FLAG_always_compact);
  heap->CollectAllGarbage();
  heap->CollectAllGarbage();
  heap->CollectAllGarbage();
}
250