1// Copyright 2012 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6//     * Redistributions of source code must retain the above copyright
7//       notice, this list of conditions and the following disclaimer.
8//     * Redistributions in binary form must reproduce the above
9//       copyright notice, this list of conditions and the following
10//       disclaimer in the documentation and/or other materials provided
11//       with the distribution.
12//     * Neither the name of Google Inc. nor the names of its
13//       contributors may be used to endorse or promote products derived
14//       from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include <stdlib.h>
29
30#ifdef __linux__
31#include <sys/types.h>
32#include <sys/stat.h>
33#include <fcntl.h>
34#include <unistd.h>
35#include <errno.h>
36#endif
37
38
39#include "v8.h"
40
41#include "global-handles.h"
42#include "snapshot.h"
43#include "cctest.h"
44
45using namespace v8::internal;
46
47
48TEST(MarkingDeque) {
49  CcTest::InitializeVM();
50  int mem_size = 20 * kPointerSize;
51  byte* mem = NewArray<byte>(20*kPointerSize);
52  Address low = reinterpret_cast<Address>(mem);
53  Address high = low + mem_size;
54  MarkingDeque s;
55  s.Initialize(low, high);
56
57  Address original_address = reinterpret_cast<Address>(&s);
58  Address current_address = original_address;
59  while (!s.IsFull()) {
60    s.PushBlack(HeapObject::FromAddress(current_address));
61    current_address += kPointerSize;
62  }
63
64  while (!s.IsEmpty()) {
65    Address value = s.Pop()->address();
66    current_address -= kPointerSize;
67    CHECK_EQ(current_address, value);
68  }
69
70  CHECK_EQ(original_address, current_address);
71  DeleteArray(mem);
72}
73
74
75TEST(Promotion) {
76  CcTest::InitializeVM();
77  Heap* heap = CcTest::heap();
78  heap->ConfigureHeap(2*256*KB, 1*MB, 1*MB);
79
80  v8::HandleScope sc(CcTest::isolate());
81
82  // Allocate a fixed array in the new space.
83  int array_length =
84      (Page::kMaxNonCodeHeapObjectSize - FixedArray::kHeaderSize) /
85      (4 * kPointerSize);
86  Object* obj = heap->AllocateFixedArray(array_length)->ToObjectChecked();
87  Handle<FixedArray> array(FixedArray::cast(obj));
88
89  // Array should be in the new space.
90  CHECK(heap->InSpace(*array, NEW_SPACE));
91
92  // Call mark compact GC, so array becomes an old object.
93  heap->CollectGarbage(OLD_POINTER_SPACE);
94
95  // Array now sits in the old space
96  CHECK(heap->InSpace(*array, OLD_POINTER_SPACE));
97}
98
99
100TEST(NoPromotion) {
101  CcTest::InitializeVM();
102  Heap* heap = CcTest::heap();
103  heap->ConfigureHeap(2*256*KB, 1*MB, 1*MB);
104
105  v8::HandleScope sc(CcTest::isolate());
106
107  // Allocate a big fixed array in the new space.
108  int array_length =
109      (Page::kMaxNonCodeHeapObjectSize - FixedArray::kHeaderSize) /
110      (2 * kPointerSize);
111  Object* obj = heap->AllocateFixedArray(array_length)->ToObjectChecked();
112  Handle<FixedArray> array(FixedArray::cast(obj));
113
114  // Array should be in the new space.
115  CHECK(heap->InSpace(*array, NEW_SPACE));
116
117  // Simulate a full old space to make promotion fail.
118  SimulateFullSpace(heap->old_pointer_space());
119
120  // Call mark compact GC, and it should pass.
121  heap->CollectGarbage(OLD_POINTER_SPACE);
122}
123
124
// Drives the mark-compact collector through allocation failures and GCs in
// new space, map space and old pointer space, and checks that objects
// anchored on the global object survive each collection.
TEST(MarkCompactCollector) {
  FLAG_incremental_marking = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();

  v8::HandleScope sc(CcTest::isolate());
  Handle<GlobalObject> global(isolate->context()->global_object());

  // call mark-compact when heap is empty
  heap->CollectGarbage(OLD_POINTER_SPACE, "trigger 1");

  // keep allocating garbage in new space until it fails
  const int ARRAY_SIZE = 100;
  Object* array;
  MaybeObject* maybe_array;
  do {
    maybe_array = heap->AllocateFixedArray(ARRAY_SIZE);
  } while (maybe_array->ToObject(&array));
  heap->CollectGarbage(NEW_SPACE, "trigger 2");

  // The collection above must have freed enough room for this to succeed.
  array = heap->AllocateFixedArray(ARRAY_SIZE)->ToObjectChecked();

  // keep allocating maps until it fails
  Object* mapp;
  MaybeObject* maybe_mapp;
  do {
    maybe_mapp = heap->AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
  } while (maybe_mapp->ToObject(&mapp));
  heap->CollectGarbage(MAP_SPACE, "trigger 3");
  // Likewise, allocating one more map must succeed after the map-space GC.
  mapp = heap->AllocateMap(JS_OBJECT_TYPE,
                           JSObject::kHeaderSize)->ToObjectChecked();

  // allocate a garbage
  String* func_name = String::cast(
      heap->InternalizeUtf8String("theFunction")->ToObjectChecked());
  SharedFunctionInfo* function_share = SharedFunctionInfo::cast(
      heap->AllocateSharedFunctionInfo(func_name)->ToObjectChecked());
  JSFunction* function = JSFunction::cast(
      heap->AllocateFunction(*isolate->function_map(),
                             function_share,
                             heap->undefined_value())->ToObjectChecked());
  Map* initial_map =
      Map::cast(heap->AllocateMap(JS_OBJECT_TYPE,
                                  JSObject::kHeaderSize)->ToObjectChecked());
  function->set_initial_map(initial_map);
  // Anchor the function on the global object so it survives the next GC.
  JSReceiver::SetProperty(
      global, handle(func_name), handle(function), NONE, kNonStrictMode);

  JSObject* obj = JSObject::cast(
      heap->AllocateJSObject(function)->ToObjectChecked());
  heap->CollectGarbage(OLD_POINTER_SPACE, "trigger 4");

  // The function was reachable from the global object, so it must still be
  // retrievable by name after the collection.
  func_name = String::cast(
      heap->InternalizeUtf8String("theFunction")->ToObjectChecked());
  CHECK(JSReceiver::HasLocalProperty(global, handle(func_name)));
  Object* func_value = isolate->context()->global_object()->
      GetProperty(func_name)->ToObjectChecked();
  CHECK(func_value->IsJSFunction());
  function = JSFunction::cast(func_value);

  // Create an object with a named slot and anchor it on the global object.
  obj = JSObject::cast(heap->AllocateJSObject(function)->ToObjectChecked());
  String* obj_name =
      String::cast(heap->InternalizeUtf8String("theObject")->ToObjectChecked());
  JSReceiver::SetProperty(
      global, handle(obj_name), handle(obj), NONE, kNonStrictMode);
  String* prop_name =
      String::cast(heap->InternalizeUtf8String("theSlot")->ToObjectChecked());
  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  JSReceiver::SetProperty(
      handle(obj), handle(prop_name), twenty_three, NONE, kNonStrictMode);

  heap->CollectGarbage(OLD_POINTER_SPACE, "trigger 5");

  // Both the object and its property value must have survived.
  obj_name =
      String::cast(heap->InternalizeUtf8String("theObject")->ToObjectChecked());
  CHECK(JSReceiver::HasLocalProperty(global, handle(obj_name)));
  CHECK(isolate->context()->global_object()->
        GetProperty(obj_name)->ToObjectChecked()->IsJSObject());
  obj = JSObject::cast(isolate->context()->global_object()->
                       GetProperty(obj_name)->ToObjectChecked());
  prop_name =
      String::cast(heap->InternalizeUtf8String("theSlot")->ToObjectChecked());
  CHECK(obj->GetProperty(prop_name) == Smi::FromInt(23));
}
210
211
// TODO(1600): compaction of map space is temporary removed from GC.
#if 0
// Allocates a fresh map for a plain JSObject via the factory.
static Handle<Map> CreateMap(Isolate* isolate) {
  return isolate->factory()->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
}


TEST(MapCompact) {
  FLAG_max_map_space_pages = 16;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  {
    v8::HandleScope sc;
    // keep allocating maps while pointers are still encodable and thus
    // mark compact is permitted.
    // NOTE: CreateMap takes the isolate; the original calls passed no
    // argument and would not have compiled if this block were re-enabled.
    Handle<JSObject> root = factory->NewJSObjectFromMap(CreateMap(isolate));
    do {
      Handle<Map> map = CreateMap(isolate);
      map->set_prototype(*root);
      root = factory->NewJSObjectFromMap(map);
    } while (CcTest::heap()->map_space()->MapPointersEncodable());
  }
  // Now, as we don't have any handles to just allocated maps, we should
  // be able to trigger map compaction.
  // To give an additional chance to fail, try to force compaction which
  // should be impossible right now.
  CcTest::heap()->CollectAllGarbage(Heap::kForceCompactionMask);
  // And now map pointers should be encodable again.
  CHECK(CcTest::heap()->map_space()->MapPointersEncodable());
}
#endif
245
246
// Counts how many times WeakPointerCallback has fired; reset per test.
static int NumberOfWeakCalls = 0;
// Weak-handle callback: verifies the expected callback parameter, bumps the
// counter, and clears the dying handle.
static void WeakPointerCallback(v8::Isolate* isolate,
                                v8::Persistent<v8::Value>* handle,
                                void* id) {
  ASSERT(id == reinterpret_cast<void*>(1234));
  NumberOfWeakCalls++;
  handle->Reset();
}
255
256
// Builds two object groups connected in a cycle and held alive via implicit
// references and a strong root, then progressively weakens handles and
// checks that the weak callback fires exactly when objects become garbage.
TEST(ObjectGroups) {
  FLAG_incremental_marking = false;
  CcTest::InitializeVM();
  GlobalHandles* global_handles = CcTest::i_isolate()->global_handles();
  Heap* heap = CcTest::heap();
  NumberOfWeakCalls = 0;
  v8::HandleScope handle_scope(CcTest::isolate());

  // Group 1: two group members (g1s1, g1s2) and one implicit child (g1c1),
  // all weak.
  Handle<Object> g1s1 =
      global_handles->Create(heap->AllocateFixedArray(1)->ToObjectChecked());
  Handle<Object> g1s2 =
      global_handles->Create(heap->AllocateFixedArray(1)->ToObjectChecked());
  Handle<Object> g1c1 =
      global_handles->Create(heap->AllocateFixedArray(1)->ToObjectChecked());
  global_handles->MakeWeak(g1s1.location(),
                           reinterpret_cast<void*>(1234),
                           &WeakPointerCallback);
  global_handles->MakeWeak(g1s2.location(),
                           reinterpret_cast<void*>(1234),
                           &WeakPointerCallback);
  global_handles->MakeWeak(g1c1.location(),
                           reinterpret_cast<void*>(1234),
                           &WeakPointerCallback);

  // Group 2: same shape as group 1.
  Handle<Object> g2s1 =
      global_handles->Create(heap->AllocateFixedArray(1)->ToObjectChecked());
  Handle<Object> g2s2 =
    global_handles->Create(heap->AllocateFixedArray(1)->ToObjectChecked());
  Handle<Object> g2c1 =
    global_handles->Create(heap->AllocateFixedArray(1)->ToObjectChecked());
  global_handles->MakeWeak(g2s1.location(),
                           reinterpret_cast<void*>(1234),
                           &WeakPointerCallback);
  global_handles->MakeWeak(g2s2.location(),
                           reinterpret_cast<void*>(1234),
                           &WeakPointerCallback);
  global_handles->MakeWeak(g2c1.location(),
                           reinterpret_cast<void*>(1234),
                           &WeakPointerCallback);

  Handle<Object> root = global_handles->Create(*g1s1);  // make a root.

  // Connect group 1 and 2, make a cycle.
  Handle<FixedArray>::cast(g1s2)->set(0, *g2s2);
  Handle<FixedArray>::cast(g2s1)->set(0, *g1s1);

  {
    Object** g1_objects[] = { g1s1.location(), g1s2.location() };
    Object** g1_children[] = { g1c1.location() };
    Object** g2_objects[] = { g2s1.location(), g2s2.location() };
    Object** g2_children[] = { g2c1.location() };
    global_handles->AddObjectGroup(g1_objects, 2, NULL);
    global_handles->AddImplicitReferences(
        Handle<HeapObject>::cast(g1s1).location(), g1_children, 1);
    global_handles->AddObjectGroup(g2_objects, 2, NULL);
    global_handles->AddImplicitReferences(
        Handle<HeapObject>::cast(g2s1).location(), g2_children, 1);
  }
  // Do a full GC
  heap->CollectGarbage(OLD_POINTER_SPACE);

  // All object should be alive.
  CHECK_EQ(0, NumberOfWeakCalls);

  // Weaken the root.
  global_handles->MakeWeak(root.location(),
                           reinterpret_cast<void*>(1234),
                           &WeakPointerCallback);
  // But make children strong roots---all the objects (except for children)
  // should be collectable now.
  global_handles->ClearWeakness(g1c1.location());
  global_handles->ClearWeakness(g2c1.location());

  // Groups are deleted, rebuild groups.
  // (Object groups and implicit references only last for one GC cycle.)
  {
    Object** g1_objects[] = { g1s1.location(), g1s2.location() };
    Object** g1_children[] = { g1c1.location() };
    Object** g2_objects[] = { g2s1.location(), g2s2.location() };
    Object** g2_children[] = { g2c1.location() };
    global_handles->AddObjectGroup(g1_objects, 2, NULL);
    global_handles->AddImplicitReferences(
        Handle<HeapObject>::cast(g1s1).location(), g1_children, 1);
    global_handles->AddObjectGroup(g2_objects, 2, NULL);
    global_handles->AddImplicitReferences(
        Handle<HeapObject>::cast(g2s1).location(), g2_children, 1);
  }

  heap->CollectGarbage(OLD_POINTER_SPACE);

  // All objects should be gone. 5 global handles in total.
  // (g1s1, g1s2, g2s1, g2s2 and root; the two children stay strong.)
  CHECK_EQ(5, NumberOfWeakCalls);

  // And now make children weak again and collect them.
  global_handles->MakeWeak(g1c1.location(),
                           reinterpret_cast<void*>(1234),
                           &WeakPointerCallback);
  global_handles->MakeWeak(g2c1.location(),
                           reinterpret_cast<void*>(1234),
                           &WeakPointerCallback);

  heap->CollectGarbage(OLD_POINTER_SPACE);
  CHECK_EQ(7, NumberOfWeakCalls);
}
360
361
362class TestRetainedObjectInfo : public v8::RetainedObjectInfo {
363 public:
364  TestRetainedObjectInfo() : has_been_disposed_(false) {}
365
366  bool has_been_disposed() { return has_been_disposed_; }
367
368  virtual void Dispose() {
369    ASSERT(!has_been_disposed_);
370    has_been_disposed_ = true;
371  }
372
373  virtual bool IsEquivalent(v8::RetainedObjectInfo* other) {
374    return other == this;
375  }
376
377  virtual intptr_t GetHash() { return 0; }
378
379  virtual const char* GetLabel() { return "whatever"; }
380
381 private:
382  bool has_been_disposed_;
383};
384
385
386TEST(EmptyObjectGroups) {
387  CcTest::InitializeVM();
388  GlobalHandles* global_handles = CcTest::i_isolate()->global_handles();
389
390  v8::HandleScope handle_scope(CcTest::isolate());
391
392  Handle<Object> object = global_handles->Create(
393      CcTest::heap()->AllocateFixedArray(1)->ToObjectChecked());
394
395  TestRetainedObjectInfo info;
396  global_handles->AddObjectGroup(NULL, 0, &info);
397  ASSERT(info.has_been_disposed());
398
399  global_handles->AddImplicitReferences(
400        Handle<HeapObject>::cast(object).location(), NULL, 0);
401}
402
403
404#if defined(__has_feature)
405#if __has_feature(address_sanitizer)
406#define V8_WITH_ASAN 1
407#endif
408#endif
409
410
411// Here is a memory use test that uses /proc, and is therefore Linux-only.  We
412// do not care how much memory the simulator uses, since it is only there for
413// debugging purposes. Testing with ASAN doesn't make sense, either.
414#if defined(__linux__) && !defined(USE_SIMULATOR) && !defined(V8_WITH_ASAN)
415
416
417static uintptr_t ReadLong(char* buffer, intptr_t* position, int base) {
418  char* end_address = buffer + *position;
419  uintptr_t result = strtoul(buffer + *position, &end_address, base);
420  CHECK(result != ULONG_MAX || errno != ERANGE);
421  CHECK(end_address > buffer + *position);
422  *position = end_address - buffer;
423  return result;
424}
425
426
427// The memory use computed this way is not entirely accurate and depends on
428// the way malloc allocates memory.  That's why the memory use may seem to
429// increase even though the sum of the allocated object sizes decreases.  It
430// also means that the memory use depends on the kernel and stdlib.
// The memory use computed this way is not entirely accurate and depends on
// the way malloc allocates memory.  That's why the memory use may seem to
// increase even though the sum of the allocated object sizes decreases.  It
// also means that the memory use depends on the kernel and stdlib.
//
// Parses /proc/self/maps and sums the sizes of anonymous private mappings
// (inode 0) with at least one of r/w/x permissions.  Returns -1 if the
// file cannot be opened (e.g. /proc not mounted).
static intptr_t MemoryInUse() {
  intptr_t memory_use = 0;

  int fd = open("/proc/self/maps", O_RDONLY);
  if (fd < 0) return -1;

  // Read the whole maps file in one go; each line has the form
  //   start-end perms offset major:minor inode [path]
  const int kBufSize = 10000;
  char buffer[kBufSize];
  int length = read(fd, buffer, kBufSize);
  intptr_t line_start = 0;
  CHECK_LT(length, kBufSize);  // Make the buffer bigger.
  CHECK_GT(length, 0);  // We have to find some data in the file.
  while (line_start < length) {
    if (buffer[line_start] == '\n') {
      line_start++;
      continue;
    }
    intptr_t position = line_start;
    // Address range, e.g. "7f12a4c00000-7f12a4c21000".
    uintptr_t start = ReadLong(buffer, &position, 16);
    CHECK_EQ(buffer[position++], '-');
    uintptr_t end = ReadLong(buffer, &position, 16);
    CHECK_EQ(buffer[position++], ' ');
    // Permission flags "rwxp", each either its letter or '-'.
    CHECK(buffer[position] == '-' || buffer[position] == 'r');
    bool read_permission = (buffer[position++] == 'r');
    CHECK(buffer[position] == '-' || buffer[position] == 'w');
    bool write_permission = (buffer[position++] == 'w');
    CHECK(buffer[position] == '-' || buffer[position] == 'x');
    bool execute_permission = (buffer[position++] == 'x');
    CHECK(buffer[position] == '-' || buffer[position] == 'p');
    bool private_mapping = (buffer[position++] == 'p');
    CHECK_EQ(buffer[position++], ' ');
    // Offset and device numbers are parsed only to advance the cursor.
    uintptr_t offset = ReadLong(buffer, &position, 16);
    USE(offset);
    CHECK_EQ(buffer[position++], ' ');
    uintptr_t major = ReadLong(buffer, &position, 16);
    USE(major);
    CHECK_EQ(buffer[position++], ':');
    uintptr_t minor = ReadLong(buffer, &position, 16);
    USE(minor);
    CHECK_EQ(buffer[position++], ' ');
    // Inode 0 marks an anonymous mapping (not backed by a file).
    uintptr_t inode = ReadLong(buffer, &position, 10);
    // Skip the rest of the line (optional pathname).
    while (position < length && buffer[position] != '\n') position++;
    // Count accessible, private, anonymous mappings as memory in use.
    if ((read_permission || write_permission || execute_permission) &&
        private_mapping && inode == 0) {
      memory_use += (end - start);
    }

    line_start = position;
  }
  close(fd);
  return memory_use;
}
483
484
485TEST(BootUpMemoryUse) {
486  intptr_t initial_memory = MemoryInUse();
487  // Avoid flakiness.
488  FLAG_crankshaft = false;
489  FLAG_concurrent_recompilation = false;
490
491  // Only Linux has the proc filesystem and only if it is mapped.  If it's not
492  // there we just skip the test.
493  if (initial_memory >= 0) {
494    CcTest::InitializeVM();
495    intptr_t delta = MemoryInUse() - initial_memory;
496    printf("delta: %" V8_PTR_PREFIX "d kB\n", delta / 1024);
497    if (sizeof(initial_memory) == 8) {  // 64-bit.
498      if (v8::internal::Snapshot::IsEnabled()) {
499        CHECK_LE(delta, 4000 * 1024);
500      } else {
501        CHECK_LE(delta, 4500 * 1024);
502      }
503    } else {                            // 32-bit.
504      if (v8::internal::Snapshot::IsEnabled()) {
505        CHECK_LE(delta, 3100 * 1024);
506      } else {
507        CHECK_LE(delta, 3450 * 1024);
508      }
509    }
510  }
511}
512
513
514intptr_t ShortLivingIsolate() {
515  v8::Isolate* isolate = v8::Isolate::New();
516  { v8::Isolate::Scope isolate_scope(isolate);
517    v8::Locker lock(isolate);
518    v8::HandleScope handle_scope(isolate);
519    v8::Local<v8::Context> context = v8::Context::New(isolate);
520    CHECK(!context.IsEmpty());
521  }
522  isolate->Dispose();
523  return MemoryInUse();
524}
525
526
527TEST(RegressJoinThreadsOnIsolateDeinit) {
528  intptr_t size_limit = ShortLivingIsolate() * 2;
529  for (int i = 0; i < 10; i++) {
530    CHECK_GT(size_limit, ShortLivingIsolate());
531  }
532}
533
#endif  // defined(__linux__) && !defined(USE_SIMULATOR) && !defined(V8_WITH_ASAN)
535