// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include <stdlib.h>

#include "v8.h"
#include "cctest.h"

using namespace v8::internal;

#if 0
static void VerifyRegionMarking(Address page_start) {
#ifdef ENABLE_CARDMARKING_WRITE_BARRIER
  Page* p = Page::FromAddress(page_start);

  p->SetRegionMarks(Page::kAllRegionsCleanMarks);

  for (Address addr = p->ObjectAreaStart();
       addr < p->ObjectAreaEnd();
       addr += kPointerSize) {
    CHECK(!Page::FromAddress(addr)->IsRegionDirty(addr));
  }

  for (Address addr = p->ObjectAreaStart();
       addr < p->ObjectAreaEnd();
       addr += kPointerSize) {
    Page::FromAddress(addr)->MarkRegionDirty(addr);
  }

  for (Address addr = p->ObjectAreaStart();
       addr < p->ObjectAreaEnd();
       addr += kPointerSize) {
    CHECK(Page::FromAddress(addr)->IsRegionDirty(addr));
  }
#endif
}
#endif


// TODO(gc) you can no longer allocate pages like this. Details are hidden.
#if 0
TEST(Page) {
  byte* mem = NewArray<byte>(2*Page::kPageSize);
  CHECK(mem != NULL);

  Address start = reinterpret_cast<Address>(mem);
  Address page_start = RoundUp(start, Page::kPageSize);

  Page* p = Page::FromAddress(page_start);
  // An initialized Page has a heap pointer, normally set by MemoryAllocator.
  p->heap_ = CcTest::heap();
  CHECK(p->address() == page_start);
  CHECK(p->is_valid());

  p->opaque_header = 0;
  p->SetIsLargeObjectPage(false);
  CHECK(!p->next_page()->is_valid());

  CHECK(p->ObjectAreaStart() == page_start + Page::kObjectStartOffset);
  CHECK(p->ObjectAreaEnd() == page_start + Page::kPageSize);

  CHECK(p->Offset(page_start + Page::kObjectStartOffset) ==
        Page::kObjectStartOffset);
  CHECK(p->Offset(page_start + Page::kPageSize) == Page::kPageSize);

  CHECK(p->OffsetToAddress(Page::kObjectStartOffset) == p->ObjectAreaStart());
  CHECK(p->OffsetToAddress(Page::kPageSize) == p->ObjectAreaEnd());

  // test region marking
  VerifyRegionMarking(page_start);

  DeleteArray(mem);
}
#endif


namespace v8 {
namespace internal {

// Temporarily sets a given allocator in an isolate.
class TestMemoryAllocatorScope {
 public:
  TestMemoryAllocatorScope(Isolate* isolate, MemoryAllocator* allocator)
      : isolate_(isolate),
        old_allocator_(isolate->memory_allocator_) {
    isolate->memory_allocator_ = allocator;
  }

  ~TestMemoryAllocatorScope() {
    isolate_->memory_allocator_ = old_allocator_;
  }

 private:
  Isolate* isolate_;
  MemoryAllocator* old_allocator_;

  DISALLOW_COPY_AND_ASSIGN(TestMemoryAllocatorScope);
};


// Temporarily sets a given code range in an isolate.
class TestCodeRangeScope {
 public:
  TestCodeRangeScope(Isolate* isolate, CodeRange* code_range)
      : isolate_(isolate),
        old_code_range_(isolate->code_range_) {
    isolate->code_range_ = code_range;
  }

  ~TestCodeRangeScope() {
    isolate_->code_range_ = old_code_range_;
  }

 private:
  Isolate* isolate_;
  CodeRange* old_code_range_;

  DISALLOW_COPY_AND_ASSIGN(TestCodeRangeScope);
};

} }  // namespace v8::internal


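// Helper for TEST(MemoryChunk): allocates a chunk with the given reserve and
// commit sizes, checks its layout, then recommits the area to a second size
// and re-checks the invariants before freeing the chunk.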
static void VerifyMemoryChunk(Isolate* isolate,
                              Heap* heap,
                              CodeRange* code_range,
                              size_t reserve_area_size,
                              size_t commit_area_size,
                              size_t second_commit_area_size,
                              Executability executable) {
  MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
  CHECK(memory_allocator->SetUp(heap->MaxReserved(),
                                heap->MaxExecutableSize()));
  TestMemoryAllocatorScope test_allocator_scope(isolate, memory_allocator);
  TestCodeRangeScope test_code_range_scope(isolate, code_range);

  size_t header_size = (executable == EXECUTABLE)
                       ? MemoryAllocator::CodePageGuardStartOffset()
                       : MemoryChunk::kObjectStartOffset;
  size_t guard_size = (executable == EXECUTABLE)
                       ? MemoryAllocator::CodePageGuardSize()
                       : 0;

  MemoryChunk* memory_chunk = memory_allocator->AllocateChunk(reserve_area_size,
                                                              commit_area_size,
                                                              executable,
                                                              NULL);
  size_t alignment = code_range->exists() ?
                     MemoryChunk::kAlignment : OS::CommitPageSize();
  size_t reserved_size = (executable == EXECUTABLE)
      ? RoundUp(header_size + guard_size + reserve_area_size + guard_size,
                alignment)
      : RoundUp(header_size + reserve_area_size, OS::CommitPageSize());
  CHECK(memory_chunk->size() == reserved_size);
  CHECK(memory_chunk->area_start() < memory_chunk->address() +
                                     memory_chunk->size());
  CHECK(memory_chunk->area_end() <= memory_chunk->address() +
                                    memory_chunk->size());
  CHECK(static_cast<size_t>(memory_chunk->area_size()) == commit_area_size);

  Address area_start = memory_chunk->area_start();

  memory_chunk->CommitArea(second_commit_area_size);
  CHECK(area_start == memory_chunk->area_start());
  CHECK(memory_chunk->area_start() < memory_chunk->address() +
                                     memory_chunk->size());
  CHECK(memory_chunk->area_end() <= memory_chunk->address() +
                                    memory_chunk->size());
  CHECK(static_cast<size_t>(memory_chunk->area_size()) ==
      second_commit_area_size);

  memory_allocator->Free(memory_chunk);
  memory_allocator->TearDown();
  delete memory_allocator;
}


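// Small pseudorandom generator (multiply-with-carry style). Results are
// masked to 20 bits, so every value stays below 1 MB and fits inside the
// 1 MB reservation used by TEST(MemoryChunk).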
static unsigned int Pseudorandom() {
  static uint32_t lo = 2345;
  lo = 18273 * (lo & 0xFFFFF) + (lo >> 16);
  return lo & 0xFFFFF;
}


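// Exercises MemoryAllocator::AllocateChunk with pseudorandom commit sizes,
// both with and without a CodeRange, for executable as well as
// non-executable chunks.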
TEST(MemoryChunk) {
  Isolate* isolate = CcTest::i_isolate();
  isolate->InitializeLoggingAndCounters();
  Heap* heap = isolate->heap();
  CHECK(heap->ConfigureHeapDefault());

  size_t reserve_area_size = 1 * MB;
  size_t initial_commit_area_size, second_commit_area_size;

  for (int i = 0; i < 100; i++) {
    initial_commit_area_size = Pseudorandom();
    second_commit_area_size = Pseudorandom();

    // With CodeRange.
    CodeRange* code_range = new CodeRange(isolate);
    const int code_range_size = 32 * MB;
    if (!code_range->SetUp(code_range_size)) return;

    VerifyMemoryChunk(isolate,
                      heap,
                      code_range,
                      reserve_area_size,
                      initial_commit_area_size,
                      second_commit_area_size,
                      EXECUTABLE);

    VerifyMemoryChunk(isolate,
                      heap,
                      code_range,
                      reserve_area_size,
                      initial_commit_area_size,
                      second_commit_area_size,
                      NOT_EXECUTABLE);
    delete code_range;

    // Without CodeRange.
    code_range = NULL;
    VerifyMemoryChunk(isolate,
                      heap,
                      code_range,
                      reserve_area_size,
                      initial_commit_area_size,
                      second_commit_area_size,
                      EXECUTABLE);

    VerifyMemoryChunk(isolate,
                      heap,
                      code_range,
                      reserve_area_size,
                      initial_commit_area_size,
                      second_commit_area_size,
                      NOT_EXECUTABLE);
  }
}


TEST(MemoryAllocator) {
  Isolate* isolate = CcTest::i_isolate();
  isolate->InitializeLoggingAndCounters();
  Heap* heap = isolate->heap();
  CHECK(isolate->heap()->ConfigureHeapDefault());

  MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
  CHECK(memory_allocator->SetUp(heap->MaxReserved(),
                                heap->MaxExecutableSize()));

  int total_pages = 0;
  OldSpace faked_space(heap,
                       heap->MaxReserved(),
                       OLD_POINTER_SPACE,
                       NOT_EXECUTABLE);
  Page* first_page = memory_allocator->AllocatePage(
      faked_space.AreaSize(), &faked_space, NOT_EXECUTABLE);

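  // Link the page into the (so far empty) space and make sure the circular
  // page list stays intact and owned by the space.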
  first_page->InsertAfter(faked_space.anchor()->prev_page());
  CHECK(first_page->is_valid());
  CHECK(first_page->next_page() == faked_space.anchor());
  total_pages++;

  for (Page* p = first_page; p != faked_space.anchor(); p = p->next_page()) {
    CHECK(p->owner() == &faked_space);
  }

  // Allocate a second page; both pages should end up linked into the space.
  Page* other = memory_allocator->AllocatePage(
      faked_space.AreaSize(), &faked_space, NOT_EXECUTABLE);
  CHECK(other->is_valid());
  total_pages++;
  other->InsertAfter(first_page);
  int page_count = 0;
  for (Page* p = first_page; p != faked_space.anchor(); p = p->next_page()) {
    CHECK(p->owner() == &faked_space);
    page_count++;
  }
  CHECK(total_pages == page_count);

  Page* second_page = first_page->next_page();
  CHECK(second_page->is_valid());
  memory_allocator->Free(first_page);
  memory_allocator->Free(second_page);
  memory_allocator->TearDown();
  delete memory_allocator;
}


TEST(NewSpace) {
  Isolate* isolate = CcTest::i_isolate();
  isolate->InitializeLoggingAndCounters();
  Heap* heap = isolate->heap();
  CHECK(heap->ConfigureHeapDefault());
  MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
  CHECK(memory_allocator->SetUp(heap->MaxReserved(),
                                heap->MaxExecutableSize()));
  TestMemoryAllocatorScope test_scope(isolate, memory_allocator);

  NewSpace new_space(heap);

  CHECK(new_space.SetUp(CcTest::heap()->ReservedSemiSpaceSize(),
                        CcTest::heap()->ReservedSemiSpaceSize()));
  CHECK(new_space.HasBeenSetUp());

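  // Fill the new space with maximally sized non-code objects; every allocated
  // object must be contained in the space.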
  while (new_space.Available() >= Page::kMaxNonCodeHeapObjectSize) {
    Object* obj =
        new_space.AllocateRaw(Page::kMaxNonCodeHeapObjectSize)->
        ToObjectUnchecked();
    CHECK(new_space.Contains(HeapObject::cast(obj)));
  }

  new_space.TearDown();
  memory_allocator->TearDown();
  delete memory_allocator;
}


TEST(OldSpace) {
  Isolate* isolate = CcTest::i_isolate();
  isolate->InitializeLoggingAndCounters();
  Heap* heap = isolate->heap();
  CHECK(heap->ConfigureHeapDefault());
  MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
  CHECK(memory_allocator->SetUp(heap->MaxReserved(),
                                heap->MaxExecutableSize()));
  TestMemoryAllocatorScope test_scope(isolate, memory_allocator);

  OldSpace* s = new OldSpace(heap,
                             heap->MaxOldGenerationSize(),
                             OLD_POINTER_SPACE,
                             NOT_EXECUTABLE);
  CHECK(s != NULL);

  CHECK(s->SetUp());

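  // Allocate until the space reports no available bytes; ToObjectUnchecked
  // assumes every allocation succeeds.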
  while (s->Available() > 0) {
    s->AllocateRaw(Page::kMaxNonCodeHeapObjectSize)->ToObjectUnchecked();
  }

  s->TearDown();
  delete s;
  memory_allocator->TearDown();
  delete memory_allocator;
}


TEST(LargeObjectSpace) {
  v8::V8::Initialize();

  LargeObjectSpace* lo = CcTest::heap()->lo_space();
  CHECK(lo != NULL);

  int lo_size = Page::kPageSize;

  Object* obj = lo->AllocateRaw(lo_size, NOT_EXECUTABLE)->ToObjectUnchecked();
  CHECK(obj->IsHeapObject());

  HeapObject* ho = HeapObject::cast(obj);

  CHECK(lo->Contains(HeapObject::cast(obj)));

  CHECK(lo->FindObject(ho->address()) == obj);

  CHECK(lo->Contains(ho));

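  // Keep allocating until the large object space runs out of room; the
  // available space must shrink after every successful allocation.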
  while (true) {
    intptr_t available = lo->Available();
    { MaybeObject* maybe_obj = lo->AllocateRaw(lo_size, NOT_EXECUTABLE);
      if (!maybe_obj->ToObject(&obj)) break;
    }
    CHECK(lo->Available() < available);
  }

  CHECK(!lo->IsEmpty());

  CHECK(lo->AllocateRaw(lo_size, NOT_EXECUTABLE)->IsFailure());
}


TEST(SizeOfFirstPageIsLargeEnough) {
  if (i::FLAG_always_opt) return;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();

  // Freshly initialized VM gets by with one page per space.
  for (int i = FIRST_PAGED_SPACE; i <= LAST_PAGED_SPACE; i++) {
    CHECK_EQ(1, isolate->heap()->paged_space(i)->CountTotalPages());
  }

  // Executing the empty script gets by with one page per space.
  HandleScope scope(isolate);
  CompileRun("/*empty*/");
  for (int i = FIRST_PAGED_SPACE; i <= LAST_PAGED_SPACE; i++) {
    CHECK_EQ(1, isolate->heap()->paged_space(i)->CountTotalPages());
  }

  // No large objects required to perform the above steps.
  CHECK(isolate->heap()->lo_space()->IsEmpty());
}