// test-alloc.cc revision 44f0eee88ff00398ff7f715fab053374d808c90d
// Copyright 2007-2008 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"
#include "accessors.h"

#include "cctest.h"


using namespace v8::internal;


// Allocation routine exercised by the stress tests below.  The first two
// calls deliberately report a retryable failure so the caller's
// retry-with-GC machinery (CALL_HEAP_FUNCTION) is exercised; after that it
// fills up each heap space in turn and checks that allocations of various
// kinds still succeed.  Returns the Smi 42 on success.
static MaybeObject* AllocateAfterFailures() {
  // Fail the first two attempts to force the caller down its retry path.
  static int attempts = 0;
  if (++attempts < 3) return Failure::RetryAfterGC();
  Heap* heap = Isolate::Current()->heap();

  // New space.  Fill it with minimal byte arrays until only a
  // filler-sized hole remains, then allocate into the nearly-full space.
  NewSpace* new_space = heap->new_space();
  static const int kNewSpaceFillerSize = ByteArray::SizeFor(0);
  while (new_space->Available() > kNewSpaceFillerSize) {
    int available_before = static_cast<int>(new_space->Available());
    CHECK(!heap->AllocateByteArray(0)->IsFailure());
    if (available_before == new_space->Available()) {
      // It seems that we are avoiding new space allocations when
      // allocation is forced, so no need to fill up new space
      // in order to make the test harder.
      break;
    }
  }
  CHECK(!heap->AllocateByteArray(100)->IsFailure());
  CHECK(!heap->AllocateFixedArray(100, NOT_TENURED)->IsFailure());

  // Make sure we can allocate through optimized allocation functions
  // for specific kinds.
  CHECK(!heap->AllocateFixedArray(100)->IsFailure());
  CHECK(!heap->AllocateHeapNumber(0.42)->IsFailure());
  CHECK(!heap->AllocateArgumentsObject(Smi::FromInt(87), 10)->IsFailure());
  Object* object = heap->AllocateJSObject(
      *Isolate::Current()->object_function())->ToObjectChecked();
  CHECK(!heap->CopyJSObject(JSObject::cast(object))->IsFailure());

  // Old data space.  Same fill-then-allocate pattern as new space above,
  // finishing with a tenured string allocation.
  OldSpace* old_data_space = heap->old_data_space();
  static const int kOldDataSpaceFillerSize = ByteArray::SizeFor(0);
  while (old_data_space->Available() > kOldDataSpaceFillerSize) {
    CHECK(!heap->AllocateByteArray(0, TENURED)->IsFailure());
  }
  CHECK(!heap->AllocateRawAsciiString(100, TENURED)->IsFailure());

  // Large object space.  Allocate until the old-generation limit is
  // reached, then check that one more allocation still succeeds.
  while (!heap->OldGenerationAllocationLimitReached()) {
    CHECK(!heap->AllocateFixedArray(10000, TENURED)->IsFailure());
  }
  CHECK(!heap->AllocateFixedArray(10000, TENURED)->IsFailure());

  // Map space.  Fill with maps until only a map-sized hole is left, then
  // allocate one more map.
  MapSpace* map_space = heap->map_space();
  static const int kMapSpaceFillerSize = Map::kSize;
  InstanceType instance_type = JS_OBJECT_TYPE;
  int instance_size = JSObject::kHeaderSize;
  while (map_space->Available() > kMapSpaceFillerSize) {
    CHECK(!heap->AllocateMap(instance_type, instance_size)->IsFailure());
  }
  CHECK(!heap->AllocateMap(instance_type, instance_size)->IsFailure());

  // Test that we can allocate in old pointer space and code space.
  CHECK(!heap->AllocateFixedArray(100, TENURED)->IsFailure());
  CHECK(!heap->CopyCode(Isolate::Current()->builtins()->builtin(
      Builtins::kIllegal))->IsFailure());

  // Return success.
  return Smi::FromInt(42);
}


// Wraps AllocateAfterFailures() in CALL_HEAP_FUNCTION, which retries the
// allocation (performing GCs as needed) until it succeeds and returns the
// result as a handle.
static Handle<Object> Test() {
  CALL_HEAP_FUNCTION(ISOLATE, AllocateAfterFailures(), Object);
}


// Stress test for the retry-with-GC allocation path: the wrapped
// allocation fails twice before succeeding, and the final result must be
// the Smi 42 produced by AllocateAfterFailures().
TEST(StressHandles) {
  v8::Persistent<v8::Context> env = v8::Context::New();
  v8::HandleScope scope;
  env->Enter();
  Handle<Object> o = Test();
  CHECK(o->IsSmi() && Smi::cast(*o)->value() == 42);
  env->Exit();
}


// Getter callback used by the StressJS test below; ignores its arguments
// and delegates to the allocation stress routine.
static MaybeObject* TestAccessorGet(Object* object, void*) {
  return AllocateAfterFailures();
}


// Accessor descriptor whose getter runs the allocation stress routine; it
// is installed on a map in the StressJS test below.  The remaining fields
// are unused here and zeroed.
const AccessorDescriptor kDescriptor = {
  TestAccessorGet,
  0,
  0
};


// Exercises accessor callbacks under allocation pressure: installs
// kDescriptor as a "get" property on a fresh function's instance map and
// reads the property from JavaScript, which invokes TestAccessorGet.
TEST(StressJS) {
  v8::Persistent<v8::Context> env = v8::Context::New();
  v8::HandleScope scope;
  env->Enter();
  Handle<JSFunction> function =
      FACTORY->NewFunction(FACTORY->function_symbol(), FACTORY->null_value());
  // Force the creation of an initial map and set the code to
  // something empty.
  FACTORY->NewJSObject(function);
  function->ReplaceCode(Isolate::Current()->builtins()->builtin(
      Builtins::kEmptyFunction));
  // Patch the map to have an accessor for "get".
  Handle<Map> map(function->initial_map());
  Handle<DescriptorArray> instance_descriptors(map->instance_descriptors());
  Handle<Proxy> proxy = FACTORY->NewProxy(&kDescriptor);
  instance_descriptors = FACTORY->CopyAppendProxyDescriptor(
      instance_descriptors,
      FACTORY->NewStringFromAscii(Vector<const char>("get", 3)),
      proxy,
      static_cast<PropertyAttributes>(0));
  map->set_instance_descriptors(*instance_descriptors);
  // Add the Foo constructor to the global object.
  env->Global()->Set(v8::String::New("Foo"), v8::Utils::ToLocal(function));
  // Call the accessor through JavaScript; the getter ultimately returns
  // AllocateAfterFailures()'s Smi 42.
  v8::Handle<v8::Value> result =
      v8::Script::Compile(v8::String::New("(new Foo).get"))->Run();
  CHECK_EQ(42, result->Int32Value());
  env->Exit();
}


// CodeRange test.
// Tests memory management in a CodeRange by allocating and freeing blocks,
// using a pseudorandom generator to choose block sizes geometrically
// distributed between Page::kPageSize and 2^5 * Page::kPageSize.
// Ensure that the freed chunks are collected and reused by allocating (in
// total) more than the size of the CodeRange.

// This pseudorandom generator does not need to be particularly good.
// Use the lower half of the V8::Random() generator.
unsigned int Pseudorandom() {
  // Single 32-bit word of state: each step combines the low 16 bits
  // (scaled by 18273) with the high 16 bits of the previous state.
  static uint32_t state = 2345;
  const uint32_t low_half = state & 0xFFFF;
  const uint32_t high_half = state >> 16;
  state = 18273 * low_half + high_half;  // Provably not 0.
  return state & 0xFFFF;
}


// Plain old data class.  Represents a block of allocated memory: its base
// address and its size in bytes.
class Block {
 public:
  Block(void* start, int length) : base(start), size(length) {}

  void* base;
  int size;
};


// Allocates and frees pseudorandomly sized blocks in a 16 MB CodeRange
// until five times the range's capacity has been allocated in total --
// this only terminates if freed chunks are actually recycled.
TEST(CodeRange) {
  const int code_range_size = 16*MB;
  OS::Setup();
  Isolate::Current()->code_range()->Setup(code_range_size);
  int current_allocated = 0;  // Bytes currently held in |blocks|.
  int total_allocated = 0;    // Bytes allocated over the whole run.
  List<Block> blocks(1000);

  while (total_allocated < 5 * code_range_size) {
    if (current_allocated < code_range_size / 10) {
      // Allocate a block.
      // Geometrically distributed sizes, greater than Page::kPageSize.
      size_t requested = (Page::kPageSize << (Pseudorandom() % 6)) +
           Pseudorandom() % 5000 + 1;
      size_t allocated = 0;
      void* base = Isolate::Current()->code_range()->
          AllocateRawMemory(requested, &allocated);
      blocks.Add(Block(base, static_cast<int>(allocated)));
      current_allocated += static_cast<int>(allocated);
      total_allocated += static_cast<int>(allocated);
    } else {
      // Free a randomly chosen block and remove its entry by swapping
      // the last list element into its slot (order does not matter).
      int index = Pseudorandom() % blocks.length();
      Isolate::Current()->code_range()->FreeRawMemory(
          blocks[index].base, blocks[index].size);
      current_allocated -= blocks[index].size;
      if (index < blocks.length() - 1) {
        blocks[index] = blocks.RemoveLast();
      } else {
        blocks.RemoveLast();
      }
    }
  }

  Isolate::Current()->code_range()->TearDown();
}
