/* Copyright 2015 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#include "tensorflow/core/framework/tracking_allocator.h"

#include <unordered_map>

#include "tensorflow/core/framework/allocator.h"
#include "tensorflow/core/platform/logging.h"
#include "tensorflow/core/platform/mem.h"
#include "tensorflow/core/platform/test.h"

namespace tensorflow {

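// A test allocator that allocates with port::Malloc and remembers the
// requested size of every live allocation, so sizes can be queried back
// through the Allocator interface.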
class TestableSizeTrackingAllocator : public Allocator {
 public:
  string Name() override { return "test"; }
  void* AllocateRaw(size_t /*alignment*/, size_t num_bytes) override {
    void* ptr = port::Malloc(num_bytes);
    size_map_[ptr] = num_bytes;
    return ptr;
  }
  void DeallocateRaw(void* ptr) override {
    auto iter = size_map_.find(ptr);
    EXPECT_NE(size_map_.end(), iter);
    size_map_.erase(iter);
    port::Free(ptr);
  }
  bool TracksAllocationSizes() override { return true; }
  size_t RequestedSize(const void* ptr) override {
    auto iter = size_map_.find(ptr);
    EXPECT_NE(size_map_.end(), iter);
    return iter->second;
  }
  void GetStats(AllocatorStats* stats) override { stats->Clear(); }

 private:
  std::unordered_map<const void*, size_t> size_map_;
};

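// A test allocator whose allocations always fail, used to exercise the
// out-of-memory and null-pointer code paths.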
class NoMemoryAllocator : public Allocator {
 public:
  string Name() override { return "test"; }
  void* AllocateRaw(size_t /*alignment*/, size_t num_bytes) override {
    return nullptr;
  }
  void DeallocateRaw(void* ptr) override {}
  bool TracksAllocationSizes() override { return true; }
  void GetStats(AllocatorStats* stats) override { stats->Clear(); }
};

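// Wraps cpu_allocator(), which does not track allocation sizes, and checks
// what TrackingAllocator reports first without and then with local size
// tracking enabled.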
TEST(TrackingAllocatorTest, SimpleNoTracking) {
  Allocator* a = cpu_allocator();

  EXPECT_FALSE(a->TracksAllocationSizes());

  // Don't enable the tracking inside the tracking allocator. Since the
  // cpu_allocator doesn't track allocations itself, the tracking will be
  // partial.
  TrackingAllocator* ta = new TrackingAllocator(a, false);

  void* p1 = ta->AllocateRaw(4, 4);
  ta->DeallocateRaw(p1);
  void* p2 = ta->AllocateRaw(4, 12);

  std::tuple<size_t, size_t, size_t> sizes = ta->GetSizes();

  EXPECT_EQ(16, std::get<0>(sizes));
  EXPECT_EQ(0, std::get<1>(sizes));
  EXPECT_EQ(0, std::get<2>(sizes));

  ta->DeallocateRaw(p2);
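  // Without any size tracking, only the requested allocation sizes are
  // recorded; deallocations do not produce records.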
  auto records = ta->GetRecordsAndUnRef();
  EXPECT_EQ(4, records[0].alloc_bytes);
  EXPECT_EQ(12, records[1].alloc_bytes);

  // This time enable the tracking inside the tracking allocator
  ta = new TrackingAllocator(a, true);
  p1 = ta->AllocateRaw(4, 4);
  EXPECT_EQ(4, ta->RequestedSize(p1));
  EXPECT_LE(4, ta->AllocatedSize(p1));
  EXPECT_EQ(1, ta->AllocationId(p1));

  ta->DeallocateRaw(p1);
  p2 = ta->AllocateRaw(4, 12);
  EXPECT_EQ(12, ta->RequestedSize(p2));
  EXPECT_LE(12, ta->AllocatedSize(p2));
  EXPECT_EQ(2, ta->AllocationId(p2));

  sizes = ta->GetSizes();

  EXPECT_LE(16, std::get<0>(sizes));
  EXPECT_LE(12, std::get<1>(sizes));
  EXPECT_LE(12, std::get<2>(sizes));

  ta->DeallocateRaw(p2);
  records = ta->GetRecordsAndUnRef();
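  // With local tracking, each allocation adds a positive record and each
  // deallocation a matching negative record; the underlying allocator may
  // round sizes up, hence the inequality checks.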
  EXPECT_LE(4, records[0].alloc_bytes);
  EXPECT_GE(-4, records[1].alloc_bytes);
  EXPECT_LE(12, records[2].alloc_bytes);
  EXPECT_GE(-12, records[3].alloc_bytes);
}

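// Wraps an allocator that tracks sizes itself, so TrackingAllocator should
// report exact total, high-watermark, and still-live byte counts.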
TEST(TrackingAllocatorTest, SimpleTracking) {
  TestableSizeTrackingAllocator a;

  EXPECT_TRUE(a.TracksAllocationSizes());

  TrackingAllocator* ta = new TrackingAllocator(&a, false);

  void* p1 = ta->AllocateRaw(4, 12);
  ta->DeallocateRaw(p1);
  void* p2 = ta->AllocateRaw(4, 4);

  std::tuple<size_t, size_t, size_t> sizes = ta->GetSizes();

  EXPECT_EQ(16, std::get<0>(sizes));
  EXPECT_EQ(12, std::get<1>(sizes));
  EXPECT_EQ(4, std::get<2>(sizes));

  ta->DeallocateRaw(p2);

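  // The wrapped allocator reports exact sizes, so each allocation yields a
  // positive record and each deallocation a matching negative record.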
  auto records = ta->GetRecordsAndUnRef();
  EXPECT_EQ(12, records[0].alloc_bytes);
  EXPECT_EQ(-12, records[1].alloc_bytes);
  EXPECT_EQ(4, records[2].alloc_bytes);
  EXPECT_EQ(-4, records[3].alloc_bytes);
}

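// A failed allocation should be passed through as nullptr and must not show
// up in the sizes or the allocation records.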
TEST(TrackingAllocatorTest, OutOfMemory) {
  NoMemoryAllocator a;

  EXPECT_TRUE(a.TracksAllocationSizes());

  TrackingAllocator* ta = new TrackingAllocator(&a, false);

  void* p1 = ta->AllocateRaw(4, 12);
  EXPECT_EQ(nullptr, p1);

  std::tuple<size_t, size_t, size_t> sizes = ta->GetSizes();

  EXPECT_EQ(0, std::get<0>(sizes));
  EXPECT_EQ(0, std::get<1>(sizes));
  EXPECT_EQ(0, std::get<2>(sizes));

  EXPECT_EQ(0, ta->GetRecordsAndUnRef().size());
}

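// Deallocating nullptr should be a no-op and must not be recorded.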
TEST(TrackingAllocatorTest, FreeNullPtr) {
  NoMemoryAllocator a;

  EXPECT_TRUE(a.TracksAllocationSizes());

  TrackingAllocator* ta = new TrackingAllocator(&a, false);

  ta->DeallocateRaw(nullptr);

  std::tuple<size_t, size_t, size_t> sizes = ta->GetSizes();

  EXPECT_EQ(0, std::get<0>(sizes));
  EXPECT_EQ(0, std::get<1>(sizes));
  EXPECT_EQ(0, std::get<2>(sizes));

  EXPECT_EQ(0, ta->GetRecordsAndUnRef().size());
}

}  // namespace tensorflow