/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "indirect_reference_table-inl.h"

#include "jni_internal.h"
#include "reference_table.h"
#include "runtime.h"
#include "scoped_thread_state_change.h"
#include "thread.h"
#include "utils.h"
#include "verify_object-inl.h"

#include <cstdlib>
#include <sys/mman.h>

namespace art {

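// Wrapper that lets a value whose Dump() requires the mutator lock be streamed into a
// logging expression while keeping the lock annotations satisfied.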
template<typename T>
class MutatorLockedDumpable {
 public:
  explicit MutatorLockedDumpable(T& value)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) : value_(value) {
  }

  void Dump(std::ostream& os) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    value_.Dump(os);
  }

 private:
  T& value_;

  DISALLOW_COPY_AND_ASSIGN(MutatorLockedDumpable);
};

template<typename T>
std::ostream& operator<<(std::ostream& os, const MutatorLockedDumpable<T>& rhs)
// TODO: should be SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) however annotalysis
//       currently fails for this.
    NO_THREAD_SAFETY_ANALYSIS {
  rhs.Dump(os);
  return os;
}

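// Aborts the runtime when a bad reference has been detected and CheckJNI is off; with
// -Xcheck:jni enabled, the CheckJNI layer reports a more detailed error first.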
void IndirectReferenceTable::AbortIfNoCheckJNI() {
  // If -Xcheck:jni is on, it'll give a more detailed error before aborting.
  if (!Runtime::Current()->GetJavaVM()->check_jni) {
    // Otherwise, we want to abort rather than hand back a bad reference.
    LOG(FATAL) << "JNI ERROR (app bug): see above.";
  }
}

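// Allocates the backing store as an anonymous memory mapping large enough for maxCount
// entries and initializes the segment state to the first (empty) segment.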
IndirectReferenceTable::IndirectReferenceTable(size_t initialCount,
                                               size_t maxCount, IndirectRefKind desiredKind)
    : kind_(desiredKind),
      max_entries_(maxCount) {
  CHECK_GT(initialCount, 0U);
  CHECK_LE(initialCount, maxCount);
  CHECK_NE(desiredKind, kHandleScopeOrInvalid);

  std::string error_str;
  const size_t table_bytes = maxCount * sizeof(IrtEntry);
  table_mem_map_.reset(MemMap::MapAnonymous("indirect ref table", nullptr, table_bytes,
                                            PROT_READ | PROT_WRITE, false, &error_str));
  CHECK(table_mem_map_.get() != nullptr) << error_str;
  CHECK_EQ(table_mem_map_->Size(), table_bytes);
  table_ = reinterpret_cast<IrtEntry*>(table_mem_map_->Begin());
  CHECK(table_ != nullptr);
  segment_state_.all = IRT_FIRST_SEGMENT;
}

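// The backing mapping is released when table_mem_map_ is destroyed.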
IndirectReferenceTable::~IndirectReferenceTable() {
}

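// Adds "obj" to the table. Reuses the first hole in the current segment if one exists;
// otherwise appends at the current top index. Aborts if the table is full.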
IndirectRef IndirectReferenceTable::Add(uint32_t cookie, mirror::Object* obj) {
  IRTSegmentState prevState;
  prevState.all = cookie;
  size_t topIndex = segment_state_.parts.topIndex;

  CHECK(obj != nullptr);
  VerifyObject(obj);
  DCHECK(table_ != nullptr);
  DCHECK_GE(segment_state_.parts.numHoles, prevState.parts.numHoles);

  if (topIndex == max_entries_) {
    LOG(FATAL) << "JNI ERROR (app bug): " << kind_ << " table overflow "
               << "(max=" << max_entries_ << ")\n"
               << MutatorLockedDumpable<IndirectReferenceTable>(*this);
  }

  // We know there's enough room in the table.  Now we just need to find
  // the right spot.  If there's a hole, find it and fill it; otherwise,
  // add to the end of the list.
  IndirectRef result;
  int numHoles = segment_state_.parts.numHoles - prevState.parts.numHoles;
  size_t index;
  if (numHoles > 0) {
    DCHECK_GT(topIndex, 1U);
    // Find the first hole; likely to be near the end of the list.
    IrtEntry* pScan = &table_[topIndex - 1];
    DCHECK(!pScan->GetReference()->IsNull());
    --pScan;
    while (!pScan->GetReference()->IsNull()) {
      DCHECK_GE(pScan, table_ + prevState.parts.topIndex);
      --pScan;
    }
    index = pScan - table_;
    segment_state_.parts.numHoles--;
  } else {
    // Add to the end.
    index = topIndex++;
    segment_state_.parts.topIndex = topIndex;
  }
  table_[index].Add(obj);
  result = ToIndirectRef(index);
  if (false) {
    LOG(INFO) << "+++ added at " << ExtractIndex(result) << " top=" << segment_state_.parts.topIndex
              << " holes=" << segment_state_.parts.numHoles;
  }

  DCHECK(result != nullptr);
  return result;
}

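// Logs a fatal error, including a full table dump, if any entries remain.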
void IndirectReferenceTable::AssertEmpty() {
  if (UNLIKELY(begin() != end())) {
    ScopedObjectAccess soa(Thread::Current());
    LOG(FATAL) << "Internal Error: non-empty local reference table\n"
               << MutatorLockedDumpable<IndirectReferenceTable>(*this);
  }
}

// Removes an object. We extract the table offset bits from "iref"
// and zap the corresponding entry, leaving a hole if it's not at the top.
// If the entry is not between the current top index and the bottom index
// specified by the cookie, we don't remove anything. This is the behavior
// required by JNI's DeleteLocalRef function.
// This method is not called when a local frame is popped; this is only used
// for explicit single removals.
// Returns "false" if nothing was removed.
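// A minimal usage sketch of the cookie protocol, mirroring what the JNI local reference
// code does (GetSegmentState() is assumed here to be the header's accessor for
// segment_state_.all):
//
//   uint32_t cookie = table.GetSegmentState();  // snapshot taken when the segment begins
//   IndirectRef ref = table.Add(cookie, obj);
//   ...
//   table.Remove(cookie, ref);  // leaves a hole unless "ref" was the top-most entry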
bool IndirectReferenceTable::Remove(uint32_t cookie, IndirectRef iref) {
  IRTSegmentState prevState;
  prevState.all = cookie;
  int topIndex = segment_state_.parts.topIndex;
  int bottomIndex = prevState.parts.topIndex;

  DCHECK(table_ != nullptr);
  DCHECK_GE(segment_state_.parts.numHoles, prevState.parts.numHoles);

  if (GetIndirectRefKind(iref) == kHandleScopeOrInvalid &&
      Thread::Current()->HandleScopeContains(reinterpret_cast<jobject>(iref))) {
    LOG(WARNING) << "Attempt to remove local handle scope entry from IRT, ignoring";
    return true;
  }
  const int idx = ExtractIndex(iref);
  if (idx < bottomIndex) {
    // Wrong segment.
    LOG(WARNING) << "Attempt to remove index outside index area (" << idx
                 << " vs " << bottomIndex << "-" << topIndex << ")";
    return false;
  }
  if (idx >= topIndex) {
    // Bad --- stale reference?
    LOG(WARNING) << "Attempt to remove invalid index " << idx
                 << " (bottom=" << bottomIndex << " top=" << topIndex << ")";
    return false;
  }

  if (idx == topIndex - 1) {
    // Top-most entry.  Scan up and consume holes.

    if (!CheckEntry("remove", iref, idx)) {
      return false;
    }

    *table_[idx].GetReference() = GcRoot<mirror::Object>(nullptr);
    int numHoles = segment_state_.parts.numHoles - prevState.parts.numHoles;
    if (numHoles != 0) {
      while (--topIndex > bottomIndex && numHoles != 0) {
        if (false) {
          LOG(INFO) << "+++ checking for hole at " << topIndex - 1
                    << " (cookie=" << cookie << ") val="
                    << table_[topIndex - 1].GetReference()->Read<kWithoutReadBarrier>();
        }
        if (!table_[topIndex - 1].GetReference()->IsNull()) {
          break;
        }
        if (false) {
          LOG(INFO) << "+++ ate hole at " << (topIndex - 1);
        }
        numHoles--;
      }
      segment_state_.parts.numHoles = numHoles + prevState.parts.numHoles;
      segment_state_.parts.topIndex = topIndex;
    } else {
      segment_state_.parts.topIndex = topIndex - 1;
      if (false) {
        LOG(INFO) << "+++ ate last entry " << topIndex - 1;
      }
    }
  } else {
    // Not the top-most entry.  This creates a hole.  We null out the
    // entry to prevent somebody from deleting it twice and screwing up
    // the hole count.
    if (table_[idx].GetReference()->IsNull()) {
      LOG(INFO) << "--- WEIRD: removing null entry " << idx;
      return false;
    }
    if (!CheckEntry("remove", iref, idx)) {
      return false;
    }

    *table_[idx].GetReference() = GcRoot<mirror::Object>(nullptr);
    segment_state_.parts.numHoles++;
    if (false) {
      LOG(INFO) << "+++ left hole at " << idx << ", holes=" << segment_state_.parts.numHoles;
    }
  }

  return true;
}

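// Releases the unused, page-aligned tail of the backing mapping back to the kernel
// via madvise(MADV_DONTNEED).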
void IndirectReferenceTable::Trim() {
  const size_t top_index = Capacity();
  auto* release_start = AlignUp(reinterpret_cast<uint8_t*>(&table_[top_index]), kPageSize);
  uint8_t* release_end = table_mem_map_->End();
  madvise(release_start, release_end - release_start, MADV_DONTNEED);
}

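// Reports every live entry in the table to the GC root visitor callback.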
void IndirectReferenceTable::VisitRoots(RootCallback* callback, void* arg,
                                        const RootInfo& root_info) {
  for (auto ref : *this) {
    callback(ref, arg, root_info);
    DCHECK(*ref != nullptr);
  }
}

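// Collects the current entries (skipping holes, but keeping cleared weak globals so that
// ReferenceTable::Dump() can report them) and delegates formatting to ReferenceTable::Dump().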
void IndirectReferenceTable::Dump(std::ostream& os) const {
  os << kind_ << " table dump:\n";
  ReferenceTable::Table entries;
  for (size_t i = 0; i < Capacity(); ++i) {
    mirror::Object* obj = table_[i].GetReference()->Read<kWithoutReadBarrier>();
    if (UNLIKELY(obj == nullptr)) {
      // Skip holes (null entries).
    } else if (UNLIKELY(obj == kClearedJniWeakGlobal)) {
      // ReferenceTable::Dump() will handle kClearedJniWeakGlobal
      // while the read barrier won't.
      entries.push_back(GcRoot<mirror::Object>(obj));
    } else {
      obj = table_[i].GetReference()->Read();
      entries.push_back(GcRoot<mirror::Object>(obj));
    }
  }
  ReferenceTable::Dump(os, entries);
}

}  // namespace art