/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "indirect_reference_table-inl.h"

#include "base/bit_utils.h"
#include "base/globals.h"
#include "base/mutator_locked_dumpable.h"
#include "base/systrace.h"
#include "base/utils.h"
#include "indirect_reference_table.h"
#include "jni/java_vm_ext.h"
#include "jni/jni_internal.h"
#include "mirror/object-inl.h"
#include "nth_caller_visitor.h"
#include "object_callbacks.h"
#include "reference_table.h"
#include "runtime-inl.h"
#include "scoped_thread_state_change-inl.h"
#include "thread.h"

#include <cstdlib>

namespace art HIDDEN {

static constexpr bool kDebugIRT = false;

// Maximum table size we allow.
static constexpr size_t kMaxTableSizeInBytes = 128 * MB;
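// This byte limit also bounds the entry count: Initialize() checks
// max_count <= kMaxTableSizeInBytes / sizeof(IrtEntry).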

const char* GetIndirectRefKindString(IndirectRefKind kind) {
  switch (kind) {
    case kJniTransition:
      return "JniTransition";
    case kLocal:
      return "Local";
    case kGlobal:
      return "Global";
    case kWeakGlobal:
      return "WeakGlobal";
  }
  return "IndirectRefKind Error";
}

void IndirectReferenceTable::AbortIfNoCheckJNI(const std::string& msg) {
  // If -Xcheck:jni is on, it'll give a more detailed error before aborting.
  JavaVMExt* vm = Runtime::Current()->GetJavaVM();
  if (!vm->IsCheckJniEnabled()) {
    // Otherwise, we want to abort rather than hand back a bad reference.
    LOG(FATAL) << msg;
  } else {
    LOG(ERROR) << msg;
  }
}

// Mmap an "indirect ref table" region. `table_bytes` must be a multiple of the page size.
static inline MemMap NewIRTMap(size_t table_bytes, std::string* error_msg) {
  MemMap result = MemMap::MapAnonymous("indirect ref table",
                                       table_bytes,
                                       PROT_READ | PROT_WRITE,
                                       /*low_4gb=*/ false,
                                       error_msg);
  if (!result.IsValid() && error_msg->empty()) {
    *error_msg = "Unable to map memory for indirect ref table";
  }
  return result;
}

IndirectReferenceTable::IndirectReferenceTable(IndirectRefKind kind)
    : table_mem_map_(),
      table_(nullptr),
      kind_(kind),
      top_index_(0u),
      max_entries_(0u),
      current_num_holes_(0) {
  CHECK_NE(kind, kJniTransition);
  CHECK_NE(kind, kLocal);
}

bool IndirectReferenceTable::Initialize(size_t max_count, std::string* error_msg) {
  CHECK(error_msg != nullptr);

  // Overflow and maximum check.
  CHECK_LE(max_count, kMaxTableSizeInBytes / sizeof(IrtEntry));

  const size_t table_bytes = RoundUp(max_count * sizeof(IrtEntry), gPageSize);
  table_mem_map_ = NewIRTMap(table_bytes, error_msg);
  if (!table_mem_map_.IsValid()) {
    DCHECK(!error_msg->empty());
    return false;
  }

  table_ = reinterpret_cast<IrtEntry*>(table_mem_map_.Begin());
  // The mapping may be larger than requested due to page-size rounding,
  // so record the actual capacity.
  max_entries_ = table_bytes / sizeof(IrtEntry);
  return true;
}

IndirectReferenceTable::~IndirectReferenceTable() {
}

void IndirectReferenceTable::ConstexprChecks() {
  // Use this for some assertions. They can't be put into the header because C++
  // requires the class to be complete.

  // Check kind.
  static_assert((EncodeIndirectRefKind(kLocal) & (~kKindMask)) == 0, "Kind encoding error");
  static_assert((EncodeIndirectRefKind(kGlobal) & (~kKindMask)) == 0, "Kind encoding error");
  static_assert((EncodeIndirectRefKind(kWeakGlobal) & (~kKindMask)) == 0, "Kind encoding error");
  static_assert(DecodeIndirectRefKind(EncodeIndirectRefKind(kLocal)) == kLocal,
                "Kind encoding error");
  static_assert(DecodeIndirectRefKind(EncodeIndirectRefKind(kGlobal)) == kGlobal,
                "Kind encoding error");
  static_assert(DecodeIndirectRefKind(EncodeIndirectRefKind(kWeakGlobal)) == kWeakGlobal,
                "Kind encoding error");

  // Check serial.
  static_assert(DecodeSerial(EncodeSerial(0u)) == 0u, "Serial encoding error");
  static_assert(DecodeSerial(EncodeSerial(1u)) == 1u, "Serial encoding error");
  static_assert(DecodeSerial(EncodeSerial(2u)) == 2u, "Serial encoding error");
  static_assert(DecodeSerial(EncodeSerial(3u)) == 3u, "Serial encoding error");

  // Table index.
  static_assert(DecodeIndex(EncodeIndex(0u)) == 0u, "Index encoding error");
  static_assert(DecodeIndex(EncodeIndex(1u)) == 1u, "Index encoding error");
  static_assert(DecodeIndex(EncodeIndex(2u)) == 2u, "Index encoding error");
  static_assert(DecodeIndex(EncodeIndex(3u)) == 3u, "Index encoding error");

  // Distinguishing between local and (weak) global references.
  static_assert((GetGlobalOrWeakGlobalMask() & EncodeIndirectRefKind(kJniTransition)) == 0u);
  static_assert((GetGlobalOrWeakGlobalMask() & EncodeIndirectRefKind(kLocal)) == 0u);
  static_assert((GetGlobalOrWeakGlobalMask() & EncodeIndirectRefKind(kGlobal)) != 0u);
  static_assert((GetGlobalOrWeakGlobalMask() & EncodeIndirectRefKind(kWeakGlobal)) != 0u);
}

// Holes:
//
// To keep the IRT compact, we want to fill "holes" created by non-stack-discipline Add & Remove
// operation sequences. For simplicity and lower memory overhead, we do not use a free list or
// similar. Instead, we scan for holes, with the expectation that we will find holes fast as they
// are usually near the end of the table (see the header, TODO: verify this assumption). To avoid
// scans when there are no holes, the number of known holes should be tracked.
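//
// A sketch of how holes form and get filled (assuming an initially empty table):
//   Add(A)    -> fills slot 0      top_index_ = 1, holes = 0
//   Add(B)    -> fills slot 1      top_index_ = 2, holes = 0
//   Remove(A) -> nulls slot 0      top_index_ = 2, holes = 1
//   Add(C)    -> reuses slot 0     top_index_ = 2, holes = 0
//   Remove(B) -> drops top entry   top_index_ = 1, holes = 0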

static size_t CountNullEntries(const IrtEntry* table, size_t to) {
  size_t count = 0;
  for (size_t index = 0u; index != to; ++index) {
    if (table[index].GetReference()->IsNull()) {
      count++;
    }
  }
  return count;
}

ALWAYS_INLINE
static inline void CheckHoleCount(IrtEntry* table,
                                  size_t exp_num_holes,
                                  size_t top_index) {
  if (kIsDebugBuild) {
    size_t count = CountNullEntries(table, top_index);
    CHECK_EQ(exp_num_holes, count) << " topIndex=" << top_index;
  }
}

IndirectRef IndirectReferenceTable::Add(ObjPtr<mirror::Object> obj, std::string* error_msg) {
  if (kDebugIRT) {
    LOG(INFO) << "+++ Add: top_index=" << top_index_
              << " holes=" << current_num_holes_;
  }

  CHECK(obj != nullptr);
  VerifyObject(obj);
  DCHECK(table_ != nullptr);

  if (top_index_ == max_entries_) {
    // TODO: Fill holes before reporting error.
    std::ostringstream oss;
    oss << "JNI ERROR (app bug): " << kind_ << " table overflow "
        << "(max=" << max_entries_ << ")"
        << MutatorLockedDumpable<IndirectReferenceTable>(*this);
    *error_msg = oss.str();
    return nullptr;
  }

  CheckHoleCount(table_, current_num_holes_, top_index_);

  // We know there's enough room in the table.  Now we just need to find
  // the right spot.  If there's a hole, find it and fill it; otherwise,
  // add to the end of the list.
  IndirectRef result;
  size_t index;
  if (current_num_holes_ > 0) {
    DCHECK_GT(top_index_, 1U);
    // Find the first hole; likely to be near the end of the list.
    IrtEntry* p_scan = &table_[top_index_ - 1];
    DCHECK(!p_scan->GetReference()->IsNull());
    --p_scan;
    while (!p_scan->GetReference()->IsNull()) {
      DCHECK_GT(p_scan, table_);
      --p_scan;
    }
    index = p_scan - table_;
    current_num_holes_--;
  } else {
    // Add to the end.
    index = top_index_;
    ++top_index_;
  }
  table_[index].Add(obj);
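  // Pack the slot into an opaque reference. ToIndirectRef() encodes the kind,
  // the entry's serial number, and the table index (see the Encode*/Decode*
  // helpers exercised in ConstexprChecks() above); presumably the serial is
  // bumped by IrtEntry::Add() so that a stale reference to a reused slot
  // decodes to a different value.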
  result = ToIndirectRef(index);
  if (kDebugIRT) {
    LOG(INFO) << "+++ added at " << ExtractIndex(result) << " top=" << top_index_
              << " holes=" << current_num_holes_;
  }

  DCHECK(result != nullptr);
  return result;
}

// Removes an object. We extract the table offset bits from "iref"
// and zap the corresponding entry, leaving a hole if it's not at the top.
// Returns "false" if nothing was removed.
bool IndirectReferenceTable::Remove(IndirectRef iref) {
  if (kDebugIRT) {
    LOG(INFO) << "+++ Remove: top_index=" << top_index_
              << " holes=" << current_num_holes_;
  }

  // TODO: We should eagerly check the ref kind against `kind_` instead of postponing until
  // `CheckEntry()` below. Passing the wrong kind can currently result in misleading warnings.

  const uint32_t top_index = top_index_;

  DCHECK(table_ != nullptr);

  const uint32_t idx = ExtractIndex(iref);
  if (idx >= top_index) {
    // Bad --- stale reference?
    LOG(WARNING) << "Attempt to remove invalid index " << idx
                 << " (top=" << top_index << ")";
    return false;
  }

  CheckHoleCount(table_, current_num_holes_, top_index_);

  if (idx == top_index - 1) {
    // Top-most entry.  Scan up and consume holes.

    if (!CheckEntry("remove", iref, idx)) {
      return false;
    }

    *table_[idx].GetReference() = GcRoot<mirror::Object>(nullptr);
    if (current_num_holes_ != 0) {
      uint32_t collapse_top_index = top_index;
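      // Walk down from the old top, consuming any run of trailing holes so
      // the new top index ends up just past the last live entry.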
      while (--collapse_top_index > 0u && current_num_holes_ != 0) {
        if (kDebugIRT) {
          ScopedObjectAccess soa(Thread::Current());
          LOG(INFO) << "+++ checking for hole at " << collapse_top_index - 1 << " val="
                    << table_[collapse_top_index - 1].GetReference()->Read<kWithoutReadBarrier>();
        }
        if (!table_[collapse_top_index - 1].GetReference()->IsNull()) {
          break;
        }
        if (kDebugIRT) {
          LOG(INFO) << "+++ ate hole at " << (collapse_top_index - 1);
        }
        current_num_holes_--;
      }
      top_index_ = collapse_top_index;

      CheckHoleCount(table_, current_num_holes_, top_index_);
    } else {
      top_index_ = top_index - 1;
      if (kDebugIRT) {
        LOG(INFO) << "+++ ate last entry " << top_index - 1;
      }
    }
  } else {
    // Not the top-most entry.  This creates a hole.  We null out the entry to prevent somebody
    // from deleting it twice and screwing up the hole count.
    if (table_[idx].GetReference()->IsNull()) {
      LOG(INFO) << "--- WEIRD: removing null entry " << idx;
      return false;
    }
    if (!CheckEntry("remove", iref, idx)) {
      return false;
    }

    *table_[idx].GetReference() = GcRoot<mirror::Object>(nullptr);
    current_num_holes_++;
    CheckHoleCount(table_, current_num_holes_, top_index_);
    if (kDebugIRT) {
      LOG(INFO) << "+++ left hole at " << idx << ", holes=" << current_num_holes_;
    }
  }

  return true;
}

void IndirectReferenceTable::Trim() {
  ScopedTrace trace(__PRETTY_FUNCTION__);
  DCHECK(table_mem_map_.IsValid());
  const size_t top_index = Capacity();
  uint8_t* release_start = AlignUp(reinterpret_cast<uint8_t*>(&table_[top_index]), gPageSize);
  uint8_t* release_end = static_cast<uint8_t*>(table_mem_map_.BaseEnd());
  DCHECK_GE(reinterpret_cast<uintptr_t>(release_end), reinterpret_cast<uintptr_t>(release_start));
  DCHECK_ALIGNED_PARAM(release_end, gPageSize);
  DCHECK_ALIGNED_PARAM(release_end - release_start, gPageSize);
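  // Note: for this anonymous private mapping, MADV_DONTNEED lets the kernel
  // reclaim the physical pages backing the unused tail of the table; any
  // later touch of this range sees zero-filled pages, i.e. null entries.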
  if (release_start != release_end) {
    madvise(release_start, release_end - release_start, MADV_DONTNEED);
  }
}

void IndirectReferenceTable::VisitRoots(RootVisitor* visitor, const RootInfo& root_info) {
  BufferedRootVisitor<kDefaultBufferedRootCount> root_visitor(visitor, root_info);
  for (size_t i = 0, capacity = Capacity(); i != capacity; ++i) {
    GcRoot<mirror::Object>* ref = table_[i].GetReference();
    if (!ref->IsNull()) {
      root_visitor.VisitRoot(*ref);
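      // The visitor may update the root in place (e.g. when the GC moves the
      // object), but it must never null it: a nulled entry would be counted
      // as a hole and break the table's hole accounting.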
      DCHECK(!ref->IsNull());
    }
  }
}

void IndirectReferenceTable::SweepJniWeakGlobals(IsMarkedVisitor* visitor) {
  CHECK_EQ(kind_, kWeakGlobal);
  MutexLock mu(Thread::Current(), *Locks::jni_weak_globals_lock_);
  Runtime* const runtime = Runtime::Current();
  for (size_t i = 0, capacity = Capacity(); i != capacity; ++i) {
    GcRoot<mirror::Object>* entry = table_[i].GetReference();
    // Need to skip null here to distinguish between null entries and cleared weak ref entries.
    if (!entry->IsNull()) {
      mirror::Object* obj = entry->Read<kWithoutReadBarrier>();
      mirror::Object* new_obj = visitor->IsMarked(obj);
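      // If the object is no longer marked it is about to be reclaimed; store
      // the runtime's cleared-weak-global sentinel instead of null so the
      // slot still reads as occupied and decodes as a cleared weak reference.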
      if (new_obj == nullptr) {
        new_obj = runtime->GetClearedJniWeakGlobal();
      }
      *entry = GcRoot<mirror::Object>(new_obj);
    }
  }
}

void IndirectReferenceTable::Dump(std::ostream& os) const {
  os << kind_ << " table dump:\n";
  ReferenceTable::Table entries;
  for (size_t i = 0; i < Capacity(); ++i) {
    ObjPtr<mirror::Object> obj = table_[i].GetReference()->Read<kWithoutReadBarrier>();
    if (obj != nullptr) {
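      // The barrier-free read above was only a cheap null check; re-read
      // through the read barrier to get the up-to-date object pointer.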
      obj = table_[i].GetReference()->Read();
      entries.push_back(GcRoot<mirror::Object>(obj));
    }
  }
  ReferenceTable::Dump(os, entries);
}

size_t IndirectReferenceTable::FreeCapacity() const {
  return max_entries_ - top_index_;
}

}  // namespace art