// Copyright (C) 2008 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "heap_bitmap.h"

#include <sys/mman.h>

#include <algorithm>
#include <vector>

#include "UniquePtr.h"
#include "logging.h"
#include "utils.h"

namespace art {

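// Allocates a bitmap large enough to cover a heap of heap_capacity bytes starting at heap_begin.
// The bitmap storage itself is an anonymous mapping, so Clear() can return its pages to the
// kernel. Returns NULL if the backing memory cannot be mapped.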
HeapBitmap* HeapBitmap::Create(const char* name, byte* heap_begin, size_t heap_capacity) {
  CHECK(heap_begin != NULL);
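  // Each bitmap word covers kBitsPerWord * kAlignment bytes of heap; HB_OFFSET_TO_INDEX converts
  // the capacity into the number of words needed, and multiplying by kWordSize gives bytes.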
  size_t bitmap_size = HB_OFFSET_TO_INDEX(heap_capacity) * kWordSize;
  UniquePtr<MemMap> mem_map(MemMap::MapAnonymous(name, NULL, bitmap_size, PROT_READ | PROT_WRITE));
  if (mem_map.get() == NULL) {
    LOG(ERROR) << "Failed to allocate bitmap " << name;
    return NULL;
  }
  word* bitmap_begin = reinterpret_cast<word*>(mem_map->Begin());
  return new HeapBitmap(name, mem_map.release(), bitmap_begin, bitmap_size, heap_begin);
}

// Clean up any resources associated with the bitmap.
HeapBitmap::~HeapBitmap() {}

// Fill the bitmap with zeroes. Returns the bitmap's memory to the
// system as a side-effect.
void HeapBitmap::Clear() {
  if (bitmap_begin_ != NULL) {
    // This returns the memory to the system. Successive page faults
    // will return zeroed memory.
    int result = madvise(bitmap_begin_, bitmap_size_, MADV_DONTNEED);
    if (result == -1) {
      PLOG(WARNING) << "madvise failed";
    }
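    // heap_end_ below heap_begin_ denotes an empty bitmap (see the empty-bitmap check in
    // SweepWalk).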
    heap_end_ = heap_begin_ - 1;
  }
}

// Return true iff <obj> is within the range of pointers that this bitmap could potentially cover,
// even if a bit has not been set for it.
bool HeapBitmap::HasAddress(const void* obj) const {
  if (obj != NULL) {
    const uintptr_t offset = reinterpret_cast<uintptr_t>(obj) - heap_begin_;
    const size_t index = HB_OFFSET_TO_INDEX(offset);
    return index < bitmap_size_ / kWordSize;
  }
  return false;
}

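// Visits set bits between visit_begin and visit_end in address order, invoking visitor on the
// object corresponding to each set bit.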
void HeapBitmap::VisitRange(uintptr_t visit_begin, uintptr_t visit_end, Callback* visitor, void* arg) const {
  size_t start = HB_OFFSET_TO_INDEX(visit_begin - heap_begin_);
  size_t end = HB_OFFSET_TO_INDEX(visit_end - heap_begin_ - 1);
  for (size_t i = start; i <= end; i++) {
    word w = bitmap_begin_[i];
    if (w != 0) {
      word high_bit = static_cast<word>(1) << (kBitsPerWord - 1);
      uintptr_t ptr_base = HB_INDEX_TO_OFFSET(i) + heap_begin_;
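      // CLZ yields the number of leading zeros, i.e. the position of the highest set bit. The
      // most significant bit maps to the lowest address covered by this word, so clearing bits
      // from the top visits objects in increasing address order.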
      while (w != 0) {
        const int shift = CLZ(w);
        Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
        (*visitor)(obj, arg);
        w &= ~(high_bit >> shift);
      }
    }
  }
}

// Visits set bits in address order. The callback is not permitted to
// change the bitmap bits or max during the traversal.
void HeapBitmap::Walk(HeapBitmap::Callback* callback, void* arg) {
  CHECK(bitmap_begin_ != NULL);
  CHECK(callback != NULL);
  uintptr_t end = HB_OFFSET_TO_INDEX(heap_end_ - heap_begin_);
  for (uintptr_t i = 0; i <= end; ++i) {
    word w = bitmap_begin_[i];
    if (UNLIKELY(w != 0)) {
      word high_bit = static_cast<word>(1) << (kBitsPerWord - 1);
      uintptr_t ptr_base = HB_INDEX_TO_OFFSET(i) + heap_begin_;
      while (w != 0) {
        const int shift = CLZ(w);
        Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
        (*callback)(obj, arg);
        w &= ~(high_bit >> shift);
      }
    }
  }
}

// Similar to Walk but the callback routine is permitted to change the bitmap bits and end during
// traversal. Used by the root marking scan exclusively.
//
// The callback is invoked with a finger argument. The finger is a pointer to an address not yet
// visited by the traversal. If the callback sets a bit for an address at or above the finger, this
// address will be visited by the traversal. If the callback sets a bit for an address below the
// finger, this address will not be visited (typically such an address would be placed on the
// marking stack).
void HeapBitmap::ScanWalk(uintptr_t scan_begin, uintptr_t scan_end, ScanCallback* callback, void* arg) {
  CHECK(bitmap_begin_ != NULL);
  CHECK(callback != NULL);
  CHECK_LE(scan_begin, scan_end);
  CHECK_GE(scan_begin, heap_begin_);
  size_t start = HB_OFFSET_TO_INDEX(scan_begin - heap_begin_);
  if (scan_end < heap_end_) {
    // The end of the space we're looking at is before the current bitmap maximum, so scan to
    // that point and don't recompute the end on each iteration.
    size_t end = HB_OFFSET_TO_INDEX(scan_end - heap_begin_ - 1);
    for (size_t i = start; i <= end; i++) {
      word w = bitmap_begin_[i];
      if (UNLIKELY(w != 0)) {
        word high_bit = static_cast<word>(1) << (kBitsPerWord - 1);
        uintptr_t ptr_base = HB_INDEX_TO_OFFSET(i) + heap_begin_;
        void* finger = reinterpret_cast<void*>(HB_INDEX_TO_OFFSET(i + 1) + heap_begin_);
        while (w != 0) {
          const int shift = CLZ(w);
          Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
          (*callback)(obj, finger, arg);
          w &= ~(high_bit >> shift);
        }
      }
    }
  } else {
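    // scan_end is at or beyond the bitmap's current maximum, and the callback may extend
    // heap_end_ as it sets new bits, so the end index has to be recomputed after each word.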
    size_t end = HB_OFFSET_TO_INDEX(heap_end_ - heap_begin_);
    for (size_t i = start; i <= end; i++) {
      word w = bitmap_begin_[i];
      if (UNLIKELY(w != 0)) {
        word high_bit = static_cast<word>(1) << (kBitsPerWord - 1);
        uintptr_t ptr_base = HB_INDEX_TO_OFFSET(i) + heap_begin_;
        void* finger = reinterpret_cast<void*>(HB_INDEX_TO_OFFSET(i + 1) + heap_begin_);
        while (w != 0) {
          const int shift = CLZ(w);
          Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
          (*callback)(obj, finger, arg);
          w &= ~(high_bit >> shift);
        }
      }
      // Update 'end' in case the callback modified the bitmap.
      end = HB_OFFSET_TO_INDEX(heap_end_ - heap_begin_);
    }
  }
}

// Walk through the bitmaps in increasing address order, and find the
// object pointers that correspond to garbage objects. Call
// <callback> zero or more times with lists of these object pointers.
//
// The callback is not permitted to increase the max of either bitmap.
void HeapBitmap::SweepWalk(const HeapBitmap& live_bitmap,
                           const HeapBitmap& mark_bitmap,
                           uintptr_t sweep_begin, uintptr_t sweep_end,
                           HeapBitmap::SweepCallback* callback, void* arg) {
  CHECK(live_bitmap.bitmap_begin_ != NULL);
  CHECK(mark_bitmap.bitmap_begin_ != NULL);
  CHECK_EQ(live_bitmap.heap_begin_, mark_bitmap.heap_begin_);
  CHECK_EQ(live_bitmap.bitmap_size_, mark_bitmap.bitmap_size_);
  CHECK(callback != NULL);
  CHECK_LE(sweep_begin, sweep_end);
  CHECK_GE(sweep_begin, live_bitmap.heap_begin_);
  sweep_end = std::min(sweep_end - 1, live_bitmap.heap_end_);
  if (live_bitmap.heap_end_ < live_bitmap.heap_begin_) {
    // Easy case; both are obviously empty.
    // TODO: this should never happen
    return;
  }
  // TODO: rewrite the callbacks to accept a std::vector<Object*> rather than an Object**?
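  // Garbage pointers are batched into pointer_buf so the callback is invoked with groups of
  // objects rather than one object at a time.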
  std::vector<Object*> pointer_buf(4 * kBitsPerWord);
  Object** pb = &pointer_buf[0];
  size_t start = HB_OFFSET_TO_INDEX(sweep_begin - live_bitmap.heap_begin_);
  size_t end = HB_OFFSET_TO_INDEX(sweep_end - live_bitmap.heap_begin_);
  word* live = live_bitmap.bitmap_begin_;
  word* mark = mark_bitmap.bitmap_begin_;
  for (size_t i = start; i <= end; i++) {
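    // Bits set in the live bitmap but clear in the mark bitmap denote unreachable (garbage)
    // objects.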
    word garbage = live[i] & ~mark[i];
    if (UNLIKELY(garbage != 0)) {
      word high_bit = static_cast<word>(1) << (kBitsPerWord - 1);
      uintptr_t ptr_base = HB_INDEX_TO_OFFSET(i) + live_bitmap.heap_begin_;
      while (garbage != 0) {
        const int shift = CLZ(garbage);
        garbage &= ~(high_bit >> shift);
        *pb++ = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
      }
      // Make sure that there are always enough slots available for an
      // entire word's worth of set bits.
      if (pb >= &pointer_buf[pointer_buf.size() - kBitsPerWord]) {
        (*callback)(pb - &pointer_buf[0], &pointer_buf[0], arg);
        pb = &pointer_buf[0];
      }
    }
  }
  if (pb > &pointer_buf[0]) {
    (*callback)(pb - &pointer_buf[0], &pointer_buf[0], arg);
  }
}

}  // namespace art