// Copyright (C) 2008 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "heap_bitmap.h"

#include <sys/mman.h>

#include "UniquePtr.h"
#include "logging.h"
#include "utils.h"

namespace art {

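// Allocate the backing storage for a bitmap covering heap_capacity bytes of heap starting at
// heap_begin (one bit per kAlignment-byte granule, going by how the walks below decode bits),
// using an anonymous memory mapping. Returns NULL if the mapping cannot be created.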
HeapBitmap* HeapBitmap::Create(const char* name, byte* heap_begin, size_t heap_capacity) {
  CHECK(heap_begin != NULL);
  size_t bitmap_size = HB_OFFSET_TO_INDEX(heap_capacity) * kWordSize;
  UniquePtr<MemMap> mem_map(MemMap::MapAnonymous(name, NULL, bitmap_size, PROT_READ | PROT_WRITE));
  if (mem_map.get() == NULL) {
    LOG(ERROR) << "Failed to allocate bitmap " << name;
    return NULL;
  }
  word* bitmap_begin = reinterpret_cast<word*>(mem_map->Begin());
  return new HeapBitmap(name, mem_map.release(), bitmap_begin, bitmap_size, heap_begin);
}

// Clean up any resources associated with the bitmap.
HeapBitmap::~HeapBitmap() {}

// Fill the bitmap with zeroes. Returns the bitmap's memory to the
// system as a side-effect.
void HeapBitmap::Clear() {
  if (bitmap_begin_ != NULL) {
    // This returns the memory to the system. Successive page faults
    // will return zeroed memory.
    int result = madvise(bitmap_begin_, bitmap_size_, MADV_DONTNEED);
    if (result == -1) {
      PLOG(WARNING) << "madvise failed";
    }
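    // Leave heap_end_ below heap_begin_ to record that no bits are set; SweepWalk treats this
    // state as an empty bitmap.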
    heap_end_ = heap_begin_ - 1;
  }
}

// Return true iff <obj> is within the range of pointers that this bitmap could potentially cover,
// even if a bit has not been set for it.
bool HeapBitmap::HasAddress(const void* obj) const {
  if (obj != NULL) {
    const uintptr_t offset = (uintptr_t)obj - heap_begin_;
    const size_t index = HB_OFFSET_TO_INDEX(offset);
    return index < bitmap_size_ / kWordSize;
  }
  return false;
}

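// Visit the objects whose bits are set in the bitmap words covering [visit_begin, visit_end),
// calling the visitor on each one in address order. Note that the walk is word-granular: set bits
// just outside the requested range that share a bitmap word with it are visited as well.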
void HeapBitmap::VisitRange(uintptr_t visit_begin, uintptr_t visit_end, Callback* visitor, void* arg) const {
  size_t start = HB_OFFSET_TO_INDEX(visit_begin - heap_begin_);
  size_t end = HB_OFFSET_TO_INDEX(visit_end - heap_begin_ - 1);
  for (size_t i = start; i <= end; i++) {
    word w = bitmap_begin_[i];
    if (w != 0) {
      word high_bit = 1 << (kBitsPerWord - 1);
      uintptr_t ptr_base = HB_INDEX_TO_OFFSET(i) + heap_begin_;
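      // The walk treats the most significant bit of each word as the lowest address that word
      // covers, so CLZ(w) is the offset (in kAlignment units) of the next set bit from ptr_base;
      // clearing (high_bit >> shift) removes that bit before looking for the next one.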
      while (w != 0) {
        const int shift = CLZ(w);
        Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
        (*visitor)(obj, arg);
        w &= ~(high_bit >> shift);
      }
    }
  }
}

// Visits set bits in address order. The callback is not permitted to
// change the bitmap bits or max during the traversal.
void HeapBitmap::Walk(HeapBitmap::Callback* callback, void* arg) {
  CHECK(bitmap_begin_ != NULL);
  CHECK(callback != NULL);
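  // heap_end_ is the "max" mentioned above; it is assumed here to track the highest heap address
  // that may have a set bit, so only words up to and including the one covering it are examined.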
  uintptr_t end = HB_OFFSET_TO_INDEX(heap_end_ - heap_begin_);
  for (uintptr_t i = 0; i <= end; ++i) {
    word w = bitmap_begin_[i];
    if (UNLIKELY(w != 0)) {
      word high_bit = 1 << (kBitsPerWord - 1);
      uintptr_t ptr_base = HB_INDEX_TO_OFFSET(i) + heap_begin_;
      while (w != 0) {
        const int shift = CLZ(w);
        Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
        (*callback)(obj, arg);
        w &= ~(high_bit >> shift);
      }
    }
  }
}

// Similar to Walk but the callback routine is permitted to change the bitmap bits and end during
// traversal. Used exclusively by the root marking scan.
//
// The callback is invoked with a finger argument. The finger is a pointer to an address not yet
// visited by the traversal. If the callback sets a bit for an address at or above the finger, this
// address will be visited by the traversal. If the callback sets a bit for an address below the
// finger, this address will not be visited (typically such an address would be placed on the
// marking stack).
void HeapBitmap::ScanWalk(uintptr_t scan_begin, uintptr_t scan_end, ScanCallback* callback, void* arg) {
  CHECK(bitmap_begin_ != NULL);
  CHECK(callback != NULL);
  CHECK_LE(scan_begin, scan_end);
  CHECK_GE(scan_begin, heap_begin_);
  size_t start = HB_OFFSET_TO_INDEX(scan_begin - heap_begin_);
  if (scan_end < heap_end_) {
    // The end of the range we're scanning is below the bitmap's current maximum (heap_end_), so
    // scan up to it and don't recompute 'end' on each iteration.
    size_t end = HB_OFFSET_TO_INDEX(scan_end - heap_begin_ - 1);
    for (size_t i = start; i <= end; i++) {
      word w = bitmap_begin_[i];
      if (UNLIKELY(w != 0)) {
        word high_bit = 1 << (kBitsPerWord - 1);
        uintptr_t ptr_base = HB_INDEX_TO_OFFSET(i) + heap_begin_;
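        // The finger handed to the callback is the first address covered by the next bitmap word,
        // i.e. the start of the region this scan has not visited yet.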
        void* finger = reinterpret_cast<void*>(HB_INDEX_TO_OFFSET(i + 1) + heap_begin_);
        while (w != 0) {
          const int shift = CLZ(w);
          Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
          (*callback)(obj, finger, arg);
          w &= ~(high_bit >> shift);
        }
      }
    }
  } else {
    size_t end = HB_OFFSET_TO_INDEX(heap_end_ - heap_begin_);
    for (size_t i = start; i <= end; i++) {
      word w = bitmap_begin_[i];
      if (UNLIKELY(w != 0)) {
        word high_bit = 1 << (kBitsPerWord - 1);
        uintptr_t ptr_base = HB_INDEX_TO_OFFSET(i) + heap_begin_;
        void* finger = reinterpret_cast<void*>(HB_INDEX_TO_OFFSET(i + 1) + heap_begin_);
        while (w != 0) {
          const int shift = CLZ(w);
          Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
          (*callback)(obj, finger, arg);
          w &= ~(high_bit >> shift);
        }
      }
      // update 'end' in case callback modified bitmap
      end = HB_OFFSET_TO_INDEX(heap_end_ - heap_begin_);
    }
  }
}

// Walk through the bitmaps in increasing address order, and find the
// object pointers that correspond to garbage objects. Call
// <callback> zero or more times with lists of these object pointers.
//
// The callback is not permitted to increase the max of either bitmap.
void HeapBitmap::SweepWalk(const HeapBitmap& live_bitmap,
                           const HeapBitmap& mark_bitmap,
                           uintptr_t sweep_begin, uintptr_t sweep_end,
                           HeapBitmap::SweepCallback* callback, void* arg) {
  CHECK(live_bitmap.bitmap_begin_ != NULL);
  CHECK(mark_bitmap.bitmap_begin_ != NULL);
  CHECK_EQ(live_bitmap.heap_begin_, mark_bitmap.heap_begin_);
  CHECK_EQ(live_bitmap.bitmap_size_, mark_bitmap.bitmap_size_);
  CHECK(callback != NULL);
  CHECK_LE(sweep_begin, sweep_end);
  CHECK_GE(sweep_begin, live_bitmap.heap_begin_);
  sweep_end = std::min(sweep_end - 1, live_bitmap.heap_end_);
  if (live_bitmap.heap_end_ < live_bitmap.heap_begin_) {
    // Easy case; both are obviously empty.
    // TODO: this should never happen
    return;
  }
  // TODO: rewrite the callbacks to accept a std::vector<Object*> rather than a Object**?
  std::vector<Object*> pointer_buf(4 * kBitsPerWord);
  Object** pb = &pointer_buf[0];
  size_t start = HB_OFFSET_TO_INDEX(sweep_begin - live_bitmap.heap_begin_);
  size_t end = HB_OFFSET_TO_INDEX(sweep_end - live_bitmap.heap_begin_);
  word* live = live_bitmap.bitmap_begin_;
  word* mark = mark_bitmap.bitmap_begin_;
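  // An object is garbage when its bit is set in the live bitmap but clear in the mark bitmap,
  // i.e. live[i] & ~mark[i]. Garbage pointers are collected into pointer_buf and handed to the
  // callback in batches, flushing whenever the buffer might not have room for another full word.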
  for (size_t i = start; i <= end; i++) {
    word garbage = live[i] & ~mark[i];
    if (UNLIKELY(garbage != 0)) {
      word high_bit = 1 << (kBitsPerWord - 1);
      uintptr_t ptr_base = HB_INDEX_TO_OFFSET(i) + live_bitmap.heap_begin_;
      while (garbage != 0) {
        int shift = CLZ(garbage);
        garbage &= ~(high_bit >> shift);
        *pb++ = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
      }
      // Make sure that there are always enough slots available for an
      // entire word of one bits.
      if (pb >= &pointer_buf[pointer_buf.size() - kBitsPerWord]) {
        (*callback)(pb - &pointer_buf[0], &pointer_buf[0], arg);
        pb = &pointer_buf[0];
      }
    }
  }
  if (pb > &pointer_buf[0]) {
    (*callback)(pb - &pointer_buf[0], &pointer_buf[0], arg);
  }
}

}  // namespace art