author     Ian Rogers <irogers@google.com>  2014-10-08 12:43:28 -0700
committer  Ian Rogers <irogers@google.com>  2014-10-09 16:05:58 -0700
commit     13735955f39b3b304c37d2b2840663c131262c18 (patch)
tree       0a731ac42b8230f9929172fa3e3d8051874e2b18 /runtime/gc/collector
parent     25b18bbdaa36ff936eb44f228f0518d4223e9d52 (diff)
stdint types all the way!
Change-Id: I4e4ef3a2002fc59ebd9097087f150eaf3f2a7e08
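
For context, the change swaps a project-local byte alias for the fixed-width type from <cstdint> throughout these files. A minimal sketch of the pattern, assuming the old alias was something like typedef uint8_t byte; (the alias and its location are assumptions, not taken from this commit):

// Hedged sketch of the byte -> uint8_t migration pattern; the old byte
// alias mentioned in the comment is an assumption for illustration only.
#include <cstdint>

class BumpPointerExample {
 public:
  // Before (assumed): byte* bump_pointer_;
  // After: the width is explicit at the declaration site, no alias required.
  uint8_t* bump_pointer_ = nullptr;
};

static_assert(sizeof(uint8_t) == 1, "uint8_t is exactly one byte wide");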
Diffstat (limited to 'runtime/gc/collector')
-rw-r--r--  runtime/gc/collector/mark_compact.cc   2
-rw-r--r--  runtime/gc/collector/mark_compact.h    2
-rw-r--r--  runtime/gc/collector/mark_sweep.cc    16
-rw-r--r--  runtime/gc/collector/mark_sweep.h      4
-rw-r--r--  runtime/gc/collector/semi_space.cc     8
-rw-r--r--  runtime/gc/collector/semi_space.h      2
6 files changed, 17 insertions, 17 deletions
diff --git a/runtime/gc/collector/mark_compact.cc b/runtime/gc/collector/mark_compact.cc
index b3bed64..6691b0f 100644
--- a/runtime/gc/collector/mark_compact.cc
+++ b/runtime/gc/collector/mark_compact.cc
@@ -120,7 +120,7 @@ class CalculateObjectForwardingAddressVisitor {
void MarkCompact::CalculateObjectForwardingAddresses() {
TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
// The bump pointer in the space where the next forwarding address will be.
- bump_pointer_ = reinterpret_cast<byte*>(space_->Begin());
+ bump_pointer_ = reinterpret_cast<uint8_t*>(space_->Begin());
// Visit all the marked objects in the bitmap.
CalculateObjectForwardingAddressVisitor visitor(this);
objects_before_forwarding_->VisitMarkedRange(reinterpret_cast<uintptr_t>(space_->Begin()),
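
The forwarding-address pass above walks the marked objects and hands each one the current bump pointer. A simplified stand-in, assuming byte-granular pointer arithmetic and an invented FakeObject layout (not ART's CalculateObjectForwardingAddressVisitor):

// Simplified forwarding-address assignment with a uint8_t* bump pointer.
// FakeObject and its fields are assumptions for the sketch only.
#include <cstdint>
#include <cstddef>

struct FakeObject {
  uintptr_t forwarding_address;  // where the object will live after compaction
  size_t size_bytes;             // object size in bytes, assumed already aligned
};

class ForwardingAddressCalculator {
 public:
  explicit ForwardingAddressCalculator(uint8_t* space_begin) : bump_pointer_(space_begin) {}

  // Assign the next forwarding address, then bump the pointer by the object size.
  void Visit(FakeObject* obj) {
    obj->forwarding_address = reinterpret_cast<uintptr_t>(bump_pointer_);
    bump_pointer_ += obj->size_bytes;  // uint8_t* arithmetic advances in bytes
  }

 private:
  uint8_t* bump_pointer_;  // next free address in the compacted space
};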
diff --git a/runtime/gc/collector/mark_compact.h b/runtime/gc/collector/mark_compact.h
index bb85fa0..f40e870 100644
--- a/runtime/gc/collector/mark_compact.h
+++ b/runtime/gc/collector/mark_compact.h
@@ -227,7 +227,7 @@ class MarkCompact : public GarbageCollector {
std::string collector_name_;
// The bump pointer in the space where the next forwarding address will be.
- byte* bump_pointer_;
+ uint8_t* bump_pointer_;
// How many live objects we have in the space.
size_t live_objects_in_space_;
diff --git a/runtime/gc/collector/mark_sweep.cc b/runtime/gc/collector/mark_sweep.cc
index 930499a..942b556 100644
--- a/runtime/gc/collector/mark_sweep.cc
+++ b/runtime/gc/collector/mark_sweep.cc
@@ -689,7 +689,7 @@ class CardScanTask : public MarkStackTask<false> {
public:
CardScanTask(ThreadPool* thread_pool, MarkSweep* mark_sweep,
accounting::ContinuousSpaceBitmap* bitmap,
- byte* begin, byte* end, byte minimum_age, size_t mark_stack_size,
+ uint8_t* begin, uint8_t* end, uint8_t minimum_age, size_t mark_stack_size,
Object** mark_stack_obj)
: MarkStackTask<false>(thread_pool, mark_sweep, mark_stack_size, mark_stack_obj),
bitmap_(bitmap),
@@ -700,9 +700,9 @@ class CardScanTask : public MarkStackTask<false> {
protected:
accounting::ContinuousSpaceBitmap* const bitmap_;
- byte* const begin_;
- byte* const end_;
- const byte minimum_age_;
+ uint8_t* const begin_;
+ uint8_t* const end_;
+ const uint8_t minimum_age_;
virtual void Finalize() {
delete this;
@@ -730,7 +730,7 @@ size_t MarkSweep::GetThreadCount(bool paused) const {
}
}
-void MarkSweep::ScanGrayObjects(bool paused, byte minimum_age) {
+void MarkSweep::ScanGrayObjects(bool paused, uint8_t minimum_age) {
accounting::CardTable* card_table = GetHeap()->GetCardTable();
ThreadPool* thread_pool = GetHeap()->GetThreadPool();
size_t thread_count = GetThreadCount(paused);
@@ -754,8 +754,8 @@ void MarkSweep::ScanGrayObjects(bool paused, byte minimum_age) {
if (space->GetMarkBitmap() == nullptr) {
continue;
}
- byte* card_begin = space->Begin();
- byte* card_end = space->End();
+ uint8_t* card_begin = space->Begin();
+ uint8_t* card_end = space->End();
// Align up the end address. For example, the image space's end
// may not be card-size-aligned.
card_end = AlignUp(card_end, accounting::CardTable::kCardSize);
@@ -910,7 +910,7 @@ mirror::Object* MarkSweep::IsMarkedCallback(mirror::Object* object, void* arg) {
return nullptr;
}
-void MarkSweep::RecursiveMarkDirtyObjects(bool paused, byte minimum_age) {
+void MarkSweep::RecursiveMarkDirtyObjects(bool paused, uint8_t minimum_age) {
ScanGrayObjects(paused, minimum_age);
ProcessMarkStack(paused);
}
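
minimum_age fits in a uint8_t because each card in the card table is a single byte covering a fixed-size chunk of heap, and scanning gray objects means visiting cards whose byte value is at or above that threshold. A rough sketch under assumed names and an assumed card size (ART's real card table API differs):

// Count cards at or above minimum_age over [heap_begin, heap_end).
// kCardSizeExample and the function name are assumptions for this sketch.
#include <cstdint>
#include <cstddef>

constexpr size_t kCardSizeExample = 128;  // bytes of heap covered per card (assumed)

size_t CountCardsToScan(const uint8_t* card_table, const uint8_t* heap_begin,
                        const uint8_t* heap_end, uint8_t minimum_age) {
  size_t cards = 0;
  for (const uint8_t* addr = heap_begin; addr < heap_end; addr += kCardSizeExample) {
    size_t card_index = static_cast<size_t>(addr - heap_begin) / kCardSizeExample;
    if (card_table[card_index] >= minimum_age) {
      ++cards;  // objects on this card would be re-scanned (blackened)
    }
  }
  return cards;
}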
diff --git a/runtime/gc/collector/mark_sweep.h b/runtime/gc/collector/mark_sweep.h
index 2780099..9ac110d 100644
--- a/runtime/gc/collector/mark_sweep.h
+++ b/runtime/gc/collector/mark_sweep.h
@@ -112,7 +112,7 @@ class MarkSweep : public GarbageCollector {
virtual void BindBitmaps() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// Builds a mark stack with objects on dirty cards and recursively mark until it empties.
- void RecursiveMarkDirtyObjects(bool paused, byte minimum_age)
+ void RecursiveMarkDirtyObjects(bool paused, uint8_t minimum_age)
EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -257,7 +257,7 @@ class MarkSweep : public GarbageCollector {
void PushOnMarkStack(mirror::Object* obj);
// Blackens objects grayed during a garbage collection.
- void ScanGrayObjects(bool paused, byte minimum_age)
+ void ScanGrayObjects(bool paused, uint8_t minimum_age)
EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
diff --git a/runtime/gc/collector/semi_space.cc b/runtime/gc/collector/semi_space.cc
index c8fa869..9459a3b 100644
--- a/runtime/gc/collector/semi_space.cc
+++ b/runtime/gc/collector/semi_space.cc
@@ -437,15 +437,15 @@ static inline size_t CopyAvoidingDirtyingPages(void* dest, const void* src, size
return 0;
}
size_t saved_bytes = 0;
- byte* byte_dest = reinterpret_cast<byte*>(dest);
+ uint8_t* byte_dest = reinterpret_cast<uint8_t*>(dest);
if (kIsDebugBuild) {
for (size_t i = 0; i < size; ++i) {
CHECK_EQ(byte_dest[i], 0U);
}
}
// Process the start of the page. The page must already be dirty, don't bother with checking.
- const byte* byte_src = reinterpret_cast<const byte*>(src);
- const byte* limit = byte_src + size;
+ const uint8_t* byte_src = reinterpret_cast<const uint8_t*>(src);
+ const uint8_t* limit = byte_src + size;
size_t page_remain = AlignUp(byte_dest, kPageSize) - byte_dest;
// Copy the bytes until the start of the next page.
memcpy(dest, src, page_remain);
@@ -481,7 +481,7 @@ mirror::Object* SemiSpace::MarkNonForwardedObject(mirror::Object* obj) {
const size_t object_size = obj->SizeOf();
size_t bytes_allocated;
mirror::Object* forward_address = nullptr;
- if (generational_ && reinterpret_cast<byte*>(obj) < last_gc_to_space_end_) {
+ if (generational_ && reinterpret_cast<uint8_t*>(obj) < last_gc_to_space_end_) {
// If it's allocated before the last GC (older), move
// (pseudo-promote) it to the main free list space (as sort
// of an old generation.)
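
The CopyAvoidingDirtyingPages hunk earlier in this file leans on uint8_t* arithmetic to find the next page boundary before copying. A stripped-down sketch of just that arithmetic, with a local AlignUpPtr helper and an assumed 4 KiB page size (it omits the zero-page check that gives the real function its name):

// Copy the leading partial page, then the rest; AlignUpPtr and kPageSizeExample
// are local assumptions, not ART's AlignUp or kPageSize.
#include <cstdint>
#include <cstddef>
#include <cstring>

constexpr size_t kPageSizeExample = 4096;  // assumed page size

inline uint8_t* AlignUpPtr(uint8_t* p, size_t alignment) {
  uintptr_t value = reinterpret_cast<uintptr_t>(p);
  return reinterpret_cast<uint8_t*>((value + alignment - 1) & ~(alignment - 1));
}

void CopyByPage(void* dest, const void* src, size_t size) {
  uint8_t* byte_dest = reinterpret_cast<uint8_t*>(dest);
  const uint8_t* byte_src = reinterpret_cast<const uint8_t*>(src);
  size_t page_remain = static_cast<size_t>(AlignUpPtr(byte_dest, kPageSizeExample) - byte_dest);
  if (page_remain > size) {
    page_remain = size;  // the whole copy fits before the next page boundary
  }
  std::memcpy(byte_dest, byte_src, page_remain);
  std::memcpy(byte_dest + page_remain, byte_src + page_remain, size - page_remain);
}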
diff --git a/runtime/gc/collector/semi_space.h b/runtime/gc/collector/semi_space.h
index 71a83f2..1c4f1e4 100644
--- a/runtime/gc/collector/semi_space.h
+++ b/runtime/gc/collector/semi_space.h
@@ -228,7 +228,7 @@ class SemiSpace : public GarbageCollector {
// Used for the generational mode. the end/top of the bump
// pointer space at the end of the last collection.
- byte* last_gc_to_space_end_;
+ uint8_t* last_gc_to_space_end_;
// Used for the generational mode. During a collection, keeps track
// of how many bytes of objects have been copied so far from the