author     Vladimir Marko <vmarko@google.com>    2014-03-07 10:50:35 +0000
committer  Gerrit Code Review <noreply-gerritcodereview@google.com>    2014-03-07 10:50:35 +0000
commit     a9d7be62735e3356cef7e8ed797c519134a17061 (patch)
tree       631a2b7985bfdebc99e0b244273700e22036243c /compiler/utils
parent     234897797cf15beb3223dd2a99047add25a42f0a (diff)
parent     83cc7ae96d4176533dd0391a1591d321b0a87f4f (diff)
Merge "Create a scoped arena allocator and use that for LVN."
Diffstat (limited to 'compiler/utils')
-rw-r--r--  compiler/utils/allocation.h                 2
-rw-r--r--  compiler/utils/arena_allocator.cc         143
-rw-r--r--  compiler/utils/arena_allocator.h          120
-rw-r--r--  compiler/utils/arena_bit_vector.cc          4
-rw-r--r--  compiler/utils/arena_bit_vector.h           2
-rw-r--r--  compiler/utils/debug_stack.h              138
-rw-r--r--  compiler/utils/growable_array.h             6
-rw-r--r--  compiler/utils/scoped_arena_allocator.cc  126
-rw-r--r--  compiler/utils/scoped_arena_allocator.h   244
9 files changed, 695 insertions, 90 deletions
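The merge introduces a stack-like allocator for short-lived compiler data: an ArenaStack owns a chain of reusable arenas, and each ScopedArenaAllocator marks the current top of that stack and rewinds it on destruction, so a pass such as local value numbering (LVN) can allocate freely and release everything at once. A minimal usage sketch based only on the classes added below (the function name is hypothetical):

#include "utils/scoped_arena_allocator.h"

namespace art {

// Hypothetical pass driver illustrating the intended allocation pattern.
void RunLocalValueNumbering(ArenaPool* pool) {
  ArenaStack arena_stack(pool);  // Owns the chain of arenas until destroyed.
  {
    ScopedArenaAllocator allocator(&arena_stack);  // Marks the current top.
    void* scratch = allocator.Alloc(256, kArenaAllocMisc);
    // ... use scratch for the duration of the pass ...
  }  // Destructor rewinds the stack; the memory is reused by the next scope.
}

}  // namespace art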
diff --git a/compiler/utils/allocation.h b/compiler/utils/allocation.h
index 07cd397..b0947ca 100644
--- a/compiler/utils/allocation.h
+++ b/compiler/utils/allocation.h
@@ -26,7 +26,7 @@ class ArenaObject {
  public:
   // Allocate a new ArenaObject of 'size' bytes in the Arena.
   void* operator new(size_t size, ArenaAllocator* allocator) {
-    return allocator->Alloc(size, ArenaAllocator::kAllocMisc);
+    return allocator->Alloc(size, kArenaAllocMisc);
   }
 
   void operator delete(void*, size_t) {
diff --git a/compiler/utils/arena_allocator.cc b/compiler/utils/arena_allocator.cc
index 00c3c57..365b094 100644
--- a/compiler/utils/arena_allocator.cc
+++ b/compiler/utils/arena_allocator.cc
@@ -14,6 +14,9 @@
  * limitations under the License.
  */
 
+#include <algorithm>
+#include <numeric>
+
 #include "arena_allocator.h"
 #include "base/logging.h"
 #include "base/mutex.h"
@@ -28,7 +31,7 @@ static constexpr bool kUseMemSet = true && kUseMemMap;
 static constexpr size_t kValgrindRedZoneBytes = 8;
 constexpr size_t Arena::kDefaultSize;
 
-static const char* alloc_names[ArenaAllocator::kNumAllocKinds] = {
+static const char* alloc_names[kNumArenaAllocKinds] = {
   "Misc       ",
   "BasicBlock ",
   "LIR        ",
@@ -42,8 +45,69 @@ static const char* alloc_names[ArenaAllocator::kNumAllocKinds] = {
   "RegAlloc   ",
   "Data       ",
   "Preds      ",
+  "STL        ",
 };
 
+template <bool kCount>
+ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
+    : num_allocations_(0u) {
+  std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
+}
+
+template <bool kCount>
+void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
+  num_allocations_ = other.num_allocations_;
+  std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
+}
+
+template <bool kCount>
+void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
+  alloc_stats_[kind] += bytes;
+  ++num_allocations_;
+}
+
+template <bool kCount>
+size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
+  return num_allocations_;
+}
+
+template <bool kCount>
+size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
+  const size_t init = 0u;  // Initial value of the correct type.
+  return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
+}
+
+template <bool kCount>
+void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
+                                           ssize_t lost_bytes_adjustment) const {
+  size_t malloc_bytes = 0u;
+  size_t lost_bytes = 0u;
+  size_t num_arenas = 0u;
+  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
+    malloc_bytes += arena->Size();
+    lost_bytes += arena->RemainingSpace();
+    ++num_arenas;
+  }
+  // The lost_bytes_adjustment is used to make up for the fact that the current arena
+  // may not have the bytes_allocated_ updated correctly.
+  lost_bytes += lost_bytes_adjustment;
+  const size_t bytes_allocated = BytesAllocated();
+  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
+     << ", lost: " << lost_bytes << "\n";
+  size_t num_allocations = ArenaAllocatorStats::NumAllocations();
+  if (num_allocations != 0) {
+    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
+       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
+  }
+  os << "===== Allocation by kind\n";
+  for (int i = 0; i < kNumArenaAllocKinds; i++) {
+    os << alloc_names[i] << std::setw(10) << alloc_stats_[i] << "\n";
+  }
+}
+
+// Explicitly instantiate the used implementation.
+template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;
+
 Arena::Arena(size_t size)
     : bytes_allocated_(0),
       map_(nullptr),
@@ -110,24 +174,26 @@ Arena* ArenaPool::AllocArena(size_t size) {
   return ret;
 }
 
-void ArenaPool::FreeArena(Arena* arena) {
-  Thread* self = Thread::Current();
+void ArenaPool::FreeArenaChain(Arena* first) {
   if (UNLIKELY(RUNNING_ON_VALGRIND > 0)) {
-    VALGRIND_MAKE_MEM_UNDEFINED(arena->memory_, arena->bytes_allocated_);
+    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
+      VALGRIND_MAKE_MEM_UNDEFINED(arena->memory_, arena->bytes_allocated_);
+    }
   }
-  {
+  if (first != nullptr) {
+    Arena* last = first;
+    while (last->next_ != nullptr) {
+      last = last->next_;
+    }
+    Thread* self = Thread::Current();
     MutexLock lock(self, lock_);
-    arena->next_ = free_arenas_;
-    free_arenas_ = arena;
+    last->next_ = free_arenas_;
+    free_arenas_ = first;
   }
 }
 
 size_t ArenaAllocator::BytesAllocated() const {
-  size_t total = 0;
-  for (int i = 0; i < kNumAllocKinds; i++) {
-    total += alloc_stats_[i];
-  }
-  return total;
+  return ArenaAllocatorStats::BytesAllocated();
 }
 
 ArenaAllocator::ArenaAllocator(ArenaPool* pool)
@@ -136,9 +202,7 @@ ArenaAllocator::ArenaAllocator(ArenaPool* pool)
       end_(nullptr),
       ptr_(nullptr),
       arena_head_(nullptr),
-      num_allocations_(0),
       running_on_valgrind_(RUNNING_ON_VALGRIND > 0) {
-  memset(&alloc_stats_[0], 0, sizeof(alloc_stats_));
 }
 
 void ArenaAllocator::UpdateBytesAllocated() {
@@ -158,10 +222,7 @@ void* ArenaAllocator::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
       return nullptr;
     }
   }
-  if (kCountAllocations) {
-    alloc_stats_[kind] += rounded_bytes;
-    ++num_allocations_;
-  }
+  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
   uint8_t* ret = ptr_;
   ptr_ += rounded_bytes;
   // Check that the memory is already zeroed out.
@@ -175,11 +236,7 @@
 ArenaAllocator::~ArenaAllocator() {
   // Reclaim all the arenas by giving them back to the thread pool.
   UpdateBytesAllocated();
-  while (arena_head_ != nullptr) {
-    Arena* arena = arena_head_;
-    arena_head_ = arena_head_->next_;
-    pool_->FreeArena(arena);
-  }
+  pool_->FreeArenaChain(arena_head_);
 }
 
 void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
@@ -192,30 +249,24 @@ void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
   end_ = new_arena->End();
 }
 
+MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
+                   ssize_t lost_bytes_adjustment)
+    : name_(name),
+      stats_(stats),
+      first_arena_(first_arena),
+      lost_bytes_adjustment_(lost_bytes_adjustment) {
+}
+
+void MemStats::Dump(std::ostream& os) const {
+  os << name_ << " stats:\n";
+  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
+}
+
 // Dump memory usage stats.
-void ArenaAllocator::DumpMemStats(std::ostream& os) const {
-  size_t malloc_bytes = 0;
-  // Start out with how many lost bytes we have in the arena we are currently allocating into.
-  size_t lost_bytes(end_ - ptr_);
-  size_t num_arenas = 0;
-  for (Arena* arena = arena_head_; arena != nullptr; arena = arena->next_) {
-    malloc_bytes += arena->Size();
-    if (arena != arena_head_) {
-      lost_bytes += arena->RemainingSpace();
-    }
-    ++num_arenas;
-  }
-  const size_t bytes_allocated = BytesAllocated();
-  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
-     << ", lost: " << lost_bytes << "\n";
-  if (num_allocations_ != 0) {
-    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
-       << num_allocations_ << ", avg size: " << bytes_allocated / num_allocations_ << "\n";
-  }
-  os << "===== Allocation by kind\n";
-  for (int i = 0; i < kNumAllocKinds; i++) {
-    os << alloc_names[i] << std::setw(10) << alloc_stats_[i] << "\n";
-  }
+MemStats ArenaAllocator::GetMemStats() const {
+  ssize_t lost_bytes_adjustment =
+      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
+  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
 }
 
 }  // namespace art
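The per-kind statistics are now compiled in or out through the kCount template parameter: ArenaAllocator privately inherits ArenaAllocatorStats, i.e. ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>, and with the default value of false the empty specialization contributes no storage thanks to the empty base optimization. A standalone sketch of the pattern with simplified, hypothetical names:

#include <stddef.h>

template <bool kCount> struct StatsImpl;

// Release configuration: no state, so RecordAlloc() compiles to nothing.
template <> struct StatsImpl<false> {
  void RecordAlloc(size_t bytes, int kind) { (void)bytes; (void)kind; }
};

// Tuning configuration: real per-kind byte counters.
template <> struct StatsImpl<true> {
  void RecordAlloc(size_t bytes, int kind) { stats_[kind] += bytes; }
  size_t stats_[16] = {};
};

// Flip the base to StatsImpl<true> to enable counting; with <false> the
// allocator's layout does not grow (empty base optimization).
struct Allocator : private StatsImpl<false> {
  void* Alloc(size_t bytes, int kind) {
    RecordAlloc(bytes, kind);  // No-op in the <false> configuration.
    return nullptr;            // Actual arena bookkeeping elided in this sketch.
  }
};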
diff --git a/compiler/utils/arena_allocator.h b/compiler/utils/arena_allocator.h
index 56cedfe..a6b74f7 100644
--- a/compiler/utils/arena_allocator.h
+++ b/compiler/utils/arena_allocator.h
@@ -20,6 +20,7 @@
 #include <stdint.h>
 #include <stddef.h>
 
+#include "base/macros.h"
 #include "base/mutex.h"
 #include "mem_map.h"
 
@@ -28,6 +29,70 @@ namespace art {
 class Arena;
 class ArenaPool;
 class ArenaAllocator;
+class ArenaStack;
+class ScopedArenaAllocator;
+class MemStats;
+
+static constexpr bool kArenaAllocatorCountAllocations = false;
+
+// Type of allocation for memory tuning.
+enum ArenaAllocKind {
+  kArenaAllocMisc,
+  kArenaAllocBB,
+  kArenaAllocLIR,
+  kArenaAllocMIR,
+  kArenaAllocDFInfo,
+  kArenaAllocGrowableArray,
+  kArenaAllocGrowableBitMap,
+  kArenaAllocDalvikToSSAMap,
+  kArenaAllocDebugInfo,
+  kArenaAllocSuccessor,
+  kArenaAllocRegAlloc,
+  kArenaAllocData,
+  kArenaAllocPredecessors,
+  kArenaAllocSTL,
+  kNumArenaAllocKinds
+};
+
+template <bool kCount>
+class ArenaAllocatorStatsImpl;
+
+template <>
+class ArenaAllocatorStatsImpl<false> {
+ public:
+  ArenaAllocatorStatsImpl() = default;
+  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
+  ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;
+
+  void Copy(const ArenaAllocatorStatsImpl& other) { UNUSED(other); }
+  void RecordAlloc(size_t bytes, ArenaAllocKind kind) { UNUSED(bytes); UNUSED(kind); }
+  size_t NumAllocations() const { return 0u; }
+  size_t BytesAllocated() const { return 0u; }
+  void Dump(std::ostream& os, const Arena* first, ssize_t lost_bytes_adjustment) const {
+    UNUSED(os); UNUSED(first); UNUSED(lost_bytes_adjustment);
+  }
+};
+
+template <bool kCount>
+class ArenaAllocatorStatsImpl {
+ public:
+  ArenaAllocatorStatsImpl();
+  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
+  ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;
+
+  void Copy(const ArenaAllocatorStatsImpl& other);
+  void RecordAlloc(size_t bytes, ArenaAllocKind kind);
+  size_t NumAllocations() const;
+  size_t BytesAllocated() const;
+  void Dump(std::ostream& os, const Arena* first, ssize_t lost_bytes_adjustment) const;
+
+ private:
+  size_t num_allocations_;
+  // TODO: Use std::array<size_t, kNumArenaAllocKinds> from C++11 when we upgrade the STL.
+  size_t alloc_stats_[kNumArenaAllocKinds];  // Bytes used by various allocation kinds.
+};
+
+typedef ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations> ArenaAllocatorStats;
 
 class Arena {
  public:
@@ -59,6 +124,9 @@ class Arena {
   Arena* next_;
   friend class ArenaPool;
   friend class ArenaAllocator;
+  friend class ArenaStack;
+  friend class ScopedArenaAllocator;
+  template <bool kCount> friend class ArenaAllocatorStatsImpl;
   DISALLOW_COPY_AND_ASSIGN(Arena);
 };
 
@@ -67,7 +135,7 @@ class ArenaPool {
   ArenaPool();
   ~ArenaPool();
   Arena* AllocArena(size_t size);
-  void FreeArena(Arena* arena);
+  void FreeArenaChain(Arena* first);
 
  private:
   Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
@@ -75,28 +143,8 @@ class ArenaPool {
   DISALLOW_COPY_AND_ASSIGN(ArenaPool);
 };
 
-class ArenaAllocator {
+class ArenaAllocator : private ArenaAllocatorStats {
  public:
-  // Type of allocation for memory tuning.
-  enum ArenaAllocKind {
-    kAllocMisc,
-    kAllocBB,
-    kAllocLIR,
-    kAllocMIR,
-    kAllocDFInfo,
-    kAllocGrowableArray,
-    kAllocGrowableBitMap,
-    kAllocDalvikToSSAMap,
-    kAllocDebugInfo,
-    kAllocSuccessor,
-    kAllocRegAlloc,
-    kAllocData,
-    kAllocPredecessors,
-    kNumAllocKinds
-  };
-
-  static constexpr bool kCountAllocations = false;
-
   explicit ArenaAllocator(ArenaPool* pool);
   ~ArenaAllocator();
 
@@ -113,10 +161,7 @@ class ArenaAllocator {
         return nullptr;
       }
     }
-    if (kCountAllocations) {
-      alloc_stats_[kind] += bytes;
-      ++num_allocations_;
-    }
+    ArenaAllocatorStats::RecordAlloc(bytes, kind);
     uint8_t* ret = ptr_;
     ptr_ += bytes;
     return ret;
@@ -125,7 +170,7 @@ class ArenaAllocator {
   void* AllocValgrind(size_t bytes, ArenaAllocKind kind);
   void ObtainNewArenaForAllocation(size_t allocation_size);
   size_t BytesAllocated() const;
-  void DumpMemStats(std::ostream& os) const;
+  MemStats GetMemStats() const;
 
  private:
   void UpdateBytesAllocated();
@@ -135,21 +180,22 @@ class ArenaAllocator {
   uint8_t* end_;
   uint8_t* ptr_;
   Arena* arena_head_;
-  size_t num_allocations_;
-  size_t alloc_stats_[kNumAllocKinds];  // Bytes used by various allocation kinds.
   bool running_on_valgrind_;
 
   DISALLOW_COPY_AND_ASSIGN(ArenaAllocator);
};  // ArenaAllocator
 
-struct MemStats {
-  public:
-    void Dump(std::ostream& os) const {
-      arena_.DumpMemStats(os);
-    }
-    explicit MemStats(const ArenaAllocator &arena) : arena_(arena) {}
-  private:
-    const ArenaAllocator &arena_;
+class MemStats {
+ public:
+  MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
+           ssize_t lost_bytes_adjustment = 0);
+  void Dump(std::ostream& os) const;
+
+ private:
+  const char* const name_;
+  const ArenaAllocatorStats* const stats_;
+  const Arena* const first_arena_;
+  const ssize_t lost_bytes_adjustment_;
 };  // MemStats
 
 }  // namespace art
diff --git a/compiler/utils/arena_bit_vector.cc b/compiler/utils/arena_bit_vector.cc
index 220ff14..eff9778 100644
--- a/compiler/utils/arena_bit_vector.cc
+++ b/compiler/utils/arena_bit_vector.cc
@@ -25,13 +25,13 @@ class ArenaBitVectorAllocator : public Allocator {
   ~ArenaBitVectorAllocator() {}
 
   virtual void* Alloc(size_t size) {
-    return arena_->Alloc(size, ArenaAllocator::kAllocGrowableBitMap);
+    return arena_->Alloc(size, kArenaAllocGrowableBitMap);
   }
 
   virtual void Free(void*) {}  // Nop.
 
   static void* operator new(size_t size, ArenaAllocator* arena) {
-    return arena->Alloc(sizeof(ArenaBitVectorAllocator), ArenaAllocator::kAllocGrowableBitMap);
+    return arena->Alloc(sizeof(ArenaBitVectorAllocator), kArenaAllocGrowableBitMap);
   }
   static void operator delete(void* p) {}  // Nop.
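MemStats is no longer a thin wrapper around one ArenaAllocator; it captures a name, a stats snapshot, and the head of an arena chain, so the same dump code serves both ArenaAllocator and the new ArenaStack. A small sketch of how a caller might print the breakdown (the helper is hypothetical; note that with kArenaAllocatorCountAllocations at its default of false, the stats base's Dump() is a no-op and only the header line appears):

#include <iostream>

namespace art {

// Hypothetical helper: dump an allocator's memory statistics to a stream.
void DumpAllocatorStats(const ArenaAllocator& allocator, std::ostream& os) {
  MemStats stats = allocator.GetMemStats();
  stats.Dump(os);  // Prints "ArenaAllocator stats:" plus the per-kind breakdown.
}

}  // namespace art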
diff --git a/compiler/utils/arena_bit_vector.h b/compiler/utils/arena_bit_vector.h
index 6c14617..1a3d6a3 100644
--- a/compiler/utils/arena_bit_vector.h
+++ b/compiler/utils/arena_bit_vector.h
@@ -55,7 +55,7 @@ class ArenaBitVector : public BitVector {
   ~ArenaBitVector() {}
 
   static void* operator new(size_t size, ArenaAllocator* arena) {
-    return arena->Alloc(sizeof(ArenaBitVector), ArenaAllocator::kAllocGrowableBitMap);
+    return arena->Alloc(sizeof(ArenaBitVector), kArenaAllocGrowableBitMap);
   }
   static void operator delete(void* p) {}  // Nop.
diff --git a/compiler/utils/debug_stack.h b/compiler/utils/debug_stack.h
new file mode 100644
index 0000000..2e02b43
--- /dev/null
+++ b/compiler/utils/debug_stack.h
@@ -0,0 +1,138 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_COMPILER_UTILS_DEBUG_STACK_H_
+#define ART_COMPILER_UTILS_DEBUG_STACK_H_
+
+#include "base/logging.h"
+#include "base/macros.h"
+#include "globals.h"
+
+namespace art {
+
+// Helper classes for reference counting to enforce construction/destruction order and
+// usage of the top element of a stack in debug mode with no overhead in release mode.
+
+// Reference counter. No references allowed in destructor or in explicitly called CheckNoRefs().
+template <bool kIsDebug>
+class DebugStackRefCounterImpl;
+// Reference. Allows an explicit check that it's the top reference.
+template <bool kIsDebug>
+class DebugStackReferenceImpl;
+// Indirect top reference. Checks that the reference is the top reference when used.
+template <bool kIsDebug>
+class DebugStackIndirectTopRefImpl;
+
+typedef DebugStackRefCounterImpl<kIsDebugBuild> DebugStackRefCounter;
+typedef DebugStackReferenceImpl<kIsDebugBuild> DebugStackReference;
+typedef DebugStackIndirectTopRefImpl<kIsDebugBuild> DebugStackIndirectTopRef;
+
+// Non-debug mode specializations. This should be optimized away.
+
+template <>
+class DebugStackRefCounterImpl<false> {
+ public:
+  size_t IncrementRefCount() { return 0u; }
+  void DecrementRefCount() { }
+  size_t GetRefCount() const { return 0u; }
+  void CheckNoRefs() const { }
+};
+
+template <>
+class DebugStackReferenceImpl<false> {
+ public:
+  explicit DebugStackReferenceImpl(DebugStackRefCounterImpl<false>* counter) { UNUSED(counter); }
+  DebugStackReferenceImpl(const DebugStackReferenceImpl& other) = default;
+  DebugStackReferenceImpl& operator=(const DebugStackReferenceImpl& other) = default;
+  void CheckTop() { }
+};
+
+template <>
+class DebugStackIndirectTopRefImpl<false> {
+ public:
+  explicit DebugStackIndirectTopRefImpl(DebugStackReferenceImpl<false>* ref) { UNUSED(ref); }
+  DebugStackIndirectTopRefImpl(const DebugStackIndirectTopRefImpl& other) = default;
+  DebugStackIndirectTopRefImpl& operator=(const DebugStackIndirectTopRefImpl& other) = default;
+  void CheckTop() { }
+};
+
+// Debug mode versions.
+
+template <bool kIsDebug>
+class DebugStackRefCounterImpl {
+ public:
+  DebugStackRefCounterImpl() : ref_count_(0u) { }
+  ~DebugStackRefCounterImpl() { CheckNoRefs(); }
+  size_t IncrementRefCount() { return ++ref_count_; }
+  void DecrementRefCount() { --ref_count_; }
+  size_t GetRefCount() const { return ref_count_; }
+  void CheckNoRefs() const { CHECK_EQ(ref_count_, 0u); }
+
+ private:
+  size_t ref_count_;
+};
+
+template <bool kIsDebug>
+class DebugStackReferenceImpl {
+ public:
+  explicit DebugStackReferenceImpl(DebugStackRefCounterImpl<kIsDebug>* counter)
+      : counter_(counter), ref_count_(counter->IncrementRefCount()) {
+  }
+  DebugStackReferenceImpl(const DebugStackReferenceImpl& other)
+      : counter_(other.counter_), ref_count_(counter_->IncrementRefCount()) {
+  }
+  DebugStackReferenceImpl& operator=(const DebugStackReferenceImpl& other) {
+    CHECK(counter_ == other.counter_);
+    return *this;
+  }
+  ~DebugStackReferenceImpl() { counter_->DecrementRefCount(); }
+  void CheckTop() { CHECK_EQ(counter_->GetRefCount(), ref_count_); }
+
+ private:
+  DebugStackRefCounterImpl<true>* counter_;
+  size_t ref_count_;
+};
+
+template <bool kIsDebug>
+class DebugStackIndirectTopRefImpl {
+ public:
+  explicit DebugStackIndirectTopRefImpl(DebugStackReferenceImpl<kIsDebug>* ref)
+      : ref_(ref) {
+    CheckTop();
+  }
+  DebugStackIndirectTopRefImpl(const DebugStackIndirectTopRefImpl& other)
+      : ref_(other.ref_) {
+    CheckTop();
+  }
+  DebugStackIndirectTopRefImpl& operator=(const DebugStackIndirectTopRefImpl& other) {
+    CHECK(ref_ == other.ref_);
+    CheckTop();
+    return *this;
+  }
+  ~DebugStackIndirectTopRefImpl() {
+    CheckTop();
+  }
+  void CheckTop() {
+    ref_->CheckTop();
+  }
+
+ private:
+  DebugStackReferenceImpl<kIsDebug>* ref_;
+};
+
+}  // namespace art
+
+#endif  // ART_COMPILER_UTILS_DEBUG_STACK_H_
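In debug builds (kIsDebugBuild) these helpers turn out-of-order construction, destruction, or use into a CHECK failure at the offending call site; in release builds the empty specializations cost nothing. A sketch of the protocol they enforce (variable names are illustrative):

// Debug-build behavior of the classes above.
DebugStackRefCounter counter;
{
  DebugStackReference ref1(&counter);    // Reference count becomes 1.
  {
    DebugStackReference ref2(&counter);  // Reference count becomes 2.
    ref2.CheckTop();                     // OK: ref2 is the top reference.
    // ref1.CheckTop() here would CHECK-fail: ref1 is no longer the top.
  }                                      // ref2 destroyed, count back to 1.
  ref1.CheckTop();                       // OK again.
}                                        // ref1 destroyed, count back to 0.
counter.CheckNoRefs();                   // Passes: destruction was LIFO.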
diff --git a/compiler/utils/growable_array.h b/compiler/utils/growable_array.h
index 82b6a60..a7d1f0e 100644
--- a/compiler/utils/growable_array.h
+++ b/compiler/utils/growable_array.h
@@ -75,7 +75,7 @@ class GrowableArray {
         num_used_(0),
         kind_(kind) {
       elem_list_ = static_cast<T*>(arena_->Alloc(sizeof(T) * init_length,
-                                                 ArenaAllocator::kAllocGrowableArray));
+                                                 kArenaAllocGrowableArray));
     };
 
@@ -89,7 +89,7 @@ class GrowableArray {
         target_length = new_length;
       }
       T* new_array = static_cast<T*>(arena_->Alloc(sizeof(T) * target_length,
-                                                   ArenaAllocator::kAllocGrowableArray));
+                                                   kArenaAllocGrowableArray));
       memcpy(new_array, elem_list_, sizeof(T) * num_allocated_);
       num_allocated_ = target_length;
       elem_list_ = new_array;
@@ -181,7 +181,7 @@ class GrowableArray {
     T* GetRawStorage() const { return elem_list_; }
 
     static void* operator new(size_t size, ArenaAllocator* arena) {
-      return arena->Alloc(sizeof(GrowableArray<T>), ArenaAllocator::kAllocGrowableArray);
+      return arena->Alloc(sizeof(GrowableArray<T>), kArenaAllocGrowableArray);
     };
     static void operator delete(void* p) {}  // Nop.
diff --git a/compiler/utils/scoped_arena_allocator.cc b/compiler/utils/scoped_arena_allocator.cc
new file mode 100644
index 0000000..ee3b07e
--- /dev/null
+++ b/compiler/utils/scoped_arena_allocator.cc
@@ -0,0 +1,126 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "scoped_arena_allocator.h"
+
+#include "utils/arena_allocator.h"
+#include <memcheck/memcheck.h>
+
+namespace art {
+
+static constexpr size_t kValgrindRedZoneBytes = 8;
+
+ArenaStack::ArenaStack(ArenaPool* arena_pool)
+    : DebugStackRefCounter(),
+      stats_and_pool_(arena_pool),
+      bottom_arena_(nullptr),
+      top_arena_(nullptr),
+      top_ptr_(nullptr),
+      top_end_(nullptr),
+      running_on_valgrind_(RUNNING_ON_VALGRIND > 0) {
+}
+
+ArenaStack::~ArenaStack() {
+  stats_and_pool_.pool->FreeArenaChain(bottom_arena_);
+}
+
+MemStats ArenaStack::GetPeakStats() const {
+  DebugStackRefCounter::CheckNoRefs();
+  return MemStats("ArenaStack peak", static_cast<const TaggedStats<Peak>*>(&stats_and_pool_),
+                  bottom_arena_);
+}
+
+uint8_t* ArenaStack::AllocateFromNextArena(size_t rounded_bytes) {
+  UpdateBytesAllocated();
+  size_t allocation_size = std::max(Arena::kDefaultSize, rounded_bytes);
+  if (UNLIKELY(top_arena_ == nullptr)) {
+    top_arena_ = bottom_arena_ = stats_and_pool_.pool->AllocArena(allocation_size);
+    top_arena_->next_ = nullptr;
+  } else if (top_arena_->next_ != nullptr && top_arena_->next_->Size() >= allocation_size) {
+    top_arena_ = top_arena_->next_;
+  } else {
+    Arena* tail = top_arena_->next_;
+    top_arena_->next_ = stats_and_pool_.pool->AllocArena(allocation_size);
+    top_arena_ = top_arena_->next_;
+    top_arena_->next_ = tail;
+  }
+  top_end_ = top_arena_->End();
+  // top_ptr_ shall be updated by ScopedArenaAllocator.
+  return top_arena_->Begin();
+}
+
+void ArenaStack::UpdatePeakStatsAndRestore(const ArenaAllocatorStats& restore_stats) {
+  if (PeakStats()->BytesAllocated() < CurrentStats()->BytesAllocated()) {
+    PeakStats()->Copy(*CurrentStats());
+  }
+  CurrentStats()->Copy(restore_stats);
+}
+
+void ArenaStack::UpdateBytesAllocated() {
+  if (top_arena_ != nullptr) {
+    // Update how many bytes we have allocated into the arena so that the arena pool knows how
+    // much memory to zero out. Though ScopedArenaAllocator doesn't guarantee the memory is
+    // zero-initialized, the Arena may be reused by ArenaAllocator which does guarantee this.
+    size_t allocated = static_cast<size_t>(top_ptr_ - top_arena_->Begin());
+    if (top_arena_->bytes_allocated_ < allocated) {
+      top_arena_->bytes_allocated_ = allocated;
+    }
+  }
+}
+
+void* ArenaStack::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
+  size_t rounded_bytes = (bytes + kValgrindRedZoneBytes + 3) & ~3;
+  uint8_t* ptr = top_ptr_;
+  if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
+    ptr = AllocateFromNextArena(rounded_bytes);
+  }
+  CurrentStats()->RecordAlloc(bytes, kind);
+  top_ptr_ = ptr + rounded_bytes;
+  VALGRIND_MAKE_MEM_NOACCESS(ptr + bytes, rounded_bytes - bytes);
+  return ptr;
+}
+
+ScopedArenaAllocator::ScopedArenaAllocator(ArenaStack* arena_stack)
+    : DebugStackReference(arena_stack),
+      DebugStackRefCounter(),
+      ArenaAllocatorStats(*arena_stack->CurrentStats()),
+      arena_stack_(arena_stack),
+      mark_arena_(arena_stack->top_arena_),
+      mark_ptr_(arena_stack->top_ptr_),
+      mark_end_(arena_stack->top_end_) {
+}
+
+ScopedArenaAllocator::~ScopedArenaAllocator() {
+  Reset();
+}
+
+void ScopedArenaAllocator::Reset() {
+  DebugStackReference::CheckTop();
+  DebugStackRefCounter::CheckNoRefs();
+  arena_stack_->UpdatePeakStatsAndRestore(*this);
+  arena_stack_->UpdateBytesAllocated();
+  if (LIKELY(mark_arena_ != nullptr)) {
+    arena_stack_->top_arena_ = mark_arena_;
+    arena_stack_->top_ptr_ = mark_ptr_;
+    arena_stack_->top_end_ = mark_end_;
+  } else if (arena_stack_->bottom_arena_ != nullptr) {
+    mark_arena_ = arena_stack_->top_arena_ = arena_stack_->bottom_arena_;
+    mark_ptr_ = arena_stack_->top_ptr_ = mark_arena_->Begin();
+    mark_end_ = arena_stack_->top_end_ = mark_arena_->End();
+  }
+}
+
+}  // namespace art
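Reset() rewinds the arena stack to the mark captured at construction and folds the scope's counts into the peak statistics, so one allocator can be reused across iterations without giving up its stack position. A sketch of that pattern (the function and its arguments are hypothetical):

namespace art {

// Hypothetical per-block analysis: each iteration's allocations are
// reclaimed by Reset() and the same memory is reused for the next block.
void ProcessBasicBlocks(ArenaStack* arena_stack, size_t block_count) {
  ScopedArenaAllocator allocator(arena_stack);
  for (size_t i = 0u; i != block_count; ++i) {
    void* per_block_data = allocator.Alloc(1024, kArenaAllocMisc);
    // ... analyze block i using per_block_data ...
    allocator.Reset();  // Rewind to the construction-time mark.
  }
}

}  // namespace art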
diff --git a/compiler/utils/scoped_arena_allocator.h b/compiler/utils/scoped_arena_allocator.h
new file mode 100644
index 0000000..24a8afe
--- /dev/null
+++ b/compiler/utils/scoped_arena_allocator.h
@@ -0,0 +1,244 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_COMPILER_UTILS_SCOPED_ARENA_ALLOCATOR_H_
+#define ART_COMPILER_UTILS_SCOPED_ARENA_ALLOCATOR_H_
+
+#include "base/logging.h"
+#include "base/macros.h"
+#include "utils/arena_allocator.h"
+#include "utils/debug_stack.h"
+#include "globals.h"
+
+namespace art {
+
+class ArenaStack;
+class ScopedArenaAllocator;
+
+template <typename T>
+class ScopedArenaAllocatorAdapter;
+
+// Holds a list of Arenas for use by ScopedArenaAllocator stack.
+class ArenaStack : private DebugStackRefCounter {
+ public:
+  explicit ArenaStack(ArenaPool* arena_pool);
+  ~ArenaStack();
+
+  size_t PeakBytesAllocated() {
+    return PeakStats()->BytesAllocated();
+  }
+
+  MemStats GetPeakStats() const;
+
+ private:
+  struct Peak;
+  struct Current;
+  template <typename Tag> struct TaggedStats : ArenaAllocatorStats { };
+  struct StatsAndPool : TaggedStats<Peak>, TaggedStats<Current> {
+    explicit StatsAndPool(ArenaPool* arena_pool) : pool(arena_pool) { }
+    ArenaPool* const pool;
+  };
+
+  ArenaAllocatorStats* PeakStats() {
+    return static_cast<TaggedStats<Peak>*>(&stats_and_pool_);
+  }
+
+  ArenaAllocatorStats* CurrentStats() {
+    return static_cast<TaggedStats<Current>*>(&stats_and_pool_);
+  }
+
+  // Private - access via ScopedArenaAllocator or ScopedArenaAllocatorAdapter.
+  void* Alloc(size_t bytes, ArenaAllocKind kind) ALWAYS_INLINE {
+    if (UNLIKELY(running_on_valgrind_)) {
+      return AllocValgrind(bytes, kind);
+    }
+    size_t rounded_bytes = (bytes + 3) & ~3;
+    uint8_t* ptr = top_ptr_;
+    if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
+      ptr = AllocateFromNextArena(rounded_bytes);
+    }
+    CurrentStats()->RecordAlloc(bytes, kind);
+    top_ptr_ = ptr + rounded_bytes;
+    return ptr;
+  }
+
+  uint8_t* AllocateFromNextArena(size_t rounded_bytes);
+  void UpdatePeakStatsAndRestore(const ArenaAllocatorStats& restore_stats);
+  void UpdateBytesAllocated();
+  void* AllocValgrind(size_t bytes, ArenaAllocKind kind);
+
+  StatsAndPool stats_and_pool_;
+  Arena* bottom_arena_;
+  Arena* top_arena_;
+  uint8_t* top_ptr_;
+  uint8_t* top_end_;
+
+  const bool running_on_valgrind_;
+
+  friend class ScopedArenaAllocator;
+  template <typename T>
+  friend class ScopedArenaAllocatorAdapter;
+
+  DISALLOW_COPY_AND_ASSIGN(ArenaStack);
+};
+
+class ScopedArenaAllocator
+    : private DebugStackReference, private DebugStackRefCounter, private ArenaAllocatorStats {
+ public:
+  // Create a ScopedArenaAllocator directly on the ArenaStack when the scope of
+  // the allocator is not exactly a C++ block scope. For example, an optimization
+  // pass can create the scoped allocator in Start() and destroy it in End().
+  static ScopedArenaAllocator* Create(ArenaStack* arena_stack) {
+    void* addr = arena_stack->Alloc(sizeof(ScopedArenaAllocator), kArenaAllocMisc);
+    ScopedArenaAllocator* allocator = new(addr) ScopedArenaAllocator(arena_stack);
+    allocator->mark_ptr_ = reinterpret_cast<uint8_t*>(addr);
+    return allocator;
+  }
+
+  explicit ScopedArenaAllocator(ArenaStack* arena_stack);
+  ~ScopedArenaAllocator();
+
+  void Reset();
+
+  void* Alloc(size_t bytes, ArenaAllocKind kind) ALWAYS_INLINE {
+    DebugStackReference::CheckTop();
+    return arena_stack_->Alloc(bytes, kind);
+  }
+
+  // ScopedArenaAllocatorAdapter is incomplete here, we need to define this later.
+  ScopedArenaAllocatorAdapter<void> Adapter();
+
+  // Allow a delete-expression to destroy but not deallocate allocators created by Create().
+  static void operator delete(void* ptr) { UNUSED(ptr); }
+
+ private:
+  ArenaStack* const arena_stack_;
+  Arena* mark_arena_;
+  uint8_t* mark_ptr_;
+  uint8_t* mark_end_;
+
+  template <typename T>
+  friend class ScopedArenaAllocatorAdapter;
+
+  DISALLOW_COPY_AND_ASSIGN(ScopedArenaAllocator);
+};
+
+template <>
+class ScopedArenaAllocatorAdapter<void>
+    : private DebugStackReference, private DebugStackIndirectTopRef {
+ public:
+  typedef void value_type;
+  typedef void* pointer;
+  typedef const void* const_pointer;
+
+  template <typename U>
+  struct rebind {
+    typedef ScopedArenaAllocatorAdapter<U> other;
+  };
+
+  explicit ScopedArenaAllocatorAdapter(ScopedArenaAllocator* arena_allocator)
+      : DebugStackReference(arena_allocator),
+        DebugStackIndirectTopRef(arena_allocator),
+        arena_stack_(arena_allocator->arena_stack_) {
+  }
+  template <typename U>
+  ScopedArenaAllocatorAdapter(const ScopedArenaAllocatorAdapter<U>& other)
+      : DebugStackReference(other),
+        DebugStackIndirectTopRef(other),
+        arena_stack_(other.arena_stack_) {
+  }
+  ScopedArenaAllocatorAdapter(const ScopedArenaAllocatorAdapter& other) = default;
+  ScopedArenaAllocatorAdapter& operator=(const ScopedArenaAllocatorAdapter& other) = default;
+  ~ScopedArenaAllocatorAdapter() = default;
+
+ private:
+  ArenaStack* arena_stack_;
+
+  template <typename U>
+  friend class ScopedArenaAllocatorAdapter;
+};
+
+// Adapter for use of ScopedArenaAllocator in STL containers.
+template <typename T>
+class ScopedArenaAllocatorAdapter : private DebugStackReference, private DebugStackIndirectTopRef {
+ public:
+  typedef T value_type;
+  typedef T* pointer;
+  typedef T& reference;
+  typedef const T* const_pointer;
+  typedef const T& const_reference;
+  typedef size_t size_type;
+  typedef ptrdiff_t difference_type;
+
+  template <typename U>
+  struct rebind {
+    typedef ScopedArenaAllocatorAdapter<U> other;
+  };
+
+  explicit ScopedArenaAllocatorAdapter(ScopedArenaAllocator* arena_allocator)
+      : DebugStackReference(arena_allocator),
+        DebugStackIndirectTopRef(arena_allocator),
+        arena_stack_(arena_allocator->arena_stack_) {
+  }
+  template <typename U>
+  ScopedArenaAllocatorAdapter(const ScopedArenaAllocatorAdapter<U>& other)
+      : DebugStackReference(other),
+        DebugStackIndirectTopRef(other),
+        arena_stack_(other.arena_stack_) {
+  }
+  ScopedArenaAllocatorAdapter(const ScopedArenaAllocatorAdapter& other) = default;
+  ScopedArenaAllocatorAdapter& operator=(const ScopedArenaAllocatorAdapter& other) = default;
+  ~ScopedArenaAllocatorAdapter() = default;
+
+  size_type max_size() const {
+    return static_cast<size_type>(-1) / sizeof(T);
+  }
+
+  pointer address(reference x) const { return &x; }
+  const_pointer address(const_reference x) const { return &x; }
+
+  pointer allocate(size_type n, ScopedArenaAllocatorAdapter<void>::pointer hint = nullptr) {
+    DCHECK_LE(n, max_size());
+    DebugStackIndirectTopRef::CheckTop();
+    return reinterpret_cast<T*>(arena_stack_->Alloc(n * sizeof(T), kArenaAllocSTL));
+  }
+  void deallocate(pointer p, size_type n) {
+    DebugStackIndirectTopRef::CheckTop();
+  }
+
+  void construct(pointer p, const_reference val) {
+    DebugStackIndirectTopRef::CheckTop();
+    new (static_cast<void*>(p)) value_type(val);
+  }
+  void destroy(pointer p) {
+    DebugStackIndirectTopRef::CheckTop();
+    p->~value_type();
+  }
+
+ private:
+  ArenaStack* arena_stack_;
+
+  template <typename U>
+  friend class ScopedArenaAllocatorAdapter;
+};
+
+inline ScopedArenaAllocatorAdapter<void> ScopedArenaAllocator::Adapter() {
+  return ScopedArenaAllocatorAdapter<void>(this);
+}
+
+}  // namespace art
+
+#endif  // ART_COMPILER_UTILS_SCOPED_ARENA_ALLOCATOR_H_
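Taken together, Create() lets a pass keep an allocator alive beyond a C++ block scope (the allocator object lives on the arena stack itself, and the no-op operator delete means a delete-expression only runs the destructor), while Adapter() plugs the scoped arena into STL containers. A combined usage sketch (the pass class and container contents are hypothetical):

#include <vector>

namespace art {

// Hypothetical optimization pass whose allocator scope spans Start()/End().
class MyPass {
 public:
  MyPass() : allocator_(nullptr) { }

  void Start(ArenaStack* arena_stack) {
    allocator_ = ScopedArenaAllocator::Create(arena_stack);
  }

  void Run() {
    // The vector's storage comes from the arena stack, tagged kArenaAllocSTL;
    // deallocate() is a no-op, so memory is reclaimed only when the
    // allocator is reset or destroyed.
    std::vector<int, ScopedArenaAllocatorAdapter<int> > values(allocator_->Adapter());
    values.push_back(42);
  }

  void End() {
    delete allocator_;  // Runs the destructor (rewinds the stack); frees nothing.
    allocator_ = nullptr;
  }

 private:
  ScopedArenaAllocator* allocator_;
};

}  // namespace art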