 runtime/arch/arm/quick_entrypoints_arm.S   |  5
 runtime/arch/mips/quick_entrypoints_mips.S |  5
 runtime/arch/quick_alloc_entrypoints.S     |  3
 runtime/arch/x86/quick_entrypoints_x86.S   |  5
 runtime/base/histogram.h                   |  4
 runtime/gc/collector_type.h                |  3
 runtime/gc/heap.cc                         |  3
 runtime/gc/space/bump_pointer_space.h      |  6
 runtime/gc/space/space.h                   |  5
 runtime/mirror/array-inl.h                 |  1
 runtime/native/dalvik_system_VMDebug.cc    | 22
 11 files changed, 43 insertions(+), 19 deletions(-)
diff --git a/runtime/arch/arm/quick_entrypoints_arm.S b/runtime/arch/arm/quick_entrypoints_arm.S
index 1976af5..61be14b 100644
--- a/runtime/arch/arm/quick_entrypoints_arm.S
+++ b/runtime/arch/arm/quick_entrypoints_arm.S
@@ -16,6 +16,8 @@
#include "asm_support_arm.S"
+#include "arch/quick_alloc_entrypoints.S"
+
/* Deliver the given exception */
.extern artDeliverExceptionFromCode
/* Deliver an exception pending on a thread */
@@ -864,7 +866,8 @@ ENTRY \name
END \name
.endm
-#include "arch/quick_alloc_entrypoints.S"
+// Generate the allocation entrypoints for each allocator.
+GENERATE_ALL_ALLOC_ENTRYPOINTS
/*
* Called by managed code when the value in rSUSPEND has been decremented to 0.
diff --git a/runtime/arch/mips/quick_entrypoints_mips.S b/runtime/arch/mips/quick_entrypoints_mips.S
index 6d6d796..2d1e87a 100644
--- a/runtime/arch/mips/quick_entrypoints_mips.S
+++ b/runtime/arch/mips/quick_entrypoints_mips.S
@@ -16,6 +16,8 @@
#include "asm_support_mips.S"
+#include "arch/quick_alloc_entrypoints.S"
+
.set noreorder
.balign 4
@@ -931,7 +933,8 @@ ENTRY \name
END \name
.endm
-#include "arch/quick_alloc_entrypoints.S"
+// Generate the allocation entrypoints for each allocator.
+GENERATE_ALL_ALLOC_ENTRYPOINTS
/*
* Called by managed code when the value in rSUSPEND has been decremented to 0.
diff --git a/runtime/arch/quick_alloc_entrypoints.S b/runtime/arch/quick_alloc_entrypoints.S
index 0109c13..bdadc51 100644
--- a/runtime/arch/quick_alloc_entrypoints.S
+++ b/runtime/arch/quick_alloc_entrypoints.S
@@ -14,7 +14,6 @@
* limitations under the License.
*/
-
.macro GENERATE_ALLOC_ENTRYPOINTS c_suffix, cxx_suffix
// Called by managed code to allocate an object.
TWO_ARG_DOWNCALL art_quick_alloc_object\c_suffix, artAllocObjectFromCode\cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
@@ -32,7 +31,9 @@ THREE_ARG_DOWNCALL art_quick_check_and_alloc_array\c_suffix, artCheckAndAllocArr
THREE_ARG_DOWNCALL art_quick_check_and_alloc_array_with_access_check\c_suffix, artCheckAndAllocArrayFromCodeWithAccessCheck\cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
.endm
+.macro GENERATE_ALL_ALLOC_ENTRYPOINTS
GENERATE_ALLOC_ENTRYPOINTS
GENERATE_ALLOC_ENTRYPOINTS _instrumented, Instrumented
GENERATE_ALLOC_ENTRYPOINTS _bump_pointer, BumpPointer
GENERATE_ALLOC_ENTRYPOINTS _bump_pointer_instrumented, BumpPointerInstrumented
+.endm
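A note on the pattern: quick_alloc_entrypoints.S now only defines macros, so each architecture can #include it near the top of its file and expand GENERATE_ALL_ALLOC_ENTRYPOINTS later, once its TWO_ARG_DOWNCALL/THREE_ARG_DOWNCALL helpers are defined. A minimal C++ sketch of the same include-early, expand-later idea (the file name, entrypoint names, and signature below are illustrative, not taken from the change):

// alloc_entrypoints_sketch.h -- illustrative: defines generators, emits nothing by itself.
#define GENERATE_ALLOC_ENTRYPOINT(suffix) \
  extern "C" void* art_quick_alloc_object##suffix(unsigned type_idx, void* method);

#define GENERATE_ALL_ALLOC_ENTRYPOINTS \
  GENERATE_ALLOC_ENTRYPOINT() \
  GENERATE_ALLOC_ENTRYPOINT(_instrumented) \
  GENERATE_ALLOC_ENTRYPOINT(_bump_pointer) \
  GENERATE_ALLOC_ENTRYPOINT(_bump_pointer_instrumented)

// client.cc -- include up front, expand exactly once where the declarations belong.
GENERATE_ALL_ALLOC_ENTRYPOINTS

This mirrors the assembler change above: the #include no longer expands anything on its own, and GENERATE_ALL_ALLOC_ENTRYPOINTS is invoked at the spot where the old #include used to sit.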
diff --git a/runtime/arch/x86/quick_entrypoints_x86.S b/runtime/arch/x86/quick_entrypoints_x86.S
index 62a8b70..9679471 100644
--- a/runtime/arch/x86/quick_entrypoints_x86.S
+++ b/runtime/arch/x86/quick_entrypoints_x86.S
@@ -16,6 +16,8 @@
#include "asm_support_x86.S"
+#include "arch/quick_alloc_entrypoints.S"
+
// For x86, the CFA is esp+4, the address above the pushed return address on the stack.
/*
@@ -426,7 +428,8 @@ MACRO0(RETURN_OR_DELIVER_PENDING_EXCEPTION)
DELIVER_PENDING_EXCEPTION
END_MACRO
-#include "arch/quick_alloc_entrypoints.S"
+// Generate the allocation entrypoints for each allocator.
+GENERATE_ALL_ALLOC_ENTRYPOINTS
TWO_ARG_DOWNCALL art_quick_resolve_string, artResolveStringFromCode, RETURN_IF_RESULT_IS_NON_ZERO
TWO_ARG_DOWNCALL art_quick_initialize_static_storage, artInitializeStaticStorageFromCode, RETURN_IF_RESULT_IS_NON_ZERO
diff --git a/runtime/base/histogram.h b/runtime/base/histogram.h
index 4e5d29a..a7d51e2 100644
--- a/runtime/base/histogram.h
+++ b/runtime/base/histogram.h
@@ -40,8 +40,10 @@ template <class Value> class Histogram {
std::vector<double> perc_;
};
- // Used for name based comparators in the timing loggers.
+ // Used by the cumulative timing logger to search the histogram set for an existing split
+ // with the same name, using CumulativeLogger::HistogramComparator.
explicit Histogram(const char* name);
+ // This is the expected constructor when creating new Histograms.
Histogram(const char* name, Value initial_bucket_width, size_t max_buckets = 100);
void AddValue(Value);
// Builds the cumulative distribution function from the frequency data.
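As a usage sketch of the two constructors documented above (the include path, split name, and bucket width are assumptions; the real lookup goes through CumulativeLogger::HistogramComparator, whose definition is not part of this change):

#include "base/histogram-inl.h"  // assumed location of the template definitions

void HistogramUsageSketch() {
  // Name-only constructor: a lightweight key object for searching a set of histograms.
  Histogram<uint64_t> search_key("GcPause");
  // Full constructor: a histogram that actually records values.
  Histogram<uint64_t> pauses("GcPause", /*initial_bucket_width=*/50);
  pauses.AddValue(123);
}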
diff --git a/runtime/gc/collector_type.h b/runtime/gc/collector_type.h
index a42819b..ba3cad6 100644
--- a/runtime/gc/collector_type.h
+++ b/runtime/gc/collector_type.h
@@ -24,8 +24,11 @@ namespace gc {
// Which types of collections are able to be performed.
enum CollectorType {
+ // Non-concurrent mark-sweep.
kCollectorTypeMS,
+ // Concurrent mark-sweep.
kCollectorTypeCMS,
+ // Semi-space / mark-sweep hybrid, enables compaction.
kCollectorTypeSS,
};
std::ostream& operator<<(std::ostream& os, const CollectorType& collector_type);
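For reference, the ostream operator declared above is typically a switch over these three values; a hedged sketch (the printed strings are illustrative, not the actual output of collector_type.cc):

#include <ostream>

std::ostream& operator<<(std::ostream& os, const CollectorType& collector_type) {
  switch (collector_type) {
    case kCollectorTypeMS:  return os << "mark-sweep";
    case kCollectorTypeCMS: return os << "concurrent mark-sweep";
    case kCollectorTypeSS:  return os << "semi-space";
  }
  return os << "unknown collector type " << static_cast<int>(collector_type);
}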
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index d8902f0..5e62729 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -72,7 +72,6 @@ static constexpr bool kGCALotMode = false;
static constexpr size_t kGcAlotInterval = KB;
// Minimum amount of remaining bytes before a concurrent GC is triggered.
static constexpr size_t kMinConcurrentRemainingBytes = 128 * KB;
-static constexpr AllocatorType kDefaultPreZygoteAllocator = kAllocatorTypeFreeList;
Heap::Heap(size_t initial_size, size_t growth_limit, size_t min_free, size_t max_free,
double target_utilization, size_t capacity, const std::string& image_file_name,
@@ -1215,6 +1214,8 @@ void Heap::ChangeCollector(CollectorType collector_type) {
case kCollectorTypeCMS: {
ChangeAllocator(kAllocatorTypeFreeList);
break;
+ default:
+ LOG(FATAL) << "Unimplemented";
}
}
}
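The hunk shows only the tail of the switch; the overall shape it implies is roughly the following sketch (the kCollectorTypeSS branch and its bump-pointer allocator pairing are assumptions for illustration, not taken from the diff):

void Heap::ChangeCollector(CollectorType collector_type) {
  switch (collector_type) {
    case kCollectorTypeSS:
      // Assumed pairing: the compacting collector allocates out of the bump pointer space.
      ChangeAllocator(kAllocatorTypeBumpPointer);
      break;
    case kCollectorTypeMS:
    case kCollectorTypeCMS:
      ChangeAllocator(kAllocatorTypeFreeList);
      break;
    default:
      LOG(FATAL) << "Unimplemented";
  }
}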
diff --git a/runtime/gc/space/bump_pointer_space.h b/runtime/gc/space/bump_pointer_space.h
index 9b0b6aa..2edd3e2 100644
--- a/runtime/gc/space/bump_pointer_space.h
+++ b/runtime/gc/space/bump_pointer_space.h
@@ -120,7 +120,11 @@ class BumpPointerSpace : public ContinuousMemMapAllocSpace {
static mirror::Object* GetNextObject(mirror::Object* obj)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- // Alignment.
+ virtual BumpPointerSpace* AsBumpPointerSpace() {
+ return this;
+ }
+
+ // Object alignment within the space.
static constexpr size_t kAlignment = 8;
protected:
diff --git a/runtime/gc/space/space.h b/runtime/gc/space/space.h
index 38b602e..ca39175 100644
--- a/runtime/gc/space/space.h
+++ b/runtime/gc/space/space.h
@@ -43,6 +43,7 @@ class Heap;
namespace space {
class AllocSpace;
+class BumpPointerSpace;
class ContinuousSpace;
class DiscontinuousSpace;
class MallocSpace;
@@ -138,6 +139,10 @@ class Space {
bool IsBumpPointerSpace() const {
return GetType() == kSpaceTypeBumpPointerSpace;
}
+ virtual BumpPointerSpace* AsBumpPointerSpace() {
+ LOG(FATAL) << "Unreachable";
+ return NULL;
+ }
// Does this space hold large objects and implement the large object space abstraction?
bool IsLargeObjectSpace() const {
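The bump_pointer_space.h and space.h hunks together follow the runtime's usual Is*/As* downcast idiom; a minimal self-contained sketch of that idiom (generic names, and with Is*() made virtual here purely to keep the sketch short, whereas the real Space compares GetType()):

#include <cstdio>
#include <cstdlib>

class BumpPointerSpace;

class Space {
 public:
  virtual ~Space() {}
  virtual bool IsBumpPointerSpace() const { return false; }
  // The base class aborts: callers are expected to check Is*() before calling As*().
  virtual BumpPointerSpace* AsBumpPointerSpace() {
    std::fprintf(stderr, "Unreachable\n");
    std::abort();
  }
};

class BumpPointerSpace : public Space {
 public:
  bool IsBumpPointerSpace() const override { return true; }
  BumpPointerSpace* AsBumpPointerSpace() override { return this; }
};

Callers then guard the downcast, e.g. if (space->IsBumpPointerSpace()) use space->AsBumpPointerSpace(), which is exactly the pattern the dalvik_system_VMDebug.cc change below relies on.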
diff --git a/runtime/mirror/array-inl.h b/runtime/mirror/array-inl.h
index 46ffaae..a754b69 100644
--- a/runtime/mirror/array-inl.h
+++ b/runtime/mirror/array-inl.h
@@ -58,6 +58,7 @@ static inline size_t ComputeArraySize(Thread* self, Class* array_class, int32_t
return size;
}
+// Used for setting the array length in the allocation code path to ensure it is guarded by a CAS.
class SetLengthVisitor {
public:
explicit SetLengthVisitor(int32_t length) : length_(length) {
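A hedged sketch of the idea behind SetLengthVisitor (the allocation helper and its signature below are illustrative, not the actual ART entrypoint): the visitor writes the length while the allocation is still private to the allocating thread, so the CAS that reserves the memory never hands out an array with an uninitialized length.

#include <cstddef>

// Illustrative only: 'alloc_raw' stands in for whatever reserves the bytes (e.g. a
// bump-pointer CAS); the visitor runs before the object becomes visible to other threads.
template <typename Object, typename Visitor, typename AllocFn>
Object* AllocWithVisitor(size_t byte_count, const Visitor& visitor, AllocFn alloc_raw) {
  Object* obj = alloc_raw(byte_count);
  if (obj != nullptr) {
    visitor(obj);  // e.g. a SetLengthVisitor-style callback writes the array length here
  }
  return obj;
}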
diff --git a/runtime/native/dalvik_system_VMDebug.cc b/runtime/native/dalvik_system_VMDebug.cc
index 66fa100..67c4505 100644
--- a/runtime/native/dalvik_system_VMDebug.cc
+++ b/runtime/native/dalvik_system_VMDebug.cc
@@ -20,6 +20,7 @@
#include "class_linker.h"
#include "common_throws.h"
#include "debugger.h"
+#include "gc/space/bump_pointer_space.h"
#include "gc/space/dlmalloc_space.h"
#include "gc/space/large_object_space.h"
#include "gc/space/space-inl.h"
@@ -247,7 +248,7 @@ static jlong VMDebug_countInstancesOfClass(JNIEnv* env, jclass, jclass javaClass
// /proc/<pid>/smaps.
static void VMDebug_getHeapSpaceStats(JNIEnv* env, jclass, jlongArray data) {
jlong* arr = reinterpret_cast<jlong*>(env->GetPrimitiveArrayCritical(data, 0));
- if (arr == NULL || env->GetArrayLength(data) < 9) {
+ if (arr == nullptr || env->GetArrayLength(data) < 9) {
return;
}
@@ -257,29 +258,26 @@ static void VMDebug_getHeapSpaceStats(JNIEnv* env, jclass, jlongArray data) {
size_t zygoteUsed = 0;
size_t largeObjectsSize = 0;
size_t largeObjectsUsed = 0;
-
gc::Heap* heap = Runtime::Current()->GetHeap();
- const std::vector<gc::space::ContinuousSpace*>& continuous_spaces = heap->GetContinuousSpaces();
- const std::vector<gc::space::DiscontinuousSpace*>& discontinuous_spaces = heap->GetDiscontinuousSpaces();
- typedef std::vector<gc::space::ContinuousSpace*>::const_iterator It;
- for (It it = continuous_spaces.begin(), end = continuous_spaces.end(); it != end; ++it) {
- gc::space::ContinuousSpace* space = *it;
+ for (gc::space::ContinuousSpace* space : heap->GetContinuousSpaces()) {
if (space->IsImageSpace()) {
// Currently don't include the image space.
} else if (space->IsZygoteSpace()) {
gc::space::MallocSpace* malloc_space = space->AsMallocSpace();
zygoteSize += malloc_space->GetFootprint();
zygoteUsed += malloc_space->GetBytesAllocated();
- } else {
- // This is the alloc space.
+ } else if (space->IsMallocSpace()) {
+ // This is a malloc space.
gc::space::MallocSpace* malloc_space = space->AsMallocSpace();
allocSize += malloc_space->GetFootprint();
allocUsed += malloc_space->GetBytesAllocated();
+ } else if (space->IsBumpPointerSpace()) {
+ gc::space::BumpPointerSpace* bump_pointer_space = space->AsBumpPointerSpace();
+ allocSize += bump_pointer_space->Size();
+ allocUsed += bump_pointer_space->GetBytesAllocated();
}
}
- typedef std::vector<gc::space::DiscontinuousSpace*>::const_iterator It2;
- for (It2 it = discontinuous_spaces.begin(), end = discontinuous_spaces.end(); it != end; ++it) {
- gc::space::DiscontinuousSpace* space = *it;
+ for (gc::space::DiscontinuousSpace* space : heap->GetDiscontinuousSpaces()) {
if (space->IsLargeObjectSpace()) {
largeObjectsSize += space->AsLargeObjectSpace()->GetBytesAllocated();
largeObjectsUsed += largeObjectsSize;
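For reference, the accumulation pattern the rewritten loops follow, reduced to a standalone sketch (the SpaceStats struct and the template parameter are illustrative; the method names mirror the ones used in the diff):

#include <cstddef>

struct SpaceStats {
  size_t size = 0;  // footprint in bytes
  size_t used = 0;  // bytes actually allocated
};

// Sum footprint and allocated bytes across the non-zygote, non-image alloc spaces.
template <typename SpaceList>
SpaceStats SumAllocSpaces(const SpaceList& spaces) {
  SpaceStats stats;
  for (auto* space : spaces) {
    if (space->IsMallocSpace()) {
      stats.size += space->AsMallocSpace()->GetFootprint();
      stats.used += space->AsMallocSpace()->GetBytesAllocated();
    } else if (space->IsBumpPointerSpace()) {
      stats.size += space->AsBumpPointerSpace()->Size();
      stats.used += space->AsBumpPointerSpace()->GetBytesAllocated();
    }
  }
  return stats;
}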