Diffstat (limited to 'runtime')
-rw-r--r--  runtime/arch/arm64/instruction_set_features_arm64.cc |  16
-rw-r--r--  runtime/arch/arm64/instruction_set_features_arm64.h  |  14
-rw-r--r--  runtime/arch/mips/quick_entrypoints_mips.S           |  31
-rw-r--r--  runtime/arch/mips64/quick_entrypoints_mips64.S       |   4
-rw-r--r--  runtime/base/hash_map.h                               |   2
-rw-r--r--  runtime/check_reference_map_visitor.h                 |   2
-rw-r--r--  runtime/gc/accounting/mod_union_table_test.cc         |   2
-rw-r--r--  runtime/leb128.h                                      |  44
-rw-r--r--  runtime/memory_region.h                               |  71
-rw-r--r--  runtime/mirror/array-inl.h                            |  21
-rw-r--r--  runtime/oat_file_assistant_test.cc                    |  22
-rw-r--r--  runtime/stack.cc                                      |  18
-rw-r--r--  runtime/stack_map.cc                                  | 249
-rw-r--r--  runtime/stack_map.h                                   | 662
14 files changed, 758 insertions(+), 400 deletions(-)
diff --git a/runtime/arch/arm64/instruction_set_features_arm64.cc b/runtime/arch/arm64/instruction_set_features_arm64.cc
index a1270dc..f8a9f9d 100644
--- a/runtime/arch/arm64/instruction_set_features_arm64.cc
+++ b/runtime/arch/arm64/instruction_set_features_arm64.cc
@@ -48,19 +48,23 @@ const Arm64InstructionSetFeatures* Arm64InstructionSetFeatures::FromVariant(
return nullptr;
}
}
- return new Arm64InstructionSetFeatures(smp, needs_a53_835769_fix);
+
+ // The variants that need a fix for 843419 are the same as those that need a fix for 835769.
+ bool needs_a53_843419_fix = needs_a53_835769_fix;
+
+ return new Arm64InstructionSetFeatures(smp, needs_a53_835769_fix, needs_a53_843419_fix);
}
const Arm64InstructionSetFeatures* Arm64InstructionSetFeatures::FromBitmap(uint32_t bitmap) {
bool smp = (bitmap & kSmpBitfield) != 0;
bool is_a53 = (bitmap & kA53Bitfield) != 0;
- return new Arm64InstructionSetFeatures(smp, is_a53);
+ return new Arm64InstructionSetFeatures(smp, is_a53, is_a53);
}
const Arm64InstructionSetFeatures* Arm64InstructionSetFeatures::FromCppDefines() {
const bool smp = true;
const bool is_a53 = true; // Pessimistically assume all ARM64s are A53s.
- return new Arm64InstructionSetFeatures(smp, is_a53);
+ return new Arm64InstructionSetFeatures(smp, is_a53, is_a53);
}
const Arm64InstructionSetFeatures* Arm64InstructionSetFeatures::FromCpuInfo() {
@@ -85,13 +89,13 @@ const Arm64InstructionSetFeatures* Arm64InstructionSetFeatures::FromCpuInfo() {
} else {
LOG(ERROR) << "Failed to open /proc/cpuinfo";
}
- return new Arm64InstructionSetFeatures(smp, is_a53);
+ return new Arm64InstructionSetFeatures(smp, is_a53, is_a53);
}
const Arm64InstructionSetFeatures* Arm64InstructionSetFeatures::FromHwcap() {
bool smp = sysconf(_SC_NPROCESSORS_CONF) > 1;
const bool is_a53 = true; // Pessimistically assume all ARM64s are A53s.
- return new Arm64InstructionSetFeatures(smp, is_a53);
+ return new Arm64InstructionSetFeatures(smp, is_a53, is_a53);
}
const Arm64InstructionSetFeatures* Arm64InstructionSetFeatures::FromAssembly() {
@@ -140,7 +144,7 @@ const InstructionSetFeatures* Arm64InstructionSetFeatures::AddFeaturesFromSplitS
return nullptr;
}
}
- return new Arm64InstructionSetFeatures(smp, is_a53);
+ return new Arm64InstructionSetFeatures(smp, is_a53, is_a53);
}
} // namespace art
diff --git a/runtime/arch/arm64/instruction_set_features_arm64.h b/runtime/arch/arm64/instruction_set_features_arm64.h
index f6bfee7..3b3e2c9 100644
--- a/runtime/arch/arm64/instruction_set_features_arm64.h
+++ b/runtime/arch/arm64/instruction_set_features_arm64.h
@@ -61,6 +61,11 @@ class Arm64InstructionSetFeatures FINAL : public InstructionSetFeatures {
return fix_cortex_a53_835769_;
}
+ // Generate code addressing Cortex-A53 erratum 843419?
+ bool NeedFixCortexA53_843419() const {
+ return fix_cortex_a53_843419_;
+ }
+
// TODO: Tune this on a per CPU basis. For now, we pessimistically assume
// that all ARM64 CPUs prefer explicit memory barriers over acquire-release.
//
@@ -79,8 +84,12 @@ class Arm64InstructionSetFeatures FINAL : public InstructionSetFeatures {
std::string* error_msg) const OVERRIDE;
private:
- explicit Arm64InstructionSetFeatures(bool smp, bool needs_a53_835769_fix)
- : InstructionSetFeatures(smp), fix_cortex_a53_835769_(needs_a53_835769_fix) {
+ explicit Arm64InstructionSetFeatures(bool smp,
+ bool needs_a53_835769_fix,
+ bool needs_a53_843419_fix)
+ : InstructionSetFeatures(smp),
+ fix_cortex_a53_835769_(needs_a53_835769_fix),
+ fix_cortex_a53_843419_(needs_a53_843419_fix) {
}
// Bitmap positions for encoding features as a bitmap.
@@ -90,6 +99,7 @@ class Arm64InstructionSetFeatures FINAL : public InstructionSetFeatures {
};
const bool fix_cortex_a53_835769_;
+ const bool fix_cortex_a53_843419_;
DISALLOW_COPY_AND_ASSIGN(Arm64InstructionSetFeatures);
};
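
For reference, a minimal standalone sketch of the bitmap round trip that FromBitmap now performs, assuming the bit positions kSmpBitfield = 1 and kA53Bitfield = 2; the struct and function below are simplified stand-ins, not the runtime's types:

    #include <cassert>
    #include <cstdint>

    // Assumed bit positions, mirroring the header's bitmap encoding.
    constexpr uint32_t kSmpBitfield = 1u << 0;
    constexpr uint32_t kA53Bitfield = 1u << 1;

    struct Features { bool smp, fix_835769, fix_843419; };

    // As in FromBitmap: the single A53 bit drives both erratum workarounds,
    // since the affected core list is the same for 835769 and 843419.
    Features FromBitmap(uint32_t bitmap) {
      bool smp = (bitmap & kSmpBitfield) != 0;
      bool is_a53 = (bitmap & kA53Bitfield) != 0;
      return Features{smp, is_a53, is_a53};
    }

    int main() {
      Features f = FromBitmap(kSmpBitfield | kA53Bitfield);
      assert(f.fix_835769 && f.fix_843419);  // the two fixes travel together
    }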
diff --git a/runtime/arch/mips/quick_entrypoints_mips.S b/runtime/arch/mips/quick_entrypoints_mips.S
index 16f0e70..0c2250e 100644
--- a/runtime/arch/mips/quick_entrypoints_mips.S
+++ b/runtime/arch/mips/quick_entrypoints_mips.S
@@ -1094,7 +1094,7 @@ ENTRY art_quick_resolution_trampoline
lw $a0, ARG_SLOT_SIZE($sp) # load resolved method to $a0
RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME
move $t9, $v0 # code pointer must be in $t9 to generate the global pointer
- jalr $zero, $v0 # tail call to method
+ jalr $zero, $t9 # tail call to method
nop
1:
RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME
@@ -1203,29 +1203,28 @@ art_quick_instrumentation_exit:
.cpload $t9
move $ra, $zero # link register is to here, so clobber with 0 for later checks
+ SETUP_REFS_ONLY_CALLEE_SAVE_FRAME
addiu $sp, $sp, -16 # allocate temp storage on the stack
.cfi_adjust_cfa_offset 16
- sw $v0, 12($sp)
- .cfi_rel_offset 2, 32
- sw $v1, 8($sp)
- .cfi_rel_offset 3, 36
- s.d $f0, 0($sp)
- SETUP_REFS_ONLY_CALLEE_SAVE_FRAME
+ sw $v0, ARG_SLOT_SIZE+12($sp)
+ .cfi_rel_offset 2, ARG_SLOT_SIZE+12
+ sw $v1, ARG_SLOT_SIZE+8($sp)
+ .cfi_rel_offset 3, ARG_SLOT_SIZE+8
+ s.d $f0, ARG_SLOT_SIZE($sp)
s.d $f0, 16($sp) # pass fpr result
move $a2, $v0 # pass gpr result
move $a3, $v1
- addiu $a1, $sp, ARG_SLOT_SIZE # pass $sp (remove arg slots)
+ addiu $a1, $sp, ARG_SLOT_SIZE+16 # pass $sp (remove arg slots and temp storage)
jal artInstrumentationMethodExitFromCode # (Thread*, SP, gpr_res, fpr_res)
move $a0, rSELF # pass Thread::Current
- move $t0, $v0 # set aside returned link register
+ move $t9, $v0 # set aside returned link register
move $ra, $v1 # set link register for deoptimization
- addiu $sp, $sp, ARG_SLOT_SIZE+FRAME_SIZE_REFS_ONLY_CALLEE_SAVE # args slot + refs_only callee save frame
- lw $v0, 12($sp) # restore return values
- lw $v1, 8($sp)
- l.d $f0, 0($sp)
- jalr $zero, $t0 # return
- addiu $sp, $sp, 16 # remove temp storage from stack
- .cfi_adjust_cfa_offset -16
+ lw $v0, ARG_SLOT_SIZE+12($sp) # restore return values
+ lw $v1, ARG_SLOT_SIZE+8($sp)
+ l.d $f0, ARG_SLOT_SIZE($sp)
+ jalr $zero, $t9 # return
+ addiu $sp, $sp, ARG_SLOT_SIZE+FRAME_SIZE_REFS_ONLY_CALLEE_SAVE+16 # restore stack
+ .cfi_adjust_cfa_offset -(ARG_SLOT_SIZE+FRAME_SIZE_REFS_ONLY_CALLEE_SAVE+16)
END art_quick_instrumentation_exit
/*
diff --git a/runtime/arch/mips64/quick_entrypoints_mips64.S b/runtime/arch/mips64/quick_entrypoints_mips64.S
index 8cb95f1..697bf00 100644
--- a/runtime/arch/mips64/quick_entrypoints_mips64.S
+++ b/runtime/arch/mips64/quick_entrypoints_mips64.S
@@ -1504,11 +1504,11 @@ art_quick_instrumentation_exit:
move $a1, $t0 # pass $sp
jal artInstrumentationMethodExitFromCode # (Thread*, SP, gpr_res, fpr_res)
move $a0, rSELF # pass Thread::Current
- move $t0, $v0 # set aside returned link register
+ move $t9, $v0 # set aside returned link register
move $ra, $v1 # set link register for deoptimization
ld $v0, 0($sp) # restore return values
l.d $f0, 8($sp)
- jalr $zero, $t0 # return
+ jalr $zero, $t9 # return
daddiu $sp, $sp, 16+FRAME_SIZE_REFS_ONLY_CALLEE_SAVE # 16 bytes of saved values + ref_only callee save frame
.cfi_adjust_cfa_offset -(16+FRAME_SIZE_REFS_ONLY_CALLEE_SAVE)
END art_quick_instrumentation_exit
diff --git a/runtime/base/hash_map.h b/runtime/base/hash_map.h
index c0f903f..eab80ff 100644
--- a/runtime/base/hash_map.h
+++ b/runtime/base/hash_map.h
@@ -48,7 +48,7 @@ class HashMapWrapper {
Fn fn_;
};
-template <class Key, class Value, class EmptyFn = DefaultEmptyFn<Key>,
+template <class Key, class Value, class EmptyFn,
class HashFn = std::hash<Key>, class Pred = std::equal_to<Key>,
class Alloc = std::allocator<std::pair<Key, Value>>>
class HashMap : public HashSet<std::pair<Key, Value>, EmptyFn, HashMapWrapper<HashFn>,
diff --git a/runtime/check_reference_map_visitor.h b/runtime/check_reference_map_visitor.h
index 204546d..5d9cd35 100644
--- a/runtime/check_reference_map_visitor.h
+++ b/runtime/check_reference_map_visitor.h
@@ -75,7 +75,7 @@ class CheckReferenceMapVisitor : public StackVisitor {
int reg = registers[i];
CHECK(reg < m->GetCodeItem()->registers_size_);
DexRegisterLocation location =
- dex_register_map.GetLocationKindAndValue(reg, number_of_dex_registers);
+ dex_register_map.GetDexRegisterLocation(reg, number_of_dex_registers, code_info);
switch (location.GetKind()) {
case DexRegisterLocation::Kind::kNone:
// Not set, should not be a reference.
diff --git a/runtime/gc/accounting/mod_union_table_test.cc b/runtime/gc/accounting/mod_union_table_test.cc
index 7780935..94bb3f5 100644
--- a/runtime/gc/accounting/mod_union_table_test.cc
+++ b/runtime/gc/accounting/mod_union_table_test.cc
@@ -47,7 +47,7 @@ class ModUnionTableTest : public CommonRuntimeTest {
Thread* self, space::ContinuousMemMapAllocSpace* space, size_t component_count)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
auto* klass = GetObjectArrayClass(self, space);
- const size_t size = ComputeArraySize(self, klass, component_count, 2);
+ const size_t size = mirror::ComputeArraySize(component_count, 2);
size_t bytes_allocated = 0, bytes_tl_bulk_allocated;
auto* obj = down_cast<mirror::ObjectArray<mirror::Object>*>(
space->Alloc(self, size, &bytes_allocated, nullptr, &bytes_tl_bulk_allocated));
diff --git a/runtime/leb128.h b/runtime/leb128.h
index dfb42b8..d36b690 100644
--- a/runtime/leb128.h
+++ b/runtime/leb128.h
@@ -124,6 +124,18 @@ static inline uint8_t* EncodeUnsignedLeb128(uint8_t* dest, uint32_t value) {
return dest;
}
+template<typename Allocator>
+static inline void EncodeUnsignedLeb128(std::vector<uint8_t, Allocator>* dest, uint32_t value) {
+ uint8_t out = value & 0x7f;
+ value >>= 7;
+ while (value != 0) {
+ dest->push_back(out | 0x80);
+ out = value & 0x7f;
+ value >>= 7;
+ }
+ dest->push_back(out);
+}
+
static inline uint8_t* EncodeSignedLeb128(uint8_t* dest, int32_t value) {
uint32_t extra_bits = static_cast<uint32_t>(value ^ (value >> 31)) >> 6;
uint8_t out = value & 0x7f;
@@ -137,6 +149,19 @@ static inline uint8_t* EncodeSignedLeb128(uint8_t* dest, int32_t value) {
return dest;
}
+template<typename Allocator>
+static inline void EncodeSignedLeb128(std::vector<uint8_t, Allocator>* dest, int32_t value) {
+ uint32_t extra_bits = static_cast<uint32_t>(value ^ (value >> 31)) >> 6;
+ uint8_t out = value & 0x7f;
+ while (extra_bits != 0u) {
+ dest->push_back(out | 0x80);
+ value >>= 7;
+ out = value & 0x7f;
+ extra_bits >>= 7;
+ }
+ dest->push_back(out);
+}
+
// An encoder that pushes uint32_t data onto the given std::vector.
class Leb128Encoder {
public:
@@ -149,14 +174,7 @@ class Leb128Encoder {
}
void PushBackUnsigned(uint32_t value) {
- uint8_t out = value & 0x7f;
- value >>= 7;
- while (value != 0) {
- data_->push_back(out | 0x80);
- out = value & 0x7f;
- value >>= 7;
- }
- data_->push_back(out);
+ EncodeUnsignedLeb128(data_, value);
}
template<typename It>
@@ -167,15 +185,7 @@ class Leb128Encoder {
}
void PushBackSigned(int32_t value) {
- uint32_t extra_bits = static_cast<uint32_t>(value ^ (value >> 31)) >> 6;
- uint8_t out = value & 0x7f;
- while (extra_bits != 0u) {
- data_->push_back(out | 0x80);
- value >>= 7;
- out = value & 0x7f;
- extra_bits >>= 7;
- }
- data_->push_back(out);
+ EncodeSignedLeb128(data_, value);
}
template<typename It>
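
As a quick check of the vector-based unsigned encoder added above, here is a self-contained sketch (plain std::vector, no ART headers) showing that 300 encodes to the two bytes 0xAC 0x02:

    #include <cassert>
    #include <cstdint>
    #include <vector>

    // Same algorithm as the EncodeUnsignedLeb128 overload above.
    void EncodeUnsignedLeb128(std::vector<uint8_t>* dest, uint32_t value) {
      uint8_t out = value & 0x7f;
      value >>= 7;
      while (value != 0) {
        dest->push_back(out | 0x80);  // continuation bit set
        out = value & 0x7f;
        value >>= 7;
      }
      dest->push_back(out);  // final byte, continuation bit clear
    }

    int main() {
      std::vector<uint8_t> buf;
      EncodeUnsignedLeb128(&buf, 300);  // 300 = 0b10'0101100
      assert(buf.size() == 2 && buf[0] == 0xAC && buf[1] == 0x02);
    }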
diff --git a/runtime/memory_region.h b/runtime/memory_region.h
index f867f6a..6a784eb 100644
--- a/runtime/memory_region.h
+++ b/runtime/memory_region.h
@@ -48,22 +48,28 @@ class MemoryRegion FINAL : public ValueObject {
uint8_t* end() const { return start() + size_; }
// Load value of type `T` at `offset`. The memory address corresponding
- // to `offset` should be word-aligned.
- template<typename T> T Load(uintptr_t offset) const {
- // TODO: DCHECK that the address is word-aligned.
- return *ComputeInternalPointer<T>(offset);
+ // to `offset` should be word-aligned (on ARM, this is a requirement).
+ template<typename T>
+ ALWAYS_INLINE T Load(uintptr_t offset) const {
+ T* address = ComputeInternalPointer<T>(offset);
+ DCHECK(IsWordAligned(address));
+ return *address;
}
// Store `value` (of type `T`) at `offset`. The memory address
- // corresponding to `offset` should be word-aligned.
- template<typename T> void Store(uintptr_t offset, T value) const {
- // TODO: DCHECK that the address is word-aligned.
- *ComputeInternalPointer<T>(offset) = value;
+ // corresponding to `offset` should be word-aligned (on ARM, this is
+ // a requirement).
+ template<typename T>
+ ALWAYS_INLINE void Store(uintptr_t offset, T value) const {
+ T* address = ComputeInternalPointer<T>(offset);
+ DCHECK(IsWordAligned(address));
+ *address = value;
}
// Load value of type `T` at `offset`. The memory address corresponding
// to `offset` does not need to be word-aligned.
- template<typename T> T LoadUnaligned(uintptr_t offset) const {
+ template<typename T>
+ ALWAYS_INLINE T LoadUnaligned(uintptr_t offset) const {
// Equivalent unsigned integer type corresponding to T.
typedef typename UnsignedIntegerType<sizeof(T)>::type U;
U equivalent_unsigned_integer_value = 0;
@@ -77,7 +83,8 @@ class MemoryRegion FINAL : public ValueObject {
// Store `value` (of type `T`) at `offset`. The memory address
// corresponding to `offset` does not need to be word-aligned.
- template<typename T> void StoreUnaligned(uintptr_t offset, T value) const {
+ template<typename T>
+ ALWAYS_INLINE void StoreUnaligned(uintptr_t offset, T value) const {
// Equivalent unsigned integer type corresponding to T.
typedef typename UnsignedIntegerType<sizeof(T)>::type U;
U equivalent_unsigned_integer_value = bit_cast<U, T>(value);
@@ -88,19 +95,20 @@ class MemoryRegion FINAL : public ValueObject {
}
}
- template<typename T> T* PointerTo(uintptr_t offset) const {
+ template<typename T>
+ ALWAYS_INLINE T* PointerTo(uintptr_t offset) const {
return ComputeInternalPointer<T>(offset);
}
// Load a single bit in the region. The bit at offset 0 is the least
// significant bit in the first byte.
- bool LoadBit(uintptr_t bit_offset) const {
+ ALWAYS_INLINE bool LoadBit(uintptr_t bit_offset) const {
uint8_t bit_mask;
uint8_t byte = *ComputeBitPointer(bit_offset, &bit_mask);
return byte & bit_mask;
}
- void StoreBit(uintptr_t bit_offset, bool value) const {
+ ALWAYS_INLINE void StoreBit(uintptr_t bit_offset, bool value) const {
uint8_t bit_mask;
uint8_t* byte = ComputeBitPointer(bit_offset, &bit_mask);
if (value) {
@@ -110,6 +118,31 @@ class MemoryRegion FINAL : public ValueObject {
}
}
+ // Load `length` bits from the region starting at bit offset `bit_offset`.
+ // The bit at the smallest offset is the least significant bit in the
+ // loaded value. `length` must not be larger than the number of bits
+ // contained in the return value (32).
+ uint32_t LoadBits(uintptr_t bit_offset, size_t length) const {
+ CHECK_LE(length, sizeof(uint32_t) * kBitsPerByte);
+ uint32_t value = 0u;
+ for (size_t i = 0; i < length; ++i) {
+ value |= LoadBit(bit_offset + i) << i;
+ }
+ return value;
+ }
+
+ // Store `value` on `length` bits in the region starting at bit offset
+ // `bit_offset`. The bit at the smallest offset is the least significant
+ // bit of the stored `value`. `value` must not be larger than `length`
+ // bits.
+ void StoreBits(uintptr_t bit_offset, uint32_t value, size_t length) {
+ CHECK_LT(value, 2u << length);
+ for (size_t i = 0; i < length; ++i) {
+ bool ith_bit = value & (1 << i);
+ StoreBit(bit_offset + i, ith_bit);
+ }
+ }
+
void CopyFrom(size_t offset, const MemoryRegion& from) const;
// Compute a sub memory region based on an existing one.
@@ -126,7 +159,8 @@ class MemoryRegion FINAL : public ValueObject {
}
private:
- template<typename T> T* ComputeInternalPointer(size_t offset) const {
+ template<typename T>
+ ALWAYS_INLINE T* ComputeInternalPointer(size_t offset) const {
CHECK_GE(size(), sizeof(T));
CHECK_LE(offset, size() - sizeof(T));
return reinterpret_cast<T*>(start() + offset);
@@ -134,13 +168,20 @@ class MemoryRegion FINAL : public ValueObject {
// Locate the bit with the given offset. Returns a pointer to the byte
// containing the bit, and sets bit_mask to the bit within that byte.
- uint8_t* ComputeBitPointer(uintptr_t bit_offset, uint8_t* bit_mask) const {
+ ALWAYS_INLINE uint8_t* ComputeBitPointer(uintptr_t bit_offset, uint8_t* bit_mask) const {
uintptr_t bit_remainder = (bit_offset & (kBitsPerByte - 1));
*bit_mask = (1U << bit_remainder);
uintptr_t byte_offset = (bit_offset >> kBitsPerByteLog2);
return ComputeInternalPointer<uint8_t>(byte_offset);
}
+ // Is `address` aligned on a machine word?
+ template<typename T> static bool IsWordAligned(const T* address) {
+ // Word alignment in bytes.
+ size_t kWordAlignment = GetInstructionSetPointerSize(kRuntimeISA);
+ return IsAlignedParam(address, kWordAlignment);
+ }
+
void* pointer_;
size_t size_;
};
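
A sketch of the contract of the new LoadBits/StoreBits accessors, over a local byte buffer instead of a MemoryRegion. The helpers re-implement the same bit layout (least significant bit of byte 0 first); they are illustrations, not the runtime's code:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    void StoreBit(uint8_t* buf, size_t bit_offset, bool value) {
      uint8_t mask = 1u << (bit_offset & 7);
      if (value) buf[bit_offset >> 3] |= mask; else buf[bit_offset >> 3] &= ~mask;
    }

    uint32_t LoadBit(const uint8_t* buf, size_t bit_offset) {
      return (buf[bit_offset >> 3] >> (bit_offset & 7)) & 1u;
    }

    void StoreBits(uint8_t* buf, size_t bit_offset, uint32_t value, size_t length) {
      for (size_t i = 0; i < length; ++i) StoreBit(buf, bit_offset + i, (value >> i) & 1u);
    }

    uint32_t LoadBits(const uint8_t* buf, size_t bit_offset, size_t length) {
      uint32_t value = 0u;
      for (size_t i = 0; i < length; ++i) value |= LoadBit(buf, bit_offset + i) << i;
      return value;
    }

    int main() {
      uint8_t buf[4] = {};
      StoreBits(buf, 0, 5u, 3);  // two 3-bit entries packed back to back,
      StoreBits(buf, 3, 6u, 3);  // as DexRegisterMap does for catalog indices
      assert(LoadBits(buf, 0, 3) == 5u && LoadBits(buf, 3, 3) == 6u);
    }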
diff --git a/runtime/mirror/array-inl.h b/runtime/mirror/array-inl.h
index 048d8ba..7f04992 100644
--- a/runtime/mirror/array-inl.h
+++ b/runtime/mirror/array-inl.h
@@ -64,25 +64,20 @@ inline bool Array::CheckIsValidIndex(int32_t index) {
return true;
}
-static inline size_t ComputeArraySize(Thread* self, Class* array_class, int32_t component_count,
- size_t component_size_shift)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- DCHECK(array_class != NULL);
+static inline size_t ComputeArraySize(int32_t component_count, size_t component_size_shift) {
DCHECK_GE(component_count, 0);
- DCHECK(array_class->IsArrayClass());
size_t component_size = 1U << component_size_shift;
size_t header_size = Array::DataOffset(component_size).SizeValue();
size_t data_size = static_cast<size_t>(component_count) << component_size_shift;
size_t size = header_size + data_size;
- // Check for size_t overflow and throw OutOfMemoryError if this was
- // an unreasonable request.
+ // Check for size_t overflow if this was an unreasonable request
+ // but let the caller throw OutOfMemoryError.
#ifdef __LP64__
// 64-bit. No overflow as component_count is 32-bit and the maximum
// component size is 8.
DCHECK_LE((1U << component_size_shift), 8U);
- UNUSED(self);
#else
// 32-bit.
DCHECK_NE(header_size, 0U);
@@ -90,9 +85,6 @@ static inline size_t ComputeArraySize(Thread* self, Class* array_class, int32_t
// The array length limit (exclusive).
const size_t length_limit = (0U - header_size) >> component_size_shift;
if (UNLIKELY(length_limit <= static_cast<size_t>(component_count))) {
- self->ThrowOutOfMemoryError(StringPrintf("%s of length %d would overflow",
- PrettyDescriptor(array_class).c_str(),
- component_count).c_str());
return 0; // failure
}
#endif
@@ -159,15 +151,20 @@ template <bool kIsInstrumented, bool kFillUsable>
inline Array* Array::Alloc(Thread* self, Class* array_class, int32_t component_count,
size_t component_size_shift, gc::AllocatorType allocator_type) {
DCHECK(allocator_type != gc::kAllocatorTypeLOS);
+ DCHECK(array_class != nullptr);
+ DCHECK(array_class->IsArrayClass());
DCHECK_EQ(array_class->GetComponentSizeShift(), component_size_shift);
DCHECK_EQ(array_class->GetComponentSize(), (1U << component_size_shift));
- size_t size = ComputeArraySize(self, array_class, component_count, component_size_shift);
+ size_t size = ComputeArraySize(component_count, component_size_shift);
#ifdef __LP64__
// 64-bit. No size_t overflow.
DCHECK_NE(size, 0U);
#else
// 32-bit.
if (UNLIKELY(size == 0)) {
+ self->ThrowOutOfMemoryError(StringPrintf("%s of length %d would overflow",
+ PrettyDescriptor(array_class).c_str(),
+ component_count).c_str());
return nullptr;
}
#endif
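
On 32-bit targets the guard above works because the size computation wraps modulo 2^32. A small arithmetic sketch with made-up numbers (a hypothetical 12-byte header and 4-byte components) shows why length_limit is the first count that overflows:

    #include <cassert>
    #include <cstdint>

    int main() {
      const uint32_t header_size = 12;  // hypothetical array header size
      const uint32_t shift = 2;         // component size 4 => shift 2
      // First component count whose total size wraps past 2^32,
      // i.e. (2^32 - header_size) >> shift.
      const uint32_t length_limit = (0u - header_size) >> shift;
      const uint32_t total = header_size + (length_limit << shift);
      assert(total < header_size);  // wrapped around: caller must throw OOME
    }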
diff --git a/runtime/oat_file_assistant_test.cc b/runtime/oat_file_assistant_test.cc
index 0422fcd..a8b0876 100644
--- a/runtime/oat_file_assistant_test.cc
+++ b/runtime/oat_file_assistant_test.cc
@@ -210,29 +210,29 @@ class OatFileAssistantTest : public CommonRuntimeTest {
// image in case of the GSS collector.
+ 384 * MB;
- std::string error_msg;
std::unique_ptr<BacktraceMap> map(BacktraceMap::Create(getpid(), true));
ASSERT_TRUE(map.get() != nullptr) << "Failed to build process map";
for (BacktraceMap::const_iterator it = map->begin();
reservation_start < reservation_end && it != map->end(); ++it) {
- if (it->end <= reservation_start) {
- continue;
- }
-
- if (it->start < reservation_start) {
- reservation_start = std::min(reservation_end, it->end);
- }
+ ReserveImageSpaceChunk(reservation_start, std::min(it->start, reservation_end));
+ reservation_start = std::max(reservation_start, it->end);
+ }
+ ReserveImageSpaceChunk(reservation_start, reservation_end);
+ }
+ // Reserve a chunk of memory for the image space in the given range.
+ // Only has effect for chunks with a positive number of bytes.
+ void ReserveImageSpaceChunk(uintptr_t start, uintptr_t end) {
+ if (start < end) {
+ std::string error_msg;
image_reservation_.push_back(std::unique_ptr<MemMap>(
MemMap::MapAnonymous("image reservation",
- reinterpret_cast<uint8_t*>(reservation_start),
- std::min(it->start, reservation_end) - reservation_start,
+ reinterpret_cast<uint8_t*>(start), end - start,
PROT_NONE, false, false, &error_msg)));
ASSERT_TRUE(image_reservation_.back().get() != nullptr) << error_msg;
LOG(INFO) << "Reserved space for image " <<
reinterpret_cast<void*>(image_reservation_.back()->Begin()) << "-" <<
reinterpret_cast<void*>(image_reservation_.back()->End());
- reservation_start = it->end;
}
}
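
The rewritten test helper walks the process map once and reserves every gap between existing mappings via ReserveImageSpaceChunk. A sketch of the same interval walk over a hypothetical sorted list of (start, end) mappings, with the reservation replaced by collecting the chunks:

    #include <algorithm>
    #include <cassert>
    #include <cstdint>
    #include <utility>
    #include <vector>

    using Range = std::pair<uintptr_t, uintptr_t>;

    // Collect the sub-ranges of [lo, hi) not covered by the sorted `maps`.
    std::vector<Range> Gaps(uintptr_t lo, uintptr_t hi, const std::vector<Range>& maps) {
      std::vector<Range> gaps;
      for (const Range& m : maps) {
        if (lo >= hi) break;
        uintptr_t gap_end = std::min(m.first, hi);
        if (lo < gap_end) gaps.emplace_back(lo, gap_end);  // chunk before this map
        lo = std::max(lo, m.second);                       // skip past the map
      }
      if (lo < hi) gaps.emplace_back(lo, hi);              // trailing chunk
      return gaps;
    }

    int main() {
      std::vector<Range> g = Gaps(0, 100, {{10, 20}, {30, 40}});
      assert(g.size() == 3);
      assert(g[0].first == 0 && g[0].second == 10);  // also (20,30) and (40,100)
    }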
diff --git a/runtime/stack.cc b/runtime/stack.cc
index 2d688ee..4ae49dd 100644
--- a/runtime/stack.cc
+++ b/runtime/stack.cc
@@ -206,21 +206,22 @@ bool StackVisitor::GetVRegFromOptimizedCode(mirror::ArtMethod* m, uint16_t vreg,
DexRegisterMap dex_register_map =
code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
DexRegisterLocation::Kind location_kind =
- dex_register_map.GetLocationKind(vreg, number_of_dex_registers);
+ dex_register_map.GetLocationKind(vreg, number_of_dex_registers, code_info);
switch (location_kind) {
case DexRegisterLocation::Kind::kInStack: {
- const int32_t offset = dex_register_map.GetStackOffsetInBytes(vreg, number_of_dex_registers);
+ const int32_t offset =
+ dex_register_map.GetStackOffsetInBytes(vreg, number_of_dex_registers, code_info);
const uint8_t* addr = reinterpret_cast<const uint8_t*>(cur_quick_frame_) + offset;
*val = *reinterpret_cast<const uint32_t*>(addr);
return true;
}
case DexRegisterLocation::Kind::kInRegister:
case DexRegisterLocation::Kind::kInFpuRegister: {
- uint32_t reg = dex_register_map.GetMachineRegister(vreg, number_of_dex_registers);
+ uint32_t reg = dex_register_map.GetMachineRegister(vreg, number_of_dex_registers, code_info);
return GetRegisterIfAccessible(reg, kind, val);
}
case DexRegisterLocation::Kind::kConstant:
- *val = dex_register_map.GetConstant(vreg, number_of_dex_registers);
+ *val = dex_register_map.GetConstant(vreg, number_of_dex_registers, code_info);
return true;
case DexRegisterLocation::Kind::kNone:
return false;
@@ -228,7 +229,7 @@ bool StackVisitor::GetVRegFromOptimizedCode(mirror::ArtMethod* m, uint16_t vreg,
LOG(FATAL)
<< "Unexpected location kind"
<< DexRegisterLocation::PrettyDescriptor(
- dex_register_map.GetLocationInternalKind(vreg, number_of_dex_registers));
+ dex_register_map.GetLocationInternalKind(vreg, number_of_dex_registers, code_info));
UNREACHABLE();
}
}
@@ -396,18 +397,19 @@ bool StackVisitor::SetVRegFromOptimizedCode(mirror::ArtMethod* m, uint16_t vreg,
DexRegisterMap dex_register_map =
code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
DexRegisterLocation::Kind location_kind =
- dex_register_map.GetLocationKind(vreg, number_of_dex_registers);
+ dex_register_map.GetLocationKind(vreg, number_of_dex_registers, code_info);
uint32_t dex_pc = m->ToDexPc(cur_quick_frame_pc_, false);
switch (location_kind) {
case DexRegisterLocation::Kind::kInStack: {
- const int32_t offset = dex_register_map.GetStackOffsetInBytes(vreg, number_of_dex_registers);
+ const int32_t offset =
+ dex_register_map.GetStackOffsetInBytes(vreg, number_of_dex_registers, code_info);
uint8_t* addr = reinterpret_cast<uint8_t*>(cur_quick_frame_) + offset;
*reinterpret_cast<uint32_t*>(addr) = new_value;
return true;
}
case DexRegisterLocation::Kind::kInRegister:
case DexRegisterLocation::Kind::kInFpuRegister: {
- uint32_t reg = dex_register_map.GetMachineRegister(vreg, number_of_dex_registers);
+ uint32_t reg = dex_register_map.GetMachineRegister(vreg, number_of_dex_registers, code_info);
return SetRegisterIfAccessible(reg, new_value, kind);
}
case DexRegisterLocation::Kind::kConstant:
diff --git a/runtime/stack_map.cc b/runtime/stack_map.cc
index 020a6e6..11e7e44 100644
--- a/runtime/stack_map.cc
+++ b/runtime/stack_map.cc
@@ -16,133 +16,198 @@
#include "stack_map.h"
+#include <stdint.h>
+
namespace art {
-constexpr uint32_t StackMap::kNoDexRegisterMapSmallEncoding;
-constexpr uint32_t StackMap::kNoInlineInfoSmallEncoding;
+constexpr size_t DexRegisterLocationCatalog::kNoLocationEntryIndex;
constexpr uint32_t StackMap::kNoDexRegisterMap;
constexpr uint32_t StackMap::kNoInlineInfo;
+DexRegisterLocation::Kind DexRegisterMap::GetLocationInternalKind(uint16_t dex_register_number,
+ uint16_t number_of_dex_registers,
+ const CodeInfo& code_info) const {
+ DexRegisterLocationCatalog dex_register_location_catalog =
+ code_info.GetDexRegisterLocationCatalog();
+ size_t location_catalog_entry_index = GetLocationCatalogEntryIndex(
+ dex_register_number,
+ number_of_dex_registers,
+ code_info.GetNumberOfDexRegisterLocationCatalogEntries());
+ return dex_register_location_catalog.GetLocationInternalKind(location_catalog_entry_index);
+}
+
+DexRegisterLocation DexRegisterMap::GetDexRegisterLocation(uint16_t dex_register_number,
+ uint16_t number_of_dex_registers,
+ const CodeInfo& code_info) const {
+ DexRegisterLocationCatalog dex_register_location_catalog =
+ code_info.GetDexRegisterLocationCatalog();
+ size_t location_catalog_entry_index = GetLocationCatalogEntryIndex(
+ dex_register_number,
+ number_of_dex_registers,
+ code_info.GetNumberOfDexRegisterLocationCatalogEntries());
+ return dex_register_location_catalog.GetDexRegisterLocation(location_catalog_entry_index);
+}
+
+// Loads `number_of_bytes` at the given `offset` and assembles a uint32_t. If `check_max` is true,
+// this method converts a maximum value of size `number_of_bytes` into a uint32_t 0xFFFFFFFF.
+static uint32_t LoadAt(MemoryRegion region,
+ size_t number_of_bytes,
+ size_t offset,
+ bool check_max = false) {
+ if (number_of_bytes == 0u) {
+ DCHECK(!check_max);
+ return 0;
+ } else if (number_of_bytes == 1u) {
+ uint8_t value = region.LoadUnaligned<uint8_t>(offset);
+ if (check_max && value == 0xFF) {
+ return -1;
+ } else {
+ return value;
+ }
+ } else if (number_of_bytes == 2u) {
+ uint16_t value = region.LoadUnaligned<uint16_t>(offset);
+ if (check_max && value == 0xFFFF) {
+ return -1;
+ } else {
+ return value;
+ }
+ } else if (number_of_bytes == 3u) {
+ uint16_t low = region.LoadUnaligned<uint16_t>(offset);
+ uint16_t high = region.LoadUnaligned<uint8_t>(offset + sizeof(uint16_t));
+ uint32_t value = (high << 16) + low;
+ if (check_max && value == 0xFFFFFF) {
+ return -1;
+ } else {
+ return value;
+ }
+ } else {
+ DCHECK_EQ(number_of_bytes, 4u);
+ return region.LoadUnaligned<uint32_t>(offset);
+ }
+}
+
+static void StoreAt(MemoryRegion region, size_t number_of_bytes, size_t offset, uint32_t value) {
+ if (number_of_bytes == 0u) {
+ DCHECK_EQ(value, 0u);
+ } else if (number_of_bytes == 1u) {
+ region.StoreUnaligned<uint8_t>(offset, value);
+ } else if (number_of_bytes == 2u) {
+ region.StoreUnaligned<uint16_t>(offset, value);
+ } else if (number_of_bytes == 3u) {
+ region.StoreUnaligned<uint16_t>(offset, Low16Bits(value));
+ region.StoreUnaligned<uint8_t>(offset + sizeof(uint16_t), High16Bits(value));
+ } else {
+ region.StoreUnaligned<uint32_t>(offset, value);
+ DCHECK_EQ(number_of_bytes, 4u);
+ }
+}
+
uint32_t StackMap::GetDexPc(const CodeInfo& info) const {
- return info.HasSmallDexPc()
- ? region_.LoadUnaligned<kSmallEncoding>(info.ComputeStackMapDexPcOffset())
- : region_.LoadUnaligned<kLargeEncoding>(info.ComputeStackMapDexPcOffset());
+ return LoadAt(region_, info.NumberOfBytesForDexPc(), info.ComputeStackMapDexPcOffset());
}
void StackMap::SetDexPc(const CodeInfo& info, uint32_t dex_pc) {
- DCHECK(!info.HasSmallDexPc() || IsUint<kBitsForSmallEncoding>(dex_pc)) << dex_pc;
- info.HasSmallDexPc()
- ? region_.StoreUnaligned<kSmallEncoding>(info.ComputeStackMapDexPcOffset(), dex_pc)
- : region_.StoreUnaligned<kLargeEncoding>(info.ComputeStackMapDexPcOffset(), dex_pc);
+ StoreAt(region_, info.NumberOfBytesForDexPc(), info.ComputeStackMapDexPcOffset(), dex_pc);
}
uint32_t StackMap::GetNativePcOffset(const CodeInfo& info) const {
- return info.HasSmallNativePc()
- ? region_.LoadUnaligned<kSmallEncoding>(info.ComputeStackMapNativePcOffset())
- : region_.LoadUnaligned<kLargeEncoding>(info.ComputeStackMapNativePcOffset());
+ return LoadAt(region_, info.NumberOfBytesForNativePc(), info.ComputeStackMapNativePcOffset());
}
void StackMap::SetNativePcOffset(const CodeInfo& info, uint32_t native_pc_offset) {
- DCHECK(!info.HasSmallNativePc()
- || IsUint<kBitsForSmallEncoding>(native_pc_offset)) << native_pc_offset;
- uint32_t entry = info.ComputeStackMapNativePcOffset();
- info.HasSmallNativePc()
- ? region_.StoreUnaligned<kSmallEncoding>(entry, native_pc_offset)
- : region_.StoreUnaligned<kLargeEncoding>(entry, native_pc_offset);
+ StoreAt(region_, info.NumberOfBytesForNativePc(), info.ComputeStackMapNativePcOffset(), native_pc_offset);
}
uint32_t StackMap::GetDexRegisterMapOffset(const CodeInfo& info) const {
- if (info.HasSmallDexRegisterMap()) {
- uint8_t value = region_.LoadUnaligned<kSmallEncoding>(
- info.ComputeStackMapDexRegisterMapOffset());
- if (value == kNoDexRegisterMapSmallEncoding) {
- return kNoDexRegisterMap;
- } else {
- return value;
- }
- } else {
- return region_.LoadUnaligned<kLargeEncoding>(info.ComputeStackMapDexRegisterMapOffset());
- }
+ return LoadAt(region_,
+ info.NumberOfBytesForDexRegisterMap(),
+ info.ComputeStackMapDexRegisterMapOffset(),
+ /* check_max */ true);
}
void StackMap::SetDexRegisterMapOffset(const CodeInfo& info, uint32_t offset) {
- DCHECK(!info.HasSmallDexRegisterMap()
- || (IsUint<kBitsForSmallEncoding>(offset)
- || (offset == kNoDexRegisterMap))) << offset;
- size_t dex_register_map_entry = info.ComputeStackMapDexRegisterMapOffset();
- info.HasSmallDexRegisterMap()
- ? region_.StoreUnaligned<kSmallEncoding>(dex_register_map_entry, offset)
- : region_.StoreUnaligned<kLargeEncoding>(dex_register_map_entry, offset);
+ StoreAt(region_,
+ info.NumberOfBytesForDexRegisterMap(),
+ info.ComputeStackMapDexRegisterMapOffset(),
+ offset);
}
uint32_t StackMap::GetInlineDescriptorOffset(const CodeInfo& info) const {
if (!info.HasInlineInfo()) return kNoInlineInfo;
- if (info.HasSmallInlineInfo()) {
- uint8_t value = region_.LoadUnaligned<kSmallEncoding>(
- info.ComputeStackMapInlineInfoOffset());
- if (value == kNoInlineInfoSmallEncoding) {
- return kNoInlineInfo;
- } else {
- return value;
- }
- } else {
- return region_.LoadUnaligned<kLargeEncoding>(info.ComputeStackMapInlineInfoOffset());
- }
+ return LoadAt(region_,
+ info.NumberOfBytesForInlineInfo(),
+ info.ComputeStackMapInlineInfoOffset(),
+ /* check_max */ true);
}
void StackMap::SetInlineDescriptorOffset(const CodeInfo& info, uint32_t offset) {
DCHECK(info.HasInlineInfo());
- DCHECK(!info.HasSmallInlineInfo()
- || (IsUint<kBitsForSmallEncoding>(offset)
- || (offset == kNoInlineInfo))) << offset;
- size_t inline_entry = info.ComputeStackMapInlineInfoOffset();
- info.HasSmallInlineInfo()
- ? region_.StoreUnaligned<kSmallEncoding>(inline_entry, offset)
- : region_.StoreUnaligned<kLargeEncoding>(inline_entry, offset);
+ StoreAt(region_,
+ info.NumberOfBytesForInlineInfo(),
+ info.ComputeStackMapInlineInfoOffset(),
+ offset);
}
uint32_t StackMap::GetRegisterMask(const CodeInfo& info) const {
- return region_.LoadUnaligned<kLargeEncoding>(info.ComputeStackMapRegisterMaskOffset());
+ return LoadAt(region_,
+ info.NumberOfBytesForRegisterMask(),
+ info.ComputeStackMapRegisterMaskOffset());
}
void StackMap::SetRegisterMask(const CodeInfo& info, uint32_t mask) {
- region_.StoreUnaligned<kLargeEncoding>(info.ComputeStackMapRegisterMaskOffset(), mask);
+ StoreAt(region_,
+ info.NumberOfBytesForRegisterMask(),
+ info.ComputeStackMapRegisterMaskOffset(),
+ mask);
}
-size_t StackMap::ComputeStackMapSize(size_t stack_mask_size,
- bool has_inline_info,
- bool is_small_inline_info,
- bool is_small_dex_map,
- bool is_small_dex_pc,
- bool is_small_native_pc) {
- return StackMap::kFixedSize
- + stack_mask_size
- + (has_inline_info ? NumberOfBytesForEntry(is_small_inline_info) : 0)
- + NumberOfBytesForEntry(is_small_dex_map)
- + NumberOfBytesForEntry(is_small_dex_pc)
- + NumberOfBytesForEntry(is_small_native_pc);
+size_t StackMap::ComputeStackMapSizeInternal(size_t stack_mask_size,
+ size_t number_of_bytes_for_inline_info,
+ size_t number_of_bytes_for_dex_map,
+ size_t number_of_bytes_for_dex_pc,
+ size_t number_of_bytes_for_native_pc,
+ size_t number_of_bytes_for_register_mask) {
+ return stack_mask_size
+ + number_of_bytes_for_inline_info
+ + number_of_bytes_for_dex_map
+ + number_of_bytes_for_dex_pc
+ + number_of_bytes_for_native_pc
+ + number_of_bytes_for_register_mask;
}
size_t StackMap::ComputeStackMapSize(size_t stack_mask_size,
size_t inline_info_size,
size_t dex_register_map_size,
size_t dex_pc_max,
- size_t native_pc_max) {
- return ComputeStackMapSize(
+ size_t native_pc_max,
+ size_t register_mask_max) {
+ return ComputeStackMapSizeInternal(
stack_mask_size,
- inline_info_size != 0,
- // + 1 to also encode kNoInlineInfo.
- IsUint<kBitsForSmallEncoding>(inline_info_size + dex_register_map_size + 1),
+ inline_info_size == 0
+ ? 0
+ // + 1 to also encode kNoInlineInfo.
+ : CodeInfo::EncodingSizeInBytes(inline_info_size + dex_register_map_size + 1),
// + 1 to also encode kNoDexRegisterMap.
- IsUint<kBitsForSmallEncoding>(dex_register_map_size + 1),
- IsUint<kBitsForSmallEncoding>(dex_pc_max),
- IsUint<kBitsForSmallEncoding>(native_pc_max));
+ CodeInfo::EncodingSizeInBytes(dex_register_map_size + 1),
+ CodeInfo::EncodingSizeInBytes(dex_pc_max),
+ CodeInfo::EncodingSizeInBytes(native_pc_max),
+ CodeInfo::EncodingSizeInBytes(register_mask_max));
}
MemoryRegion StackMap::GetStackMask(const CodeInfo& info) const {
return region_.Subregion(info.ComputeStackMapStackMaskOffset(), info.GetStackMaskSize());
}
+static void DumpRegisterMapping(std::ostream& os,
+ size_t dex_register_num,
+ DexRegisterLocation location,
+ const std::string& prefix = "v",
+ const std::string& suffix = "") {
+ os << " " << prefix << dex_register_num << ": "
+ << DexRegisterLocation::PrettyDescriptor(location.GetInternalKind())
+ << " (" << location.GetValue() << ")" << suffix << '\n';
+}
+
void CodeInfo::DumpStackMapHeader(std::ostream& os, size_t stack_map_num) const {
StackMap stack_map = GetStackMapAt(stack_map_num);
os << " StackMap " << stack_map_num
@@ -168,13 +233,25 @@ void CodeInfo::Dump(std::ostream& os, uint16_t number_of_dex_registers) const {
<< ", number_of_dex_registers=" << number_of_dex_registers
<< ", number_of_stack_maps=" << number_of_stack_maps
<< ", has_inline_info=" << HasInlineInfo()
- << ", has_small_inline_info=" << HasSmallInlineInfo()
- << ", has_small_dex_register_map=" << HasSmallDexRegisterMap()
- << ", has_small_dex_pc=" << HasSmallDexPc()
- << ", has_small_native_pc=" << HasSmallNativePc()
+ << ", number_of_bytes_for_inline_info=" << NumberOfBytesForInlineInfo()
+ << ", number_of_bytes_for_dex_register_map=" << NumberOfBytesForDexRegisterMap()
+ << ", number_of_bytes_for_dex_pc=" << NumberOfBytesForDexPc()
+ << ", number_of_bytes_for_native_pc=" << NumberOfBytesForNativePc()
+ << ", number_of_bytes_for_register_mask=" << NumberOfBytesForRegisterMask()
<< ")\n";
- // Display stack maps along with Dex register maps.
+ // Display the Dex register location catalog.
+ size_t number_of_location_catalog_entries = GetNumberOfDexRegisterLocationCatalogEntries();
+ size_t location_catalog_size_in_bytes = GetDexRegisterLocationCatalogSize();
+ os << " DexRegisterLocationCatalog (number_of_entries=" << number_of_location_catalog_entries
+ << ", size_in_bytes=" << location_catalog_size_in_bytes << ")\n";
+ DexRegisterLocationCatalog dex_register_location_catalog = GetDexRegisterLocationCatalog();
+ for (size_t i = 0; i < number_of_location_catalog_entries; ++i) {
+ DexRegisterLocation location = dex_register_location_catalog.GetDexRegisterLocation(i);
+ DumpRegisterMapping(os, i, location, "entry ");
+ }
+
+ // Display stack maps along with (live) Dex register maps.
for (size_t i = 0; i < number_of_stack_maps; ++i) {
StackMap stack_map = GetStackMapAt(i);
DumpStackMapHeader(os, i);
@@ -183,11 +260,13 @@ void CodeInfo::Dump(std::ostream& os, uint16_t number_of_dex_registers) const {
// TODO: Display the bit mask of live Dex registers.
for (size_t j = 0; j < number_of_dex_registers; ++j) {
if (dex_register_map.IsDexRegisterLive(j)) {
+ size_t location_catalog_entry_index = dex_register_map.GetLocationCatalogEntryIndex(
+ j, number_of_dex_registers, number_of_location_catalog_entries);
DexRegisterLocation location =
- dex_register_map.GetLocationKindAndValue(j, number_of_dex_registers);
- os << " " << "v" << j << ": "
- << DexRegisterLocation::PrettyDescriptor(location.GetInternalKind())
- << " (" << location.GetValue() << ")" << '\n';
+ dex_register_map.GetDexRegisterLocation(j, number_of_dex_registers, *this);
+ DumpRegisterMapping(
+ os, j, location, "v",
+ "\t[entry " + std::to_string(static_cast<int>(location_catalog_entry_index)) + "]");
}
}
}
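
A standalone sketch of the 3-byte branch in the new LoadAt helper: two unaligned loads assemble a 24-bit value, and with check_max the all-ones pattern is widened to 0xFFFFFFFF so it still reads back as kNoDexRegisterMap/kNoInlineInfo. (Assumes a little-endian host, as on ART's targets; this is an illustration, not the runtime's code.)

    #include <cassert>
    #include <cstdint>
    #include <cstring>

    uint32_t LoadAt3(const uint8_t* p, bool check_max) {
      uint16_t low;
      std::memcpy(&low, p, sizeof(low));            // unaligned 16-bit load
      uint32_t value = (uint32_t{p[2]} << 16) + low;
      return (check_max && value == 0xFFFFFFu) ? 0xFFFFFFFFu : value;
    }

    int main() {
      const uint8_t bytes[] = {0x34, 0x12, 0x56};     // 0x561234, little-endian
      assert(LoadAt3(bytes, false) == 0x561234u);
      const uint8_t no_entry[] = {0xFF, 0xFF, 0xFF};  // "no map" marker
      assert(LoadAt3(no_entry, true) == 0xFFFFFFFFu);
    }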
diff --git a/runtime/stack_map.h b/runtime/stack_map.h
index 6ec7cc8..f68cafe 100644
--- a/runtime/stack_map.h
+++ b/runtime/stack_map.h
@@ -26,13 +26,10 @@ namespace art {
// Size of a frame slot, in bytes. This constant is a signed value,
// to please the compiler in arithmetic operations involving int32_t
// (signed) values.
-static ssize_t constexpr kFrameSlotSize = 4;
-
-// Word alignment required on ARM, in bytes.
-static constexpr size_t kWordAlignment = 4;
+static constexpr ssize_t kFrameSlotSize = 4;
// Size of Dex virtual registers.
-static size_t constexpr kVRegSize = 4;
+static constexpr size_t kVRegSize = 4;
class CodeInfo;
@@ -97,9 +94,9 @@ class DexRegisterLocation {
*
* In addition, DexRegisterMap also uses these values:
* - kInStackLargeOffset: value holds a "large" stack offset (greater than
- * 128 bytes);
- * - kConstantLargeValue: value holds a "large" constant (lower than or
- * equal to -16, or greater than 16).
+ * or equal to 128 bytes);
+ * - kConstantLargeValue: value holds a "large" constant (lower than 0, or
+ * greater than or equal to 32).
*/
enum class Kind : uint8_t {
// Short location kinds, for entries fitting on one byte (3 bits
@@ -120,8 +117,7 @@ class DexRegisterLocation {
kInStackLargeOffset = 5, // 0b101
// Large constant, that cannot fit on a 5-bit signed integer (i.e.,
- // lower than -2^(5-1) = -16, or greater than or equal to
- // 2^(5-1) - 1 = 15).
+ // lower than 0, or greater than or equal to 2^5 = 32).
kConstantLargeValue = 6, // 0b110
kLastLocationKind = kConstantLargeValue
@@ -193,8 +189,10 @@ class DexRegisterLocation {
}
}
- DexRegisterLocation(Kind kind, int32_t value)
- : kind_(kind), value_(value) {}
+ // Required by art::StackMapStream::LocationCatalogEntriesIndices.
+ DexRegisterLocation() : kind_(Kind::kNone), value_(0) {}
+
+ DexRegisterLocation(Kind kind, int32_t value) : kind_(kind), value_(value) {}
static DexRegisterLocation None() {
return DexRegisterLocation(Kind::kNone, 0);
@@ -223,33 +221,23 @@ class DexRegisterLocation {
private:
Kind kind_;
int32_t value_;
+
+ friend class DexRegisterLocationHashFn;
};
/**
- * Information on dex register values for a specific PC. The information is
- * of the form:
- * [live_bit_mask, DexRegisterLocation+].
+ * Store information on unique Dex register locations used in a method.
+ * The information is of the form:
+ * [DexRegisterLocation+].
* DexRegisterLocations are either 1- or 5-byte wide (see art::DexRegisterLocation::Kind).
*/
-class DexRegisterMap {
+class DexRegisterLocationCatalog {
public:
- explicit DexRegisterMap(MemoryRegion region) : region_(region) {}
+ explicit DexRegisterLocationCatalog(MemoryRegion region) : region_(region) {}
// Short (compressed) location, fitting on one byte.
typedef uint8_t ShortLocation;
- static size_t LiveBitMaskSize(uint16_t number_of_dex_registers) {
- return RoundUp(number_of_dex_registers, kBitsPerByte) / kBitsPerByte;
- }
-
- void SetLiveBitMask(size_t offset,
- uint16_t number_of_dex_registers,
- const BitVector& live_dex_registers_mask) {
- for (uint16_t i = 0; i < number_of_dex_registers; i++) {
- region_.StoreBit(offset + i, live_dex_registers_mask.IsBitSet(i));
- }
- }
-
void SetRegisterInfo(size_t offset, const DexRegisterLocation& dex_register_location) {
DexRegisterLocation::Kind kind = ComputeCompressedKind(dex_register_location);
int32_t value = dex_register_location.GetValue();
@@ -265,12 +253,12 @@ class DexRegisterMap {
DCHECK_EQ(value % kFrameSlotSize, 0);
value /= kFrameSlotSize;
}
- DCHECK(IsUint<kValueBits>(value)) << value;
+ DCHECK(IsShortValue(value)) << value;
region_.StoreUnaligned<ShortLocation>(offset, MakeShortLocation(kind, value));
} else {
// Large location. Write the location on one byte and the value
// on 4 bytes.
- DCHECK(!IsUint<kValueBits>(value)) << value;
+ DCHECK(!IsShortValue(value)) << value;
if (kind == DexRegisterLocation::Kind::kInStackLargeOffset) {
// Also divide large stack offsets by 4 for the sake of consistency.
DCHECK_EQ(value % kFrameSlotSize, 0);
@@ -285,63 +273,39 @@ class DexRegisterMap {
}
}
- bool IsDexRegisterLive(uint16_t dex_register_index) const {
+ // Find the offset of the location catalog entry number `location_catalog_entry_index`.
+ size_t FindLocationOffset(size_t location_catalog_entry_index) const {
size_t offset = kFixedSize;
- return region_.LoadBit(offset + dex_register_index);
- }
-
- static constexpr size_t kNoDexRegisterLocationOffset = -1;
-
- static size_t GetDexRegisterMapLocationsOffset(uint16_t number_of_dex_registers) {
- return kLiveBitMaskOffset + LiveBitMaskSize(number_of_dex_registers);
- }
-
- // Find the offset of the Dex register location number `dex_register_index`.
- size_t FindLocationOffset(uint16_t dex_register_index, uint16_t number_of_dex_registers) const {
- if (!IsDexRegisterLive(dex_register_index)) return kNoDexRegisterLocationOffset;
- size_t offset = GetDexRegisterMapLocationsOffset(number_of_dex_registers);
- // Skip the first `dex_register_index - 1` entries.
- for (uint16_t i = 0; i < dex_register_index; ++i) {
- if (IsDexRegisterLive(i)) {
- // Read the first next byte and inspect its first 3 bits to decide
- // whether it is a short or a large location.
- DexRegisterLocation::Kind kind = ExtractKindAtOffset(offset);
- if (DexRegisterLocation::IsShortLocationKind(kind)) {
- // Short location. Skip the current byte.
- offset += SingleShortEntrySize();
- } else {
- // Large location. Skip the 5 next bytes.
- offset += SingleLargeEntrySize();
- }
+ // Skip the first `location_catalog_entry_index - 1` entries.
+ for (uint16_t i = 0; i < location_catalog_entry_index; ++i) {
+ // Read the first next byte and inspect its first 3 bits to decide
+ // whether it is a short or a large location.
+ DexRegisterLocation::Kind kind = ExtractKindAtOffset(offset);
+ if (DexRegisterLocation::IsShortLocationKind(kind)) {
+ // Short location. Skip the current byte.
+ offset += SingleShortEntrySize();
+ } else {
+ // Large location. Skip the 5 next bytes.
+ offset += SingleLargeEntrySize();
}
}
return offset;
}
- // Get the surface kind.
- DexRegisterLocation::Kind GetLocationKind(uint16_t dex_register_index,
- uint16_t number_of_dex_registers) const {
- return IsDexRegisterLive(dex_register_index)
- ? DexRegisterLocation::ConvertToSurfaceKind(
- GetLocationInternalKind(dex_register_index, number_of_dex_registers))
- : DexRegisterLocation::Kind::kNone;
- }
-
- // Get the internal kind.
- DexRegisterLocation::Kind GetLocationInternalKind(uint16_t dex_register_index,
- uint16_t number_of_dex_registers) const {
- return IsDexRegisterLive(dex_register_index)
- ? ExtractKindAtOffset(FindLocationOffset(dex_register_index, number_of_dex_registers))
- : DexRegisterLocation::Kind::kNone;
+ // Get the internal kind of entry at `location_catalog_entry_index`.
+ DexRegisterLocation::Kind GetLocationInternalKind(size_t location_catalog_entry_index) const {
+ if (location_catalog_entry_index == kNoLocationEntryIndex) {
+ return DexRegisterLocation::Kind::kNone;
+ }
+ return ExtractKindAtOffset(FindLocationOffset(location_catalog_entry_index));
}
- // TODO: Rename as GetDexRegisterLocation?
- DexRegisterLocation GetLocationKindAndValue(uint16_t dex_register_index,
- uint16_t number_of_dex_registers) const {
- if (!IsDexRegisterLive(dex_register_index)) {
+ // Get the (surface) kind and value of entry at `location_catalog_entry_index`.
+ DexRegisterLocation GetDexRegisterLocation(size_t location_catalog_entry_index) const {
+ if (location_catalog_entry_index == kNoLocationEntryIndex) {
return DexRegisterLocation::None();
}
- size_t offset = FindLocationOffset(dex_register_index, number_of_dex_registers);
+ size_t offset = FindLocationOffset(location_catalog_entry_index);
// Read the first byte and inspect its first 3 bits to get the location.
ShortLocation first_byte = region_.LoadUnaligned<ShortLocation>(offset);
DexRegisterLocation::Kind kind = ExtractKindFromShortLocation(first_byte);
@@ -364,31 +328,6 @@ class DexRegisterMap {
}
}
- int32_t GetStackOffsetInBytes(uint16_t dex_register_index,
- uint16_t number_of_dex_registers) const {
- DexRegisterLocation location =
- GetLocationKindAndValue(dex_register_index, number_of_dex_registers);
- DCHECK(location.GetKind() == DexRegisterLocation::Kind::kInStack);
- // GetLocationKindAndValue returns the offset in bytes.
- return location.GetValue();
- }
-
- int32_t GetConstant(uint16_t dex_register_index, uint16_t number_of_dex_registers) const {
- DexRegisterLocation location =
- GetLocationKindAndValue(dex_register_index, number_of_dex_registers);
- DCHECK(location.GetKind() == DexRegisterLocation::Kind::kConstant);
- return location.GetValue();
- }
-
- int32_t GetMachineRegister(uint16_t dex_register_index, uint16_t number_of_dex_registers) const {
- DexRegisterLocation location =
- GetLocationKindAndValue(dex_register_index, number_of_dex_registers);
- DCHECK(location.GetInternalKind() == DexRegisterLocation::Kind::kInRegister
- || location.GetInternalKind() == DexRegisterLocation::Kind::kInFpuRegister)
- << DexRegisterLocation::PrettyDescriptor(location.GetInternalKind());
- return location.GetValue();
- }
-
// Compute the compressed kind of `location`.
static DexRegisterLocation::Kind ComputeCompressedKind(const DexRegisterLocation& location) {
switch (location.GetInternalKind()) {
@@ -398,22 +337,21 @@ class DexRegisterMap {
case DexRegisterLocation::Kind::kInRegister:
DCHECK_GE(location.GetValue(), 0);
- DCHECK_LT(location.GetValue(), 1 << DexRegisterMap::kValueBits);
+ DCHECK_LT(location.GetValue(), 1 << kValueBits);
return DexRegisterLocation::Kind::kInRegister;
case DexRegisterLocation::Kind::kInFpuRegister:
DCHECK_GE(location.GetValue(), 0);
- DCHECK_LT(location.GetValue(), 1 << DexRegisterMap::kValueBits);
+ DCHECK_LT(location.GetValue(), 1 << kValueBits);
return DexRegisterLocation::Kind::kInFpuRegister;
case DexRegisterLocation::Kind::kInStack:
- DCHECK_EQ(location.GetValue() % kFrameSlotSize, 0);
- return IsUint<DexRegisterMap::kValueBits>(location.GetValue() / kFrameSlotSize)
+ return IsShortStackOffsetValue(location.GetValue())
? DexRegisterLocation::Kind::kInStack
: DexRegisterLocation::Kind::kInStackLargeOffset;
case DexRegisterLocation::Kind::kConstant:
- return IsUint<DexRegisterMap::kValueBits>(location.GetValue())
+ return IsShortConstantValue(location.GetValue())
? DexRegisterLocation::Kind::kConstant
: DexRegisterLocation::Kind::kConstantLargeValue;
@@ -433,11 +371,10 @@ class DexRegisterMap {
return true;
case DexRegisterLocation::Kind::kInStack:
- DCHECK_EQ(location.GetValue() % kFrameSlotSize, 0);
- return IsUint<kValueBits>(location.GetValue() / kFrameSlotSize);
+ return IsShortStackOffsetValue(location.GetValue());
case DexRegisterLocation::Kind::kConstant:
- return IsUint<kValueBits>(location.GetValue());
+ return IsShortConstantValue(location.GetValue());
default:
UNREACHABLE();
@@ -445,9 +382,7 @@ class DexRegisterMap {
}
static size_t EntrySize(const DexRegisterLocation& location) {
- return CanBeEncodedAsShortLocation(location)
- ? DexRegisterMap::SingleShortEntrySize()
- : DexRegisterMap::SingleLargeEntrySize();
+ return CanBeEncodedAsShortLocation(location) ? SingleShortEntrySize() : SingleLargeEntrySize();
}
static size_t SingleShortEntrySize() {
@@ -462,10 +397,14 @@ class DexRegisterMap {
return region_.size();
}
- static constexpr int kLiveBitMaskOffset = 0;
- static constexpr int kFixedSize = kLiveBitMaskOffset;
+ // Special (invalid) Dex register location catalog entry index meaning
+ // that there is no location for a given Dex register (i.e., it is
+ // mapped to a DexRegisterLocation::Kind::kNone location).
+ static constexpr size_t kNoLocationEntryIndex = -1;
private:
+ static constexpr int kFixedSize = 0;
+
// Width of the kind "field" in a short location, in bits.
static constexpr size_t kKindBits = 3;
// Width of the value "field" in a short location, in bits.
@@ -476,10 +415,24 @@ class DexRegisterMap {
static constexpr size_t kKindOffset = 0;
static constexpr size_t kValueOffset = kKindBits;
+ static bool IsShortStackOffsetValue(int32_t value) {
+ DCHECK_EQ(value % kFrameSlotSize, 0);
+ return IsShortValue(value / kFrameSlotSize);
+ }
+
+ static bool IsShortConstantValue(int32_t value) {
+ return IsShortValue(value);
+ }
+
+ static bool IsShortValue(int32_t value) {
+ return IsUint<kValueBits>(value);
+ }
+
static ShortLocation MakeShortLocation(DexRegisterLocation::Kind kind, int32_t value) {
- DCHECK(IsUint<kKindBits>(static_cast<uint8_t>(kind))) << static_cast<uint8_t>(kind);
- DCHECK(IsUint<kValueBits>(value)) << value;
- return (static_cast<uint8_t>(kind) & kKindMask) << kKindOffset
+ uint8_t kind_integer_value = static_cast<uint8_t>(kind);
+ DCHECK(IsUint<kKindBits>(kind_integer_value)) << kind_integer_value;
+ DCHECK(IsShortValue(value)) << value;
+ return (kind_integer_value & kKindMask) << kKindOffset
| (value & kValueMask) << kValueOffset;
}
@@ -507,6 +460,210 @@ class DexRegisterMap {
friend class StackMapStream;
};
+/* Information on Dex register locations for a specific PC, mapping a
+ * stack map's Dex register to a location entry in a DexRegisterLocationCatalog.
+ * The information is of the form:
+ * [live_bit_mask, entries*]
+ * where entries are concatenated unsigned integer values encoded on a number
+ * of bits (fixed per DexRegisterMap instances of a CodeInfo object) depending
+ * on the number of entries in the Dex register location catalog
+ * (see DexRegisterMap::SingleEntrySizeInBits). The map is 1-byte aligned.
+ */
+class DexRegisterMap {
+ public:
+ explicit DexRegisterMap(MemoryRegion region) : region_(region) {}
+
+ // Get the surface kind of Dex register `dex_register_number`.
+ DexRegisterLocation::Kind GetLocationKind(uint16_t dex_register_number,
+ uint16_t number_of_dex_registers,
+ const CodeInfo& code_info) const {
+ return DexRegisterLocation::ConvertToSurfaceKind(
+ GetLocationInternalKind(dex_register_number, number_of_dex_registers, code_info));
+ }
+
+ // Get the internal kind of Dex register `dex_register_number`.
+ DexRegisterLocation::Kind GetLocationInternalKind(uint16_t dex_register_number,
+ uint16_t number_of_dex_registers,
+ const CodeInfo& code_info) const;
+
+ // Get the Dex register location `dex_register_number`.
+ DexRegisterLocation GetDexRegisterLocation(uint16_t dex_register_number,
+ uint16_t number_of_dex_registers,
+ const CodeInfo& code_info) const;
+
+ int32_t GetStackOffsetInBytes(uint16_t dex_register_number,
+ uint16_t number_of_dex_registers,
+ const CodeInfo& code_info) const {
+ DexRegisterLocation location =
+ GetDexRegisterLocation(dex_register_number, number_of_dex_registers, code_info);
+ DCHECK(location.GetKind() == DexRegisterLocation::Kind::kInStack);
+ // GetDexRegisterLocation returns the offset in bytes.
+ return location.GetValue();
+ }
+
+ int32_t GetConstant(uint16_t dex_register_number,
+ uint16_t number_of_dex_registers,
+ const CodeInfo& code_info) const {
+ DexRegisterLocation location =
+ GetDexRegisterLocation(dex_register_number, number_of_dex_registers, code_info);
+ DCHECK(location.GetKind() == DexRegisterLocation::Kind::kConstant);
+ return location.GetValue();
+ }
+
+ int32_t GetMachineRegister(uint16_t dex_register_number,
+ uint16_t number_of_dex_registers,
+ const CodeInfo& code_info) const {
+ DexRegisterLocation location =
+ GetDexRegisterLocation(dex_register_number, number_of_dex_registers, code_info);
+ DCHECK(location.GetInternalKind() == DexRegisterLocation::Kind::kInRegister
+ || location.GetInternalKind() == DexRegisterLocation::Kind::kInFpuRegister)
+ << DexRegisterLocation::PrettyDescriptor(location.GetInternalKind());
+ return location.GetValue();
+ }
+
+ // Get the index of the entry in the Dex register location catalog
+ // corresponding to `dex_register_number`.
+ size_t GetLocationCatalogEntryIndex(uint16_t dex_register_number,
+ uint16_t number_of_dex_registers,
+ size_t number_of_location_catalog_entries) const {
+ if (!IsDexRegisterLive(dex_register_number)) {
+ return DexRegisterLocationCatalog::kNoLocationEntryIndex;
+ }
+
+ if (number_of_location_catalog_entries == 1) {
+ // We do not allocate space for location maps in the case of a
+ // single-entry location catalog, as it is useless. The only valid
+ // entry index is 0.
+ return 0;
+ }
+
+ // The bit offset of the beginning of the map locations.
+ size_t map_locations_offset_in_bits =
+ GetLocationMappingDataOffset(number_of_dex_registers) * kBitsPerByte;
+ size_t index_in_dex_register_map = GetIndexInDexRegisterMap(dex_register_number);
+ DCHECK_LT(index_in_dex_register_map, GetNumberOfLiveDexRegisters(number_of_dex_registers));
+ // The bit size of an entry.
+ size_t map_entry_size_in_bits = SingleEntrySizeInBits(number_of_location_catalog_entries);
+ // The bit offset where `index_in_dex_register_map` is located.
+ size_t entry_offset_in_bits =
+ map_locations_offset_in_bits + index_in_dex_register_map * map_entry_size_in_bits;
+ size_t location_catalog_entry_index =
+ region_.LoadBits(entry_offset_in_bits, map_entry_size_in_bits);
+ DCHECK_LT(location_catalog_entry_index, number_of_location_catalog_entries);
+ return location_catalog_entry_index;
+ }
+
+ // Map entry at `index_in_dex_register_map` to `location_catalog_entry_index`.
+ void SetLocationCatalogEntryIndex(size_t index_in_dex_register_map,
+ size_t location_catalog_entry_index,
+ uint16_t number_of_dex_registers,
+ size_t number_of_location_catalog_entries) {
+ DCHECK_LT(index_in_dex_register_map, GetNumberOfLiveDexRegisters(number_of_dex_registers));
+ DCHECK_LT(location_catalog_entry_index, number_of_location_catalog_entries);
+
+ if (number_of_location_catalog_entries == 1) {
+ // We do not allocate space for location maps in the case of a
+ // single-entry location catalog, as it is useless.
+ return;
+ }
+
+ // The bit offset of the beginning of the map locations.
+ size_t map_locations_offset_in_bits =
+ GetLocationMappingDataOffset(number_of_dex_registers) * kBitsPerByte;
+ // The bit size of an entry.
+ size_t map_entry_size_in_bits = SingleEntrySizeInBits(number_of_location_catalog_entries);
+ // The bit offset where `index_in_dex_register_map` is located.
+ size_t entry_offset_in_bits =
+ map_locations_offset_in_bits + index_in_dex_register_map * map_entry_size_in_bits;
+ region_.StoreBits(entry_offset_in_bits, location_catalog_entry_index, map_entry_size_in_bits);
+ }
+
+ void SetLiveBitMask(uint16_t number_of_dex_registers,
+ const BitVector& live_dex_registers_mask) {
+ size_t live_bit_mask_offset_in_bits = GetLiveBitMaskOffset() * kBitsPerByte;
+ for (uint16_t i = 0; i < number_of_dex_registers; ++i) {
+ region_.StoreBit(live_bit_mask_offset_in_bits + i, live_dex_registers_mask.IsBitSet(i));
+ }
+ }
+
+ bool IsDexRegisterLive(uint16_t dex_register_number) const {
+ size_t live_bit_mask_offset_in_bits = GetLiveBitMaskOffset() * kBitsPerByte;
+ return region_.LoadBit(live_bit_mask_offset_in_bits + dex_register_number);
+ }
+
+ size_t GetNumberOfLiveDexRegisters(uint16_t number_of_dex_registers) const {
+ size_t number_of_live_dex_registers = 0;
+ for (size_t i = 0; i < number_of_dex_registers; ++i) {
+ if (IsDexRegisterLive(i)) {
+ ++number_of_live_dex_registers;
+ }
+ }
+ return number_of_live_dex_registers;
+ }
+
+ static size_t GetLiveBitMaskOffset() {
+ return kFixedSize;
+ }
+
+ // Compute the size of the live register bit mask (in bytes), for a
+ // method having `number_of_dex_registers` Dex registers.
+ static size_t GetLiveBitMaskSize(uint16_t number_of_dex_registers) {
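+    // E.g. a method with 17 Dex registers needs RoundUp(17, 8) / 8 = 3 bytes.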
+ return RoundUp(number_of_dex_registers, kBitsPerByte) / kBitsPerByte;
+ }
+
+ static size_t GetLocationMappingDataOffset(uint16_t number_of_dex_registers) {
+ return GetLiveBitMaskOffset() + GetLiveBitMaskSize(number_of_dex_registers);
+ }
+
+ size_t GetLocationMappingDataSize(uint16_t number_of_dex_registers,
+ size_t number_of_location_catalog_entries) const {
+ size_t location_mapping_data_size_in_bits =
+ GetNumberOfLiveDexRegisters(number_of_dex_registers)
+ * SingleEntrySizeInBits(number_of_location_catalog_entries);
+ return RoundUp(location_mapping_data_size_in_bits, kBitsPerByte) / kBitsPerByte;
+ }
+
+ // Return the size of a map entry in bits. Note that if
+ // `number_of_location_catalog_entries` equals 1, this function returns 0,
+ // which is fine, as there is no need to allocate a map for a
+ // single-entry location catalog; the only valid location catalog entry index
+ // for a live register in this case is 0 and there is no need to
+ // store it.
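+  // For example, a 5-entry catalog yields entries of
+  // WhichPowerOf2(RoundUpToPowerOfTwo(5)) = WhichPowerOf2(8) = 3 bits.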
+ static size_t SingleEntrySizeInBits(size_t number_of_location_catalog_entries) {
+ // Handle the case of 0, as we cannot pass 0 to art::WhichPowerOf2.
+ return number_of_location_catalog_entries == 0
+ ? 0u
+ : WhichPowerOf2(RoundUpToPowerOfTwo(number_of_location_catalog_entries));
+ }
+
+ // Return the size of the DexRegisterMap object, in bytes.
+ size_t Size() const {
+ return region_.size();
+ }
+
+ private:
+ // Return the index in the Dex register map corresponding to the Dex
+ // register number `dex_register_number`.
+ size_t GetIndexInDexRegisterMap(uint16_t dex_register_number) const {
+ if (!IsDexRegisterLive(dex_register_number)) {
+ return kInvalidIndexInDexRegisterMap;
+ }
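+    // Counting the live registers among [0, dex_register_number) yields
+    // the index of `dex_register_number` in the compressed map, hence
+    // the (perhaps surprising) argument passed below.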
+ return GetNumberOfLiveDexRegisters(dex_register_number);
+ }
+
+ // Special (invalid) Dex register map entry index meaning that there
+ // is no index in the map for a given Dex register (i.e., it must
+ // have been mapped to a DexRegisterLocation::Kind::kNone location).
+ static constexpr size_t kInvalidIndexInDexRegisterMap = -1;
+
+ static constexpr int kFixedSize = 0;
+
+ MemoryRegion region_;
+
+ friend class CodeInfo;
+ friend class StackMapStream;
+};
+
/**
* A Stack Map holds compilation information for a specific PC necessary for:
* - Mapping it to a dex PC,
@@ -516,7 +673,8 @@ class DexRegisterMap {
* - Knowing the values of dex registers.
*
* The information is of the form:
- * [dex_pc, native_pc_offset, dex_register_map_offset, inlining_info_offset, register_mask, stack_mask].
+ * [dex_pc, native_pc_offset, dex_register_map_offset, inlining_info_offset, register_mask,
+ * stack_mask].
*
* Note that register_mask is fixed size, but stack_mask is variable size, depending on the
* stack size of a method.
@@ -568,49 +726,32 @@ class StackMap {
}
static size_t ComputeStackMapSize(size_t stack_mask_size,
- bool has_inline_info,
- bool is_small_inline_info,
- bool is_small_dex_map,
- bool is_small_dex_pc,
- bool is_small_native_pc);
-
- static size_t ComputeStackMapSize(size_t stack_mask_size,
size_t inline_info_size,
size_t dex_register_map_size,
size_t dex_pc_max,
- size_t native_pc_max);
-
- // TODO: Revisit this abstraction if we allow 3 bytes encoding.
- typedef uint8_t kSmallEncoding;
- typedef uint32_t kLargeEncoding;
- static constexpr size_t kBytesForSmallEncoding = sizeof(kSmallEncoding);
- static constexpr size_t kBitsForSmallEncoding = kBitsPerByte * kBytesForSmallEncoding;
- static constexpr size_t kBytesForLargeEncoding = sizeof(kLargeEncoding);
- static constexpr size_t kBitsForLargeEncoding = kBitsPerByte * kBytesForLargeEncoding;
+ size_t native_pc_max,
+ size_t register_mask_max);
// Special (invalid) offset for the DexRegisterMapOffset field meaning
// that there is no Dex register map for this stack map.
static constexpr uint32_t kNoDexRegisterMap = -1;
- static constexpr uint32_t kNoDexRegisterMapSmallEncoding =
- std::numeric_limits<kSmallEncoding>::max();
// Special (invalid) offset for the InlineDescriptorOffset field meaning
// that there is no inline info for this stack map.
static constexpr uint32_t kNoInlineInfo = -1;
- static constexpr uint32_t kNoInlineInfoSmallEncoding =
- std::numeric_limits<kSmallEncoding>::max();
-
- // Returns the number of bytes needed for an entry in the StackMap.
- static size_t NumberOfBytesForEntry(bool small_encoding) {
- return small_encoding ? kBytesForSmallEncoding : kBytesForLargeEncoding;
- }
private:
+ static size_t ComputeStackMapSizeInternal(size_t stack_mask_size,
+ size_t number_of_bytes_for_inline_info,
+ size_t number_of_bytes_for_dex_map,
+ size_t number_of_bytes_for_dex_pc,
+ size_t number_of_bytes_for_native_pc,
+ size_t number_of_bytes_for_register_mask);
+
// TODO: Instead of plain types such as "uint32_t", introduce
// typedefs (and document the memory layout of StackMap).
static constexpr int kRegisterMaskOffset = 0;
- static constexpr int kFixedSize = kRegisterMaskOffset + sizeof(uint32_t);
- static constexpr int kStackMaskOffset = kFixedSize;
+ static constexpr int kFixedSize = 0;
MemoryRegion region_;
@@ -622,7 +763,8 @@ class StackMap {
/**
* Wrapper around all compiler information collected for a method.
* The information is of the form:
- * [overall_size, number_of_stack_maps, stack_mask_size, StackMap+, DexRegisterInfo+, InlineInfo*].
+ * [overall_size, number_of_location_catalog_entries, number_of_stack_maps, stack_mask_size,
+ * DexRegisterLocationCatalog+, StackMap+, DexRegisterMap+, InlineInfo*].
*/
class CodeInfo {
public:
@@ -633,50 +775,77 @@ class CodeInfo {
region_ = MemoryRegion(const_cast<void*>(data), size);
}
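+  // Return the number of bytes needed to encode any value up to
+  // `max_element`, e.g. 0 needs 0 bytes and 0x12345 needs 3 bytes.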
+ static size_t EncodingSizeInBytes(size_t max_element) {
+ DCHECK(IsUint<32>(max_element));
+ return (max_element == 0) ? 0
+ : IsUint<8>(max_element) ? 1
+ : IsUint<16>(max_element) ? 2
+ : IsUint<24>(max_element) ? 3
+ : 4;
+ }
+
void SetEncoding(size_t inline_info_size,
size_t dex_register_map_size,
size_t dex_pc_max,
- size_t native_pc_max) {
+ size_t native_pc_max,
+ size_t register_mask_max) {
if (inline_info_size != 0) {
region_.StoreBit(kHasInlineInfoBitOffset, 1);
- region_.StoreBit(kHasSmallInlineInfoBitOffset, IsUint<StackMap::kBitsForSmallEncoding>(
- // + 1 to also encode kNoInlineInfo: if an inline info offset
- // is at 0xFF, we want to overflow to a larger encoding, because it will
- // conflict with kNoInlineInfo.
- // The offset is relative to the dex register map. TODO: Change this.
- inline_info_size + dex_register_map_size + 1));
+ // + 1 to also encode kNoInlineInfo: if an inline info offset
+ // is at 0xFF, we want to overflow to a larger encoding, because it will
+ // conflict with kNoInlineInfo.
+ // The offset is relative to the dex register map. TODO: Change this.
+ SetEncodingAt(kInlineInfoBitOffset,
+ EncodingSizeInBytes(dex_register_map_size + inline_info_size + 1));
} else {
region_.StoreBit(kHasInlineInfoBitOffset, 0);
- region_.StoreBit(kHasSmallInlineInfoBitOffset, 0);
+ SetEncodingAt(kInlineInfoBitOffset, 0);
}
- region_.StoreBit(kHasSmallDexRegisterMapBitOffset,
- // + 1 to also encode kNoDexRegisterMap: if a dex register map offset
- // is at 0xFF, we want to overflow to a larger encoding, because it will
- // conflict with kNoDexRegisterMap.
- IsUint<StackMap::kBitsForSmallEncoding>(dex_register_map_size + 1));
- region_.StoreBit(kHasSmallDexPcBitOffset, IsUint<StackMap::kBitsForSmallEncoding>(dex_pc_max));
- region_.StoreBit(kHasSmallNativePcBitOffset,
- IsUint<StackMap::kBitsForSmallEncoding>(native_pc_max));
+ // + 1 to also encode kNoDexRegisterMap: if a dex register map offset
+ // is at 0xFF, we want to overflow to a larger encoding, because it will
+ // conflict with kNoDexRegisterMap.
+ SetEncodingAt(kDexRegisterMapBitOffset, EncodingSizeInBytes(dex_register_map_size + 1));
+ SetEncodingAt(kDexPcBitOffset, EncodingSizeInBytes(dex_pc_max));
+ SetEncodingAt(kNativePcBitOffset, EncodingSizeInBytes(native_pc_max));
+ SetEncodingAt(kRegisterMaskBitOffset, EncodingSizeInBytes(register_mask_max));
+ }
+
+ void SetEncodingAt(size_t bit_offset, size_t number_of_bytes) {
+    // We encode on 3 bits the number of bytes needed to write a value,
+    // as values are known to fit in 32 bits (i.e. in at most 4 bytes).
+ region_.StoreBit(bit_offset, (number_of_bytes & 1));
+ region_.StoreBit(bit_offset + 1, (number_of_bytes & 2));
+ region_.StoreBit(bit_offset + 2, (number_of_bytes & 4));
+ }
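+  // E.g. SetEncodingAt(offset, 3) stores bits (1, 1, 0) at `offset`;
+  // GetNumberOfBytesForEncoding below reads back 1 + (1 << 1) + (0 << 2) = 3.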
+
+ size_t GetNumberOfBytesForEncoding(size_t bit_offset) const {
+ return region_.LoadBit(bit_offset)
+ + (region_.LoadBit(bit_offset + 1) << 1)
+ + (region_.LoadBit(bit_offset + 2) << 2);
}
bool HasInlineInfo() const {
return region_.LoadBit(kHasInlineInfoBitOffset);
}
- bool HasSmallInlineInfo() const {
- return region_.LoadBit(kHasSmallInlineInfoBitOffset);
+ size_t NumberOfBytesForInlineInfo() const {
+ return GetNumberOfBytesForEncoding(kInlineInfoBitOffset);
}
- bool HasSmallDexRegisterMap() const {
- return region_.LoadBit(kHasSmallDexRegisterMapBitOffset);
+ size_t NumberOfBytesForDexRegisterMap() const {
+ return GetNumberOfBytesForEncoding(kDexRegisterMapBitOffset);
}
- bool HasSmallNativePc() const {
- return region_.LoadBit(kHasSmallNativePcBitOffset);
+ size_t NumberOfBytesForRegisterMask() const {
+ return GetNumberOfBytesForEncoding(kRegisterMaskBitOffset);
}
- bool HasSmallDexPc() const {
- return region_.LoadBit(kHasSmallDexPcBitOffset);
+ size_t NumberOfBytesForNativePc() const {
+ return GetNumberOfBytesForEncoding(kNativePcBitOffset);
+ }
+
+ size_t NumberOfBytesForDexPc() const {
+ return GetNumberOfBytesForEncoding(kDexPcBitOffset);
}
size_t ComputeStackMapRegisterMaskOffset() const {
@@ -684,7 +853,8 @@ class CodeInfo {
}
size_t ComputeStackMapStackMaskOffset() const {
- return StackMap::kStackMaskOffset;
+ return ComputeStackMapRegisterMaskOffset()
+ + (NumberOfBytesForRegisterMask() * sizeof(uint8_t));
}
size_t ComputeStackMapDexPcOffset() const {
@@ -693,18 +863,28 @@ class CodeInfo {
size_t ComputeStackMapNativePcOffset() const {
return ComputeStackMapDexPcOffset()
- + (HasSmallDexPc() ? sizeof(uint8_t) : sizeof(uint32_t));
+ + (NumberOfBytesForDexPc() * sizeof(uint8_t));
}
size_t ComputeStackMapDexRegisterMapOffset() const {
return ComputeStackMapNativePcOffset()
- + (HasSmallNativePc() ? sizeof(uint8_t) : sizeof(uint32_t));
+ + (NumberOfBytesForNativePc() * sizeof(uint8_t));
}
size_t ComputeStackMapInlineInfoOffset() const {
CHECK(HasInlineInfo());
return ComputeStackMapDexRegisterMapOffset()
- + (HasSmallDexRegisterMap() ? sizeof(uint8_t) : sizeof(uint32_t));
+ + (NumberOfBytesForDexRegisterMap() * sizeof(uint8_t));
+ }
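+  // A stack map entry is thus laid out as
+  //   [register_mask, stack_mask, dex_pc, native_pc_offset,
+  //    dex_register_map_offset, (inline_info_offset)],
+  // where every field but stack_mask is sized by the encoding above.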
+
+ uint32_t GetDexRegisterLocationCatalogOffset() const {
+ return kFixedSize;
+ }
+
+ DexRegisterLocationCatalog GetDexRegisterLocationCatalog() const {
+ return DexRegisterLocationCatalog(region_.Subregion(
+ GetDexRegisterLocationCatalogOffset(),
+ GetDexRegisterLocationCatalogSize()));
}
StackMap GetStackMapAt(size_t i) const {
@@ -720,6 +900,19 @@ class CodeInfo {
region_.StoreUnaligned<uint32_t>(kOverallSizeOffset, size);
}
+ uint32_t GetNumberOfDexRegisterLocationCatalogEntries() const {
+ return region_.LoadUnaligned<uint32_t>(kNumberOfDexRegisterLocationCatalogEntriesOffset);
+ }
+
+ void SetNumberOfDexRegisterLocationCatalogEntries(uint32_t num_entries) {
+ region_.StoreUnaligned<uint32_t>(kNumberOfDexRegisterLocationCatalogEntriesOffset, num_entries);
+ }
+
+ uint32_t GetDexRegisterLocationCatalogSize() const {
+ return ComputeDexRegisterLocationCatalogSize(GetDexRegisterLocationCatalogOffset(),
+ GetNumberOfDexRegisterLocationCatalogEntries());
+ }
+
uint32_t GetStackMaskSize() const {
return region_.LoadUnaligned<uint32_t>(kStackMaskSizeOffset);
}
@@ -739,31 +932,31 @@ class CodeInfo {
// Get the size of one stack map of this CodeInfo object, in bytes.
// All stack maps of a CodeInfo have the same size.
size_t StackMapSize() const {
- return StackMap::ComputeStackMapSize(GetStackMaskSize(),
- HasInlineInfo(),
- HasSmallInlineInfo(),
- HasSmallDexRegisterMap(),
- HasSmallDexPc(),
- HasSmallNativePc());
+ return StackMap::ComputeStackMapSizeInternal(GetStackMaskSize(),
+ NumberOfBytesForInlineInfo(),
+ NumberOfBytesForDexRegisterMap(),
+ NumberOfBytesForDexPc(),
+ NumberOfBytesForNativePc(),
+ NumberOfBytesForRegisterMask());
}
   // Get the size of all the stack maps of this CodeInfo object, in bytes.
- size_t StackMapsSize() const {
+ size_t GetStackMapsSize() const {
return StackMapSize() * GetNumberOfStackMaps();
}
size_t GetDexRegisterMapsOffset() const {
- return CodeInfo::kFixedSize + StackMapsSize();
+ return GetStackMapsOffset() + GetStackMapsSize();
}
uint32_t GetStackMapsOffset() const {
- return kFixedSize;
+ return GetDexRegisterLocationCatalogOffset() + GetDexRegisterLocationCatalogSize();
}
DexRegisterMap GetDexRegisterMapOf(StackMap stack_map, uint32_t number_of_dex_registers) const {
DCHECK(stack_map.HasDexRegisterMap(*this));
- uint32_t offset = stack_map.GetDexRegisterMapOffset(*this) + GetDexRegisterMapsOffset();
- size_t size = ComputeDexRegisterMapSize(offset, number_of_dex_registers);
+ uint32_t offset = GetDexRegisterMapsOffset() + stack_map.GetDexRegisterMapOffset(*this);
+ size_t size = ComputeDexRegisterMapSizeOf(offset, number_of_dex_registers);
return DexRegisterMap(region_.Subregion(offset, size));
}
@@ -806,50 +999,73 @@ class CodeInfo {
// typedefs (and document the memory layout of CodeInfo).
static constexpr int kOverallSizeOffset = 0;
static constexpr int kEncodingInfoOffset = kOverallSizeOffset + sizeof(uint32_t);
- static constexpr int kNumberOfStackMapsOffset = kEncodingInfoOffset + sizeof(uint8_t);
+ static constexpr int kNumberOfDexRegisterLocationCatalogEntriesOffset =
+ kEncodingInfoOffset + sizeof(uint16_t);
+ static constexpr int kNumberOfStackMapsOffset =
+ kNumberOfDexRegisterLocationCatalogEntriesOffset + sizeof(uint32_t);
static constexpr int kStackMaskSizeOffset = kNumberOfStackMapsOffset + sizeof(uint32_t);
static constexpr int kFixedSize = kStackMaskSizeOffset + sizeof(uint32_t);
static constexpr int kHasInlineInfoBitOffset = (kEncodingInfoOffset * kBitsPerByte);
- static constexpr int kHasSmallInlineInfoBitOffset = kHasInlineInfoBitOffset + 1;
- static constexpr int kHasSmallDexRegisterMapBitOffset = kHasSmallInlineInfoBitOffset + 1;
- static constexpr int kHasSmallDexPcBitOffset = kHasSmallDexRegisterMapBitOffset + 1;
- static constexpr int kHasSmallNativePcBitOffset = kHasSmallDexPcBitOffset + 1;
+ static constexpr int kInlineInfoBitOffset = kHasInlineInfoBitOffset + 1;
+ static constexpr int kDexRegisterMapBitOffset = kInlineInfoBitOffset + 3;
+ static constexpr int kDexPcBitOffset = kDexRegisterMapBitOffset + 3;
+ static constexpr int kNativePcBitOffset = kDexPcBitOffset + 3;
+ static constexpr int kRegisterMaskBitOffset = kNativePcBitOffset + 3;
MemoryRegion GetStackMaps() const {
return region_.size() == 0
? MemoryRegion()
- : region_.Subregion(kFixedSize, StackMapsSize());
- }
-
- // Compute the size of a Dex register map starting at offset `origin` in
- // `region_` and containing `number_of_dex_registers` locations.
- size_t ComputeDexRegisterMapSize(uint32_t origin, uint32_t number_of_dex_registers) const {
- // TODO: Ideally, we would like to use art::DexRegisterMap::Size or
- // art::DexRegisterMap::FindLocationOffset, but the DexRegisterMap is not
- // yet built. Try to factor common code.
- size_t offset =
- origin + DexRegisterMap::GetDexRegisterMapLocationsOffset(number_of_dex_registers);
-
- // Create a temporary DexRegisterMap to be able to call DexRegisterMap.IsDexRegisterLive.
- DexRegisterMap only_live_mask(MemoryRegion(region_.Subregion(origin, offset - origin)));
-
- // Skip the first `number_of_dex_registers - 1` entries.
- for (uint16_t i = 0; i < number_of_dex_registers; ++i) {
- if (only_live_mask.IsDexRegisterLive(i)) {
- // Read the first next byte and inspect its first 3 bits to decide
- // whether it is a short or a large location.
- DexRegisterMap::ShortLocation first_byte =
- region_.LoadUnaligned<DexRegisterMap::ShortLocation>(offset);
- DexRegisterLocation::Kind kind =
- DexRegisterMap::ExtractKindFromShortLocation(first_byte);
- if (DexRegisterLocation::IsShortLocationKind(kind)) {
- // Short location. Skip the current byte.
- offset += DexRegisterMap::SingleShortEntrySize();
- } else {
- // Large location. Skip the 5 next bytes.
- offset += DexRegisterMap::SingleLargeEntrySize();
- }
+ : region_.Subregion(GetStackMapsOffset(), GetStackMapsSize());
+ }
+
+  // Compute the size of the Dex register map associated with the stack map
+  // at `dex_register_map_offset_in_code_info`.
+ size_t ComputeDexRegisterMapSizeOf(uint32_t dex_register_map_offset_in_code_info,
+ uint16_t number_of_dex_registers) const {
+ // Offset where the actual mapping data starts within art::DexRegisterMap.
+ size_t location_mapping_data_offset_in_dex_register_map =
+ DexRegisterMap::GetLocationMappingDataOffset(number_of_dex_registers);
+    // Create a temporary art::DexRegisterMap to be able to call
+    // art::DexRegisterMap::GetNumberOfLiveDexRegisters on it.
+ DexRegisterMap dex_register_map_without_locations(
+ MemoryRegion(region_.Subregion(dex_register_map_offset_in_code_info,
+ location_mapping_data_offset_in_dex_register_map)));
+ size_t number_of_live_dex_registers =
+ dex_register_map_without_locations.GetNumberOfLiveDexRegisters(number_of_dex_registers);
+ size_t location_mapping_data_size_in_bits =
+ DexRegisterMap::SingleEntrySizeInBits(GetNumberOfDexRegisterLocationCatalogEntries())
+ * number_of_live_dex_registers;
+ size_t location_mapping_data_size_in_bytes =
+ RoundUp(location_mapping_data_size_in_bits, kBitsPerByte) / kBitsPerByte;
+ size_t dex_register_map_size =
+ location_mapping_data_offset_in_dex_register_map + location_mapping_data_size_in_bytes;
+ return dex_register_map_size;
+ }
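+  // Illustrative sizing: with 4 live registers out of 6 and a 5-entry
+  // catalog (3 bits per entry), the mapping data takes
+  // RoundUp(4 * 3, 8) / 8 = 2 bytes, following a 1-byte live bit mask.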
+
+ // Compute the size of a Dex register location catalog starting at offset `origin`
+ // in `region_` and containing `number_of_dex_locations` entries.
+ size_t ComputeDexRegisterLocationCatalogSize(uint32_t origin,
+ uint32_t number_of_dex_locations) const {
+ // TODO: Ideally, we would like to use art::DexRegisterLocationCatalog::Size or
+ // art::DexRegisterLocationCatalog::FindLocationOffset, but the
+ // DexRegisterLocationCatalog is not yet built. Try to factor common code.
+ size_t offset = origin + DexRegisterLocationCatalog::kFixedSize;
+
+    // Walk over all `number_of_dex_locations` entries to find the end of
+    // the catalog.
+    for (uint16_t i = 0; i < number_of_dex_locations; ++i) {
+      // Read the next byte and inspect its first 3 bits to decide
+      // whether it is a short or a large location.
+ DexRegisterLocationCatalog::ShortLocation first_byte =
+ region_.LoadUnaligned<DexRegisterLocationCatalog::ShortLocation>(offset);
+ DexRegisterLocation::Kind kind =
+ DexRegisterLocationCatalog::ExtractKindFromShortLocation(first_byte);
+ if (DexRegisterLocation::IsShortLocationKind(kind)) {
+ // Short location. Skip the current byte.
+ offset += DexRegisterLocationCatalog::SingleShortEntrySize();
+ } else {
+        // Large location. Skip the next 5 bytes.
+ offset += DexRegisterLocationCatalog::SingleLargeEntrySize();
}
}
size_t size = offset - origin;