summary | refs | log | tree | commit | diff | stats
path: root/runtime
diff options
context:
space:
mode:
authorBrian Carlstrom <bdc@google.com>2013-07-17 23:40:20 -0700
committerBrian Carlstrom <bdc@google.com>2013-07-18 00:12:43 -0700
commit0cd7ec2dcd8d7ba30bf3ca420b40dac52849876c (patch)
tree525a0f99f9381156367c988133b5d51d5dfef6f7 /runtime
parentf69863b3039fc621ff4250e262d2a024d5e79ec8 (diff)
downloadart-0cd7ec2dcd8d7ba30bf3ca420b40dac52849876c.zip
art-0cd7ec2dcd8d7ba30bf3ca420b40dac52849876c.tar.gz
art-0cd7ec2dcd8d7ba30bf3ca420b40dac52849876c.tar.bz2
Fix cpplint whitespace/blank_line issues
Change-Id: Ice937e95e23dd622c17054551d4ae4cebd0ef8a2
Diffstat (limited to 'runtime')
-rw-r--r--runtime/atomic_integer.h3
-rw-r--r--runtime/barrier.cc2
-rw-r--r--runtime/barrier_test.cc9
-rw-r--r--runtime/base/histogram-inl.h1
-rw-r--r--runtime/base/histogram.h1
-rw-r--r--runtime/base/timing_logger.h3
-rw-r--r--runtime/debugger.cc2
-rw-r--r--runtime/dex_method_iterator.h1
-rw-r--r--runtime/gc/accounting/heap_bitmap-inl.h1
-rw-r--r--runtime/gc/accounting/heap_bitmap.h1
-rw-r--r--runtime/gc/accounting/space_bitmap.cc4
-rw-r--r--runtime/gc/accounting/space_bitmap.h1
-rw-r--r--runtime/gc/collector/garbage_collector.h1
-rw-r--r--runtime/gc/space/image_space.h1
-rw-r--r--runtime/gc/space/large_object_space.cc8
-rw-r--r--runtime/gc/space/large_object_space.h2
-rw-r--r--runtime/image_test.cc1
-rw-r--r--runtime/interpreter/interpreter.cc1
-rw-r--r--runtime/jdwp/jdwp_handler.cc1
-rw-r--r--runtime/mirror/abstract_method.h8
-rw-r--r--runtime/mirror/class.cc4
-rw-r--r--runtime/oat/runtime/argument_visitor.h3
-rw-r--r--runtime/oat_file.cc1
-rw-r--r--runtime/runtime_support_llvm.cc2
-rw-r--r--runtime/runtime_support_llvm.h3
-rw-r--r--runtime/stack.h3
-rw-r--r--runtime/thread.cc4
-rw-r--r--runtime/thread_pool.cc13
-rw-r--r--runtime/thread_pool.h4
-rw-r--r--runtime/thread_pool_test.cc4
-rw-r--r--runtime/trace.h1
-rw-r--r--runtime/verifier/method_verifier.cc1
-rw-r--r--runtime/verifier/reg_type.h4
-rw-r--r--runtime/verifier/reg_type_test.cc4
-rw-r--r--runtime/verifier/register_line.cc1
35 files changed, 29 insertions, 75 deletions
diff --git a/runtime/atomic_integer.h b/runtime/atomic_integer.h
index 117e837..6711722 100644
--- a/runtime/atomic_integer.h
+++ b/runtime/atomic_integer.h
@@ -70,10 +70,11 @@ class AtomicInteger {
bool success = android_atomic_cas(expected_value, new_value, &value_) == 0;
return success;
}
+
private:
volatile int32_t value_;
};
-}
+} // namespace art
#endif // ART_RUNTIME_ATOMIC_INTEGER_H_
diff --git a/runtime/barrier.cc b/runtime/barrier.cc
index 250d468..a644998 100644
--- a/runtime/barrier.cc
+++ b/runtime/barrier.cc
@@ -60,4 +60,4 @@ Barrier::~Barrier() {
CHECK(!count_) << "Attempted to destroy barrier with non zero count";
}
-}
+} // namespace art
diff --git a/runtime/barrier_test.cc b/runtime/barrier_test.cc
index d26ae9e..298ae56 100644
--- a/runtime/barrier_test.cc
+++ b/runtime/barrier_test.cc
@@ -32,9 +32,7 @@ class CheckWaitTask : public Task {
: barrier_(barrier),
count1_(count1),
count2_(count2),
- count3_(count3) {
-
- }
+ count3_(count3) {}
void Run(Thread* self) {
LOG(INFO) << "Before barrier 1 " << *self;
@@ -50,6 +48,7 @@ class CheckWaitTask : public Task {
virtual void Finalize() {
delete this;
}
+
private:
Barrier* const barrier_;
AtomicInteger* const count1_;
@@ -100,9 +99,7 @@ class CheckPassTask : public Task {
CheckPassTask(Barrier* barrier, AtomicInteger* count, size_t subtasks)
: barrier_(barrier),
count_(count),
- subtasks_(subtasks) {
-
- }
+ subtasks_(subtasks) {}
void Run(Thread* self) {
for (size_t i = 0; i < subtasks_; ++i) {
diff --git a/runtime/base/histogram-inl.h b/runtime/base/histogram-inl.h
index bbca603..d572cf9 100644
--- a/runtime/base/histogram-inl.h
+++ b/runtime/base/histogram-inl.h
@@ -212,7 +212,6 @@ inline double Histogram<Value>::Percentile(double per) const {
DCHECK_GT(cumulative_perc_.size(), 0ull);
size_t idx, upper_idx = 0, lower_idx = 0;
for (idx = 0; idx < cumulative_perc_.size(); idx++) {
-
if (per <= cumulative_perc_[idx]) {
upper_idx = idx;
break;
diff --git a/runtime/base/histogram.h b/runtime/base/histogram.h
index dfb556b..33a1e65 100644
--- a/runtime/base/histogram.h
+++ b/runtime/base/histogram.h
@@ -30,7 +30,6 @@ namespace art {
// Designed to be simple and used with timing logger in art.
template <class Value> class Histogram {
-
const double kAdjust;
const Value kBucketWidth;
const size_t kInitialBucketCount;
diff --git a/runtime/base/timing_logger.h b/runtime/base/timing_logger.h
index 816cbea..0f00a04 100644
--- a/runtime/base/timing_logger.h
+++ b/runtime/base/timing_logger.h
@@ -50,9 +50,7 @@ namespace base {
} // namespace base
class CumulativeLogger {
-
public:
-
explicit CumulativeLogger(const std::string& name);
void prepare_stats();
~CumulativeLogger();
@@ -68,7 +66,6 @@ class CumulativeLogger {
void AddNewLogger(const base::NewTimingLogger& logger) LOCKS_EXCLUDED(lock_);
private:
-
void AddPair(const std::string &label, uint64_t delta_time)
EXCLUSIVE_LOCKS_REQUIRED(lock_);
void DumpHistogram(std::ostream &os) EXCLUSIVE_LOCKS_REQUIRED(lock_);
diff --git a/runtime/debugger.cc b/runtime/debugger.cc
index 5a31c87..b502c9a 100644
--- a/runtime/debugger.cc
+++ b/runtime/debugger.cc
@@ -155,7 +155,6 @@ class DebugInstrumentationListener : public instrumentation::InstrumentationList
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Dbg::PostException(thread, throw_location, catch_method, catch_dex_pc, exception_object);
}
-
} gDebugInstrumentationListener;
// JDWP is allowed unless the Zygote forbids it.
@@ -761,7 +760,6 @@ JDWP::JdwpError Dbg::GetContendedMonitor(JDWP::ObjectId thread_id, JDWP::ObjectI
JDWP::JdwpError Dbg::GetInstanceCounts(const std::vector<JDWP::RefTypeId>& class_ids,
std::vector<uint64_t>& counts)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
-
std::vector<mirror::Class*> classes;
counts.clear();
for (size_t i = 0; i < class_ids.size(); ++i) {
diff --git a/runtime/dex_method_iterator.h b/runtime/dex_method_iterator.h
index e915d77..1975e48 100644
--- a/runtime/dex_method_iterator.h
+++ b/runtime/dex_method_iterator.h
@@ -120,7 +120,6 @@ class DexMethodIterator {
}
private:
-
ClassDataItemIterator& GetIterator() const {
CHECK(it_.get() != NULL);
return *it_.get();
diff --git a/runtime/gc/accounting/heap_bitmap-inl.h b/runtime/gc/accounting/heap_bitmap-inl.h
index 7622604..5edea95 100644
--- a/runtime/gc/accounting/heap_bitmap-inl.h
+++ b/runtime/gc/accounting/heap_bitmap-inl.h
@@ -40,7 +40,6 @@ inline void HeapBitmap::Visit(const Visitor& visitor) {
SpaceSetMap* set = *it;
set->Visit(visitor);
}
-
}
} // namespace accounting
diff --git a/runtime/gc/accounting/heap_bitmap.h b/runtime/gc/accounting/heap_bitmap.h
index f4b725c..1710579 100644
--- a/runtime/gc/accounting/heap_bitmap.h
+++ b/runtime/gc/accounting/heap_bitmap.h
@@ -106,7 +106,6 @@ class HeapBitmap {
explicit HeapBitmap(Heap* heap) : heap_(heap) {}
private:
-
const Heap* const heap_;
void AddContinuousSpaceBitmap(SpaceBitmap* bitmap);
diff --git a/runtime/gc/accounting/space_bitmap.cc b/runtime/gc/accounting/space_bitmap.cc
index 19f1128..6edc067 100644
--- a/runtime/gc/accounting/space_bitmap.cc
+++ b/runtime/gc/accounting/space_bitmap.cc
@@ -64,9 +64,7 @@ SpaceBitmap* SpaceBitmap::Create(const std::string& name, byte* heap_begin, size
}
// Clean up any resources associated with the bitmap.
-SpaceBitmap::~SpaceBitmap() {
-
-}
+SpaceBitmap::~SpaceBitmap() {}
void SpaceBitmap::SetHeapLimit(uintptr_t new_end) {
DCHECK(IsAligned<kBitsPerWord * kAlignment>(new_end));
diff --git a/runtime/gc/accounting/space_bitmap.h b/runtime/gc/accounting/space_bitmap.h
index 5a1bfe3..bf4c1ed 100644
--- a/runtime/gc/accounting/space_bitmap.h
+++ b/runtime/gc/accounting/space_bitmap.h
@@ -174,6 +174,7 @@ class SpaceBitmap {
const size_t index = OffsetToIndex(offset);
return &bitmap_begin_[index];
}
+
private:
// TODO: heap_end_ is initialized so that the heap bitmap is empty, this doesn't require the -1,
// however, we document that this is expected on heap_end_
diff --git a/runtime/gc/collector/garbage_collector.h b/runtime/gc/collector/garbage_collector.h
index a22faac..1684664 100644
--- a/runtime/gc/collector/garbage_collector.h
+++ b/runtime/gc/collector/garbage_collector.h
@@ -79,7 +79,6 @@ class GarbageCollector {
void SwapBitmaps() EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
protected:
-
// The initial phase. Done without mutators paused.
virtual void InitializePhase() = 0;
diff --git a/runtime/gc/space/image_space.h b/runtime/gc/space/image_space.h
index fde2b41..bdda9fa 100644
--- a/runtime/gc/space/image_space.h
+++ b/runtime/gc/space/image_space.h
@@ -78,7 +78,6 @@ class ImageSpace : public MemMapSpace {
void Dump(std::ostream& os) const;
private:
-
// Tries to initialize an ImageSpace from the given image path,
// returning NULL on error.
//
diff --git a/runtime/gc/space/large_object_space.cc b/runtime/gc/space/large_object_space.cc
index f7d776f..6aedd9c 100644
--- a/runtime/gc/space/large_object_space.cc
+++ b/runtime/gc/space/large_object_space.cc
@@ -49,9 +49,7 @@ void LargeObjectSpace::CopyLiveToMarked() {
LargeObjectMapSpace::LargeObjectMapSpace(const std::string& name)
: LargeObjectSpace(name),
- lock_("large object map space lock", kAllocSpaceLock) {
-
-}
+ lock_("large object map space lock", kAllocSpaceLock) {}
LargeObjectMapSpace* LargeObjectMapSpace::Create(const std::string& name) {
return new LargeObjectMapSpace(name);
@@ -147,9 +145,7 @@ FreeListSpace::FreeListSpace(const std::string& name, MemMap* mem_map, byte* beg
AddFreeChunk(begin_, end_ - begin_, NULL);
}
-FreeListSpace::~FreeListSpace() {
-
-}
+FreeListSpace::~FreeListSpace() {}
void FreeListSpace::AddFreeChunk(void* address, size_t size, Chunk* previous) {
Chunk* chunk = ChunkFromAddr(address);
diff --git a/runtime/gc/space/large_object_space.h b/runtime/gc/space/large_object_space.h
index db845db..20a4867 100644
--- a/runtime/gc/space/large_object_space.h
+++ b/runtime/gc/space/large_object_space.h
@@ -60,7 +60,6 @@ class LargeObjectSpace : public DiscontinuousSpace, public AllocSpace {
size_t FreeList(Thread* self, size_t num_ptrs, mirror::Object** ptrs);
protected:
-
explicit LargeObjectSpace(const std::string& name);
// Approximate number of bytes which have been allocated into the space.
@@ -165,6 +164,7 @@ class FreeListSpace : public LargeObjectSpace {
DCHECK(m_previous == NULL ||
(m_previous != NULL && m_previous + m_previous->GetSize() / kAlignment == this));
}
+
private:
size_t m_size;
Chunk* m_previous;
diff --git a/runtime/image_test.cc b/runtime/image_test.cc
index 9ab1d74..ee50118 100644
--- a/runtime/image_test.cc
+++ b/runtime/image_test.cc
@@ -31,7 +31,6 @@
namespace art {
class ImageTest : public CommonTest {
-
protected:
virtual void SetUp() {
ReserveImageSpace();
diff --git a/runtime/interpreter/interpreter.cc b/runtime/interpreter/interpreter.cc
index 2fb272c..45314c2 100644
--- a/runtime/interpreter/interpreter.cc
+++ b/runtime/interpreter/interpreter.cc
@@ -366,7 +366,6 @@ static void InterpreterJni(Thread* self, AbstractMethod* method, StringPiece sho
{
ScopedThreadStateChange tsc(self, kNative);
jresult = fn(soa.Env(), rcvr.get(), arg0.get());
-
}
result->SetL(soa.Decode<Object*>(jresult));
ScopedThreadStateChange tsc(self, kNative);
diff --git a/runtime/jdwp/jdwp_handler.cc b/runtime/jdwp/jdwp_handler.cc
index 8ef146c..e141496 100644
--- a/runtime/jdwp/jdwp_handler.cc
+++ b/runtime/jdwp/jdwp_handler.cc
@@ -361,7 +361,6 @@ static JdwpError VM_Capabilities(JdwpState*, Request&, ExpandBuf* reply)
static JdwpError VM_CapabilitiesNew(JdwpState*, Request& request, ExpandBuf* reply)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
-
// The first few capabilities are the same as those reported by the older call.
VM_Capabilities(NULL, request, reply);
diff --git a/runtime/mirror/abstract_method.h b/runtime/mirror/abstract_method.h
index d909058..bbebece 100644
--- a/runtime/mirror/abstract_method.h
+++ b/runtime/mirror/abstract_method.h
@@ -497,13 +497,9 @@ class MANAGED AbstractMethod : public Object {
DISALLOW_IMPLICIT_CONSTRUCTORS(AbstractMethod);
};
-class MANAGED Method : public AbstractMethod {
+class MANAGED Method : public AbstractMethod {};
-};
-
-class MANAGED Constructor : public AbstractMethod {
-
-};
+class MANAGED Constructor : public AbstractMethod {};
class MANAGED AbstractMethodClass : public Class {
private:
diff --git a/runtime/mirror/class.cc b/runtime/mirror/class.cc
index 2d2130c..e490d97 100644
--- a/runtime/mirror/class.cc
+++ b/runtime/mirror/class.cc
@@ -320,13 +320,11 @@ bool Class::IsFieldClass() const {
Class* java_lang_Class = GetClass();
Class* java_lang_reflect_Field = java_lang_Class->GetInstanceField(0)->GetClass();
return this == java_lang_reflect_Field;
-
}
bool Class::IsMethodClass() const {
return (this == AbstractMethod::GetMethodClass()) ||
- (this == AbstractMethod::GetConstructorClass());
-
+ (this == AbstractMethod::GetConstructorClass());
}
void Class::SetClassLoader(ClassLoader* new_class_loader) {
diff --git a/runtime/oat/runtime/argument_visitor.h b/runtime/oat/runtime/argument_visitor.h
index d92ff19..aaf93f7 100644
--- a/runtime/oat/runtime/argument_visitor.h
+++ b/runtime/oat/runtime/argument_visitor.h
@@ -199,7 +199,6 @@ class QuickArgumentVisitor {
uint64_t low_half = *reinterpret_cast<uint32_t*>(GetParamAddress());
uint64_t high_half = *reinterpret_cast<uint32_t*>(stack_args_);
return (low_half & 0xffffffffULL) | (high_half << 32);
-
}
void VisitArguments() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
@@ -244,6 +243,6 @@ class QuickArgumentVisitor {
bool is_split_long_or_double_;
};
-}
+} // namespace art
#endif // ART_RUNTIME_OAT_RUNTIME_ARGUMENT_VISITOR_H_
diff --git a/runtime/oat_file.cc b/runtime/oat_file.cc
index bb8341e..6562633 100644
--- a/runtime/oat_file.cc
+++ b/runtime/oat_file.cc
@@ -122,7 +122,6 @@ OatFile::~OatFile() {
}
bool OatFile::Dlopen(const std::string& elf_filename, byte* requested_base) {
-
char* absolute_path = realpath(elf_filename.c_str(), NULL);
if (absolute_path == NULL) {
return false;
diff --git a/runtime/runtime_support_llvm.cc b/runtime/runtime_support_llvm.cc
index cbdefe8..d703db2 100644
--- a/runtime/runtime_support_llvm.cc
+++ b/runtime/runtime_support_llvm.cc
@@ -50,7 +50,6 @@
using namespace art;
extern "C" {
-
class ShadowFrameCopyVisitor : public StackVisitor {
public:
explicit ShadowFrameCopyVisitor(Thread* self) : StackVisitor(self, NULL), prev_frame_(NULL),
@@ -844,5 +843,4 @@ void art_portable_proxy_invoke_handler_from_code(mirror::AbstractMethod* proxy_m
void art_portable_constructor_barrier() {
LOG(FATAL) << "Implemented by IRBuilder.";
}
-
} // extern "C"
diff --git a/runtime/runtime_support_llvm.h b/runtime/runtime_support_llvm.h
index 566f7bc..43ea953 100644
--- a/runtime/runtime_support_llvm.h
+++ b/runtime/runtime_support_llvm.h
@@ -18,13 +18,10 @@
#define ART_RUNTIME_RUNTIME_SUPPORT_LLVM_H_
extern "C" {
-
//----------------------------------------------------------------------------
// Runtime Support Function Lookup Callback
//----------------------------------------------------------------------------
-
void* art_portable_find_runtime_support_func(void* context, const char* name);
-
} // extern "C"
#endif // ART_RUNTIME_RUNTIME_SUPPORT_LLVM_H_
diff --git a/runtime/stack.h b/runtime/stack.h
index 0e2c4c5..99ba898 100644
--- a/runtime/stack.h
+++ b/runtime/stack.h
@@ -554,7 +554,6 @@ class StackVisitor {
static void DescribeStack(Thread* thread) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
private:
-
instrumentation::InstrumentationStackFrame GetInstrumentationStackFrame(uint32_t depth) const;
void SanityCheckFrame() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -567,6 +566,7 @@ class StackVisitor {
size_t num_frames_;
// Depth of the frame we're currently at.
size_t cur_depth_;
+
protected:
Context* const context_;
};
@@ -638,6 +638,7 @@ class VmapTable {
spill_shifts--; // wind back one as we want the last match
return spill_shifts;
}
+
private:
const uint16_t* table_;
};
diff --git a/runtime/thread.cc b/runtime/thread.cc
index dd55195..a1fb862 100644
--- a/runtime/thread.cc
+++ b/runtime/thread.cc
@@ -2104,9 +2104,7 @@ class ReferenceMapVisitor : public StackVisitor {
class RootCallbackVisitor {
public:
- RootCallbackVisitor(RootVisitor* visitor, void* arg) : visitor_(visitor), arg_(arg) {
-
- }
+ RootCallbackVisitor(RootVisitor* visitor, void* arg) : visitor_(visitor), arg_(arg) {}
void operator()(const mirror::Object* obj, size_t, const StackVisitor*) const {
visitor_(obj, arg_);
diff --git a/runtime/thread_pool.cc b/runtime/thread_pool.cc
index 784a7ca..067ef2d 100644
--- a/runtime/thread_pool.cc
+++ b/runtime/thread_pool.cc
@@ -180,10 +180,7 @@ size_t ThreadPool::GetTaskCount(Thread* self) {
WorkStealingWorker::WorkStealingWorker(ThreadPool* thread_pool, const std::string& name,
size_t stack_size)
- : ThreadPoolWorker(thread_pool, name, stack_size),
- task_(NULL) {
-
-}
+ : ThreadPoolWorker(thread_pool, name, stack_size), task_(NULL) {}
void WorkStealingWorker::Run() {
Thread* self = Thread::Current();
@@ -254,9 +251,7 @@ void WorkStealingWorker::Run() {
}
}
-WorkStealingWorker::~WorkStealingWorker() {
-
-}
+WorkStealingWorker::~WorkStealingWorker() {}
WorkStealingThreadPool::WorkStealingThreadPool(size_t num_threads)
: ThreadPool(0),
@@ -288,8 +283,6 @@ WorkStealingTask* WorkStealingThreadPool::FindTaskToStealFrom(Thread* self) {
return NULL;
}
-WorkStealingThreadPool::~WorkStealingThreadPool() {
-
-}
+WorkStealingThreadPool::~WorkStealingThreadPool() {}
} // namespace art
diff --git a/runtime/thread_pool.h b/runtime/thread_pool.h
index b9f185d..7b626fb 100644
--- a/runtime/thread_pool.h
+++ b/runtime/thread_pool.h
@@ -124,9 +124,7 @@ class ThreadPool {
class WorkStealingTask : public Task {
public:
- WorkStealingTask() : ref_count_(0) {
-
- }
+ WorkStealingTask() : ref_count_(0) {}
size_t GetRefCount() const {
return ref_count_;
diff --git a/runtime/thread_pool_test.cc b/runtime/thread_pool_test.cc
index 9b66318..98178bc 100644
--- a/runtime/thread_pool_test.cc
+++ b/runtime/thread_pool_test.cc
@@ -105,9 +105,7 @@ class TreeTask : public Task {
TreeTask(ThreadPool* const thread_pool, AtomicInteger* count, int depth)
: thread_pool_(thread_pool),
count_(count),
- depth_(depth) {
-
- }
+ depth_(depth) {}
void Run(Thread* self) {
if (depth_ > 1) {
diff --git a/runtime/trace.h b/runtime/trace.h
index 5bd6a8d..bd9c140 100644
--- a/runtime/trace.h
+++ b/runtime/trace.h
@@ -78,6 +78,7 @@ class Trace : public instrumentation::InstrumentationListener {
mirror::AbstractMethod* catch_method, uint32_t catch_dex_pc,
mirror::Throwable* exception_object)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
private:
explicit Trace(File* trace_file, int buffer_size, int flags);
diff --git a/runtime/verifier/method_verifier.cc b/runtime/verifier/method_verifier.cc
index 5a70f2a..ff7f594 100644
--- a/runtime/verifier/method_verifier.cc
+++ b/runtime/verifier/method_verifier.cc
@@ -3749,7 +3749,6 @@ MethodVerifier::MethodSafeCastSet* MethodVerifier::GenerateSafeCastSet() {
}
MethodVerifier::PcToConcreteMethodMap* MethodVerifier::GenerateDevirtMap() {
-
// It is risky to rely on reg_types for sharpening in cases of soft
// verification, we might end up sharpening to a wrong implementation. Just abort.
if (!failure_messages_.empty()) {
diff --git a/runtime/verifier/reg_type.h b/runtime/verifier/reg_type.h
index c66e7cb..5b806c4 100644
--- a/runtime/verifier/reg_type.h
+++ b/runtime/verifier/reg_type.h
@@ -309,6 +309,7 @@ class ConflictType : public RegType {
// Destroy the singleton instance.
static void Destroy();
+
private:
ConflictType(mirror::Class* klass, const std::string& descriptor, uint16_t cache_id)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
@@ -338,6 +339,7 @@ class UndefinedType : public RegType {
// Destroy the singleton instance.
static void Destroy();
+
private:
UndefinedType(mirror::Class* klass, const std::string& descriptor, uint16_t cache_id)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
@@ -875,6 +877,7 @@ class UnresolvedSuperClass : public UnresolvedType {
}
std::string Dump() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
private:
void CheckInvariants() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -909,6 +912,7 @@ class UnresolvedMergedType : public UnresolvedType {
}
std::string Dump() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
private:
void CheckInvariants() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
diff --git a/runtime/verifier/reg_type_test.cc b/runtime/verifier/reg_type_test.cc
index f37edff..d2c9dd6 100644
--- a/runtime/verifier/reg_type_test.cc
+++ b/runtime/verifier/reg_type_test.cc
@@ -414,7 +414,6 @@ TEST_F(RegTypeReferenceTest, Dump) {
EXPECT_EQ(expected, unresolved_merged.Dump());
}
-
TEST_F(RegTypeReferenceTest, JavalangString) {
// Add a class to the cache then look for the same class and make sure it is a
// Hit the second time. Then check for the same effect when using
@@ -433,8 +432,8 @@ TEST_F(RegTypeReferenceTest, JavalangString) {
const RegType& ref_type_unintialized = cache.Uninitialized(ref_type, 0110ull);
EXPECT_TRUE(ref_type_unintialized.IsUninitializedReference());
EXPECT_FALSE(ref_type_unintialized.IsUnresolvedAndUninitializedReference());
-
}
+
TEST_F(RegTypeReferenceTest, JavalangObject) {
// Add a class to the cache then look for the same class and make sure it is a
// Hit the second time. Then I am checking for the same effect when using
@@ -474,7 +473,6 @@ TEST_F(RegTypeReferenceTest, Merging) {
TEST_F(RegTypeTest, ConstPrecision) {
-
// Tests creating primitive types types.
ScopedObjectAccess soa(Thread::Current());
RegTypeCache cache_new(true);
diff --git a/runtime/verifier/register_line.cc b/runtime/verifier/register_line.cc
index 3a2145b..d2abaac 100644
--- a/runtime/verifier/register_line.cc
+++ b/runtime/verifier/register_line.cc
@@ -254,7 +254,6 @@ void RegisterLine::CopyResultRegister2(uint32_t vdst) {
SetRegisterTypeWide(vdst, type_l, type_h); // also sets the high
result_[0] = verifier_->GetRegTypeCache()->Undefined().GetId();
result_[1] = verifier_->GetRegTypeCache()->Undefined().GetId();
-
}
}