diff options
34 files changed, 60 insertions, 81 deletions
diff --git a/compiler/dex/arena_allocator.h b/compiler/dex/arena_allocator.h index cc81e50..0ad859e 100644 --- a/compiler/dex/arena_allocator.h +++ b/compiler/dex/arena_allocator.h @@ -47,7 +47,7 @@ class ArenaAllocator { kNumAllocKinds }; - ArenaAllocator(size_t default_size = ARENA_DEFAULT_BLOCK_SIZE); + explicit ArenaAllocator(size_t default_size = ARENA_DEFAULT_BLOCK_SIZE); ~ArenaAllocator(); void* NewMem(size_t size, bool zero, ArenaAllocKind kind); size_t BytesAllocated() { @@ -86,7 +86,7 @@ struct MemStats { void Dump(std::ostream& os) const { arena_.DumpMemStats(os); } - MemStats(const ArenaAllocator &arena) : arena_(arena) {}; + explicit MemStats(const ArenaAllocator &arena) : arena_(arena) {}; private: const ArenaAllocator &arena_; }; // MemStats diff --git a/compiler/dex/arena_bit_vector.h b/compiler/dex/arena_bit_vector.h index 0b7bbc5..7e5c436 100644 --- a/compiler/dex/arena_bit_vector.h +++ b/compiler/dex/arena_bit_vector.h @@ -33,7 +33,7 @@ class ArenaBitVector { class Iterator { public: - Iterator(ArenaBitVector* bit_vector) + explicit Iterator(ArenaBitVector* bit_vector) : p_bits_(bit_vector), bit_storage_(bit_vector->GetRawStorage()), bit_index_(0), diff --git a/compiler/dex/backend.h b/compiler/dex/backend.h index 6f5ba38..7fa8e99 100644 --- a/compiler/dex/backend.h +++ b/compiler/dex/backend.h @@ -30,7 +30,7 @@ class Backend { virtual CompiledMethod* GetCompiledMethod() = 0; protected: - Backend(ArenaAllocator* arena) : arena_(arena) {}; + explicit Backend(ArenaAllocator* arena) : arena_(arena) {}; ArenaAllocator* const arena_; }; // Class Backend diff --git a/compiler/dex/growable_array.h b/compiler/dex/growable_array.h index 6ab0f16..6d26bc2 100644 --- a/compiler/dex/growable_array.h +++ b/compiler/dex/growable_array.h @@ -49,7 +49,7 @@ class GrowableArray { class Iterator { public: - Iterator(GrowableArray* g_list) + explicit Iterator(GrowableArray* g_list) : idx_(0), g_list_(g_list) {}; diff --git a/compiler/dex/local_value_numbering.h 
b/compiler/dex/local_value_numbering.h index f2b2291..09ed7ae 100644 --- a/compiler/dex/local_value_numbering.h +++ b/compiler/dex/local_value_numbering.h @@ -33,7 +33,7 @@ typedef SafeMap<uint32_t, uint16_t> MemoryVersionMap; class LocalValueNumbering { public: - LocalValueNumbering(CompilationUnit* cu) : cu_(cu) {}; + explicit LocalValueNumbering(CompilationUnit* cu) : cu_(cu) {}; static uint64_t BuildKey(uint16_t op, uint16_t operand1, uint16_t operand2, uint16_t modifier) { return (static_cast<uint64_t>(op) << 48 | static_cast<uint64_t>(operand1) << 32 | diff --git a/compiler/driver/dex_compilation_unit.h b/compiler/driver/dex_compilation_unit.h index 53efd12..facc9cb 100644 --- a/compiler/driver/dex_compilation_unit.h +++ b/compiler/driver/dex_compilation_unit.h @@ -32,7 +32,7 @@ struct CompilationUnit; class DexCompilationUnit { public: - DexCompilationUnit(CompilationUnit* cu); + explicit DexCompilationUnit(CompilationUnit* cu); DexCompilationUnit(CompilationUnit* cu, jobject class_loader, ClassLinker* class_linker, const DexFile& dex_file, const DexFile::CodeItem* code_item, diff --git a/compiler/llvm/md_builder.h b/compiler/llvm/md_builder.h index cc169a3..65c52c9 100644 --- a/compiler/llvm/md_builder.h +++ b/compiler/llvm/md_builder.h @@ -35,7 +35,7 @@ typedef ::llvm::MDBuilder LLVMMDBuilder; class MDBuilder : public LLVMMDBuilder { public: - MDBuilder(::llvm::LLVMContext& context) + explicit MDBuilder(::llvm::LLVMContext& context) : LLVMMDBuilder(context), tbaa_root_(createTBAARoot("Art TBAA Root")) { std::memset(tbaa_special_type_, 0, sizeof(tbaa_special_type_)); std::memset(tbaa_memory_jtype_, 0, sizeof(tbaa_memory_jtype_)); diff --git a/dex2oat/dex2oat.cc b/dex2oat/dex2oat.cc index c73c1bb..541c916 100644 --- a/dex2oat/dex2oat.cc +++ b/dex2oat/dex2oat.cc @@ -456,7 +456,7 @@ class WatchDog { } while (false) public: - WatchDog(bool is_watch_dog_enabled) { + explicit WatchDog(bool is_watch_dog_enabled) { is_watch_dog_enabled_ = is_watch_dog_enabled; if 
(!is_watch_dog_enabled_) { return; diff --git a/runtime/atomic_integer.h b/runtime/atomic_integer.h index ce00454..3492487 100644 --- a/runtime/atomic_integer.h +++ b/runtime/atomic_integer.h @@ -26,7 +26,7 @@ class AtomicInteger { public: AtomicInteger() : value_(0) { } - AtomicInteger(int32_t value) : value_(value) { } + explicit AtomicInteger(int32_t value) : value_(value) { } // Unsafe = operator for non atomic operations on the integer. void store(int32_t desired) { diff --git a/runtime/barrier.h b/runtime/barrier.h index e0ad239..22f08e1 100644 --- a/runtime/barrier.h +++ b/runtime/barrier.h @@ -25,7 +25,7 @@ namespace art { class Barrier { public: - Barrier(int count); + explicit Barrier(int count); virtual ~Barrier(); // Pass through the barrier, decrements the count but does not block. diff --git a/runtime/barrier_test.cc b/runtime/barrier_test.cc index 55d2d3d..d26ae9e 100644 --- a/runtime/barrier_test.cc +++ b/runtime/barrier_test.cc @@ -69,9 +69,9 @@ TEST_F(BarrierTest, CheckWait) { Thread* self = Thread::Current(); ThreadPool thread_pool(num_threads); Barrier barrier(0); - AtomicInteger count1 = 0; - AtomicInteger count2 = 0; - AtomicInteger count3 = 0; + AtomicInteger count1(0); + AtomicInteger count2(0); + AtomicInteger count3(0); for (int32_t i = 0; i < num_threads; ++i) { thread_pool.AddTask(self, new CheckWaitTask(&barrier, &count1, &count2, &count3)); } @@ -126,7 +126,7 @@ TEST_F(BarrierTest, CheckPass) { Thread* self = Thread::Current(); ThreadPool thread_pool(num_threads); Barrier barrier(0); - AtomicInteger count = 0; + AtomicInteger count(0); const int32_t num_tasks = num_threads * 4; const int32_t num_sub_tasks = 128; for (int32_t i = 0; i < num_tasks; ++i) { diff --git a/runtime/base/histogram.h b/runtime/base/histogram.h index 8724d2c..dfb556b 100644 --- a/runtime/base/histogram.h +++ b/runtime/base/histogram.h @@ -36,7 +36,7 @@ template <class Value> class Histogram { const size_t kInitialBucketCount; public: - Histogram(std::string); + 
explicit Histogram(std::string); void AddValue(Value); void CreateHistogram(); void Reset(); diff --git a/runtime/base/mutex.cc b/runtime/base/mutex.cc index bb4b5c5..25c0b9e 100644 --- a/runtime/base/mutex.cc +++ b/runtime/base/mutex.cc @@ -94,7 +94,7 @@ std::set<BaseMutex*>* all_mutexes_; class ScopedAllMutexesLock { public: - ScopedAllMutexesLock(const BaseMutex* mutex) : mutex_(mutex) { + explicit ScopedAllMutexesLock(const BaseMutex* mutex) : mutex_(mutex) { while (!all_mutexes_guard_.CompareAndSwap(0, reinterpret_cast<int32_t>(mutex))) { NanoSleep(100); } diff --git a/runtime/debugger.cc b/runtime/debugger.cc index fe726aa..5a31c87 100644 --- a/runtime/debugger.cc +++ b/runtime/debugger.cc @@ -1814,7 +1814,7 @@ void Dbg::GetChildThreadGroups(JDWP::ObjectId thread_group_id, std::vector<JDWP: static int GetStackDepth(Thread* thread) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { struct CountStackDepthVisitor : public StackVisitor { - CountStackDepthVisitor(Thread* thread) + explicit CountStackDepthVisitor(Thread* thread) : StackVisitor(thread, NULL), depth(0) {} // TODO: Enable annotalysis. 
We know lock is held in constructor, but abstraction confuses @@ -2458,7 +2458,7 @@ JDWP::JdwpError Dbg::ConfigureStep(JDWP::ObjectId thread_id, JDWP::JdwpStepSize // struct SingleStepStackVisitor : public StackVisitor { - SingleStepStackVisitor(Thread* thread) + explicit SingleStepStackVisitor(Thread* thread) EXCLUSIVE_LOCKS_REQUIRED(Locks::breakpoint_lock_) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) : StackVisitor(thread, NULL) { diff --git a/runtime/dex_method_iterator.h b/runtime/dex_method_iterator.h index cb71cb5..e915d77 100644 --- a/runtime/dex_method_iterator.h +++ b/runtime/dex_method_iterator.h @@ -25,7 +25,7 @@ namespace art { class DexMethodIterator { public: - DexMethodIterator(const std::vector<const DexFile*>& dex_files) + explicit DexMethodIterator(const std::vector<const DexFile*>& dex_files) : dex_files_(dex_files), found_next_(false), dex_file_index_(0), diff --git a/runtime/file_output_stream.h b/runtime/file_output_stream.h index 10405ef..23a57f5 100644 --- a/runtime/file_output_stream.h +++ b/runtime/file_output_stream.h @@ -25,7 +25,7 @@ namespace art { class FileOutputStream : public OutputStream { public: - FileOutputStream(File* file); + explicit FileOutputStream(File* file); virtual ~FileOutputStream() {} diff --git a/runtime/gc/accounting/heap_bitmap.h b/runtime/gc/accounting/heap_bitmap.h index a12809e..f4b725c 100644 --- a/runtime/gc/accounting/heap_bitmap.h +++ b/runtime/gc/accounting/heap_bitmap.h @@ -103,8 +103,7 @@ class HeapBitmap { void ReplaceObjectSet(SpaceSetMap* old_set, SpaceSetMap* new_set) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_); - HeapBitmap(Heap* heap) : heap_(heap) { - } + explicit HeapBitmap(Heap* heap) : heap_(heap) {} private: diff --git a/runtime/gc/accounting/mod_union_table-inl.h b/runtime/gc/accounting/mod_union_table-inl.h index 32ac95f..9ea74d4 100644 --- a/runtime/gc/accounting/mod_union_table-inl.h +++ b/runtime/gc/accounting/mod_union_table-inl.h @@ -28,8 +28,7 @@ namespace accounting { // A 
mod-union table to record image references to the Zygote and alloc space. class ModUnionTableToZygoteAllocspace : public ModUnionTableReferenceCache { public: - ModUnionTableToZygoteAllocspace(Heap* heap) : ModUnionTableReferenceCache(heap) { - } + explicit ModUnionTableToZygoteAllocspace(Heap* heap) : ModUnionTableReferenceCache(heap) {} bool AddReference(const mirror::Object* /* obj */, const mirror::Object* ref) { const std::vector<space::ContinuousSpace*>& spaces = GetHeap()->GetContinuousSpaces(); @@ -48,8 +47,7 @@ public: // A mod-union table to record Zygote references to the alloc space. class ModUnionTableToAllocspace : public ModUnionTableReferenceCache { public: - ModUnionTableToAllocspace(Heap* heap) : ModUnionTableReferenceCache(heap) { - } + explicit ModUnionTableToAllocspace(Heap* heap) : ModUnionTableReferenceCache(heap) {} bool AddReference(const mirror::Object* /* obj */, const mirror::Object* ref) { const std::vector<space::ContinuousSpace*>& spaces = GetHeap()->GetContinuousSpaces(); diff --git a/runtime/gc/accounting/mod_union_table.cc b/runtime/gc/accounting/mod_union_table.cc index 05b68c4..aa02f82 100644 --- a/runtime/gc/accounting/mod_union_table.cc +++ b/runtime/gc/accounting/mod_union_table.cc @@ -117,8 +117,8 @@ class ModUnionClearCardVisitor { class ModUnionScanImageRootVisitor { public: - ModUnionScanImageRootVisitor(collector::MarkSweep* const mark_sweep) : mark_sweep_(mark_sweep) { - } + explicit ModUnionScanImageRootVisitor(collector::MarkSweep* const mark_sweep) + : mark_sweep_(mark_sweep) {} void operator ()(const Object* root) const EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) diff --git a/runtime/gc/accounting/mod_union_table.h b/runtime/gc/accounting/mod_union_table.h index 5435625..d46281c 100644 --- a/runtime/gc/accounting/mod_union_table.h +++ b/runtime/gc/accounting/mod_union_table.h @@ -49,11 +49,9 @@ class HeapBitmap; // cleared between GC phases, reducing the number of dirty cards that need to be scanned. 
class ModUnionTable { public: - ModUnionTable(Heap* heap) : heap_(heap) { - } + explicit ModUnionTable(Heap* heap) : heap_(heap) {} - virtual ~ModUnionTable() { - } + virtual ~ModUnionTable() {} // Clear cards which map to a memory range of a space. This doesn't immediately update the // mod-union table, as updating the mod-union table may have an associated cost, such as @@ -86,7 +84,7 @@ class ModUnionTable { // Reference caching implementation. Caches references pointing to alloc space(s) for each card. class ModUnionTableReferenceCache : public ModUnionTable { public: - ModUnionTableReferenceCache(Heap* heap) : ModUnionTable(heap) {} + explicit ModUnionTableReferenceCache(Heap* heap) : ModUnionTable(heap) {} virtual ~ModUnionTableReferenceCache() {} // Clear and store cards for a space. @@ -122,7 +120,7 @@ class ModUnionTableReferenceCache : public ModUnionTable { // Card caching implementation. Keeps track of which cards we cleared and only this information. class ModUnionTableCardCache : public ModUnionTable { public: - ModUnionTableCardCache(Heap* heap) : ModUnionTable(heap) {} + explicit ModUnionTableCardCache(Heap* heap) : ModUnionTable(heap) {} virtual ~ModUnionTableCardCache() {} // Clear and store cards for a space. 
diff --git a/runtime/gc/accounting/space_bitmap.h b/runtime/gc/accounting/space_bitmap.h index 32ab440..5a1bfe3 100644 --- a/runtime/gc/accounting/space_bitmap.h +++ b/runtime/gc/accounting/space_bitmap.h @@ -244,7 +244,7 @@ class SpaceSetMap { } } - SpaceSetMap(const std::string& name) : name_(name) {} + explicit SpaceSetMap(const std::string& name) : name_(name) {} ~SpaceSetMap() {} Objects& GetObjects() { diff --git a/runtime/gc/collector/mark_sweep.cc b/runtime/gc/collector/mark_sweep.cc index 7664657..865ee13 100644 --- a/runtime/gc/collector/mark_sweep.cc +++ b/runtime/gc/collector/mark_sweep.cc @@ -70,8 +70,7 @@ static const bool kCountJavaLangRefs = false; class SetFingerVisitor { public: - SetFingerVisitor(MarkSweep* const mark_sweep) : mark_sweep_(mark_sweep) { - } + explicit SetFingerVisitor(MarkSweep* const mark_sweep) : mark_sweep_(mark_sweep) {} void operator ()(void* finger) const { mark_sweep_->SetFinger(reinterpret_cast<Object*>(finger)); @@ -521,10 +520,7 @@ void MarkSweep::MarkConcurrentRoots() { class CheckObjectVisitor { public: - CheckObjectVisitor(MarkSweep* const mark_sweep) - : mark_sweep_(mark_sweep) { - - } + explicit CheckObjectVisitor(MarkSweep* const mark_sweep) : mark_sweep_(mark_sweep) {} void operator ()(const Object* obj, const Object* ref, MemberOffset offset, bool is_static) const NO_THREAD_SAFETY_ANALYSIS { @@ -564,8 +560,7 @@ void MarkSweep::BindLiveToMarkBitmap(space::ContinuousSpace* space) { class ScanObjectVisitor { public: - ScanObjectVisitor(MarkSweep* const mark_sweep) : mark_sweep_(mark_sweep) { - } + explicit ScanObjectVisitor(MarkSweep* const mark_sweep) : mark_sweep_(mark_sweep) {} // TODO: Fixme when anotatalysis works with visitors. 
void operator ()(const Object* obj) const NO_THREAD_SAFETY_ANALYSIS { @@ -610,8 +605,7 @@ void MarkSweep::ScanGrayObjects(byte minimum_age) { class CheckBitmapVisitor { public: - CheckBitmapVisitor(MarkSweep* mark_sweep) : mark_sweep_(mark_sweep) { - } + explicit CheckBitmapVisitor(MarkSweep* mark_sweep) : mark_sweep_(mark_sweep) {} void operator ()(const Object* obj) const NO_THREAD_SAFETY_ANALYSIS { if (kDebugLocking) { @@ -803,9 +797,7 @@ struct SweepCallbackContext { class CheckpointMarkThreadRoots : public Closure { public: - CheckpointMarkThreadRoots(MarkSweep* mark_sweep) : mark_sweep_(mark_sweep) { - - } + explicit CheckpointMarkThreadRoots(MarkSweep* mark_sweep) : mark_sweep_(mark_sweep) {} virtual void Run(Thread* thread) NO_THREAD_SAFETY_ANALYSIS { // Note: self is not necessarily equal to thread since thread may be suspended. @@ -1084,8 +1076,7 @@ void MarkSweep::ScanRoot(const Object* obj) { class MarkObjectVisitor { public: - MarkObjectVisitor(MarkSweep* const mark_sweep) : mark_sweep_(mark_sweep) { - } + explicit MarkObjectVisitor(MarkSweep* const mark_sweep) : mark_sweep_(mark_sweep) {} // TODO: Fixme when anotatalysis works with visitors. void operator ()(const Object* /* obj */, const Object* ref, const MemberOffset& /* offset */, @@ -1153,9 +1144,7 @@ class MarkStackChunk : public Task { class MarkObjectParallelVisitor { public: - MarkObjectParallelVisitor(MarkStackChunk* chunk_task) : chunk_task_(chunk_task) { - - } + explicit MarkObjectParallelVisitor(MarkStackChunk* chunk_task) : chunk_task_(chunk_task) {} void operator ()(const Object* /* obj */, const Object* ref, const MemberOffset& /* offset */, bool /* is_static */) const { diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc index 7f68713..21a0725 100644 --- a/runtime/gc/heap.cc +++ b/runtime/gc/heap.cc @@ -1174,10 +1174,9 @@ class ScanVisitor { // Verify a reference from an object. 
class VerifyReferenceVisitor { public: - VerifyReferenceVisitor(Heap* heap) + explicit VerifyReferenceVisitor(Heap* heap) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) - : heap_(heap), failed_(false) { - } + : heap_(heap), failed_(false) {} bool Failed() const { return failed_; @@ -1264,8 +1263,7 @@ class VerifyReferenceVisitor { // Verify all references within an object, for use with HeapBitmap::Visit. class VerifyObjectVisitor { public: - VerifyObjectVisitor(Heap* heap) : heap_(heap), failed_(false) { - } + explicit VerifyObjectVisitor(Heap* heap) : heap_(heap), failed_(false) {} void operator ()(const mirror::Object* obj) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) { @@ -1377,11 +1375,9 @@ class VerifyReferenceCardVisitor { class VerifyLiveStackReferences { public: - VerifyLiveStackReferences(Heap* heap) + explicit VerifyLiveStackReferences(Heap* heap) : heap_(heap), - failed_(false) { - - } + failed_(false) {} void operator ()(const mirror::Object* obj) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) { diff --git a/runtime/gc/space/large_object_space.h b/runtime/gc/space/large_object_space.h index 74d9cca..db845db 100644 --- a/runtime/gc/space/large_object_space.h +++ b/runtime/gc/space/large_object_space.h @@ -61,7 +61,7 @@ class LargeObjectSpace : public DiscontinuousSpace, public AllocSpace { protected: - LargeObjectSpace(const std::string& name); + explicit LargeObjectSpace(const std::string& name); // Approximate number of bytes which have been allocated into the space. 
size_t num_bytes_allocated_; @@ -91,7 +91,7 @@ class LargeObjectMapSpace : public LargeObjectSpace { bool Contains(const mirror::Object* obj) const NO_THREAD_SAFETY_ANALYSIS; private: - LargeObjectMapSpace(const std::string& name); + explicit LargeObjectMapSpace(const std::string& name); virtual ~LargeObjectMapSpace() {} // Used to ensure mutual exclusion when the allocation spaces data structures are being modified. diff --git a/runtime/gc_map.h b/runtime/gc_map.h index 33d09f2..ffe54c4 100644 --- a/runtime/gc_map.h +++ b/runtime/gc_map.h @@ -27,7 +27,7 @@ namespace art { // Lightweight wrapper for native PC offset to reference bit maps. class NativePcOffsetToReferenceMap { public: - NativePcOffsetToReferenceMap(const uint8_t* data) : data_(data) { + explicit NativePcOffsetToReferenceMap(const uint8_t* data) : data_(data) { CHECK(data_ != NULL); } diff --git a/runtime/jdwp/jdwp_adb.cc b/runtime/jdwp/jdwp_adb.cc index 9652f60..9b9fe4c 100644 --- a/runtime/jdwp/jdwp_adb.cc +++ b/runtime/jdwp/jdwp_adb.cc @@ -54,7 +54,7 @@ namespace JDWP { struct JdwpAdbState : public JdwpNetStateBase { public: - JdwpAdbState(JdwpState* state) : JdwpNetStateBase(state) { + explicit JdwpAdbState(JdwpState* state) : JdwpNetStateBase(state) { control_sock_ = -1; shutting_down_ = false; diff --git a/runtime/jdwp/jdwp_priv.h b/runtime/jdwp/jdwp_priv.h index ab89339..557632c 100644 --- a/runtime/jdwp/jdwp_priv.h +++ b/runtime/jdwp/jdwp_priv.h @@ -53,7 +53,7 @@ bool InitAdbTransport(JdwpState*, const JdwpOptions*); */ class JdwpNetStateBase { public: - JdwpNetStateBase(JdwpState*); + explicit JdwpNetStateBase(JdwpState*); virtual ~JdwpNetStateBase(); virtual bool Accept() = 0; diff --git a/runtime/jdwp/jdwp_socket.cc b/runtime/jdwp/jdwp_socket.cc index 08b4859..87706a4 100644 --- a/runtime/jdwp/jdwp_socket.cc +++ b/runtime/jdwp/jdwp_socket.cc @@ -46,7 +46,7 @@ struct JdwpSocketState : public JdwpNetStateBase { uint16_t listenPort; int listenSock; /* listen for connection from debugger */ - 
JdwpSocketState(JdwpState* state) : JdwpNetStateBase(state) { + explicit JdwpSocketState(JdwpState* state) : JdwpNetStateBase(state) { listenPort = 0; listenSock = -1; } diff --git a/runtime/monitor.h b/runtime/monitor.h index 9206131..3b06217 100644 --- a/runtime/monitor.h +++ b/runtime/monitor.h @@ -196,7 +196,7 @@ class MonitorList { // For use only by the JDWP implementation. class MonitorInfo { public: - MonitorInfo(mirror::Object* o) EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_); + explicit MonitorInfo(mirror::Object* o) EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_); Thread* owner; size_t entry_count; diff --git a/runtime/oat/utils/x86/assembler_x86.h b/runtime/oat/utils/x86/assembler_x86.h index 390f7aa..3f40a54 100644 --- a/runtime/oat/utils/x86/assembler_x86.h +++ b/runtime/oat/utils/x86/assembler_x86.h @@ -643,7 +643,7 @@ inline void X86Assembler::EmitOperandSizeOverride() { // Slowpath entered when Thread::Current()->_exception is non-null class X86ExceptionSlowPath : public SlowPath { public: - X86ExceptionSlowPath(size_t stack_adjust) : stack_adjust_(stack_adjust) {} + explicit X86ExceptionSlowPath(size_t stack_adjust) : stack_adjust_(stack_adjust) {} virtual void Emit(Assembler *sp_asm); private: const size_t stack_adjust_; diff --git a/runtime/output_stream.h b/runtime/output_stream.h index d2a77d8..aff6bcd 100644 --- a/runtime/output_stream.h +++ b/runtime/output_stream.h @@ -33,7 +33,7 @@ enum Whence { class OutputStream { public: - OutputStream(const std::string& location) : location_(location) {} + explicit OutputStream(const std::string& location) : location_(location) {} virtual ~OutputStream() {} diff --git a/runtime/thread.cc b/runtime/thread.cc index 7089a7d..c0be4d2 100644 --- a/runtime/thread.cc +++ b/runtime/thread.cc @@ -1219,7 +1219,7 @@ void Thread::NotifyLocked(Thread* self) { class CountStackDepthVisitor : public StackVisitor { public: - CountStackDepthVisitor(Thread* thread) + explicit CountStackDepthVisitor(Thread* thread) 
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) : StackVisitor(thread, NULL), depth_(0), skip_depth_(0), skipping_(true) {} diff --git a/runtime/thread_pool.h b/runtime/thread_pool.h index 3462d5e..b9f185d 100644 --- a/runtime/thread_pool.h +++ b/runtime/thread_pool.h @@ -76,7 +76,7 @@ class ThreadPool { // after running it, it is the caller's responsibility. void AddTask(Thread* self, Task* task); - ThreadPool(size_t num_threads); + explicit ThreadPool(size_t num_threads); virtual ~ThreadPool(); // Wait for all tasks currently on queue to get completed. @@ -161,7 +161,7 @@ class WorkStealingWorker : public ThreadPoolWorker { class WorkStealingThreadPool : public ThreadPool { public: - WorkStealingThreadPool(size_t num_threads); + explicit WorkStealingThreadPool(size_t num_threads); virtual ~WorkStealingThreadPool(); private: diff --git a/runtime/thread_pool_test.cc b/runtime/thread_pool_test.cc index 4717ce7..10954e8 100644 --- a/runtime/thread_pool_test.cc +++ b/runtime/thread_pool_test.cc @@ -25,8 +25,7 @@ namespace art { class CountTask : public Task { public: - CountTask(AtomicInteger* count) : count_(count), verbose_(false) { - } + explicit CountTask(AtomicInteger* count) : count_(count), verbose_(false) {} void Run(Thread* self) { if (verbose_) { @@ -61,7 +60,7 @@ int32_t ThreadPoolTest::num_threads = 4; TEST_F(ThreadPoolTest, CheckRun) { Thread* self = Thread::Current(); ThreadPool thread_pool(num_threads); - AtomicInteger count = 0; + AtomicInteger count(0); static const int32_t num_tasks = num_threads * 4; for (int32_t i = 0; i < num_tasks; ++i) { thread_pool.AddTask(self, new CountTask(&count)); @@ -76,7 +75,7 @@ TEST_F(ThreadPoolTest, CheckRun) { TEST_F(ThreadPoolTest, StopStart) { Thread* self = Thread::Current(); ThreadPool thread_pool(num_threads); - AtomicInteger count = 0; + AtomicInteger count(0); static const int32_t num_tasks = num_threads * 4; for (int32_t i = 0; i < num_tasks; ++i) { thread_pool.AddTask(self, new CountTask(&count)); @@ -88,7 +87,7 
@@ TEST_F(ThreadPoolTest, StopStart) { thread_pool.StartWorkers(self); usleep(200); thread_pool.StopWorkers(self); - AtomicInteger bad_count = 0; + AtomicInteger bad_count(0); thread_pool.AddTask(self, new CountTask(&bad_count)); usleep(200); // Ensure that the task added after the workers were stopped doesn't get run. @@ -133,7 +132,7 @@ class TreeTask : public Task { TEST_F(ThreadPoolTest, RecursiveTest) { Thread* self = Thread::Current(); ThreadPool thread_pool(num_threads); - AtomicInteger count = 0; + AtomicInteger count(0); static const int depth = 8; thread_pool.AddTask(self, new TreeTask(&thread_pool, &count, depth)); thread_pool.StartWorkers(self);