path: root/runtime/read_barrier-inl.h
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_READ_BARRIER_INL_H_
#define ART_RUNTIME_READ_BARRIER_INL_H_

#include "read_barrier.h"

#include "gc/collector/concurrent_copying.h"
#include "gc/heap.h"
#include "mirror/object_reference.h"
#include "mirror/reference.h"
#include "runtime.h"

namespace art {

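// Read barrier for a heap reference field load: obj is the object holding the field, offset is
// the field offset, and ref_addr is the address of the mirror::HeapReference<MirrorType> field.
// Dispatches on the configured barrier kind (Baker, Brooks, or table-lookup) and returns the
// reference to use, marking it through the concurrent copying collector when needed.
//
// Illustrative call site (a sketch only; it assumes a field-address accessor along the lines of
// mirror::Object::GetFieldObjectReferenceAddr, which is not declared in this file):
//
//   mirror::HeapReference<mirror::Object>* addr = obj->GetFieldObjectReferenceAddr(offset);
//   mirror::Object* ref =
//       ReadBarrier::Barrier<mirror::Object, kWithReadBarrier, false>(obj, offset, addr);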
template <typename MirrorType, ReadBarrierOption kReadBarrierOption, bool kMaybeDuringStartup>
inline MirrorType* ReadBarrier::Barrier(
    mirror::Object* obj, MemberOffset offset, mirror::HeapReference<MirrorType>* ref_addr) {
  const bool with_read_barrier = kReadBarrierOption == kWithReadBarrier;
  if (with_read_barrier && kUseBakerReadBarrier) {
    // The high bits of the rb_ptr, rb_ptr_high_bits (which must be
    // zero), are used to create an artificial data dependency from the
    // is_gray load to the ref field (ptr) load, to avoid needing a
    // load-load barrier between the two.
    uintptr_t rb_ptr_high_bits;
    bool is_gray = HasGrayReadBarrierPointer(obj, &rb_ptr_high_bits);
    ref_addr = reinterpret_cast<mirror::HeapReference<MirrorType>*>(
        rb_ptr_high_bits | reinterpret_cast<uintptr_t>(ref_addr));
    MirrorType* ref = ref_addr->AsMirrorPtr();
    if (is_gray) {
      // Slow-path.
      ref = reinterpret_cast<MirrorType*>(Mark(ref));
    }
    if (kEnableReadBarrierInvariantChecks) {
      CHECK_EQ(rb_ptr_high_bits, 0U) << obj << " rb_ptr=" << obj->GetReadBarrierPointer();
    }
    AssertToSpaceInvariant(obj, offset, ref);
    return ref;
  } else if (with_read_barrier && kUseBrooksReadBarrier) {
    // To be implemented.
    return ref_addr->AsMirrorPtr();
  } else if (with_read_barrier && kUseTableLookupReadBarrier) {
    MirrorType* ref = ref_addr->AsMirrorPtr();
    MirrorType* old_ref = ref;
    // The heap or the collector can be null at startup. TODO: avoid the need for this null check.
    gc::Heap* heap = Runtime::Current()->GetHeap();
    if (heap != nullptr && heap->GetReadBarrierTable()->IsSet(old_ref)) {
      ref = reinterpret_cast<MirrorType*>(Mark(old_ref));
      // Update the field atomically. This may fail if the mutator updates the field before us,
      // but that is OK.
      obj->CasFieldStrongSequentiallyConsistentObjectWithoutWriteBarrier<false, false>(
          offset, old_ref, ref);
    }
    AssertToSpaceInvariant(obj, offset, ref);
    return ref;
  } else {
    // No read barrier.
    return ref_addr->AsMirrorPtr();
  }
}

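// Read barrier for a GC root stored as a raw MirrorType* pointer. Returns the reference to use
// and, for the table-lookup barrier, also updates the root in place (atomically) with the marked
// reference. If kMaybeDuringStartup is true and the runtime is still starting up, the reference
// is returned unchanged because the collector may not be available yet.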
template <typename MirrorType, ReadBarrierOption kReadBarrierOption, bool kMaybeDuringStartup>
inline MirrorType* ReadBarrier::BarrierForRoot(MirrorType** root) {
  MirrorType* ref = *root;
  const bool with_read_barrier = kReadBarrierOption == kWithReadBarrier;
  if (with_read_barrier && kUseBakerReadBarrier) {
    if (kMaybeDuringStartup && IsDuringStartup()) {
      // During startup, the heap may not be initialized yet. Just
      // return the given ref.
      return ref;
    }
    // TODO: separate the read barrier code from the collector code more.
    if (Runtime::Current()->GetHeap()->ConcurrentCopyingCollector()->IsMarking()) {
      ref = reinterpret_cast<MirrorType*>(Mark(ref));
    }
    AssertToSpaceInvariant(nullptr, MemberOffset(0), ref);
    return ref;
  } else if (with_read_barrier && kUseBrooksReadBarrier) {
    // To be implemented.
    return ref;
  } else if (with_read_barrier && kUseTableLookupReadBarrier) {
    if (kMaybeDuringStartup && IsDuringStartup()) {
      // During startup, the heap may not be initialized yet. Just
      // return the given ref.
      return ref;
    }
    if (Runtime::Current()->GetHeap()->GetReadBarrierTable()->IsSet(ref)) {
      MirrorType* old_ref = ref;
      ref = reinterpret_cast<MirrorType*>(Mark(old_ref));
      // Update the field atomically. This may fail if the mutator updates the field before us,
      // but that is OK.
      Atomic<mirror::Object*>* atomic_root = reinterpret_cast<Atomic<mirror::Object*>*>(root);
      atomic_root->CompareExchangeStrongSequentiallyConsistent(old_ref, ref);
    }
    AssertToSpaceInvariant(nullptr, MemberOffset(0), ref);
    return ref;
  } else {
    return ref;
  }
}

// TODO: Reduce copy-paste with the raw-pointer overload above.
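// Read barrier for a GC root stored as a mirror::CompressedReference<MirrorType>; otherwise the
// same as the raw-pointer overload above.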
template <typename MirrorType, ReadBarrierOption kReadBarrierOption, bool kMaybeDuringStartup>
inline MirrorType* ReadBarrier::BarrierForRoot(mirror::CompressedReference<MirrorType>* root) {
  MirrorType* ref = root->AsMirrorPtr();
  const bool with_read_barrier = kReadBarrierOption == kWithReadBarrier;
  if (with_read_barrier && kUseBakerReadBarrier) {
    if (kMaybeDuringStartup && IsDuringStartup()) {
      // During startup, the heap may not be initialized yet. Just
      // return the given ref.
      return ref;
    }
    // TODO: separate the read barrier code from the collector code more.
    if (Runtime::Current()->GetHeap()->ConcurrentCopyingCollector()->IsMarking()) {
      ref = reinterpret_cast<MirrorType*>(Mark(ref));
    }
    AssertToSpaceInvariant(nullptr, MemberOffset(0), ref);
    return ref;
  } else if (with_read_barrier && kUseBrooksReadBarrier) {
    // To be implemented.
    return ref;
  } else if (with_read_barrier && kUseTableLookupReadBarrier) {
    if (kMaybeDuringStartup && IsDuringStartup()) {
      // During startup, the heap may not be initialized yet. Just
      // return the given ref.
      return ref;
    }
    if (Runtime::Current()->GetHeap()->GetReadBarrierTable()->IsSet(ref)) {
      auto old_ref = mirror::CompressedReference<MirrorType>::FromMirrorPtr(ref);
      ref = reinterpret_cast<MirrorType*>(Mark(ref));
      auto new_ref = mirror::CompressedReference<MirrorType>::FromMirrorPtr(ref);
      // Update the field atomically. This may fail if the mutator updates the field before us,
      // but that is OK.
      auto* atomic_root =
          reinterpret_cast<Atomic<mirror::CompressedReference<MirrorType>>*>(root);
      atomic_root->CompareExchangeStrongSequentiallyConsistent(old_ref, new_ref);
    }
    AssertToSpaceInvariant(nullptr, MemberOffset(0), ref);
    return ref;
  } else {
    return ref;
  }
}

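// Returns true while the runtime is starting up, i.e. when the heap or the concurrent copying
// collector is not available yet (or CC is not the current collector type). In that case the
// barrier code must not touch collector state.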
inline bool ReadBarrier::IsDuringStartup() {
  gc::Heap* heap = Runtime::Current()->GetHeap();
  if (heap == nullptr) {
    // During startup, the heap can be null.
    return true;
  }
  if (heap->CurrentCollectorType() != gc::kCollectorTypeCC) {
    // CC isn't running.
    return true;
  }
  gc::collector::ConcurrentCopying* collector = heap->ConcurrentCopyingCollector();
  if (collector == nullptr) {
    // During startup, the collector can be null.
    return true;
  }
  return false;
}

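// When to-space invariant checks are enabled (or in debug builds), verifies through the
// concurrent copying collector that ref satisfies the to-space invariant. obj and offset
// identify the holder field for diagnostics; they are null/zero when checking roots.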
inline void ReadBarrier::AssertToSpaceInvariant(mirror::Object* obj, MemberOffset offset,
                                                mirror::Object* ref) {
  if (kEnableToSpaceInvariantChecks || kIsDebugBuild) {
    if (ref == nullptr || IsDuringStartup()) {
      return;
    }
    Runtime::Current()->GetHeap()->ConcurrentCopyingCollector()->
        AssertToSpaceInvariant(obj, offset, ref);
  }
}

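// Marks obj through the concurrent copying collector and returns the resulting (to-space)
// reference.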
inline mirror::Object* ReadBarrier::Mark(mirror::Object* obj) {
  return Runtime::Current()->GetHeap()->ConcurrentCopyingCollector()->Mark(obj);
}

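// Loads obj's read barrier pointer and returns whether its low bits encode the gray state. The
// high bits, which are expected to be zero, are stored in *out_rb_ptr_high_bits so that the
// caller can fold them into the field address to create a data dependency on this load.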
inline bool ReadBarrier::HasGrayReadBarrierPointer(mirror::Object* obj,
                                                   uintptr_t* out_rb_ptr_high_bits) {
  mirror::Object* rb_ptr = obj->GetReadBarrierPointer();
  uintptr_t rb_ptr_bits = reinterpret_cast<uintptr_t>(rb_ptr);
  uintptr_t rb_ptr_low_bits = rb_ptr_bits & rb_ptr_mask_;
  if (kEnableReadBarrierInvariantChecks) {
    CHECK(rb_ptr_low_bits == white_ptr_ || rb_ptr_low_bits == gray_ptr_ ||
          rb_ptr_low_bits == black_ptr_)
        << "obj=" << obj << " rb_ptr=" << rb_ptr << " " << PrettyTypeOf(obj);
  }
  bool is_gray = rb_ptr_low_bits == gray_ptr_;
  // The high bits are supposed to be zero. We check this on the caller side.
  *out_rb_ptr_high_bits = rb_ptr_bits & ~rb_ptr_mask_;
  return is_gray;
}

}  // namespace art

#endif  // ART_RUNTIME_READ_BARRIER_INL_H_