summaryrefslogtreecommitdiffstats
path: root/skia/ext/SkThread_chrome.cc
blob: f379bebee71ca6ece80d221a45d6e55787645b1d (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "third_party/skia/include/core/SkThread.h"

#include <new>

#include "base/atomicops.h"
#include "base/basictypes.h"
#include "base/logging.h"
#include "base/synchronization/lock.h"

/** Adds one to the int specified by the address (in a thread-safe manner), and
    returns the previous value.
    No additional memory barrier is required.
    This must act as a compiler barrier.
*/
int32_t sk_atomic_inc(int32_t* addr) {
  // sk_atomic_inc is expected to return the old value,
  // NoBarrier_AtomicIncrement returns the new value.
  return base::subtle::NoBarrier_AtomicIncrement(addr, 1) - 1;
}

/*  Subtracts one from the int specified by the address (in a thread-safe
    manner), and returns the previous value.
    Expected to act as a release (SL/S) memory barrier and a compiler barrier.
*/
int32_t sk_atomic_dec(int32_t* addr) {
  // Barrier_AtomicIncrement hands back the updated value, but Skia's contract
  // is to return the value stored before the decrement, so undo the -1.
  const int32_t updated = base::subtle::Barrier_AtomicIncrement(addr, -1);
  return updated + 1;
}
/** If sk_atomic_dec does not act as an acquire (L/SL) barrier, this is expected
    to act as an acquire (L/SL) memory barrier and as a compiler barrier.
    NOTE(review): intentionally empty — presumably because sk_atomic_dec above
    uses Barrier_AtomicIncrement, which should already supply the required
    barrier semantics; confirm against base/atomicops.h for each platform.
    (The "aquire" spelling is Skia's own API name and must not be changed.)
*/
void sk_membar_aquire__after_atomic_dec() { }

/** Adds one to the int specified by the address iff the int specified by the
    address is not zero (in a thread-safe manner), and returns the previous
    value.
    No additional memory barrier is required.
    This must act as a compiler barrier.
*/
int32_t sk_atomic_conditional_inc(int32_t* addr) {
  for (int32_t expected = *addr;;) {
    // Never increment a counter that has already reached zero.
    if (expected == 0)
      return 0;

    const int32_t observed =
        base::subtle::Acquire_CompareAndSwap(addr, expected, expected + 1);
    if (observed == expected)
      return expected;  // CAS succeeded; report the pre-increment value.

    // Another thread changed *addr between our read and the CAS; retry with
    // the value the CAS actually observed.
    expected = observed;
  }
}
/** If sk_atomic_conditional_inc does not act as an acquire (L/SL) barrier, this
    is expected to act as an acquire (L/SL) memory barrier and as a compiler
    barrier.
    NOTE(review): intentionally empty — presumably because the
    Acquire_CompareAndSwap in sk_atomic_conditional_inc already provides
    acquire semantics; confirm against base/atomicops.h for each platform.
    (The "aquire" spelling is Skia's own API name and must not be changed.)
*/
void sk_membar_aquire__after_atomic_conditional_inc() { }

// Backs Skia's SkMutex with a Chromium base::Lock, constructed in place
// inside the opaque fStorage buffer the Skia header provides.
SkMutex::SkMutex() {
  COMPILE_ASSERT(sizeof(base::Lock) <= sizeof(fStorage),
                 Lock_is_too_big_for_SkMutex);
  new (fStorage) base::Lock();
}

// Explicitly run the destructor of the base::Lock that the constructor
// placement-new'd into fStorage (placement new has no matching delete).
SkMutex::~SkMutex() {
  reinterpret_cast<base::Lock*>(fStorage)->~Lock();
}

// Forwards to base::Lock::Acquire on the lock living in fStorage.
void SkMutex::acquire() {
  reinterpret_cast<base::Lock*>(fStorage)->Acquire();
}

// Forwards to base::Lock::Release on the lock living in fStorage.
void SkMutex::release() {
  reinterpret_cast<base::Lock*>(fStorage)->Release();
}