/*
 *
 * Copyright 2017 gRPC authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
19 #ifndef GRPC_CORE_LIB_GPRPP_ATOMIC_H
20 #define GRPC_CORE_LIB_GPRPP_ATOMIC_H
#include <grpc/support/port_platform.h>

#include <atomic>

#include <grpc/support/atm.h>
namespace grpc_core {

// Strongly-typed mirror of std::memory_order. Each enumerator is assigned the
// value of the corresponding std::memory_order constant, so a static_cast
// between the two types is value-preserving.
enum class MemoryOrder {
  RELAXED = std::memory_order_relaxed,
  CONSUME = std::memory_order_consume,
  ACQUIRE = std::memory_order_acquire,
  RELEASE = std::memory_order_release,
  ACQ_REL = std::memory_order_acq_rel,
  SEQ_CST = std::memory_order_seq_cst
};

// Thin wrapper around std::atomic<T> that takes grpc_core::MemoryOrder for
// ordering arguments and routes CAS/add results through the
// GPR_ATM_INC_*_THEN hooks from grpc/support/atm.h (presumably no-ops unless
// atomic-op instrumentation is compiled in — confirm against that header).
template <typename T>
class Atomic {
 public:
  // Initializes the stored value; defaults to a value-initialized T.
  explicit Atomic(T val = T()) : storage_(val) {}

  // Returns the current value, loaded with the given memory ordering.
  T Load(MemoryOrder order) const {
    return storage_.load(static_cast<std::memory_order>(order));
  }

  // Unconditionally replaces the stored value with the given ordering.
  void Store(T val, MemoryOrder order) {
    storage_.store(val, static_cast<std::memory_order>(order));
  }

  // Atomically replaces the stored value and returns the previous value.
  T Exchange(T desired, MemoryOrder order) {
    return storage_.exchange(desired, static_cast<std::memory_order>(order));
  }

  // Weak compare-and-swap: may fail spuriously even when the stored value
  // equals *expected. On failure, *expected is updated to the observed value.
  // Returns true iff the exchange took place.
  bool CompareExchangeWeak(T* expected, T desired, MemoryOrder success,
                           MemoryOrder failure) {
    return GPR_ATM_INC_CAS_THEN(storage_.compare_exchange_weak(
        *expected, desired, static_cast<std::memory_order>(success),
        static_cast<std::memory_order>(failure)));
  }

  // Strong compare-and-swap: fails only if the stored value actually differs
  // from *expected. On failure, *expected is updated to the observed value.
  bool CompareExchangeStrong(T* expected, T desired, MemoryOrder success,
                             MemoryOrder failure) {
    return GPR_ATM_INC_CAS_THEN(storage_.compare_exchange_strong(
        *expected, desired, static_cast<std::memory_order>(success),
        static_cast<std::memory_order>(failure)));
  }

  // Atomically adds arg to the stored value; returns the value held
  // immediately before the addition.
  template <typename Arg>
  T FetchAdd(Arg arg, MemoryOrder order = MemoryOrder::SEQ_CST) {
    return GPR_ATM_INC_ADD_THEN(storage_.fetch_add(
        static_cast<Arg>(arg), static_cast<std::memory_order>(order)));
  }

  // Atomically subtracts arg from the stored value; returns the value held
  // immediately before the subtraction.
  template <typename Arg>
  T FetchSub(Arg arg, MemoryOrder order = MemoryOrder::SEQ_CST) {
    return GPR_ATM_INC_ADD_THEN(storage_.fetch_sub(
        static_cast<Arg>(arg), static_cast<std::memory_order>(order)));
  }

  // Atomically increment a counter only if the counter value is not zero.
  // Returns true if increment took place; false if counter is zero.
  bool IncrementIfNonzero(MemoryOrder load_order = MemoryOrder::ACQUIRE) {
    T count = storage_.load(static_cast<std::memory_order>(load_order));
    do {
      // If zero, we are done (without an increment). If not, we must do a CAS
      // to maintain the contract: do not increment the counter if it is
      // already zero.
      if (count == 0) {
        return false;
      }
    } while (!CompareExchangeWeak(&count, count + 1, MemoryOrder::ACQ_REL,
                                  MemoryOrder::RELAXED));
    return true;
  }

 private:
  std::atomic<T> storage_;
};

}  // namespace grpc_core
104 #endif /* GRPC_CORE_LIB_GPRPP_ATOMIC_H */