reference_counted.h
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef REFCOUNT_H_
#define REFCOUNT_H_

#include <atomic>
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <memory>
#include <type_traits>
#include <utility>

#include "absl/base/attributes.h"

namespace refptr {

// A minimal thread-safe reference counter. A newly constructed `Refcount`
// starts at 1, representing the reference held by its creator.
class Refcount {
 public:
  constexpr Refcount() : count_{1} {}

  // Increments the reference count. Imposes no memory ordering.
  inline void Inc() {
    // Similarly to
    // https://chromium.googlesource.com/chromium/src/third_party/abseil-cpp/+/6d2ed7db891d53d83c5202a9368e4b19e4ca61f0/absl/strings/internal/cord_internal.h#155
    // this can be just _relaxed_:
    // This thread already holds at least one of the references counted in
    // `count_`. Therefore other threads won't decrement to 0 even if they
    // don't observe this operation immediately. And if the added reference
    // count is passed to a different thread, that operation needs to ensure
    // proper synchronization barriers on its own.
    count_.fetch_add(1, std::memory_order_relaxed);
  }

  // Returns whether the counter's current value is 1.
  inline bool IsOne() const {
    // This thread must observe the correct value, including any prior
    // modifications by other threads.
    return count_.load(std::memory_order_acquire) == 1;
  }

  // Returns `true` iff the counter's value is zero after the decrement
  // operation. In such a case the caller must destroy the referenced object,
  // and the counter's state becomes undefined.
  //
  // A caller should pass `expect_one = true` if there is a reasonable chance
  // that there is only a single reference to the object. This allows a slight
  // performance optimization when requesting the appropriate memory barriers.
  inline bool Dec(bool expect_one = false) {
    // This thread must observe the correct value if `refcount` reaches zero,
    // including any prior modifications by other threads. All other threads
    // must observe the result of the operation.
    if (expect_one && IsOne()) {
      // Knowing the object will be destructed, we don't decrement the counter.
      // This way, we save the _release operation_ that would be needed for
      // decrementing it below.
      return true;
    }
    int_fast32_t refcount = count_.fetch_sub(1, std::memory_order_acq_rel);
    assert(refcount > 0);
    return refcount == 1;
  }

 private:
  std::atomic<int_fast32_t> count_;
};
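
// Illustrative usage sketch (not part of this header's API): a hypothetical
// owner pairs each added reference with `Inc()` and each released reference
// with `Dec()`, and destroys the payload only when `Dec()` returns `true`.
//
//   Refcount count;                        // starts at 1 for the creator
//   count.Inc();                           // a second owner appears
//   bool last = count.Dec();               // false: one reference remains
//   if (count.Dec(/*expect_one=*/true)) {  // true: this was the last owner
//     /* destroy the reference-counted payload here */
//   }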

// Keeps a `Refcount`-ed instance of `T`.
//
// When a caller requests deletion of an instance via `SelfDelete`, `Alloc`
// is used to destroy the instance and deallocate its memory block.
template <typename T, class Alloc = std::allocator<T>>
struct Refcounted {
 public:
  using SelfAlloc =
      typename std::allocator_traits<Alloc>::template rebind_alloc<Refcounted>;

  template <typename... Arg>
  ABSL_DEPRECATED(
      "Do not use - use `New` below instead. The constructor is made public "
      "just so that it's possible to use `construct` of an allocator to "
      "construct new instances.")
  Refcounted(Alloc allocator_, Arg&&... args_)
      : refcount(),
        nested(std::forward<Arg>(args_)...),
        allocator(std::move(allocator_)) {}

  // Allocates and constructs a new `Refcounted<T>` using `allocator_`,
  // forwarding `args_` to the constructor of `T`. If construction throws,
  // the allocated memory is released before the exception is rethrown.
  template <typename... Arg>
  static Refcounted* New(Alloc allocator_, Arg&&... args_) {
    SelfAlloc self_allocator(std::move(allocator_));
    Refcounted* ptr =
        std::allocator_traits<SelfAlloc>::allocate(self_allocator, 1);
    try {
      std::allocator_traits<SelfAlloc>::construct(self_allocator, ptr,
                                                  StoredAlloc(self_allocator),
                                                  std::forward<Arg>(args_)...);
    } catch (...) {
      std::allocator_traits<SelfAlloc>::deallocate(self_allocator, ptr, 1);
      throw;
    }
    return ptr;
  }

  // Destroys `*this` and deallocates its memory block.
  void SelfDelete() && {
    // Move out the allocator to a local variable so that `this` can be
    // destroyed.
    SelfAlloc allocator_copy = std::move(allocator);
    std::allocator_traits<SelfAlloc>::destroy(allocator_copy, this);
    std::allocator_traits<SelfAlloc>::deallocate(allocator_copy, this, 1);
  }

  // Returns a copy of the stored allocator, rebound to allocate `Refcounted`
  // blocks.
  SelfAlloc Allocator() { return SelfAlloc(allocator); }

  mutable Refcount refcount;
  T nested;

 private:
  // The stored allocator is rebound to a different type than `SelfAlloc`,
  // since using `SelfAlloc` here would create a circular dependency when
  // defining the type.
  using StoredAlloc =
      typename std::allocator_traits<Alloc>::template rebind_alloc<T>;
  StoredAlloc allocator;
};
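
// Illustrative usage sketch (not part of this header; a real smart-pointer
// wrapper would normally automate this bookkeeping, and `std::string` is just
// an example payload). It shows the intended `New` / `refcount` / `SelfDelete`
// protocol:
//
//   auto* block =
//       Refcounted<std::string>::New(std::allocator<std::string>(), "hi");
//   block->refcount.Inc();                          // share with a 2nd owner
//   if (block->refcount.Dec()) {                    // false: still shared
//     std::move(*block).SelfDelete();
//   }
//   if (block->refcount.Dec(/*expect_one=*/true)) { // true: last owner
//     std::move(*block).SelfDelete();               // destroys + deallocates
//   }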

}  // namespace refptr

#endif  // REFCOUNT_H_