xenium — concurrent_ptr.hpp
//
// Copyright (c) 2018-2020 Manuel Pöter.
// Licensed under the MIT License. See LICENSE file in the project root for full license information.
//

6#ifndef XENIUM_DETAL_CONCURRENT_PTR_HPP
7#define XENIUM_DETAL_CONCURRENT_PTR_HPP
8
9#include <xenium/marked_ptr.hpp>
10
11#include <atomic>
12
13namespace xenium{ namespace reclamation { namespace detail {
14
16 template <
17 class T,
18 std::size_t N,
19 template <class, class MarkedPtr> class GuardPtr
20 >
22 public:
24 using guard_ptr = GuardPtr<T, marked_ptr>;
25
26 concurrent_ptr(const marked_ptr& p = marked_ptr()) noexcept : ptr(p) {}
27 concurrent_ptr(const concurrent_ptr&) = delete;
29 concurrent_ptr& operator=(const concurrent_ptr&) = delete;
30 concurrent_ptr& operator=(concurrent_ptr&&) = delete;
31
32 // Atomic load that does not guard target from being reclaimed.
33 marked_ptr load(std::memory_order order = std::memory_order_seq_cst) const
34 {
35 return ptr.load(order);
36 }
37
38 // Atomic store.
39 void store(const marked_ptr& src, std::memory_order order = std::memory_order_seq_cst)
40 {
41 ptr.store(src, order);
42 }
43
44 // Shorthand for store (src.get())
45 void store(const guard_ptr& src, std::memory_order order = std::memory_order_seq_cst)
46 {
47 ptr.store(src.get(), order);
48 }
49
50 bool compare_exchange_weak(marked_ptr& expected, marked_ptr desired, std::memory_order order = std::memory_order_seq_cst)
51 {
52 return ptr.compare_exchange_weak(expected, desired, order);
53 }
54
55 bool compare_exchange_weak(marked_ptr& expected, marked_ptr desired, std::memory_order order = std::memory_order_seq_cst) volatile
56 {
57 return ptr.compare_exchange_weak(expected, desired, order);
58 }
59
60 bool compare_exchange_weak(marked_ptr& expected, marked_ptr desired, std::memory_order success, std::memory_order failure)
61 {
62 return ptr.compare_exchange_weak(expected, desired, success, failure);
63 }
64
65 bool compare_exchange_weak(marked_ptr& expected, marked_ptr desired, std::memory_order success, std::memory_order failure) volatile
66 {
67 return ptr.compare_exchange_weak(expected, desired, success, failure);
68 }
69
70 bool compare_exchange_strong(marked_ptr& expected, marked_ptr desired, std::memory_order order = std::memory_order_seq_cst)
71 {
72 return ptr.compare_exchange_strong(expected, desired, order);
73 }
74
75 bool compare_exchange_strong(marked_ptr& expected, marked_ptr desired, std::memory_order order = std::memory_order_seq_cst) volatile
76 {
77 return ptr.compare_exchange_strong(expected, desired, order);
78 }
79
80 bool compare_exchange_strong(marked_ptr& expected, marked_ptr desired, std::memory_order success, std::memory_order failure)
81 {
82 return ptr.compare_exchange_strong(expected, desired, success, failure);
83 }
84
85 bool compare_exchange_strong(marked_ptr& expected, marked_ptr desired, std::memory_order success, std::memory_order failure) volatile
86 {
87 return ptr.compare_exchange_strong(expected, desired, success, failure);
88 }
89
90 private:
91 std::atomic<marked_ptr> ptr;
92 };
93}}}
94
95#endif
Doxygen cross-reference notes (extraction residue):
- xenium::marked_ptr — a pointer with an embedded mark/tag value. Definition: marked_ptr.hpp:41.
- marked_ptr::get() — `T* get() const noexcept`: returns the underlying pointer with the mark bits stripped off. Definition: marked_ptr.hpp:77.
- concurrent_ptr — T must be derived from enable_concurrent_ptr<T>; D is a deleter. Definition: concurrent_ptr.hpp:21.