From Jason Turner

[atomics.types.pointer]

Files changed (1): tmp/tmppy54dvbo/{from.md → to.md} (renamed, +48 −29)

**from.md** (old wording)

@@ -3,41 +3,49 @@
``` cpp
namespace std {
  template<class T> struct atomic<T*> {
    using value_type = T*;
    using difference_type = ptrdiff_t;
    static constexpr bool is_always_lock_free = implementation-defined;  // whether a given atomic type's operations are always lock free
    bool is_lock_free() const volatile noexcept;
    bool is_lock_free() const noexcept;
-   void store(T*, memory_order = memory_order_seq_cst) volatile noexcept;
-   void store(T*, memory_order = memory_order_seq_cst) noexcept;
-   T* load(memory_order = memory_order_seq_cst) const volatile noexcept;
-   T* load(memory_order = memory_order_seq_cst) const noexcept;
-   operator T*() const volatile noexcept;
-   operator T*() const noexcept;
-   T* exchange(T*, memory_order = memory_order_seq_cst) volatile noexcept;
-   T* exchange(T*, memory_order = memory_order_seq_cst) noexcept;
-   bool compare_exchange_weak(T*&, T*, memory_order, memory_order) volatile noexcept;
-   bool compare_exchange_weak(T*&, T*, memory_order, memory_order) noexcept;
-   bool compare_exchange_strong(T*&, T*, memory_order, memory_order) volatile noexcept;
-   bool compare_exchange_strong(T*&, T*, memory_order, memory_order) noexcept;
-   bool compare_exchange_weak(T*&, T*, memory_order = memory_order_seq_cst) volatile noexcept;
-   bool compare_exchange_weak(T*&, T*, memory_order = memory_order_seq_cst) noexcept;
-   bool compare_exchange_strong(T*&, T*, memory_order = memory_order_seq_cst) volatile noexcept;
-   bool compare_exchange_strong(T*&, T*, memory_order = memory_order_seq_cst) noexcept;
-   T* fetch_add(ptrdiff_t, memory_order = memory_order_seq_cst) volatile noexcept;
-   T* fetch_add(ptrdiff_t, memory_order = memory_order_seq_cst) noexcept;
-   T* fetch_sub(ptrdiff_t, memory_order = memory_order_seq_cst) volatile noexcept;
-   T* fetch_sub(ptrdiff_t, memory_order = memory_order_seq_cst) noexcept;

-   atomic() noexcept = default;
    constexpr atomic(T*) noexcept;
    atomic(const atomic&) = delete;
    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;
    T* operator=(T*) volatile noexcept;
    T* operator=(T*) noexcept;

    T* operator++(int) volatile noexcept;
    T* operator++(int) noexcept;
    T* operator--(int) volatile noexcept;
    T* operator--(int) noexcept;
@@ -47,47 +55,55 @@ namespace std {
    T* operator--() noexcept;
    T* operator+=(ptrdiff_t) volatile noexcept;
    T* operator+=(ptrdiff_t) noexcept;
    T* operator-=(ptrdiff_t) volatile noexcept;
    T* operator-=(ptrdiff_t) noexcept;
  };
}
```

There is a partial specialization of the `atomic` class template for
pointers. Specializations of this partial specialization are
- standard-layout structs. They each have a trivial default constructor
- and a trivial destructor.

Descriptions are provided below only for members that differ from the
primary template.

The following operations perform pointer arithmetic. The key, operator,
and computation correspondence is:

- **Table: Atomic pointer computations** <a id="tab:atomic.pointer.computations">[tab:atomic.pointer.computations]</a>

| Key   | Op  | Computation | Key   | Op  | Computation |
| ----- | --- | ----------- | ----- | --- | ----------- |
| `add` | `+` | addition    | `sub` | `-` | subtraction |

``` cpp
- T* fetch_key(ptrdiff_t operand, memory_order order = memory_order_seq_cst) volatile noexcept;
- T* fetch_key(ptrdiff_t operand, memory_order order = memory_order_seq_cst) noexcept;
```

- *Requires:* `T` shall be an object type, otherwise the program is
- ill-formed.

[*Note 1*: Pointer arithmetic on `void*` or function pointers is
ill-formed. — *end note*]

*Effects:* Atomically replaces the value pointed to by `this` with the
result of the computation applied to the value pointed to by `this` and
the given `operand`. Memory is affected according to the value of
`order`. These operations are atomic read-modify-write
- operations ([[intro.multithread]]).

*Returns:* Atomically, the value pointed to by `this` immediately before
the effects.

*Remarks:* The result may be an undefined address, but the operations
@@ -96,8 +112,11 @@ otherwise have no undefined behavior.

``` cpp
T* operator op=(ptrdiff_t operand) volatile noexcept;
T* operator op=(ptrdiff_t operand) noexcept;
```

*Effects:* Equivalent to:
`return fetch_`*`key`*`(operand) `*`op`*` operand;`

**to.md** (new wording)

@@ -3,41 +3,49 @@
``` cpp
namespace std {
  template<class T> struct atomic<T*> {
    using value_type = T*;
    using difference_type = ptrdiff_t;
+
    static constexpr bool is_always_lock_free = implementation-defined;  // whether a given atomic type's operations are always lock free
    bool is_lock_free() const volatile noexcept;
    bool is_lock_free() const noexcept;

+   constexpr atomic() noexcept;
    constexpr atomic(T*) noexcept;
    atomic(const atomic&) = delete;
    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;
+
+   void store(T*, memory_order = memory_order::seq_cst) volatile noexcept;
+   void store(T*, memory_order = memory_order::seq_cst) noexcept;
    T* operator=(T*) volatile noexcept;
    T* operator=(T*) noexcept;
+   T* load(memory_order = memory_order::seq_cst) const volatile noexcept;
+   T* load(memory_order = memory_order::seq_cst) const noexcept;
+   operator T*() const volatile noexcept;
+   operator T*() const noexcept;
+
+   T* exchange(T*, memory_order = memory_order::seq_cst) volatile noexcept;
+   T* exchange(T*, memory_order = memory_order::seq_cst) noexcept;
+   bool compare_exchange_weak(T*&, T*, memory_order, memory_order) volatile noexcept;
+   bool compare_exchange_weak(T*&, T*, memory_order, memory_order) noexcept;
+   bool compare_exchange_strong(T*&, T*, memory_order, memory_order) volatile noexcept;
+   bool compare_exchange_strong(T*&, T*, memory_order, memory_order) noexcept;
+   bool compare_exchange_weak(T*&, T*,
+                              memory_order = memory_order::seq_cst) volatile noexcept;
+   bool compare_exchange_weak(T*&, T*,
+                              memory_order = memory_order::seq_cst) noexcept;
+   bool compare_exchange_strong(T*&, T*,
+                                memory_order = memory_order::seq_cst) volatile noexcept;
+   bool compare_exchange_strong(T*&, T*,
+                                memory_order = memory_order::seq_cst) noexcept;
+
+   T* fetch_add(ptrdiff_t, memory_order = memory_order::seq_cst) volatile noexcept;
+   T* fetch_add(ptrdiff_t, memory_order = memory_order::seq_cst) noexcept;
+   T* fetch_sub(ptrdiff_t, memory_order = memory_order::seq_cst) volatile noexcept;
+   T* fetch_sub(ptrdiff_t, memory_order = memory_order::seq_cst) noexcept;

    T* operator++(int) volatile noexcept;
    T* operator++(int) noexcept;
    T* operator--(int) volatile noexcept;
    T* operator--(int) noexcept;
@@ -47,47 +55,55 @@ namespace std {
    T* operator--() noexcept;
    T* operator+=(ptrdiff_t) volatile noexcept;
    T* operator+=(ptrdiff_t) noexcept;
    T* operator-=(ptrdiff_t) volatile noexcept;
    T* operator-=(ptrdiff_t) noexcept;
+
+   void wait(T*, memory_order = memory_order::seq_cst) const volatile noexcept;
+   void wait(T*, memory_order = memory_order::seq_cst) const noexcept;
+   void notify_one() volatile noexcept;
+   void notify_one() noexcept;
+   void notify_all() volatile noexcept;
+   void notify_all() noexcept;
  };
}
```
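
The synopsis above is standard wording, not runnable code. As a quick illustration that is not part of the diff, here is a minimal usage sketch, assuming a C++20 compiler with `<atomic>` and `<thread>`; the `slots` array and the thread setup are illustrative assumptions, not taken from the standard text.

``` cpp
// Illustrative sketch only: exercises members from the new atomic<T*> synopsis,
// including the scoped memory_order::seq_cst spelling and the new wait/notify_one.
#include <atomic>
#include <thread>

int main() {
    static int slots[3] = {};

    std::atomic<int*> p{&slots[0]};                  // constexpr atomic(T*) noexcept

    p.store(&slots[1], std::memory_order::seq_cst);  // scoped enumerator spelling used in the new wording
    int* cur = p.load(std::memory_order::seq_cst);

    int* expected = cur;                             // single-order compare_exchange overload
    p.compare_exchange_strong(expected, &slots[2]);  // p now holds &slots[2]

    // wait/notify_one appear only in the new synopsis.
    std::thread waiter([&p] {
        p.wait(&slots[2]);                           // blocks while the stored pointer equals &slots[2]
    });
    p.store(nullptr);
    p.notify_one();                                  // wakes the waiting thread
    waiter.join();
}
```

Apart from `wait` and the notify functions, the member set is unchanged; the hunk mostly reorders the declarations, switches the defaulted arguments to the `memory_order::seq_cst` spelling, and replaces the defaulted default constructor with `constexpr atomic() noexcept;`.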

There is a partial specialization of the `atomic` class template for
pointers. Specializations of this partial specialization are
+ standard-layout structs. They each have a trivial destructor.

Descriptions are provided below only for members that differ from the
primary template.

The following operations perform pointer arithmetic. The key, operator,
and computation correspondence is:

+ **Table: Atomic pointer computations** <a id="atomic.types.pointer.comp">[atomic.types.pointer.comp]</a>

| Key   | Op  | Computation | Key   | Op  | Computation |
| ----- | --- | ----------- | ----- | --- | ----------- |
| `add` | `+` | addition    | `sub` | `-` | subtraction |

``` cpp
+ T* fetch_key(ptrdiff_t operand, memory_order order = memory_order::seq_cst) volatile noexcept;
+ T* fetch_key(ptrdiff_t operand, memory_order order = memory_order::seq_cst) noexcept;
```

+ *Constraints:* For the `volatile` overload of this function,
+ `is_always_lock_free` is `true`.
+
+ *Mandates:* `T` is a complete object type.

[*Note 1*: Pointer arithmetic on `void*` or function pointers is
ill-formed. — *end note*]

*Effects:* Atomically replaces the value pointed to by `this` with the
result of the computation applied to the value pointed to by `this` and
the given `operand`. Memory is affected according to the value of
`order`. These operations are atomic read-modify-write
+ operations [[intro.multithread]].

*Returns:* Atomically, the value pointed to by `this` immediately before
the effects.

*Remarks:* The result may be an undefined address, but the operations
otherwise have no undefined behavior.
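
As an aside that is not part of the wording, a small sketch of what the *Effects* and *Returns* clauses mean in practice for `fetch_add`/`fetch_sub` on an `atomic<int*>`: the returned value is the pointer held immediately before the operation, and the arithmetic is in units of the pointed-to type. The `buf` array is an illustrative assumption.

``` cpp
// Illustrative sketch only: fetch_add/fetch_sub return the old pointer and
// advance/retreat in units of T (here, int).
#include <atomic>
#include <cassert>

int main() {
    int buf[8] = {};
    std::atomic<int*> p{&buf[0]};

    int* before = p.fetch_add(3);   // atomic read-modify-write; returns the previous pointer
    assert(before == &buf[0]);
    assert(p.load() == &buf[3]);    // advanced by 3 elements, i.e. 3 * sizeof(int) bytes

    int* prev = p.fetch_sub(1);     // "sub" corresponds to built-in -
    assert(prev == &buf[3]);
    assert(p.load() == &buf[2]);
}
```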
 
@@ -96,8 +112,11 @@
``` cpp
T* operator op=(ptrdiff_t operand) volatile noexcept;
T* operator op=(ptrdiff_t operand) noexcept;
```

+ *Constraints:* For the `volatile` overload of this function,
+ `is_always_lock_free` is `true`.
+
*Effects:* Equivalent to:
`return fetch_`*`key`*`(operand) `*`op`*` operand;`
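
Again as an illustration rather than part of the wording: the "Equivalent to" clause means that `operator+=` returns the updated pointer, i.e. the old value returned by `fetch_add` plus the operand. A minimal sketch, assuming an illustrative `buf` array:

``` cpp
// Illustrative sketch only: operator+= on atomic<T*> behaves like
// fetch_add(operand) + operand.
#include <atomic>
#include <cassert>

int main() {
    int buf[4] = {};
    std::atomic<int*> p{buf};

    int* r1 = (p += 2);            // operator+=(ptrdiff_t) returns the new pointer
    assert(r1 == buf + 2);

    int* r2 = p.fetch_add(2) + 2;  // the equivalence spelled out in the Effects clause
    assert(r2 == buf + 4);         // one-past-the-end pointer; compared only, never dereferenced
    assert(p.load() == buf + 4);
}
```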