GitHub Repository: stenzek/duckstation
Path: blob/master/dep/rapidyaml/include/c4/yml/detail/stack.hpp
#ifndef _C4_YML_DETAIL_STACK_HPP_
#define _C4_YML_DETAIL_STACK_HPP_

#ifndef _C4_YML_COMMON_HPP_
#include "../common.hpp"
#endif

#ifdef RYML_DBG
# include <type_traits>
#endif

#include <string.h>

namespace c4 {
namespace yml {
namespace detail {

/** A lightweight contiguous stack with SSO. This avoids a dependency on std. */
template<class T, size_t N=16>
class stack
{
    static_assert(std::is_trivially_copyable<T>::value, "T must be trivially copyable");
    static_assert(std::is_trivially_destructible<T>::value, "T must be trivially destructible");

    enum : size_t { sso_size = N };

public:

    T m_buf[N];
    T * m_stack;
    size_t m_size;
    size_t m_capacity;
    Callbacks m_callbacks;

public:

    constexpr static bool is_contiguous() { return true; }

    stack(Callbacks const& cb)
        : m_buf()
        , m_stack(m_buf)
        , m_size(0)
        , m_capacity(N)
        , m_callbacks(cb) {}
    stack() : stack(get_callbacks()) {}
    ~stack()
    {
        _free();
    }

    stack(stack const& that) noexcept : stack(that.m_callbacks)
    {
        resize(that.m_size);
        _cp(&that);
    }

    stack(stack &&that) noexcept : stack(that.m_callbacks)
    {
        _mv(&that);
    }

    stack& operator= (stack const& that) noexcept
    {
        _cb(that.m_callbacks);
        resize(that.m_size);
        _cp(&that);
        return *this;
    }

    stack& operator= (stack &&that) noexcept
    {
        _cb(that.m_callbacks);
        _mv(&that);
        return *this;
    }

public:

    size_t size() const { return m_size; }
    size_t empty() const { return m_size == 0; }
    size_t capacity() const { return m_capacity; }

    void clear()
    {
        m_size = 0;
    }

    void resize(size_t sz)
    {
        reserve(sz);
        m_size = sz;
    }

    void reserve(size_t sz);

    void push(T const& C4_RESTRICT n)
    {
        RYML_ASSERT((const char*)&n + sizeof(T) < (const char*)m_stack || &n > m_stack + m_capacity);
        if(m_size == m_capacity)
        {
            size_t cap = m_capacity == 0 ? N : 2 * m_capacity;
            reserve(cap);
        }
        m_stack[m_size] = n;
        ++m_size;
    }

    void push_top()
    {
        RYML_ASSERT(m_size > 0);
        if(m_size == m_capacity)
        {
            size_t cap = m_capacity == 0 ? N : 2 * m_capacity;
            reserve(cap);
        }
        m_stack[m_size] = m_stack[m_size - 1];
        ++m_size;
    }

    T const& C4_RESTRICT pop()
    {
        RYML_ASSERT(m_size > 0);
        --m_size;
        return m_stack[m_size];
    }

    C4_ALWAYS_INLINE T const& C4_RESTRICT top() const { RYML_ASSERT(m_size > 0); return m_stack[m_size - 1]; }
    C4_ALWAYS_INLINE T &      C4_RESTRICT top()       { RYML_ASSERT(m_size > 0); return m_stack[m_size - 1]; }

    C4_ALWAYS_INLINE T const& C4_RESTRICT bottom() const { RYML_ASSERT(m_size > 0); return m_stack[0]; }
    C4_ALWAYS_INLINE T &      C4_RESTRICT bottom()       { RYML_ASSERT(m_size > 0); return m_stack[0]; }

    C4_ALWAYS_INLINE T const& C4_RESTRICT top(size_t i) const { RYML_ASSERT(i < m_size); return m_stack[m_size - 1 - i]; }
    C4_ALWAYS_INLINE T &      C4_RESTRICT top(size_t i)       { RYML_ASSERT(i < m_size); return m_stack[m_size - 1 - i]; }

    C4_ALWAYS_INLINE T const& C4_RESTRICT bottom(size_t i) const { RYML_ASSERT(i < m_size); return m_stack[i]; }
    C4_ALWAYS_INLINE T &      C4_RESTRICT bottom(size_t i)       { RYML_ASSERT(i < m_size); return m_stack[i]; }

    C4_ALWAYS_INLINE T const& C4_RESTRICT operator[](size_t i) const { RYML_ASSERT(i < m_size); return m_stack[i]; }
    C4_ALWAYS_INLINE T &      C4_RESTRICT operator[](size_t i)       { RYML_ASSERT(i < m_size); return m_stack[i]; }

public:

    using iterator = T *;
    using const_iterator = T const *;

    iterator begin() { return m_stack; }
    iterator end  () { return m_stack + m_size; }

    const_iterator begin() const { return (const_iterator)m_stack; }
    const_iterator end  () const { return (const_iterator)m_stack + m_size; }

public:
    void _free();
    void _cp(stack const* C4_RESTRICT that);
    void _mv(stack * that);
    void _cb(Callbacks const& cb);
};
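// Usage sketch (a minimal illustration, not part of the class above; the
// element type, inline size and values are hypothetical, any trivially
// copyable T works):
//
//     c4::yml::detail::stack<int, 4> s;  // default-constructs with get_callbacks()
//     s.push(1);
//     s.push(2);
//     RYML_ASSERT(s.top() == 2);
//     int v = s.pop();                   // pop() returns a reference into the buffer; copy it out
//     RYML_ASSERT(v == 2 && s.size() == 1);
//     s.push(3); s.push(4); s.push(5);   // still within the inline buffer (size == N == 4)
//     s.push(6);                         // the 5th element triggers reserve(8) and a heap allocation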

//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------

template<class T, size_t N>
void stack<T, N>::reserve(size_t sz)
{
    if(sz <= m_size)
        return;
    if(sz <= N)
    {
        m_stack = m_buf;
        m_capacity = N;
        return;
    }
    T *buf = (T*) m_callbacks.m_allocate(sz * sizeof(T), m_stack, m_callbacks.m_user_data);
    memcpy(buf, m_stack, m_size * sizeof(T));
    if(m_stack != m_buf)
    {
        m_callbacks.m_free(m_stack, m_capacity * sizeof(T), m_callbacks.m_user_data);
    }
    m_stack = buf;
    m_capacity = sz;
}
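// Growth sketch for the default N=16 (illustrative; the numbers follow from
// push() doubling the capacity whenever the stack is full):
//   pushes 1..16 : elements stay in the inline m_buf, capacity == 16, no allocation
//   push 17      : reserve(32) allocates on the heap and memcpy's the 16 elements over
//   push 33      : reserve(64) reallocates and frees the previous heap block
// Note that reserve() returns early when sz <= m_size, so it never shrinks
// below the current element count.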

//-----------------------------------------------------------------------------

template<class T, size_t N>
void stack<T, N>::_free()
{
    RYML_ASSERT(m_stack != nullptr); // this structure cannot be memset() to zero
    if(m_stack != m_buf)
    {
        m_callbacks.m_free(m_stack, m_capacity * sizeof(T), m_callbacks.m_user_data);
        m_stack = m_buf;
        m_size = N;
        m_capacity = N;
    }
    else
    {
        RYML_ASSERT(m_capacity == N);
    }
}


//-----------------------------------------------------------------------------

template<class T, size_t N>
void stack<T, N>::_cp(stack const* C4_RESTRICT that)
{
    if(that->m_stack != that->m_buf)
    {
        RYML_ASSERT(that->m_capacity > N);
        RYML_ASSERT(that->m_size <= that->m_capacity);
    }
    else
    {
        RYML_ASSERT(that->m_capacity <= N);
        RYML_ASSERT(that->m_size <= that->m_capacity);
    }
    memcpy(m_stack, that->m_stack, that->m_size * sizeof(T));
    m_size = that->m_size;
    m_capacity = that->m_size < N ? N : that->m_size;
    m_callbacks = that->m_callbacks;
}


//-----------------------------------------------------------------------------

template<class T, size_t N>
void stack<T, N>::_mv(stack * that)
{
    if(that->m_stack != that->m_buf)
    {
        RYML_ASSERT(that->m_capacity > N);
        RYML_ASSERT(that->m_size <= that->m_capacity);
        m_stack = that->m_stack;
    }
    else
    {
        RYML_ASSERT(that->m_capacity <= N);
        RYML_ASSERT(that->m_size <= that->m_capacity);
        memcpy(m_buf, that->m_buf, that->m_size * sizeof(T));
        m_stack = m_buf;
    }
    m_size = that->m_size;
    m_capacity = that->m_capacity;
    m_callbacks = that->m_callbacks;
    // make sure no deallocation happens on destruction
    RYML_ASSERT(that->m_stack != m_buf);
    that->m_stack = that->m_buf;
    that->m_capacity = N;
    that->m_size = 0;
}
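// Note on the move above: the inline m_buf lives inside the object, so its
// pointer cannot be stolen; when the source is still in SSO mode the elements
// are memcpy'd into this object's m_buf instead. In both branches the source
// is then pointed back at its own m_buf with size zero, so its destructor
// (via _free()) deallocates nothing.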

//-----------------------------------------------------------------------------

template<class T, size_t N>
void stack<T, N>::_cb(Callbacks const& cb)
{
    if(cb != m_callbacks)
    {
        _free();
        m_callbacks = cb;
    }
}

} // namespace detail
} // namespace yml
} // namespace c4

#endif /* _C4_YML_DETAIL_STACK_HPP_ */