GitHub Repository: godotengine/godot
Path: blob/master/thirdparty/manifold/src/sparse.h
// Copyright 2021 The Manifold Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once
#include "./parallel.h"
#include "./utils.h"
#include "./vec.h"
#include "manifold/common.h"
#include "manifold/optional_assert.h"

namespace {
template <typename T>
inline bool FirstFinite(T v) {
  return std::isfinite(v[0]);
}

template <>
inline bool FirstFinite<double>(double v) {
  return std::isfinite(v);
}
}  // namespace
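
// The FirstFinite helpers above form the filter predicate used by
// SparseIndices::KeepFinite below: the generic overload treats T as a
// vector-like value and tests only its first component, while the double
// specialization handles plain scalars. A row is kept only when that leading
// value is finite (neither NaN nor +/-infinity).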

namespace manifold {

/** @ingroup Private */
class SparseIndices {
  // Sparse indices where {p1: q1, p2: q2, ...} are laid out in memory as
  // p1 q1 p2 q2 ... or q1 p1 q2 p2 ..., depending on endianness, so that the
  // pairs can be sorted by the 64-bit key (p << 32) | q.
 public:
#if defined(__BYTE_ORDER) && __BYTE_ORDER == __BIG_ENDIAN || \
    defined(__BIG_ENDIAN__) || defined(__ARMEB__) || defined(__THUMBEB__) || \
    defined(__AARCH64EB__) || defined(_MIBSEB) || defined(__MIBSEB) || \
    defined(__MIBSEB__)
  static constexpr size_t pOffset = 0;
#elif defined(__BYTE_ORDER) && __BYTE_ORDER == __LITTLE_ENDIAN || \
    defined(__LITTLE_ENDIAN__) || defined(__ARMEL__) || \
    defined(__THUMBEL__) || defined(__AARCH64EL__) || defined(_MIPSEL) || \
    defined(__MIPSEL) || defined(__MIPSEL__) || defined(__EMSCRIPTEN__) || \
    defined(_WIN32)
  static constexpr size_t pOffset = 1;
#else
#error "unknown architecture"
#endif
  static constexpr int64_t EncodePQ(int p, int q) {
    return (int64_t(p) << 32) | q;
  }
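  // Example: for non-negative p and q, EncodePQ(3, 7) == 0x0000000300000007.
  // On a little-endian machine the low 32-bit half (q) is stored first, so
  // pOffset == 1 there and 0 on big-endian targets; Get() and Copy() use
  // pOffset to pick the p or q column out of the interleaved int32 view.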

  SparseIndices() = default;
  SparseIndices(size_t size) { data_ = Vec<char>(size * sizeof(int64_t)); }

  void Clear() { data_.clear(false); }

  void FromIndices(const std::vector<SparseIndices>& indices) {
    std::vector<size_t> sizes;
    size_t total_size = 0;
    for (const auto& ind : indices) {
      sizes.push_back(total_size);
      total_size += ind.data_.size();
    }
    data_ = Vec<char>(total_size);
    for_each_n(ExecutionPolicy::Par, countAt(0), indices.size(), [&](size_t i) {
      std::copy(indices[i].data_.begin(), indices[i].data_.end(),
                data_.begin() + sizes[i]);
    });
  }

  size_t size() const { return data_.size() / sizeof(int64_t); }

  Vec<int> Copy(bool use_q) const {
    Vec<int> out(size());
    size_t offset = pOffset;
    if (use_q) offset = 1 - offset;
    const int* p = ptr();
    for_each(autoPolicy(out.size()), countAt(0_uz), countAt(out.size()),
             [&](size_t i) { out[i] = p[i * 2 + offset]; });
    return out;
  }
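  // Copy(false) extracts the p column and Copy(true) the q column into a
  // contiguous Vec<int>, leaving the interleaved storage untouched.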

  void Sort() {
    VecView<int64_t> view = AsVec64();
    stable_sort(view.begin(), view.end());
  }

  void Resize(size_t size) { data_.resize(size * sizeof(int64_t), -1); }

  inline int& Get(size_t i, bool use_q) {
    if (use_q)
      return ptr()[2 * i + 1 - pOffset];
    else
      return ptr()[2 * i + pOffset];
  }

  inline int Get(size_t i, bool use_q) const {
    if (use_q)
      return ptr()[2 * i + 1 - pOffset];
    else
      return ptr()[2 * i + pOffset];
  }

  inline int64_t GetPQ(size_t i) const {
    VecView<const int64_t> view = AsVec64();
    return view[i];
  }

  inline void Set(size_t i, int p, int q) {
    VecView<int64_t> view = AsVec64();
    view[i] = EncodePQ(p, q);
  }

  inline void SetPQ(size_t i, int64_t pq) {
    VecView<int64_t> view = AsVec64();
    view[i] = pq;
  }

  VecView<int64_t> AsVec64() {
    return VecView<int64_t>(reinterpret_cast<int64_t*>(data_.data()),
                            data_.size() / sizeof(int64_t));
  }

  VecView<const int64_t> AsVec64() const {
    return VecView<const int64_t>(
        reinterpret_cast<const int64_t*>(data_.data()),
        data_.size() / sizeof(int64_t));
  }

  VecView<int32_t> AsVec32() {
    return VecView<int32_t>(reinterpret_cast<int32_t*>(data_.data()),
                            data_.size() / sizeof(int32_t));
  }

  VecView<const int32_t> AsVec32() const {
    return VecView<const int32_t>(
        reinterpret_cast<const int32_t*>(data_.data()),
        data_.size() / sizeof(int32_t));
  }
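  // AsVec64() views the raw byte buffer as whole (p, q) keys for sorting and
  // de-duplication, while AsVec32() exposes the same bytes as interleaved
  // 32-bit p/q values; data_ is always a multiple of sizeof(int64_t) bytes,
  // so both reinterpretations cover exactly the stored pairs.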

  inline void Add(int p, int q, bool seq = false) {
    data_.extend(sizeof(int64_t), seq);
    Set(size() - 1, p, q);
  }

  void Unique() {
    Sort();
    VecView<int64_t> view = AsVec64();
    size_t newSize = unique(view.begin(), view.end()) - view.begin();
    Resize(newSize);
  }
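  // Unique() sorts by the 64-bit key and drops adjacent duplicates, so
  // afterwards each (p, q) pair appears exactly once in ascending EncodePQ
  // order.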

  size_t RemoveZeros(Vec<int>& S) {
    DEBUG_ASSERT(S.size() == size(), userErr,
                 "Different number of values than indices!");

    Vec<size_t> new2Old(S.size());
    sequence(new2Old.begin(), new2Old.end());

    size_t size = copy_if(countAt(0_uz), countAt(S.size()), new2Old.begin(),
                          [&S](const size_t i) { return S[i] != 0; }) -
                  new2Old.begin();
    new2Old.resize(size);

    Permute(S, new2Old);
    Vec<char> tmp(std::move(data_));
    Resize(size);
    gather(new2Old.begin(), new2Old.end(),
           reinterpret_cast<int64_t*>(tmp.data()),
           reinterpret_cast<int64_t*>(data_.data()));

    return size;
  }
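  // RemoveZeros above and KeepFinite below share the same compaction pattern:
  // copy_if builds a new2Old map of the surviving row indices, Permute shrinks
  // the companion vector(s) to match, and gather pulls the surviving (p, q)
  // pairs from the old storage into a freshly sized buffer.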

  template <typename T>
  size_t KeepFinite(Vec<T>& v, Vec<int>& x) {
    DEBUG_ASSERT(x.size() == size(), userErr,
                 "Different number of values than indices!");

    Vec<int> new2Old(v.size());
    size_t size = copy_if(countAt(0_uz), countAt(v.size()), new2Old.begin(),
                          [&v](size_t i) { return FirstFinite(v[i]); }) -
                  new2Old.begin();
    new2Old.resize(size);

    Permute(v, new2Old);
    Permute(x, new2Old);
    Vec<char> tmp(std::move(data_));
    Resize(size);
    gather(new2Old.begin(), new2Old.end(),
           reinterpret_cast<int64_t*>(tmp.data()),
           reinterpret_cast<int64_t*>(data_.data()));

    return size;
  }

#ifdef MANIFOLD_DEBUG
  void Dump() const {
    std::cout << "SparseIndices = " << std::endl;
    const int* p = ptr();
    for (size_t i = 0; i < size(); ++i) {
      std::cout << i << ", p = " << Get(i, false) << ", q = " << Get(i, true)
                << std::endl;
    }
    std::cout << std::endl;
  }
#endif

 private:
  Vec<char> data_;
  inline int* ptr() { return reinterpret_cast<int32_t*>(data_.data()); }
  inline const int* ptr() const {
    return reinterpret_cast<const int32_t*>(data_.data());
  }
};
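
// Usage sketch (illustrative only; the calls assume the members declared
// above):
//
//   SparseIndices pairs;
//   pairs.Add(3, 7);
//   pairs.Add(3, 7);  // duplicate
//   pairs.Add(1, 2);
//   pairs.Unique();   // sorts by (p << 32) | q and drops the duplicate
//   // pairs.size() == 2, pairs.Get(0, false) == 1, pairs.Get(0, true) == 2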

}  // namespace manifold