//===-- msan_poisoning.cpp --------------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file is a part of MemorySanitizer.
//
//===----------------------------------------------------------------------===//

#include "msan_poisoning.h"

#include "interception/interception.h"
#include "msan_origin.h"
#include "msan_thread.h"
#include "sanitizer_common/sanitizer_common.h"

DECLARE_REAL(void *, memset, void *dest, int c, uptr n)
DECLARE_REAL(void *, memcpy, void *dest, const void *src, uptr n)
DECLARE_REAL(void *, memmove, void *dest, const void *src, uptr n)

namespace __msan {

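// Returns the origin of the first poisoned byte in [addr, addr+size), or 0
// if the whole range is initialized. Origins live in 4-byte granules, hence
// the aligned SHADOW_TO_ORIGIN lookup.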
u32 GetOriginIfPoisoned(uptr addr, uptr size) {
  unsigned char *s = (unsigned char *)MEM_TO_SHADOW(addr);
  for (uptr i = 0; i < size; ++i)
    if (s[i]) return *(u32 *)SHADOW_TO_ORIGIN(((uptr)s + i) & ~3UL);
  return 0;
}

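// For each poisoned byte of a source range, as indicated by the shadow bytes
// starting at src_shadow, stores src_origin into the corresponding 4-byte
// origin granule of the destination range starting at addr.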
void SetOriginIfPoisoned(uptr addr, uptr src_shadow, uptr size,
                         u32 src_origin) {
  uptr dst_s = MEM_TO_SHADOW(addr);
  uptr src_s = src_shadow;
  uptr src_s_end = src_s + size;

  for (; src_s < src_s_end; ++dst_s, ++src_s)
    if (*(u8 *)src_s) *(u32 *)SHADOW_TO_ORIGIN(dst_s & ~3UL) = src_origin;
}

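// Copies origins from the source range to the destination range. The
// unaligned head and tail of the destination are copied only if the
// corresponding source bytes are poisoned; the aligned middle is copied
// wholesale, chaining each origin through `stack` when the origin tracking
// level is greater than 1.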
void CopyOrigin(const void *dst, const void *src, uptr size,
                StackTrace *stack) {
  if (!MEM_IS_APP(dst) || !MEM_IS_APP(src)) return;

  uptr d = (uptr)dst;
  uptr beg = d & ~3UL;
  // Copy left unaligned origin if that memory is poisoned.
  if (beg < d) {
    u32 o = GetOriginIfPoisoned((uptr)src, beg + 4 - d);
    if (o) {
      if (__msan_get_track_origins() > 1) o = ChainOrigin(o, stack);
      *(u32 *)MEM_TO_ORIGIN(beg) = o;
    }
    beg += 4;
  }

  uptr end = (d + size) & ~3UL;
  // If both ends fall into the same 4-byte slot, we are done.
  if (end < beg) return;

  // Copy right unaligned origin if that memory is poisoned.
  if (end < d + size) {
    u32 o = GetOriginIfPoisoned((uptr)src + (end - d), (d + size) - end);
    if (o) {
      if (__msan_get_track_origins() > 1) o = ChainOrigin(o, stack);
      *(u32 *)MEM_TO_ORIGIN(end) = o;
    }
  }

  if (beg < end) {
    // Align src up.
    uptr s = ((uptr)src + 3) & ~3UL;
    // FIXME: factor out to msan_copy_origin_aligned
    if (__msan_get_track_origins() > 1) {
      u32 *src = (u32 *)MEM_TO_ORIGIN(s);
      u32 *src_s = (u32 *)MEM_TO_SHADOW(s);
      u32 *src_end = (u32 *)MEM_TO_ORIGIN(s + (end - beg));
      u32 *dst = (u32 *)MEM_TO_ORIGIN(beg);
      u32 src_o = 0;
      u32 dst_o = 0;
      for (; src < src_end; ++src, ++src_s, ++dst) {
        if (!*src_s) continue;
        if (*src != src_o) {
          src_o = *src;
          dst_o = ChainOrigin(src_o, stack);
        }
        *dst = dst_o;
      }
    } else {
      REAL(memcpy)((void *)MEM_TO_ORIGIN(beg), (void *)MEM_TO_ORIGIN(s),
                   end - beg);
    }
  }
}

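// Same as CopyOrigin, but walks the range back to front so that it is safe
// when the destination overlaps the tail of the source.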
void ReverseCopyOrigin(const void *dst, const void *src, uptr size,
                       StackTrace *stack) {
  if (!MEM_IS_APP(dst) || !MEM_IS_APP(src))
    return;

  uptr d = (uptr)dst;
  uptr end = (d + size) & ~3UL;

  // Copy right unaligned origin if that memory is poisoned.
  if (end < d + size) {
    u32 o = GetOriginIfPoisoned((uptr)src + (end - d), (d + size) - end);
    if (o) {
      if (__msan_get_track_origins() > 1)
        o = ChainOrigin(o, stack);
      *(u32 *)MEM_TO_ORIGIN(end) = o;
    }
  }

  uptr beg = d & ~3UL;

  if (beg + 4 < end) {
    // Align src up.
    uptr s = ((uptr)src + 3) & ~3UL;
    if (__msan_get_track_origins() > 1) {
      u32 *src = (u32 *)MEM_TO_ORIGIN(s + end - beg - 4);
      u32 *src_s = (u32 *)MEM_TO_SHADOW(s + end - beg - 4);
      u32 *src_begin = (u32 *)MEM_TO_ORIGIN(s);
      u32 *dst = (u32 *)MEM_TO_ORIGIN(end - 4);
      u32 src_o = 0;
      u32 dst_o = 0;
      for (; src >= src_begin; --src, --src_s, --dst) {
        if (!*src_s)
          continue;
        if (*src != src_o) {
          src_o = *src;
          dst_o = ChainOrigin(src_o, stack);
        }
        *dst = dst_o;
      }
    } else {
      REAL(memmove)
      ((void *)MEM_TO_ORIGIN(beg), (void *)MEM_TO_ORIGIN(s), end - beg - 4);
    }
  }

  // Copy left unaligned origin if that memory is poisoned.
  if (beg < d) {
    u32 o = GetOriginIfPoisoned((uptr)src, beg + 4 - d);
    if (o) {
      if (__msan_get_track_origins() > 1)
        o = ChainOrigin(o, stack);
      *(u32 *)MEM_TO_ORIGIN(beg) = o;
    }
  }
}

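// Transfers origins with memmove semantics: if the aligned destination range
// starts inside the aligned source range, origins are copied back to front;
// otherwise they are copied front to back.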
void MoveOrigin(const void *dst, const void *src, uptr size,
                StackTrace *stack) {
  // If the destination origin range overlaps the source origin range, move
  // origins by copying them in reverse order; otherwise, copy origins in
  // the normal order.
  uptr src_aligned_beg = reinterpret_cast<uptr>(src) & ~3UL;
  uptr src_aligned_end = (reinterpret_cast<uptr>(src) + size) & ~3UL;
  uptr dst_aligned_beg = reinterpret_cast<uptr>(dst) & ~3UL;
  if (dst_aligned_beg < src_aligned_end && dst_aligned_beg >= src_aligned_beg)
    return ReverseCopyOrigin(dst, src, size, stack);
  return CopyOrigin(dst, src, size, stack);
}

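// memmove for metadata: moves the shadow and origin for size bytes of
// application memory from src to dst, tolerating overlap.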
void MoveShadowAndOrigin(const void *dst, const void *src, uptr size,
                         StackTrace *stack) {
  if (!MEM_IS_APP(dst)) return;
  if (!MEM_IS_APP(src)) return;
  if (src == dst) return;
  // MoveOrigin transfers origins by referring to their shadows. So we
  // need to move origins before moving shadows.
  if (__msan_get_track_origins())
    MoveOrigin(dst, src, size, stack);
  REAL(memmove)((void *)MEM_TO_SHADOW((uptr)dst),
                (void *)MEM_TO_SHADOW((uptr)src), size);
}

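// memcpy for metadata: copies the shadow and origin for size bytes of
// application memory from src to dst.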
void CopyShadowAndOrigin(const void *dst, const void *src, uptr size,
                         StackTrace *stack) {
  if (!MEM_IS_APP(dst)) return;
  if (!MEM_IS_APP(src)) return;
  // Because an origin range is slightly larger than the corresponding app
  // range, even a memcpy over non-overlapping app ranges may produce
  // overlapping origin ranges, so MoveOrigin is used here as well.
  REAL(memcpy)((void *)MEM_TO_SHADOW((uptr)dst),
               (void *)MEM_TO_SHADOW((uptr)src), size);
  if (__msan_get_track_origins())
    MoveOrigin(dst, src, size, stack);
}

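// Copies application bytes and keeps their shadow and origin in sync.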
void CopyMemory(void *dst, const void *src, uptr size, StackTrace *stack) {
  REAL(memcpy)(dst, src, size);
  CopyShadowAndOrigin(dst, src, size, stack);
}

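// Fills the shadow of [ptr, ptr+size) with the given value. When zeroing a
// range at least common_flags()->clear_shadow_mmap_threshold bytes long, the
// page-aligned middle is remapped with MmapFixedSuperNoReserve instead of
// being memset, releasing the backing pages.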
void SetShadow(const void *ptr, uptr size, u8 value) {
  uptr PageSize = GetPageSizeCached();
  uptr shadow_beg = MEM_TO_SHADOW(ptr);
  uptr shadow_end = shadow_beg + size;
  if (value ||
      shadow_end - shadow_beg < common_flags()->clear_shadow_mmap_threshold) {
    REAL(memset)((void *)shadow_beg, value, shadow_end - shadow_beg);
  } else {
    uptr page_beg = RoundUpTo(shadow_beg, PageSize);
    uptr page_end = RoundDownTo(shadow_end, PageSize);

    if (page_beg >= page_end) {
      REAL(memset)((void *)shadow_beg, 0, shadow_end - shadow_beg);
    } else {
      if (page_beg != shadow_beg) {
        REAL(memset)((void *)shadow_beg, 0, page_beg - shadow_beg);
      }
      if (page_end != shadow_end) {
        REAL(memset)((void *)page_end, 0, shadow_end - page_end);
      }
      if (!MmapFixedSuperNoReserve(page_beg, page_end - page_beg))
        Die();

      if (__msan_get_track_origins()) {
        // No need to set origin for zero shadow, but we can release pages.
        uptr origin_beg = RoundUpTo(MEM_TO_ORIGIN(ptr), PageSize);
        if (!MmapFixedSuperNoReserve(origin_beg, page_end - page_beg))
          Die();
      }
    }
  }
}

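// Writes the 32-bit origin id over the whole origin range of [dst, dst+size).
// E.g. (illustrative addresses): if MEM_TO_ORIGIN(dst) is 0x1006 and size is
// 5, the store range is widened to [0x1004, 0x100c) so that both bounds are
// 4-byte aligned.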
void SetOrigin(const void *dst, uptr size, u32 origin) {
  // Origin mapping is 4 bytes per 4 bytes of application memory.
  // Here we extend the range such that its left and right bounds are both
  // 4 byte aligned.
  uptr x = MEM_TO_ORIGIN((uptr)dst);
  uptr beg = x & ~3UL;               // align down.
  uptr end = (x + size + 3) & ~3UL;  // align up.
  u64 origin64 = ((u64)origin << 32) | origin;
  // This is like memset, but the value is 32-bit. We unroll by 2 to write
  // 64 bits at once. May want to unroll further to get 128-bit stores.
  if (beg & 7ULL) {
    *(u32 *)beg = origin;
    beg += 4;
  }
  for (uptr addr = beg; addr < (end & ~7UL); addr += 8) *(u64 *)addr = origin64;
  if (end & 7ULL) *(u32 *)(end - 4) = origin;
}

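// Marks [dst, dst+size) as fully uninitialized and, when origin tracking is
// on, tags it with a fresh heap origin; origins are not updated while the
// current thread is inside a signal handler.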
void PoisonMemory(const void *dst, uptr size, StackTrace *stack) {
  SetShadow(dst, size, (u8)-1);

  if (__msan_get_track_origins()) {
    MsanThread *t = GetCurrentThread();
    if (t && t->InSignalHandler())
      return;
    Origin o = Origin::CreateHeapOrigin(stack);
    SetOrigin(dst, size, o.raw_id());
  }
}

} // namespace __msan