GitHub Repository: torvalds/linux
Path: blob/master/arch/alpha/lib/csum_partial_copy.c

// SPDX-License-Identifier: GPL-2.0
/*
 * csum_partial_copy - do IP checksumming and copy
 *
 * (C) Copyright 1996 Linus Torvalds
 * accelerated versions (and 21264 assembly versions) contributed by
 *	Rick Gorton	<[email protected]>
 *
 * Don't look at this too closely - you'll go mad. The things
 * we do for performance..
 */

#include <linux/types.h>
#include <linux/string.h>
#include <linux/uaccess.h>
#include <net/checksum.h>

#define ldq_u(x,y) \
__asm__ __volatile__("ldq_u %0,%1":"=r" (x):"m" (*(const unsigned long *)(y)))

#define stq_u(x,y) \
__asm__ __volatile__("stq_u %1,%0":"=m" (*(unsigned long *)(y)):"r" (x))

#define extql(x,y,z) \
__asm__ __volatile__("extql %1,%2,%0":"=r" (z):"r" (x),"r" (y))

#define extqh(x,y,z) \
__asm__ __volatile__("extqh %1,%2,%0":"=r" (z):"r" (x),"r" (y))

#define mskql(x,y,z) \
__asm__ __volatile__("mskql %1,%2,%0":"=r" (z):"r" (x),"r" (y))

#define mskqh(x,y,z) \
__asm__ __volatile__("mskqh %1,%2,%0":"=r" (z):"r" (x),"r" (y))

#define insql(x,y,z) \
__asm__ __volatile__("insql %1,%2,%0":"=r" (z):"r" (x),"r" (y))

#define insqh(x,y,z) \
__asm__ __volatile__("insqh %1,%2,%0":"=r" (z):"r" (x),"r" (y))
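
/*
 * These wrap the Alpha unaligned-access primitives: ldq_u loads the
 * aligned quadword containing a (possibly misaligned) address,
 * extql/extqh extract the low/high pieces of a value that spans two
 * such quadwords, mskql/mskqh clear bytes above/below a byte offset,
 * and insql/insqh position bytes for a misaligned store.  A sketch of
 * an unaligned 64-bit fetch built from them, mirroring the classic
 * Alpha sequence ("p", "lo", "hi", "lo_part", "hi_part" and "val" are
 * made-up names for illustration, not kernel API):
 *
 *	unsigned long lo, hi, lo_part, hi_part, val;
 *	ldq_u(lo, p);			   aligned quadword covering p
 *	ldq_u(hi, p + 7);		   quadword covering the last byte
 *	extql(lo, p, lo_part);		   bytes from p up, shifted down
 *	extqh(hi, p, hi_part);		   bytes below p+8, shifted up
 *	val = lo_part | hi_part;	   the unaligned 64-bit value
 */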

#define __get_word(insn,x,ptr)				\
({							\
	long __guu_err;					\
	__asm__ __volatile__(				\
	"1:	"#insn" %0,%2\n"			\
	"2:\n"						\
	EXC(1b,2b,%0,%1)				\
		: "=r"(x), "=r"(__guu_err)		\
		: "m"(__m(ptr)), "1"(0));		\
	__guu_err;					\
})
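
/*
 * __get_word evaluates to 0 on success and non-zero if the load
 * faulted: the EXC() entry points the exception table at the "2:"
 * label, so a bad user pointer lands there with the error register
 * set instead of oopsing.  The callers below bail out and return a
 * checksum of 0 in that case.
 */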

static inline unsigned short from64to16(unsigned long x)
{
	/* Using extract instructions is a bit more efficient
	   than the original shift/bitmask version.  */

	union {
		unsigned long	ul;
		unsigned int	ui[2];
		unsigned short	us[4];
	} in_v, tmp_v, out_v;

	in_v.ul = x;
	tmp_v.ul = (unsigned long) in_v.ui[0] + (unsigned long) in_v.ui[1];

	/* Since the bits of tmp_v.us[3] are going to always be zero,
	   we don't have to bother to add that in.  */
	out_v.ul = (unsigned long) tmp_v.us[0] + (unsigned long) tmp_v.us[1]
			+ (unsigned long) tmp_v.us[2];

	/* Similarly, out_v.us[2] is always zero for the final add.  */
	return out_v.us[0] + out_v.us[1];
}
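
/*
 * The same fold, written portably (a reference sketch only, not used
 * by this file): add the two 32-bit halves, then the 16-bit pieces,
 * relying on each stage being too small to overflow 64 bits.
 *
 *	unsigned long t = (x & 0xffffffff) + (x >> 32);
 *	t = (t & 0xffff) + ((t >> 16) & 0xffff) + (t >> 32);
 *	return (t & 0xffff) + (t >> 16);
 */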


/*
 * Ok. This isn't fun, but this is the EASY case.
 */
static inline unsigned long
csum_partial_cfu_aligned(const unsigned long __user *src, unsigned long *dst,
			 long len)
{
	unsigned long checksum = ~0U;
	unsigned long carry = 0;

	while (len >= 0) {
		unsigned long word;
		if (__get_word(ldq, word, src))
			return 0;
		checksum += carry;
		src++;
		checksum += word;
		len -= 8;
		carry = checksum < word;
		*dst = word;
		dst++;
	}
	len += 8;
	checksum += carry;
	if (len) {
		unsigned long word, tmp;
		if (__get_word(ldq, word, src))
			return 0;
		tmp = *dst;
		mskql(word, len, word);
		checksum += word;
		mskqh(tmp, len, tmp);
		carry = checksum < word;
		*dst = word | tmp;
		checksum += carry;
	}
	return checksum;
}
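
/*
 * The "carry = checksum < word" idiom implements the end-around carry
 * of ones-complement addition: if the 64-bit add wrapped, the new sum
 * is smaller than the addend just added, and the lost carry bit is
 * re-added on the next pass.  For example (illustrative values), with
 * checksum = ~0UL and word = 2 the add wraps to 1; 1 < 2 detects the
 * wrap, and the next "checksum += carry" folds the bit back in.
 */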

/*
 * This is even less fun, but this is still reasonably
 * easy.
 */
static inline unsigned long
csum_partial_cfu_dest_aligned(const unsigned long __user *src,
			      unsigned long *dst,
			      unsigned long soff,
			      long len)
{
	unsigned long first;
	unsigned long word, carry;
	unsigned long lastsrc = 7+len+(unsigned long)src;
	unsigned long checksum = ~0U;

	if (__get_word(ldq_u, first, src))
		return 0;
	carry = 0;
	while (len >= 0) {
		unsigned long second;

		if (__get_word(ldq_u, second, src+1))
			return 0;
		extql(first, soff, word);
		len -= 8;
		src++;
		extqh(second, soff, first);
		checksum += carry;
		word |= first;
		first = second;
		checksum += word;
		*dst = word;
		dst++;
		carry = checksum < word;
	}
	len += 8;
	checksum += carry;
	if (len) {
		unsigned long tmp;
		unsigned long second;
		if (__get_word(ldq_u, second, lastsrc))
			return 0;
		tmp = *dst;
		extql(first, soff, word);
		extqh(second, soff, first);
		word |= first;
		mskql(word, len, word);
		checksum += word;
		mskqh(tmp, len, tmp);
		carry = checksum < word;
		*dst = word | tmp;
		checksum += carry;
	}
	return checksum;
}
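
/*
 * Note how the loop keeps one quadword of lookahead ("first = second")
 * so each misaligned source word costs only one new ldq_u per
 * iteration; each pair is spliced with extql/extqh as sketched after
 * the macros above.
 */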

/*
 * This is slightly less fun than the above..
 */
static inline unsigned long
csum_partial_cfu_src_aligned(const unsigned long __user *src,
			     unsigned long *dst,
			     unsigned long doff,
			     long len,
			     unsigned long partial_dest)
{
	unsigned long carry = 0;
	unsigned long word;
	unsigned long second_dest;
	unsigned long checksum = ~0U;

	mskql(partial_dest, doff, partial_dest);
	while (len >= 0) {
		if (__get_word(ldq, word, src))
			return 0;
		len -= 8;
		insql(word, doff, second_dest);
		checksum += carry;
		stq_u(partial_dest | second_dest, dst);
		src++;
		checksum += word;
		insqh(word, doff, partial_dest);
		carry = checksum < word;
		dst++;
	}
	len += 8;
	if (len) {
		checksum += carry;
		if (__get_word(ldq, word, src))
			return 0;
		mskql(word, len, word);
		len -= 8;
		checksum += word;
		insql(word, doff, second_dest);
		len += doff;
		carry = checksum < word;
		partial_dest |= second_dest;
		if (len >= 0) {
			stq_u(partial_dest, dst);
			if (!len) goto out;
			dst++;
			insqh(word, doff, partial_dest);
		}
		doff = len;
	}
	ldq_u(second_dest, dst);
	mskqh(second_dest, doff, second_dest);
	stq_u(partial_dest | second_dest, dst);
out:
	checksum += carry;
	return checksum;
}
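
/*
 * Here the writes, not the reads, are misaligned: insql/insqh split
 * each source word into the parts destined for two adjacent
 * destination quadwords, and the final store read-modify-writes the
 * last quadword (ldq_u + mskqh) so bytes beyond the copy are
 * preserved.
 */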

/*
 * This is so totally un-fun that it's frightening. Don't
 * look at this too closely, you'll go blind.
 */
static inline unsigned long
csum_partial_cfu_unaligned(const unsigned long __user * src,
			   unsigned long * dst,
			   unsigned long soff, unsigned long doff,
			   long len, unsigned long partial_dest)
{
	unsigned long carry = 0;
	unsigned long first;
	unsigned long lastsrc;
	unsigned long checksum = ~0U;

	if (__get_word(ldq_u, first, src))
		return 0;
	lastsrc = 7+len+(unsigned long)src;
	mskql(partial_dest, doff, partial_dest);
	while (len >= 0) {
		unsigned long second, word;
		unsigned long second_dest;

		if (__get_word(ldq_u, second, src+1))
			return 0;
		extql(first, soff, word);
		checksum += carry;
		len -= 8;
		extqh(second, soff, first);
		src++;
		word |= first;
		first = second;
		insql(word, doff, second_dest);
		checksum += word;
		stq_u(partial_dest | second_dest, dst);
		carry = checksum < word;
		insqh(word, doff, partial_dest);
		dst++;
	}
	len += doff;
	checksum += carry;
	if (len >= 0) {
		unsigned long second, word;
		unsigned long second_dest;

		if (__get_word(ldq_u, second, lastsrc))
			return 0;
		extql(first, soff, word);
		extqh(second, soff, first);
		word |= first;
		first = second;
		mskql(word, len-doff, word);
		checksum += word;
		insql(word, doff, second_dest);
		carry = checksum < word;
		stq_u(partial_dest | second_dest, dst);
		if (len) {
			ldq_u(second_dest, dst+1);
			insqh(word, doff, partial_dest);
			mskqh(second_dest, len, second_dest);
			stq_u(partial_dest | second_dest, dst+1);
		}
		checksum += carry;
	} else {
		unsigned long second, word;
		unsigned long second_dest;

		if (__get_word(ldq_u, second, lastsrc))
			return 0;
		extql(first, soff, word);
		extqh(second, soff, first);
		word |= first;
		ldq_u(second_dest, dst);
		mskql(word, len-doff, word);
		checksum += word;
		mskqh(second_dest, len, second_dest);
		carry = checksum < word;
		insql(word, doff, word);
		stq_u(partial_dest | word | second_dest, dst);
		checksum += carry;
	}
	return checksum;
}
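
/*
 * The tail of the fully unaligned case biases len by doff rather than
 * by 8: len >= 0 afterwards means the remaining bytes spill into a
 * second destination quadword (two stores, the second merged via
 * mskqh), while len < 0 means everything left fits in one
 * read-modify-write of a single destination word.
 */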

static __wsum __csum_and_copy(const void __user *src, void *dst, int len)
{
	unsigned long soff = 7 & (unsigned long) src;
	unsigned long doff = 7 & (unsigned long) dst;
	unsigned long checksum;

	if (!doff) {
		if (!soff)
			checksum = csum_partial_cfu_aligned(
				(const unsigned long __user *) src,
				(unsigned long *) dst, len-8);
		else
			checksum = csum_partial_cfu_dest_aligned(
				(const unsigned long __user *) src,
				(unsigned long *) dst,
				soff, len-8);
	} else {
		unsigned long partial_dest;
		ldq_u(partial_dest, dst);
		if (!soff)
			checksum = csum_partial_cfu_src_aligned(
				(const unsigned long __user *) src,
				(unsigned long *) dst,
				doff, len-8, partial_dest);
		else
			checksum = csum_partial_cfu_unaligned(
				(const unsigned long __user *) src,
				(unsigned long *) dst,
				soff, doff, len-8, partial_dest);
	}
	return (__force __wsum)from64to16(checksum);
}
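
/*
 * All four helpers take len-8 so their main loops can run on
 * "while (len >= 0)" for every whole quadword and handle a final
 * 1..7 byte tail separately; the 64-bit running sum is folded to
 * 16 bits only once, on the way out.
 */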

__wsum
csum_and_copy_from_user(const void __user *src, void *dst, int len)
{
	if (!access_ok(src, len))
		return 0;
	return __csum_and_copy(src, dst, len);
}

__wsum
csum_partial_copy_nocheck(const void *src, void *dst, int len)
{
	return __csum_and_copy((__force const void __user *)src,
			       dst, len);
}
EXPORT_SYMBOL(csum_partial_copy_nocheck);
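
/*
 * Typical use (an illustrative sketch, not code from this file):
 * callers generally fold the returned 32-bit partial sum to a final
 * 16-bit checksum with csum_fold(), pulled in via <net/checksum.h>:
 *
 *	__wsum partial = csum_and_copy_from_user(ubuf, kbuf, len);
 *	__sum16 sum    = csum_fold(partial);
 *
 * where "ubuf", "kbuf" and "len" are made-up names.  A zero return
 * here also signals a faulting user copy, so callers must treat it
 * accordingly.
 */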