GitHub Repository: freebsd/freebsd-src
Path: blob/main/sys/compat/linuxkpi/common/include/asm/unaligned.h
/*-
 * SPDX-License-Identifier: BSD-2-Clause
 *
 * Copyright (c) 2020,2023 The FreeBSD Foundation
 *
 * This software was developed by Björn Zeeb under sponsorship from
 * the FreeBSD Foundation.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#ifndef _LINUXKPI_ASM_UNALIGNED_H
#define _LINUXKPI_ASM_UNALIGNED_H

#include <linux/types.h>
#include <asm/byteorder.h>

static __inline uint16_t
get_unaligned_le16(const void *p)
{

	return (le16_to_cpup((const __le16 *)p));
}

static __inline uint32_t
get_unaligned_le32(const void *p)
{

	return (le32_to_cpup((const __le32 *)p));
}

static __inline void
put_unaligned_le16(__le16 v, void *p)
{
	__le16 x;

	x = cpu_to_le16(v);
	memcpy(p, &x, sizeof(x));
}

static __inline void
put_unaligned_le32(__le32 v, void *p)
{
	__le32 x;

	x = cpu_to_le32(v);
	memcpy(p, &x, sizeof(x));
}

static __inline void
put_unaligned_le64(__le64 v, void *p)
{
	__le64 x;

	x = cpu_to_le64(v);
	memcpy(p, &x, sizeof(x));
}

static __inline uint16_t
get_unaligned_be16(const void *p)
{

	return (be16_to_cpup((const __be16 *)p));
}

static __inline uint32_t
get_unaligned_be32(const void *p)
{

	return (be32_to_cpup((const __be32 *)p));
}

static __inline uint64_t
get_unaligned_be64(const void *p)
{

	return (be64_to_cpup((const __be64 *)p));
}

#endif /* _LINUXKPI_ASM_UNALIGNED_H */
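For context only: these LinuxKPI helpers go through byte-order conversion plus memcpy so that multi-byte loads and stores never dereference a misaligned pointer directly, which would fault on strict-alignment architectures. The sketch below is a minimal, self-contained userland illustration of the same memcpy technique; it is not part of the header, does not include it, and the demo_* names are purely illustrative.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Illustrative userland equivalent of a memcpy-based unaligned LE32 read. */
static uint32_t
demo_get_unaligned_le32(const void *p)
{
	uint8_t b[4];

	memcpy(b, p, sizeof(b));
	return ((uint32_t)b[0] | (uint32_t)b[1] << 8 |
	    (uint32_t)b[2] << 16 | (uint32_t)b[3] << 24);
}

/* Illustrative userland equivalent of a memcpy-based unaligned LE16 write. */
static void
demo_put_unaligned_le16(uint16_t v, void *p)
{
	uint8_t b[2];

	b[0] = v & 0xff;
	b[1] = v >> 8;
	memcpy(p, b, sizeof(b));
}

int
main(void)
{
	/* Offset 1 makes the 32-bit read start at an odd, unaligned address. */
	uint8_t buf[8] = { 0x00, 0x78, 0x56, 0x34, 0x12, 0x00, 0x00, 0x00 };

	printf("0x%08x\n", (unsigned)demo_get_unaligned_le32(&buf[1]));	/* 0x12345678 */
	demo_put_unaligned_le16(0xbeef, &buf[5]);
	printf("%02x %02x\n", buf[5], buf[6]);				/* ef be */
	return (0);
}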