GitHub Repository: freebsd/freebsd-src
Path: blob/main/sys/compat/linuxkpi/common/include/asm/unaligned.h
/*-
 * SPDX-License-Identifier: BSD-2-Clause
 *
 * Copyright (c) 2020,2023 The FreeBSD Foundation
 *
 * This software was developed by Björn Zeeb under sponsorship from
 * the FreeBSD Foundation.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#ifndef	_LINUXKPI_ASM_UNALIGNED_H
#define	_LINUXKPI_ASM_UNALIGNED_H

#include <linux/types.h>
#include <asm/byteorder.h>

static __inline uint16_t
get_unaligned_le16(const void *p)
{

	return (le16_to_cpup((const __le16 *)p));
}

static __inline uint32_t
get_unaligned_le32(const void *p)
{

	return (le32_to_cpup((const __le32 *)p));
}

static __inline uint64_t
get_unaligned_le64(const void *p)
{

	return (le64_to_cpup((const __le64 *)p));
}

static __inline void
put_unaligned_le16(__le16 v, void *p)
{
	__le16 x;

	x = cpu_to_le16(v);
	memcpy(p, &x, sizeof(x));
}

static __inline void
put_unaligned_le32(__le32 v, void *p)
{
	__le32 x;

	x = cpu_to_le32(v);
	memcpy(p, &x, sizeof(x));
}

static __inline void
put_unaligned_le64(__le64 v, void *p)
{
	__le64 x;

	x = cpu_to_le64(v);
	memcpy(p, &x, sizeof(x));
}

static __inline uint16_t
get_unaligned_be16(const void *p)
{

	return (be16_to_cpup((const __be16 *)p));
}

static __inline uint32_t
get_unaligned_be32(const void *p)
{

	return (be32_to_cpup((const __be32 *)p));
}

static __inline uint64_t
get_unaligned_be64(const void *p)
{

	return (be64_to_cpup((const __be64 *)p));
}

#endif	/* _LINUXKPI_ASM_UNALIGNED_H */
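Usage note: as a rough illustration of the semantics these LinuxKPI inlines provide, the standalone userland sketch below mimics get_unaligned_le32()/put_unaligned_le32() with explicit byte-wise access, so the pointer never needs natural alignment. The sketch_* names and the example buffer are hypothetical stand-ins for illustration only; they are not part of this header or of the Linux KPI.

/*
 * Standalone userland sketch (not kernel code) illustrating the semantics
 * of get_unaligned_le32()/put_unaligned_le32(): byte-wise, little-endian
 * access that is safe on a misaligned pointer.
 */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Hypothetical stand-in for get_unaligned_le32(). */
static uint32_t
sketch_get_unaligned_le32(const void *p)
{
	const uint8_t *b = p;

	/* Assemble the value from little-endian byte order. */
	return ((uint32_t)b[0] | (uint32_t)b[1] << 8 |
	    (uint32_t)b[2] << 16 | (uint32_t)b[3] << 24);
}

/* Hypothetical stand-in for put_unaligned_le32(). */
static void
sketch_put_unaligned_le32(uint32_t v, void *p)
{
	uint8_t b[4];

	/* Serialize to little-endian bytes, then copy byte-wise. */
	b[0] = v & 0xff;
	b[1] = (v >> 8) & 0xff;
	b[2] = (v >> 16) & 0xff;
	b[3] = (v >> 24) & 0xff;
	memcpy(p, b, sizeof(b));
}

int
main(void)
{
	/* Offset 1 is deliberately misaligned for a 32-bit access. */
	uint8_t buf[8] = { 0 };

	sketch_put_unaligned_le32(0x11223344, &buf[1]);
	printf("0x%08x\n", sketch_get_unaligned_le32(&buf[1]));	/* prints 0x11223344 */
	return (0);
}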