/*
 * Path: blob/main/sys/compat/linuxkpi/common/include/asm/byteorder.h
 * (39604 views)
 */
/*-1* Copyright (c) 2010 Isilon Systems, Inc.2* Copyright (c) 2010 iX Systems, Inc.3* Copyright (c) 2010 Panasas, Inc.4* Copyright (c) 2013-2016 Mellanox Technologies, Ltd.5* All rights reserved.6*7* Redistribution and use in source and binary forms, with or without8* modification, are permitted provided that the following conditions9* are met:10* 1. Redistributions of source code must retain the above copyright11* notice unmodified, this list of conditions, and the following12* disclaimer.13* 2. Redistributions in binary form must reproduce the above copyright14* notice, this list of conditions and the following disclaimer in the15* documentation and/or other materials provided with the distribution.16*17* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR18* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES19* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.20* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,21* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT22* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,23* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY24* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT25* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF26* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.27*/28#ifndef _LINUXKPI_ASM_BYTEORDER_H_29#define _LINUXKPI_ASM_BYTEORDER_H_3031#include <sys/types.h>32#include <sys/endian.h>33#include <asm/types.h>3435#if BYTE_ORDER == LITTLE_ENDIAN36#define __LITTLE_ENDIAN37#else38#define __BIG_ENDIAN39#endif4041#define __cpu_to_le64(x) htole64(x)42#define cpu_to_le64(x) __cpu_to_le64(x)43#define __le64_to_cpu(x) le64toh(x)44#define le64_to_cpu(x) __le64_to_cpu(x)45#define __cpu_to_le32(x) htole32(x)46#define cpu_to_le32(x) __cpu_to_le32(x)47#define __le32_to_cpu(x) le32toh(x)48#define le32_to_cpu(x) 
__le32_to_cpu(x)49#define __cpu_to_le16(x) htole16(x)50#define cpu_to_le16(x) __cpu_to_le16(x)51#define __le16_to_cpu(x) le16toh(x)52#define le16_to_cpu(x) __le16_to_cpu(x)53#define __cpu_to_be64(x) htobe64(x)54#define cpu_to_be64(x) __cpu_to_be64(x)55#define __be64_to_cpu(x) be64toh(x)56#define be64_to_cpu(x) __be64_to_cpu(x)57#define __cpu_to_be32(x) htobe32(x)58#define cpu_to_be32(x) __cpu_to_be32(x)59#define __be32_to_cpu(x) be32toh(x)60#define be32_to_cpu(x) __be32_to_cpu(x)61#define __cpu_to_be16(x) htobe16(x)62#define cpu_to_be16(x) __cpu_to_be16(x)63#define __be16_to_cpu(x) be16toh(x)64#define be16_to_cpu(x) __be16_to_cpu(x)6566#define __cpu_to_le64p(x) htole64(*((const uint64_t *)(x)))67#define cpu_to_le64p(x) __cpu_to_le64p(x)68#define __le64_to_cpup(x) le64toh(*((const uint64_t *)(x)))69#define le64_to_cpup(x) __le64_to_cpup(x)70#define __cpu_to_le32p(x) htole32(*((const uint32_t *)(x)))71#define cpu_to_le32p(x) __cpu_to_le32p(x)72#define __le32_to_cpup(x) le32toh(*((const uint32_t *)(x)))73#define le32_to_cpup(x) __le32_to_cpup(x)74#define __cpu_to_le16p(x) htole16(*((const uint16_t *)(x)))75#define cpu_to_le16p(x) __cpu_to_le16p(x)76#define __le16_to_cpup(x) le16toh(*((const uint16_t *)(x)))77#define le16_to_cpup(x) __le16_to_cpup(x)78#define __cpu_to_be64p(x) htobe64(*((const uint64_t *)(x)))79#define cpu_to_be64p(x) __cpu_to_be64p(x)80#define __be64_to_cpup(x) be64toh(*((const uint64_t *)(x)))81#define be64_to_cpup(x) __be64_to_cpup(x)82#define __cpu_to_be32p(x) htobe32(*((const uint32_t *)(x)))83#define cpu_to_be32p(x) __cpu_to_be32p(x)84#define __be32_to_cpup(x) be32toh(*((const uint32_t *)(x)))85#define be32_to_cpup(x) __be32_to_cpup(x)86#define __cpu_to_be16p(x) htobe16(*((const uint16_t *)(x)))87#define cpu_to_be16p(x) __cpu_to_be16p(x)88#define __be16_to_cpup(x) be16toh(*((const uint16_t *)(x)))89#define be16_to_cpup(x) __be16_to_cpup(x)909192#define __cpu_to_le64s(x) do { *((uint64_t *)(x)) = cpu_to_le64p((x)); } while (0)93#define 
cpu_to_le64s(x) __cpu_to_le64s(x)94#define __le64_to_cpus(x) do { *((uint64_t *)(x)) = le64_to_cpup((x)); } while (0)95#define le64_to_cpus(x) __le64_to_cpus(x)96#define __cpu_to_le32s(x) do { *((uint32_t *)(x)) = cpu_to_le32p((x)); } while (0)97#define cpu_to_le32s(x) __cpu_to_le32s(x)98#define __le32_to_cpus(x) do { *((uint32_t *)(x)) = le32_to_cpup((x)); } while (0)99#define le32_to_cpus(x) __le32_to_cpus(x)100#define __cpu_to_le16s(x) do { *((uint16_t *)(x)) = cpu_to_le16p((x)); } while (0)101#define cpu_to_le16s(x) __cpu_to_le16s(x)102#define __le16_to_cpus(x) do { *((uint16_t *)(x)) = le16_to_cpup((x)); } while (0)103#define le16_to_cpus(x) __le16_to_cpus(x)104#define __cpu_to_be64s(x) do { *((uint64_t *)(x)) = cpu_to_be64p((x)); } while (0)105#define cpu_to_be64s(x) __cpu_to_be64s(x)106#define __be64_to_cpus(x) do { *((uint64_t *)(x)) = be64_to_cpup((x)); } while (0)107#define be64_to_cpus(x) __be64_to_cpus(x)108#define __cpu_to_be32s(x) do { *((uint32_t *)(x)) = cpu_to_be32p((x)); } while (0)109#define cpu_to_be32s(x) __cpu_to_be32s(x)110#define __be32_to_cpus(x) do { *((uint32_t *)(x)) = be32_to_cpup((x)); } while (0)111#define be32_to_cpus(x) __be32_to_cpus(x)112#define __cpu_to_be16s(x) do { *((uint16_t *)(x)) = cpu_to_be16p((x)); } while (0)113#define cpu_to_be16s(x) __cpu_to_be16s(x)114#define __be16_to_cpus(x) do { *((uint16_t *)(x)) = be16_to_cpup((x)); } while (0)115#define be16_to_cpus(x) __be16_to_cpus(x)116117#define swab16(x) bswap16(x)118#define swab32(x) bswap32(x)119#define swab64(x) bswap64(x)120121static inline void122be64_add_cpu(uint64_t *var, uint64_t val)123{124*var = cpu_to_be64(be64_to_cpu(*var) + val);125}126127static inline void128be32_add_cpu(uint32_t *var, uint32_t val)129{130*var = cpu_to_be32(be32_to_cpu(*var) + val);131}132133static inline void134be16_add_cpu(uint16_t *var, uint16_t val)135{136*var = cpu_to_be16(be16_to_cpu(*var) + val);137}138139static __inline void140le64_add_cpu(uint64_t *var, uint64_t val)141{142*var = 
cpu_to_le64(le64_to_cpu(*var) + val);143}144145static __inline void146le32_add_cpu(uint32_t *var, uint32_t val)147{148*var = cpu_to_le32(le32_to_cpu(*var) + val);149}150151static inline void152le16_add_cpu(uint16_t *var, uint16_t val)153{154*var = cpu_to_le16(le16_to_cpu(*var) + val);155}156157#endif /* _LINUXKPI_ASM_BYTEORDER_H_ */158159160