// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Copyright (C) 2017 Imagination Technologies
 * Author: Paul Burton <[email protected]>
 */

#include <linux/bitops.h>
#include <linux/export.h>
#include <asm/cmpxchg.h>

unsigned long __xchg_small(volatile void *ptr, unsigned long val, unsigned int size)
{
	u32 old32, new32, load32, mask;
	volatile u32 *ptr32;
	unsigned int shift;

	/* Check that ptr is naturally aligned */
	WARN_ON((unsigned long)ptr & (size - 1));

	/* Mask value to the correct size. */
	mask = GENMASK((size * BITS_PER_BYTE) - 1, 0);
	val &= mask;

	/*
	 * Calculate a shift & mask that correspond to the value we wish to
	 * exchange within the naturally aligned 4 byte integer that includes
	 * it.
	 */
	shift = (unsigned long)ptr & 0x3;
	if (IS_ENABLED(CONFIG_CPU_BIG_ENDIAN))
		shift ^= sizeof(u32) - size;
	shift *= BITS_PER_BYTE;
	mask <<= shift;

	/*
	 * Calculate a pointer to the naturally aligned 4 byte integer that
	 * includes our byte of interest, and load its value.
	 */
	ptr32 = (volatile u32 *)((unsigned long)ptr & ~0x3);
	load32 = *ptr32;

	do {
		old32 = load32;
		new32 = (load32 & ~mask) | (val << shift);
		load32 = arch_cmpxchg(ptr32, old32, new32);
	} while (load32 != old32);

	return (load32 & mask) >> shift;
}

unsigned long __cmpxchg_small(volatile void *ptr, unsigned long old,
			      unsigned long new, unsigned int size)
{
	u32 mask, old32, new32, load32, load;
	volatile u32 *ptr32;
	unsigned int shift;

	/* Check that ptr is naturally aligned */
	WARN_ON((unsigned long)ptr & (size - 1));

	/* Mask inputs to the correct size. */
	mask = GENMASK((size * BITS_PER_BYTE) - 1, 0);
	old &= mask;
	new &= mask;

	/*
	 * Calculate a shift & mask that correspond to the value we wish to
	 * compare & exchange within the naturally aligned 4 byte integer
	 * that includes it.
	 */
	shift = (unsigned long)ptr & 0x3;
	if (IS_ENABLED(CONFIG_CPU_BIG_ENDIAN))
		shift ^= sizeof(u32) - size;
	shift *= BITS_PER_BYTE;
	mask <<= shift;

	/*
	 * Calculate a pointer to the naturally aligned 4 byte integer that
	 * includes our byte of interest, and load its value.
	 */
	ptr32 = (volatile u32 *)((unsigned long)ptr & ~0x3);
	load32 = *ptr32;

	while (true) {
		/*
		 * Ensure the byte we want to exchange matches the expected
		 * old value, and if not then bail.
		 */
		load = (load32 & mask) >> shift;
		if (load != old)
			return load;

		/*
		 * Calculate the old & new values of the naturally aligned
		 * 4 byte integer that include the byte we want to exchange.
		 * Attempt to exchange the old value for the new value, and
		 * return if we succeed.
		 */
		old32 = (load32 & ~mask) | (old << shift);
		new32 = (load32 & ~mask) | (new << shift);
		load32 = arch_cmpxchg(ptr32, old32, new32);
		if (load32 == old32)
			return old;
	}
}
EXPORT_SYMBOL(__cmpxchg_small);
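
/*
 * Editor's illustration, not part of the kernel file: a minimal userspace
 * sketch of the same emulation technique used by __xchg_small(), with the
 * GCC/Clang builtin __atomic_compare_exchange_n() standing in for
 * arch_cmpxchg(). The name xchg_u8_demo is hypothetical, the sketch assumes
 * a little-endian CPU (the kernel code handles big-endian via the
 * shift ^= sizeof(u32) - size adjustment above), and it should be compiled
 * with -fno-strict-aliasing since it accesses a byte array through a u32
 * pointer, as the kernel itself is built.
 *
 * Worked example of the shift/mask math: exchanging the byte at an address
 * with (ptr & 0x3) == 2 gives shift = 2 * 8 = 16 and mask = 0xff << 16 =
 * 0x00ff0000, so only bits 16..23 of the containing word are replaced.
 */
#include <stdint.h>
#include <stdio.h>

static uint8_t xchg_u8_demo(volatile uint8_t *ptr, uint8_t val)
{
	/* Pointer to the naturally aligned 32-bit word containing *ptr. */
	volatile uint32_t *ptr32 =
		(volatile uint32_t *)((uintptr_t)ptr & ~(uintptr_t)0x3);
	/* Byte offset within that word, in bits; little-endian assumed. */
	unsigned int shift = ((uintptr_t)ptr & 0x3) * 8;
	uint32_t mask = (uint32_t)0xff << shift;
	uint32_t old32 = *ptr32, new32;

	do {
		new32 = (old32 & ~mask) | ((uint32_t)val << shift);
		/*
		 * On failure the builtin writes the freshly observed word
		 * back into old32, so the next iteration recomputes new32
		 * from current memory contents, like the kernel loop above.
		 */
	} while (!__atomic_compare_exchange_n(ptr32, &old32, new32, 0,
					      __ATOMIC_SEQ_CST,
					      __ATOMIC_SEQ_CST));

	return (old32 & mask) >> shift;
}

int main(void)
{
	/* 4-byte alignment guarantees all four bytes share one word. */
	_Alignas(4) volatile uint8_t buf[4] = { 0x11, 0x22, 0x33, 0x44 };
	uint8_t prev = xchg_u8_demo(&buf[2], 0xaa);

	/* Expect prev == 0x33, buf[2] == 0xaa, other bytes untouched. */
	printf("prev=0x%02x buf[2]=0x%02x\n", prev, buf[2]);
	return 0;
}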
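
/*
 * Editor's illustration, not part of the kernel file: a second hypothetical
 * userspace sketch, this time mirroring __cmpxchg_small(). The point of
 * interest is the bail-out path: if the target byte no longer holds the
 * expected old value, the function returns the observed byte immediately,
 * matching native cmpxchg semantics; it only retries when the *surrounding*
 * bytes of the word changed under us while the target byte still matched.
 * Same assumptions as the sketch above: little-endian only, __atomic
 * builtins standing in for arch_cmpxchg(), and the name cmpxchg_u8_demo is
 * invented for illustration.
 */
#include <stdint.h>
#include <stdio.h>

static uint8_t cmpxchg_u8_demo(volatile uint8_t *ptr, uint8_t old, uint8_t new)
{
	volatile uint32_t *ptr32 =
		(volatile uint32_t *)((uintptr_t)ptr & ~(uintptr_t)0x3);
	unsigned int shift = ((uintptr_t)ptr & 0x3) * 8;	/* LE only */
	uint32_t mask = (uint32_t)0xff << shift;
	uint32_t load32 = *ptr32;

	for (;;) {
		/* Bail out if the target byte no longer matches 'old'. */
		uint8_t cur = (load32 & mask) >> shift;
		if (cur != old)
			return cur;

		uint32_t old32 = (load32 & ~mask) | ((uint32_t)old << shift);
		uint32_t new32 = (load32 & ~mask) | ((uint32_t)new << shift);

		if (__atomic_compare_exchange_n(ptr32, &old32, new32, 0,
						__ATOMIC_SEQ_CST,
						__ATOMIC_SEQ_CST))
			return old;

		/* CAS failed: old32 now holds the observed word; retry. */
		load32 = old32;
	}
}

int main(void)
{
	_Alignas(4) volatile uint8_t buf[4] = { 0x11, 0x22, 0x33, 0x44 };

	uint8_t r1 = cmpxchg_u8_demo(&buf[1], 0x22, 0x55);	/* succeeds */
	uint8_t r2 = cmpxchg_u8_demo(&buf[1], 0x22, 0x77);	/* mismatch */

	/* Expect r1 == 0x22, r2 == 0x55, buf[1] == 0x55. */
	printf("r1=0x%02x r2=0x%02x buf[1]=0x%02x\n", r1, r2, buf[1]);
	return 0;
}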