Path: blob/master/arch/powerpc/include/asm/dcr-native.h
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * (c) Copyright 2006 Benjamin Herrenschmidt, IBM Corp.
 *                    <[email protected]>
 */

#ifndef _ASM_POWERPC_DCR_NATIVE_H
#define _ASM_POWERPC_DCR_NATIVE_H
#ifdef __KERNEL__
#ifndef __ASSEMBLY__

#include <linux/spinlock.h>
#include <asm/cputable.h>
#include <asm/cpu_has_feature.h>
#include <linux/stringify.h>

typedef struct {
	unsigned int base;
} dcr_host_native_t;

static inline bool dcr_map_ok_native(dcr_host_native_t host)
{
	return true;
}

#define dcr_map_native(dev, dcr_n, dcr_c) \
	((dcr_host_native_t){ .base = (dcr_n) })
#define dcr_unmap_native(host, dcr_c)		do {} while (0)
#define dcr_read_native(host, dcr_n)		mfdcr(dcr_n + host.base)
#define dcr_write_native(host, dcr_n, value)	mtdcr(dcr_n + host.base, value)

/* Table based DCR accessors */
extern void __mtdcr(unsigned int reg, unsigned int val);
extern unsigned int __mfdcr(unsigned int reg);

/* mfdcrx/mtdcrx instruction based accessors. We hand code
 * the opcodes in order not to depend on newer binutils
 */
static inline unsigned int mfdcrx(unsigned int reg)
{
	unsigned int ret;
	asm volatile(".long 0x7c000206 | (%0 << 21) | (%1 << 16)"
		     : "=r" (ret) : "r" (reg));
	return ret;
}

static inline void mtdcrx(unsigned int reg, unsigned int val)
{
	asm volatile(".long 0x7c000306 | (%0 << 21) | (%1 << 16)"
		     : : "r" (val), "r" (reg));
}

#define mfdcr(rn)						\
	({unsigned int rval;					\
	if (__builtin_constant_p(rn) && rn < 1024)		\
		asm volatile("mfdcr %0, %1" : "=r" (rval)	\
			      : "n" (rn));			\
	else if (likely(cpu_has_feature(CPU_FTR_INDEXED_DCR)))	\
		rval = mfdcrx(rn);				\
	else							\
		rval = __mfdcr(rn);				\
	rval;})

#define mtdcr(rn, v)						\
do {								\
	if (__builtin_constant_p(rn) && rn < 1024)		\
		asm volatile("mtdcr %0, %1"			\
			      : : "n" (rn), "r" (v));		\
	else if (likely(cpu_has_feature(CPU_FTR_INDEXED_DCR)))	\
		mtdcrx(rn, v);					\
	else							\
		__mtdcr(rn, v);					\
} while (0)

/* R/W of indirect DCRs make use of standard naming conventions for DCRs */
extern spinlock_t dcr_ind_lock;

static inline unsigned __mfdcri(int base_addr, int base_data, int reg)
{
	unsigned long flags;
	unsigned int val;

	spin_lock_irqsave(&dcr_ind_lock, flags);
	if (cpu_has_feature(CPU_FTR_INDEXED_DCR)) {
		mtdcrx(base_addr, reg);
		val = mfdcrx(base_data);
	} else {
		__mtdcr(base_addr, reg);
		val = __mfdcr(base_data);
	}
	spin_unlock_irqrestore(&dcr_ind_lock, flags);
	return val;
}

static inline void __mtdcri(int base_addr, int base_data, int reg,
			    unsigned val)
{
	unsigned long flags;

	spin_lock_irqsave(&dcr_ind_lock, flags);
	if (cpu_has_feature(CPU_FTR_INDEXED_DCR)) {
		mtdcrx(base_addr, reg);
		mtdcrx(base_data, val);
	} else {
		__mtdcr(base_addr, reg);
		__mtdcr(base_data, val);
	}
	spin_unlock_irqrestore(&dcr_ind_lock, flags);
}

static inline void __dcri_clrset(int base_addr, int base_data, int reg,
				 unsigned clr, unsigned set)
{
	unsigned long flags;
	unsigned int val;

	spin_lock_irqsave(&dcr_ind_lock, flags);
	if (cpu_has_feature(CPU_FTR_INDEXED_DCR)) {
		mtdcrx(base_addr, reg);
		val = (mfdcrx(base_data) & ~clr) | set;
		mtdcrx(base_data, val);
	} else {
		__mtdcr(base_addr, reg);
		val = (__mfdcr(base_data) & ~clr) | set;
		__mtdcr(base_data, val);
	}
	spin_unlock_irqrestore(&dcr_ind_lock, flags);
}

#define mfdcri(base, reg)	__mfdcri(DCRN_ ## base ## _CONFIG_ADDR,	\
					 DCRN_ ## base ## _CONFIG_DATA,	\
					 reg)

#define mtdcri(base, reg, data)	__mtdcri(DCRN_ ## base ## _CONFIG_ADDR,	\
					 DCRN_ ## base ## _CONFIG_DATA,	\
					 reg, data)

#define dcri_clrset(base, reg, clr, set)	__dcri_clrset(DCRN_ ## base ## _CONFIG_ADDR,	\
							      DCRN_ ## base ## _CONFIG_DATA,	\
							      reg, clr, set)

#endif /* __ASSEMBLY__ */
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_DCR_NATIVE_H */
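
Usage sketch (not part of the header): the device base MYDEV_DCR_BASE, the offset MYDEV_CTRL, and the DCRN_EXAMPLE_* / EXAMPLE_REG_MODE names below are hypothetical, invented only to show how the accessors defined above fit together; the accessor names themselves (dcr_map_native, dcr_read_native, dcr_write_native, mfdcri, mtdcri, dcri_clrset) come from this header.

#include <asm/dcr-native.h>

#define MYDEV_DCR_BASE			0x080	/* hypothetical first DCR of the device */
#define MYDEV_CTRL			0x0	/* hypothetical control register offset */

#define DCRN_EXAMPLE_CONFIG_ADDR	0x0e0	/* hypothetical indirect address DCR */
#define DCRN_EXAMPLE_CONFIG_DATA	0x0e1	/* hypothetical indirect data DCR */
#define EXAMPLE_REG_MODE		0x04	/* hypothetical indirect register number */

static void mydev_example(void)
{
	/* "Mapping" a native DCR range only records the base DCR number. */
	dcr_host_native_t host = dcr_map_native(NULL, MYDEV_DCR_BASE, 1);
	unsigned int ctrl, mode;

	if (!dcr_map_ok_native(host))
		return;

	/* Direct read-modify-write through the base-relative accessors. */
	ctrl = dcr_read_native(host, MYDEV_CTRL);
	dcr_write_native(host, MYDEV_CTRL, ctrl | 0x1);

	/* Indirect access goes through the address/data DCR pair,
	 * serialized by dcr_ind_lock inside the helpers.
	 */
	mode = mfdcri(EXAMPLE, EXAMPLE_REG_MODE);
	mtdcri(EXAMPLE, EXAMPLE_REG_MODE, mode | 0x2);

	/* Clear then set bits of one indirect register in a single
	 * locked read-modify-write.
	 */
	dcri_clrset(EXAMPLE, EXAMPLE_REG_MODE, 0x0f, 0x05);

	dcr_unmap_native(host, 1);
}

Constant DCR numbers below 1024 compile straight to a mfdcr/mtdcr instruction; variable numbers use mfdcrx/mtdcrx when CPU_FTR_INDEXED_DCR is available and fall back to the table-based __mfdcr/__mtdcr helpers otherwise.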