Path: arch/loongarch/include/asm/alternative.h
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_ALTERNATIVE_H
#define _ASM_ALTERNATIVE_H

#ifndef __ASSEMBLER__

#include <linux/types.h>
#include <linux/stddef.h>
#include <linux/stringify.h>
#include <asm/asm.h>

struct alt_instr {
	s32 instr_offset;	/* offset to original instruction */
	s32 replace_offset;	/* offset to replacement instruction */
	u16 feature;		/* feature bit set for replacement */
	u8  instrlen;		/* length of original instruction */
	u8  replacementlen;	/* length of new instruction */
} __packed;

/*
 * Debug flag that can be tested to see whether alternative
 * instructions were patched in already:
 */
extern int alternatives_patched;
extern struct alt_instr __alt_instructions[], __alt_instructions_end[];

extern void alternative_instructions(void);
extern void apply_alternatives(struct alt_instr *start, struct alt_instr *end);

/*
 * Local label scheme used by the inline assembly below:
 *   661: start of the original instruction(s)
 *   662: end of the original instruction(s)
 *   663: end of the original sequence, including NOP padding
 *   664<num>/665<num>: start/end of replacement <num>
 */
#define b_replacement(num)	"664"#num
#define e_replacement(num)	"665"#num

#define alt_end_marker		"663"
#define alt_slen		"662b-661b"
#define alt_total_slen		alt_end_marker"b-661b"
#define alt_rlen(num)		e_replacement(num)"f-"b_replacement(num)"f"

/* 0x03400000 is the LoongArch "nop" encoding (andi $zero, $zero, 0) used for padding. */
#define __OLDINSTR(oldinstr, num)					\
	"661:\n\t" oldinstr "\n662:\n"					\
	".fill -(((" alt_rlen(num) ")-(" alt_slen ")) > 0) * "		\
		"((" alt_rlen(num) ")-(" alt_slen ")) / 4, 4, 0x03400000\n"

#define OLDINSTR(oldinstr, num)						\
	__OLDINSTR(oldinstr, num)					\
	alt_end_marker ":\n"

#define alt_max_short(a, b)	"((" a ") ^ (((" a ") ^ (" b ")) & -(-((" a ") < (" b ")))))"

/*
 * Pad the original instruction sequence with additional NOPs if the
 * longer of the two replacement alternatives exceeds its length.
 */
#define OLDINSTR_2(oldinstr, num1, num2)				\
	"661:\n\t" oldinstr "\n662:\n"					\
	".fill -((" alt_max_short(alt_rlen(num1), alt_rlen(num2)) " - (" alt_slen ")) > 0) * " \
		"(" alt_max_short(alt_rlen(num1), alt_rlen(num2)) " - (" alt_slen ")) / 4, " \
		"4, 0x03400000\n"					\
	alt_end_marker ":\n"

#define ALTINSTR_ENTRY(feature, num)					      \
	" .long 661b - .\n"				/* label           */ \
	" .long " b_replacement(num)"f - .\n"		/* new instruction */ \
	" .short " __stringify(feature) "\n"		/* feature bit     */ \
	" .byte " alt_total_slen "\n"			/* source len      */ \
	" .byte " alt_rlen(num) "\n"			/* replacement len */

#define ALTINSTR_REPLACEMENT(newinstr, feature, num)	/* replacement */     \
	b_replacement(num)":\n\t" newinstr "\n" e_replacement(num) ":\n\t"

/* alternative assembly primitive: */
#define ALTERNATIVE(oldinstr, newinstr, feature)			\
	OLDINSTR(oldinstr, 1)						\
	".pushsection .altinstructions,\"a\"\n"				\
	ALTINSTR_ENTRY(feature, 1)					\
	".popsection\n"							\
	".subsection 1\n"						\
	ALTINSTR_REPLACEMENT(newinstr, feature, 1)			\
	".previous\n"

#define ALTERNATIVE_2(oldinstr, newinstr1, feature1, newinstr2, feature2)\
	OLDINSTR_2(oldinstr, 1, 2)					\
	".pushsection .altinstructions,\"a\"\n"				\
	ALTINSTR_ENTRY(feature1, 1)					\
	ALTINSTR_ENTRY(feature2, 2)					\
	".popsection\n"							\
	".subsection 1\n"						\
	ALTINSTR_REPLACEMENT(newinstr1, feature1, 1)			\
	ALTINSTR_REPLACEMENT(newinstr2, feature2, 2)			\
	".previous\n"

/*
 * Alternative instructions for different CPU types or capabilities.
 *
 * This allows optimized instructions to be used even in generic kernel
 * binaries.
 *
 * The length of oldinstr must be greater than or equal to the length of
 * newinstr; oldinstr can be padded with NOPs as needed.
 *
 * For non-barrier-like inlines, please define new variants without the
 * volatile qualifier and memory clobber.
 */
#define alternative(oldinstr, newinstr, feature)			\
	(asm volatile (ALTERNATIVE(oldinstr, newinstr, feature) : : : "memory"))

#define alternative_2(oldinstr, newinstr1, feature1, newinstr2, feature2) \
	(asm volatile(ALTERNATIVE_2(oldinstr, newinstr1, feature1, newinstr2, feature2) ::: "memory"))

#endif /* __ASSEMBLER__ */

#endif /* _ASM_ALTERNATIVE_H */
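
Usage sketch (editorial addition, not part of the kernel header): a minimal illustration of how the alternative()/alternative_2() wrappers above are typically dropped into C code. CPU_FEATURE_A and CPU_FEATURE_B are hypothetical placeholder feature bits (the real definitions live in the LoongArch CPU-feature headers), and the replacement instructions are arbitrary 4-byte LoongArch instructions chosen only so that oldinstr and newinstr have equal lengths.

/* Hypothetical feature bits, for illustration only. */
#define CPU_FEATURE_A	1
#define CPU_FEATURE_B	2

static inline void alternative_usage_sketch(void)
{
	/*
	 * Boot-time patching (apply_alternatives()) rewrites the "nop" in
	 * place with "dbar 0" when the running CPU reports the feature bit
	 * recorded in the emitted .altinstructions entry.
	 */
	alternative("nop", "dbar 0", CPU_FEATURE_A);

	/*
	 * Two candidate replacements for one site: each entry is applied
	 * when its feature bit is present, so the original "nop" ends up
	 * replaced by whichever alternative matches the running CPU.
	 */
	alternative_2("nop", "dbar 0", CPU_FEATURE_A, "ibar 0", CPU_FEATURE_B);
}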