Path: blob/master/arch/ia64/include/asm/gcc_intrin.h
17533 views
#ifndef _ASM_IA64_GCC_INTRIN_H1#define _ASM_IA64_GCC_INTRIN_H2/*3*4* Copyright (C) 2002,2003 Jun Nakajima <[email protected]>5* Copyright (C) 2002,2003 Suresh Siddha <[email protected]>6*/78#include <linux/types.h>9#include <linux/compiler.h>1011/* define this macro to get some asm stmts included in 'c' files */12#define ASM_SUPPORTED1314/* Optimization barrier */15/* The "volatile" is due to gcc bugs */16#define ia64_barrier() asm volatile ("":::"memory")1718#define ia64_stop() asm volatile (";;"::)1920#define ia64_invala_gr(regnum) asm volatile ("invala.e r%0" :: "i"(regnum))2122#define ia64_invala_fr(regnum) asm volatile ("invala.e f%0" :: "i"(regnum))2324#define ia64_flushrs() asm volatile ("flushrs;;":::"memory")2526#define ia64_loadrs() asm volatile ("loadrs;;":::"memory")2728extern void ia64_bad_param_for_setreg (void);29extern void ia64_bad_param_for_getreg (void);3031#ifdef __KERNEL__32register unsigned long ia64_r13 asm ("r13") __used;33#endif3435#define ia64_native_setreg(regnum, val) \36({ \37switch (regnum) { \38case _IA64_REG_PSR_L: \39asm volatile ("mov psr.l=%0" :: "r"(val) : "memory"); \40break; \41case _IA64_REG_AR_KR0 ... _IA64_REG_AR_EC: \42asm volatile ("mov ar%0=%1" :: \43"i" (regnum - _IA64_REG_AR_KR0), \44"r"(val): "memory"); \45break; \46case _IA64_REG_CR_DCR ... _IA64_REG_CR_LRR1: \47asm volatile ("mov cr%0=%1" :: \48"i" (regnum - _IA64_REG_CR_DCR), \49"r"(val): "memory" ); \50break; \51case _IA64_REG_SP: \52asm volatile ("mov r12=%0" :: \53"r"(val): "memory"); \54break; \55case _IA64_REG_GP: \56asm volatile ("mov gp=%0" :: "r"(val) : "memory"); \57break; \58default: \59ia64_bad_param_for_setreg(); \60break; \61} \62})6364#define ia64_native_getreg(regnum) \65({ \66__u64 ia64_intri_res; \67\68switch (regnum) { \69case _IA64_REG_GP: \70asm volatile ("mov %0=gp" : "=r"(ia64_intri_res)); \71break; \72case _IA64_REG_IP: \73asm volatile ("mov %0=ip" : "=r"(ia64_intri_res)); \74break; \75case _IA64_REG_PSR: \76asm volatile ("mov %0=psr" : "=r"(ia64_intri_res)); \77break; \78case _IA64_REG_TP: /* for current() */ \79ia64_intri_res = ia64_r13; \80break; \81case _IA64_REG_AR_KR0 ... _IA64_REG_AR_EC: \82asm volatile ("mov %0=ar%1" : "=r" (ia64_intri_res) \83: "i"(regnum - _IA64_REG_AR_KR0)); \84break; \85case _IA64_REG_CR_DCR ... _IA64_REG_CR_LRR1: \86asm volatile ("mov %0=cr%1" : "=r" (ia64_intri_res) \87: "i" (regnum - _IA64_REG_CR_DCR)); \88break; \89case _IA64_REG_SP: \90asm volatile ("mov %0=sp" : "=r" (ia64_intri_res)); \91break; \92default: \93ia64_bad_param_for_getreg(); \94break; \95} \96ia64_intri_res; \97})9899#define ia64_hint_pause 0100101#define ia64_hint(mode) \102({ \103switch (mode) { \104case ia64_hint_pause: \105asm volatile ("hint @pause" ::: "memory"); \106break; \107} \108})109110111/* Integer values for mux1 instruction */112#define ia64_mux1_brcst 0113#define ia64_mux1_mix 8114#define ia64_mux1_shuf 9115#define ia64_mux1_alt 10116#define ia64_mux1_rev 11117118#define ia64_mux1(x, mode) \119({ \120__u64 ia64_intri_res; \121\122switch (mode) { \123case ia64_mux1_brcst: \124asm ("mux1 %0=%1,@brcst" : "=r" (ia64_intri_res) : "r" (x)); \125break; \126case ia64_mux1_mix: \127asm ("mux1 %0=%1,@mix" : "=r" (ia64_intri_res) : "r" (x)); \128break; \129case ia64_mux1_shuf: \130asm ("mux1 %0=%1,@shuf" : "=r" (ia64_intri_res) : "r" (x)); \131break; \132case ia64_mux1_alt: \133asm ("mux1 %0=%1,@alt" : "=r" (ia64_intri_res) : "r" (x)); \134break; \135case ia64_mux1_rev: \136asm ("mux1 %0=%1,@rev" : "=r" (ia64_intri_res) : "r" (x)); \137break; \138} \139ia64_intri_res; \140})141142#if __GNUC__ >= 4 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)143# define ia64_popcnt(x) __builtin_popcountl(x)144#else145# define ia64_popcnt(x) \146({ \147__u64 ia64_intri_res; \148asm ("popcnt %0=%1" : "=r" (ia64_intri_res) : "r" (x)); \149\150ia64_intri_res; \151})152#endif153154#define ia64_getf_exp(x) \155({ \156long ia64_intri_res; \157\158asm ("getf.exp %0=%1" : "=r"(ia64_intri_res) : "f"(x)); \159\160ia64_intri_res; \161})162163#define ia64_shrp(a, b, count) \164({ \165__u64 ia64_intri_res; \166asm ("shrp %0=%1,%2,%3" : "=r"(ia64_intri_res) : "r"(a), "r"(b), "i"(count)); \167ia64_intri_res; \168})169170#define ia64_ldfs(regnum, x) \171({ \172register double __f__ asm ("f"#regnum); \173asm volatile ("ldfs %0=[%1]" :"=f"(__f__): "r"(x)); \174})175176#define ia64_ldfd(regnum, x) \177({ \178register double __f__ asm ("f"#regnum); \179asm volatile ("ldfd %0=[%1]" :"=f"(__f__): "r"(x)); \180})181182#define ia64_ldfe(regnum, x) \183({ \184register double __f__ asm ("f"#regnum); \185asm volatile ("ldfe %0=[%1]" :"=f"(__f__): "r"(x)); \186})187188#define ia64_ldf8(regnum, x) \189({ \190register double __f__ asm ("f"#regnum); \191asm volatile ("ldf8 %0=[%1]" :"=f"(__f__): "r"(x)); \192})193194#define ia64_ldf_fill(regnum, x) \195({ \196register double __f__ asm ("f"#regnum); \197asm volatile ("ldf.fill %0=[%1]" :"=f"(__f__): "r"(x)); \198})199200#define ia64_st4_rel_nta(m, val) \201({ \202asm volatile ("st4.rel.nta [%0] = %1\n\t" :: "r"(m), "r"(val)); \203})204205#define ia64_stfs(x, regnum) \206({ \207register double __f__ asm ("f"#regnum); \208asm volatile ("stfs [%0]=%1" :: "r"(x), "f"(__f__) : "memory"); \209})210211#define ia64_stfd(x, regnum) \212({ \213register double __f__ asm ("f"#regnum); \214asm volatile ("stfd [%0]=%1" :: "r"(x), "f"(__f__) : "memory"); \215})216217#define ia64_stfe(x, regnum) \218({ \219register double __f__ asm ("f"#regnum); \220asm volatile ("stfe [%0]=%1" :: "r"(x), "f"(__f__) : "memory"); \221})222223#define ia64_stf8(x, regnum) \224({ \225register double __f__ asm ("f"#regnum); \226asm volatile ("stf8 [%0]=%1" :: "r"(x), "f"(__f__) : "memory"); \227})228229#define ia64_stf_spill(x, regnum) \230({ \231register double __f__ asm ("f"#regnum); \232asm volatile ("stf.spill [%0]=%1" :: "r"(x), "f"(__f__) : "memory"); \233})234235#define ia64_fetchadd4_acq(p, inc) \236({ \237\238__u64 ia64_intri_res; \239asm volatile ("fetchadd4.acq %0=[%1],%2" \240: "=r"(ia64_intri_res) : "r"(p), "i" (inc) \241: "memory"); \242\243ia64_intri_res; \244})245246#define ia64_fetchadd4_rel(p, inc) \247({ \248__u64 ia64_intri_res; \249asm volatile ("fetchadd4.rel %0=[%1],%2" \250: "=r"(ia64_intri_res) : "r"(p), "i" (inc) \251: "memory"); \252\253ia64_intri_res; \254})255256#define ia64_fetchadd8_acq(p, inc) \257({ \258\259__u64 ia64_intri_res; \260asm volatile ("fetchadd8.acq %0=[%1],%2" \261: "=r"(ia64_intri_res) : "r"(p), "i" (inc) \262: "memory"); \263\264ia64_intri_res; \265})266267#define ia64_fetchadd8_rel(p, inc) \268({ \269__u64 ia64_intri_res; \270asm volatile ("fetchadd8.rel %0=[%1],%2" \271: "=r"(ia64_intri_res) : "r"(p), "i" (inc) \272: "memory"); \273\274ia64_intri_res; \275})276277#define ia64_xchg1(ptr,x) \278({ \279__u64 ia64_intri_res; \280asm volatile ("xchg1 %0=[%1],%2" \281: "=r" (ia64_intri_res) : "r" (ptr), "r" (x) : "memory"); \282ia64_intri_res; \283})284285#define ia64_xchg2(ptr,x) \286({ \287__u64 ia64_intri_res; \288asm volatile ("xchg2 %0=[%1],%2" : "=r" (ia64_intri_res) \289: "r" (ptr), "r" (x) : "memory"); \290ia64_intri_res; \291})292293#define ia64_xchg4(ptr,x) \294({ \295__u64 ia64_intri_res; \296asm volatile ("xchg4 %0=[%1],%2" : "=r" (ia64_intri_res) \297: "r" (ptr), "r" (x) : "memory"); \298ia64_intri_res; \299})300301#define ia64_xchg8(ptr,x) \302({ \303__u64 ia64_intri_res; \304asm volatile ("xchg8 %0=[%1],%2" : "=r" (ia64_intri_res) \305: "r" (ptr), "r" (x) : "memory"); \306ia64_intri_res; \307})308309#define ia64_cmpxchg1_acq(ptr, new, old) \310({ \311__u64 ia64_intri_res; \312asm volatile ("mov ar.ccv=%0;;" :: "rO"(old)); \313asm volatile ("cmpxchg1.acq %0=[%1],%2,ar.ccv": \314"=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory"); \315ia64_intri_res; \316})317318#define ia64_cmpxchg1_rel(ptr, new, old) \319({ \320__u64 ia64_intri_res; \321asm volatile ("mov ar.ccv=%0;;" :: "rO"(old)); \322asm volatile ("cmpxchg1.rel %0=[%1],%2,ar.ccv": \323"=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory"); \324ia64_intri_res; \325})326327#define ia64_cmpxchg2_acq(ptr, new, old) \328({ \329__u64 ia64_intri_res; \330asm volatile ("mov ar.ccv=%0;;" :: "rO"(old)); \331asm volatile ("cmpxchg2.acq %0=[%1],%2,ar.ccv": \332"=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory"); \333ia64_intri_res; \334})335336#define ia64_cmpxchg2_rel(ptr, new, old) \337({ \338__u64 ia64_intri_res; \339asm volatile ("mov ar.ccv=%0;;" :: "rO"(old)); \340\341asm volatile ("cmpxchg2.rel %0=[%1],%2,ar.ccv": \342"=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory"); \343ia64_intri_res; \344})345346#define ia64_cmpxchg4_acq(ptr, new, old) \347({ \348__u64 ia64_intri_res; \349asm volatile ("mov ar.ccv=%0;;" :: "rO"(old)); \350asm volatile ("cmpxchg4.acq %0=[%1],%2,ar.ccv": \351"=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory"); \352ia64_intri_res; \353})354355#define ia64_cmpxchg4_rel(ptr, new, old) \356({ \357__u64 ia64_intri_res; \358asm volatile ("mov ar.ccv=%0;;" :: "rO"(old)); \359asm volatile ("cmpxchg4.rel %0=[%1],%2,ar.ccv": \360"=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory"); \361ia64_intri_res; \362})363364#define ia64_cmpxchg8_acq(ptr, new, old) \365({ \366__u64 ia64_intri_res; \367asm volatile ("mov ar.ccv=%0;;" :: "rO"(old)); \368asm volatile ("cmpxchg8.acq %0=[%1],%2,ar.ccv": \369"=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory"); \370ia64_intri_res; \371})372373#define ia64_cmpxchg8_rel(ptr, new, old) \374({ \375__u64 ia64_intri_res; \376asm volatile ("mov ar.ccv=%0;;" :: "rO"(old)); \377\378asm volatile ("cmpxchg8.rel %0=[%1],%2,ar.ccv": \379"=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory"); \380ia64_intri_res; \381})382383#define ia64_mf() asm volatile ("mf" ::: "memory")384#define ia64_mfa() asm volatile ("mf.a" ::: "memory")385386#define ia64_invala() asm volatile ("invala" ::: "memory")387388#define ia64_native_thash(addr) \389({ \390unsigned long ia64_intri_res; \391asm volatile ("thash %0=%1" : "=r"(ia64_intri_res) : "r" (addr)); \392ia64_intri_res; \393})394395#define ia64_srlz_i() asm volatile (";; srlz.i ;;" ::: "memory")396#define ia64_srlz_d() asm volatile (";; srlz.d" ::: "memory");397398#ifdef HAVE_SERIALIZE_DIRECTIVE399# define ia64_dv_serialize_data() asm volatile (".serialize.data");400# define ia64_dv_serialize_instruction() asm volatile (".serialize.instruction");401#else402# define ia64_dv_serialize_data()403# define ia64_dv_serialize_instruction()404#endif405406#define ia64_nop(x) asm volatile ("nop %0"::"i"(x));407408#define ia64_itci(addr) asm volatile ("itc.i %0;;" :: "r"(addr) : "memory")409410#define ia64_itcd(addr) asm volatile ("itc.d %0;;" :: "r"(addr) : "memory")411412413#define ia64_itri(trnum, addr) asm volatile ("itr.i itr[%0]=%1" \414:: "r"(trnum), "r"(addr) : "memory")415416#define ia64_itrd(trnum, addr) asm volatile ("itr.d dtr[%0]=%1" \417:: "r"(trnum), "r"(addr) : "memory")418419#define ia64_tpa(addr) \420({ \421unsigned long ia64_pa; \422asm volatile ("tpa %0 = %1" : "=r"(ia64_pa) : "r"(addr) : "memory"); \423ia64_pa; \424})425426#define __ia64_set_dbr(index, val) \427asm volatile ("mov dbr[%0]=%1" :: "r"(index), "r"(val) : "memory")428429#define ia64_set_ibr(index, val) \430asm volatile ("mov ibr[%0]=%1" :: "r"(index), "r"(val) : "memory")431432#define ia64_set_pkr(index, val) \433asm volatile ("mov pkr[%0]=%1" :: "r"(index), "r"(val) : "memory")434435#define ia64_set_pmc(index, val) \436asm volatile ("mov pmc[%0]=%1" :: "r"(index), "r"(val) : "memory")437438#define ia64_set_pmd(index, val) \439asm volatile ("mov pmd[%0]=%1" :: "r"(index), "r"(val) : "memory")440441#define ia64_native_set_rr(index, val) \442asm volatile ("mov rr[%0]=%1" :: "r"(index), "r"(val) : "memory");443444#define ia64_native_get_cpuid(index) \445({ \446unsigned long ia64_intri_res; \447asm volatile ("mov %0=cpuid[%r1]" : "=r"(ia64_intri_res) : "rO"(index)); \448ia64_intri_res; \449})450451#define __ia64_get_dbr(index) \452({ \453unsigned long ia64_intri_res; \454asm volatile ("mov %0=dbr[%1]" : "=r"(ia64_intri_res) : "r"(index)); \455ia64_intri_res; \456})457458#define ia64_get_ibr(index) \459({ \460unsigned long ia64_intri_res; \461asm volatile ("mov %0=ibr[%1]" : "=r"(ia64_intri_res) : "r"(index)); \462ia64_intri_res; \463})464465#define ia64_get_pkr(index) \466({ \467unsigned long ia64_intri_res; \468asm volatile ("mov %0=pkr[%1]" : "=r"(ia64_intri_res) : "r"(index)); \469ia64_intri_res; \470})471472#define ia64_get_pmc(index) \473({ \474unsigned long ia64_intri_res; \475asm volatile ("mov %0=pmc[%1]" : "=r"(ia64_intri_res) : "r"(index)); \476ia64_intri_res; \477})478479480#define ia64_native_get_pmd(index) \481({ \482unsigned long ia64_intri_res; \483asm volatile ("mov %0=pmd[%1]" : "=r"(ia64_intri_res) : "r"(index)); \484ia64_intri_res; \485})486487#define ia64_native_get_rr(index) \488({ \489unsigned long ia64_intri_res; \490asm volatile ("mov %0=rr[%1]" : "=r"(ia64_intri_res) : "r" (index)); \491ia64_intri_res; \492})493494#define ia64_native_fc(addr) asm volatile ("fc %0" :: "r"(addr) : "memory")495496497#define ia64_sync_i() asm volatile (";; sync.i" ::: "memory")498499#define ia64_native_ssm(mask) asm volatile ("ssm %0":: "i"((mask)) : "memory")500#define ia64_native_rsm(mask) asm volatile ("rsm %0":: "i"((mask)) : "memory")501#define ia64_sum(mask) asm volatile ("sum %0":: "i"((mask)) : "memory")502#define ia64_rum(mask) asm volatile ("rum %0":: "i"((mask)) : "memory")503504#define ia64_ptce(addr) asm volatile ("ptc.e %0" :: "r"(addr))505506#define ia64_native_ptcga(addr, size) \507do { \508asm volatile ("ptc.ga %0,%1" :: "r"(addr), "r"(size) : "memory"); \509ia64_dv_serialize_data(); \510} while (0)511512#define ia64_ptcl(addr, size) \513do { \514asm volatile ("ptc.l %0,%1" :: "r"(addr), "r"(size) : "memory"); \515ia64_dv_serialize_data(); \516} while (0)517518#define ia64_ptri(addr, size) \519asm volatile ("ptr.i %0,%1" :: "r"(addr), "r"(size) : "memory")520521#define ia64_ptrd(addr, size) \522asm volatile ("ptr.d %0,%1" :: "r"(addr), "r"(size) : "memory")523524#define ia64_ttag(addr) \525({ \526__u64 ia64_intri_res; \527asm volatile ("ttag %0=%1" : "=r"(ia64_intri_res) : "r" (addr)); \528ia64_intri_res; \529})530531532/* Values for lfhint in ia64_lfetch and ia64_lfetch_fault */533534#define ia64_lfhint_none 0535#define ia64_lfhint_nt1 1536#define ia64_lfhint_nt2 2537#define ia64_lfhint_nta 3538539#define ia64_lfetch(lfhint, y) \540({ \541switch (lfhint) { \542case ia64_lfhint_none: \543asm volatile ("lfetch [%0]" : : "r"(y)); \544break; \545case ia64_lfhint_nt1: \546asm volatile ("lfetch.nt1 [%0]" : : "r"(y)); \547break; \548case ia64_lfhint_nt2: \549asm volatile ("lfetch.nt2 [%0]" : : "r"(y)); \550break; \551case ia64_lfhint_nta: \552asm volatile ("lfetch.nta [%0]" : : "r"(y)); \553break; \554} \555})556557#define ia64_lfetch_excl(lfhint, y) \558({ \559switch (lfhint) { \560case ia64_lfhint_none: \561asm volatile ("lfetch.excl [%0]" :: "r"(y)); \562break; \563case ia64_lfhint_nt1: \564asm volatile ("lfetch.excl.nt1 [%0]" :: "r"(y)); \565break; \566case ia64_lfhint_nt2: \567asm volatile ("lfetch.excl.nt2 [%0]" :: "r"(y)); \568break; \569case ia64_lfhint_nta: \570asm volatile ("lfetch.excl.nta [%0]" :: "r"(y)); \571break; \572} \573})574575#define ia64_lfetch_fault(lfhint, y) \576({ \577switch (lfhint) { \578case ia64_lfhint_none: \579asm volatile ("lfetch.fault [%0]" : : "r"(y)); \580break; \581case ia64_lfhint_nt1: \582asm volatile ("lfetch.fault.nt1 [%0]" : : "r"(y)); \583break; \584case ia64_lfhint_nt2: \585asm volatile ("lfetch.fault.nt2 [%0]" : : "r"(y)); \586break; \587case ia64_lfhint_nta: \588asm volatile ("lfetch.fault.nta [%0]" : : "r"(y)); \589break; \590} \591})592593#define ia64_lfetch_fault_excl(lfhint, y) \594({ \595switch (lfhint) { \596case ia64_lfhint_none: \597asm volatile ("lfetch.fault.excl [%0]" :: "r"(y)); \598break; \599case ia64_lfhint_nt1: \600asm volatile ("lfetch.fault.excl.nt1 [%0]" :: "r"(y)); \601break; \602case ia64_lfhint_nt2: \603asm volatile ("lfetch.fault.excl.nt2 [%0]" :: "r"(y)); \604break; \605case ia64_lfhint_nta: \606asm volatile ("lfetch.fault.excl.nta [%0]" :: "r"(y)); \607break; \608} \609})610611#define ia64_native_intrin_local_irq_restore(x) \612do { \613asm volatile (";; cmp.ne p6,p7=%0,r0;;" \614"(p6) ssm psr.i;" \615"(p7) rsm psr.i;;" \616"(p6) srlz.d" \617:: "r"((x)) : "p6", "p7", "memory"); \618} while (0)619620#endif /* _ASM_IA64_GCC_INTRIN_H */621622623