/*
 *  FPU support code, moved here from head.S so that it can be used
 *  by chips which use other head-whatever.S files.
 *
 *  Copyright (C) 1995-1996 Gary Thomas ([email protected])
 *  Copyright (C) 1996 Cort Dougan <[email protected]>
 *  Copyright (C) 1996 Paul Mackerras.
 *  Copyright (C) 1997 Dan Malek ([email protected]).
 *
 *  This program is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU General Public License
 *  as published by the Free Software Foundation; either version
 *  2 of the License, or (at your option) any later version.
 *
 */

#include <asm/reg.h>
#include <asm/page.h>
#include <asm/mmu.h>
#include <asm/pgtable.h>
#include <asm/cputable.h>
#include <asm/cache.h>
#include <asm/thread_info.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>
#include <asm/ptrace.h>

#ifdef CONFIG_VSX
#define REST_32FPVSRS(n,c,base)						\
BEGIN_FTR_SECTION							\
	b	2f;							\
END_FTR_SECTION_IFSET(CPU_FTR_VSX);					\
	REST_32FPRS(n,base);						\
	b	3f;							\
2:	REST_32VSRS(n,c,base);						\
3:

#define SAVE_32FPVSRS(n,c,base)						\
BEGIN_FTR_SECTION							\
	b	2f;							\
END_FTR_SECTION_IFSET(CPU_FTR_VSX);					\
	SAVE_32FPRS(n,base);						\
	b	3f;							\
2:	SAVE_32VSRS(n,c,base);						\
3:
#else
#define REST_32FPVSRS(n,b,base)	REST_32FPRS(n, base)
#define SAVE_32FPVSRS(n,b,base)	SAVE_32FPRS(n, base)
#endif

/*
 * This task wants to use the FPU now.
 * On UP, disable FP for the task which had the FPU previously,
 * and save its floating-point registers in its thread_struct.
 * Load up this task's FP registers from its thread_struct,
 * enable the FPU for the current task and return to the task.
 */
_GLOBAL(load_up_fpu)
	mfmsr	r5
	ori	r5,r5,MSR_FP
#ifdef CONFIG_VSX
BEGIN_FTR_SECTION
	oris	r5,r5,MSR_VSX@h
END_FTR_SECTION_IFSET(CPU_FTR_VSX)
#endif
	SYNC
	MTMSRD(r5)			/* enable use of fpu now */
	isync
/*
 * For SMP, we don't do lazy FPU switching because it just gets too
 * horrendously complex, especially when a task switches from one CPU
 * to another.  Instead we call giveup_fpu in switch_to.
 */
#ifndef CONFIG_SMP
	LOAD_REG_ADDRBASE(r3, last_task_used_math)
	toreal(r3)
	PPC_LL	r4,ADDROFF(last_task_used_math)(r3)
	PPC_LCMPI	0,r4,0
	beq	1f
	toreal(r4)
	addi	r4,r4,THREAD		/* want last_task_used_math->thread */
	SAVE_32FPVSRS(0, r5, r4)
	mffs	fr0
	stfd	fr0,THREAD_FPSCR(r4)
	PPC_LL	r5,PT_REGS(r4)
	toreal(r5)
	PPC_LL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
	li	r10,MSR_FP|MSR_FE0|MSR_FE1
	andc	r4,r4,r10		/* disable FP for previous task */
	PPC_STL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
1:
#endif /* CONFIG_SMP */
	/* enable use of FP after return */
#ifdef CONFIG_PPC32
	mfspr	r5,SPRN_SPRG_THREAD	/* current task's THREAD (phys) */
	lwz	r4,THREAD_FPEXC_MODE(r5)
	ori	r9,r9,MSR_FP		/* enable FP for current */
	or	r9,r9,r4
#else
	ld	r4,PACACURRENT(r13)
	addi	r5,r4,THREAD		/* Get THREAD */
	lwz	r4,THREAD_FPEXC_MODE(r5)
	ori	r12,r12,MSR_FP
	or	r12,r12,r4
	std	r12,_MSR(r1)
#endif
	lfd	fr0,THREAD_FPSCR(r5)
	MTFSF_L(fr0)
	REST_32FPVSRS(0, r4, r5)
#ifndef CONFIG_SMP
	subi	r4,r5,THREAD
	fromreal(r4)
	PPC_STL	r4,ADDROFF(last_task_used_math)(r3)
#endif /* CONFIG_SMP */
	/* restore registers and return */
	/* we haven't used ctr or xer or lr */
	blr

/*
 * giveup_fpu(tsk)
 * Disable FP for the task given as the argument,
 * and save the floating-point registers in its thread_struct.
 * Enables the FPU for use in the kernel on return.
 */
_GLOBAL(giveup_fpu)
	mfmsr	r5
	ori	r5,r5,MSR_FP
#ifdef CONFIG_VSX
BEGIN_FTR_SECTION
	oris	r5,r5,MSR_VSX@h
END_FTR_SECTION_IFSET(CPU_FTR_VSX)
#endif
	SYNC_601
	ISYNC_601
	MTMSRD(r5)			/* enable use of fpu now */
	SYNC_601
	isync
	PPC_LCMPI	0,r3,0
	beqlr-				/* if no previous owner, done */
	addi	r3,r3,THREAD		/* want THREAD of task */
	PPC_LL	r5,PT_REGS(r3)
	PPC_LCMPI	0,r5,0
	SAVE_32FPVSRS(0, r4, r3)
	mffs	fr0
	stfd	fr0,THREAD_FPSCR(r3)
	beq	1f
	PPC_LL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
	li	r3,MSR_FP|MSR_FE0|MSR_FE1
#ifdef CONFIG_VSX
BEGIN_FTR_SECTION
	oris	r3,r3,MSR_VSX@h
END_FTR_SECTION_IFSET(CPU_FTR_VSX)
#endif
	andc	r4,r4,r3		/* disable FP for previous task */
	PPC_STL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
1:
#ifndef CONFIG_SMP
	li	r5,0
	LOAD_REG_ADDRBASE(r4,last_task_used_math)
	PPC_STL	r5,ADDROFF(last_task_used_math)(r4)
#endif /* CONFIG_SMP */
	blr

/*
 * These are used in the alignment trap handler when emulating
 * single-precision loads and stores.
 */

_GLOBAL(cvt_fd)
	lfs	0,0(r3)
	stfd	0,0(r4)
	blr

_GLOBAL(cvt_df)
	lfd	0,0(r3)
	stfs	0,0(r4)
	blr
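
/*
 * Illustrative note (not part of the original file): how the entry points
 * above are reached.  load_up_fpu is branched to from the FP-unavailable
 * exception path rather than called from C.  The other routines are
 * C-callable; a sketch of their prototypes, inferred from the PowerPC
 * calling convention visible above (r3 = first argument, r4 = second
 * argument) and from this file's own comments -- the authoritative
 * declarations live in the powerpc asm headers:
 *
 *	void giveup_fpu(struct task_struct *tsk);   // called e.g. from switch_to()
 *	void cvt_fd(float *from, double *to);       // alignment-trap helper
 *	void cvt_df(double *from, float *to);       // alignment-trap helper
 */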