/*---------------------------------------------------------------------------+
 |  round_Xsig.S                                                             |
 |                                                                           |
 | Copyright (C) 1992,1993,1994,1995                                         |
 |                       W. Metzenthen, 22 Parker St, Ormond, Vic 3163,      |
 |                       Australia.  E-mail   [email protected]              |
 |                                                                           |
 | Normalize and round a 12 byte quantity.                                   |
 | Call from C as:                                                           |
 |   int round_Xsig(Xsig *n)                                                 |
 |                                                                           |
 | Normalize a 12 byte quantity.                                             |
 | Call from C as:                                                           |
 |   int norm_Xsig(Xsig *n)                                                  |
 |                                                                           |
 | Each function returns the size of the shift (nr of bits).                 |
 |                                                                           |
 +---------------------------------------------------------------------------*/
	.file	"round_Xsig.S"

#include "fpu_emu.h"


.text
ENTRY(round_Xsig)
	pushl	%ebp
	movl	%esp,%ebp
	pushl	%ebx		/* Reserve some space */
	pushl	%ebx
	pushl	%esi

	movl	PARAM1,%esi

	movl	8(%esi),%edx
	movl	4(%esi),%ebx
	movl	(%esi),%eax

	movl	$0,-4(%ebp)

	orl	%edx,%edx	/* ms bits */
	js	L_round		/* Already normalized */
	jnz	L_shift_1	/* Shift left 1 - 31 bits */

	movl	%ebx,%edx
	movl	%eax,%ebx
	xorl	%eax,%eax
	movl	$-32,-4(%ebp)

/* We need to shift left by 1 - 31 bits */
L_shift_1:
	bsrl	%edx,%ecx	/* get the required shift in %ecx */
	subl	$31,%ecx
	negl	%ecx
	subl	%ecx,-4(%ebp)
	shld	%cl,%ebx,%edx
	shld	%cl,%eax,%ebx
	shl	%cl,%eax

L_round:
	testl	$0x80000000,%eax
	jz	L_exit

	addl	$1,%ebx
	adcl	$0,%edx
	jnz	L_exit

	movl	$0x80000000,%edx
	incl	-4(%ebp)

L_exit:
	movl	%edx,8(%esi)
	movl	%ebx,4(%esi)
	movl	%eax,(%esi)

	movl	-4(%ebp),%eax

	popl	%esi
	popl	%ebx
	leave
	ret




ENTRY(norm_Xsig)
	pushl	%ebp
	movl	%esp,%ebp
	pushl	%ebx		/* Reserve some space */
	pushl	%ebx
	pushl	%esi

	movl	PARAM1,%esi

	movl	8(%esi),%edx
	movl	4(%esi),%ebx
	movl	(%esi),%eax

	movl	$0,-4(%ebp)

	orl	%edx,%edx	/* ms bits */
	js	L_n_exit	/* Already normalized */
	jnz	L_n_shift_1	/* Shift left 1 - 31 bits */

	movl	%ebx,%edx
	movl	%eax,%ebx
	xorl	%eax,%eax
	movl	$-32,-4(%ebp)

	orl	%edx,%edx	/* ms bits */
	js	L_n_exit	/* Normalized now */
	jnz	L_n_shift_1	/* Shift left 1 - 31 bits */

	movl	%ebx,%edx
	movl	%eax,%ebx
	xorl	%eax,%eax
	addl	$-32,-4(%ebp)
	jmp	L_n_exit	/* Might not be normalized,
				   but shift no more. */

/* We need to shift left by 1 - 31 bits */
L_n_shift_1:
	bsrl	%edx,%ecx	/* get the required shift in %ecx */
	subl	$31,%ecx
	negl	%ecx
	subl	%ecx,-4(%ebp)
	shld	%cl,%ebx,%edx
	shld	%cl,%eax,%ebx
	shl	%cl,%eax

L_n_exit:
	movl	%edx,8(%esi)
	movl	%ebx,4(%esi)
	movl	%eax,(%esi)

	movl	-4(%ebp),%eax

	popl	%esi
	popl	%ebx
	leave
	ret
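

/*
 * What follows is a minimal C sketch of the two routines above, kept out of
 * the build by "#if 0" (this file is run through the C preprocessor, so the
 * block never reaches the assembler).  It only illustrates the semantics and
 * is not a drop-in replacement.  The names xsig_model, round_Xsig_model and
 * norm_Xsig_model are stand-ins invented here, not the emulator's own; the
 * one layout assumption taken from the code above is that offset 0 holds the
 * least significant 32-bit word and offset 8 the most significant one.
 */
#if 0
#include <stdint.h>

struct xsig_model {		/* stand-in for the 12 byte quantity */
	uint32_t lsw;		/* offset 0: least significant word */
	uint32_t midw;		/* offset 4 */
	uint32_t msw;		/* offset 8: most significant word */
};

/*
 * round_Xsig: shift left until bit 31 of the top word is set, then, if bit
 * 31 of the low word is set, add one to the upper 64 bits.  Returns the
 * counter the assembly keeps at -4(%ebp): minus the number of bits shifted,
 * plus one if the increment carries out of the top word.
 */
static int round_Xsig_model(struct xsig_model *n)
{
	uint32_t msw = n->msw, midw = n->midw, lsw = n->lsw;
	int count = 0;

	if (!(msw & 0x80000000)) {
		if (msw == 0) {
			/* Shift left by a whole word. */
			msw = midw; midw = lsw; lsw = 0;
			count = -32;
		}
		if (msw != 0) {
			/* Shift left 0..31 bits (bsrl above assumes msw != 0;
			   unsigned int is 32 bits on this target). */
			int shift = __builtin_clz(msw);

			if (shift) {
				msw = (msw << shift) | (midw >> (32 - shift));
				midw = (midw << shift) | (lsw >> (32 - shift));
				lsw <<= shift;
				count -= shift;
			}
		}
	}

	if (lsw & 0x80000000) {
		/* Round the upper 64 bits up; the low word is left as-is. */
		midw += 1;
		msw += (midw == 0);	/* propagate the carry */
		if (msw == 0) {		/* carried out of the top word */
			msw = 0x80000000;
			count += 1;
		}
	}

	n->msw = msw; n->midw = midw; n->lsw = lsw;
	return count;
}

/*
 * norm_Xsig: the same shifting, but with no rounding and with up to two
 * whole-word shifts; after the second one it stops even if the result is
 * still not normalized.
 */
static int norm_Xsig_model(struct xsig_model *n)
{
	uint32_t msw = n->msw, midw = n->midw, lsw = n->lsw;
	int count = 0;
	int words;

	for (words = 0; words < 2 && msw == 0; words++) {
		/* Shift left by a whole word. */
		msw = midw; midw = lsw; lsw = 0;
		count -= 32;
	}
	if (words < 2 && msw != 0 && !(msw & 0x80000000)) {
		/* Shift left by the remaining 1..31 bits. */
		int shift = __builtin_clz(msw);

		msw = (msw << shift) | (midw >> (32 - shift));
		midw = (midw << shift) | (lsw >> (32 - shift));
		lsw <<= shift;
		count -= shift;
	}

	n->msw = msw; n->midw = midw; n->lsw = lsw;
	return count;
}
#endif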