/* SPDX-License-Identifier: GPL-2.0 */
/*---------------------------------------------------------------------------+
 |  round_Xsig.S                                                             |
 |                                                                           |
 | Copyright (C) 1992,1993,1994,1995                                         |
 |                       W. Metzenthen, 22 Parker St, Ormond, Vic 3163,      |
 |                       Australia.  E-mail [email protected]                |
 |                                                                           |
 | Normalize and round a 12 byte quantity.                                   |
 | Call from C as:                                                           |
 |   int round_Xsig(Xsig *n)                                                 |
 |                                                                           |
 | Normalize a 12 byte quantity.                                             |
 | Call from C as:                                                           |
 |   int norm_Xsig(Xsig *n)                                                  |
 |                                                                           |
 | Each function returns the size of the shift (nr of bits).                 |
 |                                                                           |
 +---------------------------------------------------------------------------*/
	.file	"round_Xsig.S"

#include "fpu_emu.h"


.text
/*---------------------------------------------------------------------------+
 | int round_Xsig(Xsig *n)                                                   |
 |                                                                           |
 | Normalize the 96-bit quantity *n (three little-endian 32-bit words:       |
 | lsw at (n), midw at 4(n), msw at 8(n)) by shifting left until the msb     |
 | of the top word is set, then round the top 64 bits to nearest using the   |
 | bottom 32 bits.  The normalized, rounded value is written back to *n.     |
 |                                                                           |
 | Register roles:  %edx = msw, %ebx = midw, %eax = lsw, %esi = n.           |
 | -4(%ebp) accumulates the (negated) shift count, which is returned in      |
 | %eax; rounding overflow increments it by one.                             |
 | Clobbers: %eax, %ecx, %edx, flags.  %ebx/%esi are saved and restored.     |
 |                                                                           |
 | NOTE(review): only one 32-bit word shift is attempted before the 1-31     |
 | bit shift, and bsrl's result is undefined for a zero source — the caller  |
 | presumably guarantees the upper 64 bits of *n are not both zero; confirm. |
 +---------------------------------------------------------------------------*/
SYM_FUNC_START(round_Xsig)
	pushl	%ebp
	movl	%esp,%ebp
	pushl	%ebx		/* Reserve some space */
	pushl	%ebx		/* second push only reserves the -4(%ebp) slot */
	pushl	%esi

	movl	PARAM1,%esi	/* %esi = n (pointer to the Xsig) */

	movl	8(%esi),%edx	/* %edx = most significant word */
	movl	4(%esi),%ebx	/* %ebx = middle word */
	movl	(%esi),%eax	/* %eax = least significant word */

	movl	$0,-4(%ebp)	/* shift count accumulator = 0 */

	orl	%edx,%edx	/* ms bits */
	js	L_round		/* Already normalized */
	jnz	L_shift_1	/* Shift left 1 - 31 bits */

	/* Top word is zero: shift everything left one whole word. */
	movl	%ebx,%edx
	movl	%eax,%ebx
	xorl	%eax,%eax
	movl	$-32,-4(%ebp)	/* record the 32-bit word shift */

/* We need to shift left by 1 - 31 bits */
L_shift_1:
	bsrl	%edx,%ecx	/* get the required shift in %ecx */
	subl	$31,%ecx
	negl	%ecx		/* %ecx = 31 - bsr(%edx) = left shift needed */
	subl	%ecx,-4(%ebp)	/* account for the bit shift in the count */
	shld	%cl,%ebx,%edx	/* 96-bit left shift across the three words */
	shld	%cl,%eax,%ebx
	shl	%cl,%eax

L_round:
	testl	$0x80000000,%eax	/* round bit (msb of the low word) */
	jz	L_exit			/* clear -> round down, i.e. truncate */

	/* Round up: increment the upper 64 bits. */
	addl	$1,%ebx
	adcl	$0,%edx
	jnz	L_exit		/* no carry out of the top word -> done */

	/* Carry rippled out of bit 95: value became 2^64 scaled; renormalize
	   to 0x80000000:00000000 and bump the shift count by one. */
	movl	$0x80000000,%edx
	incl	-4(%ebp)

L_exit:
	movl	%edx,8(%esi)	/* store the result back into *n */
	movl	%ebx,4(%esi)
	movl	%eax,(%esi)

	movl	-4(%ebp),%eax	/* return the accumulated shift count */

	popl	%esi
	popl	%ebx
	leave
	RET
SYM_FUNC_END(round_Xsig)



/*---------------------------------------------------------------------------+
 | int norm_Xsig(Xsig *n)                                                    |
 |                                                                           |
 | Normalize the 96-bit quantity *n (same layout as for round_Xsig) by       |
 | shifting left until the msb of the top word is set, without rounding.     |
 | Unlike round_Xsig, up to two whole-word shifts are performed, so a value  |
 | whose only set bits are in the low word is still handled; shifting stops  |
 | after 64 bits even if the result is not normalized (n == 0 case).        |
 |                                                                           |
 | Register roles and return value are as for round_Xsig: the (negated)     |
 | shift count is accumulated at -4(%ebp) and returned in %eax.              |
 +---------------------------------------------------------------------------*/
SYM_FUNC_START(norm_Xsig)
	pushl	%ebp
	movl	%esp,%ebp
	pushl	%ebx		/* Reserve some space */
	pushl	%ebx		/* second push only reserves the -4(%ebp) slot */
	pushl	%esi

	movl	PARAM1,%esi	/* %esi = n (pointer to the Xsig) */

	movl	8(%esi),%edx	/* %edx = most significant word */
	movl	4(%esi),%ebx	/* %ebx = middle word */
	movl	(%esi),%eax	/* %eax = least significant word */

	movl	$0,-4(%ebp)	/* shift count accumulator = 0 */

	orl	%edx,%edx	/* ms bits */
	js	L_n_exit	/* Already normalized */
	jnz	L_n_shift_1	/* Shift left 1 - 31 bits */

	/* Top word is zero: shift left one whole word and re-test. */
	movl	%ebx,%edx
	movl	%eax,%ebx
	xorl	%eax,%eax
	movl	$-32,-4(%ebp)

	orl	%edx,%edx	/* ms bits */
	js	L_n_exit	/* Normalized now */
	jnz	L_n_shift_1	/* Shift left 1 - 31 bits */

	/* Top two words were zero: shift a second whole word. */
	movl	%ebx,%edx
	movl	%eax,%ebx
	xorl	%eax,%eax
	addl	$-32,-4(%ebp)	/* total word shift is now 64 bits */
	jmp	L_n_exit	/* Might not be normalized,
				   but shift no more. */

/* We need to shift left by 1 - 31 bits */
L_n_shift_1:
	bsrl	%edx,%ecx	/* get the required shift in %ecx */
	subl	$31,%ecx
	negl	%ecx		/* %ecx = 31 - bsr(%edx) = left shift needed */
	subl	%ecx,-4(%ebp)	/* account for the bit shift in the count */
	shld	%cl,%ebx,%edx	/* 96-bit left shift across the three words */
	shld	%cl,%eax,%ebx
	shl	%cl,%eax

L_n_exit:
	movl	%edx,8(%esi)	/* store the result back into *n */
	movl	%ebx,4(%esi)
	movl	%eax,(%esi)

	movl	-4(%ebp),%eax	/* return the accumulated shift count */

	popl	%esi
	popl	%ebx
	leave
	RET
SYM_FUNC_END(norm_Xsig)