/* SPDX-License-Identifier: GPL-2.0 */
/*---------------------------------------------------------------------------+
 |  round_Xsig.S                                                             |
 |                                                                           |
 | Copyright (C) 1992,1993,1994,1995                                         |
 |                       W. Metzenthen, 22 Parker St, Ormond, Vic 3163,      |
 |                       Australia.  E-mail   billm@jacobi.maths.monash.edu.au |
 |                                                                           |
 | Normalize and round a 12 byte quantity.                                   |
 | Call from C as:                                                           |
 |   int round_Xsig(Xsig *n)                                                 |
 |                                                                           |
 | Normalize a 12 byte quantity.                                             |
 | Call from C as:                                                           |
 |   int norm_Xsig(Xsig *n)                                                  |
 |                                                                           |
 | Each function returns the size of the shift (nr of bits).                 |
 |                                                                           |
 +---------------------------------------------------------------------------*/
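
/*
 * Illustrative C usage (a sketch only; the Xsig layout is assumed to be
 * the one in fpu_emu.h: three 32-bit words lsw, midw, msw at offsets
 * 0, 4 and 8, matching the (%esi), 4(%esi) and 8(%esi) accesses below):
 *
 *	Xsig x = { 0, 0, 0x40000000 };	// lsw, midw, msw: top bit clear
 *	int shift = norm_Xsig(&x);	// now x.msw == 0x80000000, shift == -1
 *
 * The return value is whatever accumulates in the local at -4(%ebp):
 * it starts at zero, goes down by the number of bits shifted left, and
 * (in round_Xsig) goes up by one if rounding carries into a new ms bit.
 */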
	.file	"round_Xsig.S"

#include "fpu_emu.h"


.text
SYM_FUNC_START(round_Xsig)
	pushl	%ebp
	movl	%esp,%ebp
	pushl	%ebx		/* Reserve some space: -4(%ebp) holds the shift count */
	pushl	%ebx		/* Save %ebx */
	pushl	%esi		/* Save %esi */

	movl	PARAM1,%esi	/* %esi -> the Xsig argument */

	movl	8(%esi),%edx	/* ms word */
	movl	4(%esi),%ebx	/* middle word */
	movl	(%esi),%eax	/* ls word */

	movl	$0,-4(%ebp)	/* no shift yet */

	orl	%edx,%edx	/* ms bits */
	js	L_round		/* Already normalized */
	jnz	L_shift_1	/* Shift left 1 - 31 bits */

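/* The ms word is zero: shift the value up by one whole 32-bit word,
   record the 32 bit shift, and fall through to handle any remaining
   shift of 1 - 31 bits.  (Unlike norm_Xsig there is no second word
   shift here, so the new ms word is expected to be non-zero.) */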
	movl	%ebx,%edx
	movl	%eax,%ebx
	xorl	%eax,%eax
	movl	$-32,-4(%ebp)

/* We need to shift left by 1 - 31 bits */
L_shift_1:
	bsrl	%edx,%ecx	/* get the required shift in %ecx */
	subl	$31,%ecx
	negl	%ecx		/* %ecx = 31 - (index of ms set bit) */
	subl	%ecx,-4(%ebp)
	shld	%cl,%ebx,%edx	/* shift %edx:%ebx:%eax left by %cl bits */
	shld	%cl,%eax,%ebx
	shl	%cl,%eax

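/* Rounding: bit 31 of the low word decides whether the upper 64 bits
   in %edx:%ebx are rounded up by one.  A carry all the way out of
   %edx means the value reached 2^64: put 0x80000000 back into the
   ms word and bump the shift count by one. */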
L_round:
	testl	$0x80000000,%eax
	jz	L_exit

	addl	$1,%ebx
	adcl	$0,%edx
	jnz	L_exit

	movl	$0x80000000,%edx
	incl	-4(%ebp)

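/* Write the result back into the Xsig and return the shift count
   accumulated at -4(%ebp). */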
L_exit:
	movl	%edx,8(%esi)
	movl	%ebx,4(%esi)
	movl	%eax,(%esi)

	movl	-4(%ebp),%eax

	popl	%esi
	popl	%ebx
	leave
	ret
SYM_FUNC_END(round_Xsig)



SYM_FUNC_START(norm_Xsig)
	pushl	%ebp
	movl	%esp,%ebp
	pushl	%ebx		/* Reserve some space */
	pushl	%ebx
	pushl	%esi

	movl	PARAM1,%esi

	movl	8(%esi),%edx
	movl	4(%esi),%ebx
	movl	(%esi),%eax

	movl	$0,-4(%ebp)

	orl	%edx,%edx	/* ms bits */
	js	L_n_exit	/* Already normalized */
	jnz	L_n_shift_1	/* Shift left 1 - 31 bits */

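/* The ms word is zero: shift up by one whole 32-bit word, record the
   32 bit shift, and test the new ms word. */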
	movl	%ebx,%edx
	movl	%eax,%ebx
	xorl	%eax,%eax
	movl	$-32,-4(%ebp)

	orl	%edx,%edx	/* ms bits */
	js	L_n_exit	/* Normalized now */
	jnz	L_n_shift_1	/* Shift left 1 - 31 bits */

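/* Still zero: shift up by a second word and record it, then stop; the
   remaining word (the original ls word) is not shifted bit by bit. */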
	movl	%ebx,%edx
	movl	%eax,%ebx
	xorl	%eax,%eax
	addl	$-32,-4(%ebp)
	jmp	L_n_exit	/* Might not be normalized,
				   but shift no more. */

/* We need to shift left by 1 - 31 bits */
L_n_shift_1:
	bsrl	%edx,%ecx	/* get the required shift in %ecx */
	subl	$31,%ecx
	negl	%ecx
	subl	%ecx,-4(%ebp)
	shld	%cl,%ebx,%edx
	shld	%cl,%eax,%ebx
	shl	%cl,%eax

L_n_exit:
	movl	%edx,8(%esi)
	movl	%ebx,4(%esi)
	movl	%eax,(%esi)

	movl	-4(%ebp),%eax

	popl	%esi
	popl	%ebx
	leave
	ret
SYM_FUNC_END(norm_Xsig)