shr_Xsig.S 2.4 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788
  1. .file "shr_Xsig.S"
/*---------------------------------------------------------------------------+
 |  shr_Xsig.S                                                               |
 |                                                                           |
 |  12 byte right shift function                                             |
 |                                                                           |
 | Copyright (C) 1992,1994,1995                                              |
 |                       W. Metzenthen, 22 Parker St, Ormond, Vic 3163,      |
 |                       Australia.  E-mail billm@jacobi.maths.monash.edu.au |
 |                                                                           |
 | Call from C as:                                                           |
 |   void shr_Xsig(Xsig *arg, unsigned nr)                                   |
 |                                                                           |
 | Extended shift right function.                                            |
 | Fastest for small shifts.                                                 |
 | Shifts the 12 byte quantity pointed to by the first arg (arg)             |
 | right by the number of bits specified by the second arg (nr).             |
 |                                                                           |
 +---------------------------------------------------------------------------*/
#include "fpu_emu.h"

.text
/*---------------------------------------------------------------------------+
 | void shr_Xsig(Xsig *arg, unsigned nr)
 |
 | Logical right shift of the 96-bit (12-byte) quantity at *arg by nr
 | bits, zero-filling from the top.  The Xsig is stored little-endian as
 | three 32-bit words:  (%esi) = least significant ("lsl"),
 | 4(%esi) = middle ("midl"), 8(%esi) = most significant ("msl").
 |
 | ABI:      i386 cdecl.  PARAM1/PARAM2 (macros from fpu_emu.h) name the
 |           %ebp-relative stack slots of the two C arguments — assumed
 |           8(%ebp)/12(%ebp); confirm against fpu_emu.h.
 | Clobbers: %eax, %ecx, %edx and flags (caller-saved under cdecl).
 |           %esi (and %ebx on the short-shift path) are saved/restored;
 |           %ebp is framed and restored by `leave`.
 |
 | Dispatches on nr because shrd only accepts counts 0..31: each
 | successive branch handles one more whole-word (32-bit) displacement.
 +---------------------------------------------------------------------------*/
ENTRY(shr_Xsig)
	push	%ebp
	movl	%esp,%ebp
	pushl	%esi			/* callee-saved; will hold arg pointer */
	movl	PARAM2,%ecx		/* ecx = nr, the shift count */
	movl	PARAM1,%esi		/* esi = arg, the 12-byte Xsig */
	cmpl	$32,%ecx		/* shrd only works for 0..31 bits */
	jnc	L_more_than_31		/* jnc == jae: unsigned nr >= 32 */

	/* nr in 0..31: full three-word funnel shift. */
	pushl	%ebx			/* callee-saved scratch for midl */
	movl	(%esi),%eax		/* lsl */
	movl	4(%esi),%ebx		/* midl */
	movl	8(%esi),%edx		/* msl */
	shrd	%cl,%ebx,%eax		/* lsl >>= nr, filled from midl */
	shrd	%cl,%edx,%ebx		/* midl >>= nr, filled from msl */
	shr	%cl,%edx		/* msl >>= nr, zero-filled */
	movl	%eax,(%esi)
	movl	%ebx,4(%esi)
	movl	%edx,8(%esi)
	popl	%ebx
	popl	%esi
	leave
	ret

L_more_than_31:
	cmpl	$64,%ecx
	jnc	L_more_than_63		/* nr >= 64 */

	/* nr in 32..63: lsl is shifted out entirely; the upper two
	   words move down one slot and funnel-shift by nr-32. */
	subb	$32,%cl			/* remaining bit count, now 0..31 */
	movl	4(%esi),%eax		/* midl */
	movl	8(%esi),%edx		/* msl */
	shrd	%cl,%edx,%eax		/* midl >>= (nr-32), filled from msl */
	shr	%cl,%edx		/* msl >>= (nr-32), zero-filled */
	movl	%eax,(%esi)		/* old midl becomes new lsl */
	movl	%edx,4(%esi)		/* old msl becomes new midl */
	movl	$0,8(%esi)		/* top word is now zero */
	popl	%esi
	leave
	ret

L_more_than_63:
	cmpl	$96,%ecx
	jnc	L_more_than_95		/* nr >= 96 */

	/* nr in 64..95: only msl survives, shifted down by nr-64
	   into the lowest word; the upper two words become zero. */
	subb	$64,%cl			/* remaining bit count, now 0..31 */
	movl	8(%esi),%eax		/* msl */
	shr	%cl,%eax		/* msl >>= (nr-64), zero-filled */
	xorl	%edx,%edx
	movl	%eax,(%esi)		/* shifted msl becomes new lsl */
	movl	%edx,4(%esi)
	movl	%edx,8(%esi)
	popl	%esi
	leave
	ret

	/* nr >= 96: every bit is shifted out; the result is zero. */
L_more_than_95:
	xorl	%eax,%eax
	movl	%eax,(%esi)
	movl	%eax,4(%esi)
	movl	%eax,8(%esi)
	popl	%esi
	leave
	ret