Pre-generate the optimized x86 crypto code and check it in rather than

depending on perl at build time.  Makefile.asm is a helper for after the
next import.

With my cvs@ hat on, the relatively small repo cost of this is acceptable,
especially given that we have other (much bigger) things like
lib*.so.gz.uu checked in under src/lib/compat/*.

Reviewed by:	kris (maintainer)
This commit is contained in:
Peter Wemm 2002-05-03 00:14:39 +00:00
parent 39ee03c316
commit 81fb684cc3
Notes: svn2git 2020-12-20 02:59:44 +00:00
svn path=/head/; revision=95967
15 changed files with 14661 additions and 41 deletions

View File

@ -16,13 +16,8 @@
${LCRYPTO_SRC}/stack ${LCRYPTO_SRC}/txt_db ${LCRYPTO_SRC}/x509 \ ${LCRYPTO_SRC}/stack ${LCRYPTO_SRC}/txt_db ${LCRYPTO_SRC}/x509 \
${LCRYPTO_SRC}/x509v3 ${LCRYPTO_SRC}/x509v3
.if !defined(NOPERL) && !defined(NO_PERL) && ${MACHINE_ARCH} == "i386" .if ${MACHINE_ARCH} == "i386"
.PATH: ${LCRYPTO_SRC}/rc4/asm ${LCRYPTO_SRC}/rc5/asm \ .PATH: ${.CURDIR}/i386
${LCRYPTO_SRC}/des/asm ${LCRYPTO_SRC}/cast/asm \
${LCRYPTO_SRC}/sha/asm ${LCRYPTO_SRC}/bn/asm \
${LCRYPTO_SRC}/bf/asm ${LCRYPTO_SRC}/md5/asm \
${LCRYPTO_SRC}/ripemd/asm
PERLPATH= ${LCRYPTO_SRC}/des/asm:${LCRYPTO_SRC}/perlasm
.endif .endif
.if defined(MAKE_IDEA) && ${MAKE_IDEA} == YES .if defined(MAKE_IDEA) && ${MAKE_IDEA} == YES
@ -58,11 +53,11 @@ SRCS+= a_bitstr.c a_bmp.c a_bool.c a_bytes.c a_d2i_fp.c a_digest.c \
# blowfish # blowfish
SRCS+= bf_cfb64.c bf_ecb.c bf_ofb64.c bf_skey.c SRCS+= bf_cfb64.c bf_ecb.c bf_ofb64.c bf_skey.c
.if !defined(NOPERL) && !defined(NO_PERL) && ${MACHINE_ARCH} == "i386" .if ${MACHINE_ARCH} == "i386"
.if ${MACHINE_CPU:Mi686} .if ${MACHINE_CPU:Mi686}
SRCS+= bf-686.pl SRCS+= bf-686.s
.else .else
SRCS+= bf-586.pl SRCS+= bf-586.s
.endif .endif
.else .else
SRCS+= bf_enc.c SRCS+= bf_enc.c
@ -79,8 +74,8 @@ SRCS+= bn_add.c bn_blind.c bn_ctx.c bn_div.c bn_err.c \
bn_exp.c bn_exp2.c bn_gcd.c bn_lib.c bn_mont.c bn_mpi.c \ bn_exp.c bn_exp2.c bn_gcd.c bn_lib.c bn_mont.c bn_mpi.c \
bn_mul.c bn_prime.c bn_print.c bn_rand.c bn_recp.c bn_shift.c \ bn_mul.c bn_prime.c bn_print.c bn_rand.c bn_recp.c bn_shift.c \
bn_sqr.c bn_word.c bn_sqr.c bn_word.c
.if !defined(NOPERL) && !defined(NO_PERL) && ${MACHINE_ARCH} == "i386" .if ${MACHINE_ARCH} == "i386"
SRCS+= bn-586.pl co-586.pl SRCS+= bn-586.s co-586.s
.else .else
SRCS+= bn_asm.c SRCS+= bn_asm.c
.endif .endif
@ -90,8 +85,8 @@ SRCS+= buf_err.c buffer.c
# cast # cast
SRCS+= c_cfb64.c c_ecb.c c_ofb64.c c_skey.c SRCS+= c_cfb64.c c_ecb.c c_ofb64.c c_skey.c
.if !defined(NOPERL) && !defined(NO_PERL) && ${MACHINE_ARCH} == "i386" .if ${MACHINE_ARCH} == "i386"
SRCS+= cast-586.pl SRCS+= cast-586.s
.else .else
SRCS+= c_enc.c SRCS+= c_enc.c
.endif .endif
@ -108,8 +103,8 @@ SRCS+= cbc_cksm.c cbc_enc.c cfb64ede.c cfb64enc.c cfb_enc.c \
fcrypt.c ofb64ede.c ofb64enc.c ofb_enc.c pcbc_enc.c \ fcrypt.c ofb64ede.c ofb64enc.c ofb_enc.c pcbc_enc.c \
qud_cksm.c rand_key.c read2pwd.c read_pwd.c rpc_enc.c \ qud_cksm.c rand_key.c read2pwd.c read_pwd.c rpc_enc.c \
set_key.c str2key.c xcbc_enc.c rnd_keys.c set_key.c str2key.c xcbc_enc.c rnd_keys.c
.if !defined(NOPERL) && !defined(NO_PERL) && ${MACHINE_ARCH} == "i386" .if ${MACHINE_ARCH} == "i386"
SRCS+= des-586.pl crypt586.pl SRCS+= des-586.s crypt586.s
.else .else
SRCS+= des_enc.c fcrypt_b.c SRCS+= des_enc.c fcrypt_b.c
.endif .endif
@ -156,8 +151,8 @@ SRCS+= md4_dgst.c md4_one.c
# md5 # md5
SRCS+= md5_dgst.c md5_one.c SRCS+= md5_dgst.c md5_one.c
.if !defined(NOPERL) && !defined(NO_PERL) && ${MACHINE_ARCH} == "i386" .if ${MACHINE_ARCH} == "i386"
SRCS+= md5-586.pl SRCS+= md5-586.s
.endif .endif
# mdc2 # mdc2
@ -185,24 +180,24 @@ SRCS+= rc2_cbc.c rc2cfb64.c rc2_ecb.c rc2ofb64.c rc2_skey.c
# rc4 # rc4
SRCS+= rc4_skey.c SRCS+= rc4_skey.c
.if !defined(NOPERL) && !defined(NO_PERL) && ${MACHINE_ARCH} == "i386" .if ${MACHINE_ARCH} == "i386"
SRCS+= rc4-586.pl SRCS+= rc4-586.s
.else .else
SRCS+= rc4_enc.c SRCS+= rc4_enc.c
.endif .endif
# rc5 # rc5
SRCS+= rc5cfb64.c rc5_ecb.c rc5ofb64.c rc5_skey.c SRCS+= rc5cfb64.c rc5_ecb.c rc5ofb64.c rc5_skey.c
.if !defined(NOPERL) && !defined(NO_PERL) && ${MACHINE_ARCH} == "i386" .if ${MACHINE_ARCH} == "i386"
SRCS+= rc5-586.pl SRCS+= rc5-586.s
.else .else
SRCS+= rc5_enc.c SRCS+= rc5_enc.c
.endif .endif
# ripemd # ripemd
SRCS+= rmd_dgst.c rmd_one.c SRCS+= rmd_dgst.c rmd_one.c
.if !defined(NOPERL) && !defined(NO_PERL) && ${MACHINE_ARCH} == "i386" .if ${MACHINE_ARCH} == "i386"
SRCS+= rmd-586.pl SRCS+= rmd-586.s
.endif .endif
# rsa # rsa
@ -213,8 +208,8 @@ SRCS+= rsa_chk.c rsa_eay.c rsa_err.c rsa_gen.c rsa_lib.c rsa_none.c \
# sha # sha
SRCS+= sha_dgst.c sha_one.c sha1_one.c sha1dgst.c SRCS+= sha_dgst.c sha_one.c sha1_one.c sha1dgst.c
.if !defined(NOPERL) && !defined(NO_PERL) && ${MACHINE_ARCH} == "i386" .if ${MACHINE_ARCH} == "i386"
SRCS+= sha1-586.pl SRCS+= sha1-586.s
.endif .endif
# stack # stack
@ -387,14 +382,3 @@ SYMLINKS+= lib${LIB}_p.a ${LIBDIR}/libdes_p.a
.endif .endif
.include <bsd.lib.mk> .include <bsd.lib.mk>
.if !defined(NOPERL) && !defined(NO_PERL) && ${MACHINE_ARCH} == "i386"
CLEANFILES+= ${SRCS:M*.pl:S/.pl$/.cmt/} ${SRCS:M*.pl:S/.pl$/.s/}
.SUFFIXES: .pl .cmt
.pl.cmt:
perl -I${PERLPATH} ${.IMPSRC} elf ${CPUTYPE:Mi386:S/i//} > ${.TARGET}
.cmt.s:
tr -d "'" < ${.IMPSRC} > ${.TARGET}
.endif

View File

@ -0,0 +1,63 @@
# $FreeBSD$
# Use this to help generate the asm *.s files after an import. It is not
# perfect by any means, but does what is needed.
# Do a 'make -f Makefile.asm all' and it will generate *.s. Move them
# to the i386 subdir, and correct any exposed paths and $FreeBSD$ tags.
.if ${MACHINE_ARCH} == "i386"
.include "Makefile.inc"

# Where the perlasm generator scripts live in the vendor crypto tree.
.PATH:	${LCRYPTO_SRC}/rc4/asm ${LCRYPTO_SRC}/rc5/asm \
	${LCRYPTO_SRC}/des/asm ${LCRYPTO_SRC}/cast/asm \
	${LCRYPTO_SRC}/sha/asm ${LCRYPTO_SRC}/bn/asm \
	${LCRYPTO_SRC}/bf/asm ${LCRYPTO_SRC}/md5/asm \
	${LCRYPTO_SRC}/ripemd/asm

# perl -I search path so the scripts can find the shared perlasm
# modules (presumably x86asm.pl and friends -- confirm against vendor tree).
PERLPATH=	${LCRYPTO_SRC}/des/asm:${LCRYPTO_SRC}/perlasm

SRCS=
# blowfish
SRCS+=	bf-686.pl
SRCS+=	bf-586.pl
# bn
SRCS+=	bn-586.pl co-586.pl
# cast
SRCS+=	cast-586.pl
# des
SRCS+=	des-586.pl crypt586.pl
# md5
SRCS+=	md5-586.pl
# rc4
SRCS+=	rc4-586.pl
# rc5
SRCS+=	rc5-586.pl
# ripemd
SRCS+=	rmd-586.pl
# sha
SRCS+=	sha1-586.pl

# Anchor the substitution at end-of-word so only the .pl suffix is
# rewritten (consistent with the CLEANFILES expressions below).
ASM=	${SRCS:S/.pl$/.s/}

all:	${ASM}

CLEANFILES+=	${SRCS:M*.pl:S/.pl$/.cmt/} ${SRCS:M*.pl:S/.pl$/.s/}
.SUFFIXES:	.pl .cmt

# .pl -> .cmt: run the perlasm script for ELF output.  The trailing
# argument expands to "386" only when CPUTYPE is exactly i386 (the
# perlasm 386-compatibility flag) and is empty otherwise.
.pl.cmt:
	perl -I${PERLPATH} ${.IMPSRC} elf ${CPUTYPE:Mi386:S/i//} > ${.TARGET}

# .cmt -> .s: strip single-quote characters from the generated text
# (NOTE(review): presumably so gas does not trip over apostrophes in
# the generated comments -- confirm).
.cmt.s:
	tr -d "'" < ${.IMPSRC} > ${.TARGET}

.include <bsd.prog.mk>
.endif

View File

@ -7,10 +7,7 @@ CFLAGS+= -DNO_IDEA
.endif .endif
.if ${MACHINE_ARCH} == "i386" .if ${MACHINE_ARCH} == "i386"
CFLAGS+= -DL_ENDIAN CFLAGS+= -DL_ENDIAN -DSHA1_ASM -DBN_ASM -DMD5_ASM -DRMD160_ASM
.if !defined(NOPERL) && !defined(NO_PERL)
CFLAGS+= -DSHA1_ASM -DBN_ASM -DMD5_ASM -DRMD160_ASM
.endif
.elif ${MACHINE_ARCH} == "alpha" .elif ${MACHINE_ARCH} == "alpha"
# no ENDIAN stuff defined for alpha (64-bit) # no ENDIAN stuff defined for alpha (64-bit)
.endif .endif

View File

@ -0,0 +1,932 @@
# $FreeBSD$
# Dont even think of reading this code
# It was automatically generated by bf-586.pl
# Which is a perl program used to generate the x86 assembler for
# any of elf, a.out, BSDI, Win32, gaswin (for GNU as on Win32) or Solaris
# eric <eay@cryptsoft.com>
#
# NOTE(review): this is generated output; fix defects in the generating
# perl script and regenerate rather than patching here, or the next
# vendor import will silently undo the change.
.file "bf-586.s"
.version "01.01"
gcc2_compiled.:
.text
.align 16
.globl BF_encrypt
.type BF_encrypt,@function
# BF_encrypt: cdecl, two stack arguments (read from 12/16(%esp) after
# the first two pushes below): arg1 points at two 32-bit words that are
# transformed in place; arg2 (%ebp) points at the key schedule.  The
# schedule layout visible here: 18 longs at offsets 0..68 xored in per
# round, then four 1024-byte lookup tables at offsets 72, 1096, 2120
# and 3144.  Preserves %ebp/%ebx/%esi/%edi per the i386 ABI.
BF_encrypt:
pushl %ebp
pushl %ebx
movl 12(%esp), %ebx
movl 16(%esp), %ebp
pushl %esi
pushl %edi
# Load the 2 words
movl (%ebx), %edi
movl 4(%ebx), %esi
xorl %eax, %eax
movl (%ebp), %ebx
xorl %ecx, %ecx
xorl %ebx, %edi
# Round 0
movl 4(%ebp), %edx
movl %edi, %ebx
xorl %edx, %esi
shrl $16, %ebx
movl %edi, %edx
movb %bh, %al
andl $255, %ebx
movb %dh, %cl
andl $255, %edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax, %ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax, %ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx, %ebx
xorl %eax, %eax
xorl %ebx, %esi
# Round 1
movl 8(%ebp), %edx
movl %esi, %ebx
xorl %edx, %edi
shrl $16, %ebx
movl %esi, %edx
movb %bh, %al
andl $255, %ebx
movb %dh, %cl
andl $255, %edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax, %ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax, %ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx, %ebx
xorl %eax, %eax
xorl %ebx, %edi
# Round 2
movl 12(%ebp), %edx
movl %edi, %ebx
xorl %edx, %esi
shrl $16, %ebx
movl %edi, %edx
movb %bh, %al
andl $255, %ebx
movb %dh, %cl
andl $255, %edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax, %ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax, %ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx, %ebx
xorl %eax, %eax
xorl %ebx, %esi
# Round 3
movl 16(%ebp), %edx
movl %esi, %ebx
xorl %edx, %edi
shrl $16, %ebx
movl %esi, %edx
movb %bh, %al
andl $255, %ebx
movb %dh, %cl
andl $255, %edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax, %ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax, %ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx, %ebx
xorl %eax, %eax
xorl %ebx, %edi
# Round 4
movl 20(%ebp), %edx
movl %edi, %ebx
xorl %edx, %esi
shrl $16, %ebx
movl %edi, %edx
movb %bh, %al
andl $255, %ebx
movb %dh, %cl
andl $255, %edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax, %ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax, %ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx, %ebx
xorl %eax, %eax
xorl %ebx, %esi
# Round 5
movl 24(%ebp), %edx
movl %esi, %ebx
xorl %edx, %edi
shrl $16, %ebx
movl %esi, %edx
movb %bh, %al
andl $255, %ebx
movb %dh, %cl
andl $255, %edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax, %ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax, %ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx, %ebx
xorl %eax, %eax
xorl %ebx, %edi
# Round 6
movl 28(%ebp), %edx
movl %edi, %ebx
xorl %edx, %esi
shrl $16, %ebx
movl %edi, %edx
movb %bh, %al
andl $255, %ebx
movb %dh, %cl
andl $255, %edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax, %ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax, %ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx, %ebx
xorl %eax, %eax
xorl %ebx, %esi
# Round 7
movl 32(%ebp), %edx
movl %esi, %ebx
xorl %edx, %edi
shrl $16, %ebx
movl %esi, %edx
movb %bh, %al
andl $255, %ebx
movb %dh, %cl
andl $255, %edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax, %ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax, %ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx, %ebx
xorl %eax, %eax
xorl %ebx, %edi
# Round 8
movl 36(%ebp), %edx
movl %edi, %ebx
xorl %edx, %esi
shrl $16, %ebx
movl %edi, %edx
movb %bh, %al
andl $255, %ebx
movb %dh, %cl
andl $255, %edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax, %ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax, %ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx, %ebx
xorl %eax, %eax
xorl %ebx, %esi
# Round 9
movl 40(%ebp), %edx
movl %esi, %ebx
xorl %edx, %edi
shrl $16, %ebx
movl %esi, %edx
movb %bh, %al
andl $255, %ebx
movb %dh, %cl
andl $255, %edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax, %ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax, %ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx, %ebx
xorl %eax, %eax
xorl %ebx, %edi
# Round 10
movl 44(%ebp), %edx
movl %edi, %ebx
xorl %edx, %esi
shrl $16, %ebx
movl %edi, %edx
movb %bh, %al
andl $255, %ebx
movb %dh, %cl
andl $255, %edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax, %ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax, %ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx, %ebx
xorl %eax, %eax
xorl %ebx, %esi
# Round 11
movl 48(%ebp), %edx
movl %esi, %ebx
xorl %edx, %edi
shrl $16, %ebx
movl %esi, %edx
movb %bh, %al
andl $255, %ebx
movb %dh, %cl
andl $255, %edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax, %ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax, %ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx, %ebx
xorl %eax, %eax
xorl %ebx, %edi
# Round 12
movl 52(%ebp), %edx
movl %edi, %ebx
xorl %edx, %esi
shrl $16, %ebx
movl %edi, %edx
movb %bh, %al
andl $255, %ebx
movb %dh, %cl
andl $255, %edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax, %ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax, %ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx, %ebx
xorl %eax, %eax
xorl %ebx, %esi
# Round 13
movl 56(%ebp), %edx
movl %esi, %ebx
xorl %edx, %edi
shrl $16, %ebx
movl %esi, %edx
movb %bh, %al
andl $255, %ebx
movb %dh, %cl
andl $255, %edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax, %ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax, %ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx, %ebx
xorl %eax, %eax
xorl %ebx, %edi
# Round 14
movl 60(%ebp), %edx
movl %edi, %ebx
xorl %edx, %esi
shrl $16, %ebx
movl %edi, %edx
movb %bh, %al
andl $255, %ebx
movb %dh, %cl
andl $255, %edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax, %ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax, %ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx, %ebx
xorl %eax, %eax
xorl %ebx, %esi
# Round 15
movl 64(%ebp), %edx
movl %esi, %ebx
xorl %edx, %edi
shrl $16, %ebx
movl %esi, %edx
movb %bh, %al
andl $255, %ebx
movb %dh, %cl
andl $255, %edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax, %ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax, %ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx, %ebx
# Load parameter 0 (16) enc=1
# Final swap/whitening: subkey word at offset 68 goes into the second
# half, then both words are stored back through arg1.
movl 20(%esp), %eax
xorl %ebx, %edi
movl 68(%ebp), %edx
xorl %edx, %esi
movl %edi, 4(%eax)
movl %esi, (%eax)
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.BF_encrypt_end:
.size BF_encrypt,.BF_encrypt_end-BF_encrypt
.ident "BF_encrypt"
.text
.align 16
.globl BF_decrypt
.type BF_decrypt,@function
# BF_decrypt: identical round structure and calling convention to
# BF_encrypt above, but the 18 subkey words are applied in reverse
# order -- offset 68 is xored in first and offset 0 last.
BF_decrypt:
pushl %ebp
pushl %ebx
movl 12(%esp), %ebx
movl 16(%esp), %ebp
pushl %esi
pushl %edi
# Load the 2 words
movl (%ebx), %edi
movl 4(%ebx), %esi
xorl %eax, %eax
movl 68(%ebp), %ebx
xorl %ecx, %ecx
xorl %ebx, %edi
# Round 16
movl 64(%ebp), %edx
movl %edi, %ebx
xorl %edx, %esi
shrl $16, %ebx
movl %edi, %edx
movb %bh, %al
andl $255, %ebx
movb %dh, %cl
andl $255, %edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax, %ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax, %ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx, %ebx
xorl %eax, %eax
xorl %ebx, %esi
# Round 15
movl 60(%ebp), %edx
movl %esi, %ebx
xorl %edx, %edi
shrl $16, %ebx
movl %esi, %edx
movb %bh, %al
andl $255, %ebx
movb %dh, %cl
andl $255, %edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax, %ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax, %ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx, %ebx
xorl %eax, %eax
xorl %ebx, %edi
# Round 14
movl 56(%ebp), %edx
movl %edi, %ebx
xorl %edx, %esi
shrl $16, %ebx
movl %edi, %edx
movb %bh, %al
andl $255, %ebx
movb %dh, %cl
andl $255, %edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax, %ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax, %ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx, %ebx
xorl %eax, %eax
xorl %ebx, %esi
# Round 13
movl 52(%ebp), %edx
movl %esi, %ebx
xorl %edx, %edi
shrl $16, %ebx
movl %esi, %edx
movb %bh, %al
andl $255, %ebx
movb %dh, %cl
andl $255, %edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax, %ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax, %ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx, %ebx
xorl %eax, %eax
xorl %ebx, %edi
# Round 12
movl 48(%ebp), %edx
movl %edi, %ebx
xorl %edx, %esi
shrl $16, %ebx
movl %edi, %edx
movb %bh, %al
andl $255, %ebx
movb %dh, %cl
andl $255, %edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax, %ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax, %ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx, %ebx
xorl %eax, %eax
xorl %ebx, %esi
# Round 11
movl 44(%ebp), %edx
movl %esi, %ebx
xorl %edx, %edi
shrl $16, %ebx
movl %esi, %edx
movb %bh, %al
andl $255, %ebx
movb %dh, %cl
andl $255, %edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax, %ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax, %ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx, %ebx
xorl %eax, %eax
xorl %ebx, %edi
# Round 10
movl 40(%ebp), %edx
movl %edi, %ebx
xorl %edx, %esi
shrl $16, %ebx
movl %edi, %edx
movb %bh, %al
andl $255, %ebx
movb %dh, %cl
andl $255, %edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax, %ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax, %ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx, %ebx
xorl %eax, %eax
xorl %ebx, %esi
# Round 9
movl 36(%ebp), %edx
movl %esi, %ebx
xorl %edx, %edi
shrl $16, %ebx
movl %esi, %edx
movb %bh, %al
andl $255, %ebx
movb %dh, %cl
andl $255, %edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax, %ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax, %ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx, %ebx
xorl %eax, %eax
xorl %ebx, %edi
# Round 8
movl 32(%ebp), %edx
movl %edi, %ebx
xorl %edx, %esi
shrl $16, %ebx
movl %edi, %edx
movb %bh, %al
andl $255, %ebx
movb %dh, %cl
andl $255, %edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax, %ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax, %ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx, %ebx
xorl %eax, %eax
xorl %ebx, %esi
# Round 7
movl 28(%ebp), %edx
movl %esi, %ebx
xorl %edx, %edi
shrl $16, %ebx
movl %esi, %edx
movb %bh, %al
andl $255, %ebx
movb %dh, %cl
andl $255, %edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax, %ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax, %ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx, %ebx
xorl %eax, %eax
xorl %ebx, %edi
# Round 6
movl 24(%ebp), %edx
movl %edi, %ebx
xorl %edx, %esi
shrl $16, %ebx
movl %edi, %edx
movb %bh, %al
andl $255, %ebx
movb %dh, %cl
andl $255, %edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax, %ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax, %ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx, %ebx
xorl %eax, %eax
xorl %ebx, %esi
# Round 5
movl 20(%ebp), %edx
movl %esi, %ebx
xorl %edx, %edi
shrl $16, %ebx
movl %esi, %edx
movb %bh, %al
andl $255, %ebx
movb %dh, %cl
andl $255, %edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax, %ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax, %ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx, %ebx
xorl %eax, %eax
xorl %ebx, %edi
# Round 4
movl 16(%ebp), %edx
movl %edi, %ebx
xorl %edx, %esi
shrl $16, %ebx
movl %edi, %edx
movb %bh, %al
andl $255, %ebx
movb %dh, %cl
andl $255, %edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax, %ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax, %ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx, %ebx
xorl %eax, %eax
xorl %ebx, %esi
# Round 3
movl 12(%ebp), %edx
movl %esi, %ebx
xorl %edx, %edi
shrl $16, %ebx
movl %esi, %edx
movb %bh, %al
andl $255, %ebx
movb %dh, %cl
andl $255, %edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax, %ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax, %ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx, %ebx
xorl %eax, %eax
xorl %ebx, %edi
# Round 2
movl 8(%ebp), %edx
movl %edi, %ebx
xorl %edx, %esi
shrl $16, %ebx
movl %edi, %edx
movb %bh, %al
andl $255, %ebx
movb %dh, %cl
andl $255, %edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax, %ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax, %ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx, %ebx
xorl %eax, %eax
xorl %ebx, %esi
# Round 1
movl 4(%ebp), %edx
movl %esi, %ebx
xorl %edx, %edi
shrl $16, %ebx
movl %esi, %edx
movb %bh, %al
andl $255, %ebx
movb %dh, %cl
andl $255, %edx
movl 72(%ebp,%eax,4),%eax
movl 1096(%ebp,%ebx,4),%ebx
addl %eax, %ebx
movl 2120(%ebp,%ecx,4),%eax
xorl %eax, %ebx
movl 3144(%ebp,%edx,4),%edx
addl %edx, %ebx
# Load parameter 0 (1) enc=0
# Final swap/whitening: subkey word at offset 0 closes the reverse
# schedule, then both words are stored back through arg1.
movl 20(%esp), %eax
xorl %ebx, %edi
movl (%ebp), %edx
xorl %edx, %esi
movl %edi, 4(%eax)
movl %esi, (%eax)
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.BF_decrypt_end:
.size BF_decrypt,.BF_decrypt_end-BF_decrypt
.ident "BF_decrypt"
.text
.align 16
.globl BF_cbc_encrypt
.type BF_cbc_encrypt,@function
# CBC driver around BF_encrypt/BF_decrypt.  Six cdecl stack arguments
# (NOTE(review): names inferred from the generated parameter comments
# below -- confirm against the C prototype): in, out, length, schedule,
# ivec, enc.  Whole 8-byte blocks are chained through the block
# functions; the trailing (length & 7) bytes are handled by computed
# jumps through the ej*/dj* label tables at the bottom.
#
# Two fixes versus the generated text (both should also be applied to
# the generating cbc.pl so a regeneration does not reintroduce them):
#   1. the decrypt tail was missing the indirect jump through
#      .L023cbc_dec_jmp_table, leaving that table dead and always
#      running the dj7 path;
#   2. .L021dj2/.L022dj1 stored the last partial-block bytes through
#      %esi (the input pointer) instead of %edi (the output pointer).
BF_cbc_encrypt:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 28(%esp), %ebp
# getting iv ptr from parameter 4
movl 36(%esp), %ebx
movl (%ebx), %esi
movl 4(%ebx), %edi
# two copies of the IV on the stack: one working pair, one scratch
pushl %edi
pushl %esi
pushl %edi
pushl %esi
movl %esp, %ebx
movl 36(%esp), %esi
movl 40(%esp), %edi
# getting encrypt flag from parameter 5
movl 56(%esp), %ecx
# get and push parameter 3
movl 48(%esp), %eax
pushl %eax
pushl %ebx
cmpl $0, %ecx
jz .L000decrypt
# round the byte count down to whole 8-byte blocks
andl $4294967288, %ebp
movl 8(%esp), %eax
movl 12(%esp), %ebx
jz .L001encrypt_finish
.L002encrypt_loop:
# xor plaintext block into the chaining value, byte-swap to big-endian
movl (%esi), %ecx
movl 4(%esi), %edx
xorl %ecx, %eax
xorl %edx, %ebx
.byte 15
.byte 200 # bswapl %eax
.byte 15
.byte 203 # bswapl %ebx
movl %eax, 8(%esp)
movl %ebx, 12(%esp)
call BF_encrypt
movl 8(%esp), %eax
movl 12(%esp), %ebx
.byte 15
.byte 200 # bswapl %eax
.byte 15
.byte 203 # bswapl %ebx
movl %eax, (%edi)
movl %ebx, 4(%edi)
addl $8, %esi
addl $8, %edi
subl $8, %ebp
jnz .L002encrypt_loop
.L001encrypt_finish:
# handle the trailing partial block, if any
movl 52(%esp), %ebp
andl $7, %ebp
jz .L003finish
xorl %ecx, %ecx
xorl %edx, %edx
movl .L004cbc_enc_jmp_table(,%ebp,4),%ebp
jmp *%ebp
.L005ej7:
movb 6(%esi), %dh
sall $8, %edx
.L006ej6:
movb 5(%esi), %dh
.L007ej5:
movb 4(%esi), %dl
.L008ej4:
movl (%esi), %ecx
jmp .L009ejend
.L010ej3:
movb 2(%esi), %ch
sall $8, %ecx
.L011ej2:
movb 1(%esi), %ch
.L012ej1:
movb (%esi), %cl
.L009ejend:
xorl %ecx, %eax
xorl %edx, %ebx
.byte 15
.byte 200 # bswapl %eax
.byte 15
.byte 203 # bswapl %ebx
movl %eax, 8(%esp)
movl %ebx, 12(%esp)
call BF_encrypt
movl 8(%esp), %eax
movl 12(%esp), %ebx
.byte 15
.byte 200 # bswapl %eax
.byte 15
.byte 203 # bswapl %ebx
movl %eax, (%edi)
movl %ebx, 4(%edi)
jmp .L003finish
.align 16
.L000decrypt:
andl $4294967288, %ebp
movl 16(%esp), %eax
movl 20(%esp), %ebx
jz .L013decrypt_finish
.L014decrypt_loop:
movl (%esi), %eax
movl 4(%esi), %ebx
.byte 15
.byte 200 # bswapl %eax
.byte 15
.byte 203 # bswapl %ebx
movl %eax, 8(%esp)
movl %ebx, 12(%esp)
call BF_decrypt
movl 8(%esp), %eax
movl 12(%esp), %ebx
.byte 15
.byte 200 # bswapl %eax
.byte 15
.byte 203 # bswapl %ebx
# xor the previous ciphertext (chaining value) into the decrypted block
movl 16(%esp), %ecx
movl 20(%esp), %edx
xorl %eax, %ecx
xorl %ebx, %edx
# current ciphertext becomes the next chaining value
movl (%esi), %eax
movl 4(%esi), %ebx
movl %ecx, (%edi)
movl %edx, 4(%edi)
movl %eax, 16(%esp)
movl %ebx, 20(%esp)
addl $8, %esi
addl $8, %edi
subl $8, %ebp
jnz .L014decrypt_loop
.L013decrypt_finish:
movl 52(%esp), %ebp
andl $7, %ebp
jz .L003finish
movl (%esi), %eax
movl 4(%esi), %ebx
.byte 15
.byte 200 # bswapl %eax
.byte 15
.byte 203 # bswapl %ebx
movl %eax, 8(%esp)
movl %ebx, 12(%esp)
call BF_decrypt
movl 8(%esp), %eax
movl 12(%esp), %ebx
.byte 15
.byte 200 # bswapl %eax
.byte 15
.byte 203 # bswapl %ebx
movl 16(%esp), %ecx
movl 20(%esp), %edx
xorl %eax, %ecx
xorl %ebx, %edx
movl (%esi), %eax
movl 4(%esi), %ebx
# (fixed) dispatch on the remaining byte count; previously missing, so
# .L023cbc_dec_jmp_table was unreferenced and the dj7 path always ran
movl .L023cbc_dec_jmp_table(,%ebp,4),%ebp
jmp *%ebp
.L015dj7:
rorl $16, %edx
movb %dl, 6(%edi)
shrl $16, %edx
.L016dj6:
movb %dh, 5(%edi)
.L017dj5:
movb %dl, 4(%edi)
.L018dj4:
movl %ecx, (%edi)
jmp .L019djend
.L020dj3:
rorl $16, %ecx
movb %cl, 2(%edi)
sall $16, %ecx
.L021dj2:
movb %ch, 1(%edi) # (fixed) was 1(%esi): wrote into the input buffer
.L022dj1:
movb %cl, (%edi) # (fixed) was (%esi): wrote into the input buffer
.L019djend:
jmp .L003finish
.align 16
.L003finish:
# write the final chaining value back through the ivec pointer
movl 60(%esp), %ecx
addl $24, %esp
movl %eax, (%ecx)
movl %ebx, 4(%ecx)
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.align 16
.L004cbc_enc_jmp_table:
.long 0
.long .L012ej1
.long .L011ej2
.long .L010ej3
.long .L008ej4
.long .L007ej5
.long .L006ej6
.long .L005ej7
.align 16
.L023cbc_dec_jmp_table:
.long 0
.long .L022dj1
.long .L021dj2
.long .L020dj3
.long .L018dj4
.long .L017dj5
.long .L016dj6
.long .L015dj7
.L_BF_cbc_encrypt_end:
.size BF_cbc_encrypt,.L_BF_cbc_encrypt_end-BF_cbc_encrypt
.ident "desasm.pl"

View File

@ -0,0 +1,902 @@
# $FreeBSD$
# Dont even think of reading this code
# It was automatically generated by bf-686.pl
# Which is a perl program used to generate the x86 assembler for
# any of elf, a.out, BSDI, Win32, gaswin (for GNU as on Win32) or Solaris
# eric <eay@cryptsoft.com>
#
# NOTE(review): generated output; fix defects in bf-686.pl and
# regenerate rather than patching here.
.file "bf-686.s"
.version "01.01"
gcc2_compiled.:
.text
.align 16
.globl BF_encrypt
.type BF_encrypt,@function
# 686-tuned variant: same contract as the 586 version (arg1 = two
# 32-bit words transformed in place, arg2 = key schedule with 18 longs
# at offsets 0..68 and four 1024-byte tables at 72/1096/2120/3144),
# but byte extraction uses rorl + movb from %ch/%cl and %dh/%dl
# instead of the shrl/andl sequence.
BF_encrypt:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
# Load the 2 words
movl 20(%esp), %eax
movl (%eax), %ecx
movl 4(%eax), %edx
# P pointer, s and enc flag
movl 24(%esp), %edi
xorl %eax, %eax
xorl %ebx, %ebx
xorl (%edi), %ecx
# Round 0
rorl $16, %ecx
movl 4(%edi), %esi
movb %ch, %al
movb %cl, %bl
rorl $16, %ecx
xorl %esi, %edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch, %al
movb %cl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %edx
# Round 1
rorl $16, %edx
movl 8(%edi), %esi
movb %dh, %al
movb %dl, %bl
rorl $16, %edx
xorl %esi, %ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh, %al
movb %dl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %ecx
# Round 2
rorl $16, %ecx
movl 12(%edi), %esi
movb %ch, %al
movb %cl, %bl
rorl $16, %ecx
xorl %esi, %edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch, %al
movb %cl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %edx
# Round 3
rorl $16, %edx
movl 16(%edi), %esi
movb %dh, %al
movb %dl, %bl
rorl $16, %edx
xorl %esi, %ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh, %al
movb %dl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %ecx
# Round 4
rorl $16, %ecx
movl 20(%edi), %esi
movb %ch, %al
movb %cl, %bl
rorl $16, %ecx
xorl %esi, %edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch, %al
movb %cl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %edx
# Round 5
rorl $16, %edx
movl 24(%edi), %esi
movb %dh, %al
movb %dl, %bl
rorl $16, %edx
xorl %esi, %ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh, %al
movb %dl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %ecx
# Round 6
rorl $16, %ecx
movl 28(%edi), %esi
movb %ch, %al
movb %cl, %bl
rorl $16, %ecx
xorl %esi, %edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch, %al
movb %cl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %edx
# Round 7
rorl $16, %edx
movl 32(%edi), %esi
movb %dh, %al
movb %dl, %bl
rorl $16, %edx
xorl %esi, %ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh, %al
movb %dl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %ecx
# Round 8
rorl $16, %ecx
movl 36(%edi), %esi
movb %ch, %al
movb %cl, %bl
rorl $16, %ecx
xorl %esi, %edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch, %al
movb %cl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %edx
# Round 9
rorl $16, %edx
movl 40(%edi), %esi
movb %dh, %al
movb %dl, %bl
rorl $16, %edx
xorl %esi, %ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh, %al
movb %dl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %ecx
# Round 10
rorl $16, %ecx
movl 44(%edi), %esi
movb %ch, %al
movb %cl, %bl
rorl $16, %ecx
xorl %esi, %edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch, %al
movb %cl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %edx
# Round 11
rorl $16, %edx
movl 48(%edi), %esi
movb %dh, %al
movb %dl, %bl
rorl $16, %edx
xorl %esi, %ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh, %al
movb %dl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %ecx
# Round 12
rorl $16, %ecx
movl 52(%edi), %esi
movb %ch, %al
movb %cl, %bl
rorl $16, %ecx
xorl %esi, %edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch, %al
movb %cl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %edx
# Round 13
rorl $16, %edx
movl 56(%edi), %esi
movb %dh, %al
movb %dl, %bl
rorl $16, %edx
xorl %esi, %ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh, %al
movb %dl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %ecx
# Round 14
rorl $16, %ecx
movl 60(%edi), %esi
movb %ch, %al
movb %cl, %bl
rorl $16, %ecx
xorl %esi, %edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch, %al
movb %cl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %edx
# Round 15
rorl $16, %edx
movl 64(%edi), %esi
movb %dh, %al
movb %dl, %bl
rorl $16, %edx
xorl %esi, %ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh, %al
movb %dl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %ecx
# Final whitening with the subkey word at offset 68, then store the
# two (swapped) halves back through arg1.
xorl 68(%edi), %edx
movl 20(%esp), %eax
movl %edx, (%eax)
movl %ecx, 4(%eax)
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.L_BF_encrypt_end:
.size BF_encrypt,.L_BF_encrypt_end-BF_encrypt
.ident "desasm.pl"
.text
.align 16
.globl BF_decrypt
.type BF_decrypt,@function
BF_decrypt:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
# Load the 2 words
movl 20(%esp), %eax
movl (%eax), %ecx
movl 4(%eax), %edx
# P pointer, s and enc flag
movl 24(%esp), %edi
xorl %eax, %eax
xorl %ebx, %ebx
xorl 68(%edi), %ecx
# Round 16
rorl $16, %ecx
movl 64(%edi), %esi
movb %ch, %al
movb %cl, %bl
rorl $16, %ecx
xorl %esi, %edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch, %al
movb %cl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %edx
# Round 15
rorl $16, %edx
movl 60(%edi), %esi
movb %dh, %al
movb %dl, %bl
rorl $16, %edx
xorl %esi, %ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh, %al
movb %dl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %ecx
# Round 14
rorl $16, %ecx
movl 56(%edi), %esi
movb %ch, %al
movb %cl, %bl
rorl $16, %ecx
xorl %esi, %edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch, %al
movb %cl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %edx
# Round 13
rorl $16, %edx
movl 52(%edi), %esi
movb %dh, %al
movb %dl, %bl
rorl $16, %edx
xorl %esi, %ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh, %al
movb %dl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %ecx
# Round 12
rorl $16, %ecx
movl 48(%edi), %esi
movb %ch, %al
movb %cl, %bl
rorl $16, %ecx
xorl %esi, %edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch, %al
movb %cl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %edx
# Round 11
rorl $16, %edx
movl 44(%edi), %esi
movb %dh, %al
movb %dl, %bl
rorl $16, %edx
xorl %esi, %ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh, %al
movb %dl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %ecx
# Round 10
rorl $16, %ecx
movl 40(%edi), %esi
movb %ch, %al
movb %cl, %bl
rorl $16, %ecx
xorl %esi, %edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch, %al
movb %cl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %edx
# Round 9
rorl $16, %edx
movl 36(%edi), %esi
movb %dh, %al
movb %dl, %bl
rorl $16, %edx
xorl %esi, %ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh, %al
movb %dl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %ecx
# Round 8
rorl $16, %ecx
movl 32(%edi), %esi
movb %ch, %al
movb %cl, %bl
rorl $16, %ecx
xorl %esi, %edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch, %al
movb %cl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %edx
# Round 7
rorl $16, %edx
movl 28(%edi), %esi
movb %dh, %al
movb %dl, %bl
rorl $16, %edx
xorl %esi, %ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh, %al
movb %dl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %ecx
# Round 6
rorl $16, %ecx
movl 24(%edi), %esi
movb %ch, %al
movb %cl, %bl
rorl $16, %ecx
xorl %esi, %edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch, %al
movb %cl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %edx
# Round 5
rorl $16, %edx
movl 20(%edi), %esi
movb %dh, %al
movb %dl, %bl
rorl $16, %edx
xorl %esi, %ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh, %al
movb %dl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %ecx
# Round 4
rorl $16, %ecx
movl 16(%edi), %esi
movb %ch, %al
movb %cl, %bl
rorl $16, %ecx
xorl %esi, %edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch, %al
movb %cl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %edx
# Round 3
rorl $16, %edx
movl 12(%edi), %esi
movb %dh, %al
movb %dl, %bl
rorl $16, %edx
xorl %esi, %ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh, %al
movb %dl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %ecx
# Round 2
rorl $16, %ecx
movl 8(%edi), %esi
movb %ch, %al
movb %cl, %bl
rorl $16, %ecx
xorl %esi, %edx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %ch, %al
movb %cl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %edx
# Round 1
rorl $16, %edx
movl 4(%edi), %esi
movb %dh, %al
movb %dl, %bl
rorl $16, %edx
xorl %esi, %ecx
movl 72(%edi,%eax,4),%esi
movl 1096(%edi,%ebx,4),%ebp
movb %dh, %al
movb %dl, %bl
addl %ebp, %esi
movl 2120(%edi,%eax,4),%eax
xorl %eax, %esi
movl 3144(%edi,%ebx,4),%ebp
addl %ebp, %esi
xorl %eax, %eax
xorl %esi, %ecx
xorl (%edi), %edx
movl 20(%esp), %eax
movl %edx, (%eax)
movl %ecx, 4(%eax)
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.L_BF_decrypt_end:
.size BF_decrypt,.L_BF_decrypt_end-BF_decrypt
.ident "desasm.pl"
.text
.align 16
.globl BF_cbc_encrypt
.type BF_cbc_encrypt,@function
# void BF_cbc_encrypt(in, out, length, BF_KEY *schedule, ivec, enc)
# CBC-mode Blowfish: full 8-byte blocks in a loop, then a byte-wise tail
# for length & 7.  A 16-byte scratch area (tin[2]/tout[2]) is built on the
# stack and its address is passed to BF_encrypt/BF_decrypt.
# FIX(review): in the decrypt tail, .L021dj2/.L022dj1 stored the final
# plaintext bytes through %esi (the input pointer) instead of %edi (the
# output pointer); all sibling labels .L015dj7-.L020dj3 store via %edi.
# This is the known perlasm cbc.pl tail bug; corrected below to %edi.
BF_cbc_encrypt:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 28(%esp), %ebp
# getting iv ptr from parameter 4
movl 36(%esp), %ebx
movl (%ebx), %esi
movl 4(%ebx), %edi
pushl %edi
pushl %esi
pushl %edi
pushl %esi
movl %esp, %ebx
movl 36(%esp), %esi
movl 40(%esp), %edi
# getting encrypt flag from parameter 5
movl 56(%esp), %ecx
# get and push parameter 3
movl 48(%esp), %eax
pushl %eax
pushl %ebx
cmpl $0, %ecx
jz .L000decrypt
andl $4294967288, %ebp
movl 8(%esp), %eax
movl 12(%esp), %ebx
# flags are still those of the andl above (mov does not alter them)
jz .L001encrypt_finish
.L002encrypt_loop:
movl (%esi), %ecx
movl 4(%esi), %edx
xorl %ecx, %eax
xorl %edx, %ebx
# bswap encoded as raw bytes for pre-486-aware assemblers
.byte 15
.byte 200 # bswapl %eax
.byte 15
.byte 203 # bswapl %ebx
movl %eax, 8(%esp)
movl %ebx, 12(%esp)
call BF_encrypt
movl 8(%esp), %eax
movl 12(%esp), %ebx
.byte 15
.byte 200 # bswapl %eax
.byte 15
.byte 203 # bswapl %ebx
movl %eax, (%edi)
movl %ebx, 4(%edi)
addl $8, %esi
addl $8, %edi
subl $8, %ebp
jnz .L002encrypt_loop
.L001encrypt_finish:
movl 52(%esp), %ebp
andl $7, %ebp
jz .L003finish
xorl %ecx, %ecx
xorl %edx, %edx
# dispatch on remaining byte count (1..7)
movl .L004cbc_enc_jmp_table(,%ebp,4),%ebp
jmp *%ebp
.L005ej7:
movb 6(%esi), %dh
sall $8, %edx
.L006ej6:
movb 5(%esi), %dh
.L007ej5:
movb 4(%esi), %dl
.L008ej4:
movl (%esi), %ecx
jmp .L009ejend
.L010ej3:
movb 2(%esi), %ch
sall $8, %ecx
.L011ej2:
movb 1(%esi), %ch
.L012ej1:
movb (%esi), %cl
.L009ejend:
xorl %ecx, %eax
xorl %edx, %ebx
.byte 15
.byte 200 # bswapl %eax
.byte 15
.byte 203 # bswapl %ebx
movl %eax, 8(%esp)
movl %ebx, 12(%esp)
call BF_encrypt
movl 8(%esp), %eax
movl 12(%esp), %ebx
.byte 15
.byte 200 # bswapl %eax
.byte 15
.byte 203 # bswapl %ebx
movl %eax, (%edi)
movl %ebx, 4(%edi)
jmp .L003finish
.align 16
.L000decrypt:
andl $4294967288, %ebp
movl 16(%esp), %eax
movl 20(%esp), %ebx
jz .L013decrypt_finish
.L014decrypt_loop:
movl (%esi), %eax
movl 4(%esi), %ebx
.byte 15
.byte 200 # bswapl %eax
.byte 15
.byte 203 # bswapl %ebx
movl %eax, 8(%esp)
movl %ebx, 12(%esp)
call BF_decrypt
movl 8(%esp), %eax
movl 12(%esp), %ebx
.byte 15
.byte 200 # bswapl %eax
.byte 15
.byte 203 # bswapl %ebx
movl 16(%esp), %ecx
movl 20(%esp), %edx
xorl %eax, %ecx
xorl %ebx, %edx
movl (%esi), %eax
movl 4(%esi), %ebx
movl %ecx, (%edi)
movl %edx, 4(%edi)
movl %eax, 16(%esp)
movl %ebx, 20(%esp)
addl $8, %esi
addl $8, %edi
subl $8, %ebp
jnz .L014decrypt_loop
.L013decrypt_finish:
movl 52(%esp), %ebp
andl $7, %ebp
jz .L003finish
movl (%esi), %eax
movl 4(%esi), %ebx
.byte 15
.byte 200 # bswapl %eax
.byte 15
.byte 203 # bswapl %ebx
movl %eax, 8(%esp)
movl %ebx, 12(%esp)
call BF_decrypt
movl 8(%esp), %eax
movl 12(%esp), %ebx
.byte 15
.byte 200 # bswapl %eax
.byte 15
.byte 203 # bswapl %ebx
movl 16(%esp), %ecx
movl 20(%esp), %edx
xorl %eax, %ecx
xorl %ebx, %edx
movl (%esi), %eax
movl 4(%esi), %ebx
# NOTE(review): unlike the encrypt tail, there is no dispatch through
# .L023cbc_dec_jmp_table here; execution falls straight into .L015dj7,
# so the table below is dead.  Confirm against a current upstream cbc.pl
# before changing control flow; left as generated.
.L015dj7:
rorl $16, %edx
movb %dl, 6(%edi)
shrl $16, %edx
.L016dj6:
movb %dh, 5(%edi)
.L017dj5:
movb %dl, 4(%edi)
.L018dj4:
movl %ecx, (%edi)
jmp .L019djend
.L020dj3:
rorl $16, %ecx
movb %cl, 2(%edi)
sall $16, %ecx
.L021dj2:
# was 1(%esi): wrote plaintext back into the input buffer
movb %ch, 1(%edi)
.L022dj1:
# was (%esi): same input/output pointer mixup
movb %cl, (%edi)
.L019djend:
jmp .L003finish
.align 16
.L003finish:
movl 60(%esp), %ecx
addl $24, %esp
movl %eax, (%ecx)
movl %ebx, 4(%ecx)
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.align 16
.L004cbc_enc_jmp_table:
.long 0
.long .L012ej1
.long .L011ej2
.long .L010ej3
.long .L008ej4
.long .L007ej5
.long .L006ej6
.long .L005ej7
.align 16
.L023cbc_dec_jmp_table:
.long 0
.long .L022dj1
.long .L021dj2
.long .L020dj3
.long .L018dj4
.long .L017dj5
.long .L016dj6
.long .L015dj7
.L_BF_cbc_encrypt_end:
.size BF_cbc_encrypt,.L_BF_cbc_encrypt_end-BF_cbc_encrypt
.ident "desasm.pl"

View File

@ -0,0 +1,890 @@
# $FreeBSD$
# Dont even think of reading this code
# It was automatically generated by bn-586.pl
# Which is a perl program used to generate the x86 assember for
# any of elf, a.out, BSDI, Win32, gaswin (for GNU as on Win32) or Solaris
# eric <eay@cryptsoft.com>
.file "bn-586.s"
.version "01.01"
gcc2_compiled.:
.text
.align 16
.globl bn_mul_add_words
.type bn_mul_add_words,@function
# BN_ULONG bn_mul_add_words(BN_ULONG *rp, BN_ULONG *ap, int num, BN_ULONG w)
# rp[i] += ap[i] * w for i in [0, num); returns the final carry word.
# 8x unrolled; the remainder (num & 7) is handled by the fall-through
# tail chain below.  %esi carries the running carry between rounds.
bn_mul_add_words:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
xorl %esi, %esi
movl 20(%esp), %edi
movl 28(%esp), %ecx
movl 24(%esp), %ebx
andl $4294967288, %ecx
movl 32(%esp), %ebp
# this extra push shifts all parameter offsets below by 4
pushl %ecx
jz .L000maw_finish
.L001maw_loop:
movl %ecx, (%esp)
# Round 0
# eax:edx = ap[i]*w + carry + rp[i]; low word to rp[i], high word is carry
movl (%ebx), %eax
mull %ebp
addl %esi, %eax
movl (%edi), %esi
adcl $0, %edx
addl %esi, %eax
adcl $0, %edx
movl %eax, (%edi)
movl %edx, %esi
# Round 4
movl 4(%ebx), %eax
mull %ebp
addl %esi, %eax
movl 4(%edi), %esi
adcl $0, %edx
addl %esi, %eax
adcl $0, %edx
movl %eax, 4(%edi)
movl %edx, %esi
# Round 8
movl 8(%ebx), %eax
mull %ebp
addl %esi, %eax
movl 8(%edi), %esi
adcl $0, %edx
addl %esi, %eax
adcl $0, %edx
movl %eax, 8(%edi)
movl %edx, %esi
# Round 12
movl 12(%ebx), %eax
mull %ebp
addl %esi, %eax
movl 12(%edi), %esi
adcl $0, %edx
addl %esi, %eax
adcl $0, %edx
movl %eax, 12(%edi)
movl %edx, %esi
# Round 16
movl 16(%ebx), %eax
mull %ebp
addl %esi, %eax
movl 16(%edi), %esi
adcl $0, %edx
addl %esi, %eax
adcl $0, %edx
movl %eax, 16(%edi)
movl %edx, %esi
# Round 20
movl 20(%ebx), %eax
mull %ebp
addl %esi, %eax
movl 20(%edi), %esi
adcl $0, %edx
addl %esi, %eax
adcl $0, %edx
movl %eax, 20(%edi)
movl %edx, %esi
# Round 24
movl 24(%ebx), %eax
mull %ebp
addl %esi, %eax
movl 24(%edi), %esi
adcl $0, %edx
addl %esi, %eax
adcl $0, %edx
movl %eax, 24(%edi)
movl %edx, %esi
# Round 28
movl 28(%ebx), %eax
mull %ebp
addl %esi, %eax
movl 28(%edi), %esi
adcl $0, %edx
addl %esi, %eax
adcl $0, %edx
movl %eax, 28(%edi)
movl %edx, %esi
movl (%esp), %ecx
addl $32, %ebx
addl $32, %edi
subl $8, %ecx
jnz .L001maw_loop
.L000maw_finish:
# 32(%esp) is num again (offset includes the pushl %ecx above)
movl 32(%esp), %ecx
andl $7, %ecx
jnz .L002maw_finish2
jmp .L003maw_end
.align 16
.L002maw_finish2:
# Tail Round 0
movl (%ebx), %eax
mull %ebp
addl %esi, %eax
movl (%edi), %esi
adcl $0, %edx
addl %esi, %eax
adcl $0, %edx
decl %ecx
movl %eax, (%edi)
movl %edx, %esi
jz .L003maw_end
# Tail Round 1
movl 4(%ebx), %eax
mull %ebp
addl %esi, %eax
movl 4(%edi), %esi
adcl $0, %edx
addl %esi, %eax
adcl $0, %edx
decl %ecx
movl %eax, 4(%edi)
movl %edx, %esi
jz .L003maw_end
# Tail Round 2
movl 8(%ebx), %eax
mull %ebp
addl %esi, %eax
movl 8(%edi), %esi
adcl $0, %edx
addl %esi, %eax
adcl $0, %edx
decl %ecx
movl %eax, 8(%edi)
movl %edx, %esi
jz .L003maw_end
# Tail Round 3
movl 12(%ebx), %eax
mull %ebp
addl %esi, %eax
movl 12(%edi), %esi
adcl $0, %edx
addl %esi, %eax
adcl $0, %edx
decl %ecx
movl %eax, 12(%edi)
movl %edx, %esi
jz .L003maw_end
# Tail Round 4
movl 16(%ebx), %eax
mull %ebp
addl %esi, %eax
movl 16(%edi), %esi
adcl $0, %edx
addl %esi, %eax
adcl $0, %edx
decl %ecx
movl %eax, 16(%edi)
movl %edx, %esi
jz .L003maw_end
# Tail Round 5
movl 20(%ebx), %eax
mull %ebp
addl %esi, %eax
movl 20(%edi), %esi
adcl $0, %edx
addl %esi, %eax
adcl $0, %edx
decl %ecx
movl %eax, 20(%edi)
movl %edx, %esi
jz .L003maw_end
# Tail Round 6
movl 24(%ebx), %eax
mull %ebp
addl %esi, %eax
movl 24(%edi), %esi
adcl $0, %edx
addl %esi, %eax
adcl $0, %edx
movl %eax, 24(%edi)
movl %edx, %esi
.L003maw_end:
# return carry in %eax
movl %esi, %eax
popl %ecx
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.bn_mul_add_words_end:
.size bn_mul_add_words,.bn_mul_add_words_end-bn_mul_add_words
.ident "bn_mul_add_words"
.text
.align 16
.globl bn_mul_words
.type bn_mul_words,@function
# BN_ULONG bn_mul_words(BN_ULONG *rp, BN_ULONG *ap, int num, BN_ULONG w)
# rp[i] = ap[i] * w for i in [0, num); returns the final carry word.
# 8x unrolled with a fall-through tail for num & 7; %esi holds the carry.
bn_mul_words:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
xorl %esi, %esi
movl 20(%esp), %edi
movl 24(%esp), %ebx
movl 28(%esp), %ebp
movl 32(%esp), %ecx
andl $4294967288, %ebp
jz .L004mw_finish
.L005mw_loop:
# Round 0
movl (%ebx), %eax
mull %ecx
addl %esi, %eax
adcl $0, %edx
movl %eax, (%edi)
movl %edx, %esi
# Round 4
movl 4(%ebx), %eax
mull %ecx
addl %esi, %eax
adcl $0, %edx
movl %eax, 4(%edi)
movl %edx, %esi
# Round 8
movl 8(%ebx), %eax
mull %ecx
addl %esi, %eax
adcl $0, %edx
movl %eax, 8(%edi)
movl %edx, %esi
# Round 12
movl 12(%ebx), %eax
mull %ecx
addl %esi, %eax
adcl $0, %edx
movl %eax, 12(%edi)
movl %edx, %esi
# Round 16
movl 16(%ebx), %eax
mull %ecx
addl %esi, %eax
adcl $0, %edx
movl %eax, 16(%edi)
movl %edx, %esi
# Round 20
movl 20(%ebx), %eax
mull %ecx
addl %esi, %eax
adcl $0, %edx
movl %eax, 20(%edi)
movl %edx, %esi
# Round 24
movl 24(%ebx), %eax
mull %ecx
addl %esi, %eax
adcl $0, %edx
movl %eax, 24(%edi)
movl %edx, %esi
# Round 28
movl 28(%ebx), %eax
mull %ecx
addl %esi, %eax
adcl $0, %edx
movl %eax, 28(%edi)
movl %edx, %esi
addl $32, %ebx
addl $32, %edi
subl $8, %ebp
# generator quirk: jz + jmp instead of a single jnz back-edge
jz .L004mw_finish
jmp .L005mw_loop
.L004mw_finish:
movl 28(%esp), %ebp
andl $7, %ebp
jnz .L006mw_finish2
jmp .L007mw_end
.align 16
.L006mw_finish2:
# Tail Round 0
movl (%ebx), %eax
mull %ecx
addl %esi, %eax
adcl $0, %edx
movl %eax, (%edi)
movl %edx, %esi
decl %ebp
jz .L007mw_end
# Tail Round 1
movl 4(%ebx), %eax
mull %ecx
addl %esi, %eax
adcl $0, %edx
movl %eax, 4(%edi)
movl %edx, %esi
decl %ebp
jz .L007mw_end
# Tail Round 2
movl 8(%ebx), %eax
mull %ecx
addl %esi, %eax
adcl $0, %edx
movl %eax, 8(%edi)
movl %edx, %esi
decl %ebp
jz .L007mw_end
# Tail Round 3
movl 12(%ebx), %eax
mull %ecx
addl %esi, %eax
adcl $0, %edx
movl %eax, 12(%edi)
movl %edx, %esi
decl %ebp
jz .L007mw_end
# Tail Round 4
movl 16(%ebx), %eax
mull %ecx
addl %esi, %eax
adcl $0, %edx
movl %eax, 16(%edi)
movl %edx, %esi
decl %ebp
jz .L007mw_end
# Tail Round 5
movl 20(%ebx), %eax
mull %ecx
addl %esi, %eax
adcl $0, %edx
movl %eax, 20(%edi)
movl %edx, %esi
decl %ebp
jz .L007mw_end
# Tail Round 6
movl 24(%ebx), %eax
mull %ecx
addl %esi, %eax
adcl $0, %edx
movl %eax, 24(%edi)
movl %edx, %esi
.L007mw_end:
# return carry in %eax
movl %esi, %eax
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.bn_mul_words_end:
.size bn_mul_words,.bn_mul_words_end-bn_mul_words
.ident "bn_mul_words"
.text
.align 16
.globl bn_sqr_words
.type bn_sqr_words,@function
# void bn_sqr_words(BN_ULONG *r, BN_ULONG *a, int n)
# r[2i], r[2i+1] = low/high words of a[i]^2 for i in [0, n).
# Note r must hold 2*n words.  8x unrolled; no carry is propagated
# between elements (each square is independent).
bn_sqr_words:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 20(%esp), %esi
movl 24(%esp), %edi
movl 28(%esp), %ebx
andl $4294967288, %ebx
jz .L008sw_finish
.L009sw_loop:
# Round 0
movl (%edi), %eax
mull %eax
movl %eax, (%esi)
movl %edx, 4(%esi)
# Round 4
movl 4(%edi), %eax
mull %eax
movl %eax, 8(%esi)
movl %edx, 12(%esi)
# Round 8
movl 8(%edi), %eax
mull %eax
movl %eax, 16(%esi)
movl %edx, 20(%esi)
# Round 12
movl 12(%edi), %eax
mull %eax
movl %eax, 24(%esi)
movl %edx, 28(%esi)
# Round 16
movl 16(%edi), %eax
mull %eax
movl %eax, 32(%esi)
movl %edx, 36(%esi)
# Round 20
movl 20(%edi), %eax
mull %eax
movl %eax, 40(%esi)
movl %edx, 44(%esi)
# Round 24
movl 24(%edi), %eax
mull %eax
movl %eax, 48(%esi)
movl %edx, 52(%esi)
# Round 28
movl 28(%edi), %eax
mull %eax
movl %eax, 56(%esi)
movl %edx, 60(%esi)
# output pointer advances twice as fast as the input pointer
addl $32, %edi
addl $64, %esi
subl $8, %ebx
jnz .L009sw_loop
.L008sw_finish:
movl 28(%esp), %ebx
andl $7, %ebx
jz .L010sw_end
# Tail Round 0
movl (%edi), %eax
mull %eax
movl %eax, (%esi)
decl %ebx
movl %edx, 4(%esi)
jz .L010sw_end
# Tail Round 1
movl 4(%edi), %eax
mull %eax
movl %eax, 8(%esi)
decl %ebx
movl %edx, 12(%esi)
jz .L010sw_end
# Tail Round 2
movl 8(%edi), %eax
mull %eax
movl %eax, 16(%esi)
decl %ebx
movl %edx, 20(%esi)
jz .L010sw_end
# Tail Round 3
movl 12(%edi), %eax
mull %eax
movl %eax, 24(%esi)
decl %ebx
movl %edx, 28(%esi)
jz .L010sw_end
# Tail Round 4
movl 16(%edi), %eax
mull %eax
movl %eax, 32(%esi)
decl %ebx
movl %edx, 36(%esi)
jz .L010sw_end
# Tail Round 5
movl 20(%edi), %eax
mull %eax
movl %eax, 40(%esi)
decl %ebx
movl %edx, 44(%esi)
jz .L010sw_end
# Tail Round 6
movl 24(%edi), %eax
mull %eax
movl %eax, 48(%esi)
movl %edx, 52(%esi)
.L010sw_end:
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.bn_sqr_words_end:
.size bn_sqr_words,.bn_sqr_words_end-bn_sqr_words
.ident "bn_sqr_words"
.text
.align 16
.globl bn_div_words
.type bn_div_words,@function
# BN_ULONG bn_div_words(BN_ULONG h, BN_ULONG l, BN_ULONG d)
# Returns the 32-bit quotient of the 64-bit value (h:l) divided by d.
# Precondition (caller's responsibility): d != 0 and h < d, otherwise
# divl raises #DE (divide error / quotient overflow).  The remainder
# left in %edx is discarded.
bn_div_words:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 20(%esp), %edx
movl 24(%esp), %eax
movl 28(%esp), %ebx
divl %ebx
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.bn_div_words_end:
.size bn_div_words,.bn_div_words_end-bn_div_words
.ident "bn_div_words"
.text
.align 16
.globl bn_add_words
.type bn_add_words,@function
# BN_ULONG bn_add_words(BN_ULONG *r, BN_ULONG *a, BN_ULONG *b, int n)
# r[i] = a[i] + b[i] + carry for i in [0, n); returns the final carry
# (0 or 1) in %eax.  The "movl $0, %eax; adcl %eax, %eax" pair captures
# CF into %eax without disturbing it (mov does not touch flags).
bn_add_words:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 20(%esp), %ebx
movl 24(%esp), %esi
movl 28(%esp), %edi
movl 32(%esp), %ebp
xorl %eax, %eax
andl $4294967288, %ebp
jz .L011aw_finish
.L012aw_loop:
# Round 0
movl (%esi), %ecx
movl (%edi), %edx
addl %eax, %ecx
movl $0, %eax
adcl %eax, %eax
addl %edx, %ecx
adcl $0, %eax
movl %ecx, (%ebx)
# Round 1
movl 4(%esi), %ecx
movl 4(%edi), %edx
addl %eax, %ecx
movl $0, %eax
adcl %eax, %eax
addl %edx, %ecx
adcl $0, %eax
movl %ecx, 4(%ebx)
# Round 2
movl 8(%esi), %ecx
movl 8(%edi), %edx
addl %eax, %ecx
movl $0, %eax
adcl %eax, %eax
addl %edx, %ecx
adcl $0, %eax
movl %ecx, 8(%ebx)
# Round 3
movl 12(%esi), %ecx
movl 12(%edi), %edx
addl %eax, %ecx
movl $0, %eax
adcl %eax, %eax
addl %edx, %ecx
adcl $0, %eax
movl %ecx, 12(%ebx)
# Round 4
movl 16(%esi), %ecx
movl 16(%edi), %edx
addl %eax, %ecx
movl $0, %eax
adcl %eax, %eax
addl %edx, %ecx
adcl $0, %eax
movl %ecx, 16(%ebx)
# Round 5
movl 20(%esi), %ecx
movl 20(%edi), %edx
addl %eax, %ecx
movl $0, %eax
adcl %eax, %eax
addl %edx, %ecx
adcl $0, %eax
movl %ecx, 20(%ebx)
# Round 6
movl 24(%esi), %ecx
movl 24(%edi), %edx
addl %eax, %ecx
movl $0, %eax
adcl %eax, %eax
addl %edx, %ecx
adcl $0, %eax
movl %ecx, 24(%ebx)
# Round 7
movl 28(%esi), %ecx
movl 28(%edi), %edx
addl %eax, %ecx
movl $0, %eax
adcl %eax, %eax
addl %edx, %ecx
adcl $0, %eax
movl %ecx, 28(%ebx)
addl $32, %esi
addl $32, %edi
addl $32, %ebx
subl $8, %ebp
jnz .L012aw_loop
.L011aw_finish:
movl 32(%esp), %ebp
andl $7, %ebp
jz .L013aw_end
# Tail Round 0
movl (%esi), %ecx
movl (%edi), %edx
addl %eax, %ecx
movl $0, %eax
adcl %eax, %eax
addl %edx, %ecx
adcl $0, %eax
decl %ebp
movl %ecx, (%ebx)
jz .L013aw_end
# Tail Round 1
movl 4(%esi), %ecx
movl 4(%edi), %edx
addl %eax, %ecx
movl $0, %eax
adcl %eax, %eax
addl %edx, %ecx
adcl $0, %eax
decl %ebp
movl %ecx, 4(%ebx)
jz .L013aw_end
# Tail Round 2
movl 8(%esi), %ecx
movl 8(%edi), %edx
addl %eax, %ecx
movl $0, %eax
adcl %eax, %eax
addl %edx, %ecx
adcl $0, %eax
decl %ebp
movl %ecx, 8(%ebx)
jz .L013aw_end
# Tail Round 3
movl 12(%esi), %ecx
movl 12(%edi), %edx
addl %eax, %ecx
movl $0, %eax
adcl %eax, %eax
addl %edx, %ecx
adcl $0, %eax
decl %ebp
movl %ecx, 12(%ebx)
jz .L013aw_end
# Tail Round 4
movl 16(%esi), %ecx
movl 16(%edi), %edx
addl %eax, %ecx
movl $0, %eax
adcl %eax, %eax
addl %edx, %ecx
adcl $0, %eax
decl %ebp
movl %ecx, 16(%ebx)
jz .L013aw_end
# Tail Round 5
movl 20(%esi), %ecx
movl 20(%edi), %edx
addl %eax, %ecx
movl $0, %eax
adcl %eax, %eax
addl %edx, %ecx
adcl $0, %eax
decl %ebp
movl %ecx, 20(%ebx)
jz .L013aw_end
# Tail Round 6
movl 24(%esi), %ecx
movl 24(%edi), %edx
addl %eax, %ecx
movl $0, %eax
adcl %eax, %eax
addl %edx, %ecx
adcl $0, %eax
movl %ecx, 24(%ebx)
.L013aw_end:
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.bn_add_words_end:
.size bn_add_words,.bn_add_words_end-bn_add_words
.ident "bn_add_words"
.text
.align 16
.globl bn_sub_words
.type bn_sub_words,@function
# BN_ULONG bn_sub_words(BN_ULONG *r, BN_ULONG *a, BN_ULONG *b, int n)
# r[i] = a[i] - b[i] - borrow for i in [0, n); returns the final borrow
# (0 or 1) in %eax.  Structure mirrors bn_add_words with subl in place
# of addl; the "movl $0, %eax; adcl %eax, %eax" pair captures CF.
bn_sub_words:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 20(%esp), %ebx
movl 24(%esp), %esi
movl 28(%esp), %edi
movl 32(%esp), %ebp
xorl %eax, %eax
andl $4294967288, %ebp
jz .L014aw_finish
.L015aw_loop:
# Round 0
movl (%esi), %ecx
movl (%edi), %edx
subl %eax, %ecx
movl $0, %eax
adcl %eax, %eax
subl %edx, %ecx
adcl $0, %eax
movl %ecx, (%ebx)
# Round 1
movl 4(%esi), %ecx
movl 4(%edi), %edx
subl %eax, %ecx
movl $0, %eax
adcl %eax, %eax
subl %edx, %ecx
adcl $0, %eax
movl %ecx, 4(%ebx)
# Round 2
movl 8(%esi), %ecx
movl 8(%edi), %edx
subl %eax, %ecx
movl $0, %eax
adcl %eax, %eax
subl %edx, %ecx
adcl $0, %eax
movl %ecx, 8(%ebx)
# Round 3
movl 12(%esi), %ecx
movl 12(%edi), %edx
subl %eax, %ecx
movl $0, %eax
adcl %eax, %eax
subl %edx, %ecx
adcl $0, %eax
movl %ecx, 12(%ebx)
# Round 4
movl 16(%esi), %ecx
movl 16(%edi), %edx
subl %eax, %ecx
movl $0, %eax
adcl %eax, %eax
subl %edx, %ecx
adcl $0, %eax
movl %ecx, 16(%ebx)
# Round 5
movl 20(%esi), %ecx
movl 20(%edi), %edx
subl %eax, %ecx
movl $0, %eax
adcl %eax, %eax
subl %edx, %ecx
adcl $0, %eax
movl %ecx, 20(%ebx)
# Round 6
movl 24(%esi), %ecx
movl 24(%edi), %edx
subl %eax, %ecx
movl $0, %eax
adcl %eax, %eax
subl %edx, %ecx
adcl $0, %eax
movl %ecx, 24(%ebx)
# Round 7
movl 28(%esi), %ecx
movl 28(%edi), %edx
subl %eax, %ecx
movl $0, %eax
adcl %eax, %eax
subl %edx, %ecx
adcl $0, %eax
movl %ecx, 28(%ebx)
addl $32, %esi
addl $32, %edi
addl $32, %ebx
subl $8, %ebp
jnz .L015aw_loop
.L014aw_finish:
movl 32(%esp), %ebp
andl $7, %ebp
jz .L016aw_end
# Tail Round 0
movl (%esi), %ecx
movl (%edi), %edx
subl %eax, %ecx
movl $0, %eax
adcl %eax, %eax
subl %edx, %ecx
adcl $0, %eax
decl %ebp
movl %ecx, (%ebx)
jz .L016aw_end
# Tail Round 1
movl 4(%esi), %ecx
movl 4(%edi), %edx
subl %eax, %ecx
movl $0, %eax
adcl %eax, %eax
subl %edx, %ecx
adcl $0, %eax
decl %ebp
movl %ecx, 4(%ebx)
jz .L016aw_end
# Tail Round 2
movl 8(%esi), %ecx
movl 8(%edi), %edx
subl %eax, %ecx
movl $0, %eax
adcl %eax, %eax
subl %edx, %ecx
adcl $0, %eax
decl %ebp
movl %ecx, 8(%ebx)
jz .L016aw_end
# Tail Round 3
movl 12(%esi), %ecx
movl 12(%edi), %edx
subl %eax, %ecx
movl $0, %eax
adcl %eax, %eax
subl %edx, %ecx
adcl $0, %eax
decl %ebp
movl %ecx, 12(%ebx)
jz .L016aw_end
# Tail Round 4
movl 16(%esi), %ecx
movl 16(%edi), %edx
subl %eax, %ecx
movl $0, %eax
adcl %eax, %eax
subl %edx, %ecx
adcl $0, %eax
decl %ebp
movl %ecx, 16(%ebx)
jz .L016aw_end
# Tail Round 5
movl 20(%esi), %ecx
movl 20(%edi), %edx
subl %eax, %ecx
movl $0, %eax
adcl %eax, %eax
subl %edx, %ecx
adcl $0, %eax
decl %ebp
movl %ecx, 20(%ebx)
jz .L016aw_end
# Tail Round 6
movl 24(%esi), %ecx
movl 24(%edi), %edx
subl %eax, %ecx
movl $0, %eax
adcl %eax, %eax
subl %edx, %ecx
adcl $0, %eax
movl %ecx, 24(%ebx)
.L016aw_end:
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.bn_sub_words_end:
.size bn_sub_words,.bn_sub_words_end-bn_sub_words
.ident "bn_sub_words"

View File

@ -0,0 +1,971 @@
# $FreeBSD$
# Dont even think of reading this code
# It was automatically generated by cast-586.pl
# Which is a perl program used to generate the x86 assember for
# any of elf, a.out, BSDI, Win32, gaswin (for GNU as on Win32) or Solaris
# eric <eay@cryptsoft.com>
.file "cast-586.s"
.version "01.01"
gcc2_compiled.:
.text
.align 16
.globl CAST_encrypt
.type CAST_encrypt,@function
# void CAST_encrypt(CAST_LONG *data, CAST_KEY *key)
# 16-round CAST-128 encryption of the two 32-bit words at data[0..1].
# Round types alternate in the pattern F1/F2/F3:
#   F1 (rounds 0,3,6,9,12,15):  key mix addl;  t = ((S0^S1)-S2)+S3
#   F2 (rounds 1,4,7,10,13):    key mix xorl;  t = ((S0-S1)+S2)^S3
#   F3 (rounds 2,5,8,11,14):    key mix subl;  t = ((S0+S1)^S2)-S3
# key+128 is the short-key flag: when nonzero (key <= 80 bits) rounds
# 12-15 are skipped.  The flag is pushed before the rounds and popped
# at the "test short key flag" check below.
CAST_encrypt:
pushl %ebp
pushl %ebx
movl 12(%esp), %ebx
movl 16(%esp), %ebp
pushl %esi
pushl %edi
# Load the 2 words
movl (%ebx), %edi
movl 4(%ebx), %esi
# Get short key flag
movl 128(%ebp), %eax
pushl %eax
xorl %eax, %eax
# round 0
movl (%ebp), %edx
movl 4(%ebp), %ecx
addl %esi, %edx
roll %cl, %edx
movl %edx, %ebx
xorl %ecx, %ecx
movb %dh, %cl
andl $255, %ebx
shrl $16, %edx
xorl %eax, %eax
movb %dh, %al
andl $255, %edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
xorl %ebx, %ecx
movl CAST_S_table2(,%eax,4),%ebx
subl %ebx, %ecx
movl CAST_S_table3(,%edx,4),%ebx
addl %ebx, %ecx
xorl %ecx, %edi
# round 1
movl 8(%ebp), %edx
movl 12(%ebp), %ecx
xorl %edi, %edx
roll %cl, %edx
movl %edx, %ebx
xorl %ecx, %ecx
movb %dh, %cl
andl $255, %ebx
shrl $16, %edx
xorl %eax, %eax
movb %dh, %al
andl $255, %edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
subl %ebx, %ecx
movl CAST_S_table2(,%eax,4),%ebx
addl %ebx, %ecx
movl CAST_S_table3(,%edx,4),%ebx
xorl %ebx, %ecx
xorl %ecx, %esi
# round 2
movl 16(%ebp), %edx
movl 20(%ebp), %ecx
subl %esi, %edx
roll %cl, %edx
movl %edx, %ebx
xorl %ecx, %ecx
movb %dh, %cl
andl $255, %ebx
shrl $16, %edx
xorl %eax, %eax
movb %dh, %al
andl $255, %edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
addl %ebx, %ecx
movl CAST_S_table2(,%eax,4),%ebx
xorl %ebx, %ecx
movl CAST_S_table3(,%edx,4),%ebx
subl %ebx, %ecx
xorl %ecx, %edi
# round 3
movl 24(%ebp), %edx
movl 28(%ebp), %ecx
addl %edi, %edx
roll %cl, %edx
movl %edx, %ebx
xorl %ecx, %ecx
movb %dh, %cl
andl $255, %ebx
shrl $16, %edx
xorl %eax, %eax
movb %dh, %al
andl $255, %edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
xorl %ebx, %ecx
movl CAST_S_table2(,%eax,4),%ebx
subl %ebx, %ecx
movl CAST_S_table3(,%edx,4),%ebx
addl %ebx, %ecx
xorl %ecx, %esi
# round 4
movl 32(%ebp), %edx
movl 36(%ebp), %ecx
xorl %esi, %edx
roll %cl, %edx
movl %edx, %ebx
xorl %ecx, %ecx
movb %dh, %cl
andl $255, %ebx
shrl $16, %edx
xorl %eax, %eax
movb %dh, %al
andl $255, %edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
subl %ebx, %ecx
movl CAST_S_table2(,%eax,4),%ebx
addl %ebx, %ecx
movl CAST_S_table3(,%edx,4),%ebx
xorl %ebx, %ecx
xorl %ecx, %edi
# round 5
movl 40(%ebp), %edx
movl 44(%ebp), %ecx
subl %edi, %edx
roll %cl, %edx
movl %edx, %ebx
xorl %ecx, %ecx
movb %dh, %cl
andl $255, %ebx
shrl $16, %edx
xorl %eax, %eax
movb %dh, %al
andl $255, %edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
addl %ebx, %ecx
movl CAST_S_table2(,%eax,4),%ebx
xorl %ebx, %ecx
movl CAST_S_table3(,%edx,4),%ebx
subl %ebx, %ecx
xorl %ecx, %esi
# round 6
movl 48(%ebp), %edx
movl 52(%ebp), %ecx
addl %esi, %edx
roll %cl, %edx
movl %edx, %ebx
xorl %ecx, %ecx
movb %dh, %cl
andl $255, %ebx
shrl $16, %edx
xorl %eax, %eax
movb %dh, %al
andl $255, %edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
xorl %ebx, %ecx
movl CAST_S_table2(,%eax,4),%ebx
subl %ebx, %ecx
movl CAST_S_table3(,%edx,4),%ebx
addl %ebx, %ecx
xorl %ecx, %edi
# round 7
movl 56(%ebp), %edx
movl 60(%ebp), %ecx
xorl %edi, %edx
roll %cl, %edx
movl %edx, %ebx
xorl %ecx, %ecx
movb %dh, %cl
andl $255, %ebx
shrl $16, %edx
xorl %eax, %eax
movb %dh, %al
andl $255, %edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
subl %ebx, %ecx
movl CAST_S_table2(,%eax,4),%ebx
addl %ebx, %ecx
movl CAST_S_table3(,%edx,4),%ebx
xorl %ebx, %ecx
xorl %ecx, %esi
# round 8
movl 64(%ebp), %edx
movl 68(%ebp), %ecx
subl %esi, %edx
roll %cl, %edx
movl %edx, %ebx
xorl %ecx, %ecx
movb %dh, %cl
andl $255, %ebx
shrl $16, %edx
xorl %eax, %eax
movb %dh, %al
andl $255, %edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
addl %ebx, %ecx
movl CAST_S_table2(,%eax,4),%ebx
xorl %ebx, %ecx
movl CAST_S_table3(,%edx,4),%ebx
subl %ebx, %ecx
xorl %ecx, %edi
# round 9
movl 72(%ebp), %edx
movl 76(%ebp), %ecx
addl %edi, %edx
roll %cl, %edx
movl %edx, %ebx
xorl %ecx, %ecx
movb %dh, %cl
andl $255, %ebx
shrl $16, %edx
xorl %eax, %eax
movb %dh, %al
andl $255, %edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
xorl %ebx, %ecx
movl CAST_S_table2(,%eax,4),%ebx
subl %ebx, %ecx
movl CAST_S_table3(,%edx,4),%ebx
addl %ebx, %ecx
xorl %ecx, %esi
# round 10
movl 80(%ebp), %edx
movl 84(%ebp), %ecx
xorl %esi, %edx
roll %cl, %edx
movl %edx, %ebx
xorl %ecx, %ecx
movb %dh, %cl
andl $255, %ebx
shrl $16, %edx
xorl %eax, %eax
movb %dh, %al
andl $255, %edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
subl %ebx, %ecx
movl CAST_S_table2(,%eax,4),%ebx
addl %ebx, %ecx
movl CAST_S_table3(,%edx,4),%ebx
xorl %ebx, %ecx
xorl %ecx, %edi
# round 11
movl 88(%ebp), %edx
movl 92(%ebp), %ecx
subl %edi, %edx
roll %cl, %edx
movl %edx, %ebx
xorl %ecx, %ecx
movb %dh, %cl
andl $255, %ebx
shrl $16, %edx
xorl %eax, %eax
movb %dh, %al
andl $255, %edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
addl %ebx, %ecx
movl CAST_S_table2(,%eax,4),%ebx
xorl %ebx, %ecx
movl CAST_S_table3(,%edx,4),%ebx
subl %ebx, %ecx
xorl %ecx, %esi
# test short key flag
# (flag pushed at function entry; nonzero means 12-round variant)
popl %edx
orl %edx, %edx
jnz .L000cast_enc_done
# round 12
movl 96(%ebp), %edx
movl 100(%ebp), %ecx
addl %esi, %edx
roll %cl, %edx
movl %edx, %ebx
xorl %ecx, %ecx
movb %dh, %cl
andl $255, %ebx
shrl $16, %edx
xorl %eax, %eax
movb %dh, %al
andl $255, %edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
xorl %ebx, %ecx
movl CAST_S_table2(,%eax,4),%ebx
subl %ebx, %ecx
movl CAST_S_table3(,%edx,4),%ebx
addl %ebx, %ecx
xorl %ecx, %edi
# round 13
movl 104(%ebp), %edx
movl 108(%ebp), %ecx
xorl %edi, %edx
roll %cl, %edx
movl %edx, %ebx
xorl %ecx, %ecx
movb %dh, %cl
andl $255, %ebx
shrl $16, %edx
xorl %eax, %eax
movb %dh, %al
andl $255, %edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
subl %ebx, %ecx
movl CAST_S_table2(,%eax,4),%ebx
addl %ebx, %ecx
movl CAST_S_table3(,%edx,4),%ebx
xorl %ebx, %ecx
xorl %ecx, %esi
# round 14
movl 112(%ebp), %edx
movl 116(%ebp), %ecx
subl %esi, %edx
roll %cl, %edx
movl %edx, %ebx
xorl %ecx, %ecx
movb %dh, %cl
andl $255, %ebx
shrl $16, %edx
xorl %eax, %eax
movb %dh, %al
andl $255, %edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
addl %ebx, %ecx
movl CAST_S_table2(,%eax,4),%ebx
xorl %ebx, %ecx
movl CAST_S_table3(,%edx,4),%ebx
subl %ebx, %ecx
xorl %ecx, %edi
# round 15
movl 120(%ebp), %edx
movl 124(%ebp), %ecx
addl %edi, %edx
roll %cl, %edx
movl %edx, %ebx
xorl %ecx, %ecx
movb %dh, %cl
andl $255, %ebx
shrl $16, %edx
xorl %eax, %eax
movb %dh, %al
andl $255, %edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
xorl %ebx, %ecx
movl CAST_S_table2(,%eax,4),%ebx
subl %ebx, %ecx
movl CAST_S_table3(,%edx,4),%ebx
addl %ebx, %ecx
xorl %ecx, %esi
.L000cast_enc_done:
nop
# store halves swapped: data[0] = R (esi), data[1] = L (edi)
movl 20(%esp), %eax
movl %edi, 4(%eax)
movl %esi, (%eax)
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.CAST_encrypt_end:
.size CAST_encrypt,.CAST_encrypt_end-CAST_encrypt
.ident "CAST_encrypt"
.text
.align 16
.globl CAST_decrypt
.type CAST_decrypt,@function
CAST_decrypt:
pushl %ebp
pushl %ebx
movl 12(%esp), %ebx
movl 16(%esp), %ebp
pushl %esi
pushl %edi
# Load the 2 words
movl (%ebx), %edi
movl 4(%ebx), %esi
# Get short key flag
movl 128(%ebp), %eax
orl %eax, %eax
jnz .L001cast_dec_skip
xorl %eax, %eax
# round 15
movl 120(%ebp), %edx
movl 124(%ebp), %ecx
addl %esi, %edx
roll %cl, %edx
movl %edx, %ebx
xorl %ecx, %ecx
movb %dh, %cl
andl $255, %ebx
shrl $16, %edx
xorl %eax, %eax
movb %dh, %al
andl $255, %edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
xorl %ebx, %ecx
movl CAST_S_table2(,%eax,4),%ebx
subl %ebx, %ecx
movl CAST_S_table3(,%edx,4),%ebx
addl %ebx, %ecx
xorl %ecx, %edi
# round 14
movl 112(%ebp), %edx
movl 116(%ebp), %ecx
subl %edi, %edx
roll %cl, %edx
movl %edx, %ebx
xorl %ecx, %ecx
movb %dh, %cl
andl $255, %ebx
shrl $16, %edx
xorl %eax, %eax
movb %dh, %al
andl $255, %edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
addl %ebx, %ecx
movl CAST_S_table2(,%eax,4),%ebx
xorl %ebx, %ecx
movl CAST_S_table3(,%edx,4),%ebx
subl %ebx, %ecx
xorl %ecx, %esi
# round 13
movl 104(%ebp), %edx
movl 108(%ebp), %ecx
xorl %esi, %edx
roll %cl, %edx
movl %edx, %ebx
xorl %ecx, %ecx
movb %dh, %cl
andl $255, %ebx
shrl $16, %edx
xorl %eax, %eax
movb %dh, %al
andl $255, %edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
subl %ebx, %ecx
movl CAST_S_table2(,%eax,4),%ebx
addl %ebx, %ecx
movl CAST_S_table3(,%edx,4),%ebx
xorl %ebx, %ecx
xorl %ecx, %edi
# round 12
movl 96(%ebp), %edx
movl 100(%ebp), %ecx
addl %edi, %edx
roll %cl, %edx
movl %edx, %ebx
xorl %ecx, %ecx
movb %dh, %cl
andl $255, %ebx
shrl $16, %edx
xorl %eax, %eax
movb %dh, %al
andl $255, %edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
xorl %ebx, %ecx
movl CAST_S_table2(,%eax,4),%ebx
subl %ebx, %ecx
movl CAST_S_table3(,%edx,4),%ebx
addl %ebx, %ecx
xorl %ecx, %esi
.L001cast_dec_skip:
# round 11
movl 88(%ebp), %edx
movl 92(%ebp), %ecx
subl %esi, %edx
roll %cl, %edx
movl %edx, %ebx
xorl %ecx, %ecx
movb %dh, %cl
andl $255, %ebx
shrl $16, %edx
xorl %eax, %eax
movb %dh, %al
andl $255, %edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
addl %ebx, %ecx
movl CAST_S_table2(,%eax,4),%ebx
xorl %ebx, %ecx
movl CAST_S_table3(,%edx,4),%ebx
subl %ebx, %ecx
xorl %ecx, %edi
# round 10
movl 80(%ebp), %edx
movl 84(%ebp), %ecx
xorl %edi, %edx
roll %cl, %edx
movl %edx, %ebx
xorl %ecx, %ecx
movb %dh, %cl
andl $255, %ebx
shrl $16, %edx
xorl %eax, %eax
movb %dh, %al
andl $255, %edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
subl %ebx, %ecx
movl CAST_S_table2(,%eax,4),%ebx
addl %ebx, %ecx
movl CAST_S_table3(,%edx,4),%ebx
xorl %ebx, %ecx
xorl %ecx, %esi
# round 9
movl 72(%ebp), %edx
movl 76(%ebp), %ecx
addl %esi, %edx
roll %cl, %edx
movl %edx, %ebx
xorl %ecx, %ecx
movb %dh, %cl
andl $255, %ebx
shrl $16, %edx
xorl %eax, %eax
movb %dh, %al
andl $255, %edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
xorl %ebx, %ecx
movl CAST_S_table2(,%eax,4),%ebx
subl %ebx, %ecx
movl CAST_S_table3(,%edx,4),%ebx
addl %ebx, %ecx
xorl %ecx, %edi
# round 8
movl 64(%ebp), %edx
movl 68(%ebp), %ecx
subl %edi, %edx
roll %cl, %edx
movl %edx, %ebx
xorl %ecx, %ecx
movb %dh, %cl
andl $255, %ebx
shrl $16, %edx
xorl %eax, %eax
movb %dh, %al
andl $255, %edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
addl %ebx, %ecx
movl CAST_S_table2(,%eax,4),%ebx
xorl %ebx, %ecx
movl CAST_S_table3(,%edx,4),%ebx
subl %ebx, %ecx
xorl %ecx, %esi
# round 7
movl 56(%ebp), %edx
movl 60(%ebp), %ecx
xorl %esi, %edx
roll %cl, %edx
movl %edx, %ebx
xorl %ecx, %ecx
movb %dh, %cl
andl $255, %ebx
shrl $16, %edx
xorl %eax, %eax
movb %dh, %al
andl $255, %edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
subl %ebx, %ecx
movl CAST_S_table2(,%eax,4),%ebx
addl %ebx, %ecx
movl CAST_S_table3(,%edx,4),%ebx
xorl %ebx, %ecx
xorl %ecx, %edi
# round 6
movl 48(%ebp), %edx
movl 52(%ebp), %ecx
addl %edi, %edx
roll %cl, %edx
movl %edx, %ebx
xorl %ecx, %ecx
movb %dh, %cl
andl $255, %ebx
shrl $16, %edx
xorl %eax, %eax
movb %dh, %al
andl $255, %edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
xorl %ebx, %ecx
movl CAST_S_table2(,%eax,4),%ebx
subl %ebx, %ecx
movl CAST_S_table3(,%edx,4),%ebx
addl %ebx, %ecx
xorl %ecx, %esi
# round 5
movl 40(%ebp), %edx
movl 44(%ebp), %ecx
subl %esi, %edx
roll %cl, %edx
movl %edx, %ebx
xorl %ecx, %ecx
movb %dh, %cl
andl $255, %ebx
shrl $16, %edx
xorl %eax, %eax
movb %dh, %al
andl $255, %edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
addl %ebx, %ecx
movl CAST_S_table2(,%eax,4),%ebx
xorl %ebx, %ecx
movl CAST_S_table3(,%edx,4),%ebx
subl %ebx, %ecx
xorl %ecx, %edi
# round 4
movl 32(%ebp), %edx
movl 36(%ebp), %ecx
xorl %edi, %edx
roll %cl, %edx
movl %edx, %ebx
xorl %ecx, %ecx
movb %dh, %cl
andl $255, %ebx
shrl $16, %edx
xorl %eax, %eax
movb %dh, %al
andl $255, %edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
subl %ebx, %ecx
movl CAST_S_table2(,%eax,4),%ebx
addl %ebx, %ecx
movl CAST_S_table3(,%edx,4),%ebx
xorl %ebx, %ecx
xorl %ecx, %esi
# round 3
movl 24(%ebp), %edx
movl 28(%ebp), %ecx
addl %esi, %edx
roll %cl, %edx
movl %edx, %ebx
xorl %ecx, %ecx
movb %dh, %cl
andl $255, %ebx
shrl $16, %edx
xorl %eax, %eax
movb %dh, %al
andl $255, %edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
xorl %ebx, %ecx
movl CAST_S_table2(,%eax,4),%ebx
subl %ebx, %ecx
movl CAST_S_table3(,%edx,4),%ebx
addl %ebx, %ecx
xorl %ecx, %edi
# round 2
movl 16(%ebp), %edx
movl 20(%ebp), %ecx
subl %edi, %edx
roll %cl, %edx
movl %edx, %ebx
xorl %ecx, %ecx
movb %dh, %cl
andl $255, %ebx
shrl $16, %edx
xorl %eax, %eax
movb %dh, %al
andl $255, %edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
addl %ebx, %ecx
movl CAST_S_table2(,%eax,4),%ebx
xorl %ebx, %ecx
movl CAST_S_table3(,%edx,4),%ebx
subl %ebx, %ecx
xorl %ecx, %esi
# round 1
movl 8(%ebp), %edx
movl 12(%ebp), %ecx
xorl %esi, %edx
roll %cl, %edx
movl %edx, %ebx
xorl %ecx, %ecx
movb %dh, %cl
andl $255, %ebx
shrl $16, %edx
xorl %eax, %eax
movb %dh, %al
andl $255, %edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
subl %ebx, %ecx
movl CAST_S_table2(,%eax,4),%ebx
addl %ebx, %ecx
movl CAST_S_table3(,%edx,4),%ebx
xorl %ebx, %ecx
xorl %ecx, %edi
# round 0
movl (%ebp), %edx
movl 4(%ebp), %ecx
addl %edi, %edx
roll %cl, %edx
movl %edx, %ebx
xorl %ecx, %ecx
movb %dh, %cl
andl $255, %ebx
shrl $16, %edx
xorl %eax, %eax
movb %dh, %al
andl $255, %edx
movl CAST_S_table0(,%ecx,4),%ecx
movl CAST_S_table1(,%ebx,4),%ebx
xorl %ebx, %ecx
movl CAST_S_table2(,%eax,4),%ebx
subl %ebx, %ecx
movl CAST_S_table3(,%edx,4),%ebx
addl %ebx, %ecx
xorl %ecx, %esi
nop
movl 20(%esp), %eax
movl %edi, 4(%eax)
movl %esi, (%eax)
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.CAST_decrypt_end:
.size CAST_decrypt,.CAST_decrypt_end-CAST_decrypt
.ident "CAST_decrypt"
.text
.align 16
.globl CAST_cbc_encrypt
.type CAST_cbc_encrypt,@function
# CAST_cbc_encrypt(in, out, length, key_schedule, ivec, enc) -- cdecl.
# Machine-generated i386 code (OpenSSL cbc.pl): CBC-chains 8-byte blocks
# through CAST_encrypt/CAST_decrypt and writes the updated IV back out.
CAST_cbc_encrypt:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
# %ebp = length (parameter 3)
movl 28(%esp), %ebp
# getting iv ptr from parameter 4
movl 36(%esp), %ebx
movl (%ebx), %esi
movl 4(%ebx), %edi
# keep two copies of the IV on the stack: low copy is the scratch block
# handed to the cipher, high copy carries the chaining value
pushl %edi
pushl %esi
pushl %edi
pushl %esi
movl %esp, %ebx
movl 36(%esp), %esi
movl 40(%esp), %edi
# getting encrypt flag from parameter 5
movl 56(%esp), %ecx
# get and push parameter 3
movl 48(%esp), %eax
pushl %eax
pushl %ebx
cmpl $0, %ecx
jz .L002decrypt
# round length down to a whole number of 8-byte blocks
andl $4294967288, %ebp
movl 8(%esp), %eax
movl 12(%esp), %ebx
jz .L003encrypt_finish
.L004encrypt_loop:
movl (%esi), %ecx
movl 4(%esi), %edx
xorl %ecx, %eax
xorl %edx, %ebx
.byte 15
.byte 200 # bswapl %eax
.byte 15
.byte 203 # bswapl %ebx
movl %eax, 8(%esp)
movl %ebx, 12(%esp)
call CAST_encrypt
movl 8(%esp), %eax
movl 12(%esp), %ebx
.byte 15
.byte 200 # bswapl %eax
.byte 15
.byte 203 # bswapl %ebx
movl %eax, (%edi)
movl %ebx, 4(%edi)
addl $8, %esi
addl $8, %edi
subl $8, %ebp
jnz .L004encrypt_loop
.L003encrypt_finish:
# 1-7 byte tail: dispatch on the remainder to load just that many input
# bytes (zero-padded) before one last encrypt
movl 52(%esp), %ebp
andl $7, %ebp
jz .L005finish
xorl %ecx, %ecx
xorl %edx, %edx
movl .L006cbc_enc_jmp_table(,%ebp,4),%ebp
jmp *%ebp
.L007ej7:
xorl %edx, %edx
movb 6(%esi), %dh
sall $8, %edx
.L008ej6:
movb 5(%esi), %dh
.L009ej5:
movb 4(%esi), %dl
.L010ej4:
movl (%esi), %ecx
jmp .L011ejend
.L012ej3:
movb 2(%esi), %ch
# NOTE(review): this clear follows the load above and so wipes the byte
# just placed in %ch; in canonical cbc.pl output the register is zeroed
# before the table jump -- verify against the upstream cast-586 output.
xorl %ecx, %ecx
sall $8, %ecx
.L013ej2:
movb 1(%esi), %ch
.L014ej1:
movb (%esi), %cl
.L011ejend:
xorl %ecx, %eax
xorl %edx, %ebx
.byte 15
.byte 200 # bswapl %eax
.byte 15
.byte 203 # bswapl %ebx
movl %eax, 8(%esp)
movl %ebx, 12(%esp)
call CAST_encrypt
movl 8(%esp), %eax
movl 12(%esp), %ebx
.byte 15
.byte 200 # bswapl %eax
.byte 15
.byte 203 # bswapl %ebx
movl %eax, (%edi)
movl %ebx, 4(%edi)
jmp .L005finish
.align 16
.L002decrypt:
andl $4294967288, %ebp
movl 16(%esp), %eax
movl 20(%esp), %ebx
jz .L015decrypt_finish
.L016decrypt_loop:
movl (%esi), %eax
movl 4(%esi), %ebx
.byte 15
.byte 200 # bswapl %eax
.byte 15
.byte 203 # bswapl %ebx
movl %eax, 8(%esp)
movl %ebx, 12(%esp)
call CAST_decrypt
movl 8(%esp), %eax
movl 12(%esp), %ebx
.byte 15
.byte 200 # bswapl %eax
.byte 15
.byte 203 # bswapl %ebx
# XOR with the previous ciphertext (chaining value) kept at 16(%esp),
# then save the current ciphertext as the next chaining value
movl 16(%esp), %ecx
movl 20(%esp), %edx
xorl %eax, %ecx
xorl %ebx, %edx
movl (%esi), %eax
movl 4(%esi), %ebx
movl %ecx, (%edi)
movl %edx, 4(%edi)
movl %eax, 16(%esp)
movl %ebx, 20(%esp)
addl $8, %esi
addl $8, %edi
subl $8, %ebp
jnz .L016decrypt_loop
.L015decrypt_finish:
movl 52(%esp), %ebp
andl $7, %ebp
jz .L005finish
movl (%esi), %eax
movl 4(%esi), %ebx
.byte 15
.byte 200 # bswapl %eax
.byte 15
.byte 203 # bswapl %ebx
movl %eax, 8(%esp)
movl %ebx, 12(%esp)
call CAST_decrypt
movl 8(%esp), %eax
movl 12(%esp), %ebx
.byte 15
.byte 200 # bswapl %eax
.byte 15
.byte 203 # bswapl %ebx
movl 16(%esp), %ecx
movl 20(%esp), %edx
xorl %eax, %ecx
xorl %ebx, %edx
movl (%esi), %eax
movl 4(%esi), %ebx
# NOTE(review): .L025cbc_dec_jmp_table below is never referenced here --
# control falls straight into the dj7 path, i.e. the full 7+ tail bytes
# are stored regardless of the remainder in %ebp; confirm against the
# upstream generated cast-586 code before relying on tail behaviour.
.L017dj7:
rorl $16, %edx
movb %dl, 6(%edi)
shrl $16, %edx
.L018dj6:
movb %dh, 5(%edi)
.L019dj5:
movb %dl, 4(%edi)
.L020dj4:
movl %ecx, (%edi)
jmp .L021djend
.L022dj3:
rorl $16, %ecx
movb %cl, 2(%edi)
sall $16, %ecx
.L023dj2:
# NOTE(review): the two stores below go through %esi (the input pointer)
# while every other dj branch writes via %edi -- this mirrors a known
# quirk in the cbc.pl generator output; verify before trusting 1-2 byte
# decrypt tails.
movb %ch, 1(%esi)
.L024dj1:
movb %cl, (%esi)
.L021djend:
jmp .L005finish
.align 16
.L005finish:
# store the final chaining value back through the iv pointer (param 4)
movl 60(%esp), %ecx
addl $24, %esp
movl %eax, (%ecx)
movl %ebx, 4(%ecx)
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.align 16
.L006cbc_enc_jmp_table:
.long 0
.long .L014ej1
.long .L013ej2
.long .L012ej3
.long .L010ej4
.long .L009ej5
.long .L008ej6
.long .L007ej7
.align 16
.L025cbc_dec_jmp_table:
.long 0
.long .L024dj1
.long .L023dj2
.long .L022dj3
.long .L020dj4
.long .L019dj5
.long .L018dj6
.long .L017dj7
.L_CAST_cbc_encrypt_end:
.size CAST_cbc_encrypt,.L_CAST_cbc_encrypt_end-CAST_cbc_encrypt
.ident "desasm.pl"

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,933 @@
# $FreeBSD$
# Dont even think of reading this code
# It was automatically generated by crypt586.pl
# Which is a perl program used to generate the x86 assember for
# any of elf, a.out, BSDI, Win32, gaswin (for GNU as on Win32) or Solaris
# eric <eay@cryptsoft.com>
.file "crypt586.s"
.version "01.01"
gcc2_compiled.:
.text
.align 16
.globl fcrypt_body
.type fcrypt_body,@function
# fcrypt_body(out, key_schedule, Eswap0, Eswap1) -- cdecl (presumably the
# DES-based crypt(3) core; signature per crypt586.pl -- verify).
# Starting from the all-zero block (%edi/%esi cleared below), runs 25
# outer iterations of 16 unrolled DES rounds, then applies the final
# permutation and stores the two 32-bit result words through `out`.
fcrypt_body:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
# Load the 2 words
xorl %edi, %edi
xorl %esi, %esi
# %ebp = key schedule (parameter 2)
movl 24(%esp), %ebp
# outer loop counter: crypt() applies DES 25 times
pushl $25
.L000start:
# Round 0
# Each round: expand/select half-block bits with the Eswap masks at
# 32/36(%esp), XOR in two subkey words from the schedule, then XOR eight
# des_SPtrans S-box lookups into the other half.  %ebp is clobbered for
# the lookups and reloaded from 28(%esp) near the end of every round.
movl 32(%esp), %eax
movl %esi, %edx
shrl $16, %edx
movl 36(%esp), %ecx
xorl %esi, %edx
andl %edx, %eax
andl %ecx, %edx
movl %eax, %ebx
sall $16, %ebx
movl %edx, %ecx
sall $16, %ecx
xorl %ebx, %eax
xorl %ecx, %edx
movl (%ebp), %ebx
xorl %ebx, %eax
movl 4(%ebp), %ecx
xorl %esi, %eax
xorl %esi, %edx
xorl %ecx, %edx
andl $0xfcfcfcfc, %eax
xorl %ebx, %ebx
andl $0xcfcfcfcf, %edx
xorl %ecx, %ecx
movb %al, %bl
movb %ah, %cl
rorl $4, %edx
movl des_SPtrans(%ebx),%ebp
movb %dl, %bl
xorl %ebp, %edi
movl 0x200+des_SPtrans(%ecx),%ebp
xorl %ebp, %edi
movb %dh, %cl
shrl $16, %eax
movl 0x100+des_SPtrans(%ebx),%ebp
xorl %ebp, %edi
movb %ah, %bl
shrl $16, %edx
movl 0x300+des_SPtrans(%ecx),%ebp
xorl %ebp, %edi
movl 28(%esp), %ebp
movb %dh, %cl
andl $0xff, %eax
andl $0xff, %edx
movl 0x600+des_SPtrans(%ebx),%ebx
xorl %ebx, %edi
movl 0x700+des_SPtrans(%ecx),%ebx
xorl %ebx, %edi
movl 0x400+des_SPtrans(%eax),%ebx
xorl %ebx, %edi
movl 0x500+des_SPtrans(%edx),%ebx
xorl %ebx, %edi
# Round 1
movl 32(%esp), %eax
movl %edi, %edx
shrl $16, %edx
movl 36(%esp), %ecx
xorl %edi, %edx
andl %edx, %eax
andl %ecx, %edx
movl %eax, %ebx
sall $16, %ebx
movl %edx, %ecx
sall $16, %ecx
xorl %ebx, %eax
xorl %ecx, %edx
movl 8(%ebp), %ebx
xorl %ebx, %eax
movl 12(%ebp), %ecx
xorl %edi, %eax
xorl %edi, %edx
xorl %ecx, %edx
andl $0xfcfcfcfc, %eax
xorl %ebx, %ebx
andl $0xcfcfcfcf, %edx
xorl %ecx, %ecx
movb %al, %bl
movb %ah, %cl
rorl $4, %edx
movl des_SPtrans(%ebx),%ebp
movb %dl, %bl
xorl %ebp, %esi
movl 0x200+des_SPtrans(%ecx),%ebp
xorl %ebp, %esi
movb %dh, %cl
shrl $16, %eax
movl 0x100+des_SPtrans(%ebx),%ebp
xorl %ebp, %esi
movb %ah, %bl
shrl $16, %edx
movl 0x300+des_SPtrans(%ecx),%ebp
xorl %ebp, %esi
movl 28(%esp), %ebp
movb %dh, %cl
andl $0xff, %eax
andl $0xff, %edx
movl 0x600+des_SPtrans(%ebx),%ebx
xorl %ebx, %esi
movl 0x700+des_SPtrans(%ecx),%ebx
xorl %ebx, %esi
movl 0x400+des_SPtrans(%eax),%ebx
xorl %ebx, %esi
movl 0x500+des_SPtrans(%edx),%ebx
xorl %ebx, %esi
# Round 2
movl 32(%esp), %eax
movl %esi, %edx
shrl $16, %edx
movl 36(%esp), %ecx
xorl %esi, %edx
andl %edx, %eax
andl %ecx, %edx
movl %eax, %ebx
sall $16, %ebx
movl %edx, %ecx
sall $16, %ecx
xorl %ebx, %eax
xorl %ecx, %edx
movl 16(%ebp), %ebx
xorl %ebx, %eax
movl 20(%ebp), %ecx
xorl %esi, %eax
xorl %esi, %edx
xorl %ecx, %edx
andl $0xfcfcfcfc, %eax
xorl %ebx, %ebx
andl $0xcfcfcfcf, %edx
xorl %ecx, %ecx
movb %al, %bl
movb %ah, %cl
rorl $4, %edx
movl des_SPtrans(%ebx),%ebp
movb %dl, %bl
xorl %ebp, %edi
movl 0x200+des_SPtrans(%ecx),%ebp
xorl %ebp, %edi
movb %dh, %cl
shrl $16, %eax
movl 0x100+des_SPtrans(%ebx),%ebp
xorl %ebp, %edi
movb %ah, %bl
shrl $16, %edx
movl 0x300+des_SPtrans(%ecx),%ebp
xorl %ebp, %edi
movl 28(%esp), %ebp
movb %dh, %cl
andl $0xff, %eax
andl $0xff, %edx
movl 0x600+des_SPtrans(%ebx),%ebx
xorl %ebx, %edi
movl 0x700+des_SPtrans(%ecx),%ebx
xorl %ebx, %edi
movl 0x400+des_SPtrans(%eax),%ebx
xorl %ebx, %edi
movl 0x500+des_SPtrans(%edx),%ebx
xorl %ebx, %edi
# Round 3
movl 32(%esp), %eax
movl %edi, %edx
shrl $16, %edx
movl 36(%esp), %ecx
xorl %edi, %edx
andl %edx, %eax
andl %ecx, %edx
movl %eax, %ebx
sall $16, %ebx
movl %edx, %ecx
sall $16, %ecx
xorl %ebx, %eax
xorl %ecx, %edx
movl 24(%ebp), %ebx
xorl %ebx, %eax
movl 28(%ebp), %ecx
xorl %edi, %eax
xorl %edi, %edx
xorl %ecx, %edx
andl $0xfcfcfcfc, %eax
xorl %ebx, %ebx
andl $0xcfcfcfcf, %edx
xorl %ecx, %ecx
movb %al, %bl
movb %ah, %cl
rorl $4, %edx
movl des_SPtrans(%ebx),%ebp
movb %dl, %bl
xorl %ebp, %esi
movl 0x200+des_SPtrans(%ecx),%ebp
xorl %ebp, %esi
movb %dh, %cl
shrl $16, %eax
movl 0x100+des_SPtrans(%ebx),%ebp
xorl %ebp, %esi
movb %ah, %bl
shrl $16, %edx
movl 0x300+des_SPtrans(%ecx),%ebp
xorl %ebp, %esi
movl 28(%esp), %ebp
movb %dh, %cl
andl $0xff, %eax
andl $0xff, %edx
movl 0x600+des_SPtrans(%ebx),%ebx
xorl %ebx, %esi
movl 0x700+des_SPtrans(%ecx),%ebx
xorl %ebx, %esi
movl 0x400+des_SPtrans(%eax),%ebx
xorl %ebx, %esi
movl 0x500+des_SPtrans(%edx),%ebx
xorl %ebx, %esi
# Round 4
movl 32(%esp), %eax
movl %esi, %edx
shrl $16, %edx
movl 36(%esp), %ecx
xorl %esi, %edx
andl %edx, %eax
andl %ecx, %edx
movl %eax, %ebx
sall $16, %ebx
movl %edx, %ecx
sall $16, %ecx
xorl %ebx, %eax
xorl %ecx, %edx
movl 32(%ebp), %ebx
xorl %ebx, %eax
movl 36(%ebp), %ecx
xorl %esi, %eax
xorl %esi, %edx
xorl %ecx, %edx
andl $0xfcfcfcfc, %eax
xorl %ebx, %ebx
andl $0xcfcfcfcf, %edx
xorl %ecx, %ecx
movb %al, %bl
movb %ah, %cl
rorl $4, %edx
movl des_SPtrans(%ebx),%ebp
movb %dl, %bl
xorl %ebp, %edi
movl 0x200+des_SPtrans(%ecx),%ebp
xorl %ebp, %edi
movb %dh, %cl
shrl $16, %eax
movl 0x100+des_SPtrans(%ebx),%ebp
xorl %ebp, %edi
movb %ah, %bl
shrl $16, %edx
movl 0x300+des_SPtrans(%ecx),%ebp
xorl %ebp, %edi
movl 28(%esp), %ebp
movb %dh, %cl
andl $0xff, %eax
andl $0xff, %edx
movl 0x600+des_SPtrans(%ebx),%ebx
xorl %ebx, %edi
movl 0x700+des_SPtrans(%ecx),%ebx
xorl %ebx, %edi
movl 0x400+des_SPtrans(%eax),%ebx
xorl %ebx, %edi
movl 0x500+des_SPtrans(%edx),%ebx
xorl %ebx, %edi
# Round 5
movl 32(%esp), %eax
movl %edi, %edx
shrl $16, %edx
movl 36(%esp), %ecx
xorl %edi, %edx
andl %edx, %eax
andl %ecx, %edx
movl %eax, %ebx
sall $16, %ebx
movl %edx, %ecx
sall $16, %ecx
xorl %ebx, %eax
xorl %ecx, %edx
movl 40(%ebp), %ebx
xorl %ebx, %eax
movl 44(%ebp), %ecx
xorl %edi, %eax
xorl %edi, %edx
xorl %ecx, %edx
andl $0xfcfcfcfc, %eax
xorl %ebx, %ebx
andl $0xcfcfcfcf, %edx
xorl %ecx, %ecx
movb %al, %bl
movb %ah, %cl
rorl $4, %edx
movl des_SPtrans(%ebx),%ebp
movb %dl, %bl
xorl %ebp, %esi
movl 0x200+des_SPtrans(%ecx),%ebp
xorl %ebp, %esi
movb %dh, %cl
shrl $16, %eax
movl 0x100+des_SPtrans(%ebx),%ebp
xorl %ebp, %esi
movb %ah, %bl
shrl $16, %edx
movl 0x300+des_SPtrans(%ecx),%ebp
xorl %ebp, %esi
movl 28(%esp), %ebp
movb %dh, %cl
andl $0xff, %eax
andl $0xff, %edx
movl 0x600+des_SPtrans(%ebx),%ebx
xorl %ebx, %esi
movl 0x700+des_SPtrans(%ecx),%ebx
xorl %ebx, %esi
movl 0x400+des_SPtrans(%eax),%ebx
xorl %ebx, %esi
movl 0x500+des_SPtrans(%edx),%ebx
xorl %ebx, %esi
# Round 6
movl 32(%esp), %eax
movl %esi, %edx
shrl $16, %edx
movl 36(%esp), %ecx
xorl %esi, %edx
andl %edx, %eax
andl %ecx, %edx
movl %eax, %ebx
sall $16, %ebx
movl %edx, %ecx
sall $16, %ecx
xorl %ebx, %eax
xorl %ecx, %edx
movl 48(%ebp), %ebx
xorl %ebx, %eax
movl 52(%ebp), %ecx
xorl %esi, %eax
xorl %esi, %edx
xorl %ecx, %edx
andl $0xfcfcfcfc, %eax
xorl %ebx, %ebx
andl $0xcfcfcfcf, %edx
xorl %ecx, %ecx
movb %al, %bl
movb %ah, %cl
rorl $4, %edx
movl des_SPtrans(%ebx),%ebp
movb %dl, %bl
xorl %ebp, %edi
movl 0x200+des_SPtrans(%ecx),%ebp
xorl %ebp, %edi
movb %dh, %cl
shrl $16, %eax
movl 0x100+des_SPtrans(%ebx),%ebp
xorl %ebp, %edi
movb %ah, %bl
shrl $16, %edx
movl 0x300+des_SPtrans(%ecx),%ebp
xorl %ebp, %edi
movl 28(%esp), %ebp
movb %dh, %cl
andl $0xff, %eax
andl $0xff, %edx
movl 0x600+des_SPtrans(%ebx),%ebx
xorl %ebx, %edi
movl 0x700+des_SPtrans(%ecx),%ebx
xorl %ebx, %edi
movl 0x400+des_SPtrans(%eax),%ebx
xorl %ebx, %edi
movl 0x500+des_SPtrans(%edx),%ebx
xorl %ebx, %edi
# Round 7
movl 32(%esp), %eax
movl %edi, %edx
shrl $16, %edx
movl 36(%esp), %ecx
xorl %edi, %edx
andl %edx, %eax
andl %ecx, %edx
movl %eax, %ebx
sall $16, %ebx
movl %edx, %ecx
sall $16, %ecx
xorl %ebx, %eax
xorl %ecx, %edx
movl 56(%ebp), %ebx
xorl %ebx, %eax
movl 60(%ebp), %ecx
xorl %edi, %eax
xorl %edi, %edx
xorl %ecx, %edx
andl $0xfcfcfcfc, %eax
xorl %ebx, %ebx
andl $0xcfcfcfcf, %edx
xorl %ecx, %ecx
movb %al, %bl
movb %ah, %cl
rorl $4, %edx
movl des_SPtrans(%ebx),%ebp
movb %dl, %bl
xorl %ebp, %esi
movl 0x200+des_SPtrans(%ecx),%ebp
xorl %ebp, %esi
movb %dh, %cl
shrl $16, %eax
movl 0x100+des_SPtrans(%ebx),%ebp
xorl %ebp, %esi
movb %ah, %bl
shrl $16, %edx
movl 0x300+des_SPtrans(%ecx),%ebp
xorl %ebp, %esi
movl 28(%esp), %ebp
movb %dh, %cl
andl $0xff, %eax
andl $0xff, %edx
movl 0x600+des_SPtrans(%ebx),%ebx
xorl %ebx, %esi
movl 0x700+des_SPtrans(%ecx),%ebx
xorl %ebx, %esi
movl 0x400+des_SPtrans(%eax),%ebx
xorl %ebx, %esi
movl 0x500+des_SPtrans(%edx),%ebx
xorl %ebx, %esi
# Round 8
movl 32(%esp), %eax
movl %esi, %edx
shrl $16, %edx
movl 36(%esp), %ecx
xorl %esi, %edx
andl %edx, %eax
andl %ecx, %edx
movl %eax, %ebx
sall $16, %ebx
movl %edx, %ecx
sall $16, %ecx
xorl %ebx, %eax
xorl %ecx, %edx
movl 64(%ebp), %ebx
xorl %ebx, %eax
movl 68(%ebp), %ecx
xorl %esi, %eax
xorl %esi, %edx
xorl %ecx, %edx
andl $0xfcfcfcfc, %eax
xorl %ebx, %ebx
andl $0xcfcfcfcf, %edx
xorl %ecx, %ecx
movb %al, %bl
movb %ah, %cl
rorl $4, %edx
movl des_SPtrans(%ebx),%ebp
movb %dl, %bl
xorl %ebp, %edi
movl 0x200+des_SPtrans(%ecx),%ebp
xorl %ebp, %edi
movb %dh, %cl
shrl $16, %eax
movl 0x100+des_SPtrans(%ebx),%ebp
xorl %ebp, %edi
movb %ah, %bl
shrl $16, %edx
movl 0x300+des_SPtrans(%ecx),%ebp
xorl %ebp, %edi
movl 28(%esp), %ebp
movb %dh, %cl
andl $0xff, %eax
andl $0xff, %edx
movl 0x600+des_SPtrans(%ebx),%ebx
xorl %ebx, %edi
movl 0x700+des_SPtrans(%ecx),%ebx
xorl %ebx, %edi
movl 0x400+des_SPtrans(%eax),%ebx
xorl %ebx, %edi
movl 0x500+des_SPtrans(%edx),%ebx
xorl %ebx, %edi
# Round 9
movl 32(%esp), %eax
movl %edi, %edx
shrl $16, %edx
movl 36(%esp), %ecx
xorl %edi, %edx
andl %edx, %eax
andl %ecx, %edx
movl %eax, %ebx
sall $16, %ebx
movl %edx, %ecx
sall $16, %ecx
xorl %ebx, %eax
xorl %ecx, %edx
movl 72(%ebp), %ebx
xorl %ebx, %eax
movl 76(%ebp), %ecx
xorl %edi, %eax
xorl %edi, %edx
xorl %ecx, %edx
andl $0xfcfcfcfc, %eax
xorl %ebx, %ebx
andl $0xcfcfcfcf, %edx
xorl %ecx, %ecx
movb %al, %bl
movb %ah, %cl
rorl $4, %edx
movl des_SPtrans(%ebx),%ebp
movb %dl, %bl
xorl %ebp, %esi
movl 0x200+des_SPtrans(%ecx),%ebp
xorl %ebp, %esi
movb %dh, %cl
shrl $16, %eax
movl 0x100+des_SPtrans(%ebx),%ebp
xorl %ebp, %esi
movb %ah, %bl
shrl $16, %edx
movl 0x300+des_SPtrans(%ecx),%ebp
xorl %ebp, %esi
movl 28(%esp), %ebp
movb %dh, %cl
andl $0xff, %eax
andl $0xff, %edx
movl 0x600+des_SPtrans(%ebx),%ebx
xorl %ebx, %esi
movl 0x700+des_SPtrans(%ecx),%ebx
xorl %ebx, %esi
movl 0x400+des_SPtrans(%eax),%ebx
xorl %ebx, %esi
movl 0x500+des_SPtrans(%edx),%ebx
xorl %ebx, %esi
# Round 10
movl 32(%esp), %eax
movl %esi, %edx
shrl $16, %edx
movl 36(%esp), %ecx
xorl %esi, %edx
andl %edx, %eax
andl %ecx, %edx
movl %eax, %ebx
sall $16, %ebx
movl %edx, %ecx
sall $16, %ecx
xorl %ebx, %eax
xorl %ecx, %edx
movl 80(%ebp), %ebx
xorl %ebx, %eax
movl 84(%ebp), %ecx
xorl %esi, %eax
xorl %esi, %edx
xorl %ecx, %edx
andl $0xfcfcfcfc, %eax
xorl %ebx, %ebx
andl $0xcfcfcfcf, %edx
xorl %ecx, %ecx
movb %al, %bl
movb %ah, %cl
rorl $4, %edx
movl des_SPtrans(%ebx),%ebp
movb %dl, %bl
xorl %ebp, %edi
movl 0x200+des_SPtrans(%ecx),%ebp
xorl %ebp, %edi
movb %dh, %cl
shrl $16, %eax
movl 0x100+des_SPtrans(%ebx),%ebp
xorl %ebp, %edi
movb %ah, %bl
shrl $16, %edx
movl 0x300+des_SPtrans(%ecx),%ebp
xorl %ebp, %edi
movl 28(%esp), %ebp
movb %dh, %cl
andl $0xff, %eax
andl $0xff, %edx
movl 0x600+des_SPtrans(%ebx),%ebx
xorl %ebx, %edi
movl 0x700+des_SPtrans(%ecx),%ebx
xorl %ebx, %edi
movl 0x400+des_SPtrans(%eax),%ebx
xorl %ebx, %edi
movl 0x500+des_SPtrans(%edx),%ebx
xorl %ebx, %edi
# Round 11
movl 32(%esp), %eax
movl %edi, %edx
shrl $16, %edx
movl 36(%esp), %ecx
xorl %edi, %edx
andl %edx, %eax
andl %ecx, %edx
movl %eax, %ebx
sall $16, %ebx
movl %edx, %ecx
sall $16, %ecx
xorl %ebx, %eax
xorl %ecx, %edx
movl 88(%ebp), %ebx
xorl %ebx, %eax
movl 92(%ebp), %ecx
xorl %edi, %eax
xorl %edi, %edx
xorl %ecx, %edx
andl $0xfcfcfcfc, %eax
xorl %ebx, %ebx
andl $0xcfcfcfcf, %edx
xorl %ecx, %ecx
movb %al, %bl
movb %ah, %cl
rorl $4, %edx
movl des_SPtrans(%ebx),%ebp
movb %dl, %bl
xorl %ebp, %esi
movl 0x200+des_SPtrans(%ecx),%ebp
xorl %ebp, %esi
movb %dh, %cl
shrl $16, %eax
movl 0x100+des_SPtrans(%ebx),%ebp
xorl %ebp, %esi
movb %ah, %bl
shrl $16, %edx
movl 0x300+des_SPtrans(%ecx),%ebp
xorl %ebp, %esi
movl 28(%esp), %ebp
movb %dh, %cl
andl $0xff, %eax
andl $0xff, %edx
movl 0x600+des_SPtrans(%ebx),%ebx
xorl %ebx, %esi
movl 0x700+des_SPtrans(%ecx),%ebx
xorl %ebx, %esi
movl 0x400+des_SPtrans(%eax),%ebx
xorl %ebx, %esi
movl 0x500+des_SPtrans(%edx),%ebx
xorl %ebx, %esi
# Round 12
movl 32(%esp), %eax
movl %esi, %edx
shrl $16, %edx
movl 36(%esp), %ecx
xorl %esi, %edx
andl %edx, %eax
andl %ecx, %edx
movl %eax, %ebx
sall $16, %ebx
movl %edx, %ecx
sall $16, %ecx
xorl %ebx, %eax
xorl %ecx, %edx
movl 96(%ebp), %ebx
xorl %ebx, %eax
movl 100(%ebp), %ecx
xorl %esi, %eax
xorl %esi, %edx
xorl %ecx, %edx
andl $0xfcfcfcfc, %eax
xorl %ebx, %ebx
andl $0xcfcfcfcf, %edx
xorl %ecx, %ecx
movb %al, %bl
movb %ah, %cl
rorl $4, %edx
movl des_SPtrans(%ebx),%ebp
movb %dl, %bl
xorl %ebp, %edi
movl 0x200+des_SPtrans(%ecx),%ebp
xorl %ebp, %edi
movb %dh, %cl
shrl $16, %eax
movl 0x100+des_SPtrans(%ebx),%ebp
xorl %ebp, %edi
movb %ah, %bl
shrl $16, %edx
movl 0x300+des_SPtrans(%ecx),%ebp
xorl %ebp, %edi
movl 28(%esp), %ebp
movb %dh, %cl
andl $0xff, %eax
andl $0xff, %edx
movl 0x600+des_SPtrans(%ebx),%ebx
xorl %ebx, %edi
movl 0x700+des_SPtrans(%ecx),%ebx
xorl %ebx, %edi
movl 0x400+des_SPtrans(%eax),%ebx
xorl %ebx, %edi
movl 0x500+des_SPtrans(%edx),%ebx
xorl %ebx, %edi
# Round 13
movl 32(%esp), %eax
movl %edi, %edx
shrl $16, %edx
movl 36(%esp), %ecx
xorl %edi, %edx
andl %edx, %eax
andl %ecx, %edx
movl %eax, %ebx
sall $16, %ebx
movl %edx, %ecx
sall $16, %ecx
xorl %ebx, %eax
xorl %ecx, %edx
movl 104(%ebp), %ebx
xorl %ebx, %eax
movl 108(%ebp), %ecx
xorl %edi, %eax
xorl %edi, %edx
xorl %ecx, %edx
andl $0xfcfcfcfc, %eax
xorl %ebx, %ebx
andl $0xcfcfcfcf, %edx
xorl %ecx, %ecx
movb %al, %bl
movb %ah, %cl
rorl $4, %edx
movl des_SPtrans(%ebx),%ebp
movb %dl, %bl
xorl %ebp, %esi
movl 0x200+des_SPtrans(%ecx),%ebp
xorl %ebp, %esi
movb %dh, %cl
shrl $16, %eax
movl 0x100+des_SPtrans(%ebx),%ebp
xorl %ebp, %esi
movb %ah, %bl
shrl $16, %edx
movl 0x300+des_SPtrans(%ecx),%ebp
xorl %ebp, %esi
movl 28(%esp), %ebp
movb %dh, %cl
andl $0xff, %eax
andl $0xff, %edx
movl 0x600+des_SPtrans(%ebx),%ebx
xorl %ebx, %esi
movl 0x700+des_SPtrans(%ecx),%ebx
xorl %ebx, %esi
movl 0x400+des_SPtrans(%eax),%ebx
xorl %ebx, %esi
movl 0x500+des_SPtrans(%edx),%ebx
xorl %ebx, %esi
# Round 14
movl 32(%esp), %eax
movl %esi, %edx
shrl $16, %edx
movl 36(%esp), %ecx
xorl %esi, %edx
andl %edx, %eax
andl %ecx, %edx
movl %eax, %ebx
sall $16, %ebx
movl %edx, %ecx
sall $16, %ecx
xorl %ebx, %eax
xorl %ecx, %edx
movl 112(%ebp), %ebx
xorl %ebx, %eax
movl 116(%ebp), %ecx
xorl %esi, %eax
xorl %esi, %edx
xorl %ecx, %edx
andl $0xfcfcfcfc, %eax
xorl %ebx, %ebx
andl $0xcfcfcfcf, %edx
xorl %ecx, %ecx
movb %al, %bl
movb %ah, %cl
rorl $4, %edx
movl des_SPtrans(%ebx),%ebp
movb %dl, %bl
xorl %ebp, %edi
movl 0x200+des_SPtrans(%ecx),%ebp
xorl %ebp, %edi
movb %dh, %cl
shrl $16, %eax
movl 0x100+des_SPtrans(%ebx),%ebp
xorl %ebp, %edi
movb %ah, %bl
shrl $16, %edx
movl 0x300+des_SPtrans(%ecx),%ebp
xorl %ebp, %edi
movl 28(%esp), %ebp
movb %dh, %cl
andl $0xff, %eax
andl $0xff, %edx
movl 0x600+des_SPtrans(%ebx),%ebx
xorl %ebx, %edi
movl 0x700+des_SPtrans(%ecx),%ebx
xorl %ebx, %edi
movl 0x400+des_SPtrans(%eax),%ebx
xorl %ebx, %edi
movl 0x500+des_SPtrans(%edx),%ebx
xorl %ebx, %edi
# Round 15
movl 32(%esp), %eax
movl %edi, %edx
shrl $16, %edx
movl 36(%esp), %ecx
xorl %edi, %edx
andl %edx, %eax
andl %ecx, %edx
movl %eax, %ebx
sall $16, %ebx
movl %edx, %ecx
sall $16, %ecx
xorl %ebx, %eax
xorl %ecx, %edx
movl 120(%ebp), %ebx
xorl %ebx, %eax
movl 124(%ebp), %ecx
xorl %edi, %eax
xorl %edi, %edx
xorl %ecx, %edx
andl $0xfcfcfcfc, %eax
xorl %ebx, %ebx
andl $0xcfcfcfcf, %edx
xorl %ecx, %ecx
movb %al, %bl
movb %ah, %cl
rorl $4, %edx
movl des_SPtrans(%ebx),%ebp
movb %dl, %bl
xorl %ebp, %esi
movl 0x200+des_SPtrans(%ecx),%ebp
xorl %ebp, %esi
movb %dh, %cl
shrl $16, %eax
movl 0x100+des_SPtrans(%ebx),%ebp
xorl %ebp, %esi
movb %ah, %bl
shrl $16, %edx
movl 0x300+des_SPtrans(%ecx),%ebp
xorl %ebp, %esi
movl 28(%esp), %ebp
movb %dh, %cl
andl $0xff, %eax
andl $0xff, %edx
movl 0x600+des_SPtrans(%ebx),%ebx
xorl %ebx, %esi
movl 0x700+des_SPtrans(%ecx),%ebx
xorl %ebx, %esi
movl 0x400+des_SPtrans(%eax),%ebx
xorl %ebx, %esi
movl 0x500+des_SPtrans(%edx),%ebx
xorl %ebx, %esi
# swap the two half-blocks and decrement the outer counter at (%esp)
movl (%esp), %ebx
movl %edi, %eax
decl %ebx
movl %esi, %edi
movl %eax, %esi
movl %ebx, (%esp)
jnz .L000start
# FP
# final permutation of the two halves, then store the result via `out`
# (parameter 1, at 24(%esp) while the counter slot is still pushed)
movl 24(%esp), %edx
.byte 209
.byte 207 # rorl $1 %edi
movl %esi, %eax
xorl %edi, %esi
andl $0xaaaaaaaa, %esi
xorl %esi, %eax
xorl %esi, %edi
roll $23, %eax
movl %eax, %esi
xorl %edi, %eax
andl $0x03fc03fc, %eax
xorl %eax, %esi
xorl %eax, %edi
roll $10, %esi
movl %esi, %eax
xorl %edi, %esi
andl $0x33333333, %esi
xorl %esi, %eax
xorl %esi, %edi
roll $18, %edi
movl %edi, %esi
xorl %eax, %edi
andl $0xfff0000f, %edi
xorl %edi, %esi
xorl %edi, %eax
roll $12, %esi
movl %esi, %edi
xorl %eax, %esi
andl $0xf0f0f0f0, %esi
xorl %esi, %edi
xorl %esi, %eax
rorl $4, %eax
movl %eax, (%edx)
movl %edi, 4(%edx)
# discard the loop counter pushed at entry, then restore callee-saves
popl %ecx
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.fcrypt_body_end:
.size fcrypt_body,.fcrypt_body_end-fcrypt_body
.ident "fcrypt_body"

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,689 @@
# $FreeBSD$
# Dont even think of reading this code
# It was automatically generated by md5-586.pl
# Which is a perl program used to generate the x86 assember for
# any of elf, a.out, BSDI, Win32, gaswin (for GNU as on Win32) or Solaris
# eric <eay@cryptsoft.com>
.file "md5-586.s"
.version "01.01"
gcc2_compiled.:
.text
.align 16
.globl md5_block_asm_host_order
.type md5_block_asm_host_order,@function
# md5_block_asm_host_order(state, data, num) -- cdecl.
# MD5 compression function (RFC 1321) over `num` 64-byte blocks whose
# words are already in host byte order; updates the four 32-bit state
# words at *state.  State lives in %eax/%ebx/%ecx/%edx, %ebp holds the
# current message word, %edi the round-function scratch value.
md5_block_asm_host_order:
pushl %esi
pushl %edi
movl 12(%esp), %edi
movl 16(%esp), %esi
movl 20(%esp), %ecx
pushl %ebp
# %ecx = data + num*64 - 64, i.e. the address of the last block;
# it is pushed below and used as the loop bound at (%esp)
sall $6, %ecx
pushl %ebx
addl %esi, %ecx
subl $64, %ecx
movl (%edi), %eax
pushl %ecx
movl 4(%edi), %ebx
movl 8(%edi), %ecx
movl 12(%edi), %edx
.L000start:
# R0 section
# steps 0-15: F(b,c,d) = (b&c)|(~b&d), computed as ((c^d)&b)^d
movl %ecx, %edi
movl (%esi), %ebp
# R0 0
xorl %edx, %edi
andl %ebx, %edi
leal 3614090360(%eax,%ebp,1),%eax
xorl %edx, %edi
addl %edi, %eax
movl %ebx, %edi
roll $7, %eax
movl 4(%esi), %ebp
addl %ebx, %eax
# R0 1
xorl %ecx, %edi
andl %eax, %edi
leal 3905402710(%edx,%ebp,1),%edx
xorl %ecx, %edi
addl %edi, %edx
movl %eax, %edi
roll $12, %edx
movl 8(%esi), %ebp
addl %eax, %edx
# R0 2
xorl %ebx, %edi
andl %edx, %edi
leal 606105819(%ecx,%ebp,1),%ecx
xorl %ebx, %edi
addl %edi, %ecx
movl %edx, %edi
roll $17, %ecx
movl 12(%esi), %ebp
addl %edx, %ecx
# R0 3
xorl %eax, %edi
andl %ecx, %edi
leal 3250441966(%ebx,%ebp,1),%ebx
xorl %eax, %edi
addl %edi, %ebx
movl %ecx, %edi
roll $22, %ebx
movl 16(%esi), %ebp
addl %ecx, %ebx
# R0 4
xorl %edx, %edi
andl %ebx, %edi
leal 4118548399(%eax,%ebp,1),%eax
xorl %edx, %edi
addl %edi, %eax
movl %ebx, %edi
roll $7, %eax
movl 20(%esi), %ebp
addl %ebx, %eax
# R0 5
xorl %ecx, %edi
andl %eax, %edi
leal 1200080426(%edx,%ebp,1),%edx
xorl %ecx, %edi
addl %edi, %edx
movl %eax, %edi
roll $12, %edx
movl 24(%esi), %ebp
addl %eax, %edx
# R0 6
xorl %ebx, %edi
andl %edx, %edi
leal 2821735955(%ecx,%ebp,1),%ecx
xorl %ebx, %edi
addl %edi, %ecx
movl %edx, %edi
roll $17, %ecx
movl 28(%esi), %ebp
addl %edx, %ecx
# R0 7
xorl %eax, %edi
andl %ecx, %edi
leal 4249261313(%ebx,%ebp,1),%ebx
xorl %eax, %edi
addl %edi, %ebx
movl %ecx, %edi
roll $22, %ebx
movl 32(%esi), %ebp
addl %ecx, %ebx
# R0 8
xorl %edx, %edi
andl %ebx, %edi
leal 1770035416(%eax,%ebp,1),%eax
xorl %edx, %edi
addl %edi, %eax
movl %ebx, %edi
roll $7, %eax
movl 36(%esi), %ebp
addl %ebx, %eax
# R0 9
xorl %ecx, %edi
andl %eax, %edi
leal 2336552879(%edx,%ebp,1),%edx
xorl %ecx, %edi
addl %edi, %edx
movl %eax, %edi
roll $12, %edx
movl 40(%esi), %ebp
addl %eax, %edx
# R0 10
xorl %ebx, %edi
andl %edx, %edi
leal 4294925233(%ecx,%ebp,1),%ecx
xorl %ebx, %edi
addl %edi, %ecx
movl %edx, %edi
roll $17, %ecx
movl 44(%esi), %ebp
addl %edx, %ecx
# R0 11
xorl %eax, %edi
andl %ecx, %edi
leal 2304563134(%ebx,%ebp,1),%ebx
xorl %eax, %edi
addl %edi, %ebx
movl %ecx, %edi
roll $22, %ebx
movl 48(%esi), %ebp
addl %ecx, %ebx
# R0 12
xorl %edx, %edi
andl %ebx, %edi
leal 1804603682(%eax,%ebp,1),%eax
xorl %edx, %edi
addl %edi, %eax
movl %ebx, %edi
roll $7, %eax
movl 52(%esi), %ebp
addl %ebx, %eax
# R0 13
xorl %ecx, %edi
andl %eax, %edi
leal 4254626195(%edx,%ebp,1),%edx
xorl %ecx, %edi
addl %edi, %edx
movl %eax, %edi
roll $12, %edx
movl 56(%esi), %ebp
addl %eax, %edx
# R0 14
xorl %ebx, %edi
andl %edx, %edi
leal 2792965006(%ecx,%ebp,1),%ecx
xorl %ebx, %edi
addl %edi, %ecx
movl %edx, %edi
roll $17, %ecx
movl 60(%esi), %ebp
addl %edx, %ecx
# R0 15
xorl %eax, %edi
andl %ecx, %edi
leal 1236535329(%ebx,%ebp,1),%ebx
xorl %eax, %edi
addl %edi, %ebx
movl %ecx, %edi
roll $22, %ebx
movl 4(%esi), %ebp
addl %ecx, %ebx
# R1 section
# steps 16-31: G(b,c,d) = (b&d)|(c&~d)
# R1 16
leal 4129170786(%eax,%ebp,1),%eax
xorl %ebx, %edi
andl %edx, %edi
movl 24(%esi), %ebp
xorl %ecx, %edi
addl %edi, %eax
movl %ebx, %edi
roll $5, %eax
addl %ebx, %eax
# R1 17
leal 3225465664(%edx,%ebp,1),%edx
xorl %eax, %edi
andl %ecx, %edi
movl 44(%esi), %ebp
xorl %ebx, %edi
addl %edi, %edx
movl %eax, %edi
roll $9, %edx
addl %eax, %edx
# R1 18
leal 643717713(%ecx,%ebp,1),%ecx
xorl %edx, %edi
andl %ebx, %edi
movl (%esi), %ebp
xorl %eax, %edi
addl %edi, %ecx
movl %edx, %edi
roll $14, %ecx
addl %edx, %ecx
# R1 19
leal 3921069994(%ebx,%ebp,1),%ebx
xorl %ecx, %edi
andl %eax, %edi
movl 20(%esi), %ebp
xorl %edx, %edi
addl %edi, %ebx
movl %ecx, %edi
roll $20, %ebx
addl %ecx, %ebx
# R1 20
leal 3593408605(%eax,%ebp,1),%eax
xorl %ebx, %edi
andl %edx, %edi
movl 40(%esi), %ebp
xorl %ecx, %edi
addl %edi, %eax
movl %ebx, %edi
roll $5, %eax
addl %ebx, %eax
# R1 21
leal 38016083(%edx,%ebp,1),%edx
xorl %eax, %edi
andl %ecx, %edi
movl 60(%esi), %ebp
xorl %ebx, %edi
addl %edi, %edx
movl %eax, %edi
roll $9, %edx
addl %eax, %edx
# R1 22
leal 3634488961(%ecx,%ebp,1),%ecx
xorl %edx, %edi
andl %ebx, %edi
movl 16(%esi), %ebp
xorl %eax, %edi
addl %edi, %ecx
movl %edx, %edi
roll $14, %ecx
addl %edx, %ecx
# R1 23
leal 3889429448(%ebx,%ebp,1),%ebx
xorl %ecx, %edi
andl %eax, %edi
movl 36(%esi), %ebp
xorl %edx, %edi
addl %edi, %ebx
movl %ecx, %edi
roll $20, %ebx
addl %ecx, %ebx
# R1 24
leal 568446438(%eax,%ebp,1),%eax
xorl %ebx, %edi
andl %edx, %edi
movl 56(%esi), %ebp
xorl %ecx, %edi
addl %edi, %eax
movl %ebx, %edi
roll $5, %eax
addl %ebx, %eax
# R1 25
leal 3275163606(%edx,%ebp,1),%edx
xorl %eax, %edi
andl %ecx, %edi
movl 12(%esi), %ebp
xorl %ebx, %edi
addl %edi, %edx
movl %eax, %edi
roll $9, %edx
addl %eax, %edx
# R1 26
leal 4107603335(%ecx,%ebp,1),%ecx
xorl %edx, %edi
andl %ebx, %edi
movl 32(%esi), %ebp
xorl %eax, %edi
addl %edi, %ecx
movl %edx, %edi
roll $14, %ecx
addl %edx, %ecx
# R1 27
leal 1163531501(%ebx,%ebp,1),%ebx
xorl %ecx, %edi
andl %eax, %edi
movl 52(%esi), %ebp
xorl %edx, %edi
addl %edi, %ebx
movl %ecx, %edi
roll $20, %ebx
addl %ecx, %ebx
# R1 28
leal 2850285829(%eax,%ebp,1),%eax
xorl %ebx, %edi
andl %edx, %edi
movl 8(%esi), %ebp
xorl %ecx, %edi
addl %edi, %eax
movl %ebx, %edi
roll $5, %eax
addl %ebx, %eax
# R1 29
leal 4243563512(%edx,%ebp,1),%edx
xorl %eax, %edi
andl %ecx, %edi
movl 28(%esi), %ebp
xorl %ebx, %edi
addl %edi, %edx
movl %eax, %edi
roll $9, %edx
addl %eax, %edx
# R1 30
leal 1735328473(%ecx,%ebp,1),%ecx
xorl %edx, %edi
andl %ebx, %edi
movl 48(%esi), %ebp
xorl %eax, %edi
addl %edi, %ecx
movl %edx, %edi
roll $14, %ecx
addl %edx, %ecx
# R1 31
leal 2368359562(%ebx,%ebp,1),%ebx
xorl %ecx, %edi
andl %eax, %edi
movl 20(%esi), %ebp
xorl %edx, %edi
addl %edi, %ebx
movl %ecx, %edi
roll $20, %ebx
addl %ecx, %ebx
# R2 section
# steps 32-47: H(b,c,d) = b ^ c ^ d
# R2 32
xorl %edx, %edi
xorl %ebx, %edi
leal 4294588738(%eax,%ebp,1),%eax
addl %edi, %eax
roll $4, %eax
movl 32(%esi), %ebp
movl %ebx, %edi
# R2 33
leal 2272392833(%edx,%ebp,1),%edx
addl %ebx, %eax
xorl %ecx, %edi
xorl %eax, %edi
movl 44(%esi), %ebp
addl %edi, %edx
movl %eax, %edi
roll $11, %edx
addl %eax, %edx
# R2 34
xorl %ebx, %edi
xorl %edx, %edi
leal 1839030562(%ecx,%ebp,1),%ecx
addl %edi, %ecx
roll $16, %ecx
movl 56(%esi), %ebp
movl %edx, %edi
# R2 35
leal 4259657740(%ebx,%ebp,1),%ebx
addl %edx, %ecx
xorl %eax, %edi
xorl %ecx, %edi
movl 4(%esi), %ebp
addl %edi, %ebx
movl %ecx, %edi
roll $23, %ebx
addl %ecx, %ebx
# R2 36
xorl %edx, %edi
xorl %ebx, %edi
leal 2763975236(%eax,%ebp,1),%eax
addl %edi, %eax
roll $4, %eax
movl 16(%esi), %ebp
movl %ebx, %edi
# R2 37
leal 1272893353(%edx,%ebp,1),%edx
addl %ebx, %eax
xorl %ecx, %edi
xorl %eax, %edi
movl 28(%esi), %ebp
addl %edi, %edx
movl %eax, %edi
roll $11, %edx
addl %eax, %edx
# R2 38
xorl %ebx, %edi
xorl %edx, %edi
leal 4139469664(%ecx,%ebp,1),%ecx
addl %edi, %ecx
roll $16, %ecx
movl 40(%esi), %ebp
movl %edx, %edi
# R2 39
leal 3200236656(%ebx,%ebp,1),%ebx
addl %edx, %ecx
xorl %eax, %edi
xorl %ecx, %edi
movl 52(%esi), %ebp
addl %edi, %ebx
movl %ecx, %edi
roll $23, %ebx
addl %ecx, %ebx
# R2 40
xorl %edx, %edi
xorl %ebx, %edi
leal 681279174(%eax,%ebp,1),%eax
addl %edi, %eax
roll $4, %eax
movl (%esi), %ebp
movl %ebx, %edi
# R2 41
leal 3936430074(%edx,%ebp,1),%edx
addl %ebx, %eax
xorl %ecx, %edi
xorl %eax, %edi
movl 12(%esi), %ebp
addl %edi, %edx
movl %eax, %edi
roll $11, %edx
addl %eax, %edx
# R2 42
xorl %ebx, %edi
xorl %edx, %edi
leal 3572445317(%ecx,%ebp,1),%ecx
addl %edi, %ecx
roll $16, %ecx
movl 24(%esi), %ebp
movl %edx, %edi
# R2 43
leal 76029189(%ebx,%ebp,1),%ebx
addl %edx, %ecx
xorl %eax, %edi
xorl %ecx, %edi
movl 36(%esi), %ebp
addl %edi, %ebx
movl %ecx, %edi
roll $23, %ebx
addl %ecx, %ebx
# R2 44
xorl %edx, %edi
xorl %ebx, %edi
leal 3654602809(%eax,%ebp,1),%eax
addl %edi, %eax
roll $4, %eax
movl 48(%esi), %ebp
movl %ebx, %edi
# R2 45
leal 3873151461(%edx,%ebp,1),%edx
addl %ebx, %eax
xorl %ecx, %edi
xorl %eax, %edi
movl 60(%esi), %ebp
addl %edi, %edx
movl %eax, %edi
roll $11, %edx
addl %eax, %edx
# R2 46
xorl %ebx, %edi
xorl %edx, %edi
leal 530742520(%ecx,%ebp,1),%ecx
addl %edi, %ecx
roll $16, %ecx
movl 8(%esi), %ebp
movl %edx, %edi
# R2 47
leal 3299628645(%ebx,%ebp,1),%ebx
addl %edx, %ecx
xorl %eax, %edi
xorl %ecx, %edi
movl (%esi), %ebp
addl %edi, %ebx
movl $-1, %edi
roll $23, %ebx
addl %ecx, %ebx
# R3 section
# steps 48-63: I(b,c,d) = c ^ (b | ~d); %edi holds -1 so xorl gives ~d
# R3 48
xorl %edx, %edi
orl %ebx, %edi
leal 4096336452(%eax,%ebp,1),%eax
xorl %ecx, %edi
movl 28(%esi), %ebp
addl %edi, %eax
movl $-1, %edi
roll $6, %eax
xorl %ecx, %edi
addl %ebx, %eax
# R3 49
orl %eax, %edi
leal 1126891415(%edx,%ebp,1),%edx
xorl %ebx, %edi
movl 56(%esi), %ebp
addl %edi, %edx
movl $-1, %edi
roll $10, %edx
xorl %ebx, %edi
addl %eax, %edx
# R3 50
orl %edx, %edi
leal 2878612391(%ecx,%ebp,1),%ecx
xorl %eax, %edi
movl 20(%esi), %ebp
addl %edi, %ecx
movl $-1, %edi
roll $15, %ecx
xorl %eax, %edi
addl %edx, %ecx
# R3 51
orl %ecx, %edi
leal 4237533241(%ebx,%ebp,1),%ebx
xorl %edx, %edi
movl 48(%esi), %ebp
addl %edi, %ebx
movl $-1, %edi
roll $21, %ebx
xorl %edx, %edi
addl %ecx, %ebx
# R3 52
orl %ebx, %edi
leal 1700485571(%eax,%ebp,1),%eax
xorl %ecx, %edi
movl 12(%esi), %ebp
addl %edi, %eax
movl $-1, %edi
roll $6, %eax
xorl %ecx, %edi
addl %ebx, %eax
# R3 53
orl %eax, %edi
leal 2399980690(%edx,%ebp,1),%edx
xorl %ebx, %edi
movl 40(%esi), %ebp
addl %edi, %edx
movl $-1, %edi
roll $10, %edx
xorl %ebx, %edi
addl %eax, %edx
# R3 54
orl %edx, %edi
leal 4293915773(%ecx,%ebp,1),%ecx
xorl %eax, %edi
movl 4(%esi), %ebp
addl %edi, %ecx
movl $-1, %edi
roll $15, %ecx
xorl %eax, %edi
addl %edx, %ecx
# R3 55
orl %ecx, %edi
leal 2240044497(%ebx,%ebp,1),%ebx
xorl %edx, %edi
movl 32(%esi), %ebp
addl %edi, %ebx
movl $-1, %edi
roll $21, %ebx
xorl %edx, %edi
addl %ecx, %ebx
# R3 56
orl %ebx, %edi
leal 1873313359(%eax,%ebp,1),%eax
xorl %ecx, %edi
movl 60(%esi), %ebp
addl %edi, %eax
movl $-1, %edi
roll $6, %eax
xorl %ecx, %edi
addl %ebx, %eax
# R3 57
orl %eax, %edi
leal 4264355552(%edx,%ebp,1),%edx
xorl %ebx, %edi
movl 24(%esi), %ebp
addl %edi, %edx
movl $-1, %edi
roll $10, %edx
xorl %ebx, %edi
addl %eax, %edx
# R3 58
orl %edx, %edi
leal 2734768916(%ecx,%ebp,1),%ecx
xorl %eax, %edi
movl 52(%esi), %ebp
addl %edi, %ecx
movl $-1, %edi
roll $15, %ecx
xorl %eax, %edi
addl %edx, %ecx
# R3 59
orl %ecx, %edi
leal 1309151649(%ebx,%ebp,1),%ebx
xorl %edx, %edi
movl 16(%esi), %ebp
addl %edi, %ebx
movl $-1, %edi
roll $21, %ebx
xorl %edx, %edi
addl %ecx, %ebx
# R3 60
orl %ebx, %edi
leal 4149444226(%eax,%ebp,1),%eax
xorl %ecx, %edi
movl 44(%esi), %ebp
addl %edi, %eax
movl $-1, %edi
roll $6, %eax
xorl %ecx, %edi
addl %ebx, %eax
# R3 61
orl %eax, %edi
leal 3174756917(%edx,%ebp,1),%edx
xorl %ebx, %edi
movl 8(%esi), %ebp
addl %edi, %edx
movl $-1, %edi
roll $10, %edx
xorl %ebx, %edi
addl %eax, %edx
# R3 62
orl %edx, %edi
leal 718787259(%ecx,%ebp,1),%ecx
xorl %eax, %edi
movl 36(%esi), %ebp
addl %edi, %ecx
movl $-1, %edi
roll $15, %ecx
xorl %eax, %edi
addl %edx, %ecx
# R3 63
orl %ecx, %edi
leal 3951481745(%ebx,%ebp,1),%ebx
xorl %edx, %edi
movl 24(%esp), %ebp
addl %edi, %ebx
addl $64, %esi
roll $21, %ebx
movl (%ebp), %edi
addl %ecx, %ebx
# add this block's result into the state words at *state (%ebp)
addl %edi, %eax
movl 4(%ebp), %edi
addl %edi, %ebx
movl 8(%ebp), %edi
addl %edi, %ecx
movl 12(%ebp), %edi
addl %edi, %edx
movl %eax, (%ebp)
movl %ebx, 4(%ebp)
movl (%esp), %edi
movl %ecx, 8(%ebp)
movl %edx, 12(%ebp)
# loop while the last-block pointer saved at (%esp) >= current %esi
cmpl %esi, %edi
jge .L000start
# pop the saved bound plus callee-saved registers
popl %eax
popl %ebx
popl %ebp
popl %edi
popl %esi
ret
.L_md5_block_asm_host_order_end:
.size md5_block_asm_host_order,.L_md5_block_asm_host_order_end-md5_block_asm_host_order
.ident "desasm.pl"

View File

@ -0,0 +1,316 @@
# $FreeBSD$
# Dont even think of reading this code
# It was automatically generated by rc4-586.pl
# Which is a perl program used to generate the x86 assember for
# any of elf, a.out, BSDI, Win32, gaswin (for GNU as on Win32) or Solaris
# eric <eay@cryptsoft.com>
#
# NOTE(review): pre-generated output checked in so the build does not
# depend on perl.  Do not hand-edit; regenerate from rc4-586.pl.
#
# void RC4(RC4_KEY *key, unsigned long len, unsigned char *in,
#          unsigned char *out)  -- presumably; confirm against rc4.h.
# The key schedule is laid out as { x, y, data[256] } of 4-byte words:
# x at 0(key), y at 4(key), S-box words from 8(key) onward (derived from
# the 0/4/+8 offsets used below).
.file "rc4-586.s"
.version "01.01"
gcc2_compiled.:
.text
	.align 16
.globl RC4
	.type RC4,@function
RC4:
	pushl %ebp
	pushl %ebx
	# after two pushes: 12(%esp)=key (arg 1), 16(%esp)=len (arg 2)
	movl 12(%esp), %ebp
	movl 16(%esp), %ebx
	pushl %esi
	pushl %edi
	# %ecx = key->x, %edx = key->y; 28(%esp) = in (arg 3)
	movl (%ebp), %ecx
	movl 4(%ebp), %edx
	movl 28(%esp), %esi
	incl %ecx
	# 12-byte scratch frame: 0..7(%esp) collect 8 keystream bytes,
	# 8(%esp) caches the end-of-full-blocks limit computed below
	subl $12, %esp
	# %ebp now points at the S-box words (key->data)
	addl $8, %ebp
	andl $255, %ecx
	# %ebx = in + len - 8: last address where a full 8-byte block starts
	leal -8(%ebx,%esi), %ebx
	# 44(%esp) = out (arg 4), after 16 bytes of pushes + 12 of frame
	movl 44(%esp), %edi
	movl %ebx, 8(%esp)
	movl (%ebp,%ecx,4), %eax
	cmpl %esi, %ebx
	jl .L000end
# Main loop: each iteration performs 8 RC4 steps, parking the 8
# keystream bytes at 0..7(%esp), then XORs them against 8 input bytes.
.L001start:
	addl $8, %esi
	# Round 0
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb %bl, (%esp)
	# Round 1
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb %bl, 1(%esp)
	# Round 2
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb %bl, 2(%esp)
	# Round 3
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb %bl, 3(%esp)
	# Round 4
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb %bl, 4(%esp)
	# Round 5
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb %bl, 5(%esp)
	# Round 6
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb %bl, 6(%esp)
	# Round 7 (also advances the output pointer for this block)
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	addl $8, %edi
	movb %bl, 7(%esp)
	# apply the cipher text
	# XOR the 8 buffered keystream bytes against in[-8..-1], 4 at a time
	movl (%esp), %eax
	movl -8(%esi), %ebx
	xorl %ebx, %eax
	movl -4(%esi), %ebx
	movl %eax, -8(%edi)
	movl 4(%esp), %eax
	xorl %ebx, %eax
	movl 8(%esp), %ebx
	movl %eax, -4(%edi)
	movl (%ebp,%ecx,4), %eax
	cmpl %ebx, %esi
	jle .L001start
# Tail: fewer than 8 bytes remain; process one byte per round, checking
# the limit (restored to in+len via the addl $8 below) before each one.
.L000end:
	# Round 0
	addl $8, %ebx
	incl %esi
	cmpl %esi, %ebx
	jl .L002finished
	movl %ebx, 8(%esp)
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb -1(%esi), %bh
	xorb %bh, %bl
	movb %bl, (%edi)
	# Round 1
	movl 8(%esp), %ebx
	cmpl %esi, %ebx
	jle .L002finished
	incl %esi
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb -1(%esi), %bh
	xorb %bh, %bl
	movb %bl, 1(%edi)
	# Round 2
	movl 8(%esp), %ebx
	cmpl %esi, %ebx
	jle .L002finished
	incl %esi
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb -1(%esi), %bh
	xorb %bh, %bl
	movb %bl, 2(%edi)
	# Round 3
	movl 8(%esp), %ebx
	cmpl %esi, %ebx
	jle .L002finished
	incl %esi
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb -1(%esi), %bh
	xorb %bh, %bl
	movb %bl, 3(%edi)
	# Round 4
	movl 8(%esp), %ebx
	cmpl %esi, %ebx
	jle .L002finished
	incl %esi
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb -1(%esi), %bh
	xorb %bh, %bl
	movb %bl, 4(%edi)
	# Round 5
	movl 8(%esp), %ebx
	cmpl %esi, %ebx
	jle .L002finished
	incl %esi
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movl (%ebp,%ecx,4), %eax
	movb -1(%esi), %bh
	xorb %bh, %bl
	movb %bl, 5(%edi)
	# Round 6 (last possible tail byte; no %eax reload needed)
	movl 8(%esp), %ebx
	cmpl %esi, %ebx
	jle .L002finished
	incl %esi
	addl %eax, %edx
	andl $255, %edx
	incl %ecx
	movl (%ebp,%edx,4), %ebx
	movl %ebx, -4(%ebp,%ecx,4)
	addl %eax, %ebx
	andl $255, %ecx
	andl $255, %ebx
	movl %eax, (%ebp,%edx,4)
	nop
	movl (%ebp,%ebx,4), %ebx
	movb -1(%esi), %bh
	xorb %bh, %bl
	movb %bl, 6(%edi)
.L002finished:
	# Store the counters back into the key: %ecx was kept one ahead,
	# so undo the bias; -8/-4(%ebp) address key->x / key->y since
	# %ebp still points at key->data.  Only the low byte of x matters
	# (x is always in 0..255), hence the movb.
	decl %ecx
	addl $12, %esp
	movl %edx, -4(%ebp)
	movb %cl, -8(%ebp)
	popl %edi
	popl %esi
	popl %ebx
	popl %ebp
	ret
.RC4_end:
	.size RC4,.RC4_end-RC4
	.ident "RC4"

View File

@ -0,0 +1,584 @@
# $FreeBSD$
# Dont even think of reading this code
# It was automatically generated by rc5-586.pl
# Which is a perl program used to generate the x86 assember for
# any of elf, a.out, BSDI, Win32, gaswin (for GNU as on Win32) or Solaris
# eric <eay@cryptsoft.com>
#
# NOTE(review): pre-generated output checked in to avoid a build-time
# perl dependency.  Do not hand-edit; regenerate from rc5-586.pl.
.file "rc5-586.s"
.version "01.01"
gcc2_compiled.:
.text
	.align 16
.globl RC5_32_encrypt
	.type RC5_32_encrypt,@function
# RC5_32_encrypt(data, key): encrypt the two 32-bit words at (%edx)
# in place.  The key schedule starts with the round count at (%ebp)
# followed by the S[] words from 4(%ebp) on; the code is fully
# unrolled for 16 rounds with early exits after 8 and 12.
# Each half-round: A = ROL(A ^ B, B & 31) + S[i].
RC5_32_encrypt:
	pushl %ebp
	pushl %esi
	pushl %edi
	movl 16(%esp), %edx
	movl 20(%esp), %ebp
	# Load the 2 words
	movl (%edx), %edi
	movl 4(%edx), %esi
	pushl %ebx
	# %ebx = number of rounds (first word of the schedule)
	movl (%ebp), %ebx
	# initial key addition: A += S[0], B += S[1]
	addl 4(%ebp), %edi
	addl 8(%ebp), %esi
	xorl %esi, %edi
	movl 12(%ebp), %eax
	movl %esi, %ecx
	roll %cl, %edi
	addl %eax, %edi
	xorl %edi, %esi
	movl 16(%ebp), %eax
	movl %edi, %ecx
	roll %cl, %esi
	addl %eax, %esi
	xorl %esi, %edi
	movl 20(%ebp), %eax
	movl %esi, %ecx
	roll %cl, %edi
	addl %eax, %edi
	xorl %edi, %esi
	movl 24(%ebp), %eax
	movl %edi, %ecx
	roll %cl, %esi
	addl %eax, %esi
	xorl %esi, %edi
	movl 28(%ebp), %eax
	movl %esi, %ecx
	roll %cl, %edi
	addl %eax, %edi
	xorl %edi, %esi
	movl 32(%ebp), %eax
	movl %edi, %ecx
	roll %cl, %esi
	addl %eax, %esi
	xorl %esi, %edi
	movl 36(%ebp), %eax
	movl %esi, %ecx
	roll %cl, %edi
	addl %eax, %edi
	xorl %edi, %esi
	movl 40(%ebp), %eax
	movl %edi, %ecx
	roll %cl, %esi
	addl %eax, %esi
	xorl %esi, %edi
	movl 44(%ebp), %eax
	movl %esi, %ecx
	roll %cl, %edi
	addl %eax, %edi
	xorl %edi, %esi
	movl 48(%ebp), %eax
	movl %edi, %ecx
	roll %cl, %esi
	addl %eax, %esi
	xorl %esi, %edi
	movl 52(%ebp), %eax
	movl %esi, %ecx
	roll %cl, %edi
	addl %eax, %edi
	xorl %edi, %esi
	movl 56(%ebp), %eax
	movl %edi, %ecx
	roll %cl, %esi
	addl %eax, %esi
	xorl %esi, %edi
	movl 60(%ebp), %eax
	movl %esi, %ecx
	roll %cl, %edi
	addl %eax, %edi
	xorl %edi, %esi
	movl 64(%ebp), %eax
	movl %edi, %ecx
	roll %cl, %esi
	addl %eax, %esi
	xorl %esi, %edi
	movl 68(%ebp), %eax
	movl %esi, %ecx
	roll %cl, %edi
	addl %eax, %edi
	xorl %edi, %esi
	movl 72(%ebp), %eax
	movl %edi, %ecx
	roll %cl, %esi
	addl %eax, %esi
	# 8 rounds done; stop here for an 8-round key
	cmpl $8, %ebx
	je .L000rc5_exit
	xorl %esi, %edi
	movl 76(%ebp), %eax
	movl %esi, %ecx
	roll %cl, %edi
	addl %eax, %edi
	xorl %edi, %esi
	movl 80(%ebp), %eax
	movl %edi, %ecx
	roll %cl, %esi
	addl %eax, %esi
	xorl %esi, %edi
	movl 84(%ebp), %eax
	movl %esi, %ecx
	roll %cl, %edi
	addl %eax, %edi
	xorl %edi, %esi
	movl 88(%ebp), %eax
	movl %edi, %ecx
	roll %cl, %esi
	addl %eax, %esi
	xorl %esi, %edi
	movl 92(%ebp), %eax
	movl %esi, %ecx
	roll %cl, %edi
	addl %eax, %edi
	xorl %edi, %esi
	movl 96(%ebp), %eax
	movl %edi, %ecx
	roll %cl, %esi
	addl %eax, %esi
	xorl %esi, %edi
	movl 100(%ebp), %eax
	movl %esi, %ecx
	roll %cl, %edi
	addl %eax, %edi
	xorl %edi, %esi
	movl 104(%ebp), %eax
	movl %edi, %ecx
	roll %cl, %esi
	addl %eax, %esi
	# 12 rounds done; stop here for a 12-round key
	cmpl $12, %ebx
	je .L000rc5_exit
	xorl %esi, %edi
	movl 108(%ebp), %eax
	movl %esi, %ecx
	roll %cl, %edi
	addl %eax, %edi
	xorl %edi, %esi
	movl 112(%ebp), %eax
	movl %edi, %ecx
	roll %cl, %esi
	addl %eax, %esi
	xorl %esi, %edi
	movl 116(%ebp), %eax
	movl %esi, %ecx
	roll %cl, %edi
	addl %eax, %edi
	xorl %edi, %esi
	movl 120(%ebp), %eax
	movl %edi, %ecx
	roll %cl, %esi
	addl %eax, %esi
	xorl %esi, %edi
	movl 124(%ebp), %eax
	movl %esi, %ecx
	roll %cl, %edi
	addl %eax, %edi
	xorl %edi, %esi
	movl 128(%ebp), %eax
	movl %edi, %ecx
	roll %cl, %esi
	addl %eax, %esi
	xorl %esi, %edi
	movl 132(%ebp), %eax
	movl %esi, %ecx
	roll %cl, %edi
	addl %eax, %edi
	xorl %edi, %esi
	movl 136(%ebp), %eax
	movl %edi, %ecx
	roll %cl, %esi
	addl %eax, %esi
.L000rc5_exit:
	# write the two halves back in place
	movl %edi, (%edx)
	movl %esi, 4(%edx)
	popl %ebx
	popl %edi
	popl %esi
	popl %ebp
	ret
.L_RC5_32_encrypt_end:
	.size RC5_32_encrypt,.L_RC5_32_encrypt_end-RC5_32_encrypt
	.ident "desasm.pl"
.text
	.align 16
.globl RC5_32_decrypt
	.type RC5_32_decrypt,@function
# RC5_32_decrypt(data, key): inverse of RC5_32_encrypt; decrypts the
# two 32-bit words at (%edx) in place, walking the key schedule
# backwards from S[2r+1].  For 8- and 12-round keys it jumps forward
# past the unused high rounds (.L002rc5_dec_8 / .L001rc5_dec_12).
# Each half-round: A = ROR(A - S[i], B & 31) ^ B.
RC5_32_decrypt:
	pushl %ebp
	pushl %esi
	pushl %edi
	movl 16(%esp), %edx
	movl 20(%esp), %ebp
	# Load the 2 words
	movl (%edx), %edi
	movl 4(%edx), %esi
	pushl %ebx
	# %ebx = number of rounds; dispatch to the right entry point
	movl (%ebp), %ebx
	cmpl $12, %ebx
	je .L001rc5_dec_12
	cmpl $8, %ebx
	je .L002rc5_dec_8
	movl 136(%ebp), %eax
	subl %eax, %esi
	movl %edi, %ecx
	rorl %cl, %esi
	xorl %edi, %esi
	movl 132(%ebp), %eax
	subl %eax, %edi
	movl %esi, %ecx
	rorl %cl, %edi
	xorl %esi, %edi
	movl 128(%ebp), %eax
	subl %eax, %esi
	movl %edi, %ecx
	rorl %cl, %esi
	xorl %edi, %esi
	movl 124(%ebp), %eax
	subl %eax, %edi
	movl %esi, %ecx
	rorl %cl, %edi
	xorl %esi, %edi
	movl 120(%ebp), %eax
	subl %eax, %esi
	movl %edi, %ecx
	rorl %cl, %esi
	xorl %edi, %esi
	movl 116(%ebp), %eax
	subl %eax, %edi
	movl %esi, %ecx
	rorl %cl, %edi
	xorl %esi, %edi
	movl 112(%ebp), %eax
	subl %eax, %esi
	movl %edi, %ecx
	rorl %cl, %esi
	xorl %edi, %esi
	movl 108(%ebp), %eax
	subl %eax, %edi
	movl %esi, %ecx
	rorl %cl, %edi
	xorl %esi, %edi
.L001rc5_dec_12:
	movl 104(%ebp), %eax
	subl %eax, %esi
	movl %edi, %ecx
	rorl %cl, %esi
	xorl %edi, %esi
	movl 100(%ebp), %eax
	subl %eax, %edi
	movl %esi, %ecx
	rorl %cl, %edi
	xorl %esi, %edi
	movl 96(%ebp), %eax
	subl %eax, %esi
	movl %edi, %ecx
	rorl %cl, %esi
	xorl %edi, %esi
	movl 92(%ebp), %eax
	subl %eax, %edi
	movl %esi, %ecx
	rorl %cl, %edi
	xorl %esi, %edi
	movl 88(%ebp), %eax
	subl %eax, %esi
	movl %edi, %ecx
	rorl %cl, %esi
	xorl %edi, %esi
	movl 84(%ebp), %eax
	subl %eax, %edi
	movl %esi, %ecx
	rorl %cl, %edi
	xorl %esi, %edi
	movl 80(%ebp), %eax
	subl %eax, %esi
	movl %edi, %ecx
	rorl %cl, %esi
	xorl %edi, %esi
	movl 76(%ebp), %eax
	subl %eax, %edi
	movl %esi, %ecx
	rorl %cl, %edi
	xorl %esi, %edi
.L002rc5_dec_8:
	movl 72(%ebp), %eax
	subl %eax, %esi
	movl %edi, %ecx
	rorl %cl, %esi
	xorl %edi, %esi
	movl 68(%ebp), %eax
	subl %eax, %edi
	movl %esi, %ecx
	rorl %cl, %edi
	xorl %esi, %edi
	movl 64(%ebp), %eax
	subl %eax, %esi
	movl %edi, %ecx
	rorl %cl, %esi
	xorl %edi, %esi
	movl 60(%ebp), %eax
	subl %eax, %edi
	movl %esi, %ecx
	rorl %cl, %edi
	xorl %esi, %edi
	movl 56(%ebp), %eax
	subl %eax, %esi
	movl %edi, %ecx
	rorl %cl, %esi
	xorl %edi, %esi
	movl 52(%ebp), %eax
	subl %eax, %edi
	movl %esi, %ecx
	rorl %cl, %edi
	xorl %esi, %edi
	movl 48(%ebp), %eax
	subl %eax, %esi
	movl %edi, %ecx
	rorl %cl, %esi
	xorl %edi, %esi
	movl 44(%ebp), %eax
	subl %eax, %edi
	movl %esi, %ecx
	rorl %cl, %edi
	xorl %esi, %edi
	movl 40(%ebp), %eax
	subl %eax, %esi
	movl %edi, %ecx
	rorl %cl, %esi
	xorl %edi, %esi
	movl 36(%ebp), %eax
	subl %eax, %edi
	movl %esi, %ecx
	rorl %cl, %edi
	xorl %esi, %edi
	movl 32(%ebp), %eax
	subl %eax, %esi
	movl %edi, %ecx
	rorl %cl, %esi
	xorl %edi, %esi
	movl 28(%ebp), %eax
	subl %eax, %edi
	movl %esi, %ecx
	rorl %cl, %edi
	xorl %esi, %edi
	movl 24(%ebp), %eax
	subl %eax, %esi
	movl %edi, %ecx
	rorl %cl, %esi
	xorl %edi, %esi
	movl 20(%ebp), %eax
	subl %eax, %edi
	movl %esi, %ecx
	rorl %cl, %edi
	xorl %esi, %edi
	movl 16(%ebp), %eax
	subl %eax, %esi
	movl %edi, %ecx
	rorl %cl, %esi
	xorl %edi, %esi
	movl 12(%ebp), %eax
	subl %eax, %edi
	movl %esi, %ecx
	rorl %cl, %edi
	xorl %esi, %edi
	# undo the initial key addition: B -= S[1], A -= S[0]
	subl 8(%ebp), %esi
	subl 4(%ebp), %edi
# NOTE(review): this label is never referenced in this file; the code
# simply falls through.  Harmless artifact of the generator.
.L003rc5_exit:
	movl %edi, (%edx)
	movl %esi, 4(%edx)
	popl %ebx
	popl %edi
	popl %esi
	popl %ebp
	ret
.L_RC5_32_decrypt_end:
	.size RC5_32_decrypt,.L_RC5_32_decrypt_end-RC5_32_decrypt
	.ident "desasm.pl"
.text
	.align 16
.globl RC5_32_cbc_encrypt
	.type RC5_32_cbc_encrypt,@function
# RC5_32_cbc_encrypt(in, out, length, key, iv, encrypt) -- presumably;
# confirm against rc5.h.  CBC mode driver around RC5_32_encrypt /
# RC5_32_decrypt.  Builds a small frame holding two copies of the IV
# (8(%esp)/12(%esp) = working data block, 16(%esp)/20(%esp) = saved IV
# for decrypt chaining), then loops over full 8-byte blocks and handles
# a 1..7-byte tail separately.
RC5_32_cbc_encrypt:
	pushl %ebp
	pushl %ebx
	pushl %esi
	pushl %edi
	# 28(%esp) = length (arg 3) after the four pushes
	movl 28(%esp), %ebp
	# getting iv ptr from parameter 4
	movl 36(%esp), %ebx
	movl (%ebx), %esi
	movl 4(%ebx), %edi
	# push the IV twice: working copy + saved copy
	pushl %edi
	pushl %esi
	pushl %edi
	pushl %esi
	movl %esp, %ebx
	# %esi = in (arg 1), %edi = out (arg 2), offsets shifted by the
	# 16 bytes just pushed
	movl 36(%esp), %esi
	movl 40(%esp), %edi
	# getting encrypt flag from parameter 5
	movl 56(%esp), %ecx
	# get and push parameter 3
	movl 48(%esp), %eax
	pushl %eax
	pushl %ebx
	cmpl $0, %ecx
	jz .L004decrypt
	# round length down to a multiple of 8 (~7 mask); ZF set here
	# means no full blocks at all
	andl $4294967288, %ebp
	movl 8(%esp), %eax
	movl 12(%esp), %ebx
	jz .L005encrypt_finish
.L006encrypt_loop:
	# chain: XOR plaintext block into previous ciphertext/IV, encrypt,
	# write out
	movl (%esi), %ecx
	movl 4(%esi), %edx
	xorl %ecx, %eax
	xorl %edx, %ebx
	movl %eax, 8(%esp)
	movl %ebx, 12(%esp)
	call RC5_32_encrypt
	movl 8(%esp), %eax
	movl 12(%esp), %ebx
	movl %eax, (%edi)
	movl %ebx, 4(%edi)
	addl $8, %esi
	addl $8, %edi
	subl $8, %ebp
	jnz .L006encrypt_loop
.L005encrypt_finish:
	# tail: %ebp = length & 7; dispatch on the leftover byte count to
	# assemble a partial final block in %ecx:%edx (zero-padded)
	movl 52(%esp), %ebp
	andl $7, %ebp
	jz .L007finish
	xorl %ecx, %ecx
	xorl %edx, %edx
	movl .L008cbc_enc_jmp_table(,%ebp,4),%ebp
	jmp *%ebp
.L009ej7:
	movb 6(%esi), %dh
	sall $8, %edx
.L010ej6:
	movb 5(%esi), %dh
.L011ej5:
	movb 4(%esi), %dl
.L012ej4:
	movl (%esi), %ecx
	jmp .L013ejend
.L014ej3:
	movb 2(%esi), %ch
	sall $8, %ecx
.L015ej2:
	movb 1(%esi), %ch
.L016ej1:
	movb (%esi), %cl
.L013ejend:
	xorl %ecx, %eax
	xorl %edx, %ebx
	movl %eax, 8(%esp)
	movl %ebx, 12(%esp)
	call RC5_32_encrypt
	movl 8(%esp), %eax
	movl 12(%esp), %ebx
	movl %eax, (%edi)
	movl %ebx, 4(%edi)
	jmp .L007finish
	.align 16
.L004decrypt:
	andl $4294967288, %ebp
	movl 16(%esp), %eax
	movl 20(%esp), %ebx
	jz .L017decrypt_finish
.L018decrypt_loop:
	# decrypt block, XOR with saved previous ciphertext (the chain),
	# then save the current ciphertext as the next chain value
	movl (%esi), %eax
	movl 4(%esi), %ebx
	movl %eax, 8(%esp)
	movl %ebx, 12(%esp)
	call RC5_32_decrypt
	movl 8(%esp), %eax
	movl 12(%esp), %ebx
	movl 16(%esp), %ecx
	movl 20(%esp), %edx
	xorl %eax, %ecx
	xorl %ebx, %edx
	movl (%esi), %eax
	movl 4(%esi), %ebx
	movl %ecx, (%edi)
	movl %edx, 4(%edi)
	movl %eax, 16(%esp)
	movl %ebx, 20(%esp)
	addl $8, %esi
	addl $8, %edi
	subl $8, %ebp
	jnz .L018decrypt_loop
.L017decrypt_finish:
	movl 52(%esp), %ebp
	andl $7, %ebp
	jz .L007finish
	movl (%esi), %eax
	movl 4(%esi), %ebx
	movl %eax, 8(%esp)
	movl %ebx, 12(%esp)
	call RC5_32_decrypt
	movl 8(%esp), %eax
	movl 12(%esp), %ebx
	movl 16(%esp), %ecx
	movl 20(%esp), %edx
	xorl %eax, %ecx
	xorl %ebx, %edx
	movl (%esi), %eax
	movl 4(%esi), %ebx
# NOTE(review): unlike the encrypt tail, there is no indirect jump
# through .L027cbc_dec_jmp_table here -- control always falls straight
# into .L019dj7, so the table below and labels .L024dj3/.L025dj2/
# .L026dj1 are unreachable dead code, and the tail always writes 7
# output bytes regardless of the leftover count.  Also note the dead
# dj2/dj1 stores go through %esi (input ptr), not %edi.  This matches
# the upstream cbc.pl generator output; do not "fix" by hand --
# regenerate if upstream changes.
.L019dj7:
	rorl $16, %edx
	movb %dl, 6(%edi)
	shrl $16, %edx
.L020dj6:
	movb %dh, 5(%edi)
.L021dj5:
	movb %dl, 4(%edi)
.L022dj4:
	movl %ecx, (%edi)
	jmp .L023djend
.L024dj3:
	rorl $16, %ecx
	movb %cl, 2(%edi)
	sall $16, %ecx
.L025dj2:
	movb %ch, 1(%esi)
.L026dj1:
	movb %cl, (%esi)
.L023djend:
	jmp .L007finish
	.align 16
.L007finish:
	# store the final chain value (%eax:%ebx) back into the caller's
	# IV so the next call continues the CBC chain
	movl 60(%esp), %ecx
	addl $24, %esp
	movl %eax, (%ecx)
	movl %ebx, 4(%ecx)
	popl %edi
	popl %esi
	popl %ebx
	popl %ebp
	ret
	.align 16
.L008cbc_enc_jmp_table:
	.long 0
	.long .L016ej1
	.long .L015ej2
	.long .L014ej3
	.long .L012ej4
	.long .L011ej5
	.long .L010ej6
	.long .L009ej7
	.align 16
# NOTE(review): unreferenced -- see the comment above .L019dj7.
.L027cbc_dec_jmp_table:
	.long 0
	.long .L026dj1
	.long .L025dj2
	.long .L024dj3
	.long .L022dj4
	.long .L021dj5
	.long .L020dj6
	.long .L019dj7
.L_RC5_32_cbc_encrypt_end:
	.size RC5_32_cbc_encrypt,.L_RC5_32_cbc_encrypt_end-RC5_32_cbc_encrypt
	.ident "desasm.pl"

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff