1.1 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
1.2 +++ b/os/ossrv/genericopenlibs/liboil/src/i386/md5_i386.c Fri Jun 15 03:10:57 2012 +0200
1.3 @@ -0,0 +1,553 @@
1.4 +/*
1.5 + * LIBOIL - Library of Optimized Inner Loops
1.6 + * Copyright (c) 2004 David A. Schleef <ds@schleef.org>
1.7 + * All rights reserved.
1.8 + *
1.9 + * Redistribution and use in source and binary forms, with or without
1.10 + * modification, are permitted provided that the following conditions
1.11 + * are met:
1.12 + * 1. Redistributions of source code must retain the above copyright
1.13 + * notice, this list of conditions and the following disclaimer.
1.14 + * 2. Redistributions in binary form must reproduce the above copyright
1.15 + * notice, this list of conditions and the following disclaimer in the
1.16 + * documentation and/or other materials provided with the distribution.
1.17 + *
1.18 + * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
1.19 + * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
1.20 + * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
1.21 + * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
1.22 + * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
1.23 + * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
1.24 + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
1.25 + * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
1.26 + * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
1.27 + * IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
1.28 + * POSSIBILITY OF SUCH DAMAGE.
1.29 + */
1.30 +//Portions Copyright (c) 2008-2009 Nokia Corporation and/or its subsidiary(-ies). All rights reserved.
1.31 +
1.32 +#ifdef HAVE_CONFIG_H
1.33 +#include "config.h"
1.34 +#endif
1.35 +
1.36 +#include <liboil/liboilfunction.h>
1.37 +#include <liboil/liboilclasses.h>
1.38 +
1.39 +
1.40 +#define F1(x, y, z) (z ^ (x & (y ^ z)))
1.41 +#define F2(x, y, z) F1(z, x, y)
1.42 +#define F3(x, y, z) (x ^ y ^ z)
1.43 +#define F4(x, y, z) (y ^ (x | ~z))
1.44 +
1.45 +#define MD5STEP(f,w,x,y,z,in,s) \
1.46 + (w += f(x,y,z) + in, w = (w<<s | w>>(32-s)) + x)
1.47 +
1.48 +
     + /*
     +  * md5_asm1: one MD5 block transform (64 rounds, RFC 1321) in i386
     +  * inline assembly.
     +  *   state: 4 x uint32_t chaining values A,B,C,D, updated in place.
     +  *   src:   16 x uint32_t message words for this 64-byte block
     +  *          (presumably little-endian, as MD5 requires -- not checked here).
     +  * Compiled to an empty body on Symbian emulator builds
     +  * (__WINSCW__/__WINS__), where this GCC-style asm is unavailable.
     +  */
1.49 +static void
1.50 +md5_asm1(uint32_t *state, uint32_t *src)
1.51 +{
1.52 +#if !defined(__WINSCW__) && !defined(__WINS__)
     + /* Manual spill area: %ebp (frame pointer) and %ebx (PIC register)
     +  * cannot be named in the clobber list, so the asm saves them into
     +  * tmp.ebp/tmp.ebx itself and restores them before exiting.  The asm
     +  * receives &tmp in %eax ("a" constraint below). */
1.53 + struct {
1.54 + void *state;
1.55 + void *src;
1.56 + void *ebp;
1.57 + void *ebx;
1.58 + }tmp;
1.59 +
1.60 + tmp.state = state;
1.61 + tmp.src = src;
1.62 +
     + /* Prologue: stash ebp/ebx, keep &tmp in %ebp, load A,B,C,D into
     +  * eax,ebx,ecx,edx and point %esi at the message words (tmp.src). */
1.63 + __asm__ __volatile__ (
1.64 + " mov %%ebp, 0x8(%%eax)\n"
1.65 + " mov %%ebx, 0xc(%%eax)\n"
1.66 + " mov %%eax, %%ebp\n"
1.67 + " mov (%%ebp), %%esi\n"
1.68 + " mov (%%esi), %%eax\n"
1.69 + " mov 0x4(%%esi), %%ebx\n"
1.70 + " mov 0x8(%%esi), %%ecx\n"
1.71 + " mov 0xc(%%esi), %%edx\n"
1.72 + " mov 0x4(%%ebp), %%esi\n"
1.73 +
     + /* Round 1 step: r1 += F(r2,r3,r4) + src[offset] + constant, rotated
     +  * left by 'rotate', then r1 += r2.  F(x,y,z) = z ^ (x & (y ^ z))
     +  * (the F1 macro above), computed in %edi. */
1.74 +#define STEP1(r1,r2,r3,r4,offset,constant,rotate) \
1.75 + " mov %%e" #r4 "x, %%edi\n" \
1.76 + " xor %%e" #r3 "x, %%edi\n" \
1.77 + " and %%e" #r2 "x, %%edi\n" \
1.78 + " xor %%e" #r4 "x, %%edi\n" \
1.79 + " add %%edi, %%e" #r1 "x\n" \
1.80 + " add (" #offset "* 4)(%%esi), %%e" #r1 "x\n" \
1.81 + " add $" #constant ", %%e" #r1 "x\n" \
1.82 + " rol $" #rotate ", %%e" #r1 "x\n" \
1.83 + " add %%e" #r2 "x, %%e" #r1 "x\n"
1.84 +
1.85 + STEP1 (a, b, c, d, 0, 0xd76aa478, 7)
1.86 + STEP1 (d, a, b, c, 1, 0xe8c7b756, 12)
1.87 + STEP1 (c, d, a, b, 2, 0x242070db, 17)
1.88 + STEP1 (b, c, d, a, 3, 0xc1bdceee, 22)
1.89 + STEP1 (a, b, c, d, 4, 0xf57c0faf, 7)
1.90 + STEP1 (d, a, b, c, 5, 0x4787c62a, 12)
1.91 + STEP1 (c, d, a, b, 6, 0xa8304613, 17)
1.92 + STEP1 (b, c, d, a, 7, 0xfd469501, 22)
1.93 + STEP1 (a, b, c, d, 8, 0x698098d8, 7)
1.94 + STEP1 (d, a, b, c, 9, 0x8b44f7af, 12)
1.95 + STEP1 (c, d, a, b, 10, 0xffff5bb1, 17)
1.96 + STEP1 (b, c, d, a, 11, 0x895cd7be, 22)
1.97 + STEP1 (a, b, c, d, 12, 0x6b901122, 7)
1.98 + STEP1 (d, a, b, c, 13, 0xfd987193, 12)
1.99 + STEP1 (c, d, a, b, 14, 0xa679438e, 17)
1.100 + STEP1 (b, c, d, a, 15, 0x49b40821, 22)
1.101 +
     + /* Round 2 step: G(x,y,z) = F(z,x,y) (the F2 macro above); same
     +  * accumulate/rotate pattern as STEP1 but with the round-2 word order. */
1.102 +#define STEP2(r1,r2,r3,r4,offset,constant,rotate) \
1.103 + " mov %%e" #r3 "x, %%edi\n" \
1.104 + " xor %%e" #r2 "x, %%edi\n" \
1.105 + " and %%e" #r4 "x, %%edi\n" \
1.106 + " xor %%e" #r3 "x, %%edi\n" \
1.107 + " add %%edi, %%e" #r1 "x\n" \
1.108 + " add (" #offset "*4)(%%esi), %%e" #r1 "x\n" \
1.109 + " add $" #constant ", %%e" #r1 "x\n" \
1.110 + " rol $" #rotate ", %%e" #r1 "x\n" \
1.111 + " add %%e" #r2 "x, %%e" #r1 "x\n"
1.112 +
1.113 + STEP2(a, b, c, d, 1, 0xf61e2562, 5)
1.114 + STEP2(d, a, b, c, 6, 0xc040b340, 9)
1.115 + STEP2(c, d, a, b, 11, 0x265e5a51, 14)
1.116 + STEP2(b, c, d, a, 0, 0xe9b6c7aa, 20)
1.117 + STEP2(a, b, c, d, 5, 0xd62f105d, 5)
1.118 + STEP2(d, a, b, c, 10, 0x02441453, 9)
1.119 + STEP2(c, d, a, b, 15, 0xd8a1e681, 14)
1.120 + STEP2(b, c, d, a, 4, 0xe7d3fbc8, 20)
1.121 + STEP2(a, b, c, d, 9, 0x21e1cde6, 5)
1.122 + STEP2(d, a, b, c, 14, 0xc33707d6, 9)
1.123 + STEP2(c, d, a, b, 3, 0xf4d50d87, 14)
1.124 + STEP2(b, c, d, a, 8, 0x455a14ed, 20)
1.125 + STEP2(a, b, c, d, 13, 0xa9e3e905, 5)
1.126 + STEP2(d, a, b, c, 2, 0xfcefa3f8, 9)
1.127 + STEP2(c, d, a, b, 7, 0x676f02d9, 14)
1.128 + STEP2(b, c, d, a, 12, 0x8d2a4c8a, 20)
1.129 +
     + /* Round 3 step: H(x,y,z) = x ^ y ^ z (the F3 macro above). */
1.130 +#define STEP3(r1,r2,r3,r4,offset,constant,rotate) \
1.131 + " mov %%e" #r4 "x, %%edi\n" \
1.132 + " xor %%e" #r3 "x, %%edi\n" \
1.133 + " xor %%e" #r2 "x, %%edi\n" \
1.134 + " add %%edi, %%e" #r1 "x\n" \
1.135 + " add (" #offset "*4)(%%esi), %%e" #r1 "x\n" \
1.136 + " add $" #constant ", %%e" #r1 "x\n" \
1.137 + " rol $" #rotate ", %%e" #r1 "x\n" \
1.138 + " add %%e" #r2 "x, %%e" #r1 "x\n"
1.139 +
1.140 + STEP3 (a, b, c, d, 5, 0xfffa3942, 4)
1.141 + STEP3 (d, a, b, c, 8, 0x8771f681, 11)
1.142 + STEP3 (c, d, a, b, 11, 0x6d9d6122, 16)
1.143 + STEP3 (b, c, d, a, 14, 0xfde5380c, 23)
1.144 + STEP3 (a, b, c, d, 1, 0xa4beea44, 4)
1.145 + STEP3 (d, a, b, c, 4, 0x4bdecfa9, 11)
1.146 + STEP3 (c, d, a, b, 7, 0xf6bb4b60, 16)
1.147 + STEP3 (b, c, d, a, 10, 0xbebfbc70, 23)
1.148 + STEP3 (a, b, c, d, 13, 0x289b7ec6, 4)
1.149 + STEP3 (d, a, b, c, 0, 0xeaa127fa, 11)
1.150 + STEP3 (c, d, a, b, 3, 0xd4ef3085, 16)
1.151 + STEP3 (b, c, d, a, 6, 0x04881d05, 23)
1.152 + STEP3 (a, b, c, d, 9, 0xd9d4d039, 4)
1.153 + STEP3 (d, a, b, c, 12, 0xe6db99e5, 11)
1.154 + STEP3 (c, d, a, b, 15, 0x1fa27cf8, 16)
1.155 + STEP3 (b, c, d, a, 2, 0xc4ac5665, 23)
1.156 +
     + /* Round 4 step: I(x,y,z) = y ^ (x | ~z) (the F4 macro above). */
1.157 +#define STEP4(r1,r2,r3,r4,offset,constant,rotate) \
1.158 + " mov %%e" #r4 "x, %%edi\n" \
1.159 + " not %%edi\n" \
1.160 + " or %%e" #r2 "x, %%edi\n" \
1.161 + " xor %%e" #r3 "x, %%edi\n" \
1.162 + " add %%edi, %%e" #r1 "x\n" \
1.163 + " add (" #offset "*4)(%%esi), %%e" #r1 "x\n" \
1.164 + " add $" #constant ", %%e" #r1 "x\n" \
1.165 + " rol $" #rotate ", %%e" #r1 "x\n" \
1.166 + " add %%e" #r2 "x, %%e" #r1 "x\n"
1.167 +
1.168 + STEP4 (a, b, c, d, 0, 0xf4292244, 6)
1.169 + STEP4 (d, a, b, c, 7, 0x432aff97, 10)
1.170 + STEP4 (c, d, a, b, 14, 0xab9423a7, 15)
1.171 + STEP4 (b, c, d, a, 5, 0xfc93a039, 21)
1.172 + STEP4 (a, b, c, d, 12, 0x655b59c3, 6)
1.173 + STEP4 (d, a, b, c, 3, 0x8f0ccc92, 10)
1.174 + STEP4 (c, d, a, b, 10, 0xffeff47d, 15)
1.175 + STEP4 (b, c, d, a, 1, 0x85845dd1, 21)
1.176 + STEP4 (a, b, c, d, 8, 0x6fa87e4f, 6)
1.177 + STEP4 (d, a, b, c, 15, 0xfe2ce6e0, 10)
1.178 + STEP4 (c, d, a, b, 6, 0xa3014314, 15)
1.179 + STEP4 (b, c, d, a, 13, 0x4e0811a1, 21)
1.180 + STEP4 (a, b, c, d, 4, 0xf7537e82, 6)
1.181 + STEP4 (d, a, b, c, 11, 0xbd3af235, 10)
1.182 + STEP4 (c, d, a, b, 2, 0x2ad7d2bb, 15)
1.183 + STEP4 (b, c, d, a, 9, 0xeb86d391, 21)
1.184 +
     + /* Epilogue: fold the working values back into state[0..3] and restore
     +  * the hand-saved %ebx and %ebp from tmp. */
1.185 + " mov (%%ebp), %%edi\n"
1.186 + " add %%eax, 0x0(%%edi)\n"
1.187 + " add %%ebx, 0x4(%%edi)\n"
1.188 + " add %%ecx, 0x8(%%edi)\n"
1.189 + " add %%edx, 0xc(%%edi)\n"
1.190 + " mov 0xc(%%ebp), %%ebx\n"
1.191 + " mov 0x8(%%ebp), %%ebp\n"
1.192 + :
1.193 + : "a" (&tmp)
1.194 + : "esi", "ecx", "edx", "edi", "memory");
1.195 +#endif
1.196 +}
1.197 +#undef STEP1
1.198 +#undef STEP2
1.199 +#undef STEP3
1.200 +#undef STEP4
     + /* NOTE(review): registered with OIL_DEFINE_IMPL, whereas md5_asm2/3 use
     +  * OIL_DEFINE_IMPL_ASM -- confirm whether the non-ASM registration here
     +  * is intentional. */
1.201 +OIL_DEFINE_IMPL (md5_asm1, md5);
1.202 +
     + /*
     +  * md5_asm2: same MD5 block transform as md5_asm1, alternative
     +  * instruction schedule: each step loads the message word into %edi and
     +  * adds the round constant there before accumulating into the target
     +  * register (md5_asm1 adds both directly to the target).
     +  *   state: 4 x uint32_t chaining values A,B,C,D, updated in place.
     +  *   src:   16 x uint32_t message words for this 64-byte block.
     +  * Compiled to an empty body on Symbian emulator builds
     +  * (__WINSCW__/__WINS__).
     +  */
1.203 +static void
1.204 +md5_asm2(uint32_t *state, uint32_t *src)
1.205 +{
1.206 +#if !defined(__WINSCW__) && !defined(__WINS__)
     + /* Manual spill area for %ebp/%ebx, addressed via %eax -- see md5_asm1.
     +  * Plain "asm" here is still implicitly volatile because the statement
     +  * has no output operands. */
1.207 + struct {
1.208 + void *state;
1.209 + void *src;
1.210 + void *ebp;
1.211 + void *ebx;
1.212 + }tmp;
1.213 +
1.214 + tmp.state = state;
1.215 + tmp.src = src;
1.216 +
     + /* Prologue: stash ebp/ebx in tmp, keep &tmp in %ebp, load A,B,C,D into
     +  * eax,ebx,ecx,edx, point %esi at the message words. */
1.217 + asm (
1.218 + " mov %%ebp, 0x8(%%eax)\n"
1.219 + " mov %%ebx, 0xc(%%eax)\n"
1.220 + " mov %%eax, %%ebp\n"
1.221 + " mov (%%ebp), %%esi\n"
1.222 + " mov (%%esi), %%eax\n"
1.223 + " mov 0x4(%%esi), %%ebx\n"
1.224 + " mov 0x8(%%esi), %%ecx\n"
1.225 + " mov 0xc(%%esi), %%edx\n"
1.226 + " mov 0x4(%%ebp), %%esi\n"
1.227 +
     + /* Round 1 step: F(x,y,z) = z ^ (x & (y ^ z)); word+constant staged
     +  * through %edi. */
1.228 +#define STEP1(r1,r2,r3,r4,offset,constant,rotate) \
1.229 + " mov %%e" #r4 "x, %%edi\n" \
1.230 + " xor %%e" #r3 "x, %%edi\n" \
1.231 + " and %%e" #r2 "x, %%edi\n" \
1.232 + " xor %%e" #r4 "x, %%edi\n" \
1.233 + " add %%edi, %%e" #r1 "x\n" \
1.234 + " mov (" #offset "* 4)(%%esi), %%edi\n" \
1.235 + " add $" #constant ", %%edi\n" \
1.236 + " add %%edi, %%e" #r1 "x\n" \
1.237 + " rol $" #rotate ", %%e" #r1 "x\n" \
1.238 + " add %%e" #r2 "x, %%e" #r1 "x\n"
1.239 +
1.240 + STEP1 (a, b, c, d, 0, 0xd76aa478, 7)
1.241 + STEP1 (d, a, b, c, 1, 0xe8c7b756, 12)
1.242 + STEP1 (c, d, a, b, 2, 0x242070db, 17)
1.243 + STEP1 (b, c, d, a, 3, 0xc1bdceee, 22)
1.244 + STEP1 (a, b, c, d, 4, 0xf57c0faf, 7)
1.245 + STEP1 (d, a, b, c, 5, 0x4787c62a, 12)
1.246 + STEP1 (c, d, a, b, 6, 0xa8304613, 17)
1.247 + STEP1 (b, c, d, a, 7, 0xfd469501, 22)
1.248 + STEP1 (a, b, c, d, 8, 0x698098d8, 7)
1.249 + STEP1 (d, a, b, c, 9, 0x8b44f7af, 12)
1.250 + STEP1 (c, d, a, b, 10, 0xffff5bb1, 17)
1.251 + STEP1 (b, c, d, a, 11, 0x895cd7be, 22)
1.252 + STEP1 (a, b, c, d, 12, 0x6b901122, 7)
1.253 + STEP1 (d, a, b, c, 13, 0xfd987193, 12)
1.254 + STEP1 (c, d, a, b, 14, 0xa679438e, 17)
1.255 + STEP1 (b, c, d, a, 15, 0x49b40821, 22)
1.256 +
     + /* Round 2 step: G(x,y,z) = F(z,x,y). */
1.257 +#define STEP2(r1,r2,r3,r4,offset,constant,rotate) \
1.258 + " mov %%e" #r3 "x, %%edi\n" \
1.259 + " xor %%e" #r2 "x, %%edi\n" \
1.260 + " and %%e" #r4 "x, %%edi\n" \
1.261 + " xor %%e" #r3 "x, %%edi\n" \
1.262 + " add %%edi, %%e" #r1 "x\n" \
1.263 + " mov (" #offset "* 4)(%%esi), %%edi\n" \
1.264 + " add $" #constant ", %%edi\n" \
1.265 + " add %%edi, %%e" #r1 "x\n" \
1.266 + " rol $" #rotate ", %%e" #r1 "x\n" \
1.267 + " add %%e" #r2 "x, %%e" #r1 "x\n"
1.268 +
1.269 + STEP2(a, b, c, d, 1, 0xf61e2562, 5)
1.270 + STEP2(d, a, b, c, 6, 0xc040b340, 9)
1.271 + STEP2(c, d, a, b, 11, 0x265e5a51, 14)
1.272 + STEP2(b, c, d, a, 0, 0xe9b6c7aa, 20)
1.273 + STEP2(a, b, c, d, 5, 0xd62f105d, 5)
1.274 + STEP2(d, a, b, c, 10, 0x02441453, 9)
1.275 + STEP2(c, d, a, b, 15, 0xd8a1e681, 14)
1.276 + STEP2(b, c, d, a, 4, 0xe7d3fbc8, 20)
1.277 + STEP2(a, b, c, d, 9, 0x21e1cde6, 5)
1.278 + STEP2(d, a, b, c, 14, 0xc33707d6, 9)
1.279 + STEP2(c, d, a, b, 3, 0xf4d50d87, 14)
1.280 + STEP2(b, c, d, a, 8, 0x455a14ed, 20)
1.281 + STEP2(a, b, c, d, 13, 0xa9e3e905, 5)
1.282 + STEP2(d, a, b, c, 2, 0xfcefa3f8, 9)
1.283 + STEP2(c, d, a, b, 7, 0x676f02d9, 14)
1.284 + STEP2(b, c, d, a, 12, 0x8d2a4c8a, 20)
1.285 +
     + /* Round 3 step: H(x,y,z) = x ^ y ^ z. */
1.286 +#define STEP3(r1,r2,r3,r4,offset,constant,rotate) \
1.287 + " mov %%e" #r4 "x, %%edi\n" \
1.288 + " xor %%e" #r3 "x, %%edi\n" \
1.289 + " xor %%e" #r2 "x, %%edi\n" \
1.290 + " add %%edi, %%e" #r1 "x\n" \
1.291 + " mov (" #offset "* 4)(%%esi), %%edi\n" \
1.292 + " add $" #constant ", %%edi\n" \
1.293 + " add %%edi, %%e" #r1 "x\n" \
1.294 + " rol $" #rotate ", %%e" #r1 "x\n" \
1.295 + " add %%e" #r2 "x, %%e" #r1 "x\n"
1.296 +
1.297 + STEP3 (a, b, c, d, 5, 0xfffa3942, 4)
1.298 + STEP3 (d, a, b, c, 8, 0x8771f681, 11)
1.299 + STEP3 (c, d, a, b, 11, 0x6d9d6122, 16)
1.300 + STEP3 (b, c, d, a, 14, 0xfde5380c, 23)
1.301 + STEP3 (a, b, c, d, 1, 0xa4beea44, 4)
1.302 + STEP3 (d, a, b, c, 4, 0x4bdecfa9, 11)
1.303 + STEP3 (c, d, a, b, 7, 0xf6bb4b60, 16)
1.304 + STEP3 (b, c, d, a, 10, 0xbebfbc70, 23)
1.305 + STEP3 (a, b, c, d, 13, 0x289b7ec6, 4)
1.306 + STEP3 (d, a, b, c, 0, 0xeaa127fa, 11)
1.307 + STEP3 (c, d, a, b, 3, 0xd4ef3085, 16)
1.308 + STEP3 (b, c, d, a, 6, 0x04881d05, 23)
1.309 + STEP3 (a, b, c, d, 9, 0xd9d4d039, 4)
1.310 + STEP3 (d, a, b, c, 12, 0xe6db99e5, 11)
1.311 + STEP3 (c, d, a, b, 15, 0x1fa27cf8, 16)
1.312 + STEP3 (b, c, d, a, 2, 0xc4ac5665, 23)
1.313 +
     + /* Round 4 step: I(x,y,z) = y ^ (x | ~z). */
1.314 +#define STEP4(r1,r2,r3,r4,offset,constant,rotate) \
1.315 + " mov %%e" #r4 "x, %%edi\n" \
1.316 + " not %%edi\n" \
1.317 + " or %%e" #r2 "x, %%edi\n" \
1.318 + " xor %%e" #r3 "x, %%edi\n" \
1.319 + " add %%edi, %%e" #r1 "x\n" \
1.320 + " mov (" #offset "* 4)(%%esi), %%edi\n" \
1.321 + " add $" #constant ", %%edi\n" \
1.322 + " add %%edi, %%e" #r1 "x\n" \
1.323 + " rol $" #rotate ", %%e" #r1 "x\n" \
1.324 + " add %%e" #r2 "x, %%e" #r1 "x\n"
1.325 +
1.326 + STEP4 (a, b, c, d, 0, 0xf4292244, 6)
1.327 + STEP4 (d, a, b, c, 7, 0x432aff97, 10)
1.328 + STEP4 (c, d, a, b, 14, 0xab9423a7, 15)
1.329 + STEP4 (b, c, d, a, 5, 0xfc93a039, 21)
1.330 + STEP4 (a, b, c, d, 12, 0x655b59c3, 6)
1.331 + STEP4 (d, a, b, c, 3, 0x8f0ccc92, 10)
1.332 + STEP4 (c, d, a, b, 10, 0xffeff47d, 15)
1.333 + STEP4 (b, c, d, a, 1, 0x85845dd1, 21)
1.334 + STEP4 (a, b, c, d, 8, 0x6fa87e4f, 6)
1.335 + STEP4 (d, a, b, c, 15, 0xfe2ce6e0, 10)
1.336 + STEP4 (c, d, a, b, 6, 0xa3014314, 15)
1.337 + STEP4 (b, c, d, a, 13, 0x4e0811a1, 21)
1.338 + STEP4 (a, b, c, d, 4, 0xf7537e82, 6)
1.339 + STEP4 (d, a, b, c, 11, 0xbd3af235, 10)
1.340 + STEP4 (c, d, a, b, 2, 0x2ad7d2bb, 15)
1.341 + STEP4 (b, c, d, a, 9, 0xeb86d391, 21)
1.342 +
     + /* Epilogue: fold working values back into state[0..3], restore the
     +  * hand-saved %ebx and %ebp. */
1.343 + " mov (%%ebp), %%edi\n"
1.344 + " add %%eax, 0x0(%%edi)\n"
1.345 + " add %%ebx, 0x4(%%edi)\n"
1.346 + " add %%ecx, 0x8(%%edi)\n"
1.347 + " add %%edx, 0xc(%%edi)\n"
1.348 + " mov 0xc(%%ebp), %%ebx\n"
1.349 + " mov 0x8(%%ebp), %%ebp\n"
1.350 + :
1.351 + : "a" (&tmp)
1.352 + : "esi", "ecx", "edx", "edi", "memory");
1.353 +#undef STEP1
1.354 +#undef STEP2
1.355 +#undef STEP3
1.356 +#undef STEP4
1.357 +#endif
1.358 +}
1.359 +OIL_DEFINE_IMPL_ASM (md5_asm2, md5);
1.360 +
1.361 +
1.362 +
1.363 +#ifdef ENABLE_BROKEN_IMPLS
1.364 +/* FIXME this is way too clever. Using %esp as a general purpose
1.365 + * register? NOT a brilliant idea. */
     + /*
     +  * md5_asm3: MD5 block transform variant that frees a second scratch
     +  * register by hijacking %esp for rounds 1 and 2 (hence the FIXME above
     +  * and the ENABLE_BROKEN_IMPLS gate: any signal/interrupt delivered
     +  * while %esp does not point at the stack can corrupt memory).
     +  *   state: 4 x uint32_t chaining values A,B,C,D, updated in place.
     +  *   src:   16 x uint32_t message words for this 64-byte block.
     +  * NOTE(review): unlike md5_asm1/md5_asm2 the clobber list below omits
     +  * "memory" even though the asm writes through state -- confirm this is
     +  * safe before ever enabling this implementation.
     +  */
1.366 +static void
1.367 +md5_asm3(uint32_t *state, uint32_t *src)
1.368 +{
     + /* Manual spill area for %ebp, %esp and %ebx, addressed via %eax. */
1.369 + struct {
1.370 + void *state;
1.371 + void *src;
1.372 + void *ebp;
1.373 + void *esp;
1.374 + void *ebx;
1.375 + }tmp;
1.376 +
1.377 + tmp.state = state;
1.378 + tmp.src = src;
1.379 +
     + /* Prologue: stash ebp/esp/ebx in tmp, keep &tmp in %ebp, load A,B,C,D
     +  * into eax,ebx,ecx,edx, point %esi at the message words. */
1.380 + asm (
1.381 + " mov %%ebp, 0x8(%%eax)\n"
1.382 + " mov %%esp, 0xc(%%eax)\n"
1.383 + " mov %%ebx, 0x10(%%eax)\n"
1.384 + " mov %%eax, %%ebp\n"
1.385 + " mov (%%ebp), %%esi\n"
1.386 + " mov (%%esi), %%eax\n"
1.387 + " mov 0x4(%%esi), %%ebx\n"
1.388 + " mov 0x8(%%esi), %%ecx\n"
1.389 + " mov 0xc(%%esi), %%edx\n"
1.390 + " mov 0x4(%%ebp), %%esi\n"
1.391 +
     + /* Round 1 step using the textbook select form
     +  * F(x,y,z) = (x & y) | (~x & z), computed across %edi and %esp. */
1.392 +#define STEP1(r1,r2,r3,r4,offset,constant,rotate) \
1.393 + " mov %%e" #r2 "x, %%edi\n" \
1.394 + " mov %%e" #r2 "x, %%esp\n" \
1.395 + " not %%esp\n" \
1.396 + " and %%e" #r3 "x, %%edi\n" \
1.397 + " and %%e" #r4 "x, %%esp\n" \
1.398 + " or %%esp, %%edi\n" \
1.399 + " add %%edi, %%e" #r1 "x\n" \
1.400 + " mov (" #offset "* 4)(%%esi), %%edi\n" \
1.401 + " add $" #constant ", %%edi\n" \
1.402 + " add %%edi, %%e" #r1 "x\n" \
1.403 + " rol $" #rotate ", %%e" #r1 "x\n" \
1.404 + " add %%e" #r2 "x, %%e" #r1 "x\n"
1.405 +
1.406 + STEP1 (a, b, c, d, 0, 0xd76aa478, 7)
1.407 + STEP1 (d, a, b, c, 1, 0xe8c7b756, 12)
1.408 + STEP1 (c, d, a, b, 2, 0x242070db, 17)
1.409 + STEP1 (b, c, d, a, 3, 0xc1bdceee, 22)
1.410 + STEP1 (a, b, c, d, 4, 0xf57c0faf, 7)
1.411 + STEP1 (d, a, b, c, 5, 0x4787c62a, 12)
1.412 + STEP1 (c, d, a, b, 6, 0xa8304613, 17)
1.413 + STEP1 (b, c, d, a, 7, 0xfd469501, 22)
1.414 + STEP1 (a, b, c, d, 8, 0x698098d8, 7)
1.415 + STEP1 (d, a, b, c, 9, 0x8b44f7af, 12)
1.416 + STEP1 (c, d, a, b, 10, 0xffff5bb1, 17)
1.417 + STEP1 (b, c, d, a, 11, 0x895cd7be, 22)
1.418 + STEP1 (a, b, c, d, 12, 0x6b901122, 7)
1.419 + STEP1 (d, a, b, c, 13, 0xfd987193, 12)
1.420 + STEP1 (c, d, a, b, 14, 0xa679438e, 17)
1.421 + STEP1 (b, c, d, a, 15, 0x49b40821, 22)
1.422 +
     + /* Round 2 step: G(x,y,z) = (x & z) | (y & ~z), again using %esp as the
     +  * second scratch register. */
1.423 +#define STEP2(r1,r2,r3,r4,offset,constant,rotate) \
1.424 + " mov %%e" #r4 "x, %%edi\n" \
1.425 + " mov %%e" #r4 "x, %%esp\n" \
1.426 + " not %%esp\n" \
1.427 + " and %%e" #r2 "x, %%edi\n" \
1.428 + " and %%e" #r3 "x, %%esp\n" \
1.429 + " or %%esp, %%edi\n" \
1.430 + " add %%edi, %%e" #r1 "x\n" \
1.431 + " mov (" #offset "* 4)(%%esi), %%edi\n" \
1.432 + " add $" #constant ", %%edi\n" \
1.433 + " add %%edi, %%e" #r1 "x\n" \
1.434 + " rol $" #rotate ", %%e" #r1 "x\n" \
1.435 + " add %%e" #r2 "x, %%e" #r1 "x\n"
1.436 +
1.437 + STEP2(a, b, c, d, 1, 0xf61e2562, 5)
1.438 + STEP2(d, a, b, c, 6, 0xc040b340, 9)
1.439 + STEP2(c, d, a, b, 11, 0x265e5a51, 14)
1.440 + STEP2(b, c, d, a, 0, 0xe9b6c7aa, 20)
1.441 + STEP2(a, b, c, d, 5, 0xd62f105d, 5)
1.442 + STEP2(d, a, b, c, 10, 0x02441453, 9)
1.443 + STEP2(c, d, a, b, 15, 0xd8a1e681, 14)
1.444 + STEP2(b, c, d, a, 4, 0xe7d3fbc8, 20)
1.445 + STEP2(a, b, c, d, 9, 0x21e1cde6, 5)
1.446 + STEP2(d, a, b, c, 14, 0xc33707d6, 9)
1.447 + STEP2(c, d, a, b, 3, 0xf4d50d87, 14)
1.448 + STEP2(b, c, d, a, 8, 0x455a14ed, 20)
1.449 + STEP2(a, b, c, d, 13, 0xa9e3e905, 5)
1.450 + STEP2(d, a, b, c, 2, 0xfcefa3f8, 9)
1.451 + STEP2(c, d, a, b, 7, 0x676f02d9, 14)
1.452 + STEP2(b, c, d, a, 12, 0x8d2a4c8a, 20)
1.453 +
     + /* Round 3 step: H(x,y,z) = x ^ y ^ z (no %esp needed). */
1.454 +#define STEP3(r1,r2,r3,r4,offset,constant,rotate) \
1.455 + " mov %%e" #r4 "x, %%edi\n" \
1.456 + " xor %%e" #r3 "x, %%edi\n" \
1.457 + " xor %%e" #r2 "x, %%edi\n" \
1.458 + " add %%edi, %%e" #r1 "x\n" \
1.459 + " mov (" #offset "* 4)(%%esi), %%edi\n" \
1.460 + " add $" #constant ", %%edi\n" \
1.461 + " add %%edi, %%e" #r1 "x\n" \
1.462 + " rol $" #rotate ", %%e" #r1 "x\n" \
1.463 + " add %%e" #r2 "x, %%e" #r1 "x\n"
1.464 +
1.465 + STEP3 (a, b, c, d, 5, 0xfffa3942, 4)
1.466 + STEP3 (d, a, b, c, 8, 0x8771f681, 11)
1.467 + STEP3 (c, d, a, b, 11, 0x6d9d6122, 16)
1.468 + STEP3 (b, c, d, a, 14, 0xfde5380c, 23)
1.469 + STEP3 (a, b, c, d, 1, 0xa4beea44, 4)
1.470 + STEP3 (d, a, b, c, 4, 0x4bdecfa9, 11)
1.471 + STEP3 (c, d, a, b, 7, 0xf6bb4b60, 16)
1.472 + STEP3 (b, c, d, a, 10, 0xbebfbc70, 23)
1.473 + STEP3 (a, b, c, d, 13, 0x289b7ec6, 4)
1.474 + STEP3 (d, a, b, c, 0, 0xeaa127fa, 11)
1.475 + STEP3 (c, d, a, b, 3, 0xd4ef3085, 16)
1.476 + STEP3 (b, c, d, a, 6, 0x04881d05, 23)
1.477 + STEP3 (a, b, c, d, 9, 0xd9d4d039, 4)
1.478 + STEP3 (d, a, b, c, 12, 0xe6db99e5, 11)
1.479 + STEP3 (c, d, a, b, 15, 0x1fa27cf8, 16)
1.480 + STEP3 (b, c, d, a, 2, 0xc4ac5665, 23)
1.481 +
     + /* Round 4 step: I(x,y,z) = y ^ (x | ~z). */
1.482 +#define STEP4(r1,r2,r3,r4,offset,constant,rotate) \
1.483 + " mov %%e" #r4 "x, %%edi\n" \
1.484 + " not %%edi\n" \
1.485 + " or %%e" #r2 "x, %%edi\n" \
1.486 + " xor %%e" #r3 "x, %%edi\n" \
1.487 + " add %%edi, %%e" #r1 "x\n" \
1.488 + " mov (" #offset "* 4)(%%esi), %%edi\n" \
1.489 + " add $" #constant ", %%edi\n" \
1.490 + " add %%edi, %%e" #r1 "x\n" \
1.491 + " rol $" #rotate ", %%e" #r1 "x\n" \
1.492 + " add %%e" #r2 "x, %%e" #r1 "x\n"
1.493 +
1.494 + STEP4 (a, b, c, d, 0, 0xf4292244, 6)
1.495 + STEP4 (d, a, b, c, 7, 0x432aff97, 10)
1.496 + STEP4 (c, d, a, b, 14, 0xab9423a7, 15)
1.497 + STEP4 (b, c, d, a, 5, 0xfc93a039, 21)
1.498 + STEP4 (a, b, c, d, 12, 0x655b59c3, 6)
1.499 + STEP4 (d, a, b, c, 3, 0x8f0ccc92, 10)
1.500 + STEP4 (c, d, a, b, 10, 0xffeff47d, 15)
1.501 + STEP4 (b, c, d, a, 1, 0x85845dd1, 21)
1.502 + STEP4 (a, b, c, d, 8, 0x6fa87e4f, 6)
1.503 + STEP4 (d, a, b, c, 15, 0xfe2ce6e0, 10)
1.504 + STEP4 (c, d, a, b, 6, 0xa3014314, 15)
1.505 + STEP4 (b, c, d, a, 13, 0x4e0811a1, 21)
1.506 + STEP4 (a, b, c, d, 4, 0xf7537e82, 6)
1.507 + STEP4 (d, a, b, c, 11, 0xbd3af235, 10)
1.508 + STEP4 (c, d, a, b, 2, 0x2ad7d2bb, 15)
1.509 + STEP4 (b, c, d, a, 9, 0xeb86d391, 21)
1.510 +
     + /* Epilogue: fold working values back into state[0..3]; restore ebx,
     +  * the real stack pointer, and ebp from tmp. */
1.511 + " mov (%%ebp), %%edi\n"
1.512 + " add %%eax, 0x0(%%edi)\n"
1.513 + " add %%ebx, 0x4(%%edi)\n"
1.514 + " add %%ecx, 0x8(%%edi)\n"
1.515 + " add %%edx, 0xc(%%edi)\n"
1.516 + " mov 0x10(%%ebp), %%ebx\n"
1.517 + " mov 0xc(%%ebp), %%esp\n"
1.518 + " mov 0x8(%%ebp), %%ebp\n"
1.519 + :
1.520 + : "a" (&tmp)
1.521 + : "esi", "ecx", "edx", "edi");
1.522 +#undef STEP1
1.523 +#undef STEP2
1.524 +#undef STEP3
1.525 +#undef STEP4
1.526 +}
1.527 +
1.528 +
1.529 +OIL_DEFINE_IMPL_ASM (md5_asm3, md5);
1.530 +#endif
1.531 +
1.532 +
1.533 +
1.534 +#ifdef __SYMBIAN32__
1.535 +
     + /* Symbian DLL export shim: returns the impl record registered by
     +  * OIL_DEFINE_IMPL (md5_asm1) so the loader can resolve it by name. */
1.536 +OilFunctionImpl* __oil_function_impl_md5_asm1() {
1.537 + return &_oil_function_impl_md5_asm1;
1.538 +}
1.539 +#endif
1.540 +
1.541 +
1.542 +
1.543 +#ifdef __SYMBIAN32__
1.544 +
     + /* Symbian DLL export shim for the md5_asm2 implementation record. */
1.545 +OilFunctionImpl* __oil_function_impl_md5_asm2() {
1.546 + return &_oil_function_impl_md5_asm2;
1.547 +}
1.548 +#endif
1.549 +
     + /* Symbian DLL export shim for md5_asm3.  Guarded on ENABLE_BROKEN_IMPLS
     +  * as well as __SYMBIAN32__: _oil_function_impl_md5_asm3 is only defined
     +  * inside the ENABLE_BROKEN_IMPLS block above, so guarding on
     +  * __SYMBIAN32__ alone referenced an undefined symbol in normal builds. */
1.550 +#if defined(__SYMBIAN32__) && defined(ENABLE_BROKEN_IMPLS)
1.551 +
1.552 +OilFunctionImpl* __oil_function_impl_md5_asm3() {
1.553 + return &_oil_function_impl_md5_asm3;
1.554 +}
1.555 +#endif
1.556 +