1 /* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
4 * This package is an SSL implementation written
5 * by Eric Young (eay@cryptsoft.com).
6 * The implementation was written so as to conform with Netscapes SSL.
8 * This library is free for commercial and non-commercial use as long as
9 * the following conditions are aheared to. The following conditions
10 * apply to all code found in this distribution, be it the RC4, RSA,
11 * lhash, DES, etc., code; not just the SSL code. The SSL documentation
12 * included with this distribution is covered by the same copyright terms
13 * except that the holder is Tim Hudson (tjh@cryptsoft.com).
15 * Copyright remains Eric Young's, and as such any Copyright notices in
16 * the code are not to be removed.
17 * If this package is used in a product, Eric Young should be given attribution
18 * as the author of the parts of the library used.
19 * This can be in the form of a textual message at program startup or
20 * in documentation (online or textual) provided with the package.
22 * Redistribution and use in source and binary forms, with or without
23 * modification, are permitted provided that the following conditions
25 * 1. Redistributions of source code must retain the copyright
26 * notice, this list of conditions and the following disclaimer.
27 * 2. Redistributions in binary form must reproduce the above copyright
28 * notice, this list of conditions and the following disclaimer in the
29 * documentation and/or other materials provided with the distribution.
30 * 3. All advertising materials mentioning features or use of this software
31 * must display the following acknowledgement:
32 * "This product includes cryptographic software written by
33 * Eric Young (eay@cryptsoft.com)"
34 * The word 'cryptographic' can be left out if the rouines from the library
35 * being used are not cryptographic related :-).
36 * 4. If you include any Windows specific code (or a derivative thereof) from
37 * the apps directory (application code) you must include an acknowledgement:
38 * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
40 * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
41 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
42 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
43 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
44 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
45 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
46 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
47 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
48 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
49 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
52 * The licence and distribution terms for any publically available version or
53 * derivative of this code cannot be changed. i.e. this code cannot simply be
54 * copied and put under another distribution licence
55 * [including the GNU Public Licence.]
/*
 * Glue that instantiates the generic md32_common.h hash framework for SHA:
 * word size, byte order, context type and block geometry.  SHA words are
 * big-endian 32-bit quantities (FIPS 180-1).
 */
70 #define SHA_LONG_LOG2 2 /* default to 32 bits */
73 #define DATA_ORDER_IS_BIG_ENDIAN
75 #define HASH_LONG SHA_LONG
76 #define HASH_LONG_LOG2 SHA_LONG_LOG2
77 #define HASH_CTX SHA_CTX
78 #define HASH_CBLOCK SHA_CBLOCK
79 #define HASH_LBLOCK SHA_LBLOCK
/*
 * Serialise the five 32-bit chaining words h0..h4 into the output buffer s,
 * most-significant byte first, via md32_common.h's HOST_l2c (which also
 * advances s).  `ll` is a scratch variable declared by the framework caller.
 * NOTE(review): the closing `} while (0)` of this macro is elided from this
 * chunk -- confirm against the full file.
 */
80 #define HASH_MAKE_STRING(c,s) do { \
82 ll=(c)->h0; HOST_l2c(ll,(s)); \
83 ll=(c)->h1; HOST_l2c(ll,(s)); \
84 ll=(c)->h2; HOST_l2c(ll,(s)); \
85 ll=(c)->h3; HOST_l2c(ll,(s)); \
86 ll=(c)->h4; HOST_l2c(ll,(s)); \
/*
 * SHA-0 flavour: bind the generic HASH_* entry points to the SHA_* public
 * names.  SHA-0's message schedule (Xupdate) is the plain XOR of four words
 * -- it lacks the 1-bit rotation that distinguishes SHA-1.
 */
91 # define HASH_UPDATE SHA_Update
92 # define HASH_TRANSFORM SHA_Transform
93 # define HASH_FINAL SHA_Final
94 # define HASH_INIT SHA_Init
95 # define HASH_BLOCK_HOST_ORDER sha_block_host_order
96 # define HASH_BLOCK_DATA_ORDER sha_block_data_order
97 # define Xupdate(a,ix,ia,ib,ic,id) (ix=(a)=(ia^ib^ic^id))
/* Out-of-line compression functions; bodies are defined further below. */
99 void sha_block_host_order (SHA_CTX *c, const void *p, size_t num);
100 void sha_block_data_order (SHA_CTX *c, const void *p, size_t num);
/*
 * SHA-1 flavour: bind the generic HASH_* entry points to the SHA1_* public
 * names.  SHA-1's Xupdate XORs four schedule words and rotates the result
 * left by one bit (the only algorithmic difference from SHA-0).
 */
104 # define HASH_UPDATE SHA1_Update
105 # define HASH_TRANSFORM SHA1_Transform
106 # define HASH_FINAL SHA1_Final
107 # define HASH_INIT SHA1_Init
108 # define HASH_BLOCK_HOST_ORDER sha1_block_host_order
109 # define HASH_BLOCK_DATA_ORDER sha1_block_data_order
110 # if defined(__MWERKS__) && defined(__MC68K__)
111 /* Metrowerks for Motorola fails otherwise:-( <appro@fy.chalmers.se> */
/* Statement form of Xupdate for the Metrowerks/68K compiler bug above. */
112 # define Xupdate(a,ix,ia,ib,ic,id) do { (a)=(ia^ib^ic^id); \
113 ix=(a)=ROTATE((a),1); \
/* Comma-expression form of the same schedule update (default compilers). */
116 # define Xupdate(a,ix,ia,ib,ic,id) ( (a)=(ia^ib^ic^id), \
117 ix=(a)=ROTATE((a),1) \
/*
 * On x86 (little-endian builds only) and IA-64, substitute hand-written
 * assembler compression routines for the C implementations below and
 * suppress the C definitions via the DONT_IMPLEMENT_* guards.
 */
122 # if defined(__i386) || defined(__i386__) || defined(_M_IX86) || defined(__INTEL__)
123 # if !defined(B_ENDIAN)
124 # define sha1_block_host_order sha1_block_asm_host_order
125 # define DONT_IMPLEMENT_BLOCK_HOST_ORDER
126 # define sha1_block_data_order sha1_block_asm_data_order
127 # define DONT_IMPLEMENT_BLOCK_DATA_ORDER
/* On x86 the asm routine also handles arbitrary alignment of the input. */
128 # define HASH_BLOCK_DATA_ORDER_ALIGNED sha1_block_asm_data_order
130 # elif defined(__ia64) || defined(__ia64__) || defined(_M_IA64)
131 # define sha1_block_host_order sha1_block_asm_host_order
132 # define DONT_IMPLEMENT_BLOCK_HOST_ORDER
133 # define sha1_block_data_order sha1_block_asm_data_order
134 # define DONT_IMPLEMENT_BLOCK_DATA_ORDER
/* Prototypes for the compression routines (C or asm, per the macros above). */
137 void sha1_block_host_order (SHA_CTX *c, const void *p, size_t num);
138 void sha1_block_data_order (SHA_CTX *c, const void *p, size_t num);
/* Fall-through of the SHA_0/SHA_1 #if chain: exactly one must be selected. */
141 # error "Either SHA_0 or SHA_1 must be defined."
/* Pull in the shared Update/Transform/Final machinery configured above. */
144 #include "md32_common.h"
/* FIPS 180-1 initial hash values H0..H4 (identical for SHA-0 and SHA-1). */
146 #define INIT_DATA_h0 0x67452301UL
147 #define INIT_DATA_h1 0xefcdab89UL
148 #define INIT_DATA_h2 0x98badcfeUL
149 #define INIT_DATA_h3 0x10325476UL
150 #define INIT_DATA_h4 0xc3d2e1f0UL
/*
 * HASH_INIT (expands to SHA_Init or SHA1_Init): reset the context by
 * loading the five standard initial chaining values.
 * NOTE(review): the function braces, the Nl/Nh/num counter reset and the
 * `return 1;` are elided from this chunk -- confirm against the full file.
 */
152 int HASH_INIT (SHA_CTX *c)
154 c->h0 = INIT_DATA_h0;
155 c->h1 = INIT_DATA_h1;
156 c->h2 = INIT_DATA_h2;
157 c->h3 = INIT_DATA_h3;
158 c->h4 = INIT_DATA_h4;
/* FIPS 180-1 round constants, one per group of 20 rounds. */
165 #define K_00_19 0x5a827999UL
166 #define K_20_39 0x6ed9eba1UL
167 #define K_40_59 0x8f1bbcdcUL
168 #define K_60_79 0xca62c1d6UL
170 /* As pointed out by Wei Dai <weidai@eskimo.com>, F() below can be
171 * simplified to the code in F_00_19. Wei attributes these optimisations
172 * to Peter Gutmann's SHS code, and he attributes it to Rich Schroeppel.
173 * #define F(x,y,z) (((x) & (y)) | ((~(x)) & (z)))
174 * I've just become aware of another tweak to be made, again from Wei Dai,
175 * in F_40_59, (x&a)|(y&a) -> (x|y)&a
/* Round functions: "choose" (0-19), "parity" (20-39, 60-79), "majority"
 * (40-59), each in its reduced-operation form per the note above. */
177 #define F_00_19(b,c,d) ((((c) ^ (d)) & (b)) ^ (d))
178 #define F_20_39(b,c,d) ((b) ^ (c) ^ (d))
179 #define F_40_59(b,c,d) (((b) & (c)) | (((b)|(c)) & (d)))
180 #define F_60_79(b,c,d) F_20_39(b,c,d)
182 #ifndef OPENSSL_SMALL_FOOTPRINT
/*
 * Fully-unrolled round macros.  Registers are passed in rotated order by the
 * caller, so each macro computes the new `a` into f and avoids explicit
 * register shuffling.  NOTE(review): each macro's trailing statement (the
 * ROTATE of b by 30) is elided from this chunk -- confirm against the full
 * file.
 */
/* Rounds 0-15: message word xi used directly, no schedule expansion. */
184 #define BODY_00_15(i,a,b,c,d,e,f,xi) \
185 (f)=xi+(e)+K_00_19+ROTATE((a),5)+F_00_19((b),(c),(d)); \
/* Rounds 16-19: first expanded words; result stored into xi for later use. */
188 #define BODY_16_19(i,a,b,c,d,e,f,xi,xa,xb,xc,xd) \
189 Xupdate(f,xi,xa,xb,xc,xd); \
190 (f)+=(e)+K_00_19+ROTATE((a),5)+F_00_19((b),(c),(d)); \
/* Rounds 20-31: schedule still mixes original W[] and expanded X() words. */
193 #define BODY_20_31(i,a,b,c,d,e,f,xi,xa,xb,xc,xd) \
194 Xupdate(f,xi,xa,xb,xc,xd); \
195 (f)+=(e)+K_20_39+ROTATE((a),5)+F_20_39((b),(c),(d)); \
/* Rounds 32-39: schedule words are all X(); xa doubles as in/out slot. */
198 #define BODY_32_39(i,a,b,c,d,e,f,xa,xb,xc,xd) \
199 Xupdate(f,xa,xa,xb,xc,xd); \
200 (f)+=(e)+K_20_39+ROTATE((a),5)+F_20_39((b),(c),(d)); \
/* Rounds 40-59: majority round function, K_40_59. */
203 #define BODY_40_59(i,a,b,c,d,e,f,xa,xb,xc,xd) \
204 Xupdate(f,xa,xa,xb,xc,xd); \
205 (f)+=(e)+K_40_59+ROTATE((a),5)+F_40_59((b),(c),(d)); \
/* Rounds 60-79: parity round function, K_60_79. */
208 #define BODY_60_79(i,a,b,c,d,e,f,xa,xb,xc,xd) \
209 Xupdate(f,xa,xa,xb,xc,xd); \
210 (f)=xa+(e)+K_60_79+ROTATE((a),5)+F_60_79((b),(c),(d)); \
218 * Originally X was an array. As it's automatic it's natural
219 * to expect a RISC compiler to accommodate at least part of it in
220 * the register bank, isn't it? Unfortunately not all compilers
221 * "find" this expectation reasonable:-( In order to make such
222 * compilers generate better code I replace X[] with a bunch of
223 * X0, X1, etc. See the function body below...
224 * <appro@fy.chalmers.se>
229 * However! Some compilers (most notably HP C) get overwhelmed by
230 * that many local variables so that we have to have the way to
231 * fall down to the original behavior.
236 #ifndef DONT_IMPLEMENT_BLOCK_HOST_ORDER
/*
 * Fully-unrolled compression for input already in host 32-bit word order:
 * d points at num 16-word blocks.  The five working registers are renamed
 * round-to-round (A,B,C,D,E,T rotate through the argument slots) so no
 * explicit register rotation is ever executed.
 * NOTE(review): the per-block loop header, the A..E loads from c->h0..h4,
 * the X() macro definition (selecting XXn or an X[] array) and the closing
 * braces are elided from this chunk -- confirm against the full file.
 */
237 void HASH_BLOCK_HOST_ORDER (SHA_CTX *c, const void *d, size_t num)
239 const SHA_LONG *W = (const SHA_LONG *)d;
240 register unsigned MD32_REG_T A, B, C, D, E, T;
/* The 16-word circular message schedule, flattened into scalars (see the
 * comment above this function). */
242 unsigned MD32_REG_T XX0, XX1, XX2, XX3, XX4, XX5, XX6, XX7,
243 XX8, XX9, XX10, XX11, XX12, XX13, XX14, XX15;
/* Rounds 0-15: consume the message words W[0..15] directly. */
255 BODY_00_15( 0, A, B, C, D, E, T, W[ 0]);
256 BODY_00_15( 1, T, A, B, C, D, E, W[ 1]);
257 BODY_00_15( 2, E, T, A, B, C, D, W[ 2]);
258 BODY_00_15( 3, D, E, T, A, B, C, W[ 3]);
259 BODY_00_15( 4, C, D, E, T, A, B, W[ 4]);
260 BODY_00_15( 5, B, C, D, E, T, A, W[ 5]);
261 BODY_00_15( 6, A, B, C, D, E, T, W[ 6]);
262 BODY_00_15( 7, T, A, B, C, D, E, W[ 7]);
263 BODY_00_15( 8, E, T, A, B, C, D, W[ 8]);
264 BODY_00_15( 9, D, E, T, A, B, C, W[ 9]);
265 BODY_00_15(10, C, D, E, T, A, B, W[10]);
266 BODY_00_15(11, B, C, D, E, T, A, W[11]);
267 BODY_00_15(12, A, B, C, D, E, T, W[12]);
268 BODY_00_15(13, T, A, B, C, D, E, W[13]);
269 BODY_00_15(14, E, T, A, B, C, D, W[14]);
270 BODY_00_15(15, D, E, T, A, B, C, W[15]);
/* Rounds 16-19: start expanding the schedule into X(0)..X(3). */
272 BODY_16_19(16, C, D, E, T, A, B, X( 0), W[ 0], W[ 2], W[ 8], W[13]);
273 BODY_16_19(17, B, C, D, E, T, A, X( 1), W[ 1], W[ 3], W[ 9], W[14]);
274 BODY_16_19(18, A, B, C, D, E, T, X( 2), W[ 2], W[ 4], W[10], W[15]);
275 BODY_16_19(19, T, A, B, C, D, E, X( 3), W[ 3], W[ 5], W[11], X( 0));
/* Rounds 20-31: schedule inputs transition from W[] to X(). */
277 BODY_20_31(20, E, T, A, B, C, D, X( 4), W[ 4], W[ 6], W[12], X( 1));
278 BODY_20_31(21, D, E, T, A, B, C, X( 5), W[ 5], W[ 7], W[13], X( 2));
279 BODY_20_31(22, C, D, E, T, A, B, X( 6), W[ 6], W[ 8], W[14], X( 3));
280 BODY_20_31(23, B, C, D, E, T, A, X( 7), W[ 7], W[ 9], W[15], X( 4));
281 BODY_20_31(24, A, B, C, D, E, T, X( 8), W[ 8], W[10], X( 0), X( 5));
282 BODY_20_31(25, T, A, B, C, D, E, X( 9), W[ 9], W[11], X( 1), X( 6));
283 BODY_20_31(26, E, T, A, B, C, D, X(10), W[10], W[12], X( 2), X( 7));
284 BODY_20_31(27, D, E, T, A, B, C, X(11), W[11], W[13], X( 3), X( 8));
285 BODY_20_31(28, C, D, E, T, A, B, X(12), W[12], W[14], X( 4), X( 9));
286 BODY_20_31(29, B, C, D, E, T, A, X(13), W[13], W[15], X( 5), X(10));
287 BODY_20_31(30, A, B, C, D, E, T, X(14), W[14], X( 0), X( 6), X(11));
288 BODY_20_31(31, T, A, B, C, D, E, X(15), W[15], X( 1), X( 7), X(12));
/* Rounds 32-39: schedule now runs entirely on the circular X() buffer. */
290 BODY_32_39(32, E, T, A, B, C, D, X( 0), X( 2), X( 8), X(13));
291 BODY_32_39(33, D, E, T, A, B, C, X( 1), X( 3), X( 9), X(14));
292 BODY_32_39(34, C, D, E, T, A, B, X( 2), X( 4), X(10), X(15));
293 BODY_32_39(35, B, C, D, E, T, A, X( 3), X( 5), X(11), X( 0));
294 BODY_32_39(36, A, B, C, D, E, T, X( 4), X( 6), X(12), X( 1));
295 BODY_32_39(37, T, A, B, C, D, E, X( 5), X( 7), X(13), X( 2));
296 BODY_32_39(38, E, T, A, B, C, D, X( 6), X( 8), X(14), X( 3));
297 BODY_32_39(39, D, E, T, A, B, C, X( 7), X( 9), X(15), X( 4));
/* Rounds 40-59: majority function with K_40_59. */
299 BODY_40_59(40, C, D, E, T, A, B, X( 8), X(10), X( 0), X( 5));
300 BODY_40_59(41, B, C, D, E, T, A, X( 9), X(11), X( 1), X( 6));
301 BODY_40_59(42, A, B, C, D, E, T, X(10), X(12), X( 2), X( 7));
302 BODY_40_59(43, T, A, B, C, D, E, X(11), X(13), X( 3), X( 8));
303 BODY_40_59(44, E, T, A, B, C, D, X(12), X(14), X( 4), X( 9));
304 BODY_40_59(45, D, E, T, A, B, C, X(13), X(15), X( 5), X(10));
305 BODY_40_59(46, C, D, E, T, A, B, X(14), X( 0), X( 6), X(11));
306 BODY_40_59(47, B, C, D, E, T, A, X(15), X( 1), X( 7), X(12));
307 BODY_40_59(48, A, B, C, D, E, T, X( 0), X( 2), X( 8), X(13));
308 BODY_40_59(49, T, A, B, C, D, E, X( 1), X( 3), X( 9), X(14));
309 BODY_40_59(50, E, T, A, B, C, D, X( 2), X( 4), X(10), X(15));
310 BODY_40_59(51, D, E, T, A, B, C, X( 3), X( 5), X(11), X( 0));
311 BODY_40_59(52, C, D, E, T, A, B, X( 4), X( 6), X(12), X( 1));
312 BODY_40_59(53, B, C, D, E, T, A, X( 5), X( 7), X(13), X( 2));
313 BODY_40_59(54, A, B, C, D, E, T, X( 6), X( 8), X(14), X( 3));
314 BODY_40_59(55, T, A, B, C, D, E, X( 7), X( 9), X(15), X( 4));
315 BODY_40_59(56, E, T, A, B, C, D, X( 8), X(10), X( 0), X( 5));
316 BODY_40_59(57, D, E, T, A, B, C, X( 9), X(11), X( 1), X( 6));
317 BODY_40_59(58, C, D, E, T, A, B, X(10), X(12), X( 2), X( 7));
318 BODY_40_59(59, B, C, D, E, T, A, X(11), X(13), X( 3), X( 8));
/* Rounds 60-79: final parity rounds with K_60_79. */
320 BODY_60_79(60, A, B, C, D, E, T, X(12), X(14), X( 4), X( 9));
321 BODY_60_79(61, T, A, B, C, D, E, X(13), X(15), X( 5), X(10));
322 BODY_60_79(62, E, T, A, B, C, D, X(14), X( 0), X( 6), X(11));
323 BODY_60_79(63, D, E, T, A, B, C, X(15), X( 1), X( 7), X(12));
324 BODY_60_79(64, C, D, E, T, A, B, X( 0), X( 2), X( 8), X(13));
325 BODY_60_79(65, B, C, D, E, T, A, X( 1), X( 3), X( 9), X(14));
326 BODY_60_79(66, A, B, C, D, E, T, X( 2), X( 4), X(10), X(15));
327 BODY_60_79(67, T, A, B, C, D, E, X( 3), X( 5), X(11), X( 0));
328 BODY_60_79(68, E, T, A, B, C, D, X( 4), X( 6), X(12), X( 1));
329 BODY_60_79(69, D, E, T, A, B, C, X( 5), X( 7), X(13), X( 2));
330 BODY_60_79(70, C, D, E, T, A, B, X( 6), X( 8), X(14), X( 3));
331 BODY_60_79(71, B, C, D, E, T, A, X( 7), X( 9), X(15), X( 4));
332 BODY_60_79(72, A, B, C, D, E, T, X( 8), X(10), X( 0), X( 5));
333 BODY_60_79(73, T, A, B, C, D, E, X( 9), X(11), X( 1), X( 6));
334 BODY_60_79(74, E, T, A, B, C, D, X(10), X(12), X( 2), X( 7));
335 BODY_60_79(75, D, E, T, A, B, C, X(11), X(13), X( 3), X( 8));
336 BODY_60_79(76, C, D, E, T, A, B, X(12), X(14), X( 4), X( 9));
337 BODY_60_79(77, B, C, D, E, T, A, X(13), X(15), X( 5), X(10));
338 BODY_60_79(78, A, B, C, D, E, T, X(14), X( 0), X( 6), X(11));
339 BODY_60_79(79, T, A, B, C, D, E, X(15), X( 1), X( 7), X(12));
/* After 80 rotated rounds E,T,A,B,C hold the final a,b,c,d,e; fold them
 * into the chaining state, masked to 32 bits for wider MD32_REG_T. */
341 c->h0 = (c->h0 + E) & 0xffffffffL;
342 c->h1 = (c->h1 + T) & 0xffffffffL;
343 c->h2 = (c->h2 + A) & 0xffffffffL;
344 c->h3 = (c->h3 + B) & 0xffffffffL;
345 c->h4 = (c->h4 + C) & 0xffffffffL;
362 #ifndef DONT_IMPLEMENT_BLOCK_DATA_ORDER
/*
 * Fully-unrolled compression for raw big-endian byte input: p points at
 * num 64-byte blocks.  Identical round structure to the host-order variant
 * above, except the first 16 rounds read words through X() after byte-wise
 * assembly.
 * NOTE(review): the per-block loop header, the A..E loads, the X() macro
 * definition, the interleaved HOST_c2l(data,l) byte-load statements
 * (suggested by the original-line-number gaps between the first 16 rounds)
 * and the closing braces are elided from this chunk -- confirm against the
 * full file.
 */
363 void HASH_BLOCK_DATA_ORDER (SHA_CTX *c, const void *p, size_t num)
365 const unsigned char *data = (const unsigned char *)p;
/* `l` is the scratch word used by the (elided) HOST_c2l byte loads. */
366 register unsigned MD32_REG_T A, B, C, D, E, T, l;
/* The 16-word circular message schedule, flattened into scalars. */
368 unsigned MD32_REG_T XX0, XX1, XX2, XX3, XX4, XX5, XX6, XX7,
369 XX8, XX9, XX10, XX11, XX12, XX13, XX14, XX15;
/* Rounds 0-15: message words assembled from the byte stream into X(i). */
386 BODY_00_15( 0, A, B, C, D, E, T, X( 0));
389 BODY_00_15( 1, T, A, B, C, D, E, X( 1));
392 BODY_00_15( 2, E, T, A, B, C, D, X( 2));
395 BODY_00_15( 3, D, E, T, A, B, C, X( 3));
398 BODY_00_15( 4, C, D, E, T, A, B, X( 4));
401 BODY_00_15( 5, B, C, D, E, T, A, X( 5));
404 BODY_00_15( 6, A, B, C, D, E, T, X( 6));
407 BODY_00_15( 7, T, A, B, C, D, E, X( 7));
410 BODY_00_15( 8, E, T, A, B, C, D, X( 8));
413 BODY_00_15( 9, D, E, T, A, B, C, X( 9));
416 BODY_00_15(10, C, D, E, T, A, B, X(10));
419 BODY_00_15(11, B, C, D, E, T, A, X(11));
422 BODY_00_15(12, A, B, C, D, E, T, X(12));
425 BODY_00_15(13, T, A, B, C, D, E, X(13));
428 BODY_00_15(14, E, T, A, B, C, D, X(14));
429 BODY_00_15(15, D, E, T, A, B, C, X(15));
/* Rounds 16-19: begin in-place schedule expansion over X(). */
431 BODY_16_19(16, C, D, E, T, A, B, X( 0), X( 0), X( 2), X( 8), X(13));
432 BODY_16_19(17, B, C, D, E, T, A, X( 1), X( 1), X( 3), X( 9), X(14));
433 BODY_16_19(18, A, B, C, D, E, T, X( 2), X( 2), X( 4), X(10), X(15));
434 BODY_16_19(19, T, A, B, C, D, E, X( 3), X( 3), X( 5), X(11), X( 0));
/* Rounds 20-31. */
436 BODY_20_31(20, E, T, A, B, C, D, X( 4), X( 4), X( 6), X(12), X( 1));
437 BODY_20_31(21, D, E, T, A, B, C, X( 5), X( 5), X( 7), X(13), X( 2));
438 BODY_20_31(22, C, D, E, T, A, B, X( 6), X( 6), X( 8), X(14), X( 3));
439 BODY_20_31(23, B, C, D, E, T, A, X( 7), X( 7), X( 9), X(15), X( 4));
440 BODY_20_31(24, A, B, C, D, E, T, X( 8), X( 8), X(10), X( 0), X( 5));
441 BODY_20_31(25, T, A, B, C, D, E, X( 9), X( 9), X(11), X( 1), X( 6));
442 BODY_20_31(26, E, T, A, B, C, D, X(10), X(10), X(12), X( 2), X( 7));
443 BODY_20_31(27, D, E, T, A, B, C, X(11), X(11), X(13), X( 3), X( 8));
444 BODY_20_31(28, C, D, E, T, A, B, X(12), X(12), X(14), X( 4), X( 9));
445 BODY_20_31(29, B, C, D, E, T, A, X(13), X(13), X(15), X( 5), X(10));
446 BODY_20_31(30, A, B, C, D, E, T, X(14), X(14), X( 0), X( 6), X(11));
447 BODY_20_31(31, T, A, B, C, D, E, X(15), X(15), X( 1), X( 7), X(12));
/* Rounds 32-39. */
449 BODY_32_39(32, E, T, A, B, C, D, X( 0), X( 2), X( 8), X(13));
450 BODY_32_39(33, D, E, T, A, B, C, X( 1), X( 3), X( 9), X(14));
451 BODY_32_39(34, C, D, E, T, A, B, X( 2), X( 4), X(10), X(15));
452 BODY_32_39(35, B, C, D, E, T, A, X( 3), X( 5), X(11), X( 0));
453 BODY_32_39(36, A, B, C, D, E, T, X( 4), X( 6), X(12), X( 1));
454 BODY_32_39(37, T, A, B, C, D, E, X( 5), X( 7), X(13), X( 2));
455 BODY_32_39(38, E, T, A, B, C, D, X( 6), X( 8), X(14), X( 3));
456 BODY_32_39(39, D, E, T, A, B, C, X( 7), X( 9), X(15), X( 4));
/* Rounds 40-59: majority function with K_40_59. */
458 BODY_40_59(40, C, D, E, T, A, B, X( 8), X(10), X( 0), X( 5));
459 BODY_40_59(41, B, C, D, E, T, A, X( 9), X(11), X( 1), X( 6));
460 BODY_40_59(42, A, B, C, D, E, T, X(10), X(12), X( 2), X( 7));
461 BODY_40_59(43, T, A, B, C, D, E, X(11), X(13), X( 3), X( 8));
462 BODY_40_59(44, E, T, A, B, C, D, X(12), X(14), X( 4), X( 9));
463 BODY_40_59(45, D, E, T, A, B, C, X(13), X(15), X( 5), X(10));
464 BODY_40_59(46, C, D, E, T, A, B, X(14), X( 0), X( 6), X(11));
465 BODY_40_59(47, B, C, D, E, T, A, X(15), X( 1), X( 7), X(12));
466 BODY_40_59(48, A, B, C, D, E, T, X( 0), X( 2), X( 8), X(13));
467 BODY_40_59(49, T, A, B, C, D, E, X( 1), X( 3), X( 9), X(14));
468 BODY_40_59(50, E, T, A, B, C, D, X( 2), X( 4), X(10), X(15));
469 BODY_40_59(51, D, E, T, A, B, C, X( 3), X( 5), X(11), X( 0));
470 BODY_40_59(52, C, D, E, T, A, B, X( 4), X( 6), X(12), X( 1));
471 BODY_40_59(53, B, C, D, E, T, A, X( 5), X( 7), X(13), X( 2));
472 BODY_40_59(54, A, B, C, D, E, T, X( 6), X( 8), X(14), X( 3));
473 BODY_40_59(55, T, A, B, C, D, E, X( 7), X( 9), X(15), X( 4));
474 BODY_40_59(56, E, T, A, B, C, D, X( 8), X(10), X( 0), X( 5));
475 BODY_40_59(57, D, E, T, A, B, C, X( 9), X(11), X( 1), X( 6));
476 BODY_40_59(58, C, D, E, T, A, B, X(10), X(12), X( 2), X( 7));
477 BODY_40_59(59, B, C, D, E, T, A, X(11), X(13), X( 3), X( 8));
/* Rounds 60-79: final parity rounds with K_60_79. */
479 BODY_60_79(60, A, B, C, D, E, T, X(12), X(14), X( 4), X( 9));
480 BODY_60_79(61, T, A, B, C, D, E, X(13), X(15), X( 5), X(10));
481 BODY_60_79(62, E, T, A, B, C, D, X(14), X( 0), X( 6), X(11));
482 BODY_60_79(63, D, E, T, A, B, C, X(15), X( 1), X( 7), X(12));
483 BODY_60_79(64, C, D, E, T, A, B, X( 0), X( 2), X( 8), X(13));
484 BODY_60_79(65, B, C, D, E, T, A, X( 1), X( 3), X( 9), X(14));
485 BODY_60_79(66, A, B, C, D, E, T, X( 2), X( 4), X(10), X(15));
486 BODY_60_79(67, T, A, B, C, D, E, X( 3), X( 5), X(11), X( 0));
487 BODY_60_79(68, E, T, A, B, C, D, X( 4), X( 6), X(12), X( 1));
488 BODY_60_79(69, D, E, T, A, B, C, X( 5), X( 7), X(13), X( 2));
489 BODY_60_79(70, C, D, E, T, A, B, X( 6), X( 8), X(14), X( 3));
490 BODY_60_79(71, B, C, D, E, T, A, X( 7), X( 9), X(15), X( 4));
491 BODY_60_79(72, A, B, C, D, E, T, X( 8), X(10), X( 0), X( 5));
492 BODY_60_79(73, T, A, B, C, D, E, X( 9), X(11), X( 1), X( 6));
493 BODY_60_79(74, E, T, A, B, C, D, X(10), X(12), X( 2), X( 7));
494 BODY_60_79(75, D, E, T, A, B, C, X(11), X(13), X( 3), X( 8));
495 BODY_60_79(76, C, D, E, T, A, B, X(12), X(14), X( 4), X( 9));
496 BODY_60_79(77, B, C, D, E, T, A, X(13), X(15), X( 5), X(10));
497 BODY_60_79(78, A, B, C, D, E, T, X(14), X( 0), X( 6), X(11));
498 BODY_60_79(79, T, A, B, C, D, E, X(15), X( 1), X( 7), X(12));
/* After 80 rotated rounds E,T,A,B,C hold the final a,b,c,d,e; fold them
 * into the chaining state, masked to 32 bits for wider MD32_REG_T. */
500 c->h0 = (c->h0 + E) & 0xffffffffL;
501 c->h1 = (c->h1 + T) & 0xffffffffL;
502 c->h2 = (c->h2 + A) & 0xffffffffL;
503 c->h3 = (c->h3 + B) & 0xffffffffL;
504 c->h4 = (c->h4 + C) & 0xffffffffL;
520 #else /* OPENSSL_SMALL_FOOTPRINT */
/*
 * Compact round macros for the looped (non-unrolled) implementation.
 * Unlike the unrolled variants above, these operate on the fixed register
 * names A..E and perform the register rotation explicitly each round
 * (E=D, D=C, C=ROTATE(B,30), B=A; new A computed via T).
 */
/* Rounds 0-15: message word xi used directly. */
522 #define BODY_00_15(xi) do { \
523 T=E+K_00_19+F_00_19(B,C,D); \
524 E=D, D=C, C=ROTATE(B,30), B=A; \
525 A=ROTATE(A,5)+T+xi; } while(0)
/* Rounds 16-19: expand schedule word in place (xa is in/out). */
527 #define BODY_16_19(xa,xb,xc,xd) do { \
528 Xupdate(T,xa,xa,xb,xc,xd); \
529 T+=E+K_00_19+F_00_19(B,C,D); \
530 E=D, D=C, C=ROTATE(B,30), B=A; \
531 A=ROTATE(A,5)+T; } while(0)
/* Rounds 20-39 (also reused for parity semantics of 60-79's sibling). */
533 #define BODY_20_39(xa,xb,xc,xd) do { \
534 Xupdate(T,xa,xa,xb,xc,xd); \
535 T+=E+K_20_39+F_20_39(B,C,D); \
536 E=D, D=C, C=ROTATE(B,30), B=A; \
537 A=ROTATE(A,5)+T; } while(0)
/* Rounds 40-59: majority function, K_40_59. */
539 #define BODY_40_59(xa,xb,xc,xd) do { \
540 Xupdate(T,xa,xa,xb,xc,xd); \
541 T+=E+K_40_59+F_40_59(B,C,D); \
542 E=D, D=C, C=ROTATE(B,30), B=A; \
543 A=ROTATE(A,5)+T; } while(0)
/* Rounds 60-79: note T is NOT pre-seeded with the schedule word here;
 * the updated xa is added into A at the end instead. */
545 #define BODY_60_79(xa,xb,xc,xd) do { \
546 Xupdate(T,xa,xa,xb,xc,xd); \
547 T=E+K_60_79+F_60_79(B,C,D); \
548 E=D, D=C, C=ROTATE(B,30), B=A; \
549 A=ROTATE(A,5)+T+xa; } while(0)
551 #ifndef DONT_IMPLEMENT_BLOCK_HOST_ORDER
/*
 * Looped (small-footprint) compression for input already in host word
 * order.  Uses a 16-word circular schedule X[] indexed modulo 16 via
 * `& 15`.  NOTE(review): the function braces, the per-block loop, the
 * A..E loads, the X[] declaration, the bodies of the copy loop at i<16
 * and the loop-closing braces are elided from this chunk -- confirm
 * against the full file.
 */
552 void HASH_BLOCK_HOST_ORDER (SHA_CTX *c, const void *d, size_t num)
554 const SHA_LONG *W = d;
555 register unsigned MD32_REG_T A, B, C, D, E, T;
/* Rounds 0-15: copy W into X and run BODY_00_15 (body elided). */
566 for (i = 0; i < 16; i++) {
/* Rounds 16-19. */
570 for (i = 0; i < 4; i++) {
571 BODY_16_19(X[i], X[i + 2], X[i + 8], X[(i + 13) & 15]);
/* Rounds 20-39: i continues from 4, so 20 iterations (i = 4..23). */
573 for (; i < 24; i++) {
574 BODY_20_39(X[i & 15], X[(i + 2) & 15], X[(i + 8) & 15], X[(i + 13) & 15]);
/* Rounds 40-59: indices re-phased so (i+8)&15 is the round's schedule slot. */
576 for (i = 0; i < 20; i++) {
577 BODY_40_59(X[(i + 8) & 15], X[(i + 10) & 15], X[i & 15], X[(i + 5) & 15]);
/* Rounds 60-79: i = 4..23 keeps the same index phase as the 40-59 loop. */
579 for (i = 4; i < 24; i++) {
580 BODY_60_79(X[(i + 8) & 15], X[(i + 10) & 15], X[i & 15], X[(i + 5) & 15]);
/* Fold the working registers back into the chaining state (32-bit mask
 * for platforms where MD32_REG_T is wider than 32 bits). */
583 c->h0 = (c->h0 + A) & 0xffffffffL;
584 c->h1 = (c->h1 + B) & 0xffffffffL;
585 c->h2 = (c->h2 + C) & 0xffffffffL;
586 c->h3 = (c->h3 + D) & 0xffffffffL;
587 c->h4 = (c->h4 + E) & 0xffffffffL;
604 #ifndef DONT_IMPLEMENT_BLOCK_DATA_ORDER
/*
 * Looped (small-footprint) compression for raw big-endian byte input.
 * Same round structure as the host-order variant above, except the i<16
 * loop (body elided) assembles each word from four bytes -- hence the
 * scratch word `l`, presumably fed through HOST_c2l.
 * NOTE(review): function braces, per-block loop, A..E loads, X[]
 * declaration and loop bodies/braces are elided from this chunk --
 * confirm against the full file.
 */
605 void HASH_BLOCK_DATA_ORDER (SHA_CTX *c, const void *p, size_t num)
607 const unsigned char *data = p;
608 register unsigned MD32_REG_T A, B, C, D, E, T, l;
/* Rounds 0-15: byte-assemble each word into X and run BODY_00_15. */
619 for (i = 0; i < 16; i++) {
/* Rounds 16-19. */
624 for (i = 0; i < 4; i++) {
625 BODY_16_19(X[i], X[i + 2], X[i + 8], X[(i + 13) & 15]);
/* Rounds 20-39 (i = 4..23). */
627 for (; i < 24; i++) {
628 BODY_20_39(X[i & 15], X[(i + 2) & 15], X[(i + 8) & 15], X[(i + 13) & 15]);
/* Rounds 40-59. */
630 for (i = 0; i < 20; i++) {
631 BODY_40_59(X[(i + 8) & 15], X[(i + 10) & 15], X[i & 15], X[(i + 5) & 15]);
/* Rounds 60-79 (i = 4..23, same index phase as the previous loop). */
633 for (i = 4; i < 24; i++) {
634 BODY_60_79(X[(i + 8) & 15], X[(i + 10) & 15], X[i & 15], X[(i + 5) & 15]);
/* Fold the working registers back into the chaining state. */
637 c->h0 = (c->h0 + A) & 0xffffffffL;
638 c->h1 = (c->h1 + B) & 0xffffffffL;
639 c->h2 = (c->h2 + C) & 0xffffffffL;
640 c->h3 = (c->h3 + D) & 0xffffffffL;
641 c->h4 = (c->h4 + E) & 0xffffffffL;
660 #endif /* _SHA_LOCL__H */