diff --git a/src/ltc/ciphers/aes/aes.c b/src/ltc/ciphers/aes/aes.c
index 1712248..8ba1bfc 100644
--- a/src/ltc/ciphers/aes/aes.c
+++ b/src/ltc/ciphers/aes/aes.c
@@ -91,20 +91,20 @@
 static ulong32 setup_mix(ulong32 temp)
 {
-   return (Te4_3[byte(temp, 2)]) ^
-          (Te4_2[byte(temp, 1)]) ^
-          (Te4_1[byte(temp, 0)]) ^
-          (Te4_0[byte(temp, 3)]);
+   return (Te4_3[LTC_BYTE(temp, 2)]) ^
+          (Te4_2[LTC_BYTE(temp, 1)]) ^
+          (Te4_1[LTC_BYTE(temp, 0)]) ^
+          (Te4_0[LTC_BYTE(temp, 3)]);
 }
 
 #ifndef ENCRYPT_ONLY
 #ifdef LTC_SMALL_CODE
 static ulong32 setup_mix2(ulong32 temp)
 {
-   return Td0(255 & Te4[byte(temp, 3)]) ^
-          Td1(255 & Te4[byte(temp, 2)]) ^
-          Td2(255 & Te4[byte(temp, 1)]) ^
-          Td3(255 & Te4[byte(temp, 0)]);
+   return Td0(255 & Te4[LTC_BYTE(temp, 3)]) ^
+          Td1(255 & Te4[LTC_BYTE(temp, 2)]) ^
+          Td2(255 & Te4[LTC_BYTE(temp, 1)]) ^
+          Td3(255 & Te4[LTC_BYTE(temp, 0)]);
 }
 #endif
 #endif
@@ -235,28 +235,28 @@
 #else
        temp = rrk[0];
        rk[0] =
-           Tks0[byte(temp, 3)] ^
-           Tks1[byte(temp, 2)] ^
-           Tks2[byte(temp, 1)] ^
-           Tks3[byte(temp, 0)];
+           Tks0[LTC_BYTE(temp, 3)] ^
+           Tks1[LTC_BYTE(temp, 2)] ^
+           Tks2[LTC_BYTE(temp, 1)] ^
+           Tks3[LTC_BYTE(temp, 0)];
        temp = rrk[1];
        rk[1] =
-           Tks0[byte(temp, 3)] ^
-           Tks1[byte(temp, 2)] ^
-           Tks2[byte(temp, 1)] ^
-           Tks3[byte(temp, 0)];
+           Tks0[LTC_BYTE(temp, 3)] ^
+           Tks1[LTC_BYTE(temp, 2)] ^
+           Tks2[LTC_BYTE(temp, 1)] ^
+           Tks3[LTC_BYTE(temp, 0)];
        temp = rrk[2];
        rk[2] =
-           Tks0[byte(temp, 3)] ^
-           Tks1[byte(temp, 2)] ^
-           Tks2[byte(temp, 1)] ^
-           Tks3[byte(temp, 0)];
+           Tks0[LTC_BYTE(temp, 3)] ^
+           Tks1[LTC_BYTE(temp, 2)] ^
+           Tks2[LTC_BYTE(temp, 1)] ^
+           Tks3[LTC_BYTE(temp, 0)];
        temp = rrk[3];
        rk[3] =
-           Tks0[byte(temp, 3)] ^
-           Tks1[byte(temp, 2)] ^
-           Tks2[byte(temp, 1)] ^
-           Tks3[byte(temp, 0)];
+           Tks0[LTC_BYTE(temp, 3)] ^
+           Tks1[LTC_BYTE(temp, 2)] ^
+           Tks2[LTC_BYTE(temp, 1)] ^
+           Tks3[LTC_BYTE(temp, 0)];
 #endif
    }
@@ -311,28 +311,28 @@
    for (r = 0; ; r++) {
        rk += 4;
        t0 =
-           Te0(byte(s0, 3)) ^
-           Te1(byte(s1, 2)) ^
-           Te2(byte(s2, 1)) ^
-           Te3(byte(s3, 0)) ^
+           Te0(LTC_BYTE(s0, 3)) ^
+           Te1(LTC_BYTE(s1, 2)) ^
+           Te2(LTC_BYTE(s2, 1)) ^
+           Te3(LTC_BYTE(s3, 0)) ^
            rk[0];
        t1 =
-           Te0(byte(s1, 3)) ^
-           Te1(byte(s2, 2)) ^
-           Te2(byte(s3, 1)) ^
-           Te3(byte(s0, 0)) ^
+           Te0(LTC_BYTE(s1, 3)) ^
+           Te1(LTC_BYTE(s2, 2)) ^
+           Te2(LTC_BYTE(s3, 1)) ^
+           Te3(LTC_BYTE(s0, 0)) ^
            rk[1];
        t2 =
-           Te0(byte(s2, 3)) ^
-           Te1(byte(s3, 2)) ^
-           Te2(byte(s0, 1)) ^
-           Te3(byte(s1, 0)) ^
+           Te0(LTC_BYTE(s2, 3)) ^
+           Te1(LTC_BYTE(s3, 2)) ^
+           Te2(LTC_BYTE(s0, 1)) ^
+           Te3(LTC_BYTE(s1, 0)) ^
            rk[2];
        t3 =
-           Te0(byte(s3, 3)) ^
-           Te1(byte(s0, 2)) ^
-           Te2(byte(s1, 1)) ^
-           Te3(byte(s2, 0)) ^
+           Te0(LTC_BYTE(s3, 3)) ^
+           Te1(LTC_BYTE(s0, 2)) ^
+           Te2(LTC_BYTE(s1, 1)) ^
+           Te3(LTC_BYTE(s2, 0)) ^
            rk[3];
        if (r == Nr-2) {
           break;
@@ -349,28 +349,28 @@
    r = Nr >> 1;
    for (;;) {
        t0 =
-           Te0(byte(s0, 3)) ^
-           Te1(byte(s1, 2)) ^
-           Te2(byte(s2, 1)) ^
-           Te3(byte(s3, 0)) ^
+           Te0(LTC_BYTE(s0, 3)) ^
+           Te1(LTC_BYTE(s1, 2)) ^
+           Te2(LTC_BYTE(s2, 1)) ^
+           Te3(LTC_BYTE(s3, 0)) ^
            rk[4];
        t1 =
-           Te0(byte(s1, 3)) ^
-           Te1(byte(s2, 2)) ^
-           Te2(byte(s3, 1)) ^
-           Te3(byte(s0, 0)) ^
+           Te0(LTC_BYTE(s1, 3)) ^
+           Te1(LTC_BYTE(s2, 2)) ^
+           Te2(LTC_BYTE(s3, 1)) ^
+           Te3(LTC_BYTE(s0, 0)) ^
            rk[5];
        t2 =
-           Te0(byte(s2, 3)) ^
-           Te1(byte(s3, 2)) ^
-           Te2(byte(s0, 1)) ^
-           Te3(byte(s1, 0)) ^
+           Te0(LTC_BYTE(s2, 3)) ^
+           Te1(LTC_BYTE(s3, 2)) ^
+           Te2(LTC_BYTE(s0, 1)) ^
+           Te3(LTC_BYTE(s1, 0)) ^
            rk[6];
        t3 =
-           Te0(byte(s3, 3)) ^
-           Te1(byte(s0, 2)) ^
-           Te2(byte(s1, 1)) ^
-           Te3(byte(s2, 0)) ^
+           Te0(LTC_BYTE(s3, 3)) ^
+           Te1(LTC_BYTE(s0, 2)) ^
+           Te2(LTC_BYTE(s1, 1)) ^
+           Te3(LTC_BYTE(s2, 0)) ^
            rk[7];
 
        rk += 8;
@@ -379,28 +379,28 @@
        }
 
        s0 =
-           Te0(byte(t0, 3)) ^
-           Te1(byte(t1, 2)) ^
-           Te2(byte(t2, 1)) ^
-           Te3(byte(t3, 0)) ^
+           Te0(LTC_BYTE(t0, 3)) ^
+           Te1(LTC_BYTE(t1, 2)) ^
+           Te2(LTC_BYTE(t2, 1)) ^
+           Te3(LTC_BYTE(t3, 0)) ^
            rk[0];
        s1 =
-           Te0(byte(t1, 3)) ^
-           Te1(byte(t2, 2)) ^
-           Te2(byte(t3, 1)) ^
-           Te3(byte(t0, 0)) ^
+           Te0(LTC_BYTE(t1, 3)) ^
+           Te1(LTC_BYTE(t2, 2)) ^
+           Te2(LTC_BYTE(t3, 1)) ^
+           Te3(LTC_BYTE(t0, 0)) ^
            rk[1];
        s2 =
-           Te0(byte(t2, 3)) ^
-           Te1(byte(t3, 2)) ^
-           Te2(byte(t0, 1)) ^
-           Te3(byte(t1, 0)) ^
+           Te0(LTC_BYTE(t2, 3)) ^
+           Te1(LTC_BYTE(t3, 2)) ^
+           Te2(LTC_BYTE(t0, 1)) ^
+           Te3(LTC_BYTE(t1, 0)) ^
            rk[2];
        s3 =
-           Te0(byte(t3, 3)) ^
-           Te1(byte(t0, 2)) ^
-           Te2(byte(t1, 1)) ^
-           Te3(byte(t2, 0)) ^
+           Te0(LTC_BYTE(t3, 3)) ^
+           Te1(LTC_BYTE(t0, 2)) ^
+           Te2(LTC_BYTE(t1, 1)) ^
+           Te3(LTC_BYTE(t2, 0)) ^
            rk[3];
    }
@@ -411,31 +411,31 @@
     * map cipher state to byte array block:
     */
    s0 =
-       (Te4_3[byte(t0, 3)]) ^
-       (Te4_2[byte(t1, 2)]) ^
-       (Te4_1[byte(t2, 1)]) ^
-       (Te4_0[byte(t3, 0)]) ^
+       (Te4_3[LTC_BYTE(t0, 3)]) ^
+       (Te4_2[LTC_BYTE(t1, 2)]) ^
+       (Te4_1[LTC_BYTE(t2, 1)]) ^
+       (Te4_0[LTC_BYTE(t3, 0)]) ^
        rk[0];
    STORE32H(s0, ct);
    s1 =
-       (Te4_3[byte(t1, 3)]) ^
-       (Te4_2[byte(t2, 2)]) ^
-       (Te4_1[byte(t3, 1)]) ^
-       (Te4_0[byte(t0, 0)]) ^
+       (Te4_3[LTC_BYTE(t1, 3)]) ^
+       (Te4_2[LTC_BYTE(t2, 2)]) ^
+       (Te4_1[LTC_BYTE(t3, 1)]) ^
+       (Te4_0[LTC_BYTE(t0, 0)]) ^
        rk[1];
    STORE32H(s1, ct+4);
    s2 =
-       (Te4_3[byte(t2, 3)]) ^
-       (Te4_2[byte(t3, 2)]) ^
-       (Te4_1[byte(t0, 1)]) ^
-       (Te4_0[byte(t1, 0)]) ^
+       (Te4_3[LTC_BYTE(t2, 3)]) ^
+       (Te4_2[LTC_BYTE(t3, 2)]) ^
+       (Te4_1[LTC_BYTE(t0, 1)]) ^
+       (Te4_0[LTC_BYTE(t1, 0)]) ^
        rk[2];
    STORE32H(s2, ct+8);
    s3 =
-       (Te4_3[byte(t3, 3)]) ^
-       (Te4_2[byte(t0, 2)]) ^
-       (Te4_1[byte(t1, 1)]) ^
-       (Te4_0[byte(t2, 0)]) ^
+       (Te4_3[LTC_BYTE(t3, 3)]) ^
+       (Te4_2[LTC_BYTE(t0, 2)]) ^
+       (Te4_1[LTC_BYTE(t1, 1)]) ^
+       (Te4_0[LTC_BYTE(t2, 0)]) ^
        rk[3];
    STORE32H(s3, ct+12);
@@ -490,28 +490,28 @@
    for (r = 0; ; r++) {
        rk += 4;
        t0 =
-           Td0(byte(s0, 3)) ^
-           Td1(byte(s3, 2)) ^
-           Td2(byte(s2, 1)) ^
-           Td3(byte(s1, 0)) ^
+           Td0(LTC_BYTE(s0, 3)) ^
+           Td1(LTC_BYTE(s3, 2)) ^
+           Td2(LTC_BYTE(s2, 1)) ^
+           Td3(LTC_BYTE(s1, 0)) ^
            rk[0];
        t1 =
-           Td0(byte(s1, 3)) ^
-           Td1(byte(s0, 2)) ^
-           Td2(byte(s3, 1)) ^
-           Td3(byte(s2, 0)) ^
+           Td0(LTC_BYTE(s1, 3)) ^
+           Td1(LTC_BYTE(s0, 2)) ^
+           Td2(LTC_BYTE(s3, 1)) ^
+           Td3(LTC_BYTE(s2, 0)) ^
            rk[1];
        t2 =
-           Td0(byte(s2, 3)) ^
-           Td1(byte(s1, 2)) ^
-           Td2(byte(s0, 1)) ^
-           Td3(byte(s3, 0)) ^
+           Td0(LTC_BYTE(s2, 3)) ^
+           Td1(LTC_BYTE(s1, 2)) ^
+           Td2(LTC_BYTE(s0, 1)) ^
+           Td3(LTC_BYTE(s3, 0)) ^
            rk[2];
        t3 =
-           Td0(byte(s3, 3)) ^
-           Td1(byte(s2, 2)) ^
-           Td2(byte(s1, 1)) ^
-           Td3(byte(s0, 0)) ^
+           Td0(LTC_BYTE(s3, 3)) ^
+           Td1(LTC_BYTE(s2, 2)) ^
+           Td2(LTC_BYTE(s1, 1)) ^
+           Td3(LTC_BYTE(s0, 0)) ^
            rk[3];
        if (r == Nr-2) {
           break;
@@ -529,28 +529,28 @@
    for (;;) {
        t0 =
-           Td0(byte(s0, 3)) ^
-           Td1(byte(s3, 2)) ^
-           Td2(byte(s2, 1)) ^
-           Td3(byte(s1, 0)) ^
+           Td0(LTC_BYTE(s0, 3)) ^
+           Td1(LTC_BYTE(s3, 2)) ^
+           Td2(LTC_BYTE(s2, 1)) ^
+           Td3(LTC_BYTE(s1, 0)) ^
            rk[4];
        t1 =
-           Td0(byte(s1, 3)) ^
-           Td1(byte(s0, 2)) ^
-           Td2(byte(s3, 1)) ^
-           Td3(byte(s2, 0)) ^
+           Td0(LTC_BYTE(s1, 3)) ^
+           Td1(LTC_BYTE(s0, 2)) ^
+           Td2(LTC_BYTE(s3, 1)) ^
+           Td3(LTC_BYTE(s2, 0)) ^
            rk[5];
        t2 =
-           Td0(byte(s2, 3)) ^
-           Td1(byte(s1, 2)) ^
-           Td2(byte(s0, 1)) ^
-           Td3(byte(s3, 0)) ^
+           Td0(LTC_BYTE(s2, 3)) ^
+           Td1(LTC_BYTE(s1, 2)) ^
+           Td2(LTC_BYTE(s0, 1)) ^
+           Td3(LTC_BYTE(s3, 0)) ^
            rk[6];
        t3 =
-           Td0(byte(s3, 3)) ^
-           Td1(byte(s2, 2)) ^
-           Td2(byte(s1, 1)) ^
-           Td3(byte(s0, 0)) ^
+           Td0(LTC_BYTE(s3, 3)) ^
+           Td1(LTC_BYTE(s2, 2)) ^
+           Td2(LTC_BYTE(s1, 1)) ^
+           Td3(LTC_BYTE(s0, 0)) ^
            rk[7];
 
        rk += 8;
@@ -560,28 +560,28 @@
        s0 =
-           Td0(byte(t0, 3)) ^
-           Td1(byte(t3, 2)) ^
-           Td2(byte(t2, 1)) ^
-           Td3(byte(t1, 0)) ^
+           Td0(LTC_BYTE(t0, 3)) ^
+           Td1(LTC_BYTE(t3, 2)) ^
+           Td2(LTC_BYTE(t2, 1)) ^
+           Td3(LTC_BYTE(t1, 0)) ^
            rk[0];
        s1 =
-           Td0(byte(t1, 3)) ^
-           Td1(byte(t0, 2)) ^
-           Td2(byte(t3, 1)) ^
-           Td3(byte(t2, 0)) ^
+           Td0(LTC_BYTE(t1, 3)) ^
+           Td1(LTC_BYTE(t0, 2)) ^
+           Td2(LTC_BYTE(t3, 1)) ^
+           Td3(LTC_BYTE(t2, 0)) ^
            rk[1];
        s2 =
-           Td0(byte(t2, 3)) ^
-           Td1(byte(t1, 2)) ^
-           Td2(byte(t0, 1)) ^
-           Td3(byte(t3, 0)) ^
+           Td0(LTC_BYTE(t2, 3)) ^
+           Td1(LTC_BYTE(t1, 2)) ^
+           Td2(LTC_BYTE(t0, 1)) ^
+           Td3(LTC_BYTE(t3, 0)) ^
            rk[2];
        s3 =
-           Td0(byte(t3, 3)) ^
-           Td1(byte(t2, 2)) ^
-           Td2(byte(t1, 1)) ^
-           Td3(byte(t0, 0)) ^
+           Td0(LTC_BYTE(t3, 3)) ^
+           Td1(LTC_BYTE(t2, 2)) ^
+           Td2(LTC_BYTE(t1, 1)) ^
+           Td3(LTC_BYTE(t0, 0)) ^
            rk[3];
    }
 #endif
@@ -591,31 +591,31 @@
     * map cipher state to byte array block:
     */
    s0 =
-       (Td4[byte(t0, 3)] & 0xff000000) ^
-       (Td4[byte(t3, 2)] & 0x00ff0000) ^
-       (Td4[byte(t2, 1)] & 0x0000ff00) ^
-       (Td4[byte(t1, 0)] & 0x000000ff) ^
+       (Td4[LTC_BYTE(t0, 3)] & 0xff000000) ^
+       (Td4[LTC_BYTE(t3, 2)] & 0x00ff0000) ^
+       (Td4[LTC_BYTE(t2, 1)] & 0x0000ff00) ^
+       (Td4[LTC_BYTE(t1, 0)] & 0x000000ff) ^
        rk[0];
    STORE32H(s0, pt);
    s1 =
-       (Td4[byte(t1, 3)] & 0xff000000) ^
-       (Td4[byte(t0, 2)] & 0x00ff0000) ^
-       (Td4[byte(t3, 1)] & 0x0000ff00) ^
-       (Td4[byte(t2, 0)] & 0x000000ff) ^
+       (Td4[LTC_BYTE(t1, 3)] & 0xff000000) ^
+       (Td4[LTC_BYTE(t0, 2)] & 0x00ff0000) ^
+       (Td4[LTC_BYTE(t3, 1)] & 0x0000ff00) ^
+       (Td4[LTC_BYTE(t2, 0)] & 0x000000ff) ^
        rk[1];
    STORE32H(s1, pt+4);
    s2 =
-       (Td4[byte(t2, 3)] & 0xff000000) ^
-       (Td4[byte(t1, 2)] & 0x00ff0000) ^
-       (Td4[byte(t0, 1)] & 0x0000ff00) ^
-       (Td4[byte(t3, 0)] & 0x000000ff) ^
+       (Td4[LTC_BYTE(t2, 3)] & 0xff000000) ^
+       (Td4[LTC_BYTE(t1, 2)] & 0x00ff0000) ^
+       (Td4[LTC_BYTE(t0, 1)] & 0x0000ff00) ^
+       (Td4[LTC_BYTE(t3, 0)] & 0x000000ff) ^
        rk[2];
    STORE32H(s2, pt+8);
    s3 =
-       (Td4[byte(t3, 3)] & 0xff000000) ^
-       (Td4[byte(t2, 2)] & 0x00ff0000) ^
-       (Td4[byte(t1, 1)] & 0x0000ff00) ^
-       (Td4[byte(t0, 0)] & 0x000000ff) ^
+       (Td4[LTC_BYTE(t3, 3)] & 0xff000000) ^
+       (Td4[LTC_BYTE(t2, 2)] & 0x00ff0000) ^
+       (Td4[LTC_BYTE(t1, 1)] & 0x0000ff00) ^
+       (Td4[LTC_BYTE(t0, 0)] & 0x000000ff) ^
        rk[3];
    STORE32H(s3, pt+12);
diff --git a/src/ltc/ciphers/anubis.c b/src/ltc/ciphers/anubis.c
index 7bc3590..e0d7424 100644
--- a/src/ltc/ciphers/anubis.c
+++ b/src/ltc/ciphers/anubis.c
@@ -30,15 +30,7 @@
    NULL, NULL, NULL, NULL, NULL, NULL, NULL,
    NULL, NULL, NULL, NULL, NULL, NULL, NULL
 };
 
-#define MIN_N 4
 #define MAX_N 10
-#define MIN_ROUNDS (8 + MIN_N)
-#define MAX_ROUNDS (8 + MAX_N)
-#define MIN_KEYSIZEB (4*MIN_N)
-#define MAX_KEYSIZEB (4*MAX_N)
-#define BLOCKSIZE 128
-#define BLOCKSIZEB (BLOCKSIZE/8)
-
 /*
  * Though Anubis is endianness-neutral, the encryption tables are listed
diff --git a/src/ltc/ciphers/blowfish.c b/src/ltc/ciphers/blowfish.c
index 355a235..b53e05b 100644
--- a/src/ltc/ciphers/blowfish.c
+++ b/src/ltc/ciphers/blowfish.c
@@ -373,9 +373,9 @@
 }
 
 #ifndef __GNUC__
-#define F(x) ((S1[byte(x,3)] + S2[byte(x,2)]) ^ S3[byte(x,1)]) + S4[byte(x,0)]
+#define F(x) ((S1[LTC_BYTE(x,3)] + S2[LTC_BYTE(x,2)]) ^ S3[LTC_BYTE(x,1)]) + S4[LTC_BYTE(x,0)]
 #else
-#define F(x) ((skey->blowfish.S[0][byte(x,3)] + skey->blowfish.S[1][byte(x,2)]) ^ skey->blowfish.S[2][byte(x,1)]) + skey->blowfish.S[3][byte(x,0)]
+#define F(x) ((skey->blowfish.S[0][LTC_BYTE(x,3)] + skey->blowfish.S[1][LTC_BYTE(x,2)]) ^ skey->blowfish.S[2][LTC_BYTE(x,1)]) + skey->blowfish.S[3][LTC_BYTE(x,0)]
 #endif
 
 /**
diff --git a/src/ltc/ciphers/cast5.c b/src/ltc/ciphers/cast5.c
index 712b57d..d69dafc 100644
--- a/src/ltc/ciphers/cast5.c
+++ b/src/ltc/ciphers/cast5.c
@@ -508,7 +508,7 @@
    ulong32 I;
    I = (Km + R);
    I = ROL(I, Kr);
-   return ((S1[byte(I, 3)] ^ S2[byte(I,2)]) - S3[byte(I,1)]) + S4[byte(I,0)];
+   return ((S1[LTC_BYTE(I, 3)] ^ S2[LTC_BYTE(I,2)]) - S3[LTC_BYTE(I,1)]) + S4[LTC_BYTE(I,0)];
 }
 
 INLINE static ulong32 FII(ulong32 R, ulong32 Km, ulong32 Kr)
@@ -516,7 +516,7 @@
    ulong32 I;
    I = (Km ^ R);
    I = ROL(I, Kr);
-   return ((S1[byte(I, 3)] - S2[byte(I,2)]) + S3[byte(I,1)]) ^ S4[byte(I,0)];
+   return ((S1[LTC_BYTE(I, 3)] - S2[LTC_BYTE(I,2)]) + S3[LTC_BYTE(I,1)]) ^ S4[LTC_BYTE(I,0)];
 }
 
 INLINE static ulong32 FIII(ulong32 R, ulong32 Km, ulong32 Kr)
@@ -524,7 +524,7 @@
    ulong32 I;
    I = (Km - R);
    I = ROL(I, Kr);
-   return ((S1[byte(I, 3)] + S2[byte(I,2)]) ^ S3[byte(I,1)]) - S4[byte(I,0)];
+   return ((S1[LTC_BYTE(I, 3)] + S2[LTC_BYTE(I,2)]) ^ S3[LTC_BYTE(I,1)]) - S4[LTC_BYTE(I,0)];
 }
 
 /**
diff --git a/src/ltc/ciphers/des.c b/src/ltc/ciphers/des.c
index a521030..44907dd 100644
--- a/src/ltc/ciphers/des.c
+++ b/src/ltc/ciphers/des.c
@@ -1432,14 +1432,14 @@
 #else
    {
       ulong64 tmp;
-      tmp = des_ip[0][byte(leftt, 0)] ^
-            des_ip[1][byte(leftt, 1)] ^
-            des_ip[2][byte(leftt, 2)] ^
-            des_ip[3][byte(leftt, 3)] ^
-            des_ip[4][byte(right, 0)] ^
-            des_ip[5][byte(right, 1)] ^
-            des_ip[6][byte(right, 2)] ^
-            des_ip[7][byte(right, 3)];
+      tmp = des_ip[0][LTC_BYTE(leftt, 0)] ^
+            des_ip[1][LTC_BYTE(leftt, 1)] ^
+            des_ip[2][LTC_BYTE(leftt, 2)] ^
+            des_ip[3][LTC_BYTE(leftt, 3)] ^
+            des_ip[4][LTC_BYTE(right, 0)] ^
+            des_ip[5][LTC_BYTE(right, 1)] ^
+            des_ip[6][LTC_BYTE(right, 2)] ^
+            des_ip[7][LTC_BYTE(right, 3)];
       leftt = (ulong32)(tmp >> 32);
       right = (ulong32)(tmp & 0xFFFFFFFFUL);
    }
@@ -1491,14 +1491,14 @@
 #else
    {
      ulong64 tmp;
-      tmp = des_fp[0][byte(leftt, 0)] ^
-            des_fp[1][byte(leftt, 1)] ^
-            des_fp[2][byte(leftt, 2)] ^
-            des_fp[3][byte(leftt, 3)] ^
-            des_fp[4][byte(right, 0)] ^
-            des_fp[5][byte(right, 1)] ^
-            des_fp[6][byte(right, 2)] ^
-            des_fp[7][byte(right, 3)];
+      tmp = des_fp[0][LTC_BYTE(leftt, 0)] ^
+            des_fp[1][LTC_BYTE(leftt, 1)] ^
+            des_fp[2][LTC_BYTE(leftt, 2)] ^
+            des_fp[3][LTC_BYTE(leftt, 3)] ^
+            des_fp[4][LTC_BYTE(right, 0)] ^
+            des_fp[5][LTC_BYTE(right, 1)] ^
+            des_fp[6][LTC_BYTE(right, 2)] ^
+            des_fp[7][LTC_BYTE(right, 3)];
       leftt = (ulong32)(tmp >> 32);
       right = (ulong32)(tmp & 0xFFFFFFFFUL);
    }
diff --git a/src/ltc/ciphers/khazad.c b/src/ltc/ciphers/khazad.c
index aa75182..175d513 100644
--- a/src/ltc/ciphers/khazad.c
+++ b/src/ltc/ciphers/khazad.c
@@ -30,10 +30,6 @@
 };
 
 #define R 8
-#define KEYSIZE 128
-#define KEYSIZEB (KEYSIZE/8)
-#define BLOCKSIZE 64
-#define BLOCKSIZEB (BLOCKSIZE/8)
 
 static const ulong64 T0[256] = {
    CONST64(0xbad3d268bbb96a01), CONST64(0x54fc4d19e59a66b1),
    CONST64(0x2f71bc93e26514cd), CONST64(0x749ccdb925871b51),
diff --git a/src/ltc/ciphers/twofish/twofish.c b/src/ltc/ciphers/twofish/twofish.c
index 0a52aef..c7965f6 100644
--- a/src/ltc/ciphers/twofish/twofish.c
+++ b/src/ltc/ciphers/twofish/twofish.c
@@ -37,8 +37,12 @@
 };
 
 /* the two polynomials */
+#ifndef LTC_TWOFISH_TABLES
 #define MDS_POLY 0x169
+#endif
+#ifndef LTC_TWOFISH_ALL_TABLES
 #define RS_POLY 0x14D
+#endif
 
 /* The 4x8 RS Linear Transform */
 static const unsigned char RS[4][8] = {
@@ -278,8 +282,8 @@
 #endif
 
 /* the G function */
-#define g_func(x, dum)  (S1[byte(x,0)] ^ S2[byte(x,1)] ^ S3[byte(x,2)] ^ S4[byte(x,3)])
-#define g1_func(x, dum) (S2[byte(x,0)] ^ S3[byte(x,1)] ^ S4[byte(x,2)] ^ S1[byte(x,3)])
+#define g_func(x, dum)  (S1[LTC_BYTE(x,0)] ^ S2[LTC_BYTE(x,1)] ^ S3[LTC_BYTE(x,2)] ^ S4[LTC_BYTE(x,3)])
+#define g1_func(x, dum) (S2[LTC_BYTE(x,0)] ^ S3[LTC_BYTE(x,1)] ^ S4[LTC_BYTE(x,2)] ^ S1[LTC_BYTE(x,3)])
 
 #else
diff --git a/src/ltc/hashes/tiger.c b/src/ltc/hashes/tiger.c
index 0d3ba10..970582d 100644
--- a/src/ltc/hashes/tiger.c
+++ b/src/ltc/hashes/tiger.c
@@ -564,8 +564,8 @@
 {
    ulong64 tmp;
    tmp = (*c ^= x);
-   *a -= t1[byte(tmp, 0)] ^ t2[byte(tmp, 2)] ^ t3[byte(tmp, 4)] ^ t4[byte(tmp, 6)];
-   tmp = (*b += t4[byte(tmp, 1)] ^ t3[byte(tmp, 3)] ^ t2[byte(tmp,5)] ^ t1[byte(tmp,7)]);
+   *a -= t1[LTC_BYTE(tmp, 0)] ^ t2[LTC_BYTE(tmp, 2)] ^ t3[LTC_BYTE(tmp, 4)] ^ t4[LTC_BYTE(tmp, 6)];
+   tmp = (*b += t4[LTC_BYTE(tmp, 1)] ^ t3[LTC_BYTE(tmp, 3)] ^ t2[LTC_BYTE(tmp,5)] ^ t1[LTC_BYTE(tmp,7)]);
    switch (mul) {
       case 5: *b = (tmp << 2) + tmp; break;
      case 7: *b = (tmp << 3) - tmp; break;
diff --git a/src/ltc/headers/tomcrypt_macros.h b/src/ltc/headers/tomcrypt_macros.h
index 94e368f..2e4eb00 100644
--- a/src/ltc/headers/tomcrypt_macros.h
+++ b/src/ltc/headers/tomcrypt_macros.h
@@ -429,13 +429,6 @@
 #define LTC_UNUSED_PARAM(x) (void)(x)
 #endif
 
-/* extract a byte portably */
-#ifdef _MSC_VER
-   #define byte(x, n) ((unsigned char)((x) >> (8 * (n))))
-#else
-   #define byte(x, n) (((x) >> (8 * (n))) & 255)
-#endif
-
 /* there is no snprintf before Visual C++ 2015 */
 #if defined(_MSC_VER) && _MSC_VER < 1900
 #define snprintf _snprintf
diff --git a/src/ltc/headers/tomcrypt_private.h b/src/ltc/headers/tomcrypt_private.h
index bd5a9d7..5da8f72 100644
--- a/src/ltc/headers/tomcrypt_private.h
+++ b/src/ltc/headers/tomcrypt_private.h
@@ -388,6 +388,12 @@
    return CRYPT_OK; \
 }
 
+/* extract a byte portably */
+#ifdef _MSC_VER
+   #define LTC_BYTE(x, n) ((unsigned char)((x) >> (8 * (n))))
+#else
+   #define LTC_BYTE(x, n) (((x) >> (8 * (n))) & 255)
+#endif
 
 /* ref:         $Format:%D$ */
 /* git commit:  $Format:%H$ */
diff --git a/src/ltc/mac/pelican/pelican.c b/src/ltc/mac/pelican/pelican.c
index e8cea64..7c678d2 100644
--- a/src/ltc/mac/pelican/pelican.c
+++ b/src/ltc/mac/pelican/pelican.c
@@ -62,25 +62,25 @@
    LOAD32H(s3, pelmac->state + 12);
    for (r = 0; r < 4; r++) {
       t0 =
-         Te0(byte(s0, 3)) ^
-         Te1(byte(s1, 2)) ^
-         Te2(byte(s2, 1)) ^
-         Te3(byte(s3, 0));
+         Te0(LTC_BYTE(s0, 3)) ^
+         Te1(LTC_BYTE(s1, 2)) ^
+         Te2(LTC_BYTE(s2, 1)) ^
+         Te3(LTC_BYTE(s3, 0));
       t1 =
-         Te0(byte(s1, 3)) ^
-         Te1(byte(s2, 2)) ^
-         Te2(byte(s3, 1)) ^
-         Te3(byte(s0, 0));
+         Te0(LTC_BYTE(s1, 3)) ^
+         Te1(LTC_BYTE(s2, 2)) ^
+         Te2(LTC_BYTE(s3, 1)) ^
+         Te3(LTC_BYTE(s0, 0));
       t2 =
-         Te0(byte(s2, 3)) ^
-         Te1(byte(s3, 2)) ^
-         Te2(byte(s0, 1)) ^
-         Te3(byte(s1, 0));
+         Te0(LTC_BYTE(s2, 3)) ^
+         Te1(LTC_BYTE(s3, 2)) ^
+         Te2(LTC_BYTE(s0, 1)) ^
+         Te3(LTC_BYTE(s1, 0));
       t3 =
-         Te0(byte(s3, 3)) ^
-         Te1(byte(s0, 2)) ^
-         Te2(byte(s1, 1)) ^
-         Te3(byte(s2, 0));
+         Te0(LTC_BYTE(s3, 3)) ^
+         Te1(LTC_BYTE(s0, 2)) ^
+         Te2(LTC_BYTE(s1, 1)) ^
+         Te3(LTC_BYTE(s2, 0));
       s0 = t0; s1 = t1; s2 = t2; s3 = t3;
    }
    STORE32H(s0, pelmac->state      );
diff --git a/src/ltc/pk/ecc/ecc_import_pkcs8.c b/src/ltc/pk/ecc/ecc_import_pkcs8.c
index 8db49da..552bd89 100644
--- a/src/ltc/pk/ecc/ecc_import_pkcs8.c
+++ b/src/ltc/pk/ecc/ecc_import_pkcs8.c
@@ -26,12 +26,14 @@
 static int _der_flexi_sequence_cmp(const ltc_asn1_list *flexi, der_flexi_check *check)
 {
    const ltc_asn1_list *cur;
-   if (flexi->type != LTC_ASN1_SEQUENCE)
+   if (flexi->type != LTC_ASN1_SEQUENCE) {
       return CRYPT_INVALID_PACKET;
+   }
    cur = flexi->child;
    while(check->t != LTC_ASN1_EOL) {
-      if (!LTC_ASN1_IS_TYPE(cur, check->t))
+      if (!LTC_ASN1_IS_TYPE(cur, check->t)) {
         return CRYPT_INVALID_PACKET;
+      }
      if (check->pp != NULL) *check->pp = (ltc_asn1_list*)cur;
      cur = cur->next;
      check++;
diff --git a/src/ltc/pk/ecc/ecc_verify_hash.c b/src/ltc/pk/ecc/ecc_verify_hash.c
index d7c14be..d18ef93 100644
--- a/src/ltc/pk/ecc/ecc_verify_hash.c
+++ b/src/ltc/pk/ecc/ecc_verify_hash.c
@@ -76,11 +76,11 @@
    }
    else if (sigformat == LTC_ECCSIG_RFC7518) {
       /* RFC7518 format - raw (r,s) */
-      if (siglen % 2) {
+      i = mp_unsigned_bin_size(key->dp.order);
+      if (siglen != (2*i)) {
         err = CRYPT_INVALID_PACKET;
         goto error;
      }
-      i = siglen / 2;
      if ((err = mp_read_unsigned_bin(r, (unsigned char *)sig, i)) != CRYPT_OK)    { goto error; }
      if ((err = mp_read_unsigned_bin(s, (unsigned char *)sig+i, i)) != CRYPT_OK)  { goto error; }
   }
diff --git a/src/ltc/stream/sober128/sober128_stream.c b/src/ltc/stream/sober128/sober128_stream.c
index ecbe4d8..952d562 100644
--- a/src/ltc/stream/sober128/sober128_stream.c
+++ b/src/ltc/stream/sober128/sober128_stream.c
@@ -21,12 +21,9 @@
 
 /* don't change these... */
 #define N 17
-#define FOLD N /* how many iterations of folding to do */
 #define INITKONST 0x6996c53a /* value of KONST to use during key loading */
 #define KEYP 15 /* where to insert key words */
 #define FOLDP 4 /* where to insert non-linear feedback */
-
-#define B(x,i) ((unsigned char)(((x) >> (8*i)) & 0xFF))
 
 static ulong32 BYTE2WORD(const unsigned char *b)
 {