Remove static keywords from OpenCL functions; they can cause the old NV JIT compiler to fail

pull/1404/head
jsteube 7 years ago
parent c790622917
commit 503f676cb0

@ -692,7 +692,7 @@ __constant u32a rcon[] =
// 128 bit key
static void aes128_ExpandKey (u32 *ks, const u32 *ukey, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4)
void aes128_ExpandKey (u32 *ks, const u32 *ukey, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4)
{
ks[0] = ukey[0];
ks[1] = ukey[1];
@ -718,7 +718,7 @@ static void aes128_ExpandKey (u32 *ks, const u32 *ukey, SHM_TYPE u32 *s_te0, SHM
}
}
static void aes128_InvertKey (u32 *ks, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void aes128_InvertKey (u32 *ks, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
for (u32 i = 0, j = 40; i < j; i += 4, j -= 4)
{
@ -758,7 +758,7 @@ static void aes128_InvertKey (u32 *ks, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1,
}
}
static void aes128_set_encrypt_key (u32 *ks, const u32 *ukey, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4)
void aes128_set_encrypt_key (u32 *ks, const u32 *ukey, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4)
{
u32 ukey_s[4];
@ -770,7 +770,7 @@ static void aes128_set_encrypt_key (u32 *ks, const u32 *ukey, SHM_TYPE u32 *s_te
aes128_ExpandKey (ks, ukey_s, s_te0, s_te1, s_te2, s_te3, s_te4);
}
static void aes128_set_decrypt_key (u32 *ks, const u32 *ukey, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void aes128_set_decrypt_key (u32 *ks, const u32 *ukey, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 ukey_s[4];
@ -784,7 +784,7 @@ static void aes128_set_decrypt_key (u32 *ks, const u32 *ukey, SHM_TYPE u32 *s_te
aes128_InvertKey (ks, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
static void aes128_encrypt (const u32 *ks, const u32 *in, u32 *out, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4)
void aes128_encrypt (const u32 *ks, const u32 *in, u32 *out, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4)
{
const u32 in_s0 = swap32_S (in[0]);
const u32 in_s1 = swap32_S (in[1]);
@ -847,7 +847,7 @@ static void aes128_encrypt (const u32 *ks, const u32 *in, u32 *out, SHM_TYPE u32
out[3] = swap32_S (out[3]);
}
static void aes128_decrypt (const u32 *ks, const u32 *in, u32 *out, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void aes128_decrypt (const u32 *ks, const u32 *in, u32 *out, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
const u32 in_s0 = swap32_S (in[0]);
const u32 in_s1 = swap32_S (in[1]);
@ -912,7 +912,7 @@ static void aes128_decrypt (const u32 *ks, const u32 *in, u32 *out, SHM_TYPE u32
// 256 bit key
static void aes256_ExpandKey (u32 *ks, const u32 *ukey, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4)
void aes256_ExpandKey (u32 *ks, const u32 *ukey, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4)
{
ks[0] = ukey[0];
ks[1] = ukey[1];
@ -957,7 +957,7 @@ static void aes256_ExpandKey (u32 *ks, const u32 *ukey, SHM_TYPE u32 *s_te0, SHM
}
}
static void aes256_InvertKey (u32 *ks, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void aes256_InvertKey (u32 *ks, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
for (u32 i = 0, j = 56; i < j; i += 4, j -= 4)
{
@ -997,7 +997,7 @@ static void aes256_InvertKey (u32 *ks, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1,
}
}
static void aes256_set_encrypt_key (u32 *ks, const u32 *ukey, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4)
void aes256_set_encrypt_key (u32 *ks, const u32 *ukey, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4)
{
u32 ukey_s[8];
@ -1013,7 +1013,7 @@ static void aes256_set_encrypt_key (u32 *ks, const u32 *ukey, SHM_TYPE u32 *s_te
aes256_ExpandKey (ks, ukey_s, s_te0, s_te1, s_te2, s_te3, s_te4);
}
static void aes256_set_decrypt_key (u32 *ks, const u32 *ukey, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void aes256_set_decrypt_key (u32 *ks, const u32 *ukey, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 ukey_s[8];
@ -1031,7 +1031,7 @@ static void aes256_set_decrypt_key (u32 *ks, const u32 *ukey, SHM_TYPE u32 *s_te
aes256_InvertKey (ks, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
static void aes256_encrypt (const u32 *ks, const u32 *in, u32 *out, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4)
void aes256_encrypt (const u32 *ks, const u32 *in, u32 *out, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4)
{
const u32 in_s0 = swap32_S (in[0]);
const u32 in_s1 = swap32_S (in[1]);
@ -1094,7 +1094,7 @@ static void aes256_encrypt (const u32 *ks, const u32 *in, u32 *out, SHM_TYPE u32
out[3] = swap32_S (out[3]);
}
static void aes256_decrypt (const u32 *ks, const u32 *in, u32 *out, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void aes256_decrypt (const u32 *ks, const u32 *in, u32 *out, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
const u32 in_s0 = swap32_S (in[0]);
const u32 in_s1 = swap32_S (in[1]);
@ -1159,7 +1159,7 @@ static void aes256_decrypt (const u32 *ks, const u32 *in, u32 *out, SHM_TYPE u32
// wrapper to avoid swap32_S() confusion in the kernel code
static void AES128_set_encrypt_key (u32 *ks, const u32 *ukey, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4)
void AES128_set_encrypt_key (u32 *ks, const u32 *ukey, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4)
{
u32 ukey_s[4];
@ -1171,7 +1171,7 @@ static void AES128_set_encrypt_key (u32 *ks, const u32 *ukey, SHM_TYPE u32 *s_te
aes128_set_encrypt_key (ks, ukey_s, s_te0, s_te1, s_te2, s_te3, s_te4);
}
static void AES128_set_decrypt_key (u32 *ks, const u32 *ukey, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void AES128_set_decrypt_key (u32 *ks, const u32 *ukey, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 ukey_s[4];
@ -1183,7 +1183,7 @@ static void AES128_set_decrypt_key (u32 *ks, const u32 *ukey, SHM_TYPE u32 *s_te
aes128_set_decrypt_key (ks, ukey_s, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
static void AES128_encrypt (const u32 *ks, const u32 *in, u32 *out, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4)
void AES128_encrypt (const u32 *ks, const u32 *in, u32 *out, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4)
{
u32 in_s[4];
@ -1202,7 +1202,7 @@ static void AES128_encrypt (const u32 *ks, const u32 *in, u32 *out, SHM_TYPE u32
out[3] = swap32_S (out_s[3]);
}
static void AES128_decrypt (const u32 *ks, const u32 *in, u32 *out, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void AES128_decrypt (const u32 *ks, const u32 *in, u32 *out, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 in_s[4];
@ -1221,7 +1221,7 @@ static void AES128_decrypt (const u32 *ks, const u32 *in, u32 *out, SHM_TYPE u32
out[3] = swap32_S (out_s[3]);
}
static void AES256_set_encrypt_key (u32 *ks, const u32 *ukey, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4)
void AES256_set_encrypt_key (u32 *ks, const u32 *ukey, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4)
{
u32 ukey_s[8];
@ -1237,7 +1237,7 @@ static void AES256_set_encrypt_key (u32 *ks, const u32 *ukey, SHM_TYPE u32 *s_te
aes256_set_encrypt_key (ks, ukey_s, s_te0, s_te1, s_te2, s_te3, s_te4);
}
static void AES256_set_decrypt_key (u32 *ks, const u32 *ukey, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void AES256_set_decrypt_key (u32 *ks, const u32 *ukey, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 ukey_s[8];
@ -1253,7 +1253,7 @@ static void AES256_set_decrypt_key (u32 *ks, const u32 *ukey, SHM_TYPE u32 *s_te
aes256_set_decrypt_key (ks, ukey_s, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
static void AES256_encrypt (const u32 *ks, const u32 *in, u32 *out, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4)
void AES256_encrypt (const u32 *ks, const u32 *in, u32 *out, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4)
{
u32 in_s[4];
@ -1272,7 +1272,7 @@ static void AES256_encrypt (const u32 *ks, const u32 *in, u32 *out, SHM_TYPE u32
out[3] = swap32_S (out_s[3]);
}
static void AES256_decrypt (const u32 *ks, const u32 *in, u32 *out, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void AES256_decrypt (const u32 *ks, const u32 *in, u32 *out, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 in_s[4];

@ -403,7 +403,7 @@
// 128 bit key
static void serpent128_set_key (u32 *ks, const u32 *ukey)
void serpent128_set_key (u32 *ks, const u32 *ukey)
{
#ifdef _unroll
#pragma unroll
@ -469,7 +469,7 @@ static void serpent128_set_key (u32 *ks, const u32 *ukey)
k_set(32,a,b,c,d); sb3(a,b,c,d,e,f,g,h); k_get(32,e,f,g,h);
}
static void serpent128_encrypt (const u32 *ks, const u32 *in, u32 *out)
void serpent128_encrypt (const u32 *ks, const u32 *in, u32 *out)
{
u32 a,b,c,d,e,f,g,h;
u32 t1,t2,t3,t4,t5,t6,t7,t8,t9,t10,t11,t12,t13,t14,t15,t16;
@ -519,7 +519,7 @@ static void serpent128_encrypt (const u32 *ks, const u32 *in, u32 *out)
out[3] = d;
}
static void serpent128_decrypt (const u32 *ks, const u32 *in, u32 *out)
void serpent128_decrypt (const u32 *ks, const u32 *in, u32 *out)
{
u32 a,b,c,d,e,f,g,h;
u32 t1,t2,t3,t4,t5,t6,t7,t8,t9,t10,t11,t12,t13,t14,t15,t16;
@ -571,7 +571,7 @@ static void serpent128_decrypt (const u32 *ks, const u32 *in, u32 *out)
// 256 bit key
static void serpent256_set_key (u32 *ks, const u32 *ukey)
void serpent256_set_key (u32 *ks, const u32 *ukey)
{
#ifdef _unroll
#pragma unroll
@ -627,7 +627,7 @@ static void serpent256_set_key (u32 *ks, const u32 *ukey)
k_set(32,a,b,c,d); sb3(a,b,c,d,e,f,g,h); k_get(32,e,f,g,h);
}
static void serpent256_encrypt (const u32 *ks, const u32 *in, u32 *out)
void serpent256_encrypt (const u32 *ks, const u32 *in, u32 *out)
{
u32 a,b,c,d,e,f,g,h;
u32 t1,t2,t3,t4,t5,t6,t7,t8,t9,t10,t11,t12,t13,t14,t15,t16;
@ -677,7 +677,7 @@ static void serpent256_encrypt (const u32 *ks, const u32 *in, u32 *out)
out[3] = d;
}
static void serpent256_decrypt (const u32 *ks, const u32 *in, u32 *out)
void serpent256_decrypt (const u32 *ks, const u32 *in, u32 *out)
{
u32 a,b,c,d,e,f,g,h;
u32 t1,t2,t3,t4,t5,t6,t7,t8,t9,t10,t11,t12,t13,t14,t15,t16;

@ -266,7 +266,7 @@ __constant u32a m_tab[4][256] =
#define q42(x,k) q (1, q (0, q (0, q (0, x) ^ extract_byte (k[3], 2)) ^ extract_byte (k[2], 2)) ^ extract_byte (k[1], 2)) ^ extract_byte (k[0], 2)
#define q43(x,k) q (1, q (1, q (0, q (1, x) ^ extract_byte (k[3], 3)) ^ extract_byte (k[2], 3)) ^ extract_byte (k[1], 3)) ^ extract_byte (k[0], 3)
static u32 mds_rem (u32 p0, u32 p1)
u32 mds_rem (u32 p0, u32 p1)
{
#define G_MOD 0x14d
@ -332,7 +332,7 @@ static u32 mds_rem (u32 p0, u32 p1)
data[1] = rotr32_S (data[1] ^ (t2 + 2 * t3 + lk[4 * (i) + 9]), 1); \
}
static u32 h_fun128 (u32 *sk, u32 *lk, const u32 x, const u32 *key)
u32 h_fun128 (u32 *sk, u32 *lk, const u32 x, const u32 *key)
{
u32 b0, b1, b2, b3;
@ -349,7 +349,7 @@ static u32 h_fun128 (u32 *sk, u32 *lk, const u32 x, const u32 *key)
return mds (0, b0) ^ mds (1, b1) ^ mds (2, b2) ^ mds (3, b3);
}
static void twofish128_set_key (u32 *sk, u32 *lk, const u32 *ukey)
void twofish128_set_key (u32 *sk, u32 *lk, const u32 *ukey)
{
u32 me_key[2];
@ -379,7 +379,7 @@ static void twofish128_set_key (u32 *sk, u32 *lk, const u32 *ukey)
}
}
static void twofish128_encrypt (const u32 *sk, const u32 *lk, const u32 *in, u32 *out)
void twofish128_encrypt (const u32 *sk, const u32 *lk, const u32 *in, u32 *out)
{
u32 data[4];
@ -403,7 +403,7 @@ static void twofish128_encrypt (const u32 *sk, const u32 *lk, const u32 *in, u32
out[3] = data[1] ^ lk[7];
}
static void twofish128_decrypt (const u32 *sk, const u32 *lk, const u32 *in, u32 *out)
void twofish128_decrypt (const u32 *sk, const u32 *lk, const u32 *in, u32 *out)
{
u32 data[4];
@ -465,7 +465,7 @@ static void twofish128_decrypt (const u32 *sk, const u32 *lk, const u32 *in, u32
data[1] = rotr32_S (data[1] ^ (t2 + 2 * t3 + lk[4 * (i) + 9]), 1); \
}
static u32 h_fun256 (u32 *sk, u32 *lk, const u32 x, const u32 *key)
u32 h_fun256 (u32 *sk, u32 *lk, const u32 x, const u32 *key)
{
u32 b0, b1, b2, b3;
@ -492,7 +492,7 @@ static u32 h_fun256 (u32 *sk, u32 *lk, const u32 x, const u32 *key)
return mds (0, b0) ^ mds (1, b1) ^ mds (2, b2) ^ mds (3, b3);
}
static void twofish256_set_key (u32 *sk, u32 *lk, const u32 *ukey)
void twofish256_set_key (u32 *sk, u32 *lk, const u32 *ukey)
{
u32 me_key[4];
@ -528,7 +528,7 @@ static void twofish256_set_key (u32 *sk, u32 *lk, const u32 *ukey)
}
}
static void twofish256_encrypt (const u32 *sk, const u32 *lk, const u32 *in, u32 *out)
void twofish256_encrypt (const u32 *sk, const u32 *lk, const u32 *in, u32 *out)
{
u32 data[4];
@ -552,7 +552,7 @@ static void twofish256_encrypt (const u32 *sk, const u32 *lk, const u32 *in, u32
out[3] = data[1] ^ lk[7];
}
static void twofish256_decrypt (const u32 *sk, const u32 *lk, const u32 *in, u32 *out)
void twofish256_decrypt (const u32 *sk, const u32 *lk, const u32 *in, u32 *out)
{
u32 data[4];

@ -75,7 +75,7 @@ __constant u32 c_append_helper[64][16] =
* pure scalar functions
*/
static int ffz (const u32 v)
int ffz (const u32 v)
{
#ifdef _unroll
#pragma unroll
@ -90,7 +90,7 @@ static int ffz (const u32 v)
return -1;
}
static int hash_comp (const u32 d1[4], __global const u32 *d2)
int hash_comp (const u32 d1[4], __global const u32 *d2)
{
if (d1[3] > d2[DGST_R3]) return ( 1);
if (d1[3] < d2[DGST_R3]) return (-1);
@ -104,7 +104,7 @@ static int hash_comp (const u32 d1[4], __global const u32 *d2)
return (0);
}
static int find_hash (const u32 digest[4], const u32 digests_cnt, __global const digest_t *digests_buf)
int find_hash (const u32 digest[4], const u32 digests_cnt, __global const digest_t *digests_buf)
{
for (u32 l = 0, r = digests_cnt; r; r >>= 1)
{
@ -127,12 +127,12 @@ static int find_hash (const u32 digest[4], const u32 digests_cnt, __global const
return (-1);
}
static u32 check_bitmap (__global const u32 *bitmap, const u32 bitmap_mask, const u32 bitmap_shift, const u32 digest)
u32 check_bitmap (__global const u32 *bitmap, const u32 bitmap_mask, const u32 bitmap_shift, const u32 digest)
{
return (bitmap[(digest >> bitmap_shift) & bitmap_mask] & (1 << (digest & 0x1f)));
}
static u32 check (const u32 digest[4], __global const u32 *bitmap_s1_a, __global const u32 *bitmap_s1_b, __global const u32 *bitmap_s1_c, __global const u32 *bitmap_s1_d, __global const u32 *bitmap_s2_a, __global const u32 *bitmap_s2_b, __global const u32 *bitmap_s2_c, __global const u32 *bitmap_s2_d, const u32 bitmap_mask, const u32 bitmap_shift1, const u32 bitmap_shift2)
u32 check (const u32 digest[4], __global const u32 *bitmap_s1_a, __global const u32 *bitmap_s1_b, __global const u32 *bitmap_s1_c, __global const u32 *bitmap_s1_d, __global const u32 *bitmap_s2_a, __global const u32 *bitmap_s2_b, __global const u32 *bitmap_s2_c, __global const u32 *bitmap_s2_d, const u32 bitmap_mask, const u32 bitmap_shift1, const u32 bitmap_shift2)
{
if (check_bitmap (bitmap_s1_a, bitmap_mask, bitmap_shift1, digest[0]) == 0) return (0);
if (check_bitmap (bitmap_s1_b, bitmap_mask, bitmap_shift1, digest[1]) == 0) return (0);
@ -147,7 +147,7 @@ static u32 check (const u32 digest[4], __global const u32 *bitmap_s1_a, __global
return (1);
}
static void mark_hash (__global plain_t *plains_buf, __global u32 *d_result, const u32 salt_pos, const u32 digests_cnt, const u32 digest_pos, const u32 hash_pos, const u32 gid, const u32 il_pos)
void mark_hash (__global plain_t *plains_buf, __global u32 *d_result, const u32 salt_pos, const u32 digests_cnt, const u32 digest_pos, const u32 hash_pos, const u32 gid, const u32 il_pos)
{
const u32 idx = atomic_inc (d_result);
@ -168,7 +168,7 @@ static void mark_hash (__global plain_t *plains_buf, __global u32 *d_result, con
plains_buf[idx].il_pos = il_pos;
}
static int count_char (const u32 *buf, const int elems, const u32 c)
int count_char (const u32 *buf, const int elems, const u32 c)
{
int r = 0;
@ -185,7 +185,7 @@ static int count_char (const u32 *buf, const int elems, const u32 c)
return r;
}
static float get_entropy (const u32 *buf, const int elems)
float get_entropy (const u32 *buf, const int elems)
{
const int length = elems * 4;
@ -212,7 +212,7 @@ static float get_entropy (const u32 *buf, const int elems)
* vector functions
*/
static void make_utf16be (const u32x in[4], u32x out1[4], u32x out2[4])
void make_utf16be (const u32x in[4], u32x out1[4], u32x out2[4])
{
#if defined IS_NV
@ -250,7 +250,7 @@ static void make_utf16be (const u32x in[4], u32x out1[4], u32x out2[4])
#endif
}
static void make_utf16beN (const u32x in[4], u32x out1[4], u32x out2[4])
void make_utf16beN (const u32x in[4], u32x out1[4], u32x out2[4])
{
#if defined IS_NV
@ -288,7 +288,7 @@ static void make_utf16beN (const u32x in[4], u32x out1[4], u32x out2[4])
#endif
}
static void make_utf16le (const u32x in[4], u32x out1[4], u32x out2[4])
void make_utf16le (const u32x in[4], u32x out1[4], u32x out2[4])
{
#if defined IS_NV
@ -326,7 +326,7 @@ static void make_utf16le (const u32x in[4], u32x out1[4], u32x out2[4])
#endif
}
static void make_utf16leN (const u32x in[4], u32x out1[4], u32x out2[4])
void make_utf16leN (const u32x in[4], u32x out1[4], u32x out2[4])
{
#if defined IS_NV
@ -364,7 +364,7 @@ static void make_utf16leN (const u32x in[4], u32x out1[4], u32x out2[4])
#endif
}
static void undo_utf16be (const u32x in1[4], const u32x in2[4], u32x out[4])
void undo_utf16be (const u32x in1[4], const u32x in2[4], u32x out[4])
{
#if defined IS_NV
@ -394,7 +394,7 @@ static void undo_utf16be (const u32x in1[4], const u32x in2[4], u32x out[4])
#endif
}
static void undo_utf16le (const u32x in1[4], const u32x in2[4], u32x out[4])
void undo_utf16le (const u32x in1[4], const u32x in2[4], u32x out[4])
{
#if defined IS_NV
@ -424,7 +424,7 @@ static void undo_utf16le (const u32x in1[4], const u32x in2[4], u32x out[4])
#endif
}
static void append_0x80_1x4 (u32x w0[4], const u32 offset)
void append_0x80_1x4 (u32x w0[4], const u32 offset)
{
w0[0] |= 0x80808080 & c_append_helper[offset][0];
w0[1] |= 0x80808080 & c_append_helper[offset][1];
@ -432,7 +432,7 @@ static void append_0x80_1x4 (u32x w0[4], const u32 offset)
w0[3] |= 0x80808080 & c_append_helper[offset][3];
}
static void append_0x80_2x4 (u32x w0[4], u32x w1[4], const u32 offset)
void append_0x80_2x4 (u32x w0[4], u32x w1[4], const u32 offset)
{
w0[0] |= 0x80808080 & c_append_helper[offset][0];
w0[1] |= 0x80808080 & c_append_helper[offset][1];
@ -444,7 +444,7 @@ static void append_0x80_2x4 (u32x w0[4], u32x w1[4], const u32 offset)
w1[3] |= 0x80808080 & c_append_helper[offset][7];
}
static void append_0x80_3x4 (u32x w0[4], u32x w1[4], u32x w2[4], const u32 offset)
void append_0x80_3x4 (u32x w0[4], u32x w1[4], u32x w2[4], const u32 offset)
{
w0[0] |= 0x80808080 & c_append_helper[offset][ 0];
w0[1] |= 0x80808080 & c_append_helper[offset][ 1];
@ -460,7 +460,7 @@ static void append_0x80_3x4 (u32x w0[4], u32x w1[4], u32x w2[4], const u32 offse
w2[3] |= 0x80808080 & c_append_helper[offset][11];
}
static void append_0x80_4x4 (u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const u32 offset)
void append_0x80_4x4 (u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const u32 offset)
{
w0[0] |= 0x80808080 & c_append_helper[offset][ 0];
w0[1] |= 0x80808080 & c_append_helper[offset][ 1];
@ -480,7 +480,7 @@ static void append_0x80_4x4 (u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], con
w3[3] |= 0x80808080 & c_append_helper[offset][15];
}
static void append_0x80_8x4 (u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], u32x w4[4], u32x w5[4], u32x w6[4], u32x w7[4], const u32 offset)
void append_0x80_8x4 (u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], u32x w4[4], u32x w5[4], u32x w6[4], u32x w7[4], const u32 offset)
{
switch (offset)
{
@ -998,7 +998,7 @@ static void append_0x80_8x4 (u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], u32
}
}
static void append_0x80_1x16 (u32x w[16], const u32 offset)
void append_0x80_1x16 (u32x w[16], const u32 offset)
{
switch (offset)
{
@ -1260,7 +1260,7 @@ static void append_0x80_1x16 (u32x w[16], const u32 offset)
}
}
static void switch_buffer_by_offset_le (u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const u32 offset)
void switch_buffer_by_offset_le (u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const u32 offset)
{
const int offset_mod_4 = offset & 3;
@ -1967,7 +1967,7 @@ static void switch_buffer_by_offset_le (u32x w0[4], u32x w1[4], u32x w2[4], u32x
#endif
}
static void switch_buffer_by_offset_carry_le (u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], u32x c0[4], u32x c1[4], u32x c2[4], u32x c3[4], const u32 offset)
void switch_buffer_by_offset_carry_le (u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], u32x c0[4], u32x c1[4], u32x c2[4], u32x c3[4], const u32 offset)
{
const int offset_mod_4 = offset & 3;
@ -3289,7 +3289,7 @@ static void switch_buffer_by_offset_carry_le (u32x w0[4], u32x w1[4], u32x w2[4]
#endif
}
static void switch_buffer_by_offset_be (u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const u32 offset)
void switch_buffer_by_offset_be (u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const u32 offset)
{
#ifdef IS_AMD
volatile const int offset_switch = offset / 4;
@ -3960,7 +3960,7 @@ static void switch_buffer_by_offset_be (u32x w0[4], u32x w1[4], u32x w2[4], u32x
#endif
}
static void switch_buffer_by_offset_carry_be (u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], u32x c0[4], u32x c1[4], u32x c2[4], u32x c3[4], const u32 offset)
void switch_buffer_by_offset_carry_be (u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], u32x c0[4], u32x c1[4], u32x c2[4], u32x c3[4], const u32 offset)
{
#ifdef IS_AMD
volatile const int offset_switch = offset / 4;
@ -4900,7 +4900,7 @@ static void switch_buffer_by_offset_carry_be (u32x w0[4], u32x w1[4], u32x w2[4]
#endif
}
static void switch_buffer_by_offset_8x4_le (u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], u32x w4[4], u32x w5[4], u32x w6[4], u32x w7[4], const u32 offset)
void switch_buffer_by_offset_8x4_le (u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], u32x w4[4], u32x w5[4], u32x w6[4], u32x w7[4], const u32 offset)
{
const int offset_mod_4 = offset & 3;
@ -6710,7 +6710,7 @@ static void switch_buffer_by_offset_8x4_le (u32x w0[4], u32x w1[4], u32x w2[4],
#endif
}
static void switch_buffer_by_offset_8x4_be (u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], u32x w4[4], u32x w5[4], u32x w6[4], u32x w7[4], const u32 offset)
void switch_buffer_by_offset_8x4_be (u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], u32x w4[4], u32x w5[4], u32x w6[4], u32x w7[4], const u32 offset)
{
#ifdef IS_AMD
volatile const int offset_switch = offset / 4;
@ -9042,7 +9042,7 @@ static void switch_buffer_by_offset_8x4_be (u32x w0[4], u32x w1[4], u32x w2[4],
#endif
}
static void switch_buffer_by_offset_8x4_carry_be (u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], u32x w4[4], u32x w5[4], u32x w6[4], u32x w7[4], u32x c0[4], u32x c1[4], u32x c2[4], u32x c3[4], u32x c4[4], u32x c5[4], u32x c6[4], u32x c7[4], const u32 offset)
void switch_buffer_by_offset_8x4_carry_be (u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], u32x w4[4], u32x w5[4], u32x w6[4], u32x w7[4], u32x c0[4], u32x c1[4], u32x c2[4], u32x c3[4], u32x c4[4], u32x c5[4], u32x c6[4], u32x c7[4], const u32 offset)
{
#ifdef IS_AMD
volatile const int offset_switch = offset / 4;
@ -12430,7 +12430,7 @@ static void switch_buffer_by_offset_8x4_carry_be (u32x w0[4], u32x w1[4], u32x w
#endif
}
static void switch_buffer_by_offset_1x64_le (u32x w[64], const u32 offset)
void switch_buffer_by_offset_1x64_le (u32x w[64], const u32 offset)
{
const int offset_mod_4 = offset & 3;
@ -21174,7 +21174,7 @@ static void switch_buffer_by_offset_1x64_le (u32x w[64], const u32 offset)
#endif
}
static void switch_buffer_by_offset_1x64_be (u32x w[64], const u32 offset)
void switch_buffer_by_offset_1x64_be (u32x w[64], const u32 offset)
{
#ifdef IS_AMD
volatile const int offset_switch = offset / 4;
@ -29910,7 +29910,7 @@ static void switch_buffer_by_offset_1x64_be (u32x w[64], const u32 offset)
* vector functions as scalar (for outer loop usage)
*/
static void truncate_block_4x4_le_S (u32 w0[4], const u32 len)
void truncate_block_4x4_le_S (u32 w0[4], const u32 len)
{
switch (len)
{
@ -30020,7 +30020,7 @@ static void truncate_block_4x4_le_S (u32 w0[4], const u32 len)
}
}
static void truncate_block_4x4_be_S (u32 w0[4], const u32 len)
void truncate_block_4x4_be_S (u32 w0[4], const u32 len)
{
switch (len)
{
@ -30130,7 +30130,7 @@ static void truncate_block_4x4_be_S (u32 w0[4], const u32 len)
}
}
static void truncate_block_16x4_le_S (u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const u32 len)
void truncate_block_16x4_le_S (u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const u32 len)
{
switch (len)
{
@ -30936,7 +30936,7 @@ static void truncate_block_16x4_le_S (u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4]
}
}
static void truncate_block_16x4_be_S (u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const u32 len)
void truncate_block_16x4_be_S (u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const u32 len)
{
switch (len)
{
@ -31742,7 +31742,7 @@ static void truncate_block_16x4_be_S (u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4]
}
}
static void append_0x01_2x4_S (u32 w0[4], u32 w1[4], const u32 offset)
void append_0x01_2x4_S (u32 w0[4], u32 w1[4], const u32 offset)
{
w0[0] |= 0x01010101 & c_append_helper[offset][0];
w0[1] |= 0x01010101 & c_append_helper[offset][1];
@ -31754,7 +31754,7 @@ static void append_0x01_2x4_S (u32 w0[4], u32 w1[4], const u32 offset)
w1[3] |= 0x01010101 & c_append_helper[offset][7];
}
static void append_0x80_1x4_S (u32 w0[4], const u32 offset)
void append_0x80_1x4_S (u32 w0[4], const u32 offset)
{
w0[0] |= 0x80808080 & c_append_helper[offset][0];
w0[1] |= 0x80808080 & c_append_helper[offset][1];
@ -31762,7 +31762,7 @@ static void append_0x80_1x4_S (u32 w0[4], const u32 offset)
w0[3] |= 0x80808080 & c_append_helper[offset][3];
}
static void append_0x80_2x4_S (u32 w0[4], u32 w1[4], const u32 offset)
void append_0x80_2x4_S (u32 w0[4], u32 w1[4], const u32 offset)
{
w0[0] |= 0x80808080 & c_append_helper[offset][0];
w0[1] |= 0x80808080 & c_append_helper[offset][1];
@ -31774,7 +31774,7 @@ static void append_0x80_2x4_S (u32 w0[4], u32 w1[4], const u32 offset)
w1[3] |= 0x80808080 & c_append_helper[offset][7];
}
static void append_0x80_3x4_S (u32 w0[4], u32 w1[4], u32 w2[4], const u32 offset)
void append_0x80_3x4_S (u32 w0[4], u32 w1[4], u32 w2[4], const u32 offset)
{
w0[0] |= 0x80808080 & c_append_helper[offset][ 0];
w0[1] |= 0x80808080 & c_append_helper[offset][ 1];
@ -31790,7 +31790,7 @@ static void append_0x80_3x4_S (u32 w0[4], u32 w1[4], u32 w2[4], const u32 offset
w2[3] |= 0x80808080 & c_append_helper[offset][11];
}
static void append_0x80_4x4_S (u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const u32 offset)
void append_0x80_4x4_S (u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const u32 offset)
{
w0[0] |= 0x80808080 & c_append_helper[offset][ 0];
w0[1] |= 0x80808080 & c_append_helper[offset][ 1];
@ -31810,7 +31810,7 @@ static void append_0x80_4x4_S (u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const
w3[3] |= 0x80808080 & c_append_helper[offset][15];
}
static void append_0x80_8x4_S (u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], u32 w4[4], u32 w5[4], u32 w6[4], u32 w7[4], const u32 offset)
void append_0x80_8x4_S (u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], u32 w4[4], u32 w5[4], u32 w6[4], u32 w7[4], const u32 offset)
{
switch (offset)
{
@ -32328,7 +32328,7 @@ static void append_0x80_8x4_S (u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], u32 w
}
}
static void make_utf16be_S (const u32 in[4], u32 out1[4], u32 out2[4])
void make_utf16be_S (const u32 in[4], u32 out1[4], u32 out2[4])
{
#if defined IS_NV
@ -32366,7 +32366,7 @@ static void make_utf16be_S (const u32 in[4], u32 out1[4], u32 out2[4])
#endif
}
static void make_utf16le_S (const u32 in[4], u32 out1[4], u32 out2[4])
void make_utf16le_S (const u32 in[4], u32 out1[4], u32 out2[4])
{
#if defined IS_NV
@ -32404,7 +32404,7 @@ static void make_utf16le_S (const u32 in[4], u32 out1[4], u32 out2[4])
#endif
}
static void undo_utf16be_S (const u32 in1[4], const u32 in2[4], u32 out[4])
void undo_utf16be_S (const u32 in1[4], const u32 in2[4], u32 out[4])
{
#if defined IS_NV
@ -32434,7 +32434,7 @@ static void undo_utf16be_S (const u32 in1[4], const u32 in2[4], u32 out[4])
#endif
}
static void undo_utf16le_S (const u32 in1[4], const u32 in2[4], u32 out[4])
void undo_utf16le_S (const u32 in1[4], const u32 in2[4], u32 out[4])
{
#if defined IS_NV
@ -32464,7 +32464,7 @@ static void undo_utf16le_S (const u32 in1[4], const u32 in2[4], u32 out[4])
#endif
}
static void switch_buffer_by_offset_le_S (u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const u32 offset)
void switch_buffer_by_offset_le_S (u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const u32 offset)
{
const int offset_mod_4 = offset & 3;
@ -33170,7 +33170,7 @@ static void switch_buffer_by_offset_le_S (u32 w0[4], u32 w1[4], u32 w2[4], u32 w
#endif
}
static void switch_buffer_by_offset_carry_le_S (u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], u32 c0[4], u32 c1[4], u32 c2[4], u32 c3[4], const u32 offset)
void switch_buffer_by_offset_carry_le_S (u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], u32 c0[4], u32 c1[4], u32 c2[4], u32 c3[4], const u32 offset)
{
const int offset_mod_4 = offset & 3;
@ -34492,7 +34492,7 @@ static void switch_buffer_by_offset_carry_le_S (u32 w0[4], u32 w1[4], u32 w2[4],
#endif
}
static void switch_buffer_by_offset_be_S (u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const u32 offset)
void switch_buffer_by_offset_be_S (u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const u32 offset)
{
#ifdef IS_AMD
volatile const int offset_switch = offset / 4;
@ -35160,7 +35160,7 @@ static void switch_buffer_by_offset_be_S (u32 w0[4], u32 w1[4], u32 w2[4], u32 w
#endif
}
static void switch_buffer_by_offset_carry_be_S (u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], u32 c0[4], u32 c1[4], u32 c2[4], u32 c3[4], const u32 offset)
void switch_buffer_by_offset_carry_be_S (u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], u32 c0[4], u32 c1[4], u32 c2[4], u32 c3[4], const u32 offset)
{
#ifdef IS_AMD
volatile const int offset_switch = offset / 4;
@ -36100,7 +36100,7 @@ static void switch_buffer_by_offset_carry_be_S (u32 w0[4], u32 w1[4], u32 w2[4],
#endif
}
static void switch_buffer_by_offset_8x4_le_S (u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], u32 w4[4], u32 w5[4], u32 w6[4], u32 w7[4], const u32 offset)
void switch_buffer_by_offset_8x4_le_S (u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], u32 w4[4], u32 w5[4], u32 w6[4], u32 w7[4], const u32 offset)
{
const int offset_mod_4 = offset & 3;
@ -37910,7 +37910,7 @@ static void switch_buffer_by_offset_8x4_le_S (u32 w0[4], u32 w1[4], u32 w2[4], u
#endif
}
static void switch_buffer_by_offset_8x4_be_S (u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], u32 w4[4], u32 w5[4], u32 w6[4], u32 w7[4], const u32 offset)
void switch_buffer_by_offset_8x4_be_S (u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], u32 w4[4], u32 w5[4], u32 w6[4], u32 w7[4], const u32 offset)
{
#ifdef IS_AMD
volatile const int offset_switch = offset / 4;
@ -40242,7 +40242,7 @@ static void switch_buffer_by_offset_8x4_be_S (u32 w0[4], u32 w1[4], u32 w2[4], u
#endif
}
static void switch_buffer_by_offset_8x4_carry_be_S (u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], u32 w4[4], u32 w5[4], u32 w6[4], u32 w7[4], u32 c0[4], u32 c1[4], u32 c2[4], u32 c3[4], u32 c4[4], u32 c5[4], u32 c6[4], u32 c7[4], const u32 offset)
void switch_buffer_by_offset_8x4_carry_be_S (u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], u32 w4[4], u32 w5[4], u32 w6[4], u32 w7[4], u32 c0[4], u32 c1[4], u32 c2[4], u32 c3[4], u32 c4[4], u32 c5[4], u32 c6[4], u32 c7[4], const u32 offset)
{
#ifdef IS_AMD
volatile const int offset_switch = offset / 4;
@ -43630,7 +43630,7 @@ static void switch_buffer_by_offset_8x4_carry_be_S (u32 w0[4], u32 w1[4], u32 w2
#endif
}
static void switch_buffer_by_offset_1x64_le_S (u32 w[64], const u32 offset)
void switch_buffer_by_offset_1x64_le_S (u32 w[64], const u32 offset)
{
const int offset_mod_4 = offset & 3;
@ -52374,7 +52374,7 @@ static void switch_buffer_by_offset_1x64_le_S (u32 w[64], const u32 offset)
#endif
}
static void switch_buffer_by_offset_1x64_be_S (u32 w[64], const u32 offset)
void switch_buffer_by_offset_1x64_be_S (u32 w[64], const u32 offset)
{
#ifdef IS_AMD
volatile const int offset_switch = offset / 4;
@ -61170,7 +61170,7 @@ static void switch_buffer_by_offset_1x64_be_S (u32 w[64], const u32 offset)
PACKSV4 (s6, v6, e); \
PACKSV4 (s7, v7, e);
static void switch_buffer_by_offset_le_VV (u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const u32x offset)
void switch_buffer_by_offset_le_VV (u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const u32x offset)
{
#if VECT_SIZE == 1
@ -61230,7 +61230,7 @@ static void switch_buffer_by_offset_le_VV (u32x w0[4], u32x w1[4], u32x w2[4], u
#endif
}
static void switch_buffer_by_offset_8x4_le_VV (u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], u32x w4[4], u32x w5[4], u32x w6[4], u32x w7[4], const u32x offset)
void switch_buffer_by_offset_8x4_le_VV (u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], u32x w4[4], u32x w5[4], u32x w6[4], u32x w7[4], const u32x offset)
{
#if VECT_SIZE == 1
@ -61410,7 +61410,7 @@ static void switch_buffer_by_offset_8x4_le_VV (u32x w0[4], u32x w1[4], u32x w2[4
#endif
}
static void append_0x01_2x4_VV (u32x w0[4], u32x w1[4], const u32x offset)
void append_0x01_2x4_VV (u32x w0[4], u32x w1[4], const u32x offset)
{
#if VECT_SIZE == 1
@ -61468,7 +61468,7 @@ static void append_0x01_2x4_VV (u32x w0[4], u32x w1[4], const u32x offset)
#endif
}
static void append_0x80_2x4_VV (u32x w0[4], u32x w1[4], const u32x offset)
void append_0x80_2x4_VV (u32x w0[4], u32x w1[4], const u32x offset)
{
#if VECT_SIZE == 1
@ -61526,7 +61526,7 @@ static void append_0x80_2x4_VV (u32x w0[4], u32x w1[4], const u32x offset)
#endif
}
static void append_0x80_4x4_VV (u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const u32x offset)
void append_0x80_4x4_VV (u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const u32x offset)
{
#if VECT_SIZE == 1

@ -17,7 +17,7 @@ typedef struct md4_ctx
} md4_ctx_t;
static void md4_transform (const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4], u32 digest[4])
void md4_transform (const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4], u32 digest[4])
{
u32 a = digest[0];
u32 b = digest[1];
@ -81,7 +81,7 @@ static void md4_transform (const u32 w0[4], const u32 w1[4], const u32 w2[4], co
digest[3] += d;
}
static void md4_init (md4_ctx_t *ctx)
void md4_init (md4_ctx_t *ctx)
{
ctx->h[0] = MD4M_A;
ctx->h[1] = MD4M_B;
@ -108,7 +108,7 @@ static void md4_init (md4_ctx_t *ctx)
ctx->len = 0;
}
static void md4_update_64 (md4_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const int len)
void md4_update_64 (md4_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const int len)
{
#ifdef IS_AMD
volatile const int pos = ctx->len & 63;
@ -186,7 +186,7 @@ static void md4_update_64 (md4_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32
}
}
static void md4_update (md4_ctx_t *ctx, const u32 *w, const int len)
void md4_update (md4_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -238,7 +238,7 @@ static void md4_update (md4_ctx_t *ctx, const u32 *w, const int len)
md4_update_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void md4_update_swap (md4_ctx_t *ctx, const u32 *w, const int len)
void md4_update_swap (md4_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -324,7 +324,7 @@ static void md4_update_swap (md4_ctx_t *ctx, const u32 *w, const int len)
md4_update_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void md4_update_utf16le (md4_ctx_t *ctx, const u32 *w, const int len)
void md4_update_utf16le (md4_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -366,7 +366,7 @@ static void md4_update_utf16le (md4_ctx_t *ctx, const u32 *w, const int len)
md4_update_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void md4_update_utf16le_swap (md4_ctx_t *ctx, const u32 *w, const int len)
void md4_update_utf16le_swap (md4_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -442,7 +442,7 @@ static void md4_update_utf16le_swap (md4_ctx_t *ctx, const u32 *w, const int len
md4_update_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void md4_update_global (md4_ctx_t *ctx, const __global u32 *w, const int len)
void md4_update_global (md4_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -494,7 +494,7 @@ static void md4_update_global (md4_ctx_t *ctx, const __global u32 *w, const int
md4_update_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void md4_update_global_swap (md4_ctx_t *ctx, const __global u32 *w, const int len)
void md4_update_global_swap (md4_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -580,7 +580,7 @@ static void md4_update_global_swap (md4_ctx_t *ctx, const __global u32 *w, const
md4_update_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void md4_update_global_utf16le (md4_ctx_t *ctx, const __global u32 *w, const int len)
void md4_update_global_utf16le (md4_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -622,7 +622,7 @@ static void md4_update_global_utf16le (md4_ctx_t *ctx, const __global u32 *w, co
md4_update_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void md4_update_global_utf16le_swap (md4_ctx_t *ctx, const __global u32 *w, const int len)
void md4_update_global_utf16le_swap (md4_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -698,7 +698,7 @@ static void md4_update_global_utf16le_swap (md4_ctx_t *ctx, const __global u32 *
md4_update_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void md4_final (md4_ctx_t *ctx)
void md4_final (md4_ctx_t *ctx)
{
const int pos = ctx->len & 63;
@ -741,7 +741,7 @@ typedef struct md4_hmac_ctx
} md4_hmac_ctx_t;
static void md4_hmac_init_64 (md4_hmac_ctx_t *ctx, const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4])
void md4_hmac_init_64 (md4_hmac_ctx_t *ctx, const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4])
{
u32 t0[4];
u32 t1[4];
@ -795,7 +795,7 @@ static void md4_hmac_init_64 (md4_hmac_ctx_t *ctx, const u32 w0[4], const u32 w1
md4_update_64 (&ctx->opad, t0, t1, t2, t3, 64);
}
static void md4_hmac_init (md4_hmac_ctx_t *ctx, const u32 *w, const int len)
void md4_hmac_init (md4_hmac_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -852,7 +852,7 @@ static void md4_hmac_init (md4_hmac_ctx_t *ctx, const u32 *w, const int len)
md4_hmac_init_64 (ctx, w0, w1, w2, w3);
}
static void md4_hmac_init_swap (md4_hmac_ctx_t *ctx, const u32 *w, const int len)
void md4_hmac_init_swap (md4_hmac_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -909,7 +909,7 @@ static void md4_hmac_init_swap (md4_hmac_ctx_t *ctx, const u32 *w, const int len
md4_hmac_init_64 (ctx, w0, w1, w2, w3);
}
static void md4_hmac_init_global (md4_hmac_ctx_t *ctx, __global const u32 *w, const int len)
void md4_hmac_init_global (md4_hmac_ctx_t *ctx, __global const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -966,7 +966,7 @@ static void md4_hmac_init_global (md4_hmac_ctx_t *ctx, __global const u32 *w, co
md4_hmac_init_64 (ctx, w0, w1, w2, w3);
}
static void md4_hmac_init_global_swap (md4_hmac_ctx_t *ctx, __global const u32 *w, const int len)
void md4_hmac_init_global_swap (md4_hmac_ctx_t *ctx, __global const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -1023,52 +1023,52 @@ static void md4_hmac_init_global_swap (md4_hmac_ctx_t *ctx, __global const u32 *
md4_hmac_init_64 (ctx, w0, w1, w2, w3);
}
static void md4_hmac_update_64 (md4_hmac_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const int len)
void md4_hmac_update_64 (md4_hmac_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const int len)
{
md4_update_64 (&ctx->ipad, w0, w1, w2, w3, len);
}
static void md4_hmac_update (md4_hmac_ctx_t *ctx, const u32 *w, const int len)
void md4_hmac_update (md4_hmac_ctx_t *ctx, const u32 *w, const int len)
{
md4_update (&ctx->ipad, w, len);
}
static void md4_hmac_update_swap (md4_hmac_ctx_t *ctx, const u32 *w, const int len)
void md4_hmac_update_swap (md4_hmac_ctx_t *ctx, const u32 *w, const int len)
{
md4_update_swap (&ctx->ipad, w, len);
}
static void md4_hmac_update_utf16le (md4_hmac_ctx_t *ctx, const u32 *w, const int len)
void md4_hmac_update_utf16le (md4_hmac_ctx_t *ctx, const u32 *w, const int len)
{
md4_update_utf16le (&ctx->ipad, w, len);
}
static void md4_hmac_update_utf16le_swap (md4_hmac_ctx_t *ctx, const u32 *w, const int len)
void md4_hmac_update_utf16le_swap (md4_hmac_ctx_t *ctx, const u32 *w, const int len)
{
md4_update_utf16le_swap (&ctx->ipad, w, len);
}
static void md4_hmac_update_global (md4_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void md4_hmac_update_global (md4_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
md4_update_global (&ctx->ipad, w, len);
}
static void md4_hmac_update_global_swap (md4_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void md4_hmac_update_global_swap (md4_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
md4_update_global_swap (&ctx->ipad, w, len);
}
static void md4_hmac_update_global_utf16le (md4_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void md4_hmac_update_global_utf16le (md4_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
md4_update_global_utf16le (&ctx->ipad, w, len);
}
static void md4_hmac_update_global_utf16le_swap (md4_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void md4_hmac_update_global_utf16le_swap (md4_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
md4_update_global_utf16le_swap (&ctx->ipad, w, len);
}
static void md4_hmac_final (md4_hmac_ctx_t *ctx)
void md4_hmac_final (md4_hmac_ctx_t *ctx)
{
md4_final (&ctx->ipad);
@ -1114,7 +1114,7 @@ typedef struct md4_ctx_vector
} md4_ctx_vector_t;
static void md4_transform_vector (const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4], u32x digest[4])
void md4_transform_vector (const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4], u32x digest[4])
{
u32x a = digest[0];
u32x b = digest[1];
@ -1178,7 +1178,7 @@ static void md4_transform_vector (const u32x w0[4], const u32x w1[4], const u32x
digest[3] += d;
}
static void md4_init_vector (md4_ctx_vector_t *ctx)
void md4_init_vector (md4_ctx_vector_t *ctx)
{
ctx->h[0] = MD4M_A;
ctx->h[1] = MD4M_B;
@ -1205,7 +1205,7 @@ static void md4_init_vector (md4_ctx_vector_t *ctx)
ctx->len = 0;
}
static void md4_init_vector_from_scalar (md4_ctx_vector_t *ctx, md4_ctx_t *ctx0)
void md4_init_vector_from_scalar (md4_ctx_vector_t *ctx, md4_ctx_t *ctx0)
{
ctx->h[0] = ctx0->h[0];
ctx->h[1] = ctx0->h[1];
@ -1232,7 +1232,7 @@ static void md4_init_vector_from_scalar (md4_ctx_vector_t *ctx, md4_ctx_t *ctx0)
ctx->len = ctx0->len;
}
static void md4_update_vector_64 (md4_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const int len)
void md4_update_vector_64 (md4_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const int len)
{
#ifdef IS_AMD
volatile const int pos = ctx->len & 63;
@ -1310,7 +1310,7 @@ static void md4_update_vector_64 (md4_ctx_vector_t *ctx, u32x w0[4], u32x w1[4],
}
}
static void md4_update_vector (md4_ctx_vector_t *ctx, const u32x *w, const int len)
void md4_update_vector (md4_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -1362,7 +1362,7 @@ static void md4_update_vector (md4_ctx_vector_t *ctx, const u32x *w, const int l
md4_update_vector_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void md4_update_vector_swap (md4_ctx_vector_t *ctx, const u32x *w, const int len)
void md4_update_vector_swap (md4_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -1448,7 +1448,7 @@ static void md4_update_vector_swap (md4_ctx_vector_t *ctx, const u32x *w, const
md4_update_vector_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void md4_update_vector_utf16le (md4_ctx_vector_t *ctx, const u32x *w, const int len)
void md4_update_vector_utf16le (md4_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -1490,7 +1490,7 @@ static void md4_update_vector_utf16le (md4_ctx_vector_t *ctx, const u32x *w, con
md4_update_vector_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void md4_update_vector_utf16le_swap (md4_ctx_vector_t *ctx, const u32x *w, const int len)
void md4_update_vector_utf16le_swap (md4_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -1566,7 +1566,7 @@ static void md4_update_vector_utf16le_swap (md4_ctx_vector_t *ctx, const u32x *w
md4_update_vector_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void md4_final_vector (md4_ctx_vector_t *ctx)
void md4_final_vector (md4_ctx_vector_t *ctx)
{
const int pos = ctx->len & 63;
@ -1609,7 +1609,7 @@ typedef struct md4_hmac_ctx_vector
} md4_hmac_ctx_vector_t;
static void md4_hmac_init_vector_64 (md4_hmac_ctx_vector_t *ctx, const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4])
void md4_hmac_init_vector_64 (md4_hmac_ctx_vector_t *ctx, const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4])
{
u32x t0[4];
u32x t1[4];
@ -1663,7 +1663,7 @@ static void md4_hmac_init_vector_64 (md4_hmac_ctx_vector_t *ctx, const u32x w0[4
md4_update_vector_64 (&ctx->opad, t0, t1, t2, t3, 64);
}
static void md4_hmac_init_vector (md4_hmac_ctx_vector_t *ctx, const u32x *w, const int len)
void md4_hmac_init_vector (md4_hmac_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -1720,17 +1720,17 @@ static void md4_hmac_init_vector (md4_hmac_ctx_vector_t *ctx, const u32x *w, con
md4_hmac_init_vector_64 (ctx, w0, w1, w2, w3);
}
static void md4_hmac_update_vector_64 (md4_hmac_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const int len)
void md4_hmac_update_vector_64 (md4_hmac_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const int len)
{
md4_update_vector_64 (&ctx->ipad, w0, w1, w2, w3, len);
}
static void md4_hmac_update_vector (md4_hmac_ctx_vector_t *ctx, const u32x *w, const int len)
void md4_hmac_update_vector (md4_hmac_ctx_vector_t *ctx, const u32x *w, const int len)
{
md4_update_vector (&ctx->ipad, w, len);
}
static void md4_hmac_final_vector (md4_hmac_ctx_vector_t *ctx)
void md4_hmac_final_vector (md4_hmac_ctx_vector_t *ctx)
{
md4_final_vector (&ctx->ipad);

@ -17,7 +17,7 @@ typedef struct md5_ctx
} md5_ctx_t;
static void md5_transform (const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4], u32 digest[4])
void md5_transform (const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4], u32 digest[4])
{
u32 a = digest[0];
u32 b = digest[1];
@ -117,7 +117,7 @@ static void md5_transform (const u32 w0[4], const u32 w1[4], const u32 w2[4], co
digest[3] += d;
}
static void md5_init (md5_ctx_t *ctx)
void md5_init (md5_ctx_t *ctx)
{
ctx->h[0] = MD5M_A;
ctx->h[1] = MD5M_B;
@ -144,7 +144,7 @@ static void md5_init (md5_ctx_t *ctx)
ctx->len = 0;
}
static void md5_update_64 (md5_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const int len)
void md5_update_64 (md5_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const int len)
{
#ifdef IS_AMD
volatile const int pos = ctx->len & 63;
@ -222,7 +222,7 @@ static void md5_update_64 (md5_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32
}
}
static void md5_update (md5_ctx_t *ctx, const u32 *w, const int len)
void md5_update (md5_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -274,7 +274,7 @@ static void md5_update (md5_ctx_t *ctx, const u32 *w, const int len)
md5_update_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void md5_update_swap (md5_ctx_t *ctx, const u32 *w, const int len)
void md5_update_swap (md5_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -360,7 +360,7 @@ static void md5_update_swap (md5_ctx_t *ctx, const u32 *w, const int len)
md5_update_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void md5_update_utf16le (md5_ctx_t *ctx, const u32 *w, const int len)
void md5_update_utf16le (md5_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -402,7 +402,7 @@ static void md5_update_utf16le (md5_ctx_t *ctx, const u32 *w, const int len)
md5_update_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void md5_update_utf16le_swap (md5_ctx_t *ctx, const u32 *w, const int len)
void md5_update_utf16le_swap (md5_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -478,7 +478,7 @@ static void md5_update_utf16le_swap (md5_ctx_t *ctx, const u32 *w, const int len
md5_update_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void md5_update_global (md5_ctx_t *ctx, const __global u32 *w, const int len)
void md5_update_global (md5_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -530,7 +530,7 @@ static void md5_update_global (md5_ctx_t *ctx, const __global u32 *w, const int
md5_update_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void md5_update_global_swap (md5_ctx_t *ctx, const __global u32 *w, const int len)
void md5_update_global_swap (md5_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -616,7 +616,7 @@ static void md5_update_global_swap (md5_ctx_t *ctx, const __global u32 *w, const
md5_update_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void md5_update_global_utf16le (md5_ctx_t *ctx, const __global u32 *w, const int len)
void md5_update_global_utf16le (md5_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -658,7 +658,7 @@ static void md5_update_global_utf16le (md5_ctx_t *ctx, const __global u32 *w, co
md5_update_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void md5_update_global_utf16le_swap (md5_ctx_t *ctx, const __global u32 *w, const int len)
void md5_update_global_utf16le_swap (md5_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -734,7 +734,7 @@ static void md5_update_global_utf16le_swap (md5_ctx_t *ctx, const __global u32 *
md5_update_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void md5_final (md5_ctx_t *ctx)
void md5_final (md5_ctx_t *ctx)
{
const int pos = ctx->len & 63;
@ -777,7 +777,7 @@ typedef struct md5_hmac_ctx
} md5_hmac_ctx_t;
static void md5_hmac_init_64 (md5_hmac_ctx_t *ctx, const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4])
void md5_hmac_init_64 (md5_hmac_ctx_t *ctx, const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4])
{
u32 t0[4];
u32 t1[4];
@ -831,7 +831,7 @@ static void md5_hmac_init_64 (md5_hmac_ctx_t *ctx, const u32 w0[4], const u32 w1
md5_update_64 (&ctx->opad, t0, t1, t2, t3, 64);
}
static void md5_hmac_init (md5_hmac_ctx_t *ctx, const u32 *w, const int len)
void md5_hmac_init (md5_hmac_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -888,7 +888,7 @@ static void md5_hmac_init (md5_hmac_ctx_t *ctx, const u32 *w, const int len)
md5_hmac_init_64 (ctx, w0, w1, w2, w3);
}
static void md5_hmac_init_swap (md5_hmac_ctx_t *ctx, const u32 *w, const int len)
void md5_hmac_init_swap (md5_hmac_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -945,7 +945,7 @@ static void md5_hmac_init_swap (md5_hmac_ctx_t *ctx, const u32 *w, const int len
md5_hmac_init_64 (ctx, w0, w1, w2, w3);
}
static void md5_hmac_init_global (md5_hmac_ctx_t *ctx, __global const u32 *w, const int len)
void md5_hmac_init_global (md5_hmac_ctx_t *ctx, __global const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -1002,7 +1002,7 @@ static void md5_hmac_init_global (md5_hmac_ctx_t *ctx, __global const u32 *w, co
md5_hmac_init_64 (ctx, w0, w1, w2, w3);
}
static void md5_hmac_init_global_swap (md5_hmac_ctx_t *ctx, __global const u32 *w, const int len)
void md5_hmac_init_global_swap (md5_hmac_ctx_t *ctx, __global const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -1059,52 +1059,52 @@ static void md5_hmac_init_global_swap (md5_hmac_ctx_t *ctx, __global const u32 *
md5_hmac_init_64 (ctx, w0, w1, w2, w3);
}
static void md5_hmac_update_64 (md5_hmac_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const int len)
void md5_hmac_update_64 (md5_hmac_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const int len)
{
md5_update_64 (&ctx->ipad, w0, w1, w2, w3, len);
}
static void md5_hmac_update (md5_hmac_ctx_t *ctx, const u32 *w, const int len)
void md5_hmac_update (md5_hmac_ctx_t *ctx, const u32 *w, const int len)
{
md5_update (&ctx->ipad, w, len);
}
static void md5_hmac_update_swap (md5_hmac_ctx_t *ctx, const u32 *w, const int len)
void md5_hmac_update_swap (md5_hmac_ctx_t *ctx, const u32 *w, const int len)
{
md5_update_swap (&ctx->ipad, w, len);
}
static void md5_hmac_update_utf16le (md5_hmac_ctx_t *ctx, const u32 *w, const int len)
void md5_hmac_update_utf16le (md5_hmac_ctx_t *ctx, const u32 *w, const int len)
{
md5_update_utf16le (&ctx->ipad, w, len);
}
static void md5_hmac_update_utf16le_swap (md5_hmac_ctx_t *ctx, const u32 *w, const int len)
void md5_hmac_update_utf16le_swap (md5_hmac_ctx_t *ctx, const u32 *w, const int len)
{
md5_update_utf16le_swap (&ctx->ipad, w, len);
}
static void md5_hmac_update_global (md5_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void md5_hmac_update_global (md5_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
md5_update_global (&ctx->ipad, w, len);
}
static void md5_hmac_update_global_swap (md5_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void md5_hmac_update_global_swap (md5_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
md5_update_global_swap (&ctx->ipad, w, len);
}
static void md5_hmac_update_global_utf16le (md5_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void md5_hmac_update_global_utf16le (md5_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
md5_update_global_utf16le (&ctx->ipad, w, len);
}
static void md5_hmac_update_global_utf16le_swap (md5_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void md5_hmac_update_global_utf16le_swap (md5_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
md5_update_global_utf16le_swap (&ctx->ipad, w, len);
}
static void md5_hmac_final (md5_hmac_ctx_t *ctx)
void md5_hmac_final (md5_hmac_ctx_t *ctx)
{
md5_final (&ctx->ipad);
@ -1150,7 +1150,7 @@ typedef struct md5_ctx_vector
} md5_ctx_vector_t;
static void md5_transform_vector (const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4], u32x digest[4])
void md5_transform_vector (const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4], u32x digest[4])
{
u32x a = digest[0];
u32x b = digest[1];
@ -1250,7 +1250,7 @@ static void md5_transform_vector (const u32x w0[4], const u32x w1[4], const u32x
digest[3] += d;
}
static void md5_init_vector (md5_ctx_vector_t *ctx)
void md5_init_vector (md5_ctx_vector_t *ctx)
{
ctx->h[0] = MD5M_A;
ctx->h[1] = MD5M_B;
@ -1277,7 +1277,7 @@ static void md5_init_vector (md5_ctx_vector_t *ctx)
ctx->len = 0;
}
static void md5_init_vector_from_scalar (md5_ctx_vector_t *ctx, md5_ctx_t *ctx0)
void md5_init_vector_from_scalar (md5_ctx_vector_t *ctx, md5_ctx_t *ctx0)
{
ctx->h[0] = ctx0->h[0];
ctx->h[1] = ctx0->h[1];
@ -1304,7 +1304,7 @@ static void md5_init_vector_from_scalar (md5_ctx_vector_t *ctx, md5_ctx_t *ctx0)
ctx->len = ctx0->len;
}
static void md5_update_vector_64 (md5_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const int len)
void md5_update_vector_64 (md5_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const int len)
{
#ifdef IS_AMD
volatile const int pos = ctx->len & 63;
@ -1382,7 +1382,7 @@ static void md5_update_vector_64 (md5_ctx_vector_t *ctx, u32x w0[4], u32x w1[4],
}
}
static void md5_update_vector (md5_ctx_vector_t *ctx, const u32x *w, const int len)
void md5_update_vector (md5_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -1434,7 +1434,7 @@ static void md5_update_vector (md5_ctx_vector_t *ctx, const u32x *w, const int l
md5_update_vector_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void md5_update_vector_swap (md5_ctx_vector_t *ctx, const u32x *w, const int len)
void md5_update_vector_swap (md5_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -1520,7 +1520,7 @@ static void md5_update_vector_swap (md5_ctx_vector_t *ctx, const u32x *w, const
md5_update_vector_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void md5_update_vector_utf16le (md5_ctx_vector_t *ctx, const u32x *w, const int len)
void md5_update_vector_utf16le (md5_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -1562,7 +1562,7 @@ static void md5_update_vector_utf16le (md5_ctx_vector_t *ctx, const u32x *w, con
md5_update_vector_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void md5_update_vector_utf16le_swap (md5_ctx_vector_t *ctx, const u32x *w, const int len)
void md5_update_vector_utf16le_swap (md5_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -1638,7 +1638,7 @@ static void md5_update_vector_utf16le_swap (md5_ctx_vector_t *ctx, const u32x *w
md5_update_vector_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void md5_final_vector (md5_ctx_vector_t *ctx)
void md5_final_vector (md5_ctx_vector_t *ctx)
{
const int pos = ctx->len & 63;
@ -1681,7 +1681,7 @@ typedef struct md5_hmac_ctx_vector
} md5_hmac_ctx_vector_t;
static void md5_hmac_init_vector_64 (md5_hmac_ctx_vector_t *ctx, const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4])
void md5_hmac_init_vector_64 (md5_hmac_ctx_vector_t *ctx, const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4])
{
u32x t0[4];
u32x t1[4];
@ -1735,7 +1735,7 @@ static void md5_hmac_init_vector_64 (md5_hmac_ctx_vector_t *ctx, const u32x w0[4
md5_update_vector_64 (&ctx->opad, t0, t1, t2, t3, 64);
}
static void md5_hmac_init_vector (md5_hmac_ctx_vector_t *ctx, const u32x *w, const int len)
void md5_hmac_init_vector (md5_hmac_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -1792,17 +1792,17 @@ static void md5_hmac_init_vector (md5_hmac_ctx_vector_t *ctx, const u32x *w, con
md5_hmac_init_vector_64 (ctx, w0, w1, w2, w3);
}
static void md5_hmac_update_vector_64 (md5_hmac_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const int len)
void md5_hmac_update_vector_64 (md5_hmac_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const int len)
{
md5_update_vector_64 (&ctx->ipad, w0, w1, w2, w3, len);
}
static void md5_hmac_update_vector (md5_hmac_ctx_vector_t *ctx, const u32x *w, const int len)
void md5_hmac_update_vector (md5_hmac_ctx_vector_t *ctx, const u32x *w, const int len)
{
md5_update_vector (&ctx->ipad, w, len);
}
static void md5_hmac_final_vector (md5_hmac_ctx_vector_t *ctx)
void md5_hmac_final_vector (md5_hmac_ctx_vector_t *ctx)
{
md5_final_vector (&ctx->ipad);

@ -17,7 +17,7 @@ typedef struct ripemd160_ctx
} ripemd160_ctx_t;
static void ripemd160_transform (const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4], u32 digest[5])
void ripemd160_transform (const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4], u32 digest[5])
{
u32 a1 = digest[0];
u32 b1 = digest[1];
@ -214,7 +214,7 @@ static void ripemd160_transform (const u32 w0[4], const u32 w1[4], const u32 w2[
digest[4] = e;
}
static void ripemd160_init (ripemd160_ctx_t *ctx)
void ripemd160_init (ripemd160_ctx_t *ctx)
{
ctx->h[0] = RIPEMD160M_A;
ctx->h[1] = RIPEMD160M_B;
@ -242,7 +242,7 @@ static void ripemd160_init (ripemd160_ctx_t *ctx)
ctx->len = 0;
}
static void ripemd160_update_64 (ripemd160_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const int len)
void ripemd160_update_64 (ripemd160_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const int len)
{
#ifdef IS_AMD
volatile const int pos = ctx->len & 63;
@ -320,7 +320,7 @@ static void ripemd160_update_64 (ripemd160_ctx_t *ctx, u32 w0[4], u32 w1[4], u32
}
}
static void ripemd160_update (ripemd160_ctx_t *ctx, const u32 *w, const int len)
void ripemd160_update (ripemd160_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -372,7 +372,7 @@ static void ripemd160_update (ripemd160_ctx_t *ctx, const u32 *w, const int len)
ripemd160_update_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void ripemd160_update_swap (ripemd160_ctx_t *ctx, const u32 *w, const int len)
void ripemd160_update_swap (ripemd160_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -458,7 +458,7 @@ static void ripemd160_update_swap (ripemd160_ctx_t *ctx, const u32 *w, const int
ripemd160_update_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void ripemd160_update_utf16le (ripemd160_ctx_t *ctx, const u32 *w, const int len)
void ripemd160_update_utf16le (ripemd160_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -500,7 +500,7 @@ static void ripemd160_update_utf16le (ripemd160_ctx_t *ctx, const u32 *w, const
ripemd160_update_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void ripemd160_update_utf16le_swap (ripemd160_ctx_t *ctx, const u32 *w, const int len)
void ripemd160_update_utf16le_swap (ripemd160_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -576,7 +576,7 @@ static void ripemd160_update_utf16le_swap (ripemd160_ctx_t *ctx, const u32 *w, c
ripemd160_update_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void ripemd160_update_global (ripemd160_ctx_t *ctx, const __global u32 *w, const int len)
void ripemd160_update_global (ripemd160_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -628,7 +628,7 @@ static void ripemd160_update_global (ripemd160_ctx_t *ctx, const __global u32 *w
ripemd160_update_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void ripemd160_update_global_swap (ripemd160_ctx_t *ctx, const __global u32 *w, const int len)
void ripemd160_update_global_swap (ripemd160_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -714,7 +714,7 @@ static void ripemd160_update_global_swap (ripemd160_ctx_t *ctx, const __global u
ripemd160_update_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void ripemd160_update_global_utf16le (ripemd160_ctx_t *ctx, const __global u32 *w, const int len)
void ripemd160_update_global_utf16le (ripemd160_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -756,7 +756,7 @@ static void ripemd160_update_global_utf16le (ripemd160_ctx_t *ctx, const __globa
ripemd160_update_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void ripemd160_update_global_utf16le_swap (ripemd160_ctx_t *ctx, const __global u32 *w, const int len)
void ripemd160_update_global_utf16le_swap (ripemd160_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -832,7 +832,7 @@ static void ripemd160_update_global_utf16le_swap (ripemd160_ctx_t *ctx, const __
ripemd160_update_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void ripemd160_final (ripemd160_ctx_t *ctx)
void ripemd160_final (ripemd160_ctx_t *ctx)
{
const int pos = ctx->len & 63;
@ -875,7 +875,7 @@ typedef struct ripemd160_hmac_ctx
} ripemd160_hmac_ctx_t;
static void ripemd160_hmac_init_64 (ripemd160_hmac_ctx_t *ctx, const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4])
void ripemd160_hmac_init_64 (ripemd160_hmac_ctx_t *ctx, const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4])
{
u32 t0[4];
u32 t1[4];
@ -929,7 +929,7 @@ static void ripemd160_hmac_init_64 (ripemd160_hmac_ctx_t *ctx, const u32 w0[4],
ripemd160_update_64 (&ctx->opad, t0, t1, t2, t3, 64);
}
static void ripemd160_hmac_init (ripemd160_hmac_ctx_t *ctx, const u32 *w, const int len)
void ripemd160_hmac_init (ripemd160_hmac_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -986,7 +986,7 @@ static void ripemd160_hmac_init (ripemd160_hmac_ctx_t *ctx, const u32 *w, const
ripemd160_hmac_init_64 (ctx, w0, w1, w2, w3);
}
static void ripemd160_hmac_init_swap (ripemd160_hmac_ctx_t *ctx, const u32 *w, const int len)
void ripemd160_hmac_init_swap (ripemd160_hmac_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -1043,7 +1043,7 @@ static void ripemd160_hmac_init_swap (ripemd160_hmac_ctx_t *ctx, const u32 *w, c
ripemd160_hmac_init_64 (ctx, w0, w1, w2, w3);
}
static void ripemd160_hmac_init_global (ripemd160_hmac_ctx_t *ctx, __global const u32 *w, const int len)
void ripemd160_hmac_init_global (ripemd160_hmac_ctx_t *ctx, __global const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -1100,7 +1100,7 @@ static void ripemd160_hmac_init_global (ripemd160_hmac_ctx_t *ctx, __global cons
ripemd160_hmac_init_64 (ctx, w0, w1, w2, w3);
}
static void ripemd160_hmac_init_global_swap (ripemd160_hmac_ctx_t *ctx, __global const u32 *w, const int len)
void ripemd160_hmac_init_global_swap (ripemd160_hmac_ctx_t *ctx, __global const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -1157,52 +1157,52 @@ static void ripemd160_hmac_init_global_swap (ripemd160_hmac_ctx_t *ctx, __global
ripemd160_hmac_init_64 (ctx, w0, w1, w2, w3);
}
static void ripemd160_hmac_update_64 (ripemd160_hmac_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const int len)
void ripemd160_hmac_update_64 (ripemd160_hmac_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const int len)
{
ripemd160_update_64 (&ctx->ipad, w0, w1, w2, w3, len);
}
static void ripemd160_hmac_update (ripemd160_hmac_ctx_t *ctx, const u32 *w, const int len)
void ripemd160_hmac_update (ripemd160_hmac_ctx_t *ctx, const u32 *w, const int len)
{
ripemd160_update (&ctx->ipad, w, len);
}
static void ripemd160_hmac_update_swap (ripemd160_hmac_ctx_t *ctx, const u32 *w, const int len)
void ripemd160_hmac_update_swap (ripemd160_hmac_ctx_t *ctx, const u32 *w, const int len)
{
ripemd160_update_swap (&ctx->ipad, w, len);
}
static void ripemd160_hmac_update_utf16le (ripemd160_hmac_ctx_t *ctx, const u32 *w, const int len)
void ripemd160_hmac_update_utf16le (ripemd160_hmac_ctx_t *ctx, const u32 *w, const int len)
{
ripemd160_update_utf16le (&ctx->ipad, w, len);
}
static void ripemd160_hmac_update_utf16le_swap (ripemd160_hmac_ctx_t *ctx, const u32 *w, const int len)
void ripemd160_hmac_update_utf16le_swap (ripemd160_hmac_ctx_t *ctx, const u32 *w, const int len)
{
ripemd160_update_utf16le_swap (&ctx->ipad, w, len);
}
static void ripemd160_hmac_update_global (ripemd160_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void ripemd160_hmac_update_global (ripemd160_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
ripemd160_update_global (&ctx->ipad, w, len);
}
static void ripemd160_hmac_update_global_swap (ripemd160_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void ripemd160_hmac_update_global_swap (ripemd160_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
ripemd160_update_global_swap (&ctx->ipad, w, len);
}
static void ripemd160_hmac_update_global_utf16le (ripemd160_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void ripemd160_hmac_update_global_utf16le (ripemd160_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
ripemd160_update_global_utf16le (&ctx->ipad, w, len);
}
static void ripemd160_hmac_update_global_utf16le_swap (ripemd160_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void ripemd160_hmac_update_global_utf16le_swap (ripemd160_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
ripemd160_update_global_utf16le_swap (&ctx->ipad, w, len);
}
static void ripemd160_hmac_final (ripemd160_hmac_ctx_t *ctx)
void ripemd160_hmac_final (ripemd160_hmac_ctx_t *ctx)
{
ripemd160_final (&ctx->ipad);
@ -1248,7 +1248,7 @@ typedef struct ripemd160_ctx_vector
} ripemd160_ctx_vector_t;
static void ripemd160_transform_vector (const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4], u32x digest[5])
void ripemd160_transform_vector (const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4], u32x digest[5])
{
u32x a1 = digest[0];
u32x b1 = digest[1];
@ -1445,7 +1445,7 @@ static void ripemd160_transform_vector (const u32x w0[4], const u32x w1[4], cons
digest[4] = e;
}
static void ripemd160_init_vector (ripemd160_ctx_vector_t *ctx)
void ripemd160_init_vector (ripemd160_ctx_vector_t *ctx)
{
ctx->h[0] = RIPEMD160M_A;
ctx->h[1] = RIPEMD160M_B;
@ -1473,7 +1473,7 @@ static void ripemd160_init_vector (ripemd160_ctx_vector_t *ctx)
ctx->len = 0;
}
static void ripemd160_init_vector_from_scalar (ripemd160_ctx_vector_t *ctx, ripemd160_ctx_t *ctx0)
void ripemd160_init_vector_from_scalar (ripemd160_ctx_vector_t *ctx, ripemd160_ctx_t *ctx0)
{
ctx->h[0] = ctx0->h[0];
ctx->h[1] = ctx0->h[1];
@ -1501,7 +1501,7 @@ static void ripemd160_init_vector_from_scalar (ripemd160_ctx_vector_t *ctx, ripe
ctx->len = ctx0->len;
}
static void ripemd160_update_vector_64 (ripemd160_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const int len)
void ripemd160_update_vector_64 (ripemd160_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const int len)
{
#ifdef IS_AMD
volatile const int pos = ctx->len & 63;
@ -1579,7 +1579,7 @@ static void ripemd160_update_vector_64 (ripemd160_ctx_vector_t *ctx, u32x w0[4],
}
}
static void ripemd160_update_vector (ripemd160_ctx_vector_t *ctx, const u32x *w, const int len)
void ripemd160_update_vector (ripemd160_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -1631,7 +1631,7 @@ static void ripemd160_update_vector (ripemd160_ctx_vector_t *ctx, const u32x *w,
ripemd160_update_vector_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void ripemd160_update_vector_swap (ripemd160_ctx_vector_t *ctx, const u32x *w, const int len)
void ripemd160_update_vector_swap (ripemd160_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -1717,7 +1717,7 @@ static void ripemd160_update_vector_swap (ripemd160_ctx_vector_t *ctx, const u32
ripemd160_update_vector_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void ripemd160_update_vector_utf16le (ripemd160_ctx_vector_t *ctx, const u32x *w, const int len)
void ripemd160_update_vector_utf16le (ripemd160_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -1759,7 +1759,7 @@ static void ripemd160_update_vector_utf16le (ripemd160_ctx_vector_t *ctx, const
ripemd160_update_vector_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void ripemd160_update_vector_utf16le_swap (ripemd160_ctx_vector_t *ctx, const u32x *w, const int len)
void ripemd160_update_vector_utf16le_swap (ripemd160_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -1835,7 +1835,7 @@ static void ripemd160_update_vector_utf16le_swap (ripemd160_ctx_vector_t *ctx, c
ripemd160_update_vector_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void ripemd160_final_vector (ripemd160_ctx_vector_t *ctx)
void ripemd160_final_vector (ripemd160_ctx_vector_t *ctx)
{
const int pos = ctx->len & 63;
@ -1878,7 +1878,7 @@ typedef struct ripemd160_hmac_ctx_vector
} ripemd160_hmac_ctx_vector_t;
static void ripemd160_hmac_init_vector_64 (ripemd160_hmac_ctx_vector_t *ctx, const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4])
void ripemd160_hmac_init_vector_64 (ripemd160_hmac_ctx_vector_t *ctx, const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4])
{
u32x t0[4];
u32x t1[4];
@ -1932,7 +1932,7 @@ static void ripemd160_hmac_init_vector_64 (ripemd160_hmac_ctx_vector_t *ctx, con
ripemd160_update_vector_64 (&ctx->opad, t0, t1, t2, t3, 64);
}
static void ripemd160_hmac_init_vector (ripemd160_hmac_ctx_vector_t *ctx, const u32x *w, const int len)
void ripemd160_hmac_init_vector (ripemd160_hmac_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -1989,17 +1989,17 @@ static void ripemd160_hmac_init_vector (ripemd160_hmac_ctx_vector_t *ctx, const
ripemd160_hmac_init_vector_64 (ctx, w0, w1, w2, w3);
}
static void ripemd160_hmac_update_vector_64 (ripemd160_hmac_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const int len)
void ripemd160_hmac_update_vector_64 (ripemd160_hmac_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const int len)
{
ripemd160_update_vector_64 (&ctx->ipad, w0, w1, w2, w3, len);
}
static void ripemd160_hmac_update_vector (ripemd160_hmac_ctx_vector_t *ctx, const u32x *w, const int len)
void ripemd160_hmac_update_vector (ripemd160_hmac_ctx_vector_t *ctx, const u32x *w, const int len)
{
ripemd160_update_vector (&ctx->ipad, w, len);
}
static void ripemd160_hmac_final_vector (ripemd160_hmac_ctx_vector_t *ctx)
void ripemd160_hmac_final_vector (ripemd160_hmac_ctx_vector_t *ctx)
{
ripemd160_final_vector (&ctx->ipad);

@ -17,7 +17,7 @@ typedef struct sha1_ctx
} sha1_ctx_t;
static void sha1_transform (const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4], u32 digest[5])
void sha1_transform (const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4], u32 digest[5])
{
u32 a = digest[0];
u32 b = digest[1];
@ -146,7 +146,7 @@ static void sha1_transform (const u32 w0[4], const u32 w1[4], const u32 w2[4], c
digest[4] += e;
}
static void sha1_init (sha1_ctx_t *ctx)
void sha1_init (sha1_ctx_t *ctx)
{
ctx->h[0] = SHA1M_A;
ctx->h[1] = SHA1M_B;
@ -174,7 +174,7 @@ static void sha1_init (sha1_ctx_t *ctx)
ctx->len = 0;
}
static void sha1_update_64 (sha1_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const int len)
void sha1_update_64 (sha1_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const int len)
{
#ifdef IS_AMD
volatile const int pos = ctx->len & 63;
@ -252,7 +252,7 @@ static void sha1_update_64 (sha1_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u3
}
}
static void sha1_update (sha1_ctx_t *ctx, const u32 *w, const int len)
void sha1_update (sha1_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -304,7 +304,7 @@ static void sha1_update (sha1_ctx_t *ctx, const u32 *w, const int len)
sha1_update_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void sha1_update_swap (sha1_ctx_t *ctx, const u32 *w, const int len)
void sha1_update_swap (sha1_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -390,7 +390,7 @@ static void sha1_update_swap (sha1_ctx_t *ctx, const u32 *w, const int len)
sha1_update_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void sha1_update_utf16le (sha1_ctx_t *ctx, const u32 *w, const int len)
void sha1_update_utf16le (sha1_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -432,7 +432,7 @@ static void sha1_update_utf16le (sha1_ctx_t *ctx, const u32 *w, const int len)
sha1_update_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void sha1_update_utf16le_swap (sha1_ctx_t *ctx, const u32 *w, const int len)
void sha1_update_utf16le_swap (sha1_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -508,7 +508,7 @@ static void sha1_update_utf16le_swap (sha1_ctx_t *ctx, const u32 *w, const int l
sha1_update_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void sha1_update_utf16be (sha1_ctx_t *ctx, const u32 *w, const int len)
void sha1_update_utf16be (sha1_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -550,7 +550,7 @@ static void sha1_update_utf16be (sha1_ctx_t *ctx, const u32 *w, const int len)
sha1_update_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void sha1_update_utf16be_swap (sha1_ctx_t *ctx, const u32 *w, const int len)
void sha1_update_utf16be_swap (sha1_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -626,7 +626,7 @@ static void sha1_update_utf16be_swap (sha1_ctx_t *ctx, const u32 *w, const int l
sha1_update_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void sha1_update_global (sha1_ctx_t *ctx, const __global u32 *w, const int len)
void sha1_update_global (sha1_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -678,7 +678,7 @@ static void sha1_update_global (sha1_ctx_t *ctx, const __global u32 *w, const in
sha1_update_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void sha1_update_global_swap (sha1_ctx_t *ctx, const __global u32 *w, const int len)
void sha1_update_global_swap (sha1_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -764,7 +764,7 @@ static void sha1_update_global_swap (sha1_ctx_t *ctx, const __global u32 *w, con
sha1_update_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void sha1_update_global_utf16le (sha1_ctx_t *ctx, const __global u32 *w, const int len)
void sha1_update_global_utf16le (sha1_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -806,7 +806,7 @@ static void sha1_update_global_utf16le (sha1_ctx_t *ctx, const __global u32 *w,
sha1_update_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void sha1_update_global_utf16le_swap (sha1_ctx_t *ctx, const __global u32 *w, const int len)
void sha1_update_global_utf16le_swap (sha1_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -882,7 +882,7 @@ static void sha1_update_global_utf16le_swap (sha1_ctx_t *ctx, const __global u32
sha1_update_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void sha1_update_global_utf16be (sha1_ctx_t *ctx, const __global u32 *w, const int len)
void sha1_update_global_utf16be (sha1_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -924,7 +924,7 @@ static void sha1_update_global_utf16be (sha1_ctx_t *ctx, const __global u32 *w,
sha1_update_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void sha1_update_global_utf16be_swap (sha1_ctx_t *ctx, const __global u32 *w, const int len)
void sha1_update_global_utf16be_swap (sha1_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -1000,7 +1000,7 @@ static void sha1_update_global_utf16be_swap (sha1_ctx_t *ctx, const __global u32
sha1_update_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void sha1_final (sha1_ctx_t *ctx)
void sha1_final (sha1_ctx_t *ctx)
{
const int pos = ctx->len & 63;
@ -1043,7 +1043,7 @@ typedef struct sha1_hmac_ctx
} sha1_hmac_ctx_t;
static void sha1_hmac_init_64 (sha1_hmac_ctx_t *ctx, const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4])
void sha1_hmac_init_64 (sha1_hmac_ctx_t *ctx, const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4])
{
u32 t0[4];
u32 t1[4];
@ -1097,7 +1097,7 @@ static void sha1_hmac_init_64 (sha1_hmac_ctx_t *ctx, const u32 w0[4], const u32
sha1_update_64 (&ctx->opad, t0, t1, t2, t3, 64);
}
static void sha1_hmac_init (sha1_hmac_ctx_t *ctx, const u32 *w, const int len)
void sha1_hmac_init (sha1_hmac_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -1154,7 +1154,7 @@ static void sha1_hmac_init (sha1_hmac_ctx_t *ctx, const u32 *w, const int len)
sha1_hmac_init_64 (ctx, w0, w1, w2, w3);
}
static void sha1_hmac_init_swap (sha1_hmac_ctx_t *ctx, const u32 *w, const int len)
void sha1_hmac_init_swap (sha1_hmac_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -1211,7 +1211,7 @@ static void sha1_hmac_init_swap (sha1_hmac_ctx_t *ctx, const u32 *w, const int l
sha1_hmac_init_64 (ctx, w0, w1, w2, w3);
}
static void sha1_hmac_init_global (sha1_hmac_ctx_t *ctx, __global const u32 *w, const int len)
void sha1_hmac_init_global (sha1_hmac_ctx_t *ctx, __global const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -1268,7 +1268,7 @@ static void sha1_hmac_init_global (sha1_hmac_ctx_t *ctx, __global const u32 *w,
sha1_hmac_init_64 (ctx, w0, w1, w2, w3);
}
static void sha1_hmac_init_global_swap (sha1_hmac_ctx_t *ctx, __global const u32 *w, const int len)
void sha1_hmac_init_global_swap (sha1_hmac_ctx_t *ctx, __global const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -1325,52 +1325,52 @@ static void sha1_hmac_init_global_swap (sha1_hmac_ctx_t *ctx, __global const u32
sha1_hmac_init_64 (ctx, w0, w1, w2, w3);
}
static void sha1_hmac_update_64 (sha1_hmac_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const int len)
void sha1_hmac_update_64 (sha1_hmac_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const int len)
{
sha1_update_64 (&ctx->ipad, w0, w1, w2, w3, len);
}
static void sha1_hmac_update (sha1_hmac_ctx_t *ctx, const u32 *w, const int len)
void sha1_hmac_update (sha1_hmac_ctx_t *ctx, const u32 *w, const int len)
{
sha1_update (&ctx->ipad, w, len);
}
static void sha1_hmac_update_swap (sha1_hmac_ctx_t *ctx, const u32 *w, const int len)
void sha1_hmac_update_swap (sha1_hmac_ctx_t *ctx, const u32 *w, const int len)
{
sha1_update_swap (&ctx->ipad, w, len);
}
static void sha1_hmac_update_utf16le (sha1_hmac_ctx_t *ctx, const u32 *w, const int len)
void sha1_hmac_update_utf16le (sha1_hmac_ctx_t *ctx, const u32 *w, const int len)
{
sha1_update_utf16le (&ctx->ipad, w, len);
}
static void sha1_hmac_update_utf16le_swap (sha1_hmac_ctx_t *ctx, const u32 *w, const int len)
void sha1_hmac_update_utf16le_swap (sha1_hmac_ctx_t *ctx, const u32 *w, const int len)
{
sha1_update_utf16le_swap (&ctx->ipad, w, len);
}
static void sha1_hmac_update_global (sha1_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void sha1_hmac_update_global (sha1_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
sha1_update_global (&ctx->ipad, w, len);
}
static void sha1_hmac_update_global_swap (sha1_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void sha1_hmac_update_global_swap (sha1_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
sha1_update_global_swap (&ctx->ipad, w, len);
}
static void sha1_hmac_update_global_utf16le (sha1_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void sha1_hmac_update_global_utf16le (sha1_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
sha1_update_global_utf16le (&ctx->ipad, w, len);
}
static void sha1_hmac_update_global_utf16le_swap (sha1_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void sha1_hmac_update_global_utf16le_swap (sha1_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
sha1_update_global_utf16le_swap (&ctx->ipad, w, len);
}
static void sha1_hmac_final (sha1_hmac_ctx_t *ctx)
void sha1_hmac_final (sha1_hmac_ctx_t *ctx)
{
sha1_final (&ctx->ipad);
@ -1416,7 +1416,7 @@ typedef struct sha1_ctx_vector
} sha1_ctx_vector_t;
static void sha1_transform_vector (const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4], u32x digest[5])
void sha1_transform_vector (const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4], u32x digest[5])
{
u32x a = digest[0];
u32x b = digest[1];
@ -1545,7 +1545,7 @@ static void sha1_transform_vector (const u32x w0[4], const u32x w1[4], const u32
digest[4] += e;
}
static void sha1_init_vector (sha1_ctx_vector_t *ctx)
void sha1_init_vector (sha1_ctx_vector_t *ctx)
{
ctx->h[0] = SHA1M_A;
ctx->h[1] = SHA1M_B;
@ -1573,7 +1573,7 @@ static void sha1_init_vector (sha1_ctx_vector_t *ctx)
ctx->len = 0;
}
static void sha1_init_vector_from_scalar (sha1_ctx_vector_t *ctx, sha1_ctx_t *ctx0)
void sha1_init_vector_from_scalar (sha1_ctx_vector_t *ctx, sha1_ctx_t *ctx0)
{
ctx->h[0] = ctx0->h[0];
ctx->h[1] = ctx0->h[1];
@ -1601,7 +1601,7 @@ static void sha1_init_vector_from_scalar (sha1_ctx_vector_t *ctx, sha1_ctx_t *ct
ctx->len = ctx0->len;
}
static void sha1_update_vector_64 (sha1_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const int len)
void sha1_update_vector_64 (sha1_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const int len)
{
#ifdef IS_AMD
volatile const int pos = ctx->len & 63;
@ -1679,7 +1679,7 @@ static void sha1_update_vector_64 (sha1_ctx_vector_t *ctx, u32x w0[4], u32x w1[4
}
}
static void sha1_update_vector (sha1_ctx_vector_t *ctx, const u32x *w, const int len)
void sha1_update_vector (sha1_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -1731,7 +1731,7 @@ static void sha1_update_vector (sha1_ctx_vector_t *ctx, const u32x *w, const int
sha1_update_vector_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void sha1_update_vector_swap (sha1_ctx_vector_t *ctx, const u32x *w, const int len)
void sha1_update_vector_swap (sha1_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -1817,7 +1817,7 @@ static void sha1_update_vector_swap (sha1_ctx_vector_t *ctx, const u32x *w, cons
sha1_update_vector_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void sha1_update_vector_utf16le (sha1_ctx_vector_t *ctx, const u32x *w, const int len)
void sha1_update_vector_utf16le (sha1_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -1859,7 +1859,7 @@ static void sha1_update_vector_utf16le (sha1_ctx_vector_t *ctx, const u32x *w, c
sha1_update_vector_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void sha1_update_vector_utf16le_swap (sha1_ctx_vector_t *ctx, const u32x *w, const int len)
void sha1_update_vector_utf16le_swap (sha1_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -1935,7 +1935,7 @@ static void sha1_update_vector_utf16le_swap (sha1_ctx_vector_t *ctx, const u32x
sha1_update_vector_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void sha1_update_vector_utf16leN (sha1_ctx_vector_t *ctx, const u32x *w, const int len)
void sha1_update_vector_utf16leN (sha1_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -1977,7 +1977,7 @@ static void sha1_update_vector_utf16leN (sha1_ctx_vector_t *ctx, const u32x *w,
sha1_update_vector_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void sha1_update_vector_utf16beN (sha1_ctx_vector_t *ctx, const u32x *w, const int len)
void sha1_update_vector_utf16beN (sha1_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -2019,7 +2019,7 @@ static void sha1_update_vector_utf16beN (sha1_ctx_vector_t *ctx, const u32x *w,
sha1_update_vector_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void sha1_final_vector (sha1_ctx_vector_t *ctx)
void sha1_final_vector (sha1_ctx_vector_t *ctx)
{
const int pos = ctx->len & 63;
@ -2062,7 +2062,7 @@ typedef struct sha1_hmac_ctx_vector
} sha1_hmac_ctx_vector_t;
static void sha1_hmac_init_vector_64 (sha1_hmac_ctx_vector_t *ctx, const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4])
void sha1_hmac_init_vector_64 (sha1_hmac_ctx_vector_t *ctx, const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4])
{
u32x t0[4];
u32x t1[4];
@ -2116,7 +2116,7 @@ static void sha1_hmac_init_vector_64 (sha1_hmac_ctx_vector_t *ctx, const u32x w0
sha1_update_vector_64 (&ctx->opad, t0, t1, t2, t3, 64);
}
static void sha1_hmac_init_vector (sha1_hmac_ctx_vector_t *ctx, const u32x *w, const int len)
void sha1_hmac_init_vector (sha1_hmac_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -2173,17 +2173,17 @@ static void sha1_hmac_init_vector (sha1_hmac_ctx_vector_t *ctx, const u32x *w, c
sha1_hmac_init_vector_64 (ctx, w0, w1, w2, w3);
}
static void sha1_hmac_update_vector_64 (sha1_hmac_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const int len)
void sha1_hmac_update_vector_64 (sha1_hmac_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const int len)
{
sha1_update_vector_64 (&ctx->ipad, w0, w1, w2, w3, len);
}
static void sha1_hmac_update_vector (sha1_hmac_ctx_vector_t *ctx, const u32x *w, const int len)
void sha1_hmac_update_vector (sha1_hmac_ctx_vector_t *ctx, const u32x *w, const int len)
{
sha1_update_vector (&ctx->ipad, w, len);
}
static void sha1_hmac_final_vector (sha1_hmac_ctx_vector_t *ctx)
void sha1_hmac_final_vector (sha1_hmac_ctx_vector_t *ctx)
{
sha1_final_vector (&ctx->ipad);

@ -37,7 +37,7 @@ typedef struct sha224_ctx
} sha224_ctx_t;
static void sha224_transform (const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4], u32 digest[8])
void sha224_transform (const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4], u32 digest[8])
{
u32 a = digest[0];
u32 b = digest[1];
@ -128,7 +128,7 @@ static void sha224_transform (const u32 w0[4], const u32 w1[4], const u32 w2[4],
digest[7] += h;
}
static void sha224_init (sha224_ctx_t *ctx)
void sha224_init (sha224_ctx_t *ctx)
{
ctx->h[0] = SHA224M_A;
ctx->h[1] = SHA224M_B;
@ -159,7 +159,7 @@ static void sha224_init (sha224_ctx_t *ctx)
ctx->len = 0;
}
static void sha224_update_64 (sha224_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const int len)
void sha224_update_64 (sha224_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const int len)
{
#ifdef IS_AMD
volatile const int pos = ctx->len & 63;
@ -237,7 +237,7 @@ static void sha224_update_64 (sha224_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4]
}
}
static void sha224_update (sha224_ctx_t *ctx, const u32 *w, const int len)
void sha224_update (sha224_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -289,7 +289,7 @@ static void sha224_update (sha224_ctx_t *ctx, const u32 *w, const int len)
sha224_update_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void sha224_update_swap (sha224_ctx_t *ctx, const u32 *w, const int len)
void sha224_update_swap (sha224_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -375,7 +375,7 @@ static void sha224_update_swap (sha224_ctx_t *ctx, const u32 *w, const int len)
sha224_update_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void sha224_update_utf16le (sha224_ctx_t *ctx, const u32 *w, const int len)
void sha224_update_utf16le (sha224_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -417,7 +417,7 @@ static void sha224_update_utf16le (sha224_ctx_t *ctx, const u32 *w, const int le
sha224_update_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void sha224_update_utf16le_swap (sha224_ctx_t *ctx, const u32 *w, const int len)
void sha224_update_utf16le_swap (sha224_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -493,7 +493,7 @@ static void sha224_update_utf16le_swap (sha224_ctx_t *ctx, const u32 *w, const i
sha224_update_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void sha224_update_global (sha224_ctx_t *ctx, const __global u32 *w, const int len)
void sha224_update_global (sha224_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -545,7 +545,7 @@ static void sha224_update_global (sha224_ctx_t *ctx, const __global u32 *w, cons
sha224_update_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void sha224_update_global_swap (sha224_ctx_t *ctx, const __global u32 *w, const int len)
void sha224_update_global_swap (sha224_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -631,7 +631,7 @@ static void sha224_update_global_swap (sha224_ctx_t *ctx, const __global u32 *w,
sha224_update_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void sha224_update_global_utf16le (sha224_ctx_t *ctx, const __global u32 *w, const int len)
void sha224_update_global_utf16le (sha224_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -673,7 +673,7 @@ static void sha224_update_global_utf16le (sha224_ctx_t *ctx, const __global u32
sha224_update_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void sha224_update_global_utf16le_swap (sha224_ctx_t *ctx, const __global u32 *w, const int len)
void sha224_update_global_utf16le_swap (sha224_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -749,7 +749,7 @@ static void sha224_update_global_utf16le_swap (sha224_ctx_t *ctx, const __global
sha224_update_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void sha224_final (sha224_ctx_t *ctx)
void sha224_final (sha224_ctx_t *ctx)
{
const int pos = ctx->len & 63;
@ -792,7 +792,7 @@ typedef struct sha224_hmac_ctx
} sha224_hmac_ctx_t;
static void sha224_hmac_init_64 (sha224_hmac_ctx_t *ctx, const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4])
void sha224_hmac_init_64 (sha224_hmac_ctx_t *ctx, const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4])
{
u32 t0[4];
u32 t1[4];
@ -846,7 +846,7 @@ static void sha224_hmac_init_64 (sha224_hmac_ctx_t *ctx, const u32 w0[4], const
sha224_update_64 (&ctx->opad, t0, t1, t2, t3, 64);
}
static void sha224_hmac_init (sha224_hmac_ctx_t *ctx, const u32 *w, const int len)
void sha224_hmac_init (sha224_hmac_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -903,7 +903,7 @@ static void sha224_hmac_init (sha224_hmac_ctx_t *ctx, const u32 *w, const int le
sha224_hmac_init_64 (ctx, w0, w1, w2, w3);
}
static void sha224_hmac_init_swap (sha224_hmac_ctx_t *ctx, const u32 *w, const int len)
void sha224_hmac_init_swap (sha224_hmac_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -960,7 +960,7 @@ static void sha224_hmac_init_swap (sha224_hmac_ctx_t *ctx, const u32 *w, const i
sha224_hmac_init_64 (ctx, w0, w1, w2, w3);
}
static void sha224_hmac_init_global (sha224_hmac_ctx_t *ctx, __global const u32 *w, const int len)
void sha224_hmac_init_global (sha224_hmac_ctx_t *ctx, __global const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -1017,7 +1017,7 @@ static void sha224_hmac_init_global (sha224_hmac_ctx_t *ctx, __global const u32
sha224_hmac_init_64 (ctx, w0, w1, w2, w3);
}
static void sha224_hmac_init_global_swap (sha224_hmac_ctx_t *ctx, __global const u32 *w, const int len)
void sha224_hmac_init_global_swap (sha224_hmac_ctx_t *ctx, __global const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -1074,52 +1074,52 @@ static void sha224_hmac_init_global_swap (sha224_hmac_ctx_t *ctx, __global const
sha224_hmac_init_64 (ctx, w0, w1, w2, w3);
}
static void sha224_hmac_update_64 (sha224_hmac_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const int len)
void sha224_hmac_update_64 (sha224_hmac_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const int len)
{
sha224_update_64 (&ctx->ipad, w0, w1, w2, w3, len);
}
static void sha224_hmac_update (sha224_hmac_ctx_t *ctx, const u32 *w, const int len)
void sha224_hmac_update (sha224_hmac_ctx_t *ctx, const u32 *w, const int len)
{
sha224_update (&ctx->ipad, w, len);
}
static void sha224_hmac_update_swap (sha224_hmac_ctx_t *ctx, const u32 *w, const int len)
void sha224_hmac_update_swap (sha224_hmac_ctx_t *ctx, const u32 *w, const int len)
{
sha224_update_swap (&ctx->ipad, w, len);
}
static void sha224_hmac_update_utf16le (sha224_hmac_ctx_t *ctx, const u32 *w, const int len)
void sha224_hmac_update_utf16le (sha224_hmac_ctx_t *ctx, const u32 *w, const int len)
{
sha224_update_utf16le (&ctx->ipad, w, len);
}
static void sha224_hmac_update_utf16le_swap (sha224_hmac_ctx_t *ctx, const u32 *w, const int len)
void sha224_hmac_update_utf16le_swap (sha224_hmac_ctx_t *ctx, const u32 *w, const int len)
{
sha224_update_utf16le_swap (&ctx->ipad, w, len);
}
static void sha224_hmac_update_global (sha224_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void sha224_hmac_update_global (sha224_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
sha224_update_global (&ctx->ipad, w, len);
}
static void sha224_hmac_update_global_swap (sha224_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void sha224_hmac_update_global_swap (sha224_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
sha224_update_global_swap (&ctx->ipad, w, len);
}
static void sha224_hmac_update_global_utf16le (sha224_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void sha224_hmac_update_global_utf16le (sha224_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
sha224_update_global_utf16le (&ctx->ipad, w, len);
}
static void sha224_hmac_update_global_utf16le_swap (sha224_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void sha224_hmac_update_global_utf16le_swap (sha224_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
sha224_update_global_utf16le_swap (&ctx->ipad, w, len);
}
static void sha224_hmac_final (sha224_hmac_ctx_t *ctx)
void sha224_hmac_final (sha224_hmac_ctx_t *ctx)
{
sha224_final (&ctx->ipad);
@ -1165,7 +1165,7 @@ typedef struct sha224_ctx_vector
} sha224_ctx_vector_t;
static void sha224_transform_vector (const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4], u32x digest[8])
void sha224_transform_vector (const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4], u32x digest[8])
{
u32x a = digest[0];
u32x b = digest[1];
@ -1256,7 +1256,7 @@ static void sha224_transform_vector (const u32x w0[4], const u32x w1[4], const u
digest[7] += h;
}
static void sha224_init_vector (sha224_ctx_vector_t *ctx)
void sha224_init_vector (sha224_ctx_vector_t *ctx)
{
ctx->h[0] = SHA224M_A;
ctx->h[1] = SHA224M_B;
@ -1287,7 +1287,7 @@ static void sha224_init_vector (sha224_ctx_vector_t *ctx)
ctx->len = 0;
}
static void sha224_init_vector_from_scalar (sha224_ctx_vector_t *ctx, sha224_ctx_t *ctx0)
void sha224_init_vector_from_scalar (sha224_ctx_vector_t *ctx, sha224_ctx_t *ctx0)
{
ctx->h[0] = ctx0->h[0];
ctx->h[1] = ctx0->h[1];
@ -1318,7 +1318,7 @@ static void sha224_init_vector_from_scalar (sha224_ctx_vector_t *ctx, sha224_ctx
ctx->len = ctx0->len;
}
static void sha224_update_vector_64 (sha224_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const int len)
void sha224_update_vector_64 (sha224_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const int len)
{
#ifdef IS_AMD
volatile const int pos = ctx->len & 63;
@ -1396,7 +1396,7 @@ static void sha224_update_vector_64 (sha224_ctx_vector_t *ctx, u32x w0[4], u32x
}
}
static void sha224_update_vector (sha224_ctx_vector_t *ctx, const u32x *w, const int len)
void sha224_update_vector (sha224_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -1448,7 +1448,7 @@ static void sha224_update_vector (sha224_ctx_vector_t *ctx, const u32x *w, const
sha224_update_vector_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void sha224_update_vector_swap (sha224_ctx_vector_t *ctx, const u32x *w, const int len)
void sha224_update_vector_swap (sha224_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -1534,7 +1534,7 @@ static void sha224_update_vector_swap (sha224_ctx_vector_t *ctx, const u32x *w,
sha224_update_vector_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void sha224_update_vector_utf16le (sha224_ctx_vector_t *ctx, const u32x *w, const int len)
void sha224_update_vector_utf16le (sha224_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -1576,7 +1576,7 @@ static void sha224_update_vector_utf16le (sha224_ctx_vector_t *ctx, const u32x *
sha224_update_vector_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void sha224_update_vector_utf16le_swap (sha224_ctx_vector_t *ctx, const u32x *w, const int len)
void sha224_update_vector_utf16le_swap (sha224_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -1652,7 +1652,7 @@ static void sha224_update_vector_utf16le_swap (sha224_ctx_vector_t *ctx, const u
sha224_update_vector_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void sha224_update_vector_utf16beN (sha224_ctx_vector_t *ctx, const u32x *w, const int len)
void sha224_update_vector_utf16beN (sha224_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -1694,7 +1694,7 @@ static void sha224_update_vector_utf16beN (sha224_ctx_vector_t *ctx, const u32x
sha224_update_vector_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void sha224_final_vector (sha224_ctx_vector_t *ctx)
void sha224_final_vector (sha224_ctx_vector_t *ctx)
{
const int pos = ctx->len & 63;
@ -1737,7 +1737,7 @@ typedef struct sha224_hmac_ctx_vector
} sha224_hmac_ctx_vector_t;
static void sha224_hmac_init_vector_64 (sha224_hmac_ctx_vector_t *ctx, const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4])
void sha224_hmac_init_vector_64 (sha224_hmac_ctx_vector_t *ctx, const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4])
{
u32x t0[4];
u32x t1[4];
@ -1791,7 +1791,7 @@ static void sha224_hmac_init_vector_64 (sha224_hmac_ctx_vector_t *ctx, const u32
sha224_update_vector_64 (&ctx->opad, t0, t1, t2, t3, 64);
}
static void sha224_hmac_init_vector (sha224_hmac_ctx_vector_t *ctx, const u32x *w, const int len)
void sha224_hmac_init_vector (sha224_hmac_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -1848,17 +1848,17 @@ static void sha224_hmac_init_vector (sha224_hmac_ctx_vector_t *ctx, const u32x *
sha224_hmac_init_vector_64 (ctx, w0, w1, w2, w3);
}
static void sha224_hmac_update_vector_64 (sha224_hmac_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const int len)
void sha224_hmac_update_vector_64 (sha224_hmac_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const int len)
{
sha224_update_vector_64 (&ctx->ipad, w0, w1, w2, w3, len);
}
static void sha224_hmac_update_vector (sha224_hmac_ctx_vector_t *ctx, const u32x *w, const int len)
void sha224_hmac_update_vector (sha224_hmac_ctx_vector_t *ctx, const u32x *w, const int len)
{
sha224_update_vector (&ctx->ipad, w, len);
}
static void sha224_hmac_final_vector (sha224_hmac_ctx_vector_t *ctx)
void sha224_hmac_final_vector (sha224_hmac_ctx_vector_t *ctx)
{
sha224_final_vector (&ctx->ipad);

@ -37,7 +37,7 @@ typedef struct sha256_ctx
} sha256_ctx_t;
static void sha256_transform (const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4], u32 digest[8])
void sha256_transform (const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4], u32 digest[8])
{
u32 a = digest[0];
u32 b = digest[1];
@ -128,7 +128,7 @@ static void sha256_transform (const u32 w0[4], const u32 w1[4], const u32 w2[4],
digest[7] += h;
}
static void sha256_init (sha256_ctx_t *ctx)
void sha256_init (sha256_ctx_t *ctx)
{
ctx->h[0] = SHA256M_A;
ctx->h[1] = SHA256M_B;
@ -159,7 +159,7 @@ static void sha256_init (sha256_ctx_t *ctx)
ctx->len = 0;
}
static void sha256_update_64 (sha256_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const int len)
void sha256_update_64 (sha256_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const int len)
{
#ifdef IS_AMD
volatile const int pos = ctx->len & 63;
@ -237,7 +237,7 @@ static void sha256_update_64 (sha256_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4]
}
}
static void sha256_update (sha256_ctx_t *ctx, const u32 *w, const int len)
void sha256_update (sha256_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -289,7 +289,7 @@ static void sha256_update (sha256_ctx_t *ctx, const u32 *w, const int len)
sha256_update_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void sha256_update_swap (sha256_ctx_t *ctx, const u32 *w, const int len)
void sha256_update_swap (sha256_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -375,7 +375,7 @@ static void sha256_update_swap (sha256_ctx_t *ctx, const u32 *w, const int len)
sha256_update_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void sha256_update_utf16le (sha256_ctx_t *ctx, const u32 *w, const int len)
void sha256_update_utf16le (sha256_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -417,7 +417,7 @@ static void sha256_update_utf16le (sha256_ctx_t *ctx, const u32 *w, const int le
sha256_update_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void sha256_update_utf16le_swap (sha256_ctx_t *ctx, const u32 *w, const int len)
void sha256_update_utf16le_swap (sha256_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -493,7 +493,7 @@ static void sha256_update_utf16le_swap (sha256_ctx_t *ctx, const u32 *w, const i
sha256_update_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void sha256_update_global (sha256_ctx_t *ctx, const __global u32 *w, const int len)
void sha256_update_global (sha256_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -545,7 +545,7 @@ static void sha256_update_global (sha256_ctx_t *ctx, const __global u32 *w, cons
sha256_update_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void sha256_update_global_swap (sha256_ctx_t *ctx, const __global u32 *w, const int len)
void sha256_update_global_swap (sha256_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -631,7 +631,7 @@ static void sha256_update_global_swap (sha256_ctx_t *ctx, const __global u32 *w,
sha256_update_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void sha256_update_global_utf16le (sha256_ctx_t *ctx, const __global u32 *w, const int len)
void sha256_update_global_utf16le (sha256_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -673,7 +673,7 @@ static void sha256_update_global_utf16le (sha256_ctx_t *ctx, const __global u32
sha256_update_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void sha256_update_global_utf16le_swap (sha256_ctx_t *ctx, const __global u32 *w, const int len)
void sha256_update_global_utf16le_swap (sha256_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -749,7 +749,7 @@ static void sha256_update_global_utf16le_swap (sha256_ctx_t *ctx, const __global
sha256_update_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void sha256_final (sha256_ctx_t *ctx)
void sha256_final (sha256_ctx_t *ctx)
{
const int pos = ctx->len & 63;
@ -792,7 +792,7 @@ typedef struct sha256_hmac_ctx
} sha256_hmac_ctx_t;
static void sha256_hmac_init_64 (sha256_hmac_ctx_t *ctx, const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4])
void sha256_hmac_init_64 (sha256_hmac_ctx_t *ctx, const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4])
{
u32 t0[4];
u32 t1[4];
@ -846,7 +846,7 @@ static void sha256_hmac_init_64 (sha256_hmac_ctx_t *ctx, const u32 w0[4], const
sha256_update_64 (&ctx->opad, t0, t1, t2, t3, 64);
}
static void sha256_hmac_init (sha256_hmac_ctx_t *ctx, const u32 *w, const int len)
void sha256_hmac_init (sha256_hmac_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -903,7 +903,7 @@ static void sha256_hmac_init (sha256_hmac_ctx_t *ctx, const u32 *w, const int le
sha256_hmac_init_64 (ctx, w0, w1, w2, w3);
}
static void sha256_hmac_init_swap (sha256_hmac_ctx_t *ctx, const u32 *w, const int len)
void sha256_hmac_init_swap (sha256_hmac_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -960,7 +960,7 @@ static void sha256_hmac_init_swap (sha256_hmac_ctx_t *ctx, const u32 *w, const i
sha256_hmac_init_64 (ctx, w0, w1, w2, w3);
}
static void sha256_hmac_init_global (sha256_hmac_ctx_t *ctx, __global const u32 *w, const int len)
void sha256_hmac_init_global (sha256_hmac_ctx_t *ctx, __global const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -1017,7 +1017,7 @@ static void sha256_hmac_init_global (sha256_hmac_ctx_t *ctx, __global const u32
sha256_hmac_init_64 (ctx, w0, w1, w2, w3);
}
static void sha256_hmac_init_global_swap (sha256_hmac_ctx_t *ctx, __global const u32 *w, const int len)
void sha256_hmac_init_global_swap (sha256_hmac_ctx_t *ctx, __global const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -1074,52 +1074,52 @@ static void sha256_hmac_init_global_swap (sha256_hmac_ctx_t *ctx, __global const
sha256_hmac_init_64 (ctx, w0, w1, w2, w3);
}
static void sha256_hmac_update_64 (sha256_hmac_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const int len)
void sha256_hmac_update_64 (sha256_hmac_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const int len)
{
sha256_update_64 (&ctx->ipad, w0, w1, w2, w3, len);
}
static void sha256_hmac_update (sha256_hmac_ctx_t *ctx, const u32 *w, const int len)
void sha256_hmac_update (sha256_hmac_ctx_t *ctx, const u32 *w, const int len)
{
sha256_update (&ctx->ipad, w, len);
}
static void sha256_hmac_update_swap (sha256_hmac_ctx_t *ctx, const u32 *w, const int len)
void sha256_hmac_update_swap (sha256_hmac_ctx_t *ctx, const u32 *w, const int len)
{
sha256_update_swap (&ctx->ipad, w, len);
}
static void sha256_hmac_update_utf16le (sha256_hmac_ctx_t *ctx, const u32 *w, const int len)
void sha256_hmac_update_utf16le (sha256_hmac_ctx_t *ctx, const u32 *w, const int len)
{
sha256_update_utf16le (&ctx->ipad, w, len);
}
static void sha256_hmac_update_utf16le_swap (sha256_hmac_ctx_t *ctx, const u32 *w, const int len)
void sha256_hmac_update_utf16le_swap (sha256_hmac_ctx_t *ctx, const u32 *w, const int len)
{
sha256_update_utf16le_swap (&ctx->ipad, w, len);
}
static void sha256_hmac_update_global (sha256_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void sha256_hmac_update_global (sha256_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
sha256_update_global (&ctx->ipad, w, len);
}
static void sha256_hmac_update_global_swap (sha256_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void sha256_hmac_update_global_swap (sha256_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
sha256_update_global_swap (&ctx->ipad, w, len);
}
static void sha256_hmac_update_global_utf16le (sha256_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void sha256_hmac_update_global_utf16le (sha256_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
sha256_update_global_utf16le (&ctx->ipad, w, len);
}
static void sha256_hmac_update_global_utf16le_swap (sha256_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void sha256_hmac_update_global_utf16le_swap (sha256_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
sha256_update_global_utf16le_swap (&ctx->ipad, w, len);
}
static void sha256_hmac_final (sha256_hmac_ctx_t *ctx)
void sha256_hmac_final (sha256_hmac_ctx_t *ctx)
{
sha256_final (&ctx->ipad);
@ -1165,7 +1165,7 @@ typedef struct sha256_ctx_vector
} sha256_ctx_vector_t;
static void sha256_transform_vector (const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4], u32x digest[8])
void sha256_transform_vector (const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4], u32x digest[8])
{
u32x a = digest[0];
u32x b = digest[1];
@ -1256,7 +1256,7 @@ static void sha256_transform_vector (const u32x w0[4], const u32x w1[4], const u
digest[7] += h;
}
static void sha256_init_vector (sha256_ctx_vector_t *ctx)
void sha256_init_vector (sha256_ctx_vector_t *ctx)
{
ctx->h[0] = SHA256M_A;
ctx->h[1] = SHA256M_B;
@ -1287,7 +1287,7 @@ static void sha256_init_vector (sha256_ctx_vector_t *ctx)
ctx->len = 0;
}
static void sha256_init_vector_from_scalar (sha256_ctx_vector_t *ctx, sha256_ctx_t *ctx0)
void sha256_init_vector_from_scalar (sha256_ctx_vector_t *ctx, sha256_ctx_t *ctx0)
{
ctx->h[0] = ctx0->h[0];
ctx->h[1] = ctx0->h[1];
@ -1318,7 +1318,7 @@ static void sha256_init_vector_from_scalar (sha256_ctx_vector_t *ctx, sha256_ctx
ctx->len = ctx0->len;
}
static void sha256_update_vector_64 (sha256_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const int len)
void sha256_update_vector_64 (sha256_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const int len)
{
#ifdef IS_AMD
volatile const int pos = ctx->len & 63;
@ -1396,7 +1396,7 @@ static void sha256_update_vector_64 (sha256_ctx_vector_t *ctx, u32x w0[4], u32x
}
}
static void sha256_update_vector (sha256_ctx_vector_t *ctx, const u32x *w, const int len)
void sha256_update_vector (sha256_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -1448,7 +1448,7 @@ static void sha256_update_vector (sha256_ctx_vector_t *ctx, const u32x *w, const
sha256_update_vector_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void sha256_update_vector_swap (sha256_ctx_vector_t *ctx, const u32x *w, const int len)
void sha256_update_vector_swap (sha256_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -1534,7 +1534,7 @@ static void sha256_update_vector_swap (sha256_ctx_vector_t *ctx, const u32x *w,
sha256_update_vector_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void sha256_update_vector_utf16le (sha256_ctx_vector_t *ctx, const u32x *w, const int len)
void sha256_update_vector_utf16le (sha256_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -1576,7 +1576,7 @@ static void sha256_update_vector_utf16le (sha256_ctx_vector_t *ctx, const u32x *
sha256_update_vector_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void sha256_update_vector_utf16le_swap (sha256_ctx_vector_t *ctx, const u32x *w, const int len)
void sha256_update_vector_utf16le_swap (sha256_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -1652,7 +1652,7 @@ static void sha256_update_vector_utf16le_swap (sha256_ctx_vector_t *ctx, const u
sha256_update_vector_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void sha256_update_vector_utf16beN (sha256_ctx_vector_t *ctx, const u32x *w, const int len)
void sha256_update_vector_utf16beN (sha256_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -1694,7 +1694,7 @@ static void sha256_update_vector_utf16beN (sha256_ctx_vector_t *ctx, const u32x
sha256_update_vector_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void sha256_final_vector (sha256_ctx_vector_t *ctx)
void sha256_final_vector (sha256_ctx_vector_t *ctx)
{
const int pos = ctx->len & 63;
@ -1737,7 +1737,7 @@ typedef struct sha256_hmac_ctx_vector
} sha256_hmac_ctx_vector_t;
static void sha256_hmac_init_vector_64 (sha256_hmac_ctx_vector_t *ctx, const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4])
void sha256_hmac_init_vector_64 (sha256_hmac_ctx_vector_t *ctx, const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4])
{
u32x t0[4];
u32x t1[4];
@ -1791,7 +1791,7 @@ static void sha256_hmac_init_vector_64 (sha256_hmac_ctx_vector_t *ctx, const u32
sha256_update_vector_64 (&ctx->opad, t0, t1, t2, t3, 64);
}
static void sha256_hmac_init_vector (sha256_hmac_ctx_vector_t *ctx, const u32x *w, const int len)
void sha256_hmac_init_vector (sha256_hmac_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -1848,17 +1848,17 @@ static void sha256_hmac_init_vector (sha256_hmac_ctx_vector_t *ctx, const u32x *
sha256_hmac_init_vector_64 (ctx, w0, w1, w2, w3);
}
static void sha256_hmac_update_vector_64 (sha256_hmac_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const int len)
void sha256_hmac_update_vector_64 (sha256_hmac_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const int len)
{
sha256_update_vector_64 (&ctx->ipad, w0, w1, w2, w3, len);
}
static void sha256_hmac_update_vector (sha256_hmac_ctx_vector_t *ctx, const u32x *w, const int len)
void sha256_hmac_update_vector (sha256_hmac_ctx_vector_t *ctx, const u32x *w, const int len)
{
sha256_update_vector (&ctx->ipad, w, len);
}
static void sha256_hmac_final_vector (sha256_hmac_ctx_vector_t *ctx)
void sha256_hmac_final_vector (sha256_hmac_ctx_vector_t *ctx)
{
sha256_final_vector (&ctx->ipad);

@ -45,7 +45,7 @@ typedef struct sha384_ctx
} sha384_ctx_t;
static void sha384_transform (const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4], const u32 w4[4], const u32 w5[4], const u32 w6[4], const u32 w7[4], u64 digest[8])
void sha384_transform (const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4], const u32 w4[4], const u32 w5[4], const u32 w6[4], const u32 w7[4], u64 digest[8])
{
u64 a = digest[0];
u64 b = digest[1];
@ -136,7 +136,7 @@ static void sha384_transform (const u32 w0[4], const u32 w1[4], const u32 w2[4],
digest[7] += h;
}
static void sha384_init (sha384_ctx_t *ctx)
void sha384_init (sha384_ctx_t *ctx)
{
ctx->h[0] = SHA384M_A;
ctx->h[1] = SHA384M_B;
@ -183,7 +183,7 @@ static void sha384_init (sha384_ctx_t *ctx)
ctx->len = 0;
}
static void sha384_update_128 (sha384_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], u32 w4[4], u32 w5[4], u32 w6[4], u32 w7[4], const int len)
void sha384_update_128 (sha384_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], u32 w4[4], u32 w5[4], u32 w6[4], u32 w7[4], const int len)
{
#ifdef IS_AMD
volatile const int pos = ctx->len & 127;
@ -313,7 +313,7 @@ static void sha384_update_128 (sha384_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4
}
}
static void sha384_update (sha384_ctx_t *ctx, const u32 *w, const int len)
void sha384_update (sha384_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -401,7 +401,7 @@ static void sha384_update (sha384_ctx_t *ctx, const u32 *w, const int len)
sha384_update_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7, len - pos1);
}
static void sha384_update_swap (sha384_ctx_t *ctx, const u32 *w, const int len)
void sha384_update_swap (sha384_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -555,7 +555,7 @@ static void sha384_update_swap (sha384_ctx_t *ctx, const u32 *w, const int len)
sha384_update_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7, len - pos1);
}
static void sha384_update_utf16le (sha384_ctx_t *ctx, const u32 *w, const int len)
void sha384_update_utf16le (sha384_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -621,7 +621,7 @@ static void sha384_update_utf16le (sha384_ctx_t *ctx, const u32 *w, const int le
sha384_update_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7, (len - pos1) * 2);
}
static void sha384_update_utf16le_swap (sha384_ctx_t *ctx, const u32 *w, const int len)
void sha384_update_utf16le_swap (sha384_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -753,7 +753,7 @@ static void sha384_update_utf16le_swap (sha384_ctx_t *ctx, const u32 *w, const i
sha384_update_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7, (len - pos1) * 2);
}
static void sha384_update_global (sha384_ctx_t *ctx, const __global u32 *w, const int len)
void sha384_update_global (sha384_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -841,7 +841,7 @@ static void sha384_update_global (sha384_ctx_t *ctx, const __global u32 *w, cons
sha384_update_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7, len - pos1);
}
static void sha384_update_global_swap (sha384_ctx_t *ctx, const __global u32 *w, const int len)
void sha384_update_global_swap (sha384_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -995,7 +995,7 @@ static void sha384_update_global_swap (sha384_ctx_t *ctx, const __global u32 *w,
sha384_update_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7, len - pos1);
}
static void sha384_update_global_utf16le (sha384_ctx_t *ctx, const __global u32 *w, const int len)
void sha384_update_global_utf16le (sha384_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -1061,7 +1061,7 @@ static void sha384_update_global_utf16le (sha384_ctx_t *ctx, const __global u32
sha384_update_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7, (len - pos1) * 2);
}
static void sha384_update_global_utf16le_swap (sha384_ctx_t *ctx, const __global u32 *w, const int len)
void sha384_update_global_utf16le_swap (sha384_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -1193,7 +1193,7 @@ static void sha384_update_global_utf16le_swap (sha384_ctx_t *ctx, const __global
sha384_update_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7, (len - pos1) * 2);
}
static void sha384_final (sha384_ctx_t *ctx)
void sha384_final (sha384_ctx_t *ctx)
{
const int pos = ctx->len & 127;
@ -1252,7 +1252,7 @@ typedef struct sha384_hmac_ctx
} sha384_hmac_ctx_t;
static void sha384_hmac_init_128 (sha384_hmac_ctx_t *ctx, const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4], const u32 w4[4], const u32 w5[4], const u32 w6[4], const u32 w7[4])
void sha384_hmac_init_128 (sha384_hmac_ctx_t *ctx, const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4], const u32 w4[4], const u32 w5[4], const u32 w6[4], const u32 w7[4])
{
u32 t0[4];
u32 t1[4];
@ -1342,7 +1342,7 @@ static void sha384_hmac_init_128 (sha384_hmac_ctx_t *ctx, const u32 w0[4], const
sha384_update_128 (&ctx->opad, t0, t1, t2, t3, t4, t5, t6, t7, 128);
}
static void sha384_hmac_init (sha384_hmac_ctx_t *ctx, const u32 *w, const int len)
void sha384_hmac_init (sha384_hmac_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -1435,7 +1435,7 @@ static void sha384_hmac_init (sha384_hmac_ctx_t *ctx, const u32 *w, const int le
sha384_hmac_init_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7);
}
static void sha384_hmac_init_swap (sha384_hmac_ctx_t *ctx, const u32 *w, const int len)
void sha384_hmac_init_swap (sha384_hmac_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -1528,7 +1528,7 @@ static void sha384_hmac_init_swap (sha384_hmac_ctx_t *ctx, const u32 *w, const i
sha384_hmac_init_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7);
}
static void sha384_hmac_init_global (sha384_hmac_ctx_t *ctx, __global const u32 *w, const int len)
void sha384_hmac_init_global (sha384_hmac_ctx_t *ctx, __global const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -1621,7 +1621,7 @@ static void sha384_hmac_init_global (sha384_hmac_ctx_t *ctx, __global const u32
sha384_hmac_init_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7);
}
static void sha384_hmac_init_global_swap (sha384_hmac_ctx_t *ctx, __global const u32 *w, const int len)
void sha384_hmac_init_global_swap (sha384_hmac_ctx_t *ctx, __global const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -1714,52 +1714,52 @@ static void sha384_hmac_init_global_swap (sha384_hmac_ctx_t *ctx, __global const
sha384_hmac_init_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7);
}
static void sha384_hmac_update_128 (sha384_hmac_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], u32 w4[4], u32 w5[4], u32 w6[4], u32 w7[4], const int len)
void sha384_hmac_update_128 (sha384_hmac_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], u32 w4[4], u32 w5[4], u32 w6[4], u32 w7[4], const int len)
{
sha384_update_128 (&ctx->ipad, w0, w1, w2, w3, w4, w5, w6, w7, len);
}
static void sha384_hmac_update (sha384_hmac_ctx_t *ctx, const u32 *w, const int len)
void sha384_hmac_update (sha384_hmac_ctx_t *ctx, const u32 *w, const int len)
{
sha384_update (&ctx->ipad, w, len);
}
static void sha384_hmac_update_swap (sha384_hmac_ctx_t *ctx, const u32 *w, const int len)
void sha384_hmac_update_swap (sha384_hmac_ctx_t *ctx, const u32 *w, const int len)
{
sha384_update_swap (&ctx->ipad, w, len);
}
static void sha384_hmac_update_utf16le (sha384_hmac_ctx_t *ctx, const u32 *w, const int len)
void sha384_hmac_update_utf16le (sha384_hmac_ctx_t *ctx, const u32 *w, const int len)
{
sha384_update_utf16le (&ctx->ipad, w, len);
}
static void sha384_hmac_update_utf16le_swap (sha384_hmac_ctx_t *ctx, const u32 *w, const int len)
void sha384_hmac_update_utf16le_swap (sha384_hmac_ctx_t *ctx, const u32 *w, const int len)
{
sha384_update_utf16le_swap (&ctx->ipad, w, len);
}
static void sha384_hmac_update_global (sha384_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void sha384_hmac_update_global (sha384_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
sha384_update_global (&ctx->ipad, w, len);
}
static void sha384_hmac_update_global_swap (sha384_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void sha384_hmac_update_global_swap (sha384_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
sha384_update_global_swap (&ctx->ipad, w, len);
}
static void sha384_hmac_update_global_utf16le (sha384_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void sha384_hmac_update_global_utf16le (sha384_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
sha384_update_global_utf16le (&ctx->ipad, w, len);
}
static void sha384_hmac_update_global_utf16le_swap (sha384_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void sha384_hmac_update_global_utf16le_swap (sha384_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
sha384_update_global_utf16le_swap (&ctx->ipad, w, len);
}
static void sha384_hmac_final (sha384_hmac_ctx_t *ctx)
void sha384_hmac_final (sha384_hmac_ctx_t *ctx)
{
sha384_final (&ctx->ipad);
@ -1829,7 +1829,7 @@ typedef struct sha384_ctx_vector
} sha384_ctx_vector_t;
static void sha384_transform_vector (const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4], const u32x w4[4], const u32x w5[4], const u32x w6[4], const u32x w7[4], u64x digest[8])
void sha384_transform_vector (const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4], const u32x w4[4], const u32x w5[4], const u32x w6[4], const u32x w7[4], u64x digest[8])
{
u64x a = digest[0];
u64x b = digest[1];
@ -1920,7 +1920,7 @@ static void sha384_transform_vector (const u32x w0[4], const u32x w1[4], const u
digest[7] += h;
}
static void sha384_init_vector (sha384_ctx_vector_t *ctx)
void sha384_init_vector (sha384_ctx_vector_t *ctx)
{
ctx->h[0] = SHA384M_A;
ctx->h[1] = SHA384M_B;
@ -1967,7 +1967,7 @@ static void sha384_init_vector (sha384_ctx_vector_t *ctx)
ctx->len = 0;
}
static void sha384_init_vector_from_scalar (sha384_ctx_vector_t *ctx, sha384_ctx_t *ctx0)
void sha384_init_vector_from_scalar (sha384_ctx_vector_t *ctx, sha384_ctx_t *ctx0)
{
ctx->h[0] = ctx0->h[0];
ctx->h[1] = ctx0->h[1];
@ -2014,7 +2014,7 @@ static void sha384_init_vector_from_scalar (sha384_ctx_vector_t *ctx, sha384_ctx
ctx->len = ctx0->len;
}
static void sha384_update_vector_128 (sha384_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], u32x w4[4], u32x w5[4], u32x w6[4], u32x w7[4], const int len)
void sha384_update_vector_128 (sha384_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], u32x w4[4], u32x w5[4], u32x w6[4], u32x w7[4], const int len)
{
#ifdef IS_AMD
volatile const int pos = ctx->len & 127;
@ -2144,7 +2144,7 @@ static void sha384_update_vector_128 (sha384_ctx_vector_t *ctx, u32x w0[4], u32x
}
}
static void sha384_update_vector (sha384_ctx_vector_t *ctx, const u32x *w, const int len)
void sha384_update_vector (sha384_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -2232,7 +2232,7 @@ static void sha384_update_vector (sha384_ctx_vector_t *ctx, const u32x *w, const
sha384_update_vector_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7, len - pos1);
}
static void sha384_update_vector_swap (sha384_ctx_vector_t *ctx, const u32x *w, const int len)
void sha384_update_vector_swap (sha384_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -2386,7 +2386,7 @@ static void sha384_update_vector_swap (sha384_ctx_vector_t *ctx, const u32x *w,
sha384_update_vector_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7, len - pos1);
}
static void sha384_update_vector_utf16le (sha384_ctx_vector_t *ctx, const u32x *w, const int len)
void sha384_update_vector_utf16le (sha384_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -2452,7 +2452,7 @@ static void sha384_update_vector_utf16le (sha384_ctx_vector_t *ctx, const u32x *
sha384_update_vector_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7, (len - pos1) * 2);
}
static void sha384_update_vector_utf16le_swap (sha384_ctx_vector_t *ctx, const u32x *w, const int len)
void sha384_update_vector_utf16le_swap (sha384_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -2584,7 +2584,7 @@ static void sha384_update_vector_utf16le_swap (sha384_ctx_vector_t *ctx, const u
sha384_update_vector_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7, (len - pos1) * 2);
}
static void sha384_update_vector_utf16beN (sha384_ctx_vector_t *ctx, const u32x *w, const int len)
void sha384_update_vector_utf16beN (sha384_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -2650,7 +2650,7 @@ static void sha384_update_vector_utf16beN (sha384_ctx_vector_t *ctx, const u32x
sha384_update_vector_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7, (len - pos1) * 2);
}
static void sha384_final_vector (sha384_ctx_vector_t *ctx)
void sha384_final_vector (sha384_ctx_vector_t *ctx)
{
const int pos = ctx->len & 127;
@ -2709,7 +2709,7 @@ typedef struct sha384_hmac_ctx_vector
} sha384_hmac_ctx_vector_t;
static void sha384_hmac_init_vector_128 (sha384_hmac_ctx_vector_t *ctx, const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4], const u32x w4[4], const u32x w5[4], const u32x w6[4], const u32x w7[4])
void sha384_hmac_init_vector_128 (sha384_hmac_ctx_vector_t *ctx, const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4], const u32x w4[4], const u32x w5[4], const u32x w6[4], const u32x w7[4])
{
u32x t0[4];
u32x t1[4];
@ -2799,7 +2799,7 @@ static void sha384_hmac_init_vector_128 (sha384_hmac_ctx_vector_t *ctx, const u3
sha384_update_vector_128 (&ctx->opad, t0, t1, t2, t3, t4, t5, t6, t7, 128);
}
static void sha384_hmac_init_vector (sha384_hmac_ctx_vector_t *ctx, const u32x *w, const int len)
void sha384_hmac_init_vector (sha384_hmac_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -2892,17 +2892,17 @@ static void sha384_hmac_init_vector (sha384_hmac_ctx_vector_t *ctx, const u32x *
sha384_hmac_init_vector_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7);
}
static void sha384_hmac_update_vector_128 (sha384_hmac_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], u32x w4[4], u32x w5[4], u32x w6[4], u32x w7[4], const int len)
void sha384_hmac_update_vector_128 (sha384_hmac_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], u32x w4[4], u32x w5[4], u32x w6[4], u32x w7[4], const int len)
{
sha384_update_vector_128 (&ctx->ipad, w0, w1, w2, w3, w4, w5, w6, w7, len);
}
static void sha384_hmac_update_vector (sha384_hmac_ctx_vector_t *ctx, const u32x *w, const int len)
void sha384_hmac_update_vector (sha384_hmac_ctx_vector_t *ctx, const u32x *w, const int len)
{
sha384_update_vector (&ctx->ipad, w, len);
}
static void sha384_hmac_final_vector (sha384_hmac_ctx_vector_t *ctx)
void sha384_hmac_final_vector (sha384_hmac_ctx_vector_t *ctx)
{
sha384_final_vector (&ctx->ipad);

@ -45,7 +45,7 @@ typedef struct sha512_ctx
} sha512_ctx_t;
static void sha512_transform (const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4], const u32 w4[4], const u32 w5[4], const u32 w6[4], const u32 w7[4], u64 digest[8])
void sha512_transform (const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4], const u32 w4[4], const u32 w5[4], const u32 w6[4], const u32 w7[4], u64 digest[8])
{
u64 a = digest[0];
u64 b = digest[1];
@ -136,7 +136,7 @@ static void sha512_transform (const u32 w0[4], const u32 w1[4], const u32 w2[4],
digest[7] += h;
}
static void sha512_init (sha512_ctx_t *ctx)
void sha512_init (sha512_ctx_t *ctx)
{
ctx->h[0] = SHA512M_A;
ctx->h[1] = SHA512M_B;
@ -183,7 +183,7 @@ static void sha512_init (sha512_ctx_t *ctx)
ctx->len = 0;
}
static void sha512_update_128 (sha512_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], u32 w4[4], u32 w5[4], u32 w6[4], u32 w7[4], const int len)
void sha512_update_128 (sha512_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], u32 w4[4], u32 w5[4], u32 w6[4], u32 w7[4], const int len)
{
#ifdef IS_AMD
volatile const int pos = ctx->len & 127;
@ -313,7 +313,7 @@ static void sha512_update_128 (sha512_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4
}
}
static void sha512_update (sha512_ctx_t *ctx, const u32 *w, const int len)
void sha512_update (sha512_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -401,7 +401,7 @@ static void sha512_update (sha512_ctx_t *ctx, const u32 *w, const int len)
sha512_update_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7, len - pos1);
}
static void sha512_update_swap (sha512_ctx_t *ctx, const u32 *w, const int len)
void sha512_update_swap (sha512_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -555,7 +555,7 @@ static void sha512_update_swap (sha512_ctx_t *ctx, const u32 *w, const int len)
sha512_update_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7, len - pos1);
}
static void sha512_update_utf16le (sha512_ctx_t *ctx, const u32 *w, const int len)
void sha512_update_utf16le (sha512_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -621,7 +621,7 @@ static void sha512_update_utf16le (sha512_ctx_t *ctx, const u32 *w, const int le
sha512_update_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7, (len - pos1) * 2);
}
static void sha512_update_utf16le_swap (sha512_ctx_t *ctx, const u32 *w, const int len)
void sha512_update_utf16le_swap (sha512_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -753,7 +753,7 @@ static void sha512_update_utf16le_swap (sha512_ctx_t *ctx, const u32 *w, const i
sha512_update_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7, (len - pos1) * 2);
}
static void sha512_update_global (sha512_ctx_t *ctx, const __global u32 *w, const int len)
void sha512_update_global (sha512_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -841,7 +841,7 @@ static void sha512_update_global (sha512_ctx_t *ctx, const __global u32 *w, cons
sha512_update_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7, len - pos1);
}
static void sha512_update_global_swap (sha512_ctx_t *ctx, const __global u32 *w, const int len)
void sha512_update_global_swap (sha512_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -995,7 +995,7 @@ static void sha512_update_global_swap (sha512_ctx_t *ctx, const __global u32 *w,
sha512_update_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7, len - pos1);
}
static void sha512_update_global_utf16le (sha512_ctx_t *ctx, const __global u32 *w, const int len)
void sha512_update_global_utf16le (sha512_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -1061,7 +1061,7 @@ static void sha512_update_global_utf16le (sha512_ctx_t *ctx, const __global u32
sha512_update_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7, (len - pos1) * 2);
}
static void sha512_update_global_utf16le_swap (sha512_ctx_t *ctx, const __global u32 *w, const int len)
void sha512_update_global_utf16le_swap (sha512_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -1193,7 +1193,7 @@ static void sha512_update_global_utf16le_swap (sha512_ctx_t *ctx, const __global
sha512_update_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7, (len - pos1) * 2);
}
static void sha512_final (sha512_ctx_t *ctx)
void sha512_final (sha512_ctx_t *ctx)
{
const int pos = ctx->len & 127;
@ -1252,7 +1252,7 @@ typedef struct sha512_hmac_ctx
} sha512_hmac_ctx_t;
static void sha512_hmac_init_128 (sha512_hmac_ctx_t *ctx, const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4], const u32 w4[4], const u32 w5[4], const u32 w6[4], const u32 w7[4])
void sha512_hmac_init_128 (sha512_hmac_ctx_t *ctx, const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4], const u32 w4[4], const u32 w5[4], const u32 w6[4], const u32 w7[4])
{
u32 t0[4];
u32 t1[4];
@ -1342,7 +1342,7 @@ static void sha512_hmac_init_128 (sha512_hmac_ctx_t *ctx, const u32 w0[4], const
sha512_update_128 (&ctx->opad, t0, t1, t2, t3, t4, t5, t6, t7, 128);
}
static void sha512_hmac_init (sha512_hmac_ctx_t *ctx, const u32 *w, const int len)
void sha512_hmac_init (sha512_hmac_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -1435,7 +1435,7 @@ static void sha512_hmac_init (sha512_hmac_ctx_t *ctx, const u32 *w, const int le
sha512_hmac_init_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7);
}
static void sha512_hmac_init_swap (sha512_hmac_ctx_t *ctx, const u32 *w, const int len)
void sha512_hmac_init_swap (sha512_hmac_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -1528,7 +1528,7 @@ static void sha512_hmac_init_swap (sha512_hmac_ctx_t *ctx, const u32 *w, const i
sha512_hmac_init_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7);
}
static void sha512_hmac_init_global (sha512_hmac_ctx_t *ctx, __global const u32 *w, const int len)
void sha512_hmac_init_global (sha512_hmac_ctx_t *ctx, __global const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -1621,7 +1621,7 @@ static void sha512_hmac_init_global (sha512_hmac_ctx_t *ctx, __global const u32
sha512_hmac_init_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7);
}
static void sha512_hmac_init_global_swap (sha512_hmac_ctx_t *ctx, __global const u32 *w, const int len)
void sha512_hmac_init_global_swap (sha512_hmac_ctx_t *ctx, __global const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -1714,52 +1714,52 @@ static void sha512_hmac_init_global_swap (sha512_hmac_ctx_t *ctx, __global const
sha512_hmac_init_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7);
}
static void sha512_hmac_update_128 (sha512_hmac_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], u32 w4[4], u32 w5[4], u32 w6[4], u32 w7[4], const int len)
void sha512_hmac_update_128 (sha512_hmac_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], u32 w4[4], u32 w5[4], u32 w6[4], u32 w7[4], const int len)
{
sha512_update_128 (&ctx->ipad, w0, w1, w2, w3, w4, w5, w6, w7, len);
}
static void sha512_hmac_update (sha512_hmac_ctx_t *ctx, const u32 *w, const int len)
void sha512_hmac_update (sha512_hmac_ctx_t *ctx, const u32 *w, const int len)
{
sha512_update (&ctx->ipad, w, len);
}
static void sha512_hmac_update_swap (sha512_hmac_ctx_t *ctx, const u32 *w, const int len)
void sha512_hmac_update_swap (sha512_hmac_ctx_t *ctx, const u32 *w, const int len)
{
sha512_update_swap (&ctx->ipad, w, len);
}
static void sha512_hmac_update_utf16le (sha512_hmac_ctx_t *ctx, const u32 *w, const int len)
void sha512_hmac_update_utf16le (sha512_hmac_ctx_t *ctx, const u32 *w, const int len)
{
sha512_update_utf16le (&ctx->ipad, w, len);
}
static void sha512_hmac_update_utf16le_swap (sha512_hmac_ctx_t *ctx, const u32 *w, const int len)
void sha512_hmac_update_utf16le_swap (sha512_hmac_ctx_t *ctx, const u32 *w, const int len)
{
sha512_update_utf16le_swap (&ctx->ipad, w, len);
}
static void sha512_hmac_update_global (sha512_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void sha512_hmac_update_global (sha512_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
sha512_update_global (&ctx->ipad, w, len);
}
static void sha512_hmac_update_global_swap (sha512_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void sha512_hmac_update_global_swap (sha512_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
sha512_update_global_swap (&ctx->ipad, w, len);
}
static void sha512_hmac_update_global_utf16le (sha512_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void sha512_hmac_update_global_utf16le (sha512_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
sha512_update_global_utf16le (&ctx->ipad, w, len);
}
static void sha512_hmac_update_global_utf16le_swap (sha512_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void sha512_hmac_update_global_utf16le_swap (sha512_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
sha512_update_global_utf16le_swap (&ctx->ipad, w, len);
}
static void sha512_hmac_final (sha512_hmac_ctx_t *ctx)
void sha512_hmac_final (sha512_hmac_ctx_t *ctx)
{
sha512_final (&ctx->ipad);
@ -1829,7 +1829,7 @@ typedef struct sha512_ctx_vector
} sha512_ctx_vector_t;
static void sha512_transform_vector (const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4], const u32x w4[4], const u32x w5[4], const u32x w6[4], const u32x w7[4], u64x digest[8])
void sha512_transform_vector (const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4], const u32x w4[4], const u32x w5[4], const u32x w6[4], const u32x w7[4], u64x digest[8])
{
u64x a = digest[0];
u64x b = digest[1];
@ -1920,7 +1920,7 @@ static void sha512_transform_vector (const u32x w0[4], const u32x w1[4], const u
digest[7] += h;
}
static void sha512_init_vector (sha512_ctx_vector_t *ctx)
void sha512_init_vector (sha512_ctx_vector_t *ctx)
{
ctx->h[0] = SHA512M_A;
ctx->h[1] = SHA512M_B;
@ -1967,7 +1967,7 @@ static void sha512_init_vector (sha512_ctx_vector_t *ctx)
ctx->len = 0;
}
static void sha512_init_vector_from_scalar (sha512_ctx_vector_t *ctx, sha512_ctx_t *ctx0)
void sha512_init_vector_from_scalar (sha512_ctx_vector_t *ctx, sha512_ctx_t *ctx0)
{
ctx->h[0] = ctx0->h[0];
ctx->h[1] = ctx0->h[1];
@ -2014,7 +2014,7 @@ static void sha512_init_vector_from_scalar (sha512_ctx_vector_t *ctx, sha512_ctx
ctx->len = ctx0->len;
}
static void sha512_update_vector_128 (sha512_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], u32x w4[4], u32x w5[4], u32x w6[4], u32x w7[4], const int len)
void sha512_update_vector_128 (sha512_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], u32x w4[4], u32x w5[4], u32x w6[4], u32x w7[4], const int len)
{
#ifdef IS_AMD
volatile const int pos = ctx->len & 127;
@ -2144,7 +2144,7 @@ static void sha512_update_vector_128 (sha512_ctx_vector_t *ctx, u32x w0[4], u32x
}
}
static void sha512_update_vector (sha512_ctx_vector_t *ctx, const u32x *w, const int len)
void sha512_update_vector (sha512_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -2232,7 +2232,7 @@ static void sha512_update_vector (sha512_ctx_vector_t *ctx, const u32x *w, const
sha512_update_vector_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7, len - pos1);
}
static void sha512_update_vector_swap (sha512_ctx_vector_t *ctx, const u32x *w, const int len)
void sha512_update_vector_swap (sha512_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -2386,7 +2386,7 @@ static void sha512_update_vector_swap (sha512_ctx_vector_t *ctx, const u32x *w,
sha512_update_vector_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7, len - pos1);
}
static void sha512_update_vector_utf16le (sha512_ctx_vector_t *ctx, const u32x *w, const int len)
void sha512_update_vector_utf16le (sha512_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -2452,7 +2452,7 @@ static void sha512_update_vector_utf16le (sha512_ctx_vector_t *ctx, const u32x *
sha512_update_vector_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7, (len - pos1) * 2);
}
static void sha512_update_vector_utf16le_swap (sha512_ctx_vector_t *ctx, const u32x *w, const int len)
void sha512_update_vector_utf16le_swap (sha512_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -2584,7 +2584,7 @@ static void sha512_update_vector_utf16le_swap (sha512_ctx_vector_t *ctx, const u
sha512_update_vector_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7, (len - pos1) * 2);
}
static void sha512_update_vector_utf16beN (sha512_ctx_vector_t *ctx, const u32x *w, const int len)
void sha512_update_vector_utf16beN (sha512_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -2650,7 +2650,7 @@ static void sha512_update_vector_utf16beN (sha512_ctx_vector_t *ctx, const u32x
sha512_update_vector_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7, (len - pos1) * 2);
}
static void sha512_final_vector (sha512_ctx_vector_t *ctx)
void sha512_final_vector (sha512_ctx_vector_t *ctx)
{
const int pos = ctx->len & 127;
@ -2709,7 +2709,7 @@ typedef struct sha512_hmac_ctx_vector
} sha512_hmac_ctx_vector_t;
static void sha512_hmac_init_vector_128 (sha512_hmac_ctx_vector_t *ctx, const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4], const u32x w4[4], const u32x w5[4], const u32x w6[4], const u32x w7[4])
void sha512_hmac_init_vector_128 (sha512_hmac_ctx_vector_t *ctx, const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4], const u32x w4[4], const u32x w5[4], const u32x w6[4], const u32x w7[4])
{
u32x t0[4];
u32x t1[4];
@ -2799,7 +2799,7 @@ static void sha512_hmac_init_vector_128 (sha512_hmac_ctx_vector_t *ctx, const u3
sha512_update_vector_128 (&ctx->opad, t0, t1, t2, t3, t4, t5, t6, t7, 128);
}
static void sha512_hmac_init_vector (sha512_hmac_ctx_vector_t *ctx, const u32x *w, const int len)
void sha512_hmac_init_vector (sha512_hmac_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -2892,17 +2892,17 @@ static void sha512_hmac_init_vector (sha512_hmac_ctx_vector_t *ctx, const u32x *
sha512_hmac_init_vector_128 (ctx, w0, w1, w2, w3, w4, w5, w6, w7);
}
static void sha512_hmac_update_vector_128 (sha512_hmac_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], u32x w4[4], u32x w5[4], u32x w6[4], u32x w7[4], const int len)
void sha512_hmac_update_vector_128 (sha512_hmac_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], u32x w4[4], u32x w5[4], u32x w6[4], u32x w7[4], const int len)
{
sha512_update_vector_128 (&ctx->ipad, w0, w1, w2, w3, w4, w5, w6, w7, len);
}
static void sha512_hmac_update_vector (sha512_hmac_ctx_vector_t *ctx, const u32x *w, const int len)
void sha512_hmac_update_vector (sha512_hmac_ctx_vector_t *ctx, const u32x *w, const int len)
{
sha512_update_vector (&ctx->ipad, w, len);
}
static void sha512_hmac_final_vector (sha512_hmac_ctx_vector_t *ctx)
void sha512_hmac_final_vector (sha512_hmac_ctx_vector_t *ctx)
{
sha512_final_vector (&ctx->ipad);

@ -1130,7 +1130,7 @@ typedef struct whirlpool_ctx
} whirlpool_ctx_t;
static void whirlpool_transform (const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4], u32 digest[16], SHM_TYPE u32 (*s_Ch)[256], SHM_TYPE u32 (*s_Cl)[256])
void whirlpool_transform (const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4], u32 digest[16], SHM_TYPE u32 (*s_Ch)[256], SHM_TYPE u32 (*s_Cl)[256])
{
u32 Kh[8];
u32 Kl[8];
@ -1300,7 +1300,7 @@ static void whirlpool_transform (const u32 w0[4], const u32 w1[4], const u32 w2[
digest[15] ^= statel[7] ^ w3[3];
}
static void whirlpool_init (whirlpool_ctx_t *ctx, SHM_TYPE u32 (*s_Ch)[256], SHM_TYPE u32 (*s_Cl)[256])
void whirlpool_init (whirlpool_ctx_t *ctx, SHM_TYPE u32 (*s_Ch)[256], SHM_TYPE u32 (*s_Cl)[256])
{
ctx->h[ 0] = 0;
ctx->h[ 1] = 0;
@ -1342,7 +1342,7 @@ static void whirlpool_init (whirlpool_ctx_t *ctx, SHM_TYPE u32 (*s_Ch)[256], SHM
ctx->s_Cl = s_Cl;
}
static void whirlpool_update_64 (whirlpool_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const int len)
void whirlpool_update_64 (whirlpool_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const int len)
{
#ifdef IS_AMD
volatile const int pos = ctx->len & 63;
@ -1420,7 +1420,7 @@ static void whirlpool_update_64 (whirlpool_ctx_t *ctx, u32 w0[4], u32 w1[4], u32
}
}
static void whirlpool_update (whirlpool_ctx_t *ctx, const u32 *w, const int len)
void whirlpool_update (whirlpool_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -1472,7 +1472,7 @@ static void whirlpool_update (whirlpool_ctx_t *ctx, const u32 *w, const int len)
whirlpool_update_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void whirlpool_update_swap (whirlpool_ctx_t *ctx, const u32 *w, const int len)
void whirlpool_update_swap (whirlpool_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -1558,7 +1558,7 @@ static void whirlpool_update_swap (whirlpool_ctx_t *ctx, const u32 *w, const int
whirlpool_update_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void whirlpool_update_utf16le (whirlpool_ctx_t *ctx, const u32 *w, const int len)
void whirlpool_update_utf16le (whirlpool_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -1600,7 +1600,7 @@ static void whirlpool_update_utf16le (whirlpool_ctx_t *ctx, const u32 *w, const
whirlpool_update_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void whirlpool_update_utf16le_swap (whirlpool_ctx_t *ctx, const u32 *w, const int len)
void whirlpool_update_utf16le_swap (whirlpool_ctx_t *ctx, const u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -1676,7 +1676,7 @@ static void whirlpool_update_utf16le_swap (whirlpool_ctx_t *ctx, const u32 *w, c
whirlpool_update_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void whirlpool_update_global (whirlpool_ctx_t *ctx, const __global u32 *w, const int len)
void whirlpool_update_global (whirlpool_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -1728,7 +1728,7 @@ static void whirlpool_update_global (whirlpool_ctx_t *ctx, const __global u32 *w
whirlpool_update_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void whirlpool_update_global_swap (whirlpool_ctx_t *ctx, const __global u32 *w, const int len)
void whirlpool_update_global_swap (whirlpool_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -1814,7 +1814,7 @@ static void whirlpool_update_global_swap (whirlpool_ctx_t *ctx, const __global u
whirlpool_update_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void whirlpool_update_global_utf16le (whirlpool_ctx_t *ctx, const __global u32 *w, const int len)
void whirlpool_update_global_utf16le (whirlpool_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -1856,7 +1856,7 @@ static void whirlpool_update_global_utf16le (whirlpool_ctx_t *ctx, const __globa
whirlpool_update_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void whirlpool_update_global_utf16le_swap (whirlpool_ctx_t *ctx, const __global u32 *w, const int len)
void whirlpool_update_global_utf16le_swap (whirlpool_ctx_t *ctx, const __global u32 *w, const int len)
{
u32 w0[4];
u32 w1[4];
@ -1932,7 +1932,7 @@ static void whirlpool_update_global_utf16le_swap (whirlpool_ctx_t *ctx, const __
whirlpool_update_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void whirlpool_final (whirlpool_ctx_t *ctx)
void whirlpool_final (whirlpool_ctx_t *ctx)
{
const int pos = ctx->len & 63;
@ -1975,7 +1975,7 @@ typedef struct whirlpool_hmac_ctx
} whirlpool_hmac_ctx_t;
static void whirlpool_hmac_init_64 (whirlpool_hmac_ctx_t *ctx, const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4], SHM_TYPE u32 (*s_Ch)[256], SHM_TYPE u32 (*s_Cl)[256])
void whirlpool_hmac_init_64 (whirlpool_hmac_ctx_t *ctx, const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4], SHM_TYPE u32 (*s_Ch)[256], SHM_TYPE u32 (*s_Cl)[256])
{
u32 t0[4];
u32 t1[4];
@ -2029,7 +2029,7 @@ static void whirlpool_hmac_init_64 (whirlpool_hmac_ctx_t *ctx, const u32 w0[4],
whirlpool_update_64 (&ctx->opad, t0, t1, t2, t3, 64);
}
static void whirlpool_hmac_init (whirlpool_hmac_ctx_t *ctx, const u32 *w, const int len, SHM_TYPE u32 (*s_Ch)[256], SHM_TYPE u32 (*s_Cl)[256])
void whirlpool_hmac_init (whirlpool_hmac_ctx_t *ctx, const u32 *w, const int len, SHM_TYPE u32 (*s_Ch)[256], SHM_TYPE u32 (*s_Cl)[256])
{
u32 w0[4];
u32 w1[4];
@ -2086,7 +2086,7 @@ static void whirlpool_hmac_init (whirlpool_hmac_ctx_t *ctx, const u32 *w, const
whirlpool_hmac_init_64 (ctx, w0, w1, w2, w3, s_Ch, s_Cl);
}
static void whirlpool_hmac_init_swap (whirlpool_hmac_ctx_t *ctx, const u32 *w, const int len, SHM_TYPE u32 (*s_Ch)[256], SHM_TYPE u32 (*s_Cl)[256])
void whirlpool_hmac_init_swap (whirlpool_hmac_ctx_t *ctx, const u32 *w, const int len, SHM_TYPE u32 (*s_Ch)[256], SHM_TYPE u32 (*s_Cl)[256])
{
u32 w0[4];
u32 w1[4];
@ -2143,7 +2143,7 @@ static void whirlpool_hmac_init_swap (whirlpool_hmac_ctx_t *ctx, const u32 *w, c
whirlpool_hmac_init_64 (ctx, w0, w1, w2, w3, s_Ch, s_Cl);
}
static void whirlpool_hmac_init_global (whirlpool_hmac_ctx_t *ctx, __global const u32 *w, const int len, SHM_TYPE u32 (*s_Ch)[256], SHM_TYPE u32 (*s_Cl)[256])
void whirlpool_hmac_init_global (whirlpool_hmac_ctx_t *ctx, __global const u32 *w, const int len, SHM_TYPE u32 (*s_Ch)[256], SHM_TYPE u32 (*s_Cl)[256])
{
u32 w0[4];
u32 w1[4];
@ -2200,7 +2200,7 @@ static void whirlpool_hmac_init_global (whirlpool_hmac_ctx_t *ctx, __global cons
whirlpool_hmac_init_64 (ctx, w0, w1, w2, w3, s_Ch, s_Cl);
}
static void whirlpool_hmac_init_global_swap (whirlpool_hmac_ctx_t *ctx, __global const u32 *w, const int len, SHM_TYPE u32 (*s_Ch)[256], SHM_TYPE u32 (*s_Cl)[256])
void whirlpool_hmac_init_global_swap (whirlpool_hmac_ctx_t *ctx, __global const u32 *w, const int len, SHM_TYPE u32 (*s_Ch)[256], SHM_TYPE u32 (*s_Cl)[256])
{
u32 w0[4];
u32 w1[4];
@ -2257,52 +2257,52 @@ static void whirlpool_hmac_init_global_swap (whirlpool_hmac_ctx_t *ctx, __global
whirlpool_hmac_init_64 (ctx, w0, w1, w2, w3, s_Ch, s_Cl);
}
static void whirlpool_hmac_update_64 (whirlpool_hmac_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const int len)
void whirlpool_hmac_update_64 (whirlpool_hmac_ctx_t *ctx, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const int len)
{
whirlpool_update_64 (&ctx->ipad, w0, w1, w2, w3, len);
}
static void whirlpool_hmac_update (whirlpool_hmac_ctx_t *ctx, const u32 *w, const int len)
void whirlpool_hmac_update (whirlpool_hmac_ctx_t *ctx, const u32 *w, const int len)
{
whirlpool_update (&ctx->ipad, w, len);
}
static void whirlpool_hmac_update_swap (whirlpool_hmac_ctx_t *ctx, const u32 *w, const int len)
void whirlpool_hmac_update_swap (whirlpool_hmac_ctx_t *ctx, const u32 *w, const int len)
{
whirlpool_update_swap (&ctx->ipad, w, len);
}
static void whirlpool_hmac_update_utf16le (whirlpool_hmac_ctx_t *ctx, const u32 *w, const int len)
void whirlpool_hmac_update_utf16le (whirlpool_hmac_ctx_t *ctx, const u32 *w, const int len)
{
whirlpool_update_utf16le (&ctx->ipad, w, len);
}
static void whirlpool_hmac_update_utf16le_swap (whirlpool_hmac_ctx_t *ctx, const u32 *w, const int len)
void whirlpool_hmac_update_utf16le_swap (whirlpool_hmac_ctx_t *ctx, const u32 *w, const int len)
{
whirlpool_update_utf16le_swap (&ctx->ipad, w, len);
}
static void whirlpool_hmac_update_global (whirlpool_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void whirlpool_hmac_update_global (whirlpool_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
whirlpool_update_global (&ctx->ipad, w, len);
}
static void whirlpool_hmac_update_global_swap (whirlpool_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void whirlpool_hmac_update_global_swap (whirlpool_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
whirlpool_update_global_swap (&ctx->ipad, w, len);
}
static void whirlpool_hmac_update_global_utf16le (whirlpool_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void whirlpool_hmac_update_global_utf16le (whirlpool_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
whirlpool_update_global_utf16le (&ctx->ipad, w, len);
}
static void whirlpool_hmac_update_global_utf16le_swap (whirlpool_hmac_ctx_t *ctx, const __global u32 *w, const int len)
void whirlpool_hmac_update_global_utf16le_swap (whirlpool_hmac_ctx_t *ctx, const __global u32 *w, const int len)
{
whirlpool_update_global_utf16le_swap (&ctx->ipad, w, len);
}
static void whirlpool_hmac_final (whirlpool_hmac_ctx_t *ctx)
void whirlpool_hmac_final (whirlpool_hmac_ctx_t *ctx)
{
whirlpool_final (&ctx->ipad);
@ -2351,7 +2351,7 @@ typedef struct whirlpool_ctx_vector
} whirlpool_ctx_vector_t;
static void whirlpool_transform_vector (const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4], u32x digest[16], SHM_TYPE u32 (*s_Ch)[256], SHM_TYPE u32 (*s_Cl)[256])
void whirlpool_transform_vector (const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4], u32x digest[16], SHM_TYPE u32 (*s_Ch)[256], SHM_TYPE u32 (*s_Cl)[256])
{
u32x Kh[8];
u32x Kl[8];
@ -2521,7 +2521,7 @@ static void whirlpool_transform_vector (const u32x w0[4], const u32x w1[4], cons
digest[15] ^= statel[7] ^ w3[3];
}
static void whirlpool_init_vector (whirlpool_ctx_vector_t *ctx, SHM_TYPE u32 (*s_Ch)[256], SHM_TYPE u32 (*s_Cl)[256])
void whirlpool_init_vector (whirlpool_ctx_vector_t *ctx, SHM_TYPE u32 (*s_Ch)[256], SHM_TYPE u32 (*s_Cl)[256])
{
ctx->h[ 0] = 0;
ctx->h[ 1] = 0;
@ -2563,7 +2563,7 @@ static void whirlpool_init_vector (whirlpool_ctx_vector_t *ctx, SHM_TYPE u32 (*s
ctx->s_Cl = s_Cl;
}
static void whirlpool_init_vector_from_scalar (whirlpool_ctx_vector_t *ctx, whirlpool_ctx_t *ctx0)
void whirlpool_init_vector_from_scalar (whirlpool_ctx_vector_t *ctx, whirlpool_ctx_t *ctx0)
{
ctx->h[ 0] = ctx0->h[ 0];
ctx->h[ 1] = ctx0->h[ 1];
@ -2605,7 +2605,7 @@ static void whirlpool_init_vector_from_scalar (whirlpool_ctx_vector_t *ctx, whir
ctx->s_Cl = ctx0->s_Cl;
}
static void whirlpool_update_vector_64 (whirlpool_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const int len)
void whirlpool_update_vector_64 (whirlpool_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const int len)
{
#ifdef IS_AMD
volatile const int pos = ctx->len & 63;
@ -2683,7 +2683,7 @@ static void whirlpool_update_vector_64 (whirlpool_ctx_vector_t *ctx, u32x w0[4],
}
}
static void whirlpool_update_vector (whirlpool_ctx_vector_t *ctx, const u32x *w, const int len)
void whirlpool_update_vector (whirlpool_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -2735,7 +2735,7 @@ static void whirlpool_update_vector (whirlpool_ctx_vector_t *ctx, const u32x *w,
whirlpool_update_vector_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void whirlpool_update_vector_swap (whirlpool_ctx_vector_t *ctx, const u32x *w, const int len)
void whirlpool_update_vector_swap (whirlpool_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -2821,7 +2821,7 @@ static void whirlpool_update_vector_swap (whirlpool_ctx_vector_t *ctx, const u32
whirlpool_update_vector_64 (ctx, w0, w1, w2, w3, len - pos1);
}
static void whirlpool_update_vector_utf16le (whirlpool_ctx_vector_t *ctx, const u32x *w, const int len)
void whirlpool_update_vector_utf16le (whirlpool_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -2863,7 +2863,7 @@ static void whirlpool_update_vector_utf16le (whirlpool_ctx_vector_t *ctx, const
whirlpool_update_vector_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void whirlpool_update_vector_utf16le_swap (whirlpool_ctx_vector_t *ctx, const u32x *w, const int len)
void whirlpool_update_vector_utf16le_swap (whirlpool_ctx_vector_t *ctx, const u32x *w, const int len)
{
u32x w0[4];
u32x w1[4];
@ -2939,7 +2939,7 @@ static void whirlpool_update_vector_utf16le_swap (whirlpool_ctx_vector_t *ctx, c
whirlpool_update_vector_64 (ctx, w0, w1, w2, w3, (len - pos1) * 2);
}
static void whirlpool_final_vector (whirlpool_ctx_vector_t *ctx)
void whirlpool_final_vector (whirlpool_ctx_vector_t *ctx)
{
const int pos = ctx->len & 63;
@ -2982,7 +2982,7 @@ typedef struct whirlpool_hmac_ctx_vector
} whirlpool_hmac_ctx_vector_t;
static void whirlpool_hmac_init_vector_64 (whirlpool_hmac_ctx_vector_t *ctx, const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4], SHM_TYPE u32 (*s_Ch)[256], SHM_TYPE u32 (*s_Cl)[256])
void whirlpool_hmac_init_vector_64 (whirlpool_hmac_ctx_vector_t *ctx, const u32x w0[4], const u32x w1[4], const u32x w2[4], const u32x w3[4], SHM_TYPE u32 (*s_Ch)[256], SHM_TYPE u32 (*s_Cl)[256])
{
u32x t0[4];
u32x t1[4];
@ -3036,7 +3036,7 @@ static void whirlpool_hmac_init_vector_64 (whirlpool_hmac_ctx_vector_t *ctx, con
whirlpool_update_vector_64 (&ctx->opad, t0, t1, t2, t3, 64);
}
static void whirlpool_hmac_init_vector (whirlpool_hmac_ctx_vector_t *ctx, const u32x *w, const int len, SHM_TYPE u32 (*s_Ch)[256], SHM_TYPE u32 (*s_Cl)[256])
void whirlpool_hmac_init_vector (whirlpool_hmac_ctx_vector_t *ctx, const u32x *w, const int len, SHM_TYPE u32 (*s_Ch)[256], SHM_TYPE u32 (*s_Cl)[256])
{
u32x w0[4];
u32x w1[4];
@ -3093,17 +3093,17 @@ static void whirlpool_hmac_init_vector (whirlpool_hmac_ctx_vector_t *ctx, const
whirlpool_hmac_init_vector_64 (ctx, w0, w1, w2, w3, s_Ch, s_Cl);
}
static void whirlpool_hmac_update_vector_64 (whirlpool_hmac_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const int len)
void whirlpool_hmac_update_vector_64 (whirlpool_hmac_ctx_vector_t *ctx, u32x w0[4], u32x w1[4], u32x w2[4], u32x w3[4], const int len)
{
whirlpool_update_vector_64 (&ctx->ipad, w0, w1, w2, w3, len);
}
static void whirlpool_hmac_update_vector (whirlpool_hmac_ctx_vector_t *ctx, const u32x *w, const int len)
void whirlpool_hmac_update_vector (whirlpool_hmac_ctx_vector_t *ctx, const u32x *w, const int len)
{
whirlpool_update_vector (&ctx->ipad, w, len);
}
static void whirlpool_hmac_final_vector (whirlpool_hmac_ctx_vector_t *ctx)
void whirlpool_hmac_final_vector (whirlpool_hmac_ctx_vector_t *ctx)
{
whirlpool_final_vector (&ctx->ipad);

@ -1,7 +1,7 @@
// cbc-essiv
static void aes128_decrypt_cbc (const u32 *ks1, const u32 *in, u32 *out, u32 *essiv, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void aes128_decrypt_cbc (const u32 *ks1, const u32 *in, u32 *out, u32 *essiv, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
aes128_decrypt (ks1, in, out, s_td0, s_td1, s_td2, s_td3, s_td4);
@ -16,7 +16,7 @@ static void aes128_decrypt_cbc (const u32 *ks1, const u32 *in, u32 *out, u32 *es
essiv[3] = in[3];
}
static void aes256_decrypt_cbc (const u32 *ks1, const u32 *in, u32 *out, u32 *essiv, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void aes256_decrypt_cbc (const u32 *ks1, const u32 *in, u32 *out, u32 *essiv, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
aes256_decrypt (ks1, in, out, s_td0, s_td1, s_td2, s_td3, s_td4);
@ -31,7 +31,7 @@ static void aes256_decrypt_cbc (const u32 *ks1, const u32 *in, u32 *out, u32 *es
essiv[3] = in[3];
}
static void luks_decrypt_sector_aes_cbc_essiv128 (__global const u32 *in, u32 *out, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_essiv128 (__global const u32 *in, u32 *out, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -65,7 +65,7 @@ static void luks_decrypt_sector_aes_cbc_essiv128 (__global const u32 *in, u32 *o
}
}
static void luks_decrypt_sector_aes_cbc_essiv128_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_essiv128_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -102,7 +102,7 @@ static void luks_decrypt_sector_aes_cbc_essiv128_mk_sha1 (__global const u32 *in
}
}
static void luks_decrypt_sector_aes_cbc_essiv128_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_essiv128_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -162,7 +162,7 @@ static void luks_decrypt_sector_aes_cbc_essiv128_mk_sha1_final (__global const u
}
}
static void luks_decrypt_sector_aes_cbc_essiv128_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_essiv128_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -199,7 +199,7 @@ static void luks_decrypt_sector_aes_cbc_essiv128_mk_sha256 (__global const u32 *
}
}
static void luks_decrypt_sector_aes_cbc_essiv128_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_essiv128_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -259,7 +259,7 @@ static void luks_decrypt_sector_aes_cbc_essiv128_mk_sha256_final (__global const
}
}
static void luks_decrypt_sector_aes_cbc_essiv128_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_essiv128_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -296,7 +296,7 @@ static void luks_decrypt_sector_aes_cbc_essiv128_mk_sha512 (__global const u32 *
}
}
static void luks_decrypt_sector_aes_cbc_essiv128_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_essiv128_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -356,7 +356,7 @@ static void luks_decrypt_sector_aes_cbc_essiv128_mk_sha512_final (__global const
}
}
static void luks_decrypt_sector_aes_cbc_essiv128_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_essiv128_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -393,7 +393,7 @@ static void luks_decrypt_sector_aes_cbc_essiv128_mk_ripemd160 (__global const u3
}
}
static void luks_decrypt_sector_aes_cbc_essiv128_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_essiv128_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -453,7 +453,7 @@ static void luks_decrypt_sector_aes_cbc_essiv128_mk_ripemd160_final (__global co
}
}
static void luks_decrypt_sector_aes_cbc_essiv256 (__global const u32 *in, u32 *out, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_essiv256 (__global const u32 *in, u32 *out, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -487,7 +487,7 @@ static void luks_decrypt_sector_aes_cbc_essiv256 (__global const u32 *in, u32 *o
}
}
static void luks_decrypt_sector_aes_cbc_essiv256_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_essiv256_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -524,7 +524,7 @@ static void luks_decrypt_sector_aes_cbc_essiv256_mk_sha1 (__global const u32 *in
}
}
static void luks_decrypt_sector_aes_cbc_essiv256_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_essiv256_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -584,7 +584,7 @@ static void luks_decrypt_sector_aes_cbc_essiv256_mk_sha1_final (__global const u
}
}
static void luks_decrypt_sector_aes_cbc_essiv256_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_essiv256_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -621,7 +621,7 @@ static void luks_decrypt_sector_aes_cbc_essiv256_mk_sha256 (__global const u32 *
}
}
static void luks_decrypt_sector_aes_cbc_essiv256_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_essiv256_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -681,7 +681,7 @@ static void luks_decrypt_sector_aes_cbc_essiv256_mk_sha256_final (__global const
}
}
static void luks_decrypt_sector_aes_cbc_essiv256_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_essiv256_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -718,7 +718,7 @@ static void luks_decrypt_sector_aes_cbc_essiv256_mk_sha512 (__global const u32 *
}
}
static void luks_decrypt_sector_aes_cbc_essiv256_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_essiv256_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -778,7 +778,7 @@ static void luks_decrypt_sector_aes_cbc_essiv256_mk_sha512_final (__global const
}
}
static void luks_decrypt_sector_aes_cbc_essiv256_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_essiv256_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -815,7 +815,7 @@ static void luks_decrypt_sector_aes_cbc_essiv256_mk_ripemd160 (__global const u3
}
}
static void luks_decrypt_sector_aes_cbc_essiv256_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_essiv256_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -877,7 +877,7 @@ static void luks_decrypt_sector_aes_cbc_essiv256_mk_ripemd160_final (__global co
// cbc-plain
static void luks_decrypt_sector_aes_cbc_plain128 (__global const u32 *in, u32 *out, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_plain128 (__global const u32 *in, u32 *out, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -907,7 +907,7 @@ static void luks_decrypt_sector_aes_cbc_plain128 (__global const u32 *in, u32 *o
}
}
static void luks_decrypt_sector_aes_cbc_plain128_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_plain128_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -940,7 +940,7 @@ static void luks_decrypt_sector_aes_cbc_plain128_mk_sha1 (__global const u32 *in
}
}
static void luks_decrypt_sector_aes_cbc_plain128_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_plain128_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -996,7 +996,7 @@ static void luks_decrypt_sector_aes_cbc_plain128_mk_sha1_final (__global const u
}
}
static void luks_decrypt_sector_aes_cbc_plain128_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_plain128_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1029,7 +1029,7 @@ static void luks_decrypt_sector_aes_cbc_plain128_mk_sha256 (__global const u32 *
}
}
static void luks_decrypt_sector_aes_cbc_plain128_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_plain128_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1085,7 +1085,7 @@ static void luks_decrypt_sector_aes_cbc_plain128_mk_sha256_final (__global const
}
}
static void luks_decrypt_sector_aes_cbc_plain128_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_plain128_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1118,7 +1118,7 @@ static void luks_decrypt_sector_aes_cbc_plain128_mk_sha512 (__global const u32 *
}
}
static void luks_decrypt_sector_aes_cbc_plain128_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_plain128_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1174,7 +1174,7 @@ static void luks_decrypt_sector_aes_cbc_plain128_mk_sha512_final (__global const
}
}
static void luks_decrypt_sector_aes_cbc_plain128_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_plain128_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1207,7 +1207,7 @@ static void luks_decrypt_sector_aes_cbc_plain128_mk_ripemd160 (__global const u3
}
}
static void luks_decrypt_sector_aes_cbc_plain128_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_plain128_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1263,7 +1263,7 @@ static void luks_decrypt_sector_aes_cbc_plain128_mk_ripemd160_final (__global co
}
}
static void luks_decrypt_sector_aes_cbc_plain256 (__global const u32 *in, u32 *out, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_plain256 (__global const u32 *in, u32 *out, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1293,7 +1293,7 @@ static void luks_decrypt_sector_aes_cbc_plain256 (__global const u32 *in, u32 *o
}
}
static void luks_decrypt_sector_aes_cbc_plain256_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_plain256_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1326,7 +1326,7 @@ static void luks_decrypt_sector_aes_cbc_plain256_mk_sha1 (__global const u32 *in
}
}
static void luks_decrypt_sector_aes_cbc_plain256_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_plain256_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1382,7 +1382,7 @@ static void luks_decrypt_sector_aes_cbc_plain256_mk_sha1_final (__global const u
}
}
static void luks_decrypt_sector_aes_cbc_plain256_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_plain256_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1415,7 +1415,7 @@ static void luks_decrypt_sector_aes_cbc_plain256_mk_sha256 (__global const u32 *
}
}
static void luks_decrypt_sector_aes_cbc_plain256_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_plain256_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1471,7 +1471,7 @@ static void luks_decrypt_sector_aes_cbc_plain256_mk_sha256_final (__global const
}
}
static void luks_decrypt_sector_aes_cbc_plain256_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_plain256_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1504,7 +1504,7 @@ static void luks_decrypt_sector_aes_cbc_plain256_mk_sha512 (__global const u32 *
}
}
static void luks_decrypt_sector_aes_cbc_plain256_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_plain256_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1560,7 +1560,7 @@ static void luks_decrypt_sector_aes_cbc_plain256_mk_sha512_final (__global const
}
}
static void luks_decrypt_sector_aes_cbc_plain256_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_plain256_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1593,7 +1593,7 @@ static void luks_decrypt_sector_aes_cbc_plain256_mk_ripemd160 (__global const u3
}
}
static void luks_decrypt_sector_aes_cbc_plain256_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_cbc_plain256_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1651,7 +1651,7 @@ static void luks_decrypt_sector_aes_cbc_plain256_mk_ripemd160_final (__global co
// xts-plain
static void aes128_decrypt_xts (const u32 *ks1, const u32 *in, u32 *out, u32 *T, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void aes128_decrypt_xts (const u32 *ks1, const u32 *in, u32 *out, u32 *T, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
out[0] = in[0];
out[1] = in[1];
@ -1673,7 +1673,7 @@ static void aes128_decrypt_xts (const u32 *ks1, const u32 *in, u32 *out, u32 *T,
xts_mul2 (T, T);
}
static void aes256_decrypt_xts (const u32 *ks1, const u32 *in, u32 *out, u32 *T, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void aes256_decrypt_xts (const u32 *ks1, const u32 *in, u32 *out, u32 *T, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
out[0] = in[0];
out[1] = in[1];
@ -1695,7 +1695,7 @@ static void aes256_decrypt_xts (const u32 *ks1, const u32 *in, u32 *out, u32 *T,
xts_mul2 (T, T);
}
static void luks_decrypt_sector_aes_xts_plain256 (__global const u32 *in, u32 *out, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_xts_plain256 (__global const u32 *in, u32 *out, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1729,7 +1729,7 @@ static void luks_decrypt_sector_aes_xts_plain256 (__global const u32 *in, u32 *o
}
}
static void luks_decrypt_sector_aes_xts_plain256_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_xts_plain256_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1766,7 +1766,7 @@ static void luks_decrypt_sector_aes_xts_plain256_mk_sha1 (__global const u32 *in
}
}
static void luks_decrypt_sector_aes_xts_plain256_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_xts_plain256_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1826,7 +1826,7 @@ static void luks_decrypt_sector_aes_xts_plain256_mk_sha1_final (__global const u
}
}
static void luks_decrypt_sector_aes_xts_plain256_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_xts_plain256_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1863,7 +1863,7 @@ static void luks_decrypt_sector_aes_xts_plain256_mk_sha256 (__global const u32 *
}
}
static void luks_decrypt_sector_aes_xts_plain256_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_xts_plain256_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1923,7 +1923,7 @@ static void luks_decrypt_sector_aes_xts_plain256_mk_sha256_final (__global const
}
}
static void luks_decrypt_sector_aes_xts_plain256_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_xts_plain256_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1960,7 +1960,7 @@ static void luks_decrypt_sector_aes_xts_plain256_mk_sha512 (__global const u32 *
}
}
static void luks_decrypt_sector_aes_xts_plain256_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_xts_plain256_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2020,7 +2020,7 @@ static void luks_decrypt_sector_aes_xts_plain256_mk_sha512_final (__global const
}
}
static void luks_decrypt_sector_aes_xts_plain256_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_xts_plain256_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2057,7 +2057,7 @@ static void luks_decrypt_sector_aes_xts_plain256_mk_ripemd160 (__global const u3
}
}
static void luks_decrypt_sector_aes_xts_plain256_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_xts_plain256_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2117,7 +2117,7 @@ static void luks_decrypt_sector_aes_xts_plain256_mk_ripemd160_final (__global co
}
}
static void luks_decrypt_sector_aes_xts_plain512 (__global const u32 *in, u32 *out, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_xts_plain512 (__global const u32 *in, u32 *out, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2151,7 +2151,7 @@ static void luks_decrypt_sector_aes_xts_plain512 (__global const u32 *in, u32 *o
}
}
static void luks_decrypt_sector_aes_xts_plain512_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_xts_plain512_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2188,7 +2188,7 @@ static void luks_decrypt_sector_aes_xts_plain512_mk_sha1 (__global const u32 *in
}
}
static void luks_decrypt_sector_aes_xts_plain512_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_xts_plain512_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2248,7 +2248,7 @@ static void luks_decrypt_sector_aes_xts_plain512_mk_sha1_final (__global const u
}
}
static void luks_decrypt_sector_aes_xts_plain512_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_xts_plain512_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2285,7 +2285,7 @@ static void luks_decrypt_sector_aes_xts_plain512_mk_sha256 (__global const u32 *
}
}
static void luks_decrypt_sector_aes_xts_plain512_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_xts_plain512_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2345,7 +2345,7 @@ static void luks_decrypt_sector_aes_xts_plain512_mk_sha256_final (__global const
}
}
static void luks_decrypt_sector_aes_xts_plain512_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_xts_plain512_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2382,7 +2382,7 @@ static void luks_decrypt_sector_aes_xts_plain512_mk_sha512 (__global const u32 *
}
}
static void luks_decrypt_sector_aes_xts_plain512_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_xts_plain512_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2442,7 +2442,7 @@ static void luks_decrypt_sector_aes_xts_plain512_mk_sha512_final (__global const
}
}
static void luks_decrypt_sector_aes_xts_plain512_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_xts_plain512_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2479,7 +2479,7 @@ static void luks_decrypt_sector_aes_xts_plain512_mk_ripemd160 (__global const u3
}
}
static void luks_decrypt_sector_aes_xts_plain512_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_decrypt_sector_aes_xts_plain512_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, const u32 sector, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2541,7 +2541,7 @@ static void luks_decrypt_sector_aes_xts_plain512_mk_ripemd160_final (__global co
// luks helper
static void luks_af_sha1_then_aes_decrypt (__global const luks_t *luks_bufs, __global luks_tmp_t *tmps, u32 *pt_buf, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_af_sha1_then_aes_decrypt (__global const luks_t *luks_bufs, __global luks_tmp_t *tmps, u32 *pt_buf, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
const u32 key_size = luks_bufs->key_size;
const u32 cipher_mode = luks_bufs->cipher_mode;
@ -2932,7 +2932,7 @@ static void luks_af_sha1_then_aes_decrypt (__global const luks_t *luks_bufs, __g
}
}
static void luks_af_sha256_then_aes_decrypt (__global const luks_t *luks_bufs, __global luks_tmp_t *tmps, u32 *pt_buf, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_af_sha256_then_aes_decrypt (__global const luks_t *luks_bufs, __global luks_tmp_t *tmps, u32 *pt_buf, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
const u32 key_size = luks_bufs->key_size;
const u32 cipher_mode = luks_bufs->cipher_mode;
@ -3291,7 +3291,7 @@ static void luks_af_sha256_then_aes_decrypt (__global const luks_t *luks_bufs, _
}
}
static void luks_af_sha512_then_aes_decrypt (__global const luks_t *luks_bufs, __global luks_tmp_t *tmps, u32 *pt_buf, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_af_sha512_then_aes_decrypt (__global const luks_t *luks_bufs, __global luks_tmp_t *tmps, u32 *pt_buf, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
const u32 key_size = luks_bufs->key_size;
const u32 cipher_mode = luks_bufs->cipher_mode;
@ -3669,7 +3669,7 @@ static void luks_af_sha512_then_aes_decrypt (__global const luks_t *luks_bufs, _
}
}
static void luks_af_ripemd160_then_aes_decrypt (__global const luks_t *luks_bufs, __global luks_tmp_t *tmps, u32 *pt_buf, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void luks_af_ripemd160_then_aes_decrypt (__global const luks_t *luks_bufs, __global luks_tmp_t *tmps, u32 *pt_buf, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
const u32 key_size = luks_bufs->key_size;
const u32 cipher_mode = luks_bufs->cipher_mode;

@ -45,7 +45,7 @@ __constant u64a AF_k_sha512[80] =
SHA512C4c, SHA512C4d, SHA512C4e, SHA512C4f,
};
static void AF_sha1_transform_S (const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4], u32 digest[5])
void AF_sha1_transform_S (const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4], u32 digest[5])
{
u32 A = digest[0];
u32 B = digest[1];
@ -173,7 +173,7 @@ static void AF_sha1_transform_S (const u32 w0[4], const u32 w1[4], const u32 w2[
digest[4] += E;
}
static void AF_sha256_transform_S (const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4], u32 digest[8])
void AF_sha256_transform_S (const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4], u32 digest[8])
{
u32 a = digest[0];
u32 b = digest[1];
@ -261,7 +261,7 @@ static void AF_sha256_transform_S (const u32 w0[4], const u32 w1[4], const u32 w
digest[7] += h;
}
static void AF_sha512_transform_S (const u64 w0[4], const u64 w1[4], const u64 w2[4], const u64 w3[4], u64 digest[8])
void AF_sha512_transform_S (const u64 w0[4], const u64 w1[4], const u64 w2[4], const u64 w3[4], u64 digest[8])
{
u64 a = digest[0];
u64 b = digest[1];
@ -349,7 +349,7 @@ static void AF_sha512_transform_S (const u64 w0[4], const u64 w1[4], const u64 w
digest[7] += h;
}
static void AF_ripemd160_transform_S (const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4], u32 digest[5])
void AF_ripemd160_transform_S (const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4], u32 digest[5])
{
u32 w0_t = w0[0];
u32 w1_t = w0[1];
@ -565,7 +565,7 @@ static void AF_ripemd160_transform_S (const u32 w0[4], const u32 w1[4], const u3
// diffuse functions
static void AF_sha1_diffuse16 (u32 *out)
void AF_sha1_diffuse16 (u32 *out)
{
u32 w0[4];
u32 w1[4];
@ -607,7 +607,7 @@ static void AF_sha1_diffuse16 (u32 *out)
out[3] = swap32_S (digest[3]);
}
static void AF_sha1_diffuse32 (u32 *out)
void AF_sha1_diffuse32 (u32 *out)
{
u32 w0[4];
u32 w1[4];
@ -681,7 +681,7 @@ static void AF_sha1_diffuse32 (u32 *out)
out[7] = swap32_S (digest[2]);
}
static void AF_sha1_diffuse64 (u32 *out)
void AF_sha1_diffuse64 (u32 *out)
{
u32 w0[4];
u32 w1[4];
@ -819,7 +819,7 @@ static void AF_sha1_diffuse64 (u32 *out)
out[15] = swap32_S (digest[0]);
}
static void AF_sha256_diffuse16 (u32 *out)
void AF_sha256_diffuse16 (u32 *out)
{
u32 w0[4];
u32 w1[4];
@ -864,7 +864,7 @@ static void AF_sha256_diffuse16 (u32 *out)
out[3] = swap32_S (digest[3]);
}
static void AF_sha256_diffuse32 (u32 *out)
void AF_sha256_diffuse32 (u32 *out)
{
u32 w0[4];
u32 w1[4];
@ -913,7 +913,7 @@ static void AF_sha256_diffuse32 (u32 *out)
out[7] = swap32_S (digest[7]);
}
static void AF_sha256_diffuse64 (u32 *out)
void AF_sha256_diffuse64 (u32 *out)
{
u32 w0[4];
u32 w1[4];
@ -1001,7 +1001,7 @@ static void AF_sha256_diffuse64 (u32 *out)
out[15] = swap32_S (digest[7]);
}
static void AF_sha512_diffuse16 (u32 *out)
void AF_sha512_diffuse16 (u32 *out)
{
u64 w0[4];
u64 w1[4];
@ -1046,7 +1046,7 @@ static void AF_sha512_diffuse16 (u32 *out)
out[3] = swap32_S (l32_from_64_S (digest[1]));
}
static void AF_sha512_diffuse32 (u32 *out)
void AF_sha512_diffuse32 (u32 *out)
{
u64 w0[4];
u64 w1[4];
@ -1095,7 +1095,7 @@ static void AF_sha512_diffuse32 (u32 *out)
out[7] = swap32_S (l32_from_64_S (digest[3]));
}
static void AF_sha512_diffuse64 (u32 *out)
void AF_sha512_diffuse64 (u32 *out)
{
u64 w0[4];
u64 w1[4];
@ -1152,7 +1152,7 @@ static void AF_sha512_diffuse64 (u32 *out)
out[15] = swap32_S (l32_from_64_S (digest[7]));
}
static void AF_ripemd160_diffuse16 (u32 *out)
void AF_ripemd160_diffuse16 (u32 *out)
{
u32 w0[4];
u32 w1[4];
@ -1194,7 +1194,7 @@ static void AF_ripemd160_diffuse16 (u32 *out)
out[3] = digest[3];
}
static void AF_ripemd160_diffuse32 (u32 *out)
void AF_ripemd160_diffuse32 (u32 *out)
{
u32 w0[4];
u32 w1[4];
@ -1268,7 +1268,7 @@ static void AF_ripemd160_diffuse32 (u32 *out)
out[7] = digest[2];
}
static void AF_ripemd160_diffuse64 (u32 *out)
void AF_ripemd160_diffuse64 (u32 *out)
{
u32 w0[4];
u32 w1[4];

@ -19,7 +19,7 @@ __constant u32a ESSIV_k_sha256[64] =
};
// basically a normal sha256_transform() but with a different name to avoid collisions with function nameing
static void ESSIV_sha256_transform_S (const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4], u32 digest[8])
void ESSIV_sha256_transform_S (const u32 w0[4], const u32 w1[4], const u32 w2[4], const u32 w3[4], u32 digest[8])
{
u32 a = digest[0];
u32 b = digest[1];
@ -107,7 +107,7 @@ static void ESSIV_sha256_transform_S (const u32 w0[4], const u32 w1[4], const u3
digest[7] += h;
}
static void ESSIV_sha256_init128 (u32 *key, u32 *essivhash)
void ESSIV_sha256_init128 (u32 *key, u32 *essivhash)
{
essivhash[0] = SHA256M_A;
essivhash[1] = SHA256M_B;
@ -152,7 +152,7 @@ static void ESSIV_sha256_init128 (u32 *key, u32 *essivhash)
essivhash[7] = swap32_S (essivhash[7]);
}
static void ESSIV_sha256_init256 (u32 *key, u32 *essivhash)
void ESSIV_sha256_init256 (u32 *key, u32 *essivhash)
{
essivhash[0] = SHA256M_A;
essivhash[1] = SHA256M_B;

@ -1,7 +1,7 @@
// cbc-essiv
static void serpent128_decrypt_cbc (const u32 *ks1, const u32 *in, u32 *out, u32 *essiv)
void serpent128_decrypt_cbc (const u32 *ks1, const u32 *in, u32 *out, u32 *essiv)
{
serpent128_decrypt (ks1, in, out);
@ -16,7 +16,7 @@ static void serpent128_decrypt_cbc (const u32 *ks1, const u32 *in, u32 *out, u32
essiv[3] = in[3];
}
static void serpent256_decrypt_cbc (const u32 *ks1, const u32 *in, u32 *out, u32 *essiv)
void serpent256_decrypt_cbc (const u32 *ks1, const u32 *in, u32 *out, u32 *essiv)
{
serpent256_decrypt (ks1, in, out);
@ -31,7 +31,7 @@ static void serpent256_decrypt_cbc (const u32 *ks1, const u32 *in, u32 *out, u32
essiv[3] = in[3];
}
static void luks_decrypt_sector_serpent_cbc_essiv128 (__global const u32 *in, u32 *out, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_essiv128 (__global const u32 *in, u32 *out, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -65,7 +65,7 @@ static void luks_decrypt_sector_serpent_cbc_essiv128 (__global const u32 *in, u3
}
}
static void luks_decrypt_sector_serpent_cbc_essiv128_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_essiv128_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -102,7 +102,7 @@ static void luks_decrypt_sector_serpent_cbc_essiv128_mk_sha1 (__global const u32
}
}
static void luks_decrypt_sector_serpent_cbc_essiv128_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_essiv128_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -162,7 +162,7 @@ static void luks_decrypt_sector_serpent_cbc_essiv128_mk_sha1_final (__global con
}
}
static void luks_decrypt_sector_serpent_cbc_essiv128_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_essiv128_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -199,7 +199,7 @@ static void luks_decrypt_sector_serpent_cbc_essiv128_mk_sha256 (__global const u
}
}
static void luks_decrypt_sector_serpent_cbc_essiv128_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_essiv128_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -259,7 +259,7 @@ static void luks_decrypt_sector_serpent_cbc_essiv128_mk_sha256_final (__global c
}
}
static void luks_decrypt_sector_serpent_cbc_essiv128_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_essiv128_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -296,7 +296,7 @@ static void luks_decrypt_sector_serpent_cbc_essiv128_mk_sha512 (__global const u
}
}
static void luks_decrypt_sector_serpent_cbc_essiv128_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_essiv128_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -356,7 +356,7 @@ static void luks_decrypt_sector_serpent_cbc_essiv128_mk_sha512_final (__global c
}
}
static void luks_decrypt_sector_serpent_cbc_essiv128_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_essiv128_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -393,7 +393,7 @@ static void luks_decrypt_sector_serpent_cbc_essiv128_mk_ripemd160 (__global cons
}
}
static void luks_decrypt_sector_serpent_cbc_essiv128_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_essiv128_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -453,7 +453,7 @@ static void luks_decrypt_sector_serpent_cbc_essiv128_mk_ripemd160_final (__globa
}
}
static void luks_decrypt_sector_serpent_cbc_essiv256 (__global const u32 *in, u32 *out, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_essiv256 (__global const u32 *in, u32 *out, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -487,7 +487,7 @@ static void luks_decrypt_sector_serpent_cbc_essiv256 (__global const u32 *in, u3
}
}
static void luks_decrypt_sector_serpent_cbc_essiv256_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_essiv256_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -524,7 +524,7 @@ static void luks_decrypt_sector_serpent_cbc_essiv256_mk_sha1 (__global const u32
}
}
static void luks_decrypt_sector_serpent_cbc_essiv256_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_essiv256_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -584,7 +584,7 @@ static void luks_decrypt_sector_serpent_cbc_essiv256_mk_sha1_final (__global con
}
}
static void luks_decrypt_sector_serpent_cbc_essiv256_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_essiv256_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -621,7 +621,7 @@ static void luks_decrypt_sector_serpent_cbc_essiv256_mk_sha256 (__global const u
}
}
static void luks_decrypt_sector_serpent_cbc_essiv256_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_essiv256_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -681,7 +681,7 @@ static void luks_decrypt_sector_serpent_cbc_essiv256_mk_sha256_final (__global c
}
}
static void luks_decrypt_sector_serpent_cbc_essiv256_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_essiv256_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -718,7 +718,7 @@ static void luks_decrypt_sector_serpent_cbc_essiv256_mk_sha512 (__global const u
}
}
static void luks_decrypt_sector_serpent_cbc_essiv256_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_essiv256_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -778,7 +778,7 @@ static void luks_decrypt_sector_serpent_cbc_essiv256_mk_sha512_final (__global c
}
}
static void luks_decrypt_sector_serpent_cbc_essiv256_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_essiv256_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -815,7 +815,7 @@ static void luks_decrypt_sector_serpent_cbc_essiv256_mk_ripemd160 (__global cons
}
}
static void luks_decrypt_sector_serpent_cbc_essiv256_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_essiv256_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -877,7 +877,7 @@ static void luks_decrypt_sector_serpent_cbc_essiv256_mk_ripemd160_final (__globa
// cbc-plain
static void luks_decrypt_sector_serpent_cbc_plain128 (__global const u32 *in, u32 *out, const u32 *ks1, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_plain128 (__global const u32 *in, u32 *out, const u32 *ks1, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -907,7 +907,7 @@ static void luks_decrypt_sector_serpent_cbc_plain128 (__global const u32 *in, u3
}
}
static void luks_decrypt_sector_serpent_cbc_plain128_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *ks1, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_plain128_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *ks1, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -940,7 +940,7 @@ static void luks_decrypt_sector_serpent_cbc_plain128_mk_sha1 (__global const u32
}
}
static void luks_decrypt_sector_serpent_cbc_plain128_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *ks1, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_plain128_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *ks1, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -996,7 +996,7 @@ static void luks_decrypt_sector_serpent_cbc_plain128_mk_sha1_final (__global con
}
}
static void luks_decrypt_sector_serpent_cbc_plain128_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *ks1, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_plain128_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *ks1, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1029,7 +1029,7 @@ static void luks_decrypt_sector_serpent_cbc_plain128_mk_sha256 (__global const u
}
}
static void luks_decrypt_sector_serpent_cbc_plain128_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *ks1, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_plain128_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *ks1, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1085,7 +1085,7 @@ static void luks_decrypt_sector_serpent_cbc_plain128_mk_sha256_final (__global c
}
}
static void luks_decrypt_sector_serpent_cbc_plain128_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *ks1, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_plain128_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *ks1, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1118,7 +1118,7 @@ static void luks_decrypt_sector_serpent_cbc_plain128_mk_sha512 (__global const u
}
}
static void luks_decrypt_sector_serpent_cbc_plain128_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *ks1, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_plain128_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *ks1, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1174,7 +1174,7 @@ static void luks_decrypt_sector_serpent_cbc_plain128_mk_sha512_final (__global c
}
}
static void luks_decrypt_sector_serpent_cbc_plain128_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *ks1, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_plain128_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *ks1, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1207,7 +1207,7 @@ static void luks_decrypt_sector_serpent_cbc_plain128_mk_ripemd160 (__global cons
}
}
static void luks_decrypt_sector_serpent_cbc_plain128_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *ks1, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_plain128_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *ks1, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1263,7 +1263,7 @@ static void luks_decrypt_sector_serpent_cbc_plain128_mk_ripemd160_final (__globa
}
}
static void luks_decrypt_sector_serpent_cbc_plain256 (__global const u32 *in, u32 *out, const u32 *ks1, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_plain256 (__global const u32 *in, u32 *out, const u32 *ks1, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1293,7 +1293,7 @@ static void luks_decrypt_sector_serpent_cbc_plain256 (__global const u32 *in, u3
}
}
static void luks_decrypt_sector_serpent_cbc_plain256_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *ks1, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_plain256_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *ks1, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1326,7 +1326,7 @@ static void luks_decrypt_sector_serpent_cbc_plain256_mk_sha1 (__global const u32
}
}
static void luks_decrypt_sector_serpent_cbc_plain256_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *ks1, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_plain256_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *ks1, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1382,7 +1382,7 @@ static void luks_decrypt_sector_serpent_cbc_plain256_mk_sha1_final (__global con
}
}
static void luks_decrypt_sector_serpent_cbc_plain256_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *ks1, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_plain256_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *ks1, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1415,7 +1415,7 @@ static void luks_decrypt_sector_serpent_cbc_plain256_mk_sha256 (__global const u
}
}
static void luks_decrypt_sector_serpent_cbc_plain256_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *ks1, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_plain256_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *ks1, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1471,7 +1471,7 @@ static void luks_decrypt_sector_serpent_cbc_plain256_mk_sha256_final (__global c
}
}
static void luks_decrypt_sector_serpent_cbc_plain256_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *ks1, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_plain256_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *ks1, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1504,7 +1504,7 @@ static void luks_decrypt_sector_serpent_cbc_plain256_mk_sha512 (__global const u
}
}
static void luks_decrypt_sector_serpent_cbc_plain256_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *ks1, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_plain256_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *ks1, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1560,7 +1560,7 @@ static void luks_decrypt_sector_serpent_cbc_plain256_mk_sha512_final (__global c
}
}
static void luks_decrypt_sector_serpent_cbc_plain256_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *ks1, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_plain256_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *ks1, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1593,7 +1593,7 @@ static void luks_decrypt_sector_serpent_cbc_plain256_mk_ripemd160 (__global cons
}
}
static void luks_decrypt_sector_serpent_cbc_plain256_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *ks1, volatile const u32 sector)
void luks_decrypt_sector_serpent_cbc_plain256_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *ks1, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1651,7 +1651,7 @@ static void luks_decrypt_sector_serpent_cbc_plain256_mk_ripemd160_final (__globa
// xts-plain
static void serpent128_decrypt_xts (const u32 *ks1, const u32 *in, u32 *out, u32 *T)
void serpent128_decrypt_xts (const u32 *ks1, const u32 *in, u32 *out, u32 *T)
{
out[0] = in[0];
out[1] = in[1];
@ -1673,7 +1673,7 @@ static void serpent128_decrypt_xts (const u32 *ks1, const u32 *in, u32 *out, u32
xts_mul2 (T, T);
}
static void serpent256_decrypt_xts (const u32 *ks1, const u32 *in, u32 *out, u32 *T)
void serpent256_decrypt_xts (const u32 *ks1, const u32 *in, u32 *out, u32 *T)
{
out[0] = in[0];
out[1] = in[1];
@ -1695,7 +1695,7 @@ static void serpent256_decrypt_xts (const u32 *ks1, const u32 *in, u32 *out, u32
xts_mul2 (T, T);
}
static void luks_decrypt_sector_serpent_xts_plain256 (__global const u32 *in, u32 *out, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_xts_plain256 (__global const u32 *in, u32 *out, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1729,7 +1729,7 @@ static void luks_decrypt_sector_serpent_xts_plain256 (__global const u32 *in, u3
}
}
static void luks_decrypt_sector_serpent_xts_plain256_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_xts_plain256_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1766,7 +1766,7 @@ static void luks_decrypt_sector_serpent_xts_plain256_mk_sha1 (__global const u32
}
}
static void luks_decrypt_sector_serpent_xts_plain256_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_xts_plain256_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1826,7 +1826,7 @@ static void luks_decrypt_sector_serpent_xts_plain256_mk_sha1_final (__global con
}
}
static void luks_decrypt_sector_serpent_xts_plain256_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_xts_plain256_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1863,7 +1863,7 @@ static void luks_decrypt_sector_serpent_xts_plain256_mk_sha256 (__global const u
}
}
static void luks_decrypt_sector_serpent_xts_plain256_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_xts_plain256_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1923,7 +1923,7 @@ static void luks_decrypt_sector_serpent_xts_plain256_mk_sha256_final (__global c
}
}
static void luks_decrypt_sector_serpent_xts_plain256_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_xts_plain256_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1960,7 +1960,7 @@ static void luks_decrypt_sector_serpent_xts_plain256_mk_sha512 (__global const u
}
}
static void luks_decrypt_sector_serpent_xts_plain256_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_xts_plain256_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2020,7 +2020,7 @@ static void luks_decrypt_sector_serpent_xts_plain256_mk_sha512_final (__global c
}
}
static void luks_decrypt_sector_serpent_xts_plain256_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_xts_plain256_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2057,7 +2057,7 @@ static void luks_decrypt_sector_serpent_xts_plain256_mk_ripemd160 (__global cons
}
}
static void luks_decrypt_sector_serpent_xts_plain256_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_xts_plain256_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2117,7 +2117,7 @@ static void luks_decrypt_sector_serpent_xts_plain256_mk_ripemd160_final (__globa
}
}
static void luks_decrypt_sector_serpent_xts_plain512 (__global const u32 *in, u32 *out, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_xts_plain512 (__global const u32 *in, u32 *out, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2151,7 +2151,7 @@ static void luks_decrypt_sector_serpent_xts_plain512 (__global const u32 *in, u3
}
}
static void luks_decrypt_sector_serpent_xts_plain512_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_xts_plain512_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2188,7 +2188,7 @@ static void luks_decrypt_sector_serpent_xts_plain512_mk_sha1 (__global const u32
}
}
static void luks_decrypt_sector_serpent_xts_plain512_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_xts_plain512_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2248,7 +2248,7 @@ static void luks_decrypt_sector_serpent_xts_plain512_mk_sha1_final (__global con
}
}
static void luks_decrypt_sector_serpent_xts_plain512_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_xts_plain512_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2285,7 +2285,7 @@ static void luks_decrypt_sector_serpent_xts_plain512_mk_sha256 (__global const u
}
}
static void luks_decrypt_sector_serpent_xts_plain512_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_xts_plain512_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2345,7 +2345,7 @@ static void luks_decrypt_sector_serpent_xts_plain512_mk_sha256_final (__global c
}
}
static void luks_decrypt_sector_serpent_xts_plain512_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_xts_plain512_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2382,7 +2382,7 @@ static void luks_decrypt_sector_serpent_xts_plain512_mk_sha512 (__global const u
}
}
static void luks_decrypt_sector_serpent_xts_plain512_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_xts_plain512_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2442,7 +2442,7 @@ static void luks_decrypt_sector_serpent_xts_plain512_mk_sha512_final (__global c
}
}
static void luks_decrypt_sector_serpent_xts_plain512_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_xts_plain512_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2479,7 +2479,7 @@ static void luks_decrypt_sector_serpent_xts_plain512_mk_ripemd160 (__global cons
}
}
static void luks_decrypt_sector_serpent_xts_plain512_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
void luks_decrypt_sector_serpent_xts_plain512_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *ks1, const u32 *ks2, volatile const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2541,7 +2541,7 @@ static void luks_decrypt_sector_serpent_xts_plain512_mk_ripemd160_final (__globa
// luks helper
static void luks_af_sha1_then_serpent_decrypt (__global const luks_t *luks_bufs, __global luks_tmp_t *tmps, u32 *pt_buf)
void luks_af_sha1_then_serpent_decrypt (__global const luks_t *luks_bufs, __global luks_tmp_t *tmps, u32 *pt_buf)
{
const u32 key_size = luks_bufs->key_size;
const u32 cipher_mode = luks_bufs->cipher_mode;
@ -2900,7 +2900,7 @@ static void luks_af_sha1_then_serpent_decrypt (__global const luks_t *luks_bufs,
}
}
static void luks_af_sha256_then_serpent_decrypt (__global const luks_t *luks_bufs, __global luks_tmp_t *tmps, u32 *pt_buf)
void luks_af_sha256_then_serpent_decrypt (__global const luks_t *luks_bufs, __global luks_tmp_t *tmps, u32 *pt_buf)
{
const u32 key_size = luks_bufs->key_size;
const u32 cipher_mode = luks_bufs->cipher_mode;
@ -3259,7 +3259,7 @@ static void luks_af_sha256_then_serpent_decrypt (__global const luks_t *luks_buf
}
}
static void luks_af_sha512_then_serpent_decrypt (__global const luks_t *luks_bufs, __global luks_tmp_t *tmps, u32 *pt_buf)
void luks_af_sha512_then_serpent_decrypt (__global const luks_t *luks_bufs, __global luks_tmp_t *tmps, u32 *pt_buf)
{
const u32 key_size = luks_bufs->key_size;
const u32 cipher_mode = luks_bufs->cipher_mode;
@ -3637,7 +3637,7 @@ static void luks_af_sha512_then_serpent_decrypt (__global const luks_t *luks_buf
}
}
static void luks_af_ripemd160_then_serpent_decrypt (__global const luks_t *luks_bufs, __global luks_tmp_t *tmps, u32 *pt_buf)
void luks_af_ripemd160_then_serpent_decrypt (__global const luks_t *luks_bufs, __global luks_tmp_t *tmps, u32 *pt_buf)
{
const u32 key_size = luks_bufs->key_size;
const u32 cipher_mode = luks_bufs->cipher_mode;

@ -1,7 +1,7 @@
// cbc-essiv
static void twofish128_decrypt_cbc (const u32 *sk1, const u32 *lk1, const u32 *in, u32 *out, u32 *essiv)
void twofish128_decrypt_cbc (const u32 *sk1, const u32 *lk1, const u32 *in, u32 *out, u32 *essiv)
{
twofish128_decrypt (sk1, lk1, in, out);
@ -16,7 +16,7 @@ static void twofish128_decrypt_cbc (const u32 *sk1, const u32 *lk1, const u32 *i
essiv[3] = in[3];
}
static void twofish256_decrypt_cbc (const u32 *sk1, const u32 *lk1, const u32 *in, u32 *out, u32 *essiv)
void twofish256_decrypt_cbc (const u32 *sk1, const u32 *lk1, const u32 *in, u32 *out, u32 *essiv)
{
twofish256_decrypt (sk1, lk1, in, out);
@ -31,7 +31,7 @@ static void twofish256_decrypt_cbc (const u32 *sk1, const u32 *lk1, const u32 *i
essiv[3] = in[3];
}
static void luks_decrypt_sector_twofish_cbc_essiv128 (__global const u32 *in, u32 *out, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_cbc_essiv128 (__global const u32 *in, u32 *out, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -65,7 +65,7 @@ static void luks_decrypt_sector_twofish_cbc_essiv128 (__global const u32 *in, u3
}
}
static void luks_decrypt_sector_twofish_cbc_essiv128_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_cbc_essiv128_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -102,7 +102,7 @@ static void luks_decrypt_sector_twofish_cbc_essiv128_mk_sha1 (__global const u32
}
}
static void luks_decrypt_sector_twofish_cbc_essiv128_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_cbc_essiv128_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -162,7 +162,7 @@ static void luks_decrypt_sector_twofish_cbc_essiv128_mk_sha1_final (__global con
}
}
static void luks_decrypt_sector_twofish_cbc_essiv128_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_cbc_essiv128_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -199,7 +199,7 @@ static void luks_decrypt_sector_twofish_cbc_essiv128_mk_sha256 (__global const u
}
}
static void luks_decrypt_sector_twofish_cbc_essiv128_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_cbc_essiv128_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -259,7 +259,7 @@ static void luks_decrypt_sector_twofish_cbc_essiv128_mk_sha256_final (__global c
}
}
static void luks_decrypt_sector_twofish_cbc_essiv128_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_cbc_essiv128_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -296,7 +296,7 @@ static void luks_decrypt_sector_twofish_cbc_essiv128_mk_sha512 (__global const u
}
}
static void luks_decrypt_sector_twofish_cbc_essiv128_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_cbc_essiv128_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -356,7 +356,7 @@ static void luks_decrypt_sector_twofish_cbc_essiv128_mk_sha512_final (__global c
}
}
static void luks_decrypt_sector_twofish_cbc_essiv128_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_cbc_essiv128_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -393,7 +393,7 @@ static void luks_decrypt_sector_twofish_cbc_essiv128_mk_ripemd160 (__global cons
}
}
static void luks_decrypt_sector_twofish_cbc_essiv128_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_cbc_essiv128_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -453,7 +453,7 @@ static void luks_decrypt_sector_twofish_cbc_essiv128_mk_ripemd160_final (__globa
}
}
static void luks_decrypt_sector_twofish_cbc_essiv256 (__global const u32 *in, u32 *out, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_cbc_essiv256 (__global const u32 *in, u32 *out, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -487,7 +487,7 @@ static void luks_decrypt_sector_twofish_cbc_essiv256 (__global const u32 *in, u3
}
}
static void luks_decrypt_sector_twofish_cbc_essiv256_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_cbc_essiv256_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -524,7 +524,7 @@ static void luks_decrypt_sector_twofish_cbc_essiv256_mk_sha1 (__global const u32
}
}
static void luks_decrypt_sector_twofish_cbc_essiv256_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_cbc_essiv256_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -584,7 +584,7 @@ static void luks_decrypt_sector_twofish_cbc_essiv256_mk_sha1_final (__global con
}
}
static void luks_decrypt_sector_twofish_cbc_essiv256_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_cbc_essiv256_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -621,7 +621,7 @@ static void luks_decrypt_sector_twofish_cbc_essiv256_mk_sha256 (__global const u
}
}
static void luks_decrypt_sector_twofish_cbc_essiv256_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_cbc_essiv256_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -681,7 +681,7 @@ static void luks_decrypt_sector_twofish_cbc_essiv256_mk_sha256_final (__global c
}
}
static void luks_decrypt_sector_twofish_cbc_essiv256_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_cbc_essiv256_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -718,7 +718,7 @@ static void luks_decrypt_sector_twofish_cbc_essiv256_mk_sha512 (__global const u
}
}
static void luks_decrypt_sector_twofish_cbc_essiv256_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_cbc_essiv256_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -778,7 +778,7 @@ static void luks_decrypt_sector_twofish_cbc_essiv256_mk_sha512_final (__global c
}
}
static void luks_decrypt_sector_twofish_cbc_essiv256_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_cbc_essiv256_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -815,7 +815,7 @@ static void luks_decrypt_sector_twofish_cbc_essiv256_mk_ripemd160 (__global cons
}
}
static void luks_decrypt_sector_twofish_cbc_essiv256_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_cbc_essiv256_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -877,7 +877,7 @@ static void luks_decrypt_sector_twofish_cbc_essiv256_mk_ripemd160_final (__globa
// cbc-plain
static void luks_decrypt_sector_twofish_cbc_plain128 (__global const u32 *in, u32 *out, const u32 *sk1, const u32 *lk1, const u32 sector)
void luks_decrypt_sector_twofish_cbc_plain128 (__global const u32 *in, u32 *out, const u32 *sk1, const u32 *lk1, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -907,7 +907,7 @@ static void luks_decrypt_sector_twofish_cbc_plain128 (__global const u32 *in, u3
}
}
static void luks_decrypt_sector_twofish_cbc_plain128_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 sector)
void luks_decrypt_sector_twofish_cbc_plain128_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -940,7 +940,7 @@ static void luks_decrypt_sector_twofish_cbc_plain128_mk_sha1 (__global const u32
}
}
static void luks_decrypt_sector_twofish_cbc_plain128_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 sector)
void luks_decrypt_sector_twofish_cbc_plain128_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -996,7 +996,7 @@ static void luks_decrypt_sector_twofish_cbc_plain128_mk_sha1_final (__global con
}
}
static void luks_decrypt_sector_twofish_cbc_plain128_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 sector)
void luks_decrypt_sector_twofish_cbc_plain128_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1029,7 +1029,7 @@ static void luks_decrypt_sector_twofish_cbc_plain128_mk_sha256 (__global const u
}
}
static void luks_decrypt_sector_twofish_cbc_plain128_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 sector)
void luks_decrypt_sector_twofish_cbc_plain128_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1085,7 +1085,7 @@ static void luks_decrypt_sector_twofish_cbc_plain128_mk_sha256_final (__global c
}
}
static void luks_decrypt_sector_twofish_cbc_plain128_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 sector)
void luks_decrypt_sector_twofish_cbc_plain128_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1118,7 +1118,7 @@ static void luks_decrypt_sector_twofish_cbc_plain128_mk_sha512 (__global const u
}
}
static void luks_decrypt_sector_twofish_cbc_plain128_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 sector)
void luks_decrypt_sector_twofish_cbc_plain128_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1174,7 +1174,7 @@ static void luks_decrypt_sector_twofish_cbc_plain128_mk_sha512_final (__global c
}
}
static void luks_decrypt_sector_twofish_cbc_plain128_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 sector)
void luks_decrypt_sector_twofish_cbc_plain128_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1207,7 +1207,7 @@ static void luks_decrypt_sector_twofish_cbc_plain128_mk_ripemd160 (__global cons
}
}
static void luks_decrypt_sector_twofish_cbc_plain128_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 sector)
void luks_decrypt_sector_twofish_cbc_plain128_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1263,7 +1263,7 @@ static void luks_decrypt_sector_twofish_cbc_plain128_mk_ripemd160_final (__globa
}
}
static void luks_decrypt_sector_twofish_cbc_plain256 (__global const u32 *in, u32 *out, const u32 *sk1, const u32 *lk1, const u32 sector)
void luks_decrypt_sector_twofish_cbc_plain256 (__global const u32 *in, u32 *out, const u32 *sk1, const u32 *lk1, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1293,7 +1293,7 @@ static void luks_decrypt_sector_twofish_cbc_plain256 (__global const u32 *in, u3
}
}
static void luks_decrypt_sector_twofish_cbc_plain256_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 sector)
void luks_decrypt_sector_twofish_cbc_plain256_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1326,7 +1326,7 @@ static void luks_decrypt_sector_twofish_cbc_plain256_mk_sha1 (__global const u32
}
}
static void luks_decrypt_sector_twofish_cbc_plain256_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 sector)
void luks_decrypt_sector_twofish_cbc_plain256_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1382,7 +1382,7 @@ static void luks_decrypt_sector_twofish_cbc_plain256_mk_sha1_final (__global con
}
}
static void luks_decrypt_sector_twofish_cbc_plain256_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 sector)
void luks_decrypt_sector_twofish_cbc_plain256_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1415,7 +1415,7 @@ static void luks_decrypt_sector_twofish_cbc_plain256_mk_sha256 (__global const u
}
}
static void luks_decrypt_sector_twofish_cbc_plain256_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 sector)
void luks_decrypt_sector_twofish_cbc_plain256_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1471,7 +1471,7 @@ static void luks_decrypt_sector_twofish_cbc_plain256_mk_sha256_final (__global c
}
}
static void luks_decrypt_sector_twofish_cbc_plain256_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 sector)
void luks_decrypt_sector_twofish_cbc_plain256_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1504,7 +1504,7 @@ static void luks_decrypt_sector_twofish_cbc_plain256_mk_sha512 (__global const u
}
}
static void luks_decrypt_sector_twofish_cbc_plain256_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 sector)
void luks_decrypt_sector_twofish_cbc_plain256_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1560,7 +1560,7 @@ static void luks_decrypt_sector_twofish_cbc_plain256_mk_sha512_final (__global c
}
}
static void luks_decrypt_sector_twofish_cbc_plain256_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 sector)
void luks_decrypt_sector_twofish_cbc_plain256_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1593,7 +1593,7 @@ static void luks_decrypt_sector_twofish_cbc_plain256_mk_ripemd160 (__global cons
}
}
static void luks_decrypt_sector_twofish_cbc_plain256_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 sector)
void luks_decrypt_sector_twofish_cbc_plain256_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1651,7 +1651,7 @@ static void luks_decrypt_sector_twofish_cbc_plain256_mk_ripemd160_final (__globa
// xts-plain
static void twofish128_decrypt_xts (const u32 *sk1, const u32 *lk1, const u32 *in, u32 *out, u32 *T)
void twofish128_decrypt_xts (const u32 *sk1, const u32 *lk1, const u32 *in, u32 *out, u32 *T)
{
out[0] = in[0];
out[1] = in[1];
@ -1673,7 +1673,7 @@ static void twofish128_decrypt_xts (const u32 *sk1, const u32 *lk1, const u32 *i
xts_mul2 (T, T);
}
static void twofish256_decrypt_xts (const u32 *sk1, const u32 *lk1, const u32 *in, u32 *out, u32 *T)
void twofish256_decrypt_xts (const u32 *sk1, const u32 *lk1, const u32 *in, u32 *out, u32 *T)
{
out[0] = in[0];
out[1] = in[1];
@ -1695,7 +1695,7 @@ static void twofish256_decrypt_xts (const u32 *sk1, const u32 *lk1, const u32 *i
xts_mul2 (T, T);
}
static void luks_decrypt_sector_twofish_xts_plain256 (__global const u32 *in, u32 *out, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_xts_plain256 (__global const u32 *in, u32 *out, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1729,7 +1729,7 @@ static void luks_decrypt_sector_twofish_xts_plain256 (__global const u32 *in, u3
}
}
static void luks_decrypt_sector_twofish_xts_plain256_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_xts_plain256_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1766,7 +1766,7 @@ static void luks_decrypt_sector_twofish_xts_plain256_mk_sha1 (__global const u32
}
}
static void luks_decrypt_sector_twofish_xts_plain256_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_xts_plain256_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1826,7 +1826,7 @@ static void luks_decrypt_sector_twofish_xts_plain256_mk_sha1_final (__global con
}
}
static void luks_decrypt_sector_twofish_xts_plain256_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_xts_plain256_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1863,7 +1863,7 @@ static void luks_decrypt_sector_twofish_xts_plain256_mk_sha256 (__global const u
}
}
static void luks_decrypt_sector_twofish_xts_plain256_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_xts_plain256_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1923,7 +1923,7 @@ static void luks_decrypt_sector_twofish_xts_plain256_mk_sha256_final (__global c
}
}
static void luks_decrypt_sector_twofish_xts_plain256_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_xts_plain256_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -1960,7 +1960,7 @@ static void luks_decrypt_sector_twofish_xts_plain256_mk_sha512 (__global const u
}
}
static void luks_decrypt_sector_twofish_xts_plain256_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_xts_plain256_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2020,7 +2020,7 @@ static void luks_decrypt_sector_twofish_xts_plain256_mk_sha512_final (__global c
}
}
static void luks_decrypt_sector_twofish_xts_plain256_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_xts_plain256_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2057,7 +2057,7 @@ static void luks_decrypt_sector_twofish_xts_plain256_mk_ripemd160 (__global cons
}
}
static void luks_decrypt_sector_twofish_xts_plain256_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_xts_plain256_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2117,7 +2117,7 @@ static void luks_decrypt_sector_twofish_xts_plain256_mk_ripemd160_final (__globa
}
}
static void luks_decrypt_sector_twofish_xts_plain512 (__global const u32 *in, u32 *out, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_xts_plain512 (__global const u32 *in, u32 *out, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2151,7 +2151,7 @@ static void luks_decrypt_sector_twofish_xts_plain512 (__global const u32 *in, u3
}
}
static void luks_decrypt_sector_twofish_xts_plain512_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_xts_plain512_mk_sha1 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2188,7 +2188,7 @@ static void luks_decrypt_sector_twofish_xts_plain512_mk_sha1 (__global const u32
}
}
static void luks_decrypt_sector_twofish_xts_plain512_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_xts_plain512_mk_sha1_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2248,7 +2248,7 @@ static void luks_decrypt_sector_twofish_xts_plain512_mk_sha1_final (__global con
}
}
static void luks_decrypt_sector_twofish_xts_plain512_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_xts_plain512_mk_sha256 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2285,7 +2285,7 @@ static void luks_decrypt_sector_twofish_xts_plain512_mk_sha256 (__global const u
}
}
static void luks_decrypt_sector_twofish_xts_plain512_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_xts_plain512_mk_sha256_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2345,7 +2345,7 @@ static void luks_decrypt_sector_twofish_xts_plain512_mk_sha256_final (__global c
}
}
static void luks_decrypt_sector_twofish_xts_plain512_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_xts_plain512_mk_sha512 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2382,7 +2382,7 @@ static void luks_decrypt_sector_twofish_xts_plain512_mk_sha512 (__global const u
}
}
static void luks_decrypt_sector_twofish_xts_plain512_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_xts_plain512_mk_sha512_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2442,7 +2442,7 @@ static void luks_decrypt_sector_twofish_xts_plain512_mk_sha512_final (__global c
}
}
static void luks_decrypt_sector_twofish_xts_plain512_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_xts_plain512_mk_ripemd160 (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2479,7 +2479,7 @@ static void luks_decrypt_sector_twofish_xts_plain512_mk_ripemd160 (__global cons
}
}
static void luks_decrypt_sector_twofish_xts_plain512_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
void luks_decrypt_sector_twofish_xts_plain512_mk_ripemd160_final (__global const u32 *in, u32 *mk, const u32 *sk1, const u32 *lk1, const u32 *sk2, const u32 *lk2, const u32 sector)
{
u32 S[4] = { sector, 0, 0, 0 };
@ -2541,7 +2541,7 @@ static void luks_decrypt_sector_twofish_xts_plain512_mk_ripemd160_final (__globa
// luks helper
static void luks_af_sha1_then_twofish_decrypt (__global const luks_t *luks_bufs, __global luks_tmp_t *tmps, u32 *pt_buf)
void luks_af_sha1_then_twofish_decrypt (__global const luks_t *luks_bufs, __global luks_tmp_t *tmps, u32 *pt_buf)
{
const u32 key_size = luks_bufs->key_size;
const u32 cipher_mode = luks_bufs->cipher_mode;
@ -2900,7 +2900,7 @@ static void luks_af_sha1_then_twofish_decrypt (__global const luks_t *luks_bufs,
}
}
static void luks_af_sha256_then_twofish_decrypt (__global const luks_t *luks_bufs, __global luks_tmp_t *tmps, u32 *pt_buf)
void luks_af_sha256_then_twofish_decrypt (__global const luks_t *luks_bufs, __global luks_tmp_t *tmps, u32 *pt_buf)
{
const u32 key_size = luks_bufs->key_size;
const u32 cipher_mode = luks_bufs->cipher_mode;
@ -3259,7 +3259,7 @@ static void luks_af_sha256_then_twofish_decrypt (__global const luks_t *luks_buf
}
}
static void luks_af_sha512_then_twofish_decrypt (__global const luks_t *luks_bufs, __global luks_tmp_t *tmps, u32 *pt_buf)
void luks_af_sha512_then_twofish_decrypt (__global const luks_t *luks_bufs, __global luks_tmp_t *tmps, u32 *pt_buf)
{
const u32 key_size = luks_bufs->key_size;
const u32 cipher_mode = luks_bufs->cipher_mode;
@ -3637,7 +3637,7 @@ static void luks_af_sha512_then_twofish_decrypt (__global const luks_t *luks_buf
}
}
static void luks_af_ripemd160_then_twofish_decrypt (__global const luks_t *luks_bufs, __global luks_tmp_t *tmps, u32 *pt_buf)
void luks_af_ripemd160_then_twofish_decrypt (__global const luks_t *luks_bufs, __global luks_tmp_t *tmps, u32 *pt_buf)
{
const u32 key_size = luks_bufs->key_size;
const u32 cipher_mode = luks_bufs->cipher_mode;

@ -1,4 +1,4 @@
static void xts_mul2 (u32 *in, u32 *out)
void xts_mul2 (u32 *in, u32 *out)
{
const u32 c = in[3] >> 31;

@ -5,7 +5,7 @@
#define MAYBE_UNUSED
static u32 generate_cmask (const u32 value)
u32 generate_cmask (const u32 value)
{
const u32 rmask = ((value & 0x40404040u) >> 1u)
& ~((value & 0x80808080u) >> 2u);
@ -16,7 +16,7 @@ static u32 generate_cmask (const u32 value)
return rmask & ~hmask & lmask;
}
static void append_four_byte (const u32 *buf_src, const int off_src, u32 *buf_dst, const int off_dst)
void append_four_byte (const u32 *buf_src, const int off_src, u32 *buf_dst, const int off_dst)
{
const int sd = off_src / 4;
const int sm = off_src & 3;
@ -39,7 +39,7 @@ static void append_four_byte (const u32 *buf_src, const int off_src, u32 *buf_ds
buf_dst[dd + 1] |= t1;
}
static void append_three_byte (const u32 *buf_src, const int off_src, u32 *buf_dst, const int off_dst)
void append_three_byte (const u32 *buf_src, const int off_src, u32 *buf_dst, const int off_dst)
{
const int sd = off_src / 4;
const int sm = off_src & 3;
@ -62,7 +62,7 @@ static void append_three_byte (const u32 *buf_src, const int off_src, u32 *buf_d
buf_dst[dd + 1] |= t1;
}
static void append_two_byte (const u32 *buf_src, const int off_src, u32 *buf_dst, const int off_dst)
void append_two_byte (const u32 *buf_src, const int off_src, u32 *buf_dst, const int off_dst)
{
const int sd = off_src / 4;
const int sm = off_src & 3;
@ -85,7 +85,7 @@ static void append_two_byte (const u32 *buf_src, const int off_src, u32 *buf_dst
buf_dst[dd + 1] |= t1;
}
static void append_one_byte (const u32 *buf_src, const int off_src, u32 *buf_dst, const int off_dst)
void append_one_byte (const u32 *buf_src, const int off_src, u32 *buf_dst, const int off_dst)
{
const int sd = off_src / 4;
const int sm = off_src & 3;
@ -104,7 +104,7 @@ static void append_one_byte (const u32 *buf_src, const int off_src, u32 *buf_dst
buf_dst[dd] |= t;
}
static void append_block (const u32 *buf_src, const int off_src, u32 *buf_dst, const int off_dst, const int len)
void append_block (const u32 *buf_src, const int off_src, u32 *buf_dst, const int off_dst, const int len)
{
int i;
@ -123,7 +123,7 @@ static void append_block (const u32 *buf_src, const int off_src, u32 *buf_dst, c
}
}
static void exchange_byte (u32 *buf, const int off_src, const int off_dst)
void exchange_byte (u32 *buf, const int off_src, const int off_dst)
{
u8 *ptr = (u8 *) buf;
@ -162,7 +162,7 @@ static void exchange_byte (u32 *buf, const int off_src, const int off_dst)
*/
}
static int mangle_lrest (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u32 *buf, const int len)
int mangle_lrest (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u32 *buf, const int len)
{
for (int i = 0, idx = 0; i < len; i += 4, idx += 1)
{
@ -174,7 +174,7 @@ static int mangle_lrest (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u32
return (len);
}
static int mangle_lrest_ufirst (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u32 *buf, const int len)
int mangle_lrest_ufirst (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u32 *buf, const int len)
{
for (int i = 0, idx = 0; i < len; i += 4, idx += 1)
{
@ -190,7 +190,7 @@ static int mangle_lrest_ufirst (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8
return (len);
}
static int mangle_urest (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u32 *buf, const int len)
int mangle_urest (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u32 *buf, const int len)
{
for (int i = 0, idx = 0; i < len; i += 4, idx += 1)
{
@ -202,7 +202,7 @@ static int mangle_urest (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u32
return (len);
}
static int mangle_urest_lfirst (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u32 *buf, const int len)
int mangle_urest_lfirst (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u32 *buf, const int len)
{
for (int i = 0, idx = 0; i < len; i += 4, idx += 1)
{
@ -218,7 +218,7 @@ static int mangle_urest_lfirst (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8
return (len);
}
static int mangle_trest (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u32 *buf, const int len)
int mangle_trest (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u32 *buf, const int len)
{
for (int i = 0, idx = 0; i < len; i += 4, idx += 1)
{
@ -230,7 +230,7 @@ static int mangle_trest (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u32
return (len);
}
static int mangle_toggle_at (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u32 *buf, const int len)
int mangle_toggle_at (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u32 *buf, const int len)
{
if (p0 >= len) return (len);
@ -246,7 +246,7 @@ static int mangle_toggle_at (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1,
return (len);
}
static int mangle_reverse (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u32 *buf, const int len)
int mangle_reverse (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u32 *buf, const int len)
{
for (int l = 0; l < len / 2; l++)
{
@ -258,7 +258,7 @@ static int mangle_reverse (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u
return (len);
}
static int mangle_dupeword (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u32 *buf, const int len)
int mangle_dupeword (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u32 *buf, const int len)
{
const int out_len = len * 2;
@ -269,7 +269,7 @@ static int mangle_dupeword (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1,
return (out_len);
}
static int mangle_dupeword_times (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
int mangle_dupeword_times (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
{
const int out_len = (len * p0) + len;
@ -282,7 +282,7 @@ static int mangle_dupeword_times (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u
return (out_len);
}
static int mangle_reflect (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u32 *buf, const int len)
int mangle_reflect (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u32 *buf, const int len)
{
const int out_len = len * 2;
@ -300,7 +300,7 @@ static int mangle_reflect (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u
return out_len;
}
static int mangle_append (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
int mangle_append (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
{
const int out_len = len + 1;
@ -311,7 +311,7 @@ static int mangle_append (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8
return (out_len);
}
static int mangle_prepend (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
int mangle_prepend (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
{
const int out_len = len + 1;
@ -327,7 +327,7 @@ static int mangle_prepend (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u
return (out_len);
}
static int mangle_rotate_left (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u32 *buf, const int len)
int mangle_rotate_left (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u32 *buf, const int len)
{
for (int l = 0, r = len - 1; r > l; r--)
{
@ -337,7 +337,7 @@ static int mangle_rotate_left (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p
return (len);
}
static int mangle_rotate_right (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u32 *buf, const int len)
int mangle_rotate_right (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u32 *buf, const int len)
{
for (int l = 0, r = len - 1; l < r; l++)
{
@ -347,7 +347,7 @@ static int mangle_rotate_right (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8
return (len);
}
static int mangle_delete_at (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
int mangle_delete_at (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
{
if (p0 >= len) return (len);
@ -361,19 +361,19 @@ static int mangle_delete_at (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1,
return (len - 1);
}
static int mangle_delete_first (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
int mangle_delete_first (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
{
return mangle_delete_at (0, p1, buf, len);
}
static int mangle_delete_last (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
int mangle_delete_last (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
{
if (len == 0) return 0;
return mangle_delete_at (len - 1, p1, buf, len);
}
static int mangle_extract (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
int mangle_extract (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
{
if (p0 >= len) return (len);
@ -392,7 +392,7 @@ static int mangle_extract (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u
return (p1);
}
static int mangle_omit (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
int mangle_omit (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
{
if (p0 >= len) return (len);
@ -411,7 +411,7 @@ static int mangle_omit (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *
return (len - p1);
}
static int mangle_insert (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
int mangle_insert (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
{
if (p0 >= len + 1) return (len);
@ -429,7 +429,7 @@ static int mangle_insert (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8
return (out_len);
}
static int mangle_overstrike (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
int mangle_overstrike (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
{
if (p0 >= len) return (len);
@ -438,7 +438,7 @@ static int mangle_overstrike (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1
return (len);
}
static int mangle_truncate_at (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
int mangle_truncate_at (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
{
if (p0 >= len) return (len);
@ -450,7 +450,7 @@ static int mangle_truncate_at (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p
return (p0);
}
static int mangle_replace (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
int mangle_replace (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
{
for (int pos = 0; pos < len; pos++)
{
@ -462,7 +462,7 @@ static int mangle_replace (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u
return (len);
}
static int mangle_purgechar (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
int mangle_purgechar (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
{
int out_len = 0;
@ -483,7 +483,7 @@ static int mangle_purgechar (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1,
return (out_len);
}
static int mangle_dupechar_first (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
int mangle_dupechar_first (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
{
const int out_len = len + p0;
@ -499,7 +499,7 @@ static int mangle_dupechar_first (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u
return (out_len);
}
static int mangle_dupechar_last (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
int mangle_dupechar_last (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
{
const int out_len = len + p0;
@ -516,7 +516,7 @@ static int mangle_dupechar_last (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8
return (out_len);
}
static int mangle_dupechar_all (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
int mangle_dupechar_all (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
{
const int out_len = len + len;
@ -534,7 +534,7 @@ static int mangle_dupechar_all (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8
return (out_len);
}
static int mangle_switch_first (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u32 *buf, const int len)
int mangle_switch_first (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u32 *buf, const int len)
{
if (len < 2) return (len);
@ -543,7 +543,7 @@ static int mangle_switch_first (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8
return (len);
}
static int mangle_switch_last (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u32 *buf, const int len)
int mangle_switch_last (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u32 *buf, const int len)
{
if (len < 2) return (len);
@ -552,7 +552,7 @@ static int mangle_switch_last (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p
return (len);
}
static int mangle_switch_at (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u32 *buf, const int len)
int mangle_switch_at (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u32 *buf, const int len)
{
if (p0 >= len) return (len);
if (p1 >= len) return (len);
@ -562,7 +562,7 @@ static int mangle_switch_at (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1,
return (len);
}
static int mangle_chr_shiftl (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
int mangle_chr_shiftl (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
{
if (p0 >= len) return (len);
@ -571,7 +571,7 @@ static int mangle_chr_shiftl (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1
return (len);
}
static int mangle_chr_shiftr (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
int mangle_chr_shiftr (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
{
if (p0 >= len) return (len);
@ -580,7 +580,7 @@ static int mangle_chr_shiftr (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1
return (len);
}
static int mangle_chr_incr (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
int mangle_chr_incr (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
{
if (p0 >= len) return (len);
@ -589,7 +589,7 @@ static int mangle_chr_incr (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1,
return (len);
}
static int mangle_chr_decr (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
int mangle_chr_decr (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
{
if (p0 >= len) return (len);
@ -598,7 +598,7 @@ static int mangle_chr_decr (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1,
return (len);
}
static int mangle_replace_np1 (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
int mangle_replace_np1 (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
{
if ((p0 + 1) >= len) return (len);
@ -607,7 +607,7 @@ static int mangle_replace_np1 (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p
return (len);
}
static int mangle_replace_nm1 (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
int mangle_replace_nm1 (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
{
if (p0 == 0) return (len);
@ -618,7 +618,7 @@ static int mangle_replace_nm1 (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p
return (len);
}
static int mangle_dupeblock_first (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
int mangle_dupeblock_first (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
{
if (p0 >= len) return (len);
@ -636,7 +636,7 @@ static int mangle_dupeblock_first (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const
return (out_len);
}
static int mangle_dupeblock_last (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
int mangle_dupeblock_last (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u8 *buf, const int len)
{
if (p0 >= len) return (len);
@ -654,7 +654,7 @@ static int mangle_dupeblock_last (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u
return (out_len);
}
static int mangle_title_sep (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u32 *buf, const int len)
int mangle_title_sep (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u32 *buf, const int len)
{
if ((len + 4) >= RP_PASSWORD_SIZE) return (len); // cheap way to not need to check for overflow of i + 1
@ -679,7 +679,7 @@ static int mangle_title_sep (MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1,
return (len);
}
static int apply_rule (const u32 name, MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u32 *buf, const int in_len)
int apply_rule (const u32 name, MAYBE_UNUSED const u8 p0, MAYBE_UNUSED const u8 p1, u32 *buf, const int in_len)
{
int out_len = in_len;
@ -730,7 +730,7 @@ static int apply_rule (const u32 name, MAYBE_UNUSED const u8 p0, MAYBE_UNUSED co
return out_len;
}
static int apply_rules (__constant const u32 *cmds, u32 *buf, const int in_len)
int apply_rules (__constant const u32 *cmds, u32 *buf, const int in_len)
{
int out_len = in_len;

@ -5,7 +5,7 @@
#define MAYBE_UNUSED
static u32 generate_cmask (const u32 value)
u32 generate_cmask (const u32 value)
{
const u32 rmask = ((value & 0x40404040u) >> 1u)
& ~((value & 0x80808080u) >> 2u);
@ -16,7 +16,7 @@ static u32 generate_cmask (const u32 value)
return rmask & ~hmask & lmask;
}
static void truncate_right (u32 buf0[4], u32 buf1[4], const u32 offset)
void truncate_right (u32 buf0[4], u32 buf1[4], const u32 offset)
{
const u32 tmp = (1u << ((offset & 3u) * 8u)) - 1u;
@ -75,7 +75,7 @@ static void truncate_right (u32 buf0[4], u32 buf1[4], const u32 offset)
}
}
static void truncate_left (u32 buf0[4], u32 buf1[4], const u32 offset)
void truncate_left (u32 buf0[4], u32 buf1[4], const u32 offset)
{
const u32 tmp = ~((1u << ((offset & 3u) * 8u)) - 1u);
@ -134,7 +134,7 @@ static void truncate_left (u32 buf0[4], u32 buf1[4], const u32 offset)
}
}
static void lshift_block (const u32 in0[4], const u32 in1[4], u32 out0[4], u32 out1[4])
void lshift_block (const u32 in0[4], const u32 in1[4], u32 out0[4], u32 out1[4])
{
out0[0] = amd_bytealign_S (in0[1], in0[0], 1);
out0[1] = amd_bytealign_S (in0[2], in0[1], 1);
@ -146,7 +146,7 @@ static void lshift_block (const u32 in0[4], const u32 in1[4], u32 out0[4], u32 o
out1[3] = amd_bytealign_S ( 0, in1[3], 1);
}
static void rshift_block (const u32 in0[4], const u32 in1[4], u32 out0[4], u32 out1[4])
void rshift_block (const u32 in0[4], const u32 in1[4], u32 out0[4], u32 out1[4])
{
out1[3] = amd_bytealign_S (in1[3], in1[2], 3);
out1[2] = amd_bytealign_S (in1[2], in1[1], 3);
@ -158,7 +158,7 @@ static void rshift_block (const u32 in0[4], const u32 in1[4], u32 out0[4], u32 o
out0[0] = amd_bytealign_S (in0[0], 0, 3);
}
static void lshift_block_N (const u32 in0[4], const u32 in1[4], u32 out0[4], u32 out1[4], const u32 num)
void lshift_block_N (const u32 in0[4], const u32 in1[4], u32 out0[4], u32 out1[4], const u32 num)
{
switch (num)
{
@ -453,7 +453,7 @@ static void lshift_block_N (const u32 in0[4], const u32 in1[4], u32 out0[4], u32
}
}
static void rshift_block_N (const u32 in0[4], const u32 in1[4], u32 out0[4], u32 out1[4], const u32 num)
void rshift_block_N (const u32 in0[4], const u32 in1[4], u32 out0[4], u32 out1[4], const u32 num)
{
switch (num)
{
@ -748,7 +748,7 @@ static void rshift_block_N (const u32 in0[4], const u32 in1[4], u32 out0[4], u32
}
}
static void append_block1 (const u32 offset, u32 buf0[4], u32 buf1[4], const u32 src_r0)
void append_block1 (const u32 offset, u32 buf0[4], u32 buf1[4], const u32 src_r0)
{
// this version works with 1 byte append only
const u32 value = src_r0 & 0xff;
@ -768,7 +768,7 @@ static void append_block1 (const u32 offset, u32 buf0[4], u32 buf1[4], const u32
buf1[3] |= tmp & c_append_helper[offset][7];
}
static void append_block8 (const u32 offset, u32 buf0[4], u32 buf1[4], const u32 src_l0[4], const u32 src_l1[4], const u32 src_r0[4], const u32 src_r1[4])
void append_block8 (const u32 offset, u32 buf0[4], u32 buf1[4], const u32 src_l0[4], const u32 src_l1[4], const u32 src_r0[4], const u32 src_r1[4])
{
u32 s0 = 0;
u32 s1 = 0;
@ -1023,7 +1023,7 @@ static void append_block8 (const u32 offset, u32 buf0[4], u32 buf1[4], const u32
buf1[3] = src_l1[3] | s7;
}
static void reverse_block (u32 in0[4], u32 in1[4], u32 out0[4], u32 out1[4], const u32 len)
void reverse_block (u32 in0[4], u32 in1[4], u32 out0[4], u32 out1[4], const u32 len)
{
rshift_block_N (in0, in1, out0, out1, 32 - len);
@ -1049,7 +1049,7 @@ static void reverse_block (u32 in0[4], u32 in1[4], u32 out0[4], u32 out1[4], con
out1[3] = swap32_S (tib41[3]);
}
static void exchange_byte (u32 *buf, const int off_src, const int off_dst)
void exchange_byte (u32 *buf, const int off_src, const int off_dst)
{
u8 *ptr = (u8 *) buf;
@ -1059,7 +1059,7 @@ static void exchange_byte (u32 *buf, const int off_src, const int off_dst)
ptr[off_dst] = tmp;
}
static u32 rule_op_mangle_lrest (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_lrest (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
u32 t;
@ -1075,7 +1075,7 @@ static u32 rule_op_mangle_lrest (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u
return in_len;
}
static u32 rule_op_mangle_urest (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_urest (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
u32 t;
@ -1091,7 +1091,7 @@ static u32 rule_op_mangle_urest (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u
return in_len;
}
static u32 rule_op_mangle_lrest_ufirst (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_lrest_ufirst (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
u32 t;
@ -1109,7 +1109,7 @@ static u32 rule_op_mangle_lrest_ufirst (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED
return in_len;
}
static u32 rule_op_mangle_urest_lfirst (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_urest_lfirst (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
u32 t;
@ -1127,7 +1127,7 @@ static u32 rule_op_mangle_urest_lfirst (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED
return in_len;
}
static u32 rule_op_mangle_trest (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_trest (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
u32 t;
@ -1143,7 +1143,7 @@ static u32 rule_op_mangle_trest (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u
return in_len;
}
static u32 rule_op_mangle_toggle_at (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_toggle_at (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
if (p0 >= in_len) return in_len;
@ -1176,14 +1176,14 @@ static u32 rule_op_mangle_toggle_at (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED con
return (in_len);
}
static u32 rule_op_mangle_reverse (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_reverse (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
reverse_block (buf0, buf1, buf0, buf1, in_len);
return in_len;
}
static u32 rule_op_mangle_dupeword (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_dupeword (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
if ((in_len + in_len) >= 32) return in_len;
@ -1196,7 +1196,7 @@ static u32 rule_op_mangle_dupeword (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED cons
return out_len;
}
static u32 rule_op_mangle_dupeword_times (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_dupeword_times (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
if (((in_len * p0) + in_len) >= 32) return in_len;
@ -1224,7 +1224,7 @@ static u32 rule_op_mangle_dupeword_times (MAYBE_UNUSED const u32 p0, MAYBE_UNUSE
return out_len;
}
static u32 rule_op_mangle_reflect (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_reflect (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
if ((in_len + in_len) >= 32) return in_len;
@ -1242,7 +1242,7 @@ static u32 rule_op_mangle_reflect (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const
return out_len;
}
static u32 rule_op_mangle_append (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_append (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
if ((in_len + 1) >= 32) return in_len;
@ -1255,7 +1255,7 @@ static u32 rule_op_mangle_append (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const
return out_len;
}
static u32 rule_op_mangle_prepend (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_prepend (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
if ((in_len + 1) >= 32) return in_len;
@ -1270,7 +1270,7 @@ static u32 rule_op_mangle_prepend (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const
return out_len;
}
static u32 rule_op_mangle_rotate_left (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_rotate_left (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
if (in_len == 0) return in_len;
@ -1285,7 +1285,7 @@ static u32 rule_op_mangle_rotate_left (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED c
return in_len;
}
static u32 rule_op_mangle_rotate_right (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_rotate_right (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
if (in_len == 0) return in_len;
@ -1315,7 +1315,7 @@ static u32 rule_op_mangle_rotate_right (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED
return in_len;
}
static u32 rule_op_mangle_delete_first (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_delete_first (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
if (in_len == 0) return in_len;
@ -1326,7 +1326,7 @@ static u32 rule_op_mangle_delete_first (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED
return in_len1;
}
static u32 rule_op_mangle_delete_last (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_delete_last (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
if (in_len == 0) return in_len;
@ -1346,7 +1346,7 @@ static u32 rule_op_mangle_delete_last (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED c
return in_len1;
}
static u32 rule_op_mangle_delete_at (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_delete_at (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
if (p0 >= in_len) return in_len;
@ -1427,7 +1427,7 @@ static u32 rule_op_mangle_delete_at (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED con
return out_len;
}
static u32 rule_op_mangle_extract (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_extract (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
if (p0 >= in_len) return in_len;
@ -1442,7 +1442,7 @@ static u32 rule_op_mangle_extract (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const
return out_len;
}
static u32 rule_op_mangle_omit (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_omit (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
if (p0 >= in_len) return in_len;
@ -1534,7 +1534,7 @@ static u32 rule_op_mangle_omit (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u3
return out_len;
}
static u32 rule_op_mangle_insert (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_insert (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
if (p0 > in_len) return in_len;
@ -1612,7 +1612,7 @@ static u32 rule_op_mangle_insert (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const
return out_len;
}
static u32 rule_op_mangle_overstrike (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_overstrike (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
if (p0 >= in_len) return in_len;
@ -1647,7 +1647,7 @@ static u32 rule_op_mangle_overstrike (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED co
return in_len;
}
static u32 rule_op_mangle_truncate_at (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_truncate_at (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
if (p0 >= in_len) return in_len;
@ -1656,7 +1656,7 @@ static u32 rule_op_mangle_truncate_at (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED c
return p0;
}
static u32 search_on_register (const u32 in, const u32 p0)
u32 search_on_register (const u32 in, const u32 p0)
{
u32 r = 0;
@ -1668,7 +1668,7 @@ static u32 search_on_register (const u32 in, const u32 p0)
return r;
}
static u32 replace_on_register (const u32 in, const u32 r, const u32 p1)
u32 replace_on_register (const u32 in, const u32 r, const u32 p1)
{
u32 out = in;
@ -1680,7 +1680,7 @@ static u32 replace_on_register (const u32 in, const u32 r, const u32 p1)
return out;
}
static u32 rule_op_mangle_replace (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_replace (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
const u32 r0 = search_on_register (buf0[0], p0);
const u32 r1 = search_on_register (buf0[1], p0);
@ -1707,7 +1707,7 @@ static u32 rule_op_mangle_replace (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const
return in_len;
}
static u32 rule_op_mangle_purgechar (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_purgechar (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
const u32 r0 = search_on_register (buf0[0], p0);
const u32 r1 = search_on_register (buf0[1], p0);
@ -1761,7 +1761,7 @@ static u32 rule_op_mangle_purgechar (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED con
return out_len;
}
static u32 rule_op_mangle_dupechar_first (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_dupechar_first (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
if ( in_len == 0) return in_len;
if ((in_len + p0) >= 32) return in_len;
@ -1796,7 +1796,7 @@ static u32 rule_op_mangle_dupechar_first (MAYBE_UNUSED const u32 p0, MAYBE_UNUSE
return out_len;
}
static u32 rule_op_mangle_dupechar_last (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_dupechar_last (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
if ( in_len == 0) return in_len;
if ((in_len + p0) >= 32) return in_len;
@ -1830,7 +1830,7 @@ static u32 rule_op_mangle_dupechar_last (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED
return out_len;
}
static u32 rule_op_mangle_dupechar_all (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_dupechar_all (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
if ( in_len == 0) return in_len;
if ((in_len + in_len) >= 32) return in_len;
@ -1863,7 +1863,7 @@ static u32 rule_op_mangle_dupechar_all (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED
return out_len;
}
static u32 rule_op_mangle_switch_first (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_switch_first (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
if (in_len < 2) return in_len;
@ -1872,7 +1872,7 @@ static u32 rule_op_mangle_switch_first (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED
return in_len;
}
static u32 rule_op_mangle_switch_last (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_switch_last (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
if (in_len < 2) return in_len;
@ -1901,7 +1901,7 @@ static u32 rule_op_mangle_switch_last (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED c
return in_len;
}
static u32 rule_op_mangle_switch_at (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_switch_at (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
if (p0 >= in_len) return in_len;
if (p1 >= in_len) return in_len;
@ -1931,7 +1931,7 @@ static u32 rule_op_mangle_switch_at (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED con
return in_len;
}
static u32 rule_op_mangle_chr_shiftl (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_chr_shiftl (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
if (p0 >= in_len) return in_len;
@ -1965,7 +1965,7 @@ static u32 rule_op_mangle_chr_shiftl (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED co
return in_len;
}
static u32 rule_op_mangle_chr_shiftr (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_chr_shiftr (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
if (p0 >= in_len) return in_len;
@ -1999,7 +1999,7 @@ static u32 rule_op_mangle_chr_shiftr (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED co
return in_len;
}
static u32 rule_op_mangle_chr_incr (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_chr_incr (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
if (p0 >= in_len) return in_len;
@ -2035,7 +2035,7 @@ static u32 rule_op_mangle_chr_incr (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED cons
return in_len;
}
static u32 rule_op_mangle_chr_decr (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_chr_decr (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
if (p0 >= in_len) return in_len;
@ -2071,7 +2071,7 @@ static u32 rule_op_mangle_chr_decr (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED cons
return in_len;
}
static u32 rule_op_mangle_replace_np1 (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_replace_np1 (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
if ((p0 + 1) >= in_len) return in_len;
@ -2111,7 +2111,7 @@ static u32 rule_op_mangle_replace_np1 (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED c
return in_len;
}
static u32 rule_op_mangle_replace_nm1 (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_replace_nm1 (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
if (p0 == 0) return in_len;
@ -2153,7 +2153,7 @@ static u32 rule_op_mangle_replace_nm1 (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED c
return in_len;
}
static u32 rule_op_mangle_dupeblock_first (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_dupeblock_first (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
if (p0 > in_len) return in_len;
@ -2191,7 +2191,7 @@ static u32 rule_op_mangle_dupeblock_first (MAYBE_UNUSED const u32 p0, MAYBE_UNUS
return out_len;
}
static u32 rule_op_mangle_dupeblock_last (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_dupeblock_last (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
if (p0 > in_len) return in_len;
@ -2220,7 +2220,7 @@ static u32 rule_op_mangle_dupeblock_last (MAYBE_UNUSED const u32 p0, MAYBE_UNUSE
return out_len;
}
static u32 toggle_on_register (const u32 in, const u32 r)
u32 toggle_on_register (const u32 in, const u32 r)
{
u32 out = in;
@ -2234,7 +2234,7 @@ static u32 toggle_on_register (const u32 in, const u32 r)
return out;
}
static u32 rule_op_mangle_title_sep (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
u32 rule_op_mangle_title_sep (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED const u32 p1, MAYBE_UNUSED u32 buf0[4], MAYBE_UNUSED u32 buf1[4], const u32 in_len)
{
if (in_len == 0) return in_len;
@ -2274,7 +2274,7 @@ static u32 rule_op_mangle_title_sep (MAYBE_UNUSED const u32 p0, MAYBE_UNUSED con
return in_len;
}
static u32 apply_rule (const u32 name, const u32 p0, const u32 p1, u32 buf0[4], u32 buf1[4], const u32 in_len)
u32 apply_rule (const u32 name, const u32 p0, const u32 p1, u32 buf0[4], u32 buf1[4], const u32 in_len)
{
u32 out_len = in_len;
@ -2326,7 +2326,7 @@ static u32 apply_rule (const u32 name, const u32 p0, const u32 p1, u32 buf0[4],
return out_len;
}
static u32 apply_rules (__constant const u32 *cmds, u32 buf0[4], u32 buf1[4], const u32 len)
u32 apply_rules (__constant const u32 *cmds, u32 buf0[4], u32 buf1[4], const u32 len)
{
u32 out_len = len;
@ -2344,7 +2344,7 @@ static u32 apply_rules (__constant const u32 *cmds, u32 buf0[4], u32 buf1[4], co
return out_len;
}
static u32x apply_rules_vect (const u32 pw_buf0[4], const u32 pw_buf1[4], const u32 pw_len, __constant const kernel_rule_t *rules_buf, const u32 il_pos, u32x buf0[4], u32x buf1[4])
u32x apply_rules_vect (const u32 pw_buf0[4], const u32 pw_buf1[4], const u32 pw_len, __constant const kernel_rule_t *rules_buf, const u32 il_pos, u32x buf0[4], u32x buf1[4])
{
#if VECT_SIZE == 1

@ -1048,7 +1048,7 @@
// attack-mode 0
static u32x ix_create_bft (__global const bf_t *bfs_buf, const u32 il_pos)
u32x ix_create_bft (__global const bf_t *bfs_buf, const u32 il_pos)
{
#if VECT_SIZE == 1
const u32x ix = (u32x) (bfs_buf[il_pos + 0].i);
@ -1067,7 +1067,7 @@ static u32x ix_create_bft (__global const bf_t *bfs_buf, const u32 il_pos)
// attack-mode 1
static u32x pwlenx_create_combt (__global const pw_t *combs_buf, const u32 il_pos)
u32x pwlenx_create_combt (__global const pw_t *combs_buf, const u32 il_pos)
{
#if VECT_SIZE == 1
const u32x pw_lenx = (u32x) (combs_buf[il_pos + 0].pw_len);
@ -1084,7 +1084,7 @@ static u32x pwlenx_create_combt (__global const pw_t *combs_buf, const u32 il_po
return pw_lenx;
}
static u32x ix_create_combt (__global const pw_t *combs_buf, const u32 il_pos, const int idx)
u32x ix_create_combt (__global const pw_t *combs_buf, const u32 il_pos, const int idx)
{
#if VECT_SIZE == 1
const u32x ix = (u32x) (combs_buf[il_pos + 0].i[idx]);

@ -66,7 +66,7 @@ __constant u32a crc32tab[0x100] =
0xb40bbe37, 0xc30c8ea1, 0x5a05df1b, 0x2d02ef8d
};
static u32 round_crc32 (u32 a, const u32 v)
u32 round_crc32 (u32 a, const u32 v)
{
const u32 k = (a ^ v) & 0xff;
@ -79,7 +79,7 @@ static u32 round_crc32 (u32 a, const u32 v)
return a;
}
static u32 round_crc32_4 (const u32 w, const u32 iv)
u32 round_crc32_4 (const u32 w, const u32 iv)
{
u32 a = iv;

@ -1,4 +1,4 @@
static void xts_mul2 (u32 *in, u32 *out)
void xts_mul2 (u32 *in, u32 *out)
{
const u32 c = in[3] >> 31;
@ -10,7 +10,7 @@ static void xts_mul2 (u32 *in, u32 *out)
out[0] ^= c * 0x87;
}
static void aes256_decrypt_xts_first (const u32 *ukey1, const u32 *ukey2, const u32 *in, u32 *out, u32 *S, u32 *T, u32 *ks, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void aes256_decrypt_xts_first (const u32 *ukey1, const u32 *ukey2, const u32 *in, u32 *out, u32 *S, u32 *T, u32 *ks, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
out[0] = in[0];
out[1] = in[1];
@ -34,7 +34,7 @@ static void aes256_decrypt_xts_first (const u32 *ukey1, const u32 *ukey2, const
out[3] ^= T[3];
}
static void aes256_decrypt_xts_next (const u32 *in, u32 *out, u32 *T, u32 *ks, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
void aes256_decrypt_xts_next (const u32 *in, u32 *out, u32 *T, u32 *ks, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
out[0] = in[0];
out[1] = in[1];
@ -56,7 +56,7 @@ static void aes256_decrypt_xts_next (const u32 *in, u32 *out, u32 *T, u32 *ks, S
out[3] ^= T[3];
}
static void serpent256_decrypt_xts_first (const u32 *ukey1, const u32 *ukey2, const u32 *in, u32 *out, u32 *S, u32 *T, u32 *ks)
void serpent256_decrypt_xts_first (const u32 *ukey1, const u32 *ukey2, const u32 *in, u32 *out, u32 *S, u32 *T, u32 *ks)
{
out[0] = in[0];
out[1] = in[1];
@ -80,7 +80,7 @@ static void serpent256_decrypt_xts_first (const u32 *ukey1, const u32 *ukey2, co
out[3] ^= T[3];
}
static void serpent256_decrypt_xts_next (const u32 *in, u32 *out, u32 *T, u32 *ks)
void serpent256_decrypt_xts_next (const u32 *in, u32 *out, u32 *T, u32 *ks)
{
out[0] = in[0];
out[1] = in[1];
@ -102,7 +102,7 @@ static void serpent256_decrypt_xts_next (const u32 *in, u32 *out, u32 *T, u32 *k
out[3] ^= T[3];
}
static void twofish256_decrypt_xts_first (const u32 *ukey1, const u32 *ukey2, const u32 *in, u32 *out, u32 *S, u32 *T, u32 *sk, u32 *lk)
void twofish256_decrypt_xts_first (const u32 *ukey1, const u32 *ukey2, const u32 *in, u32 *out, u32 *S, u32 *T, u32 *sk, u32 *lk)
{
out[0] = in[0];
out[1] = in[1];
@ -126,7 +126,7 @@ static void twofish256_decrypt_xts_first (const u32 *ukey1, const u32 *ukey2, co
out[3] ^= T[3];
}
static void twofish256_decrypt_xts_next (const u32 *in, u32 *out, u32 *T, u32 *sk, u32 *lk)
void twofish256_decrypt_xts_next (const u32 *in, u32 *out, u32 *T, u32 *sk, u32 *lk)
{
out[0] = in[0];
out[1] = in[1];
@ -150,7 +150,7 @@ static void twofish256_decrypt_xts_next (const u32 *in, u32 *out, u32 *T, u32 *s
// 512 bit
static int verify_header_aes (__global const tc_t *esalt_bufs, const u32 *ukey1, const u32 *ukey2, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
int verify_header_aes (__global const tc_t *esalt_bufs, const u32 *ukey1, const u32 *ukey2, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 ks_aes[60];
@ -206,7 +206,7 @@ static int verify_header_aes (__global const tc_t *esalt_bufs, const u32 *ukey1,
return 1;
}
static int verify_header_serpent (__global const tc_t *esalt_bufs, const u32 *ukey1, const u32 *ukey2)
int verify_header_serpent (__global const tc_t *esalt_bufs, const u32 *ukey1, const u32 *ukey2)
{
u32 ks_serpent[140];
@ -262,7 +262,7 @@ static int verify_header_serpent (__global const tc_t *esalt_bufs, const u32 *uk
return 1;
}
static int verify_header_twofish (__global const tc_t *esalt_bufs, const u32 *ukey1, const u32 *ukey2)
int verify_header_twofish (__global const tc_t *esalt_bufs, const u32 *ukey1, const u32 *ukey2)
{
u32 sk_twofish[4];
u32 lk_twofish[40];
@ -321,7 +321,7 @@ static int verify_header_twofish (__global const tc_t *esalt_bufs, const u32 *uk
// 1024 bit
static int verify_header_aes_twofish (__global const tc_t *esalt_bufs, const u32 *ukey1, const u32 *ukey2, const u32 *ukey3, const u32 *ukey4, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
int verify_header_aes_twofish (__global const tc_t *esalt_bufs, const u32 *ukey1, const u32 *ukey2, const u32 *ukey3, const u32 *ukey4, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 ks_aes[60];
@ -384,7 +384,7 @@ static int verify_header_aes_twofish (__global const tc_t *esalt_bufs, const u32
return 1;
}
static int verify_header_serpent_aes (__global const tc_t *esalt_bufs, const u32 *ukey1, const u32 *ukey2, const u32 *ukey3, const u32 *ukey4, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
int verify_header_serpent_aes (__global const tc_t *esalt_bufs, const u32 *ukey1, const u32 *ukey2, const u32 *ukey3, const u32 *ukey4, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 ks_serpent[140];
u32 ks_aes[60];
@ -445,7 +445,7 @@ static int verify_header_serpent_aes (__global const tc_t *esalt_bufs, const u32
return 1;
}
static int verify_header_twofish_serpent (__global const tc_t *esalt_bufs, const u32 *ukey1, const u32 *ukey2, const u32 *ukey3, const u32 *ukey4)
int verify_header_twofish_serpent (__global const tc_t *esalt_bufs, const u32 *ukey1, const u32 *ukey2, const u32 *ukey3, const u32 *ukey4)
{
u32 sk_twofish[4];
u32 lk_twofish[40];
@ -510,7 +510,7 @@ static int verify_header_twofish_serpent (__global const tc_t *esalt_bufs, const
// 1536 bit
static int verify_header_aes_twofish_serpent (__global const tc_t *esalt_bufs, const u32 *ukey1, const u32 *ukey2, const u32 *ukey3, const u32 *ukey4, const u32 *ukey5, const u32 *ukey6, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
int verify_header_aes_twofish_serpent (__global const tc_t *esalt_bufs, const u32 *ukey1, const u32 *ukey2, const u32 *ukey3, const u32 *ukey4, const u32 *ukey5, const u32 *ukey6, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 ks_aes[60];
@ -579,7 +579,7 @@ static int verify_header_aes_twofish_serpent (__global const tc_t *esalt_bufs, c
return 1;
}
static int verify_header_serpent_twofish_aes (__global const tc_t *esalt_bufs, const u32 *ukey1, const u32 *ukey2, const u32 *ukey3, const u32 *ukey4, const u32 *ukey5, const u32 *ukey6, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
int verify_header_serpent_twofish_aes (__global const tc_t *esalt_bufs, const u32 *ukey1, const u32 *ukey2, const u32 *ukey3, const u32 *ukey4, const u32 *ukey5, const u32 *ukey6, SHM_TYPE u32 *s_te0, SHM_TYPE u32 *s_te1, SHM_TYPE u32 *s_te2, SHM_TYPE u32 *s_te3, SHM_TYPE u32 *s_te4, SHM_TYPE u32 *s_td0, SHM_TYPE u32 *s_td1, SHM_TYPE u32 *s_td2, SHM_TYPE u32 *s_td3, SHM_TYPE u32 *s_td4)
{
u32 ks_serpent[140];

@ -33,14 +33,14 @@ typedef VTYPE(uint, VECT_SIZE) u32x;
typedef VTYPE(ulong, VECT_SIZE) u64x;
#endif
// Extract the low 32 bits of a 64-bit scalar.
// (non-static: `static` breaks old NVIDIA JiT compilers; stale duplicate
// `static` declaration line from the diff removed)
u32 l32_from_64_S (u64 a)
{
  const u32 r = (u32) (a);

  return r;
}
static u32 h32_from_64_S (u64 a)
u32 h32_from_64_S (u64 a)
{
a >>= 32;
@ -49,12 +49,12 @@ static u32 h32_from_64_S (u64 a)
return r;
}
static u64 hl32_to_64_S (const u32 a, const u32 b)
u64 hl32_to_64_S (const u32 a, const u32 b)
{
return as_ulong ((uint2) (b, a));
}
static u32x l32_from_64 (u64x a)
u32x l32_from_64 (u64x a)
{
u32x r;
@ -93,7 +93,7 @@ static u32x l32_from_64 (u64x a)
return r;
}
static u32x h32_from_64 (u64x a)
u32x h32_from_64 (u64x a)
{
a >>= 32;
@ -134,7 +134,7 @@ static u32x h32_from_64 (u64x a)
return r;
}
static u64x hl32_to_64 (const u32x a, const u32x b)
u64x hl32_to_64 (const u32x a, const u32x b)
{
u64x r;
@ -176,7 +176,7 @@ static u64x hl32_to_64 (const u32x a, const u32x b)
#ifdef IS_AMD
#if AMD_GCN >= 3
static u32 swap32_S (const u32 v)
u32 swap32_S (const u32 v)
{
u32 r;
@ -185,7 +185,7 @@ static u32 swap32_S (const u32 v)
return r;
}
static u64 swap64_S (const u64 v)
u64 swap64_S (const u64 v)
{
const u32 v0 = h32_from_64_S (v);
const u32 v1 = l32_from_64_S (v);
@ -201,28 +201,28 @@ static u64 swap64_S (const u64 v)
return r;
}
#else
static u32 swap32_S (const u32 v)
u32 swap32_S (const u32 v)
{
return as_uint (as_uchar4 (v).s3210);
}
static u64 swap64_S (const u64 v)
u64 swap64_S (const u64 v)
{
return (as_ulong (as_uchar8 (v).s76543210));
}
#endif
// Rotate a 32-bit scalar right by n, expressed as a left-rotate by (32 - n)
// since OpenCL rotate() only rotates left.
u32 rotr32_S (const u32 a, const u32 n)
{
  return rotate (a, (32 - n));
}
// Rotate a 32-bit scalar left by n bits.
u32 rotl32_S (const u32 a, const u32 n)
{
  return rotate (a, n);
}
static u64 rotr64_S (const u64 a, const u32 n)
u64 rotr64_S (const u64 a, const u32 n)
{
const u32 a0 = h32_from_64_S (a);
const u32 a1 = l32_from_64_S (a);
@ -235,18 +235,18 @@ static u64 rotr64_S (const u64 a, const u32 n)
return r;
}
// Rotate a 64-bit scalar left by n, implemented as a right-rotate by (64 - n).
u64 rotl64_S (const u64 a, const u32 n)
{
  return rotr64_S (a, 64 - n);
}
#if AMD_GCN >= 3
// Vector byte-swap of 32-bit lanes: merge rotate-by-24 and rotate-by-8
// through bitselect with the 0x00ff00ff byte mask.
u32x swap32 (const u32x v)
{
  return bitselect (rotate (v, 24u), rotate (v, 8u), 0x00ff00ffu);
}
static u64x swap64 (const u64x v)
u64x swap64 (const u64x v)
{
const u32x a0 = h32_from_64 (v);
const u32x a1 = l32_from_64 (v);
@ -308,12 +308,12 @@ static u64x swap64 (const u64x v)
return r;
}
#else
// Vector byte-swap of 32-bit lanes (non-GCN3 path): same rotate/bitselect
// construction as the GCN3 branch.
u32x swap32 (const u32x v)
{
  return bitselect (rotate (v, 24u), rotate (v, 8u), 0x00ff00ffu);
}
static u64x swap64 (const u64x v)
u64x swap64 (const u64x v)
{
return bitselect (bitselect (rotate (v, 24ul),
rotate (v, 8ul), 0x000000ff000000fful),
@ -323,17 +323,17 @@ static u64x swap64 (const u64x v)
}
#endif
// Vector rotate-right of 32-bit lanes by n, via left-rotate by (32 - n).
u32x rotr32 (const u32x a, const u32 n)
{
  return rotate (a, (32 - n));
}
// Vector rotate-left of 32-bit lanes by n bits.
u32x rotl32 (const u32x a, const u32 n)
{
  return rotate (a, n);
}
static u64x rotr64 (const u64x a, const u32 n)
u64x rotr64 (const u64x a, const u32 n)
{
const u32x a0 = h32_from_64 (a);
const u32x a1 = l32_from_64 (a);
@ -346,28 +346,28 @@ static u64x rotr64 (const u64x a, const u32 n)
return r;
}
// Vector rotate-left of 64-bit lanes by n, via right-rotate by (64 - n).
u64x rotl64 (const u64x a, const u32 n)
{
  return rotr64 (a, 64 - n);
}
// Bitfield extract (vector): thin wrapper over the AMD amd_bfe builtin
// (cl_amd_media_ops2) — extracts c bits of a starting at bit offset b.
u32x __bfe (const u32x a, const u32x b, const u32x c)
{
  return amd_bfe (a, b, c);
}
// Bitfield extract (scalar): wrapper over the AMD amd_bfe builtin.
u32 __bfe_S (const u32 a, const u32 b, const u32 c)
{
  return amd_bfe (a, b, c);
}
// Scalar byte-align: wrapper over the AMD amd_bytealign builtin
// (cl_amd_media_ops) — selects 4 bytes from the a:b pair at byte offset c.
u32 amd_bytealign_S (const u32 a, const u32 b, const u32 c)
{
  return amd_bytealign (a, b, c);
}
#if AMD_GCN >= 3
static u32x __byte_perm (const u32x a, const u32x b, const u32x c)
u32x __byte_perm (const u32x a, const u32x b, const u32x c)
{
u32x r;
@ -420,7 +420,7 @@ static u32x __byte_perm (const u32x a, const u32x b, const u32x c)
return r;
}
static u32 __byte_perm_S (const u32 a, const u32 b, const u32 c)
u32 __byte_perm_S (const u32 a, const u32 b, const u32 c)
{
u32 r;
@ -431,7 +431,7 @@ static u32 __byte_perm_S (const u32 a, const u32 b, const u32 c)
#endif
#if AMD_GCN >= 5
static u32x __add3 (const u32x a, const u32x b, const u32x c)
u32x __add3 (const u32x a, const u32x b, const u32x c)
{
u32x r;
@ -484,7 +484,7 @@ static u32x __add3 (const u32x a, const u32x b, const u32x c)
return r;
}
static u32 __add3_S (const u32 a, const u32 b, const u32 c)
u32 __add3_S (const u32 a, const u32 b, const u32 c)
{
u32 r;
@ -493,12 +493,12 @@ static u32 __add3_S (const u32 a, const u32 b, const u32 c)
return r;
}
#else
// Three-operand add (vector), plain-arithmetic fallback for pre-GCN5 AMD.
u32x __add3 (const u32x a, const u32x b, const u32x c)
{
  return a + b + c;
}
// Three-operand add (scalar), plain-arithmetic fallback for pre-GCN5 AMD.
u32 __add3_S (const u32 a, const u32 b, const u32 c)
{
  return a + b + c;
}
@ -507,7 +507,7 @@ static u32 __add3_S (const u32 a, const u32 b, const u32 c)
#endif
#ifdef IS_NV
static u32 swap32_S (const u32 v)
u32 swap32_S (const u32 v)
{
u32 r;
@ -516,7 +516,7 @@ static u32 swap32_S (const u32 v)
return r;
}
static u64 swap64_S (const u64 v)
u64 swap64_S (const u64 v)
{
u32 il;
u32 ir;
@ -536,27 +536,27 @@ static u64 swap64_S (const u64 v)
return r;
}
// NV path: rotate a 32-bit scalar right by n, via left-rotate by (32 - n).
u32 rotr32_S (const u32 a, const u32 n)
{
  return rotate (a, (32 - n));
}
// NV path: rotate a 32-bit scalar left by n bits.
u32 rotl32_S (const u32 a, const u32 n)
{
  return rotate (a, n);
}
static u64 rotr64_S (const u64 a, const u32 n)
u64 rotr64_S (const u64 a, const u32 n)
{
return rotate (a, (u64) (64 - n));
}
static u64 rotl64_S (const u64 a, const u32 n)
u64 rotl64_S (const u64 a, const u32 n)
{
return rotate (a, (u64) n);
}
static u32x swap32 (const u32x v)
u32x swap32 (const u32x v)
{
u32x r;
@ -595,7 +595,7 @@ static u32x swap32 (const u32x v)
return r;
}
static u64x swap64 (const u64x v)
u64x swap64 (const u64x v)
{
u32x il;
u32x ir;
@ -721,27 +721,27 @@ static u64x swap64 (const u64x v)
return r;
}
// NV path: vector rotate-right of 32-bit lanes by n, via left-rotate by (32 - n).
u32x rotr32 (const u32x a, const u32 n)
{
  return rotate (a, (32 - n));
}
// NV path: vector rotate-left of 32-bit lanes by n bits.
u32x rotl32 (const u32x a, const u32 n)
{
  return rotate (a, n);
}
static u64x rotr64 (const u64x a, const u32 n)
u64x rotr64 (const u64x a, const u32 n)
{
return rotate (a, (u64x) (64 - n));
}
static u64x rotl64 (const u64x a, const u32 n)
u64x rotl64 (const u64x a, const u32 n)
{
return rotate (a, (u64x) n);
}
static u32x __byte_perm (const u32x a, const u32x b, const u32x c)
u32x __byte_perm (const u32x a, const u32x b, const u32x c)
{
u32x r;
@ -780,7 +780,7 @@ static u32x __byte_perm (const u32x a, const u32x b, const u32x c)
return r;
}
static u32 __byte_perm_S (const u32 a, const u32 b, const u32 c)
u32 __byte_perm_S (const u32 a, const u32 b, const u32 c)
{
u32 r;
@ -789,7 +789,7 @@ static u32 __byte_perm_S (const u32 a, const u32 b, const u32 c)
return r;
}
static u32x __bfe (const u32x a, const u32x b, const u32x c)
u32x __bfe (const u32x a, const u32x b, const u32x c)
{
u32x r;
@ -828,7 +828,7 @@ static u32x __bfe (const u32x a, const u32x b, const u32x c)
return r;
}
static u32 __bfe_S (const u32 a, const u32 b, const u32 c)
u32 __bfe_S (const u32 a, const u32 b, const u32 c)
{
u32 r;
@ -837,7 +837,7 @@ static u32 __bfe_S (const u32 a, const u32 b, const u32 c)
return r;
}
static u32x amd_bytealign (const u32x a, const u32x b, const u32x c)
u32x amd_bytealign (const u32x a, const u32x b, const u32x c)
{
u32x r;
@ -884,7 +884,7 @@ static u32x amd_bytealign (const u32x a, const u32x b, const u32x c)
return r;
}
static u32 amd_bytealign_S (const u32 a, const u32 b, const u32 c)
u32 amd_bytealign_S (const u32 a, const u32 b, const u32 c)
{
u32 r;
@ -901,12 +901,12 @@ static u32 amd_bytealign_S (const u32 a, const u32 b, const u32 c)
return r;
}
// NV path: three-operand add (vector), plain arithmetic.
u32x __add3 (const u32x a, const u32x b, const u32x c)
{
  return a + b + c;
}
// NV path: three-operand add (scalar), plain arithmetic.
u32 __add3_S (const u32 a, const u32 b, const u32 c)
{
  return a + b + c;
}
@ -914,37 +914,37 @@ static u32 __add3_S (const u32 a, const u32 b, const u32 c)
#endif
#ifdef IS_GENERIC
static u32 swap32_S (const u32 v)
u32 swap32_S (const u32 v)
{
return (as_uint (as_uchar4 (v).s3210));
}
static u64 swap64_S (const u64 v)
u64 swap64_S (const u64 v)
{
return (as_ulong (as_uchar8 (v).s76543210));
}
// Generic path: rotate a 32-bit scalar right by n, via left-rotate by (32 - n).
u32 rotr32_S (const u32 a, const u32 n)
{
  return rotate (a, (32 - n));
}
// Generic path: rotate a 32-bit scalar left by n bits.
u32 rotl32_S (const u32 a, const u32 n)
{
  return rotate (a, n);
}
static u64 rotr64_S (const u64 a, const u32 n)
u64 rotr64_S (const u64 a, const u32 n)
{
return rotate (a, (u64) (64 - n));
}
static u64 rotl64_S (const u64 a, const u32 n)
u64 rotl64_S (const u64 a, const u32 n)
{
return rotate (a, (u64) n);
}
static u32x swap32 (const u32x v)
u32x swap32 (const u32x v)
{
return ((v >> 24) & 0x000000ff)
| ((v >> 8) & 0x0000ff00)
@ -952,7 +952,7 @@ static u32x swap32 (const u32x v)
| ((v << 24) & 0xff000000);
}
static u64x swap64 (const u64x v)
u64x swap64 (const u64x v)
{
return ((v >> 56) & 0x00000000000000ff)
| ((v >> 40) & 0x000000000000ff00)
@ -964,27 +964,27 @@ static u64x swap64 (const u64x v)
| ((v << 56) & 0xff00000000000000);
}
// Generic path: vector rotate-right of 32-bit lanes by n.
u32x rotr32 (const u32x a, const u32 n)
{
  return rotate (a, (32 - n));
}
// Generic path: vector rotate-left of 32-bit lanes by n.
u32x rotl32 (const u32x a, const u32 n)
{
  return rotate (a, n);
}
static u64x rotr64 (const u64x a, const u32 n)
u64x rotr64 (const u64x a, const u32 n)
{
return rotate (a, (u64x) (64 - n));
}
static u64x rotl64 (const u64x a, const u32 n)
u64x rotl64 (const u64x a, const u32 n)
{
return rotate (a, (u64x) n);
}
static u32x __bfe (const u32x a, const u32x b, const u32x c)
u32x __bfe (const u32x a, const u32x b, const u32x c)
{
#define BIT(x) ((u32x) (1u) << (x))
#define BIT_MASK(x) (BIT (x) - 1)
@ -997,7 +997,7 @@ static u32x __bfe (const u32x a, const u32x b, const u32x c)
#undef BFE
}
static u32 __bfe_S (const u32 a, const u32 b, const u32 c)
u32 __bfe_S (const u32 a, const u32 b, const u32 c)
{
#define BIT(x) (1u << (x))
#define BIT_MASK(x) (BIT (x) - 1)
@ -1010,7 +1010,7 @@ static u32 __bfe_S (const u32 a, const u32 b, const u32 c)
#undef BFE
}
static u32x amd_bytealign (const u32x a, const u32x b, const u32 c)
u32x amd_bytealign (const u32x a, const u32x b, const u32 c)
{
#if VECT_SIZE == 1
const u64x tmp = ((((u64x) (a)) << 32) | ((u64x) (b))) >> ((c & 3) * 8);
@ -1043,19 +1043,19 @@ static u32x amd_bytealign (const u32x a, const u32x b, const u32 c)
#endif
}
static u32 amd_bytealign_S (const u32 a, const u32 b, const u32 c)
u32 amd_bytealign_S (const u32 a, const u32 b, const u32 c)
{
const u64 tmp = ((((u64) a) << 32) | ((u64) b)) >> ((c & 3) * 8);
return (u32) (tmp);
}
// Generic path: three-operand add (vector), plain arithmetic.
u32x __add3 (const u32x a, const u32x b, const u32x c)
{
  return a + b + c;
}
// Generic path: three-operand add (scalar), plain arithmetic.
u32 __add3_S (const u32 a, const u32 b, const u32 c)
{
  return a + b + c;
}

Loading…
Cancel
Save