Merge pull request #3321 from ventaquil/luks-strings

LUKS strings
Jens Steube committed 2 years ago via GitHub
commit 145794d463
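Summary of the change, as visible in the hunks below: across the AES, Serpent, and Twofish LUKS kernels, the keyslot and payload dispatch (a) renames HC_LUKS_CIPHER_MODE_CBC_ESSIV to HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256, (b) lets the existing CBC_PLAIN and XTS_PLAIN branches also accept the new CBC_PLAIN64 and XTS_PLAIN64 constants, and (c) reads the anti-forensic stripes from a renamed af_buf field instead of af_src_buf. A minimal sketch of the dispatch pattern, assuming illustrative enum values and helper names (only the HC_LUKS_CIPHER_MODE_* identifiers come from the diff itself):

```c
// Illustrative only -- not the hashcat source. The enum values and the
// helper below are assumptions; the HC_LUKS_CIPHER_MODE_* names are the
// ones introduced or renamed by this merge.

#include <stdio.h>

typedef enum
{
  HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256 = 1, // was HC_LUKS_CIPHER_MODE_CBC_ESSIV
  HC_LUKS_CIPHER_MODE_CBC_PLAIN        = 2,
  HC_LUKS_CIPHER_MODE_CBC_PLAIN64      = 3, // new in this merge
  HC_LUKS_CIPHER_MODE_XTS_PLAIN        = 4,
  HC_LUKS_CIPHER_MODE_XTS_PLAIN64      = 5, // new in this merge

} hc_luks_cipher_mode_t;

// plain and plain64 differ only in the width of the sector-number IV
// (32 vs 64 bit); for the small sector numbers involved in unlocking a
// keyslot they produce the same IV, so both map to one code path.
static const char *luks_mode_branch (const hc_luks_cipher_mode_t m)
{
  if (m == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256) return "cbc-essiv";

  if ((m == HC_LUKS_CIPHER_MODE_CBC_PLAIN) || (m == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)) return "cbc-plain";

  if ((m == HC_LUKS_CIPHER_MODE_XTS_PLAIN) || (m == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)) return "xts-plain";

  return "unsupported";
}

int main (void)
{
  printf ("%s\n", luks_mode_branch (HC_LUKS_CIPHER_MODE_XTS_PLAIN64)); // xts-plain

  return 0;
}
```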

@@ -2571,7 +2571,7 @@ DECLSPEC void luks_af_sha1_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs,
u32 mk[16] = { 0 };
- if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+ if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -2597,10 +2597,10 @@ DECLSPEC void luks_af_sha1_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs,
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_aes_cbc_essiv128_mk_sha1 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_cbc_essiv128_mk_sha1 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
- luks_decrypt_sector_aes_cbc_essiv128_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_cbc_essiv128_mk_sha1_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -2630,13 +2630,13 @@ DECLSPEC void luks_af_sha1_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs,
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_aes_cbc_essiv256_mk_sha1 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_cbc_essiv256_mk_sha1 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
- luks_decrypt_sector_aes_cbc_essiv256_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_cbc_essiv256_mk_sha1_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -2656,10 +2656,10 @@ DECLSPEC void luks_af_sha1_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs,
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_aes_cbc_plain128_mk_sha1 (luks_bufs->af_src_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_cbc_plain128_mk_sha1 (luks_bufs->af_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
- luks_decrypt_sector_aes_cbc_plain128_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_cbc_plain128_mk_sha1_final (luks_bufs->af_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -2683,13 +2683,13 @@ DECLSPEC void luks_af_sha1_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs,
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_aes_cbc_plain256_mk_sha1 (luks_bufs->af_src_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_cbc_plain256_mk_sha1 (luks_bufs->af_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
- luks_decrypt_sector_aes_cbc_plain256_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_cbc_plain256_mk_sha1_final (luks_bufs->af_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -2718,10 +2718,10 @@ DECLSPEC void luks_af_sha1_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs,
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_aes_xts_plain256_mk_sha1 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_xts_plain256_mk_sha1 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
- luks_decrypt_sector_aes_xts_plain256_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_xts_plain256_mk_sha1_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
else if (key_size == HC_LUKS_KEY_SIZE_512)
{
@@ -2758,16 +2758,16 @@ DECLSPEC void luks_af_sha1_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs,
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_aes_xts_plain512_mk_sha1 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_xts_plain512_mk_sha1 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
- luks_decrypt_sector_aes_xts_plain512_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_xts_plain512_mk_sha1_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
// decrypt payload data
- if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+ if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -2824,7 +2824,7 @@ DECLSPEC void luks_af_sha1_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs,
luks_decrypt_sector_aes_cbc_essiv256 (luks_bufs->ct_buf, pt_buf, ks1, ks2, 0, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -2869,7 +2869,7 @@ DECLSPEC void luks_af_sha1_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs,
luks_decrypt_sector_aes_cbc_plain256 (luks_bufs->ct_buf, pt_buf, ks1, 0, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -2962,7 +2962,7 @@ DECLSPEC void luks_af_sha256_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
u32 mk[16] = { 0 };
- if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+ if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -2988,10 +2988,10 @@ DECLSPEC void luks_af_sha256_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_aes_cbc_essiv128_mk_sha256 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_cbc_essiv128_mk_sha256 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
- luks_decrypt_sector_aes_cbc_essiv128_mk_sha256_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_cbc_essiv128_mk_sha256_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3021,13 +3021,13 @@ DECLSPEC void luks_af_sha256_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_aes_cbc_essiv256_mk_sha256 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_cbc_essiv256_mk_sha256 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
- luks_decrypt_sector_aes_cbc_essiv256_mk_sha256_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_cbc_essiv256_mk_sha256_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3047,10 +3047,10 @@ DECLSPEC void luks_af_sha256_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_aes_cbc_plain128_mk_sha256 (luks_bufs->af_src_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_cbc_plain128_mk_sha256 (luks_bufs->af_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
- luks_decrypt_sector_aes_cbc_plain128_mk_sha256_final (luks_bufs->af_src_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_cbc_plain128_mk_sha256_final (luks_bufs->af_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3074,13 +3074,13 @@ DECLSPEC void luks_af_sha256_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_aes_cbc_plain256_mk_sha256 (luks_bufs->af_src_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_cbc_plain256_mk_sha256 (luks_bufs->af_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
- luks_decrypt_sector_aes_cbc_plain256_mk_sha256_final (luks_bufs->af_src_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_cbc_plain256_mk_sha256_final (luks_bufs->af_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3109,10 +3109,10 @@ DECLSPEC void luks_af_sha256_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_aes_xts_plain256_mk_sha256 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_xts_plain256_mk_sha256 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
- luks_decrypt_sector_aes_xts_plain256_mk_sha256_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_xts_plain256_mk_sha256_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
else if (key_size == HC_LUKS_KEY_SIZE_512)
{
@@ -3149,16 +3149,16 @@ DECLSPEC void luks_af_sha256_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_aes_xts_plain512_mk_sha256 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_xts_plain512_mk_sha256 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
- luks_decrypt_sector_aes_xts_plain512_mk_sha256_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_xts_plain512_mk_sha256_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
// decrypt payload data
- if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+ if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3207,7 +3207,7 @@ DECLSPEC void luks_af_sha256_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
luks_decrypt_sector_aes_cbc_essiv256 (luks_bufs->ct_buf, pt_buf, ks1, ks2, 0, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3244,7 +3244,7 @@ DECLSPEC void luks_af_sha256_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
luks_decrypt_sector_aes_cbc_plain256 (luks_bufs->ct_buf, pt_buf, ks1, 0, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3340,7 +3340,7 @@ DECLSPEC void luks_af_sha512_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
u32 mk[16] = { 0 };
- if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+ if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3366,10 +3366,10 @@ DECLSPEC void luks_af_sha512_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_aes_cbc_essiv128_mk_sha512 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_cbc_essiv128_mk_sha512 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
- luks_decrypt_sector_aes_cbc_essiv128_mk_sha512_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_cbc_essiv128_mk_sha512_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3399,13 +3399,13 @@ DECLSPEC void luks_af_sha512_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_aes_cbc_essiv256_mk_sha512 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_cbc_essiv256_mk_sha512 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
- luks_decrypt_sector_aes_cbc_essiv256_mk_sha512_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_cbc_essiv256_mk_sha512_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3425,10 +3425,10 @@ DECLSPEC void luks_af_sha512_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_aes_cbc_plain128_mk_sha512 (luks_bufs->af_src_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_cbc_plain128_mk_sha512 (luks_bufs->af_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
- luks_decrypt_sector_aes_cbc_plain128_mk_sha512_final (luks_bufs->af_src_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_cbc_plain128_mk_sha512_final (luks_bufs->af_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3452,13 +3452,13 @@ DECLSPEC void luks_af_sha512_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_aes_cbc_plain256_mk_sha512 (luks_bufs->af_src_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_cbc_plain256_mk_sha512 (luks_bufs->af_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
- luks_decrypt_sector_aes_cbc_plain256_mk_sha512_final (luks_bufs->af_src_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_cbc_plain256_mk_sha512_final (luks_bufs->af_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3487,10 +3487,10 @@ DECLSPEC void luks_af_sha512_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_aes_xts_plain256_mk_sha512 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_xts_plain256_mk_sha512 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
- luks_decrypt_sector_aes_xts_plain256_mk_sha512_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_xts_plain256_mk_sha512_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
else if (key_size == HC_LUKS_KEY_SIZE_512)
{
@@ -3527,16 +3527,16 @@ DECLSPEC void luks_af_sha512_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_aes_xts_plain512_mk_sha512 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_xts_plain512_mk_sha512 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
- luks_decrypt_sector_aes_xts_plain512_mk_sha512_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_xts_plain512_mk_sha512_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
// decrypt payload data
- if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+ if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3585,7 +3585,7 @@ DECLSPEC void luks_af_sha512_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
luks_decrypt_sector_aes_cbc_essiv256 (luks_bufs->ct_buf, pt_buf, ks1, ks2, 0, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3622,7 +3622,7 @@ DECLSPEC void luks_af_sha512_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
luks_decrypt_sector_aes_cbc_plain256 (luks_bufs->ct_buf, pt_buf, ks1, 0, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3699,7 +3699,7 @@ DECLSPEC void luks_af_ripemd160_then_aes_decrypt (GLOBAL_AS const luks_t *luks_b
u32 mk[16] = { 0 };
- if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+ if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3725,10 +3725,10 @@ DECLSPEC void luks_af_ripemd160_then_aes_decrypt (GLOBAL_AS const luks_t *luks_b
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_aes_cbc_essiv128_mk_ripemd160 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_cbc_essiv128_mk_ripemd160 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
- luks_decrypt_sector_aes_cbc_essiv128_mk_ripemd160_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_cbc_essiv128_mk_ripemd160_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3758,13 +3758,13 @@ DECLSPEC void luks_af_ripemd160_then_aes_decrypt (GLOBAL_AS const luks_t *luks_b
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_aes_cbc_essiv256_mk_ripemd160 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_cbc_essiv256_mk_ripemd160 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
- luks_decrypt_sector_aes_cbc_essiv256_mk_ripemd160_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_cbc_essiv256_mk_ripemd160_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3784,10 +3784,10 @@ DECLSPEC void luks_af_ripemd160_then_aes_decrypt (GLOBAL_AS const luks_t *luks_b
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_aes_cbc_plain128_mk_ripemd160 (luks_bufs->af_src_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_cbc_plain128_mk_ripemd160 (luks_bufs->af_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
- luks_decrypt_sector_aes_cbc_plain128_mk_ripemd160_final (luks_bufs->af_src_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_cbc_plain128_mk_ripemd160_final (luks_bufs->af_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3811,13 +3811,13 @@ DECLSPEC void luks_af_ripemd160_then_aes_decrypt (GLOBAL_AS const luks_t *luks_b
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_aes_cbc_plain256_mk_ripemd160 (luks_bufs->af_src_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_cbc_plain256_mk_ripemd160 (luks_bufs->af_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
- luks_decrypt_sector_aes_cbc_plain256_mk_ripemd160_final (luks_bufs->af_src_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_cbc_plain256_mk_ripemd160_final (luks_bufs->af_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3846,10 +3846,10 @@ DECLSPEC void luks_af_ripemd160_then_aes_decrypt (GLOBAL_AS const luks_t *luks_b
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_aes_xts_plain256_mk_ripemd160 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_xts_plain256_mk_ripemd160 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
- luks_decrypt_sector_aes_xts_plain256_mk_ripemd160_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_xts_plain256_mk_ripemd160_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
else if (key_size == HC_LUKS_KEY_SIZE_512)
{
@@ -3886,16 +3886,16 @@ DECLSPEC void luks_af_ripemd160_then_aes_decrypt (GLOBAL_AS const luks_t *luks_b
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_aes_xts_plain512_mk_ripemd160 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_xts_plain512_mk_ripemd160 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
- luks_decrypt_sector_aes_xts_plain512_mk_ripemd160_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+ luks_decrypt_sector_aes_xts_plain512_mk_ripemd160_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
// decrypt payload data
- if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+ if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3944,7 +3944,7 @@ DECLSPEC void luks_af_ripemd160_then_aes_decrypt (GLOBAL_AS const luks_t *luks_b
luks_decrypt_sector_aes_cbc_essiv256 (luks_bufs->ct_buf, pt_buf, ks1, ks2, 0, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3981,7 +3981,7 @@ DECLSPEC void luks_af_ripemd160_then_aes_decrypt (GLOBAL_AS const luks_t *luks_b
luks_decrypt_sector_aes_cbc_plain256 (luks_bufs->ct_buf, pt_buf, ks1, 0, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{

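Reading aid for the hunks above and below: every keyslot branch runs the same anti-forensic (AF) stripe loop. It decrypts SECTOR_PER_AF - 1 stripes, folding each into the master-key candidate mk[], and then a _final variant processes the last stripe and finishes the merge. A schematic sketch in plain C with stand-in values and helpers (SECTOR_PER_AF, OFFSET_PER_SECTOR and the decrypt functions below are placeholders, not hashcat's definitions):

```c
#include <stdint.h>
#include <stdio.h>

typedef uint32_t u32;

// placeholder values; hashcat defines its own
#define SECTOR_PER_AF     4000
#define OFFSET_PER_SECTOR 128 // u32 words per 512-byte sector

// stand-in for e.g. luks_decrypt_sector_aes_xts_plain256_mk_sha1 ():
// decrypt one AF stripe and fold it into the master-key candidate mk
static void decrypt_sector (const u32 *in, u32 *mk, const int sector)
{
  mk[0] ^= in[0] ^ (u32) sector;
}

// the _final variant also finishes the AF merge on the last stripe
static void decrypt_sector_final (const u32 *in, u32 *mk, const int sector)
{
  mk[0] ^= in[0] ^ (u32) sector;
}

int main (void)
{
  static u32 af_buf[SECTOR_PER_AF * OFFSET_PER_SECTOR]; // keyslot AF area

  u32 mk[16] = { 0 };

  int sector = 0;
  int offset = 0;

  for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
  {
    decrypt_sector (af_buf + offset, mk, sector);
  }

  decrypt_sector_final (af_buf + offset, mk, sector);

  printf ("mk[0] = %08x\n", (unsigned) mk[0]);

  return 0;
}
```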
@@ -2567,7 +2567,7 @@ DECLSPEC void luks_af_sha1_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_bu
u32 mk[16] = { 0 };
- if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+ if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -2593,10 +2593,10 @@ DECLSPEC void luks_af_sha1_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_bu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_serpent_cbc_essiv128_mk_sha1 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+ luks_decrypt_sector_serpent_cbc_essiv128_mk_sha1 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
- luks_decrypt_sector_serpent_cbc_essiv128_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+ luks_decrypt_sector_serpent_cbc_essiv128_mk_sha1_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -2626,13 +2626,13 @@ DECLSPEC void luks_af_sha1_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_bu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_serpent_cbc_essiv256_mk_sha1 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+ luks_decrypt_sector_serpent_cbc_essiv256_mk_sha1 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
- luks_decrypt_sector_serpent_cbc_essiv256_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+ luks_decrypt_sector_serpent_cbc_essiv256_mk_sha1_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -2652,10 +2652,10 @@ DECLSPEC void luks_af_sha1_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_bu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_serpent_cbc_plain128_mk_sha1 (luks_bufs->af_src_buf + offset, mk, ks1, sector);
+ luks_decrypt_sector_serpent_cbc_plain128_mk_sha1 (luks_bufs->af_buf + offset, mk, ks1, sector);
}
- luks_decrypt_sector_serpent_cbc_plain128_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, ks1, sector);
+ luks_decrypt_sector_serpent_cbc_plain128_mk_sha1_final (luks_bufs->af_buf + offset, mk, ks1, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -2679,13 +2679,13 @@ DECLSPEC void luks_af_sha1_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_bu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_serpent_cbc_plain256_mk_sha1 (luks_bufs->af_src_buf + offset, mk, ks1, sector);
+ luks_decrypt_sector_serpent_cbc_plain256_mk_sha1 (luks_bufs->af_buf + offset, mk, ks1, sector);
}
- luks_decrypt_sector_serpent_cbc_plain256_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, ks1, sector);
+ luks_decrypt_sector_serpent_cbc_plain256_mk_sha1_final (luks_bufs->af_buf + offset, mk, ks1, sector);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -2714,10 +2714,10 @@ DECLSPEC void luks_af_sha1_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_bu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_serpent_xts_plain256_mk_sha1 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+ luks_decrypt_sector_serpent_xts_plain256_mk_sha1 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
- luks_decrypt_sector_serpent_xts_plain256_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+ luks_decrypt_sector_serpent_xts_plain256_mk_sha1_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_512)
{
@@ -2754,16 +2754,16 @@ DECLSPEC void luks_af_sha1_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_bu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_serpent_xts_plain512_mk_sha1 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+ luks_decrypt_sector_serpent_xts_plain512_mk_sha1 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
- luks_decrypt_sector_serpent_xts_plain512_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+ luks_decrypt_sector_serpent_xts_plain512_mk_sha1_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
}
// decrypt payload data
- if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+ if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -2812,7 +2812,7 @@ DECLSPEC void luks_af_sha1_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_bu
luks_decrypt_sector_serpent_cbc_essiv256 (luks_bufs->ct_buf, pt_buf, ks1, ks2, 0);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -2849,7 +2849,7 @@ DECLSPEC void luks_af_sha1_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_bu
luks_decrypt_sector_serpent_cbc_plain256 (luks_bufs->ct_buf, pt_buf, ks1, 0);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -2926,7 +2926,7 @@ DECLSPEC void luks_af_sha256_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
u32 mk[16] = { 0 };
- if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+ if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -2952,10 +2952,10 @@ DECLSPEC void luks_af_sha256_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_serpent_cbc_essiv128_mk_sha256 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+ luks_decrypt_sector_serpent_cbc_essiv128_mk_sha256 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
- luks_decrypt_sector_serpent_cbc_essiv128_mk_sha256_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+ luks_decrypt_sector_serpent_cbc_essiv128_mk_sha256_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -2985,13 +2985,13 @@ DECLSPEC void luks_af_sha256_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_serpent_cbc_essiv256_mk_sha256 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+ luks_decrypt_sector_serpent_cbc_essiv256_mk_sha256 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
- luks_decrypt_sector_serpent_cbc_essiv256_mk_sha256_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+ luks_decrypt_sector_serpent_cbc_essiv256_mk_sha256_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3011,10 +3011,10 @@ DECLSPEC void luks_af_sha256_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_serpent_cbc_plain128_mk_sha256 (luks_bufs->af_src_buf + offset, mk, ks1, sector);
+ luks_decrypt_sector_serpent_cbc_plain128_mk_sha256 (luks_bufs->af_buf + offset, mk, ks1, sector);
}
- luks_decrypt_sector_serpent_cbc_plain128_mk_sha256_final (luks_bufs->af_src_buf + offset, mk, ks1, sector);
+ luks_decrypt_sector_serpent_cbc_plain128_mk_sha256_final (luks_bufs->af_buf + offset, mk, ks1, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3038,13 +3038,13 @@ DECLSPEC void luks_af_sha256_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_serpent_cbc_plain256_mk_sha256 (luks_bufs->af_src_buf + offset, mk, ks1, sector);
+ luks_decrypt_sector_serpent_cbc_plain256_mk_sha256 (luks_bufs->af_buf + offset, mk, ks1, sector);
}
- luks_decrypt_sector_serpent_cbc_plain256_mk_sha256_final (luks_bufs->af_src_buf + offset, mk, ks1, sector);
+ luks_decrypt_sector_serpent_cbc_plain256_mk_sha256_final (luks_bufs->af_buf + offset, mk, ks1, sector);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3073,10 +3073,10 @@ DECLSPEC void luks_af_sha256_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_serpent_xts_plain256_mk_sha256 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+ luks_decrypt_sector_serpent_xts_plain256_mk_sha256 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
- luks_decrypt_sector_serpent_xts_plain256_mk_sha256_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+ luks_decrypt_sector_serpent_xts_plain256_mk_sha256_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_512)
{
@@ -3113,16 +3113,16 @@ DECLSPEC void luks_af_sha256_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_serpent_xts_plain512_mk_sha256 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+ luks_decrypt_sector_serpent_xts_plain512_mk_sha256 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
- luks_decrypt_sector_serpent_xts_plain512_mk_sha256_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+ luks_decrypt_sector_serpent_xts_plain512_mk_sha256_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
}
// decrypt payload data
- if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+ if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3171,7 +3171,7 @@ DECLSPEC void luks_af_sha256_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
luks_decrypt_sector_serpent_cbc_essiv256 (luks_bufs->ct_buf, pt_buf, ks1, ks2, 0);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3208,7 +3208,7 @@ DECLSPEC void luks_af_sha256_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
luks_decrypt_sector_serpent_cbc_plain256 (luks_bufs->ct_buf, pt_buf, ks1, 0);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3304,7 +3304,7 @@ DECLSPEC void luks_af_sha512_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
u32 mk[16] = { 0 };
- if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+ if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3330,10 +3330,10 @@ DECLSPEC void luks_af_sha512_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_serpent_cbc_essiv128_mk_sha512 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+ luks_decrypt_sector_serpent_cbc_essiv128_mk_sha512 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
- luks_decrypt_sector_serpent_cbc_essiv128_mk_sha512_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+ luks_decrypt_sector_serpent_cbc_essiv128_mk_sha512_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3363,13 +3363,13 @@ DECLSPEC void luks_af_sha512_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_serpent_cbc_essiv256_mk_sha512 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+ luks_decrypt_sector_serpent_cbc_essiv256_mk_sha512 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
- luks_decrypt_sector_serpent_cbc_essiv256_mk_sha512_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+ luks_decrypt_sector_serpent_cbc_essiv256_mk_sha512_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3389,10 +3389,10 @@ DECLSPEC void luks_af_sha512_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_serpent_cbc_plain128_mk_sha512 (luks_bufs->af_src_buf + offset, mk, ks1, sector);
+ luks_decrypt_sector_serpent_cbc_plain128_mk_sha512 (luks_bufs->af_buf + offset, mk, ks1, sector);
}
- luks_decrypt_sector_serpent_cbc_plain128_mk_sha512_final (luks_bufs->af_src_buf + offset, mk, ks1, sector);
+ luks_decrypt_sector_serpent_cbc_plain128_mk_sha512_final (luks_bufs->af_buf + offset, mk, ks1, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3416,13 +3416,13 @@ DECLSPEC void luks_af_sha512_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_serpent_cbc_plain256_mk_sha512 (luks_bufs->af_src_buf + offset, mk, ks1, sector);
+ luks_decrypt_sector_serpent_cbc_plain256_mk_sha512 (luks_bufs->af_buf + offset, mk, ks1, sector);
}
- luks_decrypt_sector_serpent_cbc_plain256_mk_sha512_final (luks_bufs->af_src_buf + offset, mk, ks1, sector);
+ luks_decrypt_sector_serpent_cbc_plain256_mk_sha512_final (luks_bufs->af_buf + offset, mk, ks1, sector);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3451,10 +3451,10 @@ DECLSPEC void luks_af_sha512_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_serpent_xts_plain256_mk_sha512 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+ luks_decrypt_sector_serpent_xts_plain256_mk_sha512 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
- luks_decrypt_sector_serpent_xts_plain256_mk_sha512_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+ luks_decrypt_sector_serpent_xts_plain256_mk_sha512_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_512)
{
@@ -3491,16 +3491,16 @@ DECLSPEC void luks_af_sha512_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_serpent_xts_plain512_mk_sha512 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+ luks_decrypt_sector_serpent_xts_plain512_mk_sha512 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
- luks_decrypt_sector_serpent_xts_plain512_mk_sha512_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+ luks_decrypt_sector_serpent_xts_plain512_mk_sha512_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
}
// decrypt payload data
- if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+ if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3549,7 +3549,7 @@ DECLSPEC void luks_af_sha512_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
luks_decrypt_sector_serpent_cbc_essiv256 (luks_bufs->ct_buf, pt_buf, ks1, ks2, 0);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3586,7 +3586,7 @@ DECLSPEC void luks_af_sha512_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
luks_decrypt_sector_serpent_cbc_plain256 (luks_bufs->ct_buf, pt_buf, ks1, 0);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3663,7 +3663,7 @@ DECLSPEC void luks_af_ripemd160_then_serpent_decrypt (GLOBAL_AS const luks_t *lu
u32 mk[16] = { 0 };
- if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+ if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3689,10 +3689,10 @@ DECLSPEC void luks_af_ripemd160_then_serpent_decrypt (GLOBAL_AS const luks_t *lu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_serpent_cbc_essiv128_mk_ripemd160 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+ luks_decrypt_sector_serpent_cbc_essiv128_mk_ripemd160 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
- luks_decrypt_sector_serpent_cbc_essiv128_mk_ripemd160_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+ luks_decrypt_sector_serpent_cbc_essiv128_mk_ripemd160_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3722,13 +3722,13 @@ DECLSPEC void luks_af_ripemd160_then_serpent_decrypt (GLOBAL_AS const luks_t *lu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_serpent_cbc_essiv256_mk_ripemd160 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+ luks_decrypt_sector_serpent_cbc_essiv256_mk_ripemd160 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
- luks_decrypt_sector_serpent_cbc_essiv256_mk_ripemd160_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+ luks_decrypt_sector_serpent_cbc_essiv256_mk_ripemd160_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3748,10 +3748,10 @@ DECLSPEC void luks_af_ripemd160_then_serpent_decrypt (GLOBAL_AS const luks_t *lu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_serpent_cbc_plain128_mk_ripemd160 (luks_bufs->af_src_buf + offset, mk, ks1, sector);
+ luks_decrypt_sector_serpent_cbc_plain128_mk_ripemd160 (luks_bufs->af_buf + offset, mk, ks1, sector);
}
- luks_decrypt_sector_serpent_cbc_plain128_mk_ripemd160_final (luks_bufs->af_src_buf + offset, mk, ks1, sector);
+ luks_decrypt_sector_serpent_cbc_plain128_mk_ripemd160_final (luks_bufs->af_buf + offset, mk, ks1, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3775,13 +3775,13 @@ DECLSPEC void luks_af_ripemd160_then_serpent_decrypt (GLOBAL_AS const luks_t *lu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_serpent_cbc_plain256_mk_ripemd160 (luks_bufs->af_src_buf + offset, mk, ks1, sector);
+ luks_decrypt_sector_serpent_cbc_plain256_mk_ripemd160 (luks_bufs->af_buf + offset, mk, ks1, sector);
}
- luks_decrypt_sector_serpent_cbc_plain256_mk_ripemd160_final (luks_bufs->af_src_buf + offset, mk, ks1, sector);
+ luks_decrypt_sector_serpent_cbc_plain256_mk_ripemd160_final (luks_bufs->af_buf + offset, mk, ks1, sector);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3810,10 +3810,10 @@ DECLSPEC void luks_af_ripemd160_then_serpent_decrypt (GLOBAL_AS const luks_t *lu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_serpent_xts_plain256_mk_ripemd160 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+ luks_decrypt_sector_serpent_xts_plain256_mk_ripemd160 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
- luks_decrypt_sector_serpent_xts_plain256_mk_ripemd160_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+ luks_decrypt_sector_serpent_xts_plain256_mk_ripemd160_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_512)
{
@@ -3850,16 +3850,16 @@ DECLSPEC void luks_af_ripemd160_then_serpent_decrypt (GLOBAL_AS const luks_t *lu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_serpent_xts_plain512_mk_ripemd160 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+ luks_decrypt_sector_serpent_xts_plain512_mk_ripemd160 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
- luks_decrypt_sector_serpent_xts_plain512_mk_ripemd160_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+ luks_decrypt_sector_serpent_xts_plain512_mk_ripemd160_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
}
// decrypt payload data
- if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+ if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3908,7 +3908,7 @@ DECLSPEC void luks_af_ripemd160_then_serpent_decrypt (GLOBAL_AS const luks_t *lu
luks_decrypt_sector_serpent_cbc_essiv256 (luks_bufs->ct_buf, pt_buf, ks1, ks2, 0);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3945,7 +3945,7 @@ DECLSPEC void luks_af_ripemd160_then_serpent_decrypt (GLOBAL_AS const luks_t *lu
luks_decrypt_sector_serpent_cbc_plain256 (luks_bufs->ct_buf, pt_buf, ks1, 0);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{

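The Twofish hunks below follow the same structure as the AES and Serpent ones above; only the key-schedule arguments differ. AES passes two expanded key schedules plus the shared S-box lookup tables (ks1, ks2, s_te0..s_te4, s_td0..s_td4), Serpent passes just ks1/ks2, and Twofish passes its S-box and whitening key pairs (sk1/lk1 and sk2/lk2).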
@@ -2567,7 +2567,7 @@ DECLSPEC void luks_af_sha1_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_bu
u32 mk[16] = { 0 };
- if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+ if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -2593,10 +2593,10 @@ DECLSPEC void luks_af_sha1_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_bu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_twofish_cbc_essiv128_mk_sha1 (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
+ luks_decrypt_sector_twofish_cbc_essiv128_mk_sha1 (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
- luks_decrypt_sector_twofish_cbc_essiv128_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
+ luks_decrypt_sector_twofish_cbc_essiv128_mk_sha1_final (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -2626,13 +2626,13 @@ DECLSPEC void luks_af_sha1_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_bu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_twofish_cbc_essiv256_mk_sha1 (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
+ luks_decrypt_sector_twofish_cbc_essiv256_mk_sha1 (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
- luks_decrypt_sector_twofish_cbc_essiv256_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
+ luks_decrypt_sector_twofish_cbc_essiv256_mk_sha1_final (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -2652,10 +2652,10 @@ DECLSPEC void luks_af_sha1_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_bu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_twofish_cbc_plain128_mk_sha1 (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sector);
+ luks_decrypt_sector_twofish_cbc_plain128_mk_sha1 (luks_bufs->af_buf + offset, mk, sk1, lk1, sector);
}
- luks_decrypt_sector_twofish_cbc_plain128_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sector);
+ luks_decrypt_sector_twofish_cbc_plain128_mk_sha1_final (luks_bufs->af_buf + offset, mk, sk1, lk1, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -2679,13 +2679,13 @@ DECLSPEC void luks_af_sha1_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_bu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_twofish_cbc_plain256_mk_sha1 (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sector);
+ luks_decrypt_sector_twofish_cbc_plain256_mk_sha1 (luks_bufs->af_buf + offset, mk, sk1, lk1, sector);
}
- luks_decrypt_sector_twofish_cbc_plain256_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sector);
+ luks_decrypt_sector_twofish_cbc_plain256_mk_sha1_final (luks_bufs->af_buf + offset, mk, sk1, lk1, sector);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -2714,10 +2714,10 @@ DECLSPEC void luks_af_sha1_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_bu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_twofish_xts_plain256_mk_sha1 (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
+ luks_decrypt_sector_twofish_xts_plain256_mk_sha1 (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
- luks_decrypt_sector_twofish_xts_plain256_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
+ luks_decrypt_sector_twofish_xts_plain256_mk_sha1_final (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_512)
{
@@ -2754,16 +2754,16 @@ DECLSPEC void luks_af_sha1_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_bu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
- luks_decrypt_sector_twofish_xts_plain512_mk_sha1 (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
+ luks_decrypt_sector_twofish_xts_plain512_mk_sha1 (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
- luks_decrypt_sector_twofish_xts_plain512_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
+ luks_decrypt_sector_twofish_xts_plain512_mk_sha1_final (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
}
// decrypt payload data
- if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+ if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -2812,7 +2812,7 @@ DECLSPEC void luks_af_sha1_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_bu
luks_decrypt_sector_twofish_cbc_essiv256 (luks_bufs->ct_buf, pt_buf, sk1, lk1, sk2, lk2, 0);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -2849,7 +2849,7 @@ DECLSPEC void luks_af_sha1_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_bu
luks_decrypt_sector_twofish_cbc_plain256 (luks_bufs->ct_buf, pt_buf, sk1, lk1, 0);
}
}
- else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+ else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -2926,7 +2926,7 @@ DECLSPEC void luks_af_sha256_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_
u32 mk[16] = { 0 };
- if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+ if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@ -2952,10 +2952,10 @@ DECLSPEC void luks_af_sha256_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
luks_decrypt_sector_twofish_cbc_essiv128_mk_sha256 (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
luks_decrypt_sector_twofish_cbc_essiv128_mk_sha256 (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
luks_decrypt_sector_twofish_cbc_essiv128_mk_sha256_final (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
luks_decrypt_sector_twofish_cbc_essiv128_mk_sha256_final (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@ -2985,13 +2985,13 @@ DECLSPEC void luks_af_sha256_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
luks_decrypt_sector_twofish_cbc_essiv256_mk_sha256 (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
luks_decrypt_sector_twofish_cbc_essiv256_mk_sha256 (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
luks_decrypt_sector_twofish_cbc_essiv256_mk_sha256_final (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
luks_decrypt_sector_twofish_cbc_essiv256_mk_sha256_final (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
}
else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@ -3011,10 +3011,10 @@ DECLSPEC void luks_af_sha256_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
luks_decrypt_sector_twofish_cbc_plain128_mk_sha256 (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sector);
luks_decrypt_sector_twofish_cbc_plain128_mk_sha256 (luks_bufs->af_buf + offset, mk, sk1, lk1, sector);
}
luks_decrypt_sector_twofish_cbc_plain128_mk_sha256_final (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sector);
luks_decrypt_sector_twofish_cbc_plain128_mk_sha256_final (luks_bufs->af_buf + offset, mk, sk1, lk1, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@ -3038,13 +3038,13 @@ DECLSPEC void luks_af_sha256_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
luks_decrypt_sector_twofish_cbc_plain256_mk_sha256 (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sector);
luks_decrypt_sector_twofish_cbc_plain256_mk_sha256 (luks_bufs->af_buf + offset, mk, sk1, lk1, sector);
}
luks_decrypt_sector_twofish_cbc_plain256_mk_sha256_final (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sector);
luks_decrypt_sector_twofish_cbc_plain256_mk_sha256_final (luks_bufs->af_buf + offset, mk, sk1, lk1, sector);
}
}
else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@ -3073,10 +3073,10 @@ DECLSPEC void luks_af_sha256_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
luks_decrypt_sector_twofish_xts_plain256_mk_sha256 (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
luks_decrypt_sector_twofish_xts_plain256_mk_sha256 (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
luks_decrypt_sector_twofish_xts_plain256_mk_sha256_final (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
luks_decrypt_sector_twofish_xts_plain256_mk_sha256_final (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_512)
{
@ -3113,16 +3113,16 @@ DECLSPEC void luks_af_sha256_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
luks_decrypt_sector_twofish_xts_plain512_mk_sha256 (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
luks_decrypt_sector_twofish_xts_plain512_mk_sha256 (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
luks_decrypt_sector_twofish_xts_plain512_mk_sha256_final (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
luks_decrypt_sector_twofish_xts_plain512_mk_sha256_final (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
}
// decrypt payload data
if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@ -3171,7 +3171,7 @@ DECLSPEC void luks_af_sha256_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_
luks_decrypt_sector_twofish_cbc_essiv256 (luks_bufs->ct_buf, pt_buf, sk1, lk1, sk2, lk2, 0);
}
}
else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@ -3208,7 +3208,7 @@ DECLSPEC void luks_af_sha256_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_
luks_decrypt_sector_twofish_cbc_plain256 (luks_bufs->ct_buf, pt_buf, sk1, lk1, 0);
}
}
else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@ -3304,7 +3304,7 @@ DECLSPEC void luks_af_sha512_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_
u32 mk[16] = { 0 };
if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@ -3330,10 +3330,10 @@ DECLSPEC void luks_af_sha512_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
luks_decrypt_sector_twofish_cbc_essiv128_mk_sha512 (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
luks_decrypt_sector_twofish_cbc_essiv128_mk_sha512 (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
luks_decrypt_sector_twofish_cbc_essiv128_mk_sha512_final (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
luks_decrypt_sector_twofish_cbc_essiv128_mk_sha512_final (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@ -3363,13 +3363,13 @@ DECLSPEC void luks_af_sha512_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
luks_decrypt_sector_twofish_cbc_essiv256_mk_sha512 (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
luks_decrypt_sector_twofish_cbc_essiv256_mk_sha512 (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
luks_decrypt_sector_twofish_cbc_essiv256_mk_sha512_final (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
luks_decrypt_sector_twofish_cbc_essiv256_mk_sha512_final (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
}
else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@ -3389,10 +3389,10 @@ DECLSPEC void luks_af_sha512_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
luks_decrypt_sector_twofish_cbc_plain128_mk_sha512 (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sector);
luks_decrypt_sector_twofish_cbc_plain128_mk_sha512 (luks_bufs->af_buf + offset, mk, sk1, lk1, sector);
}
luks_decrypt_sector_twofish_cbc_plain128_mk_sha512_final (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sector);
luks_decrypt_sector_twofish_cbc_plain128_mk_sha512_final (luks_bufs->af_buf + offset, mk, sk1, lk1, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@ -3416,13 +3416,13 @@ DECLSPEC void luks_af_sha512_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
luks_decrypt_sector_twofish_cbc_plain256_mk_sha512 (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sector);
luks_decrypt_sector_twofish_cbc_plain256_mk_sha512 (luks_bufs->af_buf + offset, mk, sk1, lk1, sector);
}
luks_decrypt_sector_twofish_cbc_plain256_mk_sha512_final (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sector);
luks_decrypt_sector_twofish_cbc_plain256_mk_sha512_final (luks_bufs->af_buf + offset, mk, sk1, lk1, sector);
}
}
else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@ -3451,10 +3451,10 @@ DECLSPEC void luks_af_sha512_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
luks_decrypt_sector_twofish_xts_plain256_mk_sha512 (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
luks_decrypt_sector_twofish_xts_plain256_mk_sha512 (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
luks_decrypt_sector_twofish_xts_plain256_mk_sha512_final (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
luks_decrypt_sector_twofish_xts_plain256_mk_sha512_final (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_512)
{
@ -3491,16 +3491,16 @@ DECLSPEC void luks_af_sha512_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
luks_decrypt_sector_twofish_xts_plain512_mk_sha512 (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
luks_decrypt_sector_twofish_xts_plain512_mk_sha512 (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
luks_decrypt_sector_twofish_xts_plain512_mk_sha512_final (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
luks_decrypt_sector_twofish_xts_plain512_mk_sha512_final (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
}
// decrypt payload data
if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@ -3549,7 +3549,7 @@ DECLSPEC void luks_af_sha512_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_
luks_decrypt_sector_twofish_cbc_essiv256 (luks_bufs->ct_buf, pt_buf, sk1, lk1, sk2, lk2, 0);
}
}
else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@ -3586,7 +3586,7 @@ DECLSPEC void luks_af_sha512_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_
luks_decrypt_sector_twofish_cbc_plain256 (luks_bufs->ct_buf, pt_buf, sk1, lk1, 0);
}
}
else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@ -3663,7 +3663,7 @@ DECLSPEC void luks_af_ripemd160_then_twofish_decrypt (GLOBAL_AS const luks_t *lu
u32 mk[16] = { 0 };
if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@ -3689,10 +3689,10 @@ DECLSPEC void luks_af_ripemd160_then_twofish_decrypt (GLOBAL_AS const luks_t *lu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
luks_decrypt_sector_twofish_cbc_essiv128_mk_ripemd160 (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
luks_decrypt_sector_twofish_cbc_essiv128_mk_ripemd160 (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
luks_decrypt_sector_twofish_cbc_essiv128_mk_ripemd160_final (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
luks_decrypt_sector_twofish_cbc_essiv128_mk_ripemd160_final (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@ -3722,13 +3722,13 @@ DECLSPEC void luks_af_ripemd160_then_twofish_decrypt (GLOBAL_AS const luks_t *lu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
luks_decrypt_sector_twofish_cbc_essiv256_mk_ripemd160 (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
luks_decrypt_sector_twofish_cbc_essiv256_mk_ripemd160 (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
luks_decrypt_sector_twofish_cbc_essiv256_mk_ripemd160_final (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
luks_decrypt_sector_twofish_cbc_essiv256_mk_ripemd160_final (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
}
else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@ -3748,10 +3748,10 @@ DECLSPEC void luks_af_ripemd160_then_twofish_decrypt (GLOBAL_AS const luks_t *lu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
luks_decrypt_sector_twofish_cbc_plain128_mk_ripemd160 (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sector);
luks_decrypt_sector_twofish_cbc_plain128_mk_ripemd160 (luks_bufs->af_buf + offset, mk, sk1, lk1, sector);
}
luks_decrypt_sector_twofish_cbc_plain128_mk_ripemd160_final (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sector);
luks_decrypt_sector_twofish_cbc_plain128_mk_ripemd160_final (luks_bufs->af_buf + offset, mk, sk1, lk1, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@ -3775,13 +3775,13 @@ DECLSPEC void luks_af_ripemd160_then_twofish_decrypt (GLOBAL_AS const luks_t *lu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
luks_decrypt_sector_twofish_cbc_plain256_mk_ripemd160 (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sector);
luks_decrypt_sector_twofish_cbc_plain256_mk_ripemd160 (luks_bufs->af_buf + offset, mk, sk1, lk1, sector);
}
luks_decrypt_sector_twofish_cbc_plain256_mk_ripemd160_final (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sector);
luks_decrypt_sector_twofish_cbc_plain256_mk_ripemd160_final (luks_bufs->af_buf + offset, mk, sk1, lk1, sector);
}
}
else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@ -3810,10 +3810,10 @@ DECLSPEC void luks_af_ripemd160_then_twofish_decrypt (GLOBAL_AS const luks_t *lu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
luks_decrypt_sector_twofish_xts_plain256_mk_ripemd160 (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
luks_decrypt_sector_twofish_xts_plain256_mk_ripemd160 (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
luks_decrypt_sector_twofish_xts_plain256_mk_ripemd160_final (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
luks_decrypt_sector_twofish_xts_plain256_mk_ripemd160_final (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_512)
{
@ -3850,16 +3850,16 @@ DECLSPEC void luks_af_ripemd160_then_twofish_decrypt (GLOBAL_AS const luks_t *lu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
luks_decrypt_sector_twofish_xts_plain512_mk_ripemd160 (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
luks_decrypt_sector_twofish_xts_plain512_mk_ripemd160 (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
luks_decrypt_sector_twofish_xts_plain512_mk_ripemd160_final (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
luks_decrypt_sector_twofish_xts_plain512_mk_ripemd160_final (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
}
// decrypt payload data
if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@ -3908,7 +3908,7 @@ DECLSPEC void luks_af_ripemd160_then_twofish_decrypt (GLOBAL_AS const luks_t *lu
luks_decrypt_sector_twofish_cbc_essiv256 (luks_bufs->ct_buf, pt_buf, sk1, lk1, sk2, lk2, 0);
}
}
else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@ -3945,7 +3945,7 @@ DECLSPEC void luks_af_ripemd160_then_twofish_decrypt (GLOBAL_AS const luks_t *lu
luks_decrypt_sector_twofish_cbc_plain256 (luks_bufs->ct_buf, pt_buf, sk1, lk1, 0);
}
}
else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{

@ -18,7 +18,9 @@
#include M2S(INCLUDE_PATH/inc_cipher_aes.cl)
#endif
#define LUKS_STRIPES 4000
#define LUKS_STRIPES 4000
#define LUKS_CT_LEN 512
#define LUKS_AF_MAX_LEN ((HC_LUKS_KEY_SIZE_512 / 8) * LUKS_STRIPES)
typedef enum hc_luks_hash_type
{
@ -48,9 +50,11 @@ typedef enum hc_luks_cipher_type
typedef enum hc_luks_cipher_mode
{
HC_LUKS_CIPHER_MODE_CBC_ESSIV = 1,
HC_LUKS_CIPHER_MODE_CBC_PLAIN = 2,
HC_LUKS_CIPHER_MODE_XTS_PLAIN = 3,
HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256 = 1,
HC_LUKS_CIPHER_MODE_CBC_PLAIN = 2,
HC_LUKS_CIPHER_MODE_CBC_PLAIN64 = 3,
HC_LUKS_CIPHER_MODE_XTS_PLAIN = 4,
HC_LUKS_CIPHER_MODE_XTS_PLAIN64 = 5,
} hc_luks_cipher_mode_t;
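The renamed and new enum values mirror dm-crypt's IV generator names: plain derives the sector IV from the sector number truncated to 32 bits, plain64 uses the full 64-bit sector number, and essiv:sha256 encrypts the sector number under a SHA-256 digest of the volume key. A minimal Python sketch of the plain/plain64 distinction (illustrative only, assuming standard dm-crypt semantics; not part of this patch):

import struct

def iv_plain(sector, iv_len=16):
    # "plain": sector number truncated to 32 bits, little-endian, zero-padded
    return struct.pack("<L", sector & 0xFFFFFFFF).ljust(iv_len, b"\x00")

def iv_plain64(sector, iv_len=16):
    # "plain64": full 64-bit little-endian sector number, zero-padded
    return struct.pack("<Q", sector & 0xFFFFFFFFFFFFFFFF).ljust(iv_len, b"\x00")

# identical below 2^32 sectors, presumably why collapsing plain64 onto plain
# went unnoticed for keyslot material
assert iv_plain(5) == iv_plain64(5)
assert iv_plain(2**32 + 5) != iv_plain64(2**32 + 5)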
@ -61,9 +65,10 @@ typedef struct luks
int cipher_type; // hc_luks_cipher_type_t
int cipher_mode; // hc_luks_cipher_mode_t
u32 ct_buf[128];
u32 ct_buf[LUKS_CT_LEN / 4];
u32 af_src_buf[((HC_LUKS_KEY_SIZE_512 / 8) * LUKS_STRIPES) / 4];
u32 af_buf[LUKS_AF_MAX_LEN / 4];
u32 af_len;
} luks_t;
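The resized struct fields are straight arithmetic over the new defines: ct_buf keeps its old 128-word capacity (512 bytes of payload ciphertext), while af_buf, the renamed af_src_buf, is sized for the worst case of a 512-bit key split across 4000 AF stripes. A quick sanity check (a sketch, not part of the patch):

LUKS_STRIPES = 4000
LUKS_CT_LEN = 512                      # bytes of payload ciphertext
HC_LUKS_KEY_SIZE_512 = 512             # key size in bits
LUKS_AF_MAX_LEN = (HC_LUKS_KEY_SIZE_512 // 8) * LUKS_STRIPES

assert LUKS_CT_LEN // 4 == 128         # same 128 u32 words as before
assert LUKS_AF_MAX_LEN == 256000       # 64 bytes * 4000 stripes
assert LUKS_AF_MAX_LEN // 4 == 64000   # af_buf element count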
@ -367,7 +372,7 @@ KERNEL_FQ void m14611_comp (KERN_ATTR_TMPS_ESALT (luks_tmp_t, luks_t))
{
if (hc_atomic_inc (&hashes_shown[DIGESTS_OFFSET_HOST]) == 0)
{
mark_hash (plains_buf, d_return_buf, SALT_POS_HOST, DIGESTS_CNT, 0, 0, gid, 0, 0, 0);
mark_hash (plains_buf, d_return_buf, SALT_POS_HOST, DIGESTS_CNT, 0, DIGESTS_OFFSET_HOST + 0, gid, 0, 0, 0);
}
}
}
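The mark_hash change in this hunk recurs in every *_comp kernel below: the digest position argument becomes DIGESTS_OFFSET_HOST + 0 instead of a literal 0, which (as far as the diff shows) attributes a cracked result to the correct digest slot in multi-hash sessions rather than always to the first one. The remaining kernel diffs repeat the same three edits: the cipher-mode enum extension, the af_src_buf to af_buf rename, and this offset fix.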

@ -18,7 +18,9 @@
#include M2S(INCLUDE_PATH/inc_cipher_serpent.cl)
#endif
#define LUKS_STRIPES 4000
#define LUKS_STRIPES 4000
#define LUKS_CT_LEN 512
#define LUKS_AF_MAX_LEN ((HC_LUKS_KEY_SIZE_512 / 8) * LUKS_STRIPES)
typedef enum hc_luks_hash_type
{
@ -48,9 +50,11 @@ typedef enum hc_luks_cipher_type
typedef enum hc_luks_cipher_mode
{
HC_LUKS_CIPHER_MODE_CBC_ESSIV = 1,
HC_LUKS_CIPHER_MODE_CBC_PLAIN = 2,
HC_LUKS_CIPHER_MODE_XTS_PLAIN = 3,
HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256 = 1,
HC_LUKS_CIPHER_MODE_CBC_PLAIN = 2,
HC_LUKS_CIPHER_MODE_CBC_PLAIN64 = 3,
HC_LUKS_CIPHER_MODE_XTS_PLAIN = 4,
HC_LUKS_CIPHER_MODE_XTS_PLAIN64 = 5,
} hc_luks_cipher_mode_t;
@ -61,9 +65,10 @@ typedef struct luks
int cipher_type; // hc_luks_cipher_type_t
int cipher_mode; // hc_luks_cipher_mode_t
u32 ct_buf[128];
u32 ct_buf[LUKS_CT_LEN / 4];
u32 af_src_buf[((HC_LUKS_KEY_SIZE_512 / 8) * LUKS_STRIPES) / 4];
u32 af_buf[LUKS_AF_MAX_LEN / 4];
u32 af_len;
} luks_t;
@ -314,7 +319,7 @@ KERNEL_FQ void m14612_comp (KERN_ATTR_TMPS_ESALT (luks_tmp_t, luks_t))
{
if (hc_atomic_inc (&hashes_shown[DIGESTS_OFFSET_HOST]) == 0)
{
mark_hash (plains_buf, d_return_buf, SALT_POS_HOST, DIGESTS_CNT, 0, 0, gid, 0, 0, 0);
mark_hash (plains_buf, d_return_buf, SALT_POS_HOST, DIGESTS_CNT, 0, DIGESTS_OFFSET_HOST + 0, gid, 0, 0, 0);
}
}
}
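Stepping back from the per-kernel hunks: every af_* loop/_final pair implements the LUKS1 anti-forensic merge over the decrypted key material, folding SECTOR_PER_AF - 1 diffused stripes into mk and XORing in the last stripe without diffusion. A self-contained sketch of AFmerge as the LUKS1 format describes it (hash and names are illustrative; the kernels interleave the per-sector decryption with this merge):

import hashlib
from struct import pack

def _diffuse(buf, hash_name):
    # hash every digest-sized block as H(be32(block index) || block),
    # truncating the last digest to the remaining length
    digest_size = hashlib.new(hash_name).digest_size
    out = bytearray()
    for idx, pos in enumerate(range(0, len(buf), digest_size)):
        chunk = buf[pos:pos + digest_size]
        out += hashlib.new(hash_name, pack(">I", idx) + chunk).digest()[:len(chunk)]
    return bytes(out)

def af_merge(af, key_len, stripes, hash_name="sha1"):
    d = bytes(key_len)
    for i in range(stripes - 1):                    # the kernels' per-sector loop
        stripe = af[i * key_len:(i + 1) * key_len]
        d = _diffuse(bytes(a ^ b for a, b in zip(d, stripe)), hash_name)
    last = af[(stripes - 1) * key_len:stripes * key_len]
    return bytes(a ^ b for a, b in zip(d, last))    # the *_final step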

@ -18,7 +18,9 @@
#include M2S(INCLUDE_PATH/inc_cipher_twofish.cl)
#endif
#define LUKS_STRIPES 4000
#define LUKS_STRIPES 4000
#define LUKS_CT_LEN 512
#define LUKS_AF_MAX_LEN ((HC_LUKS_KEY_SIZE_512 / 8) * LUKS_STRIPES)
typedef enum hc_luks_hash_type
{
@ -48,9 +50,11 @@ typedef enum hc_luks_cipher_type
typedef enum hc_luks_cipher_mode
{
HC_LUKS_CIPHER_MODE_CBC_ESSIV = 1,
HC_LUKS_CIPHER_MODE_CBC_PLAIN = 2,
HC_LUKS_CIPHER_MODE_XTS_PLAIN = 3,
HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256 = 1,
HC_LUKS_CIPHER_MODE_CBC_PLAIN = 2,
HC_LUKS_CIPHER_MODE_CBC_PLAIN64 = 3,
HC_LUKS_CIPHER_MODE_XTS_PLAIN = 4,
HC_LUKS_CIPHER_MODE_XTS_PLAIN64 = 5,
} hc_luks_cipher_mode_t;
@ -61,9 +65,10 @@ typedef struct luks
int cipher_type; // hc_luks_cipher_type_t
int cipher_mode; // hc_luks_cipher_mode_t
u32 ct_buf[128];
u32 ct_buf[LUKS_CT_LEN / 4];
u32 af_src_buf[((HC_LUKS_KEY_SIZE_512 / 8) * LUKS_STRIPES) / 4];
u32 af_buf[LUKS_AF_MAX_LEN / 4];
u32 af_len;
} luks_t;
@ -314,7 +319,7 @@ KERNEL_FQ void m14613_comp (KERN_ATTR_TMPS_ESALT (luks_tmp_t, luks_t))
{
if (hc_atomic_inc (&hashes_shown[DIGESTS_OFFSET_HOST]) == 0)
{
mark_hash (plains_buf, d_return_buf, SALT_POS_HOST, DIGESTS_CNT, 0, 0, gid, 0, 0, 0);
mark_hash (plains_buf, d_return_buf, SALT_POS_HOST, DIGESTS_CNT, 0, DIGESTS_OFFSET_HOST + 0, gid, 0, 0, 0);
}
}
}

@ -18,7 +18,9 @@
#include M2S(INCLUDE_PATH/inc_cipher_aes.cl)
#endif
#define LUKS_STRIPES 4000
#define LUKS_STRIPES 4000
#define LUKS_CT_LEN 512
#define LUKS_AF_MAX_LEN ((HC_LUKS_KEY_SIZE_512 / 8) * LUKS_STRIPES)
typedef enum hc_luks_hash_type
{
@ -48,9 +50,11 @@ typedef enum hc_luks_cipher_type
typedef enum hc_luks_cipher_mode
{
HC_LUKS_CIPHER_MODE_CBC_ESSIV = 1,
HC_LUKS_CIPHER_MODE_CBC_PLAIN = 2,
HC_LUKS_CIPHER_MODE_XTS_PLAIN = 3,
HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256 = 1,
HC_LUKS_CIPHER_MODE_CBC_PLAIN = 2,
HC_LUKS_CIPHER_MODE_CBC_PLAIN64 = 3,
HC_LUKS_CIPHER_MODE_XTS_PLAIN = 4,
HC_LUKS_CIPHER_MODE_XTS_PLAIN64 = 5,
} hc_luks_cipher_mode_t;
@ -61,9 +65,10 @@ typedef struct luks
int cipher_type; // hc_luks_cipher_type_t
int cipher_mode; // hc_luks_cipher_mode_t
u32 ct_buf[128];
u32 ct_buf[LUKS_CT_LEN / 4];
u32 af_src_buf[((HC_LUKS_KEY_SIZE_512 / 8) * LUKS_STRIPES) / 4];
u32 af_buf[LUKS_AF_MAX_LEN / 4];
u32 af_len;
} luks_t;
@ -406,7 +411,7 @@ KERNEL_FQ void m14621_comp (KERN_ATTR_TMPS_ESALT (luks_tmp_t, luks_t))
{
if (hc_atomic_inc (&hashes_shown[DIGESTS_OFFSET_HOST]) == 0)
{
mark_hash (plains_buf, d_return_buf, SALT_POS_HOST, DIGESTS_CNT, 0, 0, gid, 0, 0, 0);
mark_hash (plains_buf, d_return_buf, SALT_POS_HOST, DIGESTS_CNT, 0, DIGESTS_OFFSET_HOST + 0, gid, 0, 0, 0);
}
}
}

@ -18,7 +18,9 @@
#include M2S(INCLUDE_PATH/inc_cipher_serpent.cl)
#endif
#define LUKS_STRIPES 4000
#define LUKS_STRIPES 4000
#define LUKS_CT_LEN 512
#define LUKS_AF_MAX_LEN ((HC_LUKS_KEY_SIZE_512 / 8) * LUKS_STRIPES)
typedef enum hc_luks_hash_type
{
@ -48,9 +50,11 @@ typedef enum hc_luks_cipher_type
typedef enum hc_luks_cipher_mode
{
HC_LUKS_CIPHER_MODE_CBC_ESSIV = 1,
HC_LUKS_CIPHER_MODE_CBC_PLAIN = 2,
HC_LUKS_CIPHER_MODE_XTS_PLAIN = 3,
HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256 = 1,
HC_LUKS_CIPHER_MODE_CBC_PLAIN = 2,
HC_LUKS_CIPHER_MODE_CBC_PLAIN64 = 3,
HC_LUKS_CIPHER_MODE_XTS_PLAIN = 4,
HC_LUKS_CIPHER_MODE_XTS_PLAIN64 = 5,
} hc_luks_cipher_mode_t;
@ -61,9 +65,10 @@ typedef struct luks
int cipher_type; // hc_luks_cipher_type_t
int cipher_mode; // hc_luks_cipher_mode_t
u32 ct_buf[128];
u32 ct_buf[LUKS_CT_LEN / 4];
u32 af_src_buf[((HC_LUKS_KEY_SIZE_512 / 8) * LUKS_STRIPES) / 4];
u32 af_buf[LUKS_AF_MAX_LEN / 4];
u32 af_len;
} luks_t;
@ -353,7 +358,7 @@ KERNEL_FQ void m14622_comp (KERN_ATTR_TMPS_ESALT (luks_tmp_t, luks_t))
{
if (hc_atomic_inc (&hashes_shown[DIGESTS_OFFSET_HOST]) == 0)
{
mark_hash (plains_buf, d_return_buf, SALT_POS_HOST, DIGESTS_CNT, 0, 0, gid, 0, 0, 0);
mark_hash (plains_buf, d_return_buf, SALT_POS_HOST, DIGESTS_CNT, 0, DIGESTS_OFFSET_HOST + 0, gid, 0, 0, 0);
}
}
}

@ -18,7 +18,9 @@
#include M2S(INCLUDE_PATH/inc_cipher_twofish.cl)
#endif
#define LUKS_STRIPES 4000
#define LUKS_STRIPES 4000
#define LUKS_CT_LEN 512
#define LUKS_AF_MAX_LEN ((HC_LUKS_KEY_SIZE_512 / 8) * LUKS_STRIPES)
typedef enum hc_luks_hash_type
{
@ -48,9 +50,11 @@ typedef enum hc_luks_cipher_type
typedef enum hc_luks_cipher_mode
{
HC_LUKS_CIPHER_MODE_CBC_ESSIV = 1,
HC_LUKS_CIPHER_MODE_CBC_PLAIN = 2,
HC_LUKS_CIPHER_MODE_XTS_PLAIN = 3,
HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256 = 1,
HC_LUKS_CIPHER_MODE_CBC_PLAIN = 2,
HC_LUKS_CIPHER_MODE_CBC_PLAIN64 = 3,
HC_LUKS_CIPHER_MODE_XTS_PLAIN = 4,
HC_LUKS_CIPHER_MODE_XTS_PLAIN64 = 5,
} hc_luks_cipher_mode_t;
@ -61,9 +65,10 @@ typedef struct luks
int cipher_type; // hc_luks_cipher_type_t
int cipher_mode; // hc_luks_cipher_mode_t
u32 ct_buf[128];
u32 ct_buf[LUKS_CT_LEN / 4];
u32 af_src_buf[((HC_LUKS_KEY_SIZE_512 / 8) * LUKS_STRIPES) / 4];
u32 af_buf[LUKS_AF_MAX_LEN / 4];
u32 af_len;
} luks_t;
@ -353,7 +358,7 @@ KERNEL_FQ void m14623_comp (KERN_ATTR_TMPS_ESALT (luks_tmp_t, luks_t))
{
if (hc_atomic_inc (&hashes_shown[DIGESTS_OFFSET_HOST]) == 0)
{
mark_hash (plains_buf, d_return_buf, SALT_POS_HOST, DIGESTS_CNT, 0, 0, gid, 0, 0, 0);
mark_hash (plains_buf, d_return_buf, SALT_POS_HOST, DIGESTS_CNT, 0, DIGESTS_OFFSET_HOST + 0, gid, 0, 0, 0);
}
}
}

@ -18,7 +18,9 @@
#include M2S(INCLUDE_PATH/inc_cipher_aes.cl)
#endif
#define LUKS_STRIPES 4000
#define LUKS_STRIPES 4000
#define LUKS_CT_LEN 512
#define LUKS_AF_MAX_LEN ((HC_LUKS_KEY_SIZE_512 / 8) * LUKS_STRIPES)
typedef enum hc_luks_hash_type
{
@ -48,9 +50,11 @@ typedef enum hc_luks_cipher_type
typedef enum hc_luks_cipher_mode
{
HC_LUKS_CIPHER_MODE_CBC_ESSIV = 1,
HC_LUKS_CIPHER_MODE_CBC_PLAIN = 2,
HC_LUKS_CIPHER_MODE_XTS_PLAIN = 3,
HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256 = 1,
HC_LUKS_CIPHER_MODE_CBC_PLAIN = 2,
HC_LUKS_CIPHER_MODE_CBC_PLAIN64 = 3,
HC_LUKS_CIPHER_MODE_XTS_PLAIN = 4,
HC_LUKS_CIPHER_MODE_XTS_PLAIN64 = 5,
} hc_luks_cipher_mode_t;
@ -61,9 +65,10 @@ typedef struct luks
int cipher_type; // hc_luks_cipher_type_t
int cipher_mode; // hc_luks_cipher_mode_t
u32 ct_buf[128];
u32 ct_buf[LUKS_CT_LEN / 4];
u32 af_src_buf[((HC_LUKS_KEY_SIZE_512 / 8) * LUKS_STRIPES) / 4];
u32 af_buf[LUKS_AF_MAX_LEN / 4];
u32 af_len;
} luks_t;
@ -462,7 +467,7 @@ KERNEL_FQ void m14631_comp (KERN_ATTR_TMPS_ESALT (luks_tmp_t, luks_t))
{
if (hc_atomic_inc (&hashes_shown[DIGESTS_OFFSET_HOST]) == 0)
{
mark_hash (plains_buf, d_return_buf, SALT_POS_HOST, DIGESTS_CNT, 0, 0, gid, 0, 0, 0);
mark_hash (plains_buf, d_return_buf, SALT_POS_HOST, DIGESTS_CNT, 0, DIGESTS_OFFSET_HOST + 0, gid, 0, 0, 0);
}
}
}

@ -18,7 +18,9 @@
#include M2S(INCLUDE_PATH/inc_cipher_serpent.cl)
#endif
#define LUKS_STRIPES 4000
#define LUKS_STRIPES 4000
#define LUKS_CT_LEN 512
#define LUKS_AF_MAX_LEN ((HC_LUKS_KEY_SIZE_512 / 8) * LUKS_STRIPES)
typedef enum hc_luks_hash_type
{
@ -48,9 +50,11 @@ typedef enum hc_luks_cipher_type
typedef enum hc_luks_cipher_mode
{
HC_LUKS_CIPHER_MODE_CBC_ESSIV = 1,
HC_LUKS_CIPHER_MODE_CBC_PLAIN = 2,
HC_LUKS_CIPHER_MODE_XTS_PLAIN = 3,
HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256 = 1,
HC_LUKS_CIPHER_MODE_CBC_PLAIN = 2,
HC_LUKS_CIPHER_MODE_CBC_PLAIN64 = 3,
HC_LUKS_CIPHER_MODE_XTS_PLAIN = 4,
HC_LUKS_CIPHER_MODE_XTS_PLAIN64 = 5,
} hc_luks_cipher_mode_t;
@ -61,9 +65,10 @@ typedef struct luks
int cipher_type; // hc_luks_cipher_type_t
int cipher_mode; // hc_luks_cipher_mode_t
u32 ct_buf[128];
u32 ct_buf[LUKS_CT_LEN / 4];
u32 af_src_buf[((HC_LUKS_KEY_SIZE_512 / 8) * LUKS_STRIPES) / 4];
u32 af_buf[LUKS_AF_MAX_LEN / 4];
u32 af_len;
} luks_t;
@ -409,7 +414,7 @@ KERNEL_FQ void m14632_comp (KERN_ATTR_TMPS_ESALT (luks_tmp_t, luks_t))
{
if (hc_atomic_inc (&hashes_shown[DIGESTS_OFFSET_HOST]) == 0)
{
mark_hash (plains_buf, d_return_buf, SALT_POS_HOST, DIGESTS_CNT, 0, 0, gid, 0, 0, 0);
mark_hash (plains_buf, d_return_buf, SALT_POS_HOST, DIGESTS_CNT, 0, DIGESTS_OFFSET_HOST + 0, gid, 0, 0, 0);
}
}
}

@ -18,7 +18,9 @@
#include M2S(INCLUDE_PATH/inc_cipher_twofish.cl)
#endif
#define LUKS_STRIPES 4000
#define LUKS_STRIPES 4000
#define LUKS_CT_LEN 512
#define LUKS_AF_MAX_LEN ((HC_LUKS_KEY_SIZE_512 / 8) * LUKS_STRIPES)
typedef enum hc_luks_hash_type
{
@ -48,9 +50,11 @@ typedef enum hc_luks_cipher_type
typedef enum hc_luks_cipher_mode
{
HC_LUKS_CIPHER_MODE_CBC_ESSIV = 1,
HC_LUKS_CIPHER_MODE_CBC_PLAIN = 2,
HC_LUKS_CIPHER_MODE_XTS_PLAIN = 3,
HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256 = 1,
HC_LUKS_CIPHER_MODE_CBC_PLAIN = 2,
HC_LUKS_CIPHER_MODE_CBC_PLAIN64 = 3,
HC_LUKS_CIPHER_MODE_XTS_PLAIN = 4,
HC_LUKS_CIPHER_MODE_XTS_PLAIN64 = 5,
} hc_luks_cipher_mode_t;
@ -61,9 +65,10 @@ typedef struct luks
int cipher_type; // hc_luks_cipher_type_t
int cipher_mode; // hc_luks_cipher_mode_t
u32 ct_buf[128];
u32 ct_buf[LUKS_CT_LEN / 4];
u32 af_src_buf[((HC_LUKS_KEY_SIZE_512 / 8) * LUKS_STRIPES) / 4];
u32 af_buf[LUKS_AF_MAX_LEN / 4];
u32 af_len;
} luks_t;
@ -409,7 +414,7 @@ KERNEL_FQ void m14633_comp (KERN_ATTR_TMPS_ESALT (luks_tmp_t, luks_t))
{
if (hc_atomic_inc (&hashes_shown[DIGESTS_OFFSET_HOST]) == 0)
{
mark_hash (plains_buf, d_return_buf, SALT_POS_HOST, DIGESTS_CNT, 0, 0, gid, 0, 0, 0);
mark_hash (plains_buf, d_return_buf, SALT_POS_HOST, DIGESTS_CNT, 0, DIGESTS_OFFSET_HOST + 0, gid, 0, 0, 0);
}
}
}

@ -18,7 +18,9 @@
#include M2S(INCLUDE_PATH/inc_cipher_aes.cl)
#endif
#define LUKS_STRIPES 4000
#define LUKS_STRIPES 4000
#define LUKS_CT_LEN 512
#define LUKS_AF_MAX_LEN ((HC_LUKS_KEY_SIZE_512 / 8) * LUKS_STRIPES)
typedef enum hc_luks_hash_type
{
@ -48,9 +50,11 @@ typedef enum hc_luks_cipher_type
typedef enum hc_luks_cipher_mode
{
HC_LUKS_CIPHER_MODE_CBC_ESSIV = 1,
HC_LUKS_CIPHER_MODE_CBC_PLAIN = 2,
HC_LUKS_CIPHER_MODE_XTS_PLAIN = 3,
HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256 = 1,
HC_LUKS_CIPHER_MODE_CBC_PLAIN = 2,
HC_LUKS_CIPHER_MODE_CBC_PLAIN64 = 3,
HC_LUKS_CIPHER_MODE_XTS_PLAIN = 4,
HC_LUKS_CIPHER_MODE_XTS_PLAIN64 = 5,
} hc_luks_cipher_mode_t;
@ -61,9 +65,10 @@ typedef struct luks
int cipher_type; // hc_luks_cipher_type_t
int cipher_mode; // hc_luks_cipher_mode_t
u32 ct_buf[128];
u32 ct_buf[LUKS_CT_LEN / 4];
u32 af_src_buf[((HC_LUKS_KEY_SIZE_512 / 8) * LUKS_STRIPES) / 4];
u32 af_buf[LUKS_AF_MAX_LEN / 4];
u32 af_len;
} luks_t;
@ -367,7 +372,7 @@ KERNEL_FQ void m14641_comp (KERN_ATTR_TMPS_ESALT (luks_tmp_t, luks_t))
{
if (hc_atomic_inc (&hashes_shown[DIGESTS_OFFSET_HOST]) == 0)
{
mark_hash (plains_buf, d_return_buf, SALT_POS_HOST, DIGESTS_CNT, 0, 0, gid, 0, 0, 0);
mark_hash (plains_buf, d_return_buf, SALT_POS_HOST, DIGESTS_CNT, 0, DIGESTS_OFFSET_HOST + 0, gid, 0, 0, 0);
}
}
}

@ -18,7 +18,9 @@
#include M2S(INCLUDE_PATH/inc_cipher_serpent.cl)
#endif
#define LUKS_STRIPES 4000
#define LUKS_STRIPES 4000
#define LUKS_CT_LEN 512
#define LUKS_AF_MAX_LEN ((HC_LUKS_KEY_SIZE_512 / 8) * LUKS_STRIPES)
typedef enum hc_luks_hash_type
{
@ -48,9 +50,11 @@ typedef enum hc_luks_cipher_type
typedef enum hc_luks_cipher_mode
{
HC_LUKS_CIPHER_MODE_CBC_ESSIV = 1,
HC_LUKS_CIPHER_MODE_CBC_PLAIN = 2,
HC_LUKS_CIPHER_MODE_XTS_PLAIN = 3,
HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256 = 1,
HC_LUKS_CIPHER_MODE_CBC_PLAIN = 2,
HC_LUKS_CIPHER_MODE_CBC_PLAIN64 = 3,
HC_LUKS_CIPHER_MODE_XTS_PLAIN = 4,
HC_LUKS_CIPHER_MODE_XTS_PLAIN64 = 5,
} hc_luks_cipher_mode_t;
@ -61,9 +65,10 @@ typedef struct luks
int cipher_type; // hc_luks_cipher_type_t
int cipher_mode; // hc_luks_cipher_mode_t
u32 ct_buf[128];
u32 ct_buf[LUKS_CT_LEN / 4];
u32 af_src_buf[((HC_LUKS_KEY_SIZE_512 / 8) * LUKS_STRIPES) / 4];
u32 af_buf[LUKS_AF_MAX_LEN / 4];
u32 af_len;
} luks_t;
@ -314,7 +319,7 @@ KERNEL_FQ void m14642_comp (KERN_ATTR_TMPS_ESALT (luks_tmp_t, luks_t))
{
if (hc_atomic_inc (&hashes_shown[DIGESTS_OFFSET_HOST]) == 0)
{
mark_hash (plains_buf, d_return_buf, SALT_POS_HOST, DIGESTS_CNT, 0, 0, gid, 0, 0, 0);
mark_hash (plains_buf, d_return_buf, SALT_POS_HOST, DIGESTS_CNT, 0, DIGESTS_OFFSET_HOST + 0, gid, 0, 0, 0);
}
}
}

@ -18,7 +18,9 @@
#include M2S(INCLUDE_PATH/inc_cipher_twofish.cl)
#endif
#define LUKS_STRIPES 4000
#define LUKS_STRIPES 4000
#define LUKS_CT_LEN 512
#define LUKS_AF_MAX_LEN ((HC_LUKS_KEY_SIZE_512 / 8) * LUKS_STRIPES)
typedef enum hc_luks_hash_type
{
@ -48,9 +50,11 @@ typedef enum hc_luks_cipher_type
typedef enum hc_luks_cipher_mode
{
HC_LUKS_CIPHER_MODE_CBC_ESSIV = 1,
HC_LUKS_CIPHER_MODE_CBC_PLAIN = 2,
HC_LUKS_CIPHER_MODE_XTS_PLAIN = 3,
HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256 = 1,
HC_LUKS_CIPHER_MODE_CBC_PLAIN = 2,
HC_LUKS_CIPHER_MODE_CBC_PLAIN64 = 3,
HC_LUKS_CIPHER_MODE_XTS_PLAIN = 4,
HC_LUKS_CIPHER_MODE_XTS_PLAIN64 = 5,
} hc_luks_cipher_mode_t;
@ -61,9 +65,10 @@ typedef struct luks
int cipher_type; // hc_luks_cipher_type_t
int cipher_mode; // hc_luks_cipher_mode_t
u32 ct_buf[128];
u32 ct_buf[LUKS_CT_LEN / 4];
u32 af_src_buf[((HC_LUKS_KEY_SIZE_512 / 8) * LUKS_STRIPES) / 4];
u32 af_buf[LUKS_AF_MAX_LEN / 4];
u32 af_len;
} luks_t;
@ -314,7 +319,7 @@ KERNEL_FQ void m14643_comp (KERN_ATTR_TMPS_ESALT (luks_tmp_t, luks_t))
{
if (hc_atomic_inc (&hashes_shown[DIGESTS_OFFSET_HOST]) == 0)
{
mark_hash (plains_buf, d_return_buf, SALT_POS_HOST, DIGESTS_CNT, 0, 0, gid, 0, 0, 0);
mark_hash (plains_buf, d_return_buf, SALT_POS_HOST, DIGESTS_CNT, 0, DIGESTS_OFFSET_HOST + 0, gid, 0, 0, 0);
}
}
}

@ -118,6 +118,8 @@
- Modules: New VeraCrypt modules (29411-29483) which do not use `module_hash_binary_parse` to get data from containers anymore (use new tool `tools/veracrypt2hashcat.py`).
- Modules: Added suffix *legacy* to old VeraCrypt modules (13711-13783).
- Terminal: Increased size of hash name column in `--help` and `--identify` options.
- Modules: New LUKS v1 modules (29511-29543) which do not use `module_hash_binary_parse` to get data from containers anymore (use new tool `tools/luks2hashcat.py`).
- Modules: Renamed old LUKS module into LUKS v1 and added suffix *legacy* (14600).
* changes v6.2.4 -> v6.2.5

@ -18,7 +18,7 @@ static const u32 DGST_POS2 = 2;
static const u32 DGST_POS3 = 3;
static const u32 DGST_SIZE = DGST_SIZE_4_16;
static const u32 HASH_CATEGORY = HASH_CATEGORY_FDE;
static const char *HASH_NAME = "LUKS";
static const char *HASH_NAME = "LUKS v1 (legacy)";
static const u64 KERN_TYPE = 14611; // this gets overwritten later instead of in benchmark
static const u32 OPTI_TYPE = OPTI_TYPE_ZERO_BYTE
| OPTI_TYPE_SLOW_HASH_SIMD_LOOP;
@ -145,9 +145,11 @@ typedef enum hc_luks_cipher_type
typedef enum hc_luks_cipher_mode
{
HC_LUKS_CIPHER_MODE_CBC_ESSIV = 1,
HC_LUKS_CIPHER_MODE_CBC_PLAIN = 2,
HC_LUKS_CIPHER_MODE_XTS_PLAIN = 3,
HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256 = 1,
HC_LUKS_CIPHER_MODE_CBC_PLAIN = 2,
HC_LUKS_CIPHER_MODE_CBC_PLAIN64 = 3,
HC_LUKS_CIPHER_MODE_XTS_PLAIN = 4,
HC_LUKS_CIPHER_MODE_XTS_PLAIN64 = 5,
} hc_luks_cipher_mode_t;
@ -160,7 +162,8 @@ typedef struct luks
u32 ct_buf[128];
u32 af_src_buf[((HC_LUKS_KEY_SIZE_512 / 8) * LUKS_STRIPES) / 4];
u32 af_buf[((HC_LUKS_KEY_SIZE_512 / 8) * LUKS_STRIPES) / 4];
u32 af_len;
} luks_t;
@ -432,7 +435,7 @@ int module_hash_decode (MAYBE_UNUSED const hashconfig_t *hashconfig, MAYBE_UNUSE
if (strcmp (hdr.cipherMode, "cbc-essiv:sha256") == 0)
{
luks->cipher_mode = HC_LUKS_CIPHER_MODE_CBC_ESSIV;
luks->cipher_mode = HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256;
}
else if (strcmp (hdr.cipherMode, "cbc-plain") == 0)
{
@ -440,7 +443,7 @@ int module_hash_decode (MAYBE_UNUSED const hashconfig_t *hashconfig, MAYBE_UNUSE
}
else if (strcmp (hdr.cipherMode, "cbc-plain64") == 0)
{
luks->cipher_mode = HC_LUKS_CIPHER_MODE_CBC_PLAIN;
luks->cipher_mode = HC_LUKS_CIPHER_MODE_CBC_PLAIN64;
}
else if (strcmp (hdr.cipherMode, "xts-plain") == 0)
{
@ -448,7 +451,7 @@ int module_hash_decode (MAYBE_UNUSED const hashconfig_t *hashconfig, MAYBE_UNUSE
}
else if (strcmp (hdr.cipherMode, "xts-plain64") == 0)
{
luks->cipher_mode = HC_LUKS_CIPHER_MODE_XTS_PLAIN;
luks->cipher_mode = HC_LUKS_CIPHER_MODE_XTS_PLAIN64;
}
else
{
@ -556,7 +559,7 @@ int module_hash_decode (MAYBE_UNUSED const hashconfig_t *hashconfig, MAYBE_UNUSE
return (PARSER_LUKS_FILE_SIZE);
}
const size_t nread2 = hc_fread (luks->af_src_buf, keyBytes, stripes, &fp);
const size_t nread2 = hc_fread (luks->af_buf, keyBytes, stripes, &fp);
if (nread2 != stripes)
{
@ -565,6 +568,8 @@ int module_hash_decode (MAYBE_UNUSED const hashconfig_t *hashconfig, MAYBE_UNUSE
return (PARSER_LUKS_FILE_SIZE);
}
luks->af_len = keyBytes * stripes;
// finally, copy some encrypted payload data for entropy check
const u32 payloadOffset = byte_swap_32 (hdr.payloadOffset);
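Host side, module_hash_decode now gives each cipherMode string from the LUKS1 phdr its own enum value instead of collapsing plain64 onto plain (and likewise for XTS), and reads the AF material into the renamed af_buf while recording its true length in af_len. The complete string-to-enum mapping, restated as a sketch (the module itself uses strcmp chains):

LUKS1_CIPHER_MODES = {
    "cbc-essiv:sha256": 1,  # HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256
    "cbc-plain":        2,  # HC_LUKS_CIPHER_MODE_CBC_PLAIN
    "cbc-plain64":      3,  # HC_LUKS_CIPHER_MODE_CBC_PLAIN64
    "xts-plain":        4,  # HC_LUKS_CIPHER_MODE_XTS_PLAIN
    "xts-plain64":      5,  # HC_LUKS_CIPHER_MODE_XTS_PLAIN64
}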

File diff suppressed because one or more lines are too long (12 files)
@ -0,0 +1,328 @@
#!/usr/bin/env python3
import sys
from argparse import ArgumentParser
from collections import namedtuple
from dataclasses import dataclass
from os import SEEK_SET
from struct import Struct
from sys import stderr
from typing import List
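# StrEnum only exists since Python 3.11; older interpreters get an
# equivalent str/Enum mixin so auto() members stringify to their lowercase names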
try:
from enum import auto, IntEnum, StrEnum
except ImportError:
from enum import auto, Enum, IntEnum
class StrEnum(str, Enum):
def _generate_next_value_(name, start, count, last_values):
return name.lower()
__str__ = str.__str__
__format__ = str.__format__
# consts
SIGNATURE = "$luks$"
SECTOR_SIZE = 512
# utils
def bytes_to_str(value):
"""
Convert encoded padded bytes string into str.
"""
return value.rstrip(b"\0").decode()
# pre-header
TmpHeaderPre = namedtuple(
"TmpHeaderPre",
(
"magic",
"version",
),
)
# version 1
TmpKeyVersion1 = namedtuple(
"TmpKeyVersion1",
(
"active",
"iterations",
"salt",
"material_offset",
"stripes",
),
)
@dataclass(init=False)
class KeyVersion1:
class Active(IntEnum):
ENABLED = 0x00AC71F3
DISABLED = 0x0000DEAD
ENABLED_OLD = 0xCAFE
DISABLED_OLD = 0x0000
active: Active
iterations: int
salt: bytes
af: bytes
def __init__(self, active, iterations, salt, af):
self.active = self.Active(active)
assert iterations >= 0, "key iterations cannot be less than zero"
self.iterations = iterations
self.salt = salt
self.af = af
TmpHeaderVersion1 = namedtuple(
"TmpHeaderVersion1",
(
"magic",
"version",
"cipher",
"mode",
"hash",
"payload_offset",
"key_bytes",
"digest",
"salt",
"iterations",
"uuid",
"keys",
),
)
@dataclass(init=False)
class HeaderVersion1:
MAGIC = b"LUKS\xba\xbe"
VERSION = 0x0001
class Cipher(StrEnum):
AES = auto()
TWOFISH = auto()
SERPENT = auto()
class Mode(StrEnum):
CBC_ESSIV_SHA256 = "cbc-essiv:sha256"
CBC_PLAIN = "cbc-plain"
CBC_PLAIN64 = "cbc-plain64"
XTS_PLAIN = "xts-plain"
XTS_PLAIN64 = "xts-plain64"
class Hash(StrEnum):
RIPEMD160 = auto()
SHA1 = auto()
SHA256 = auto()
SHA512 = auto()
WHIRLPOOL = auto()
class KeySize(IntEnum):
SIZE_128 = 128
SIZE_256 = 256
SIZE_512 = 512
magic: bytes
version: int
cipher: Cipher
mode: Mode
hash: Hash
payload: bytes
key_size: KeySize
digest: bytes
salt: bytes
iterations: int
uuid: str
keys: List[KeyVersion1]
def __init__(self, magic, version, cipher, mode, hash, payload, key_size, digest, salt, iterations, uuid, keys):
assert magic == self.MAGIC, "Invalid magic bytes"
self.magic = magic
assert version == self.VERSION, "Invalid version"
self.version = version
if isinstance(cipher, bytes):
try:
cipher = bytes_to_str(cipher)
except UnicodeDecodeError as e:
raise ValueError("Cannot decode cipher") from e
self.cipher = self.Cipher(cipher)
if isinstance(mode, bytes):
try:
mode = bytes_to_str(mode)
except UnicodeDecodeError as e:
raise ValueError("Cannot decode mode") from e
self.mode = self.Mode(mode)
if isinstance(hash, bytes):
try:
hash = bytes_to_str(hash)
except UnicodeDecodeError as e:
raise ValueError("Cannot decode hash") from e
self.hash = self.Hash(hash)
self.payload = payload
self.key_size = self.KeySize(key_size)
self.digest = digest
self.salt = salt
assert iterations > 0, "Iterations cannot be less than or equal to zero"
self.iterations = iterations
if isinstance(uuid, bytes):
try:
uuid = bytes_to_str(uuid)
except UnicodeDecodeError as e:
raise ValueError("Cannot decode UUID") from e
self.uuid = uuid
if all(isinstance(key, tuple) for key in keys):
keys = [KeyVersion1(*key) for key in keys]
elif all(isinstance(key, dict) for key in keys):
keys = [KeyVersion1(**key) for key in keys]
assert all(isinstance(key, KeyVersion1) for key in keys), "All keys must be KeyVersion1 objects"
self.keys = keys
def extract_version1(file):
# consts
KEYS_COUNT = 8
PADDING_LENGTH = 432
PAYLOAD_SIZE = 512 # sizeof (u32) * 128
# prepare structs
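# key slot, big-endian: active (u32), iterations (u32), salt (32 bytes),
# key material offset in sectors (u32), stripes (u32)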
key_struct = Struct(">LL32sLL")
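# phdr, big-endian: magic (6 bytes), version (u16), cipher, mode and hash
# (32 bytes each), payload offset in sectors (u32), key bytes (u32),
# digest (20 bytes), salt (32 bytes), iterations (u32), uuid (40 bytes),
# 8 packed key slots, then padding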
header_struct = Struct(
">6sH32s32s32sLL20s32sL40s"
+ str(key_struct.size * KEYS_COUNT)
+ "s"
+ str(PADDING_LENGTH)
+ "x"
)
# read header
header = file.read(header_struct.size)
assert len(header) == header_struct.size, "File contains less data than needed"
# convert bytes into temporary header
header = header_struct.unpack(header)
header = TmpHeaderVersion1(*header)
# convert bytes into temporary keys
tmp_keys = [TmpKeyVersion1(*key) for key in key_struct.iter_unpack(header.keys)]
# read keys' af
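# (material_offset counts 512-byte sectors from the start of the container)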
keys = []
for key in tmp_keys:
file.seek(key.material_offset * SECTOR_SIZE, SEEK_SET)
af = file.read(header.key_bytes * key.stripes)
assert len(af) == (header.key_bytes * key.stripes), "File contains less data than needed"
key = KeyVersion1(key.active, key.iterations, key.salt, af)
keys.append(key)
# read payload
file.seek(header.payload_offset * SECTOR_SIZE, SEEK_SET)
payload = file.read(PAYLOAD_SIZE)
assert len(payload) == PAYLOAD_SIZE, "File contains less data than needed"
# convert into header
header = HeaderVersion1(
header.magic,
header.version,
header.cipher,
header.mode,
header.hash,
payload,
header.key_bytes * 8,
header.digest,
header.salt,
header.iterations,
header.uuid,
keys,
)
# check for any active key
for key in header.keys:
if key.active not in [KeyVersion1.Active.ENABLED, KeyVersion1.Active.ENABLED_OLD]:
continue
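# build the output line:
# $luks$<version>$<hash>$<cipher>$<mode>$<key bits>$<iterations>$<salt hex>$<AF hex>$<payload hex>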
hash = SIGNATURE + "$".join(
map(
str,
[
header.version,
header.hash,
header.cipher,
header.mode,
int(header.key_size),
key.iterations,
key.salt.hex(),
key.af.hex(),
header.payload.hex(),
],
)
)
print(hash)
break
else:
# all keys are disabled
raise ValueError("All keys are disabled")
# main
def main(args):
# prepare parser and parse args
parser = ArgumentParser(description="luks2hashcat extraction tool")
parser.add_argument("path", type=str, help="path to LUKS container")
args = parser.parse_args(args)
# prepare struct
header_struct = Struct(">6sH")
with open(args.path, "rb") as file:
# read pre header
header = file.read(header_struct.size)
assert len(header) == header_struct.size, "File contains less data than needed"
# convert bytes into temporary pre header
header = header_struct.unpack(header)
header = TmpHeaderPre(*header)
# check magic bytes
magic_bytes = {
HeaderVersion1.MAGIC,
}
assert header.magic in magic_bytes, "Improper magic bytes"
# back to start of the file
file.seek(0, SEEK_SET)
# extract with proper function
try:
mapping = {
HeaderVersion1.VERSION: extract_version1,
}
extract = mapping[header.version]
extract(file)
except KeyError as e:
raise ValueError("Unsupported version") from e
if __name__ == "__main__":
try:
main(sys.argv[1:])
except IOError as e:
print('Error:', e.strerror, file=stderr)
except (AssertionError, ValueError) as e:
print('Error:', e, file=stderr)
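Typical use of the tool above, with a container name borrowed from the test suite below (any LUKS v1 container path works): ./tools/luks2hashcat.py hashcat_sha1_aes_cbc-essiv_128.luks > container.hash, then feed the resulting file to hashcat with the matching 295xx mode.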

@ -24,8 +24,8 @@ NEVER_CRACK="9720 9820 14900 18100 27800"
# List of modes which return a different output hash format than the input hash format
NOCHECK_ENCODING="16800 22000"
# LUKS mode has test containers
LUKS_MODE="14600"
# List of LUKS modes which have test containers
LUKS_MODES="14600 29511 29512 29513 29521 29522 29523 29531 29532 29533 29541 29542 29543"
# Cryptoloop mode which have test containers
CL_MODES="14511 14512 14513 14521 14522 14523 14531 14532 14533 14541 14542 14543 14551 14552 14553"
@ -33,7 +33,7 @@ CL_MODES="14511 14512 14513 14521 14522 14523 14531 14532 14533 14541 14542 1454
# missing hash types: 5200
HASH_TYPES=$(ls "${TDIR}"/test_modules/*.pm | sed -E 's/.*m0*([0-9]+).pm/\1/')
HASH_TYPES="${HASH_TYPES} ${TC_MODES} ${VC_MODES} ${LUKS_MODE} ${CL_MODES}"
HASH_TYPES="${HASH_TYPES} ${TC_MODES} ${VC_MODES} ${LUKS_MODES} ${CL_MODES}"
HASH_TYPES=$(echo -n "${HASH_TYPES}" | tr ' ' '\n' | sort -u -n | tr '\n' ' ')
VECTOR_WIDTHS="1 2 4 8 16"
@ -187,7 +187,7 @@ function init()
return 0
fi
if [ "${hash_type}" -eq ${LUKS_MODE} ]; then
if is_in_array "${hash_type}" ${LUKS_MODES}; then
which 7z &>/dev/null
if [ $? -eq 1 ]; then
echo "ATTENTION: 7z is missing. Skipping download and extract luks test files."
@ -3170,17 +3170,223 @@ function luks_test()
attackType=3
fi
mkdir -p "${OUTD}/luks_tests"
chmod u+x "${TDIR}/luks2hashcat.py"
for luksMode in "cbc-essiv" "cbc-plain64" "xts-plain64"; do
for luksKeySize in "128" "256" "512"; do
CMD="unset"
# filter out not supported combinations:
case "${luksKeySize}" in
128)
case "${luksMode}" in
cbc-essiv|cbc-plain64)
;;
*)
continue
;;
esac
;;
256)
case "${luksMode}" in
cbc-essiv|cbc-plain64|xts-plain64)
;;
*)
continue
;;
esac
;;
512)
case "${luksMode}" in
xts-plain64)
;;
*)
continue
;;
esac
;;
esac
case $hashType in
29511)
luksHash="sha1"
luksCipher="aes"
;;
29512)
luksHash="sha1"
luksCipher="serpent"
;;
29513)
luksHash="sha1"
luksCipher="twofish"
;;
29521)
luksHash="sha256"
luksCipher="aes"
;;
29522)
luksHash="sha256"
luksCipher="serpent"
;;
29523)
luksHash="sha256"
luksCipher="twofish"
;;
29531)
luksHash="sha512"
luksCipher="aes"
;;
29532)
luksHash="sha512"
luksCipher="serpent"
;;
29533)
luksHash="sha512"
luksCipher="twofish"
;;
29541)
luksHash="ripemd160"
luksCipher="aes"
;;
29542)
luksHash="ripemd160"
luksCipher="serpent"
;;
29543)
luksHash="ripemd160"
luksCipher="twofish"
;;
esac
luksMainMask="?l"
luksMask="${luksMainMask}"
# for combination or hybrid attacks
luksPassPartFile1="${OUTD}/${hashType}_dict1"
luksPassPartFile2="${OUTD}/${hashType}_dict2"
luksContainer="${TDIR}/luks_tests/hashcat_${luksHash}_${luksCipher}_${luksMode}_${luksKeySize}.luks"
luksHashFile="${OUTD}/luks_tests/hashcat_${luksHash}_${luksCipher}_${luksMode}_${luksKeySize}.hash"
case $attackType in
0)
CMD="./${BIN} ${OPTS} -a 0 -m ${hashType} '${luksHashFile}' '${TDIR}/luks_tests/pw'"
;;
1)
luksPassPart1Len=$((${#LUKS_PASSWORD} / 2))
luksPassPart2Start=$((luksPassPart1Len + 1))
echo "${LUKS_PASSWORD}" | cut -c-${luksPassPart1Len} > "${luksPassPartFile1}" 2>/dev/null
echo "${LUKS_PASSWORD}" | cut -c${luksPassPart2Start}- > "${luksPassPartFile2}" 2>/dev/null
CMD="./${BIN} ${OPTS} -a 6 -m ${hashType} '${luksHashFile}' ${luksPassPartFile1} ${luksPassPartFile2}"
;;
3)
luksMaskFixedLen=$((${#LUKS_PASSWORD} - 1))
luksMask="$(echo "${LUKS_PASSWORD}" | cut -c-${luksMaskFixedLen} 2>/dev/null)"
luksMask="${luksMask}${luksMainMask}"
CMD="./${BIN} ${OPTS} -a 3 -m ${hashType} '${luksHashFile}' ${luksMask}"
;;
6)
luksPassPart1Len=$((${#LUKS_PASSWORD} - 1))
echo "${LUKS_PASSWORD}" | cut -c-${luksPassPart1Len} > "${luksPassPartFile1}" 2>/dev/null
CMD="./${BIN} ${OPTS} -a 6 -m ${hashType} '${luksHashFile}' ${luksPassPartFile1} ${luksMask}"
;;
7)
echo "${LUKS_PASSWORD}" | cut -c2- > "${luksPassPartFile1}" 2>/dev/null
CMD="./${BIN} ${OPTS} -a 7 -m ${hashType} '${luksHashFile}' ${luksMask} ${luksPassPartFile1}"
;;
esac
eval \"${TDIR}/luks2hashcat.py\" \"${luksContainer}\" > "${luksHashFile}"
luksMode="${luksHash}-${luksCipher}-${luksMode}-${luksKeySize}"
if [ -n "${CMD}" ] && [ ${#CMD} -gt 5 ]; then
echo "> Testing hash type ${hashType} with attack mode ${attackType}, markov ${MARKOV}, single hash, Device-Type ${DEVICE_TYPE}, Kernel-Type ${KERNEL_TYPE}, Vector-Width ${VECTOR}, Luks-Mode ${luksMode}" >> "${OUTD}/logfull.txt" 2>> "${OUTD}/logfull.txt"
if [ -f "${luks_first_test_file}" ]; then
output=$(eval ${CMD} 2>&1)
ret=${?}
echo "${output}" >> "${OUTD}/logfull.txt"
else
ret=30
fi
e_ce=0
e_rs=0
e_to=0
e_nf=0
e_nm=0
cnt=0
status ${ret}
cnt=1
msg="OK"
if [ "${e_ce}" -ne 0 ]; then
msg="Compare Error"
elif [ "${e_rs}" -ne 0 ]; then
msg="Skip"
elif [ "${e_nf}" -ne 0 ] || [ "${e_nm}" -ne 0 ]; then
msg="Error"
elif [ "${e_to}" -ne 0 ]; then
msg="Warning"
fi
echo "[ ${OUTD} ] [ Type ${hash_type}, Attack ${attackType}, Mode single, Device-Type ${DEVICE_TYPE}, Kernel-Type ${KERNEL_TYPE}, Vector-Width ${VECTOR}, Luks-Mode ${luksMode} ] > $msg : ${e_nf}/${cnt} not found, ${e_nm}/${cnt} not matched, ${e_to}/${cnt} timeout, ${e_rs}/${cnt} skipped"
status ${ret}
fi
done
done
}
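The nested case statements in luks_test encode two small tables: which cipher-mode/key-size pairs have test containers, and how the 295xx mode numbers decompose into hash and cipher. Restated compactly as a sketch (data lifted from the function above):

# cipher mode -> key sizes with test containers
SUPPORTED_KEY_SIZES = {
    "cbc-essiv":   (128, 256),
    "cbc-plain64": (128, 256),
    "xts-plain64": (256, 512),
}

LUKS_HASHES  = {1: "sha1", 2: "sha256", 3: "sha512", 4: "ripemd160"}
LUKS_CIPHERS = {1: "aes", 2: "serpent", 3: "twofish"}

def luks_mode_number(hash_id, cipher_id):
    # e.g. (2, 3) -> 29523, i.e. sha256 + twofish
    return 29500 + 10 * hash_id + cipher_id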
function luks_legacy_test()
{
hashType=$1
attackType=$2
# if -a all was set let us default to -a 3 only. You could specify the attack type directly, e.g. -a 0
# the problem with defaulting to all=0,1,3,6,7 is that it could take way too long
if [ "${attackType}" -eq 65535 ]; then
attackType=3
fi
#LUKS_HASHES="sha1 sha256 sha512 ripemd160 whirlpool"
LUKS_HASHES="sha1 sha256 sha512 ripemd160"
LUKS_CIPHERS="aes serpent twofish"
LUKS_MODES="cbc-essiv cbc-plain64 xts-plain64"
LUKS_CIPHER_MODES="cbc-essiv cbc-plain64 xts-plain64"
LUKS_KEYSIZES="128 256 512"
LUKS_PASSWORD=$(cat "${TDIR}/luks_tests/pw" 2>/dev/null)
for luks_h in ${LUKS_HASHES}; do
for luks_c in ${LUKS_CIPHERS}; do
for luks_m in ${LUKS_MODES}; do
for luks_m in ${LUKS_CIPHER_MODES}; do
for luks_k in ${LUKS_KEYSIZES}; do
CMD=""
@ -3646,7 +3852,7 @@ if [ "${PACKAGE}" -eq 0 ] || [ -z "${PACKAGE_FOLDER}" ]; then
# generate random test entry
if [ "${HT}" -eq 65535 ]; then
for TMP_HT in ${HASH_TYPES}; do
if [ "${TMP_HT}" -ne ${LUKS_MODE} ]; then
if ! is_in_array "${TMP_HT}" ${LUKS_MODES}; then
if ! is_in_array "${TMP_HT}" ${TC_MODES}; then
if ! is_in_array "${TMP_HT}" ${VC_MODES}; then
if ! is_in_array "${TMP_HT}" ${CL_MODES}; then
@ -3662,7 +3868,7 @@ if [ "${PACKAGE}" -eq 0 ] || [ -z "${PACKAGE_FOLDER}" ]; then
continue
fi
if [ "${TMP_HT}" -ne ${LUKS_MODE} ]; then
if ! is_in_array "${TMP_HT}" ${LUKS_MODES}; then
# Exclude TrueCrypt and VeraCrypt testing modes
if ! is_in_array "${TMP_HT}" ${TC_MODES}; then
if ! is_in_array "${TMP_HT}" ${VC_MODES}; then
@ -3812,9 +4018,15 @@ if [ "${PACKAGE}" -eq 0 ] || [ -z "${PACKAGE_FOLDER}" ]; then
truecrypt_test "${hash_type}" 0
truecrypt_test "${hash_type}" 1
truecrypt_test "${hash_type}" 2
elif [ "${hash_type}" -eq ${LUKS_MODE} ]; then
elif is_in_array "${hash_type}" ${LUKS_MODES}; then
# run luks tests
luks_test "${hash_type}" ${ATTACK}
if [ ${hash_type} -eq 14600 ]; then
# for legacy mode
luks_legacy_test "${hash_type}" ${ATTACK}
else
# for new modes
luks_test "${hash_type}" ${ATTACK}
fi
else
# run attack mode 0 (stdin)
if [ ${ATTACK} -eq 65535 ] || [ ${ATTACK} -eq 0 ]; then attack_0; fi
@ -3884,7 +4096,7 @@ if [ "${PACKAGE}" -eq 1 ]; then
copy_cl_dir=1
else
for TMP_HT in $(seq "${HT_MIN}" "${HT_MAX}"); do
if [ "${TMP_HT}" -eq "${LUKS_MODE}" ]; then
if is_in_array "${TMP_HT}" "${LUKS_MODES}"; then
copy_luks_dir=1
elif is_in_array "${TMP_HT}" ${TC_MODES}; then
copy_tc_dir=1
