
New LUKS v1 modules

Branch: pull/3321/head
Author: Konrad Goławski, 2 months ago
Commit: ddb2d6e61a
31 changed files:

  144  OpenCL/inc_luks_aes.cl
  144  OpenCL/inc_luks_serpent.cl
  144  OpenCL/inc_luks_twofish.cl
   19  OpenCL/m14611-pure.cl
   19  OpenCL/m14612-pure.cl
   19  OpenCL/m14613-pure.cl
   19  OpenCL/m14621-pure.cl
   19  OpenCL/m14622-pure.cl
   19  OpenCL/m14623-pure.cl
   19  OpenCL/m14631-pure.cl
   19  OpenCL/m14632-pure.cl
   19  OpenCL/m14633-pure.cl
   19  OpenCL/m14641-pure.cl
   19  OpenCL/m14642-pure.cl
   19  OpenCL/m14643-pure.cl
    2  docs/changes.txt
   23  src/modules/module_14600.c
  491  src/modules/module_29511.c
  471  src/modules/module_29512.c
  471  src/modules/module_29513.c
  471  src/modules/module_29521.c
  471  src/modules/module_29522.c
  471  src/modules/module_29523.c
  471  src/modules/module_29531.c
  471  src/modules/module_29532.c
  471  src/modules/module_29533.c
  471  src/modules/module_29541.c
  471  src/modules/module_29542.c
  471  src/modules/module_29543.c
  328  tools/luks2hashcat.py
  234  tools/test.sh

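Taken together, the hunks below make three mechanical changes in every luks_af_*_then_*_decrypt function: the cipher-mode constant HC_LUKS_CIPHER_MODE_CBC_ESSIV is renamed to HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256, the CBC_PLAIN and XTS_PLAIN branches additionally accept new CBC_PLAIN64 / XTS_PLAIN64 constants, and the anti-forensic stripe buffer field af_src_buf is renamed to af_buf. A minimal sketch of the constants this implies follows; only the names appear in the diff, so the numeric values here are illustrative placeholders, not the real header's values.

// Hypothetical sketch of the cipher-mode constants implied by the hunks below.
// The names come from the diff; the values are illustrative only.
#define HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256 1 // was HC_LUKS_CIPHER_MODE_CBC_ESSIV
#define HC_LUKS_CIPHER_MODE_CBC_PLAIN        2
#define HC_LUKS_CIPHER_MODE_CBC_PLAIN64      3 // new in this commit
#define HC_LUKS_CIPHER_MODE_XTS_PLAIN        4
#define HC_LUKS_CIPHER_MODE_XTS_PLAIN64      5 // new in this commit
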
OpenCL/inc_luks_aes.cl  (144 changed lines)

@@ -2571,7 +2571,7 @@ DECLSPEC void luks_af_sha1_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs,
u32 mk[16] = { 0 };
-if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -2597,10 +2597,10 @@ DECLSPEC void luks_af_sha1_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs,
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_aes_cbc_essiv128_mk_sha1 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_cbc_essiv128_mk_sha1 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
-luks_decrypt_sector_aes_cbc_essiv128_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_cbc_essiv128_mk_sha1_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -2630,13 +2630,13 @@ DECLSPEC void luks_af_sha1_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs,
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_aes_cbc_essiv256_mk_sha1 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_cbc_essiv256_mk_sha1 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
-luks_decrypt_sector_aes_cbc_essiv256_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_cbc_essiv256_mk_sha1_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -2656,10 +2656,10 @@ DECLSPEC void luks_af_sha1_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs,
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_aes_cbc_plain128_mk_sha1 (luks_bufs->af_src_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_cbc_plain128_mk_sha1 (luks_bufs->af_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
-luks_decrypt_sector_aes_cbc_plain128_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_cbc_plain128_mk_sha1_final (luks_bufs->af_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -2683,13 +2683,13 @@ DECLSPEC void luks_af_sha1_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs,
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_aes_cbc_plain256_mk_sha1 (luks_bufs->af_src_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_cbc_plain256_mk_sha1 (luks_bufs->af_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
-luks_decrypt_sector_aes_cbc_plain256_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_cbc_plain256_mk_sha1_final (luks_bufs->af_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -2718,10 +2718,10 @@ DECLSPEC void luks_af_sha1_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs,
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_aes_xts_plain256_mk_sha1 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_xts_plain256_mk_sha1 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
-luks_decrypt_sector_aes_xts_plain256_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_xts_plain256_mk_sha1_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
else if (key_size == HC_LUKS_KEY_SIZE_512)
{
@@ -2758,16 +2758,16 @@ DECLSPEC void luks_af_sha1_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs,
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_aes_xts_plain512_mk_sha1 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_xts_plain512_mk_sha1 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
-luks_decrypt_sector_aes_xts_plain512_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_xts_plain512_mk_sha1_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
// decrypt payload data
-if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -2824,7 +2824,7 @@ DECLSPEC void luks_af_sha1_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs,
luks_decrypt_sector_aes_cbc_essiv256 (luks_bufs->ct_buf, pt_buf, ks1, ks2, 0, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -2869,7 +2869,7 @@ DECLSPEC void luks_af_sha1_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs,
luks_decrypt_sector_aes_cbc_plain256 (luks_bufs->ct_buf, pt_buf, ks1, 0, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -2962,7 +2962,7 @@ DECLSPEC void luks_af_sha256_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
u32 mk[16] = { 0 };
-if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -2988,10 +2988,10 @@ DECLSPEC void luks_af_sha256_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_aes_cbc_essiv128_mk_sha256 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_cbc_essiv128_mk_sha256 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
-luks_decrypt_sector_aes_cbc_essiv128_mk_sha256_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_cbc_essiv128_mk_sha256_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3021,13 +3021,13 @@ DECLSPEC void luks_af_sha256_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_aes_cbc_essiv256_mk_sha256 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_cbc_essiv256_mk_sha256 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
-luks_decrypt_sector_aes_cbc_essiv256_mk_sha256_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_cbc_essiv256_mk_sha256_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3047,10 +3047,10 @@ DECLSPEC void luks_af_sha256_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_aes_cbc_plain128_mk_sha256 (luks_bufs->af_src_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_cbc_plain128_mk_sha256 (luks_bufs->af_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
-luks_decrypt_sector_aes_cbc_plain128_mk_sha256_final (luks_bufs->af_src_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_cbc_plain128_mk_sha256_final (luks_bufs->af_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3074,13 +3074,13 @@ DECLSPEC void luks_af_sha256_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_aes_cbc_plain256_mk_sha256 (luks_bufs->af_src_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_cbc_plain256_mk_sha256 (luks_bufs->af_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
-luks_decrypt_sector_aes_cbc_plain256_mk_sha256_final (luks_bufs->af_src_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_cbc_plain256_mk_sha256_final (luks_bufs->af_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3109,10 +3109,10 @@ DECLSPEC void luks_af_sha256_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_aes_xts_plain256_mk_sha256 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_xts_plain256_mk_sha256 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
-luks_decrypt_sector_aes_xts_plain256_mk_sha256_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_xts_plain256_mk_sha256_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
else if (key_size == HC_LUKS_KEY_SIZE_512)
{
@@ -3149,16 +3149,16 @@ DECLSPEC void luks_af_sha256_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_aes_xts_plain512_mk_sha256 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_xts_plain512_mk_sha256 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
-luks_decrypt_sector_aes_xts_plain512_mk_sha256_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_xts_plain512_mk_sha256_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
// decrypt payload data
-if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3207,7 +3207,7 @@ DECLSPEC void luks_af_sha256_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
luks_decrypt_sector_aes_cbc_essiv256 (luks_bufs->ct_buf, pt_buf, ks1, ks2, 0, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3244,7 +3244,7 @@ DECLSPEC void luks_af_sha256_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
luks_decrypt_sector_aes_cbc_plain256 (luks_bufs->ct_buf, pt_buf, ks1, 0, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3340,7 +3340,7 @@ DECLSPEC void luks_af_sha512_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
u32 mk[16] = { 0 };
-if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3366,10 +3366,10 @@ DECLSPEC void luks_af_sha512_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_aes_cbc_essiv128_mk_sha512 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_cbc_essiv128_mk_sha512 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
-luks_decrypt_sector_aes_cbc_essiv128_mk_sha512_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_cbc_essiv128_mk_sha512_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3399,13 +3399,13 @@ DECLSPEC void luks_af_sha512_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_aes_cbc_essiv256_mk_sha512 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_cbc_essiv256_mk_sha512 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
-luks_decrypt_sector_aes_cbc_essiv256_mk_sha512_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_cbc_essiv256_mk_sha512_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3425,10 +3425,10 @@ DECLSPEC void luks_af_sha512_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_aes_cbc_plain128_mk_sha512 (luks_bufs->af_src_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_cbc_plain128_mk_sha512 (luks_bufs->af_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
-luks_decrypt_sector_aes_cbc_plain128_mk_sha512_final (luks_bufs->af_src_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_cbc_plain128_mk_sha512_final (luks_bufs->af_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3452,13 +3452,13 @@ DECLSPEC void luks_af_sha512_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_aes_cbc_plain256_mk_sha512 (luks_bufs->af_src_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_cbc_plain256_mk_sha512 (luks_bufs->af_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
-luks_decrypt_sector_aes_cbc_plain256_mk_sha512_final (luks_bufs->af_src_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_cbc_plain256_mk_sha512_final (luks_bufs->af_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3487,10 +3487,10 @@ DECLSPEC void luks_af_sha512_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_aes_xts_plain256_mk_sha512 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_xts_plain256_mk_sha512 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
-luks_decrypt_sector_aes_xts_plain256_mk_sha512_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_xts_plain256_mk_sha512_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
else if (key_size == HC_LUKS_KEY_SIZE_512)
{
@@ -3527,16 +3527,16 @@ DECLSPEC void luks_af_sha512_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_aes_xts_plain512_mk_sha512 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_xts_plain512_mk_sha512 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
-luks_decrypt_sector_aes_xts_plain512_mk_sha512_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_xts_plain512_mk_sha512_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
// decrypt payload data
-if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3585,7 +3585,7 @@ DECLSPEC void luks_af_sha512_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
luks_decrypt_sector_aes_cbc_essiv256 (luks_bufs->ct_buf, pt_buf, ks1, ks2, 0, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3622,7 +3622,7 @@ DECLSPEC void luks_af_sha512_then_aes_decrypt (GLOBAL_AS const luks_t *luks_bufs
luks_decrypt_sector_aes_cbc_plain256 (luks_bufs->ct_buf, pt_buf, ks1, 0, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3699,7 +3699,7 @@ DECLSPEC void luks_af_ripemd160_then_aes_decrypt (GLOBAL_AS const luks_t *luks_b
u32 mk[16] = { 0 };
-if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3725,10 +3725,10 @@ DECLSPEC void luks_af_ripemd160_then_aes_decrypt (GLOBAL_AS const luks_t *luks_b
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_aes_cbc_essiv128_mk_ripemd160 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_cbc_essiv128_mk_ripemd160 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
-luks_decrypt_sector_aes_cbc_essiv128_mk_ripemd160_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_cbc_essiv128_mk_ripemd160_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3758,13 +3758,13 @@ DECLSPEC void luks_af_ripemd160_then_aes_decrypt (GLOBAL_AS const luks_t *luks_b
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_aes_cbc_essiv256_mk_ripemd160 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_cbc_essiv256_mk_ripemd160 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
-luks_decrypt_sector_aes_cbc_essiv256_mk_ripemd160_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_cbc_essiv256_mk_ripemd160_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3784,10 +3784,10 @@ DECLSPEC void luks_af_ripemd160_then_aes_decrypt (GLOBAL_AS const luks_t *luks_b
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_aes_cbc_plain128_mk_ripemd160 (luks_bufs->af_src_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_cbc_plain128_mk_ripemd160 (luks_bufs->af_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
-luks_decrypt_sector_aes_cbc_plain128_mk_ripemd160_final (luks_bufs->af_src_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_cbc_plain128_mk_ripemd160_final (luks_bufs->af_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3811,13 +3811,13 @@ DECLSPEC void luks_af_ripemd160_then_aes_decrypt (GLOBAL_AS const luks_t *luks_b
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_aes_cbc_plain256_mk_ripemd160 (luks_bufs->af_src_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_cbc_plain256_mk_ripemd160 (luks_bufs->af_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
-luks_decrypt_sector_aes_cbc_plain256_mk_ripemd160_final (luks_bufs->af_src_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_cbc_plain256_mk_ripemd160_final (luks_bufs->af_buf + offset, mk, ks1, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3846,10 +3846,10 @@ DECLSPEC void luks_af_ripemd160_then_aes_decrypt (GLOBAL_AS const luks_t *luks_b
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_aes_xts_plain256_mk_ripemd160 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_xts_plain256_mk_ripemd160 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
-luks_decrypt_sector_aes_xts_plain256_mk_ripemd160_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_xts_plain256_mk_ripemd160_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
else if (key_size == HC_LUKS_KEY_SIZE_512)
{
@@ -3886,16 +3886,16 @@ DECLSPEC void luks_af_ripemd160_then_aes_decrypt (GLOBAL_AS const luks_t *luks_b
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_aes_xts_plain512_mk_ripemd160 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_xts_plain512_mk_ripemd160 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
-luks_decrypt_sector_aes_xts_plain512_mk_ripemd160_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
+luks_decrypt_sector_aes_xts_plain512_mk_ripemd160_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
// decrypt payload data
-if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3944,7 +3944,7 @@ DECLSPEC void luks_af_ripemd160_then_aes_decrypt (GLOBAL_AS const luks_t *luks_b
luks_decrypt_sector_aes_cbc_essiv256 (luks_bufs->ct_buf, pt_buf, ks1, ks2, 0, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3981,7 +3981,7 @@ DECLSPEC void luks_af_ripemd160_then_aes_decrypt (GLOBAL_AS const luks_t *luks_b
luks_decrypt_sector_aes_cbc_plain256 (luks_bufs->ct_buf, pt_buf, ks1, 0, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{

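The new *_PLAIN64 branches above route into the existing *_plain* sector-decrypt helpers unchanged, presumably because the only difference between dm-crypt's "plain" and "plain64" IV generators is the width of the sector number, and every sector touched here (the AF stripes and payload sector 0) has an index far below 2^32, where the two generators coincide. A minimal standalone sketch of the two IV generators, assuming a little-endian host:

#include <stdint.h>
#include <string.h>

// "plain": sector number truncated to 32 bits, little-endian, zero-padded.
static void iv_plain (uint8_t iv[16], uint64_t sector)
{
  memset (iv, 0, 16);
  const uint32_t s32 = (uint32_t) sector; // truncation is the whole difference
  memcpy (iv, &s32, sizeof (s32));        // little-endian host assumed
}

// "plain64": the full 64-bit sector number, little-endian, zero-padded.
static void iv_plain64 (uint8_t iv[16], uint64_t sector)
{
  memset (iv, 0, 16);
  memcpy (iv, &sector, sizeof (sector));  // little-endian host assumed
}
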
OpenCL/inc_luks_serpent.cl  (144 changed lines)

@@ -2567,7 +2567,7 @@ DECLSPEC void luks_af_sha1_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_bu
u32 mk[16] = { 0 };
-if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -2593,10 +2593,10 @@ DECLSPEC void luks_af_sha1_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_bu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_serpent_cbc_essiv128_mk_sha1 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+luks_decrypt_sector_serpent_cbc_essiv128_mk_sha1 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
-luks_decrypt_sector_serpent_cbc_essiv128_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+luks_decrypt_sector_serpent_cbc_essiv128_mk_sha1_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -2626,13 +2626,13 @@ DECLSPEC void luks_af_sha1_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_bu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_serpent_cbc_essiv256_mk_sha1 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+luks_decrypt_sector_serpent_cbc_essiv256_mk_sha1 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
-luks_decrypt_sector_serpent_cbc_essiv256_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+luks_decrypt_sector_serpent_cbc_essiv256_mk_sha1_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -2652,10 +2652,10 @@ DECLSPEC void luks_af_sha1_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_bu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_serpent_cbc_plain128_mk_sha1 (luks_bufs->af_src_buf + offset, mk, ks1, sector);
+luks_decrypt_sector_serpent_cbc_plain128_mk_sha1 (luks_bufs->af_buf + offset, mk, ks1, sector);
}
-luks_decrypt_sector_serpent_cbc_plain128_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, ks1, sector);
+luks_decrypt_sector_serpent_cbc_plain128_mk_sha1_final (luks_bufs->af_buf + offset, mk, ks1, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -2679,13 +2679,13 @@ DECLSPEC void luks_af_sha1_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_bu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_serpent_cbc_plain256_mk_sha1 (luks_bufs->af_src_buf + offset, mk, ks1, sector);
+luks_decrypt_sector_serpent_cbc_plain256_mk_sha1 (luks_bufs->af_buf + offset, mk, ks1, sector);
}
-luks_decrypt_sector_serpent_cbc_plain256_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, ks1, sector);
+luks_decrypt_sector_serpent_cbc_plain256_mk_sha1_final (luks_bufs->af_buf + offset, mk, ks1, sector);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -2714,10 +2714,10 @@ DECLSPEC void luks_af_sha1_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_bu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_serpent_xts_plain256_mk_sha1 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+luks_decrypt_sector_serpent_xts_plain256_mk_sha1 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
-luks_decrypt_sector_serpent_xts_plain256_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+luks_decrypt_sector_serpent_xts_plain256_mk_sha1_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_512)
{
@@ -2754,16 +2754,16 @@ DECLSPEC void luks_af_sha1_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_bu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_serpent_xts_plain512_mk_sha1 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+luks_decrypt_sector_serpent_xts_plain512_mk_sha1 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
-luks_decrypt_sector_serpent_xts_plain512_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+luks_decrypt_sector_serpent_xts_plain512_mk_sha1_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
}
// decrypt payload data
-if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -2812,7 +2812,7 @@ DECLSPEC void luks_af_sha1_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_bu
luks_decrypt_sector_serpent_cbc_essiv256 (luks_bufs->ct_buf, pt_buf, ks1, ks2, 0);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -2849,7 +2849,7 @@ DECLSPEC void luks_af_sha1_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_bu
luks_decrypt_sector_serpent_cbc_plain256 (luks_bufs->ct_buf, pt_buf, ks1, 0);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -2926,7 +2926,7 @@ DECLSPEC void luks_af_sha256_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
u32 mk[16] = { 0 };
-if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -2952,10 +2952,10 @@ DECLSPEC void luks_af_sha256_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_serpent_cbc_essiv128_mk_sha256 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+luks_decrypt_sector_serpent_cbc_essiv128_mk_sha256 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
-luks_decrypt_sector_serpent_cbc_essiv128_mk_sha256_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+luks_decrypt_sector_serpent_cbc_essiv128_mk_sha256_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -2985,13 +2985,13 @@ DECLSPEC void luks_af_sha256_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_serpent_cbc_essiv256_mk_sha256 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+luks_decrypt_sector_serpent_cbc_essiv256_mk_sha256 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
-luks_decrypt_sector_serpent_cbc_essiv256_mk_sha256_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+luks_decrypt_sector_serpent_cbc_essiv256_mk_sha256_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3011,10 +3011,10 @@ DECLSPEC void luks_af_sha256_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_serpent_cbc_plain128_mk_sha256 (luks_bufs->af_src_buf + offset, mk, ks1, sector);
+luks_decrypt_sector_serpent_cbc_plain128_mk_sha256 (luks_bufs->af_buf + offset, mk, ks1, sector);
}
-luks_decrypt_sector_serpent_cbc_plain128_mk_sha256_final (luks_bufs->af_src_buf + offset, mk, ks1, sector);
+luks_decrypt_sector_serpent_cbc_plain128_mk_sha256_final (luks_bufs->af_buf + offset, mk, ks1, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3038,13 +3038,13 @@ DECLSPEC void luks_af_sha256_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_serpent_cbc_plain256_mk_sha256 (luks_bufs->af_src_buf + offset, mk, ks1, sector);
+luks_decrypt_sector_serpent_cbc_plain256_mk_sha256 (luks_bufs->af_buf + offset, mk, ks1, sector);
}
-luks_decrypt_sector_serpent_cbc_plain256_mk_sha256_final (luks_bufs->af_src_buf + offset, mk, ks1, sector);
+luks_decrypt_sector_serpent_cbc_plain256_mk_sha256_final (luks_bufs->af_buf + offset, mk, ks1, sector);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3073,10 +3073,10 @@ DECLSPEC void luks_af_sha256_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_serpent_xts_plain256_mk_sha256 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+luks_decrypt_sector_serpent_xts_plain256_mk_sha256 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
-luks_decrypt_sector_serpent_xts_plain256_mk_sha256_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+luks_decrypt_sector_serpent_xts_plain256_mk_sha256_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_512)
{
@@ -3113,16 +3113,16 @@ DECLSPEC void luks_af_sha256_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_serpent_xts_plain512_mk_sha256 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+luks_decrypt_sector_serpent_xts_plain512_mk_sha256 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
-luks_decrypt_sector_serpent_xts_plain512_mk_sha256_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+luks_decrypt_sector_serpent_xts_plain512_mk_sha256_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
}
// decrypt payload data
-if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3171,7 +3171,7 @@ DECLSPEC void luks_af_sha256_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
luks_decrypt_sector_serpent_cbc_essiv256 (luks_bufs->ct_buf, pt_buf, ks1, ks2, 0);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3208,7 +3208,7 @@ DECLSPEC void luks_af_sha256_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
luks_decrypt_sector_serpent_cbc_plain256 (luks_bufs->ct_buf, pt_buf, ks1, 0);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3304,7 +3304,7 @@ DECLSPEC void luks_af_sha512_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
u32 mk[16] = { 0 };
-if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3330,10 +3330,10 @@ DECLSPEC void luks_af_sha512_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_serpent_cbc_essiv128_mk_sha512 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+luks_decrypt_sector_serpent_cbc_essiv128_mk_sha512 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
-luks_decrypt_sector_serpent_cbc_essiv128_mk_sha512_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+luks_decrypt_sector_serpent_cbc_essiv128_mk_sha512_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3363,13 +3363,13 @@ DECLSPEC void luks_af_sha512_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_serpent_cbc_essiv256_mk_sha512 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+luks_decrypt_sector_serpent_cbc_essiv256_mk_sha512 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
-luks_decrypt_sector_serpent_cbc_essiv256_mk_sha512_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+luks_decrypt_sector_serpent_cbc_essiv256_mk_sha512_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3389,10 +3389,10 @@ DECLSPEC void luks_af_sha512_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_serpent_cbc_plain128_mk_sha512 (luks_bufs->af_src_buf + offset, mk, ks1, sector);
+luks_decrypt_sector_serpent_cbc_plain128_mk_sha512 (luks_bufs->af_buf + offset, mk, ks1, sector);
}
-luks_decrypt_sector_serpent_cbc_plain128_mk_sha512_final (luks_bufs->af_src_buf + offset, mk, ks1, sector);
+luks_decrypt_sector_serpent_cbc_plain128_mk_sha512_final (luks_bufs->af_buf + offset, mk, ks1, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3416,13 +3416,13 @@ DECLSPEC void luks_af_sha512_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_serpent_cbc_plain256_mk_sha512 (luks_bufs->af_src_buf + offset, mk, ks1, sector);
+luks_decrypt_sector_serpent_cbc_plain256_mk_sha512 (luks_bufs->af_buf + offset, mk, ks1, sector);
}
-luks_decrypt_sector_serpent_cbc_plain256_mk_sha512_final (luks_bufs->af_src_buf + offset, mk, ks1, sector);
+luks_decrypt_sector_serpent_cbc_plain256_mk_sha512_final (luks_bufs->af_buf + offset, mk, ks1, sector);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3451,10 +3451,10 @@ DECLSPEC void luks_af_sha512_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_serpent_xts_plain256_mk_sha512 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+luks_decrypt_sector_serpent_xts_plain256_mk_sha512 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
-luks_decrypt_sector_serpent_xts_plain256_mk_sha512_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+luks_decrypt_sector_serpent_xts_plain256_mk_sha512_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_512)
{
@@ -3491,16 +3491,16 @@ DECLSPEC void luks_af_sha512_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_serpent_xts_plain512_mk_sha512 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+luks_decrypt_sector_serpent_xts_plain512_mk_sha512 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
-luks_decrypt_sector_serpent_xts_plain512_mk_sha512_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+luks_decrypt_sector_serpent_xts_plain512_mk_sha512_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
}
// decrypt payload data
-if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3549,7 +3549,7 @@ DECLSPEC void luks_af_sha512_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
luks_decrypt_sector_serpent_cbc_essiv256 (luks_bufs->ct_buf, pt_buf, ks1, ks2, 0);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3586,7 +3586,7 @@ DECLSPEC void luks_af_sha512_then_serpent_decrypt (GLOBAL_AS const luks_t *luks_
luks_decrypt_sector_serpent_cbc_plain256 (luks_bufs->ct_buf, pt_buf, ks1, 0);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3663,7 +3663,7 @@ DECLSPEC void luks_af_ripemd160_then_serpent_decrypt (GLOBAL_AS const luks_t *lu
u32 mk[16] = { 0 };
-if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3689,10 +3689,10 @@ DECLSPEC void luks_af_ripemd160_then_serpent_decrypt (GLOBAL_AS const luks_t *lu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_serpent_cbc_essiv128_mk_ripemd160 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+luks_decrypt_sector_serpent_cbc_essiv128_mk_ripemd160 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
-luks_decrypt_sector_serpent_cbc_essiv128_mk_ripemd160_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+luks_decrypt_sector_serpent_cbc_essiv128_mk_ripemd160_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3722,13 +3722,13 @@ DECLSPEC void luks_af_ripemd160_then_serpent_decrypt (GLOBAL_AS const luks_t *lu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_serpent_cbc_essiv256_mk_ripemd160 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+luks_decrypt_sector_serpent_cbc_essiv256_mk_ripemd160 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
-luks_decrypt_sector_serpent_cbc_essiv256_mk_ripemd160_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+luks_decrypt_sector_serpent_cbc_essiv256_mk_ripemd160_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3748,10 +3748,10 @@ DECLSPEC void luks_af_ripemd160_then_serpent_decrypt (GLOBAL_AS const luks_t *lu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_serpent_cbc_plain128_mk_ripemd160 (luks_bufs->af_src_buf + offset, mk, ks1, sector);
+luks_decrypt_sector_serpent_cbc_plain128_mk_ripemd160 (luks_bufs->af_buf + offset, mk, ks1, sector);
}
-luks_decrypt_sector_serpent_cbc_plain128_mk_ripemd160_final (luks_bufs->af_src_buf + offset, mk, ks1, sector);
+luks_decrypt_sector_serpent_cbc_plain128_mk_ripemd160_final (luks_bufs->af_buf + offset, mk, ks1, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3775,13 +3775,13 @@ DECLSPEC void luks_af_ripemd160_then_serpent_decrypt (GLOBAL_AS const luks_t *lu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_serpent_cbc_plain256_mk_ripemd160 (luks_bufs->af_src_buf + offset, mk, ks1, sector);
+luks_decrypt_sector_serpent_cbc_plain256_mk_ripemd160 (luks_bufs->af_buf + offset, mk, ks1, sector);
}
-luks_decrypt_sector_serpent_cbc_plain256_mk_ripemd160_final (luks_bufs->af_src_buf + offset, mk, ks1, sector);
+luks_decrypt_sector_serpent_cbc_plain256_mk_ripemd160_final (luks_bufs->af_buf + offset, mk, ks1, sector);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -3810,10 +3810,10 @@ DECLSPEC void luks_af_ripemd160_then_serpent_decrypt (GLOBAL_AS const luks_t *lu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_serpent_xts_plain256_mk_ripemd160 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+luks_decrypt_sector_serpent_xts_plain256_mk_ripemd160 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
-luks_decrypt_sector_serpent_xts_plain256_mk_ripemd160_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+luks_decrypt_sector_serpent_xts_plain256_mk_ripemd160_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_512)
{
@@ -3850,16 +3850,16 @@ DECLSPEC void luks_af_ripemd160_then_serpent_decrypt (GLOBAL_AS const luks_t *lu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_serpent_xts_plain512_mk_ripemd160 (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+luks_decrypt_sector_serpent_xts_plain512_mk_ripemd160 (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
-luks_decrypt_sector_serpent_xts_plain512_mk_ripemd160_final (luks_bufs->af_src_buf + offset, mk, ks1, ks2, sector);
+luks_decrypt_sector_serpent_xts_plain512_mk_ripemd160_final (luks_bufs->af_buf + offset, mk, ks1, ks2, sector);
}
}
// decrypt payload data
-if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3908,7 +3908,7 @@ DECLSPEC void luks_af_ripemd160_then_serpent_decrypt (GLOBAL_AS const luks_t *lu
luks_decrypt_sector_serpent_cbc_essiv256 (luks_bufs->ct_buf, pt_buf, ks1, ks2, 0);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3945,7 +3945,7 @@ DECLSPEC void luks_af_ripemd160_then_serpent_decrypt (GLOBAL_AS const luks_t *lu
luks_decrypt_sector_serpent_cbc_plain256 (luks_bufs->ct_buf, pt_buf, ks1, 0);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{

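The rename HC_LUKS_CIPHER_MODE_CBC_ESSIV -> HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256 only spells out the hash that LUKS1 ESSIV setups use: the per-sector IV is the sector number encrypted under a key derived by hashing the volume key with SHA-256. A minimal sketch of the scheme follows, with the block cipher abstracted behind a callback since these files instantiate it for AES, Serpent, and Twofish alike; the names below are illustrative, not hashcat's.

#include <stdint.h>
#include <string.h>

// One 16-byte block encryption under a 256-bit key (AES/Serpent/Twofish here).
typedef void (*block_encrypt_fn) (const uint8_t key[32], const uint8_t in[16], uint8_t out[16]);

// ESSIV:SHA256: IV(sector) = E_{SHA256(volume_key)}(sector_le).
// essiv_key must already hold SHA256(volume_key).
static void iv_essiv_sha256 (uint8_t iv[16], uint64_t sector,
                             const uint8_t essiv_key[32],
                             block_encrypt_fn block_encrypt)
{
  uint8_t sector_le[16] = { 0 };
  memcpy (sector_le, &sector, sizeof (sector)); // little-endian host assumed
  block_encrypt (essiv_key, sector_le, iv);
}
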
OpenCL/inc_luks_twofish.cl  (144 changed lines)

@@ -2567,7 +2567,7 @@ DECLSPEC void luks_af_sha1_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_bu
u32 mk[16] = { 0 };
-if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -2593,10 +2593,10 @@ DECLSPEC void luks_af_sha1_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_bu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_twofish_cbc_essiv128_mk_sha1 (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
+luks_decrypt_sector_twofish_cbc_essiv128_mk_sha1 (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
-luks_decrypt_sector_twofish_cbc_essiv128_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
+luks_decrypt_sector_twofish_cbc_essiv128_mk_sha1_final (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -2626,13 +2626,13 @@ DECLSPEC void luks_af_sha1_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_bu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_twofish_cbc_essiv256_mk_sha1 (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
+luks_decrypt_sector_twofish_cbc_essiv256_mk_sha1 (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
-luks_decrypt_sector_twofish_cbc_essiv256_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
+luks_decrypt_sector_twofish_cbc_essiv256_mk_sha1_final (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -2652,10 +2652,10 @@ DECLSPEC void luks_af_sha1_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_bu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_twofish_cbc_plain128_mk_sha1 (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sector);
+luks_decrypt_sector_twofish_cbc_plain128_mk_sha1 (luks_bufs->af_buf + offset, mk, sk1, lk1, sector);
}
-luks_decrypt_sector_twofish_cbc_plain128_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sector);
+luks_decrypt_sector_twofish_cbc_plain128_mk_sha1_final (luks_bufs->af_buf + offset, mk, sk1, lk1, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -2679,13 +2679,13 @@ DECLSPEC void luks_af_sha1_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_bu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_twofish_cbc_plain256_mk_sha1 (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sector);
+luks_decrypt_sector_twofish_cbc_plain256_mk_sha1 (luks_bufs->af_buf + offset, mk, sk1, lk1, sector);
}
-luks_decrypt_sector_twofish_cbc_plain256_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sector);
+luks_decrypt_sector_twofish_cbc_plain256_mk_sha1_final (luks_bufs->af_buf + offset, mk, sk1, lk1, sector);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -2714,10 +2714,10 @@ DECLSPEC void luks_af_sha1_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_bu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_twofish_xts_plain256_mk_sha1 (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
+luks_decrypt_sector_twofish_xts_plain256_mk_sha1 (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
-luks_decrypt_sector_twofish_xts_plain256_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
+luks_decrypt_sector_twofish_xts_plain256_mk_sha1_final (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_512)
{
@@ -2754,16 +2754,16 @@ DECLSPEC void luks_af_sha1_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_bu
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_twofish_xts_plain512_mk_sha1 (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
+luks_decrypt_sector_twofish_xts_plain512_mk_sha1 (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
-luks_decrypt_sector_twofish_xts_plain512_mk_sha1_final (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
+luks_decrypt_sector_twofish_xts_plain512_mk_sha1_final (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
}
// decrypt payload data
-if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -2812,7 +2812,7 @@ DECLSPEC void luks_af_sha1_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_bu
luks_decrypt_sector_twofish_cbc_essiv256 (luks_bufs->ct_buf, pt_buf, sk1, lk1, sk2, lk2, 0);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -2849,7 +2849,7 @@ DECLSPEC void luks_af_sha1_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_bu
luks_decrypt_sector_twofish_cbc_plain256 (luks_bufs->ct_buf, pt_buf, sk1, lk1, 0);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_XTS_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -2926,7 +2926,7 @@ DECLSPEC void luks_af_sha256_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_
u32 mk[16] = { 0 };
-if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV)
+if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_ESSIV_SHA256)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -2952,10 +2952,10 @@ DECLSPEC void luks_af_sha256_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_twofish_cbc_essiv128_mk_sha256 (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
+luks_decrypt_sector_twofish_cbc_essiv128_mk_sha256 (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
-luks_decrypt_sector_twofish_cbc_essiv128_mk_sha256_final (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
+luks_decrypt_sector_twofish_cbc_essiv128_mk_sha256_final (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)
{
@@ -2985,13 +2985,13 @@ DECLSPEC void luks_af_sha256_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_twofish_cbc_essiv256_mk_sha256 (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
+luks_decrypt_sector_twofish_cbc_essiv256_mk_sha256 (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
-luks_decrypt_sector_twofish_cbc_essiv256_mk_sha256_final (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
+luks_decrypt_sector_twofish_cbc_essiv256_mk_sha256_final (luks_bufs->af_buf + offset, mk, sk1, lk1, sk2, lk2, sector);
}
}
-else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN)
+else if (cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN || cipher_mode == HC_LUKS_CIPHER_MODE_CBC_PLAIN64)
{
if (key_size == HC_LUKS_KEY_SIZE_128)
{
@@ -3011,10 +3011,10 @@ DECLSPEC void luks_af_sha256_then_twofish_decrypt (GLOBAL_AS const luks_t *luks_
for (sector = 0; sector < SECTOR_PER_AF - 1; sector++, offset += OFFSET_PER_SECTOR)
{
-luks_decrypt_sector_twofish_cbc_plain128_mk_sha256 (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sector);
+luks_decrypt_sector_twofish_cbc_plain128_mk_sha256 (luks_bufs->af_buf + offset, mk, sk1, lk1, sector);
}
-luks_decrypt_sector_twofish_cbc_plain128_mk_sha256_final (luks_bufs->af_src_buf + offset, mk, sk1, lk1, sector);
+luks_decrypt_sector_twofish_cbc_plain128_mk_sha256_final (luks_bufs->af_buf + offset, mk, sk1, lk1, sector);
}
else if (key_size == HC_LUKS_KEY_SIZE_256)