Check hashes_shown[] whenever calling mark_hash directly.

Not really needed right now (because those algorithms have a single digest per salt), but this can change in the future.
Branch: pull/1284/head
Author: jsteube, 7 years ago
Parent: 4b2d9f0f29
Commit: cf57365e7c
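
For reference, this is the guard pattern that every hunk below applies, shown as a minimal sketch. The names (hashes_shown, digests_offset, mark_hash, plains_buf, d_return_buf, salt_pos, digests_cnt, gid, il_pos) are the ones used in these kernels; in the kernels that hard-code a single digest the index and digest position are simply 0. OpenCL's atomic_inc () returns the value the counter held before the increment, so only the first work-item to crack a given digest reaches mark_hash; any later finder sees a non-zero counter and skips the duplicate report.

// Minimal sketch of the guard added in this commit: wrap the existing
// mark_hash call so a cracked digest is reported at most once.
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
  mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, il_pos);
}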

@@ -690,16 +690,25 @@ __kernel void m06211_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (verify_header_aes (esalt_bufs, ukey1, ukey2, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_serpent (esalt_bufs, ukey1, ukey2) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_twofish (esalt_bufs, ukey1, ukey2) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
}

@@ -698,17 +698,26 @@ __kernel void m06212_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (verify_header_aes (esalt_bufs, ukey1, ukey2, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_serpent (esalt_bufs, ukey1, ukey2) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_twofish (esalt_bufs, ukey1, ukey2) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
#if defined (IS_APPLE) && defined (IS_GPU)
@@ -743,16 +752,25 @@ __kernel void m06212_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (verify_header_aes_twofish (esalt_bufs, ukey1, ukey2, ukey3, ukey4, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_serpent_aes (esalt_bufs, ukey1, ukey2, ukey3, ukey4, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_twofish_serpent (esalt_bufs, ukey1, ukey2, ukey3, ukey4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
}

@@ -698,17 +698,26 @@ __kernel void m06213_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (verify_header_aes (esalt_bufs, ukey1, ukey2, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_serpent (esalt_bufs, ukey1, ukey2) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_twofish (esalt_bufs, ukey1, ukey2) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
#if defined (IS_APPLE) && defined (IS_GPU)
@@ -743,17 +752,26 @@ __kernel void m06213_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (verify_header_aes_twofish (esalt_bufs, ukey1, ukey2, ukey3, ukey4, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_serpent_aes (esalt_bufs, ukey1, ukey2, ukey3, ukey4, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_twofish_serpent (esalt_bufs, ukey1, ukey2, ukey3, ukey4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
#if defined (IS_APPLE) && defined (IS_GPU)
@@ -788,11 +806,17 @@ __kernel void m06213_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (verify_header_aes_twofish_serpent (esalt_bufs, ukey1, ukey2, ukey3, ukey4, ukey5, ukey6, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_serpent_twofish_aes (esalt_bufs, ukey1, ukey2, ukey3, ukey4, ukey5, ukey6, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
}

@@ -598,16 +598,25 @@ __kernel void m06221_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (verify_header_aes (esalt_bufs, ukey1, ukey2, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_serpent (esalt_bufs, ukey1, ukey2) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_twofish (esalt_bufs, ukey1, ukey2) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
}

@@ -606,17 +606,26 @@ __kernel void m06222_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (verify_header_aes (esalt_bufs, ukey1, ukey2, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_serpent (esalt_bufs, ukey1, ukey2) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_twofish (esalt_bufs, ukey1, ukey2) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
#if defined (IS_APPLE) && defined (IS_GPU)
@@ -651,16 +660,25 @@ __kernel void m06222_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (verify_header_aes_twofish (esalt_bufs, ukey1, ukey2, ukey3, ukey4, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_serpent_aes (esalt_bufs, ukey1, ukey2, ukey3, ukey4, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_twofish_serpent (esalt_bufs, ukey1, ukey2, ukey3, ukey4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
}

@@ -655,17 +655,26 @@ __kernel void m06223_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (verify_header_aes (esalt_bufs, ukey1, ukey2, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_serpent (esalt_bufs, ukey1, ukey2) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_twofish (esalt_bufs, ukey1, ukey2) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
#if defined (IS_APPLE) && defined (IS_GPU)
@@ -700,17 +709,26 @@ __kernel void m06223_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (verify_header_aes_twofish (esalt_bufs, ukey1, ukey2, ukey3, ukey4, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_serpent_aes (esalt_bufs, ukey1, ukey2, ukey3, ukey4, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_twofish_serpent (esalt_bufs, ukey1, ukey2, ukey3, ukey4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
#if defined (IS_APPLE) && defined (IS_GPU)
@@ -745,11 +763,17 @@ __kernel void m06223_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (verify_header_aes_twofish_serpent (esalt_bufs, ukey1, ukey2, ukey3, ukey4, ukey5, ukey6, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_serpent_twofish_aes (esalt_bufs, ukey1, ukey2, ukey3, ukey4, ukey5, ukey6, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
}

@@ -2233,16 +2233,25 @@ __kernel void m06231_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (verify_header_aes (esalt_bufs, ukey1, ukey2, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_serpent (esalt_bufs, ukey1, ukey2) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_twofish (esalt_bufs, ukey1, ukey2) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
}

@@ -2010,17 +2010,26 @@ __kernel void m06232_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (verify_header_aes (esalt_bufs, ukey1, ukey2, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_serpent (esalt_bufs, ukey1, ukey2) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_twofish (esalt_bufs, ukey1, ukey2) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
#if defined (IS_APPLE) && defined (IS_GPU)
@@ -2055,16 +2064,25 @@ __kernel void m06232_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (verify_header_aes_twofish (esalt_bufs, ukey1, ukey2, ukey3, ukey4, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_serpent_aes (esalt_bufs, ukey1, ukey2, ukey3, ukey4, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_twofish_serpent (esalt_bufs, ukey1, ukey2, ukey3, ukey4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
}

@@ -2010,17 +2010,26 @@ __kernel void m06233_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (verify_header_aes (esalt_bufs, ukey1, ukey2, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_serpent (esalt_bufs, ukey1, ukey2) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_twofish (esalt_bufs, ukey1, ukey2) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
#if defined (IS_APPLE) && defined (IS_GPU)
@@ -2055,17 +2064,26 @@ __kernel void m06233_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (verify_header_aes_twofish (esalt_bufs, ukey1, ukey2, ukey3, ukey4, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_serpent_aes (esalt_bufs, ukey1, ukey2, ukey3, ukey4, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_twofish_serpent (esalt_bufs, ukey1, ukey2, ukey3, ukey4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
#if defined (IS_APPLE) && defined (IS_GPU)
@@ -2100,11 +2118,17 @@ __kernel void m06233_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (verify_header_aes_twofish_serpent (esalt_bufs, ukey1, ukey2, ukey3, ukey4, ukey5, ukey6, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_serpent_twofish_aes (esalt_bufs, ukey1, ukey2, ukey3, ukey4, ukey5, ukey6, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[0]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
}

@@ -1563,7 +1563,10 @@ __kernel void m06800_comp (__global pw_t *pws, __global const kernel_rule_t *rul
&& (out[2] == salt_buf[2])
&& (out[3] == salt_buf[3]))
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, 0);
}
}
/**

@@ -632,7 +632,10 @@ __kernel void m07500_m04 (__global pw_t *pws, __global const kernel_rule_t *rule
if (decrypt_and_check (&rc4_keys[lid], tmp, timestamp_ct) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, il_pos);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, il_pos);
}
}
}
}
@@ -733,7 +736,10 @@ __kernel void m07500_s04 (__global pw_t *pws, __global const kernel_rule_t *rule
if (decrypt_and_check (&rc4_keys[lid], tmp, timestamp_ct) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, il_pos);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, il_pos);
}
}
}
}

@@ -680,7 +680,10 @@ __kernel void m07500_m04 (__global pw_t *pws, __global const kernel_rule_t *rule
if (decrypt_and_check (&rc4_keys[lid], tmp, timestamp_ct) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, il_pos);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, il_pos);
}
}
}
}
@@ -831,7 +834,10 @@ __kernel void m07500_s04 (__global pw_t *pws, __global const kernel_rule_t *rule
if (decrypt_and_check (&rc4_keys[lid], tmp, timestamp_ct) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, il_pos);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, il_pos);
}
}
}
}

@@ -624,7 +624,10 @@ void m07500 (__local RC4_KEY *rc4_keys, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[
if (decrypt_and_check (&rc4_keys[lid], tmp, timestamp_ct) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, il_pos);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, il_pos);
}
}
}
}

@@ -1883,7 +1883,10 @@ __kernel void m08800_comp (__global pw_t *pws, __global const kernel_rule_t *rul
// MSDOS5.0
if ((r0 == 0x4f44534d) && (r1 == 0x302e3553))
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, 0);
}
}
}
@@ -1949,7 +1952,10 @@ __kernel void m08800_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if ((r[5] < 2) && (r[6] < 16) && ((r[14] & 0xffff) == 0xEF53))
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, 0);
}
}
}
}

@@ -1339,6 +1339,9 @@ __kernel void m11300_comp (__global pw_t *pws, __global const kernel_rule_t *rul
&& (out[2] == 0x10101010)
&& (out[3] == 0x10101010))
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, 0);
}
}
}

@@ -851,8 +851,11 @@ __kernel void m11600_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (seven_zip_hook[gid].hook_success == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, 0);
}
return;
}
}
}

@@ -721,58 +721,6 @@ void kerb_prepare (const u32 w0[4], const u32 w1[4], const u32 pw_len, const u32
hmac_md5_run (w0_t, w1_t, w2_t, w3_t, ipad, opad, digest);
}
void m13100 (__local RC4_KEY *rc4_keys, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const u32 pw_len, __global pw_t *pws, __global const kernel_rule_t *rules_buf, __global const comb_t *combs_buf, __global const bf_t *bfs_buf, __global void *tmps, __global void *hooks, __global const u32 *bitmaps_buf_s1_a, __global const u32 *bitmaps_buf_s1_b, __global const u32 *bitmaps_buf_s1_c, __global const u32 *bitmaps_buf_s1_d, __global const u32 *bitmaps_buf_s2_a, __global const u32 *bitmaps_buf_s2_b, __global const u32 *bitmaps_buf_s2_c, __global const u32 *bitmaps_buf_s2_d, __global plain_t *plains_buf, __global const digest_t *digests_buf, __global u32 *hashes_shown, __global const salt_t *salt_bufs, __global krb5tgs_t *krb5tgs_bufs, __global u32 *d_return_buf, __global u32 *d_scryptV0_buf, __global u32 *d_scryptV1_buf, __global u32 *d_scryptV2_buf, __global u32 *d_scryptV3_buf, const u32 bitmap_mask, const u32 bitmap_shift1, const u32 bitmap_shift2, const u32 salt_pos, const u32 loop_pos, const u32 loop_cnt, const u32 il_cnt, const u32 digests_cnt, const u32 digests_offset)
{
/**
* modifier
*/
const u32 gid = get_global_id (0);
const u32 lid = get_local_id (0);
/**
* salt
*/
u32 checksum[4];
checksum[0] = krb5tgs_bufs[digests_offset].checksum[0];
checksum[1] = krb5tgs_bufs[digests_offset].checksum[1];
checksum[2] = krb5tgs_bufs[digests_offset].checksum[2];
checksum[3] = krb5tgs_bufs[digests_offset].checksum[3];
/**
* loop
*/
u32 w0l = w0[0];
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
const u32 w0r = bfs_buf[il_pos].i;
w0[0] = w0l | w0r;
u32 digest[4];
u32 K2[4];
kerb_prepare (w0, w1, pw_len, checksum, digest, K2);
u32 tmp[4];
tmp[0] = digest[0];
tmp[1] = digest[1];
tmp[2] = digest[2];
tmp[3] = digest[3];
if (decrypt_and_check (&rc4_keys[lid], tmp, krb5tgs_bufs[digests_offset].edata2, krb5tgs_bufs[digests_offset].edata2_len, K2, checksum) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, il_pos);
}
}
}
__kernel void m13100_m04 (__global pw_t *pws, __global const kernel_rule_t *rules_buf, __global const comb_t *combs_buf, __global const bf_t *bfs_buf, __global void *tmps, __global void *hooks, __global const u32 *bitmaps_buf_s1_a, __global const u32 *bitmaps_buf_s1_b, __global const u32 *bitmaps_buf_s1_c, __global const u32 *bitmaps_buf_s1_d, __global const u32 *bitmaps_buf_s2_a, __global const u32 *bitmaps_buf_s2_b, __global const u32 *bitmaps_buf_s2_c, __global const u32 *bitmaps_buf_s2_d, __global plain_t *plains_buf, __global const digest_t *digests_buf, __global u32 *hashes_shown, __global const salt_t *salt_bufs, __global krb5tgs_t *krb5tgs_bufs, __global u32 *d_return_buf, __global u32 *d_scryptV0_buf, __global u32 *d_scryptV1_buf, __global u32 *d_scryptV2_buf, __global u32 *d_scryptV3_buf, const u32 bitmap_mask, const u32 bitmap_shift1, const u32 bitmap_shift2, const u32 salt_pos, const u32 loop_pos, const u32 loop_cnt, const u32 il_cnt, const u32 digests_cnt, const u32 digests_offset, const u32 combs_mode, const u32 gid_max)
{
/**
@@ -853,7 +801,10 @@ __kernel void m13100_m04 (__global pw_t *pws, __global const kernel_rule_t *rule
if (decrypt_and_check (&rc4_keys[lid], tmp, krb5tgs_bufs[digests_offset].edata2, krb5tgs_bufs[digests_offset].edata2_len, K2, checksum) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, il_pos);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, il_pos);
}
}
}
}
@@ -946,7 +897,10 @@ __kernel void m13100_s04 (__global pw_t *pws, __global const kernel_rule_t *rule
if (decrypt_and_check (&rc4_keys[lid], tmp, krb5tgs_bufs[digests_offset].edata2, krb5tgs_bufs[digests_offset].edata2_len, K2, checksum) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, il_pos);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, il_pos);
}
}
}
}

@@ -848,7 +848,10 @@ __kernel void m13100_m04 (__global pw_t *pws, __global const kernel_rule_t *rule
if (decrypt_and_check (&rc4_keys[lid], tmp, krb5tgs_bufs[digests_offset].edata2, krb5tgs_bufs[digests_offset].edata2_len, K2, checksum) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, il_pos);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, il_pos);
}
}
}
}
@@ -990,7 +993,10 @@ __kernel void m13100_s04 (__global pw_t *pws, __global const kernel_rule_t *rule
if (decrypt_and_check (&rc4_keys[lid], tmp, krb5tgs_bufs[digests_offset].edata2, krb5tgs_bufs[digests_offset].edata2_len, K2, checksum) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, il_pos);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, il_pos);
}
}
}
}

@@ -770,7 +770,10 @@ void m13100 (__local RC4_KEY *rc4_keys, u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[
if (decrypt_and_check (&rc4_keys[lid], tmp, krb5tgs_bufs[digests_offset].edata2, krb5tgs_bufs[digests_offset].edata2_len, K2, checksum) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, il_pos);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, il_pos);
}
}
}
}

@@ -1206,8 +1206,11 @@ __kernel void m13200_comp (__global pw_t *pws, __global const kernel_rule_t *rul
#define il_pos 0
if(tmps[gid].cipher[0]==0xA6A6A6A6 && tmps[gid].cipher[1]==0xA6A6A6A6)
if (tmps[gid].cipher[0] == 0xA6A6A6A6 && tmps[gid].cipher[1] == 0xA6A6A6A6)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, il_pos);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, il_pos);
}
}
}

@@ -991,9 +991,12 @@ __kernel void m13400_comp (__global pw_t *pws, __global const kernel_rule_t *rul
&& esalt_bufs[digests_offset].contents_hash[5] == final_digest[5]
&& esalt_bufs[digests_offset].contents_hash[6] == final_digest[6]
&& esalt_bufs[digests_offset].contents_hash[7] == final_digest[7])
{
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, il_pos);
}
}
}
else
{
@@ -1195,9 +1198,12 @@ __kernel void m13400_comp (__global pw_t *pws, __global const kernel_rule_t *rul
&& esalt_bufs[digests_offset].contents_hash[5] == final_digest[5]
&& esalt_bufs[digests_offset].contents_hash[6] == final_digest[6]
&& esalt_bufs[digests_offset].contents_hash[7] == final_digest[7])
{
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, il_pos);
}
}
}
}
else
@@ -1233,8 +1239,11 @@ __kernel void m13400_comp (__global pw_t *pws, __global const kernel_rule_t *rul
&& esalt_bufs[digests_offset].expected_bytes[1] == out[1]
&& esalt_bufs[digests_offset].expected_bytes[2] == out[2]
&& esalt_bufs[digests_offset].expected_bytes[3] == out[3])
{
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, il_pos);
}
}
}
}

@@ -661,16 +661,25 @@ __kernel void m13751_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (verify_header_aes (esalt_bufs, ukey1, ukey2, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_serpent (esalt_bufs, ukey1, ukey2) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_twofish (esalt_bufs, ukey1, ukey2) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
}

@@ -661,17 +661,26 @@ __kernel void m13752_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (verify_header_aes (esalt_bufs, ukey1, ukey2, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_serpent (esalt_bufs, ukey1, ukey2) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_twofish (esalt_bufs, ukey1, ukey2) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
u32 ukey3[8];
@@ -698,16 +707,25 @@ __kernel void m13752_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (verify_header_aes_twofish (esalt_bufs, ukey1, ukey2, ukey3, ukey4, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_serpent_aes (esalt_bufs, ukey1, ukey2, ukey3, ukey4, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_twofish_serpent (esalt_bufs, ukey1, ukey2, ukey3, ukey4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
}

@@ -661,17 +661,26 @@ __kernel void m13753_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (verify_header_aes (esalt_bufs, ukey1, ukey2, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_serpent (esalt_bufs, ukey1, ukey2) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_twofish (esalt_bufs, ukey1, ukey2) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
u32 ukey3[8];
@@ -698,17 +707,26 @@ __kernel void m13753_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (verify_header_aes_twofish (esalt_bufs, ukey1, ukey2, ukey3, ukey4, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_serpent_aes (esalt_bufs, ukey1, ukey2, ukey3, ukey4, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_twofish_serpent (esalt_bufs, ukey1, ukey2, ukey3, ukey4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
u32 ukey5[8];
@@ -735,11 +753,17 @@ __kernel void m13753_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (verify_header_aes_twofish_serpent (esalt_bufs, ukey1, ukey2, ukey3, ukey4, ukey5, ukey6, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
if (verify_header_serpent_twofish_aes (esalt_bufs, ukey1, ukey2, ukey3, ukey4, ukey5, ukey6, s_te0, s_te1, s_te2, s_te3, s_te4, s_td0, s_td1, s_td2, s_td3, s_td4) == 1)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
}

@@ -699,6 +699,9 @@ __kernel void m14611_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (entropy < MAX_ENTROPY)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
}

@@ -646,6 +646,9 @@ __kernel void m14612_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (entropy < MAX_ENTROPY)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
}

@@ -646,6 +646,9 @@ __kernel void m14613_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (entropy < MAX_ENTROPY)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
}

@@ -690,6 +690,9 @@ __kernel void m14621_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (entropy < MAX_ENTROPY)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
}

@@ -637,6 +637,9 @@ __kernel void m14622_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (entropy < MAX_ENTROPY)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
}

@@ -637,6 +637,9 @@ __kernel void m14623_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (entropy < MAX_ENTROPY)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
}

@@ -666,6 +666,9 @@ __kernel void m14631_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (entropy < MAX_ENTROPY)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
}

@@ -613,6 +613,9 @@ __kernel void m14632_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (entropy < MAX_ENTROPY)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
}

@@ -613,6 +613,9 @@ __kernel void m14633_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (entropy < MAX_ENTROPY)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
}

@@ -854,6 +854,9 @@ __kernel void m14641_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (entropy < MAX_ENTROPY)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
}

@@ -801,6 +801,9 @@ __kernel void m14642_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (entropy < MAX_ENTROPY)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
}

@@ -801,6 +801,9 @@ __kernel void m14643_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if (entropy < MAX_ENTROPY)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, 0, gid, 0);
}
}
}

@@ -1740,7 +1740,10 @@ __kernel void m14700_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if ((cipher[0] == 0xa6a6a6a6) && (cipher[1] == 0xa6a6a6a6))
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, 0);
}
return;
}

@@ -2297,7 +2297,10 @@ __kernel void m14800_comp (__global pw_t *pws, __global const kernel_rule_t *rul
if ((cipher[0] == 0xa6a6a6a6) && (cipher[1] == 0xa6a6a6a6))
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, 0);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, 0);
}
return;
}

@@ -2376,7 +2376,10 @@ __kernel void m15300_comp (__global pw_t *pws, __global const kernel_rule_t *rul
&& expectedHmac[3] == digest[3]
&& expectedHmac[4] == digest[4])
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, il_pos);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, il_pos);
}
}
}
else if (esalt_bufs[digests_offset].version == 2)
@@ -2628,7 +2631,10 @@ __kernel void m15300_comp (__global pw_t *pws, __global const kernel_rule_t *rul
&& expectedHmac[14] == h32_from_64_S (dgst64[7])
&& expectedHmac[15] == l32_from_64_S (dgst64[7]))
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, il_pos);
if (atomic_inc (&hashes_shown[digests_offset]) == 0)
{
mark_hash (plains_buf, d_return_buf, salt_pos, digests_cnt, 0, digests_offset + 0, gid, il_pos);
}
}
}
}
