diff --git a/OpenCL/m16600_a0-optimized.cl b/OpenCL/m16600_a0-optimized.cl index d5baafd3f..952a06c83 100644 --- a/OpenCL/m16600_a0-optimized.cl +++ b/OpenCL/m16600_a0-optimized.cl @@ -106,6 +106,26 @@ KERNEL_FQ void m16600_m04 (KERN_ATTR_RULES_ESALT (electrum_wallet_t)) const u32 pw_len = pws[gid].pw_len & 63; + /** + * data + */ + + const u32 salt_type = esalt_bufs[digests_offset].salt_type; + + u32 encrypted[4]; + + encrypted[0] = esalt_bufs[digests_offset].encrypted[0]; + encrypted[1] = esalt_bufs[digests_offset].encrypted[1]; + encrypted[2] = esalt_bufs[digests_offset].encrypted[2]; + encrypted[3] = esalt_bufs[digests_offset].encrypted[3]; + + u32 iv[4]; + + iv[0] = esalt_bufs[digests_offset].iv[0]; + iv[1] = esalt_bufs[digests_offset].iv[1]; + iv[2] = esalt_bufs[digests_offset].iv[2]; + iv[3] = esalt_bufs[digests_offset].iv[3]; + /** * loop */ @@ -348,30 +368,16 @@ KERNEL_FQ void m16600_m04 (KERN_ATTR_RULES_ESALT (electrum_wallet_t)) aes256_set_decrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3, s_td0, s_td1, s_td2, s_td3); - u32 encrypted[4]; - - encrypted[0] = esalt_bufs[digests_offset].encrypted[0]; - encrypted[1] = esalt_bufs[digests_offset].encrypted[1]; - encrypted[2] = esalt_bufs[digests_offset].encrypted[2]; - encrypted[3] = esalt_bufs[digests_offset].encrypted[3]; - u32 out[4]; aes256_decrypt (ks, encrypted, out, s_td0, s_td1, s_td2, s_td3, s_td4); - u32 iv[4]; - - iv[0] = esalt_bufs[digests_offset].iv[0]; - iv[1] = esalt_bufs[digests_offset].iv[1]; - iv[2] = esalt_bufs[digests_offset].iv[2]; - iv[3] = esalt_bufs[digests_offset].iv[3]; - out[0] ^= iv[0]; out[1] ^= iv[1]; out[2] ^= iv[2]; out[3] ^= iv[3]; - if (esalt_bufs[digests_offset].salt_type == 1) + if (salt_type == 1) { if (is_valid_hex_32 (out[0]) == 0) continue; if (is_valid_hex_32 (out[1]) == 0) continue; @@ -384,7 +390,7 @@ KERNEL_FQ void m16600_m04 (KERN_ATTR_RULES_ESALT (electrum_wallet_t)) } } - if (esalt_bufs[digests_offset].salt_type == 2) + if (salt_type == 2) { if ((u8) (out[0] >> 0) != 'x') continue; if ((u8) (out[0] >> 8) != 'p') continue; @@ -400,7 +406,7 @@ KERNEL_FQ void m16600_m04 (KERN_ATTR_RULES_ESALT (electrum_wallet_t)) } } - if (esalt_bufs[digests_offset].salt_type == 3) + if (salt_type == 3) { // check PKCS7 padding (either 13 times 0x0d or 12 times 0x0c at the end, we only check 12 bytes, it's enough): @@ -512,6 +518,26 @@ KERNEL_FQ void m16600_s04 (KERN_ATTR_RULES_ESALT (electrum_wallet_t)) const u32 pw_len = pws[gid].pw_len & 63; + /** + * data + */ + + const u32 salt_type = esalt_bufs[digests_offset].salt_type; + + u32 encrypted[4]; + + encrypted[0] = esalt_bufs[digests_offset].encrypted[0]; + encrypted[1] = esalt_bufs[digests_offset].encrypted[1]; + encrypted[2] = esalt_bufs[digests_offset].encrypted[2]; + encrypted[3] = esalt_bufs[digests_offset].encrypted[3]; + + u32 iv[4]; + + iv[0] = esalt_bufs[digests_offset].iv[0]; + iv[1] = esalt_bufs[digests_offset].iv[1]; + iv[2] = esalt_bufs[digests_offset].iv[2]; + iv[3] = esalt_bufs[digests_offset].iv[3]; + /** * loop */ @@ -754,30 +780,16 @@ KERNEL_FQ void m16600_s04 (KERN_ATTR_RULES_ESALT (electrum_wallet_t)) aes256_set_decrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3, s_td0, s_td1, s_td2, s_td3); - u32 encrypted[4]; - - encrypted[0] = esalt_bufs[digests_offset].encrypted[0]; - encrypted[1] = esalt_bufs[digests_offset].encrypted[1]; - encrypted[2] = esalt_bufs[digests_offset].encrypted[2]; - encrypted[3] = esalt_bufs[digests_offset].encrypted[3]; - u32 out[4]; aes256_decrypt (ks, encrypted, out, s_td0, s_td1, s_td2, s_td3, s_td4); - u32 iv[4]; 
- - iv[0] = esalt_bufs[digests_offset].iv[0]; - iv[1] = esalt_bufs[digests_offset].iv[1]; - iv[2] = esalt_bufs[digests_offset].iv[2]; - iv[3] = esalt_bufs[digests_offset].iv[3]; - out[0] ^= iv[0]; out[1] ^= iv[1]; out[2] ^= iv[2]; out[3] ^= iv[3]; - if (esalt_bufs[digests_offset].salt_type == 1) + if (salt_type == 1) { if (is_valid_hex_32 (out[0]) == 0) continue; if (is_valid_hex_32 (out[1]) == 0) continue; @@ -790,7 +802,7 @@ KERNEL_FQ void m16600_s04 (KERN_ATTR_RULES_ESALT (electrum_wallet_t)) } } - if (esalt_bufs[digests_offset].salt_type == 2) + if (salt_type == 2) { if ((u8) (out[0] >> 0) != 'x') continue; if ((u8) (out[0] >> 8) != 'p') continue; @@ -806,7 +818,7 @@ KERNEL_FQ void m16600_s04 (KERN_ATTR_RULES_ESALT (electrum_wallet_t)) } } - if (esalt_bufs[digests_offset].salt_type == 3) + if (salt_type == 3) { // check PKCS7 padding (either 13 times 0x0d or 12 times 0x0c at the end, we only check 12 bytes, it's enough): diff --git a/OpenCL/m16600_a0-pure.cl b/OpenCL/m16600_a0-pure.cl index f5e29d805..551e751d2 100644 --- a/OpenCL/m16600_a0-pure.cl +++ b/OpenCL/m16600_a0-pure.cl @@ -94,6 +94,26 @@ KERNEL_FQ void m16600_mxx (KERN_ATTR_RULES_ESALT (electrum_wallet_t)) COPY_PW (pws[gid]); + /** + * data + */ + + const u32 salt_type = esalt_bufs[digests_offset].salt_type; + + u32 encrypted[4]; + + encrypted[0] = esalt_bufs[digests_offset].encrypted[0]; + encrypted[1] = esalt_bufs[digests_offset].encrypted[1]; + encrypted[2] = esalt_bufs[digests_offset].encrypted[2]; + encrypted[3] = esalt_bufs[digests_offset].encrypted[3]; + + u32 iv[4]; + + iv[0] = esalt_bufs[digests_offset].iv[0]; + iv[1] = esalt_bufs[digests_offset].iv[1]; + iv[2] = esalt_bufs[digests_offset].iv[2]; + iv[3] = esalt_bufs[digests_offset].iv[3]; + /** * loop */ @@ -162,30 +182,16 @@ KERNEL_FQ void m16600_mxx (KERN_ATTR_RULES_ESALT (electrum_wallet_t)) aes256_set_decrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3, s_td0, s_td1, s_td2, s_td3); - u32 encrypted[4]; - - encrypted[0] = esalt_bufs[digests_offset].encrypted[0]; - encrypted[1] = esalt_bufs[digests_offset].encrypted[1]; - encrypted[2] = esalt_bufs[digests_offset].encrypted[2]; - encrypted[3] = esalt_bufs[digests_offset].encrypted[3]; - u32 out[4]; aes256_decrypt (ks, encrypted, out, s_td0, s_td1, s_td2, s_td3, s_td4); - u32 iv[4]; - - iv[0] = esalt_bufs[digests_offset].iv[0]; - iv[1] = esalt_bufs[digests_offset].iv[1]; - iv[2] = esalt_bufs[digests_offset].iv[2]; - iv[3] = esalt_bufs[digests_offset].iv[3]; - out[0] ^= iv[0]; out[1] ^= iv[1]; out[2] ^= iv[2]; out[3] ^= iv[3]; - if (esalt_bufs[digests_offset].salt_type == 1) + if (salt_type == 1) { if (is_valid_hex_32 (out[0]) == 0) continue; if (is_valid_hex_32 (out[1]) == 0) continue; @@ -198,7 +204,7 @@ KERNEL_FQ void m16600_mxx (KERN_ATTR_RULES_ESALT (electrum_wallet_t)) } } - if (esalt_bufs[digests_offset].salt_type == 2) + if (salt_type == 2) { if ((u8) (out[0] >> 0) != 'x') continue; if ((u8) (out[0] >> 8) != 'p') continue; @@ -214,7 +220,7 @@ KERNEL_FQ void m16600_mxx (KERN_ATTR_RULES_ESALT (electrum_wallet_t)) } } - if (esalt_bufs[digests_offset].salt_type == 3) + if (salt_type == 3) { // check PKCS7 padding (either 13 times 0x0d or 12 times 0x0c at the end, we only check 12 bytes, it's enough): @@ -306,6 +312,26 @@ KERNEL_FQ void m16600_sxx (KERN_ATTR_RULES_ESALT (electrum_wallet_t)) COPY_PW (pws[gid]); + /** + * data + */ + + const u32 salt_type = esalt_bufs[digests_offset].salt_type; + + u32 encrypted[4]; + + encrypted[0] = esalt_bufs[digests_offset].encrypted[0]; + encrypted[1] = 
esalt_bufs[digests_offset].encrypted[1]; + encrypted[2] = esalt_bufs[digests_offset].encrypted[2]; + encrypted[3] = esalt_bufs[digests_offset].encrypted[3]; + + u32 iv[4]; + + iv[0] = esalt_bufs[digests_offset].iv[0]; + iv[1] = esalt_bufs[digests_offset].iv[1]; + iv[2] = esalt_bufs[digests_offset].iv[2]; + iv[3] = esalt_bufs[digests_offset].iv[3]; + /** * loop */ @@ -374,30 +400,16 @@ KERNEL_FQ void m16600_sxx (KERN_ATTR_RULES_ESALT (electrum_wallet_t)) aes256_set_decrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3, s_td0, s_td1, s_td2, s_td3); - u32 encrypted[4]; - - encrypted[0] = esalt_bufs[digests_offset].encrypted[0]; - encrypted[1] = esalt_bufs[digests_offset].encrypted[1]; - encrypted[2] = esalt_bufs[digests_offset].encrypted[2]; - encrypted[3] = esalt_bufs[digests_offset].encrypted[3]; - u32 out[4]; aes256_decrypt (ks, encrypted, out, s_td0, s_td1, s_td2, s_td3, s_td4); - u32 iv[4]; - - iv[0] = esalt_bufs[digests_offset].iv[0]; - iv[1] = esalt_bufs[digests_offset].iv[1]; - iv[2] = esalt_bufs[digests_offset].iv[2]; - iv[3] = esalt_bufs[digests_offset].iv[3]; - out[0] ^= iv[0]; out[1] ^= iv[1]; out[2] ^= iv[2]; out[3] ^= iv[3]; - if (esalt_bufs[digests_offset].salt_type == 1) + if (salt_type == 1) { if (is_valid_hex_32 (out[0]) == 0) continue; if (is_valid_hex_32 (out[1]) == 0) continue; @@ -410,7 +422,7 @@ KERNEL_FQ void m16600_sxx (KERN_ATTR_RULES_ESALT (electrum_wallet_t)) } } - if (esalt_bufs[digests_offset].salt_type == 2) + if (salt_type == 2) { if ((u8) (out[0] >> 0) != 'x') continue; if ((u8) (out[0] >> 8) != 'p') continue; @@ -426,7 +438,7 @@ KERNEL_FQ void m16600_sxx (KERN_ATTR_RULES_ESALT (electrum_wallet_t)) } } - if (esalt_bufs[digests_offset].salt_type == 3) + if (salt_type == 3) { // check PKCS7 padding (either 13 times 0x0d or 12 times 0x0c at the end, we only check 12 bytes, it's enough): diff --git a/OpenCL/m16600_a1-optimized.cl b/OpenCL/m16600_a1-optimized.cl index 7896a77e5..72aba70f5 100644 --- a/OpenCL/m16600_a1-optimized.cl +++ b/OpenCL/m16600_a1-optimized.cl @@ -104,6 +104,26 @@ KERNEL_FQ void m16600_m04 (KERN_ATTR_ESALT (electrum_wallet_t)) const u32 pw_l_len = pws[gid].pw_len & 63; + /** + * data + */ + + const u32 salt_type = esalt_bufs[digests_offset].salt_type; + + u32 encrypted[4]; + + encrypted[0] = esalt_bufs[digests_offset].encrypted[0]; + encrypted[1] = esalt_bufs[digests_offset].encrypted[1]; + encrypted[2] = esalt_bufs[digests_offset].encrypted[2]; + encrypted[3] = esalt_bufs[digests_offset].encrypted[3]; + + u32 iv[4]; + + iv[0] = esalt_bufs[digests_offset].iv[0]; + iv[1] = esalt_bufs[digests_offset].iv[1]; + iv[2] = esalt_bufs[digests_offset].iv[2]; + iv[3] = esalt_bufs[digests_offset].iv[3]; + /** * loop */ @@ -404,30 +424,16 @@ KERNEL_FQ void m16600_m04 (KERN_ATTR_ESALT (electrum_wallet_t)) aes256_set_decrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3, s_td0, s_td1, s_td2, s_td3); - u32 encrypted[4]; - - encrypted[0] = esalt_bufs[digests_offset].encrypted[0]; - encrypted[1] = esalt_bufs[digests_offset].encrypted[1]; - encrypted[2] = esalt_bufs[digests_offset].encrypted[2]; - encrypted[3] = esalt_bufs[digests_offset].encrypted[3]; - u32 out[4]; aes256_decrypt (ks, encrypted, out, s_td0, s_td1, s_td2, s_td3, s_td4); - u32 iv[4]; - - iv[0] = esalt_bufs[digests_offset].iv[0]; - iv[1] = esalt_bufs[digests_offset].iv[1]; - iv[2] = esalt_bufs[digests_offset].iv[2]; - iv[3] = esalt_bufs[digests_offset].iv[3]; - out[0] ^= iv[0]; out[1] ^= iv[1]; out[2] ^= iv[2]; out[3] ^= iv[3]; - if (esalt_bufs[digests_offset].salt_type == 1) + if (salt_type == 1) { if 
(is_valid_hex_32 (out[0]) == 0) continue; if (is_valid_hex_32 (out[1]) == 0) continue; @@ -440,7 +446,7 @@ KERNEL_FQ void m16600_m04 (KERN_ATTR_ESALT (electrum_wallet_t)) } } - if (esalt_bufs[digests_offset].salt_type == 2) + if (salt_type == 2) { if ((u8) (out[0] >> 0) != 'x') continue; if ((u8) (out[0] >> 8) != 'p') continue; @@ -456,7 +462,7 @@ KERNEL_FQ void m16600_m04 (KERN_ATTR_ESALT (electrum_wallet_t)) } } - if (esalt_bufs[digests_offset].salt_type == 3) + if (salt_type == 3) { // check PKCS7 padding (either 13 times 0x0d or 12 times 0x0c at the end, we only check 12 bytes, it's enough): @@ -568,6 +574,26 @@ KERNEL_FQ void m16600_s04 (KERN_ATTR_ESALT (electrum_wallet_t)) const u32 pw_l_len = pws[gid].pw_len & 63; + /** + * data + */ + + const u32 salt_type = esalt_bufs[digests_offset].salt_type; + + u32 encrypted[4]; + + encrypted[0] = esalt_bufs[digests_offset].encrypted[0]; + encrypted[1] = esalt_bufs[digests_offset].encrypted[1]; + encrypted[2] = esalt_bufs[digests_offset].encrypted[2]; + encrypted[3] = esalt_bufs[digests_offset].encrypted[3]; + + u32 iv[4]; + + iv[0] = esalt_bufs[digests_offset].iv[0]; + iv[1] = esalt_bufs[digests_offset].iv[1]; + iv[2] = esalt_bufs[digests_offset].iv[2]; + iv[3] = esalt_bufs[digests_offset].iv[3]; + /** * loop */ @@ -868,30 +894,16 @@ KERNEL_FQ void m16600_s04 (KERN_ATTR_ESALT (electrum_wallet_t)) aes256_set_decrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3, s_td0, s_td1, s_td2, s_td3); - u32 encrypted[4]; - - encrypted[0] = esalt_bufs[digests_offset].encrypted[0]; - encrypted[1] = esalt_bufs[digests_offset].encrypted[1]; - encrypted[2] = esalt_bufs[digests_offset].encrypted[2]; - encrypted[3] = esalt_bufs[digests_offset].encrypted[3]; - u32 out[4]; aes256_decrypt (ks, encrypted, out, s_td0, s_td1, s_td2, s_td3, s_td4); - u32 iv[4]; - - iv[0] = esalt_bufs[digests_offset].iv[0]; - iv[1] = esalt_bufs[digests_offset].iv[1]; - iv[2] = esalt_bufs[digests_offset].iv[2]; - iv[3] = esalt_bufs[digests_offset].iv[3]; - out[0] ^= iv[0]; out[1] ^= iv[1]; out[2] ^= iv[2]; out[3] ^= iv[3]; - if (esalt_bufs[digests_offset].salt_type == 1) + if (salt_type == 1) { if (is_valid_hex_32 (out[0]) == 0) continue; if (is_valid_hex_32 (out[1]) == 0) continue; @@ -904,7 +916,7 @@ KERNEL_FQ void m16600_s04 (KERN_ATTR_ESALT (electrum_wallet_t)) } } - if (esalt_bufs[digests_offset].salt_type == 2) + if (salt_type == 2) { if ((u8) (out[0] >> 0) != 'x') continue; if ((u8) (out[0] >> 8) != 'p') continue; @@ -920,7 +932,7 @@ KERNEL_FQ void m16600_s04 (KERN_ATTR_ESALT (electrum_wallet_t)) } } - if (esalt_bufs[digests_offset].salt_type == 3) + if (salt_type == 3) { // check PKCS7 padding (either 13 times 0x0d or 12 times 0x0c at the end, we only check 12 bytes, it's enough): diff --git a/OpenCL/m16600_a1-pure.cl b/OpenCL/m16600_a1-pure.cl index dde2ad588..976c72176 100644 --- a/OpenCL/m16600_a1-pure.cl +++ b/OpenCL/m16600_a1-pure.cl @@ -96,6 +96,26 @@ KERNEL_FQ void m16600_mxx (KERN_ATTR_ESALT (electrum_wallet_t)) sha256_update_global_swap (&ctx0, pws[gid].i, pws[gid].pw_len); + /** + * data + */ + + const u32 salt_type = esalt_bufs[digests_offset].salt_type; + + u32 encrypted[4]; + + encrypted[0] = esalt_bufs[digests_offset].encrypted[0]; + encrypted[1] = esalt_bufs[digests_offset].encrypted[1]; + encrypted[2] = esalt_bufs[digests_offset].encrypted[2]; + encrypted[3] = esalt_bufs[digests_offset].encrypted[3]; + + u32 iv[4]; + + iv[0] = esalt_bufs[digests_offset].iv[0]; + iv[1] = esalt_bufs[digests_offset].iv[1]; + iv[2] = esalt_bufs[digests_offset].iv[2]; + iv[3] = 
esalt_bufs[digests_offset].iv[3]; + /** * loop */ @@ -158,30 +178,16 @@ KERNEL_FQ void m16600_mxx (KERN_ATTR_ESALT (electrum_wallet_t)) aes256_set_decrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3, s_td0, s_td1, s_td2, s_td3); - u32 encrypted[4]; - - encrypted[0] = esalt_bufs[digests_offset].encrypted[0]; - encrypted[1] = esalt_bufs[digests_offset].encrypted[1]; - encrypted[2] = esalt_bufs[digests_offset].encrypted[2]; - encrypted[3] = esalt_bufs[digests_offset].encrypted[3]; - u32 out[4]; aes256_decrypt (ks, encrypted, out, s_td0, s_td1, s_td2, s_td3, s_td4); - u32 iv[4]; - - iv[0] = esalt_bufs[digests_offset].iv[0]; - iv[1] = esalt_bufs[digests_offset].iv[1]; - iv[2] = esalt_bufs[digests_offset].iv[2]; - iv[3] = esalt_bufs[digests_offset].iv[3]; - out[0] ^= iv[0]; out[1] ^= iv[1]; out[2] ^= iv[2]; out[3] ^= iv[3]; - if (esalt_bufs[digests_offset].salt_type == 1) + if (salt_type == 1) { if (is_valid_hex_32 (out[0]) == 0) continue; if (is_valid_hex_32 (out[1]) == 0) continue; @@ -194,7 +200,7 @@ KERNEL_FQ void m16600_mxx (KERN_ATTR_ESALT (electrum_wallet_t)) } } - if (esalt_bufs[digests_offset].salt_type == 2) + if (salt_type == 2) { if ((u8) (out[0] >> 0) != 'x') continue; if ((u8) (out[0] >> 8) != 'p') continue; @@ -210,7 +216,7 @@ KERNEL_FQ void m16600_mxx (KERN_ATTR_ESALT (electrum_wallet_t)) } } - if (esalt_bufs[digests_offset].salt_type == 3) + if (salt_type == 3) { // check PKCS7 padding (either 13 times 0x0d or 12 times 0x0c at the end, we only check 12 bytes, it's enough): @@ -306,6 +312,26 @@ KERNEL_FQ void m16600_sxx (KERN_ATTR_ESALT (electrum_wallet_t)) sha256_update_global_swap (&ctx0, pws[gid].i, pws[gid].pw_len); + /** + * data + */ + + const u32 salt_type = esalt_bufs[digests_offset].salt_type; + + u32 encrypted[4]; + + encrypted[0] = esalt_bufs[digests_offset].encrypted[0]; + encrypted[1] = esalt_bufs[digests_offset].encrypted[1]; + encrypted[2] = esalt_bufs[digests_offset].encrypted[2]; + encrypted[3] = esalt_bufs[digests_offset].encrypted[3]; + + u32 iv[4]; + + iv[0] = esalt_bufs[digests_offset].iv[0]; + iv[1] = esalt_bufs[digests_offset].iv[1]; + iv[2] = esalt_bufs[digests_offset].iv[2]; + iv[3] = esalt_bufs[digests_offset].iv[3]; + /** * loop */ @@ -368,30 +394,16 @@ KERNEL_FQ void m16600_sxx (KERN_ATTR_ESALT (electrum_wallet_t)) aes256_set_decrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3, s_td0, s_td1, s_td2, s_td3); - u32 encrypted[4]; - - encrypted[0] = esalt_bufs[digests_offset].encrypted[0]; - encrypted[1] = esalt_bufs[digests_offset].encrypted[1]; - encrypted[2] = esalt_bufs[digests_offset].encrypted[2]; - encrypted[3] = esalt_bufs[digests_offset].encrypted[3]; - u32 out[4]; aes256_decrypt (ks, encrypted, out, s_td0, s_td1, s_td2, s_td3, s_td4); - u32 iv[4]; - - iv[0] = esalt_bufs[digests_offset].iv[0]; - iv[1] = esalt_bufs[digests_offset].iv[1]; - iv[2] = esalt_bufs[digests_offset].iv[2]; - iv[3] = esalt_bufs[digests_offset].iv[3]; - out[0] ^= iv[0]; out[1] ^= iv[1]; out[2] ^= iv[2]; out[3] ^= iv[3]; - if (esalt_bufs[digests_offset].salt_type == 1) + if (salt_type == 1) { if (is_valid_hex_32 (out[0]) == 0) continue; if (is_valid_hex_32 (out[1]) == 0) continue; @@ -404,7 +416,7 @@ KERNEL_FQ void m16600_sxx (KERN_ATTR_ESALT (electrum_wallet_t)) } } - if (esalt_bufs[digests_offset].salt_type == 2) + if (salt_type == 2) { if ((u8) (out[0] >> 0) != 'x') continue; if ((u8) (out[0] >> 8) != 'p') continue; @@ -420,7 +432,7 @@ KERNEL_FQ void m16600_sxx (KERN_ATTR_ESALT (electrum_wallet_t)) } } - if (esalt_bufs[digests_offset].salt_type == 3) + if (salt_type == 3) { // 
check PKCS7 padding (either 13 times 0x0d or 12 times 0x0c at the end, we only check 12 bytes, it's enough): diff --git a/OpenCL/m16600_a3-optimized.cl b/OpenCL/m16600_a3-optimized.cl index cfe2a1d9d..bf0ed87fd 100644 --- a/OpenCL/m16600_a3-optimized.cl +++ b/OpenCL/m16600_a3-optimized.cl @@ -31,6 +31,26 @@ DECLSPEC void m16600 (SHM_TYPE u32a *s_te0, SHM_TYPE u32a *s_te1, SHM_TYPE u32a const u64 gid = get_global_id (0); + /** + * data + */ + + const u32 salt_type = esalt_bufs[digests_offset].salt_type; + + u32 encrypted[4]; + + encrypted[0] = esalt_bufs[digests_offset].encrypted[0]; + encrypted[1] = esalt_bufs[digests_offset].encrypted[1]; + encrypted[2] = esalt_bufs[digests_offset].encrypted[2]; + encrypted[3] = esalt_bufs[digests_offset].encrypted[3]; + + u32 iv[4]; + + iv[0] = esalt_bufs[digests_offset].iv[0]; + iv[1] = esalt_bufs[digests_offset].iv[1]; + iv[2] = esalt_bufs[digests_offset].iv[2]; + iv[3] = esalt_bufs[digests_offset].iv[3]; + /** * loop */ @@ -41,255 +61,100 @@ DECLSPEC void m16600 (SHM_TYPE u32a *s_te0, SHM_TYPE u32a *s_te1, SHM_TYPE u32a { const u32x w0r = ix_create_bft (bfs_buf, il_pos); - const u32x w0 = w0l | w0r; - - u32x w0_t = w0; - u32x w1_t = w[ 1]; - u32x w2_t = w[ 2]; - u32x w3_t = w[ 3]; - u32x w4_t = w[ 4]; - u32x w5_t = w[ 5]; - u32x w6_t = w[ 6]; - u32x w7_t = w[ 7]; - u32x w8_t = w[ 8]; - u32x w9_t = w[ 9]; - u32x wa_t = w[10]; - u32x wb_t = w[11]; - u32x wc_t = w[12]; - u32x wd_t = w[13]; - u32x we_t = w[14]; - u32x wf_t = w[15]; - - u32x a = SHA256M_A; - u32x b = SHA256M_B; - u32x c = SHA256M_C; - u32x d = SHA256M_D; - u32x e = SHA256M_E; - u32x f = SHA256M_F; - u32x g = SHA256M_G; - u32x h = SHA256M_H; - - SHA256_STEP (SHA256_F0o, SHA256_F1o, a, b, c, d, e, f, g, h, w0_t, SHA256C00); - SHA256_STEP (SHA256_F0o, SHA256_F1o, h, a, b, c, d, e, f, g, w1_t, SHA256C01); - SHA256_STEP (SHA256_F0o, SHA256_F1o, g, h, a, b, c, d, e, f, w2_t, SHA256C02); - SHA256_STEP (SHA256_F0o, SHA256_F1o, f, g, h, a, b, c, d, e, w3_t, SHA256C03); - SHA256_STEP (SHA256_F0o, SHA256_F1o, e, f, g, h, a, b, c, d, w4_t, SHA256C04); - SHA256_STEP (SHA256_F0o, SHA256_F1o, d, e, f, g, h, a, b, c, w5_t, SHA256C05); - SHA256_STEP (SHA256_F0o, SHA256_F1o, c, d, e, f, g, h, a, b, w6_t, SHA256C06); - SHA256_STEP (SHA256_F0o, SHA256_F1o, b, c, d, e, f, g, h, a, w7_t, SHA256C07); - SHA256_STEP (SHA256_F0o, SHA256_F1o, a, b, c, d, e, f, g, h, w8_t, SHA256C08); - SHA256_STEP (SHA256_F0o, SHA256_F1o, h, a, b, c, d, e, f, g, w9_t, SHA256C09); - SHA256_STEP (SHA256_F0o, SHA256_F1o, g, h, a, b, c, d, e, f, wa_t, SHA256C0a); - SHA256_STEP (SHA256_F0o, SHA256_F1o, f, g, h, a, b, c, d, e, wb_t, SHA256C0b); - SHA256_STEP (SHA256_F0o, SHA256_F1o, e, f, g, h, a, b, c, d, wc_t, SHA256C0c); - SHA256_STEP (SHA256_F0o, SHA256_F1o, d, e, f, g, h, a, b, c, wd_t, SHA256C0d); - SHA256_STEP (SHA256_F0o, SHA256_F1o, c, d, e, f, g, h, a, b, we_t, SHA256C0e); - SHA256_STEP (SHA256_F0o, SHA256_F1o, b, c, d, e, f, g, h, a, wf_t, SHA256C0f); - - w0_t = SHA256_EXPAND (we_t, w9_t, w1_t, w0_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, a, b, c, d, e, f, g, h, w0_t, SHA256C10); - w1_t = SHA256_EXPAND (wf_t, wa_t, w2_t, w1_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, h, a, b, c, d, e, f, g, w1_t, SHA256C11); - w2_t = SHA256_EXPAND (w0_t, wb_t, w3_t, w2_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, g, h, a, b, c, d, e, f, w2_t, SHA256C12); - w3_t = SHA256_EXPAND (w1_t, wc_t, w4_t, w3_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, f, g, h, a, b, c, d, e, w3_t, SHA256C13); - w4_t = SHA256_EXPAND (w2_t, wd_t, w5_t, w4_t); SHA256_STEP (SHA256_F0o, 
SHA256_F1o, e, f, g, h, a, b, c, d, w4_t, SHA256C14); - w5_t = SHA256_EXPAND (w3_t, we_t, w6_t, w5_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, d, e, f, g, h, a, b, c, w5_t, SHA256C15); - w6_t = SHA256_EXPAND (w4_t, wf_t, w7_t, w6_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, c, d, e, f, g, h, a, b, w6_t, SHA256C16); - w7_t = SHA256_EXPAND (w5_t, w0_t, w8_t, w7_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, b, c, d, e, f, g, h, a, w7_t, SHA256C17); - w8_t = SHA256_EXPAND (w6_t, w1_t, w9_t, w8_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, a, b, c, d, e, f, g, h, w8_t, SHA256C18); - w9_t = SHA256_EXPAND (w7_t, w2_t, wa_t, w9_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, h, a, b, c, d, e, f, g, w9_t, SHA256C19); - wa_t = SHA256_EXPAND (w8_t, w3_t, wb_t, wa_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, g, h, a, b, c, d, e, f, wa_t, SHA256C1a); - wb_t = SHA256_EXPAND (w9_t, w4_t, wc_t, wb_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, f, g, h, a, b, c, d, e, wb_t, SHA256C1b); - wc_t = SHA256_EXPAND (wa_t, w5_t, wd_t, wc_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, e, f, g, h, a, b, c, d, wc_t, SHA256C1c); - wd_t = SHA256_EXPAND (wb_t, w6_t, we_t, wd_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, d, e, f, g, h, a, b, c, wd_t, SHA256C1d); - we_t = SHA256_EXPAND (wc_t, w7_t, wf_t, we_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, c, d, e, f, g, h, a, b, we_t, SHA256C1e); - wf_t = SHA256_EXPAND (wd_t, w8_t, w0_t, wf_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, b, c, d, e, f, g, h, a, wf_t, SHA256C1f); - - w0_t = SHA256_EXPAND (we_t, w9_t, w1_t, w0_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, a, b, c, d, e, f, g, h, w0_t, SHA256C20); - w1_t = SHA256_EXPAND (wf_t, wa_t, w2_t, w1_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, h, a, b, c, d, e, f, g, w1_t, SHA256C21); - w2_t = SHA256_EXPAND (w0_t, wb_t, w3_t, w2_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, g, h, a, b, c, d, e, f, w2_t, SHA256C22); - w3_t = SHA256_EXPAND (w1_t, wc_t, w4_t, w3_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, f, g, h, a, b, c, d, e, w3_t, SHA256C23); - w4_t = SHA256_EXPAND (w2_t, wd_t, w5_t, w4_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, e, f, g, h, a, b, c, d, w4_t, SHA256C24); - w5_t = SHA256_EXPAND (w3_t, we_t, w6_t, w5_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, d, e, f, g, h, a, b, c, w5_t, SHA256C25); - w6_t = SHA256_EXPAND (w4_t, wf_t, w7_t, w6_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, c, d, e, f, g, h, a, b, w6_t, SHA256C26); - w7_t = SHA256_EXPAND (w5_t, w0_t, w8_t, w7_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, b, c, d, e, f, g, h, a, w7_t, SHA256C27); - w8_t = SHA256_EXPAND (w6_t, w1_t, w9_t, w8_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, a, b, c, d, e, f, g, h, w8_t, SHA256C28); - w9_t = SHA256_EXPAND (w7_t, w2_t, wa_t, w9_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, h, a, b, c, d, e, f, g, w9_t, SHA256C29); - wa_t = SHA256_EXPAND (w8_t, w3_t, wb_t, wa_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, g, h, a, b, c, d, e, f, wa_t, SHA256C2a); - wb_t = SHA256_EXPAND (w9_t, w4_t, wc_t, wb_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, f, g, h, a, b, c, d, e, wb_t, SHA256C2b); - wc_t = SHA256_EXPAND (wa_t, w5_t, wd_t, wc_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, e, f, g, h, a, b, c, d, wc_t, SHA256C2c); - wd_t = SHA256_EXPAND (wb_t, w6_t, we_t, wd_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, d, e, f, g, h, a, b, c, wd_t, SHA256C2d); - we_t = SHA256_EXPAND (wc_t, w7_t, wf_t, we_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, c, d, e, f, g, h, a, b, we_t, SHA256C2e); - wf_t = SHA256_EXPAND (wd_t, w8_t, w0_t, wf_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, b, c, d, e, f, g, h, a, wf_t, SHA256C2f); - - w0_t = SHA256_EXPAND (we_t, w9_t, 
w1_t, w0_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, a, b, c, d, e, f, g, h, w0_t, SHA256C30); - w1_t = SHA256_EXPAND (wf_t, wa_t, w2_t, w1_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, h, a, b, c, d, e, f, g, w1_t, SHA256C31); - w2_t = SHA256_EXPAND (w0_t, wb_t, w3_t, w2_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, g, h, a, b, c, d, e, f, w2_t, SHA256C32); - w3_t = SHA256_EXPAND (w1_t, wc_t, w4_t, w3_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, f, g, h, a, b, c, d, e, w3_t, SHA256C33); - w4_t = SHA256_EXPAND (w2_t, wd_t, w5_t, w4_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, e, f, g, h, a, b, c, d, w4_t, SHA256C34); - w5_t = SHA256_EXPAND (w3_t, we_t, w6_t, w5_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, d, e, f, g, h, a, b, c, w5_t, SHA256C35); - w6_t = SHA256_EXPAND (w4_t, wf_t, w7_t, w6_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, c, d, e, f, g, h, a, b, w6_t, SHA256C36); - w7_t = SHA256_EXPAND (w5_t, w0_t, w8_t, w7_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, b, c, d, e, f, g, h, a, w7_t, SHA256C37); - w8_t = SHA256_EXPAND (w6_t, w1_t, w9_t, w8_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, a, b, c, d, e, f, g, h, w8_t, SHA256C38); - w9_t = SHA256_EXPAND (w7_t, w2_t, wa_t, w9_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, h, a, b, c, d, e, f, g, w9_t, SHA256C39); - wa_t = SHA256_EXPAND (w8_t, w3_t, wb_t, wa_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, g, h, a, b, c, d, e, f, wa_t, SHA256C3a); - wb_t = SHA256_EXPAND (w9_t, w4_t, wc_t, wb_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, f, g, h, a, b, c, d, e, wb_t, SHA256C3b); - wc_t = SHA256_EXPAND (wa_t, w5_t, wd_t, wc_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, e, f, g, h, a, b, c, d, wc_t, SHA256C3c); - wd_t = SHA256_EXPAND (wb_t, w6_t, we_t, wd_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, d, e, f, g, h, a, b, c, wd_t, SHA256C3d); - we_t = SHA256_EXPAND (wc_t, w7_t, wf_t, we_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, c, d, e, f, g, h, a, b, we_t, SHA256C3e); - wf_t = SHA256_EXPAND (wd_t, w8_t, w0_t, wf_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, b, c, d, e, f, g, h, a, wf_t, SHA256C3f); - - a += SHA256M_A; - b += SHA256M_B; - c += SHA256M_C; - d += SHA256M_D; - e += SHA256M_E; - f += SHA256M_F; - g += SHA256M_G; - h += SHA256M_H; - - w0_t = a; - w1_t = b; - w2_t = c; - w3_t = d; - w4_t = e; - w5_t = f; - w6_t = g; - w7_t = h; - w8_t = 0x80000000; - w9_t = 0; - wa_t = 0; - wb_t = 0; - wc_t = 0; - wd_t = 0; - we_t = 0; - wf_t = 32 * 8; - - a = SHA256M_A; - b = SHA256M_B; - c = SHA256M_C; - d = SHA256M_D; - e = SHA256M_E; - f = SHA256M_F; - g = SHA256M_G; - h = SHA256M_H; - - SHA256_STEP (SHA256_F0o, SHA256_F1o, a, b, c, d, e, f, g, h, w0_t, SHA256C00); - SHA256_STEP (SHA256_F0o, SHA256_F1o, h, a, b, c, d, e, f, g, w1_t, SHA256C01); - SHA256_STEP (SHA256_F0o, SHA256_F1o, g, h, a, b, c, d, e, f, w2_t, SHA256C02); - SHA256_STEP (SHA256_F0o, SHA256_F1o, f, g, h, a, b, c, d, e, w3_t, SHA256C03); - SHA256_STEP (SHA256_F0o, SHA256_F1o, e, f, g, h, a, b, c, d, w4_t, SHA256C04); - SHA256_STEP (SHA256_F0o, SHA256_F1o, d, e, f, g, h, a, b, c, w5_t, SHA256C05); - SHA256_STEP (SHA256_F0o, SHA256_F1o, c, d, e, f, g, h, a, b, w6_t, SHA256C06); - SHA256_STEP (SHA256_F0o, SHA256_F1o, b, c, d, e, f, g, h, a, w7_t, SHA256C07); - SHA256_STEP (SHA256_F0o, SHA256_F1o, a, b, c, d, e, f, g, h, w8_t, SHA256C08); - SHA256_STEP (SHA256_F0o, SHA256_F1o, h, a, b, c, d, e, f, g, w9_t, SHA256C09); - SHA256_STEP (SHA256_F0o, SHA256_F1o, g, h, a, b, c, d, e, f, wa_t, SHA256C0a); - SHA256_STEP (SHA256_F0o, SHA256_F1o, f, g, h, a, b, c, d, e, wb_t, SHA256C0b); - SHA256_STEP (SHA256_F0o, SHA256_F1o, e, f, g, h, a, b, c, d, wc_t, 
SHA256C0c); - SHA256_STEP (SHA256_F0o, SHA256_F1o, d, e, f, g, h, a, b, c, wd_t, SHA256C0d); - SHA256_STEP (SHA256_F0o, SHA256_F1o, c, d, e, f, g, h, a, b, we_t, SHA256C0e); - SHA256_STEP (SHA256_F0o, SHA256_F1o, b, c, d, e, f, g, h, a, wf_t, SHA256C0f); - - w0_t = SHA256_EXPAND (we_t, w9_t, w1_t, w0_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, a, b, c, d, e, f, g, h, w0_t, SHA256C10); - w1_t = SHA256_EXPAND (wf_t, wa_t, w2_t, w1_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, h, a, b, c, d, e, f, g, w1_t, SHA256C11); - w2_t = SHA256_EXPAND (w0_t, wb_t, w3_t, w2_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, g, h, a, b, c, d, e, f, w2_t, SHA256C12); - w3_t = SHA256_EXPAND (w1_t, wc_t, w4_t, w3_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, f, g, h, a, b, c, d, e, w3_t, SHA256C13); - w4_t = SHA256_EXPAND (w2_t, wd_t, w5_t, w4_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, e, f, g, h, a, b, c, d, w4_t, SHA256C14); - w5_t = SHA256_EXPAND (w3_t, we_t, w6_t, w5_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, d, e, f, g, h, a, b, c, w5_t, SHA256C15); - w6_t = SHA256_EXPAND (w4_t, wf_t, w7_t, w6_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, c, d, e, f, g, h, a, b, w6_t, SHA256C16); - w7_t = SHA256_EXPAND (w5_t, w0_t, w8_t, w7_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, b, c, d, e, f, g, h, a, w7_t, SHA256C17); - w8_t = SHA256_EXPAND (w6_t, w1_t, w9_t, w8_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, a, b, c, d, e, f, g, h, w8_t, SHA256C18); - w9_t = SHA256_EXPAND (w7_t, w2_t, wa_t, w9_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, h, a, b, c, d, e, f, g, w9_t, SHA256C19); - wa_t = SHA256_EXPAND (w8_t, w3_t, wb_t, wa_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, g, h, a, b, c, d, e, f, wa_t, SHA256C1a); - wb_t = SHA256_EXPAND (w9_t, w4_t, wc_t, wb_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, f, g, h, a, b, c, d, e, wb_t, SHA256C1b); - wc_t = SHA256_EXPAND (wa_t, w5_t, wd_t, wc_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, e, f, g, h, a, b, c, d, wc_t, SHA256C1c); - wd_t = SHA256_EXPAND (wb_t, w6_t, we_t, wd_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, d, e, f, g, h, a, b, c, wd_t, SHA256C1d); - we_t = SHA256_EXPAND (wc_t, w7_t, wf_t, we_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, c, d, e, f, g, h, a, b, we_t, SHA256C1e); - wf_t = SHA256_EXPAND (wd_t, w8_t, w0_t, wf_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, b, c, d, e, f, g, h, a, wf_t, SHA256C1f); - - w0_t = SHA256_EXPAND (we_t, w9_t, w1_t, w0_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, a, b, c, d, e, f, g, h, w0_t, SHA256C20); - w1_t = SHA256_EXPAND (wf_t, wa_t, w2_t, w1_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, h, a, b, c, d, e, f, g, w1_t, SHA256C21); - w2_t = SHA256_EXPAND (w0_t, wb_t, w3_t, w2_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, g, h, a, b, c, d, e, f, w2_t, SHA256C22); - w3_t = SHA256_EXPAND (w1_t, wc_t, w4_t, w3_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, f, g, h, a, b, c, d, e, w3_t, SHA256C23); - w4_t = SHA256_EXPAND (w2_t, wd_t, w5_t, w4_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, e, f, g, h, a, b, c, d, w4_t, SHA256C24); - w5_t = SHA256_EXPAND (w3_t, we_t, w6_t, w5_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, d, e, f, g, h, a, b, c, w5_t, SHA256C25); - w6_t = SHA256_EXPAND (w4_t, wf_t, w7_t, w6_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, c, d, e, f, g, h, a, b, w6_t, SHA256C26); - w7_t = SHA256_EXPAND (w5_t, w0_t, w8_t, w7_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, b, c, d, e, f, g, h, a, w7_t, SHA256C27); - w8_t = SHA256_EXPAND (w6_t, w1_t, w9_t, w8_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, a, b, c, d, e, f, g, h, w8_t, SHA256C28); - w9_t = SHA256_EXPAND (w7_t, w2_t, wa_t, w9_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, h, a, b, 
c, d, e, f, g, w9_t, SHA256C29); - wa_t = SHA256_EXPAND (w8_t, w3_t, wb_t, wa_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, g, h, a, b, c, d, e, f, wa_t, SHA256C2a); - wb_t = SHA256_EXPAND (w9_t, w4_t, wc_t, wb_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, f, g, h, a, b, c, d, e, wb_t, SHA256C2b); - wc_t = SHA256_EXPAND (wa_t, w5_t, wd_t, wc_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, e, f, g, h, a, b, c, d, wc_t, SHA256C2c); - wd_t = SHA256_EXPAND (wb_t, w6_t, we_t, wd_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, d, e, f, g, h, a, b, c, wd_t, SHA256C2d); - we_t = SHA256_EXPAND (wc_t, w7_t, wf_t, we_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, c, d, e, f, g, h, a, b, we_t, SHA256C2e); - wf_t = SHA256_EXPAND (wd_t, w8_t, w0_t, wf_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, b, c, d, e, f, g, h, a, wf_t, SHA256C2f); - - w0_t = SHA256_EXPAND (we_t, w9_t, w1_t, w0_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, a, b, c, d, e, f, g, h, w0_t, SHA256C30); - w1_t = SHA256_EXPAND (wf_t, wa_t, w2_t, w1_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, h, a, b, c, d, e, f, g, w1_t, SHA256C31); - w2_t = SHA256_EXPAND (w0_t, wb_t, w3_t, w2_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, g, h, a, b, c, d, e, f, w2_t, SHA256C32); - w3_t = SHA256_EXPAND (w1_t, wc_t, w4_t, w3_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, f, g, h, a, b, c, d, e, w3_t, SHA256C33); - w4_t = SHA256_EXPAND (w2_t, wd_t, w5_t, w4_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, e, f, g, h, a, b, c, d, w4_t, SHA256C34); - w5_t = SHA256_EXPAND (w3_t, we_t, w6_t, w5_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, d, e, f, g, h, a, b, c, w5_t, SHA256C35); - w6_t = SHA256_EXPAND (w4_t, wf_t, w7_t, w6_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, c, d, e, f, g, h, a, b, w6_t, SHA256C36); - w7_t = SHA256_EXPAND (w5_t, w0_t, w8_t, w7_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, b, c, d, e, f, g, h, a, w7_t, SHA256C37); - w8_t = SHA256_EXPAND (w6_t, w1_t, w9_t, w8_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, a, b, c, d, e, f, g, h, w8_t, SHA256C38); - w9_t = SHA256_EXPAND (w7_t, w2_t, wa_t, w9_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, h, a, b, c, d, e, f, g, w9_t, SHA256C39); - wa_t = SHA256_EXPAND (w8_t, w3_t, wb_t, wa_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, g, h, a, b, c, d, e, f, wa_t, SHA256C3a); - wb_t = SHA256_EXPAND (w9_t, w4_t, wc_t, wb_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, f, g, h, a, b, c, d, e, wb_t, SHA256C3b); - wc_t = SHA256_EXPAND (wa_t, w5_t, wd_t, wc_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, e, f, g, h, a, b, c, d, wc_t, SHA256C3c); - wd_t = SHA256_EXPAND (wb_t, w6_t, we_t, wd_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, d, e, f, g, h, a, b, c, wd_t, SHA256C3d); - we_t = SHA256_EXPAND (wc_t, w7_t, wf_t, we_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, c, d, e, f, g, h, a, b, we_t, SHA256C3e); - wf_t = SHA256_EXPAND (wd_t, w8_t, w0_t, wf_t); SHA256_STEP (SHA256_F0o, SHA256_F1o, b, c, d, e, f, g, h, a, wf_t, SHA256C3f); - - a += SHA256M_A; - b += SHA256M_B; - c += SHA256M_C; - d += SHA256M_D; - e += SHA256M_E; - f += SHA256M_F; - g += SHA256M_G; - h += SHA256M_H; + const u32x w0lr = w0l | w0r; + + u32 w0[4]; + u32 w1[4]; + u32 w2[4]; + u32 w3[4]; + + w0[0] = w0lr; + w0[1] = w[ 1]; + w0[2] = w[ 2]; + w0[3] = w[ 3]; + w1[0] = w[ 4]; + w1[1] = w[ 5]; + w1[2] = w[ 6]; + w1[3] = w[ 7]; + w2[0] = w[ 8]; + w2[1] = w[ 9]; + w2[2] = w[10]; + w2[3] = w[11]; + w3[0] = w[12]; + w3[1] = w[13]; + w3[2] = w[14]; + w3[3] = w[15]; + + u32 digest[8]; + + digest[0] = SHA256M_A; + digest[1] = SHA256M_B; + digest[2] = SHA256M_C; + digest[3] = SHA256M_D; + digest[4] = SHA256M_E; + digest[5] = SHA256M_F; + digest[6] = SHA256M_G; + 
digest[7] = SHA256M_H; + + sha256_transform (w0, w1, w2, w3, digest); + + w0[0] = digest[0]; + w0[1] = digest[1]; + w0[2] = digest[2]; + w0[3] = digest[3]; + w1[0] = digest[4]; + w1[1] = digest[5]; + w1[2] = digest[6]; + w1[3] = digest[7]; + w2[0] = 0x80000000; + w2[1] = 0; + w2[2] = 0; + w2[3] = 0; + w3[0] = 0; + w3[1] = 0; + w3[2] = 0; + w3[3] = 32 * 8; + + digest[0] = SHA256M_A; + digest[1] = SHA256M_B; + digest[2] = SHA256M_C; + digest[3] = SHA256M_D; + digest[4] = SHA256M_E; + digest[5] = SHA256M_F; + digest[6] = SHA256M_G; + digest[7] = SHA256M_H; + + sha256_transform (w0, w1, w2, w3, digest); u32 ukey[8]; - ukey[0] = hc_swap32_S (a); - ukey[1] = hc_swap32_S (b); - ukey[2] = hc_swap32_S (c); - ukey[3] = hc_swap32_S (d); - ukey[4] = hc_swap32_S (e); - ukey[5] = hc_swap32_S (f); - ukey[6] = hc_swap32_S (g); - ukey[7] = hc_swap32_S (h); + ukey[0] = digest[0]; + ukey[1] = digest[1]; + ukey[2] = digest[2]; + ukey[3] = digest[3]; + ukey[4] = digest[4]; + ukey[5] = digest[5]; + ukey[6] = digest[6]; + ukey[7] = digest[7]; #define KEYLEN 60 u32 ks[KEYLEN]; - aes256_set_decrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3, s_td0, s_td1, s_td2, s_td3); + aes256_ExpandKey (ks, ukey, s_te0, s_te1, s_te2, s_te3); - u32 encrypted[4]; - - encrypted[0] = esalt_bufs[digests_offset].encrypted[0]; - encrypted[1] = esalt_bufs[digests_offset].encrypted[1]; - encrypted[2] = esalt_bufs[digests_offset].encrypted[2]; - encrypted[3] = esalt_bufs[digests_offset].encrypted[3]; + aes256_InvertKey (ks, s_te1, s_td0, s_td1, s_td2, s_td3); u32 out[4]; aes256_decrypt (ks, encrypted, out, s_td0, s_td1, s_td2, s_td3, s_td4); - u32 iv[4]; - - iv[0] = esalt_bufs[digests_offset].iv[0]; - iv[1] = esalt_bufs[digests_offset].iv[1]; - iv[2] = esalt_bufs[digests_offset].iv[2]; - iv[3] = esalt_bufs[digests_offset].iv[3]; - out[0] ^= iv[0]; out[1] ^= iv[1]; out[2] ^= iv[2]; out[3] ^= iv[3]; - if (esalt_bufs[digests_offset].salt_type == 1) + if (salt_type == 1) { if (is_valid_hex_32 (out[0]) == 0) continue; if (is_valid_hex_32 (out[1]) == 0) continue; @@ -302,7 +167,7 @@ DECLSPEC void m16600 (SHM_TYPE u32a *s_te0, SHM_TYPE u32a *s_te1, SHM_TYPE u32a } } - if (esalt_bufs[digests_offset].salt_type == 2) + if (salt_type == 2) { if ((u8) (out[0] >> 0) != 'x') continue; if ((u8) (out[0] >> 8) != 'p') continue; @@ -318,7 +183,7 @@ DECLSPEC void m16600 (SHM_TYPE u32a *s_te0, SHM_TYPE u32a *s_te1, SHM_TYPE u32a } } - if (esalt_bufs[digests_offset].salt_type == 3) + if (salt_type == 3) { // check PKCS7 padding (either 13 times 0x0d or 12 times 0x0c at the end, we only check 12 bytes, it's enough): diff --git a/OpenCL/m16600_a3-pure.cl b/OpenCL/m16600_a3-pure.cl index 49667e50a..b7ead62fd 100644 --- a/OpenCL/m16600_a3-pure.cl +++ b/OpenCL/m16600_a3-pure.cl @@ -99,6 +99,26 @@ KERNEL_FQ void m16600_mxx (KERN_ATTR_VECTOR_ESALT (electrum_wallet_t)) w[idx] = pws[gid].i[idx]; } + /** + * data + */ + + const u32 salt_type = esalt_bufs[digests_offset].salt_type; + + u32 encrypted[4]; + + encrypted[0] = esalt_bufs[digests_offset].encrypted[0]; + encrypted[1] = esalt_bufs[digests_offset].encrypted[1]; + encrypted[2] = esalt_bufs[digests_offset].encrypted[2]; + encrypted[3] = esalt_bufs[digests_offset].encrypted[3]; + + u32 iv[4]; + + iv[0] = esalt_bufs[digests_offset].iv[0]; + iv[1] = esalt_bufs[digests_offset].iv[1]; + iv[2] = esalt_bufs[digests_offset].iv[2]; + iv[3] = esalt_bufs[digests_offset].iv[3]; + /** * loop */ @@ -171,30 +191,16 @@ KERNEL_FQ void m16600_mxx (KERN_ATTR_VECTOR_ESALT (electrum_wallet_t)) aes256_set_decrypt_key (ks, ukey, s_te0, 
s_te1, s_te2, s_te3, s_td0, s_td1, s_td2, s_td3); - u32 encrypted[4]; - - encrypted[0] = esalt_bufs[digests_offset].encrypted[0]; - encrypted[1] = esalt_bufs[digests_offset].encrypted[1]; - encrypted[2] = esalt_bufs[digests_offset].encrypted[2]; - encrypted[3] = esalt_bufs[digests_offset].encrypted[3]; - u32 out[4]; aes256_decrypt (ks, encrypted, out, s_td0, s_td1, s_td2, s_td3, s_td4); - u32 iv[4]; - - iv[0] = esalt_bufs[digests_offset].iv[0]; - iv[1] = esalt_bufs[digests_offset].iv[1]; - iv[2] = esalt_bufs[digests_offset].iv[2]; - iv[3] = esalt_bufs[digests_offset].iv[3]; - out[0] ^= iv[0]; out[1] ^= iv[1]; out[2] ^= iv[2]; out[3] ^= iv[3]; - if (esalt_bufs[digests_offset].salt_type == 1) + if (salt_type == 1) { if (is_valid_hex_32 (out[0]) == 0) continue; if (is_valid_hex_32 (out[1]) == 0) continue; @@ -207,7 +213,7 @@ KERNEL_FQ void m16600_mxx (KERN_ATTR_VECTOR_ESALT (electrum_wallet_t)) } } - if (esalt_bufs[digests_offset].salt_type == 2) + if (salt_type == 2) { if ((u8) (out[0] >> 0) != 'x') continue; if ((u8) (out[0] >> 8) != 'p') continue; @@ -223,7 +229,7 @@ KERNEL_FQ void m16600_mxx (KERN_ATTR_VECTOR_ESALT (electrum_wallet_t)) } } - if (esalt_bufs[digests_offset].salt_type == 3) + if (salt_type == 3) { // check PKCS7 padding (either 13 times 0x0d or 12 times 0x0c at the end, we only check 12 bytes, it's enough): @@ -322,6 +328,26 @@ KERNEL_FQ void m16600_sxx (KERN_ATTR_VECTOR_ESALT (electrum_wallet_t)) w[idx] = pws[gid].i[idx]; } + /** + * data + */ + + const u32 salt_type = esalt_bufs[digests_offset].salt_type; + + u32 encrypted[4]; + + encrypted[0] = esalt_bufs[digests_offset].encrypted[0]; + encrypted[1] = esalt_bufs[digests_offset].encrypted[1]; + encrypted[2] = esalt_bufs[digests_offset].encrypted[2]; + encrypted[3] = esalt_bufs[digests_offset].encrypted[3]; + + u32 iv[4]; + + iv[0] = esalt_bufs[digests_offset].iv[0]; + iv[1] = esalt_bufs[digests_offset].iv[1]; + iv[2] = esalt_bufs[digests_offset].iv[2]; + iv[3] = esalt_bufs[digests_offset].iv[3]; + /** * loop */ @@ -394,30 +420,16 @@ KERNEL_FQ void m16600_sxx (KERN_ATTR_VECTOR_ESALT (electrum_wallet_t)) aes256_set_decrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3, s_td0, s_td1, s_td2, s_td3); - u32 encrypted[4]; - - encrypted[0] = esalt_bufs[digests_offset].encrypted[0]; - encrypted[1] = esalt_bufs[digests_offset].encrypted[1]; - encrypted[2] = esalt_bufs[digests_offset].encrypted[2]; - encrypted[3] = esalt_bufs[digests_offset].encrypted[3]; - u32 out[4]; aes256_decrypt (ks, encrypted, out, s_td0, s_td1, s_td2, s_td3, s_td4); - u32 iv[4]; - - iv[0] = esalt_bufs[digests_offset].iv[0]; - iv[1] = esalt_bufs[digests_offset].iv[1]; - iv[2] = esalt_bufs[digests_offset].iv[2]; - iv[3] = esalt_bufs[digests_offset].iv[3]; - out[0] ^= iv[0]; out[1] ^= iv[1]; out[2] ^= iv[2]; out[3] ^= iv[3]; - if (esalt_bufs[digests_offset].salt_type == 1) + if (salt_type == 1) { if (is_valid_hex_32 (out[0]) == 0) continue; if (is_valid_hex_32 (out[1]) == 0) continue; @@ -430,7 +442,7 @@ KERNEL_FQ void m16600_sxx (KERN_ATTR_VECTOR_ESALT (electrum_wallet_t)) } } - if (esalt_bufs[digests_offset].salt_type == 2) + if (salt_type == 2) { if ((u8) (out[0] >> 0) != 'x') continue; if ((u8) (out[0] >> 8) != 'p') continue; @@ -446,7 +458,7 @@ KERNEL_FQ void m16600_sxx (KERN_ATTR_VECTOR_ESALT (electrum_wallet_t)) } } - if (esalt_bufs[digests_offset].salt_type == 3) + if (salt_type == 3) { // check PKCS7 padding (either 13 times 0x0d or 12 times 0x0c at the end, we only check 12 bytes, it's enough):
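
Summary of the change, for reviewers: the patch hoists the loop-invariant esalt reads (salt_type, the first encrypted block, and the IV) out of the per-candidate inner loop in every m16600 kernel, and in OpenCL/m16600_a3-optimized.cl it additionally replaces the hand-unrolled SHA-256 rounds with two sha256_transform calls plus aes256_ExpandKey/aes256_InvertKey. The per-candidate work itself is unchanged: the AES-256 key is SHA256(SHA256(password)) (the a3-optimized hunk makes the two transforms explicit), the first ciphertext block is decrypted and XORed with the IV, and the result is screened according to salt_type. The following host-side sketch restates that flow in plain C; it uses OpenSSL (link with -lcrypto) purely as a stand-in for hashcat's in-kernel SHA-256/AES primitives, and the function name electrum_first_block is illustrative, not hashcat API.

#include <stddef.h>
#include <stdint.h>
#include <openssl/sha.h>
#include <openssl/aes.h>

/* Host-side restatement of the per-candidate work (sketch, not hashcat code):
 *   key       = SHA256(SHA256(password))            -- AES-256 key
 *   out       = AES256-ECB-decrypt(key, encrypted)  -- first 16-byte block
 *   plaintext = out XOR iv                           -- CBC, first block only
 * salt_type / encrypted / iv correspond to the values this patch now loads
 * once, before the candidate loop, instead of once per candidate. */
static void electrum_first_block (const uint8_t *pw, size_t pw_len,
                                  const uint8_t encrypted[16],
                                  const uint8_t iv[16],
                                  uint8_t plaintext[16])
{
  uint8_t d1[SHA256_DIGEST_LENGTH];
  uint8_t key[SHA256_DIGEST_LENGTH];

  SHA256 (pw, pw_len, d1);
  SHA256 (d1, sizeof (d1), key);

  AES_KEY ks;

  AES_set_decrypt_key (key, 256, &ks);

  AES_decrypt (encrypted, plaintext, &ks);

  for (int i = 0; i < 16; i++) plaintext[i] ^= iv[i];
}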
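
The early-reject screening applied to that first decrypted block can be sketched as below. The salt_type == 1 and salt_type == 2 branches mirror the checks visible in the diff (hex characters, respectively an "xprv" prefix, with further checks outside the shown context); the salt_type == 3 branch is reconstructed only from the PKCS#7 comment, since its body is not part of the diff, so treat it as an assumption. is_valid_hex_byte is a hypothetical stand-in for hashcat's is_valid_hex_32.

#include <stdint.h>

/* Assumed to accept digits and lowercase a-f, per is_valid_hex_32. */
static int is_valid_hex_byte (uint8_t c)
{
  return (c >= '0' && c <= '9') || (c >= 'a' && c <= 'f');
}

/* Screen the first decrypted block b[0..15]; return 0 to mimic the kernels'
 * `continue` (reject candidate), 1 to keep it. */
static int electrum_plausible (const uint8_t b[16], uint32_t salt_type)
{
  if (salt_type == 1) /* seed: block must be hex characters */
  {
    for (int i = 0; i < 16; i++) if (is_valid_hex_byte (b[i]) == 0) return 0;
  }

  if (salt_type == 2) /* master private key: must start with "xprv" */
  {
    if (b[0] != 'x' || b[1] != 'p' || b[2] != 'r' || b[3] != 'v') return 0;
  }

  if (salt_type == 3) /* PKCS#7 tail: last 12 bytes all 0x0c or all 0x0d
                         (reconstructed from the comment; body not in the diff) */
  {
    const uint8_t pad = b[15];

    if (pad != 0x0c && pad != 0x0d) return 0;

    for (int i = 4; i < 16; i++) if (b[i] != pad) return 0;
  }

  return 1;
}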