All remaining hash parser functions have been rewritten to use input_tokenizer():

chacha20_parse_hash
crammd5_dovecot_parse_hash
electrum_wallet13_parse_hash
ethereum_pbkdf2_parse_hash
ethereum_presale_parse_hash
ethereum_scrypt_parse_hash
filevault2_parse_hash
filezilla_server_parse_hash
jks_sha1_parse_hash
jwt_parse_hash
mywalletv2_parse_hash
netbsd_sha1crypt_parse_hash
plaintext_parse_hash
tacacs_plus_parse_hash
tripcode_parse_hash
Removed old generic salt parser function parse_and_store_salt()
pull/1629/head
jsteube 6 years ago
parent 85aef30dff
commit 048298020e

@ -18,6 +18,8 @@ bool is_valid_base64a_string (const u8 *s, const size_t len);
bool is_valid_base64a_char (const u8 c);
bool is_valid_base64b_string (const u8 *s, const size_t len);
bool is_valid_base64b_char (const u8 c);
bool is_valid_base64c_string (const u8 *s, const size_t len);
bool is_valid_base64c_char (const u8 c);
bool is_valid_hex_string (const u8 *s, const size_t len);
bool is_valid_hex_char (const u8 c);
bool is_valid_digit_string (const u8 *s, const size_t len);

@ -1018,43 +1018,6 @@ typedef struct hccapx hccapx_t;
* hashtypes enums
*/
/*
 * Per-hash-mode bounds on the on-disk hash line length; the hash-mode
 * number is encoded in the constant name (e.g. ..._15000).
 * Each value is spelled as a sum of field widths plus 1-byte separators —
 * presumably signature + salt + digest sizes for that mode's format;
 * TODO confirm each breakdown against the corresponding parse function.
 */
typedef enum display_len
{
DISPLAY_LEN_MIN_15000 = 128 + 1 + 64,
DISPLAY_LEN_MAX_15000 = 128 + 1 + 64,
DISPLAY_LEN_MIN_15100 = 6 + 3 + 1 + 8 + 1 + 28,
DISPLAY_LEN_MAX_15100 = 6 + 6 + 1 + 8 + 1 + 28,
DISPLAY_LEN_MIN_15200 = 1 + 10 + 1 + 2 + 1 + 1 + 1 + 1 + 1 + 64,
DISPLAY_LEN_MAX_15200 = 1 + 10 + 1 + 2 + 1 + 8 + 1 + 5 + 1 + 20000,
DISPLAY_LEN_MIN_15400 = 10 + 1 + 16 + 1 + 1 + 1 + 16 + 1 + 16 + 1 + 16,
DISPLAY_LEN_MAX_15400 = 10 + 1 + 16 + 1 + 2 + 1 + 16 + 1 + 16 + 1 + 16,
DISPLAY_LEN_MIN_15500 = 10 + 1 + 40 + 1 + 40 + 1 + 1 + 1 + 2 + 1 + 28 + 1 + 1,
DISPLAY_LEN_MAX_15500 = 10 + 1 + 40 + 1 + 40 + 1 + 16384 + 1 + 2 + 1 + 28 + 1 + 64,
DISPLAY_LEN_MIN_15600 = 11 + 1 + 1 + 1 + 32 + 1 + 64 + 1 + 64,
DISPLAY_LEN_MAX_15600 = 11 + 1 + 6 + 1 + 64 + 1 + 64 + 1 + 64,
DISPLAY_LEN_MIN_15700 = 11 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 64 + 1 + 64 + 1 + 64,
DISPLAY_LEN_MAX_15700 = 11 + 1 + 6 + 1 + 1 + 1 + 1 + 1 + 64 + 1 + 64 + 1 + 64,
DISPLAY_LEN_MIN_16000 = 10,
DISPLAY_LEN_MAX_16000 = 10,
DISPLAY_LEN_MIN_16100 = 1 + 11 + 1 + 1 + 1 + 8 + 12 + 1 + 4,
DISPLAY_LEN_MAX_16100 = 1 + 11 + 1 + 1 + 1 + 8 + 256 + 1 + 4,
DISPLAY_LEN_MIN_16200 = 5 + 1 + 1 + 1 + 5 + 1 + 32 + 1 + 48,
DISPLAY_LEN_MAX_16200 = 5 + 1 + 6 + 1 + 5 + 1 + 32 + 1 + 48,
DISPLAY_LEN_MIN_16300 = 11 + 1 + 64 + 1 + 40 + 1 + 32,
DISPLAY_LEN_MAX_16300 = 11 + 1 + 1248 + 1 + 40 + 1 + 32,
DISPLAY_LEN_MIN_16400 = 10 + 32 + 32,
DISPLAY_LEN_MAX_16400 = 10 + 32 + 32,
/* NOTE(review): 16500's MAX uses large free-form field widths (2047/2047/86)
   rather than a signature-prefixed layout — verify against its parser. */
DISPLAY_LEN_MIN_16500 = 1 + 1 + 1 + 1 + 43,
DISPLAY_LEN_MAX_16500 = 2047 + 1 + 2047 + 1 + 86,
DISPLAY_LEN_MIN_16600 = 10 + 1 + 1 + 32 + 1 + 32,
DISPLAY_LEN_MAX_16600 = 10 + 1 + 1 + 32 + 1 + 32,
DISPLAY_LEN_MIN_16700 = 1 + 4 + 1 + 1 + 1 + 2 + 1 + 32 + 1 + 1 + 1 + 48,
DISPLAY_LEN_MAX_16700 = 1 + 4 + 1 + 1 + 1 + 2 + 1 + 32 + 1 + 6 + 1 + 48,
/* 99999: catch-all entry with the widest permissive range (1..55 bytes) */
DISPLAY_LEN_MIN_99999 = 1,
DISPLAY_LEN_MAX_99999 = 55,
} display_len_t;
typedef enum hash_type
{
HASH_TYPE_MD4 = 1,

@ -694,7 +694,8 @@ typedef enum token_attr
TOKEN_ATTR_VERIFY_HEX = 1 << 5,
TOKEN_ATTR_VERIFY_BASE64A = 1 << 6,
TOKEN_ATTR_VERIFY_BASE64B = 1 << 7,
TOKEN_ATTR_TERMINATE_STRING = 1 << 8,
TOKEN_ATTR_VERIFY_BASE64C = 1 << 8,
TOKEN_ATTR_TERMINATE_STRING = 1 << 9,
} token_attr_t;

@ -265,6 +265,31 @@ bool is_valid_base64b_char (const u8 c)
return false;
}
/**
 * Validate that every byte of the buffer belongs to the base64c alphabet.
 *
 * @param s    buffer to check (need not be NUL-terminated)
 * @param len  number of bytes to examine
 * @return true if all len bytes pass is_valid_base64c_char(), else false
 */
bool is_valid_base64c_string (const u8 *s, const size_t len)
{
  const u8 *p   = s;
  const u8 *end = s + len;

  // reject on the first byte outside the alphabet
  while (p < end)
  {
    if (is_valid_base64c_char (*p) == false) return false;

    p++;
  }

  return true;
}
/**
 * Check whether a single byte is a member of the base64c alphabet:
 * alphanumerics, '_', '-', and the '=' padding character
 * (the URL-safe base64 character set plus padding).
 *
 * @param c  byte to test
 * @return true if c is in the alphabet, else false
 */
bool is_valid_base64c_char (const u8 c)
{
  const bool is_digit = (c >= '0') && (c <= '9');
  const bool is_upper = (c >= 'A') && (c <= 'Z');
  const bool is_lower = (c >= 'a') && (c <= 'z');

  if (is_digit || is_upper || is_lower) return true;

  switch (c)
  {
    case '_':
    case '-':
    case '=':
      return true;
    default:
      return false;
  }
}
bool is_valid_hex_string (const u8 *s, const size_t len)
{
for (size_t i = 0; i < len; i++)

File diff suppressed because it is too large Load Diff
Loading…
Cancel
Save