Add tokenizer option TOKEN_ATTR_SEPARATOR_FARTHEST to deal with hash formats where the separator character can be part of the salt data itself and where escape-character logic cannot be applied. This can only work if the design of the hash format guarantees that the separator character never occurs after the position of the actual (final) separator.

pull/2315/head
Jens Steube 4 years ago
parent c4daa0c260
commit 9957422191
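The idea is to split on the last occurrence of the separator instead of the first, so a fixed-length trailing token (a 32-character hex digest in the module hunk below) is measured from the right and the variable-length salt keeps any embedded separator characters. A minimal standalone sketch of that behavior, assuming an illustrative hash line: the two helpers mirror the functions this commit adds, but the main() harness and the sample data are not part of the patch.

#include <stdio.h>
#include <string.h>

typedef unsigned char u8;

/* Mirrors the helpers added in this commit: scan for the first
 * and for the last occurrence of a separator in a length-bound buffer. */
static const u8 *hc_strchr_next (const u8 *input_buf, const int input_len, const u8 separator)
{
  for (int i = 0; i < input_len; i++)
  {
    if (input_buf[i] == separator) return &input_buf[i];
  }

  return NULL;
}

static const u8 *hc_strchr_last (const u8 *input_buf, const int input_len, const u8 separator)
{
  for (int i = input_len - 1; i >= 0; i--)
  {
    if (input_buf[i] == separator) return &input_buf[i];
  }

  return NULL;
}

int main (void)
{
  /* Illustrative only: a salt that itself contains the '#' separator,
   * followed by a fixed-length (32 hex chars) digest as the final token. */
  const u8 *line = (const u8 *) "s@lt#with#separators#0123456789abcdef0123456789abcdef";

  const int len = (int) strlen ((const char *) line);

  const u8 *first = hc_strchr_next (line, len, '#'); // stops inside the salt
  const u8 *last  = hc_strchr_last (line, len, '#'); // stops at the real boundary

  if ((first == NULL) || (last == NULL)) return 1;

  printf ("first match: salt len = %d\n", (int) (first - line)); // 4
  printf ("last  match: salt len = %d\n", (int) (last  - line)); // 20

  return 0;
}

A first-match scan would end the salt at "s@lt", while the farthest match keeps everything up to the final '#' as salt. This is only safe because the trailing token is guaranteed by the format not to contain the separator.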

@@ -79,6 +79,9 @@ u64 round_up_multiple_64 (const u64 v, const u64 m);
 void hc_strncat (u8 *dst, const u8 *src, const size_t n);
 
+const u8 *hc_strchr_next (const u8 *input_buf, const int input_len, const u8 separator);
+const u8 *hc_strchr_last (const u8 *input_buf, const int input_len, const u8 separator);
+
 int count_char (const u8 *buf, const int len, const u8 c);
 
 float get_entropy (const u8 *buf, const int len);

@@ -770,16 +770,17 @@ typedef enum user_options_map
 typedef enum token_attr
 {
-  TOKEN_ATTR_FIXED_LENGTH     = 1 << 0,
-  TOKEN_ATTR_OPTIONAL_ROUNDS  = 1 << 1,
-  TOKEN_ATTR_VERIFY_SIGNATURE = 1 << 2,
-  TOKEN_ATTR_VERIFY_LENGTH    = 1 << 3,
-  TOKEN_ATTR_VERIFY_DIGIT     = 1 << 4,
-  TOKEN_ATTR_VERIFY_FLOAT     = 1 << 5,
-  TOKEN_ATTR_VERIFY_HEX       = 1 << 6,
-  TOKEN_ATTR_VERIFY_BASE64A   = 1 << 7,
-  TOKEN_ATTR_VERIFY_BASE64B   = 1 << 8,
-  TOKEN_ATTR_VERIFY_BASE64C   = 1 << 9
+  TOKEN_ATTR_FIXED_LENGTH       = 1 << 0,
+  TOKEN_ATTR_SEPARATOR_FARTHEST = 1 << 1,
+  TOKEN_ATTR_OPTIONAL_ROUNDS    = 1 << 2,
+  TOKEN_ATTR_VERIFY_SIGNATURE   = 1 << 3,
+  TOKEN_ATTR_VERIFY_LENGTH      = 1 << 4,
+  TOKEN_ATTR_VERIFY_DIGIT       = 1 << 5,
+  TOKEN_ATTR_VERIFY_FLOAT       = 1 << 6,
+  TOKEN_ATTR_VERIFY_HEX         = 1 << 7,
+  TOKEN_ATTR_VERIFY_BASE64A     = 1 << 8,
+  TOKEN_ATTR_VERIFY_BASE64B     = 1 << 9,
+  TOKEN_ATTR_VERIFY_BASE64C     = 1 << 10,
 } token_attr_t;

@@ -95,7 +95,8 @@ int module_hash_decode (MAYBE_UNUSED const hashconfig_t *hashconfig, MAYBE_UNUSE
   token.len_min[2] = SALT_MIN;
   token.len_max[2] = SALT_MAX;
   token.sep[2]     = '#';
-  token.attr[2]    = TOKEN_ATTR_VERIFY_LENGTH;
+  token.attr[2]    = TOKEN_ATTR_VERIFY_LENGTH
+                   | TOKEN_ATTR_SEPARATOR_FARTHEST;
 
   token.len_min[3] = 32;
   token.len_max[3] = 32;

@@ -1028,6 +1028,26 @@ static int rounds_count_length (const char *input_buf, const int input_len)
   return -1;
 }
 
+const u8 *hc_strchr_next (const u8 *input_buf, const int input_len, const u8 separator)
+{
+  for (int i = 0; i < input_len; i++)
+  {
+    if (input_buf[i] == separator) return &input_buf[i];
+  }
+
+  return NULL;
+}
+
+const u8 *hc_strchr_last (const u8 *input_buf, const int input_len, const u8 separator)
+{
+  for (int i = input_len - 1; i >= 0; i--)
+  {
+    if (input_buf[i] == separator) return &input_buf[i];
+  }
+
+  return NULL;
+}
+
 int input_tokenizer (const u8 *input_buf, const int input_len, token_t *token)
 {
   int len_left = input_len;
@@ -1066,7 +1086,16 @@ int input_tokenizer (const u8 *input_buf, const int input_len, token_t *token)
        }
      }
 
-      const u8 *next_pos = (const u8 *) strchr ((const char *) token->buf[token_idx], token->sep[token_idx]);
+      const u8 *next_pos = NULL;
+
+      if (token->attr[token_idx] & TOKEN_ATTR_SEPARATOR_FARTHEST)
+      {
+        next_pos = hc_strchr_last (token->buf[token_idx], len_left, token->sep[token_idx]);
+      }
+      else
+      {
+        next_pos = hc_strchr_next (token->buf[token_idx], len_left, token->sep[token_idx]);
+      }
 
       if (next_pos == NULL) return (PARSER_SEPARATOR_UNMATCHED);
