mirror of https://github.com/hashcat/hashcat
Convert cisco4_parse_hash() to make use of input_tokenizer()
Convert lm_parse_hash() to make use of input_tokenizer()
This commit is contained in:
parent 2abe8caf24
commit 5f83dd8620
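
Both commits apply the same refactoring: the hand-written length and encoding checks at the top of each *_parse_hash() function are replaced by a declarative token_t description that input_tokenizer() validates. As an orientation sketch only (the field names, attributes, and return-code handling are taken from the hunks below; everything around them is elided), a converted single-token parser looks like this:

  token_t token;

  token.token_cnt  = 1;

  token.len_min[0] = 43;                        // expected token length (cisco4 case)
  token.len_max[0] = 43;
  token.attr[0]    = TOKEN_ATTR_VERIFY_LENGTH   // verify the declared length bounds
                   | TOKEN_ATTR_ENCODED_BASE64; // verify the base64 character set

  const int rc_tokenizer = input_tokenizer (input_buf, input_len, &token);

  if (rc_tokenizer != PARSER_OK) return (rc_tokenizer);

  // token.buf[0] / token.len[0] now hold the validated token

The parser then consumes token.buf[0] and token.len[0] instead of indexing input_buf directly.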
@@ -14,10 +14,10 @@ size_t exec_unhexify (const u8 *in_buf, const size_t in_len, u8 *out_buf, const
 bool need_hexify (const u8 *buf, const size_t len, const char separator, bool always_ascii);
 void exec_hexify (const u8 *buf, const size_t len, u8 *out);
 
-bool is_valid_bf64_string (const u8 *s, const size_t len);
-bool is_valid_bf64_char (const u8 c);
-bool is_valid_hex_string (const u8 *s, const size_t len);
-bool is_valid_hex_char (const u8 c);
+bool is_valid_base64_string (const u8 *s, const size_t len);
+bool is_valid_base64_char (const u8 c);
+bool is_valid_hex_string (const u8 *s, const size_t len);
+bool is_valid_hex_char (const u8 c);
 
 u8 hex_convert (const u8 c);
 
@@ -688,7 +688,7 @@ typedef enum token_attr
   TOKEN_ATTR_SIGNATURE = 1 << 0,
   TOKEN_ATTR_FIXED_LENGTH = 1 << 1,
   TOKEN_ATTR_ENCODED_HEX = 1 << 2,
-  TOKEN_ATTR_ENCODED_BF64 = 1 << 3,
+  TOKEN_ATTR_ENCODED_BASE64 = 1 << 3,
   TOKEN_ATTR_VERIFY_LENGTH = 1 << 4,
 
 } token_attr_t;
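
Note: the attribute values are distinct bits, so a token can carry several attributes at once. The parser hunks below OR them together, for example (taken from the bcrypt_parse_hash hunk):

  token.attr[2] = TOKEN_ATTR_FIXED_LENGTH
                | TOKEN_ATTR_ENCODED_BASE64;

input_tokenizer() then tests each bit with & to decide which check to run on that token.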
@@ -215,18 +215,30 @@ void exec_hexify (const u8 *buf, const size_t len, u8 *out)
   out[max_len * 2] = 0;
 }
 
-bool is_valid_bf64_string (const u8 *s, const size_t len)
+bool is_valid_base64_string (const u8 *s, const size_t len)
 {
   for (size_t i = 0; i < len; i++)
   {
     const u8 c = s[i];
 
-    if (is_valid_bf64_char (c) == false) return false;
+    if (is_valid_base64_char (c) == false) return false;
   }
 
   return true;
 }
 
+bool is_valid_base64_char (const u8 c)
+{
+  if ((c >= '0') && (c <= '9')) return true;
+  if ((c >= 'A') && (c <= 'Z')) return true;
+  if ((c >= 'a') && (c <= 'z')) return true;
+
+  if (c == '.') return true;
+  if (c == '/') return true;
+
+  return false;
+}
+
 bool is_valid_hex_string (const u8 *s, const size_t len)
 {
   for (size_t i = 0; i < len; i++)
@@ -239,18 +251,6 @@ bool is_valid_hex_string (const u8 *s, const size_t len)
   return true;
 }
 
-bool is_valid_bf64_char (const u8 c)
-{
-  if ((c >= '0') && (c <= '9')) return true;
-  if ((c >= 'A') && (c <= 'Z')) return true;
-  if ((c >= 'a') && (c <= 'z')) return true;
-
-  if (c == '.') return true;
-  if (c == '/') return true;
-
-  return false;
-}
-
 bool is_valid_hex_char (const u8 c)
 {
   if ((c >= '0') && (c <= '9')) return true;
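
Note: the character set accepted by is_valid_base64_char() above is [0-9A-Za-z./], i.e. the crypt(3)/itoa64-style alphabet that the parsers below decode with itoa64_to_int, not the RFC 4648 alphabet ('+' and '=' are rejected, '.' is accepted). A minimal standalone check of that behaviour, rebuilt outside of hashcat with a hypothetical demo_ prefix and only the C standard library:

  #include <stdbool.h>
  #include <stdio.h>

  typedef unsigned char u8; /* stand-in for hashcat's u8 typedef */

  /* same membership test as is_valid_base64_char () above */
  static bool demo_is_valid_base64_char (const u8 c)
  {
    if ((c >= '0') && (c <= '9')) return true;
    if ((c >= 'A') && (c <= 'Z')) return true;
    if ((c >= 'a') && (c <= 'z')) return true;

    if (c == '.') return true;
    if (c == '/') return true;

    return false;
  }

  int main (void)
  {
    printf ("'.' -> %d, '+' -> %d, '=' -> %d, 'A' -> %d\n",
            demo_is_valid_base64_char ('.'),  /* 1: accepted                  */
            demo_is_valid_base64_char ('+'),  /* 0: RFC 4648 char, rejected   */
            demo_is_valid_base64_char ('='),  /* 0: padding char, rejected    */
            demo_is_valid_base64_char ('A')); /* 1: accepted                  */

    return 0;
  }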
@@ -2503,9 +2503,9 @@ static int input_tokenizer (u8 *input_buf, int input_len, token_t *token)
       if (token->len[token_idx] > token->len_max[token_idx]) return (PARSER_TOKEN_LENGTH);
     }
 
-    if (token->attr[token_idx] & TOKEN_ATTR_ENCODED_BF64)
+    if (token->attr[token_idx] & TOKEN_ATTR_ENCODED_BASE64)
     {
-      if (is_valid_bf64_string (token->buf[token_idx], token->len[token_idx]) == false) return (PARSER_TOKEN_ENCODING);
+      if (is_valid_base64_string (token->buf[token_idx], token->len[token_idx]) == false) return (PARSER_TOKEN_ENCODING);
     }
 
     if (token->attr[token_idx] & TOKEN_ATTR_ENCODED_HEX)
@@ -2686,11 +2686,11 @@ int bcrypt_parse_hash (u8 *input_buf, u32 input_len, hash_t *hash_buf, MAYBE_UNU
 
   token.len[2]  = 22;
   token.attr[2] = TOKEN_ATTR_FIXED_LENGTH
-                | TOKEN_ATTR_ENCODED_BF64;
+                | TOKEN_ATTR_ENCODED_BASE64;
 
   token.len[3]  = 31;
   token.attr[3] = TOKEN_ATTR_FIXED_LENGTH
-                | TOKEN_ATTR_ENCODED_BF64;
+                | TOKEN_ATTR_ENCODED_BASE64;
 
   const int rc_tokenizer = input_tokenizer (input_buf, input_len, &token);
 
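
Note on the fixed lengths in the bcrypt hunk: the bcrypt salt is 16 bytes and the stored digest is 23 bytes (23 of the 24 output bytes), and each character of the unpadded radix-64 encoding carries 6 bits, so the salt needs ceil(128 / 6) = 22 characters (token.len[2]) and the digest ceil(184 / 6) = 31 characters (token.len[3]).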
@@ -2741,13 +2741,27 @@ int bcrypt_parse_hash (u8 *input_buf, u32 input_len, hash_t *hash_buf, MAYBE_UNU
 
 int cisco4_parse_hash (u8 *input_buf, u32 input_len, hash_t *hash_buf, MAYBE_UNUSED hashconfig_t *hashconfig)
 {
-  if ((input_len < DISPLAY_LEN_MIN_5700) || (input_len > DISPLAY_LEN_MAX_5700)) return (PARSER_GLOBAL_LENGTH);
-
   u32 *digest = (u32 *) hash_buf->digest;
 
+  token_t token;
+
+  token.token_cnt = 1;
+
+  token.len_min[0] = 43;
+  token.len_max[0] = 43;
+  token.attr[0]    = TOKEN_ATTR_VERIFY_LENGTH
+                   | TOKEN_ATTR_ENCODED_BASE64;
+
+  const int rc_tokenizer = input_tokenizer (input_buf, input_len, &token);
+
+  if (rc_tokenizer != PARSER_OK) return (rc_tokenizer);
+
+  u8 *hash_pos = token.buf[0];
+  int hash_len = token.len[0];
+
   u8 tmp_buf[100] = { 0 };
 
-  base64_decode (itoa64_to_int, (const u8 *) input_buf, 43, tmp_buf);
+  base64_decode (itoa64_to_int, (const u8 *) hash_pos, hash_len, tmp_buf);
 
   memcpy (digest, tmp_buf, 32);
 
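
Note on the fixed length 43: hash mode 5700 (Cisco-IOS type 4) is a 32-byte SHA-256 digest in the same unpadded base64-style encoding, and ceil(256 / 6) = 43 characters are the minimum needed to carry 256 bits (42 characters would only hold 252). That is why len_min[0] and len_max[0] are both 43 and why the memcpy () takes exactly 32 bytes out of tmp_buf.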
@@ -2777,22 +2791,32 @@ int cisco4_parse_hash (u8 *input_buf, u32 input_len, hash_t *hash_buf, MAYBE_UNU
 
 int lm_parse_hash (u8 *input_buf, u32 input_len, hash_t *hash_buf, MAYBE_UNUSED hashconfig_t *hashconfig)
 {
-  if ((input_len < DISPLAY_LEN_MIN_3000) || (input_len > DISPLAY_LEN_MAX_3000)) return (PARSER_GLOBAL_LENGTH);
-
   u32 *digest = (u32 *) hash_buf->digest;
 
-  if (is_valid_hex_string (input_buf, 16) == false) return (PARSER_HASH_ENCODING);
+  token_t token;
 
-  digest[0] = hex_to_u32 ((const u8 *) &input_buf[ 0]);
-  digest[1] = hex_to_u32 ((const u8 *) &input_buf[ 8]);
+  token.token_cnt = 1;
+
+  token.len_min[0] = 16;
+  token.len_max[0] = 16;
+  token.attr[0]    = TOKEN_ATTR_VERIFY_LENGTH
+                   | TOKEN_ATTR_ENCODED_HEX;
+
+  const int rc_tokenizer = input_tokenizer (input_buf, input_len, &token);
+
+  if (rc_tokenizer != PARSER_OK) return (rc_tokenizer);
+
+  u8 *hash_pos = token.buf[0];
+
+  digest[0] = hex_to_u32 (hash_pos + 0);
+  digest[1] = hex_to_u32 (hash_pos + 8);
   digest[2] = 0;
   digest[3] = 0;
 
   u32 tt;
 
   IP (digest[0], digest[1], tt);
 
   digest[2] = 0;
   digest[3] = 0;
 
   return (PARSER_OK);
 }
 
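
Note: the declarative token reproduces the checks it replaces. TOKEN_ATTR_VERIFY_LENGTH with len_min[0] = len_max[0] = 16 enforces the 16-character length (taking over the job of the old DISPLAY_LEN_MIN/MAX_3000 guard), and TOKEN_ATTR_ENCODED_HEX makes input_tokenizer() apply the hex character check that was previously done by the explicit is_valid_hex_string (input_buf, 16) call. The digest is then read through token.buf[0]: for the well-known empty-password LM half "aad3b435b51404ee", hash_pos + 0 points at "aad3b435" and hash_pos + 8 at "b51404ee", each converted to one u32 by hex_to_u32 ().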