mirror of https://github.com/hashcat/hashcat synced 2024-11-24 14:27:14 +01:00

cryptoapi opts (1)

Gabriele Gristina 2021-05-06 23:24:46 +02:00
parent 286002a8d9
commit 17a83a50af
54 changed files with 16465 additions and 23 deletions


@@ -15,6 +15,7 @@
/* -------------------------------------------------------------------- */
/* */
/* Cleaned and optimized for GPU use with hashcat by Jens Steube */
/* Added 192-bit functions by Gabriele Gristina */
#include "inc_vendor.h" #include "inc_vendor.h"
#include "inc_types.h" #include "inc_types.h"
@ -690,6 +691,289 @@ DECLSPEC void serpent128_decrypt (const u32 *ks, const u32 *in, u32 *out)
out[3] = d; out[3] = d;
} }
// 192 bit key
DECLSPEC void serpent192_set_key (u32 *ks, const u32 *ukey)
{
ks[ 0] = ukey[0];
ks[ 1] = ukey[1];
ks[ 2] = ukey[2];
ks[ 3] = ukey[3];
ks[ 4] = ukey[4];
ks[ 5] = ukey[5];
ks[ 6] = 1;
ks[ 7] = 0;
ks[ 8] = hc_rotl32_S ((ks[ 7] ^ ks[ 5] ^ ks[ 3] ^ ks[ 0] ^ 0x9e3779b9 ^ 0), 11);
ks[ 9] = hc_rotl32_S ((ks[ 8] ^ ks[ 6] ^ ks[ 4] ^ ks[ 1] ^ 0x9e3779b9 ^ 1), 11);
ks[ 10] = hc_rotl32_S ((ks[ 9] ^ ks[ 7] ^ ks[ 5] ^ ks[ 2] ^ 0x9e3779b9 ^ 2), 11);
ks[ 11] = hc_rotl32_S ((ks[ 10] ^ ks[ 8] ^ ks[ 6] ^ ks[ 3] ^ 0x9e3779b9 ^ 3), 11);
ks[ 12] = hc_rotl32_S ((ks[ 11] ^ ks[ 9] ^ ks[ 7] ^ ks[ 4] ^ 0x9e3779b9 ^ 4), 11);
ks[ 13] = hc_rotl32_S ((ks[ 12] ^ ks[ 10] ^ ks[ 8] ^ ks[ 5] ^ 0x9e3779b9 ^ 5), 11);
ks[ 14] = hc_rotl32_S ((ks[ 13] ^ ks[ 11] ^ ks[ 9] ^ ks[ 6] ^ 0x9e3779b9 ^ 6), 11);
ks[ 15] = hc_rotl32_S ((ks[ 14] ^ ks[ 12] ^ ks[ 10] ^ ks[ 7] ^ 0x9e3779b9 ^ 7), 11);
ks[ 16] = hc_rotl32_S ((ks[ 15] ^ ks[ 13] ^ ks[ 11] ^ ks[ 8] ^ 0x9e3779b9 ^ 8), 11);
ks[ 17] = hc_rotl32_S ((ks[ 16] ^ ks[ 14] ^ ks[ 12] ^ ks[ 9] ^ 0x9e3779b9 ^ 9), 11);
ks[ 18] = hc_rotl32_S ((ks[ 17] ^ ks[ 15] ^ ks[ 13] ^ ks[ 10] ^ 0x9e3779b9 ^ 10), 11);
ks[ 19] = hc_rotl32_S ((ks[ 18] ^ ks[ 16] ^ ks[ 14] ^ ks[ 11] ^ 0x9e3779b9 ^ 11), 11);
ks[ 20] = hc_rotl32_S ((ks[ 19] ^ ks[ 17] ^ ks[ 15] ^ ks[ 12] ^ 0x9e3779b9 ^ 12), 11);
ks[ 21] = hc_rotl32_S ((ks[ 20] ^ ks[ 18] ^ ks[ 16] ^ ks[ 13] ^ 0x9e3779b9 ^ 13), 11);
ks[ 22] = hc_rotl32_S ((ks[ 21] ^ ks[ 19] ^ ks[ 17] ^ ks[ 14] ^ 0x9e3779b9 ^ 14), 11);
ks[ 23] = hc_rotl32_S ((ks[ 22] ^ ks[ 20] ^ ks[ 18] ^ ks[ 15] ^ 0x9e3779b9 ^ 15), 11);
ks[ 24] = hc_rotl32_S ((ks[ 23] ^ ks[ 21] ^ ks[ 19] ^ ks[ 16] ^ 0x9e3779b9 ^ 16), 11);
ks[ 25] = hc_rotl32_S ((ks[ 24] ^ ks[ 22] ^ ks[ 20] ^ ks[ 17] ^ 0x9e3779b9 ^ 17), 11);
ks[ 26] = hc_rotl32_S ((ks[ 25] ^ ks[ 23] ^ ks[ 21] ^ ks[ 18] ^ 0x9e3779b9 ^ 18), 11);
ks[ 27] = hc_rotl32_S ((ks[ 26] ^ ks[ 24] ^ ks[ 22] ^ ks[ 19] ^ 0x9e3779b9 ^ 19), 11);
ks[ 28] = hc_rotl32_S ((ks[ 27] ^ ks[ 25] ^ ks[ 23] ^ ks[ 20] ^ 0x9e3779b9 ^ 20), 11);
ks[ 29] = hc_rotl32_S ((ks[ 28] ^ ks[ 26] ^ ks[ 24] ^ ks[ 21] ^ 0x9e3779b9 ^ 21), 11);
ks[ 30] = hc_rotl32_S ((ks[ 29] ^ ks[ 27] ^ ks[ 25] ^ ks[ 22] ^ 0x9e3779b9 ^ 22), 11);
ks[ 31] = hc_rotl32_S ((ks[ 30] ^ ks[ 28] ^ ks[ 26] ^ ks[ 23] ^ 0x9e3779b9 ^ 23), 11);
ks[ 32] = hc_rotl32_S ((ks[ 31] ^ ks[ 29] ^ ks[ 27] ^ ks[ 24] ^ 0x9e3779b9 ^ 24), 11);
ks[ 33] = hc_rotl32_S ((ks[ 32] ^ ks[ 30] ^ ks[ 28] ^ ks[ 25] ^ 0x9e3779b9 ^ 25), 11);
ks[ 34] = hc_rotl32_S ((ks[ 33] ^ ks[ 31] ^ ks[ 29] ^ ks[ 26] ^ 0x9e3779b9 ^ 26), 11);
ks[ 35] = hc_rotl32_S ((ks[ 34] ^ ks[ 32] ^ ks[ 30] ^ ks[ 27] ^ 0x9e3779b9 ^ 27), 11);
ks[ 36] = hc_rotl32_S ((ks[ 35] ^ ks[ 33] ^ ks[ 31] ^ ks[ 28] ^ 0x9e3779b9 ^ 28), 11);
ks[ 37] = hc_rotl32_S ((ks[ 36] ^ ks[ 34] ^ ks[ 32] ^ ks[ 29] ^ 0x9e3779b9 ^ 29), 11);
ks[ 38] = hc_rotl32_S ((ks[ 37] ^ ks[ 35] ^ ks[ 33] ^ ks[ 30] ^ 0x9e3779b9 ^ 30), 11);
ks[ 39] = hc_rotl32_S ((ks[ 38] ^ ks[ 36] ^ ks[ 34] ^ ks[ 31] ^ 0x9e3779b9 ^ 31), 11);
ks[ 40] = hc_rotl32_S ((ks[ 39] ^ ks[ 37] ^ ks[ 35] ^ ks[ 32] ^ 0x9e3779b9 ^ 32), 11);
ks[ 41] = hc_rotl32_S ((ks[ 40] ^ ks[ 38] ^ ks[ 36] ^ ks[ 33] ^ 0x9e3779b9 ^ 33), 11);
ks[ 42] = hc_rotl32_S ((ks[ 41] ^ ks[ 39] ^ ks[ 37] ^ ks[ 34] ^ 0x9e3779b9 ^ 34), 11);
ks[ 43] = hc_rotl32_S ((ks[ 42] ^ ks[ 40] ^ ks[ 38] ^ ks[ 35] ^ 0x9e3779b9 ^ 35), 11);
ks[ 44] = hc_rotl32_S ((ks[ 43] ^ ks[ 41] ^ ks[ 39] ^ ks[ 36] ^ 0x9e3779b9 ^ 36), 11);
ks[ 45] = hc_rotl32_S ((ks[ 44] ^ ks[ 42] ^ ks[ 40] ^ ks[ 37] ^ 0x9e3779b9 ^ 37), 11);
ks[ 46] = hc_rotl32_S ((ks[ 45] ^ ks[ 43] ^ ks[ 41] ^ ks[ 38] ^ 0x9e3779b9 ^ 38), 11);
ks[ 47] = hc_rotl32_S ((ks[ 46] ^ ks[ 44] ^ ks[ 42] ^ ks[ 39] ^ 0x9e3779b9 ^ 39), 11);
ks[ 48] = hc_rotl32_S ((ks[ 47] ^ ks[ 45] ^ ks[ 43] ^ ks[ 40] ^ 0x9e3779b9 ^ 40), 11);
ks[ 49] = hc_rotl32_S ((ks[ 48] ^ ks[ 46] ^ ks[ 44] ^ ks[ 41] ^ 0x9e3779b9 ^ 41), 11);
ks[ 50] = hc_rotl32_S ((ks[ 49] ^ ks[ 47] ^ ks[ 45] ^ ks[ 42] ^ 0x9e3779b9 ^ 42), 11);
ks[ 51] = hc_rotl32_S ((ks[ 50] ^ ks[ 48] ^ ks[ 46] ^ ks[ 43] ^ 0x9e3779b9 ^ 43), 11);
ks[ 52] = hc_rotl32_S ((ks[ 51] ^ ks[ 49] ^ ks[ 47] ^ ks[ 44] ^ 0x9e3779b9 ^ 44), 11);
ks[ 53] = hc_rotl32_S ((ks[ 52] ^ ks[ 50] ^ ks[ 48] ^ ks[ 45] ^ 0x9e3779b9 ^ 45), 11);
ks[ 54] = hc_rotl32_S ((ks[ 53] ^ ks[ 51] ^ ks[ 49] ^ ks[ 46] ^ 0x9e3779b9 ^ 46), 11);
ks[ 55] = hc_rotl32_S ((ks[ 54] ^ ks[ 52] ^ ks[ 50] ^ ks[ 47] ^ 0x9e3779b9 ^ 47), 11);
ks[ 56] = hc_rotl32_S ((ks[ 55] ^ ks[ 53] ^ ks[ 51] ^ ks[ 48] ^ 0x9e3779b9 ^ 48), 11);
ks[ 57] = hc_rotl32_S ((ks[ 56] ^ ks[ 54] ^ ks[ 52] ^ ks[ 49] ^ 0x9e3779b9 ^ 49), 11);
ks[ 58] = hc_rotl32_S ((ks[ 57] ^ ks[ 55] ^ ks[ 53] ^ ks[ 50] ^ 0x9e3779b9 ^ 50), 11);
ks[ 59] = hc_rotl32_S ((ks[ 58] ^ ks[ 56] ^ ks[ 54] ^ ks[ 51] ^ 0x9e3779b9 ^ 51), 11);
ks[ 60] = hc_rotl32_S ((ks[ 59] ^ ks[ 57] ^ ks[ 55] ^ ks[ 52] ^ 0x9e3779b9 ^ 52), 11);
ks[ 61] = hc_rotl32_S ((ks[ 60] ^ ks[ 58] ^ ks[ 56] ^ ks[ 53] ^ 0x9e3779b9 ^ 53), 11);
ks[ 62] = hc_rotl32_S ((ks[ 61] ^ ks[ 59] ^ ks[ 57] ^ ks[ 54] ^ 0x9e3779b9 ^ 54), 11);
ks[ 63] = hc_rotl32_S ((ks[ 62] ^ ks[ 60] ^ ks[ 58] ^ ks[ 55] ^ 0x9e3779b9 ^ 55), 11);
ks[ 64] = hc_rotl32_S ((ks[ 63] ^ ks[ 61] ^ ks[ 59] ^ ks[ 56] ^ 0x9e3779b9 ^ 56), 11);
ks[ 65] = hc_rotl32_S ((ks[ 64] ^ ks[ 62] ^ ks[ 60] ^ ks[ 57] ^ 0x9e3779b9 ^ 57), 11);
ks[ 66] = hc_rotl32_S ((ks[ 65] ^ ks[ 63] ^ ks[ 61] ^ ks[ 58] ^ 0x9e3779b9 ^ 58), 11);
ks[ 67] = hc_rotl32_S ((ks[ 66] ^ ks[ 64] ^ ks[ 62] ^ ks[ 59] ^ 0x9e3779b9 ^ 59), 11);
ks[ 68] = hc_rotl32_S ((ks[ 67] ^ ks[ 65] ^ ks[ 63] ^ ks[ 60] ^ 0x9e3779b9 ^ 60), 11);
ks[ 69] = hc_rotl32_S ((ks[ 68] ^ ks[ 66] ^ ks[ 64] ^ ks[ 61] ^ 0x9e3779b9 ^ 61), 11);
ks[ 70] = hc_rotl32_S ((ks[ 69] ^ ks[ 67] ^ ks[ 65] ^ ks[ 62] ^ 0x9e3779b9 ^ 62), 11);
ks[ 71] = hc_rotl32_S ((ks[ 70] ^ ks[ 68] ^ ks[ 66] ^ ks[ 63] ^ 0x9e3779b9 ^ 63), 11);
ks[ 72] = hc_rotl32_S ((ks[ 71] ^ ks[ 69] ^ ks[ 67] ^ ks[ 64] ^ 0x9e3779b9 ^ 64), 11);
ks[ 73] = hc_rotl32_S ((ks[ 72] ^ ks[ 70] ^ ks[ 68] ^ ks[ 65] ^ 0x9e3779b9 ^ 65), 11);
ks[ 74] = hc_rotl32_S ((ks[ 73] ^ ks[ 71] ^ ks[ 69] ^ ks[ 66] ^ 0x9e3779b9 ^ 66), 11);
ks[ 75] = hc_rotl32_S ((ks[ 74] ^ ks[ 72] ^ ks[ 70] ^ ks[ 67] ^ 0x9e3779b9 ^ 67), 11);
ks[ 76] = hc_rotl32_S ((ks[ 75] ^ ks[ 73] ^ ks[ 71] ^ ks[ 68] ^ 0x9e3779b9 ^ 68), 11);
ks[ 77] = hc_rotl32_S ((ks[ 76] ^ ks[ 74] ^ ks[ 72] ^ ks[ 69] ^ 0x9e3779b9 ^ 69), 11);
ks[ 78] = hc_rotl32_S ((ks[ 77] ^ ks[ 75] ^ ks[ 73] ^ ks[ 70] ^ 0x9e3779b9 ^ 70), 11);
ks[ 79] = hc_rotl32_S ((ks[ 78] ^ ks[ 76] ^ ks[ 74] ^ ks[ 71] ^ 0x9e3779b9 ^ 71), 11);
ks[ 80] = hc_rotl32_S ((ks[ 79] ^ ks[ 77] ^ ks[ 75] ^ ks[ 72] ^ 0x9e3779b9 ^ 72), 11);
ks[ 81] = hc_rotl32_S ((ks[ 80] ^ ks[ 78] ^ ks[ 76] ^ ks[ 73] ^ 0x9e3779b9 ^ 73), 11);
ks[ 82] = hc_rotl32_S ((ks[ 81] ^ ks[ 79] ^ ks[ 77] ^ ks[ 74] ^ 0x9e3779b9 ^ 74), 11);
ks[ 83] = hc_rotl32_S ((ks[ 82] ^ ks[ 80] ^ ks[ 78] ^ ks[ 75] ^ 0x9e3779b9 ^ 75), 11);
ks[ 84] = hc_rotl32_S ((ks[ 83] ^ ks[ 81] ^ ks[ 79] ^ ks[ 76] ^ 0x9e3779b9 ^ 76), 11);
ks[ 85] = hc_rotl32_S ((ks[ 84] ^ ks[ 82] ^ ks[ 80] ^ ks[ 77] ^ 0x9e3779b9 ^ 77), 11);
ks[ 86] = hc_rotl32_S ((ks[ 85] ^ ks[ 83] ^ ks[ 81] ^ ks[ 78] ^ 0x9e3779b9 ^ 78), 11);
ks[ 87] = hc_rotl32_S ((ks[ 86] ^ ks[ 84] ^ ks[ 82] ^ ks[ 79] ^ 0x9e3779b9 ^ 79), 11);
ks[ 88] = hc_rotl32_S ((ks[ 87] ^ ks[ 85] ^ ks[ 83] ^ ks[ 80] ^ 0x9e3779b9 ^ 80), 11);
ks[ 89] = hc_rotl32_S ((ks[ 88] ^ ks[ 86] ^ ks[ 84] ^ ks[ 81] ^ 0x9e3779b9 ^ 81), 11);
ks[ 90] = hc_rotl32_S ((ks[ 89] ^ ks[ 87] ^ ks[ 85] ^ ks[ 82] ^ 0x9e3779b9 ^ 82), 11);
ks[ 91] = hc_rotl32_S ((ks[ 90] ^ ks[ 88] ^ ks[ 86] ^ ks[ 83] ^ 0x9e3779b9 ^ 83), 11);
ks[ 92] = hc_rotl32_S ((ks[ 91] ^ ks[ 89] ^ ks[ 87] ^ ks[ 84] ^ 0x9e3779b9 ^ 84), 11);
ks[ 93] = hc_rotl32_S ((ks[ 92] ^ ks[ 90] ^ ks[ 88] ^ ks[ 85] ^ 0x9e3779b9 ^ 85), 11);
ks[ 94] = hc_rotl32_S ((ks[ 93] ^ ks[ 91] ^ ks[ 89] ^ ks[ 86] ^ 0x9e3779b9 ^ 86), 11);
ks[ 95] = hc_rotl32_S ((ks[ 94] ^ ks[ 92] ^ ks[ 90] ^ ks[ 87] ^ 0x9e3779b9 ^ 87), 11);
ks[ 96] = hc_rotl32_S ((ks[ 95] ^ ks[ 93] ^ ks[ 91] ^ ks[ 88] ^ 0x9e3779b9 ^ 88), 11);
ks[ 97] = hc_rotl32_S ((ks[ 96] ^ ks[ 94] ^ ks[ 92] ^ ks[ 89] ^ 0x9e3779b9 ^ 89), 11);
ks[ 98] = hc_rotl32_S ((ks[ 97] ^ ks[ 95] ^ ks[ 93] ^ ks[ 90] ^ 0x9e3779b9 ^ 90), 11);
ks[ 99] = hc_rotl32_S ((ks[ 98] ^ ks[ 96] ^ ks[ 94] ^ ks[ 91] ^ 0x9e3779b9 ^ 91), 11);
ks[100] = hc_rotl32_S ((ks[ 99] ^ ks[ 97] ^ ks[ 95] ^ ks[ 92] ^ 0x9e3779b9 ^ 92), 11);
ks[101] = hc_rotl32_S ((ks[100] ^ ks[ 98] ^ ks[ 96] ^ ks[ 93] ^ 0x9e3779b9 ^ 93), 11);
ks[102] = hc_rotl32_S ((ks[101] ^ ks[ 99] ^ ks[ 97] ^ ks[ 94] ^ 0x9e3779b9 ^ 94), 11);
ks[103] = hc_rotl32_S ((ks[102] ^ ks[100] ^ ks[ 98] ^ ks[ 95] ^ 0x9e3779b9 ^ 95), 11);
ks[104] = hc_rotl32_S ((ks[103] ^ ks[101] ^ ks[ 99] ^ ks[ 96] ^ 0x9e3779b9 ^ 96), 11);
ks[105] = hc_rotl32_S ((ks[104] ^ ks[102] ^ ks[100] ^ ks[ 97] ^ 0x9e3779b9 ^ 97), 11);
ks[106] = hc_rotl32_S ((ks[105] ^ ks[103] ^ ks[101] ^ ks[ 98] ^ 0x9e3779b9 ^ 98), 11);
ks[107] = hc_rotl32_S ((ks[106] ^ ks[104] ^ ks[102] ^ ks[ 99] ^ 0x9e3779b9 ^ 99), 11);
ks[108] = hc_rotl32_S ((ks[107] ^ ks[105] ^ ks[103] ^ ks[100] ^ 0x9e3779b9 ^ 100), 11);
ks[109] = hc_rotl32_S ((ks[108] ^ ks[106] ^ ks[104] ^ ks[101] ^ 0x9e3779b9 ^ 101), 11);
ks[110] = hc_rotl32_S ((ks[109] ^ ks[107] ^ ks[105] ^ ks[102] ^ 0x9e3779b9 ^ 102), 11);
ks[111] = hc_rotl32_S ((ks[110] ^ ks[108] ^ ks[106] ^ ks[103] ^ 0x9e3779b9 ^ 103), 11);
ks[112] = hc_rotl32_S ((ks[111] ^ ks[109] ^ ks[107] ^ ks[104] ^ 0x9e3779b9 ^ 104), 11);
ks[113] = hc_rotl32_S ((ks[112] ^ ks[110] ^ ks[108] ^ ks[105] ^ 0x9e3779b9 ^ 105), 11);
ks[114] = hc_rotl32_S ((ks[113] ^ ks[111] ^ ks[109] ^ ks[106] ^ 0x9e3779b9 ^ 106), 11);
ks[115] = hc_rotl32_S ((ks[114] ^ ks[112] ^ ks[110] ^ ks[107] ^ 0x9e3779b9 ^ 107), 11);
ks[116] = hc_rotl32_S ((ks[115] ^ ks[113] ^ ks[111] ^ ks[108] ^ 0x9e3779b9 ^ 108), 11);
ks[117] = hc_rotl32_S ((ks[116] ^ ks[114] ^ ks[112] ^ ks[109] ^ 0x9e3779b9 ^ 109), 11);
ks[118] = hc_rotl32_S ((ks[117] ^ ks[115] ^ ks[113] ^ ks[110] ^ 0x9e3779b9 ^ 110), 11);
ks[119] = hc_rotl32_S ((ks[118] ^ ks[116] ^ ks[114] ^ ks[111] ^ 0x9e3779b9 ^ 111), 11);
ks[120] = hc_rotl32_S ((ks[119] ^ ks[117] ^ ks[115] ^ ks[112] ^ 0x9e3779b9 ^ 112), 11);
ks[121] = hc_rotl32_S ((ks[120] ^ ks[118] ^ ks[116] ^ ks[113] ^ 0x9e3779b9 ^ 113), 11);
ks[122] = hc_rotl32_S ((ks[121] ^ ks[119] ^ ks[117] ^ ks[114] ^ 0x9e3779b9 ^ 114), 11);
ks[123] = hc_rotl32_S ((ks[122] ^ ks[120] ^ ks[118] ^ ks[115] ^ 0x9e3779b9 ^ 115), 11);
ks[124] = hc_rotl32_S ((ks[123] ^ ks[121] ^ ks[119] ^ ks[116] ^ 0x9e3779b9 ^ 116), 11);
ks[125] = hc_rotl32_S ((ks[124] ^ ks[122] ^ ks[120] ^ ks[117] ^ 0x9e3779b9 ^ 117), 11);
ks[126] = hc_rotl32_S ((ks[125] ^ ks[123] ^ ks[121] ^ ks[118] ^ 0x9e3779b9 ^ 118), 11);
ks[127] = hc_rotl32_S ((ks[126] ^ ks[124] ^ ks[122] ^ ks[119] ^ 0x9e3779b9 ^ 119), 11);
ks[128] = hc_rotl32_S ((ks[127] ^ ks[125] ^ ks[123] ^ ks[120] ^ 0x9e3779b9 ^ 120), 11);
ks[129] = hc_rotl32_S ((ks[128] ^ ks[126] ^ ks[124] ^ ks[121] ^ 0x9e3779b9 ^ 121), 11);
ks[130] = hc_rotl32_S ((ks[129] ^ ks[127] ^ ks[125] ^ ks[122] ^ 0x9e3779b9 ^ 122), 11);
ks[131] = hc_rotl32_S ((ks[130] ^ ks[128] ^ ks[126] ^ ks[123] ^ 0x9e3779b9 ^ 123), 11);
ks[132] = hc_rotl32_S ((ks[131] ^ ks[129] ^ ks[127] ^ ks[124] ^ 0x9e3779b9 ^ 124), 11);
ks[133] = hc_rotl32_S ((ks[132] ^ ks[130] ^ ks[128] ^ ks[125] ^ 0x9e3779b9 ^ 125), 11);
ks[134] = hc_rotl32_S ((ks[133] ^ ks[131] ^ ks[129] ^ ks[126] ^ 0x9e3779b9 ^ 126), 11);
ks[135] = hc_rotl32_S ((ks[134] ^ ks[132] ^ ks[130] ^ ks[127] ^ 0x9e3779b9 ^ 127), 11);
ks[136] = hc_rotl32_S ((ks[135] ^ ks[133] ^ ks[131] ^ ks[128] ^ 0x9e3779b9 ^ 128), 11);
ks[137] = hc_rotl32_S ((ks[136] ^ ks[134] ^ ks[132] ^ ks[129] ^ 0x9e3779b9 ^ 129), 11);
ks[138] = hc_rotl32_S ((ks[137] ^ ks[135] ^ ks[133] ^ ks[130] ^ 0x9e3779b9 ^ 130), 11);
ks[139] = hc_rotl32_S ((ks[138] ^ ks[136] ^ ks[134] ^ ks[131] ^ 0x9e3779b9 ^ 131), 11);
u32 a,b,c,d,e,f,g,h;
u32 t1,t2,t3,t4,t5,t6,t7,t8,t9,t10,t11,t12,t13,t14,t15,t16;
k_set( 0,a,b,c,d); sb3(a,b,c,d,e,f,g,h); k_get( 0,e,f,g,h);
k_set( 1,a,b,c,d); sb2(a,b,c,d,e,f,g,h); k_get( 1,e,f,g,h);
k_set( 2,a,b,c,d); sb1(a,b,c,d,e,f,g,h); k_get( 2,e,f,g,h);
k_set( 3,a,b,c,d); sb0(a,b,c,d,e,f,g,h); k_get( 3,e,f,g,h);
k_set( 4,a,b,c,d); sb7(a,b,c,d,e,f,g,h); k_get( 4,e,f,g,h);
k_set( 5,a,b,c,d); sb6(a,b,c,d,e,f,g,h); k_get( 5,e,f,g,h);
k_set( 6,a,b,c,d); sb5(a,b,c,d,e,f,g,h); k_get( 6,e,f,g,h);
k_set( 7,a,b,c,d); sb4(a,b,c,d,e,f,g,h); k_get( 7,e,f,g,h);
k_set( 8,a,b,c,d); sb3(a,b,c,d,e,f,g,h); k_get( 8,e,f,g,h);
k_set( 9,a,b,c,d); sb2(a,b,c,d,e,f,g,h); k_get( 9,e,f,g,h);
k_set(10,a,b,c,d); sb1(a,b,c,d,e,f,g,h); k_get(10,e,f,g,h);
k_set(11,a,b,c,d); sb0(a,b,c,d,e,f,g,h); k_get(11,e,f,g,h);
k_set(12,a,b,c,d); sb7(a,b,c,d,e,f,g,h); k_get(12,e,f,g,h);
k_set(13,a,b,c,d); sb6(a,b,c,d,e,f,g,h); k_get(13,e,f,g,h);
k_set(14,a,b,c,d); sb5(a,b,c,d,e,f,g,h); k_get(14,e,f,g,h);
k_set(15,a,b,c,d); sb4(a,b,c,d,e,f,g,h); k_get(15,e,f,g,h);
k_set(16,a,b,c,d); sb3(a,b,c,d,e,f,g,h); k_get(16,e,f,g,h);
k_set(17,a,b,c,d); sb2(a,b,c,d,e,f,g,h); k_get(17,e,f,g,h);
k_set(18,a,b,c,d); sb1(a,b,c,d,e,f,g,h); k_get(18,e,f,g,h);
k_set(19,a,b,c,d); sb0(a,b,c,d,e,f,g,h); k_get(19,e,f,g,h);
k_set(20,a,b,c,d); sb7(a,b,c,d,e,f,g,h); k_get(20,e,f,g,h);
k_set(21,a,b,c,d); sb6(a,b,c,d,e,f,g,h); k_get(21,e,f,g,h);
k_set(22,a,b,c,d); sb5(a,b,c,d,e,f,g,h); k_get(22,e,f,g,h);
k_set(23,a,b,c,d); sb4(a,b,c,d,e,f,g,h); k_get(23,e,f,g,h);
k_set(24,a,b,c,d); sb3(a,b,c,d,e,f,g,h); k_get(24,e,f,g,h);
k_set(25,a,b,c,d); sb2(a,b,c,d,e,f,g,h); k_get(25,e,f,g,h);
k_set(26,a,b,c,d); sb1(a,b,c,d,e,f,g,h); k_get(26,e,f,g,h);
k_set(27,a,b,c,d); sb0(a,b,c,d,e,f,g,h); k_get(27,e,f,g,h);
k_set(28,a,b,c,d); sb7(a,b,c,d,e,f,g,h); k_get(28,e,f,g,h);
k_set(29,a,b,c,d); sb6(a,b,c,d,e,f,g,h); k_get(29,e,f,g,h);
k_set(30,a,b,c,d); sb5(a,b,c,d,e,f,g,h); k_get(30,e,f,g,h);
k_set(31,a,b,c,d); sb4(a,b,c,d,e,f,g,h); k_get(31,e,f,g,h);
k_set(32,a,b,c,d); sb3(a,b,c,d,e,f,g,h); k_get(32,e,f,g,h);
}
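The 132 unrolled assignments above implement the standard Serpent prekey recurrence, w[i] = rotl32 (w[i-8] ^ w[i-5] ^ w[i-3] ^ w[i-1] ^ PHI ^ i, 11), with the 192-bit user key padded to 256 bits via ks[6] = 1, ks[7] = 0. For illustration, a loop-form sketch of the same computation (not part of the commit; it assumes hashcat's u32 type and hc_rotl32_S helper, and the function name is hypothetical):

#define SERPENT_PHI 0x9e3779b9

DECLSPEC void serpent_prekeys_sketch (u32 *ks) // ks[0..7] preloaded from the padded user key
{
  // equivalent of the unrolled ks[8]..ks[139] assignments in serpent192_set_key
  for (int j = 8; j < 140; j++)
  {
    ks[j] = hc_rotl32_S (ks[j - 1] ^ ks[j - 3] ^ ks[j - 5] ^ ks[j - 8] ^ SERPENT_PHI ^ (u32) (j - 8), 11);
  }
}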
DECLSPEC void serpent192_encrypt (const u32 *ks, const u32 *in, u32 *out)
{
u32 a,b,c,d,e,f,g,h;
u32 t1,t2,t3,t4,t5,t6,t7,t8,t9,t10,t11,t12,t13,t14,t15,t16;
a = in[0];
b = in[1];
c = in[2];
d = in[3];
k_xor( 0,a,b,c,d); sb0(a,b,c,d,e,f,g,h); rot(e,f,g,h);
k_xor( 1,e,f,g,h); sb1(e,f,g,h,a,b,c,d); rot(a,b,c,d);
k_xor( 2,a,b,c,d); sb2(a,b,c,d,e,f,g,h); rot(e,f,g,h);
k_xor( 3,e,f,g,h); sb3(e,f,g,h,a,b,c,d); rot(a,b,c,d);
k_xor( 4,a,b,c,d); sb4(a,b,c,d,e,f,g,h); rot(e,f,g,h);
k_xor( 5,e,f,g,h); sb5(e,f,g,h,a,b,c,d); rot(a,b,c,d);
k_xor( 6,a,b,c,d); sb6(a,b,c,d,e,f,g,h); rot(e,f,g,h);
k_xor( 7,e,f,g,h); sb7(e,f,g,h,a,b,c,d); rot(a,b,c,d);
k_xor( 8,a,b,c,d); sb0(a,b,c,d,e,f,g,h); rot(e,f,g,h);
k_xor( 9,e,f,g,h); sb1(e,f,g,h,a,b,c,d); rot(a,b,c,d);
k_xor(10,a,b,c,d); sb2(a,b,c,d,e,f,g,h); rot(e,f,g,h);
k_xor(11,e,f,g,h); sb3(e,f,g,h,a,b,c,d); rot(a,b,c,d);
k_xor(12,a,b,c,d); sb4(a,b,c,d,e,f,g,h); rot(e,f,g,h);
k_xor(13,e,f,g,h); sb5(e,f,g,h,a,b,c,d); rot(a,b,c,d);
k_xor(14,a,b,c,d); sb6(a,b,c,d,e,f,g,h); rot(e,f,g,h);
k_xor(15,e,f,g,h); sb7(e,f,g,h,a,b,c,d); rot(a,b,c,d);
k_xor(16,a,b,c,d); sb0(a,b,c,d,e,f,g,h); rot(e,f,g,h);
k_xor(17,e,f,g,h); sb1(e,f,g,h,a,b,c,d); rot(a,b,c,d);
k_xor(18,a,b,c,d); sb2(a,b,c,d,e,f,g,h); rot(e,f,g,h);
k_xor(19,e,f,g,h); sb3(e,f,g,h,a,b,c,d); rot(a,b,c,d);
k_xor(20,a,b,c,d); sb4(a,b,c,d,e,f,g,h); rot(e,f,g,h);
k_xor(21,e,f,g,h); sb5(e,f,g,h,a,b,c,d); rot(a,b,c,d);
k_xor(22,a,b,c,d); sb6(a,b,c,d,e,f,g,h); rot(e,f,g,h);
k_xor(23,e,f,g,h); sb7(e,f,g,h,a,b,c,d); rot(a,b,c,d);
k_xor(24,a,b,c,d); sb0(a,b,c,d,e,f,g,h); rot(e,f,g,h);
k_xor(25,e,f,g,h); sb1(e,f,g,h,a,b,c,d); rot(a,b,c,d);
k_xor(26,a,b,c,d); sb2(a,b,c,d,e,f,g,h); rot(e,f,g,h);
k_xor(27,e,f,g,h); sb3(e,f,g,h,a,b,c,d); rot(a,b,c,d);
k_xor(28,a,b,c,d); sb4(a,b,c,d,e,f,g,h); rot(e,f,g,h);
k_xor(29,e,f,g,h); sb5(e,f,g,h,a,b,c,d); rot(a,b,c,d);
k_xor(30,a,b,c,d); sb6(a,b,c,d,e,f,g,h); rot(e,f,g,h);
k_xor(31,e,f,g,h); sb7(e,f,g,h,a,b,c,d);
k_xor(32,a,b,c,d);
out[0] = a;
out[1] = b;
out[2] = c;
out[3] = d;
}
DECLSPEC void serpent192_decrypt (const u32 *ks, const u32 *in, u32 *out)
{
u32 a,b,c,d,e,f,g,h;
u32 t1,t2,t3,t4,t5,t6,t7,t8,t9,t10,t11,t12,t13,t14,t15,t16;
a = in[0];
b = in[1];
c = in[2];
d = in[3];
k_xor(32,a,b,c,d);
ib7(a,b,c,d,e,f,g,h); k_xor(31,e,f,g,h);
irot(e,f,g,h); ib6(e,f,g,h,a,b,c,d); k_xor(30,a,b,c,d);
irot(a,b,c,d); ib5(a,b,c,d,e,f,g,h); k_xor(29,e,f,g,h);
irot(e,f,g,h); ib4(e,f,g,h,a,b,c,d); k_xor(28,a,b,c,d);
irot(a,b,c,d); ib3(a,b,c,d,e,f,g,h); k_xor(27,e,f,g,h);
irot(e,f,g,h); ib2(e,f,g,h,a,b,c,d); k_xor(26,a,b,c,d);
irot(a,b,c,d); ib1(a,b,c,d,e,f,g,h); k_xor(25,e,f,g,h);
irot(e,f,g,h); ib0(e,f,g,h,a,b,c,d); k_xor(24,a,b,c,d);
irot(a,b,c,d); ib7(a,b,c,d,e,f,g,h); k_xor(23,e,f,g,h);
irot(e,f,g,h); ib6(e,f,g,h,a,b,c,d); k_xor(22,a,b,c,d);
irot(a,b,c,d); ib5(a,b,c,d,e,f,g,h); k_xor(21,e,f,g,h);
irot(e,f,g,h); ib4(e,f,g,h,a,b,c,d); k_xor(20,a,b,c,d);
irot(a,b,c,d); ib3(a,b,c,d,e,f,g,h); k_xor(19,e,f,g,h);
irot(e,f,g,h); ib2(e,f,g,h,a,b,c,d); k_xor(18,a,b,c,d);
irot(a,b,c,d); ib1(a,b,c,d,e,f,g,h); k_xor(17,e,f,g,h);
irot(e,f,g,h); ib0(e,f,g,h,a,b,c,d); k_xor(16,a,b,c,d);
irot(a,b,c,d); ib7(a,b,c,d,e,f,g,h); k_xor(15,e,f,g,h);
irot(e,f,g,h); ib6(e,f,g,h,a,b,c,d); k_xor(14,a,b,c,d);
irot(a,b,c,d); ib5(a,b,c,d,e,f,g,h); k_xor(13,e,f,g,h);
irot(e,f,g,h); ib4(e,f,g,h,a,b,c,d); k_xor(12,a,b,c,d);
irot(a,b,c,d); ib3(a,b,c,d,e,f,g,h); k_xor(11,e,f,g,h);
irot(e,f,g,h); ib2(e,f,g,h,a,b,c,d); k_xor(10,a,b,c,d);
irot(a,b,c,d); ib1(a,b,c,d,e,f,g,h); k_xor( 9,e,f,g,h);
irot(e,f,g,h); ib0(e,f,g,h,a,b,c,d); k_xor( 8,a,b,c,d);
irot(a,b,c,d); ib7(a,b,c,d,e,f,g,h); k_xor( 7,e,f,g,h);
irot(e,f,g,h); ib6(e,f,g,h,a,b,c,d); k_xor( 6,a,b,c,d);
irot(a,b,c,d); ib5(a,b,c,d,e,f,g,h); k_xor( 5,e,f,g,h);
irot(e,f,g,h); ib4(e,f,g,h,a,b,c,d); k_xor( 4,a,b,c,d);
irot(a,b,c,d); ib3(a,b,c,d,e,f,g,h); k_xor( 3,e,f,g,h);
irot(e,f,g,h); ib2(e,f,g,h,a,b,c,d); k_xor( 2,a,b,c,d);
irot(a,b,c,d); ib1(a,b,c,d,e,f,g,h); k_xor( 1,e,f,g,h);
irot(e,f,g,h); ib0(e,f,g,h,a,b,c,d); k_xor( 0,a,b,c,d);
out[0] = a;
out[1] = b;
out[2] = c;
out[3] = d;
}
// 256 bit key
DECLSPEC void serpent256_set_key (u32 *ks, const u32 *ukey)


@@ -9,6 +9,9 @@
DECLSPEC void serpent128_set_key (u32 *ks, const u32 *ukey);
DECLSPEC void serpent128_encrypt (const u32 *ks, const u32 *in, u32 *out);
DECLSPEC void serpent128_decrypt (const u32 *ks, const u32 *in, u32 *out);
DECLSPEC void serpent192_set_key (u32 *ks, const u32 *ukey);
DECLSPEC void serpent192_encrypt (const u32 *ks, const u32 *in, u32 *out);
DECLSPEC void serpent192_decrypt (const u32 *ks, const u32 *in, u32 *out);
DECLSPEC void serpent256_set_key (u32 *ks, const u32 *ukey);
DECLSPEC void serpent256_encrypt (const u32 *ks, const u32 *in, u32 *out);
DECLSPEC void serpent256_decrypt (const u32 *ks, const u32 *in, u32 *out);


@@ -18,6 +18,7 @@
/* -------------------------------------------------------------------- */
/* */
/* Cleaned and optimized for GPU use with hashcat by Jens Steube */
/* Added 192-bit functions by Gabriele Gristina */
#include "inc_vendor.h" #include "inc_vendor.h"
#include "inc_types.h" #include "inc_types.h"
@ -79,7 +80,8 @@ CONSTANT_VK u32a q_tab[2][256] =
CONSTANT_VK u32a m_tab[4][256] = CONSTANT_VK u32a m_tab[4][256] =
{ {
{ 0xBCBC3275, 0xECEC21F3, 0x202043C6, 0xB3B3C9F4, 0xDADA03DB, 0x02028B7B, {
0xBCBC3275, 0xECEC21F3, 0x202043C6, 0xB3B3C9F4, 0xDADA03DB, 0x02028B7B,
0xE2E22BFB, 0x9E9EFAC8, 0xC9C9EC4A, 0xD4D409D3, 0x18186BE6, 0x1E1E9F6B, 0xE2E22BFB, 0x9E9EFAC8, 0xC9C9EC4A, 0xD4D409D3, 0x18186BE6, 0x1E1E9F6B,
0x98980E45, 0xB2B2387D, 0xA6A6D2E8, 0x2626B74B, 0x3C3C57D6, 0x93938A32, 0x98980E45, 0xB2B2387D, 0xA6A6D2E8, 0x2626B74B, 0x3C3C57D6, 0x93938A32,
0x8282EED8, 0x525298FD, 0x7B7BD437, 0xBBBB3771, 0x5B5B97F1, 0x474783E1, 0x8282EED8, 0x525298FD, 0x7B7BD437, 0xBBBB3771, 0x5B5B97F1, 0x474783E1,
@@ -121,9 +123,10 @@ CONSTANT_VK u32a m_tab[4][256] =
0xE6E6540D, 0xDBDBF252, 0x92927BBB, 0xB7B7B602, 0x6969CA2F, 0x3939D9A9,
0xD3D30CD7, 0xA7A72361, 0xA2A2AD1E, 0xC3C399B4, 0x6C6C4450, 0x07070504,
0x04047FF6, 0x272746C2, 0xACACA716, 0xD0D07625, 0x50501386, 0xDCDCF756,
0x84841A55, 0xE1E15109, 0x7A7A25BE, 0x1313EF91
},
{
0xA9D93939, 0x67901717, 0xB3719C9C, 0xE8D2A6A6, 0x04050707, 0xFD985252,
0xA3658080, 0x76DFE4E4, 0x9A084545, 0x92024B4B, 0x80A0E0E0, 0x78665A5A,
0xE4DDAFAF, 0xDDB06A6A, 0xD1BF6363, 0x38362A2A, 0x0D54E6E6, 0xC6432020,
0x3562CCCC, 0x98BEF2F2, 0x181E1212, 0xF724EBEB, 0xECD7A1A1, 0x6C774141,
@@ -165,9 +168,10 @@ CONSTANT_VK u32a m_tab[4][256] =
0x2BCF6E6E, 0x40507070, 0xDCEB8585, 0xFE750A0A, 0x328A9393, 0xA48DDFDF,
0xCA4C2929, 0x10141C1C, 0x2173D7D7, 0xF0CCB4B4, 0xD309D4D4, 0x5D108A8A,
0x0FE25151, 0x00000000, 0x6F9A1919, 0x9DE01A1A, 0x368F9494, 0x42E6C7C7,
0x4AECC9C9, 0x5EFDD2D2, 0xC1AB7F7F, 0xE0D8A8A8
},
{
0xBC75BC32, 0xECF3EC21, 0x20C62043, 0xB3F4B3C9, 0xDADBDA03, 0x027B028B,
0xE2FBE22B, 0x9EC89EFA, 0xC94AC9EC, 0xD4D3D409, 0x18E6186B, 0x1E6B1E9F,
0x9845980E, 0xB27DB238, 0xA6E8A6D2, 0x264B26B7, 0x3CD63C57, 0x9332938A,
0x82D882EE, 0x52FD5298, 0x7B377BD4, 0xBB71BB37, 0x5BF15B97, 0x47E14783,
@@ -209,9 +213,10 @@ CONSTANT_VK u32a m_tab[4][256] =
0xE60DE654, 0xDB52DBF2, 0x92BB927B, 0xB702B7B6, 0x692F69CA, 0x39A939D9,
0xD3D7D30C, 0xA761A723, 0xA21EA2AD, 0xC3B4C399, 0x6C506C44, 0x07040705,
0x04F6047F, 0x27C22746, 0xAC16ACA7, 0xD025D076, 0x50865013, 0xDC56DCF7,
0x8455841A, 0xE109E151, 0x7ABE7A25, 0x139113EF
},
{
0xD939A9D9, 0x90176790, 0x719CB371, 0xD2A6E8D2, 0x05070405, 0x9852FD98,
0x6580A365, 0xDFE476DF, 0x08459A08, 0x024B9202, 0xA0E080A0, 0x665A7866,
0xDDAFE4DD, 0xB06ADDB0, 0xBF63D1BF, 0x362A3836, 0x54E60D54, 0x4320C643,
0x62CC3562, 0xBEF298BE, 0x1E12181E, 0x24EBF724, 0xD7A1ECD7, 0x77416C77,
@@ -253,7 +258,8 @@ CONSTANT_VK u32a m_tab[4][256] =
0xCF6E2BCF, 0x50704050, 0xEB85DCEB, 0x750AFE75, 0x8A93328A, 0x8DDFA48D,
0x4C29CA4C, 0x141C1014, 0x73D72173, 0xCCB4F0CC, 0x09D4D309, 0x108A5D10,
0xE2510FE2, 0x00000000, 0x9A196F9A, 0xE01A9DE0, 0x8F94368F, 0xE6C742E6,
0xECC94AEC, 0xFDD25EFD, 0xAB7FC1AB, 0xD8A8E0D8
}
};
#define g1_fun128(x) \
@@ -268,6 +274,31 @@ CONSTANT_VK u32a m_tab[4][256] =
mds (2, q22 (unpack_v8c_from_v32_S (x), sk)) ^ \
mds (3, q23 (unpack_v8d_from_v32_S (x), sk)))
#define g1_fun192(x) \
(mds (0, q30 (unpack_v8d_from_v32_S (x), sk)) ^ \
mds (1, q31 (unpack_v8a_from_v32_S (x), sk)) ^ \
mds (2, q32 (unpack_v8b_from_v32_S (x), sk)) ^ \
mds (3, q33 (unpack_v8c_from_v32_S (x), sk)))
#define g0_fun192(x) \
(mds (0, q30 (unpack_v8a_from_v32_S (x), sk)) ^ \
mds (1, q31 (unpack_v8b_from_v32_S (x), sk)) ^ \
mds (2, q32 (unpack_v8c_from_v32_S (x), sk)) ^ \
mds (3, q33 (unpack_v8d_from_v32_S (x), sk)))
#define g1_fun256(x) \
(mds (0, q40 (unpack_v8d_from_v32_S (x), sk)) ^ \
mds (1, q41 (unpack_v8a_from_v32_S (x), sk)) ^ \
mds (2, q42 (unpack_v8b_from_v32_S (x), sk)) ^ \
mds (3, q43 (unpack_v8c_from_v32_S (x), sk)))
#define g0_fun256(x) \
(mds (0, q40 (unpack_v8a_from_v32_S (x), sk)) ^ \
mds (1, q41 (unpack_v8b_from_v32_S (x), sk)) ^ \
mds (2, q42 (unpack_v8c_from_v32_S (x), sk)) ^ \
mds (3, q43 (unpack_v8d_from_v32_S (x), sk)))
#define f_rnd128(i) \
{ \
const u32 t0 = g0_fun128 (data[0]); \
@@ -292,6 +323,30 @@ CONSTANT_VK u32a m_tab[4][256] =
data[1] = hc_rotr32_S (data[1] ^ (t2 + 2 * t3 + lk[4 * (i) + 9]), 1); \
}
#define f_rnd192(i) \
{ \
const u32 t0 = g0_fun192 (data[0]); \
const u32 t1 = g1_fun192 (data[1]); \
data[2] = hc_rotr32_S (data[2] ^ (t0 + t1 + lk[4 * (i) + 8]), 1); \
data[3] = hc_rotl32_S (data[3], 1) ^ (t0 + 2 * t1 + lk[4 * (i) + 9]); \
const u32 t2 = g0_fun192 (data[2]); \
const u32 t3 = g1_fun192 (data[3]); \
data[0] = hc_rotr32_S (data[0] ^ (t2 + t3 + lk[4 * (i) + 10]), 1); \
data[1] = hc_rotl32_S (data[1], 1) ^ (t2 + 2 * t3 + lk[4 * (i) + 11]); \
}
#define i_rnd192(i) \
{ \
const u32 t0 = g0_fun192 (data[0]); \
const u32 t1 = g1_fun192 (data[1]); \
data[2] = hc_rotl32_S (data[2], 1) ^ (t0 + t1 + lk[4 * (i) + 10]); \
data[3] = hc_rotr32_S (data[3] ^ (t0 + 2 * t1 + lk[4 * (i) + 11]), 1); \
const u32 t2 = g0_fun192 (data[2]); \
const u32 t3 = g1_fun192 (data[3]); \
data[0] = hc_rotl32_S (data[0], 1) ^ (t2 + t3 + lk[4 * (i) + 8]); \
data[1] = hc_rotr32_S (data[1] ^ (t2 + 2 * t3 + lk[4 * (i) + 9]), 1); \
}
#define f_rnd256(i) \
{ \
const u32 t0 = g0_fun256 (data[0]); \
@@ -325,6 +380,11 @@ CONSTANT_VK u32a m_tab[4][256] =
#define q22(x,k) q (1, q (0, x) ^ unpack_v8c_from_v32_S (k[1])) ^ unpack_v8c_from_v32_S (k[0])
#define q23(x,k) q (1, q (1, x) ^ unpack_v8d_from_v32_S (k[1])) ^ unpack_v8d_from_v32_S (k[0])
#define q30(x,k) q (0, q (0, q (1, x) ^ unpack_v8a_from_v32_S (k[2])) ^ unpack_v8a_from_v32_S (k[1])) ^ unpack_v8a_from_v32_S (k[0])
#define q31(x,k) q (0, q (1, q (1, x) ^ unpack_v8b_from_v32_S (k[2])) ^ unpack_v8b_from_v32_S (k[1])) ^ unpack_v8b_from_v32_S (k[0])
#define q32(x,k) q (1, q (0, q (0, x) ^ unpack_v8c_from_v32_S (k[2])) ^ unpack_v8c_from_v32_S (k[1])) ^ unpack_v8c_from_v32_S (k[0])
#define q33(x,k) q (1, q (1, q (0, x) ^ unpack_v8d_from_v32_S (k[2])) ^ unpack_v8d_from_v32_S (k[1])) ^ unpack_v8d_from_v32_S (k[0])
#define q40(x,k) q (0, q (0, q (1, q (1, x) ^ unpack_v8a_from_v32_S (k[3])) ^ unpack_v8a_from_v32_S (k[2])) ^ unpack_v8a_from_v32_S (k[1])) ^ unpack_v8a_from_v32_S (k[0])
#define q41(x,k) q (0, q (1, q (1, q (0, x) ^ unpack_v8b_from_v32_S (k[3])) ^ unpack_v8b_from_v32_S (k[2])) ^ unpack_v8b_from_v32_S (k[1])) ^ unpack_v8b_from_v32_S (k[0])
#define q42(x,k) q (1, q (0, q (0, q (0, x) ^ unpack_v8c_from_v32_S (k[3])) ^ unpack_v8c_from_v32_S (k[2])) ^ unpack_v8c_from_v32_S (k[1])) ^ unpack_v8c_from_v32_S (k[0])
@@ -361,6 +421,8 @@ DECLSPEC u32 mds_rem (u32 p0, u32 p1)
return p1;
}
// 128 bit key
DECLSPEC u32 h_fun128 (const u32 x, const u32 *key)
{
u32 b0, b1, b2, b3;
@@ -459,20 +521,116 @@ DECLSPEC void twofish128_decrypt (const u32 *sk, const u32 *lk, const u32 *in, u
out[3] = data[1] ^ lk[3];
}
// 192 bit key
DECLSPEC u32 h_fun192 (const u32 x, const u32 *key)
{
u32 b0, b1, b2, b3;
b0 = unpack_v8a_from_v32_S (x);
b1 = unpack_v8b_from_v32_S (x);
b2 = unpack_v8c_from_v32_S (x);
b3 = unpack_v8d_from_v32_S (x);
b0 = q (1, b0) ^ unpack_v8a_from_v32_S (key[2]);
b1 = q (1, b1) ^ unpack_v8b_from_v32_S (key[2]);
b2 = q (0, b2) ^ unpack_v8c_from_v32_S (key[2]);
b3 = q (0, b3) ^ unpack_v8d_from_v32_S (key[2]);
b0 = q (0, (q (0, b0) ^ unpack_v8a_from_v32_S (key[1]))) ^ unpack_v8a_from_v32_S (key[0]);
b1 = q (0, (q (1, b1) ^ unpack_v8b_from_v32_S (key[1]))) ^ unpack_v8b_from_v32_S (key[0]);
b2 = q (1, (q (0, b2) ^ unpack_v8c_from_v32_S (key[1]))) ^ unpack_v8c_from_v32_S (key[0]);
b3 = q (1, (q (1, b3) ^ unpack_v8d_from_v32_S (key[1]))) ^ unpack_v8d_from_v32_S (key[0]);
return mds (0, b0) ^ mds (1, b1) ^ mds (2, b2) ^ mds (3, b3);
}
DECLSPEC void twofish192_set_key (u32 *sk, u32 *lk, const u32 *ukey)
{
u32 me_key[3];
me_key[0] = ukey[0];
me_key[1] = ukey[2];
me_key[2] = ukey[4];
u32 mo_key[3];
mo_key[0] = ukey[1];
mo_key[1] = ukey[3];
mo_key[2] = ukey[5];
sk[2] = mds_rem (me_key[0], mo_key[0]);
sk[1] = mds_rem (me_key[1], mo_key[1]);
sk[0] = mds_rem (me_key[2], mo_key[2]);
#ifdef _unroll
#pragma unroll
#endif
for (int i = 0; i < 40; i += 2)
{
u32 a = 0x01010101 * i;
u32 b = 0x01010101 + a;
a = h_fun192 (a, me_key);
b = h_fun192 (b, mo_key);
b = hc_rotl32_S (b, 8);
lk[i + 0] = a + b;
lk[i + 1] = hc_rotl32_S (a + 2 * b, 9);
}
}
DECLSPEC void twofish192_encrypt (const u32 *sk, const u32 *lk, const u32 *in, u32 *out)
{
u32 data[4];
data[0] = in[0] ^ lk[0];
data[1] = in[1] ^ lk[1];
data[2] = in[2] ^ lk[2];
data[3] = in[3] ^ lk[3];
f_rnd192 (0);
f_rnd192 (1);
f_rnd192 (2);
f_rnd192 (3);
f_rnd192 (4);
f_rnd192 (5);
f_rnd192 (6);
f_rnd192 (7);
out[0] = data[2] ^ lk[4];
out[1] = data[3] ^ lk[5];
out[2] = data[0] ^ lk[6];
out[3] = data[1] ^ lk[7];
}
DECLSPEC void twofish192_decrypt (const u32 *sk, const u32 *lk, const u32 *in, u32 *out)
{
u32 data[4];
data[0] = in[0] ^ lk[4];
data[1] = in[1] ^ lk[5];
data[2] = in[2] ^ lk[6];
data[3] = in[3] ^ lk[7];
i_rnd192 (7);
i_rnd192 (6);
i_rnd192 (5);
i_rnd192 (4);
i_rnd192 (3);
i_rnd192 (2);
i_rnd192 (1);
i_rnd192 (0);
out[0] = data[2] ^ lk[0];
out[1] = data[3] ^ lk[1];
out[2] = data[0] ^ lk[2];
out[3] = data[1] ^ lk[3];
}
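For illustration (not part of the commit; the function name and local buffers below are hypothetical), a minimal round-trip of the 192-bit Twofish helpers added above, where sk[] receives the S-box key words from mds_rem and lk[] the 40 expanded subkeys:

DECLSPEC void twofish192_selftest_sketch (const u32 *ukey, const u32 *pt, u32 *out)
{
  u32 sk[4];  // S-box key material (3 words used for a 192-bit key)
  u32 lk[40]; // expanded round subkeys
  u32 ct[4];  // ciphertext block

  twofish192_set_key (sk, lk, ukey);    // ukey: 6 words (192 bits)
  twofish192_encrypt (sk, lk, pt, ct);  // pt: 4-word plaintext block
  twofish192_decrypt (sk, lk, ct, out); // out should equal pt
}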
// 256 bit key
#define g1_fun256(x) \
(mds (0, q40 (unpack_v8d_from_v32_S (x), sk)) ^ \
mds (1, q41 (unpack_v8a_from_v32_S (x), sk)) ^ \
mds (2, q42 (unpack_v8b_from_v32_S (x), sk)) ^ \
mds (3, q43 (unpack_v8c_from_v32_S (x), sk)))
#define g0_fun256(x) \
(mds (0, q40 (unpack_v8a_from_v32_S (x), sk)) ^ \
mds (1, q41 (unpack_v8b_from_v32_S (x), sk)) ^ \
mds (2, q42 (unpack_v8c_from_v32_S (x), sk)) ^ \
mds (3, q43 (unpack_v8d_from_v32_S (x), sk)))
DECLSPEC u32 h_fun256 (const u32 x, const u32 *key)
{
u32 b0, b1, b2, b3;
@@ -589,8 +747,15 @@ DECLSPEC void twofish256_decrypt (const u32 *sk, const u32 *lk, const u32 *in, u
#undef g1_fun128
#undef g0_fun128
#undef g1_fun192
#undef g0_fun192
#undef g1_fun256
#undef g0_fun256
#undef f_rnd128
#undef i_rnd128
#undef f_rnd192
#undef i_rnd192
#undef f_rnd256
#undef i_rnd256
@@ -602,6 +767,12 @@ DECLSPEC void twofish256_decrypt (const u32 *sk, const u32 *lk, const u32 *in, u
#undef q21
#undef q22
#undef q23
#undef q30
#undef q31
#undef q32
#undef q33
#undef q40
#undef q41
#undef q42


@@ -11,6 +11,10 @@ DECLSPEC u32 h_fun128 (const u32 x, const u32 *key);
DECLSPEC void twofish128_set_key (u32 *sk, u32 *lk, const u32 *ukey);
DECLSPEC void twofish128_encrypt (const u32 *sk, const u32 *lk, const u32 *in, u32 *out);
DECLSPEC void twofish128_decrypt (const u32 *sk, const u32 *lk, const u32 *in, u32 *out);
DECLSPEC u32 h_fun192 (const u32 x, const u32 *key);
DECLSPEC void twofish192_set_key (u32 *sk, u32 *lk, const u32 *ukey);
DECLSPEC void twofish192_encrypt (const u32 *sk, const u32 *lk, const u32 *in, u32 *out);
DECLSPEC void twofish192_decrypt (const u32 *sk, const u32 *lk, const u32 *in, u32 *out);
DECLSPEC u32 h_fun256 (const u32 x, const u32 *key);
DECLSPEC void twofish256_set_key (u32 *sk, u32 *lk, const u32 *ukey);
DECLSPEC void twofish256_encrypt (const u32 *sk, const u32 *lk, const u32 *in, u32 *out);

OpenCL/m14511_a0-pure.cl (new file, 398 lines)

@@ -0,0 +1,398 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_rp.h"
#include "inc_rp.cl"
#include "inc_scalar.cl"
#include "inc_hash_sha1.cl"
#include "inc_cipher_aes.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14511_mxx (KERN_ATTR_RULES_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* aes shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u32 s_te0[256];
LOCAL_VK u32 s_te1[256];
LOCAL_VK u32 s_te2[256];
LOCAL_VK u32 s_te3[256];
LOCAL_VK u32 s_te4[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_te0[i] = te0[i];
s_te1[i] = te1[i];
s_te2[i] = te2[i];
s_te3[i] = te3[i];
s_te4[i] = te4[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u32a *s_te0 = te0;
CONSTANT_AS u32a *s_te1 = te1;
CONSTANT_AS u32a *s_te2 = te2;
CONSTANT_AS u32a *s_te3 = te3;
CONSTANT_AS u32a *s_te4 = te4;
#endif
if (gid >= gid_max) return;
/**
* base
*/
u32 aes_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
COPY_PW (pws[gid]);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
pw_t tmp = PASTE_PW;
tmp.pw_len = apply_rules (rules_buf[il_pos].cmds, tmp.i, tmp.pw_len);
u32 w[64];
u32 w_len = tmp.pw_len;
for (u32 i = 0; i < 64; i++) w[i] = tmp.i[i];
sha1_ctx_t ctx0;
sha1_init (&ctx0);
sha1_update_swap (&ctx0, tmp.i, tmp.pw_len);
sha1_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (aes_key_len > 128)
{
k4 = ctx0.h[4];
sha1_ctx_t ctx;
sha1_init (&ctx);
sha1_update_swap (&ctx, padding, 1);
sha1_update_swap (&ctx, w, w_len);
sha1_final (&ctx);
k5 = ctx.h[0];
if (aes_key_len > 192)
{
k6 = ctx.h[1];
k7 = ctx.h[2];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (aes_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (aes_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[0]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[1]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[2]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[3])
};
// CT
u32 CT[4] = { 0 };
// aes
u32 ks[60] = { 0 };
if (aes_key_len == 128)
{
AES128_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES128_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else if (aes_key_len == 192)
{
AES192_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES192_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else
{
AES256_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES256_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
const u32 r0 = CT[0];
const u32 r1 = CT[1];
const u32 r2 = CT[2];
const u32 r3 = CT[3];
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
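For reference (not part of the commit), the key derivation these cryptoapi kernels perform per candidate is: the first 160 bits of key material are SHA1(password), and any remaining bytes for 192/256-bit keys come from SHA1(0x41 || password); the salt block is then encrypted with the derived key and compared against the digest. A host-side sketch of the derivation, assuming OpenSSL's SHA1() and a hypothetical helper name (byte-order handling, which differs between the AES and Serpent kernels, is glossed over here):

#include <openssl/sha.h>
#include <string.h>

// key_bits is 128, 192 or 256; pw_len must be <= 256 for this sketch
static void cryptoapi_derive_key_sketch (const unsigned char *pw, size_t pw_len,
                                         size_t key_bits, unsigned char *key)
{
  unsigned char d1[SHA_DIGEST_LENGTH]; // SHA1(pw)         -> k0..k4 above
  unsigned char d2[SHA_DIGEST_LENGTH]; // SHA1(0x41 || pw) -> k5..k7 above
  unsigned char buf[1 + 256];

  SHA1 (pw, pw_len, d1);

  buf[0] = 0x41; // matches padding[0] = 0x00000041 in the kernel
  memcpy (buf + 1, pw, pw_len);
  SHA1 (buf, 1 + pw_len, d2); // only consumed for 192/256-bit keys

  const size_t key_len = key_bits / 8;

  memcpy (key, d1, key_len < sizeof (d1) ? key_len : sizeof (d1));

  if (key_len > sizeof (d1)) memcpy (key + sizeof (d1), d2, key_len - sizeof (d1));
}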
KERNEL_FQ void m14511_sxx (KERN_ATTR_RULES_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* aes shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u32 s_te0[256];
LOCAL_VK u32 s_te1[256];
LOCAL_VK u32 s_te2[256];
LOCAL_VK u32 s_te3[256];
LOCAL_VK u32 s_te4[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_te0[i] = te0[i];
s_te1[i] = te1[i];
s_te2[i] = te2[i];
s_te3[i] = te3[i];
s_te4[i] = te4[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u32a *s_te0 = te0;
CONSTANT_AS u32a *s_te1 = te1;
CONSTANT_AS u32a *s_te2 = te2;
CONSTANT_AS u32a *s_te3 = te3;
CONSTANT_AS u32a *s_te4 = te4;
#endif
if (gid >= gid_max) return;
/**
* base
*/
u32 aes_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
COPY_PW (pws[gid]);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
pw_t tmp = PASTE_PW;
tmp.pw_len = apply_rules (rules_buf[il_pos].cmds, tmp.i, tmp.pw_len);
u32 w[64];
u32 w_len = tmp.pw_len;
for (u32 i = 0; i < 64; i++) w[i] = tmp.i[i];
sha1_ctx_t ctx0;
sha1_init (&ctx0);
sha1_update_swap (&ctx0, tmp.i, tmp.pw_len);
sha1_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (aes_key_len > 128)
{
k4 = ctx0.h[4];
sha1_ctx_t ctx;
sha1_init (&ctx);
sha1_update_swap (&ctx, padding, 1);
sha1_update_swap (&ctx, w, w_len);
sha1_final (&ctx);
k5 = ctx.h[0];
if (aes_key_len > 192)
{
k6 = ctx.h[1];
k7 = ctx.h[2];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (aes_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (aes_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[0]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[1]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[2]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[3])
};
// CT
u32 CT[4] = { 0 };
// aes
u32 ks[60] = { 0 };
if (aes_key_len == 128)
{
AES128_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES128_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else if (aes_key_len == 192)
{
AES192_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES192_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else
{
AES256_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES256_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
const u32 r0 = CT[0];
const u32 r1 = CT[1];
const u32 r2 = CT[2];
const u32 r3 = CT[3];
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

OpenCL/m14511_a1-pure.cl (new file, 415 lines)

@@ -0,0 +1,415 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_scalar.cl"
#include "inc_hash_sha1.cl"
#include "inc_cipher_aes.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14511_mxx (KERN_ATTR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* aes shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u32 s_te0[256];
LOCAL_VK u32 s_te1[256];
LOCAL_VK u32 s_te2[256];
LOCAL_VK u32 s_te3[256];
LOCAL_VK u32 s_te4[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_te0[i] = te0[i];
s_te1[i] = te1[i];
s_te2[i] = te2[i];
s_te3[i] = te3[i];
s_te4[i] = te4[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u32a *s_te0 = te0;
CONSTANT_AS u32a *s_te1 = te1;
CONSTANT_AS u32a *s_te2 = te2;
CONSTANT_AS u32a *s_te3 = te3;
CONSTANT_AS u32a *s_te4 = te4;
#endif
if (gid >= gid_max) return;
/**
* base
*/
u32 aes_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
sha1_ctx_t ctx0, ctx0_padding;
sha1_init (&ctx0);
u32 w[64] = { 0 };
u32 w_len = 0;
if (aes_key_len > 128)
{
w_len = pws[gid].pw_len;
for (u32 i = 0; i < 64; i++) w[i] = pws[gid].i[i];
ctx0_padding = ctx0;
sha1_update_swap (&ctx0_padding, padding, 1);
sha1_update_swap (&ctx0_padding, w, w_len);
}
sha1_update_global_swap (&ctx0, pws[gid].i, pws[gid].pw_len);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
sha1_ctx_t ctx = ctx0;
if (aes_key_len > 128)
{
w_len = combs_buf[il_pos].pw_len;
for (u32 i = 0; i < 64; i++) w[i] = combs_buf[il_pos].i[i];
}
sha1_update_global_swap (&ctx, combs_buf[il_pos].i, combs_buf[il_pos].pw_len);
sha1_final (&ctx);
const u32 k0 = ctx.h[0];
const u32 k1 = ctx.h[1];
const u32 k2 = ctx.h[2];
const u32 k3 = ctx.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (aes_key_len > 128)
{
k4 = ctx.h[4];
sha1_ctx_t ctx0_tmp = ctx0_padding;
sha1_update_swap (&ctx0_tmp, w, w_len);
sha1_final (&ctx0_tmp);
k5 = ctx0_tmp.h[0];
if (aes_key_len > 192)
{
k6 = ctx0_tmp.h[1];
k7 = ctx0_tmp.h[2];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (aes_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (aes_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[0]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[1]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[2]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[3])
};
// CT
u32 CT[4] = { 0 };
// aes
u32 ks[60] = { 0 };
if (aes_key_len == 128)
{
AES128_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES128_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else if (aes_key_len == 192)
{
AES192_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES192_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else
{
AES256_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES256_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
const u32 r0 = CT[0];
const u32 r1 = CT[1];
const u32 r2 = CT[2];
const u32 r3 = CT[3];
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14511_sxx (KERN_ATTR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* aes shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u32 s_te0[256];
LOCAL_VK u32 s_te1[256];
LOCAL_VK u32 s_te2[256];
LOCAL_VK u32 s_te3[256];
LOCAL_VK u32 s_te4[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_te0[i] = te0[i];
s_te1[i] = te1[i];
s_te2[i] = te2[i];
s_te3[i] = te3[i];
s_te4[i] = te4[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u32a *s_te0 = te0;
CONSTANT_AS u32a *s_te1 = te1;
CONSTANT_AS u32a *s_te2 = te2;
CONSTANT_AS u32a *s_te3 = te3;
CONSTANT_AS u32a *s_te4 = te4;
#endif
if (gid >= gid_max) return;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 aes_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
sha1_ctx_t ctx0, ctx0_padding;
sha1_init (&ctx0);
u32 w[64] = { 0 };
u32 w_len = 0;
if (aes_key_len > 128)
{
w_len = pws[gid].pw_len;
for (u32 i = 0; i < 64; i++) w[i] = pws[gid].i[i];
ctx0_padding = ctx0;
sha1_update_swap (&ctx0_padding, padding, 1);
sha1_update_swap (&ctx0_padding, w, w_len);
}
sha1_update_global_swap (&ctx0, pws[gid].i, pws[gid].pw_len);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
sha1_ctx_t ctx = ctx0;
if (aes_key_len > 128)
{
w_len = combs_buf[il_pos].pw_len;
for (u32 i = 0; i < 64; i++) w[i] = combs_buf[il_pos].i[i];
}
sha1_update_global_swap (&ctx, combs_buf[il_pos].i, combs_buf[il_pos].pw_len);
sha1_final (&ctx);
const u32 k0 = ctx.h[0];
const u32 k1 = ctx.h[1];
const u32 k2 = ctx.h[2];
const u32 k3 = ctx.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (aes_key_len > 128)
{
k4 = ctx.h[4];
sha1_ctx_t ctx0_tmp = ctx0_padding;
sha1_update_swap (&ctx0_tmp, w, w_len);
sha1_final (&ctx0_tmp);
k5 = ctx0_tmp.h[0];
if (aes_key_len > 192)
{
k6 = ctx0_tmp.h[1];
k7 = ctx0_tmp.h[2];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (aes_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (aes_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[0]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[1]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[2]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[3])
};
// CT
u32 CT[4] = { 0 };
// aes
u32 ks[60] = { 0 };
if (aes_key_len == 128)
{
AES128_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES128_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else if (aes_key_len == 192)
{
AES192_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES192_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else
{
AES256_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES256_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
const u32 r0 = CT[0];
const u32 r1 = CT[1];
const u32 r2 = CT[2];
const u32 r3 = CT[3];
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

OpenCL/m14511_a3-pure.cl (new file, 413 lines)

@@ -0,0 +1,413 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_scalar.cl"
#include "inc_hash_sha1.cl"
#include "inc_cipher_aes.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14511_mxx (KERN_ATTR_VECTOR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* aes shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u32 s_te0[256];
LOCAL_VK u32 s_te1[256];
LOCAL_VK u32 s_te2[256];
LOCAL_VK u32 s_te3[256];
LOCAL_VK u32 s_te4[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_te0[i] = te0[i];
s_te1[i] = te1[i];
s_te2[i] = te2[i];
s_te3[i] = te3[i];
s_te4[i] = te4[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u32a *s_te0 = te0;
CONSTANT_AS u32a *s_te1 = te1;
CONSTANT_AS u32a *s_te2 = te2;
CONSTANT_AS u32a *s_te3 = te3;
CONSTANT_AS u32a *s_te4 = te4;
#endif
if (gid >= gid_max) return;
/**
* base
*/
u32 aes_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
const u32 pw_len = pws[gid].pw_len;
u32x w[64] = { 0 };
for (u32 i = 0, idx = 0; i < pw_len; i += 4, idx += 1)
{
w[idx] = pws[gid].i[idx];
}
/**
* loop
*/
u32x w0l = w[0];
for (u32 il_pos = 0; il_pos < il_cnt; il_pos += VECT_SIZE)
{
const u32x w0r = words_buf_r[il_pos / VECT_SIZE];
const u32x w0 = w0l | w0r;
w[0] = w0;
u32x _w[64];
u32 _w_len = pw_len;
for (u32 i = 0; i < 64; i++) _w[i] = w[i];
sha1_ctx_t ctx0;
sha1_init (&ctx0);
sha1_update_swap (&ctx0, w, pw_len);
sha1_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (aes_key_len > 128)
{
k4 = ctx0.h[4];
sha1_ctx_t ctx;
sha1_init (&ctx);
sha1_update_swap(&ctx, padding, 1);
sha1_update_swap(&ctx, _w, _w_len);
sha1_final (&ctx);
k5 = ctx.h[0];
if (aes_key_len > 192)
{
k6 = ctx.h[1];
k7 = ctx.h[2];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (aes_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (aes_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[0]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[1]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[2]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[3])
};
// CT
u32 CT[4] = { 0 };
// aes
u32 ks[60] = { 0 };
if (aes_key_len == 128)
{
AES128_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES128_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else if (aes_key_len == 192)
{
AES192_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES192_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else
{
AES256_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES256_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
const u32 r0 = CT[0];
const u32 r1 = CT[1];
const u32 r2 = CT[2];
const u32 r3 = CT[3];
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14511_sxx (KERN_ATTR_VECTOR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* aes shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u32 s_te0[256];
LOCAL_VK u32 s_te1[256];
LOCAL_VK u32 s_te2[256];
LOCAL_VK u32 s_te3[256];
LOCAL_VK u32 s_te4[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_te0[i] = te0[i];
s_te1[i] = te1[i];
s_te2[i] = te2[i];
s_te3[i] = te3[i];
s_te4[i] = te4[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u32a *s_te0 = te0;
CONSTANT_AS u32a *s_te1 = te1;
CONSTANT_AS u32a *s_te2 = te2;
CONSTANT_AS u32a *s_te3 = te3;
CONSTANT_AS u32a *s_te4 = te4;
#endif
if (gid >= gid_max) return;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 aes_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
const u32 pw_len = pws[gid].pw_len;
u32x w[64] = { 0 };
for (u32 i = 0, idx = 0; i < pw_len; i += 4, idx += 1)
{
w[idx] = pws[gid].i[idx];
}
/**
* loop
*/
u32x w0l = w[0];
for (u32 il_pos = 0; il_pos < il_cnt; il_pos += VECT_SIZE)
{
const u32x w0r = words_buf_r[il_pos / VECT_SIZE];
const u32x w0 = w0l | w0r;
w[0] = w0;
u32x _w[64];
u32 _w_len = pw_len;
for (u32 i = 0; i < 64; i++) _w[i] = w[i];
sha1_ctx_t ctx0;
sha1_init (&ctx0);
sha1_update_swap(&ctx0, w, pw_len);
sha1_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (aes_key_len > 128)
{
k4 = ctx0.h[4];
sha1_ctx_t ctx;
sha1_init (&ctx);
sha1_update_swap(&ctx, padding, 1);
sha1_update_swap(&ctx, _w, _w_len);
sha1_final (&ctx);
k5 = ctx.h[0];
if (aes_key_len > 192)
{
k6 = ctx.h[1];
k7 = ctx.h[2];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (aes_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (aes_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[0]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[1]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[2]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[3])
};
// CT
u32 CT[4] = { 0 };
// aes
u32 ks[60] = { 0 };
if (aes_key_len == 128)
{
AES128_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES128_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else if (aes_key_len == 192)
{
AES192_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES192_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else
{
AES256_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES256_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
const u32 r0 = CT[0];
const u32 r1 = CT[1];
const u32 r2 = CT[2];
const u32 r3 = CT[3];
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

OpenCL/m14512_a0-pure.cl (new file, 324 lines)

@@ -0,0 +1,324 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_rp.h"
#include "inc_rp.cl"
#include "inc_scalar.cl"
#include "inc_hash_sha1.cl"
#include "inc_cipher_serpent.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14512_mxx (KERN_ATTR_RULES_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* base
*/
u32 serpent_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
COPY_PW (pws[gid]);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
pw_t tmp = PASTE_PW;
tmp.pw_len = apply_rules (rules_buf[il_pos].cmds, tmp.i, tmp.pw_len);
u32 w[64] = { 0 };
u32 w_len = tmp.pw_len;
for (u32 i = 0; i < 64; i++) w[i] = tmp.i[i];
sha1_ctx_t ctx0;
sha1_init (&ctx0);
sha1_update_swap (&ctx0, tmp.i, tmp.pw_len);
sha1_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (serpent_key_len > 128)
{
k4 = ctx0.h[4];
sha1_ctx_t ctx;
sha1_init (&ctx);
sha1_update_swap (&ctx, padding, 1);
sha1_update_swap (&ctx, w, w_len);
sha1_final (&ctx);
k5 = ctx.h[0];
if (serpent_key_len > 192)
{
k6 = ctx.h[1];
k7 = ctx.h[2];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (serpent_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (serpent_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// serpent
u32 ks[140] = { 0 };
if (serpent_key_len == 128)
{
serpent128_set_key (ks, ukey);
serpent128_encrypt (ks, iv, CT);
}
else if (serpent_key_len == 192)
{
serpent192_set_key (ks, ukey);
serpent192_encrypt (ks, iv, CT);
}
else
{
serpent256_set_key (ks, ukey);
serpent256_encrypt (ks, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14512_sxx (KERN_ATTR_RULES_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* base
*/
u32 serpent_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
COPY_PW (pws[gid]);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
pw_t tmp = PASTE_PW;
tmp.pw_len = apply_rules (rules_buf[il_pos].cmds, tmp.i, tmp.pw_len);
u32 w[64] = { 0 };
u32 w_len = tmp.pw_len;
for (u32 i = 0; i < 64; i++) w[i] = tmp.i[i];
sha1_ctx_t ctx0;
sha1_init (&ctx0);
sha1_update_swap (&ctx0, tmp.i, tmp.pw_len);
sha1_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (serpent_key_len > 128)
{
k4 = ctx0.h[4];
sha1_ctx_t ctx;
sha1_init (&ctx);
sha1_update_swap (&ctx, padding, 1);
sha1_update_swap (&ctx, w, w_len);
sha1_final (&ctx);
k5 = ctx.h[0];
if (serpent_key_len > 192)
{
k6 = ctx.h[1];
k7 = ctx.h[2];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (serpent_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (serpent_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// serpent
u32 ks[140] = { 0 };
if (serpent_key_len == 128)
{
serpent128_set_key (ks, ukey);
serpent128_encrypt (ks, iv, CT);
}
else if (serpent_key_len == 192)
{
serpent192_set_key (ks, ukey);
serpent192_encrypt (ks, iv, CT);
}
else
{
serpent256_set_key (ks, ukey);
serpent256_encrypt (ks, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

343
OpenCL/m14512_a1-pure.cl Normal file

@ -0,0 +1,343 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_scalar.cl"
#include "inc_hash_sha1.cl"
#include "inc_cipher_serpent.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14512_mxx (KERN_ATTR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* base
*/
u32 serpent_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
sha1_ctx_t ctx0, ctx0_padding;
sha1_init (&ctx0);
u32 w[64] = { 0 };
u32 w_len = 0;
if (serpent_key_len > 128)
{
w_len = pws[gid].pw_len;
for (u32 i = 0; i < 64; i++) w[i] = pws[gid].i[i];
ctx0_padding = ctx0;
sha1_update_swap (&ctx0_padding, padding, 1);
sha1_update_swap (&ctx0_padding, w, w_len);
}
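// ctx0_padding already covers "A" || left part, so per candidate only the right-hand part has to be appended before finalizing (see the loop below)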
sha1_update_global_swap (&ctx0, pws[gid].i, pws[gid].pw_len);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
sha1_ctx_t ctx = ctx0;
if (serpent_key_len > 128)
{
w_len = combs_buf[il_pos].pw_len;
for (u32 i = 0; i < 64; i++) w[i] = combs_buf[il_pos].i[i];
}
sha1_update_global_swap (&ctx, combs_buf[il_pos].i, combs_buf[il_pos].pw_len);
sha1_final (&ctx);
const u32 k0 = ctx.h[0];
const u32 k1 = ctx.h[1];
const u32 k2 = ctx.h[2];
const u32 k3 = ctx.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (serpent_key_len > 128)
{
k4 = ctx.h[4];
sha1_ctx_t ctx0_tmp = ctx0_padding;
sha1_update_swap (&ctx0_tmp, w, w_len);
sha1_final (&ctx0_tmp);
k5 = ctx0_tmp.h[0];
if (serpent_key_len > 192)
{
k6 = ctx0_tmp.h[1];
k7 = ctx0_tmp.h[2];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (serpent_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (serpent_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// serpent
u32 ks[140] = { 0 };
if (serpent_key_len == 128)
{
serpent128_set_key (ks, ukey);
serpent128_encrypt (ks, iv, CT);
}
else if (serpent_key_len == 192)
{
serpent192_set_key (ks, ukey);
serpent192_encrypt (ks, iv, CT);
}
else
{
serpent256_set_key (ks, ukey);
serpent256_encrypt (ks, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14512_sxx (KERN_ATTR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 serpent_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
sha1_ctx_t ctx0, ctx0_padding;
sha1_init (&ctx0);
u32 w[64] = { 0 };
u32 w_len = 0;
if (serpent_key_len > 128)
{
w_len = pws[gid].pw_len;
for (u32 i = 0; i < 64; i++) w[i] = pws[gid].i[i];
ctx0_padding = ctx0;
sha1_update_swap (&ctx0_padding, padding, 1);
sha1_update_swap (&ctx0_padding, w, w_len);
}
sha1_update_global_swap (&ctx0, pws[gid].i, pws[gid].pw_len);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
sha1_ctx_t ctx = ctx0;
if (serpent_key_len > 128)
{
w_len = combs_buf[il_pos].pw_len;
for (u32 i = 0; i < 64; i++) w[i] = combs_buf[il_pos].i[i];
}
sha1_update_global_swap (&ctx, combs_buf[il_pos].i, combs_buf[il_pos].pw_len);
sha1_final (&ctx);
const u32 k0 = ctx.h[0];
const u32 k1 = ctx.h[1];
const u32 k2 = ctx.h[2];
const u32 k3 = ctx.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (serpent_key_len > 128)
{
k4 = ctx.h[4];
sha1_ctx_t ctx0_tmp = ctx0_padding;
sha1_update_swap (&ctx0_tmp, w, w_len);
sha1_final (&ctx0_tmp);
k5 = ctx0_tmp.h[0];
if (serpent_key_len > 192)
{
k6 = ctx0_tmp.h[1];
k7 = ctx0_tmp.h[2];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (serpent_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (serpent_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// serpent
u32 ks[140] = { 0 };
if (serpent_key_len == 128)
{
serpent128_set_key (ks, ukey);
serpent128_encrypt (ks, iv, CT);
}
else if (serpent_key_len == 192)
{
serpent192_set_key (ks, ukey);
serpent192_encrypt (ks, iv, CT);
}
else
{
serpent256_set_key (ks, ukey);
serpent256_encrypt (ks, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

341
OpenCL/m14512_a3-pure.cl Normal file

@ -0,0 +1,341 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_scalar.cl"
#include "inc_hash_sha1.cl"
#include "inc_cipher_serpent.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14512_mxx (KERN_ATTR_VECTOR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* base
*/
u32 serpent_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
const u32 pw_len = pws[gid].pw_len;
u32x w[64] = { 0 };
for (u32 i = 0, idx = 0; i < pw_len; i += 4, idx += 1)
{
w[idx] = pws[gid].i[idx];
}
/**
* loop
*/
u32x w0l = w[0];
for (u32 il_pos = 0; il_pos < il_cnt; il_pos += VECT_SIZE)
{
const u32x w0r = words_buf_r[il_pos / VECT_SIZE];
const u32x w0 = w0l | w0r;
w[0] = w0;
u32x _w[64];
u32 _w_len = pw_len;
for (u32 i = 0; i < 64; i++) _w[i] = w[i];
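// keep a private copy of the full candidate so the "A" || password digest below can be fed from it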
sha1_ctx_t ctx0;
sha1_init (&ctx0);
sha1_update_swap (&ctx0, w, pw_len);
sha1_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (serpent_key_len > 128)
{
k4 = ctx0.h[4];
sha1_ctx_t ctx;
sha1_init (&ctx);
sha1_update_swap (&ctx, padding, 1);
sha1_update_swap (&ctx, _w, _w_len);
sha1_final (&ctx);
k5 = ctx.h[0];
if (serpent_key_len > 192)
{
k6 = ctx.h[1];
k7 = ctx.h[2];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (serpent_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (serpent_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// serpent
u32 ks[140] = { 0 };
if (serpent_key_len == 128)
{
serpent128_set_key (ks, ukey);
serpent128_encrypt (ks, iv, CT);
}
else if (serpent_key_len == 192)
{
serpent192_set_key (ks, ukey);
serpent192_encrypt (ks, iv, CT);
}
else
{
serpent256_set_key (ks, ukey);
serpent256_encrypt (ks, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14512_sxx (KERN_ATTR_VECTOR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 serpent_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
const u32 pw_len = pws[gid].pw_len;
u32x w[64] = { 0 };
for (u32 i = 0, idx = 0; i < pw_len; i += 4, idx += 1)
{
w[idx] = pws[gid].i[idx];
}
/**
* loop
*/
u32x w0l = w[0];
for (u32 il_pos = 0; il_pos < il_cnt; il_pos += VECT_SIZE)
{
const u32x w0r = words_buf_r[il_pos / VECT_SIZE];
const u32x w0 = w0l | w0r;
w[0] = w0;
u32x _w[64];
u32 _w_len = pw_len;
for (u32 i = 0; i < 64; i++) _w[i] = w[i];
sha1_ctx_t ctx0;
sha1_init (&ctx0);
sha1_update_swap (&ctx0, w, pw_len);
sha1_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (serpent_key_len > 128)
{
k4 = ctx0.h[4];
sha1_ctx_t ctx;
sha1_init (&ctx);
sha1_update_swap (&ctx, padding, 1);
sha1_update_swap (&ctx, _w, _w_len);
sha1_final (&ctx);
k5 = ctx.h[0];
if (serpent_key_len > 192)
{
k6 = ctx.h[1];
k7 = ctx.h[2];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (serpent_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (serpent_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// serpent
u32 ks[140] = { 0 };
if (serpent_key_len == 128)
{
serpent128_set_key (ks, ukey);
serpent128_encrypt (ks, iv, CT);
}
else if (serpent_key_len == 192)
{
serpent192_set_key (ks, ukey);
serpent192_encrypt (ks, iv, CT);
}
else
{
serpent256_set_key (ks, ukey);
serpent256_encrypt (ks, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

327
OpenCL/m14513_a0-pure.cl Normal file

@ -0,0 +1,327 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_rp.h"
#include "inc_rp.cl"
#include "inc_scalar.cl"
#include "inc_hash_sha1.cl"
#include "inc_cipher_twofish.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14513_mxx (KERN_ATTR_RULES_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* base
*/
u32 twofish_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
COPY_PW (pws[gid]);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
pw_t tmp = PASTE_PW;
tmp.pw_len = apply_rules (rules_buf[il_pos].cmds, tmp.i, tmp.pw_len);
u32 w[64] = { 0 };
u32 w_len = tmp.pw_len;
for (u32 i = 0; i < 64; i++) w[i] = tmp.i[i];
sha1_ctx_t ctx0;
sha1_init (&ctx0);
sha1_update_swap (&ctx0, tmp.i, tmp.pw_len);
sha1_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (twofish_key_len > 128)
{
k4 = ctx0.h[4];
sha1_ctx_t ctx;
sha1_init (&ctx);
sha1_update_swap (&ctx, padding, 1);
sha1_update_swap (&ctx, w, w_len);
sha1_final (&ctx);
k5 = ctx.h[0];
if (twofish_key_len > 192)
{
k6 = ctx.h[1];
k7 = ctx.h[2];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (twofish_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (twofish_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// twofish
u32 sk1[4] = { 0 };
u32 lk1[40] = { 0 };
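// sk1 holds the key-dependent s-box words and lk1 the 40 expanded round keys filled in by twofish*_set_key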
if (twofish_key_len == 128)
{
twofish128_set_key (sk1, lk1, ukey);
twofish128_encrypt (sk1, lk1, iv, CT);
}
else if (twofish_key_len == 192)
{
twofish192_set_key (sk1, lk1, ukey);
twofish192_encrypt (sk1, lk1, iv, CT);
}
else
{
twofish256_set_key (sk1, lk1, ukey);
twofish256_encrypt (sk1, lk1, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14513_sxx (KERN_ATTR_RULES_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* base
*/
u32 twofish_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
COPY_PW (pws[gid]);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
pw_t tmp = PASTE_PW;
tmp.pw_len = apply_rules (rules_buf[il_pos].cmds, tmp.i, tmp.pw_len);
u32 w[64] = { 0 };
u32 w_len = tmp.pw_len;
for (u32 i = 0; i < 64; i++) w[i] = tmp.i[i];
sha1_ctx_t ctx0;
sha1_init (&ctx0);
sha1_update_swap (&ctx0, tmp.i, tmp.pw_len);
sha1_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (twofish_key_len > 128)
{
k4 = ctx0.h[4];
sha1_ctx_t ctx;
sha1_init (&ctx);
sha1_update_swap (&ctx, padding, 1);
sha1_update_swap (&ctx, w, w_len);
sha1_final (&ctx);
k5 = ctx.h[0];
if (twofish_key_len > 192)
{
k6 = ctx.h[1];
k7 = ctx.h[2];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (twofish_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (twofish_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// twofish
u32 sk1[4] = { 0 };
u32 lk1[40] = { 0 };
if (twofish_key_len == 128)
{
twofish128_set_key (sk1, lk1, ukey);
twofish128_encrypt (sk1, lk1, iv, CT);
}
else if (twofish_key_len == 192)
{
twofish192_set_key (sk1, lk1, ukey);
twofish192_encrypt (sk1, lk1, iv, CT);
}
else
{
twofish256_set_key (sk1, lk1, ukey);
twofish256_encrypt (sk1, lk1, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

345
OpenCL/m14513_a1-pure.cl Normal file

@ -0,0 +1,345 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_scalar.cl"
#include "inc_hash_sha1.cl"
#include "inc_cipher_twofish.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14513_mxx (KERN_ATTR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* base
*/
u32 twofish_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
sha1_ctx_t ctx0, ctx0_padding;
sha1_init (&ctx0);
u32 w[64] = { 0 };
u32 w_len = 0;
if (twofish_key_len > 128)
{
w_len = pws[gid].pw_len;
for (u32 i = 0; i < 64; i++) w[i] = pws[gid].i[i];
ctx0_padding = ctx0;
sha1_update_swap (&ctx0_padding, padding, 1);
sha1_update_swap (&ctx0_padding, w, w_len);
}
sha1_update_global_swap (&ctx0, pws[gid].i, pws[gid].pw_len);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
sha1_ctx_t ctx = ctx0;
if (twofish_key_len > 128)
{
w_len = combs_buf[il_pos].pw_len;
for (u32 i = 0; i < 64; i++) w[i] = combs_buf[il_pos].i[i];
}
sha1_update_global_swap (&ctx, combs_buf[il_pos].i, combs_buf[il_pos].pw_len);
sha1_final (&ctx);
const u32 k0 = ctx.h[0];
const u32 k1 = ctx.h[1];
const u32 k2 = ctx.h[2];
const u32 k3 = ctx.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (twofish_key_len > 128)
{
k4 = ctx.h[4];
sha1_ctx_t ctx0_tmp = ctx0_padding;
sha1_update_swap (&ctx0_tmp, w, w_len);
sha1_final (&ctx0_tmp);
k5 = ctx0_tmp.h[0];
if (twofish_key_len > 192)
{
k6 = ctx0_tmp.h[1];
k7 = ctx0_tmp.h[2];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (twofish_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (twofish_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// twofish
u32 sk1[4] = { 0 };
u32 lk1[40] = { 0 };
if (twofish_key_len == 128)
{
twofish128_set_key (sk1, lk1, ukey);
twofish128_encrypt (sk1, lk1, iv, CT);
}
else if (twofish_key_len == 192)
{
twofish192_set_key (sk1, lk1, ukey);
twofish192_encrypt (sk1, lk1, iv, CT);
}
else
{
twofish256_set_key (sk1, lk1, ukey);
twofish256_encrypt (sk1, lk1, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14513_sxx (KERN_ATTR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 twofish_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
sha1_ctx_t ctx0, ctx0_padding;
sha1_init (&ctx0);
u32 w[64] = { 0 };
u32 w_len = 0;
if (twofish_key_len > 128)
{
w_len = pws[gid].pw_len;
for (u32 i = 0; i < 64; i++) w[i] = pws[gid].i[i];
ctx0_padding = ctx0;
sha1_update_swap (&ctx0_padding, padding, 1);
sha1_update_swap (&ctx0_padding, w, w_len);
}
sha1_update_global_swap (&ctx0, pws[gid].i, pws[gid].pw_len);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
sha1_ctx_t ctx = ctx0;
if (twofish_key_len > 128)
{
w_len = combs_buf[il_pos].pw_len;
for (u32 i = 0; i < 64; i++) w[i] = combs_buf[il_pos].i[i];
}
sha1_update_global_swap (&ctx, combs_buf[il_pos].i, combs_buf[il_pos].pw_len);
sha1_final (&ctx);
const u32 k0 = ctx.h[0];
const u32 k1 = ctx.h[1];
const u32 k2 = ctx.h[2];
const u32 k3 = ctx.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (twofish_key_len > 128)
{
k4 = ctx.h[4];
sha1_ctx_t ctx0_tmp = ctx0_padding;
sha1_update_swap (&ctx0_tmp, w, w_len);
sha1_final (&ctx0_tmp);
k5 = ctx0_tmp.h[0];
if (twofish_key_len > 192)
{
k6 = ctx0_tmp.h[1];
k7 = ctx0_tmp.h[2];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (twofish_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (twofish_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// twofish
u32 sk1[4] = { 0 };
u32 lk1[40] = { 0 };
if (twofish_key_len == 128)
{
twofish128_set_key (sk1, lk1, ukey);
twofish128_encrypt (sk1, lk1, iv, CT);
}
else if (twofish_key_len == 192)
{
twofish192_set_key (sk1, lk1, ukey);
twofish192_encrypt (sk1, lk1, iv, CT);
}
else
{
twofish256_set_key (sk1, lk1, ukey);
twofish256_encrypt (sk1, lk1, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

343
OpenCL/m14513_a3-pure.cl Normal file

@ -0,0 +1,343 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_scalar.cl"
#include "inc_hash_sha1.cl"
#include "inc_cipher_twofish.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14513_mxx (KERN_ATTR_VECTOR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* base
*/
u32 twofish_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
const u32 pw_len = pws[gid].pw_len;
u32x w[64] = { 0 };
for (u32 i = 0, idx = 0; i < pw_len; i += 4, idx += 1)
{
w[idx] = pws[gid].i[idx];
}
/**
* loop
*/
u32x w0l = w[0];
for (u32 il_pos = 0; il_pos < il_cnt; il_pos += VECT_SIZE)
{
const u32x w0r = words_buf_r[il_pos / VECT_SIZE];
const u32x w0 = w0l | w0r;
w[0] = w0;
u32x _w[64];
u32 _w_len = pw_len;
for (u32 i = 0; i < 64; i++) _w[i] = w[i];
sha1_ctx_t ctx0;
sha1_init (&ctx0);
sha1_update_swap (&ctx0, w, pw_len);
sha1_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (twofish_key_len > 128)
{
k4 = ctx0.h[4];
sha1_ctx_t ctx;
sha1_init (&ctx);
sha1_update_swap (&ctx, padding, 1);
sha1_update_swap (&ctx, _w, _w_len);
sha1_final (&ctx);
k5 = ctx.h[0];
if (twofish_key_len > 192)
{
k6 = ctx.h[1];
k7 = ctx.h[2];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (twofish_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (twofish_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// twofish
u32 sk1[4] = { 0 };
u32 lk1[40] = { 0 };
if (twofish_key_len == 128)
{
twofish128_set_key (sk1, lk1, ukey);
twofish128_encrypt (sk1, lk1, iv, CT);
}
else if (twofish_key_len == 192)
{
twofish192_set_key (sk1, lk1, ukey);
twofish192_encrypt (sk1, lk1, iv, CT);
}
else
{
twofish256_set_key (sk1, lk1, ukey);
twofish256_encrypt (sk1, lk1, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14513_sxx (KERN_ATTR_VECTOR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 twofish_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
const u32 pw_len = pws[gid].pw_len;
u32x w[64] = { 0 };
for (u32 i = 0, idx = 0; i < pw_len; i += 4, idx += 1)
{
w[idx] = pws[gid].i[idx];
}
/**
* loop
*/
u32x w0l = w[0];
for (u32 il_pos = 0; il_pos < il_cnt; il_pos += VECT_SIZE)
{
const u32x w0r = words_buf_r[il_pos / VECT_SIZE];
const u32x w0 = w0l | w0r;
w[0] = w0;
u32x _w[64];
u32 _w_len = pw_len;
for (u32 i = 0; i < 64; i++) _w[i] = w[i];
sha1_ctx_t ctx0;
sha1_init (&ctx0);
sha1_update_swap (&ctx0, w, pw_len);
sha1_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (twofish_key_len > 128)
{
k4 = ctx0.h[4];
sha1_ctx_t ctx;
sha1_init (&ctx);
sha1_update_swap (&ctx, padding, 1);
sha1_update_swap (&ctx, _w, _w_len);
sha1_final (&ctx);
k5 = ctx.h[0];
if (twofish_key_len > 192)
{
k6 = ctx.h[1];
k7 = ctx.h[2];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (twofish_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (twofish_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// twofish
u32 sk1[4] = { 0 };
u32 lk1[40] = { 0 };
if (twofish_key_len == 128)
{
twofish128_set_key (sk1, lk1, ukey);
twofish128_encrypt (sk1, lk1, iv, CT);
}
else if (twofish_key_len == 192)
{
twofish192_set_key (sk1, lk1, ukey);
twofish192_encrypt (sk1, lk1, iv, CT);
}
else
{
twofish256_set_key (sk1, lk1, ukey);
twofish256_encrypt (sk1, lk1, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

356
OpenCL/m14521_a0-pure.cl Normal file

@ -0,0 +1,356 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_rp.h"
#include "inc_rp.cl"
#include "inc_scalar.cl"
#include "inc_hash_sha256.cl"
#include "inc_cipher_aes.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14521_mxx (KERN_ATTR_RULES_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* aes shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u32 s_te0[256];
LOCAL_VK u32 s_te1[256];
LOCAL_VK u32 s_te2[256];
LOCAL_VK u32 s_te3[256];
LOCAL_VK u32 s_te4[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_te0[i] = te0[i];
s_te1[i] = te1[i];
s_te2[i] = te2[i];
s_te3[i] = te3[i];
s_te4[i] = te4[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u32a *s_te0 = te0;
CONSTANT_AS u32a *s_te1 = te1;
CONSTANT_AS u32a *s_te2 = te2;
CONSTANT_AS u32a *s_te3 = te3;
CONSTANT_AS u32a *s_te4 = te4;
#endif
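// with REAL_SHM the AES lookup tables are staged into local memory once per work-group, otherwise they are read from constant memory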
if (gid >= gid_max) return;
/**
* base
*/
u32 aes_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
COPY_PW (pws[gid]);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
pw_t tmp = PASTE_PW;
tmp.pw_len = apply_rules (rules_buf[il_pos].cmds, tmp.i, tmp.pw_len);
sha256_ctx_t ctx0;
sha256_init (&ctx0);
sha256_update_swap (&ctx0, tmp.i, tmp.pw_len);
sha256_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (aes_key_len > 128)
{
k4 = ctx0.h[4];
k5 = ctx0.h[5];
if (aes_key_len > 192)
{
k6 = ctx0.h[6];
k7 = ctx0.h[7];
}
}
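// a single SHA-256 digest already provides eight 32-bit words, so even 256-bit keys need no second "A" || password hash as in the SHA-1 kernels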
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (aes_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (aes_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
hc_swap32_S (salt_bufs[SALT_POS].salt_buf[0]),
hc_swap32_S (salt_bufs[SALT_POS].salt_buf[1]),
hc_swap32_S (salt_bufs[SALT_POS].salt_buf[2]),
hc_swap32_S (salt_bufs[SALT_POS].salt_buf[3])
};
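// unlike the serpent/twofish kernels the IV is byte-swapped on input and the ciphertext is compared unswapped, matching the word order the AES routines here work in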
// CT
u32 CT[4] = { 0 };
// aes
u32 ks[60] = { 0 };
if (aes_key_len == 128)
{
AES128_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES128_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else if (aes_key_len == 192)
{
AES192_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES192_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else
{
AES256_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES256_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
const u32 r0 = CT[0];
const u32 r1 = CT[1];
const u32 r2 = CT[2];
const u32 r3 = CT[3];
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14521_sxx (KERN_ATTR_RULES_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* aes shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u32 s_te0[256];
LOCAL_VK u32 s_te1[256];
LOCAL_VK u32 s_te2[256];
LOCAL_VK u32 s_te3[256];
LOCAL_VK u32 s_te4[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_te0[i] = te0[i];
s_te1[i] = te1[i];
s_te2[i] = te2[i];
s_te3[i] = te3[i];
s_te4[i] = te4[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u32a *s_te0 = te0;
CONSTANT_AS u32a *s_te1 = te1;
CONSTANT_AS u32a *s_te2 = te2;
CONSTANT_AS u32a *s_te3 = te3;
CONSTANT_AS u32a *s_te4 = te4;
#endif
if (gid >= gid_max) return;
/**
* base
*/
u32 aes_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
COPY_PW (pws[gid]);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
pw_t tmp = PASTE_PW;
tmp.pw_len = apply_rules (rules_buf[il_pos].cmds, tmp.i, tmp.pw_len);
sha256_ctx_t ctx0;
sha256_init (&ctx0);
sha256_update_swap (&ctx0, tmp.i, tmp.pw_len);
sha256_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (aes_key_len > 128)
{
k4 = ctx0.h[4];
k5 = ctx0.h[5];
if (aes_key_len > 192)
{
k6 = ctx0.h[6];
k7 = ctx0.h[7];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (aes_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (aes_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[0]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[1]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[2]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[3])
};
// CT
u32 CT[4] = { 0 };
// aes
u32 ks[60] = { 0 };
if (aes_key_len == 128)
{
AES128_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES128_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else if (aes_key_len == 192)
{
AES192_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES192_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else
{
AES256_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES256_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
const u32 r0 = CT[0];
const u32 r1 = CT[1];
const u32 r2 = CT[2];
const u32 r3 = CT[3];
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

345
OpenCL/m14521_a1-pure.cl Normal file

@ -0,0 +1,345 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_scalar.cl"
#include "inc_hash_sha256.cl"
#include "inc_cipher_aes.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14521_mxx (KERN_ATTR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* aes shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u32 s_te0[256];
LOCAL_VK u32 s_te1[256];
LOCAL_VK u32 s_te2[256];
LOCAL_VK u32 s_te3[256];
LOCAL_VK u32 s_te4[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_te0[i] = te0[i];
s_te1[i] = te1[i];
s_te2[i] = te2[i];
s_te3[i] = te3[i];
s_te4[i] = te4[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u32a *s_te0 = te0;
CONSTANT_AS u32a *s_te1 = te1;
CONSTANT_AS u32a *s_te2 = te2;
CONSTANT_AS u32a *s_te3 = te3;
CONSTANT_AS u32a *s_te4 = te4;
#endif
if (gid >= gid_max) return;
/**
* base
*/
u32 aes_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
sha256_ctx_t ctx0;
sha256_init (&ctx0);
sha256_update_global_swap (&ctx0, pws[gid].i, pws[gid].pw_len);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
sha256_ctx_t ctx = ctx0;
sha256_update_global_swap (&ctx, combs_buf[il_pos].i, combs_buf[il_pos].pw_len);
sha256_final (&ctx);
const u32 k0 = ctx.h[0];
const u32 k1 = ctx.h[1];
const u32 k2 = ctx.h[2];
const u32 k3 = ctx.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (aes_key_len > 128)
{
k4 = ctx.h[4];
k5 = ctx.h[5];
if (aes_key_len > 192)
{
k6 = ctx.h[6];
k7 = ctx.h[7];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (aes_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (aes_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[0]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[1]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[2]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[3])
};
// CT
u32 CT[4] = { 0 };
// aes
u32 ks[60] = { 0 };
if (aes_key_len == 128)
{
AES128_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES128_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else if (aes_key_len == 192)
{
AES192_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES192_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else
{
AES256_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES256_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
const u32 r0 = CT[0];
const u32 r1 = CT[1];
const u32 r2 = CT[2];
const u32 r3 = CT[3];
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14521_sxx (KERN_ATTR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* aes shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u32 s_te0[256];
LOCAL_VK u32 s_te1[256];
LOCAL_VK u32 s_te2[256];
LOCAL_VK u32 s_te3[256];
LOCAL_VK u32 s_te4[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_te0[i] = te0[i];
s_te1[i] = te1[i];
s_te2[i] = te2[i];
s_te3[i] = te3[i];
s_te4[i] = te4[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u32a *s_te0 = te0;
CONSTANT_AS u32a *s_te1 = te1;
CONSTANT_AS u32a *s_te2 = te2;
CONSTANT_AS u32a *s_te3 = te3;
CONSTANT_AS u32a *s_te4 = te4;
#endif
if (gid >= gid_max) return;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 aes_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
sha256_ctx_t ctx0;
sha256_init (&ctx0);
sha256_update_global_swap (&ctx0, pws[gid].i, pws[gid].pw_len);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
sha256_ctx_t ctx = ctx0;
sha256_update_global_swap (&ctx, combs_buf[il_pos].i, combs_buf[il_pos].pw_len);
sha256_final (&ctx);
const u32 k0 = ctx.h[0];
const u32 k1 = ctx.h[1];
const u32 k2 = ctx.h[2];
const u32 k3 = ctx.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (aes_key_len > 128)
{
k4 = ctx.h[4];
k5 = ctx.h[5];
if (aes_key_len > 192)
{
k6 = ctx.h[6];
k7 = ctx.h[7];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (aes_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (aes_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[0]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[1]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[2]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[3])
};
// CT
u32 CT[4] = { 0 };
// aes
u32 ks[60] = { 0 };
if (aes_key_len == 128)
{
AES128_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES128_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else if (aes_key_len == 192)
{
AES192_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES192_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else
{
AES256_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES256_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
const u32 r0 = CT[0];
const u32 r1 = CT[1];
const u32 r2 = CT[2];
const u32 r3 = CT[3];
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

371
OpenCL/m14521_a3-pure.cl Normal file

@ -0,0 +1,371 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_scalar.cl"
#include "inc_hash_sha256.cl"
#include "inc_cipher_aes.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14521_mxx (KERN_ATTR_VECTOR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* aes shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u32 s_te0[256];
LOCAL_VK u32 s_te1[256];
LOCAL_VK u32 s_te2[256];
LOCAL_VK u32 s_te3[256];
LOCAL_VK u32 s_te4[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_te0[i] = te0[i];
s_te1[i] = te1[i];
s_te2[i] = te2[i];
s_te3[i] = te3[i];
s_te4[i] = te4[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u32a *s_te0 = te0;
CONSTANT_AS u32a *s_te1 = te1;
CONSTANT_AS u32a *s_te2 = te2;
CONSTANT_AS u32a *s_te3 = te3;
CONSTANT_AS u32a *s_te4 = te4;
#endif
if (gid >= gid_max) return;
/**
* base
*/
u32 aes_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
const u32 pw_len = pws[gid].pw_len;
u32x w[64] = { 0 };
for (u32 i = 0, idx = 0; i < pw_len; i += 4, idx += 1)
{
w[idx] = pws[gid].i[idx];
}
/**
* loop
*/
u32x w0l = w[0];
for (u32 il_pos = 0; il_pos < il_cnt; il_pos += VECT_SIZE)
{
const u32x w0r = words_buf_r[il_pos / VECT_SIZE];
const u32x w0 = w0l | w0r;
w[0] = w0;
sha256_ctx_t ctx0;
sha256_init (&ctx0);
sha256_update_swap (&ctx0, w, pw_len);
sha256_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (aes_key_len > 128)
{
k4 = ctx0.h[4];
k5 = ctx0.h[5];
if (aes_key_len > 192)
{
k6 = ctx0.h[6];
k7 = ctx0.h[7];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (aes_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (aes_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[0]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[1]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[2]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[3])
};
// CT
u32 CT[4] = { 0 };
// aes
u32 ks[60] = { 0 };
if (aes_key_len == 128)
{
AES128_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES128_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else if (aes_key_len == 192)
{
AES192_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES192_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else
{
AES256_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES256_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
const u32 r0 = CT[0];
const u32 r1 = CT[1];
const u32 r2 = CT[2];
const u32 r3 = CT[3];
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14521_sxx (KERN_ATTR_VECTOR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* aes shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u32 s_te0[256];
LOCAL_VK u32 s_te1[256];
LOCAL_VK u32 s_te2[256];
LOCAL_VK u32 s_te3[256];
LOCAL_VK u32 s_te4[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_te0[i] = te0[i];
s_te1[i] = te1[i];
s_te2[i] = te2[i];
s_te3[i] = te3[i];
s_te4[i] = te4[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u32a *s_te0 = te0;
CONSTANT_AS u32a *s_te1 = te1;
CONSTANT_AS u32a *s_te2 = te2;
CONSTANT_AS u32a *s_te3 = te3;
CONSTANT_AS u32a *s_te4 = te4;
#endif
if (gid >= gid_max) return;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 aes_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
const u32 pw_len = pws[gid].pw_len;
u32x w[64] = { 0 };
for (u32 i = 0, idx = 0; i < pw_len; i += 4, idx += 1)
{
w[idx] = pws[gid].i[idx];
}
/**
* loop
*/
u32x w0l = w[0];
for (u32 il_pos = 0; il_pos < il_cnt; il_pos += VECT_SIZE)
{
const u32x w0r = words_buf_r[il_pos / VECT_SIZE];
const u32x w0 = w0l | w0r;
w[0] = w0;
sha256_ctx_t ctx0;
sha256_init (&ctx0);
sha256_update_swap (&ctx0, w, pw_len);
sha256_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (aes_key_len > 128)
{
k4 = ctx0.h[4];
k5 = ctx0.h[5];
if (aes_key_len > 192)
{
k6 = ctx0.h[6];
k7 = ctx0.h[7];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (aes_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (aes_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[0]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[1]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[2]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[3])
};
// CT
u32 CT[4] = { 0 };
// aes
u32 ks[60] = { 0 };
if (aes_key_len == 128)
{
AES128_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES128_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else if (aes_key_len == 192)
{
AES192_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES192_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else
{
AES256_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES256_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
const u32 r0 = CT[0];
const u32 r1 = CT[1];
const u32 r2 = CT[2];
const u32 r3 = CT[3];
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

283
OpenCL/m14522_a0-pure.cl Normal file

@ -0,0 +1,283 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_rp.h"
#include "inc_rp.cl"
#include "inc_scalar.cl"
#include "inc_hash_sha256.cl"
#include "inc_cipher_serpent.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14522_mxx (KERN_ATTR_RULES_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* base
*/
u32 serpent_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
COPY_PW (pws[gid]);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
pw_t tmp = PASTE_PW;
tmp.pw_len = apply_rules (rules_buf[il_pos].cmds, tmp.i, tmp.pw_len);
sha256_ctx_t ctx0;
sha256_init (&ctx0);
sha256_update_swap (&ctx0, tmp.i, tmp.pw_len);
sha256_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (serpent_key_len > 128)
{
k4 = ctx0.h[4];
k5 = ctx0.h[5];
if (serpent_key_len > 192)
{
k6 = ctx0.h[6];
k7 = ctx0.h[7];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (serpent_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (serpent_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// serpent
u32 ks[140] = { 0 };
if (serpent_key_len == 128)
{
serpent128_set_key (ks, ukey);
serpent128_encrypt (ks, iv, CT);
}
else if (serpent_key_len == 192)
{
serpent192_set_key (ks, ukey);
serpent192_encrypt (ks, iv, CT);
}
else
{
serpent256_set_key (ks, ukey);
serpent256_encrypt (ks, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14522_sxx (KERN_ATTR_RULES_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* base
*/
u32 serpent_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
COPY_PW (pws[gid]);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
pw_t tmp = PASTE_PW;
tmp.pw_len = apply_rules (rules_buf[il_pos].cmds, tmp.i, tmp.pw_len);
sha256_ctx_t ctx0;
sha256_init (&ctx0);
sha256_update_swap (&ctx0, tmp.i, tmp.pw_len);
sha256_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (serpent_key_len > 128)
{
k4 = ctx0.h[4];
k5 = ctx0.h[5];
if (serpent_key_len > 192)
{
k6 = ctx0.h[6];
k7 = ctx0.h[7];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (serpent_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (serpent_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// serpent
u32 ks[140] = { 0 };
if (serpent_key_len == 128)
{
serpent128_set_key (ks, ukey);
serpent128_encrypt (ks, iv, CT);
}
else if (serpent_key_len == 192)
{
serpent192_set_key (ks, ukey);
serpent192_encrypt (ks, iv, CT);
}
else
{
serpent256_set_key (ks, ukey);
serpent256_encrypt (ks, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

273
OpenCL/m14522_a1-pure.cl Normal file

@ -0,0 +1,273 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_scalar.cl"
#include "inc_hash_sha256.cl"
#include "inc_cipher_serpent.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14522_mxx (KERN_ATTR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* base
*/
u32 serpent_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
sha256_ctx_t ctx0;
sha256_init (&ctx0);
sha256_update_global_swap (&ctx0, pws[gid].i, pws[gid].pw_len);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
sha256_ctx_t ctx = ctx0;
sha256_update_global_swap (&ctx, combs_buf[il_pos].i, combs_buf[il_pos].pw_len);
sha256_final (&ctx);
const u32 k0 = ctx.h[0];
const u32 k1 = ctx.h[1];
const u32 k2 = ctx.h[2];
const u32 k3 = ctx.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (serpent_key_len > 128)
{
k4 = ctx.h[4];
k5 = ctx.h[5];
if (serpent_key_len > 192)
{
k6 = ctx.h[6];
k7 = ctx.h[7];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (serpent_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (serpent_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// serpent
u32 ks[140] = { 0 };
if (serpent_key_len == 128)
{
serpent128_set_key (ks, ukey);
serpent128_encrypt (ks, iv, CT);
}
else if (serpent_key_len == 192)
{
serpent192_set_key (ks, ukey);
serpent192_encrypt (ks, iv, CT);
}
else
{
serpent256_set_key (ks, ukey);
serpent256_encrypt (ks, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14522_sxx (KERN_ATTR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 serpent_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
sha256_ctx_t ctx0;
sha256_init (&ctx0);
sha256_update_global_swap (&ctx0, pws[gid].i, pws[gid].pw_len);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
sha256_ctx_t ctx = ctx0;
sha256_update_global_swap (&ctx, combs_buf[il_pos].i, combs_buf[il_pos].pw_len);
sha256_final (&ctx);
const u32 k0 = ctx.h[0];
const u32 k1 = ctx.h[1];
const u32 k2 = ctx.h[2];
const u32 k3 = ctx.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (serpent_key_len > 128)
{
k4 = ctx.h[4];
k5 = ctx.h[5];
if (serpent_key_len > 192)
{
k6 = ctx.h[6];
k7 = ctx.h[7];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (serpent_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (serpent_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// serpent
u32 ks[140] = { 0 };
if (serpent_key_len == 128)
{
serpent128_set_key (ks, ukey);
serpent128_encrypt (ks, iv, CT);
}
else if (serpent_key_len == 192)
{
serpent192_set_key (ks, ukey);
serpent192_encrypt (ks, iv, CT);
}
else
{
serpent256_set_key (ks, ukey);
serpent256_encrypt (ks, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

299
OpenCL/m14522_a3-pure.cl Normal file

@ -0,0 +1,299 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_scalar.cl"
#include "inc_hash_sha256.cl"
#include "inc_cipher_serpent.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14522_mxx (KERN_ATTR_VECTOR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* base
*/
u32 serpent_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
const u32 pw_len = pws[gid].pw_len;
u32x w[64] = { 0 };
for (u32 i = 0, idx = 0; i < pw_len; i += 4, idx += 1)
{
w[idx] = pws[gid].i[idx];
}
/**
* loop
*/
u32x w0l = w[0];
for (u32 il_pos = 0; il_pos < il_cnt; il_pos += VECT_SIZE)
{
const u32x w0r = words_buf_r[il_pos / VECT_SIZE];
const u32x w0 = w0l | w0r;
w[0] = w0;
sha256_ctx_t ctx0;
sha256_init (&ctx0);
sha256_update_swap (&ctx0, w, pw_len);
sha256_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (serpent_key_len > 128)
{
k4 = ctx0.h[4];
k5 = ctx0.h[5];
if (serpent_key_len > 192)
{
k6 = ctx0.h[6];
k7 = ctx0.h[7];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (serpent_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (serpent_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// serpent
u32 ks[140] = { 0 };
if (serpent_key_len == 128)
{
serpent128_set_key (ks, ukey);
serpent128_encrypt (ks, iv, CT);
}
else if (serpent_key_len == 192)
{
serpent192_set_key (ks, ukey);
serpent192_encrypt (ks, iv, CT);
}
else
{
serpent256_set_key (ks, ukey);
serpent256_encrypt (ks, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14522_sxx (KERN_ATTR_VECTOR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 serpent_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
const u32 pw_len = pws[gid].pw_len;
u32x w[64] = { 0 };
for (u32 i = 0, idx = 0; i < pw_len; i += 4, idx += 1)
{
w[idx] = pws[gid].i[idx];
}
/**
* loop
*/
u32x w0l = w[0];
for (u32 il_pos = 0; il_pos < il_cnt; il_pos += VECT_SIZE)
{
const u32x w0r = words_buf_r[il_pos / VECT_SIZE];
const u32x w0 = w0l | w0r;
w[0] = w0;
sha256_ctx_t ctx0;
sha256_init (&ctx0);
sha256_update_swap (&ctx0, w, pw_len);
sha256_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (serpent_key_len > 128)
{
k4 = ctx0.h[4];
k5 = ctx0.h[5];
if (serpent_key_len > 192)
{
k6 = ctx0.h[6];
k7 = ctx0.h[7];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (serpent_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (serpent_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// serpent
u32 ks[140] = { 0 };
if (serpent_key_len == 128)
{
serpent128_set_key (ks, ukey);
serpent128_encrypt (ks, iv, CT);
}
else if (serpent_key_len == 192)
{
serpent192_set_key (ks, ukey);
serpent192_encrypt (ks, iv, CT);
}
else
{
serpent256_set_key (ks, ukey);
serpent256_encrypt (ks, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

285
OpenCL/m14523_a0-pure.cl Normal file

@ -0,0 +1,285 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_rp.h"
#include "inc_rp.cl"
#include "inc_scalar.cl"
#include "inc_hash_sha256.cl"
#include "inc_cipher_twofish.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14523_mxx (KERN_ATTR_RULES_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* base
*/
u32 twofish_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
COPY_PW (pws[gid]);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
pw_t tmp = PASTE_PW;
tmp.pw_len = apply_rules (rules_buf[il_pos].cmds, tmp.i, tmp.pw_len);
sha256_ctx_t ctx0;
sha256_init (&ctx0);
sha256_update_swap (&ctx0, tmp.i, tmp.pw_len);
sha256_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (twofish_key_len > 128)
{
k4 = ctx0.h[4];
k5 = ctx0.h[5];
if (twofish_key_len > 192)
{
k6 = ctx0.h[6];
k7 = ctx0.h[7];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (twofish_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (twofish_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// twofish
u32 sk1[4] = { 0 };
u32 lk1[40] = { 0 };
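// sk1 receives the key-dependent S-box material and lk1 the 40 expanded round subkeys produced by twofish*_set_key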
if (twofish_key_len == 128)
{
twofish128_set_key (sk1, lk1, ukey);
twofish128_encrypt (sk1, lk1, iv, CT);
}
else if (twofish_key_len == 192)
{
twofish192_set_key (sk1, lk1, ukey);
twofish192_encrypt (sk1, lk1, iv, CT);
}
else
{
twofish256_set_key (sk1, lk1, ukey);
twofish256_encrypt (sk1, lk1, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14523_sxx (KERN_ATTR_RULES_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* base
*/
u32 twofish_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
COPY_PW (pws[gid]);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
pw_t tmp = PASTE_PW;
tmp.pw_len = apply_rules (rules_buf[il_pos].cmds, tmp.i, tmp.pw_len);
sha256_ctx_t ctx0;
sha256_init (&ctx0);
sha256_update_swap (&ctx0, tmp.i, tmp.pw_len);
sha256_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (twofish_key_len > 128)
{
k4 = ctx0.h[4];
k5 = ctx0.h[5];
if (twofish_key_len > 192)
{
k6 = ctx0.h[6];
k7 = ctx0.h[7];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (twofish_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (twofish_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// twofish
u32 sk1[4] = { 0 };
u32 lk1[40] = { 0 };
if (twofish_key_len == 128)
{
twofish128_set_key (sk1, lk1, ukey);
twofish128_encrypt (sk1, lk1, iv, CT);
}
else if (twofish_key_len == 192)
{
twofish192_set_key (sk1, lk1, ukey);
twofish192_encrypt (sk1, lk1, iv, CT);
}
else
{
twofish256_set_key (sk1, lk1, ukey);
twofish256_encrypt (sk1, lk1, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

OpenCL/m14523_a1-pure.cl (new file, 275 lines)
@@ -0,0 +1,275 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_scalar.cl"
#include "inc_hash_sha256.cl"
#include "inc_cipher_twofish.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14523_mxx (KERN_ATTR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* base
*/
u32 twofish_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
sha256_ctx_t ctx0;
sha256_init (&ctx0);
sha256_update_global_swap (&ctx0, pws[gid].i, pws[gid].pw_len);
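// -a 1 (combinator) kernels: the left word is hashed once up front; each right word is appended to a copy of this context per iteration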
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
sha256_ctx_t ctx = ctx0;
sha256_update_global_swap (&ctx, combs_buf[il_pos].i, combs_buf[il_pos].pw_len);
sha256_final (&ctx);
const u32 k0 = ctx.h[0];
const u32 k1 = ctx.h[1];
const u32 k2 = ctx.h[2];
const u32 k3 = ctx.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (twofish_key_len > 128)
{
k4 = ctx.h[4];
k5 = ctx.h[5];
if (twofish_key_len > 192)
{
k6 = ctx.h[6];
k7 = ctx.h[7];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (twofish_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (twofish_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// twofish
u32 sk1[4] = { 0 };
u32 lk1[40] = { 0 };
if (twofish_key_len == 128)
{
twofish128_set_key (sk1, lk1, ukey);
twofish128_encrypt (sk1, lk1, iv, CT);
}
else if (twofish_key_len == 192)
{
twofish192_set_key (sk1, lk1, ukey);
twofish192_encrypt (sk1, lk1, iv, CT);
}
else
{
twofish256_set_key (sk1, lk1, ukey);
twofish256_encrypt (sk1, lk1, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14523_sxx (KERN_ATTR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 twofish_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
sha256_ctx_t ctx0;
sha256_init (&ctx0);
sha256_update_global_swap (&ctx0, pws[gid].i, pws[gid].pw_len);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
sha256_ctx_t ctx = ctx0;
sha256_update_global_swap (&ctx, combs_buf[il_pos].i, combs_buf[il_pos].pw_len);
sha256_final (&ctx);
const u32 k0 = ctx.h[0];
const u32 k1 = ctx.h[1];
const u32 k2 = ctx.h[2];
const u32 k3 = ctx.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (twofish_key_len > 128)
{
k4 = ctx.h[4];
k5 = ctx.h[5];
if (twofish_key_len > 192)
{
k6 = ctx.h[6];
k7 = ctx.h[7];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (twofish_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (twofish_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// twofish
u32 sk1[4] = { 0 };
u32 lk1[40] = { 0 };
if (twofish_key_len == 128)
{
twofish128_set_key (sk1, lk1, ukey);
twofish128_encrypt (sk1, lk1, iv, CT);
}
else if (twofish_key_len == 192)
{
twofish192_set_key (sk1, lk1, ukey);
twofish192_encrypt (sk1, lk1, iv, CT);
}
else
{
twofish256_set_key (sk1, lk1, ukey);
twofish256_encrypt (sk1, lk1, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

OpenCL/m14523_a3-pure.cl (new file, 301 lines)
@@ -0,0 +1,301 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_scalar.cl"
#include "inc_hash_sha256.cl"
#include "inc_cipher_twofish.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14523_mxx (KERN_ATTR_VECTOR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* base
*/
u32 twofish_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
const u32 pw_len = pws[gid].pw_len;
u32x w[64] = { 0 };
for (u32 i = 0, idx = 0; i < pw_len; i += 4, idx += 1)
{
w[idx] = pws[gid].i[idx];
}
/**
* loop
*/
u32x w0l = w[0];
for (u32 il_pos = 0; il_pos < il_cnt; il_pos += VECT_SIZE)
{
const u32x w0r = words_buf_r[il_pos / VECT_SIZE];
const u32x w0 = w0l | w0r;
w[0] = w0;
sha256_ctx_t ctx0;
sha256_init (&ctx0);
sha256_update_swap (&ctx0, w, pw_len);
sha256_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (twofish_key_len > 128)
{
k4 = ctx0.h[4];
k5 = ctx0.h[5];
if (twofish_key_len > 192)
{
k6 = ctx0.h[6];
k7 = ctx0.h[7];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (twofish_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (twofish_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// twofish
u32 sk1[4] = { 0 };
u32 lk1[40] = { 0 };
if (twofish_key_len == 128)
{
twofish128_set_key (sk1, lk1, ukey);
twofish128_encrypt (sk1, lk1, iv, CT);
}
else if (twofish_key_len == 192)
{
twofish192_set_key (sk1, lk1, ukey);
twofish192_encrypt (sk1, lk1, iv, CT);
}
else
{
twofish256_set_key (sk1, lk1, ukey);
twofish256_encrypt (sk1, lk1, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14523_sxx (KERN_ATTR_VECTOR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 twofish_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
const u32 pw_len = pws[gid].pw_len;
u32x w[64] = { 0 };
for (u32 i = 0, idx = 0; i < pw_len; i += 4, idx += 1)
{
w[idx] = pws[gid].i[idx];
}
/**
* loop
*/
u32x w0l = w[0];
for (u32 il_pos = 0; il_pos < il_cnt; il_pos += VECT_SIZE)
{
const u32x w0r = words_buf_r[il_pos / VECT_SIZE];
const u32x w0 = w0l | w0r;
w[0] = w0;
sha256_ctx_t ctx0;
sha256_init (&ctx0);
sha256_update_swap (&ctx0, w, pw_len);
sha256_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (twofish_key_len > 128)
{
k4 = ctx0.h[4];
k5 = ctx0.h[5];
if (twofish_key_len > 192)
{
k6 = ctx0.h[6];
k7 = ctx0.h[7];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (twofish_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (twofish_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// twofish
u32 sk1[4] = { 0 };
u32 lk1[40] = { 0 };
if (twofish_key_len == 128)
{
twofish128_set_key (sk1, lk1, ukey);
twofish128_encrypt (sk1, lk1, iv, CT);
}
else if (twofish_key_len == 192)
{
twofish192_set_key (sk1, lk1, ukey);
twofish192_encrypt (sk1, lk1, iv, CT);
}
else
{
twofish256_set_key (sk1, lk1, ukey);
twofish256_encrypt (sk1, lk1, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

OpenCL/m14531_a0-pure.cl (new file, 355 lines)
@@ -0,0 +1,355 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_rp.h"
#include "inc_rp.cl"
#include "inc_scalar.cl"
#include "inc_hash_sha512.cl"
#include "inc_cipher_aes.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14531_mxx (KERN_ATTR_RULES_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* aes shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u32 s_te0[256];
LOCAL_VK u32 s_te1[256];
LOCAL_VK u32 s_te2[256];
LOCAL_VK u32 s_te3[256];
LOCAL_VK u32 s_te4[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_te0[i] = te0[i];
s_te1[i] = te1[i];
s_te2[i] = te2[i];
s_te3[i] = te3[i];
s_te4[i] = te4[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u32a *s_te0 = te0;
CONSTANT_AS u32a *s_te1 = te1;
CONSTANT_AS u32a *s_te2 = te2;
CONSTANT_AS u32a *s_te3 = te3;
CONSTANT_AS u32a *s_te4 = te4;
#endif
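// with REAL_SHM the AES T-tables are staged in local memory for faster lookups; otherwise they are read from constant memory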
if (gid >= gid_max) return;
/**
* base
*/
u32 aes_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
COPY_PW (pws[gid]);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
pw_t tmp = PASTE_PW;
tmp.pw_len = apply_rules (rules_buf[il_pos].cmds, tmp.i, tmp.pw_len);
sha512_ctx_t ctx0;
sha512_init (&ctx0);
sha512_update_swap (&ctx0, tmp.i, tmp.pw_len);
sha512_final (&ctx0);
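// SHA-512 state words are 64-bit, so each is split into high/low 32-bit halves to build the AES user key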
const u32 k0 = h32_from_64_S (ctx0.h[0]);
const u32 k1 = l32_from_64_S (ctx0.h[0]);
const u32 k2 = h32_from_64_S (ctx0.h[1]);
const u32 k3 = l32_from_64_S (ctx0.h[1]);
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (aes_key_len > 128)
{
k4 = h32_from_64_S (ctx0.h[2]);
k5 = l32_from_64_S (ctx0.h[2]);
if (aes_key_len > 192)
{
k6 = h32_from_64_S (ctx0.h[3]);
k7 = l32_from_64_S (ctx0.h[3]);
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (aes_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (aes_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[0]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[1]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[2]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[3])
};
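// note: the AES kernels byte-swap the salt words here and compare the ciphertext without a final swap, unlike the Serpent/Twofish kernels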
// CT
u32 CT[4] = { 0 };
// aes
u32 ks[60] = { 0 };
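// 60 u32 words is the AES-256 key schedule size (15 round keys x 4 words); the 128/192-bit schedules use a prefix of this buffer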
if (aes_key_len == 128)
{
AES128_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES128_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else if (aes_key_len == 192)
{
AES192_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES192_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else
{
AES256_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES256_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
const u32 r0 = CT[0];
const u32 r1 = CT[1];
const u32 r2 = CT[2];
const u32 r3 = CT[3];
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14531_sxx (KERN_ATTR_RULES_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* aes shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u32 s_te0[256];
LOCAL_VK u32 s_te1[256];
LOCAL_VK u32 s_te2[256];
LOCAL_VK u32 s_te3[256];
LOCAL_VK u32 s_te4[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_te0[i] = te0[i];
s_te1[i] = te1[i];
s_te2[i] = te2[i];
s_te3[i] = te3[i];
s_te4[i] = te4[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u32a *s_te0 = te0;
CONSTANT_AS u32a *s_te1 = te1;
CONSTANT_AS u32a *s_te2 = te2;
CONSTANT_AS u32a *s_te3 = te3;
CONSTANT_AS u32a *s_te4 = te4;
#endif
if (gid >= gid_max) return;
/**
* base
*/
u32 aes_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
COPY_PW (pws[gid]);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
pw_t tmp = PASTE_PW;
tmp.pw_len = apply_rules (rules_buf[il_pos].cmds, tmp.i, tmp.pw_len);
sha512_ctx_t ctx0;
sha512_init (&ctx0);
sha512_update_swap (&ctx0, tmp.i, tmp.pw_len);
sha512_final (&ctx0);
const u32 k0 = h32_from_64_S (ctx0.h[0]);
const u32 k1 = l32_from_64_S (ctx0.h[0]);
const u32 k2 = h32_from_64_S (ctx0.h[1]);
const u32 k3 = l32_from_64_S (ctx0.h[1]);
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (aes_key_len > 128)
{
k4 = h32_from_64_S (ctx0.h[2]);
k5 = l32_from_64_S (ctx0.h[2]);
if (aes_key_len > 192)
{
k6 = h32_from_64_S (ctx0.h[3]);
k7 = l32_from_64_S (ctx0.h[3]);
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (aes_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (aes_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[0]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[1]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[2]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[3])
};
// CT
u32 CT[4] = { 0 };
// aes
u32 ks[60] = { 0 };
if (aes_key_len == 128)
{
AES128_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES128_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else if (aes_key_len == 192)
{
AES192_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES192_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else
{
AES256_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES256_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
const u32 r0 = CT[0];
const u32 r1 = CT[1];
const u32 r2 = CT[2];
const u32 r3 = CT[3];
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

OpenCL/m14531_a1-pure.cl (new file, 345 lines)
@@ -0,0 +1,345 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_scalar.cl"
#include "inc_hash_sha512.cl"
#include "inc_cipher_aes.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14531_mxx (KERN_ATTR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* aes shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u32 s_te0[256];
LOCAL_VK u32 s_te1[256];
LOCAL_VK u32 s_te2[256];
LOCAL_VK u32 s_te3[256];
LOCAL_VK u32 s_te4[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_te0[i] = te0[i];
s_te1[i] = te1[i];
s_te2[i] = te2[i];
s_te3[i] = te3[i];
s_te4[i] = te4[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u32a *s_te0 = te0;
CONSTANT_AS u32a *s_te1 = te1;
CONSTANT_AS u32a *s_te2 = te2;
CONSTANT_AS u32a *s_te3 = te3;
CONSTANT_AS u32a *s_te4 = te4;
#endif
if (gid >= gid_max) return;
/**
* base
*/
u32 aes_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
sha512_ctx_t ctx0;
sha512_init (&ctx0);
sha512_update_global_swap (&ctx0, pws[gid].i, pws[gid].pw_len);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
sha512_ctx_t ctx = ctx0;
sha512_update_global_swap (&ctx, combs_buf[il_pos].i, combs_buf[il_pos].pw_len);
sha512_final (&ctx);
const u32 k0 = h32_from_64_S (ctx.h[0]);
const u32 k1 = l32_from_64_S (ctx.h[0]);
const u32 k2 = h32_from_64_S (ctx.h[1]);
const u32 k3 = l32_from_64_S (ctx.h[1]);
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (aes_key_len > 128)
{
k4 = h32_from_64_S (ctx.h[2]);
k5 = l32_from_64_S (ctx.h[2]);
if (aes_key_len > 192)
{
k6 = h32_from_64_S (ctx.h[3]);
k7 = l32_from_64_S (ctx.h[3]);
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (aes_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (aes_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[0]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[1]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[2]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[3])
};
// CT
u32 CT[4] = { 0 };
// aes
u32 ks[60] = { 0 };
if (aes_key_len == 128)
{
AES128_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES128_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else if (aes_key_len == 192)
{
AES192_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES192_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else
{
AES256_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES256_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
const u32 r0 = CT[0];
const u32 r1 = CT[1];
const u32 r2 = CT[2];
const u32 r3 = CT[3];
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14531_sxx (KERN_ATTR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* aes shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u32 s_te0[256];
LOCAL_VK u32 s_te1[256];
LOCAL_VK u32 s_te2[256];
LOCAL_VK u32 s_te3[256];
LOCAL_VK u32 s_te4[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_te0[i] = te0[i];
s_te1[i] = te1[i];
s_te2[i] = te2[i];
s_te3[i] = te3[i];
s_te4[i] = te4[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u32a *s_te0 = te0;
CONSTANT_AS u32a *s_te1 = te1;
CONSTANT_AS u32a *s_te2 = te2;
CONSTANT_AS u32a *s_te3 = te3;
CONSTANT_AS u32a *s_te4 = te4;
#endif
if (gid >= gid_max) return;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 aes_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
sha512_ctx_t ctx0;
sha512_init (&ctx0);
sha512_update_global_swap (&ctx0, pws[gid].i, pws[gid].pw_len);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
sha512_ctx_t ctx = ctx0;
sha512_update_global_swap (&ctx, combs_buf[il_pos].i, combs_buf[il_pos].pw_len);
sha512_final (&ctx);
const u32 k0 = h32_from_64_S (ctx.h[0]);
const u32 k1 = l32_from_64_S (ctx.h[0]);
const u32 k2 = h32_from_64_S (ctx.h[1]);
const u32 k3 = l32_from_64_S (ctx.h[1]);
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (aes_key_len > 128)
{
k4 = h32_from_64_S (ctx.h[2]);
k5 = l32_from_64_S (ctx.h[2]);
if (aes_key_len > 192)
{
k6 = h32_from_64_S (ctx.h[3]);
k7 = l32_from_64_S (ctx.h[3]);
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (aes_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (aes_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[0]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[1]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[2]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[3])
};
// CT
u32 CT[4] = { 0 };
// aes
u32 ks[60] = { 0 };
if (aes_key_len == 128)
{
AES128_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES128_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else if (aes_key_len == 192)
{
AES192_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES192_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else
{
AES256_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES256_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
const u32 r0 = CT[0];
const u32 r1 = CT[1];
const u32 r2 = CT[2];
const u32 r3 = CT[3];
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

OpenCL/m14531_a3-pure.cl (new file, 371 lines)
@@ -0,0 +1,371 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_scalar.cl"
#include "inc_hash_sha512.cl"
#include "inc_cipher_aes.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14531_mxx (KERN_ATTR_VECTOR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* aes shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u32 s_te0[256];
LOCAL_VK u32 s_te1[256];
LOCAL_VK u32 s_te2[256];
LOCAL_VK u32 s_te3[256];
LOCAL_VK u32 s_te4[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_te0[i] = te0[i];
s_te1[i] = te1[i];
s_te2[i] = te2[i];
s_te3[i] = te3[i];
s_te4[i] = te4[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u32a *s_te0 = te0;
CONSTANT_AS u32a *s_te1 = te1;
CONSTANT_AS u32a *s_te2 = te2;
CONSTANT_AS u32a *s_te3 = te3;
CONSTANT_AS u32a *s_te4 = te4;
#endif
if (gid >= gid_max) return;
/**
* base
*/
u32 aes_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
const u32 pw_len = pws[gid].pw_len;
u32x w[64] = { 0 };
for (u32 i = 0, idx = 0; i < pw_len; i += 4, idx += 1)
{
w[idx] = pws[gid].i[idx];
}
/**
* loop
*/
u32x w0l = w[0];
for (u32 il_pos = 0; il_pos < il_cnt; il_pos += VECT_SIZE)
{
const u32x w0r = words_buf_r[il_pos / VECT_SIZE];
const u32x w0 = w0l | w0r;
w[0] = w0;
sha512_ctx_t ctx0;
sha512_init (&ctx0);
sha512_update_swap (&ctx0, w, pw_len);
sha512_final (&ctx0);
const u32 k0 = h32_from_64_S (ctx0.h[0]);
const u32 k1 = l32_from_64_S (ctx0.h[0]);
const u32 k2 = h32_from_64_S (ctx0.h[1]);
const u32 k3 = l32_from_64_S (ctx0.h[1]);
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (aes_key_len > 128)
{
k4 = h32_from_64_S (ctx0.h[2]);
k5 = l32_from_64_S (ctx0.h[2]);
if (aes_key_len > 192)
{
k6 = h32_from_64_S (ctx0.h[3]);
k7 = l32_from_64_S (ctx0.h[3]);
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (aes_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (aes_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[0]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[1]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[2]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[3])
};
// CT
u32 CT[4] = { 0 };
// aes
u32 ks[60] = { 0 };
if (aes_key_len == 128)
{
AES128_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES128_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else if (aes_key_len == 192)
{
AES192_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES192_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else
{
AES256_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES256_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
const u32 r0 = CT[0];
const u32 r1 = CT[1];
const u32 r2 = CT[2];
const u32 r3 = CT[3];
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14531_sxx (KERN_ATTR_VECTOR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* aes shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u32 s_te0[256];
LOCAL_VK u32 s_te1[256];
LOCAL_VK u32 s_te2[256];
LOCAL_VK u32 s_te3[256];
LOCAL_VK u32 s_te4[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_te0[i] = te0[i];
s_te1[i] = te1[i];
s_te2[i] = te2[i];
s_te3[i] = te3[i];
s_te4[i] = te4[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u32a *s_te0 = te0;
CONSTANT_AS u32a *s_te1 = te1;
CONSTANT_AS u32a *s_te2 = te2;
CONSTANT_AS u32a *s_te3 = te3;
CONSTANT_AS u32a *s_te4 = te4;
#endif
if (gid >= gid_max) return;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 aes_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
const u32 pw_len = pws[gid].pw_len;
u32x w[64] = { 0 };
for (u32 i = 0, idx = 0; i < pw_len; i += 4, idx += 1)
{
w[idx] = pws[gid].i[idx];
}
/**
* loop
*/
u32x w0l = w[0];
for (u32 il_pos = 0; il_pos < il_cnt; il_pos += VECT_SIZE)
{
const u32x w0r = words_buf_r[il_pos / VECT_SIZE];
const u32x w0 = w0l | w0r;
w[0] = w0;
sha512_ctx_t ctx0;
sha512_init (&ctx0);
sha512_update_swap (&ctx0, w, pw_len);
sha512_final (&ctx0);
const u32 k0 = h32_from_64_S (ctx0.h[0]);
const u32 k1 = l32_from_64_S (ctx0.h[0]);
const u32 k2 = h32_from_64_S (ctx0.h[1]);
const u32 k3 = l32_from_64_S (ctx0.h[1]);
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (aes_key_len > 128)
{
k4 = h32_from_64_S (ctx0.h[2]);
k5 = l32_from_64_S (ctx0.h[2]);
if (aes_key_len > 192)
{
k6 = h32_from_64_S (ctx0.h[3]);
k7 = l32_from_64_S (ctx0.h[3]);
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (aes_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (aes_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[0]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[1]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[2]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[3])
};
// CT
u32 CT[4] = { 0 };
// aes
u32 ks[60] = { 0 };
if (aes_key_len == 128)
{
AES128_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES128_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else if (aes_key_len == 192)
{
AES192_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES192_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else
{
AES256_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES256_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
const u32 r0 = CT[0];
const u32 r1 = CT[1];
const u32 r2 = CT[2];
const u32 r3 = CT[3];
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

OpenCL/m14532_a0-pure.cl (new file, 282 lines)
@@ -0,0 +1,282 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_rp.h"
#include "inc_rp.cl"
#include "inc_scalar.cl"
#include "inc_hash_sha512.cl"
#include "inc_cipher_serpent.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14532_mxx (KERN_ATTR_RULES_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* base
*/
u32 serpent_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
COPY_PW (pws[gid]);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
pw_t tmp = PASTE_PW;
tmp.pw_len = apply_rules (rules_buf[il_pos].cmds, tmp.i, tmp.pw_len);
sha512_ctx_t ctx0;
sha512_init (&ctx0);
sha512_update_swap (&ctx0, tmp.i, tmp.pw_len);
sha512_final (&ctx0);
const u32 k0 = h32_from_64_S (ctx0.h[0]);
const u32 k1 = l32_from_64_S (ctx0.h[0]);
const u32 k2 = h32_from_64_S (ctx0.h[1]);
const u32 k3 = l32_from_64_S (ctx0.h[1]);
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (serpent_key_len > 128)
{
k4 = h32_from_64_S (ctx0.h[2]);
k5 = l32_from_64_S (ctx0.h[2]);
if (serpent_key_len > 192)
{
k6 = h32_from_64_S (ctx0.h[3]);
k7 = l32_from_64_S (ctx0.h[3]);
}
}
// key
u32 ukey[8] = { 0 };
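// the digest words are byte-swapped so the Serpent key setup sees the hash bytes as little-endian 32-bit words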
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (serpent_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (serpent_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// serpent
u32 ks[140] = { 0 };
if (serpent_key_len == 128)
{
serpent128_set_key (ks, ukey);
serpent128_encrypt (ks, iv, CT);
}
else if (serpent_key_len == 192)
{
serpent192_set_key (ks, ukey);
serpent192_encrypt (ks, iv, CT);
}
else
{
serpent256_set_key (ks, ukey);
serpent256_encrypt (ks, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14532_sxx (KERN_ATTR_RULES_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* base
*/
u32 serpent_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
COPY_PW (pws[gid]);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
pw_t tmp = PASTE_PW;
tmp.pw_len = apply_rules (rules_buf[il_pos].cmds, tmp.i, tmp.pw_len);
sha512_ctx_t ctx0;
sha512_init (&ctx0);
sha512_update_swap (&ctx0, tmp.i, tmp.pw_len);
sha512_final (&ctx0);
const u32 k0 = h32_from_64_S (ctx0.h[0]);
const u32 k1 = l32_from_64_S (ctx0.h[0]);
const u32 k2 = h32_from_64_S (ctx0.h[1]);
const u32 k3 = l32_from_64_S (ctx0.h[1]);
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (serpent_key_len > 128)
{
k4 = h32_from_64_S (ctx0.h[2]);
k5 = l32_from_64_S (ctx0.h[2]);
if (serpent_key_len > 192)
{
k6 = h32_from_64_S (ctx0.h[3]);
k7 = l32_from_64_S (ctx0.h[3]);
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (serpent_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (serpent_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// serpent
u32 ks[140] = { 0 };
if (serpent_key_len == 128)
{
serpent128_set_key (ks, ukey);
serpent128_encrypt (ks, iv, CT);
}
else if (serpent_key_len == 192)
{
serpent192_set_key (ks, ukey);
serpent192_encrypt (ks, iv, CT);
}
else
{
serpent256_set_key (ks, ukey);
serpent256_encrypt (ks, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

OpenCL/m14532_a1-pure.cl (new file, 273 lines)
@@ -0,0 +1,273 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_scalar.cl"
#include "inc_hash_sha512.cl"
#include "inc_cipher_serpent.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14532_mxx (KERN_ATTR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* base
*/
u32 serpent_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
sha512_ctx_t ctx0;
sha512_init (&ctx0);
sha512_update_global_swap (&ctx0, pws[gid].i, pws[gid].pw_len);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
sha512_ctx_t ctx = ctx0;
sha512_update_global_swap (&ctx, combs_buf[il_pos].i, combs_buf[il_pos].pw_len);
sha512_final (&ctx);
const u32 k0 = h32_from_64_S (ctx.h[0]);
const u32 k1 = l32_from_64_S (ctx.h[0]);
const u32 k2 = h32_from_64_S (ctx.h[1]);
const u32 k3 = l32_from_64_S (ctx.h[1]);
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (serpent_key_len > 128)
{
k4 = h32_from_64_S (ctx.h[2]);
k5 = l32_from_64_S (ctx.h[2]);
if (serpent_key_len > 192)
{
k6 = h32_from_64_S (ctx.h[3]);
k7 = l32_from_64_S (ctx.h[3]);
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (serpent_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (serpent_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// serpent
u32 ks[140] = { 0 };
if (serpent_key_len == 128)
{
serpent128_set_key (ks, ukey);
serpent128_encrypt (ks, iv, CT);
}
else if (serpent_key_len == 192)
{
serpent192_set_key (ks, ukey);
serpent192_encrypt (ks, iv, CT);
}
else
{
serpent256_set_key (ks, ukey);
serpent256_encrypt (ks, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14532_sxx (KERN_ATTR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 serpent_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
sha512_ctx_t ctx0;
sha512_init (&ctx0);
sha512_update_global_swap (&ctx0, pws[gid].i, pws[gid].pw_len);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
sha512_ctx_t ctx = ctx0;
sha512_update_global_swap (&ctx, combs_buf[il_pos].i, combs_buf[il_pos].pw_len);
sha512_final (&ctx);
const u32 k0 = h32_from_64_S (ctx.h[0]);
const u32 k1 = l32_from_64_S (ctx.h[0]);
const u32 k2 = h32_from_64_S (ctx.h[1]);
const u32 k3 = l32_from_64_S (ctx.h[1]);
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (serpent_key_len > 128)
{
k4 = h32_from_64_S (ctx.h[2]);
k5 = l32_from_64_S (ctx.h[2]);
if (serpent_key_len > 192)
{
k6 = h32_from_64_S (ctx.h[3]);
k7 = l32_from_64_S (ctx.h[3]);
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (serpent_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (serpent_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// serpent
u32 ks[140] = { 0 };
if (serpent_key_len == 128)
{
serpent128_set_key (ks, ukey);
serpent128_encrypt (ks, iv, CT);
}
else if (serpent_key_len == 192)
{
serpent192_set_key (ks, ukey);
serpent192_encrypt (ks, iv, CT);
}
else
{
serpent256_set_key (ks, ukey);
serpent256_encrypt (ks, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

OpenCL/m14532_a3-pure.cl (new file, 299 lines)
@@ -0,0 +1,299 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_scalar.cl"
#include "inc_hash_sha512.cl"
#include "inc_cipher_serpent.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14532_mxx (KERN_ATTR_VECTOR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* base
*/
u32 serpent_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
const u32 pw_len = pws[gid].pw_len;
u32x w[64] = { 0 };
for (u32 i = 0, idx = 0; i < pw_len; i += 4, idx += 1)
{
w[idx] = pws[gid].i[idx];
}
/**
* loop
*/
u32x w0l = w[0];
for (u32 il_pos = 0; il_pos < il_cnt; il_pos += VECT_SIZE)
{
const u32x w0r = words_buf_r[il_pos / VECT_SIZE];
const u32x w0 = w0l | w0r;
w[0] = w0;
sha512_ctx_t ctx0;
sha512_init (&ctx0);
sha512_update_swap (&ctx0, w, pw_len);
sha512_final (&ctx0);
const u32 k0 = h32_from_64_S (ctx0.h[0]);
const u32 k1 = l32_from_64_S (ctx0.h[0]);
const u32 k2 = h32_from_64_S (ctx0.h[1]);
const u32 k3 = l32_from_64_S (ctx0.h[1]);
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (serpent_key_len > 128)
{
k4 = h32_from_64_S (ctx0.h[2]);
k5 = l32_from_64_S (ctx0.h[2]);
if (serpent_key_len > 192)
{
k6 = h32_from_64_S (ctx0.h[3]);
k7 = l32_from_64_S (ctx0.h[3]);
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (serpent_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (serpent_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// serpent
u32 ks[140] = { 0 };
if (serpent_key_len == 128)
{
serpent128_set_key (ks, ukey);
serpent128_encrypt (ks, iv, CT);
}
else if (serpent_key_len == 192)
{
serpent192_set_key (ks, ukey);
serpent192_encrypt (ks, iv, CT);
}
else
{
serpent256_set_key (ks, ukey);
serpent256_encrypt (ks, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14532_sxx (KERN_ATTR_VECTOR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 serpent_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
const u32 pw_len = pws[gid].pw_len;
u32x w[64] = { 0 };
for (u32 i = 0, idx = 0; i < pw_len; i += 4, idx += 1)
{
w[idx] = pws[gid].i[idx];
}
/**
* loop
*/
u32x w0l = w[0];
for (u32 il_pos = 0; il_pos < il_cnt; il_pos += VECT_SIZE)
{
const u32x w0r = words_buf_r[il_pos / VECT_SIZE];
const u32x w0 = w0l | w0r;
w[0] = w0;
sha512_ctx_t ctx0;
sha512_init (&ctx0);
sha512_update_swap (&ctx0, w, pw_len);
sha512_final (&ctx0);
const u32 k0 = h32_from_64_S (ctx0.h[0]);
const u32 k1 = l32_from_64_S (ctx0.h[0]);
const u32 k2 = h32_from_64_S (ctx0.h[1]);
const u32 k3 = l32_from_64_S (ctx0.h[1]);
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (serpent_key_len > 128)
{
k4 = h32_from_64_S (ctx0.h[2]);
k5 = l32_from_64_S (ctx0.h[2]);
if (serpent_key_len > 192)
{
k6 = h32_from_64_S (ctx0.h[3]);
k7 = l32_from_64_S (ctx0.h[3]);
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (serpent_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (serpent_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// serpent
u32 ks[140] = { 0 };
if (serpent_key_len == 128)
{
serpent128_set_key (ks, ukey);
serpent128_encrypt (ks, iv, CT);
}
else if (serpent_key_len == 192)
{
serpent192_set_key (ks, ukey);
serpent192_encrypt (ks, iv, CT);
}
else
{
serpent256_set_key (ks, ukey);
serpent256_encrypt (ks, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

OpenCL/m14533_a0-pure.cl (new file, 285 lines)
@@ -0,0 +1,285 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_rp.h"
#include "inc_rp.cl"
#include "inc_scalar.cl"
#include "inc_hash_sha512.cl"
#include "inc_cipher_twofish.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14533_mxx (KERN_ATTR_RULES_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* base
*/
u32 twofish_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
COPY_PW (pws[gid]);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
pw_t tmp = PASTE_PW;
tmp.pw_len = apply_rules (rules_buf[il_pos].cmds, tmp.i, tmp.pw_len);
sha512_ctx_t ctx0;
sha512_init (&ctx0);
sha512_update_swap (&ctx0, tmp.i, tmp.pw_len);
sha512_final (&ctx0);
const u32 k0 = h32_from_64_S (ctx0.h[0]);
const u32 k1 = l32_from_64_S (ctx0.h[0]);
const u32 k2 = h32_from_64_S (ctx0.h[1]);
const u32 k3 = l32_from_64_S (ctx0.h[1]);
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (twofish_key_len > 128)
{
k4 = h32_from_64_S (ctx0.h[2]);
k5 = l32_from_64_S (ctx0.h[2]);
if (twofish_key_len > 192)
{
k6 = h32_from_64_S (ctx0.h[3]);
k7 = l32_from_64_S (ctx0.h[3]);
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (twofish_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (twofish_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// twofish
u32 sk1[4] = { 0 };
u32 lk1[40] = { 0 };
if (twofish_key_len == 128)
{
twofish128_set_key (sk1, lk1, ukey);
twofish128_encrypt (sk1, lk1, iv, CT);
}
else if (twofish_key_len == 192)
{
twofish192_set_key (sk1, lk1, ukey);
twofish192_encrypt (sk1, lk1, iv, CT);
}
else
{
twofish256_set_key (sk1, lk1, ukey);
twofish256_encrypt (sk1, lk1, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14533_sxx (KERN_ATTR_RULES_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* base
*/
u32 twofish_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
COPY_PW (pws[gid]);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
pw_t tmp = PASTE_PW;
tmp.pw_len = apply_rules (rules_buf[il_pos].cmds, tmp.i, tmp.pw_len);
sha512_ctx_t ctx0;
sha512_init (&ctx0);
sha512_update_swap (&ctx0, tmp.i, tmp.pw_len);
sha512_final (&ctx0);
const u32 k0 = h32_from_64_S (ctx0.h[0]);
const u32 k1 = l32_from_64_S (ctx0.h[0]);
const u32 k2 = h32_from_64_S (ctx0.h[1]);
const u32 k3 = l32_from_64_S (ctx0.h[1]);
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (twofish_key_len > 128)
{
k4 = h32_from_64_S (ctx0.h[2]);
k5 = l32_from_64_S (ctx0.h[2]);
if (twofish_key_len > 192)
{
k6 = h32_from_64_S (ctx0.h[3]);
k7 = l32_from_64_S (ctx0.h[3]);
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (twofish_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (twofish_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// twofish
u32 sk1[4] = { 0 };
u32 lk1[40] = { 0 };
if (twofish_key_len == 128)
{
twofish128_set_key (sk1, lk1, ukey);
twofish128_encrypt (sk1, lk1, iv, CT);
}
else if (twofish_key_len == 192)
{
twofish192_set_key (sk1, lk1, ukey);
twofish192_encrypt (sk1, lk1, iv, CT);
}
else
{
twofish256_set_key (sk1, lk1, ukey);
twofish256_encrypt (sk1, lk1, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

OpenCL/m14533_a1-pure.cl (new file, 275 lines)
@@ -0,0 +1,275 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_scalar.cl"
#include "inc_hash_sha512.cl"
#include "inc_cipher_twofish.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14533_mxx (KERN_ATTR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* base
*/
u32 twofish_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
sha512_ctx_t ctx0;
sha512_init (&ctx0);
sha512_update_global_swap (&ctx0, pws[gid].i, pws[gid].pw_len);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
sha512_ctx_t ctx = ctx0;
sha512_update_global_swap (&ctx, combs_buf[il_pos].i, combs_buf[il_pos].pw_len);
sha512_final (&ctx);
const u32 k0 = h32_from_64_S (ctx.h[0]);
const u32 k1 = l32_from_64_S (ctx.h[0]);
const u32 k2 = h32_from_64_S (ctx.h[1]);
const u32 k3 = l32_from_64_S (ctx.h[1]);
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (twofish_key_len > 128)
{
k4 = h32_from_64_S (ctx.h[2]);
k5 = l32_from_64_S (ctx.h[2]);
if (twofish_key_len > 192)
{
k6 = h32_from_64_S (ctx.h[3]);
k7 = l32_from_64_S (ctx.h[3]);
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (twofish_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (twofish_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// twofish
u32 sk1[4] = { 0 };
u32 lk1[40] = { 0 };
if (twofish_key_len == 128)
{
twofish128_set_key (sk1, lk1, ukey);
twofish128_encrypt (sk1, lk1, iv, CT);
}
else if (twofish_key_len == 192)
{
twofish192_set_key (sk1, lk1, ukey);
twofish192_encrypt (sk1, lk1, iv, CT);
}
else
{
twofish256_set_key (sk1, lk1, ukey);
twofish256_encrypt (sk1, lk1, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14533_sxx (KERN_ATTR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 twofish_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
sha512_ctx_t ctx0;
sha512_init (&ctx0);
sha512_update_global_swap (&ctx0, pws[gid].i, pws[gid].pw_len);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
sha512_ctx_t ctx = ctx0;
sha512_update_global_swap (&ctx, combs_buf[il_pos].i, combs_buf[il_pos].pw_len);
sha512_final (&ctx);
const u32 k0 = h32_from_64_S (ctx.h[0]);
const u32 k1 = l32_from_64_S (ctx.h[0]);
const u32 k2 = h32_from_64_S (ctx.h[1]);
const u32 k3 = l32_from_64_S (ctx.h[1]);
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (twofish_key_len > 128)
{
k4 = h32_from_64_S (ctx.h[2]);
k5 = l32_from_64_S (ctx.h[2]);
if (twofish_key_len > 192)
{
k6 = h32_from_64_S (ctx.h[3]);
k7 = l32_from_64_S (ctx.h[3]);
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (twofish_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (twofish_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// twofish
u32 sk1[4] = { 0 };
u32 lk1[40] = { 0 };
if (twofish_key_len == 128)
{
twofish128_set_key (sk1, lk1, ukey);
twofish128_encrypt (sk1, lk1, iv, CT);
}
else if (twofish_key_len == 192)
{
twofish192_set_key (sk1, lk1, ukey);
twofish192_encrypt (sk1, lk1, iv, CT);
}
else
{
twofish256_set_key (sk1, lk1, ukey);
twofish256_encrypt (sk1, lk1, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

OpenCL/m14533_a3-pure.cl (new file, 301 lines)
@@ -0,0 +1,301 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_scalar.cl"
#include "inc_hash_sha512.cl"
#include "inc_cipher_twofish.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14533_mxx (KERN_ATTR_VECTOR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* base
*/
u32 twofish_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
const u32 pw_len = pws[gid].pw_len;
u32x w[64] = { 0 };
for (u32 i = 0, idx = 0; i < pw_len; i += 4, idx += 1)
{
w[idx] = pws[gid].i[idx];
}
/**
* loop
*/
u32x w0l = w[0];
for (u32 il_pos = 0; il_pos < il_cnt; il_pos += VECT_SIZE)
{
const u32x w0r = words_buf_r[il_pos / VECT_SIZE];
const u32x w0 = w0l | w0r;
w[0] = w0;
sha512_ctx_t ctx0;
sha512_init (&ctx0);
sha512_update_swap (&ctx0, w, pw_len);
sha512_final (&ctx0);
const u32 k0 = h32_from_64_S (ctx0.h[0]);
const u32 k1 = l32_from_64_S (ctx0.h[0]);
const u32 k2 = h32_from_64_S (ctx0.h[1]);
const u32 k3 = l32_from_64_S (ctx0.h[1]);
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (twofish_key_len > 128)
{
k4 = h32_from_64_S (ctx0.h[2]);
k5 = l32_from_64_S (ctx0.h[2]);
if (twofish_key_len > 192)
{
k6 = h32_from_64_S (ctx0.h[3]);
k7 = l32_from_64_S (ctx0.h[3]);
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (twofish_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (twofish_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// twofish
u32 sk1[4] = { 0 };
u32 lk1[40] = { 0 };
if (twofish_key_len == 128)
{
twofish128_set_key (sk1, lk1, ukey);
twofish128_encrypt (sk1, lk1, iv, CT);
}
else if (twofish_key_len == 192)
{
twofish192_set_key (sk1, lk1, ukey);
twofish192_encrypt (sk1, lk1, iv, CT);
}
else
{
twofish256_set_key (sk1, lk1, ukey);
twofish256_encrypt (sk1, lk1, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14533_sxx (KERN_ATTR_VECTOR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 twofish_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
const u32 pw_len = pws[gid].pw_len;
u32x w[64] = { 0 };
for (u32 i = 0, idx = 0; i < pw_len; i += 4, idx += 1)
{
w[idx] = pws[gid].i[idx];
}
/**
* loop
*/
u32x w0l = w[0];
for (u32 il_pos = 0; il_pos < il_cnt; il_pos += VECT_SIZE)
{
const u32x w0r = words_buf_r[il_pos / VECT_SIZE];
const u32x w0 = w0l | w0r;
w[0] = w0;
sha512_ctx_t ctx0;
sha512_init (&ctx0);
sha512_update_swap (&ctx0, w, pw_len);
sha512_final (&ctx0);
const u32 k0 = h32_from_64_S (ctx0.h[0]);
const u32 k1 = l32_from_64_S (ctx0.h[0]);
const u32 k2 = h32_from_64_S (ctx0.h[1]);
const u32 k3 = l32_from_64_S (ctx0.h[1]);
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (twofish_key_len > 128)
{
k4 = h32_from_64_S (ctx0.h[2]);
k5 = l32_from_64_S (ctx0.h[2]);
if (twofish_key_len > 192)
{
k6 = h32_from_64_S (ctx0.h[3]);
k7 = l32_from_64_S (ctx0.h[3]);
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (twofish_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (twofish_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// twofish
u32 sk1[4] = { 0 };
u32 lk1[40] = { 0 };
if (twofish_key_len == 128)
{
twofish128_set_key (sk1, lk1, ukey);
twofish128_encrypt (sk1, lk1, iv, CT);
}
else if (twofish_key_len == 192)
{
twofish192_set_key (sk1, lk1, ukey);
twofish192_encrypt (sk1, lk1, iv, CT);
}
else
{
twofish256_set_key (sk1, lk1, ukey);
twofish256_encrypt (sk1, lk1, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

OpenCL/m14541_a0-pure.cl Normal file

@@ -0,0 +1,397 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_rp.h"
#include "inc_rp.cl"
#include "inc_scalar.cl"
#include "inc_hash_ripemd160.cl"
#include "inc_cipher_aes.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
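// m14541: the candidate is hashed with RIPEMD-160, the digest (extended below for
// 192/256-bit keys) becomes the AES key, and the 16-byte salt block is encrypted
// once; that ciphertext block is what gets compared against the stored digests.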
KERNEL_FQ void m14541_mxx (KERN_ATTR_RULES_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* aes shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u32 s_te0[256];
LOCAL_VK u32 s_te1[256];
LOCAL_VK u32 s_te2[256];
LOCAL_VK u32 s_te3[256];
LOCAL_VK u32 s_te4[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_te0[i] = te0[i];
s_te1[i] = te1[i];
s_te2[i] = te2[i];
s_te3[i] = te3[i];
s_te4[i] = te4[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u32a *s_te0 = te0;
CONSTANT_AS u32a *s_te1 = te1;
CONSTANT_AS u32a *s_te2 = te2;
CONSTANT_AS u32a *s_te3 = te3;
CONSTANT_AS u32a *s_te4 = te4;
#endif
if (gid >= gid_max) return;
/**
* base
*/
u32 aes_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
COPY_PW (pws[gid]);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
pw_t tmp = PASTE_PW;
tmp.pw_len = apply_rules (rules_buf[il_pos].cmds, tmp.i, tmp.pw_len);
u32 w[64];
u32 w_len = tmp.pw_len;
for (u32 i = 0; i < 64; i++) w[i] = tmp.i[i];
ripemd160_ctx_t ctx0;
ripemd160_init (&ctx0);
ripemd160_update (&ctx0, tmp.i, tmp.pw_len);
ripemd160_final (&ctx0);
const u32 k0 = hc_swap32_S (ctx0.h[0]);
const u32 k1 = hc_swap32_S (ctx0.h[1]);
const u32 k2 = hc_swap32_S (ctx0.h[2]);
const u32 k3 = hc_swap32_S (ctx0.h[3]);
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
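// Keys longer than 128 bits take their fifth word from the first digest and the remaining words from RIPEMD-160 of the single byte 0x41 ('A') followed by the password.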
if (aes_key_len > 128)
{
k4 = hc_swap32_S (ctx0.h[4]);
ripemd160_ctx_t ctx;
ripemd160_init (&ctx);
ripemd160_update (&ctx, padding, 1);
ripemd160_update (&ctx, w, w_len);
ripemd160_final (&ctx);
k5 = hc_swap32_S (ctx.h[0]);
if (aes_key_len > 192)
{
k6 = hc_swap32_S (ctx.h[1]);
k7 = hc_swap32_S (ctx.h[2]);
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (aes_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (aes_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[0]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[1]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[2]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[3])
};
// CT
u32 CT[4] = { 0 };
// aes
u32 ks[60] = { 0 };
if (aes_key_len == 128)
{
AES128_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES128_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else if (aes_key_len == 192)
{
AES192_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES192_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else
{
AES256_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES256_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
const u32 r0 = CT[0];
const u32 r1 = CT[1];
const u32 r2 = CT[2];
const u32 r3 = CT[3];
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14541_sxx (KERN_ATTR_RULES_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* aes shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u32 s_te0[256];
LOCAL_VK u32 s_te1[256];
LOCAL_VK u32 s_te2[256];
LOCAL_VK u32 s_te3[256];
LOCAL_VK u32 s_te4[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_te0[i] = te0[i];
s_te1[i] = te1[i];
s_te2[i] = te2[i];
s_te3[i] = te3[i];
s_te4[i] = te4[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u32a *s_te0 = te0;
CONSTANT_AS u32a *s_te1 = te1;
CONSTANT_AS u32a *s_te2 = te2;
CONSTANT_AS u32a *s_te3 = te3;
CONSTANT_AS u32a *s_te4 = te4;
#endif
if (gid >= gid_max) return;
/**
* base
*/
u32 aes_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
COPY_PW (pws[gid]);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
pw_t tmp = PASTE_PW;
tmp.pw_len = apply_rules (rules_buf[il_pos].cmds, tmp.i, tmp.pw_len);
u32 w[64];
u32 w_len = tmp.pw_len;
for (u32 i = 0; i < 64; i++) w[i] = tmp.i[i];
ripemd160_ctx_t ctx0;
ripemd160_init (&ctx0);
ripemd160_update (&ctx0, tmp.i, tmp.pw_len);
ripemd160_final (&ctx0);
const u32 k0 = hc_swap32_S (ctx0.h[0]);
const u32 k1 = hc_swap32_S (ctx0.h[1]);
const u32 k2 = hc_swap32_S (ctx0.h[2]);
const u32 k3 = hc_swap32_S (ctx0.h[3]);
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (aes_key_len > 128)
{
k4 = hc_swap32_S (ctx0.h[4]);
ripemd160_ctx_t ctx;
ripemd160_init (&ctx);
ripemd160_update (&ctx, padding, 1);
ripemd160_update (&ctx, w, w_len);
ripemd160_final (&ctx);
k5 = hc_swap32_S (ctx.h[0]);
if (aes_key_len > 192)
{
k6 = hc_swap32_S (ctx.h[1]);
k7 = hc_swap32_S (ctx.h[2]);
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (aes_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (aes_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[0]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[1]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[2]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[3])
};
// CT
u32 CT[4] = { 0 };
// aes
u32 ks[60] = { 0 };
if (aes_key_len == 128)
{
AES128_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES128_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else if (aes_key_len == 192)
{
AES192_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES192_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else
{
AES256_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES256_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
const u32 r0 = CT[0];
const u32 r1 = CT[1];
const u32 r2 = CT[2];
const u32 r3 = CT[3];
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

OpenCL/m14541_a1-pure.cl Normal file

@@ -0,0 +1,415 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_scalar.cl"
#include "inc_hash_ripemd160.cl"
#include "inc_cipher_aes.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14541_mxx (KERN_ATTR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* aes shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u32 s_te0[256];
LOCAL_VK u32 s_te1[256];
LOCAL_VK u32 s_te2[256];
LOCAL_VK u32 s_te3[256];
LOCAL_VK u32 s_te4[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_te0[i] = te0[i];
s_te1[i] = te1[i];
s_te2[i] = te2[i];
s_te3[i] = te3[i];
s_te4[i] = te4[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u32a *s_te0 = te0;
CONSTANT_AS u32a *s_te1 = te1;
CONSTANT_AS u32a *s_te2 = te2;
CONSTANT_AS u32a *s_te3 = te3;
CONSTANT_AS u32a *s_te4 = te4;
#endif
if (gid >= gid_max) return;
/**
* base
*/
u32 aes_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
ripemd160_ctx_t ctx0, ctx0_padding;
ripemd160_init (&ctx0);
u32 w[64] = { 0 };
u32 w_len = 0;
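// For long keys the 'A'-prefixed digest needs the full candidate, so the prefix and the base (left-hand) password are hashed once per work-item below; only the appended right-hand word is added inside the loop.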
if (aes_key_len > 128)
{
w_len = pws[gid].pw_len;
for (u32 i = 0; i < 64; i++) w[i] = pws[gid].i[i];
ctx0_padding = ctx0;
ripemd160_update (&ctx0_padding, padding, 1);
ripemd160_update (&ctx0_padding, w, w_len);
}
ripemd160_update_global (&ctx0, pws[gid].i, pws[gid].pw_len);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
ripemd160_ctx_t ctx = ctx0;
if (aes_key_len > 128)
{
w_len = combs_buf[il_pos].pw_len;
for (u32 i = 0; i < 64; i++) w[i] = combs_buf[il_pos].i[i];
}
ripemd160_update_global (&ctx, combs_buf[il_pos].i, combs_buf[il_pos].pw_len);
ripemd160_final (&ctx);
const u32 k0 = hc_swap32_S (ctx.h[0]);
const u32 k1 = hc_swap32_S (ctx.h[1]);
const u32 k2 = hc_swap32_S (ctx.h[2]);
const u32 k3 = hc_swap32_S (ctx.h[3]);
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (aes_key_len > 128)
{
k4 = hc_swap32_S (ctx.h[4]);
ripemd160_ctx_t ctx0_tmp = ctx0_padding;
ripemd160_update (&ctx0_tmp, w, w_len);
ripemd160_final (&ctx0_tmp);
k5 = hc_swap32_S (ctx0_tmp.h[0]);
if (aes_key_len > 192)
{
k6 = hc_swap32_S (ctx0_tmp.h[1]);
k7 = hc_swap32_S (ctx0_tmp.h[2]);
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (aes_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (aes_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[0]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[1]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[2]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[3])
};
// CT
u32 CT[4] = { 0 };
// aes
u32 ks[60] = { 0 };
if (aes_key_len == 128)
{
AES128_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES128_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else if (aes_key_len == 192)
{
AES192_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES192_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else
{
AES256_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES256_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
const u32 r0 = CT[0];
const u32 r1 = CT[1];
const u32 r2 = CT[2];
const u32 r3 = CT[3];
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14541_sxx (KERN_ATTR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* aes shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u32 s_te0[256];
LOCAL_VK u32 s_te1[256];
LOCAL_VK u32 s_te2[256];
LOCAL_VK u32 s_te3[256];
LOCAL_VK u32 s_te4[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_te0[i] = te0[i];
s_te1[i] = te1[i];
s_te2[i] = te2[i];
s_te3[i] = te3[i];
s_te4[i] = te4[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u32a *s_te0 = te0;
CONSTANT_AS u32a *s_te1 = te1;
CONSTANT_AS u32a *s_te2 = te2;
CONSTANT_AS u32a *s_te3 = te3;
CONSTANT_AS u32a *s_te4 = te4;
#endif
if (gid >= gid_max) return;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 aes_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
ripemd160_ctx_t ctx0, ctx0_padding;
ripemd160_init (&ctx0);
u32 w[64] = { 0 };
u32 w_len = 0;
if (aes_key_len > 128)
{
w_len = pws[gid].pw_len;
for (u32 i = 0; i < 64; i++) w[i] = pws[gid].i[i];
ctx0_padding = ctx0;
ripemd160_update (&ctx0_padding, padding, 1);
ripemd160_update (&ctx0_padding, w, w_len);
}
ripemd160_update_global (&ctx0, pws[gid].i, pws[gid].pw_len);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
ripemd160_ctx_t ctx = ctx0;
if (aes_key_len > 128)
{
w_len = combs_buf[il_pos].pw_len;
for (u32 i = 0; i < 64; i++) w[i] = combs_buf[il_pos].i[i];
}
ripemd160_update_global (&ctx, combs_buf[il_pos].i, combs_buf[il_pos].pw_len);
ripemd160_final (&ctx);
const u32 k0 = hc_swap32_S (ctx.h[0]);
const u32 k1 = hc_swap32_S (ctx.h[1]);
const u32 k2 = hc_swap32_S (ctx.h[2]);
const u32 k3 = hc_swap32_S (ctx.h[3]);
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (aes_key_len > 128)
{
k4 = hc_swap32_S (ctx.h[4]);
ripemd160_ctx_t ctx0_tmp = ctx0_padding;
ripemd160_update (&ctx0_tmp, w, w_len);
ripemd160_final (&ctx0_tmp);
k5 = hc_swap32_S (ctx0_tmp.h[0]);
if (aes_key_len > 192)
{
k6 = hc_swap32_S (ctx0_tmp.h[1]);
k7 = hc_swap32_S (ctx0_tmp.h[2]);
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (aes_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (aes_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[0]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[1]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[2]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[3])
};
// CT
u32 CT[4] = { 0 };
// aes
u32 ks[60] = { 0 };
if (aes_key_len == 128)
{
AES128_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES128_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else if (aes_key_len == 192)
{
AES192_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES192_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else
{
AES256_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES256_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
const u32 r0 = CT[0];
const u32 r1 = CT[1];
const u32 r2 = CT[2];
const u32 r3 = CT[3];
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

OpenCL/m14541_a3-pure.cl Normal file

@@ -0,0 +1,413 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_scalar.cl"
#include "inc_hash_ripemd160.cl"
#include "inc_cipher_aes.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14541_mxx (KERN_ATTR_VECTOR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* aes shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u32 s_te0[256];
LOCAL_VK u32 s_te1[256];
LOCAL_VK u32 s_te2[256];
LOCAL_VK u32 s_te3[256];
LOCAL_VK u32 s_te4[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_te0[i] = te0[i];
s_te1[i] = te1[i];
s_te2[i] = te2[i];
s_te3[i] = te3[i];
s_te4[i] = te4[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u32a *s_te0 = te0;
CONSTANT_AS u32a *s_te1 = te1;
CONSTANT_AS u32a *s_te2 = te2;
CONSTANT_AS u32a *s_te3 = te3;
CONSTANT_AS u32a *s_te4 = te4;
#endif
if (gid >= gid_max) return;
/**
* base
*/
u32 aes_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
const u32 pw_len = pws[gid].pw_len;
u32x w[64] = { 0 };
for (u32 i = 0, idx = 0; i < pw_len; i += 4, idx += 1)
{
w[idx] = pws[gid].i[idx];
}
/**
* loop
*/
u32x w0l = w[0];
for (u32 il_pos = 0; il_pos < il_cnt; il_pos += VECT_SIZE)
{
const u32x w0r = words_buf_r[il_pos / VECT_SIZE];
const u32x w0 = w0l | w0r;
w[0] = w0;
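// Only the first password word varies per candidate in this attack mode; it is patched in from words_buf_r before hashing.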
u32x _w[64];
u32 _w_len = pw_len;
for (u32 i = 0; i < 64; i++) _w[i] = w[i];
ripemd160_ctx_t ctx0;
ripemd160_init (&ctx0);
ripemd160_update (&ctx0, w, pw_len);
ripemd160_final (&ctx0);
const u32 k0 = hc_swap32_S (ctx0.h[0]);
const u32 k1 = hc_swap32_S (ctx0.h[1]);
const u32 k2 = hc_swap32_S (ctx0.h[2]);
const u32 k3 = hc_swap32_S (ctx0.h[3]);
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (aes_key_len > 128)
{
k4 = hc_swap32_S (ctx0.h[4]);
ripemd160_ctx_t ctx;
ripemd160_init (&ctx);
ripemd160_update (&ctx, padding, 1);
ripemd160_update (&ctx, _w, _w_len);
ripemd160_final (&ctx);
k5 = hc_swap32_S (ctx.h[0]);
if (aes_key_len > 192)
{
k6 = hc_swap32_S (ctx.h[1]);
k7 = hc_swap32_S (ctx.h[2]);
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (aes_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (aes_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[0]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[1]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[2]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[3])
};
// CT
u32 CT[4] = { 0 };
// aes
u32 ks[60] = { 0 };
if (aes_key_len == 128)
{
AES128_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES128_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else if (aes_key_len == 192)
{
AES192_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES192_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else
{
AES256_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES256_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
const u32 r0 = CT[0];
const u32 r1 = CT[1];
const u32 r2 = CT[2];
const u32 r3 = CT[3];
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14541_sxx (KERN_ATTR_VECTOR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* aes shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u32 s_te0[256];
LOCAL_VK u32 s_te1[256];
LOCAL_VK u32 s_te2[256];
LOCAL_VK u32 s_te3[256];
LOCAL_VK u32 s_te4[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_te0[i] = te0[i];
s_te1[i] = te1[i];
s_te2[i] = te2[i];
s_te3[i] = te3[i];
s_te4[i] = te4[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u32a *s_te0 = te0;
CONSTANT_AS u32a *s_te1 = te1;
CONSTANT_AS u32a *s_te2 = te2;
CONSTANT_AS u32a *s_te3 = te3;
CONSTANT_AS u32a *s_te4 = te4;
#endif
if (gid >= gid_max) return;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 aes_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
const u32 pw_len = pws[gid].pw_len;
u32x w[64] = { 0 };
for (u32 i = 0, idx = 0; i < pw_len; i += 4, idx += 1)
{
w[idx] = pws[gid].i[idx];
}
/**
* loop
*/
u32x w0l = w[0];
for (u32 il_pos = 0; il_pos < il_cnt; il_pos += VECT_SIZE)
{
const u32x w0r = words_buf_r[il_pos / VECT_SIZE];
const u32x w0 = w0l | w0r;
w[0] = w0;
u32x _w[64];
u32 _w_len = pw_len;
for (u32 i = 0; i < 64; i++) _w[i] = w[i];
ripemd160_ctx_t ctx0;
ripemd160_init (&ctx0);
ripemd160_update (&ctx0, w, pw_len);
ripemd160_final (&ctx0);
const u32 k0 = hc_swap32_S (ctx0.h[0]);
const u32 k1 = hc_swap32_S (ctx0.h[1]);
const u32 k2 = hc_swap32_S (ctx0.h[2]);
const u32 k3 = hc_swap32_S (ctx0.h[3]);
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (aes_key_len > 128)
{
k4 = hc_swap32_S (ctx0.h[4]);
ripemd160_ctx_t ctx;
ripemd160_init (&ctx);
ripemd160_update (&ctx, padding, 1);
ripemd160_update (&ctx, _w, _w_len);
ripemd160_final (&ctx);
k5 = hc_swap32_S (ctx.h[0]);
if (aes_key_len > 192)
{
k6 = hc_swap32_S (ctx.h[1]);
k7 = hc_swap32_S (ctx.h[2]);
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (aes_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (aes_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[0]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[1]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[2]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[3])
};
// CT
u32 CT[4] = { 0 };
// aes
u32 ks[60] = { 0 };
if (aes_key_len == 128)
{
AES128_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES128_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else if (aes_key_len == 192)
{
AES192_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES192_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else
{
AES256_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES256_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
const u32 r0 = CT[0];
const u32 r1 = CT[1];
const u32 r2 = CT[2];
const u32 r3 = CT[3];
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

OpenCL/m14542_a0-pure.cl Normal file

@@ -0,0 +1,325 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_rp.h"
#include "inc_rp.cl"
#include "inc_scalar.cl"
#include "inc_hash_ripemd160.cl"
#include "inc_cipher_serpent.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
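// m14542: same RIPEMD-160 key derivation as m14541, but the digest words feed the
// Serpent key schedule without byte swapping and the ciphertext is swapped only for
// the final comparison.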
KERNEL_FQ void m14542_mxx (KERN_ATTR_RULES_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* base
*/
u32 serpent_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
COPY_PW (pws[gid]);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
pw_t tmp = PASTE_PW;
tmp.pw_len = apply_rules (rules_buf[il_pos].cmds, tmp.i, tmp.pw_len);
u32 w[64] = { 0 };
u32 w_len = tmp.pw_len;
for (u32 i = 0; i < 64; i++) w[i] = tmp.i[i];
ripemd160_ctx_t ctx0;
ripemd160_init (&ctx0);
ripemd160_update (&ctx0, tmp.i, tmp.pw_len);
ripemd160_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (serpent_key_len > 128)
{
k4 = ctx0.h[4];
ripemd160_ctx_t ctx;
ripemd160_init (&ctx);
ripemd160_update (&ctx, padding, 1);
ripemd160_update (&ctx, w, w_len);
ripemd160_final (&ctx);
k5 = ctx.h[0];
if (serpent_key_len > 192)
{
k6 = ctx.h[1];
k7 = ctx.h[2];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (serpent_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (serpent_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// serpent
u32 ks[140] = { 0 };
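// ks receives the expanded Serpent round keys; the set_key/encrypt pair matching key_size runs below and the salt block is encrypted once.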
if (serpent_key_len == 128)
{
serpent128_set_key (ks, ukey);
serpent128_encrypt (ks, iv, CT);
}
else if (serpent_key_len == 192)
{
serpent192_set_key (ks, ukey);
serpent192_encrypt (ks, iv, CT);
}
else
{
serpent256_set_key (ks, ukey);
serpent256_encrypt (ks, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14542_sxx (KERN_ATTR_RULES_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* base
*/
u32 serpent_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
COPY_PW (pws[gid]);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
pw_t tmp = PASTE_PW;
tmp.pw_len = apply_rules (rules_buf[il_pos].cmds, tmp.i, tmp.pw_len);
u32 w[64] = { 0 };
u32 w_len = tmp.pw_len;
for (u32 i = 0; i < 64; i++) w[i] = tmp.i[i];
ripemd160_ctx_t ctx0;
ripemd160_init (&ctx0);
ripemd160_update (&ctx0, tmp.i, tmp.pw_len);
ripemd160_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (serpent_key_len > 128)
{
k4 = ctx0.h[4];
ripemd160_ctx_t ctx;
ripemd160_init (&ctx);
ripemd160_update (&ctx, padding, 1);
ripemd160_update (&ctx, w, w_len);
ripemd160_final (&ctx);
k5 = ctx.h[0];
if (serpent_key_len > 192)
{
k6 = ctx.h[1];
k7 = ctx.h[2];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (serpent_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (serpent_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// serpent
u32 ks[140] = { 0 };
if (serpent_key_len == 128)
{
serpent128_set_key (ks, ukey);
serpent128_encrypt (ks, iv, CT);
}
else if (serpent_key_len == 192)
{
serpent192_set_key (ks, ukey);
serpent192_encrypt (ks, iv, CT);
}
else
{
serpent256_set_key (ks, ukey);
serpent256_encrypt (ks, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

OpenCL/m14542_a1-pure.cl Normal file

@@ -0,0 +1,343 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_scalar.cl"
#include "inc_hash_ripemd160.cl"
#include "inc_cipher_serpent.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14542_mxx (KERN_ATTR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* base
*/
u32 serpent_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
ripemd160_ctx_t ctx0, ctx0_padding;
ripemd160_init (&ctx0);
u32 w[64] = { 0 };
u32 w_len = 0;
if (serpent_key_len > 128)
{
w_len = pws[gid].pw_len;
for (u32 i = 0; i < 64; i++) w[i] = pws[gid].i[i];
ctx0_padding = ctx0;
ripemd160_update (&ctx0_padding, padding, 1);
ripemd160_update (&ctx0_padding, w, w_len);
}
ripemd160_update_global (&ctx0, pws[gid].i, pws[gid].pw_len);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
ripemd160_ctx_t ctx = ctx0;
if (serpent_key_len > 128)
{
w_len = combs_buf[il_pos].pw_len;
for (u32 i = 0; i < 64; i++) w[i] = combs_buf[il_pos].i[i];
}
ripemd160_update_global (&ctx, combs_buf[il_pos].i, combs_buf[il_pos].pw_len);
ripemd160_final (&ctx);
const u32 k0 = ctx.h[0];
const u32 k1 = ctx.h[1];
const u32 k2 = ctx.h[2];
const u32 k3 = ctx.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (serpent_key_len > 128)
{
k4 = ctx.h[4];
ripemd160_ctx_t ctx0_tmp = ctx0_padding;
ripemd160_update (&ctx0_tmp, w, w_len);
ripemd160_final (&ctx0_tmp);
k5 = ctx0_tmp.h[0];
if (serpent_key_len > 192)
{
k6 = ctx0_tmp.h[1];
k7 = ctx0_tmp.h[2];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (serpent_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (serpent_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// serpent
u32 ks[140] = { 0 };
if (serpent_key_len == 128)
{
serpent128_set_key (ks, ukey);
serpent128_encrypt (ks, iv, CT);
}
else if (serpent_key_len == 192)
{
serpent192_set_key (ks, ukey);
serpent192_encrypt (ks, iv, CT);
}
else
{
serpent256_set_key (ks, ukey);
serpent256_encrypt (ks, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14542_sxx (KERN_ATTR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 serpent_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
ripemd160_ctx_t ctx0, ctx0_padding;
ripemd160_init (&ctx0);
u32 w[64] = { 0 };
u32 w_len = 0;
if (serpent_key_len > 128)
{
w_len = pws[gid].pw_len;
for (u32 i = 0; i < 64; i++) w[i] = pws[gid].i[i];
ctx0_padding = ctx0;
ripemd160_update (&ctx0_padding, padding, 1);
ripemd160_update (&ctx0_padding, w, w_len);
}
ripemd160_update_global (&ctx0, pws[gid].i, pws[gid].pw_len);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
ripemd160_ctx_t ctx = ctx0;
if (serpent_key_len > 128)
{
w_len = combs_buf[il_pos].pw_len;
for (u32 i = 0; i < 64; i++) w[i] = combs_buf[il_pos].i[i];
}
ripemd160_update_global (&ctx, combs_buf[il_pos].i, combs_buf[il_pos].pw_len);
ripemd160_final (&ctx);
const u32 k0 = ctx.h[0];
const u32 k1 = ctx.h[1];
const u32 k2 = ctx.h[2];
const u32 k3 = ctx.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (serpent_key_len > 128)
{
k4 = ctx.h[4];
ripemd160_ctx_t ctx0_tmp = ctx0_padding;
ripemd160_update (&ctx0_tmp, w, w_len);
ripemd160_final (&ctx0_tmp);
k5 = ctx0_tmp.h[0];
if (serpent_key_len > 192)
{
k6 = ctx0_tmp.h[1];
k7 = ctx0_tmp.h[2];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (serpent_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (serpent_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// serpent
u32 ks[140] = { 0 };
if (serpent_key_len == 128)
{
serpent128_set_key (ks, ukey);
serpent128_encrypt (ks, iv, CT);
}
else if (serpent_key_len == 192)
{
serpent192_set_key (ks, ukey);
serpent192_encrypt (ks, iv, CT);
}
else
{
serpent256_set_key (ks, ukey);
serpent256_encrypt (ks, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

OpenCL/m14542_a3-pure.cl Normal file

@@ -0,0 +1,341 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_scalar.cl"
#include "inc_hash_ripemd160.cl"
#include "inc_cipher_serpent.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14542_mxx (KERN_ATTR_VECTOR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* base
*/
u32 serpent_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
const u32 pw_len = pws[gid].pw_len;
u32x w[64] = { 0 };
for (u32 i = 0, idx = 0; i < pw_len; i += 4, idx += 1)
{
w[idx] = pws[gid].i[idx];
}
/**
* loop
*/
u32x w0l = w[0];
for (u32 il_pos = 0; il_pos < il_cnt; il_pos += VECT_SIZE)
{
const u32x w0r = words_buf_r[il_pos / VECT_SIZE];
const u32x w0 = w0l | w0r;
w[0] = w0;
u32x _w[64];
u32 _w_len = pw_len;
for (u32 i = 0; i < 64; i++) _w[i] = w[i];
ripemd160_ctx_t ctx0;
ripemd160_init (&ctx0);
ripemd160_update (&ctx0, w, pw_len);
ripemd160_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (serpent_key_len > 128)
{
k4 = ctx0.h[4];
ripemd160_ctx_t ctx;
ripemd160_init (&ctx);
ripemd160_update (&ctx, padding, 1);
ripemd160_update (&ctx, _w, _w_len);
ripemd160_final (&ctx);
k5 = ctx.h[0];
if (serpent_key_len > 192)
{
k6 = ctx.h[1];
k7 = ctx.h[2];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (serpent_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (serpent_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// serpent
u32 ks[140] = { 0 };
if (serpent_key_len == 128)
{
serpent128_set_key (ks, ukey);
serpent128_encrypt (ks, iv, CT);
}
else if (serpent_key_len == 192)
{
serpent192_set_key (ks, ukey);
serpent192_encrypt (ks, iv, CT);
}
else
{
serpent256_set_key (ks, ukey);
serpent256_encrypt (ks, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14542_sxx (KERN_ATTR_VECTOR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 serpent_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
const u32 pw_len = pws[gid].pw_len;
u32x w[64] = { 0 };
for (u32 i = 0, idx = 0; i < pw_len; i += 4, idx += 1)
{
w[idx] = pws[gid].i[idx];
}
/**
* loop
*/
u32x w0l = w[0];
for (u32 il_pos = 0; il_pos < il_cnt; il_pos += VECT_SIZE)
{
const u32x w0r = words_buf_r[il_pos / VECT_SIZE];
const u32x w0 = w0l | w0r;
w[0] = w0;
u32x _w[64];
u32 _w_len = pw_len;
for (u32 i = 0; i < 64; i++) _w[i] = w[i];
ripemd160_ctx_t ctx0;
ripemd160_init (&ctx0);
ripemd160_update (&ctx0, w, pw_len);
ripemd160_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (serpent_key_len > 128)
{
k4 = ctx0.h[4];
ripemd160_ctx_t ctx;
ripemd160_init (&ctx);
ripemd160_update (&ctx, padding, 1);
ripemd160_update (&ctx, _w, _w_len);
ripemd160_final (&ctx);
k5 = ctx.h[0];
if (serpent_key_len > 192)
{
k6 = ctx.h[1];
k7 = ctx.h[2];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (serpent_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (serpent_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// serpent
u32 ks[140] = { 0 };
if (serpent_key_len == 128)
{
serpent128_set_key (ks, ukey);
serpent128_encrypt (ks, iv, CT);
}
else if (serpent_key_len == 192)
{
serpent192_set_key (ks, ukey);
serpent192_encrypt (ks, iv, CT);
}
else
{
serpent256_set_key (ks, ukey);
serpent256_encrypt (ks, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

OpenCL/m14543_a0-pure.cl Normal file

@@ -0,0 +1,326 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_rp.h"
#include "inc_rp.cl"
#include "inc_scalar.cl"
#include "inc_hash_ripemd160.cl"
#include "inc_cipher_twofish.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
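// m14543: identical key derivation to m14542, with Twofish as the block cipher
// (sk1 and lk1 hold the two halves of the Twofish key schedule).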
KERNEL_FQ void m14543_mxx (KERN_ATTR_RULES_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* base
*/
u32 twofish_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
COPY_PW (pws[gid]);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
pw_t tmp = PASTE_PW;
tmp.pw_len = apply_rules (rules_buf[il_pos].cmds, tmp.i, tmp.pw_len);
u32 w[64] = { 0 };
u32 w_len = tmp.pw_len;
for (u32 i = 0; i < 64; i++) w[i] = tmp.i[i];
ripemd160_ctx_t ctx0;
ripemd160_init (&ctx0);
ripemd160_update (&ctx0, tmp.i, tmp.pw_len);
ripemd160_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (twofish_key_len > 128)
{
k4 = ctx0.h[4];
ripemd160_ctx_t ctx;
ripemd160_init (&ctx);
ripemd160_update (&ctx, padding, 1);
ripemd160_update (&ctx, w, w_len);
ripemd160_final (&ctx);
k5 = ctx.h[0];
if (twofish_key_len > 192)
{
k6 = ctx.h[1];
k7 = ctx.h[2];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (twofish_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (twofish_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// twofish
u32 sk1[4] = { 0 };
u32 lk1[40] = { 0 };
if (twofish_key_len == 128)
{
twofish128_set_key (sk1, lk1, ukey);
twofish128_encrypt (sk1, lk1, iv, CT);
}
else if (twofish_key_len == 192)
{
twofish192_set_key (sk1, lk1, ukey);
twofish192_encrypt (sk1, lk1, iv, CT);
}
else
{
twofish256_set_key (sk1, lk1, ukey);
twofish256_encrypt (sk1, lk1, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14543_sxx (KERN_ATTR_RULES_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* base
*/
u32 twofish_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
COPY_PW (pws[gid]);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
pw_t tmp = PASTE_PW;
tmp.pw_len = apply_rules (rules_buf[il_pos].cmds, tmp.i, tmp.pw_len);
u32 w[64] = { 0 };
u32 w_len = tmp.pw_len;
for (u32 i = 0; i < 64; i++) w[i] = tmp.i[i];
ripemd160_ctx_t ctx0;
ripemd160_init (&ctx0);
ripemd160_update (&ctx0, tmp.i, tmp.pw_len);
ripemd160_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (twofish_key_len > 128)
{
k4 = ctx0.h[4];
ripemd160_ctx_t ctx;
ripemd160_init (&ctx);
ripemd160_update (&ctx, padding, 1);
ripemd160_update (&ctx, w, w_len);
ripemd160_final (&ctx);
k5 = ctx.h[0];
if (twofish_key_len > 192)
{
k6 = ctx.h[1];
k7 = ctx.h[2];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (twofish_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (twofish_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// twofish setkey and encrypt
u32 sk1[4] = { 0 };
u32 lk1[40] = { 0 };
if (twofish_key_len == 128)
{
twofish128_set_key (sk1, lk1, ukey);
twofish128_encrypt (sk1, lk1, iv, CT);
}
else if (twofish_key_len == 192)
{
twofish192_set_key (sk1, lk1, ukey);
twofish192_encrypt (sk1, lk1, iv, CT);
}
else
{
twofish256_set_key (sk1, lk1, ukey);
twofish256_encrypt (sk1, lk1, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

OpenCL/m14543_a1-pure.cl Normal file

@@ -0,0 +1,345 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_scalar.cl"
#include "inc_hash_ripemd160.cl"
#include "inc_cipher_twofish.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14543_mxx (KERN_ATTR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* base
*/
u32 twofish_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
ripemd160_ctx_t ctx0, ctx0_padding;
ripemd160_init (&ctx0);
u32 w[64] = { 0 };
u32 w_len = 0;
if (twofish_key_len > 128)
{
w_len = pws[gid].pw_len;
for (u32 i = 0; i < 64; i++) w[i] = pws[gid].i[i];
ctx0_padding = ctx0;
ripemd160_update (&ctx0_padding, padding, 1);
ripemd160_update (&ctx0_padding, w, w_len);
}
ripemd160_update_global (&ctx0, pws[gid].i, pws[gid].pw_len);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
ripemd160_ctx_t ctx = ctx0;
if (twofish_key_len > 128)
{
w_len = combs_buf[il_pos].pw_len;
for (u32 i = 0; i < 64; i++) w[i] = combs_buf[il_pos].i[i];
}
ripemd160_update_global (&ctx, combs_buf[il_pos].i, combs_buf[il_pos].pw_len);
ripemd160_final (&ctx);
const u32 k0 = ctx.h[0];
const u32 k1 = ctx.h[1];
const u32 k2 = ctx.h[2];
const u32 k3 = ctx.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (twofish_key_len > 128)
{
k4 = ctx.h[4];
ripemd160_ctx_t ctx0_tmp = ctx0_padding;
ripemd160_update (&ctx0_tmp, w, w_len);
ripemd160_final (&ctx0_tmp);
k5 = ctx0_tmp.h[0];
if (twofish_key_len > 192)
{
k6 = ctx0_tmp.h[1];
k7 = ctx0_tmp.h[2];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (twofish_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (twofish_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// twofish
u32 sk1[4] = { 0 };
u32 lk1[40] = { 0 };
if (twofish_key_len == 128)
{
twofish128_set_key (sk1, lk1, ukey);
twofish128_encrypt (sk1, lk1, iv, CT);
}
else if (twofish_key_len == 192)
{
twofish192_set_key (sk1, lk1, ukey);
twofish192_encrypt (sk1, lk1, iv, CT);
}
else
{
twofish256_set_key (sk1, lk1, ukey);
twofish256_encrypt (sk1, lk1, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14543_sxx (KERN_ATTR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 twofish_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
ripemd160_ctx_t ctx0, ctx0_padding;
ripemd160_init (&ctx0);
u32 w[64] = { 0 };
u32 w_len = 0;
if (twofish_key_len > 128)
{
w_len = pws[gid].pw_len;
for (u32 i = 0; i < 64; i++) w[i] = pws[gid].i[i];
ctx0_padding = ctx0;
ripemd160_update (&ctx0_padding, padding, 1);
ripemd160_update (&ctx0_padding, w, w_len);
}
ripemd160_update_global (&ctx0, pws[gid].i, pws[gid].pw_len);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
ripemd160_ctx_t ctx = ctx0;
if (twofish_key_len > 128)
{
w_len = combs_buf[il_pos].pw_len;
for (u32 i = 0; i < 64; i++) w[i] = combs_buf[il_pos].i[i];
}
ripemd160_update_global (&ctx, combs_buf[il_pos].i, combs_buf[il_pos].pw_len);
ripemd160_final (&ctx);
const u32 k0 = ctx.h[0];
const u32 k1 = ctx.h[1];
const u32 k2 = ctx.h[2];
const u32 k3 = ctx.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (twofish_key_len > 128)
{
k4 = ctx.h[4];
ripemd160_ctx_t ctx0_tmp = ctx0_padding;
ripemd160_update (&ctx0_tmp, w, w_len);
ripemd160_final (&ctx0_tmp);
k5 = ctx0_tmp.h[0];
if (twofish_key_len > 192)
{
k6 = ctx0_tmp.h[1];
k7 = ctx0_tmp.h[2];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (twofish_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (twofish_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// twofish
u32 sk1[4] = { 0 };
u32 lk1[40] = { 0 };
if (twofish_key_len == 128)
{
twofish128_set_key (sk1, lk1, ukey);
twofish128_encrypt (sk1, lk1, iv, CT);
}
else if (twofish_key_len == 192)
{
twofish192_set_key (sk1, lk1, ukey);
twofish192_encrypt (sk1, lk1, iv, CT);
}
else
{
twofish256_set_key (sk1, lk1, ukey);
twofish256_encrypt (sk1, lk1, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

OpenCL/m14543_a3-pure.cl Normal file

@@ -0,0 +1,343 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_scalar.cl"
#include "inc_hash_ripemd160.cl"
#include "inc_cipher_twofish.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14543_mxx (KERN_ATTR_VECTOR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* base
*/
u32 twofish_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
const u32 pw_len = pws[gid].pw_len;
u32x w[64] = { 0 };
for (u32 i = 0, idx = 0; i < pw_len; i += 4, idx += 1)
{
w[idx] = pws[gid].i[idx];
}
/**
* loop
*/
u32x w0l = w[0];
for (u32 il_pos = 0; il_pos < il_cnt; il_pos += VECT_SIZE)
{
const u32x w0r = words_buf_r[il_pos / VECT_SIZE];
const u32x w0 = w0l | w0r;
w[0] = w0;
u32x _w[64];
u32 _w_len = pw_len;
for (u32 i = 0; i < 64; i++) _w[i] = w[i];
ripemd160_ctx_t ctx0;
ripemd160_init (&ctx0);
ripemd160_update (&ctx0, w, pw_len);
ripemd160_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (twofish_key_len > 128)
{
k4 = ctx0.h[4];
ripemd160_ctx_t ctx;
ripemd160_init (&ctx);
ripemd160_update (&ctx, padding, 1);
ripemd160_update (&ctx, _w, _w_len);
ripemd160_final (&ctx);
k5 = ctx.h[0];
if (twofish_key_len > 192)
{
k6 = ctx.h[1];
k7 = ctx.h[2];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (twofish_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (twofish_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// twofish
u32 sk1[4] = { 0 };
u32 lk1[40] = { 0 };
if (twofish_key_len == 128)
{
twofish128_set_key (sk1, lk1, ukey);
twofish128_encrypt (sk1, lk1, iv, CT);
}
else if (twofish_key_len == 192)
{
twofish192_set_key (sk1, lk1, ukey);
twofish192_encrypt (sk1, lk1, iv, CT);
}
else
{
twofish256_set_key (sk1, lk1, ukey);
twofish256_encrypt (sk1, lk1, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14543_sxx (KERN_ATTR_VECTOR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
if (gid >= gid_max) return;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 twofish_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
u32 padding[64] = { 0 };
padding[0] = 0x00000041;
const u32 pw_len = pws[gid].pw_len;
u32x w[64] = { 0 };
for (u32 i = 0, idx = 0; i < pw_len; i += 4, idx += 1)
{
w[idx] = pws[gid].i[idx];
}
/**
* loop
*/
u32x w0l = w[0];
for (u32 il_pos = 0; il_pos < il_cnt; il_pos += VECT_SIZE)
{
const u32x w0r = words_buf_r[il_pos / VECT_SIZE];
const u32x w0 = w0l | w0r;
w[0] = w0;
u32x _w[64];
u32 _w_len = pw_len;
for (u32 i = 0; i < 64; i++) _w[i] = w[i];
ripemd160_ctx_t ctx0;
ripemd160_init (&ctx0);
ripemd160_update (&ctx0, w, pw_len);
ripemd160_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (twofish_key_len > 128)
{
k4 = ctx0.h[4];
ripemd160_ctx_t ctx;
ripemd160_init (&ctx);
ripemd160_update (&ctx, padding, 1);
ripemd160_update (&ctx, _w, _w_len);
ripemd160_final (&ctx);
k5 = ctx.h[0];
if (twofish_key_len > 192)
{
k6 = ctx.h[1];
k7 = ctx.h[2];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (twofish_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (twofish_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// twofish
u32 sk1[4] = { 0 };
u32 lk1[40] = { 0 };
if (twofish_key_len == 128)
{
twofish128_set_key (sk1, lk1, ukey);
twofish128_encrypt (sk1, lk1, iv, CT);
}
else if (twofish_key_len == 192)
{
twofish192_set_key (sk1, lk1, ukey);
twofish192_encrypt (sk1, lk1, iv, CT);
}
else
{
twofish256_set_key (sk1, lk1, ukey);
twofish256_encrypt (sk1, lk1, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

OpenCL/m14551_a0-pure.cl Normal file

@@ -0,0 +1,410 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_rp.h"
#include "inc_rp.cl"
#include "inc_scalar.cl"
#include "inc_hash_whirlpool.cl"
#include "inc_cipher_aes.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
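// m14551: Whirlpool replaces RIPEMD-160 as the derivation hash; its 512-bit digest
// supplies all eight key words directly, so no second 'A'-padded hash is needed for
// 192/256-bit keys.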
KERNEL_FQ void m14551_mxx (KERN_ATTR_RULES_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* aes/whirlpool shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u32 s_te0[256];
LOCAL_VK u32 s_te1[256];
LOCAL_VK u32 s_te2[256];
LOCAL_VK u32 s_te3[256];
LOCAL_VK u32 s_te4[256];
LOCAL_VK u64 s_MT0[256];
LOCAL_VK u64 s_MT1[256];
LOCAL_VK u64 s_MT2[256];
LOCAL_VK u64 s_MT3[256];
LOCAL_VK u64 s_MT4[256];
LOCAL_VK u64 s_MT5[256];
LOCAL_VK u64 s_MT6[256];
LOCAL_VK u64 s_MT7[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_te0[i] = te0[i];
s_te1[i] = te1[i];
s_te2[i] = te2[i];
s_te3[i] = te3[i];
s_te4[i] = te4[i];
s_MT0[i] = MT0[i];
s_MT1[i] = MT1[i];
s_MT2[i] = MT2[i];
s_MT3[i] = MT3[i];
s_MT4[i] = MT4[i];
s_MT5[i] = MT5[i];
s_MT6[i] = MT6[i];
s_MT7[i] = MT7[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u32a *s_te0 = te0;
CONSTANT_AS u32a *s_te1 = te1;
CONSTANT_AS u32a *s_te2 = te2;
CONSTANT_AS u32a *s_te3 = te3;
CONSTANT_AS u32a *s_te4 = te4;
CONSTANT_AS u64a *s_MT0 = MT0;
CONSTANT_AS u64a *s_MT1 = MT1;
CONSTANT_AS u64a *s_MT2 = MT2;
CONSTANT_AS u64a *s_MT3 = MT3;
CONSTANT_AS u64a *s_MT4 = MT4;
CONSTANT_AS u64a *s_MT5 = MT5;
CONSTANT_AS u64a *s_MT6 = MT6;
CONSTANT_AS u64a *s_MT7 = MT7;
#endif
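// With REAL_SHM the AES and Whirlpool lookup tables above are staged in local memory; otherwise the constant-memory copies are referenced directly.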
if (gid >= gid_max) return;
/**
* base
*/
u32 aes_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
COPY_PW (pws[gid]);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
pw_t tmp = PASTE_PW;
tmp.pw_len = apply_rules (rules_buf[il_pos].cmds, tmp.i, tmp.pw_len);
whirlpool_ctx_t ctx0;
whirlpool_init (&ctx0, s_MT0, s_MT1, s_MT2, s_MT3, s_MT4, s_MT5, s_MT6, s_MT7);
whirlpool_update_swap (&ctx0, tmp.i, tmp.pw_len);
whirlpool_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (aes_key_len > 128)
{
k4 = ctx0.h[4];
k5 = ctx0.h[5];
if (aes_key_len > 192)
{
k6 = ctx0.h[6];
k7 = ctx0.h[7];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (aes_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (aes_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[0]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[1]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[2]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[3])
};
// CT
u32 CT[4] = { 0 };
// aes
u32 ks[60] = { 0 };
if (aes_key_len == 128)
{
AES128_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES128_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else if (aes_key_len == 192)
{
AES192_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES192_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else
{
AES256_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES256_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
const u32 r0 = CT[0];
const u32 r1 = CT[1];
const u32 r2 = CT[2];
const u32 r3 = CT[3];
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14551_sxx (KERN_ATTR_RULES_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* aes/whirlpool shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u32 s_te0[256];
LOCAL_VK u32 s_te1[256];
LOCAL_VK u32 s_te2[256];
LOCAL_VK u32 s_te3[256];
LOCAL_VK u32 s_te4[256];
LOCAL_VK u64 s_MT0[256];
LOCAL_VK u64 s_MT1[256];
LOCAL_VK u64 s_MT2[256];
LOCAL_VK u64 s_MT3[256];
LOCAL_VK u64 s_MT4[256];
LOCAL_VK u64 s_MT5[256];
LOCAL_VK u64 s_MT6[256];
LOCAL_VK u64 s_MT7[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_te0[i] = te0[i];
s_te1[i] = te1[i];
s_te2[i] = te2[i];
s_te3[i] = te3[i];
s_te4[i] = te4[i];
s_MT0[i] = MT0[i];
s_MT1[i] = MT1[i];
s_MT2[i] = MT2[i];
s_MT3[i] = MT3[i];
s_MT4[i] = MT4[i];
s_MT5[i] = MT5[i];
s_MT6[i] = MT6[i];
s_MT7[i] = MT7[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u32a *s_te0 = te0;
CONSTANT_AS u32a *s_te1 = te1;
CONSTANT_AS u32a *s_te2 = te2;
CONSTANT_AS u32a *s_te3 = te3;
CONSTANT_AS u32a *s_te4 = te4;
CONSTANT_AS u64a *s_MT0 = MT0;
CONSTANT_AS u64a *s_MT1 = MT1;
CONSTANT_AS u64a *s_MT2 = MT2;
CONSTANT_AS u64a *s_MT3 = MT3;
CONSTANT_AS u64a *s_MT4 = MT4;
CONSTANT_AS u64a *s_MT5 = MT5;
CONSTANT_AS u64a *s_MT6 = MT6;
CONSTANT_AS u64a *s_MT7 = MT7;
#endif
if (gid >= gid_max) return;
/**
* base
*/
u32 aes_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
COPY_PW (pws[gid]);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
pw_t tmp = PASTE_PW;
tmp.pw_len = apply_rules (rules_buf[il_pos].cmds, tmp.i, tmp.pw_len);
whirlpool_ctx_t ctx0;
whirlpool_init (&ctx0, s_MT0, s_MT1, s_MT2, s_MT3, s_MT4, s_MT5, s_MT6, s_MT7);
whirlpool_update_swap (&ctx0, tmp.i, tmp.pw_len);
whirlpool_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (aes_key_len > 128)
{
k4 = ctx0.h[4];
k5 = ctx0.h[5];
if (aes_key_len > 192)
{
k6 = ctx0.h[6];
k7 = ctx0.h[7];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (aes_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (aes_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[0]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[1]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[2]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[3])
};
// CT
u32 CT[4] = { 0 };
// aes
u32 ks[60] = { 0 };
if (aes_key_len == 128)
{
AES128_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES128_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else if (aes_key_len == 192)
{
AES192_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES192_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else
{
AES256_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES256_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
const u32 r0 = CT[0];
const u32 r1 = CT[1];
const u32 r2 = CT[2];
const u32 r3 = CT[3];
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

OpenCL/m14551_a1-pure.cl Normal file

@@ -0,0 +1,399 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_scalar.cl"
#include "inc_hash_whirlpool.cl"
#include "inc_cipher_aes.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14551_mxx (KERN_ATTR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* aes/whirlpool shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u32 s_te0[256];
LOCAL_VK u32 s_te1[256];
LOCAL_VK u32 s_te2[256];
LOCAL_VK u32 s_te3[256];
LOCAL_VK u32 s_te4[256];
LOCAL_VK u64 s_MT0[256];
LOCAL_VK u64 s_MT1[256];
LOCAL_VK u64 s_MT2[256];
LOCAL_VK u64 s_MT3[256];
LOCAL_VK u64 s_MT4[256];
LOCAL_VK u64 s_MT5[256];
LOCAL_VK u64 s_MT6[256];
LOCAL_VK u64 s_MT7[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_te0[i] = te0[i];
s_te1[i] = te1[i];
s_te2[i] = te2[i];
s_te3[i] = te3[i];
s_te4[i] = te4[i];
s_MT0[i] = MT0[i];
s_MT1[i] = MT1[i];
s_MT2[i] = MT2[i];
s_MT3[i] = MT3[i];
s_MT4[i] = MT4[i];
s_MT5[i] = MT5[i];
s_MT6[i] = MT6[i];
s_MT7[i] = MT7[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u32a *s_te0 = te0;
CONSTANT_AS u32a *s_te1 = te1;
CONSTANT_AS u32a *s_te2 = te2;
CONSTANT_AS u32a *s_te3 = te3;
CONSTANT_AS u32a *s_te4 = te4;
CONSTANT_AS u64a *s_MT0 = MT0;
CONSTANT_AS u64a *s_MT1 = MT1;
CONSTANT_AS u64a *s_MT2 = MT2;
CONSTANT_AS u64a *s_MT3 = MT3;
CONSTANT_AS u64a *s_MT4 = MT4;
CONSTANT_AS u64a *s_MT5 = MT5;
CONSTANT_AS u64a *s_MT6 = MT6;
CONSTANT_AS u64a *s_MT7 = MT7;
#endif
if (gid >= gid_max) return;
/**
* base
*/
u32 aes_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
whirlpool_ctx_t ctx0;
whirlpool_init (&ctx0, s_MT0, s_MT1, s_MT2, s_MT3, s_MT4, s_MT5, s_MT6, s_MT7);
whirlpool_update_global_swap (&ctx0, pws[gid].i, pws[gid].pw_len);
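  // Combinator attack (-a 1): the left-hand word is absorbed into the Whirlpool
  // context once here; inside the loop only the right-hand word from combs_buf is
  // appended to a copy of this context before finalizing.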
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
whirlpool_ctx_t ctx = ctx0;
whirlpool_update_global_swap (&ctx, combs_buf[il_pos].i, combs_buf[il_pos].pw_len);
whirlpool_final (&ctx);
const u32 k0 = ctx.h[0];
const u32 k1 = ctx.h[1];
const u32 k2 = ctx.h[2];
const u32 k3 = ctx.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (aes_key_len > 128)
{
k4 = ctx.h[4];
k5 = ctx.h[5];
if (aes_key_len > 192)
{
k6 = ctx.h[6];
k7 = ctx.h[7];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (aes_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (aes_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[0]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[1]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[2]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[3])
};
// CT
u32 CT[4] = { 0 };
// aes
u32 ks[60] = { 0 };
if (aes_key_len == 128)
{
AES128_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES128_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else if (aes_key_len == 192)
{
AES192_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES192_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else
{
AES256_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES256_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
const u32 r0 = CT[0];
const u32 r1 = CT[1];
const u32 r2 = CT[2];
const u32 r3 = CT[3];
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14551_sxx (KERN_ATTR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* aes/whirlpool shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u32 s_te0[256];
LOCAL_VK u32 s_te1[256];
LOCAL_VK u32 s_te2[256];
LOCAL_VK u32 s_te3[256];
LOCAL_VK u32 s_te4[256];
LOCAL_VK u64 s_MT0[256];
LOCAL_VK u64 s_MT1[256];
LOCAL_VK u64 s_MT2[256];
LOCAL_VK u64 s_MT3[256];
LOCAL_VK u64 s_MT4[256];
LOCAL_VK u64 s_MT5[256];
LOCAL_VK u64 s_MT6[256];
LOCAL_VK u64 s_MT7[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_te0[i] = te0[i];
s_te1[i] = te1[i];
s_te2[i] = te2[i];
s_te3[i] = te3[i];
s_te4[i] = te4[i];
s_MT0[i] = MT0[i];
s_MT1[i] = MT1[i];
s_MT2[i] = MT2[i];
s_MT3[i] = MT3[i];
s_MT4[i] = MT4[i];
s_MT5[i] = MT5[i];
s_MT6[i] = MT6[i];
s_MT7[i] = MT7[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u32a *s_te0 = te0;
CONSTANT_AS u32a *s_te1 = te1;
CONSTANT_AS u32a *s_te2 = te2;
CONSTANT_AS u32a *s_te3 = te3;
CONSTANT_AS u32a *s_te4 = te4;
CONSTANT_AS u64a *s_MT0 = MT0;
CONSTANT_AS u64a *s_MT1 = MT1;
CONSTANT_AS u64a *s_MT2 = MT2;
CONSTANT_AS u64a *s_MT3 = MT3;
CONSTANT_AS u64a *s_MT4 = MT4;
CONSTANT_AS u64a *s_MT5 = MT5;
CONSTANT_AS u64a *s_MT6 = MT6;
CONSTANT_AS u64a *s_MT7 = MT7;
#endif
if (gid >= gid_max) return;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 aes_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
whirlpool_ctx_t ctx0;
whirlpool_init (&ctx0, s_MT0, s_MT1, s_MT2, s_MT3, s_MT4, s_MT5, s_MT6, s_MT7);
whirlpool_update_global_swap (&ctx0, pws[gid].i, pws[gid].pw_len);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
whirlpool_ctx_t ctx = ctx0;
whirlpool_update_global_swap (&ctx, combs_buf[il_pos].i, combs_buf[il_pos].pw_len);
whirlpool_final (&ctx);
const u32 k0 = ctx.h[0];
const u32 k1 = ctx.h[1];
const u32 k2 = ctx.h[2];
const u32 k3 = ctx.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (aes_key_len > 128)
{
k4 = ctx.h[4];
k5 = ctx.h[5];
if (aes_key_len > 192)
{
k6 = ctx.h[6];
k7 = ctx.h[7];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (aes_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (aes_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[0]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[1]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[2]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[3])
};
// CT
u32 CT[4] = { 0 };
// aes
u32 ks[60] = { 0 };
if (aes_key_len == 128)
{
AES128_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES128_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else if (aes_key_len == 192)
{
AES192_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES192_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else
{
AES256_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES256_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
const u32 r0 = CT[0];
const u32 r1 = CT[1];
const u32 r2 = CT[2];
const u32 r3 = CT[3];
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

425
OpenCL/m14551_a3-pure.cl Normal file

@ -0,0 +1,425 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_scalar.cl"
#include "inc_hash_whirlpool.cl"
#include "inc_cipher_aes.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14551_mxx (KERN_ATTR_VECTOR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* aes/whirlpool shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u32 s_te0[256];
LOCAL_VK u32 s_te1[256];
LOCAL_VK u32 s_te2[256];
LOCAL_VK u32 s_te3[256];
LOCAL_VK u32 s_te4[256];
LOCAL_VK u64 s_MT0[256];
LOCAL_VK u64 s_MT1[256];
LOCAL_VK u64 s_MT2[256];
LOCAL_VK u64 s_MT3[256];
LOCAL_VK u64 s_MT4[256];
LOCAL_VK u64 s_MT5[256];
LOCAL_VK u64 s_MT6[256];
LOCAL_VK u64 s_MT7[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_te0[i] = te0[i];
s_te1[i] = te1[i];
s_te2[i] = te2[i];
s_te3[i] = te3[i];
s_te4[i] = te4[i];
s_MT0[i] = MT0[i];
s_MT1[i] = MT1[i];
s_MT2[i] = MT2[i];
s_MT3[i] = MT3[i];
s_MT4[i] = MT4[i];
s_MT5[i] = MT5[i];
s_MT6[i] = MT6[i];
s_MT7[i] = MT7[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u32a *s_te0 = te0;
CONSTANT_AS u32a *s_te1 = te1;
CONSTANT_AS u32a *s_te2 = te2;
CONSTANT_AS u32a *s_te3 = te3;
CONSTANT_AS u32a *s_te4 = te4;
CONSTANT_AS u64a *s_MT0 = MT0;
CONSTANT_AS u64a *s_MT1 = MT1;
CONSTANT_AS u64a *s_MT2 = MT2;
CONSTANT_AS u64a *s_MT3 = MT3;
CONSTANT_AS u64a *s_MT4 = MT4;
CONSTANT_AS u64a *s_MT5 = MT5;
CONSTANT_AS u64a *s_MT6 = MT6;
CONSTANT_AS u64a *s_MT7 = MT7;
#endif
if (gid >= gid_max) return;
/**
* base
*/
u32 aes_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
const u32 pw_len = pws[gid].pw_len;
u32x w[64] = { 0 };
for (u32 i = 0, idx = 0; i < pw_len; i += 4, idx += 1)
{
w[idx] = pws[gid].i[idx];
}
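  // Mask attack (-a 3): the candidate buffer is loaded once per work-item; only the
  // first 32-bit word changes per candidate, so the loop below just ORs the
  // per-candidate bits from words_buf_r into w[0].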
/**
* loop
*/
u32x w0l = w[0];
for (u32 il_pos = 0; il_pos < il_cnt; il_pos += VECT_SIZE)
{
const u32x w0r = words_buf_r[il_pos / VECT_SIZE];
const u32x w0 = w0l | w0r;
w[0] = w0;
whirlpool_ctx_t ctx0;
whirlpool_init (&ctx0, s_MT0, s_MT1, s_MT2, s_MT3, s_MT4, s_MT5, s_MT6, s_MT7);
whirlpool_update_swap (&ctx0, w, pw_len);
whirlpool_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (aes_key_len > 128)
{
k4 = ctx0.h[4];
k5 = ctx0.h[5];
if (aes_key_len > 192)
{
k6 = ctx0.h[6];
k7 = ctx0.h[7];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (aes_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (aes_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[0]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[1]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[2]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[3])
};
// CT
u32 CT[4] = { 0 };
// aes
u32 ks[60] = { 0 };
if (aes_key_len == 128)
{
AES128_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES128_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else if (aes_key_len == 192)
{
AES192_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES192_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else
{
AES256_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES256_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
const u32 r0 = CT[0];
const u32 r1 = CT[1];
const u32 r2 = CT[2];
const u32 r3 = CT[3];
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14551_sxx (KERN_ATTR_VECTOR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* aes/whirlpool shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u32 s_te0[256];
LOCAL_VK u32 s_te1[256];
LOCAL_VK u32 s_te2[256];
LOCAL_VK u32 s_te3[256];
LOCAL_VK u32 s_te4[256];
LOCAL_VK u64 s_MT0[256];
LOCAL_VK u64 s_MT1[256];
LOCAL_VK u64 s_MT2[256];
LOCAL_VK u64 s_MT3[256];
LOCAL_VK u64 s_MT4[256];
LOCAL_VK u64 s_MT5[256];
LOCAL_VK u64 s_MT6[256];
LOCAL_VK u64 s_MT7[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_te0[i] = te0[i];
s_te1[i] = te1[i];
s_te2[i] = te2[i];
s_te3[i] = te3[i];
s_te4[i] = te4[i];
s_MT0[i] = MT0[i];
s_MT1[i] = MT1[i];
s_MT2[i] = MT2[i];
s_MT3[i] = MT3[i];
s_MT4[i] = MT4[i];
s_MT5[i] = MT5[i];
s_MT6[i] = MT6[i];
s_MT7[i] = MT7[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u32a *s_te0 = te0;
CONSTANT_AS u32a *s_te1 = te1;
CONSTANT_AS u32a *s_te2 = te2;
CONSTANT_AS u32a *s_te3 = te3;
CONSTANT_AS u32a *s_te4 = te4;
CONSTANT_AS u64a *s_MT0 = MT0;
CONSTANT_AS u64a *s_MT1 = MT1;
CONSTANT_AS u64a *s_MT2 = MT2;
CONSTANT_AS u64a *s_MT3 = MT3;
CONSTANT_AS u64a *s_MT4 = MT4;
CONSTANT_AS u64a *s_MT5 = MT5;
CONSTANT_AS u64a *s_MT6 = MT6;
CONSTANT_AS u64a *s_MT7 = MT7;
#endif
if (gid >= gid_max) return;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 aes_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
const u32 pw_len = pws[gid].pw_len;
u32x w[64] = { 0 };
for (u32 i = 0, idx = 0; i < pw_len; i += 4, idx += 1)
{
w[idx] = pws[gid].i[idx];
}
/**
* loop
*/
u32x w0l = w[0];
for (u32 il_pos = 0; il_pos < il_cnt; il_pos += VECT_SIZE)
{
const u32x w0r = words_buf_r[il_pos / VECT_SIZE];
const u32x w0 = w0l | w0r;
w[0] = w0;
whirlpool_ctx_t ctx0;
whirlpool_init (&ctx0, s_MT0, s_MT1, s_MT2, s_MT3, s_MT4, s_MT5, s_MT6, s_MT7);
whirlpool_update_swap (&ctx0, w, pw_len);
whirlpool_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (aes_key_len > 128)
{
k4 = ctx0.h[4];
k5 = ctx0.h[5];
if (aes_key_len > 192)
{
k6 = ctx0.h[6];
k7 = ctx0.h[7];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = k0;
ukey[1] = k1;
ukey[2] = k2;
ukey[3] = k3;
if (aes_key_len > 128)
{
ukey[4] = k4;
ukey[5] = k5;
if (aes_key_len > 192)
{
ukey[6] = k6;
ukey[7] = k7;
}
}
// IV
const u32 iv[4] = {
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[0]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[1]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[2]),
hc_swap32_S(salt_bufs[SALT_POS].salt_buf[3])
};
// CT
u32 CT[4] = { 0 };
// aes
u32 ks[60] = { 0 };
if (aes_key_len == 128)
{
AES128_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES128_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else if (aes_key_len == 192)
{
AES192_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES192_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
else
{
AES256_set_encrypt_key (ks, ukey, s_te0, s_te1, s_te2, s_te3);
AES256_encrypt (ks, iv, CT, s_te0, s_te1, s_te2, s_te3, s_te4);
}
const u32 r0 = CT[0];
const u32 r1 = CT[1];
const u32 r2 = CT[2];
const u32 r3 = CT[3];
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

373
OpenCL/m14552_a0-pure.cl Normal file

@ -0,0 +1,373 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_rp.h"
#include "inc_rp.cl"
#include "inc_scalar.cl"
#include "inc_hash_whirlpool.cl"
#include "inc_cipher_serpent.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14552_mxx (KERN_ATTR_RULES_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* whirlpool shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u64 s_MT0[256];
LOCAL_VK u64 s_MT1[256];
LOCAL_VK u64 s_MT2[256];
LOCAL_VK u64 s_MT3[256];
LOCAL_VK u64 s_MT4[256];
LOCAL_VK u64 s_MT5[256];
LOCAL_VK u64 s_MT6[256];
LOCAL_VK u64 s_MT7[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_MT0[i] = MT0[i];
s_MT1[i] = MT1[i];
s_MT2[i] = MT2[i];
s_MT3[i] = MT3[i];
s_MT4[i] = MT4[i];
s_MT5[i] = MT5[i];
s_MT6[i] = MT6[i];
s_MT7[i] = MT7[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u64a *s_MT0 = MT0;
CONSTANT_AS u64a *s_MT1 = MT1;
CONSTANT_AS u64a *s_MT2 = MT2;
CONSTANT_AS u64a *s_MT3 = MT3;
CONSTANT_AS u64a *s_MT4 = MT4;
CONSTANT_AS u64a *s_MT5 = MT5;
CONSTANT_AS u64a *s_MT6 = MT6;
CONSTANT_AS u64a *s_MT7 = MT7;
#endif
if (gid >= gid_max) return;
/**
* base
*/
u32 serpent_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
COPY_PW (pws[gid]);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
pw_t tmp = PASTE_PW;
tmp.pw_len = apply_rules (rules_buf[il_pos].cmds, tmp.i, tmp.pw_len);
whirlpool_ctx_t ctx0;
whirlpool_init (&ctx0, s_MT0, s_MT1, s_MT2, s_MT3, s_MT4, s_MT5, s_MT6, s_MT7);
whirlpool_update_swap (&ctx0, tmp.i, tmp.pw_len);
whirlpool_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (serpent_key_len > 128)
{
k4 = ctx0.h[4];
k5 = ctx0.h[5];
if (serpent_key_len > 192)
{
k6 = ctx0.h[6];
k7 = ctx0.h[7];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (serpent_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (serpent_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
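  // Unlike the AES kernels, the Serpent (and Twofish) variants byte-swap the
  // Whirlpool digest words with hc_swap32_S before the key setup, use the salt
  // words unswapped as the input block, and swap the ciphertext back before the
  // comparison; the AES path does the opposite (swaps the IV, compares CT as-is).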
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// serpent
u32 ks[140] = { 0 };
if (serpent_key_len == 128)
{
serpent128_set_key (ks, ukey);
serpent128_encrypt (ks, iv, CT);
}
else if (serpent_key_len == 192)
{
serpent192_set_key (ks, ukey);
serpent192_encrypt (ks, iv, CT);
}
else
{
serpent256_set_key (ks, ukey);
serpent256_encrypt (ks, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14552_sxx (KERN_ATTR_RULES_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* whirlpool shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u64 s_MT0[256];
LOCAL_VK u64 s_MT1[256];
LOCAL_VK u64 s_MT2[256];
LOCAL_VK u64 s_MT3[256];
LOCAL_VK u64 s_MT4[256];
LOCAL_VK u64 s_MT5[256];
LOCAL_VK u64 s_MT6[256];
LOCAL_VK u64 s_MT7[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_MT0[i] = MT0[i];
s_MT1[i] = MT1[i];
s_MT2[i] = MT2[i];
s_MT3[i] = MT3[i];
s_MT4[i] = MT4[i];
s_MT5[i] = MT5[i];
s_MT6[i] = MT6[i];
s_MT7[i] = MT7[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u64a *s_MT0 = MT0;
CONSTANT_AS u64a *s_MT1 = MT1;
CONSTANT_AS u64a *s_MT2 = MT2;
CONSTANT_AS u64a *s_MT3 = MT3;
CONSTANT_AS u64a *s_MT4 = MT4;
CONSTANT_AS u64a *s_MT5 = MT5;
CONSTANT_AS u64a *s_MT6 = MT6;
CONSTANT_AS u64a *s_MT7 = MT7;
#endif
if (gid >= gid_max) return;
/**
* base
*/
u32 serpent_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
COPY_PW (pws[gid]);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
pw_t tmp = PASTE_PW;
tmp.pw_len = apply_rules (rules_buf[il_pos].cmds, tmp.i, tmp.pw_len);
whirlpool_ctx_t ctx0;
whirlpool_init (&ctx0, s_MT0, s_MT1, s_MT2, s_MT3, s_MT4, s_MT5, s_MT6, s_MT7);
whirlpool_update_swap (&ctx0, tmp.i, tmp.pw_len);
whirlpool_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (serpent_key_len > 128)
{
k4 = ctx0.h[4];
k5 = ctx0.h[5];
if (serpent_key_len > 192)
{
k6 = ctx0.h[6];
k7 = ctx0.h[7];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (serpent_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (serpent_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// serpent
u32 ks[140] = { 0 };
if (serpent_key_len == 128)
{
serpent128_set_key (ks, ukey);
serpent128_encrypt (ks, iv, CT);
}
else if (serpent_key_len == 192)
{
serpent192_set_key (ks, ukey);
serpent192_encrypt (ks, iv, CT);
}
else
{
serpent256_set_key (ks, ukey);
serpent256_encrypt (ks, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

363
OpenCL/m14552_a1-pure.cl Normal file

@ -0,0 +1,363 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_scalar.cl"
#include "inc_hash_whirlpool.cl"
#include "inc_cipher_serpent.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14552_mxx (KERN_ATTR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* whirlpool shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u64 s_MT0[256];
LOCAL_VK u64 s_MT1[256];
LOCAL_VK u64 s_MT2[256];
LOCAL_VK u64 s_MT3[256];
LOCAL_VK u64 s_MT4[256];
LOCAL_VK u64 s_MT5[256];
LOCAL_VK u64 s_MT6[256];
LOCAL_VK u64 s_MT7[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_MT0[i] = MT0[i];
s_MT1[i] = MT1[i];
s_MT2[i] = MT2[i];
s_MT3[i] = MT3[i];
s_MT4[i] = MT4[i];
s_MT5[i] = MT5[i];
s_MT6[i] = MT6[i];
s_MT7[i] = MT7[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u64a *s_MT0 = MT0;
CONSTANT_AS u64a *s_MT1 = MT1;
CONSTANT_AS u64a *s_MT2 = MT2;
CONSTANT_AS u64a *s_MT3 = MT3;
CONSTANT_AS u64a *s_MT4 = MT4;
CONSTANT_AS u64a *s_MT5 = MT5;
CONSTANT_AS u64a *s_MT6 = MT6;
CONSTANT_AS u64a *s_MT7 = MT7;
#endif
if (gid >= gid_max) return;
/**
* base
*/
u32 serpent_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
whirlpool_ctx_t ctx0;
whirlpool_init (&ctx0, s_MT0, s_MT1, s_MT2, s_MT3, s_MT4, s_MT5, s_MT6, s_MT7);
whirlpool_update_global_swap (&ctx0, pws[gid].i, pws[gid].pw_len);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
whirlpool_ctx_t ctx = ctx0;
whirlpool_update_global_swap (&ctx, combs_buf[il_pos].i, combs_buf[il_pos].pw_len);
whirlpool_final (&ctx);
const u32 k0 = ctx.h[0];
const u32 k1 = ctx.h[1];
const u32 k2 = ctx.h[2];
const u32 k3 = ctx.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (serpent_key_len > 128)
{
k4 = ctx.h[4];
k5 = ctx.h[5];
if (serpent_key_len > 192)
{
k6 = ctx.h[6];
k7 = ctx.h[7];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (serpent_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (serpent_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// serpent
u32 ks[140] = { 0 };
if (serpent_key_len == 128)
{
serpent128_set_key (ks, ukey);
serpent128_encrypt (ks, iv, CT);
}
else if (serpent_key_len == 192)
{
serpent192_set_key (ks, ukey);
serpent192_encrypt (ks, iv, CT);
}
else
{
serpent256_set_key (ks, ukey);
serpent256_encrypt (ks, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14552_sxx (KERN_ATTR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* whirlpool shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u64 s_MT0[256];
LOCAL_VK u64 s_MT1[256];
LOCAL_VK u64 s_MT2[256];
LOCAL_VK u64 s_MT3[256];
LOCAL_VK u64 s_MT4[256];
LOCAL_VK u64 s_MT5[256];
LOCAL_VK u64 s_MT6[256];
LOCAL_VK u64 s_MT7[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_MT0[i] = MT0[i];
s_MT1[i] = MT1[i];
s_MT2[i] = MT2[i];
s_MT3[i] = MT3[i];
s_MT4[i] = MT4[i];
s_MT5[i] = MT5[i];
s_MT6[i] = MT6[i];
s_MT7[i] = MT7[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u64a *s_MT0 = MT0;
CONSTANT_AS u64a *s_MT1 = MT1;
CONSTANT_AS u64a *s_MT2 = MT2;
CONSTANT_AS u64a *s_MT3 = MT3;
CONSTANT_AS u64a *s_MT4 = MT4;
CONSTANT_AS u64a *s_MT5 = MT5;
CONSTANT_AS u64a *s_MT6 = MT6;
CONSTANT_AS u64a *s_MT7 = MT7;
#endif
if (gid >= gid_max) return;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 serpent_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
whirlpool_ctx_t ctx0;
whirlpool_init (&ctx0, s_MT0, s_MT1, s_MT2, s_MT3, s_MT4, s_MT5, s_MT6, s_MT7);
whirlpool_update_global_swap (&ctx0, pws[gid].i, pws[gid].pw_len);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
whirlpool_ctx_t ctx = ctx0;
whirlpool_update_global_swap (&ctx, combs_buf[il_pos].i, combs_buf[il_pos].pw_len);
whirlpool_final (&ctx);
const u32 k0 = ctx.h[0];
const u32 k1 = ctx.h[1];
const u32 k2 = ctx.h[2];
const u32 k3 = ctx.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (serpent_key_len > 128)
{
k4 = ctx.h[4];
k5 = ctx.h[5];
if (serpent_key_len > 192)
{
k6 = ctx.h[6];
k7 = ctx.h[7];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (serpent_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (serpent_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// serpent
u32 ks[140] = { 0 };
if (serpent_key_len == 128)
{
serpent128_set_key (ks, ukey);
serpent128_encrypt (ks, iv, CT);
}
else if (serpent_key_len == 192)
{
serpent192_set_key (ks, ukey);
serpent192_encrypt (ks, iv, CT);
}
else
{
serpent256_set_key (ks, ukey);
serpent256_encrypt (ks, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

389
OpenCL/m14552_a3-pure.cl Normal file

@ -0,0 +1,389 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_scalar.cl"
#include "inc_hash_whirlpool.cl"
#include "inc_cipher_serpent.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14552_mxx (KERN_ATTR_VECTOR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* whirlpool shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u64 s_MT0[256];
LOCAL_VK u64 s_MT1[256];
LOCAL_VK u64 s_MT2[256];
LOCAL_VK u64 s_MT3[256];
LOCAL_VK u64 s_MT4[256];
LOCAL_VK u64 s_MT5[256];
LOCAL_VK u64 s_MT6[256];
LOCAL_VK u64 s_MT7[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_MT0[i] = MT0[i];
s_MT1[i] = MT1[i];
s_MT2[i] = MT2[i];
s_MT3[i] = MT3[i];
s_MT4[i] = MT4[i];
s_MT5[i] = MT5[i];
s_MT6[i] = MT6[i];
s_MT7[i] = MT7[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u64a *s_MT0 = MT0;
CONSTANT_AS u64a *s_MT1 = MT1;
CONSTANT_AS u64a *s_MT2 = MT2;
CONSTANT_AS u64a *s_MT3 = MT3;
CONSTANT_AS u64a *s_MT4 = MT4;
CONSTANT_AS u64a *s_MT5 = MT5;
CONSTANT_AS u64a *s_MT6 = MT6;
CONSTANT_AS u64a *s_MT7 = MT7;
#endif
if (gid >= gid_max) return;
/**
* base
*/
u32 serpent_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
const u32 pw_len = pws[gid].pw_len;
u32x w[64] = { 0 };
for (u32 i = 0, idx = 0; i < pw_len; i += 4, idx += 1)
{
w[idx] = pws[gid].i[idx];
}
/**
* loop
*/
u32x w0l = w[0];
for (u32 il_pos = 0; il_pos < il_cnt; il_pos += VECT_SIZE)
{
const u32x w0r = words_buf_r[il_pos / VECT_SIZE];
const u32x w0 = w0l | w0r;
w[0] = w0;
whirlpool_ctx_t ctx0;
whirlpool_init (&ctx0, s_MT0, s_MT1, s_MT2, s_MT3, s_MT4, s_MT5, s_MT6, s_MT7);
whirlpool_update_swap (&ctx0, w, pw_len);
whirlpool_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (serpent_key_len > 128)
{
k4 = ctx0.h[4];
k5 = ctx0.h[5];
if (serpent_key_len > 192)
{
k6 = ctx0.h[6];
k7 = ctx0.h[7];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (serpent_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (serpent_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// serpent
u32 ks[140] = { 0 };
if (serpent_key_len == 128)
{
serpent128_set_key (ks, ukey);
serpent128_encrypt (ks, iv, CT);
}
else if (serpent_key_len == 192)
{
serpent192_set_key (ks, ukey);
serpent192_encrypt (ks, iv, CT);
}
else
{
serpent256_set_key (ks, ukey);
serpent256_encrypt (ks, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14552_sxx (KERN_ATTR_VECTOR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* whirlpool shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u64 s_MT0[256];
LOCAL_VK u64 s_MT1[256];
LOCAL_VK u64 s_MT2[256];
LOCAL_VK u64 s_MT3[256];
LOCAL_VK u64 s_MT4[256];
LOCAL_VK u64 s_MT5[256];
LOCAL_VK u64 s_MT6[256];
LOCAL_VK u64 s_MT7[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_MT0[i] = MT0[i];
s_MT1[i] = MT1[i];
s_MT2[i] = MT2[i];
s_MT3[i] = MT3[i];
s_MT4[i] = MT4[i];
s_MT5[i] = MT5[i];
s_MT6[i] = MT6[i];
s_MT7[i] = MT7[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u64a *s_MT0 = MT0;
CONSTANT_AS u64a *s_MT1 = MT1;
CONSTANT_AS u64a *s_MT2 = MT2;
CONSTANT_AS u64a *s_MT3 = MT3;
CONSTANT_AS u64a *s_MT4 = MT4;
CONSTANT_AS u64a *s_MT5 = MT5;
CONSTANT_AS u64a *s_MT6 = MT6;
CONSTANT_AS u64a *s_MT7 = MT7;
#endif
if (gid >= gid_max) return;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 serpent_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
const u32 pw_len = pws[gid].pw_len;
u32x w[64] = { 0 };
for (u32 i = 0, idx = 0; i < pw_len; i += 4, idx += 1)
{
w[idx] = pws[gid].i[idx];
}
/**
* loop
*/
u32x w0l = w[0];
for (u32 il_pos = 0; il_pos < il_cnt; il_pos += VECT_SIZE)
{
const u32x w0r = words_buf_r[il_pos / VECT_SIZE];
const u32x w0 = w0l | w0r;
w[0] = w0;
whirlpool_ctx_t ctx0;
whirlpool_init (&ctx0, s_MT0, s_MT1, s_MT2, s_MT3, s_MT4, s_MT5, s_MT6, s_MT7);
whirlpool_update_swap (&ctx0, w, pw_len);
whirlpool_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (serpent_key_len > 128)
{
k4 = ctx0.h[4];
k5 = ctx0.h[5];
if (serpent_key_len > 192)
{
k6 = ctx0.h[6];
k7 = ctx0.h[7];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (serpent_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (serpent_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// serpent
u32 ks[140] = { 0 };
if (serpent_key_len == 128)
{
serpent128_set_key (ks, ukey);
serpent128_encrypt (ks, iv, CT);
}
else if (serpent_key_len == 192)
{
serpent192_set_key (ks, ukey);
serpent192_encrypt (ks, iv, CT);
}
else
{
serpent256_set_key (ks, ukey);
serpent256_encrypt (ks, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

375
OpenCL/m14553_a0-pure.cl Normal file

@ -0,0 +1,375 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_rp.h"
#include "inc_rp.cl"
#include "inc_scalar.cl"
#include "inc_hash_whirlpool.cl"
#include "inc_cipher_twofish.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14553_mxx (KERN_ATTR_RULES_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* whirlpool shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u64 s_MT0[256];
LOCAL_VK u64 s_MT1[256];
LOCAL_VK u64 s_MT2[256];
LOCAL_VK u64 s_MT3[256];
LOCAL_VK u64 s_MT4[256];
LOCAL_VK u64 s_MT5[256];
LOCAL_VK u64 s_MT6[256];
LOCAL_VK u64 s_MT7[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_MT0[i] = MT0[i];
s_MT1[i] = MT1[i];
s_MT2[i] = MT2[i];
s_MT3[i] = MT3[i];
s_MT4[i] = MT4[i];
s_MT5[i] = MT5[i];
s_MT6[i] = MT6[i];
s_MT7[i] = MT7[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u64a *s_MT0 = MT0;
CONSTANT_AS u64a *s_MT1 = MT1;
CONSTANT_AS u64a *s_MT2 = MT2;
CONSTANT_AS u64a *s_MT3 = MT3;
CONSTANT_AS u64a *s_MT4 = MT4;
CONSTANT_AS u64a *s_MT5 = MT5;
CONSTANT_AS u64a *s_MT6 = MT6;
CONSTANT_AS u64a *s_MT7 = MT7;
#endif
if (gid >= gid_max) return;
/**
* base
*/
u32 twofish_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
COPY_PW (pws[gid]);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
pw_t tmp = PASTE_PW;
tmp.pw_len = apply_rules (rules_buf[il_pos].cmds, tmp.i, tmp.pw_len);
whirlpool_ctx_t ctx0;
whirlpool_init (&ctx0, s_MT0, s_MT1, s_MT2, s_MT3, s_MT4, s_MT5, s_MT6, s_MT7);
whirlpool_update_swap (&ctx0, tmp.i, tmp.pw_len);
whirlpool_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (twofish_key_len > 128)
{
k4 = ctx0.h[4];
k5 = ctx0.h[5];
if (twofish_key_len > 192)
{
k6 = ctx0.h[6];
k7 = ctx0.h[7];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (twofish_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (twofish_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// twofish
u32 sk1[4] = { 0 };
u32 lk1[40] = { 0 };
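  // sk1/lk1 hold the Twofish key material produced by twofishXXX_set_key:
  // presumably the four key-dependent S-box words and the 40-word subkey schedule,
  // respectively (see inc_cipher_twofish.cl).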
if (twofish_key_len == 128)
{
twofish128_set_key (sk1, lk1, ukey);
twofish128_encrypt (sk1, lk1, iv, CT);
}
else if (twofish_key_len == 192)
{
twofish192_set_key (sk1, lk1, ukey);
twofish192_encrypt (sk1, lk1, iv, CT);
}
else
{
twofish256_set_key (sk1, lk1, ukey);
twofish256_encrypt (sk1, lk1, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14553_sxx (KERN_ATTR_RULES_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* whirlpool shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u64 s_MT0[256];
LOCAL_VK u64 s_MT1[256];
LOCAL_VK u64 s_MT2[256];
LOCAL_VK u64 s_MT3[256];
LOCAL_VK u64 s_MT4[256];
LOCAL_VK u64 s_MT5[256];
LOCAL_VK u64 s_MT6[256];
LOCAL_VK u64 s_MT7[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_MT0[i] = MT0[i];
s_MT1[i] = MT1[i];
s_MT2[i] = MT2[i];
s_MT3[i] = MT3[i];
s_MT4[i] = MT4[i];
s_MT5[i] = MT5[i];
s_MT6[i] = MT6[i];
s_MT7[i] = MT7[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u64a *s_MT0 = MT0;
CONSTANT_AS u64a *s_MT1 = MT1;
CONSTANT_AS u64a *s_MT2 = MT2;
CONSTANT_AS u64a *s_MT3 = MT3;
CONSTANT_AS u64a *s_MT4 = MT4;
CONSTANT_AS u64a *s_MT5 = MT5;
CONSTANT_AS u64a *s_MT6 = MT6;
CONSTANT_AS u64a *s_MT7 = MT7;
#endif
if (gid >= gid_max) return;
/**
* base
*/
u32 twofish_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
COPY_PW (pws[gid]);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
pw_t tmp = PASTE_PW;
tmp.pw_len = apply_rules (rules_buf[il_pos].cmds, tmp.i, tmp.pw_len);
whirlpool_ctx_t ctx0;
whirlpool_init (&ctx0, s_MT0, s_MT1, s_MT2, s_MT3, s_MT4, s_MT5, s_MT6, s_MT7);
whirlpool_update_swap (&ctx0, tmp.i, tmp.pw_len);
whirlpool_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (twofish_key_len > 128)
{
k4 = ctx0.h[4];
k5 = ctx0.h[5];
if (twofish_key_len > 192)
{
k6 = ctx0.h[6];
k7 = ctx0.h[7];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (twofish_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (twofish_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// twofish
u32 sk1[4] = { 0 };
u32 lk1[40] = { 0 };
if (twofish_key_len == 128)
{
twofish128_set_key (sk1, lk1, ukey);
twofish128_encrypt (sk1, lk1, iv, CT);
}
else if (twofish_key_len == 192)
{
twofish192_set_key (sk1, lk1, ukey);
twofish192_encrypt (sk1, lk1, iv, CT);
}
else
{
twofish256_set_key (sk1, lk1, ukey);
twofish256_encrypt (sk1, lk1, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

365
OpenCL/m14553_a1-pure.cl Normal file

@ -0,0 +1,365 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_scalar.cl"
#include "inc_hash_whirlpool.cl"
#include "inc_cipher_twofish.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14553_mxx (KERN_ATTR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* whirlpool shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u64 s_MT0[256];
LOCAL_VK u64 s_MT1[256];
LOCAL_VK u64 s_MT2[256];
LOCAL_VK u64 s_MT3[256];
LOCAL_VK u64 s_MT4[256];
LOCAL_VK u64 s_MT5[256];
LOCAL_VK u64 s_MT6[256];
LOCAL_VK u64 s_MT7[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_MT0[i] = MT0[i];
s_MT1[i] = MT1[i];
s_MT2[i] = MT2[i];
s_MT3[i] = MT3[i];
s_MT4[i] = MT4[i];
s_MT5[i] = MT5[i];
s_MT6[i] = MT6[i];
s_MT7[i] = MT7[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u64a *s_MT0 = MT0;
CONSTANT_AS u64a *s_MT1 = MT1;
CONSTANT_AS u64a *s_MT2 = MT2;
CONSTANT_AS u64a *s_MT3 = MT3;
CONSTANT_AS u64a *s_MT4 = MT4;
CONSTANT_AS u64a *s_MT5 = MT5;
CONSTANT_AS u64a *s_MT6 = MT6;
CONSTANT_AS u64a *s_MT7 = MT7;
#endif
if (gid >= gid_max) return;
/**
* base
*/
u32 twofish_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
whirlpool_ctx_t ctx0;
whirlpool_init (&ctx0, s_MT0, s_MT1, s_MT2, s_MT3, s_MT4, s_MT5, s_MT6, s_MT7);
whirlpool_update_global_swap (&ctx0, pws[gid].i, pws[gid].pw_len);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
whirlpool_ctx_t ctx = ctx0;
whirlpool_update_global_swap (&ctx, combs_buf[il_pos].i, combs_buf[il_pos].pw_len);
whirlpool_final (&ctx);
const u32 k0 = ctx.h[0];
const u32 k1 = ctx.h[1];
const u32 k2 = ctx.h[2];
const u32 k3 = ctx.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (twofish_key_len > 128)
{
k4 = ctx.h[4];
k5 = ctx.h[5];
if (twofish_key_len > 192)
{
k6 = ctx.h[6];
k7 = ctx.h[7];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (twofish_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (twofish_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// twofish
u32 sk1[4] = { 0 };
u32 lk1[40] = { 0 };
if (twofish_key_len == 128)
{
twofish128_set_key (sk1, lk1, ukey);
twofish128_encrypt (sk1, lk1, iv, CT);
}
else if (twofish_key_len == 192)
{
twofish192_set_key (sk1, lk1, ukey);
twofish192_encrypt (sk1, lk1, iv, CT);
}
else
{
twofish256_set_key (sk1, lk1, ukey);
twofish256_encrypt (sk1, lk1, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14553_sxx (KERN_ATTR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* whirlpool shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u64 s_MT0[256];
LOCAL_VK u64 s_MT1[256];
LOCAL_VK u64 s_MT2[256];
LOCAL_VK u64 s_MT3[256];
LOCAL_VK u64 s_MT4[256];
LOCAL_VK u64 s_MT5[256];
LOCAL_VK u64 s_MT6[256];
LOCAL_VK u64 s_MT7[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_MT0[i] = MT0[i];
s_MT1[i] = MT1[i];
s_MT2[i] = MT2[i];
s_MT3[i] = MT3[i];
s_MT4[i] = MT4[i];
s_MT5[i] = MT5[i];
s_MT6[i] = MT6[i];
s_MT7[i] = MT7[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u64a *s_MT0 = MT0;
CONSTANT_AS u64a *s_MT1 = MT1;
CONSTANT_AS u64a *s_MT2 = MT2;
CONSTANT_AS u64a *s_MT3 = MT3;
CONSTANT_AS u64a *s_MT4 = MT4;
CONSTANT_AS u64a *s_MT5 = MT5;
CONSTANT_AS u64a *s_MT6 = MT6;
CONSTANT_AS u64a *s_MT7 = MT7;
#endif
if (gid >= gid_max) return;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 twofish_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
whirlpool_ctx_t ctx0;
whirlpool_init (&ctx0, s_MT0, s_MT1, s_MT2, s_MT3, s_MT4, s_MT5, s_MT6, s_MT7);
whirlpool_update_global_swap (&ctx0, pws[gid].i, pws[gid].pw_len);
/**
* loop
*/
for (u32 il_pos = 0; il_pos < il_cnt; il_pos++)
{
whirlpool_ctx_t ctx = ctx0;
whirlpool_update_global_swap (&ctx, combs_buf[il_pos].i, combs_buf[il_pos].pw_len);
whirlpool_final (&ctx);
const u32 k0 = ctx.h[0];
const u32 k1 = ctx.h[1];
const u32 k2 = ctx.h[2];
const u32 k3 = ctx.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (twofish_key_len > 128)
{
k4 = ctx.h[4];
k5 = ctx.h[5];
if (twofish_key_len > 192)
{
k6 = ctx.h[6];
k7 = ctx.h[7];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (twofish_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (twofish_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// twofish
u32 sk1[4] = { 0 };
u32 lk1[40] = { 0 };
if (twofish_key_len == 128)
{
twofish128_set_key (sk1, lk1, ukey);
twofish128_encrypt (sk1, lk1, iv, CT);
}
else if (twofish_key_len == 192)
{
twofish192_set_key (sk1, lk1, ukey);
twofish192_encrypt (sk1, lk1, iv, CT);
}
else
{
twofish256_set_key (sk1, lk1, ukey);
twofish256_encrypt (sk1, lk1, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}

391
OpenCL/m14553_a3-pure.cl Normal file

@ -0,0 +1,391 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
//#define NEW_SIMD_CODE
#ifdef KERNEL_STATIC
#include "inc_vendor.h"
#include "inc_types.h"
#include "inc_platform.cl"
#include "inc_common.cl"
#include "inc_scalar.cl"
#include "inc_hash_whirlpool.cl"
#include "inc_cipher_twofish.cl"
#endif
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
} cryptoapi_t;
KERNEL_FQ void m14553_mxx (KERN_ATTR_VECTOR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* whirlpool shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u64 s_MT0[256];
LOCAL_VK u64 s_MT1[256];
LOCAL_VK u64 s_MT2[256];
LOCAL_VK u64 s_MT3[256];
LOCAL_VK u64 s_MT4[256];
LOCAL_VK u64 s_MT5[256];
LOCAL_VK u64 s_MT6[256];
LOCAL_VK u64 s_MT7[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_MT0[i] = MT0[i];
s_MT1[i] = MT1[i];
s_MT2[i] = MT2[i];
s_MT3[i] = MT3[i];
s_MT4[i] = MT4[i];
s_MT5[i] = MT5[i];
s_MT6[i] = MT6[i];
s_MT7[i] = MT7[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u64a *s_MT0 = MT0;
CONSTANT_AS u64a *s_MT1 = MT1;
CONSTANT_AS u64a *s_MT2 = MT2;
CONSTANT_AS u64a *s_MT3 = MT3;
CONSTANT_AS u64a *s_MT4 = MT4;
CONSTANT_AS u64a *s_MT5 = MT5;
CONSTANT_AS u64a *s_MT6 = MT6;
CONSTANT_AS u64a *s_MT7 = MT7;
#endif
if (gid >= gid_max) return;
/**
* base
*/
u32 twofish_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
const u32 pw_len = pws[gid].pw_len;
u32x w[64] = { 0 };
for (u32 i = 0, idx = 0; i < pw_len; i += 4, idx += 1)
{
w[idx] = pws[gid].i[idx];
}
/**
* loop
*/
u32x w0l = w[0];
for (u32 il_pos = 0; il_pos < il_cnt; il_pos += VECT_SIZE)
{
const u32x w0r = words_buf_r[il_pos / VECT_SIZE];
const u32x w0 = w0l | w0r;
w[0] = w0;
whirlpool_ctx_t ctx0;
whirlpool_init (&ctx0, s_MT0, s_MT1, s_MT2, s_MT3, s_MT4, s_MT5, s_MT6, s_MT7);
whirlpool_update_swap (&ctx0, w, pw_len);
whirlpool_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (twofish_key_len > 128)
{
k4 = ctx0.h[4];
k5 = ctx0.h[5];
if (twofish_key_len > 192)
{
k6 = ctx0.h[6];
k7 = ctx0.h[7];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (twofish_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (twofish_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// twofish
u32 sk1[4] = { 0 };
u32 lk1[40] = { 0 };
if (twofish_key_len == 128)
{
twofish128_set_key (sk1, lk1, ukey);
twofish128_encrypt (sk1, lk1, iv, CT);
}
else if (twofish_key_len == 192)
{
twofish192_set_key (sk1, lk1, ukey);
twofish192_encrypt (sk1, lk1, iv, CT);
}
else
{
twofish256_set_key (sk1, lk1, ukey);
twofish256_encrypt (sk1, lk1, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_M_SCALAR (r0, r1, r2, r3);
}
}
KERNEL_FQ void m14553_sxx (KERN_ATTR_VECTOR_ESALT (cryptoapi_t))
{
/**
* modifier
*/
const u64 gid = get_global_id (0);
/**
* whirlpool shared
*/
#ifdef REAL_SHM
const u64 lid = get_local_id (0);
const u64 lsz = get_local_size (0);
LOCAL_VK u64 s_MT0[256];
LOCAL_VK u64 s_MT1[256];
LOCAL_VK u64 s_MT2[256];
LOCAL_VK u64 s_MT3[256];
LOCAL_VK u64 s_MT4[256];
LOCAL_VK u64 s_MT5[256];
LOCAL_VK u64 s_MT6[256];
LOCAL_VK u64 s_MT7[256];
for (u32 i = lid; i < 256; i += lsz)
{
s_MT0[i] = MT0[i];
s_MT1[i] = MT1[i];
s_MT2[i] = MT2[i];
s_MT3[i] = MT3[i];
s_MT4[i] = MT4[i];
s_MT5[i] = MT5[i];
s_MT6[i] = MT6[i];
s_MT7[i] = MT7[i];
}
SYNC_THREADS ();
#else
CONSTANT_AS u64a *s_MT0 = MT0;
CONSTANT_AS u64a *s_MT1 = MT1;
CONSTANT_AS u64a *s_MT2 = MT2;
CONSTANT_AS u64a *s_MT3 = MT3;
CONSTANT_AS u64a *s_MT4 = MT4;
CONSTANT_AS u64a *s_MT5 = MT5;
CONSTANT_AS u64a *s_MT6 = MT6;
CONSTANT_AS u64a *s_MT7 = MT7;
#endif
if (gid >= gid_max) return;
/**
* digest
*/
const u32 search[4] =
{
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R0],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R1],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R2],
digests_buf[DIGESTS_OFFSET].digest_buf[DGST_R3]
};
/**
* base
*/
u32 twofish_key_len = esalt_bufs[DIGESTS_OFFSET].key_size;
const u32 pw_len = pws[gid].pw_len;
u32x w[64] = { 0 };
for (u32 i = 0, idx = 0; i < pw_len; i += 4, idx += 1)
{
w[idx] = pws[gid].i[idx];
}
/**
* loop
*/
u32x w0l = w[0];
for (u32 il_pos = 0; il_pos < il_cnt; il_pos += VECT_SIZE)
{
const u32x w0r = words_buf_r[il_pos / VECT_SIZE];
const u32x w0 = w0l | w0r;
w[0] = w0;
whirlpool_ctx_t ctx0;
whirlpool_init (&ctx0, s_MT0, s_MT1, s_MT2, s_MT3, s_MT4, s_MT5, s_MT6, s_MT7);
whirlpool_update_swap (&ctx0, w, pw_len);
whirlpool_final (&ctx0);
const u32 k0 = ctx0.h[0];
const u32 k1 = ctx0.h[1];
const u32 k2 = ctx0.h[2];
const u32 k3 = ctx0.h[3];
u32 k4 = 0, k5 = 0, k6 = 0, k7 = 0;
if (twofish_key_len > 128)
{
k4 = ctx0.h[4];
k5 = ctx0.h[5];
if (twofish_key_len > 192)
{
k6 = ctx0.h[6];
k7 = ctx0.h[7];
}
}
// key
u32 ukey[8] = { 0 };
ukey[0] = hc_swap32_S (k0);
ukey[1] = hc_swap32_S (k1);
ukey[2] = hc_swap32_S (k2);
ukey[3] = hc_swap32_S (k3);
if (twofish_key_len > 128)
{
ukey[4] = hc_swap32_S (k4);
ukey[5] = hc_swap32_S (k5);
if (twofish_key_len > 192)
{
ukey[6] = hc_swap32_S (k6);
ukey[7] = hc_swap32_S (k7);
}
}
// IV
const u32 iv[4] = {
salt_bufs[SALT_POS].salt_buf[0],
salt_bufs[SALT_POS].salt_buf[1],
salt_bufs[SALT_POS].salt_buf[2],
salt_bufs[SALT_POS].salt_buf[3]
};
// CT
u32 CT[4] = { 0 };
// twofish
u32 sk1[4] = { 0 };
u32 lk1[40] = { 0 };
if (twofish_key_len == 128)
{
twofish128_set_key (sk1, lk1, ukey);
twofish128_encrypt (sk1, lk1, iv, CT);
}
else if (twofish_key_len == 192)
{
twofish192_set_key (sk1, lk1, ukey);
twofish192_encrypt (sk1, lk1, iv, CT);
}
else
{
twofish256_set_key (sk1, lk1, ukey);
twofish256_encrypt (sk1, lk1, iv, CT);
}
const u32 r0 = hc_swap32_S (CT[0]);
const u32 r1 = hc_swap32_S (CT[1]);
const u32 r2 = hc_swap32_S (CT[2]);
const u32 r3 = hc_swap32_S (CT[3]);
COMPARE_S_SCALAR (r0, r1, r2, r3);
}
}


@ -30,6 +30,7 @@
- Added hash-mode: Umbraco HMAC-SHA1
- Added hash-mode: sha1($salt.sha1($pass.$salt))
- Added hash-mode: sha1(sha1($pass).$salt)
- Added hash-mode: Linux Kernel Crypto API (2.4)
##
## Features


@ -269,6 +269,7 @@ NVIDIA GPUs require "NVIDIA Driver" (440.64 or later) and "CUDA Toolkit" (9.0 or
- Huawei sha1(md5($pass).$salt)
- AuthMe sha256
- eCryptfs
- Linux Kernel Crypto API (2.4)
- AES Crypt (SHA256)
- LUKS
- VeraCrypt


@ -385,6 +385,13 @@ DEVICE_TYPE_GPU * 9300 1 N
DEVICE_TYPE_GPU * 15700 1 1 A
DEVICE_TYPE_GPU * 22700 1 N A
##
## CryptoAPI
##
DEVICE_TYPE_CPU * 14500 1 A A
DEVICE_TYPE_GPU * 14500 1 A A
## Here's an example of how to manually tune SCRYPT algorithm kernels for your hardware.
## Manually tuning the GPU will yield increased performance. There is typically no noticeable change to CPU performance.
##


@ -554,6 +554,8 @@ typedef enum parser_rc
PARSER_FILE_SIZE = -41,
PARSER_IV_LENGTH = -42,
PARSER_CT_LENGTH = -43,
PARSER_CRYPTOAPI_KERNELTYPE = -44,
PARSER_CRYPTOAPI_KEYSIZE = -45,
PARSER_HAVE_ERRNO = -100,
PARSER_UNKNOWN_ERROR = -255

408
src/modules/module_14500.c Normal file

@ -0,0 +1,408 @@
/**
* Author......: See docs/credits.txt
* License.....: MIT
*/
#include "common.h"
#include "types.h"
#include "modules.h"
#include "bitops.h"
#include "convert.h"
#include "shared.h"
static const u32 ATTACK_EXEC = ATTACK_EXEC_INSIDE_KERNEL;
static const u32 DGST_POS0 = 0;
static const u32 DGST_POS1 = 1;
static const u32 DGST_POS2 = 2;
static const u32 DGST_POS3 = 3;
static const u32 DGST_SIZE = DGST_SIZE_4_4;
static const u32 HASH_CATEGORY = HASH_CATEGORY_RAW_CIPHER_KPA;
static const char *HASH_NAME = "Linux Kernel Crypto API (2.4)";
static const u64 KERN_TYPE = 14541; // will be modified below
static const u32 OPTI_TYPE = OPTI_TYPE_ZERO_BYTE
| OPTI_TYPE_NOT_ITERATED
| OPTI_TYPE_NOT_SALTED;
static const u64 OPTS_TYPE = OPTS_TYPE_PT_GENERATE_LE
| OPTS_TYPE_SELF_TEST_DISABLE
| OPTS_TYPE_PT_ADD80;
static const u32 SALT_TYPE = SALT_TYPE_EMBEDDED;
static const char *ST_PASS = "hashcat";
static const char *ST_HASH = "$cryptoapi$9$2$03000000000000000000000000000000$00000000000000000000000000000000$d1d20e91a8f2e18881dc79369d8af761";
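// Self-test hash layout (matching the tokenizer further below), e.g.:
//   $cryptoapi$9$2$<iv: 16 bytes hex>$<pt: 16 bytes hex>$<ct: 16 bytes hex>
// where 9 selects RIPEMD160 + AES (see kern_type_cryptoapi_t) and 2 selects a
// 256-bit key; the trailing block is presumably the expected ciphertext that
// ends up in the digest buffer.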
u32 module_attack_exec (MAYBE_UNUSED const hashconfig_t *hashconfig, MAYBE_UNUSED const user_options_t *user_options, MAYBE_UNUSED const user_options_extra_t *user_options_extra) { return ATTACK_EXEC; }
u32 module_dgst_pos0 (MAYBE_UNUSED const hashconfig_t *hashconfig, MAYBE_UNUSED const user_options_t *user_options, MAYBE_UNUSED const user_options_extra_t *user_options_extra) { return DGST_POS0; }
u32 module_dgst_pos1 (MAYBE_UNUSED const hashconfig_t *hashconfig, MAYBE_UNUSED const user_options_t *user_options, MAYBE_UNUSED const user_options_extra_t *user_options_extra) { return DGST_POS1; }
u32 module_dgst_pos2 (MAYBE_UNUSED const hashconfig_t *hashconfig, MAYBE_UNUSED const user_options_t *user_options, MAYBE_UNUSED const user_options_extra_t *user_options_extra) { return DGST_POS2; }
u32 module_dgst_pos3 (MAYBE_UNUSED const hashconfig_t *hashconfig, MAYBE_UNUSED const user_options_t *user_options, MAYBE_UNUSED const user_options_extra_t *user_options_extra) { return DGST_POS3; }
u32 module_dgst_size (MAYBE_UNUSED const hashconfig_t *hashconfig, MAYBE_UNUSED const user_options_t *user_options, MAYBE_UNUSED const user_options_extra_t *user_options_extra) { return DGST_SIZE; }
u32 module_hash_category (MAYBE_UNUSED const hashconfig_t *hashconfig, MAYBE_UNUSED const user_options_t *user_options, MAYBE_UNUSED const user_options_extra_t *user_options_extra) { return HASH_CATEGORY; }
const char *module_hash_name (MAYBE_UNUSED const hashconfig_t *hashconfig, MAYBE_UNUSED const user_options_t *user_options, MAYBE_UNUSED const user_options_extra_t *user_options_extra) { return HASH_NAME; }
u64 module_kern_type (MAYBE_UNUSED const hashconfig_t *hashconfig, MAYBE_UNUSED const user_options_t *user_options, MAYBE_UNUSED const user_options_extra_t *user_options_extra) { return KERN_TYPE; }
u32 module_opti_type (MAYBE_UNUSED const hashconfig_t *hashconfig, MAYBE_UNUSED const user_options_t *user_options, MAYBE_UNUSED const user_options_extra_t *user_options_extra) { return OPTI_TYPE; }
u64 module_opts_type (MAYBE_UNUSED const hashconfig_t *hashconfig, MAYBE_UNUSED const user_options_t *user_options, MAYBE_UNUSED const user_options_extra_t *user_options_extra) { return OPTS_TYPE; }
u32 module_salt_type (MAYBE_UNUSED const hashconfig_t *hashconfig, MAYBE_UNUSED const user_options_t *user_options, MAYBE_UNUSED const user_options_extra_t *user_options_extra) { return SALT_TYPE; }
const char *module_st_hash (MAYBE_UNUSED const hashconfig_t *hashconfig, MAYBE_UNUSED const user_options_t *user_options, MAYBE_UNUSED const user_options_extra_t *user_options_extra) { return ST_HASH; }
const char *module_st_pass (MAYBE_UNUSED const hashconfig_t *hashconfig, MAYBE_UNUSED const user_options_t *user_options, MAYBE_UNUSED const user_options_extra_t *user_options_extra) { return ST_PASS; }
static const char *SIGNATURE_CRYPTOAPI = "$cryptoapi$";
typedef enum kern_type_cryptoapi
{
KERN_TYPE_CRYPTOAPI_SHA1_AES = 14511, // 0
KERN_TYPE_CRYPTOAPI_SHA1_SERPENT = 14512, // 1
KERN_TYPE_CRYPTOAPI_SHA1_TWOFISH = 14513, // 2
KERN_TYPE_CRYPTOAPI_SHA256_AES = 14521, // 3
KERN_TYPE_CRYPTOAPI_SHA256_SERPENT = 14522, // 4
KERN_TYPE_CRYPTOAPI_SHA256_TWOFISH = 14523, // 5
KERN_TYPE_CRYPTOAPI_SHA512_AES = 14531, // 6
KERN_TYPE_CRYPTOAPI_SHA512_SERPENT = 14532, // 7
KERN_TYPE_CRYPTOAPI_SHA512_TWOFISH = 14533, // 8
KERN_TYPE_CRYPTOAPI_RIPEMD160_AES = 14541, // 9
KERN_TYPE_CRYPTOAPI_RIPEMD160_SERPENT = 14542, // 10
KERN_TYPE_CRYPTOAPI_RIPEMD160_TWOFISH = 14543, // 11
KERN_TYPE_CRYPTOAPI_WHIRLPOOL_AES = 14551, // 12
KERN_TYPE_CRYPTOAPI_WHIRLPOOL_SERPENT = 14552, // 13
KERN_TYPE_CRYPTOAPI_WHIRLPOOL_TWOFISH = 14553, // 14
} kern_type_cryptoapi_t;
typedef enum hc_cryptoapi_key_size
{
HC_CRYPTOAPI_KEY_SIZE_128 = 128,
HC_CRYPTOAPI_KEY_SIZE_192 = 192,
HC_CRYPTOAPI_KEY_SIZE_256 = 256,
} hc_cryptoapi_key_size_t;
typedef enum hc_cryptoapi_cipher_type
{
HC_CRYPTOAPI_CIPHER_TYPE_AES = 1,
HC_CRYPTOAPI_CIPHER_TYPE_SERPENT = 2,
HC_CRYPTOAPI_CIPHER_TYPE_TWOFISH = 3,
} hc_cryptoapi_cypher_type_t;
typedef struct cryptoapi
{
u32 kern_type;
u32 key_size;
u32 iv_buf[4];
u32 pt_buf[4];
} cryptoapi_t;
char *module_jit_build_options (MAYBE_UNUSED const hashconfig_t *hashconfig, MAYBE_UNUSED const user_options_t *user_options, MAYBE_UNUSED const user_options_extra_t *user_options_extra, MAYBE_UNUSED const hashes_t *hashes, MAYBE_UNUSED const hc_device_param_t *device_param)
{
char *jit_build_options = NULL;
// Extra treatment for Apple systems
if (device_param->opencl_platform_vendor_id == VENDOR_ID_APPLE)
{
return jit_build_options;
}
// Intel CPU
if ((device_param->opencl_device_vendor_id == VENDOR_ID_INTEL_SDK) && (device_param->opencl_device_type & CL_DEVICE_TYPE_CPU))
{
hc_asprintf (&jit_build_options, "-D _unroll");
}
// AMD-GPU-PRO
if ((device_param->opencl_device_vendor_id == VENDOR_ID_AMD) && (device_param->has_vperm == false))
{
hc_asprintf (&jit_build_options, "-D _unroll");
}
// ROCM
if ((device_param->opencl_device_vendor_id == VENDOR_ID_AMD) && (device_param->has_vperm == true))
{
hc_asprintf (&jit_build_options, "-D _unroll");
}
return jit_build_options;
}

u64 module_esalt_size (MAYBE_UNUSED const hashconfig_t *hashconfig, MAYBE_UNUSED const user_options_t *user_options, MAYBE_UNUSED const user_options_extra_t *user_options_extra)
{
const u64 esalt_size = (const u64) sizeof (cryptoapi_t);
return esalt_size;
}

int module_hash_decode (MAYBE_UNUSED const hashconfig_t *hashconfig, MAYBE_UNUSED void *digest_buf, MAYBE_UNUSED salt_t *salt, MAYBE_UNUSED void *esalt_buf, MAYBE_UNUSED void *hook_salt_buf, MAYBE_UNUSED hashinfo_t *hash_info, const char *line_buf, MAYBE_UNUSED const int line_len)
{
u32 *digest = (u32 *) digest_buf;
cryptoapi_t *cryptoapi = (cryptoapi_t *) esalt_buf;
token_t token;
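
// six '$'-separated fields: signature, kernel type, key size, IV, known plaintext, ciphertext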
token.token_cnt = 6;
token.signatures_cnt = 1;
token.signatures_buf[0] = SIGNATURE_CRYPTOAPI;
token.len[0] = 11;
token.attr[0] = TOKEN_ATTR_FIXED_LENGTH
| TOKEN_ATTR_VERIFY_SIGNATURE;
token.sep[1] = '$';
token.len_min[1] = 1;
token.len_max[1] = 2;
token.attr[1] = TOKEN_ATTR_VERIFY_LENGTH
| TOKEN_ATTR_VERIFY_DIGIT;
token.sep[2] = '$';
token.len_min[2] = 1;
token.len_max[2] = 1;
token.attr[2] = TOKEN_ATTR_VERIFY_LENGTH
| TOKEN_ATTR_VERIFY_DIGIT;
token.sep[3] = '$';
token.len_min[3] = 16 * 2;
token.len_max[3] = 16 * 2;
token.attr[3] = TOKEN_ATTR_VERIFY_LENGTH
| TOKEN_ATTR_VERIFY_HEX;
token.sep[4] = '$';
token.len_min[4] = 16 * 2;
token.len_max[4] = 16 * 2;
token.attr[4] = TOKEN_ATTR_VERIFY_LENGTH
| TOKEN_ATTR_VERIFY_HEX;
token.sep[5] = '$';
token.len_min[5] = 16 * 2;
token.len_max[5] = 16 * 2;
token.attr[5] = TOKEN_ATTR_VERIFY_LENGTH
| TOKEN_ATTR_VERIFY_HEX;
const int rc_tokenizer = input_tokenizer ((const u8 *) line_buf, line_len, &token);
if (rc_tokenizer != PARSER_OK) return (rc_tokenizer);
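
// kernel type selector: 0..14, mapped onto the kern_type_cryptoapi_t ids above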
const u32 type = atoi ((char *)token.buf[1]);
if (type > 14) return (PARSER_CRYPTOAPI_KERNELTYPE);
switch (type)
{
case 0: cryptoapi->kern_type = KERN_TYPE_CRYPTOAPI_SHA1_AES; break;
case 1: cryptoapi->kern_type = KERN_TYPE_CRYPTOAPI_SHA1_SERPENT; break;
case 2: cryptoapi->kern_type = KERN_TYPE_CRYPTOAPI_SHA1_TWOFISH; break;
case 3: cryptoapi->kern_type = KERN_TYPE_CRYPTOAPI_SHA256_AES; break;
case 4: cryptoapi->kern_type = KERN_TYPE_CRYPTOAPI_SHA256_SERPENT; break;
case 5: cryptoapi->kern_type = KERN_TYPE_CRYPTOAPI_SHA256_TWOFISH; break;
case 6: cryptoapi->kern_type = KERN_TYPE_CRYPTOAPI_SHA512_AES; break;
case 7: cryptoapi->kern_type = KERN_TYPE_CRYPTOAPI_SHA512_SERPENT; break;
case 8: cryptoapi->kern_type = KERN_TYPE_CRYPTOAPI_SHA512_TWOFISH; break;
case 9: cryptoapi->kern_type = KERN_TYPE_CRYPTOAPI_RIPEMD160_AES; break;
case 10: cryptoapi->kern_type = KERN_TYPE_CRYPTOAPI_RIPEMD160_SERPENT; break;
case 11: cryptoapi->kern_type = KERN_TYPE_CRYPTOAPI_RIPEMD160_TWOFISH; break;
case 12: cryptoapi->kern_type = KERN_TYPE_CRYPTOAPI_WHIRLPOOL_AES; break;
case 13: cryptoapi->kern_type = KERN_TYPE_CRYPTOAPI_WHIRLPOOL_SERPENT; break;
case 14: cryptoapi->kern_type = KERN_TYPE_CRYPTOAPI_WHIRLPOOL_TWOFISH; break;
}
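
// key size selector: 0 = 128, 1 = 192, 2 = 256 bit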
const u32 key_size = atoi ((char *)token.buf[2]);
if (key_size > 2) return (PARSER_CRYPTOAPI_KEYSIZE);
switch (key_size)
{
case 0: cryptoapi->key_size = HC_CRYPTOAPI_KEY_SIZE_128; break;
case 1: cryptoapi->key_size = HC_CRYPTOAPI_KEY_SIZE_192; break;
case 2: cryptoapi->key_size = HC_CRYPTOAPI_KEY_SIZE_256; break;
}
// IV
const u8 *iv_pos = token.buf[3];
cryptoapi->iv_buf[0] = hex_to_u32 (iv_pos + 0);
cryptoapi->iv_buf[1] = hex_to_u32 (iv_pos + 8);
cryptoapi->iv_buf[2] = hex_to_u32 (iv_pos + 16);
cryptoapi->iv_buf[3] = hex_to_u32 (iv_pos + 24);
// PT
const u8 *pt_pos = token.buf[4];
cryptoapi->pt_buf[0] = hex_to_u32 (pt_pos + 0);
cryptoapi->pt_buf[1] = hex_to_u32 (pt_pos + 8);
cryptoapi->pt_buf[2] = hex_to_u32 (pt_pos + 16);
cryptoapi->pt_buf[3] = hex_to_u32 (pt_pos + 24);
// salt_buf: the cipher input block; for a CBC first block, ciphertext = E_k (plaintext ^ IV),
// so the kernel can encrypt this value and compare the result against the digest below
salt->salt_len = 16;
salt->salt_buf[0] = cryptoapi->pt_buf[0] ^ cryptoapi->iv_buf[0];
salt->salt_buf[1] = cryptoapi->pt_buf[1] ^ cryptoapi->iv_buf[1];
salt->salt_buf[2] = cryptoapi->pt_buf[2] ^ cryptoapi->iv_buf[2];
salt->salt_buf[3] = cryptoapi->pt_buf[3] ^ cryptoapi->iv_buf[3];
// hash: the ciphertext block, stored as the digest
const u8 *hash_pos = token.buf[5];
digest[0] = hex_to_u32 (hash_pos + 0);
digest[1] = hex_to_u32 (hash_pos + 8);
digest[2] = hex_to_u32 (hash_pos + 16);
digest[3] = hex_to_u32 (hash_pos + 24);
digest[0] = byte_swap_32 (digest[0]);
digest[1] = byte_swap_32 (digest[1]);
digest[2] = byte_swap_32 (digest[2]);
digest[3] = byte_swap_32 (digest[3]);
return (PARSER_OK);
}

int module_hash_encode (MAYBE_UNUSED const hashconfig_t *hashconfig, MAYBE_UNUSED const void *digest_buf, MAYBE_UNUSED const salt_t *salt, MAYBE_UNUSED const void *esalt_buf, MAYBE_UNUSED const void *hook_salt_buf, MAYBE_UNUSED const hashinfo_t *hash_info, char *line_buf, MAYBE_UNUSED const int line_size)
{
const cryptoapi_t *cryptoapi = (const cryptoapi_t *) esalt_buf;
const u32 *digest = (const u32 *) digest_buf;
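
// map the kernel id back to the numeric selector used in the hash line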
u32 type = cryptoapi->kern_type;
switch (type)
{
case KERN_TYPE_CRYPTOAPI_SHA1_AES: type = 0; break;
case KERN_TYPE_CRYPTOAPI_SHA1_SERPENT: type = 1; break;
case KERN_TYPE_CRYPTOAPI_SHA1_TWOFISH: type = 2; break;
case KERN_TYPE_CRYPTOAPI_SHA256_AES: type = 3; break;
case KERN_TYPE_CRYPTOAPI_SHA256_SERPENT: type = 4; break;
case KERN_TYPE_CRYPTOAPI_SHA256_TWOFISH: type = 5; break;
case KERN_TYPE_CRYPTOAPI_SHA512_AES: type = 6; break;
case KERN_TYPE_CRYPTOAPI_SHA512_SERPENT: type = 7; break;
case KERN_TYPE_CRYPTOAPI_SHA512_TWOFISH: type = 8; break;
case KERN_TYPE_CRYPTOAPI_RIPEMD160_AES: type = 9; break;
case KERN_TYPE_CRYPTOAPI_RIPEMD160_SERPENT: type = 10; break;
case KERN_TYPE_CRYPTOAPI_RIPEMD160_TWOFISH: type = 11; break;
case KERN_TYPE_CRYPTOAPI_WHIRLPOOL_AES: type = 12; break;
case KERN_TYPE_CRYPTOAPI_WHIRLPOOL_SERPENT: type = 13; break;
case KERN_TYPE_CRYPTOAPI_WHIRLPOOL_TWOFISH: type = 14; break;
}
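
// map the key size back to its selector: 128 -> 0, 192 -> 1, 256 -> 2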
u32 key_size = cryptoapi->key_size;
switch (key_size)
{
case HC_CRYPTOAPI_KEY_SIZE_128: key_size = 0; break;
case HC_CRYPTOAPI_KEY_SIZE_192: key_size = 1; break;
case HC_CRYPTOAPI_KEY_SIZE_256: key_size = 2; break;
}
u32 tmp[4];
tmp[0] = byte_swap_32 (digest[0]);
tmp[1] = byte_swap_32 (digest[1]);
tmp[2] = byte_swap_32 (digest[2]);
tmp[3] = byte_swap_32 (digest[3]);
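
// emit signature, both selectors, IV and known plaintext; the ciphertext is appended below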
int out_len = snprintf (line_buf, line_size, "%s%u$%u$%08x%08x%08x%08x$%08x%08x%08x%08x$",
SIGNATURE_CRYPTOAPI,
type,
key_size,
byte_swap_32 (cryptoapi->iv_buf[0]),
byte_swap_32 (cryptoapi->iv_buf[1]),
byte_swap_32 (cryptoapi->iv_buf[2]),
byte_swap_32 (cryptoapi->iv_buf[3]),
byte_swap_32 (cryptoapi->pt_buf[0]),
byte_swap_32 (cryptoapi->pt_buf[1]),
byte_swap_32 (cryptoapi->pt_buf[2]),
byte_swap_32 (cryptoapi->pt_buf[3]));
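
// append the ciphertext (digest) as hex and return the final line length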
u8 *out_buf = (u8 *) line_buf;
u32_to_hex (tmp[0], out_buf + out_len); out_len += 8;
u32_to_hex (tmp[1], out_buf + out_len); out_len += 8;
u32_to_hex (tmp[2], out_buf + out_len); out_len += 8;
u32_to_hex (tmp[3], out_buf + out_len); out_len += 8;
return out_len;
}
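
// the kernel to run is selected per hash, from the kern_type stored in the esalt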
u64 module_kern_type_dynamic (MAYBE_UNUSED const hashconfig_t *hashconfig, MAYBE_UNUSED const void *digest_buf, MAYBE_UNUSED const salt_t *salt, MAYBE_UNUSED const void *esalt_buf, MAYBE_UNUSED const void *hook_salt_buf, MAYBE_UNUSED const hashinfo_t *hash_info)
{
const cryptoapi_t *cryptoapi = (const cryptoapi_t *) esalt_buf;
return cryptoapi->kern_type;
}

void module_init (module_ctx_t *module_ctx)
{
module_ctx->module_context_size = MODULE_CONTEXT_SIZE_CURRENT;
module_ctx->module_interface_version = MODULE_INTERFACE_VERSION_CURRENT;
module_ctx->module_attack_exec = module_attack_exec;
module_ctx->module_benchmark_esalt = MODULE_DEFAULT;
module_ctx->module_benchmark_hook_salt = MODULE_DEFAULT;
module_ctx->module_benchmark_mask = MODULE_DEFAULT;
module_ctx->module_benchmark_salt = MODULE_DEFAULT;
module_ctx->module_build_plain_postprocess = MODULE_DEFAULT;
module_ctx->module_deep_comp_kernel = MODULE_DEFAULT;
module_ctx->module_dgst_pos0 = module_dgst_pos0;
module_ctx->module_dgst_pos1 = module_dgst_pos1;
module_ctx->module_dgst_pos2 = module_dgst_pos2;
module_ctx->module_dgst_pos3 = module_dgst_pos3;
module_ctx->module_dgst_size = module_dgst_size;
module_ctx->module_dictstat_disable = MODULE_DEFAULT;
module_ctx->module_esalt_size = module_esalt_size;
module_ctx->module_extra_buffer_size = MODULE_DEFAULT;
module_ctx->module_extra_tmp_size = MODULE_DEFAULT;
module_ctx->module_forced_outfile_format = MODULE_DEFAULT;
module_ctx->module_hash_binary_count = MODULE_DEFAULT;
module_ctx->module_hash_binary_parse = MODULE_DEFAULT;
module_ctx->module_hash_binary_save = MODULE_DEFAULT;
module_ctx->module_hash_decode_potfile = MODULE_DEFAULT;
module_ctx->module_hash_decode_zero_hash = MODULE_DEFAULT;
module_ctx->module_hash_decode = module_hash_decode;
module_ctx->module_hash_encode_status = MODULE_DEFAULT;
module_ctx->module_hash_encode_potfile = MODULE_DEFAULT;
module_ctx->module_hash_encode = module_hash_encode;
module_ctx->module_hash_init_selftest = MODULE_DEFAULT;
module_ctx->module_hash_mode = MODULE_DEFAULT;
module_ctx->module_hash_category = module_hash_category;
module_ctx->module_hash_name = module_hash_name;
module_ctx->module_hashes_count_min = MODULE_DEFAULT;
module_ctx->module_hashes_count_max = MODULE_DEFAULT;
module_ctx->module_hlfmt_disable = MODULE_DEFAULT;
module_ctx->module_hook_extra_param_size = MODULE_DEFAULT;
module_ctx->module_hook_extra_param_init = MODULE_DEFAULT;
module_ctx->module_hook_extra_param_term = MODULE_DEFAULT;
module_ctx->module_hook12 = MODULE_DEFAULT;
module_ctx->module_hook23 = MODULE_DEFAULT;
module_ctx->module_hook_salt_size = MODULE_DEFAULT;
module_ctx->module_hook_size = MODULE_DEFAULT;
module_ctx->module_jit_build_options = module_jit_build_options;
module_ctx->module_jit_cache_disable = MODULE_DEFAULT;
module_ctx->module_kernel_accel_max = MODULE_DEFAULT;
module_ctx->module_kernel_accel_min = MODULE_DEFAULT;
module_ctx->module_kernel_loops_max = MODULE_DEFAULT;
module_ctx->module_kernel_loops_min = MODULE_DEFAULT;
module_ctx->module_kernel_threads_max = MODULE_DEFAULT;
module_ctx->module_kernel_threads_min = MODULE_DEFAULT;
module_ctx->module_kern_type = module_kern_type;
module_ctx->module_kern_type_dynamic = module_kern_type_dynamic;
module_ctx->module_opti_type = module_opti_type;
module_ctx->module_opts_type = module_opts_type;
module_ctx->module_outfile_check_disable = MODULE_DEFAULT;
module_ctx->module_outfile_check_nocomp = MODULE_DEFAULT;
module_ctx->module_potfile_custom_check = MODULE_DEFAULT;
module_ctx->module_potfile_disable = MODULE_DEFAULT;
module_ctx->module_potfile_keep_all_hashes = MODULE_DEFAULT;
module_ctx->module_pwdump_column = MODULE_DEFAULT;
module_ctx->module_pw_max = MODULE_DEFAULT;
module_ctx->module_pw_min = MODULE_DEFAULT;
module_ctx->module_salt_max = MODULE_DEFAULT;
module_ctx->module_salt_min = MODULE_DEFAULT;
module_ctx->module_salt_type = module_salt_type;
module_ctx->module_separator = MODULE_DEFAULT;
module_ctx->module_st_hash = module_st_hash;
module_ctx->module_st_pass = module_st_pass;
module_ctx->module_tmp_size = MODULE_DEFAULT;
module_ctx->module_unstable_warning = MODULE_DEFAULT;
module_ctx->module_warmup_disable = MODULE_DEFAULT;
}