19extern const char LSH512_SSE_FNAME[] = __FILE__;
21#if defined(CRYPTOPP_SSSE3_AVAILABLE) && defined(CRYPTOPP_ENABLE_64BIT_SSE)
23#if defined(CRYPTOPP_SSSE3_AVAILABLE)
24# include <emmintrin.h>
25# include <tmmintrin.h>
28#if defined(CRYPTOPP_XOP_AVAILABLE)
29# include <ammintrin.h>
32#if defined(CRYPTOPP_GCC_COMPATIBLE)
33# include <x86intrin.h>
36ANONYMOUS_NAMESPACE_BEGIN
// LSH-512 geometry: a message block is 256 bytes (32 64-bit words) and the
// digest is at most 64 bytes. The numeric run-ons at the start of these
// lines are extraction artifacts of the original file's line numbers.
40const unsigned int LSH512_MSG_BLK_BYTE_LEN = 256;
43const unsigned int LSH512_HASH_VAL_MAX_BYTE_LEN = 64;
// Chaining variable is 16 words; each mix step consumes 8 constant words.
46const unsigned int CV_WORD_LEN = 16;
47const unsigned int CONST_WORD_LEN = 8;
// The compression function runs 28 steps (14 even/odd pairs).
49const unsigned int NUM_STEPS = 28;
// Rotation distances for the even and odd mix steps.
51const unsigned int ROT_EVEN_ALPHA = 23;
52const unsigned int ROT_EVEN_BETA = 59;
53const unsigned int ROT_ODD_ALPHA = 7;
54const unsigned int ROT_ODD_BETA = 3;
// Algorithm-type words; the low bits encode the truncated digest length
// (0x40=64, 0x30=48, 0x20=32, 0x1C=28 bytes).
56const unsigned int LSH_TYPE_512_512 = 0x0010040;
57const unsigned int LSH_TYPE_512_384 = 0x0010030;
58const unsigned int LSH_TYPE_512_256 = 0x0010020;
59const unsigned int LSH_TYPE_512_224 = 0x001001C;
// Status codes returned by the init/update/final routines below.
66const unsigned int LSH_SUCCESS = 0x0;
69const unsigned int LSH_ERR_INVALID_DATABITLEN = 0x2403;
70const unsigned int LSH_ERR_INVALID_STATE = 0x2404;
// Indices into the caller's word64 state array where the algorithm type
// and the count of buffered bits are stored (see LSH512_Base_* wrappers).
74const unsigned int AlgorithmType = 80;
75const unsigned int RemainingBits = 81;
// Initialization vectors and step constants; defined in another TU.
83extern const word64 LSH512_IV224[CV_WORD_LEN];
84extern const word64 LSH512_IV256[CV_WORD_LEN];
85extern const word64 LSH512_IV384[CV_WORD_LEN];
86extern const word64 LSH512_IV512[CV_WORD_LEN];
87extern const word64 LSH512_StepConstants[CONST_WORD_LEN * NUM_STEPS];
92ANONYMOUS_NAMESPACE_BEGIN
95using CryptoPP::word32;
96using CryptoPP::word64;
97using CryptoPP::rotlFixed;
98using CryptoPP::rotlConstant;
100using CryptoPP::GetBlock;
101using CryptoPP::LittleEndian;
102using CryptoPP::ConditionalByteReverse;
103using CryptoPP::LITTLE_ENDIAN_ORDER;
105using CryptoPP::LSH::LSH512_IV224;
106using CryptoPP::LSH::LSH512_IV256;
107using CryptoPP::LSH::LSH512_IV384;
108using CryptoPP::LSH::LSH512_IV512;
109using CryptoPP::LSH::LSH512_StepConstants;
// Non-owning view over the caller's word64 state array: cv_l at state+0,
// cv_r at state+8, the message schedule at state+16, and the partial-block
// byte buffer at state+48. remain_databitlen aliases the caller's counter
// so updates persist across calls. (Member declarations are partially
// missing from this extraction.)
118struct LSH512_SSSE3_Context
121 cv_l(state+0), cv_r(state+8), sub_msgs(state+16),
122 last_block(reinterpret_cast<
byte*>(state+48)),
123 remain_databitlen(remainingBitLength),
124 alg_type(static_cast<lsh_type>(algType)) {}
130 lsh_u64& remain_databitlen;
// View over the message-schedule region of the state array: even-step
// left/right sub-messages at offsets 16/24, odd-step at 32/40. The ctor
// takes the base of the state array (callers pass ctx->cv_l, i.e. state+0).
134struct LSH512_SSSE3_Internal
136 LSH512_SSSE3_Internal(
word64* state) :
137 submsg_e_l(state+16), submsg_e_r(state+24),
138 submsg_o_l(state+32), submsg_o_r(state+40) { }
150inline bool LSH_IS_LSH512(lsh_uint val) {
151 return (val & 0xf0000) == 0x10000;
// Decode the digest length from an algorithm-type word.
// NOTE(review): the bodies of LSH_GET_SMALL_HASHBIT and LSH_GET_HASHBYTE
// are missing from this extraction; from LSH_GET_HASHBIT below, HASHBYTE
// presumably yields the byte length and SMALL_HASHBIT the residual bit
// count subtracted from the bit length — confirm against the full source.
154inline lsh_uint LSH_GET_SMALL_HASHBIT(lsh_uint val) {
158inline lsh_uint LSH_GET_HASHBYTE(lsh_uint val) {
// Digest length in bits: 8 * byte-length minus the small-hashbit remainder.
162inline lsh_uint LSH_GET_HASHBIT(lsh_uint val) {
163 return (LSH_GET_HASHBYTE(val) << 3) - LSH_GET_SMALL_HASHBIT(val);
// loadLE64: canonicalize a 64-bit word to little-endian host order.
// ROTL64: rotate left by r bits. Both bodies are missing from this
// extraction; the using-declarations above (ConditionalByteReverse,
// rotlFixed) suggest the implementations — confirm against full source.
166inline lsh_u64 loadLE64(lsh_u64 v) {
170lsh_u64 ROTL64(lsh_u64 x, lsh_u32 r) {
// Load a 256-byte message block into the four sub-message arrays
// (even-left, even-right, odd-left, odd-right), 8 words each, using
// unaligned 128-bit stores. The source operands of each store (loads from
// msgblk) are missing from this extraction; only the destinations are
// visible.
175inline void load_msg_blk(LSH512_SSSE3_Internal* i_state,
const lsh_u8 msgblk[LSH512_MSG_BLK_BYTE_LEN])
177 lsh_u64* submsg_e_l = i_state->submsg_e_l;
178 lsh_u64* submsg_e_r = i_state->submsg_e_r;
179 lsh_u64* submsg_o_l = i_state->submsg_o_l;
180 lsh_u64* submsg_o_r = i_state->submsg_o_r;
// Even-step left sub-message: words 0..7 of the block.
182 _mm_storeu_si128(
M128_CAST(submsg_e_l+0),
184 _mm_storeu_si128(
M128_CAST(submsg_e_l+2),
186 _mm_storeu_si128(
M128_CAST(submsg_e_l+4),
188 _mm_storeu_si128(
M128_CAST(submsg_e_l+6),
// Even-step right sub-message: words 8..15.
191 _mm_storeu_si128(
M128_CAST(submsg_e_r+0),
193 _mm_storeu_si128(
M128_CAST(submsg_e_r+2),
195 _mm_storeu_si128(
M128_CAST(submsg_e_r+4),
197 _mm_storeu_si128(
M128_CAST(submsg_e_r+6),
// Odd-step left sub-message: words 16..23.
200 _mm_storeu_si128(
M128_CAST(submsg_o_l+0),
202 _mm_storeu_si128(
M128_CAST(submsg_o_l+2),
204 _mm_storeu_si128(
M128_CAST(submsg_o_l+4),
206 _mm_storeu_si128(
M128_CAST(submsg_o_l+6),
// Odd-step right sub-message: words 24..31.
209 _mm_storeu_si128(
M128_CAST(submsg_o_r+0),
211 _mm_storeu_si128(
M128_CAST(submsg_o_r+2),
213 _mm_storeu_si128(
M128_CAST(submsg_o_r+4),
215 _mm_storeu_si128(
M128_CAST(submsg_o_r+6),
// Even-step message expansion: permute the even sub-message words
// (64-bit-lane swaps via _mm_shuffle_epi32(...,_MM_SHUFFLE(1,0,3,2)) and
// unpacklo/unpackhi_epi64), then add the odd sub-message words into them.
// Many second operands are missing from this extraction; the visible
// destinations show the permute-then-add structure.
219inline void msg_exp_even(LSH512_SSSE3_Internal* i_state)
223 lsh_u64* submsg_e_l = i_state->submsg_e_l;
224 lsh_u64* submsg_e_r = i_state->submsg_e_r;
225 lsh_u64* submsg_o_l = i_state->submsg_o_l;
226 lsh_u64* submsg_o_r = i_state->submsg_o_r;
// Permute the left half: swap the two 64-bit lanes of words 2..3,
// then rearrange words 0..7 through a temporary.
229 _mm_storeu_si128(
M128_CAST(submsg_e_l+2), _mm_shuffle_epi32(
230 _mm_loadu_si128(
CONST_M128_CAST(submsg_e_l+2)), _MM_SHUFFLE(1,0,3,2)));
233 _mm_storeu_si128(
M128_CAST(submsg_e_l+0),
235 _mm_storeu_si128(
M128_CAST(submsg_e_l+2), temp);
236 _mm_storeu_si128(
M128_CAST(submsg_e_l+6), _mm_shuffle_epi32(
237 _mm_loadu_si128(
CONST_M128_CAST(submsg_e_l+6)), _MM_SHUFFLE(1,0,3,2)));
240 _mm_storeu_si128(
M128_CAST(submsg_e_l+4), _mm_unpacklo_epi64(
243 _mm_storeu_si128(
M128_CAST(submsg_e_l+6), _mm_unpackhi_epi64(
// Same permutation on the right half.
245 _mm_storeu_si128(
M128_CAST(submsg_e_r+2), _mm_shuffle_epi32(
246 _mm_loadu_si128(
CONST_M128_CAST(submsg_e_r+2)), _MM_SHUFFLE(1,0,3,2)));
249 _mm_storeu_si128(
M128_CAST(submsg_e_r+0),
251 _mm_storeu_si128(
M128_CAST(submsg_e_r+2), temp);
252 _mm_storeu_si128(
M128_CAST(submsg_e_r+6), _mm_shuffle_epi32(
253 _mm_loadu_si128(
CONST_M128_CAST(submsg_e_r+6)), _MM_SHUFFLE(1,0,3,2)));
256 _mm_storeu_si128(
M128_CAST(submsg_e_r+4), _mm_unpacklo_epi64(
259 _mm_storeu_si128(
M128_CAST(submsg_e_r+6), _mm_unpackhi_epi64(
// Accumulate: even sub-message += (permuted) odd sub-message, lane-wise
// 64-bit adds across all eight word pairs of each half.
262 _mm_storeu_si128(
M128_CAST(submsg_e_l+0), _mm_add_epi64(
265 _mm_storeu_si128(
M128_CAST(submsg_e_l+2), _mm_add_epi64(
268 _mm_storeu_si128(
M128_CAST(submsg_e_l+4), _mm_add_epi64(
271 _mm_storeu_si128(
M128_CAST(submsg_e_l+6), _mm_add_epi64(
275 _mm_storeu_si128(
M128_CAST(submsg_e_r+0), _mm_add_epi64(
278 _mm_storeu_si128(
M128_CAST(submsg_e_r+2), _mm_add_epi64(
281 _mm_storeu_si128(
M128_CAST(submsg_e_r+4), _mm_add_epi64(
284 _mm_storeu_si128(
M128_CAST(submsg_e_r+6), _mm_add_epi64(
// Odd-step message expansion: mirror of msg_exp_even operating on the odd
// sub-messages (permute odd words, then add the even sub-message words in).
// Second operands of several intrinsics are missing from this extraction.
289inline void msg_exp_odd(LSH512_SSSE3_Internal* i_state)
293 lsh_u64* submsg_e_l = i_state->submsg_e_l;
294 lsh_u64* submsg_e_r = i_state->submsg_e_r;
295 lsh_u64* submsg_o_l = i_state->submsg_o_l;
296 lsh_u64* submsg_o_r = i_state->submsg_o_r;
// Permute the left half of the odd sub-message.
299 _mm_storeu_si128(
M128_CAST(submsg_o_l+2), _mm_shuffle_epi32(
300 _mm_loadu_si128(
CONST_M128_CAST(submsg_o_l+2)), _MM_SHUFFLE(1,0,3,2)));
303 _mm_storeu_si128(
M128_CAST(submsg_o_l+0),
305 _mm_storeu_si128(
M128_CAST(submsg_o_l+2), temp);
306 _mm_storeu_si128(
M128_CAST(submsg_o_l+6), _mm_shuffle_epi32(
307 _mm_loadu_si128(
CONST_M128_CAST(submsg_o_l+6)), _MM_SHUFFLE(1,0,3,2)));
310 _mm_storeu_si128(
M128_CAST(submsg_o_l+4), _mm_unpacklo_epi64(
313 _mm_storeu_si128(
M128_CAST(submsg_o_l+6), _mm_unpackhi_epi64(
// Permute the right half of the odd sub-message.
315 _mm_storeu_si128(
M128_CAST(submsg_o_r+2), _mm_shuffle_epi32(
316 _mm_loadu_si128(
CONST_M128_CAST(submsg_o_r+2)), _MM_SHUFFLE(1,0,3,2)));
319 _mm_storeu_si128(
M128_CAST(submsg_o_r+0),
321 _mm_storeu_si128(
M128_CAST(submsg_o_r+2), temp);
322 _mm_storeu_si128(
M128_CAST(submsg_o_r+6), _mm_shuffle_epi32(
323 _mm_loadu_si128(
CONST_M128_CAST(submsg_o_r+6)), _MM_SHUFFLE(1,0,3,2)));
326 _mm_storeu_si128(
M128_CAST(submsg_o_r+4), _mm_unpacklo_epi64(
329 _mm_storeu_si128(
M128_CAST(submsg_o_r+6), _mm_unpackhi_epi64(
// Accumulate: odd sub-message += even sub-message, 64-bit lane adds.
332 _mm_storeu_si128(
M128_CAST(submsg_o_l+0), _mm_add_epi64(
335 _mm_storeu_si128(
M128_CAST(submsg_o_l+2), _mm_add_epi64(
338 _mm_storeu_si128(
M128_CAST(submsg_o_l+4), _mm_add_epi64(
341 _mm_storeu_si128(
M128_CAST(submsg_o_l+6), _mm_add_epi64(
345 _mm_storeu_si128(
M128_CAST(submsg_o_r+0), _mm_add_epi64(
348 _mm_storeu_si128(
M128_CAST(submsg_o_r+2), _mm_add_epi64(
351 _mm_storeu_si128(
M128_CAST(submsg_o_r+4), _mm_add_epi64(
354 _mm_storeu_si128(
M128_CAST(submsg_o_r+6), _mm_add_epi64(
359inline void load_sc(
const lsh_u64** p_const_v,
size_t i)
361 *p_const_v = &LSH512_StepConstants[i];
// XOR the even-step sub-message words into the chaining variables
// (cv_l ^= submsg_e_l, cv_r ^= submsg_e_r), two 64-bit words per store.
// The xor source operands are missing from this extraction.
364inline void msg_add_even(lsh_u64 cv_l[8], lsh_u64 cv_r[8], LSH512_SSSE3_Internal* i_state)
368 lsh_u64* submsg_e_l = i_state->submsg_e_l;
369 lsh_u64* submsg_e_r = i_state->submsg_e_r;
371 _mm_storeu_si128(
M128_CAST(cv_l), _mm_xor_si128(
374 _mm_storeu_si128(
M128_CAST(cv_r), _mm_xor_si128(
377 _mm_storeu_si128(
M128_CAST(cv_l+2), _mm_xor_si128(
380 _mm_storeu_si128(
M128_CAST(cv_r+2), _mm_xor_si128(
383 _mm_storeu_si128(
M128_CAST(cv_l+4), _mm_xor_si128(
386 _mm_storeu_si128(
M128_CAST(cv_r+4), _mm_xor_si128(
389 _mm_storeu_si128(
M128_CAST(cv_l+6), _mm_xor_si128(
392 _mm_storeu_si128(
M128_CAST(cv_r+6), _mm_xor_si128(
// XOR the odd-step sub-message words into the chaining variables
// (cv_l ^= submsg_o_l, cv_r ^= submsg_o_r). Mirror of msg_add_even.
397inline void msg_add_odd(lsh_u64 cv_l[8], lsh_u64 cv_r[8], LSH512_SSSE3_Internal* i_state)
401 lsh_u64* submsg_o_l = i_state->submsg_o_l;
402 lsh_u64* submsg_o_r = i_state->submsg_o_r;
404 _mm_storeu_si128(
M128_CAST(cv_l), _mm_xor_si128(
407 _mm_storeu_si128(
M128_CAST(cv_r), _mm_xor_si128(
410 _mm_storeu_si128(
M128_CAST(cv_l+2), _mm_xor_si128(
413 _mm_storeu_si128(
M128_CAST(cv_r+2), _mm_xor_si128(
416 _mm_storeu_si128(
M128_CAST(cv_l+4), _mm_xor_si128(
419 _mm_storeu_si128(
M128_CAST(cv_r+4), _mm_xor_si128(
422 _mm_storeu_si128(
M128_CAST(cv_l+6), _mm_xor_si128(
425 _mm_storeu_si128(
M128_CAST(cv_r+6), _mm_xor_si128(
// 64-bit lane-wise add of cv_r into cv_l (cv_l[i] += cv_r[i] for i=0..7).
// Only the destination half of each add is visible in this extraction;
// the add operands (loads from cv_l/cv_r) are missing.
430inline void add_blk(lsh_u64 cv_l[8], lsh_u64 cv_r[8])
432 _mm_storeu_si128(
M128_CAST(cv_l), _mm_add_epi64(
435 _mm_storeu_si128(
M128_CAST(cv_l+2), _mm_add_epi64(
438 _mm_storeu_si128(
M128_CAST(cv_l+4), _mm_add_epi64(
441 _mm_storeu_si128(
M128_CAST(cv_l+6), _mm_add_epi64(
// Rotate each 64-bit word of cv left by the compile-time amount R.
// With XOP available a native rotate is used (that path is elided here);
// the generic path composes the rotate from shift-left/shift-right pairs
// OR'd together. The shift operands are missing from this extraction.
446template <
unsigned int R>
447inline void rotate_blk(lsh_u64 cv[8])
449#if defined(CRYPTOPP_XOP_AVAILABLE)
460 _mm_storeu_si128(
M128_CAST(cv), _mm_or_si128(
463 _mm_storeu_si128(
M128_CAST(cv+2), _mm_or_si128(
466 _mm_storeu_si128(
M128_CAST(cv+4), _mm_or_si128(
469 _mm_storeu_si128(
M128_CAST(cv+6), _mm_or_si128(
// XOR the 8 step-constant words into cv_l (cv_l[i] ^= const_v[i]).
// The xor source operands (loads from const_v) are missing from this
// extraction.
475inline void xor_with_const(lsh_u64 cv_l[8],
const lsh_u64 const_v[8])
477 _mm_storeu_si128(
M128_CAST(cv_l), _mm_xor_si128(
480 _mm_storeu_si128(
M128_CAST(cv_l+2), _mm_xor_si128(
483 _mm_storeu_si128(
M128_CAST(cv_l+4), _mm_xor_si128(
486 _mm_storeu_si128(
M128_CAST(cv_l+6), _mm_xor_si128(
// Gamma rotation: byte-granular left-rotate of each 64-bit word of cv_r
// by a per-word amount, implemented as pshufb (_mm_shuffle_epi8) with the
// byte-permutation masks below. The surrounding store/load calls are
// missing from this extraction; only the shuffle masks remain.
491inline void rotate_msg_gamma(lsh_u64 cv_r[8])
496 _mm_set_epi8(13,12,11,10, 9,8,15,14, 7,6,5,4, 3,2,1,0)));
499 _mm_set_epi8(9,8,15,14, 13,12,11,10, 3,2,1,0, 7,6,5,4)));
503 _mm_set_epi8(12,11,10,9, 8,15,14,13, 6,5,4,3, 2,1,0,7)));
506 _mm_set_epi8(8,15,14,13, 12,11,10,9, 2,1,0,7, 6,5,4,3)));
// Word permutation step: rearranges the 16 chaining-variable words across
// cv_l and cv_r using unpacklo/unpackhi_epi64 and 64-bit-lane shuffles,
// finishing by writing two saved temporaries back into cv_r. Several
// operand expressions are missing from this extraction.
509inline void word_perm(lsh_u64 cv_l[8], lsh_u64 cv_r[8])
513 _mm_storeu_si128(
M128_CAST(cv_l+0), _mm_unpacklo_epi64(
516 _mm_storeu_si128(
M128_CAST(cv_l+2), _mm_unpackhi_epi64(
520 _mm_storeu_si128(
M128_CAST(cv_l+4), _mm_unpacklo_epi64(
523 _mm_storeu_si128(
M128_CAST(cv_l+6), _mm_unpackhi_epi64(
525 _mm_storeu_si128(
M128_CAST(cv_r+2), _mm_shuffle_epi32(
529 _mm_storeu_si128(
M128_CAST(cv_r+0), _mm_unpacklo_epi64(
532 _mm_storeu_si128(
M128_CAST(cv_r+2), _mm_unpackhi_epi64(
534 _mm_storeu_si128(
M128_CAST(cv_r+6), _mm_shuffle_epi32(
538 _mm_storeu_si128(
M128_CAST(cv_r+4), _mm_unpacklo_epi64(
541 _mm_storeu_si128(
M128_CAST(cv_r+6), _mm_unpackhi_epi64(
// Restore the saved vectors into the low words of cv_r.
560 _mm_storeu_si128(
M128_CAST(cv_r+0), temp[0]);
561 _mm_storeu_si128(
M128_CAST(cv_r+2), temp[1]);
// One mix step: rotate cv_l by Alpha, XOR in the step constants, rotate
// cv_r by Beta, then apply the gamma byte-rotation to cv_r. The add_blk
// calls that interleave these operations in the full source are missing
// from this extraction.
568template <
unsigned int Alpha,
unsigned int Beta>
569inline void mix(lsh_u64 cv_l[8], lsh_u64 cv_r[8],
const lsh_u64 const_v[8])
572 rotate_blk<Alpha>(cv_l);
573 xor_with_const(cv_l, const_v);
575 rotate_blk<Beta>(cv_r);
577 rotate_msg_gamma(cv_r);
// Compression function: absorb one 256-byte message block into the
// chaining variables. Loads the block into the schedule, then runs
// NUM_STEPS mix steps in even/odd pairs — the first pair uses the raw
// sub-messages, each later pair expands them first — and finishes with a
// final expansion+add. (The trailing msg_add_odd of the full source is
// not visible in this extraction.)
584inline void compress(LSH512_SSSE3_Context* ctx,
const lsh_u8 pdMsgBlk[LSH512_MSG_BLK_BYTE_LEN])
588 LSH512_SSSE3_Internal s_state(ctx->cv_l);
589 LSH512_SSSE3_Internal* i_state = &s_state;
591 const lsh_u64* const_v = NULL;
592 lsh_u64 *cv_l = ctx->cv_l;
593 lsh_u64 *cv_r = ctx->cv_r;
595 load_msg_blk(i_state, pdMsgBlk);
// Step 0 (even) and step 1 (odd) use the freshly loaded sub-messages.
597 msg_add_even(cv_l, cv_r, i_state);
598 load_sc(&const_v, 0);
599 mix<ROT_EVEN_ALPHA, ROT_EVEN_BETA>(cv_l, cv_r, const_v);
600 word_perm(cv_l, cv_r);
602 msg_add_odd(cv_l, cv_r, i_state);
603 load_sc(&const_v, 8);
604 mix<ROT_ODD_ALPHA, ROT_ODD_BETA>(cv_l, cv_r, const_v);
605 word_perm(cv_l, cv_r);
// Remaining step pairs: expand the schedule before each add.
607 for (
size_t i = 1; i < NUM_STEPS / 2; i++)
609 msg_exp_even(i_state);
610 msg_add_even(cv_l, cv_r, i_state);
611 load_sc(&const_v, 16 * i);
612 mix<ROT_EVEN_ALPHA, ROT_EVEN_BETA>(cv_l, cv_r, const_v);
613 word_perm(cv_l, cv_r);
615 msg_exp_odd(i_state);
616 msg_add_odd(cv_l, cv_r, i_state);
617 load_sc(&const_v, 16 * i + 8);
618 mix<ROT_ODD_ALPHA, ROT_ODD_BETA>(cv_l, cv_r, const_v);
619 word_perm(cv_l, cv_r);
// Final message addition after the last expansion.
622 msg_exp_even(i_state);
623 msg_add_even(cv_l, cv_r, i_state);
649inline void zero_iv(lsh_u64 cv_l[8], lsh_u64 cv_r[8])
651 _mm_storeu_si128(
M128_CAST(cv_l+0), _mm_setzero_si128());
652 _mm_storeu_si128(
M128_CAST(cv_l+2), _mm_setzero_si128());
653 _mm_storeu_si128(
M128_CAST(cv_l+4), _mm_setzero_si128());
654 _mm_storeu_si128(
M128_CAST(cv_l+6), _mm_setzero_si128());
655 _mm_storeu_si128(
M128_CAST(cv_r+0), _mm_setzero_si128());
656 _mm_storeu_si128(
M128_CAST(cv_r+2), _mm_setzero_si128());
657 _mm_storeu_si128(
M128_CAST(cv_r+4), _mm_setzero_si128());
658 _mm_storeu_si128(
M128_CAST(cv_r+6), _mm_setzero_si128());
// Zero the 64-word sub-message schedule region of the context. The store
// destinations (offsets into sub_msgs) are missing from this extraction;
// only the _mm_setzero_si128() sources remain.
661inline void zero_submsgs(LSH512_SSSE3_Context* ctx)
663 lsh_u64* sub_msgs = ctx->sub_msgs;
666 _mm_setzero_si128());
668 _mm_setzero_si128());
670 _mm_setzero_si128());
672 _mm_setzero_si128());
674 _mm_setzero_si128());
676 _mm_setzero_si128());
678 _mm_setzero_si128());
680 _mm_setzero_si128());
// Per-digest-size initializers: each clears the sub-message schedule
// (zero_submsgs call not visible in this extraction) and loads the IV
// for its truncation into cv_l/cv_r.
683inline void init224(LSH512_SSSE3_Context* ctx)
688 load_iv(ctx->cv_l, ctx->cv_r, LSH512_IV224);
691inline void init256(LSH512_SSSE3_Context* ctx)
696 load_iv(ctx->cv_l, ctx->cv_r, LSH512_IV256);
699inline void init384(LSH512_SSSE3_Context* ctx)
704 load_iv(ctx->cv_l, ctx->cv_r, LSH512_IV384);
707inline void init512(LSH512_SSSE3_Context* ctx)
712 load_iv(ctx->cv_l, ctx->cv_r, LSH512_IV512);
// Finalization fold: cv_l ^= cv_r across all eight words, leaving the
// pre-truncation digest in cv_l. The xor source operands (loads from
// cv_l/cv_r) are missing from this extraction.
717inline void fin(LSH512_SSSE3_Context* ctx)
721 _mm_storeu_si128(
M128_CAST(ctx->cv_l+0), _mm_xor_si128(
724 _mm_storeu_si128(
M128_CAST(ctx->cv_l+2), _mm_xor_si128(
727 _mm_storeu_si128(
M128_CAST(ctx->cv_l+4), _mm_xor_si128(
730 _mm_storeu_si128(
M128_CAST(ctx->cv_l+6), _mm_xor_si128(
// Copy the truncated digest out of cv_l. The byte length comes from the
// algorithm type; if the digest length is not a whole number of bytes,
// mask off the unused low bits of the final byte.
737inline void get_hash(LSH512_SSSE3_Context* ctx, lsh_u8* pbHashVal)
743 lsh_uint alg_type = ctx->alg_type;
744 lsh_uint hash_val_byte_len = LSH_GET_HASHBYTE(alg_type);
745 lsh_uint hash_val_bit_len = LSH_GET_SMALL_HASHBIT(alg_type);
748 std::memcpy(pbHashVal, ctx->cv_l, hash_val_byte_len);
// Keep only the high (8 - hash_val_bit_len) bits of the last byte.
749 if (hash_val_bit_len){
750 pbHashVal[hash_val_byte_len-1] &= (((lsh_u8)0xff) << hash_val_bit_len);
756lsh_err lsh512_init_ssse3(LSH512_SSSE3_Context* ctx)
761 lsh_u32 alg_type = ctx->alg_type;
762 const lsh_u64* const_v = NULL;
763 ctx->remain_databitlen = 0;
766 case LSH_TYPE_512_512:
769 case LSH_TYPE_512_384:
772 case LSH_TYPE_512_256:
775 case LSH_TYPE_512_224:
782 lsh_u64* cv_l = ctx->cv_l;
783 lsh_u64* cv_r = ctx->cv_r;
786 cv_l[0] = LSH512_HASH_VAL_MAX_BYTE_LEN;
787 cv_l[1] = LSH_GET_HASHBIT(alg_type);
789 for (
size_t i = 0; i < NUM_STEPS / 2; i++)
792 load_sc(&const_v, i * 16);
793 mix<ROT_EVEN_ALPHA, ROT_EVEN_BETA>(cv_l, cv_r, const_v);
794 word_perm(cv_l, cv_r);
796 load_sc(&const_v, i * 16 + 8);
797 mix<ROT_ODD_ALPHA, ROT_ODD_BETA>(cv_l, cv_r, const_v);
798 word_perm(cv_l, cv_r);
804lsh_err lsh512_update_ssse3(LSH512_SSSE3_Context* ctx,
const lsh_u8* data,
size_t databitlen)
811 if (databitlen == 0){
816 size_t databytelen = databitlen >> 3;
818 const size_t pos2 = 0;
820 size_t remain_msg_byte =
static_cast<size_t>(ctx->remain_databitlen >> 3);
822 const size_t remain_msg_bit = 0;
824 if (remain_msg_byte >= LSH512_MSG_BLK_BYTE_LEN){
825 return LSH_ERR_INVALID_STATE;
827 if (remain_msg_bit > 0){
828 return LSH_ERR_INVALID_DATABITLEN;
831 if (databytelen + remain_msg_byte < LSH512_MSG_BLK_BYTE_LEN){
832 std::memcpy(ctx->last_block + remain_msg_byte, data, databytelen);
833 ctx->remain_databitlen += (lsh_uint)databitlen;
834 remain_msg_byte += (lsh_uint)databytelen;
836 ctx->last_block[remain_msg_byte] = data[databytelen] & ((0xff >> pos2) ^ 0xff);
841 if (remain_msg_byte > 0){
842 size_t more_byte = LSH512_MSG_BLK_BYTE_LEN - remain_msg_byte;
843 std::memcpy(ctx->last_block + remain_msg_byte, data, more_byte);
844 compress(ctx, ctx->last_block);
846 databytelen -= more_byte;
848 ctx->remain_databitlen = 0;
851 while (databytelen >= LSH512_MSG_BLK_BYTE_LEN)
857 data += LSH512_MSG_BLK_BYTE_LEN;
858 databytelen -= LSH512_MSG_BLK_BYTE_LEN;
861 if (databytelen > 0){
862 std::memcpy(ctx->last_block, data, databytelen);
863 ctx->remain_databitlen = (lsh_uint)(databytelen << 3);
867 ctx->last_block[databytelen] = data[databytelen] & ((0xff >> pos2) ^ 0xff);
868 ctx->remain_databitlen += pos2;
873lsh_err lsh512_final_ssse3(LSH512_SSSE3_Context* ctx, lsh_u8* hashval)
879 size_t remain_msg_byte =
static_cast<size_t>(ctx->remain_databitlen >> 3);
881 const size_t remain_msg_bit = 0;
883 if (remain_msg_byte >= LSH512_MSG_BLK_BYTE_LEN){
884 return LSH_ERR_INVALID_STATE;
888 ctx->last_block[remain_msg_byte] |= (0x1 << (7 - remain_msg_bit));
891 ctx->last_block[remain_msg_byte] = 0x80;
893 std::memset(ctx->last_block + remain_msg_byte + 1, 0, LSH512_MSG_BLK_BYTE_LEN - remain_msg_byte - 1);
895 compress(ctx, ctx->last_block);
898 get_hash(ctx, hashval);
903ANONYMOUS_NAMESPACE_END
// Public entry point: reset the hash state. Clears the buffered-bit count,
// wraps the caller's state array in a context, and runs the initializer.
// The error branch body (exception throw in the full source) is not
// visible in this extraction.
908void LSH512_Base_Restart_SSSE3(
word64* state)
910 state[RemainingBits] = 0;
911 LSH512_SSSE3_Context ctx(state, state[AlgorithmType], state[RemainingBits]);
912 lsh_err err = lsh512_init_ssse3(&ctx);
914 if (err != LSH_SUCCESS)
// Public entry point: absorb 'size' bytes (converted to bits for the
// core update routine). The error branch body is not visible in this
// extraction.
919void LSH512_Base_Update_SSSE3(
word64* state,
const byte *input,
size_t size)
921 LSH512_SSSE3_Context ctx(state, state[AlgorithmType], state[RemainingBits]);
922 lsh_err err = lsh512_update_ssse3(&ctx, input, 8*size);
924 if (err != LSH_SUCCESS)
// Public entry point: finalize and write the digest to 'hash'. The unnamed
// size_t parameter (requested truncation) is unused here; truncation is
// driven by the stored algorithm type. The error branch body is not
// visible in this extraction.
929void LSH512_Base_TruncatedFinal_SSSE3(
word64* state,
byte *hash,
size_t)
931 LSH512_SSSE3_Context ctx(state, state[AlgorithmType], state[RemainingBits]);
932 lsh_err err = lsh512_final_ssse3(&ctx, hash);
934 if (err != LSH_SUCCESS)
#define M128_CAST(x)
Clang workaround.
#define CONST_M128_CAST(x)
Clang workaround.
Base class for all exceptions thrown by the library.
@ OTHER_ERROR
Some other error occurred not belonging to other categories.
Library configuration file.
unsigned char byte
8-bit unsigned datatype
unsigned int word32
32-bit unsigned datatype
unsigned long long word64
64-bit unsigned datatype
@ LITTLE_ENDIAN_ORDER
byte order is little-endian
Classes for the LSH hash functions.
Utility functions for the Crypto++ library.
T ConditionalByteReverse(ByteOrder order, T value)
Reverses bytes in a value depending upon endianness.
T rotlFixed(T x, unsigned int y)
Performs a left rotate.
Crypto++ library namespace.
#define CRYPTOPP_ASSERT(exp)
Debugging and diagnostic assertion.