#if defined(CRYPTOPP_SSSE3_AVAILABLE) && defined(CRYPTOPP_ENABLE_64BIT_SSE)

#if defined(CRYPTOPP_SSSE3_AVAILABLE)
# include <emmintrin.h>
# include <tmmintrin.h>
#endif

#if defined(CRYPTOPP_XOP_AVAILABLE)
# include <ammintrin.h>
#endif

#if (CRYPTOPP_GCC_VERSION >= 40500)
# include <x86intrin.h>
#endif

ANONYMOUS_NAMESPACE_BEGIN
/* LSH-512 fixed parameters */
const unsigned int LSH512_MSG_BLK_BYTE_LEN = 256;
const unsigned int LSH512_HASH_VAL_MAX_BYTE_LEN = 64;

const unsigned int CV_WORD_LEN = 16;
const unsigned int CONST_WORD_LEN = 8;
const unsigned int NUM_STEPS = 28;

/* Rotation amounts for the even and odd step functions */
const unsigned int ROT_EVEN_ALPHA = 23;
const unsigned int ROT_EVEN_BETA = 59;
const unsigned int ROT_ODD_ALPHA = 7;
const unsigned int ROT_ODD_BETA = 3;

/* Algorithm type codes: bit 16 marks the LSH-512 family,
   the low 16 bits hold the digest length in bytes */
const unsigned int LSH_TYPE_512_512 = 0x0010040;
const unsigned int LSH_TYPE_512_384 = 0x0010030;
const unsigned int LSH_TYPE_512_256 = 0x0010020;
const unsigned int LSH_TYPE_512_224 = 0x001001C;

/* Error codes */
const unsigned int LSH_SUCCESS = 0x0;
const unsigned int LSH_ERR_INVALID_DATABITLEN = 0x2403;
const unsigned int LSH_ERR_INVALID_STATE = 0x2404;

/* Indexes into the word64 state[] array */
const unsigned int AlgorithmType = 80;
const unsigned int RemainingBits = 81;
ANONYMOUS_NAMESPACE_END

NAMESPACE_BEGIN(CryptoPP)
NAMESPACE_BEGIN(LSH)

/* IV arrays and step constants are defined elsewhere in the library */
extern const word64 LSH512_IV224[CV_WORD_LEN];
extern const word64 LSH512_IV256[CV_WORD_LEN];
extern const word64 LSH512_IV384[CV_WORD_LEN];
extern const word64 LSH512_IV512[CV_WORD_LEN];
extern const word64 LSH512_StepConstants[CONST_WORD_LEN * NUM_STEPS];

NAMESPACE_END  // LSH
NAMESPACE_END  // CryptoPP
ANONYMOUS_NAMESPACE_BEGIN
using CryptoPP::word32;
using CryptoPP::word64;
using CryptoPP::rotlFixed;
using CryptoPP::rotlConstant;
using CryptoPP::GetBlock;
using CryptoPP::LittleEndian;
using CryptoPP::ConditionalByteReverse;
using CryptoPP::LITTLE_ENDIAN_ORDER;

using CryptoPP::LSH::LSH512_IV224;
using CryptoPP::LSH::LSH512_IV256;
using CryptoPP::LSH::LSH512_IV384;
using CryptoPP::LSH::LSH512_IV512;
using CryptoPP::LSH::LSH512_StepConstants;
struct LSH512_SSSE3_Context
{
    LSH512_SSSE3_Context(word64* state, word64 algType, word64& remainingBitLength) :
        cv_l(state+0), cv_r(state+8), sub_msgs(state+16),
        last_block(reinterpret_cast<byte*>(state+48)),
        remain_databitlen(remainingBitLength),
        alg_type(static_cast<lsh_type>(algType)) {}

    lsh_u64* cv_l;   // left half of the chaining variable, 8 words
    lsh_u64* cv_r;   // right half of the chaining variable, 8 words
    lsh_u64* sub_msgs;
    lsh_u8*  last_block;
    lsh_u64& remain_databitlen;
    lsh_type alg_type;
};
struct LSH512_SSSE3_Internal
{
    LSH512_SSSE3_Internal(word64* state) :
        submsg_e_l(state+16), submsg_e_r(state+24),
        submsg_o_l(state+32), submsg_o_r(state+40) { }

    lsh_u64* submsg_e_l; /* even-step sub-message, left  */
    lsh_u64* submsg_e_r; /* even-step sub-message, right */
    lsh_u64* submsg_o_l; /* odd-step sub-message, left   */
    lsh_u64* submsg_o_r; /* odd-step sub-message, right  */
};
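// Layout of the caller-provided word64 state[] array, as wired up by the two
// structs above (a sketch derived from the offsets used in this file):
//
//   state[ 0.. 7]   cv_l, left half of the chaining variable
//   state[ 8..15]   cv_r, right half of the chaining variable
//   state[16..47]   sub_msgs: submsg_e_l, submsg_e_r, submsg_o_l, submsg_o_r
//   state[48..79]   last_block, a 256-byte partial-block buffer
//   state[80]       AlgorithmType
//   state[81]       RemainingBits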
inline bool LSH_IS_LSH512(lsh_uint val) {
    return (val & 0xf0000) == 0x10000;
}

inline lsh_uint LSH_GET_SMALL_HASHBIT(lsh_uint val) {
    return val >> 24;
}

inline lsh_uint LSH_GET_HASHBYTE(lsh_uint val) {
    return val & 0xffff;
}

inline lsh_uint LSH_GET_HASHBIT(lsh_uint val) {
    return (LSH_GET_HASHBYTE(val) << 3) - LSH_GET_SMALL_HASHBIT(val);
}
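// Worked example (a sketch): LSH_TYPE_512_224 = 0x001001C. The low 16 bits
// give LSH_GET_HASHBYTE() = 0x1C = 28 bytes; bits 24 and up give
// LSH_GET_SMALL_HASHBIT() = 0; so LSH_GET_HASHBIT() = 28*8 - 0 = 224 bits.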
inline lsh_u64 loadLE64(lsh_u64 v) {
    return ConditionalByteReverse(LITTLE_ENDIAN_ORDER, v);
}

inline lsh_u64 ROTL64(lsh_u64 x, lsh_u32 r) {
    return rotlFixed(x, r);
}
inline void load_msg_blk(LSH512_SSSE3_Internal* i_state, const lsh_u8 msgblk[LSH512_MSG_BLK_BYTE_LEN])
{
    lsh_u64* submsg_e_l = i_state->submsg_e_l;
    lsh_u64* submsg_e_r = i_state->submsg_e_r;
    lsh_u64* submsg_o_l = i_state->submsg_o_l;
    lsh_u64* submsg_o_r = i_state->submsg_o_r;

    _mm_storeu_si128(M128_CAST(submsg_e_l+0),
        _mm_loadu_si128(CONST_M128_CAST(msgblk+0)));
    _mm_storeu_si128(M128_CAST(submsg_e_l+2),
        _mm_loadu_si128(CONST_M128_CAST(msgblk+16)));
    _mm_storeu_si128(M128_CAST(submsg_e_l+4),
        _mm_loadu_si128(CONST_M128_CAST(msgblk+32)));
    _mm_storeu_si128(M128_CAST(submsg_e_l+6),
        _mm_loadu_si128(CONST_M128_CAST(msgblk+48)));

    _mm_storeu_si128(M128_CAST(submsg_e_r+0),
        _mm_loadu_si128(CONST_M128_CAST(msgblk+64)));
    _mm_storeu_si128(M128_CAST(submsg_e_r+2),
        _mm_loadu_si128(CONST_M128_CAST(msgblk+80)));
    _mm_storeu_si128(M128_CAST(submsg_e_r+4),
        _mm_loadu_si128(CONST_M128_CAST(msgblk+96)));
    _mm_storeu_si128(M128_CAST(submsg_e_r+6),
        _mm_loadu_si128(CONST_M128_CAST(msgblk+112)));

    _mm_storeu_si128(M128_CAST(submsg_o_l+0),
        _mm_loadu_si128(CONST_M128_CAST(msgblk+128)));
    _mm_storeu_si128(M128_CAST(submsg_o_l+2),
        _mm_loadu_si128(CONST_M128_CAST(msgblk+144)));
    _mm_storeu_si128(M128_CAST(submsg_o_l+4),
        _mm_loadu_si128(CONST_M128_CAST(msgblk+160)));
    _mm_storeu_si128(M128_CAST(submsg_o_l+6),
        _mm_loadu_si128(CONST_M128_CAST(msgblk+176)));

    _mm_storeu_si128(M128_CAST(submsg_o_r+0),
        _mm_loadu_si128(CONST_M128_CAST(msgblk+192)));
    _mm_storeu_si128(M128_CAST(submsg_o_r+2),
        _mm_loadu_si128(CONST_M128_CAST(msgblk+208)));
    _mm_storeu_si128(M128_CAST(submsg_o_r+4),
        _mm_loadu_si128(CONST_M128_CAST(msgblk+224)));
    _mm_storeu_si128(M128_CAST(submsg_o_r+6),
        _mm_loadu_si128(CONST_M128_CAST(msgblk+240)));
}
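// A minimal scalar sketch of the same loading step: the 256-byte message
// block is split into four 8-word sub-messages, consumed in even/odd pairs
// (loadLE64() above makes the byte order explicit):
//
//   lsh_u64 m[32];
//   memcpy(m, msgblk, sizeof(m));  // 256 bytes
//   for (size_t i = 0; i < 8; ++i) {
//       submsg_e_l[i] = loadLE64(m[ 0+i]);   // bytes   0.. 63
//       submsg_e_r[i] = loadLE64(m[ 8+i]);   // bytes  64..127
//       submsg_o_l[i] = loadLE64(m[16+i]);   // bytes 128..191
//       submsg_o_r[i] = loadLE64(m[24+i]);   // bytes 192..255
//   }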
inline void msg_exp_even(LSH512_SSSE3_Internal* i_state)
{
    CRYPTOPP_ASSERT(i_state != NULLPTR);

    lsh_u64* submsg_e_l = i_state->submsg_e_l;
    lsh_u64* submsg_e_r = i_state->submsg_e_r;
    lsh_u64* submsg_o_l = i_state->submsg_o_l;
    lsh_u64* submsg_o_r = i_state->submsg_o_r;

    __m128i temp;

    // Permute the even sub-message, two words per register
    _mm_storeu_si128(M128_CAST(submsg_e_l+2), _mm_shuffle_epi32(
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_l+2)), _MM_SHUFFLE(1,0,3,2)));

    temp = _mm_loadu_si128(CONST_M128_CAST(submsg_e_l+0));
    _mm_storeu_si128(M128_CAST(submsg_e_l+0),
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_l+2)));
    _mm_storeu_si128(M128_CAST(submsg_e_l+2), temp);
    _mm_storeu_si128(M128_CAST(submsg_e_l+6), _mm_shuffle_epi32(
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_l+6)), _MM_SHUFFLE(1,0,3,2)));

    temp = _mm_loadu_si128(CONST_M128_CAST(submsg_e_l+4));
    _mm_storeu_si128(M128_CAST(submsg_e_l+4), _mm_unpacklo_epi64(
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_l+6)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_l+4))));
    _mm_storeu_si128(M128_CAST(submsg_e_l+6), _mm_unpackhi_epi64(
        temp, _mm_loadu_si128(CONST_M128_CAST(submsg_e_l+6))));
    _mm_storeu_si128(M128_CAST(submsg_e_r+2), _mm_shuffle_epi32(
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_r+2)), _MM_SHUFFLE(1,0,3,2)));

    temp = _mm_loadu_si128(CONST_M128_CAST(submsg_e_r+0));
    _mm_storeu_si128(M128_CAST(submsg_e_r+0),
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_r+2)));
    _mm_storeu_si128(M128_CAST(submsg_e_r+2), temp);
    _mm_storeu_si128(M128_CAST(submsg_e_r+6), _mm_shuffle_epi32(
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_r+6)), _MM_SHUFFLE(1,0,3,2)));

    temp = _mm_loadu_si128(CONST_M128_CAST(submsg_e_r+4));
    _mm_storeu_si128(M128_CAST(submsg_e_r+4), _mm_unpacklo_epi64(
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_r+6)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_r+4))));
    _mm_storeu_si128(M128_CAST(submsg_e_r+6), _mm_unpackhi_epi64(
        temp, _mm_loadu_si128(CONST_M128_CAST(submsg_e_r+6))));

    // Accumulate the odd sub-message into the permuted even sub-message
    _mm_storeu_si128(M128_CAST(submsg_e_l+0), _mm_add_epi64(
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_l+0)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_l+0))));
    _mm_storeu_si128(M128_CAST(submsg_e_l+2), _mm_add_epi64(
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_l+2)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_l+2))));
    _mm_storeu_si128(M128_CAST(submsg_e_l+4), _mm_add_epi64(
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_l+4)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_l+4))));
    _mm_storeu_si128(M128_CAST(submsg_e_l+6), _mm_add_epi64(
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_l+6)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_l+6))));

    _mm_storeu_si128(M128_CAST(submsg_e_r+0), _mm_add_epi64(
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_r+0)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_r+0))));
    _mm_storeu_si128(M128_CAST(submsg_e_r+2), _mm_add_epi64(
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_r+2)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_r+2))));
    _mm_storeu_si128(M128_CAST(submsg_e_r+4), _mm_add_epi64(
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_r+4)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_r+4))));
    _mm_storeu_si128(M128_CAST(submsg_e_r+6), _mm_add_epi64(
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_r+6)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_r+6))));
}
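// In scalar terms (a sketch of what the shuffles and unpacks compute): the
// even sub-message is permuted by the LSH word permutation tau within each
// four-word group, then the odd sub-message is added word-wise, i.e. roughly
//
//   for (size_t i = 0; i < 8; ++i) {
//       submsg_e_l[i] = submsg_o_l[i] + submsg_e_l[tau(i)];
//       submsg_e_r[i] = submsg_o_r[i] + submsg_e_r[tau(i)];
//   }
//
// msg_exp_odd() below is the mirror image, with the even and odd
// sub-messages exchanging roles.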
inline void msg_exp_odd(LSH512_SSSE3_Internal* i_state)
{
    CRYPTOPP_ASSERT(i_state != NULLPTR);

    lsh_u64* submsg_e_l = i_state->submsg_e_l;
    lsh_u64* submsg_e_r = i_state->submsg_e_r;
    lsh_u64* submsg_o_l = i_state->submsg_o_l;
    lsh_u64* submsg_o_r = i_state->submsg_o_r;

    __m128i temp;

    // Permute the odd sub-message, two words per register
    _mm_storeu_si128(M128_CAST(submsg_o_l+2), _mm_shuffle_epi32(
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_l+2)), _MM_SHUFFLE(1,0,3,2)));

    temp = _mm_loadu_si128(CONST_M128_CAST(submsg_o_l+0));
    _mm_storeu_si128(M128_CAST(submsg_o_l+0),
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_l+2)));
    _mm_storeu_si128(M128_CAST(submsg_o_l+2), temp);
    _mm_storeu_si128(M128_CAST(submsg_o_l+6), _mm_shuffle_epi32(
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_l+6)), _MM_SHUFFLE(1,0,3,2)));

    temp = _mm_loadu_si128(CONST_M128_CAST(submsg_o_l+4));
    _mm_storeu_si128(M128_CAST(submsg_o_l+4), _mm_unpacklo_epi64(
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_l+6)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_l+4))));
    _mm_storeu_si128(M128_CAST(submsg_o_l+6), _mm_unpackhi_epi64(
        temp, _mm_loadu_si128(CONST_M128_CAST(submsg_o_l+6))));
    _mm_storeu_si128(M128_CAST(submsg_o_r+2), _mm_shuffle_epi32(
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_r+2)), _MM_SHUFFLE(1,0,3,2)));

    temp = _mm_loadu_si128(CONST_M128_CAST(submsg_o_r+0));
    _mm_storeu_si128(M128_CAST(submsg_o_r+0),
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_r+2)));
    _mm_storeu_si128(M128_CAST(submsg_o_r+2), temp);
    _mm_storeu_si128(M128_CAST(submsg_o_r+6), _mm_shuffle_epi32(
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_r+6)), _MM_SHUFFLE(1,0,3,2)));

    temp = _mm_loadu_si128(CONST_M128_CAST(submsg_o_r+4));
    _mm_storeu_si128(M128_CAST(submsg_o_r+4), _mm_unpacklo_epi64(
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_r+6)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_r+4))));
    _mm_storeu_si128(M128_CAST(submsg_o_r+6), _mm_unpackhi_epi64(
        temp, _mm_loadu_si128(CONST_M128_CAST(submsg_o_r+6))));

    // Accumulate the even sub-message into the permuted odd sub-message
    _mm_storeu_si128(M128_CAST(submsg_o_l+0), _mm_add_epi64(
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_l+0)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_l+0))));
    _mm_storeu_si128(M128_CAST(submsg_o_l+2), _mm_add_epi64(
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_l+2)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_l+2))));
    _mm_storeu_si128(M128_CAST(submsg_o_l+4), _mm_add_epi64(
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_l+4)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_l+4))));
    _mm_storeu_si128(M128_CAST(submsg_o_l+6), _mm_add_epi64(
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_l+6)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_l+6))));

    _mm_storeu_si128(M128_CAST(submsg_o_r+0), _mm_add_epi64(
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_r+0)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_r+0))));
    _mm_storeu_si128(M128_CAST(submsg_o_r+2), _mm_add_epi64(
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_r+2)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_r+2))));
    _mm_storeu_si128(M128_CAST(submsg_o_r+4), _mm_add_epi64(
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_r+4)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_r+4))));
    _mm_storeu_si128(M128_CAST(submsg_o_r+6), _mm_add_epi64(
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_r+6)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_r+6))));
}
inline void load_sc(const lsh_u64** p_const_v, size_t i)
{
    *p_const_v = &LSH512_StepConstants[i];
}
inline void msg_add_even(lsh_u64 cv_l[8], lsh_u64 cv_r[8], LSH512_SSSE3_Internal* i_state)
{
    CRYPTOPP_ASSERT(i_state != NULLPTR);

    lsh_u64* submsg_e_l = i_state->submsg_e_l;
    lsh_u64* submsg_e_r = i_state->submsg_e_r;

    _mm_storeu_si128(M128_CAST(cv_l), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(cv_l)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_l))));
    _mm_storeu_si128(M128_CAST(cv_r), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(cv_r)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_r))));
    _mm_storeu_si128(M128_CAST(cv_l+2), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(cv_l+2)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_l+2))));
    _mm_storeu_si128(M128_CAST(cv_r+2), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(cv_r+2)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_r+2))));
    _mm_storeu_si128(M128_CAST(cv_l+4), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(cv_l+4)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_l+4))));
    _mm_storeu_si128(M128_CAST(cv_r+4), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(cv_r+4)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_r+4))));
    _mm_storeu_si128(M128_CAST(cv_l+6), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(cv_l+6)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_l+6))));
    _mm_storeu_si128(M128_CAST(cv_r+6), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(cv_r+6)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_e_r+6))));
}
inline void msg_add_odd(lsh_u64 cv_l[8], lsh_u64 cv_r[8], LSH512_SSSE3_Internal* i_state)
{
    CRYPTOPP_ASSERT(i_state != NULLPTR);

    lsh_u64* submsg_o_l = i_state->submsg_o_l;
    lsh_u64* submsg_o_r = i_state->submsg_o_r;

    _mm_storeu_si128(M128_CAST(cv_l), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(cv_l)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_l))));
    _mm_storeu_si128(M128_CAST(cv_r), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(cv_r)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_r))));
    _mm_storeu_si128(M128_CAST(cv_l+2), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(cv_l+2)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_l+2))));
    _mm_storeu_si128(M128_CAST(cv_r+2), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(cv_r+2)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_r+2))));
    _mm_storeu_si128(M128_CAST(cv_l+4), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(cv_l+4)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_l+4))));
    _mm_storeu_si128(M128_CAST(cv_r+4), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(cv_r+4)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_r+4))));
    _mm_storeu_si128(M128_CAST(cv_l+6), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(cv_l+6)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_l+6))));
    _mm_storeu_si128(M128_CAST(cv_r+6), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(cv_r+6)),
        _mm_loadu_si128(CONST_M128_CAST(submsg_o_r+6))));
}
inline void add_blk(lsh_u64 cv_l[8], lsh_u64 cv_r[8])
{
    _mm_storeu_si128(M128_CAST(cv_l), _mm_add_epi64(
        _mm_loadu_si128(CONST_M128_CAST(cv_l)),
        _mm_loadu_si128(CONST_M128_CAST(cv_r))));
    _mm_storeu_si128(M128_CAST(cv_l+2), _mm_add_epi64(
        _mm_loadu_si128(CONST_M128_CAST(cv_l+2)),
        _mm_loadu_si128(CONST_M128_CAST(cv_r+2))));
    _mm_storeu_si128(M128_CAST(cv_l+4), _mm_add_epi64(
        _mm_loadu_si128(CONST_M128_CAST(cv_l+4)),
        _mm_loadu_si128(CONST_M128_CAST(cv_r+4))));
    _mm_storeu_si128(M128_CAST(cv_l+6), _mm_add_epi64(
        _mm_loadu_si128(CONST_M128_CAST(cv_l+6)),
        _mm_loadu_si128(CONST_M128_CAST(cv_r+6))));
}
template <unsigned int R>
inline void rotate_blk(lsh_u64 cv[8])
{
#if defined(CRYPTOPP_XOP_AVAILABLE)
    _mm_storeu_si128(M128_CAST(cv), _mm_roti_epi64(_mm_loadu_si128(CONST_M128_CAST(cv)), R));
    _mm_storeu_si128(M128_CAST(cv+2), _mm_roti_epi64(_mm_loadu_si128(CONST_M128_CAST(cv+2)), R));
    _mm_storeu_si128(M128_CAST(cv+4), _mm_roti_epi64(_mm_loadu_si128(CONST_M128_CAST(cv+4)), R));
    _mm_storeu_si128(M128_CAST(cv+6), _mm_roti_epi64(_mm_loadu_si128(CONST_M128_CAST(cv+6)), R));
#else
    _mm_storeu_si128(M128_CAST(cv), _mm_or_si128(
        _mm_slli_epi64(_mm_loadu_si128(CONST_M128_CAST(cv)), R),
        _mm_srli_epi64(_mm_loadu_si128(CONST_M128_CAST(cv)), 64-R)));
    _mm_storeu_si128(M128_CAST(cv+2), _mm_or_si128(
        _mm_slli_epi64(_mm_loadu_si128(CONST_M128_CAST(cv+2)), R),
        _mm_srli_epi64(_mm_loadu_si128(CONST_M128_CAST(cv+2)), 64-R)));
    _mm_storeu_si128(M128_CAST(cv+4), _mm_or_si128(
        _mm_slli_epi64(_mm_loadu_si128(CONST_M128_CAST(cv+4)), R),
        _mm_srli_epi64(_mm_loadu_si128(CONST_M128_CAST(cv+4)), 64-R)));
    _mm_storeu_si128(M128_CAST(cv+6), _mm_or_si128(
        _mm_slli_epi64(_mm_loadu_si128(CONST_M128_CAST(cv+6)), R),
        _mm_srli_epi64(_mm_loadu_si128(CONST_M128_CAST(cv+6)), 64-R)));
#endif
}
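// Both branches compute the same 64-bit left rotation,
// rotl(x, R) = (x << R) | (x >> (64-R)); the XOP branch gets it as a single
// rotate instruction (vprotq) via _mm_roti_epi64, while plain SSE needs a
// shift/shift/or sequence per register.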
inline void xor_with_const(lsh_u64 cv_l[8], const lsh_u64 const_v[8])
{
    _mm_storeu_si128(M128_CAST(cv_l), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(cv_l)),
        _mm_loadu_si128(CONST_M128_CAST(const_v))));
    _mm_storeu_si128(M128_CAST(cv_l+2), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(cv_l+2)),
        _mm_loadu_si128(CONST_M128_CAST(const_v+2))));
    _mm_storeu_si128(M128_CAST(cv_l+4), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(cv_l+4)),
        _mm_loadu_si128(CONST_M128_CAST(const_v+4))));
    _mm_storeu_si128(M128_CAST(cv_l+6), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(cv_l+6)),
        _mm_loadu_si128(CONST_M128_CAST(const_v+6))));
}
inline void rotate_msg_gamma(lsh_u64 cv_r[8])
{
    // gamma_512 = { 0, 16, 32, 48, 8, 24, 40, 56 }
    _mm_storeu_si128(M128_CAST(cv_r+0),
        _mm_shuffle_epi8(_mm_loadu_si128(CONST_M128_CAST(cv_r+0)),
            _mm_set_epi8(13,12,11,10, 9,8,15,14, 7,6,5,4, 3,2,1,0)));
    _mm_storeu_si128(M128_CAST(cv_r+2),
        _mm_shuffle_epi8(_mm_loadu_si128(CONST_M128_CAST(cv_r+2)),
            _mm_set_epi8(9,8,15,14, 13,12,11,10, 3,2,1,0, 7,6,5,4)));
    _mm_storeu_si128(M128_CAST(cv_r+4),
        _mm_shuffle_epi8(_mm_loadu_si128(CONST_M128_CAST(cv_r+4)),
            _mm_set_epi8(12,11,10,9, 8,15,14,13, 6,5,4,3, 2,1,0,7)));
    _mm_storeu_si128(M128_CAST(cv_r+6),
        _mm_shuffle_epi8(_mm_loadu_si128(CONST_M128_CAST(cv_r+6)),
            _mm_set_epi8(8,15,14,13, 12,11,10,9, 2,1,0,7, 6,5,4,3)));
}
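// Design note: LSH-512's gamma rotations are all whole-byte amounts, so a
// single pshufb (_mm_shuffle_epi8) per register replaces a shift/or pair.
// For example, in the first mask the low qword keeps bytes 0..7 in place
// (rotate by 0) while the high qword maps byte j to byte j+2 mod 8, which is
// a rotate by 16 bits.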
inline void word_perm(lsh_u64 cv_l[8], lsh_u64 cv_r[8])
{
    __m128i temp[2];
    temp[0] = _mm_loadu_si128(CONST_M128_CAST(cv_l+0));
    _mm_storeu_si128(M128_CAST(cv_l+0), _mm_unpacklo_epi64(
        _mm_loadu_si128(CONST_M128_CAST(cv_l+0)), _mm_loadu_si128(CONST_M128_CAST(cv_l+2))));
    _mm_storeu_si128(M128_CAST(cv_l+2), _mm_unpackhi_epi64(
        temp[0], _mm_loadu_si128(CONST_M128_CAST(cv_l+2))));
    temp[0] = _mm_loadu_si128(CONST_M128_CAST(cv_l+4));
    _mm_storeu_si128(M128_CAST(cv_l+4), _mm_unpacklo_epi64(
        _mm_loadu_si128(CONST_M128_CAST(cv_l+4)), _mm_loadu_si128(CONST_M128_CAST(cv_l+6))));
    _mm_storeu_si128(M128_CAST(cv_l+6), _mm_unpackhi_epi64(
        temp[0], _mm_loadu_si128(CONST_M128_CAST(cv_l+6))));
    _mm_storeu_si128(M128_CAST(cv_r+2), _mm_shuffle_epi32(
        _mm_loadu_si128(CONST_M128_CAST(cv_r+2)), _MM_SHUFFLE(1,0,3,2)));

    temp[0] = _mm_loadu_si128(CONST_M128_CAST(cv_r+0));
    _mm_storeu_si128(M128_CAST(cv_r+0), _mm_unpacklo_epi64(
        _mm_loadu_si128(CONST_M128_CAST(cv_r+0)), _mm_loadu_si128(CONST_M128_CAST(cv_r+2))));
    _mm_storeu_si128(M128_CAST(cv_r+2), _mm_unpackhi_epi64(
        temp[0], _mm_loadu_si128(CONST_M128_CAST(cv_r+2))));
    _mm_storeu_si128(M128_CAST(cv_r+6), _mm_shuffle_epi32(
        _mm_loadu_si128(CONST_M128_CAST(cv_r+6)), _MM_SHUFFLE(1,0,3,2)));

    temp[0] = _mm_loadu_si128(CONST_M128_CAST(cv_r+4));
    _mm_storeu_si128(M128_CAST(cv_r+4), _mm_unpacklo_epi64(
        _mm_loadu_si128(CONST_M128_CAST(cv_r+4)), _mm_loadu_si128(CONST_M128_CAST(cv_r+6))));
    _mm_storeu_si128(M128_CAST(cv_r+6), _mm_unpackhi_epi64(
        temp[0], _mm_loadu_si128(CONST_M128_CAST(cv_r+6))));

    // Exchange the two-word groups between the left and right halves
    temp[0] = _mm_loadu_si128(CONST_M128_CAST(cv_l+0));
    temp[1] = _mm_loadu_si128(CONST_M128_CAST(cv_l+2));
    _mm_storeu_si128(M128_CAST(cv_l+0), _mm_loadu_si128(CONST_M128_CAST(cv_l+4)));
    _mm_storeu_si128(M128_CAST(cv_l+2), _mm_loadu_si128(CONST_M128_CAST(cv_l+6)));
    _mm_storeu_si128(M128_CAST(cv_l+4), _mm_loadu_si128(CONST_M128_CAST(cv_r+4)));
    _mm_storeu_si128(M128_CAST(cv_l+6), _mm_loadu_si128(CONST_M128_CAST(cv_r+6)));
    _mm_storeu_si128(M128_CAST(cv_r+4), _mm_loadu_si128(CONST_M128_CAST(cv_r+0)));
    _mm_storeu_si128(M128_CAST(cv_r+6), _mm_loadu_si128(CONST_M128_CAST(cv_r+2)));
    _mm_storeu_si128(M128_CAST(cv_r+0), temp[0]);
    _mm_storeu_si128(M128_CAST(cv_r+2), temp[1]);
}
template <unsigned int Alpha, unsigned int Beta>
inline void mix(lsh_u64 cv_l[8], lsh_u64 cv_r[8], const lsh_u64 const_v[8])
{
    add_blk(cv_l, cv_r);
    rotate_blk<Alpha>(cv_l);
    xor_with_const(cv_l, const_v);
    add_blk(cv_r, cv_l);
    rotate_blk<Beta>(cv_r);
    add_blk(cv_l, cv_r);
    rotate_msg_gamma(cv_r);
}
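// Per word pair (L = cv_l[i], R = cv_r[i], SC = const_v[i]) the mix above
// computes the LSH step function:
//
//   L = rotl64(L + R, Alpha) ^ SC;
//   R = rotl64(R + L, Beta);
//   L = L + R;
//   R = gamma-rotate(R);   // rotate_msg_gamma(); the amount depends on i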
inline void compress(LSH512_SSSE3_Context* ctx, const lsh_u8 pdMsgBlk[LSH512_MSG_BLK_BYTE_LEN])
{
    CRYPTOPP_ASSERT(ctx != NULLPTR);

    LSH512_SSSE3_Internal  s_state(ctx->cv_l);
    LSH512_SSSE3_Internal* i_state = &s_state;

    const lsh_u64* const_v = NULL;
    lsh_u64 *cv_l = ctx->cv_l;
    lsh_u64 *cv_r = ctx->cv_r;

    load_msg_blk(i_state, pdMsgBlk);

    msg_add_even(cv_l, cv_r, i_state);
    load_sc(&const_v, 0);
    mix<ROT_EVEN_ALPHA, ROT_EVEN_BETA>(cv_l, cv_r, const_v);
    word_perm(cv_l, cv_r);

    msg_add_odd(cv_l, cv_r, i_state);
    load_sc(&const_v, 8);
    mix<ROT_ODD_ALPHA, ROT_ODD_BETA>(cv_l, cv_r, const_v);
    word_perm(cv_l, cv_r);

    for (size_t i = 1; i < NUM_STEPS / 2; i++)
    {
        msg_exp_even(i_state);
        msg_add_even(cv_l, cv_r, i_state);
        load_sc(&const_v, 16 * i);
        mix<ROT_EVEN_ALPHA, ROT_EVEN_BETA>(cv_l, cv_r, const_v);
        word_perm(cv_l, cv_r);

        msg_exp_odd(i_state);
        msg_add_odd(cv_l, cv_r, i_state);
        load_sc(&const_v, 16 * i + 8);
        mix<ROT_ODD_ALPHA, ROT_ODD_BETA>(cv_l, cv_r, const_v);
        word_perm(cv_l, cv_r);
    }

    // Final message addition
    msg_exp_even(i_state);
    msg_add_even(cv_l, cv_r, i_state);
}
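// Compression structure: steps 0 and 1 use the sub-messages exactly as
// loaded, the loop performs the remaining 26 of NUM_STEPS = 28 steps with
// message expansion in between, and the schedule ends with one extra
// expansion and message addition (the final "MsgAdd" of LSH).
//
// The original listing elides load_iv() here. A minimal sketch, consistent
// with its four call sites below (copy the 16-word IV into cv_l and cv_r;
// the real implementation may use aligned loads, since the IV tables can be
// aligned):
inline void load_iv(lsh_u64 cv_l[8], lsh_u64 cv_r[8], const lsh_u64 iv[16])
{
    _mm_storeu_si128(M128_CAST(cv_l+0), _mm_loadu_si128(CONST_M128_CAST(iv+0)));
    _mm_storeu_si128(M128_CAST(cv_l+2), _mm_loadu_si128(CONST_M128_CAST(iv+2)));
    _mm_storeu_si128(M128_CAST(cv_l+4), _mm_loadu_si128(CONST_M128_CAST(iv+4)));
    _mm_storeu_si128(M128_CAST(cv_l+6), _mm_loadu_si128(CONST_M128_CAST(iv+6)));
    _mm_storeu_si128(M128_CAST(cv_r+0), _mm_loadu_si128(CONST_M128_CAST(iv+8)));
    _mm_storeu_si128(M128_CAST(cv_r+2), _mm_loadu_si128(CONST_M128_CAST(iv+10)));
    _mm_storeu_si128(M128_CAST(cv_r+4), _mm_loadu_si128(CONST_M128_CAST(iv+12)));
    _mm_storeu_si128(M128_CAST(cv_r+6), _mm_loadu_si128(CONST_M128_CAST(iv+14)));
}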
inline void zero_iv(lsh_u64 cv_l[8], lsh_u64 cv_r[8])
{
    _mm_storeu_si128(M128_CAST(cv_l+0), _mm_setzero_si128());
    _mm_storeu_si128(M128_CAST(cv_l+2), _mm_setzero_si128());
    _mm_storeu_si128(M128_CAST(cv_l+4), _mm_setzero_si128());
    _mm_storeu_si128(M128_CAST(cv_l+6), _mm_setzero_si128());
    _mm_storeu_si128(M128_CAST(cv_r+0), _mm_setzero_si128());
    _mm_storeu_si128(M128_CAST(cv_r+2), _mm_setzero_si128());
    _mm_storeu_si128(M128_CAST(cv_r+4), _mm_setzero_si128());
    _mm_storeu_si128(M128_CAST(cv_r+6), _mm_setzero_si128());
}
inline void zero_submsgs(LSH512_SSSE3_Context* ctx)
{
    lsh_u64* sub_msgs = ctx->sub_msgs;

    _mm_storeu_si128(M128_CAST(sub_msgs+ 0), _mm_setzero_si128());
    _mm_storeu_si128(M128_CAST(sub_msgs+ 2), _mm_setzero_si128());
    _mm_storeu_si128(M128_CAST(sub_msgs+ 4), _mm_setzero_si128());
    _mm_storeu_si128(M128_CAST(sub_msgs+ 6), _mm_setzero_si128());
    _mm_storeu_si128(M128_CAST(sub_msgs+ 8), _mm_setzero_si128());
    _mm_storeu_si128(M128_CAST(sub_msgs+10), _mm_setzero_si128());
    _mm_storeu_si128(M128_CAST(sub_msgs+12), _mm_setzero_si128());
    _mm_storeu_si128(M128_CAST(sub_msgs+14), _mm_setzero_si128());
}
inline void init224(LSH512_SSSE3_Context* ctx)
{
    CRYPTOPP_ASSERT(ctx != NULLPTR);

    zero_submsgs(ctx);
    load_iv(ctx->cv_l, ctx->cv_r, LSH512_IV224);
}

inline void init256(LSH512_SSSE3_Context* ctx)
{
    CRYPTOPP_ASSERT(ctx != NULLPTR);

    zero_submsgs(ctx);
    load_iv(ctx->cv_l, ctx->cv_r, LSH512_IV256);
}

inline void init384(LSH512_SSSE3_Context* ctx)
{
    CRYPTOPP_ASSERT(ctx != NULLPTR);

    zero_submsgs(ctx);
    load_iv(ctx->cv_l, ctx->cv_r, LSH512_IV384);
}

inline void init512(LSH512_SSSE3_Context* ctx)
{
    CRYPTOPP_ASSERT(ctx != NULLPTR);

    zero_submsgs(ctx);
    load_iv(ctx->cv_l, ctx->cv_r, LSH512_IV512);
}
inline void fin(LSH512_SSSE3_Context* ctx)
{
    CRYPTOPP_ASSERT(ctx != NULLPTR);

    // Final transformation: cv_l ^= cv_r
    _mm_storeu_si128(M128_CAST(ctx->cv_l+0), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(ctx->cv_l+0)),
        _mm_loadu_si128(CONST_M128_CAST(ctx->cv_r+0))));
    _mm_storeu_si128(M128_CAST(ctx->cv_l+2), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(ctx->cv_l+2)),
        _mm_loadu_si128(CONST_M128_CAST(ctx->cv_r+2))));
    _mm_storeu_si128(M128_CAST(ctx->cv_l+4), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(ctx->cv_l+4)),
        _mm_loadu_si128(CONST_M128_CAST(ctx->cv_r+4))));
    _mm_storeu_si128(M128_CAST(ctx->cv_l+6), _mm_xor_si128(
        _mm_loadu_si128(CONST_M128_CAST(ctx->cv_l+6)),
        _mm_loadu_si128(CONST_M128_CAST(ctx->cv_r+6))));
}
inline void get_hash(LSH512_SSSE3_Context* ctx, lsh_u8* pbHashVal)
{
    CRYPTOPP_ASSERT(ctx != NULLPTR);
    CRYPTOPP_ASSERT(ctx->alg_type != 0);

    lsh_uint alg_type = ctx->alg_type;
    lsh_uint hash_val_byte_len = LSH_GET_HASHBYTE(alg_type);
    lsh_uint hash_val_bit_len = LSH_GET_SMALL_HASHBIT(alg_type);

    // Copy the whole bytes of the digest, then mask any trailing partial byte
    memcpy(pbHashVal, ctx->cv_l, hash_val_byte_len);
    if (hash_val_bit_len){
        pbHashVal[hash_val_byte_len-1] &= (((lsh_u8)0xff) << hash_val_bit_len);
    }
}
lsh_err lsh512_init_ssse3(LSH512_SSSE3_Context* ctx)
{
    CRYPTOPP_ASSERT(ctx != NULLPTR);
    CRYPTOPP_ASSERT(ctx->alg_type != 0);

    lsh_u32 alg_type = ctx->alg_type;
    const lsh_u64* const_v = NULL;
    ctx->remain_databitlen = 0;

    switch (alg_type){
    case LSH_TYPE_512_512: init512(ctx); return LSH_SUCCESS;
    case LSH_TYPE_512_384: init384(ctx); return LSH_SUCCESS;
    case LSH_TYPE_512_256: init256(ctx); return LSH_SUCCESS;
    case LSH_TYPE_512_224: init224(ctx); return LSH_SUCCESS;
    default: break;
    }

    // Non-standard digest length: derive the IV from the algorithm type
    lsh_u64* cv_l = ctx->cv_l;
    lsh_u64* cv_r = ctx->cv_r;

    zero_iv(cv_l, cv_r);
    cv_l[0] = LSH512_HASH_VAL_MAX_BYTE_LEN;
    cv_l[1] = LSH_GET_HASHBIT(alg_type);

    for (size_t i = 0; i < NUM_STEPS / 2; i++)
    {
        // Mix
        load_sc(&const_v, i * 16);
        mix<ROT_EVEN_ALPHA, ROT_EVEN_BETA>(cv_l, cv_r, const_v);
        word_perm(cv_l, cv_r);

        load_sc(&const_v, i * 16 + 8);
        mix<ROT_ODD_ALPHA, ROT_ODD_BETA>(cv_l, cv_r, const_v);
        word_perm(cv_l, cv_r);
    }

    return LSH_SUCCESS;
}
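// Note: the four standard digest sizes use precomputed IV tables (LSH512_IV224
// and friends); for any other valid type code the IV is generated on the fly
// above by seeding the state with the maximum digest length and the requested
// bit length, then running the step function, presumably the same procedure
// used to precompute the tables.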
lsh_err lsh512_update_ssse3(LSH512_SSSE3_Context* ctx, const lsh_u8* data, size_t databitlen)
{
    CRYPTOPP_ASSERT(ctx != NULLPTR);
    CRYPTOPP_ASSERT(data != NULLPTR);
    CRYPTOPP_ASSERT(databitlen % 8 == 0);

    if (databitlen == 0){
        return LSH_SUCCESS;
    }

    // We are byte oriented; tail bits are not processed
    size_t databytelen = databitlen >> 3;
    // lsh_uint pos2 = databitlen & 0x7;
    const size_t pos2 = 0;

    size_t remain_msg_byte = static_cast<size_t>(ctx->remain_databitlen >> 3);
    // lsh_uint remain_msg_bit = ctx->remain_databitlen & 7;
    const size_t remain_msg_bit = 0;

    if (remain_msg_byte >= LSH512_MSG_BLK_BYTE_LEN){
        return LSH_ERR_INVALID_STATE;
    }
    if (remain_msg_bit > 0){
        return LSH_ERR_INVALID_DATABITLEN;
    }

    if (databytelen + remain_msg_byte < LSH512_MSG_BLK_BYTE_LEN){
        // Not enough for a full block; just buffer it
        memcpy(ctx->last_block + remain_msg_byte, data, databytelen);
        ctx->remain_databitlen += (lsh_uint)databitlen;
        remain_msg_byte += (lsh_uint)databytelen;
        if (pos2){
            ctx->last_block[remain_msg_byte] = data[databytelen] & ((0xff >> pos2) ^ 0xff);
        }
        return LSH_SUCCESS;
    }

    if (remain_msg_byte > 0){
        // Top up the buffered partial block and compress it
        size_t more_byte = LSH512_MSG_BLK_BYTE_LEN - remain_msg_byte;
        memcpy(ctx->last_block + remain_msg_byte, data, more_byte);
        compress(ctx, ctx->last_block);
        data += more_byte;
        databytelen -= more_byte;
        remain_msg_byte = 0;
        ctx->remain_databitlen = 0;
    }

    while (databytelen >= LSH512_MSG_BLK_BYTE_LEN)
    {
        // The data pointer may be unaligned; compress() uses unaligned loads
        compress(ctx, data);
        data += LSH512_MSG_BLK_BYTE_LEN;
        databytelen -= LSH512_MSG_BLK_BYTE_LEN;
    }

    if (databytelen > 0){
        memcpy(ctx->last_block, data, databytelen);
        ctx->remain_databitlen = (lsh_uint)(databytelen << 3);
    }
    if (pos2){
        ctx->last_block[databytelen] = data[databytelen] & ((0xff >> pos2) ^ 0xff);
        ctx->remain_databitlen += pos2;
    }

    return LSH_SUCCESS;
}
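// Worked example (a sketch): with 20 bytes buffered, an update of 300 bytes
// first copies 236 bytes to fill last_block and compresses it; the remaining
// 64 bytes are less than LSH512_MSG_BLK_BYTE_LEN, so they are buffered and
// remain_databitlen becomes 64*8 = 512.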
lsh_err lsh512_final_ssse3(LSH512_SSSE3_Context* ctx, lsh_u8* hashval)
{
    CRYPTOPP_ASSERT(ctx != NULLPTR);
    CRYPTOPP_ASSERT(hashval != NULLPTR);

    // We are byte oriented; tail bits are not processed
    size_t remain_msg_byte = static_cast<size_t>(ctx->remain_databitlen >> 3);
    // lsh_uint remain_msg_bit = ctx->remain_databitlen & 7;
    const size_t remain_msg_bit = 0;

    if (remain_msg_byte >= LSH512_MSG_BLK_BYTE_LEN){
        return LSH_ERR_INVALID_STATE;
    }

    // Pad with a single 1 bit (0x80) then zeros, and compress the last block
    if (remain_msg_bit)
        ctx->last_block[remain_msg_byte] |= (0x1 << (7 - remain_msg_bit));
    else
        ctx->last_block[remain_msg_byte] = 0x80;
    memset(ctx->last_block + remain_msg_byte + 1, 0, LSH512_MSG_BLK_BYTE_LEN - remain_msg_byte - 1);

    compress(ctx, ctx->last_block);

    fin(ctx);
    get_hash(ctx, hashval);

    return LSH_SUCCESS;
}
ANONYMOUS_NAMESPACE_END

NAMESPACE_BEGIN(CryptoPP)

void LSH512_Base_Restart_SSSE3(word64* state)
{
    state[RemainingBits] = 0;
    LSH512_SSSE3_Context ctx(state, state[AlgorithmType], state[RemainingBits]);
    lsh_err err = lsh512_init_ssse3(&ctx);

    if (err != LSH_SUCCESS)
        throw Exception(Exception::OTHER_ERROR, "LSH512_Base: lsh512_init_ssse3 failed");
}

void LSH512_Base_Update_SSSE3(word64* state, const byte *input, size_t size)
{
    LSH512_SSSE3_Context ctx(state, state[AlgorithmType], state[RemainingBits]);
    lsh_err err = lsh512_update_ssse3(&ctx, input, 8*size);

    if (err != LSH_SUCCESS)
        throw Exception(Exception::OTHER_ERROR, "LSH512_Base: lsh512_update_ssse3 failed");
}

void LSH512_Base_TruncatedFinal_SSSE3(word64* state, byte *hash, size_t)
{
    LSH512_SSSE3_Context ctx(state, state[AlgorithmType], state[RemainingBits]);
    lsh_err err = lsh512_final_ssse3(&ctx, hash);

    if (err != LSH_SUCCESS)
        throw Exception(Exception::OTHER_ERROR, "LSH512_Base: lsh512_final_ssse3 failed");
}

NAMESPACE_END

#endif  // CRYPTOPP_SSSE3_AVAILABLE && CRYPTOPP_ENABLE_64BIT_SSE
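// Usage (a sketch): these entry points are not called directly; the LSH512
// base class dispatches to them when SSSE3 is available at runtime. User code
// goes through the ordinary HashTransformation interface, e.g.:
//
//   #include "lsh.h"
//   CryptoPP::LSH512 hash;
//   hash.Update(message, messageLen);
//   CryptoPP::byte digest[CryptoPP::LSH512::DIGESTSIZE];
//   hash.Final(digest);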