19extern const char LSH512_SSE_FNAME[] = __FILE__;
21#if defined(CRYPTOPP_SSSE3_AVAILABLE) && defined(CRYPTOPP_ENABLE_64BIT_SSE)
23#if defined(CRYPTOPP_SSSE3_AVAILABLE)
24# include <emmintrin.h>
25# include <tmmintrin.h>
28#if defined(CRYPTOPP_XOP_AVAILABLE)
29# include <ammintrin.h>
33#if (CRYPTOPP_GCC_VERSION >= 40500)
34# include <x86intrin.h>
37ANONYMOUS_NAMESPACE_BEGIN
// LSH-512 geometry: one message block is 256 bytes (32 64-bit words).
41const unsigned int LSH512_MSG_BLK_BYTE_LEN = 256;
// Largest digest in the 512-bit family (LSH-512-512 -> 64 bytes).
44const unsigned int LSH512_HASH_VAL_MAX_BYTE_LEN = 64;
// Chaining variable is 16 words; each step uses 8 constant words.
47const unsigned int CV_WORD_LEN = 16;
48const unsigned int CONST_WORD_LEN = 8;
// The compression function performs 28 mix steps.
50const unsigned int NUM_STEPS = 28;
// Rotation amounts used by mix<> on even and odd steps.
52const unsigned int ROT_EVEN_ALPHA = 23;
53const unsigned int ROT_EVEN_BETA = 59;
54const unsigned int ROT_ODD_ALPHA = 7;
55const unsigned int ROT_ODD_BETA = 3;
// Algorithm type codes. The low byte appears to encode the digest byte
// length (0x40=64, 0x30=48, 0x20=32, 0x1C=28) and bits 16-19 mark the
// LSH-512 family (see LSH_IS_LSH512 below) -- consistent with how
// LSH_GET_HASHBYTE/LSH_GET_HASHBIT consume these values.
57const unsigned int LSH_TYPE_512_512 = 0x0010040;
58const unsigned int LSH_TYPE_512_384 = 0x0010030;
59const unsigned int LSH_TYPE_512_256 = 0x0010020;
60const unsigned int LSH_TYPE_512_224 = 0x001001C;
// Status codes returned by the init/update/final routines below.
67const unsigned int LSH_SUCCESS = 0x0;
70const unsigned int LSH_ERR_INVALID_DATABITLEN = 0x2403;
71const unsigned int LSH_ERR_INVALID_STATE = 0x2404;
// Word indexes into the caller-supplied word64 state array where the
// algorithm type code and the buffered-bit count are stored.
75const unsigned int AlgorithmType = 80;
76const unsigned int RemainingBits = 81;
// Initial vectors and step constants; defined in another translation unit.
84extern const word64 LSH512_IV224[CV_WORD_LEN];
85extern const word64 LSH512_IV256[CV_WORD_LEN];
86extern const word64 LSH512_IV384[CV_WORD_LEN];
87extern const word64 LSH512_IV512[CV_WORD_LEN];
88extern const word64 LSH512_StepConstants[CONST_WORD_LEN * NUM_STEPS];
93ANONYMOUS_NAMESPACE_BEGIN
101using CryptoPP::GetBlock;
106using CryptoPP::LSH::LSH512_IV224;
107using CryptoPP::LSH::LSH512_IV256;
108using CryptoPP::LSH::LSH512_IV384;
109using CryptoPP::LSH::LSH512_IV512;
110using CryptoPP::LSH::LSH512_StepConstants;
// Non-owning view over the caller's word64 state array, partitioning it
// into left/right chaining variables (words 0-7 / 8-15), the sub-message
// scratch area (from word 16), and a byte buffer for a partial final
// block (from word 48). Holds references/pointers only.
// NOTE(review): the constructor signature and several member declarations
// were lost in extraction; the lines below are the surviving fragments.
119struct LSH512_SSSE3_Context
122	cv_l(state+0), cv_r(state+8), sub_msgs(state+16),
123	last_block(reinterpret_cast<
byte*>(state+48)),
124	remain_databitlen(remainingBitLength),
125	alg_type(static_cast<lsh_type>(algType)) {}
131	lsh_u64& remain_databitlen;
// Scratch view over the sub-message region of the state array: the four
// 8-word sub-message arrays (even/odd x left/right) live at word
// offsets 16, 24, 32, 40.
// NOTE(review): member pointer declarations were lost in extraction.
135struct LSH512_SSSE3_Internal
137	LSH512_SSSE3_Internal(
word64* state) :
138	submsg_e_l(state+16), submsg_e_r(state+24),
139	submsg_o_l(state+32), submsg_o_r(state+40) { }
151inline bool LSH_IS_LSH512(lsh_uint val) {
152 return (val & 0xf0000) == 0x10000;
// Extract digest-size fields from an algorithm type code.
// NOTE(review): the bodies of LSH_GET_SMALL_HASHBIT and LSH_GET_HASHBYTE
// were lost in extraction; only their signatures survive here.
155inline lsh_uint LSH_GET_SMALL_HASHBIT(lsh_uint val) {
159inline lsh_uint LSH_GET_HASHBYTE(lsh_uint val) {
// Digest length in bits: whole bytes times 8, minus any sub-byte remainder.
163inline lsh_uint LSH_GET_HASHBIT(lsh_uint val) {
164	return (LSH_GET_HASHBYTE(val) << 3) - LSH_GET_SMALL_HASHBIT(val);
// loadLE64: presumably a little-endian load/byte-order helper for a
// 64-bit word -- body lost in extraction, verify against the original.
167inline lsh_u64 loadLE64(lsh_u64 v) {
// ROTL64: 64-bit rotate-left of x by r -- body lost in extraction.
171lsh_u64 ROTL64(lsh_u64 x, lsh_u32 r) {
// Split a 256-byte message block into the four 8-word sub-message
// arrays (even/odd x left/right), 16 bytes per 128-bit store.
// NOTE(review): each _mm_storeu_si128 lost its source operand
// (presumably an unaligned load from msgblk) during extraction.
176inline void load_msg_blk(LSH512_SSSE3_Internal* i_state,
const lsh_u8 msgblk[LSH512_MSG_BLK_BYTE_LEN])
178	lsh_u64* submsg_e_l = i_state->submsg_e_l;
179	lsh_u64* submsg_e_r = i_state->submsg_e_r;
180	lsh_u64* submsg_o_l = i_state->submsg_o_l;
181	lsh_u64* submsg_o_r = i_state->submsg_o_r;
// Even-left sub-message words 0..7
183	_mm_storeu_si128(
M128_CAST(submsg_e_l+0),
185	_mm_storeu_si128(
M128_CAST(submsg_e_l+2),
187	_mm_storeu_si128(
M128_CAST(submsg_e_l+4),
189	_mm_storeu_si128(
M128_CAST(submsg_e_l+6),
// Even-right sub-message words 0..7
192	_mm_storeu_si128(
M128_CAST(submsg_e_r+0),
194	_mm_storeu_si128(
M128_CAST(submsg_e_r+2),
196	_mm_storeu_si128(
M128_CAST(submsg_e_r+4),
198	_mm_storeu_si128(
M128_CAST(submsg_e_r+6),
// Odd-left sub-message words 0..7
201	_mm_storeu_si128(
M128_CAST(submsg_o_l+0),
203	_mm_storeu_si128(
M128_CAST(submsg_o_l+2),
205	_mm_storeu_si128(
M128_CAST(submsg_o_l+4),
207	_mm_storeu_si128(
M128_CAST(submsg_o_l+6),
// Odd-right sub-message words 0..7
210	_mm_storeu_si128(
M128_CAST(submsg_o_r+0),
212	_mm_storeu_si128(
M128_CAST(submsg_o_r+2),
214	_mm_storeu_si128(
M128_CAST(submsg_o_r+4),
216	_mm_storeu_si128(
M128_CAST(submsg_o_r+6),
// Message expansion for an even step: permute words of the even
// sub-messages (32-bit shuffles and 64-bit unpacks, with `temp`
// carrying a displaced lane), then add the odd sub-messages into them
// word-wise (the _mm_add_epi64 stores at the end).
// NOTE(review): many intrinsic operands and the `temp` declaration were
// lost in extraction; only the shuffle/unpack/add skeleton survives.
220inline void msg_exp_even(LSH512_SSSE3_Internal* i_state)
224	lsh_u64* submsg_e_l = i_state->submsg_e_l;
225	lsh_u64* submsg_e_r = i_state->submsg_e_r;
226	lsh_u64* submsg_o_l = i_state->submsg_o_l;
227	lsh_u64* submsg_o_r = i_state->submsg_o_r;
// Permute the even-left words.
230	_mm_storeu_si128(
M128_CAST(submsg_e_l+2), _mm_shuffle_epi32(
231	_mm_loadu_si128(
CONST_M128_CAST(submsg_e_l+2)), _MM_SHUFFLE(1,0,3,2)));
234	_mm_storeu_si128(
M128_CAST(submsg_e_l+0),
236	_mm_storeu_si128(
M128_CAST(submsg_e_l+2), temp);
237	_mm_storeu_si128(
M128_CAST(submsg_e_l+6), _mm_shuffle_epi32(
238	_mm_loadu_si128(
CONST_M128_CAST(submsg_e_l+6)), _MM_SHUFFLE(1,0,3,2)));
241	_mm_storeu_si128(
M128_CAST(submsg_e_l+4), _mm_unpacklo_epi64(
244	_mm_storeu_si128(
M128_CAST(submsg_e_l+6), _mm_unpackhi_epi64(
// Permute the even-right words.
246	_mm_storeu_si128(
M128_CAST(submsg_e_r+2), _mm_shuffle_epi32(
247	_mm_loadu_si128(
CONST_M128_CAST(submsg_e_r+2)), _MM_SHUFFLE(1,0,3,2)));
250	_mm_storeu_si128(
M128_CAST(submsg_e_r+0),
252	_mm_storeu_si128(
M128_CAST(submsg_e_r+2), temp);
253	_mm_storeu_si128(
M128_CAST(submsg_e_r+6), _mm_shuffle_epi32(
254	_mm_loadu_si128(
CONST_M128_CAST(submsg_e_r+6)), _MM_SHUFFLE(1,0,3,2)));
257	_mm_storeu_si128(
M128_CAST(submsg_e_r+4), _mm_unpacklo_epi64(
260	_mm_storeu_si128(
M128_CAST(submsg_e_r+6), _mm_unpackhi_epi64(
// Word-wise 64-bit addition of the odd sub-messages into the
// permuted even sub-messages.
263	_mm_storeu_si128(
M128_CAST(submsg_e_l+0), _mm_add_epi64(
266	_mm_storeu_si128(
M128_CAST(submsg_e_l+2), _mm_add_epi64(
269	_mm_storeu_si128(
M128_CAST(submsg_e_l+4), _mm_add_epi64(
272	_mm_storeu_si128(
M128_CAST(submsg_e_l+6), _mm_add_epi64(
276	_mm_storeu_si128(
M128_CAST(submsg_e_r+0), _mm_add_epi64(
279	_mm_storeu_si128(
M128_CAST(submsg_e_r+2), _mm_add_epi64(
282	_mm_storeu_si128(
M128_CAST(submsg_e_r+4), _mm_add_epi64(
285	_mm_storeu_si128(
M128_CAST(submsg_e_r+6), _mm_add_epi64(
// Message expansion for an odd step: the mirror image of msg_exp_even --
// permute the odd sub-messages, then add the even sub-messages into
// them word-wise.
// NOTE(review): many intrinsic operands and the `temp` declaration were
// lost in extraction; only the shuffle/unpack/add skeleton survives.
290inline void msg_exp_odd(LSH512_SSSE3_Internal* i_state)
294	lsh_u64* submsg_e_l = i_state->submsg_e_l;
295	lsh_u64* submsg_e_r = i_state->submsg_e_r;
296	lsh_u64* submsg_o_l = i_state->submsg_o_l;
297	lsh_u64* submsg_o_r = i_state->submsg_o_r;
// Permute the odd-left words.
300	_mm_storeu_si128(
M128_CAST(submsg_o_l+2), _mm_shuffle_epi32(
301	_mm_loadu_si128(
CONST_M128_CAST(submsg_o_l+2)), _MM_SHUFFLE(1,0,3,2)));
304	_mm_storeu_si128(
M128_CAST(submsg_o_l+0),
306	_mm_storeu_si128(
M128_CAST(submsg_o_l+2), temp);
307	_mm_storeu_si128(
M128_CAST(submsg_o_l+6), _mm_shuffle_epi32(
308	_mm_loadu_si128(
CONST_M128_CAST(submsg_o_l+6)), _MM_SHUFFLE(1,0,3,2)));
311	_mm_storeu_si128(
M128_CAST(submsg_o_l+4), _mm_unpacklo_epi64(
314	_mm_storeu_si128(
M128_CAST(submsg_o_l+6), _mm_unpackhi_epi64(
// Permute the odd-right words.
316	_mm_storeu_si128(
M128_CAST(submsg_o_r+2), _mm_shuffle_epi32(
317	_mm_loadu_si128(
CONST_M128_CAST(submsg_o_r+2)), _MM_SHUFFLE(1,0,3,2)));
320	_mm_storeu_si128(
M128_CAST(submsg_o_r+0),
322	_mm_storeu_si128(
M128_CAST(submsg_o_r+2), temp);
323	_mm_storeu_si128(
M128_CAST(submsg_o_r+6), _mm_shuffle_epi32(
324	_mm_loadu_si128(
CONST_M128_CAST(submsg_o_r+6)), _MM_SHUFFLE(1,0,3,2)));
327	_mm_storeu_si128(
M128_CAST(submsg_o_r+4), _mm_unpacklo_epi64(
330	_mm_storeu_si128(
M128_CAST(submsg_o_r+6), _mm_unpackhi_epi64(
// Word-wise 64-bit addition of the even sub-messages into the
// permuted odd sub-messages.
333	_mm_storeu_si128(
M128_CAST(submsg_o_l+0), _mm_add_epi64(
336	_mm_storeu_si128(
M128_CAST(submsg_o_l+2), _mm_add_epi64(
339	_mm_storeu_si128(
M128_CAST(submsg_o_l+4), _mm_add_epi64(
342	_mm_storeu_si128(
M128_CAST(submsg_o_l+6), _mm_add_epi64(
346	_mm_storeu_si128(
M128_CAST(submsg_o_r+0), _mm_add_epi64(
349	_mm_storeu_si128(
M128_CAST(submsg_o_r+2), _mm_add_epi64(
352	_mm_storeu_si128(
M128_CAST(submsg_o_r+4), _mm_add_epi64(
355	_mm_storeu_si128(
M128_CAST(submsg_o_r+6), _mm_add_epi64(
360inline void load_sc(
const lsh_u64** p_const_v,
size_t i)
362 *p_const_v = &LSH512_StepConstants[i];
// XOR the even sub-message words into both chaining-variable halves,
// two 64-bit words per 128-bit store.
// NOTE(review): each _mm_xor_si128 lost its operands (loads of the cv
// and sub-message words) during extraction.
365inline void msg_add_even(lsh_u64 cv_l[8], lsh_u64 cv_r[8], LSH512_SSSE3_Internal* i_state)
369	lsh_u64* submsg_e_l = i_state->submsg_e_l;
370	lsh_u64* submsg_e_r = i_state->submsg_e_r;
372	_mm_storeu_si128(
M128_CAST(cv_l), _mm_xor_si128(
375	_mm_storeu_si128(
M128_CAST(cv_r), _mm_xor_si128(
378	_mm_storeu_si128(
M128_CAST(cv_l+2), _mm_xor_si128(
381	_mm_storeu_si128(
M128_CAST(cv_r+2), _mm_xor_si128(
384	_mm_storeu_si128(
M128_CAST(cv_l+4), _mm_xor_si128(
387	_mm_storeu_si128(
M128_CAST(cv_r+4), _mm_xor_si128(
390	_mm_storeu_si128(
M128_CAST(cv_l+6), _mm_xor_si128(
393	_mm_storeu_si128(
M128_CAST(cv_r+6), _mm_xor_si128(
// XOR the odd sub-message words into both chaining-variable halves,
// two 64-bit words per 128-bit store.
// NOTE(review): each _mm_xor_si128 lost its operands (loads of the cv
// and sub-message words) during extraction.
398inline void msg_add_odd(lsh_u64 cv_l[8], lsh_u64 cv_r[8], LSH512_SSSE3_Internal* i_state)
402	lsh_u64* submsg_o_l = i_state->submsg_o_l;
403	lsh_u64* submsg_o_r = i_state->submsg_o_r;
405	_mm_storeu_si128(
M128_CAST(cv_l), _mm_xor_si128(
408	_mm_storeu_si128(
M128_CAST(cv_r), _mm_xor_si128(
411	_mm_storeu_si128(
M128_CAST(cv_l+2), _mm_xor_si128(
414	_mm_storeu_si128(
M128_CAST(cv_r+2), _mm_xor_si128(
417	_mm_storeu_si128(
M128_CAST(cv_l+4), _mm_xor_si128(
420	_mm_storeu_si128(
M128_CAST(cv_r+4), _mm_xor_si128(
423	_mm_storeu_si128(
M128_CAST(cv_l+6), _mm_xor_si128(
426	_mm_storeu_si128(
M128_CAST(cv_r+6), _mm_xor_si128(
// Word-wise 64-bit addition cv_l += cv_r (two words per store).
// NOTE(review): the _mm_add_epi64 operands were lost in extraction;
// only cv_l appears as the store destination, matching add_blk(dst, src).
431inline void add_blk(lsh_u64 cv_l[8], lsh_u64 cv_r[8])
433	_mm_storeu_si128(
M128_CAST(cv_l), _mm_add_epi64(
436	_mm_storeu_si128(
M128_CAST(cv_l+2), _mm_add_epi64(
439	_mm_storeu_si128(
M128_CAST(cv_l+4), _mm_add_epi64(
442	_mm_storeu_si128(
M128_CAST(cv_l+6), _mm_add_epi64(
// Rotate each 64-bit word of cv left by the compile-time amount R.
// When XOP is available a native rotate is used (that branch's body was
// lost in extraction); otherwise the rotate is synthesized as the OR of
// a left shift and a right shift (the _mm_or_si128 stores below, whose
// shift operands were also lost in extraction).
447template <
unsigned int R>
448inline void rotate_blk(lsh_u64 cv[8])
450#if defined(CRYPTOPP_XOP_AVAILABLE)
461	_mm_storeu_si128(
M128_CAST(cv), _mm_or_si128(
464	_mm_storeu_si128(
M128_CAST(cv+2), _mm_or_si128(
467	_mm_storeu_si128(
M128_CAST(cv+4), _mm_or_si128(
470	_mm_storeu_si128(
M128_CAST(cv+6), _mm_or_si128(
// XOR the 8 step-constant words into cv_l, two words per store.
// NOTE(review): the _mm_xor_si128 operands (loads of cv_l and const_v)
// were lost in extraction.
476inline void xor_with_const(lsh_u64 cv_l[8],
const lsh_u64 const_v[8])
478	_mm_storeu_si128(
M128_CAST(cv_l), _mm_xor_si128(
481	_mm_storeu_si128(
M128_CAST(cv_l+2), _mm_xor_si128(
484	_mm_storeu_si128(
M128_CAST(cv_l+4), _mm_xor_si128(
487	_mm_storeu_si128(
M128_CAST(cv_l+6), _mm_xor_si128(
// Gamma rotation of cv_r: each 64-bit word is rotated by a multiple of
// 8 bits, implemented with byte-shuffle masks (the _mm_set_epi8 control
// vectors below). The shuffle intrinsic calls and store destinations
// were lost in extraction -- presumably _mm_shuffle_epi8 (SSSE3) per
// pair of words; confirm against the original.
492inline void rotate_msg_gamma(lsh_u64 cv_r[8])
497	_mm_set_epi8(13,12,11,10, 9,8,15,14, 7,6,5,4, 3,2,1,0)));
500	_mm_set_epi8(9,8,15,14, 13,12,11,10, 3,2,1,0, 7,6,5,4)));
504	_mm_set_epi8(12,11,10,9, 8,15,14,13, 6,5,4,3, 2,1,0,7)));
507	_mm_set_epi8(8,15,14,13, 12,11,10,9, 2,1,0,7, 6,5,4,3)));
// Word permutation applied after each mix step: interleaves and swaps
// 64-bit words within and between cv_l and cv_r via unpacklo/unpackhi
// and 32-bit shuffles, with `temp[0..1]` holding displaced lanes that
// are written back to cv_r at the end.
// NOTE(review): intrinsic operands and the `temp` declaration were lost
// in extraction; only the store/shuffle skeleton survives.
510inline void word_perm(lsh_u64 cv_l[8], lsh_u64 cv_r[8])
514	_mm_storeu_si128(
M128_CAST(cv_l+0), _mm_unpacklo_epi64(
517	_mm_storeu_si128(
M128_CAST(cv_l+2), _mm_unpackhi_epi64(
521	_mm_storeu_si128(
M128_CAST(cv_l+4), _mm_unpacklo_epi64(
524	_mm_storeu_si128(
M128_CAST(cv_l+6), _mm_unpackhi_epi64(
526	_mm_storeu_si128(
M128_CAST(cv_r+2), _mm_shuffle_epi32(
530	_mm_storeu_si128(
M128_CAST(cv_r+0), _mm_unpacklo_epi64(
533	_mm_storeu_si128(
M128_CAST(cv_r+2), _mm_unpackhi_epi64(
535	_mm_storeu_si128(
M128_CAST(cv_r+6), _mm_shuffle_epi32(
539	_mm_storeu_si128(
M128_CAST(cv_r+4), _mm_unpacklo_epi64(
542	_mm_storeu_si128(
M128_CAST(cv_r+6), _mm_unpackhi_epi64(
// Write back the displaced lanes.
561	_mm_storeu_si128(
M128_CAST(cv_r+0), temp[0]);
562	_mm_storeu_si128(
M128_CAST(cv_r+2), temp[1]);
// One LSH mix step, parameterized by the two rotation amounts:
// rotate the left half by Alpha and XOR in the step constants, rotate
// the right half by Beta, then apply the gamma byte-rotation to the
// right half.
// NOTE(review): the interleaved add_blk(cv_l, cv_r)/add_blk(cv_r, cv_l)
// calls of the full mix sequence were lost in extraction.
569template <
unsigned int Alpha,
unsigned int Beta>
570inline void mix(lsh_u64 cv_l[8], lsh_u64 cv_r[8],
const lsh_u64 const_v[8])
573	rotate_blk<Alpha>(cv_l);
574	xor_with_const(cv_l, const_v);
576	rotate_blk<Beta>(cv_r);
578	rotate_msg_gamma(cv_r);
// Compression function: absorb one 256-byte message block into the
// chaining variable. Loads the block into the sub-message arrays, then
// alternates even/odd rounds -- message add, mix with the next 8 step
// constants (even rounds use constants at 16*i, odd at 16*i+8), word
// permutation -- for NUM_STEPS/2 iterations, and finishes with a final
// message expansion and addition.
585inline void compress(LSH512_SSSE3_Context* ctx,
const lsh_u8 pdMsgBlk[LSH512_MSG_BLK_BYTE_LEN])
589	LSH512_SSSE3_Internal s_state(ctx->cv_l);
590	LSH512_SSSE3_Internal* i_state = &s_state;
592	const lsh_u64* const_v = NULL;
593	lsh_u64 *cv_l = ctx->cv_l;
594	lsh_u64 *cv_r = ctx->cv_r;
596	load_msg_blk(i_state, pdMsgBlk);
// Step 0 (even) and step 1 (odd) use the first two constant groups.
598	msg_add_even(cv_l, cv_r, i_state);
599	load_sc(&const_v, 0);
600	mix<ROT_EVEN_ALPHA, ROT_EVEN_BETA>(cv_l, cv_r, const_v);
601	word_perm(cv_l, cv_r);
603	msg_add_odd(cv_l, cv_r, i_state);
604	load_sc(&const_v, 8);
605	mix<ROT_ODD_ALPHA, ROT_ODD_BETA>(cv_l, cv_r, const_v);
606	word_perm(cv_l, cv_r);
// Remaining step pairs, expanding the message before each.
608	for (
size_t i = 1; i < NUM_STEPS / 2; i++)
610	msg_exp_even(i_state);
611	msg_add_even(cv_l, cv_r, i_state);
612	load_sc(&const_v, 16 * i);
613	mix<ROT_EVEN_ALPHA, ROT_EVEN_BETA>(cv_l, cv_r, const_v);
614	word_perm(cv_l, cv_r);
616	msg_exp_odd(i_state);
617	msg_add_odd(cv_l, cv_r, i_state);
618	load_sc(&const_v, 16 * i + 8);
619	mix<ROT_ODD_ALPHA, ROT_ODD_BETA>(cv_l, cv_r, const_v);
620	word_perm(cv_l, cv_r);
// Final message addition after the last expansion.
623	msg_exp_even(i_state);
624	msg_add_even(cv_l, cv_r, i_state);
650inline void zero_iv(lsh_u64 cv_l[8], lsh_u64 cv_r[8])
652 _mm_storeu_si128(
M128_CAST(cv_l+0), _mm_setzero_si128());
653 _mm_storeu_si128(
M128_CAST(cv_l+2), _mm_setzero_si128());
654 _mm_storeu_si128(
M128_CAST(cv_l+4), _mm_setzero_si128());
655 _mm_storeu_si128(
M128_CAST(cv_l+6), _mm_setzero_si128());
656 _mm_storeu_si128(
M128_CAST(cv_r+0), _mm_setzero_si128());
657 _mm_storeu_si128(
M128_CAST(cv_r+2), _mm_setzero_si128());
658 _mm_storeu_si128(
M128_CAST(cv_r+4), _mm_setzero_si128());
659 _mm_storeu_si128(
M128_CAST(cv_r+6), _mm_setzero_si128());
// Clear the sub-message scratch area of the context (the four 8-word
// arrays) to zero.
// NOTE(review): the store destinations (offsets into sub_msgs) were
// lost in extraction; only the zero operands survive.
662inline void zero_submsgs(LSH512_SSSE3_Context* ctx)
664	lsh_u64* sub_msgs = ctx->sub_msgs;
667	_mm_setzero_si128());
669	_mm_setzero_si128());
671	_mm_setzero_si128());
673	_mm_setzero_si128());
675	_mm_setzero_si128());
677	_mm_setzero_si128());
679	_mm_setzero_si128());
681	_mm_setzero_si128());
// Initialize the chaining variable with the LSH-512-224 IV.
// NOTE(review): a zero_submsgs(ctx) call likely preceded load_iv here;
// it appears to have been lost in extraction.
684inline void init224(LSH512_SSSE3_Context* ctx)
689	load_iv(ctx->cv_l, ctx->cv_r, LSH512_IV224);
// Initialize the chaining variable with the LSH-512-256 IV.
// NOTE(review): a zero_submsgs(ctx) call likely preceded load_iv here;
// it appears to have been lost in extraction.
692inline void init256(LSH512_SSSE3_Context* ctx)
697	load_iv(ctx->cv_l, ctx->cv_r, LSH512_IV256);
// Initialize the chaining variable with the LSH-512-384 IV.
// NOTE(review): a zero_submsgs(ctx) call likely preceded load_iv here;
// it appears to have been lost in extraction.
700inline void init384(LSH512_SSSE3_Context* ctx)
705	load_iv(ctx->cv_l, ctx->cv_r, LSH512_IV384);
// Initialize the chaining variable with the LSH-512-512 IV.
// NOTE(review): a zero_submsgs(ctx) call likely preceded load_iv here;
// it appears to have been lost in extraction.
708inline void init512(LSH512_SSSE3_Context* ctx)
713	load_iv(ctx->cv_l, ctx->cv_r, LSH512_IV512);
// Finalization: fold cv_r into cv_l by XOR, leaving the pre-digest
// value in cv_l for get_hash to extract.
// NOTE(review): the _mm_xor_si128 operands (loads of cv_l and cv_r)
// were lost in extraction.
718inline void fin(LSH512_SSSE3_Context* ctx)
722	_mm_storeu_si128(
M128_CAST(ctx->cv_l+0), _mm_xor_si128(
725	_mm_storeu_si128(
M128_CAST(ctx->cv_l+2), _mm_xor_si128(
728	_mm_storeu_si128(
M128_CAST(ctx->cv_l+4), _mm_xor_si128(
731	_mm_storeu_si128(
M128_CAST(ctx->cv_l+6), _mm_xor_si128(
// Copy the digest from the finalized chaining variable into pbHashVal:
// the first LSH_GET_HASHBYTE(alg_type) bytes of cv_l, and -- for digest
// lengths that are not a whole number of bytes -- mask off the unused
// low bits of the final byte.
738inline void get_hash(LSH512_SSSE3_Context* ctx, lsh_u8* pbHashVal)
744	lsh_uint alg_type = ctx->alg_type;
745	lsh_uint hash_val_byte_len = LSH_GET_HASHBYTE(alg_type);
746	lsh_uint hash_val_bit_len = LSH_GET_SMALL_HASHBIT(alg_type);
749	memcpy(pbHashVal, ctx->cv_l, hash_val_byte_len);
750	if (hash_val_bit_len){
751	pbHashVal[hash_val_byte_len-1] &= (((lsh_u8)0xff) << hash_val_bit_len);
// Initialize the context according to ctx->alg_type. The standard
// variants dispatch to init224/256/384/512 via the switch (the case
// bodies and their returns were lost in extraction); any other type
// derives its IV generically: zero the chaining variable, seed word 0
// with the max digest byte length and word 1 with the digest bit
// length, then run NUM_STEPS mix rounds with the step constants.
// Returns an lsh_err status (LSH_SUCCESS on the paths shown).
757lsh_err lsh512_init_ssse3(LSH512_SSSE3_Context* ctx)
762	lsh_u32 alg_type = ctx->alg_type;
763	const lsh_u64* const_v = NULL;
764	ctx->remain_databitlen = 0;
767	case LSH_TYPE_512_512:
770	case LSH_TYPE_512_384:
773	case LSH_TYPE_512_256:
776	case LSH_TYPE_512_224:
// Generic IV derivation for non-standard digest sizes.
783	lsh_u64* cv_l = ctx->cv_l;
784	lsh_u64* cv_r = ctx->cv_r;
787	cv_l[0] = LSH512_HASH_VAL_MAX_BYTE_LEN;
788	cv_l[1] = LSH_GET_HASHBIT(alg_type);
790	for (
size_t i = 0; i < NUM_STEPS / 2; i++)
793	load_sc(&const_v, i * 16);
794	mix<ROT_EVEN_ALPHA, ROT_EVEN_BETA>(cv_l, cv_r, const_v);
795	word_perm(cv_l, cv_r);
797	load_sc(&const_v, i * 16 + 8);
798	mix<ROT_ODD_ALPHA, ROT_ODD_BETA>(cv_l, cv_r, const_v);
799	word_perm(cv_l, cv_r);
// Absorb databitlen bits of input. Only byte-aligned input is supported
// in this build (remain_msg_bit and pos2 are fixed at 0). Partial-block
// bytes are buffered in ctx->last_block; full 256-byte blocks are fed
// to compress(). Returns LSH_ERR_INVALID_STATE if the buffered byte
// count is out of range, LSH_ERR_INVALID_DATABITLEN if a previous call
// left a sub-byte remainder, LSH_SUCCESS otherwise.
// NOTE(review): several return statements and the compress(ctx, data)
// call inside the while loop were lost in extraction.
805lsh_err lsh512_update_ssse3(LSH512_SSSE3_Context* ctx,
const lsh_u8* data,
size_t databitlen)
812	if (databitlen == 0){
817	size_t databytelen = databitlen >> 3;
// pos2 would be the sub-byte bit count; byte-aligned only here.
819	const size_t pos2 = 0;
821	size_t remain_msg_byte =
static_cast<size_t>(ctx->remain_databitlen >> 3);
823	const size_t remain_msg_bit = 0;
825	if (remain_msg_byte >= LSH512_MSG_BLK_BYTE_LEN){
826	return LSH_ERR_INVALID_STATE;
828	if (remain_msg_bit > 0){
829	return LSH_ERR_INVALID_DATABITLEN;
// Input fits in the partial-block buffer: stash it and return.
832	if (databytelen + remain_msg_byte < LSH512_MSG_BLK_BYTE_LEN){
833	memcpy(ctx->last_block + remain_msg_byte, data, databytelen);
834	ctx->remain_databitlen += (lsh_uint)databitlen;
835	remain_msg_byte += (lsh_uint)databytelen;
837	ctx->last_block[remain_msg_byte] = data[databytelen] & ((0xff >> pos2) ^ 0xff);
// Complete and compress the previously buffered partial block.
842	if (remain_msg_byte > 0){
843	size_t more_byte = LSH512_MSG_BLK_BYTE_LEN - remain_msg_byte;
844	memcpy(ctx->last_block + remain_msg_byte, data, more_byte);
845	compress(ctx, ctx->last_block);
847	databytelen -= more_byte;
849	ctx->remain_databitlen = 0;
// Compress directly from the input while full blocks remain.
852	while (databytelen >= LSH512_MSG_BLK_BYTE_LEN)
858	data += LSH512_MSG_BLK_BYTE_LEN;
859	databytelen -= LSH512_MSG_BLK_BYTE_LEN;
// Stash any trailing partial block for the next call.
862	if (databytelen > 0){
863	memcpy(ctx->last_block, data, databytelen);
864	ctx->remain_databitlen = (lsh_uint)(databytelen << 3);
868	ctx->last_block[databytelen] = data[databytelen] & ((0xff >> pos2) ^ 0xff);
869	ctx->remain_databitlen += pos2;
// Finalize: pad the buffered partial block with a single 0x80 byte
// followed by zeros, compress it, fold the chaining variable (fin) and
// write the digest (get_hash). Returns LSH_ERR_INVALID_STATE if the
// buffered byte count is out of range.
// NOTE(review): the fin(ctx) call and the final return were lost in
// extraction; only one branch of the bit-aligned/byte-aligned padding
// pair (lines 889/892) would execute -- the guard was lost too.
874lsh_err lsh512_final_ssse3(LSH512_SSSE3_Context* ctx, lsh_u8* hashval)
880	size_t remain_msg_byte =
static_cast<size_t>(ctx->remain_databitlen >> 3);
882	const size_t remain_msg_bit = 0;
884	if (remain_msg_byte >= LSH512_MSG_BLK_BYTE_LEN){
885	return LSH_ERR_INVALID_STATE;
889	ctx->last_block[remain_msg_byte] |= (0x1 << (7 - remain_msg_bit));
892	ctx->last_block[remain_msg_byte] = 0x80;
894	memset(ctx->last_block + remain_msg_byte + 1, 0, LSH512_MSG_BLK_BYTE_LEN - remain_msg_byte - 1);
896	compress(ctx, ctx->last_block);
899	get_hash(ctx, hashval);
904ANONYMOUS_NAMESPACE_END
// Public entry point: reset the hash state (clears the buffered-bit
// counter) and re-run initialization for the algorithm type stored in
// the state array.
// NOTE(review): the error-handling body after the status check (likely
// a throw) was lost in extraction.
909void LSH512_Base_Restart_SSSE3(
word64* state)
911	state[RemainingBits] = 0;
912	LSH512_SSSE3_Context ctx(state, state[AlgorithmType], state[RemainingBits]);
913	lsh_err err = lsh512_init_ssse3(&ctx);
915	if (err != LSH_SUCCESS)
// Public entry point: absorb `size` bytes of input (converted to bits
// for the bit-oriented update routine).
// NOTE(review): the error-handling body after the status check was
// lost in extraction.
920void LSH512_Base_Update_SSSE3(
word64* state,
const byte *input,
size_t size)
922	LSH512_SSSE3_Context ctx(state, state[AlgorithmType], state[RemainingBits]);
923	lsh_err err = lsh512_update_ssse3(&ctx, input, 8*size);
925	if (err != LSH_SUCCESS)
// Public entry point: finalize and write the digest to `hash`. The
// size parameter is unused here; truncation is handled by the caller.
// NOTE(review): the error-handling body after the status check was
// lost in extraction.
930void LSH512_Base_TruncatedFinal_SSSE3(
word64* state,
byte *hash,
size_t)
932	LSH512_SSSE3_Context ctx(state, state[AlgorithmType], state[RemainingBits]);
933	lsh_err err = lsh512_final_ssse3(&ctx, hash);
935	if (err != LSH_SUCCESS)
#define M128_CAST(x)
Clang workaround.
#define CONST_M128_CAST(x)
Clang workaround.
Base class for all exceptions thrown by the library.
@ OTHER_ERROR
Some other error occurred not belonging to other categories.
Library configuration file.
unsigned char byte
8-bit unsigned datatype
unsigned int word32
32-bit unsigned datatype
unsigned long long word64
64-bit unsigned datatype
@ LITTLE_ENDIAN_ORDER
byte order is little-endian
EnumToType< ByteOrder, LITTLE_ENDIAN_ORDER > LittleEndian
Provides a constant for LittleEndian.
Classes for the LSH hash functions.
Utility functions for the Crypto++ library.
T rotlConstant(T x)
Performs a left rotate.
T ConditionalByteReverse(ByteOrder order, T value)
Reverses bytes in a value depending upon endianness.
T rotlFixed(T x, unsigned int y)
Performs a left rotate.
Crypto++ library namespace.
#define CRYPTOPP_ASSERT(exp)
Debugging and diagnostic assertion.