1 #ifndef CRYPTOPP_MISC_H
2 #define CRYPTOPP_MISC_H
11 #define _interlockedbittestandset CRYPTOPP_DISABLED_INTRINSIC_1
12 #define _interlockedbittestandreset CRYPTOPP_DISABLED_INTRINSIC_2
13 #define _interlockedbittestandset64 CRYPTOPP_DISABLED_INTRINSIC_3
14 #define _interlockedbittestandreset64 CRYPTOPP_DISABLED_INTRINSIC_4
16 #undef _interlockedbittestandset
17 #undef _interlockedbittestandreset
18 #undef _interlockedbittestandset64
19 #undef _interlockedbittestandreset64
20 #define CRYPTOPP_FAST_ROTATE(x) 1
21 #elif _MSC_VER >= 1300
22 #define CRYPTOPP_FAST_ROTATE(x) ((x) == 32 | (x) == 64)
24 #define CRYPTOPP_FAST_ROTATE(x) ((x) == 32)
26 #elif (defined(__MWERKS__) && TARGET_CPU_PPC) || \
27 (defined(__GNUC__) && (defined(_ARCH_PWR2) || defined(_ARCH_PWR) || defined(_ARCH_PPC) || defined(_ARCH_PPC64) || defined(_ARCH_COM)))
28 #define CRYPTOPP_FAST_ROTATE(x) ((x) == 32)
29 #elif defined(__GNUC__) && (CRYPTOPP_BOOL_X64 || CRYPTOPP_BOOL_X86) // depend on GCC's peephole optimization to generate rotate instructions
30 #define CRYPTOPP_FAST_ROTATE(x) 1
32 #define CRYPTOPP_FAST_ROTATE(x) 0
39 #if defined(__GNUC__) && defined(__linux__)
40 #define CRYPTOPP_BYTESWAP_AVAILABLE
44 NAMESPACE_BEGIN(CryptoPP)
51 static char dummy[2*b-1];
54 #define CRYPTOPP_COMPILE_ASSERT(assertion) CRYPTOPP_COMPILE_ASSERT_INSTANCE(assertion, __LINE__)
55 #if defined(CRYPTOPP_EXPORTS) || defined(CRYPTOPP_IMPORTS)
56 #define CRYPTOPP_COMPILE_ASSERT_INSTANCE(assertion, instance)
58 #define CRYPTOPP_COMPILE_ASSERT_INSTANCE(assertion, instance) static CompileAssert<(assertion)> CRYPTOPP_ASSERT_JOIN(cryptopp_assert_, instance)
60 #define CRYPTOPP_ASSERT_JOIN(X, Y) CRYPTOPP_DO_ASSERT_JOIN(X, Y)
61 #define CRYPTOPP_DO_ASSERT_JOIN(X, Y) X##Y
70 template <
class BASE1,
class BASE2>
71 class CRYPTOPP_NO_VTABLE
TwoBases :
public BASE1,
public BASE2
76 template <
class BASE1,
class BASE2,
class BASE3>
77 class CRYPTOPP_NO_VTABLE
ThreeBases :
public BASE1,
public BASE2,
public BASE3
100 T* operator()()
const {
return new T;}
107 template <
class T,
class F = NewObject<T>,
int instance=0>
111 Singleton(F objectFactory = F()) : m_objectFactory(objectFactory) {}
114 CRYPTOPP_NOINLINE
const T & Ref(CRYPTOPP_NOINLINE_DOTDOTDOT)
const;
120 template <
class T,
class F,
int instance>
124 T *p = s_pObject.m_p;
129 T *newObject = m_objectFactory();
138 s_pObject.m_p = newObject;
144 #if (!__STDC_WANT_SECURE_LIB__)
145 inline void memcpy_s(
void *dest,
size_t sizeInBytes,
const void *src,
size_t count)
147 if (count > sizeInBytes)
149 memcpy(dest, src, count);
152 inline void memmove_s(
void *dest,
size_t sizeInBytes,
const void *src,
size_t count)
154 if (count > sizeInBytes)
156 memmove(dest, src, count);
159 #if __BORLANDC__ >= 0x620
161 #define memcpy_s CryptoPP::memcpy_s
162 #define memmove_s CryptoPP::memmove_s
// memset wrapper that sidesteps a spurious GCC warning when num is a
// compile-time constant zero. Behaves exactly like memset otherwise and
// returns ptr, as memset does.
inline void * memset_z(void *ptr, int value, size_t num)
{
// avoid extranous warning on GCC 4.3.2 Ubuntu 8.10
#if CRYPTOPP_GCC_VERSION >= 30001
	if (__builtin_constant_p(num) && num==0)
		return ptr;
#endif
	return memset(ptr, value, num);
}
// Return the lesser of a and b by const reference. Uses only operator<
// and returns a on ties, mirroring std::min.
template <class T> inline const T& STDMIN(const T& a, const T& b)
{
	if (b < a)
		return b;
	return a;
}
// Return the smaller of a and b as a T1, with the comparison carried out
// in the wider of the two types. The compile-time assertion requires the
// wider type to be unsigned, which keeps the widening cast lossless and
// avoids signed/unsigned promotion surprises.
template <class T1, class T2>
inline const T1 UnsignedMin(const T1& a, const T2& b)
{
	CRYPTOPP_COMPILE_ASSERT((sizeof(T1)<=sizeof(T2) && T2(-1)>0) || (sizeof(T1)>sizeof(T2) && T1(-1)>0));

	// Branch resolves at compile time: compare in whichever type is wider.
	if (sizeof(T1)<=sizeof(T2))
		return b < (T2)a ? (T1)b : a;
	return (T1)b < a ? (T1)b : a;
}
// Return the greater of a and b by const reference. Uses only operator<
// and returns a on ties, mirroring std::max.
template <class T> inline const T& STDMAX(const T& a, const T& b)
{
	if (a < b)
		return b;
	return a;
}
199 #define RETURN_IF_NONZERO(x) size_t returnedValue = x; if (returnedValue) return returnedValue
202 #define GETBYTE(x, y) (unsigned int)byte((x)>>(8*(y)))
207 #define CRYPTOPP_GET_BYTE_AS_BYTE(x, y) byte((x)>>(8*(y)))
// Return the parity (0 or 1) of the number of set bits in value.
// Repeatedly folds the upper half of the word onto the lower half, so the
// cumulative XOR of all bits ends up in bit 0.
template <class T>
unsigned int Parity(T value)
{
	for (unsigned int i=8*sizeof(value)/2; i>0; i/=2)
		value ^= value >> i;
	return (unsigned int)value&1;
}
// Return the number of significant bytes in value (0 for value == 0),
// i.e. the smallest n such that value fits in n bytes.
// Binary-searches for the highest non-zero bit, narrowing [l, h) until
// the range spans a single byte.
template <class T>
unsigned int BytePrecision(const T &value)
{
	if (!value)
		return 0;

	unsigned int l=0, h=8*sizeof(value);

	while (h-l > 8)
	{
		unsigned int t = (l+h)/2;
		if (value >> t)
			l = t;
		else
			h = t;
	}

	return h/8;
}
// Return the number of significant bits in value (0 for value == 0),
// i.e. the index of the highest set bit plus one.
// Binary-searches for the highest set bit, narrowing [l, h) to one bit.
template <class T>
unsigned int BitPrecision(const T &value)
{
	if (!value)
		return 0;

	unsigned int l=0, h=8*sizeof(value);

	while (h-l > 1)
	{
		unsigned int t = (l+h)/2;
		if (value >> t)
			l = t;
		else
			h = t;
	}

	return h;
}
257 inline unsigned int TrailingZeros(word32 v)
259 #if defined(__GNUC__) && CRYPTOPP_GCC_VERSION >= 30400
260 return __builtin_ctz(v);
261 #elif defined(_MSC_VER) && _MSC_VER >= 1400
262 unsigned long result;
263 _BitScanForward(&result, v);
267 static const int MultiplyDeBruijnBitPosition[32] =
269 0, 1, 28, 2, 29, 14, 24, 3, 30, 22, 20, 15, 25, 17, 4, 8,
270 31, 27, 13, 23, 21, 19, 16, 7, 26, 12, 18, 6, 11, 5, 10, 9
272 return MultiplyDeBruijnBitPosition[((word32)((v & -v) * 0x077CB531U)) >> 27];
276 inline unsigned int TrailingZeros(word64 v)
278 #if defined(__GNUC__) && CRYPTOPP_GCC_VERSION >= 30400
279 return __builtin_ctzll(v);
280 #elif defined(_MSC_VER) && _MSC_VER >= 1400 && (defined(_M_X64) || defined(_M_IA64))
281 unsigned long result;
282 _BitScanForward64(&result, v);
285 return word32(v) ? TrailingZeros(word32(v)) : 32 + TrailingZeros(word32(v>>32));
// Truncate value to its low `size` bits. When size covers the full width
// of T, value is returned unchanged — the mask path would otherwise shift
// by the full type width, which is undefined behavior.
template <class T>
inline T Crop(T value, size_t size)
{
	if (size < 8*sizeof(value))
		return T(value & ((T(1) << size) - 1));
	else
		return value;
}
// Convert `from` to T2, storing the result in `to` (always written).
// Returns true only when the conversion is lossless: the value round-trips
// and its sign is preserved. Returns false on overflow, truncation, or a
// signed/unsigned sign flip.
template <class T1, class T2>
inline bool SafeConvert(T1 from, T2 &to)
{
	to = (T2)from;
	// The equality check catches truncation; the sign check catches values
	// that compare equal after promotion but changed sign (e.g. -1 -> UINT_MAX).
	if (from != to || (from > 0) != (to > 0))
		return false;
	return true;
}
// Number of bytes needed to hold bitCount bits, rounding up.
inline size_t BitsToBytes(size_t bitCount)
{
	return (bitCount+7)/8;
}
312 inline size_t BytesToWords(
size_t byteCount)
314 return ((byteCount+WORD_SIZE-1)/WORD_SIZE);
317 inline size_t BitsToWords(
size_t bitCount)
319 return ((bitCount+WORD_BITS-1)/(WORD_BITS));
322 inline size_t BitsToDwords(
size_t bitCount)
324 return ((bitCount+2*WORD_BITS-1)/(2*WORD_BITS));
327 CRYPTOPP_DLL
void CRYPTOPP_API xorbuf(byte *buf,
const byte *mask,
size_t count);
328 CRYPTOPP_DLL
void CRYPTOPP_API xorbuf(byte *output,
const byte *input,
const byte *mask,
size_t count);
330 CRYPTOPP_DLL
bool CRYPTOPP_API VerifyBufsEqual(
const byte *buf1,
const byte *buf2,
size_t count);
// True when n is a positive integral power of two, i.e. exactly one bit
// is set. Clearing the lowest set bit (n & (n-1)) must leave zero.
template <class T>
inline bool IsPowerOf2(const T &n)
{
	if (n <= 0)
		return false;
	return (n & (n-1)) == 0;
}
// a mod b, computed with a mask; b must be a power of two (asserted).
// The result is produced in type T2.
template <class T1, class T2>
inline T2 ModPowerOf2(const T1 &a, const T2 &b)
{
	assert(IsPowerOf2(b));
	const T2 mask = b-1;
	return T2(a) & mask;
}
// Round n down to the nearest multiple of m. m must be a power of two;
// ModPowerOf2 asserts that precondition.
template <class T1, class T2>
inline T1 RoundDownToMultipleOf(const T1 &n, const T2 &m)
{
	const T1 remainder = ModPowerOf2(n, m);
	return n - remainder;
}
// Round n up to the nearest multiple of m. m must be a power of two
// (enforced via RoundDownToMultipleOf -> ModPowerOf2).
template <class T1, class T2>
inline T1 RoundUpToMultipleOf(const T1 &n, const T2 &m)
{
	// Bump past the next boundary minus one, then round down.
	return RoundDownToMultipleOf(n+m-1, m);
}
363 inline unsigned int GetAlignmentOf(T *dummy=NULL)
365 #ifdef CRYPTOPP_ALLOW_UNALIGNED_DATA_ACCESS
370 #if (_MSC_VER >= 1300)
372 #elif defined(__GNUC__)
373 return __alignof__(T);
374 #elif CRYPTOPP_BOOL_SLOW_WORD64
375 return UnsignedMin(4U,
sizeof(T));
381 inline bool IsAlignedOn(
const void *p,
unsigned int alignment)
383 return alignment==1 || (IsPowerOf2(alignment) ? ModPowerOf2((
size_t)p, alignment) == 0 : (size_t)p % alignment == 0);
387 inline bool IsAligned(
const void *p, T *dummy=NULL)
389 return IsAlignedOn(p, GetAlignmentOf<T>());
392 #ifdef IS_LITTLE_ENDIAN
398 inline ByteOrder GetNativeByteOrder()
400 return NativeByteOrder::ToEnum();
403 inline bool NativeByteOrderIs(ByteOrder order)
405 return order == GetNativeByteOrder();
409 std::string IntToString(T a,
unsigned int base = 10)
423 result = char((digit < 10 ?
'0' : (
'a' - 10)) + digit) + result;
427 result =
"-" + result;
// a - b, clamped at zero: never wraps below zero the way a plain unsigned
// subtraction would. The result is produced in type T1.
template <class T1, class T2>
inline T1 SaturatingSubtract(const T1 &a, const T2 &b)
{
	if (a > b)
		return T1(a - b);
	return T1(0);
}
438 inline CipherDir GetCipherDir(
const T &obj)
440 return obj.IsForwardTransformation() ? ENCRYPTION : DECRYPTION;
443 CRYPTOPP_DLL
void CRYPTOPP_API CallNewHandler();
445 inline void IncrementCounterByOne(byte *inout,
unsigned int s)
447 for (
int i=s-1, carry=1; i>=0 && carry; i--)
451 inline void IncrementCounterByOne(byte *output,
const byte *input,
unsigned int s)
454 for (i=s-1, carry=1; i>=0 && carry; i--)
455 carry = ((output[i] = input[i]+1) == 0);
456 memcpy_s(output, s, input, i+1);
460 inline void ConditionalSwap(
bool c, T &a, T &b)
468 inline void ConditionalSwapPointers(
bool c, T &a, T &b)
470 ptrdiff_t t = c * (a - b);
478 void SecureWipeBuffer(T *buf,
size_t n)
481 volatile T *p = buf+n;
486 #if (_MSC_VER >= 1400 || defined(__GNUC__)) && (CRYPTOPP_BOOL_X64 || CRYPTOPP_BOOL_X86)
488 template<>
inline void SecureWipeBuffer(byte *buf,
size_t n)
490 volatile byte *p = buf;
492 asm volatile(
"rep stosb" :
"+c"(n),
"+D"(p) :
"a"(0) :
"memory");
494 __stosb((byte *)(
size_t)p, 0, n);
498 template<>
inline void SecureWipeBuffer(word16 *buf,
size_t n)
500 volatile word16 *p = buf;
502 asm volatile(
"rep stosw" :
"+c"(n),
"+D"(p) :
"a"(0) :
"memory");
504 __stosw((word16 *)(
size_t)p, 0, n);
508 template<>
inline void SecureWipeBuffer(word32 *buf,
size_t n)
510 volatile word32 *p = buf;
512 asm volatile(
"rep stosl" :
"+c"(n),
"+D"(p) :
"a"(0) :
"memory");
514 __stosd((
unsigned long *)(
size_t)p, 0, n);
518 template<>
inline void SecureWipeBuffer(word64 *buf,
size_t n)
520 #if CRYPTOPP_BOOL_X64
521 volatile word64 *p = buf;
523 asm volatile(
"rep stosq" :
"+c"(n),
"+D"(p) :
"a"(0) :
"memory");
525 __stosq((word64 *)(
size_t)p, 0, n);
528 SecureWipeBuffer((word32 *)buf, 2*n);
532 #endif // #if (_MSC_VER >= 1400 || defined(__GNUC__)) && (CRYPTOPP_BOOL_X64 || CRYPTOPP_BOOL_X86)
535 inline void SecureWipeArray(T *buf,
size_t n)
537 if (
sizeof(T) % 8 == 0 && GetAlignmentOf<T>() % GetAlignmentOf<word64>() == 0)
538 SecureWipeBuffer((word64 *)buf, n * (
sizeof(T)/8));
539 else if (
sizeof(T) % 4 == 0 && GetAlignmentOf<T>() % GetAlignmentOf<word32>() == 0)
540 SecureWipeBuffer((word32 *)buf, n * (
sizeof(T)/4));
541 else if (
sizeof(T) % 2 == 0 && GetAlignmentOf<T>() % GetAlignmentOf<word16>() == 0)
542 SecureWipeBuffer((word16 *)buf, n * (
sizeof(T)/2));
544 SecureWipeBuffer((byte *)buf, n *
sizeof(T));
548 static std::string StringNarrow(
const wchar_t *str,
bool throwOnError =
true)
551 #pragma warning(push)
552 #pragma warning(disable: 4996) // 'wcstombs': This function or variable may be unsafe.
554 size_t size = wcstombs(NULL, str, 0);
555 if (size ==
size_t(0)-1)
560 return std::string();
562 std::string result(size, 0);
563 wcstombs(&result[0], str, size);
570 #if CRYPTOPP_BOOL_ALIGN16_ENABLED
571 CRYPTOPP_DLL
void * CRYPTOPP_API AlignedAllocate(
size_t size);
572 CRYPTOPP_DLL
void CRYPTOPP_API AlignedDeallocate(
void *p);
575 CRYPTOPP_DLL
void * CRYPTOPP_API UnalignedAllocate(
size_t size);
576 CRYPTOPP_DLL
void CRYPTOPP_API UnalignedDeallocate(
void *p);
// Rotate x left by y bits; y must be less than the bit width of T
// (asserted). The (-y & (width-1)) form computes (width - y) mod width
// branch-free, so y == 0 is well defined — the previous
// x >> (width - y) shifted by the full type width for y == 0, which is
// undefined behavior. For 0 < y < width the result is unchanged.
template <class T> inline T rotlFixed(T x, unsigned int y)
{
	assert(y < sizeof(T)*8);
	return T((x<<y) | (x>>((0-y) & (sizeof(T)*8-1))));
}
// Rotate x right by y bits; y must be less than the bit width of T
// (asserted). Uses the branch-free (-y & (width-1)) complement-shift so
// y == 0 is well defined (the previous x << (width - y) was undefined
// behavior for y == 0). For 0 < y < width the result is unchanged.
template <class T> inline T rotrFixed(T x, unsigned int y)
{
	assert(y < sizeof(T)*8);
	return T((x>>y) | (x<<((0-y) & (sizeof(T)*8-1))));
}
// Rotate x left by a run-time y; y must be less than the bit width of T
// (asserted). Branch-free complement shift makes y == 0 well defined
// (the previous full-width right shift was undefined behavior there).
template <class T> inline T rotlVariable(T x, unsigned int y)
{
	assert(y < sizeof(T)*8);
	return T((x<<y) | (x>>((0-y) & (sizeof(T)*8-1))));
}
// Rotate x right by a run-time y; y must be less than the bit width of T
// (asserted). Branch-free complement shift makes y == 0 well defined
// (the previous full-width left shift was undefined behavior there).
template <class T> inline T rotrVariable(T x, unsigned int y)
{
	assert(y < sizeof(T)*8);
	return T((x>>y) | (x<<((0-y) & (sizeof(T)*8-1))));
}
// Rotate x left by y bits, with y reduced modulo the bit width of T, so
// any y is accepted. The branch-free complement shift keeps a reduced
// count of 0 well defined (a plain width-minus-zero shift would be
// undefined behavior).
template <class T> inline T rotlMod(T x, unsigned int y)
{
	y %= sizeof(T)*8;
	return T((x<<y) | (x>>((0-y) & (sizeof(T)*8-1))));
}
// Rotate x right by y bits, with y reduced modulo the bit width of T, so
// any y is accepted. The branch-free complement shift keeps a reduced
// count of 0 well defined (a plain width-minus-zero shift would be
// undefined behavior).
template <class T> inline T rotrMod(T x, unsigned int y)
{
	y %= sizeof(T)*8;
	return T((x>>y) | (x<<((0-y) & (sizeof(T)*8-1))));
}
618 template<>
inline word32 rotlFixed<word32>(word32 x,
unsigned int y)
620 assert(y < 8*
sizeof(x));
621 return y ? _lrotl(x, y) : x;
624 template<>
inline word32 rotrFixed<word32>(word32 x,
unsigned int y)
626 assert(y < 8*
sizeof(x));
627 return y ? _lrotr(x, y) : x;
630 template<>
inline word32 rotlVariable<word32>(word32 x,
unsigned int y)
632 assert(y < 8*
sizeof(x));
636 template<>
inline word32 rotrVariable<word32>(word32 x,
unsigned int y)
638 assert(y < 8*
sizeof(x));
642 template<>
inline word32 rotlMod<word32>(word32 x,
unsigned int y)
647 template<>
inline word32 rotrMod<word32>(word32 x,
unsigned int y)
652 #endif // #ifdef _MSC_VER
654 #if _MSC_VER >= 1300 && !defined(__INTEL_COMPILER)
657 template<>
inline word64 rotlFixed<word64>(word64 x,
unsigned int y)
659 assert(y < 8*
sizeof(x));
660 return y ? _rotl64(x, y) : x;
663 template<>
inline word64 rotrFixed<word64>(word64 x,
unsigned int y)
665 assert(y < 8*
sizeof(x));
666 return y ? _rotr64(x, y) : x;
669 template<>
inline word64 rotlVariable<word64>(word64 x,
unsigned int y)
671 assert(y < 8*
sizeof(x));
672 return _rotl64(x, y);
675 template<>
inline word64 rotrVariable<word64>(word64 x,
unsigned int y)
677 assert(y < 8*
sizeof(x));
678 return _rotr64(x, y);
681 template<>
inline word64 rotlMod<word64>(word64 x,
unsigned int y)
683 return _rotl64(x, y);
686 template<>
inline word64 rotrMod<word64>(word64 x,
unsigned int y)
688 return _rotr64(x, y);
691 #endif // #if _MSC_VER >= 1310
693 #if _MSC_VER >= 1400 && !defined(__INTEL_COMPILER)
696 template<>
inline word16 rotlFixed<word16>(word16 x,
unsigned int y)
698 assert(y < 8*
sizeof(x));
699 return y ? _rotl16(x, y) : x;
702 template<>
inline word16 rotrFixed<word16>(word16 x,
unsigned int y)
704 assert(y < 8*
sizeof(x));
705 return y ? _rotr16(x, y) : x;
708 template<>
inline word16 rotlVariable<word16>(word16 x,
unsigned int y)
710 assert(y < 8*
sizeof(x));
711 return _rotl16(x, y);
714 template<>
inline word16 rotrVariable<word16>(word16 x,
unsigned int y)
716 assert(y < 8*
sizeof(x));
717 return _rotr16(x, y);
720 template<>
inline word16 rotlMod<word16>(word16 x,
unsigned int y)
722 return _rotl16(x, y);
725 template<>
inline word16 rotrMod<word16>(word16 x,
unsigned int y)
727 return _rotr16(x, y);
730 template<>
inline byte rotlFixed<byte>(byte x,
unsigned int y)
732 assert(y < 8*
sizeof(x));
733 return y ? _rotl8(x, y) : x;
736 template<>
inline byte rotrFixed<byte>(byte x,
unsigned int y)
738 assert(y < 8*
sizeof(x));
739 return y ? _rotr8(x, y) : x;
742 template<>
inline byte rotlVariable<byte>(byte x,
unsigned int y)
744 assert(y < 8*
sizeof(x));
748 template<>
inline byte rotrVariable<byte>(byte x,
unsigned int y)
750 assert(y < 8*
sizeof(x));
754 template<>
inline byte rotlMod<byte>(byte x,
unsigned int y)
759 template<>
inline byte rotrMod<byte>(byte x,
unsigned int y)
764 #endif // #if _MSC_VER >= 1400
766 #if (defined(__MWERKS__) && TARGET_CPU_PPC)
768 template<>
inline word32 rotlFixed<word32>(word32 x,
unsigned int y)
771 return y ? __rlwinm(x,y,0,31) : x;
774 template<>
inline word32 rotrFixed<word32>(word32 x,
unsigned int y)
777 return y ? __rlwinm(x,32-y,0,31) : x;
780 template<>
inline word32 rotlVariable<word32>(word32 x,
unsigned int y)
783 return (__rlwnm(x,y,0,31));
786 template<>
inline word32 rotrVariable<word32>(word32 x,
unsigned int y)
789 return (__rlwnm(x,32-y,0,31));
792 template<>
inline word32 rotlMod<word32>(word32 x,
unsigned int y)
794 return (__rlwnm(x,y,0,31));
797 template<>
inline word32 rotrMod<word32>(word32 x,
unsigned int y)
799 return (__rlwnm(x,32-y,0,31));
802 #endif // #if (defined(__MWERKS__) && TARGET_CPU_PPC)
807 inline unsigned int GetByte(ByteOrder order, T value,
unsigned int index)
809 if (order == LITTLE_ENDIAN_ORDER)
810 return GETBYTE(value, index);
812 return GETBYTE(value,
sizeof(T)-index-1);
815 inline byte ByteReverse(byte value)
820 inline word16 ByteReverse(word16 value)
822 #ifdef CRYPTOPP_BYTESWAP_AVAILABLE
823 return bswap_16(value);
824 #elif defined(_MSC_VER) && _MSC_VER >= 1300
825 return _byteswap_ushort(value);
827 return rotlFixed(value, 8U);
831 inline word32 ByteReverse(word32 value)
833 #if defined(__GNUC__) && defined(CRYPTOPP_X86_ASM_AVAILABLE)
834 __asm__ (
"bswap %0" :
"=r" (value) :
"0" (value));
836 #elif defined(CRYPTOPP_BYTESWAP_AVAILABLE)
837 return bswap_32(value);
838 #elif defined(__MWERKS__) && TARGET_CPU_PPC
839 return (word32)__lwbrx(&value,0);
840 #elif _MSC_VER >= 1400 || (_MSC_VER >= 1300 && !defined(_DLL))
841 return _byteswap_ulong(value);
842 #elif CRYPTOPP_FAST_ROTATE(32)
844 return (rotrFixed(value, 8U) & 0xff00ff00) | (rotlFixed(value, 8U) & 0x00ff00ff);
847 value = ((value & 0xFF00FF00) >> 8) | ((value & 0x00FF00FF) << 8);
848 return rotlFixed(value, 16U);
852 inline word64 ByteReverse(word64 value)
854 #if defined(__GNUC__) && defined(CRYPTOPP_X86_ASM_AVAILABLE) && defined(__x86_64__)
855 __asm__ (
"bswap %0" :
"=r" (value) :
"0" (value));
857 #elif defined(CRYPTOPP_BYTESWAP_AVAILABLE)
858 return bswap_64(value);
859 #elif defined(_MSC_VER) && _MSC_VER >= 1300
860 return _byteswap_uint64(value);
861 #elif CRYPTOPP_BOOL_SLOW_WORD64
862 return (word64(ByteReverse(word32(value))) << 32) | ByteReverse(word32(value>>32));
864 value = ((value & W64LIT(0xFF00FF00FF00FF00)) >> 8) | ((value & W64LIT(0x00FF00FF00FF00FF)) << 8);
865 value = ((value & W64LIT(0xFFFF0000FFFF0000)) >> 16) | ((value & W64LIT(0x0000FFFF0000FFFF)) << 16);
866 return rotlFixed(value, 32U);
870 inline byte BitReverse(byte value)
872 value = ((value & 0xAA) >> 1) | ((value & 0x55) << 1);
873 value = ((value & 0xCC) >> 2) | ((value & 0x33) << 2);
874 return rotlFixed(value, 4U);
877 inline word16 BitReverse(word16 value)
879 value = ((value & 0xAAAA) >> 1) | ((value & 0x5555) << 1);
880 value = ((value & 0xCCCC) >> 2) | ((value & 0x3333) << 2);
881 value = ((value & 0xF0F0) >> 4) | ((value & 0x0F0F) << 4);
882 return ByteReverse(value);
885 inline word32 BitReverse(word32 value)
887 value = ((value & 0xAAAAAAAA) >> 1) | ((value & 0x55555555) << 1);
888 value = ((value & 0xCCCCCCCC) >> 2) | ((value & 0x33333333) << 2);
889 value = ((value & 0xF0F0F0F0) >> 4) | ((value & 0x0F0F0F0F) << 4);
890 return ByteReverse(value);
893 inline word64 BitReverse(word64 value)
895 #if CRYPTOPP_BOOL_SLOW_WORD64
896 return (word64(BitReverse(word32(value))) << 32) | BitReverse(word32(value>>32));
898 value = ((value & W64LIT(0xAAAAAAAAAAAAAAAA)) >> 1) | ((value & W64LIT(0x5555555555555555)) << 1);
899 value = ((value & W64LIT(0xCCCCCCCCCCCCCCCC)) >> 2) | ((value & W64LIT(0x3333333333333333)) << 2);
900 value = ((value & W64LIT(0xF0F0F0F0F0F0F0F0)) >> 4) | ((value & W64LIT(0x0F0F0F0F0F0F0F0F)) << 4);
901 return ByteReverse(value);
906 inline T BitReverse(T value)
909 return (T)BitReverse((byte)value);
910 else if (
sizeof(T) == 2)
911 return (T)BitReverse((word16)value);
912 else if (
sizeof(T) == 4)
913 return (T)BitReverse((word32)value);
916 assert(
sizeof(T) == 8);
917 return (T)BitReverse((word64)value);
922 inline T ConditionalByteReverse(ByteOrder order, T value)
924 return NativeByteOrderIs(order) ? value : ByteReverse(value);
928 void ByteReverse(T *out,
const T *in,
size_t byteCount)
930 assert(byteCount %
sizeof(T) == 0);
931 size_t count = byteCount/
sizeof(T);
932 for (
size_t i=0; i<count; i++)
933 out[i] = ByteReverse(in[i]);
937 inline void ConditionalByteReverse(ByteOrder order, T *out,
const T *in,
size_t byteCount)
939 if (!NativeByteOrderIs(order))
940 ByteReverse(out, in, byteCount);
942 memcpy_s(out, byteCount, in, byteCount);
946 inline void GetUserKey(ByteOrder order, T *out,
size_t outlen,
const byte *in,
size_t inlen)
948 const size_t U =
sizeof(T);
949 assert(inlen <= outlen*U);
950 memcpy_s(out, outlen*U, in, inlen);
951 memset_z((byte *)out+inlen, 0, outlen*U-inlen);
952 ConditionalByteReverse(order, out, out, RoundUpToMultipleOf(inlen, U));
955 #ifndef CRYPTOPP_ALLOW_UNALIGNED_DATA_ACCESS
956 inline byte UnalignedGetWordNonTemplate(ByteOrder order,
const byte *block,
const byte *)
961 inline word16 UnalignedGetWordNonTemplate(ByteOrder order,
const byte *block,
const word16 *)
963 return (order == BIG_ENDIAN_ORDER)
964 ? block[1] | (block[0] << 8)
965 : block[0] | (block[1] << 8);
968 inline word32 UnalignedGetWordNonTemplate(ByteOrder order,
const byte *block,
const word32 *)
970 return (order == BIG_ENDIAN_ORDER)
971 ? word32(block[3]) | (word32(block[2]) << 8) | (word32(block[1]) << 16) | (word32(block[0]) << 24)
972 : word32(block[0]) | (word32(block[1]) << 8) | (word32(block[2]) << 16) | (word32(block[3]) << 24);
975 inline word64 UnalignedGetWordNonTemplate(ByteOrder order,
const byte *block,
const word64 *)
977 return (order == BIG_ENDIAN_ORDER)
980 (word64(block[6]) << 8) |
981 (word64(block[5]) << 16) |
982 (word64(block[4]) << 24) |
983 (word64(block[3]) << 32) |
984 (word64(block[2]) << 40) |
985 (word64(block[1]) << 48) |
986 (word64(block[0]) << 56))
989 (word64(block[1]) << 8) |
990 (word64(block[2]) << 16) |
991 (word64(block[3]) << 24) |
992 (word64(block[4]) << 32) |
993 (word64(block[5]) << 40) |
994 (word64(block[6]) << 48) |
995 (word64(block[7]) << 56));
998 inline void UnalignedPutWordNonTemplate(ByteOrder order, byte *block, byte value,
const byte *xorBlock)
1000 block[0] = xorBlock ? (value ^ xorBlock[0]) : value;
1003 inline void UnalignedPutWordNonTemplate(ByteOrder order, byte *block, word16 value,
const byte *xorBlock)
1005 if (order == BIG_ENDIAN_ORDER)
1009 block[0] = xorBlock[0] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 1);
1010 block[1] = xorBlock[1] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 0);
1014 block[0] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 1);
1015 block[1] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 0);
1022 block[0] = xorBlock[0] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 0);
1023 block[1] = xorBlock[1] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 1);
1027 block[0] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 0);
1028 block[1] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 1);
1033 inline void UnalignedPutWordNonTemplate(ByteOrder order, byte *block, word32 value,
const byte *xorBlock)
1035 if (order == BIG_ENDIAN_ORDER)
1039 block[0] = xorBlock[0] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 3);
1040 block[1] = xorBlock[1] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 2);
1041 block[2] = xorBlock[2] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 1);
1042 block[3] = xorBlock[3] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 0);
1046 block[0] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 3);
1047 block[1] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 2);
1048 block[2] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 1);
1049 block[3] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 0);
1056 block[0] = xorBlock[0] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 0);
1057 block[1] = xorBlock[1] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 1);
1058 block[2] = xorBlock[2] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 2);
1059 block[3] = xorBlock[3] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 3);
1063 block[0] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 0);
1064 block[1] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 1);
1065 block[2] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 2);
1066 block[3] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 3);
1071 inline void UnalignedPutWordNonTemplate(ByteOrder order, byte *block, word64 value,
const byte *xorBlock)
1073 if (order == BIG_ENDIAN_ORDER)
1077 block[0] = xorBlock[0] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 7);
1078 block[1] = xorBlock[1] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 6);
1079 block[2] = xorBlock[2] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 5);
1080 block[3] = xorBlock[3] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 4);
1081 block[4] = xorBlock[4] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 3);
1082 block[5] = xorBlock[5] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 2);
1083 block[6] = xorBlock[6] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 1);
1084 block[7] = xorBlock[7] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 0);
1088 block[0] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 7);
1089 block[1] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 6);
1090 block[2] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 5);
1091 block[3] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 4);
1092 block[4] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 3);
1093 block[5] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 2);
1094 block[6] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 1);
1095 block[7] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 0);
1102 block[0] = xorBlock[0] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 0);
1103 block[1] = xorBlock[1] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 1);
1104 block[2] = xorBlock[2] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 2);
1105 block[3] = xorBlock[3] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 3);
1106 block[4] = xorBlock[4] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 4);
1107 block[5] = xorBlock[5] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 5);
1108 block[6] = xorBlock[6] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 6);
1109 block[7] = xorBlock[7] ^ CRYPTOPP_GET_BYTE_AS_BYTE(value, 7);
1113 block[0] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 0);
1114 block[1] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 1);
1115 block[2] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 2);
1116 block[3] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 3);
1117 block[4] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 4);
1118 block[5] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 5);
1119 block[6] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 6);
1120 block[7] = CRYPTOPP_GET_BYTE_AS_BYTE(value, 7);
1124 #endif // #ifndef CRYPTOPP_ALLOW_UNALIGNED_DATA_ACCESS
1127 inline T GetWord(
bool assumeAligned, ByteOrder order,
const byte *block)
1129 #ifndef CRYPTOPP_ALLOW_UNALIGNED_DATA_ACCESS
1131 return UnalignedGetWordNonTemplate(order, block, (T*)NULL);
1132 assert(IsAligned<T>(block));
1134 return ConditionalByteReverse(order, *reinterpret_cast<const T *>(block));
1138 inline void GetWord(
bool assumeAligned, ByteOrder order, T &result,
const byte *block)
1140 result = GetWord<T>(assumeAligned, order, block);
1144 inline void PutWord(
bool assumeAligned, ByteOrder order, byte *block, T value,
const byte *xorBlock = NULL)
1146 #ifndef CRYPTOPP_ALLOW_UNALIGNED_DATA_ACCESS
1148 return UnalignedPutWordNonTemplate(order, block, value, xorBlock);
1149 assert(IsAligned<T>(block));
1150 assert(IsAligned<T>(xorBlock));
1152 *
reinterpret_cast<T *
>(block) = ConditionalByteReverse(order, value) ^ (xorBlock ? *
reinterpret_cast<const T *
>(xorBlock) : 0);
1155 template <
class T,
class B,
bool A=false>
1160 : m_block((
const byte *)block) {}
1165 CRYPTOPP_COMPILE_ASSERT(
sizeof(U) >=
sizeof(T));
1166 x = GetWord<T>(A, B::ToEnum(), m_block);
1167 m_block +=
sizeof(T);
1172 const byte *m_block;
1175 template <
class T,
class B,
bool A=false>
1179 PutBlock(
const void *xorBlock,
void *block)
1180 : m_xorBlock((
const byte *)xorBlock), m_block((byte *)block) {}
1185 PutWord(A, B::ToEnum(), m_block, (T)x, m_xorBlock);
1186 m_block +=
sizeof(T);
1188 m_xorBlock +=
sizeof(T);
1193 const byte *m_xorBlock;
1197 template <
class T,
class B,
bool GA=false,
bool PA=false>
1206 std::string WordToString(T value, ByteOrder order = BIG_ENDIAN_ORDER)
1208 if (!NativeByteOrderIs(order))
1209 value = ByteReverse(value);
1211 return std::string((
char *)&value,
sizeof(value));
1215 T StringToWord(
const std::string &str, ByteOrder order = BIG_ENDIAN_ORDER)
1218 memcpy_s(&value,
sizeof(value), str.data(), UnsignedMin(str.size(),
sizeof(value)));
1219 return NativeByteOrderIs(order) ? value : ByteReverse(value);
1224 template <
bool overflow>
struct SafeShifter;
1226 template<>
struct SafeShifter<true>
1229 static inline T RightShift(T value,
unsigned int bits)
1235 static inline T LeftShift(T value,
unsigned int bits)
1241 template<>
struct SafeShifter<false>
1244 static inline T RightShift(T value,
unsigned int bits)
1246 return value >> bits;
1250 static inline T LeftShift(T value,
unsigned int bits)
1252 return value << bits;
1256 template <
unsigned int bits,
class T>
1257 inline T SafeRightShift(T value)
1259 return SafeShifter<(bits>=(8*
sizeof(T)))>::RightShift(value, bits);
1262 template <
unsigned int bits,
class T>
1263 inline T SafeLeftShift(T value)
1265 return SafeShifter<(bits>=(8*
sizeof(T)))>::LeftShift(value, bits);
1270 #define CRYPTOPP_BLOCK_1(n, t, s) t* m_##n() {return (t *)(m_aggregate+0);} size_t SS1() {return sizeof(t)*(s);} size_t m_##n##Size() {return (s);}
1271 #define CRYPTOPP_BLOCK_2(n, t, s) t* m_##n() {return (t *)(m_aggregate+SS1());} size_t SS2() {return SS1()+sizeof(t)*(s);} size_t m_##n##Size() {return (s);}
1272 #define CRYPTOPP_BLOCK_3(n, t, s) t* m_##n() {return (t *)(m_aggregate+SS2());} size_t SS3() {return SS2()+sizeof(t)*(s);} size_t m_##n##Size() {return (s);}
1273 #define CRYPTOPP_BLOCK_4(n, t, s) t* m_##n() {return (t *)(m_aggregate+SS3());} size_t SS4() {return SS3()+sizeof(t)*(s);} size_t m_##n##Size() {return (s);}
1274 #define CRYPTOPP_BLOCK_5(n, t, s) t* m_##n() {return (t *)(m_aggregate+SS4());} size_t SS5() {return SS4()+sizeof(t)*(s);} size_t m_##n##Size() {return (s);}
1275 #define CRYPTOPP_BLOCK_6(n, t, s) t* m_##n() {return (t *)(m_aggregate+SS5());} size_t SS6() {return SS5()+sizeof(t)*(s);} size_t m_##n##Size() {return (s);}
1276 #define CRYPTOPP_BLOCK_7(n, t, s) t* m_##n() {return (t *)(m_aggregate+SS6());} size_t SS7() {return SS6()+sizeof(t)*(s);} size_t m_##n##Size() {return (s);}
1277 #define CRYPTOPP_BLOCK_8(n, t, s) t* m_##n() {return (t *)(m_aggregate+SS7());} size_t SS8() {return SS7()+sizeof(t)*(s);} size_t m_##n##Size() {return (s);}
1278 #define CRYPTOPP_BLOCKS_END(i) size_t SST() {return SS##i();} void AllocateBlocks() {m_aggregate.New(SST());} AlignedSecByteBlock m_aggregate;