Fix VMAC compile on 32-bit Solaris

The change tested well on 32-bit Linux and 32-bit OS X.
Jeffrey Walton 2020-07-08 03:21:58 -04:00
parent d1764c9e8b
commit cd6ed6aeda

@@ -193,22 +193,24 @@ void VMAC_Base::VHASH_Update_SSE2(const word64 *data, size_t blocksRemainingInWord64
     CRYPTOPP_UNUSED(blocksRemainingInWord64);
     // This inline ASM is tricky, and down right difficult when PIC is
-    // in effect. The ASM uses all the general purpose registers. When
-    // PIC is in effect, GCC uses EBX as a base register. Saving EBX with
+    // in effect. The ASM uses all the general purpose registers and all
+    // the XMM registers on 32-bit machines. When PIC is in effect on a
+    // 32-bit machine, GCC uses EBX as a base register. Saving EBX with
     // 'mov %%ebx, %0' and restoring EBX with 'mov %0, %%ebx' causes GCC
     // to generate 'mov -0x40(%ebx), %ebx' for the restore. That obviously
-    // won't work. We can push and pop EBX, but then we have to be careful
-    // because GCC references %1 (L1KeyLength) relative to ESP, which is
-    // also used in the function and no longer accurate. Attempting to
-    // sidestep the issues with clobber lists results in "error: asm
-    // operand has impossible constraints", though we were able to tell
-    // GCC that ESP is dirty. The problems with GCC are the reason for the
-    // pushes and pops rather than the original moves.
+    // won't work because EBX is no longer accurate. We can push and pop
+    // EBX, but that breaks stack-based references. Attempting to sidestep
+    // the issues with clobber lists results in "error: asm operand has
+    // impossible constraints". Eventually, we found we could save EBX to
+    // ESP-20, which is one word below our stack in the frame.
 #ifdef __GNUC__
     __asm__ __volatile__
     (
-    AS1( push %0)   // L1KeyLength
-    AS1( pop %%ebx)
+# if defined(__i386__) || defined(__i686__)
+    // Save EBX for PIC
+    AS2( mov %%ebx, -20(%%esp))
+# endif
+    AS2( mov %0, %%ebx)   // L1KeyLength
     INTEL_NOPREFIX
 #else
     #if defined(__INTEL_COMPILER)
@@ -427,11 +429,18 @@ void VMAC_Base::VHASH_Update_SSE2(const word64 *data, size_t blocksRemainingInWord64
     AS_POP_IF86( bp)
     AS1( emms)
 #ifdef __GNUC__
     ATT_PREFIX
+# if defined(__i386__) || defined(__i686__)
+    // Restore EBX for PIC
+    AS2( mov -20(%%esp), %%ebx)
+# endif
     :
     : "m" (L1KeyLength), "c" (blocksRemainingInWord64), "S" (data),
       "D" (nhK+tagPart*2), "d" (m_isFirstBlock), "a" (polyS+tagPart*4)
-    : "ebx", "memory", "cc"
+    : "memory", "cc"
     );
 #endif
 }
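
The same idea, shown outside the VMAC macros: a minimal sketch, assuming GCC or Clang extended asm on 32-bit x86 (i386/i686). Under PIC, EBX is the GOT base register, so rather than naming it in the clobber list the asm block stashes EBX one word below ESP, uses it, and restores it before compiler-generated code runs again. The add_one wrapper, the pinned operand constraints, and the -20(%esp) slot mirror the commit's trick but are illustrative assumptions, not Crypto++ code.

// Minimal sketch of the EBX save/restore trick used in the commit above.
// Assumptions: GCC/Clang extended asm on 32-bit x86; add_one and its
// operand constraints are illustrative only.
#include <iostream>

static int add_one(int in)
{
    int out = in;
#if defined(__GNUC__) && (defined(__i386__) || defined(__i686__))
    __asm__ __volatile__
    (
        "movl %%ebx, -20(%%esp)\n\t"  // stash EBX one word below ESP (PIC base)
        "movl %1, %%ebx\n\t"          // EBX is now free for our own use
        "leal 1(%%ebx), %0\n\t"       // out = in + 1, computed through EBX
        "movl -20(%%esp), %%ebx\n\t"  // restore EBX before GCC touches the GOT
        : "=a" (out)                  // result pinned to EAX
        : "c" (in)                    // input pinned to ECX
        : "memory", "cc"              // note: "ebx" is deliberately not listed
    );
#else
    out = in + 1;                     // portable fallback for other targets
#endif
    return out;
}

int main()
{
    std::cout << add_one(41) << std::endl;  // prints 42
    return 0;
}

Because 32-bit x86 has no red zone, a store below ESP survives only as long as nothing else (for example, a signal handler) runs between the save and the restore; that is the trade-off the new comment accepts with "one word below our stack in the frame".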