Save all registers by default, as they can be used to pass parameters
for "inreg" calls


git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@33631 91177308-0d34-0410-b5e6-96231b3b80d8
Anton Korobeynikov 2007-01-29 21:28:01 +00:00
parent 908049b7b6
commit 1620f1ad91
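For context (an editor's sketch, not part of the commit): the x86 "inreg"/fastcc conventions the JIT handles can pass integer arguments in EAX, EDX and ECX, the same registers GCC's regparm(3) uses, which is why the FASTCC_NUM_INT_ARGS_INREGS guards are dropped below and ECX is now saved unconditionally. A hypothetical C illustration of the convention (the function names are made up):

    /* Illustration only, not from this commit: with GCC-style regparm(3),
     * which mirrors the register usage the stub must protect, the first
     * three integer arguments travel in EAX, EDX and ECX rather than on
     * the stack. */
    int __attribute__((regparm(3))) callee(int a, int b, int c);

    int caller(void) {
      /* At the call site: a -> EAX, b -> EDX, c -> ECX.  If callee has not
       * been JIT-compiled yet, control first reaches X86CompilationCallback,
       * which must leave all three registers intact. */
      return callee(1, 2, 3);
    }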


@@ -118,10 +118,9 @@ extern "C" {
   ASMPREFIX "X86CompilationCallback:\n"
     "pushl %ebp\n"
     "movl %esp, %ebp\n"    // Standard prologue
-#if FASTCC_NUM_INT_ARGS_INREGS > 0
     "pushl %eax\n"
-    "pushl %edx\n"         // Save EAX/EDX
-#endif
+    "pushl %edx\n"         // Save EAX/EDX/ECX
+    "pushl %ecx\n"
 #if defined(__APPLE__)
     "andl $-16, %esp\n"    // Align ESP on 16-byte boundary
 #endif
@@ -131,11 +130,10 @@ extern "C" {
     "movl %ebp, (%esp)\n"
     "call " ASMPREFIX "X86CompilationCallback2\n"
     "movl %ebp, %esp\n"    // Restore ESP
-#if FASTCC_NUM_INT_ARGS_INREGS > 0
-    "subl $8, %esp\n"
+    "subl $12, %esp\n"
+    "popl %ecx\n"
     "popl %edx\n"
     "popl %eax\n"
-#endif
     "popl %ebp\n"
     "ret\n");
@@ -148,10 +146,9 @@ extern "C" {
   ASMPREFIX "X86CompilationCallback_SSE:\n"
     "pushl %ebp\n"
     "movl %esp, %ebp\n"    // Standard prologue
-#if FASTCC_NUM_INT_ARGS_INREGS > 0
     "pushl %eax\n"
-    "pushl %edx\n"         // Save EAX/EDX
-#endif
+    "pushl %edx\n"         // Save EAX/EDX/ECX
+    "pushl %ecx\n"
     "andl $-16, %esp\n"    // Align ESP on 16-byte boundary
     // Save all XMM arg registers
     "subl $64, %esp\n"
@@ -170,11 +167,10 @@ extern "C" {
     "movaps 16(%esp), %xmm1\n"
     "movaps (%esp), %xmm0\n"
     "movl %ebp, %esp\n"    // Restore ESP
-#if FASTCC_NUM_INT_ARGS_INREGS > 0
-    "subl $8, %esp\n"
+    "subl $12, %esp\n"
+    "popl %ecx\n"
     "popl %edx\n"
     "popl %eax\n"
-#endif
     "popl %ebp\n"
     "ret\n");
 #else
@@ -184,7 +180,9 @@ extern "C" {
     __asm {
       push  eax
       push  edx
+      push  ecx
      call  X86CompilationCallback2
+      pop   ecx
       pop   edx
       pop   eax
       ret
@@ -208,7 +206,7 @@ extern "C" {
 extern "C" void X86CompilationCallback2() {
   assert(sizeof(size_t) == 4); // FIXME: handle Win64
   unsigned *RetAddrLoc = (unsigned *)_AddressOfReturnAddress();
-  RetAddrLoc += 3;  // skip over ret addr, edx, eax
+  RetAddrLoc += 4;  // skip over ret addr, edx, eax, ecx
   unsigned RetAddr = *RetAddrLoc;
 #else
 extern "C" void X86CompilationCallback2(intptr_t *StackPtr, intptr_t RetAddr) {