From df7814ce92f6b837d1e753b53300f26c2ec1ba0b Mon Sep 17 00:00:00 2001
From: Anton Korobeynikov
Date: Mon, 10 Dec 2007 15:13:55 +0000
Subject: [PATCH] Provide annotation for SSE version of callback. It's even
 more broken, because it doesn't mark xmm regs properly

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@44793 91177308-0d34-0410-b5e6-96231b3b80d8
---
 lib/Target/X86/X86JITInfo.cpp | 27 ++++++++++++++++++++++++++-
 1 file changed, 26 insertions(+), 1 deletion(-)

diff --git a/lib/Target/X86/X86JITInfo.cpp b/lib/Target/X86/X86JITInfo.cpp
index f750b560706..01d2c5c693a 100644
--- a/lib/Target/X86/X86JITInfo.cpp
+++ b/lib/Target/X86/X86JITInfo.cpp
@@ -162,14 +162,24 @@ extern "C" {
     ".align 8\n"
     ".globl " ASMPREFIX "X86CompilationCallback_SSE\n"
   ASMPREFIX "X86CompilationCallback_SSE:\n"
+    ".cfi_startproc\n"
     "pushl %ebp\n"
+    ".cfi_def_cfa_offset 8\n"
+    ".cfi_offset ebp, -8\n"
     "movl %esp, %ebp\n"      // Standard prologue
+    ".cfi_def_cfa_register ebp\n"
     "pushl %eax\n"
+    ".cfi_rel_offset eax, 0\n"
     "pushl %edx\n"           // Save EAX/EDX/ECX
+    ".cfi_rel_offset edx, 4\n"
     "pushl %ecx\n"
+    ".cfi_rel_offset ecx, 8\n"
     "andl $-16, %esp\n"      // Align ESP on 16-byte boundary
     // Save all XMM arg registers
     "subl $64, %esp\n"
+    // FIXME: provide frame move information for xmm registers.
+    // This can be tricky, because the CFA register is ebp (unaligned)
+    // and we need to produce offsets relative to it.
     "movaps %xmm0, (%esp)\n"
     "movaps %xmm1, 16(%esp)\n"
     "movaps %xmm2, 32(%esp)\n"
@@ -181,16 +191,31 @@ extern "C" {
     "call " ASMPREFIX "X86CompilationCallback2\n"
     "addl $16, %esp\n"
     "movaps 48(%esp), %xmm3\n"
+    ".cfi_restore xmm3\n"
     "movaps 32(%esp), %xmm2\n"
+    ".cfi_restore xmm2\n"
     "movaps 16(%esp), %xmm1\n"
+    ".cfi_restore xmm1\n"
     "movaps (%esp), %xmm0\n"
+    ".cfi_restore xmm0\n"
     "movl %ebp, %esp\n"      // Restore ESP
+    ".cfi_def_cfa_register esp\n"
     "subl $12, %esp\n"
+    ".cfi_adjust_cfa_offset 12\n"
     "popl %ecx\n"
+    ".cfi_adjust_cfa_offset -4\n"
+    ".cfi_restore ecx\n"
     "popl %edx\n"
+    ".cfi_adjust_cfa_offset -4\n"
+    ".cfi_restore edx\n"
     "popl %eax\n"
+    ".cfi_adjust_cfa_offset -4\n"
+    ".cfi_restore eax\n"
     "popl %ebp\n"
-    "ret\n");
+    ".cfi_adjust_cfa_offset -4\n"
+    ".cfi_restore ebp\n"
+    "ret\n"
+    ".cfi_endproc\n");
 #else
   void X86CompilationCallback2(void);
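
For reference, here is a minimal, hypothetical sketch (not part of the patch) of how the same .cfi_* directives describe a hand-written ia32 prologue/epilogue so a DWARF unwinder can step through the stub. The file name, the stub name cfi_demo_stub, and the build command are made up for illustration; it assumes an x86-32 GNU toolchain whose assembler accepts CFI directives in inline asm, the same way X86JITInfo.cpp uses them.

// cfi_demo.cpp -- hypothetical, standalone illustration; not from X86JITInfo.cpp.
// Assumed build: g++ -m32 cfi_demo.cpp -o cfi_demo
extern "C" void cfi_demo_stub();

asm(
  ".text\n"
  ".globl cfi_demo_stub\n"
  "cfi_demo_stub:\n"
  ".cfi_startproc\n"            // open an unwind entry (FDE) for this stub
  "pushl %ebp\n"
  ".cfi_def_cfa_offset 8\n"     // CFA = %esp + 8 (return address + saved %ebp)
  ".cfi_offset ebp, -8\n"       // caller's %ebp is stored at CFA - 8
  "movl %esp, %ebp\n"           // standard frame setup
  ".cfi_def_cfa_register ebp\n" // from here on CFA = %ebp + 8, even if %esp moves
  "subl $16, %esp\n"            // scratch space; no CFA note needed (CFA is %ebp-based)
  // ... stub body would go here ...
  "movl %ebp, %esp\n"           // tear the frame down
  ".cfi_def_cfa_register esp\n" // %esp == %ebp here, so CFA = %esp + 8 again
  "popl %ebp\n"
  ".cfi_adjust_cfa_offset -4\n" // the pop raised %esp by 4: CFA = %esp + 4
  ".cfi_restore ebp\n"          // %ebp holds the caller's value again
  "ret\n"
  ".cfi_endproc\n");

int main() {
  cfi_demo_stub();              // nothing to observe at runtime; the point is the unwind info
  return 0;
}

The patch follows this same pattern for the integer registers; as its FIXME notes, the XMM save slots are not yet described, because they live in the %esp-aligned area while the CFA register is the unaligned %ebp, so their offsets from the CFA are not a simple constant.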