x86/paravirt: define PARA_INDIRECT for indirect asm calls

On 32-bit it's best to use a %cs: prefix to access memory, since the
other segments may not be set up properly yet.  On 64-bit it's best
to use a rip-relative addressing mode.  Define PARA_INDIRECT() to
abstract this and generate the proper addressing mode in each case.

Signed-off-by: Jeremy Fitzhardinge <jeremy.fitzhardinge@citrix.com>
Cc: xen-devel <xen-devel@lists.xensource.com>
Cc: Stephen Tweedie <sct@redhat.com>
Cc: Eduardo Habkost <ehabkost@redhat.com>
Cc: Mark McLoughlin <markmc@redhat.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Author:    Jeremy Fitzhardinge, 2008-06-25 00:19:15 -04:00
Committer: Ingo Molnar
Parent:    97349135fe
Commit:    491eccb721

--- a/include/asm-x86/paravirt.h
+++ b/include/asm-x86/paravirt.h
@@ -1455,51 +1455,53 @@ static inline unsigned long __raw_local_irq_save(void)
 #define PV_RESTORE_REGS popq %rdx; popq %rcx; popq %rdi; popq %rax
 #define PARA_PATCH(struct, off)        ((PARAVIRT_PATCH_##struct + (off)) / 8)
 #define PARA_SITE(ptype, clobbers, ops) _PVSITE(ptype, clobbers, ops, .quad, 8)
+#define PARA_INDIRECT(addr)    *addr(%rip)
 #else
 #define PV_SAVE_REGS    pushl %eax; pushl %edi; pushl %ecx; pushl %edx
 #define PV_RESTORE_REGS popl %edx; popl %ecx; popl %edi; popl %eax
 #define PARA_PATCH(struct, off)        ((PARAVIRT_PATCH_##struct + (off)) / 4)
 #define PARA_SITE(ptype, clobbers, ops) _PVSITE(ptype, clobbers, ops, .long, 4)
+#define PARA_INDIRECT(addr)    *%cs:addr
 #endif
 #define INTERRUPT_RETURN                                                \
         PARA_SITE(PARA_PATCH(pv_cpu_ops, PV_CPU_iret), CLBR_NONE,      \
-                  jmp *%cs:pv_cpu_ops+PV_CPU_iret)
+                  jmp PARA_INDIRECT(pv_cpu_ops+PV_CPU_iret))
 #define DISABLE_INTERRUPTS(clobbers)                                    \
         PARA_SITE(PARA_PATCH(pv_irq_ops, PV_IRQ_irq_disable), clobbers, \
                   PV_SAVE_REGS;                                         \
-                  call *%cs:pv_irq_ops+PV_IRQ_irq_disable;              \
+                  call PARA_INDIRECT(pv_irq_ops+PV_IRQ_irq_disable);    \
                   PV_RESTORE_REGS;)                                     \
 #define ENABLE_INTERRUPTS(clobbers)                                     \
         PARA_SITE(PARA_PATCH(pv_irq_ops, PV_IRQ_irq_enable), clobbers,  \
                   PV_SAVE_REGS;                                         \
-                  call *%cs:pv_irq_ops+PV_IRQ_irq_enable;               \
+                  call PARA_INDIRECT(pv_irq_ops+PV_IRQ_irq_enable);     \
                   PV_RESTORE_REGS;)
 #define ENABLE_INTERRUPTS_SYSCALL_RET                                   \
         PARA_SITE(PARA_PATCH(pv_cpu_ops, PV_CPU_irq_enable_syscall_ret),\
                   CLBR_NONE,                                            \
-                  jmp *%cs:pv_cpu_ops+PV_CPU_irq_enable_syscall_ret)
+                  jmp PARA_INDIRECT(pv_cpu_ops+PV_CPU_irq_enable_syscall_ret))
 #ifdef CONFIG_X86_32
 #define GET_CR0_INTO_EAX                                \
         push %ecx; push %edx;                           \
-        call *pv_cpu_ops+PV_CPU_read_cr0;               \
+        call PARA_INDIRECT(pv_cpu_ops+PV_CPU_read_cr0); \
         pop %edx; pop %ecx
 #else
 #define SWAPGS                                                          \
         PARA_SITE(PARA_PATCH(pv_cpu_ops, PV_CPU_swapgs), CLBR_NONE,    \
                   PV_SAVE_REGS;                                         \
-                  call *pv_cpu_ops+PV_CPU_swapgs;                       \
+                  call PARA_INDIRECT(pv_cpu_ops+PV_CPU_swapgs);         \
                   PV_RESTORE_REGS                                       \
                  )
 #define GET_CR2_INTO_RCX                                \
-        call *pv_mmu_ops+PV_MMU_read_cr2;               \
+        call PARA_INDIRECT(pv_mmu_ops+PV_MMU_read_cr2); \
         movq %rax, %rcx;                                \
         xorq %rax, %rax;
 #endif
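
For reference, a rough sketch of how a converted call site expands after
preprocessing on each architecture (illustrative only, not part of the patch;
the jmp operand is the one from the INTERRUPT_RETURN hunk above):

        # 64-bit: PARA_INDIRECT(addr) is *addr(%rip), so
        jmp PARA_INDIRECT(pv_cpu_ops+PV_CPU_iret)
        # becomes a rip-relative indirect jump:
        jmp *pv_cpu_ops+PV_CPU_iret(%rip)

        # 32-bit: PARA_INDIRECT(addr) is *%cs:addr, so the same site
        # becomes a %cs-prefixed absolute indirect jump:
        jmp *%cs:pv_cpu_ops+PV_CPU_iret

Either way the jump goes through the iret entry of pv_cpu_ops; only the
addressing mode used to reach that function pointer differs, which is exactly
what PARA_INDIRECT() abstracts.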