#include <asm/pdc.h>
#include <asm/psw.h>
#include <asm/assembly.h>
#include <asm/asm-offsets.h>
#include <linux/linkage.h>
/*
 * 8 kB page-aligned stack used while the CPU runs in real (physical)
 * mode.  The 32-bit and 64-bit real-mode call paths share the same
 * buffer — both labels refer to the same storage.
 */
.export real_stack
.export real64_stack
__PAGE_ALIGNED_BSS
real_stack:
real64_stack:
.block 8192
/*
 * Scratch area for the N_SAVED_REGS control registers that
 * save_control_regs stores (cr24-cr31 plus cr15) and
 * restore_control_regs reloads.  Accessed via its physical address
 * (PA(...)) while in real mode.
 */
#define N_SAVED_REGS 9
.section .bss
save_cr_space:
.block REG_SZ * N_SAVED_REGS
save_cr_end:
.text
/*
 * real32_call_asm — call a 32-bit routine (e.g. firmware/PDC code)
 * in real (physical) mode, then return to virtual mode.
 *
 * In:  %arg0 = top of the real-mode stack to switch to
 *      %arg1 = pointer into an argument table; the callee's four
 *              32-bit arguments are fetched from offsets 0, -4, -8, -12
 *      %arg2 = address of the real-mode function to call
 * Out: the callee's return value registers are passed through untouched.
 * NOTE(review): exact C prototype is not visible in this file — confirm
 * against the C-side caller.
 *
 * Saves %rp (and, on 64-bit, the callee-saved set plus %r27/%r29),
 * switches stacks, drops to real mode, calls the target with the PSW
 * W-bit cleared (32-bit addressing), then undoes everything.
 */
ENTRY_CFI(real32_call_asm)
STREG %rp, -RP_OFFSET(%sp)		/* save return pointer on kernel stack */
#ifdef CONFIG_64BIT
callee_save				/* spill callee-saved registers */
ldo 2*REG_SZ(%sp), %sp			/* room for the two saves below */
STREG %r27, -1*REG_SZ(%sp)		/* save %r27 (dp) and %r29 */
STREG %r29, -2*REG_SZ(%sp)
#endif
STREG %sp, -REG_SZ(%arg0)		/* stash old %sp on the new stack */
copy %arg0, %sp				/* switch to the real-mode stack */
copy %arg2, %r31			/* %r31 = function to call */
ldw 0(%arg1), %arg0			/* fetch the callee's 32-bit args */
ldw -8(%arg1), %arg2
ldw -12(%arg1), %arg3
ldw -4(%arg1), %arg1			/* overwrite the base pointer last */
tophys_r1 %sp				/* stack pointer: virtual -> physical */
b,l rfi_virt2real,%r2			/* enter real mode */
nop
b,l save_control_regs,%r2		/* preserve CRs the callee may clobber */
nop
#ifdef CONFIG_64BIT
rsm PSW_SM_W, %r0			/* clear W-bit: 32-bit addressing for callee */
#endif
load32 PA(ric_ret), %r2			/* physical return address for the call */
bv 0(%r31)				/* call the real-mode routine */
nop
ric_ret:
#ifdef CONFIG_64BIT
ssm PSW_SM_W, %r0			/* set W-bit again: back to 64-bit addressing */
#endif
b,l restore_control_regs, %r2
nop
b,l rfi_real2virt,%r2			/* return to virtual mode */
nop
tovirt_r1 %sp				/* stack pointer: physical -> virtual */
LDREG -REG_SZ(%sp), %sp			/* switch back to the saved kernel stack */
#ifdef CONFIG_64BIT
LDREG -1*REG_SZ(%sp), %r27		/* restore %r27/%r29 and pop the frame */
LDREG -2*REG_SZ(%sp), %r29
ldo -2*REG_SZ(%sp), %sp
callee_rest
#endif
LDREG -RP_OFFSET(%sp), %rp		/* restore return pointer and return */
bv 0(%rp)
nop
ENDPROC_CFI(real32_call_asm)
/* PUSH_CR: read control register r into %r1, store it at (where) and
 * post-increment the pointer by REG_SZ ("!" separates the two
 * instructions; ",ma" = modify-after addressing).
 * POP_CR: pre-decrement (where) (",mb" = modify-before), load the
 * saved value into %r1 and write it back into control register r.
 * Both clobber %r1. */
# define PUSH_CR(r, where) mfctl r, %r1 ! STREG,ma %r1, REG_SZ(where)
# define POP_CR(r, where) LDREG,mb -REG_SZ(where), %r1 ! mtctl %r1, r
.text
/*
 * save_control_regs — copy the control registers that a real-mode
 * callee may clobber (cr24-cr31 and cr15) into save_cr_space,
 * in this fixed order; restore_control_regs pops them in reverse.
 * In:  %r2 = return address.  Clobbers %r1, %r28.
 * Must run in real mode: uses the physical address PA(save_cr_space).
 */
ENTRY_CFI(save_control_regs)
load32 PA(save_cr_space), %r28		/* %r28 walks upward through the buffer */
PUSH_CR(%cr24, %r28)
PUSH_CR(%cr25, %r28)
PUSH_CR(%cr26, %r28)
PUSH_CR(%cr27, %r28)
PUSH_CR(%cr28, %r28)
PUSH_CR(%cr29, %r28)
PUSH_CR(%cr30, %r28)
PUSH_CR(%cr31, %r28)
PUSH_CR(%cr15, %r28)			/* cr15 last — popped first on restore */
bv 0(%r2)
nop
ENDPROC_CFI(save_control_regs)
/*
 * restore_control_regs — reload the control registers saved by
 * save_control_regs, walking downward from save_cr_end in the exact
 * reverse of the save order.
 * In:  %r2 = return address.  Clobbers %r1, %r26.
 * Must run in real mode: uses the physical address PA(save_cr_end).
 */
ENTRY_CFI(restore_control_regs)
load32 PA(save_cr_end), %r26		/* %r26 walks downward through the buffer */
POP_CR(%cr15, %r26)
POP_CR(%cr31, %r26)
POP_CR(%cr30, %r26)
POP_CR(%cr29, %r26)
POP_CR(%cr28, %r26)
POP_CR(%cr27, %r26)
POP_CR(%cr26, %r26)
POP_CR(%cr25, %r26)
POP_CR(%cr24, %r26)
bv 0(%r2)
nop
ENDPROC_CFI(restore_control_regs)
.text
.align 128
/*
 * rfi_virt2real — switch the CPU from virtual to real (physical)
 * mode via an rfi, then branch back to the caller through the
 * PHYSICAL address of %r2.
 * In:  %r2 = (virtual) return address.  Clobbers %r1; disables
 *      external interrupts (PSW I-bit) as a side effect.
 * The instruction ordering below (nops, rsm Q, cr17/cr18 queue
 * writes, cr22, rfi) follows the architected sequence for changing
 * the PSW with rfi — do not reorder.
 */
ENTRY_CFI(rfi_virt2real)
#if !defined(BOOTLOADER)
rsm PSW_SM_I,%r0			/* disable external interrupts */
load32 PA(rfi_v2r_1), %r1		/* physical rfi target */
nop					/* spacing before clearing the Q-bit —
					 * presumably architecturally required;
					 * kept exactly as written */
nop
nop
nop
nop
rsm PSW_SM_Q,%r0			/* stop interrupt state collection */
mtctl %r0, %cr17			/* IIA space queue: space 0, both slots */
mtctl %r0, %cr17
mtctl %r1, %cr18			/* IIA offset queue: rfi_v2r_1 ... */
ldo 4(%r1), %r1
mtctl %r1, %cr18			/* ... and rfi_v2r_1 + 4 */
load32 REAL_MODE_PSW, %r1
mtctl %r1, %cr22			/* IPSW = real-mode PSW */
rfi					/* resume at rfi_v2r_1 with new PSW */
nop					/* never reached: padding after rfi */
nop
nop
nop
nop
nop
nop
nop
rfi_v2r_1:
tophys_r1 %r2				/* return address: virtual -> physical */
#endif
bv 0(%r2)
nop
ENDPROC_CFI(rfi_virt2real)
.text
.align 128
/*
 * rfi_real2virt — switch the CPU from real (physical) back to
 * virtual mode via an rfi, then branch back to the caller through
 * the VIRTUAL address of %r2.  Mirror image of rfi_virt2real.
 * In:  %r2 = (physical) return address.  Clobbers %r1; disables
 *      external interrupts (PSW I-bit) as a side effect.
 * The instruction ordering below is the architected rfi sequence —
 * do not reorder.
 */
ENTRY_CFI(rfi_real2virt)
#if !defined(BOOTLOADER)
rsm PSW_SM_I,%r0			/* disable external interrupts */
load32 (rfi_r2v_1), %r1			/* virtual rfi target (no PA() here) */
nop					/* spacing before clearing the Q-bit —
					 * kept exactly as written */
nop
nop
nop
nop
rsm PSW_SM_Q,%r0			/* stop interrupt state collection */
mtctl %r0, %cr17			/* IIA space queue: space 0, both slots */
mtctl %r0, %cr17
mtctl %r1, %cr18			/* IIA offset queue: rfi_r2v_1 ... */
ldo 4(%r1), %r1
mtctl %r1, %cr18			/* ... and rfi_r2v_1 + 4 */
load32 KERNEL_PSW, %r1
mtctl %r1, %cr22			/* IPSW = normal kernel PSW */
rfi					/* resume at rfi_r2v_1 with new PSW */
nop					/* never reached: padding after rfi */
nop
nop
nop
nop
nop
nop
nop
rfi_r2v_1:
tovirt_r1 %r2				/* return address: physical -> virtual */
#endif
bv 0(%r2)
nop
ENDPROC_CFI(rfi_real2virt)
#ifdef CONFIG_64BIT
.text
/*
 * real64_call_asm — call a 64-bit routine in real (physical) mode,
 * then return to virtual mode.  64-bit-only sibling of
 * real32_call_asm: no callee_save/W-bit games are needed since the
 * callee uses the same (wide) addressing mode.
 *
 * In:  %arg0 = top of the real-mode stack to switch to
 *      %arg1 = base of a table of eight 64-bit arguments
 *              (slots 0-7, loaded into %arg0-%arg3 and %r22-%r19)
 *      %arg2 = address of the real-mode function to call
 * Out: the callee's return value registers are passed through.
 */
ENTRY_CFI(real64_call_asm)
std %rp, -0x10(%sp)			/* save return pointer on kernel stack */
std %sp, -8(%arg0)			/* stash old %sp on the new stack */
copy %arg0, %sp				/* switch to the real-mode stack */
copy %arg2, %r31			/* %r31 = function to call */
ldd 0*REG_SZ(%arg1), %arg0		/* fetch eight 64-bit args */
ldd 2*REG_SZ(%arg1), %arg2
ldd 3*REG_SZ(%arg1), %arg3
ldd 4*REG_SZ(%arg1), %r22		/* args 5-8 per 64-bit convention */
ldd 5*REG_SZ(%arg1), %r21
ldd 6*REG_SZ(%arg1), %r20
ldd 7*REG_SZ(%arg1), %r19
ldd 1*REG_SZ(%arg1), %arg1		/* overwrite the base pointer last */
tophys_r1 %sp				/* stack pointer: virtual -> physical */
ldo -16(%sp), %r29			/* set up %r29 (ap) below the frame */
b,l rfi_virt2real,%r2			/* enter real mode */
nop
b,l save_control_regs,%r2		/* preserve CRs the callee may clobber */
nop
load32 PA(r64_ret), %r2			/* physical return address for the call */
bv 0(%r31)				/* call the real-mode routine */
nop
r64_ret:
b,l restore_control_regs, %r2
nop
b,l rfi_real2virt,%r2			/* return to virtual mode */
nop
tovirt_r1 %sp				/* stack pointer: physical -> virtual */
ldd -8(%sp), %sp			/* switch back to the saved kernel stack */
ldd -0x10(%sp), %rp			/* restore return pointer and return */
bv 0(%rp)
nop
ENDPROC_CFI(real64_call_asm)
#endif
.text
/*
 * __canonicalize_funcptr_for_compare — helper the compiler calls when
 * comparing function pointers; this implementation simply returns its
 * argument unchanged.
 * In:  %r26 (arg0) = function pointer.  Out: %r28 (ret0) = same value.
 * Note: the copy sits in the branch delay slot, so it executes on
 * both the 64-bit (bve) and 32-bit (bv) paths.
 */
ENTRY_CFI(__canonicalize_funcptr_for_compare)
#ifdef CONFIG_64BIT
bve (%r2)
#else
bv %r0(%r2)
#endif
copy %r26,%r28				/* executed in the delay slot */
ENDPROC_CFI(__canonicalize_funcptr_for_compare)