#include <linux/linkage.h>
#include <linux/export.h>
#include <asm/asm.h>
#include <asm/asm-extable.h>
#include <asm/csr.h>
#include <asm/hwcap.h>
#include <asm/alternative-macros.h>
/*
 * fixup op reg addr lbl
 *
 * Emit a single accessor instruction (\op \reg, \addr) tagged with an
 * exception-table entry: if the access at local label 100 takes a fault,
 * the trap handler resumes execution at \lbl instead of oopsing.
 */
.macro fixup op reg addr lbl
100:
\op \reg, \addr
_asm_extable 100b, \lbl
.endm
/*
 * unsigned long __asm_copy_to_user(dst, src, n)
 * unsigned long __asm_copy_from_user(dst, src, n)  (alias, same body)
 *
 * a0 = dst, a1 = src, a2 = byte count.
 * Returns a0 = number of bytes NOT copied (0 on success) — produced by
 * the scalar fallback's fault path.
 *
 * When CONFIG_RISCV_ISA_V is off this body is empty and execution falls
 * through into fallback_scalar_usercopy, which must immediately follow
 * (SYM_FUNC_ALIAS/EXPORT_SYMBOL emit metadata only, no instructions).
 */
SYM_FUNC_START(__asm_copy_to_user)
#ifdef CONFIG_RISCV_ISA_V
	/* Boot-time patched: the 'j' becomes 'nop' when ZVE32X is present,
	 * enabling the vector path below. */
ALTERNATIVE("j fallback_scalar_usercopy", "nop", 0, RISCV_ISA_EXT_ZVE32X, CONFIG_RISCV_ISA_V)
REG_L t0, riscv_v_usercopy_threshold
	/* Small copies are not worth the vector setup cost. */
bltu a2, t0, fallback_scalar_usercopy
	/* a3 = 1 here vs a3 = 0 in the _sum_enabled variant — presumably
	 * tells enter_vector_usercopy whether it must toggle SUM itself;
	 * NOTE(review): confirm against enter_vector_usercopy. */
li a3, 1
tail enter_vector_usercopy
#endif
SYM_FUNC_END(__asm_copy_to_user)
EXPORT_SYMBOL(__asm_copy_to_user)
SYM_FUNC_ALIAS(__asm_copy_from_user, __asm_copy_to_user)
EXPORT_SYMBOL(__asm_copy_from_user)
/*
 * Scalar user copy with SUM management: enables supervisor access to
 * user pages (SR_SUM) around a call to the SUM-already-enabled body,
 * then disables it again.
 *
 * a0 = dst, a1 = src, a2 = n; returns a0 = bytes NOT copied.
 *
 * The return address is stashed in t6 instead of the stack; this relies
 * on fallback_scalar_usercopy_sum_enabled clobbering only t0-t5 and the
 * a-registers, never t6.
 */
SYM_FUNC_START(fallback_scalar_usercopy)
li t6, SR_SUM
csrs CSR_STATUS, t6		/* allow S-mode access to user memory */
mv t6, ra			/* save ra; callee must leave t6 alone */
call fallback_scalar_usercopy_sum_enabled
mv ra, t6			/* restore our return address */
li t6, SR_SUM
csrc CSR_STATUS, t6		/* drop user-memory access again */
ret
SYM_FUNC_END(fallback_scalar_usercopy)
/*
 * Same entry logic as __asm_copy_to_user, but for callers that have
 * already set SR_SUM themselves: a0 = dst, a1 = src, a2 = n;
 * returns a0 = bytes NOT copied (0 on success).
 *
 * Without CONFIG_RISCV_ISA_V the body is empty; the branch/fall-through
 * target is the _sum_enabled scalar body, which does not touch SUM.
 */
SYM_FUNC_START(__asm_copy_to_user_sum_enabled)
#ifdef CONFIG_RISCV_ISA_V
	/* Patched to 'nop' at boot when ZVE32X is available. */
ALTERNATIVE("j fallback_scalar_usercopy_sum_enabled", "nop", 0, RISCV_ISA_EXT_ZVE32X, CONFIG_RISCV_ISA_V)
REG_L t0, riscv_v_usercopy_threshold
bltu a2, t0, fallback_scalar_usercopy_sum_enabled
	/* a3 = 0: contrast with a3 = 1 in __asm_copy_to_user — presumably
	 * "SUM is already on"; NOTE(review): confirm in enter_vector_usercopy. */
li a3, 0
tail enter_vector_usercopy
#endif
SYM_FUNC_END(__asm_copy_to_user_sum_enabled)
SYM_FUNC_ALIAS(__asm_copy_from_user_sum_enabled, __asm_copy_to_user_sum_enabled)
EXPORT_SYMBOL(__asm_copy_from_user_sum_enabled)
EXPORT_SYMBOL(__asm_copy_to_user_sum_enabled)
/*
 * Scalar memcpy body; caller has already set SR_SUM.
 *
 * In:   a0 = dst, a1 = src, a2 = byte count
 * Out:  a0 = number of bytes NOT copied (0 on success)
 *
 * Register roles:
 *   t5 = dst + n, kept untouched so the fault path (label 10) can
 *        compute the residual count
 *   t0 = working end-of-dst bound for the copy loops
 *   a3 = scratch: size threshold, then src misalignment
 *
 * Strategy: short copies go straight to the byte loop; otherwise align
 * dst to SZREG, then either copy 8 words per iteration (src co-aligned)
 * or reassemble words with shifts (src misaligned), and finish the tail
 * byte-wise. Every user access goes through the 'fixup' macro so a
 * fault lands on label 10.
 */
SYM_FUNC_START(fallback_scalar_usercopy_sum_enabled)
add t5, a0, a2		/* t5 = terminal dst address (for fault accounting) */
add t0, a0, a2		/* t0 = end-of-dst loop bound */
li a3, 9*SZREG-1	/* below ~9 words the alignment setup isn't worth it */
bltu a2, a3, .Lbyte_copy_tail
/* Round dst up to the next SZREG boundary; copy the head byte-wise. */
addi t1, a0, SZREG-1
andi t1, t1, ~(SZREG-1)	/* t1 = first SZREG-aligned dst address */
beq a0, t1, .Lskip_align_dst
1:
fixup lb a5, 0(a1), 10f
addi a1, a1, 1
fixup sb a5, 0(a0), 10f
addi a0, a0, 1
bltu a0, t1, 1b
.Lskip_align_dst:
/* dst is now aligned; a3 = src offset within a word. */
andi a3, a1, SZREG-1
bnez a3, .Lshift_copy
.Lword_copy:
/*
 * src and dst co-aligned: copy 8 words per iteration. Pull the bound
 * in by 8*SZREG so 'bleu a0, t0' guarantees a full block remains;
 * restore it afterwards for the byte tail.
 */
addi t0, t0, -(8*SZREG)
2:
fixup REG_L a4, 0(a1), 10f
fixup REG_L a5, SZREG(a1), 10f
fixup REG_L a6, 2*SZREG(a1), 10f
fixup REG_L a7, 3*SZREG(a1), 10f
fixup REG_L t1, 4*SZREG(a1), 10f
fixup REG_L t2, 5*SZREG(a1), 10f
fixup REG_L t3, 6*SZREG(a1), 10f
fixup REG_L t4, 7*SZREG(a1), 10f
fixup REG_S a4, 0(a0), 10f
fixup REG_S a5, SZREG(a0), 10f
fixup REG_S a6, 2*SZREG(a0), 10f
fixup REG_S a7, 3*SZREG(a0), 10f
fixup REG_S t1, 4*SZREG(a0), 10f
fixup REG_S t2, 5*SZREG(a0), 10f
fixup REG_S t3, 6*SZREG(a0), 10f
fixup REG_S t4, 7*SZREG(a0), 10f
addi a0, a0, 8*SZREG
addi a1, a1, 8*SZREG
bleu a0, t0, 2b
addi t0, t0, 8*SZREG	/* restore true end for the byte tail */
j .Lbyte_copy_tail
.Lshift_copy:
/*
 * src misaligned by a3 bytes: do aligned word loads from src and stitch
 * each dst word from two neighbours.
 *   t1 = word-aligned end of dst stores
 *   t3 = misalignment in bits, t4 = SZREG*8 - t3
 * Note a2 (the length) is reused as scratch here; t5/t0 still hold the
 * end addresses, so nothing is lost.
 */
andi t1, t0, ~(SZREG-1)
andi a1, a1, ~(SZREG-1)	/* round src down to its word boundary */
slli t3, a3, 3		/* bit shift = byte offset * 8 */
li a5, SZREG*8
sub t4, a5, t3
fixup REG_L a5, 0(a1), 10f	/* preload first aligned src word */
3:
srl a4, a5, t3		/* low part from current word */
fixup REG_L a5, SZREG(a1), 10f	/* fetch next word */
addi a1, a1, SZREG
sll a2, a5, t4		/* high part from next word */
or a2, a2, a4
fixup REG_S a2, 0(a0), 10f
addi a0, a0, SZREG
bltu a0, t1, 3b
add a1, a1, a3		/* undo the round-down: back to the byte position */
.Lbyte_copy_tail:
/* Copy any remaining bytes until dst reaches the end (t0). */
bgeu a0, t0, .Lout_copy_user
4:
fixup lb a5, 0(a1), 10f
addi a1, a1, 1
fixup sb a5, 0(a0), 10f
addi a0, a0, 1
bltu a0, t0, 4b
.Lout_copy_user:
li a0, 0		/* success: 0 bytes left */
ret
/* Fault fixup: any faulting access above lands here. */
10:
sub a0, t5, a0		/* return bytes not yet stored to dst */
ret
SYM_FUNC_END(fallback_scalar_usercopy_sum_enabled)
/*
 * unsigned long __clear_user(void __user *to, unsigned long n)
 *
 * In:   a0 = dst, a1 = byte count
 * Out:  a0 = number of bytes NOT cleared (0 on success)
 *
 * Zeroes user memory with SR_SUM enabled around the accesses. t6 holds
 * the SR_SUM mask for both the set (csrs) and clear (csrc) and must
 * stay live across all loops — none of them touch t6.
 *
 * Register roles:
 *   a3 = end address (dst + n)
 *   t0 = first SZREG-aligned address at/after dst (round up)
 *   t1 = last SZREG-aligned address at/before the end (round down)
 */
SYM_FUNC_START(__clear_user)
li t6, SR_SUM
csrs CSR_STATUS, t6		/* allow S-mode access to user memory */
add a3, a0, a1			/* a3 = end address */
addi t0, a0, SZREG-1
andi t1, a3, ~(SZREG-1)		/* t1 = word-aligned end (round down) */
andi t0, t0, ~(SZREG-1)		/* t0 = word-aligned start (round up) */
bgeu t0, t1, 2f			/* no full word in range: byte-wise only */
bltu a0, t0, 4f			/* clear unaligned head bytes first */
/* Word-wise clear from t0 up to t1. */
1:
fixup REG_S, zero, (a0), 11f
addi a0, a0, SZREG
bltu a0, t1, 1b
/* Clear any trailing bytes up to the end. */
2:
bltu a0, a3, 5f
3:
csrc CSR_STATUS, t6		/* drop user-memory access */
li a0, 0			/* success: 0 bytes left */
ret
/* Head: byte-wise clear until the aligned start t0, then join the word loop. */
4:
fixup sb, zero, (a0), 11f
addi a0, a0, 1
bltu a0, t0, 4b
j 1b
/* Tail: byte-wise clear until the end a3. */
5:
fixup sb, zero, (a0), 11f
addi a0, a0, 1
bltu a0, a3, 5b
j 3b
/* Fault fixup: restore SUM state and report bytes not cleared. */
11:
csrc CSR_STATUS, t6
sub a0, a3, a0			/* bytes remaining from fault point to end */
ret
SYM_FUNC_END(__clear_user)
EXPORT_SYMBOL(__clear_user)