#include <asm/asm-offsets.h>
#include <asm/asm.h>
#include <linux/init.h>
#include <linux/linkage.h>
#include <asm/thread_info.h>
#include <asm/page.h>
#include <asm/pgtable.h>
#include <asm/csr.h>
#include <asm/hwcap.h>
#include <asm/image.h>
#include <asm/scs.h>
#include <asm/xip_fixup.h>
#include <asm/usercfi.h>
#include "efi-header.S"
__HEAD
SYM_CODE_START(_start)
/*
 * Image header expected by Linux boot-loaders. The image header data
 * structure is described in asm/image.h.
 * Do not modify it without modifying the structure and all bootloaders
 * that expect this header format!
 */
#ifdef CONFIG_EFI
/*
 * This instruction decodes to "MZ" ASCII, required by UEFI firmware
 * that inspects the first two bytes of the image.
 */
c.li s4,-13
j _start_kernel
#else
/* jump to start kernel */
j _start_kernel
/* reserved */
.word 0
#endif
.balign 8
#ifdef CONFIG_RISCV_M_MODE
/* Image load offset (0MB) from start of RAM for M-mode */
.dword 0
#else
#if __riscv_xlen == 64
/* Image load offset (2MB) from start of RAM */
.dword 0x200000
#else
/* Image load offset (4MB) from start of RAM */
.dword 0x400000
#endif
#endif
/* Effective size of kernel image */
.dword _end - _start
.dword __HEAD_FLAGS
.word RISCV_HEADER_VERSION
.word 0
/* reserved */
.dword 0
.ascii RISCV_IMAGE_MAGIC
.balign 4
.ascii RISCV_IMAGE_MAGIC2
#ifdef CONFIG_EFI
/* Offset from _start to the PE/COFF header used for EFI boot */
.word pe_head_start - _start
pe_head_start:
__EFI_PE_HEADER
#else
.word 0
#endif
.align 2
#ifdef CONFIG_MMU
/*
 * relocate_enable_mmu: turn on paging and jump to virtual addresses.
 * In:  a0 = physical address of the page directory to switch to
 *      (callers below pass early_pg_dir or swapper_pg_dir)
 * Adjusts ra so that the final `ret` lands at the caller's
 * *virtual* address. Clobbers a1, a2.
 */
.global relocate_enable_mmu
relocate_enable_mmu:
/* Relocate return address: a1 = kernel virtual base - physical _start */
la a1, kernel_map
XIP_FIXUP_OFFSET a1
REG_L a1, KERNEL_MAP_VIRT_ADDR(a1)
la a2, _start
sub a1, a1, a2
add ra, ra, a1
/* Point stvec to the virtual address of the instruction after the satp write */
la a2, 1f
add a2, a2, a1
csrw CSR_TVEC, a2
/* Compute satp for the kernel page tables in a2, but don't load it yet */
srl a2, a0, PAGE_SHIFT
la a1, satp_mode
XIP_FIXUP_OFFSET a1
REG_L a1, 0(a1)
or a2, a2, a1
/*
 * Load the trampoline page directory, which will cause us to trap to
 * stvec if VA != PA, or simply fall through if VA == PA.  A full
 * sfence.vma is needed here because the page tables were only just
 * written and the new translations must be made visible.
 */
la a0, trampoline_pg_dir
XIP_FIXUP_OFFSET a0
srl a0, a0, PAGE_SHIFT
or a0, a0, a1
sfence.vma
csrw CSR_SATP, a0
.align 2
1:
/* Set trap vector to spin forever to help debug */
la a0, .Lsecondary_park
csrw CSR_TVEC, a0
/* Reload the global pointer, now as a virtual address */
load_global_pointer
/*
 * Switch to the kernel page tables (satp value computed above in a2).
 * A full fence avoids continuing on the trampoline translations,
 * which only cover the initial mapping.
 */
csrw CSR_SATP, a2
sfence.vma
ret
#endif
#ifdef CONFIG_SMP
/*
 * secondary_start_sbi: entry point for secondary harts started via SBI.
 * In: a0 = hartid, a1 = pointer to this hart's boot data block
 *     (task/stack pointers live at the offsets loaded below).
 */
.global secondary_start_sbi
secondary_start_sbi:
/* Mask all interrupts */
csrw CSR_IE, zero
csrw CSR_IP, zero
#ifndef CONFIG_RISCV_M_MODE
/* Allow user-mode access to the time CSR (bit 1 of scounteren) */
li t0, 0x2
csrw CSR_SCOUNTEREN, t0
#endif
/* Load the global pointer */
load_global_pointer
/*
 * Disable FPU & VECTOR to detect illegal usage of floating point
 * or vector in kernel space
 */
li t0, SR_FS_VS
csrc CSR_STATUS, t0
/* Set trap vector to spin forever to help debug */
la a3, .Lsecondary_park
csrw CSR_TVEC, a3
/* tp = boot task pointer read from the per-hart boot data in a1 */
li a2, SBI_HART_BOOT_TASK_PTR_OFFSET
XIP_FIXUP_OFFSET a2
add a2, a2, a1
REG_L tp, (a2)
/* sp = boot stack pointer read from the per-hart boot data in a1 */
li a3, SBI_HART_BOOT_STACK_PTR_OFFSET
XIP_FIXUP_OFFSET a3
add a3, a3, a1
REG_L sp, (a3)
.Lsecondary_start_common:
#ifdef CONFIG_MMU
/* Enable virtual memory using the final kernel page tables */
la a0, swapper_pg_dir
XIP_FIXUP_OFFSET a0
call relocate_enable_mmu
#endif
call .Lsetup_trap_vector
#if defined(CONFIG_RISCV_SBI) && defined(CONFIG_RISCV_USER_CFI)
/*
 * Ask SBI (FWFT extension) to enable and lock the shadow-stack
 * feature.  If the call fails (a0 != 0), record in riscv_nousercfi
 * that backward-edge user CFI is unavailable.
 */
li a7, SBI_EXT_FWFT
li a6, SBI_EXT_FWFT_SET
li a0, SBI_FWFT_SHADOW_STACK
li a1, 1
li a2, SBI_FWFT_SET_FLAG_LOCK
ecall
beqz a0, 1f
la a1, riscv_nousercfi
li a0, CMDLINE_DISABLE_RISCV_USERCFI_BCFI
REG_S a0, (a1)
1:
#endif
scs_load_current
call smp_callin
#endif
/* Harts with nothing to do park here, sleeping on wfi */
.align 2
.Lsecondary_park:
wfi
j .Lsecondary_park
/* Install the real trap handler and zero the scratch CSR */
.align 2
.Lsetup_trap_vector:
la a0, handle_exception
csrw CSR_TVEC, a0
/* Zero scratch CSR -- presumably signals "in kernel" to the trap entry path; confirm against the exception entry code */
csrw CSR_SCRATCH, zero
ret
SYM_CODE_END(_start)
SYM_CODE_START(_start_kernel)
/* Mask all interrupts */
csrw CSR_IE, zero
csrw CSR_IP, zero
#ifdef CONFIG_RISCV_M_MODE
/* Flush the instruction cache before running freshly-loaded code */
fence.i
/* Reset all registers to a known state (reset_regs preserves ra, a0, a1) */
call reset_regs
/*
 * Set up a PMP granting access to all of memory.  Some machines may
 * not have been configured with such a PMP by earlier boot stages;
 * trap straight to .Lpmp_done if the PMP CSRs are not implemented.
 */
la a0, .Lpmp_done
csrw CSR_TVEC, a0
li a0, -1
csrw CSR_PMPADDR0, a0
li a0, (PMP_A_NAPOT | PMP_R | PMP_W | PMP_X)
csrw CSR_PMPCFG0, a0
.align 2
.Lpmp_done:
/* Running in M-mode with no firmware below us: fetch our own hartid */
csrr a0, CSR_MHARTID
#else
/* Allow user-mode access to the time CSR (bit 1 of scounteren) */
li t0, 0x2
csrw CSR_SCOUNTEREN, t0
#endif
/* Load the global pointer */
load_global_pointer
/*
 * Disable FPU & VECTOR to detect illegal usage of floating point
 * or vector in kernel space
 */
li t0, SR_FS_VS
csrc CSR_STATUS, t0
#ifdef CONFIG_RISCV_BOOT_SPINWAIT
/* Park harts whose id (a0) is outside the configured CPU range */
li t0, CONFIG_NR_CPUS
blt a0, t0, .Lgood_cores
tail .Lsecondary_park
.Lgood_cores:
#ifndef CONFIG_XIP_KERNEL
/* Hart lottery: the first hart to increment the counter boots the kernel */
la a3, hart_lottery
li a2, 1
amoadd.w a3, a2, (a3)
bnez a3, .Lsecondary_start
#else
/*
 * XIP variant of the lottery: the flash copy of hart_lottery holds the
 * original value; only the first hart to atomically swap the RAM copy
 * still sees it match, and that hart wins the boot.
 */
la a3, hart_lottery
mv a2, a3
XIP_FIXUP_OFFSET a2
XIP_FIXUP_FLASH_OFFSET a3
lw t1, (a3)
amoswap.w t0, t1, (a2)
beq t0, t1, .Lsecondary_start
#endif
#endif
#ifdef CONFIG_XIP_KERNEL
/* Copy writable data out of flash, using a temporary stack above _end */
la sp, _end + THREAD_SIZE
XIP_FIXUP_OFFSET sp
/* Preserve a0/a1 (clobbered by the call) in callee-saved registers */
mv s0, a0
mv s1, a1
call __copy_data
/* Restore a0 & a1 */
mv a0, s0
mv a1, s1
#endif
#ifndef CONFIG_XIP_KERNEL
/* Clear the BSS one pointer-sized store at a time */
la a3, __bss_start
la a4, __bss_stop
ble a4, a3, .Lclear_bss_done
.Lclear_bss:
REG_S zero, (a3)
add a3, a3, RISCV_SZPTR
blt a3, a4, .Lclear_bss
.Lclear_bss_done:
#endif
/* Save the boot hart ID (a0) for later use */
la a2, boot_cpu_hartid
XIP_FIXUP_OFFSET a2
REG_S a0, (a2)
/* Set up an initial C environment: init task, init stack */
la tp, init_task
la sp, init_thread_union + THREAD_SIZE
XIP_FIXUP_OFFSET sp
addi sp, sp, -PT_SIZE_ON_STACK
scs_load_init_stack
#ifdef CONFIG_BUILTIN_DTB
/* Use the device tree linked into the kernel image */
la a0, __dtb_start
XIP_FIXUP_OFFSET a0
#else
/* a1 carries the DTB pointer handed over by the previous boot stage */
mv a0, a1
#endif
/* Set trap vector to spin forever to help debug */
la a3, .Lsecondary_park
csrw CSR_TVEC, a3
/* Build the early page tables (a0 = DTB), then enable the MMU */
call setup_vm
#ifdef CONFIG_MMU
la a0, early_pg_dir
XIP_FIXUP_OFFSET a0
call relocate_enable_mmu
#endif
call .Lsetup_trap_vector
/* Reload tp/sp, now as virtual addresses */
la tp, init_task
la sp, init_thread_union + THREAD_SIZE
addi sp, sp, -PT_SIZE_ON_STACK
#if defined(CONFIG_RISCV_SBI) && defined(CONFIG_RISCV_USER_CFI)
/*
 * Ask SBI (FWFT extension) to enable and lock the shadow-stack
 * feature.  If the call fails (a0 != 0), record in riscv_nousercfi
 * that backward-edge user CFI is unavailable.
 */
li a7, SBI_EXT_FWFT
li a6, SBI_EXT_FWFT_SET
li a0, SBI_FWFT_SHADOW_STACK
li a1, 1
li a2, SBI_FWFT_SET_FLAG_LOCK
ecall
beqz a0, 1f
la a1, riscv_nousercfi
li a0, CMDLINE_DISABLE_RISCV_USERCFI_BCFI
REG_S a0, (a1)
1:
#endif
scs_load_current
#ifdef CONFIG_KASAN
call kasan_early_init
#endif
/* Early SoC quirks, then tail-call into generic kernel startup */
call soc_early_init
tail start_kernel
#ifdef CONFIG_RISCV_BOOT_SPINWAIT
/* Spinwait secondaries: lottery losers wait here for the boot hart */
.Lsecondary_start:
/* Set trap vector to spin forever to help debug */
la a3, .Lsecondary_park
csrw CSR_TVEC, a3
/* a3 = byte offset of this hart's slot (hartid << log2(register size)) */
slli a3, a0, LGREG
la a1, __cpu_spinwait_stack_pointer
XIP_FIXUP_OFFSET a1
la a2, __cpu_spinwait_task_pointer
XIP_FIXUP_OFFSET a2
add a1, a3, a1
add a2, a3, a2
/* Spin until the boot hart publishes both a stack and a task pointer */
.Lwait_for_cpu_up:
REG_L sp, (a1)
REG_L tp, (a2)
beqz sp, .Lwait_for_cpu_up
beqz tp, .Lwait_for_cpu_up
/* Order the loads above before any use of the published sp/tp */
fence
tail .Lsecondary_start_common
#endif
SYM_CODE_END(_start_kernel)
#ifdef CONFIG_RISCV_M_MODE
/*
 * reset_regs: zero the GPRs (and FP/vector state when present) so the
 * kernel does not inherit register state from the preceding boot stage.
 * Deliberately left untouched: ra (needed to return) and a0/a1 (the
 * caller's hartid/boot arguments, preserved across this call).
 */
SYM_CODE_START_LOCAL(reset_regs)
li sp, 0
li gp, 0
li tp, 0
li t0, 0
li t1, 0
li t2, 0
li s0, 0
li s1, 0
li a2, 0
li a3, 0
li a4, 0
li a5, 0
li a6, 0
li a7, 0
li s2, 0
li s3, 0
li s4, 0
li s5, 0
li s6, 0
li s7, 0
li s8, 0
li s9, 0
li s10, 0
li s11, 0
li t3, 0
li t4, 0
li t5, 0
li t6, 0
/* Clear the machine-mode scratch register */
csrw CSR_SCRATCH, 0
#ifdef CONFIG_FPU
/* Only touch FP state when misa advertises the F or D extension */
csrr t0, CSR_MISA
andi t0, t0, (COMPAT_HWCAP_ISA_F | COMPAT_HWCAP_ISA_D)
beqz t0, .Lreset_regs_done_fpu
/* Turn the FPU on so f0-f31 and fcsr become writable */
li t1, SR_FS
csrs CSR_STATUS, t1
fmv.s.x f0, zero
fmv.s.x f1, zero
fmv.s.x f2, zero
fmv.s.x f3, zero
fmv.s.x f4, zero
fmv.s.x f5, zero
fmv.s.x f6, zero
fmv.s.x f7, zero
fmv.s.x f8, zero
fmv.s.x f9, zero
fmv.s.x f10, zero
fmv.s.x f11, zero
fmv.s.x f12, zero
fmv.s.x f13, zero
fmv.s.x f14, zero
fmv.s.x f15, zero
fmv.s.x f16, zero
fmv.s.x f17, zero
fmv.s.x f18, zero
fmv.s.x f19, zero
fmv.s.x f20, zero
fmv.s.x f21, zero
fmv.s.x f22, zero
fmv.s.x f23, zero
fmv.s.x f24, zero
fmv.s.x f25, zero
fmv.s.x f26, zero
fmv.s.x f27, zero
fmv.s.x f28, zero
fmv.s.x f29, zero
fmv.s.x f30, zero
fmv.s.x f31, zero
csrw fcsr, 0
/* note that the caller must clear SR_FS again if it wants FP disabled */
.Lreset_regs_done_fpu:
#endif
#ifdef CONFIG_RISCV_ISA_V
/* Only touch vector state when misa advertises the V extension */
csrr t0, CSR_MISA
li t1, COMPAT_HWCAP_ISA_V
and t0, t0, t1
beqz t0, .Lreset_regs_done_vector
/* Enable the vector unit so vector registers become writable */
li t1, SR_VS
csrs CSR_STATUS, t1
/* NOTE(review): csrs with rs1=x0 sets no bits and performs no CSR write per the ISA, so this does not actually clear VCSR; presumably csrw was intended -- confirm */
csrs CSR_VCSR, x0
/*
 * vsetvli with rs1=x0 and rd!=x0 sets vl to VLMAX; with e8/m8 each
 * vmv.v.i below clears a group of 8 registers, covering v0-v31.
 */
vsetvli t1, x0, e8, m8, ta, ma
vmv.v.i v0, 0
vmv.v.i v8, 0
vmv.v.i v16, 0
vmv.v.i v24, 0
/* note that the caller must clear SR_VS again if it wants vector disabled */
.Lreset_regs_done_vector:
#endif
ret
SYM_CODE_END(reset_regs)
#endif