#include <arch/arm/arch_cpu.h>
#include <arch/arm/arch_cpu_defs.h>
#include <asm_defs.h>
#include "asm_offsets.h"
#include "syscall_numbers.h"
.text
/* void arch_int_enable_interrupts(void)
 * Enables IRQ delivery by clearing the I bit (bit 7) in the CPSR.
 * Clobbers: r0. */
FUNCTION(arch_int_enable_interrupts):
mrs r0, cpsr			/* r0 = current CPSR */
bic r0, r0, #(1<<7)		/* clear I bit (IRQ disable) */
msr cpsr_c, r0			/* write back control field only */
bx lr
FUNCTION_END(arch_int_enable_interrupts)
/* int arch_int_disable_interrupts(void)
 * Disables IRQ delivery by setting the I bit in the CPSR.
 * Returns the *previous* CPSR in r0 so the caller can hand it to
 * arch_int_restore_interrupts() later.  Clobbers: r1. */
FUNCTION(arch_int_disable_interrupts):
mrs r0, cpsr			/* r0 = old CPSR (the return value) */
orr r1, r0, #(1<<7)		/* set I bit in a scratch copy ... */
msr cpsr_c, r1			/* ... so r0 still holds the old state */
bx lr
FUNCTION_END(arch_int_disable_interrupts)
/* void arch_int_restore_interrupts(int oldState)
 * Restores the IRQ-disable state previously returned by
 * arch_int_disable_interrupts().  Only the I bit of the argument is
 * honored; all other CPSR bits keep their current values.
 * In: r0 = saved CPSR.  Clobbers: r0, r1. */
FUNCTION(arch_int_restore_interrupts):
mrs r1, cpsr			/* r1 = current CPSR */
and r0, r0, #(1<<7)		/* isolate saved I bit */
bic r1, r1, #(1<<7)		/* clear current I bit ... */
orr r1, r1, r0			/* ... and merge the saved one in */
msr cpsr_c, r1
bx lr
FUNCTION_END(arch_int_restore_interrupts)
/* bool arch_int_are_interrupts_enabled(void)
 * Returns 1 in r0 if IRQs are currently enabled (CPSR I bit clear),
 * 0 otherwise. */
FUNCTION(arch_int_are_interrupts_enabled):
mrs r0, cpsr
and r0, r0, #(1<<7)		/* isolate I bit */
cmp r0, #0
moveq r0, #1			/* I bit clear -> interrupts enabled */
movne r0, #0			/* I bit set   -> interrupts disabled */
bx lr
FUNCTION_END(arch_int_are_interrupts_enabled)
/* void arm_context_switch(from, to)
 * Cooperative context switch: saves r0-r12 and lr on the current
 * stack, parks the stack pointer through r0, loads the new stack
 * pointer through r1, and pops the new thread's registers.
 * NOTE(review): r0/r1 presumably point at the arch thread structs'
 * saved-sp slot (str/ldr of sp at offset 0) — confirm against the
 * arch_thread definition.  Callee-saved registers beyond the ARM
 * EABI set are covered because the full r0-r12 range is saved. */
FUNCTION(arm_context_switch):
stmfd sp!, { r0-r12, lr }	/* save outgoing thread's registers */
str sp, [r0]			/* *from = current sp */
ldr sp, [r1]			/* sp = *to */
ldmfd sp!, { r0-r12, lr }	/* restore incoming thread's registers */
bx lr
FUNCTION_END(arm_context_switch)
/* void arm_save_fpu(void *state)
 * Stores the full VFP register file (d0-d31) followed by FPSCR into
 * the buffer at r0.  Layout: 32 doublewords, then one word of FPSCR.
 * NOTE(review): d16-d31 require VFPv3-D32; on a D16-only core the
 * second fstmiad is invalid — confirm the minimum FPU target.
 * Clobbers: r0 (advanced past the d-registers), r1. */
FUNCTION(arm_save_fpu):
fstmiad r0!, {d0-d15}		/* store low bank, post-increment r0 */
fstmiad r0!, {d16-d31}		/* store high bank (VFPv3-D32) */
vmrs r1, fpscr			/* read FP status/control */
str r1, [r0]			/* append FPSCR after the registers */
bx lr
FUNCTION_END(arm_save_fpu)
/* void arm_restore_fpu(const void *state)
 * Reloads the VFP register file (d0-d31) and FPSCR from the buffer
 * at r0 — the exact inverse of arm_save_fpu, same layout.
 * NOTE(review): d16-d31 require VFPv3-D32 (see arm_save_fpu).
 * Clobbers: r0, r1. */
FUNCTION(arm_restore_fpu):
fldmiad r0!, {d0-d15}		/* load low bank, post-increment r0 */
fldmiad r0!, {d16-d31}		/* load high bank (VFPv3-D32) */
ldr r1, [r0]			/* FPSCR word follows the registers */
vmsr fpscr, r1
bx lr
FUNCTION_END(arm_restore_fpu)
/* status_t _arch_cpu_user_memcpy(void *to, const void *from, size_t size,
 *	addr_t *faultHandler)
 * Copies `size` bytes between kernel and user space with a fault
 * handler installed, so a bad user address aborts the copy instead of
 * panicking.  In: r0 = to, r1 = from, r2 = size, r3 = faultHandler
 * slot.  Returns 0 on success, -1 if a fault occurred.
 * NOTE(review): the word loop does unaligned LDR/STR when to/from are
 * not 4-byte aligned — acceptable on ARMv6+ with U=1, confirm target.
 *
 * Fix: the original entered the word loop unconditionally, so for
 * size < 4 the count (size >> 2 == 0) wrapped to 0xFFFFFFFF on the
 * first `subs`, copying ~16 GB.  Guard both loops on a zero count. */
FUNCTION(_arch_cpu_user_memcpy):
	stmfd	sp!, { r4-r6, lr }

	ldr	r6, [r3]		/* remember previous fault handler */
	ldr	r4, =.L_user_memcpy_error
	str	r4, [r3]		/* install our fault handler */

	movs	r4, r2, lsr #2		/* r4 = number of whole words */
	beq	2f			/* size < 4: skip the word loop */
1:
	ldr	r5, [r1]
	str	r5, [r0]
	add	r1, #4
	add	r0, #4
	subs	r4, #1
	bne	1b
2:
	ands	r4, r2, #3		/* r4 = trailing byte count */
	beq	4f			/* none left: done */
3:
	ldrb	r5, [r1]
	strb	r5, [r0]
	add	r1, #1
	add	r0, #1
	subs	r4, #1
	bne	3b
4:
	str	r6, [r3]		/* restore previous fault handler */
	mov	r0, #0			/* success */
	ldmfd	sp!, { r4-r6, pc }

	/* Fault handler: entered if any access above faulted. */
.L_user_memcpy_error:
	str	r6, [r3]		/* restore previous fault handler */
	mov	r0, #-1			/* failure */
	ldmfd	sp!, { r4-r6, pc }
FUNCTION_END(_arch_cpu_user_memcpy)
/* status_t _arch_cpu_user_memset(void *to, char c, size_t count,
 *	addr_t *faultHandler)
 * Fills `count` bytes at the (possibly user-space) address in r0 with
 * byte c, under an installed fault handler.  In: r0 = to, r1 = c,
 * r2 = count, r3 = faultHandler slot.  Returns 0 on success, -1 on
 * fault.
 *
 * Fixes vs. the original:
 *  - byte replication: two ORRs (<<8 then <<16) already give
 *    c * 0x01010101; the original's third `add ... lsl #24`
 *    corrupted the top byte (e.g. c=1 produced 0x02010101);
 *  - count < 4 made the unguarded word loop wrap its counter and
 *    fill ~16 GB — guard with movs/beq;
 *  - the byte-remainder loop always ran at least once (plain `and`,
 *    no beq), writing up to 4 stray bytes when count % 4 == 0. */
FUNCTION(_arch_cpu_user_memset):
	stmfd	sp!, { r4-r5, lr }

	ldr	r5, [r3]		/* remember previous fault handler */
	ldr	r4, =.L_user_memset_error
	str	r4, [r3]		/* install our fault handler */

	and	r1, r1, #0xff
	orr	r1, r1, r1, lsl #8	/* replicate byte into halfword */
	orr	r1, r1, r1, lsl #16	/* ... and into the full word */

	movs	r4, r2, lsr #2		/* r4 = number of whole words */
	beq	2f			/* count < 4: skip word loop */
1:
	str	r1, [r0]
	add	r0, r0, #4
	subs	r4, r4, #1
	bne	1b
2:
	ands	r4, r2, #3		/* r4 = trailing byte count */
	beq	4f			/* none left: done */
3:
	strb	r1, [r0]
	add	r0, r0, #1
	subs	r4, r4, #1
	bne	3b
4:
	mov	r0, #0			/* success */
	str	r5, [r3]		/* restore previous fault handler */
	ldmfd	sp!, { r4-r5, pc }

	/* Fault handler: entered if any store above faulted. */
.L_user_memset_error:
	mov	r0, #-1			/* failure */
	str	r5, [r3]		/* restore previous fault handler */
	ldmfd	sp!, { r4-r5, pc }
FUNCTION_END(_arch_cpu_user_memset)
/* ssize_t _arch_cpu_user_strlcpy(char *to, const char *from, size_t size,
 *	addr_t *faultHandler)
 * strlcpy() across the kernel/user boundary: copies at most size-1
 * bytes, always NUL-terminates `to` when size > 0, and returns
 * strlen(from) so the caller's `result >= size` truncation check
 * works.  Returns -1 if a fault occurred.
 * In: r0 = to, r1 = from, r2 = size, r3 = faultHandler slot.
 *
 * Fixes vs. the original:
 *  - it stored a second NUL at to[len+1] after already copying the
 *    terminator, and on truncation stored at to[size] — one byte past
 *    the buffer; size == 0 still wrote one byte;
 *  - it returned bytes-written (len+1), which made the standard
 *    truncation check fire when the string exactly fit. */
FUNCTION(_arch_cpu_user_strlcpy):
	stmfd	sp!, { r4-r6, lr }

	ldr	r5, [r3]		/* remember previous fault handler */
	ldr	r4, =.L_user_strlcpy_error
	str	r4, [r3]		/* install our fault handler */

	mov	r6, #0			/* r6 = current index */
	cmp	r2, #0
	beq	3f			/* size == 0: write nothing, just
					   measure the source string */
	sub	r2, r2, #1		/* reserve room for the NUL */
1:
	cmp	r6, r2
	beq	2f			/* buffer full: truncate */
	ldrb	r4, [r1, r6]
	strb	r4, [r0, r6]		/* copy byte (incl. a NUL) */
	add	r6, r6, #1
	cmp	r4, #0
	bne	1b
	sub	r6, r6, #1		/* r6 = strlen(from) */
	b	4f
2:
	mov	r4, #0
	strb	r4, [r0, r6]		/* NUL-terminate truncated copy,
					   still within the buffer */
3:
	ldrb	r4, [r1, r6]		/* keep scanning the source ... */
	add	r6, r6, #1
	cmp	r4, #0
	bne	3b
	sub	r6, r6, #1		/* ... so r6 = strlen(from) */
4:
	mov	r0, r6			/* return source length */
	str	r5, [r3]		/* restore previous fault handler */
	ldmfd	sp!, { r4-r6, pc }

	/* Fault handler: entered if any access above faulted. */
.L_user_strlcpy_error:
	mov	r0, #-1			/* failure */
	str	r5, [r3]		/* restore previous fault handler */
	ldmfd	sp!, { r4-r6, pc }
FUNCTION_END(_arch_cpu_user_strlcpy)
/* void arch_debug_call_with_fault_handler(cpu_ent *cpu, jmp_buf jumpBuffer,
 *	void (*function)(void *), void *parameter)
 * Calls function(parameter) with a fault handler installed in the
 * per-CPU struct.  If the call faults, control resumes at label 1
 * below (with sp reset from fault_handler_stack_pointer), which pops
 * the saved jumpBuffer into r0 and longjmp()s to it with value 1.
 * In: r0 = cpu, r1 = jumpBuffer, r2 = function, r3 = parameter.
 * NOTE(review): the fault path's ldmfd { r0, r4, lr } must mirror the
 * stmfd { r1, r4, lr } layout — r1 slot pops into r0; keep in sync.
 * NOTE(review): `mov r4, r1` appears unused afterwards (the fault
 * path takes the jump buffer from the stack) — confirm before
 * removing. */
FUNCTION(arch_debug_call_with_fault_handler):
stmfd sp!, { r1, r4, lr }	/* save jumpBuffer, r4, return address */
ldr r4, =1f
str r4, [r0, #CPU_ENT_fault_handler]		/* fault -> label 1 */
str sp, [r0, #CPU_ENT_fault_handler_stack_pointer]	/* sp at fault */
mov r4, r1
mov r0, r3			/* first arg = parameter */
blx r2				/* function(parameter) */
ldmfd sp!, { r1, r4, pc }	/* normal return */
1:
ldmfd sp!, { r0, r4, lr }	/* r0 = saved jumpBuffer (r1 slot) */
mov r1, #1			/* longjmp value */
b longjmp			/* tail call; returns to setjmp site */
FUNCTION_END(arch_debug_call_with_fault_handler)
/* void arch_return_to_userland(struct iframe *iframe)
 * Drops from SVC mode into user mode using the register values stored
 * in the iframe at r0.  The user-banked sp/lr are set by briefly
 * switching to SYS mode (which shares those banked registers with USR
 * mode), then SVC mode is re-entered to finish: SPSR is set to a USR
 * state, r0-r12 are popped from the iframe, and `movs pc, lr` does
 * the exception-return (copies SPSR into CPSR).
 * NOTE(review): the CPSR_F handling keeps FIQs disabled and IRQs (I
 * bit cleared) enabled in user mode — confirm that is intended.
 * NOTE(review): `add sp, sp, #4` presumably skips the iframe's first
 * word before the r0-r12 block — verify against struct iframe. */
FUNCTION(arch_return_to_userland):
/* Build the user-mode PSR in SPSR: USR mode, ARM state, IRQs on. */
mrs ip, cpsr
bic ip, ip, #(CPSR_MODE_MASK | CPSR_T | CPSR_F | CPSR_I)
orr ip, ip, #(CPSR_MODE_USR | CPSR_F)
msr spsr, ip
/* Fetch the user stack pointer and link register from the iframe. */
ldr r4, [r0, #IFRAME_usr_sp]
ldr r5, [r0, #IFRAME_usr_lr]
/* Switch to SYS mode, which banks sp/lr with USR mode. */
mrs ip, cpsr
bic ip, ip, #(CPSR_MODE_MASK)
orr ip, ip, #(CPSR_MODE_SYS)
msr cpsr, ip
mov sp, r4			/* user sp */
mov lr, r5			/* user lr */
/* Back to SVC mode to perform the exception return. */
bic ip, ip, #(CPSR_MODE_MASK)
orr ip, ip, #(CPSR_MODE_SVC)
msr cpsr, ip
ldr lr, [r0, #IFRAME_pc]	/* user resume address */
mov sp, r0
add sp, sp, #4			/* skip first iframe word */
ldmfd sp!, { r0-r12 }		/* user GP registers from iframe */
movs pc, lr			/* return: CPSR <- SPSR, pc <- lr */
FUNCTION_END(arch_return_to_userland)
/* void arch_user_thread_exit(void)
 * User-space thread epilogue: issues the exit-thread syscall via SVC.
 * The syscall does not return; `bx lr` is a defensive fallback. */
FUNCTION(arch_user_thread_exit):
svc SYSCALL_EXIT_THREAD		/* syscall number in the SVC immediate */
bx lr				/* not reached if the syscall succeeds */
FUNCTION_END(arch_user_thread_exit)