#include "assym.h"
#include <sys/mutex_impl.h>
#include <sys/asm_linkage.h>
#include <sys/asm_misc.h>
#include <sys/regset.h>
#include <sys/rwlock_impl.h>
#include <sys/lockstat.h>
/*
 * ERRATUM147_PATCH_POINT(name) reserves a 4-byte-aligned run of four nop
 * bytes labelled .<name>_147_patch_point.  On CPUs needing the Opteron
 * erratum 147 workaround, patch_erratum_147 (below) rewrites those four
 * bytes with 0x0f 0xae 0xe8 0x90 (lfence; nop) via hot_patch_kernel_text,
 * inserting a load fence just after the lock-acquiring instruction.
 * Expands to nothing when the workaround is not compiled in.
 */
#if defined(OPTERON_ERRATUM_147)
#define ERRATUM147_PATCH_POINT(name) \
.align 4, NOP_INSTR; \
.##name##_147_patch_point: \
nop; \
nop; \
nop; \
nop;
#else
#define ERRATUM147_PATCH_POINT(name)
#endif
/*
 * LOCKSTAT_RET(name) emits a labelled ret.  While the corresponding
 * lockstat probe is enabled, lockstat_hotpatch_site (below) replaces the
 * ret byte with a nop, so execution falls through into the probe-firing
 * code that each caller places immediately after this macro.
 */
#define LOCKSTAT_RET(name) \
.##name##_lockstat_patch_point: \
ret;
/* Boundary between user and kernel addresses; used by DEBUG checks below. */
.globl kernelbase
/*
 * int lock_try(lock_t *lp)
 *
 * In:   %rdi = lp (pointer to a one-byte lock)
 * Out:  %rax nonzero if the lock was acquired, zero if it was already held.
 *
 * xchgb atomically stores 0xff into the lock byte and returns the old
 * value in %dl.  %rax was primed with 0xff, so after the xor
 * %al == 0xff ^ old: nonzero exactly when the byte was previously clear.
 * The LOCKSTAT_RET ret is the normal return; when the probe is hotpatched
 * to a nop we fall through and, only on successful acquisition, tail-call
 * lockstat_wrapper (which returns 1 in %eax, preserving the contract).
 */
ENTRY(lock_try)
movb $-1, %dl
movzbq %dl, %rax		/* %rax = 0xff, success sentinel */
xchgb %dl, (%rdi)		/* atomic: set lock, old value -> %dl */
xorb %dl, %al			/* %al != 0 iff lock was free */
LOCKSTAT_RET(lock_try)
testb %al, %al
jnz 0f				/* acquired: fire the probe */
ret				/* failed: no probe, return 0 */
0:
movq %gs:CPU_THREAD, %rdx	/* curthread for the probe */
movq %rdi, %rsi			/* lock address */
movl $LS_LOCK_TRY_ACQUIRE, %edi
jmp lockstat_wrapper
SET_SIZE(lock_try)
/*
 * int lock_spin_try(lock_t *lp)
 *
 * Identical to lock_try (atomic xchg of 0xff; nonzero %rax on success)
 * but with no lockstat patch point, for use where probes must not fire.
 */
ENTRY(lock_spin_try)
movb $-1, %dl
movzbq %dl, %rax
xchgb %dl, (%rdi)		/* atomic: set lock, old value -> %dl */
xorb %dl, %al			/* %al != 0 iff lock was free */
ret
SET_SIZE(lock_spin_try)
/*
 * int ulock_try(lock_t *lp)
 *
 * Try-acquire for a userland lock byte: atomically store 1, return
 * nonzero if the byte was previously 0 (lock obtained).
 *
 * DEBUG builds panic if lp is not below kernelbase (i.e. not a user
 * address).  The lock pointer is parked in %r12 before the panic call --
 * presumably so the offending address survives into the crash dump, since
 * panic does not return (TODO confirm against debugging conventions).
 */
ENTRY(ulock_try)
#ifdef DEBUG
movq kernelbase(%rip), %rax
cmpq %rax, %rdi			/* user address must be < kernelbase */
jb ulock_pass
movq %rdi, %r12			/* preserve bad pointer across panic */
leaq .ulock_panic_msg(%rip), %rdi
pushq %rbp			/* set up a frame for the panic call */
movq %rsp, %rbp
xorl %eax, %eax			/* no vector args for varargs panic */
call panic
#endif
ulock_pass:
movl $1, %eax
xchgb %al, (%rdi)		/* atomic: set byte to 1, old -> %al */
xorb $1, %al			/* 1 if it was free, 0 if held */
ret
SET_SIZE(ulock_try)
#ifdef DEBUG
.data
.ulock_panic_msg:
.string "ulock_try: Argument is above kernelbase"
.text
#endif
/*
 * void lock_clear(lock_t *lp)
 *
 * Release the lock by storing 0.  The labelled ret normally returns;
 * when the LS_LOCK_CLEAR_RELEASE probe is hotpatched on we fall through
 * and fire it via lockstat_wrapper.
 */
ENTRY(lock_clear)
movb $0, (%rdi)
LOCKSTAT_RET(lock_clear)
movq %rdi, %rsi			/* lock address for the probe */
movq %gs:CPU_THREAD, %rdx	/* curthread */
movl $LS_LOCK_CLEAR_RELEASE, %edi
jmp lockstat_wrapper
SET_SIZE(lock_clear)
/*
 * void ulock_clear(lock_t *lp)
 *
 * Release a userland lock byte by storing 0.  DEBUG builds panic if lp
 * is not a user address (>= kernelbase), mirroring ulock_try above.
 */
ENTRY(ulock_clear)
#ifdef DEBUG
movq kernelbase(%rip), %rcx
cmpq %rcx, %rdi			/* user address must be < kernelbase */
jb ulock_clr
leaq .ulock_clear_msg(%rip), %rdi
pushq %rbp			/* set up a frame for the panic call */
movq %rsp, %rbp
xorl %eax, %eax			/* no vector args for varargs panic */
call panic
#endif
ulock_clr:
movb $0, (%rdi)
ret
SET_SIZE(ulock_clear)
#ifdef DEBUG
.data
.ulock_clear_msg:
.string "ulock_clear: Argument is above kernelbase"
.text
#endif
/*
 * void lock_set_spl(lock_t *lp, int new_pil, ushort_t *old_pil)
 *
 * Raise the PIL to new_pil via splr(), then acquire the lock.  On
 * success the PIL returned by splr() is stored through old_pil.  On
 * contention, tail-jump to lock_set_spl_spin(lp, new_pil, old_pil,
 * old_pil_value) with the frame already torn down.
 *
 * Local stack layout (32 bytes below %rbp frame):
 *   8(%rsp)  = new_pil   (caller's %esi, needed again on the miss path)
 *   16(%rsp) = old_pil   (caller's %rdx, clobbered by the splr call)
 *   24(%rsp) = lp        (caller's %rdi, clobbered by the splr call)
 */
ENTRY(lock_set_spl)
pushq %rbp
movq %rsp, %rbp
subq $32, %rsp
movl %esi, 8(%rsp)		/* save new_pil */
movq %rdx, 16(%rsp)		/* save old_pil pointer */
movq %rdi, 24(%rsp)		/* save lp */
movl %esi, %edi
call splr			/* raise PIL; old PIL -> %eax */
movq 24(%rsp), %rdi		/* restore lp */
movb $-1, %dl
xchgb %dl, (%rdi)		/* atomic: try to take the lock */
testb %dl, %dl
jnz .lss_miss			/* was already held: spin path */
movq 16(%rsp), %rdx
movw %ax, (%rdx)		/* *old_pil = PIL from splr */
leave
LOCKSTAT_RET(lock_set_spl)
movq %rdi, %rsi			/* lock address for the probe */
movq %gs:CPU_THREAD, %rdx	/* curthread */
movl $LS_LOCK_SET_SPL_ACQUIRE, %edi
jmp lockstat_wrapper
.lss_miss:
movl 8(%rsp), %esi		/* reload new_pil */
movq 16(%rsp), %rdx		/* reload old_pil pointer */
movl %eax, %ecx			/* 4th arg: PIL returned by splr */
leave
jmp lock_set_spl_spin		/* (lp, new_pil, old_pil, old_pil_value) */
SET_SIZE(lock_set_spl)
/*
 * void lock_init(lock_t *lp)
 *
 * Initialize the lock byte to the unheld (zero) state.
 */
ENTRY(lock_init)
movb $0, (%rdi)
ret
SET_SIZE(lock_init)
/*
 * void lock_set(lock_t *lp)
 *
 * Acquire the lock, tail-calling lock_set_spin on contention.  The
 * labelled ret is hotpatched to a nop while the LS_LOCK_SET_ACQUIRE
 * probe is enabled, falling through into the lockstat tail call.
 */
ENTRY(lock_set)
movb $-1, %dl
xchgb %dl, (%rdi)		/* atomic: set lock, old value -> %dl */
testb %dl, %dl
jnz lock_set_spin		/* already held: spin in C */
LOCKSTAT_RET(lock_set)
movq %rdi, %rsi			/* lock address for the probe */
movq %gs:CPU_THREAD, %rdx	/* curthread */
movl $LS_LOCK_SET_ACQUIRE, %edi
jmp lockstat_wrapper
SET_SIZE(lock_set)
/*
 * void lock_clear_splx(lock_t *lp, int s)
 *
 * Release the lock, then restore the PIL to s via splx().  lp is saved
 * on the stack across the splx call and popped into %rsi, which is
 * exactly where the lockstat tail call wants the lock address.
 */
ENTRY(lock_clear_splx)
pushq %rbp
movq %rsp, %rbp
pushq %rdi			/* save lp across splx */
movb $0, (%rdi)			/* release the lock first */
movl %esi, %edi
call splx			/* drop the PIL back to s */
popq %rsi			/* lp -> %rsi for the probe path */
leave
LOCKSTAT_RET(lock_clear_splx)
movq %gs:CPU_THREAD, %rdx	/* curthread */
movl $LS_LOCK_CLEAR_SPLX_RELEASE, %edi
jmp lockstat_wrapper
SET_SIZE(lock_clear_splx)
/*
 * void mutex_enter(kmutex_t *lp)
 *
 * Adaptive-mutex fast path: atomically swing the owner word from 0 to
 * curthread with lock cmpxchg.  Any failure (held, or waiters bits set)
 * tail-calls mutex_vector_enter.  The erratum-147 patch point may be
 * rewritten to an lfence on affected Opterons; the labelled ret is
 * hotpatched to a nop while the LS_MUTEX_ENTER_ACQUIRE probe is on.
 */
ENTRY_NP(mutex_enter)
movq %gs:CPU_THREAD, %rdx	/* %rdx = curthread (new owner) */
xorl %eax, %eax			/* expected owner word: 0 (unheld) */
lock
cmpxchgq %rdx, (%rdi)		/* atomic: 0 -> curthread */
jnz mutex_vector_enter		/* contended: take the slow path */
ERRATUM147_PATCH_POINT(mutex_enter)
LOCKSTAT_RET(mutex_enter)
movq %rdi, %rsi			/* lock address for the probe */
movl $LS_MUTEX_ENTER_ACQUIRE, %edi
jmp lockstat_wrapper
SET_SIZE(mutex_enter)
/*
 * Common lockstat probe dispatch, reached by tail call from the lock
 * fast paths above.
 *
 * In:  %edi = lockstat event id, %rsi = lock address, %rdx = curthread.
 * Out: %eax = 1, so callers with a boolean contract (lock_try,
 *      mutex_tryenter) still return success through this path.
 *
 * t_lockstat is incremented around the probe call; it is read elsewhere
 * (not visible here) -- presumably to mark the thread as inside the
 * lockstat path.  curthread is reloaded afterwards because %rdx is
 * caller-saved across the indirect probe call.
 */
ENTRY_NP(lockstat_wrapper)
incb T_LOCKSTAT(%rdx)		/* entering lockstat context */
leaq lockstat_probemap(%rip), %rax
movl (%rax, %rdi, DTRACE_IDSIZE), %eax	/* probe id for this event */
testl %eax, %eax
jz 1f				/* probe not enabled: skip the call */
pushq %rbp			/* frame for the indirect call */
movq %rsp, %rbp
movl %eax, %edi			/* arg0 = probe id; %rsi = lock */
movq lockstat_probe, %rax
INDIRECT_CALL_REG(rax)		/* (*lockstat_probe)(id, lock, thread) */
leave
1:
movq %gs:CPU_THREAD, %rdx	/* reload curthread (clobbered above) */
decb T_LOCKSTAT(%rdx)		/* leaving lockstat context */
movl $1, %eax			/* success for boolean callers */
ret
SET_SIZE(lockstat_wrapper)
/*
 * Like lockstat_wrapper, but with an extra probe argument.
 *
 * In:  %edi = lockstat event id, %rsi = lock address, %rdx = extra
 *      argument (e.g. RW_READER/RW_WRITER), %rcx = curthread.
 * Out: %eax = 1.
 *
 * Note curthread arrives in %rcx here (not %rdx) because %rdx carries
 * the extra probe argument; the rw_* callers set this up.
 */
ENTRY(lockstat_wrapper_arg)
incb T_LOCKSTAT(%rcx)		/* entering lockstat context */
leaq lockstat_probemap(%rip), %rax
movl (%rax, %rdi, DTRACE_IDSIZE), %eax	/* probe id for this event */
testl %eax, %eax
jz 1f				/* probe not enabled: skip the call */
pushq %rbp			/* frame for the indirect call */
movq %rsp, %rbp
movl %eax, %edi			/* arg0 = probe id; %rsi/%rdx preset */
movq lockstat_probe, %rax
INDIRECT_CALL_REG(rax)		/* (*lockstat_probe)(id, lock, arg) */
leave
1:
movq %gs:CPU_THREAD, %rdx	/* reload curthread (clobbered above) */
decb T_LOCKSTAT(%rdx)		/* leaving lockstat context */
movl $1, %eax			/* success for boolean callers */
ret
SET_SIZE(lockstat_wrapper_arg)
/*
 * int mutex_tryenter(kmutex_t *lp)
 *
 * Non-blocking mutex_enter.  On a successful cmpxchg %eax is 0, so
 * `not %eax` yields the nonzero success return; failure goes to
 * mutex_vector_tryenter, which decides the result for non-adaptive or
 * contended cases.  Patch points mirror mutex_enter.
 */
ENTRY(mutex_tryenter)
movq %gs:CPU_THREAD, %rdx	/* %rdx = curthread (new owner) */
xorl %eax, %eax			/* expected owner word: 0 (unheld) */
lock
cmpxchgq %rdx, (%rdi)		/* atomic: 0 -> curthread */
jnz mutex_vector_tryenter	/* contended: slow path decides */
not %eax			/* 0 -> nonzero success value */
ERRATUM147_PATCH_POINT(mutex_tryenter)
LOCKSTAT_RET(mutex_tryenter)
movq %rdi, %rsi			/* lock address for the probe */
movl $LS_MUTEX_TRYENTER_ACQUIRE, %edi
jmp lockstat_wrapper
SET_SIZE(mutex_tryenter)
/*
 * int mutex_adaptive_tryenter(mutex_impl_t *lp)
 *
 * Same fast path as mutex_tryenter but with no lockstat patch point and
 * no slow-path call: on contention it simply returns 0.
 */
ENTRY(mutex_adaptive_tryenter)
movq %gs:CPU_THREAD, %rdx	/* %rdx = curthread (new owner) */
xorl %eax, %eax			/* expected owner word: 0 (unheld) */
lock
cmpxchgq %rdx, (%rdi)		/* atomic: 0 -> curthread */
jnz 0f				/* held: fail */
not %eax			/* 0 -> nonzero success value */
ERRATUM147_PATCH_POINT(mutex_atryenter)
ret
0:
xorl %eax, %eax			/* failure: return 0 */
ret
SET_SIZE(mutex_adaptive_tryenter)
/*
 * cpu_t *mutex_owner_running(mutex_impl_t *lp)
 *
 * If the mutex has an owner and that owner is currently the running
 * thread on its CPU, return that CPU; otherwise return NULL.
 *
 * The start/end labels bound a critical range whose size is exported
 * below as mutex_owner_running_critical_size.  NOTE(review): the
 * consumer of these symbols is not visible here; presumably the
 * interrupt machinery uses the range to restart a thread caught inside
 * it, so the owner->t_cpu->cpu_thread chain is never dereferenced with
 * stale pointers -- confirm against the code that reads these symbols.
 */
.globl mutex_owner_running_critical_start
ENTRY(mutex_owner_running)
mutex_owner_running_critical_start:
movq (%rdi), %r11		/* owner word */
andq $MUTEX_THREAD, %r11	/* mask off lock/waiter bits */
cmpq $0, %r11
je 1f				/* no owner: return NULL */
movq T_CPU(%r11), %r8		/* CPU the owner last ran on */
movq CPU_THREAD(%r8), %r9	/* thread currently on that CPU */
.mutex_owner_running_critical_end:
cmpq %r11, %r9
je 2f				/* owner is running there */
1:
xorq %rax, %rax			/* not running: NULL */
ret
2:
movq %r8, %rax			/* running: return its CPU */
ret
SET_SIZE(mutex_owner_running)
.globl mutex_owner_running_critical_size
.type mutex_owner_running_critical_size, @object
.align CPTRSIZE
mutex_owner_running_critical_size:
.quad .mutex_owner_running_critical_end - mutex_owner_running_critical_start
SET_SIZE(mutex_owner_running_critical_size)
/*
 * void mutex_exit(kmutex_t *lp)
 *
 * Fast-path release: if the owner word is exactly curthread (no waiter
 * bits set), store 0; anything else goes to mutex_vector_exit.  The
 * compare-then-store pair is bounded by critical-range labels whose size
 * is exported below; NOTE(review): presumably a thread preempted between
 * the compare and the store is restarted at the range start so the
 * release is effectively atomic -- confirm against the consumer of
 * mutex_exit_critical_start/size.
 */
.globl mutex_exit_critical_start
ENTRY(mutex_exit)
mutex_exit_critical_start:
movq %gs:CPU_THREAD, %rdx
cmpq %rdx, (%rdi)		/* owner must be exactly curthread */
jne mutex_vector_exit		/* waiters or not owner: slow path */
movq $0, (%rdi)			/* release */
.mutex_exit_critical_end:
LOCKSTAT_RET(mutex_exit)
movq %rdi, %rsi			/* lock address for the probe */
movl $LS_MUTEX_EXIT_RELEASE, %edi
jmp lockstat_wrapper
SET_SIZE(mutex_exit)
.globl mutex_exit_critical_size
.type mutex_exit_critical_size, @object
.align CPTRSIZE
mutex_exit_critical_size:
.quad .mutex_exit_critical_end - mutex_exit_critical_start
SET_SIZE(mutex_exit_critical_size)
/*
 * void rw_enter(krwlock_t *lp, krw_t rw)
 *
 * Reader path: reread-and-cmpxchg loop body (one attempt; failure goes
 * to rw_enter_sleep).  Acquisition is refused if a writer holds or
 * wants the lock; otherwise the reader count is bumped by RW_READ_LOCK.
 *
 * Writer path: cmpxchg the word from 0 to curthread|RW_WRITE_LOCKED.
 *
 * Both success paths end in a hotpatchable ret followed by the
 * LS_RW_ENTER_ACQUIRE probe tail call, with the reader/writer flavor
 * passed as the extra probe argument via lockstat_wrapper_arg.
 */
ENTRY(rw_enter)
cmpl $RW_WRITER, %esi
je .rw_write_enter
movq (%rdi), %rax		/* snapshot the lock word */
testl $RW_WRITE_LOCKED|RW_WRITE_WANTED, %eax
jnz rw_enter_sleep		/* writer active or waiting: block */
leaq RW_READ_LOCK(%rax), %rdx	/* proposed word: one more reader */
lock
cmpxchgq %rdx, (%rdi)		/* install if unchanged since snapshot */
jnz rw_enter_sleep		/* raced: slow path */
LOCKSTAT_RET(rw_read_enter)
movq %gs:CPU_THREAD, %rcx	/* curthread for wrapper_arg */
movq %rdi, %rsi			/* lock address */
movl $LS_RW_ENTER_ACQUIRE, %edi
movl $RW_READER, %edx		/* probe arg: entered as reader */
jmp lockstat_wrapper_arg
.rw_write_enter:
movq %gs:CPU_THREAD, %rdx
orq $RW_WRITE_LOCKED, %rdx	/* proposed word: owner | write bit */
xorl %eax, %eax			/* expected word: fully unheld */
lock
cmpxchgq %rdx, (%rdi)
jnz rw_enter_sleep		/* contended: block */
ERRATUM147_PATCH_POINT(rw_write_enter)
LOCKSTAT_RET(rw_write_enter)
movq %gs:CPU_THREAD, %rcx	/* curthread for wrapper_arg */
movq %rdi, %rsi			/* lock address */
movl $LS_RW_ENTER_ACQUIRE, %edi
movl $RW_WRITER, %edx		/* probe arg: entered as writer */
jmp lockstat_wrapper_arg
SET_SIZE(rw_enter)
/*
 * void rw_exit(krwlock_t *lp)
 *
 * Fast paths:
 *  - single reader, no waiters (word == RW_READ_LOCK): cmpxchg to 0;
 *  - multiple readers: drop RW_READ_LOCK from the count and cmpxchg,
 *    but only while the decremented word is still >= RW_READ_LOCK in a
 *    *signed* compare -- a word with waiter/state bits set fails that
 *    test and is routed to rw_exit_wakeup;
 *  - writer: cmpxchg curthread|RW_WRITE_LOCKED to 0.
 * Any cmpxchg failure means waiters appeared; rw_exit_wakeup handles it.
 */
ENTRY(rw_exit)
movq (%rdi), %rax		/* snapshot the lock word */
cmpl $RW_READ_LOCK, %eax
jne .rw_not_single_reader
xorl %edx, %edx			/* proposed word: fully unheld */
.rw_read_exit:
lock
cmpxchgq %rdx, (%rdi)		/* install if unchanged since snapshot */
jnz rw_exit_wakeup		/* raced (e.g. waiter arrived) */
LOCKSTAT_RET(rw_read_exit)
movq %gs:CPU_THREAD, %rcx	/* curthread for wrapper_arg */
movq %rdi, %rsi			/* lock address */
movl $LS_RW_EXIT_RELEASE, %edi
movl $RW_READER, %edx		/* probe arg: released as reader */
jmp lockstat_wrapper_arg
.rw_not_single_reader:
testl $RW_WRITE_LOCKED, %eax
jnz .rw_write_exit
leaq -RW_READ_LOCK(%rax), %rdx	/* drop one reader from the count */
cmpl $RW_READ_LOCK, %edx
jge .rw_read_exit		/* readers remain, no waiter bits */
jmp rw_exit_wakeup		/* last reader with waiters pending */
.rw_write_exit:
movq %gs:CPU_THREAD, %rax	/* expected word: us, write-locked */
xorl %edx, %edx			/* proposed word: fully unheld */
orq $RW_WRITE_LOCKED, %rax
lock
cmpxchgq %rdx, (%rdi)
jnz rw_exit_wakeup		/* waiters arrived: wake them */
LOCKSTAT_RET(rw_write_exit)
movq %gs:CPU_THREAD, %rcx	/* curthread for wrapper_arg */
movq %rdi, %rsi			/* lock address */
movl $LS_RW_EXIT_RELEASE, %edi
movl $RW_WRITER, %edx		/* probe arg: released as writer */
jmp lockstat_wrapper_arg
SET_SIZE(rw_exit)
#if defined(OPTERON_ERRATUM_147)
/* Set to 1 once the erratum-147 patch points have been rewritten. */
DGDEF3(erratum_147_patched, 4, 4)
.long 0
/*
 * HOT_MUTEX_PATCH(iaddr, insn_reg): overwrite 4 bytes at iaddr with the
 * instruction bytes held in insn_reg, via hot_patch_kernel_text(addr,
 * insn, 4).
 */
#define HOT_MUTEX_PATCH(iaddr, insn_reg) \
movq $iaddr, %rdi; \
movl %insn_reg, %esi; \
movl $4, %edx; \
call hot_patch_kernel_text;
/*
 * void patch_erratum_147(void)
 *
 * Rewrite every ERRATUM147_PATCH_POINT (four nops) with the little-
 * endian bytes of 0x90e8ae0f, i.e. 0f ae e8 90 = lfence; nop, adding a
 * load fence after each lock-acquiring cmpxchg on affected Opterons.
 * %r12 is callee-saved, so the patch bytes survive the calls.
 */
ENTRY_NP(patch_erratum_147)
pushq %rbp
movq %rsp, %rbp
pushq %r12			/* callee-saved scratch for insn bytes */
movl $0x90e8ae0f, %r12d		/* bytes 0f ae e8 90: lfence; nop */
HOT_MUTEX_PATCH(.mutex_enter_147_patch_point, r12d)
HOT_MUTEX_PATCH(.mutex_tryenter_147_patch_point, r12d)
HOT_MUTEX_PATCH(.mutex_atryenter_147_patch_point, r12d)
HOT_MUTEX_PATCH(.rw_write_enter_147_patch_point, r12d)
movl $1, erratum_147_patched	/* record that patching is done */
popq %r12
movq %rbp, %rsp
popq %rbp
ret
SET_SIZE(patch_erratum_147)
#endif
/*
 * void lockstat_hotpatch_site(uintptr_t addr, int enable)
 *
 * Patch the single byte at addr (a LOCKSTAT_RET label): NOP_INSTR when
 * enabling (so the fast path falls through into its probe tail call),
 * RET_INSTR when disabling (restoring the plain return).  The two
 * pushes keep the saved arguments live across hot_patch_kernel_text;
 * leave discards them via %rbp.
 */
ENTRY(lockstat_hotpatch_site)
pushq %rbp
movq %rsp, %rbp
pushq %rdi			/* preserve addr */
pushq %rsi			/* preserve enable flag */
testl %esi, %esi
jz .do_disable
movl $NOP_INSTR, %esi		/* enable: ret -> nop (fall through) */
movl $1, %edx			/* patch exactly one byte */
call hot_patch_kernel_text
leave
ret
.do_disable:
movl $RET_INSTR, %esi		/* disable: nop -> ret */
movl $1, %edx			/* patch exactly one byte */
call hot_patch_kernel_text
leave
ret
SET_SIZE(lockstat_hotpatch_site)
/*
 * HOT_PATCH_MATCH(name, probe, reg): if the event id in reg equals
 * probe, call lockstat_hotpatch_site(.<name>_lockstat_patch_point,
 * lockstat_probemap[probe]) -- i.e. enable the site iff the probemap
 * entry is nonzero.  The trailing 1: label is the per-expansion skip
 * target (numeric labels may repeat, so each expansion is self-contained).
 */
#define HOT_PATCH_MATCH(name, probe, reg) \
cmpl $probe, %reg; \
jne 1f; \
leaq lockstat_probemap(%rip), %rax; \
movl _MUL(probe, DTRACE_IDSIZE)(%rax), %esi; \
movq $.##name##_lockstat_patch_point, %rdi; \
call lockstat_hotpatch_site; \
1:
/*
 * void lockstat_hotpatch_probe(int probe)
 *
 * For the given lockstat event id, (re)patch every fast-path site that
 * fires it to match the probemap state.  Note LS_RW_ENTER_ACQUIRE and
 * LS_RW_EXIT_RELEASE each match two sites (reader and writer paths).
 * The id is parked in callee-saved %r12d so it survives the calls.
 */
ENTRY(lockstat_hotpatch_probe)
pushq %rbp
movq %rsp, %rbp
pushq %r12			/* callee-saved scratch for probe id */
movl %edi, %r12d
HOT_PATCH_MATCH(mutex_enter, LS_MUTEX_ENTER_ACQUIRE, r12d)
HOT_PATCH_MATCH(mutex_tryenter, LS_MUTEX_TRYENTER_ACQUIRE, r12d)
HOT_PATCH_MATCH(mutex_exit, LS_MUTEX_EXIT_RELEASE, r12d)
HOT_PATCH_MATCH(rw_write_enter, LS_RW_ENTER_ACQUIRE, r12d)
HOT_PATCH_MATCH(rw_read_enter, LS_RW_ENTER_ACQUIRE, r12d)
HOT_PATCH_MATCH(rw_write_exit, LS_RW_EXIT_RELEASE, r12d)
HOT_PATCH_MATCH(rw_read_exit, LS_RW_EXIT_RELEASE, r12d)
HOT_PATCH_MATCH(lock_set, LS_LOCK_SET_ACQUIRE, r12d)
HOT_PATCH_MATCH(lock_try, LS_LOCK_TRY_ACQUIRE, r12d)
HOT_PATCH_MATCH(lock_clear, LS_LOCK_CLEAR_RELEASE, r12d)
HOT_PATCH_MATCH(lock_set_spl, LS_LOCK_SET_SPL_ACQUIRE, r12d)
HOT_PATCH_MATCH(lock_clear_splx, LS_LOCK_CLEAR_SPLX_RELEASE, r12d)
popq %r12
leave
ret
SET_SIZE(lockstat_hotpatch_probe)
/*
 * membar_enter() / membar_exit() / membar_sync()
 *
 * All three are aliases for a full memory fence: mfence orders all
 * prior loads and stores before all subsequent ones.
 */
ENTRY(membar_enter)
ALTENTRY(membar_exit)
ALTENTRY(membar_sync)
mfence
ret
SET_SIZE(membar_sync)
SET_SIZE(membar_exit)
SET_SIZE(membar_enter)
/*
 * membar_producer(): store barrier -- sfence orders all prior stores
 * before all subsequent stores.
 */
ENTRY(membar_producer)
sfence
ret
SET_SIZE(membar_producer)
/*
 * membar_consumer(): load barrier -- lfence orders all prior loads
 * before all subsequent loads.
 */
ENTRY(membar_consumer)
lfence
ret
SET_SIZE(membar_consumer)
/*
 * void thread_onproc(kthread_t *t, cpu_t *cp)
 *
 * Mark t as running: set t->t_state = ONPROC_THREAD and point
 * t->t_lockp at cp's per-CPU thread lock (&cp->cpu_thread_lock).
 */
ENTRY(thread_onproc)
addq $CPU_THREAD_LOCK, %rsi	/* %rsi = &cp->cpu_thread_lock */
movl $ONPROC_THREAD, T_STATE(%rdi)
movq %rsi, T_LOCKP(%rdi)
ret
SET_SIZE(thread_onproc)
/*
 * void mutex_delay_default(void)
 *
 * Default spin-backoff delay: burn 92 dec/jg iterations.  The constant
 * is a tuned magic number; the loop exists purely to consume time
 * between lock retries.
 */
ENTRY(mutex_delay_default)
movq $92,%r11
0: decq %r11
jg 0b
ret
SET_SIZE(mutex_delay_default)