#include "assym.h"
#include <sys/asm_linkage.h>
#include <sys/machtrap.h>
#include <sys/machasi.h>
#include <sys/sun4asi.h>
#include <sys/pte.h>
#include <sys/mmu.h>
#include <vm/hat_sfmmu.h>
#include <vm/seg_spt.h>
#include <sys/machparam.h>
#include <sys/privregs.h>
#include <sys/scb.h>
#include <sys/intreg.h>
#include <sys/machthread.h>
#include <sys/intr.h>
#include <sys/clock.h>
#include <sys/trapstat.h>
/*
 * TT_TRACE(label): when the kernel is built with TRAPTRACE, branch to the
 * trace routine at 'label' with %g7 = return %pc (the rd executes in the
 * delay slot of the ba).  Without TRAPTRACE the macro expands to nothing.
 */
#ifdef TRAPTRACE
#include <sys/traptrace.h>
#define TT_TRACE(label) \
ba label ;\
rd %pc, %g7
#else
#define TT_TRACE(label)
#endif
/*
 * TTE_SUSPEND_INT_SHIFT(reg): align the TTE_SUSPEND bit into the position
 * used by the TTE compare; a no-op when TTE_SUSPEND_SHIFT is zero.
 */
#if (TTE_SUSPEND_SHIFT > 0)
#define TTE_SUSPEND_INT_SHIFT(reg) \
sllx reg, TTE_SUSPEND_SHIFT, reg
#else
#define TTE_SUSPEND_INT_SHIFT(reg)
#endif
/*
 * Compile-time layout assumptions: the TSB update/invalidate macros below
 * address the tag word at offset 0 of a TSB entry and the tag's high
 * (invalid/locked) word at offset 0 of the tag.
 */
#if TSBE_TAG != 0
#error "TSB_UPDATE and TSB_INVALIDATE assume TSBE_TAG = 0"
#endif
#if TSBTAG_INTHI != 0
#error "TSB_UPDATE and TSB_INVALIDATE assume TSBTAG_INTHI = 0"
#endif
/*
 * GET_TSBE_POINTER(vpshift, tsbbase, tagacc, szc, tmp)
 * Compute the address of the TSB entry for a virtual address:
 *   tsbbase += ((tagacc >> vpshift) & (TSB_ENTRIES(0) << szc) - 1)
 *              << TSB_ENTRY_SHIFT
 * In:  tsbbase = TSB base address, tagacc = tag access (vaddr|ctx),
 *      szc = TSB size code, vpshift = virtual page shift for this TSB.
 * Out: tsbbase = address of the TSB entry.  tagacc and tmp are clobbered.
 */
#define GET_TSBE_POINTER(vpshift, tsbbase, tagacc, szc, tmp) \
mov TSB_ENTRIES(0), tmp ;\
srlx tagacc, vpshift, tagacc ;\
sllx tmp, szc, tmp ;\
sub tmp, 1, tmp ;\
and tagacc, tmp, tmp ;\
sllx tmp, TSB_ENTRY_SHIFT, tmp ;\
add tsbbase, tmp, tsbbase
/*
 * GET_KPM_TSBE_POINTER(vpshift, tsbp, vaddr, szc, tmp)
 * Select the proper kpm TSB -- the small-page kpm TSB (kpmsm_*) when
 * vpshift == MMU_PAGESHIFT, otherwise the large-page kpm TSB (kpm_*) --
 * then compute the entry address via GET_TSBE_POINTER.
 * Out: tsbp = TSB entry address.  vaddr, szc, tmp are clobbered.
 */
#define GET_KPM_TSBE_POINTER(vpshift, tsbp, vaddr, szc, tmp) \
cmp vpshift, MMU_PAGESHIFT ;\
bne,pn %icc, 1f ;\
sethi %hi(kpmsm_tsbsz), szc ;\
sethi %hi(kpmsm_tsbbase), tsbp ;\
ld [szc + %lo(kpmsm_tsbsz)], szc ;\
ldx [tsbp + %lo(kpmsm_tsbbase)], tsbp ;\
ba,pt %icc, 2f ;\
nop ;\
1: sethi %hi(kpm_tsbsz), szc ;\
sethi %hi(kpm_tsbbase), tsbp ;\
ld [szc + %lo(kpm_tsbsz)], szc ;\
ldx [tsbp + %lo(kpm_tsbbase)], tsbp ;\
2: GET_TSBE_POINTER(vpshift, tsbp, vaddr, szc, tmp)
/*
 * TSB_LOCK_ENTRY(tsbep, tmp1, tmp2, label)
 * Try once to lock a TSB entry by casting TSBTAG_LOCKED into the 32-bit
 * tag-high word at tsbep (offset 0, per the TSBE_TAG assert above).
 * If the entry is already locked, or the cas loses the race, branch to
 * 'label' (i.e. give up rather than spin -- callers treat a lost race as
 * "somebody else is updating this entry").  On success, the trailing
 * membar orders the lock store before the subsequent TTE store.
 * Two variants: physical TSB access via ASI_MEM (UTSB_PHYS) or virtual
 * access through the caller-established %asi.
 */
#if defined(UTSB_PHYS)
#define TSB_LOCK_ENTRY(tsbep, tmp1, tmp2, label) \
lda [tsbep]ASI_MEM, tmp1 ;\
sethi %hi(TSBTAG_LOCKED), tmp2 ;\
cmp tmp1, tmp2 ;\
be,a,pn %icc, label ;\
nop ;\
casa [tsbep]ASI_MEM, tmp1, tmp2 ;\
cmp tmp1, tmp2 ;\
bne,a,pn %icc, label ;\
nop ;\
;\
membar #StoreStore
#else
#define TSB_LOCK_ENTRY(tsbep, tmp1, tmp2, label) \
lda [tsbep]%asi, tmp1 ;\
sethi %hi(TSBTAG_LOCKED), tmp2 ;\
cmp tmp1, tmp2 ;\
be,a,pn %icc, label ;\
nop ;\
casa [tsbep]%asi, tmp1, tmp2 ;\
cmp tmp1, tmp2 ;\
bne,a,pn %icc, label ;\
nop ;\
;\
membar #StoreStore
#endif
/*
 * TSB_INSERT_UNLOCK_ENTRY(tsbep, tte, tagtarget, tmp1)
 * Store the TTE data word, then (after a #StoreStore barrier so the data
 * is globally visible first) store the tag word.  Writing the tag both
 * publishes the entry and clears the lock set by TSB_LOCK_ENTRY.
 */
#if defined(UTSB_PHYS)
#define TSB_INSERT_UNLOCK_ENTRY(tsbep, tte, tagtarget, tmp1) \
add tsbep, TSBE_TTE, tmp1 ;\
stxa tte, [tmp1]ASI_MEM ;\
membar #StoreStore ;\
add tsbep, TSBE_TAG, tmp1 ;\
stxa tagtarget, [tmp1]ASI_MEM
#else
#define TSB_INSERT_UNLOCK_ENTRY(tsbep, tte, tagtarget,tmp1) \
stxa tte, [tsbep + TSBE_TTE]%asi ;\
membar #StoreStore ;\
stxa tagtarget, [tsbep + TSBE_TAG]%asi
#endif
/*
 * TSB_UPDATE_TL(tsbep, tte, tagtarget, ttepa, tmp1, tmp2, label)
 * Trap-level TSB insert.  Lock the entry, then re-read the TTE from its
 * physical address (ttepa) while holding the lock to close the race with
 * a concurrent TTE unload.  If the reloaded TTE is no longer valid
 * (brgez: TTE valid bit is the sign bit), the annulled delay-slot store
 * writes TSBTAG_INVALID into the tag word, which invalidates the entry
 * and drops the lock; otherwise insert the TTE and unlock.  The sllx/srlx
 * pair around the load is a delay filler that leaves tagtarget intact
 * (TTARGET_VA_SHIFT left then right).
 */
#if defined(UTSB_PHYS)
#define TSB_UPDATE_TL(tsbep, tte, tagtarget, ttepa, tmp1, tmp2, label) \
TSB_LOCK_ENTRY(tsbep, tmp1, tmp2, label) ;\
;\
sllx tagtarget, TTARGET_VA_SHIFT, tagtarget ;\
ldxa [ttepa]ASI_MEM, tte ;\
srlx tagtarget, TTARGET_VA_SHIFT, tagtarget ;\
sethi %hi(TSBTAG_INVALID), tmp2 ;\
add tsbep, TSBE_TAG, tmp1 ;\
brgez,a,pn tte, label ;\
sta tmp2, [tmp1]ASI_MEM ;\
TSB_INSERT_UNLOCK_ENTRY(tsbep, tte, tagtarget, tmp1) ;\
label:
#else
#define TSB_UPDATE_TL(tsbep, tte, tagtarget, ttepa, tmp1, tmp2, label) \
TSB_LOCK_ENTRY(tsbep, tmp1, tmp2, label) ;\
;\
sllx tagtarget, TTARGET_VA_SHIFT, tagtarget ;\
ldxa [ttepa]ASI_MEM, tte ;\
srlx tagtarget, TTARGET_VA_SHIFT, tagtarget ;\
sethi %hi(TSBTAG_INVALID), tmp2 ;\
brgez,a,pn tte, label ;\
sta tmp2, [tsbep + TSBE_TAG]%asi ;\
TSB_INSERT_UNLOCK_ENTRY(tsbep, tte, tagtarget, tmp1) ;\
label:
#endif
/*
 * TSB_UPDATE_TL_PN(tsbep, tte, tagtarget, ttepa, tmp1, tmp2, label)
 * Like TSB_UPDATE_TL, but merges caller-supplied bits (saved in tmp1)
 * into the reloaded TTE, clears the execute-permission bit and sets the
 * synthesized-entry bit before inserting.
 */
#define TSB_UPDATE_TL_PN(tsbep, tte, tagtarget, ttepa, tmp1, tmp2, label) \
TSB_LOCK_ENTRY(tsbep, tmp1, tmp2, label) ;\
;\
sllx tagtarget, TTARGET_VA_SHIFT, tagtarget ;\
mov tte, tmp1 ;\
ldxa [ttepa]ASI_MEM, tte ;\
srlx tagtarget, TTARGET_VA_SHIFT, tagtarget ;\
sethi %hi(TSBTAG_INVALID), tmp2 ;\
brgez,a,pn tte, label ;\
sta tmp2, [tsbep + TSBE_TAG]%asi ;\
or tte, tmp1, tte ;\
andn tte, TTE_EXECPRM_INT, tte ;\
or tte, TTE_E_SYNTH_INT, tte ;\
TSB_INSERT_UNLOCK_ENTRY(tsbep, tte, tagtarget, tmp1) ;\
label:
/*
 * GET_4M_PFN_OFF(tte, tagaccess, pfn, tmp, label)
 * Derive the 4M-page offset (in pfn) of a vaddr within a larger page:
 * keep the low 3 bits of the 4M page number for a 32M page (TTE size
 * bits zero after masking), or the low 6 bits for a 256M page.
 * NOTE(review): the branch targets label##f while the label defined here
 * is plain "label:" -- this pairing only resolves if callers pass a
 * numeric local label (so label##f becomes e.g. "1f"); confirm at the
 * call sites.
 */
#define GET_4M_PFN_OFF(tte, tagaccess, pfn, tmp, label) \
;\
srlx tagaccess, MMU_PAGESHIFT4M, tagaccess ;\
srlx tte, TTE_SZ_SHFT, tmp ;\
andcc tmp, TTE_SZ_BITS, %g0 ;\
bz,a,pt %icc, label##f ;\
and tagaccess, 0x7, tagaccess ;\
and tagaccess, 0x3f, tagaccess ;\
label: ;\
sllx tagaccess, MMU_PAGESHIFT4M, pfn
/*
 * SET_TTE4M_PN(tte, tmp): set the TTE page-size field to TTE4M.
 */
#define SET_TTE4M_PN(tte, tmp) \
;\
set TTE4M, tmp ;\
sllx tmp, TTE_SZ_SHFT, tmp ;\
or tte, tmp, tte
/*
 * TSB_UPDATE(tsbep, tteva, tagtarget, tmp1, tmp2, label)
 * Non-trap-level TSB insert.  Load the TTE through its virtual address
 * first, then lock the entry; if the TTE is invalid (brgez on the valid
 * sign bit) the annulled store marks the entry TSBTAG_INVALID, which also
 * releases the lock; otherwise insert the TTE and unlock.  Variants for
 * physical (ASI_MEM) and virtual (%asi) TSB access.
 */
#if defined(UTSB_PHYS)
#define TSB_UPDATE(tsbep, tteva, tagtarget, tmp1, tmp2, label) \
;\
ldx [tteva], tteva ;\
TSB_LOCK_ENTRY(tsbep, tmp1, tmp2, label) ;\
sethi %hi(TSBTAG_INVALID), tmp2 ;\
add tsbep, TSBE_TAG, tmp1 ;\
brgez,a,pn tteva, label ;\
sta tmp2, [tmp1]ASI_MEM ;\
TSB_INSERT_UNLOCK_ENTRY(tsbep, tteva, tagtarget, tmp1) ;\
label:
#else
#define TSB_UPDATE(tsbep, tteva, tagtarget, tmp1, tmp2, label) \
;\
ldx [tteva], tteva ;\
TSB_LOCK_ENTRY(tsbep, tmp1, tmp2, label) ;\
sethi %hi(TSBTAG_INVALID), tmp2 ;\
brgez,a,pn tteva, label ;\
sta tmp2, [tsbep + TSBE_TAG]%asi ;\
TSB_INSERT_UNLOCK_ENTRY(tsbep, tteva, tagtarget, tmp1) ;\
label:
#endif
/*
 * TSB_INVALIDATE(tsbep, tag, tmp1, tmp2, tmp3, label)
 * Invalidate the TSB entry at tsbep if its (64-bit) tag matches 'tag'.
 * Spins at label##1 while the entry is locked (tag-high ==
 * TSBTAG_LOCKED), compares the full tag, and on a match uses casa to
 * swap TSBTAG_INVALID into the tag-high word; a lost cas re-reads and
 * retries.  A tag mismatch exits at label##2 with the entry untouched.
 * Variants for physical (ASI_MEM) and virtual (%asi) TSB access.
 */
#if defined(UTSB_PHYS)
#define TSB_INVALIDATE(tsbep, tag, tmp1, tmp2, tmp3, label) \
lda [tsbep]ASI_MEM, tmp1 ;\
sethi %hi(TSBTAG_LOCKED), tmp2 ;\
label##1: ;\
cmp tmp1, tmp2 ;\
be,a,pn %icc, label##1 ;\
lda [tsbep]ASI_MEM, tmp1 ;\
ldxa [tsbep]ASI_MEM, tmp3 ;\
cmp tag, tmp3 ;\
bne,pt %xcc, label##2 ;\
sethi %hi(TSBTAG_INVALID), tmp3 ;\
casa [tsbep]ASI_MEM, tmp1, tmp3 ;\
cmp tmp1, tmp3 ;\
bne,a,pn %icc, label##1 ;\
lda [tsbep]ASI_MEM, tmp1 ;\
label##2:
#else
#define TSB_INVALIDATE(tsbep, tag, tmp1, tmp2, tmp3, label) \
lda [tsbep]%asi, tmp1 ;\
sethi %hi(TSBTAG_LOCKED), tmp2 ;\
label##1: ;\
cmp tmp1, tmp2 ;\
be,a,pn %icc, label##1 ;\
lda [tsbep]%asi, tmp1 ;\
ldxa [tsbep]%asi, tmp3 ;\
cmp tag, tmp3 ;\
bne,pt %xcc, label##2 ;\
sethi %hi(TSBTAG_INVALID), tmp3 ;\
casa [tsbep]%asi, tmp1, tmp3 ;\
cmp tmp1, tmp3 ;\
bne,a,pn %icc, label##1 ;\
lda [tsbep]%asi, tmp1 ;\
label##2:
#endif
/* The software TSB size field must be able to hold any hardware size code. */
#if TSB_SOFTSZ_MASK < TSB_SZ_MASK
#error - TSB_SOFTSZ_MASK too small
#endif
/*
 * RUNTIME_PATCH_SETX(dest, tmp)
 * Emit the canonical 6-instruction setx template (sethi/sethi/or/or/
 * sllx/or, with a nop filler) that materializes the 64-bit placeholder
 * RUNTIME_PATCH into dest.  The instruction immediates are later patched
 * in place by sfmmu_fixup_setx, so the instruction sequence and order
 * here must match what that routine expects.
 */
#define RUNTIME_PATCH_SETX(dest, tmp) \
sethi %hh(RUNTIME_PATCH), tmp ;\
sethi %lm(RUNTIME_PATCH), dest ;\
or tmp, %hm(RUNTIME_PATCH), tmp ;\
or dest, %lo(RUNTIME_PATCH), dest ;\
sllx tmp, 32, tmp ;\
nop ;\
or tmp, dest, dest
/*
 * Panic message strings referenced by the assembly routines below
 * (passed to panic() / ptl1_panic paths).
 */
.seg ".data"
.global sfmmu_panic1
sfmmu_panic1:
.asciz "sfmmu_asm: interrupts already disabled"
.global sfmmu_panic3
sfmmu_panic3:
.asciz "sfmmu_asm: sfmmu_vatopfn called for user"
.global sfmmu_panic4
sfmmu_panic4:
.asciz "sfmmu_asm: 4M tsb pointer mis-match"
.global sfmmu_panic5
sfmmu_panic5:
.asciz "sfmmu_asm: no unlocked TTEs in TLB 0"
.global sfmmu_panic6
sfmmu_panic6:
.asciz "sfmmu_asm: interrupts not disabled"
.global sfmmu_panic7
sfmmu_panic7:
.asciz "sfmmu_asm: kernel as"
.global sfmmu_panic8
sfmmu_panic8:
.asciz "sfmmu_asm: gnum is zero"
.global sfmmu_panic9
sfmmu_panic9:
.asciz "sfmmu_asm: cnum is greater than MAX_SFMMU_CTX_VAL"
.global sfmmu_panic10
sfmmu_panic10:
.asciz "sfmmu_asm: valid SCD with no 3rd scd TSB"
.global sfmmu_panic11
sfmmu_panic11:
.asciz "sfmmu_asm: ktsb_phys must not be 0 on a sun4v platform"
/*
 * sfmmu_disable_intrs: disable interrupts and return the previous
 * %pstate in %o0 so the caller can hand it to sfmmu_enable_intrs.
 * The wrpr clearing PSTATE_IE executes in the retl delay slot.
 */
ENTRY(sfmmu_disable_intrs)
rdpr %pstate, %o0
#ifdef DEBUG
PANIC_IF_INTR_DISABLED_PSTR(%o0, sfmmu_di_l0, %g1)
#endif
retl
wrpr %o0, PSTATE_IE, %pstate            ! clear IE in delay slot
SET_SIZE(sfmmu_disable_intrs)
/*
 * sfmmu_enable_intrs: restore %pstate from %o0 (the value returned by
 * sfmmu_disable_intrs), re-enabling interrupts if they were enabled.
 */
ENTRY(sfmmu_enable_intrs)
retl
wrpr %g0, %o0, %pstate                  ! restore saved pstate
SET_SIZE(sfmmu_enable_intrs)
/*
 * sfmmu_alloc_ctx: allocate / validate an MMU context number (cnum) for
 * a hat and program it into the secondary context register.
 * In (per the register usage below):
 *   %o0 = sfmmup (hat pointer; must not be the kernel hat)
 *   %o1 = allocflag (0: don't allocate, just set HW from existing state)
 *   %o2 = CPU pointer (source of CPU_MMU_CTXP / CPU_MMU_IDX)
 *   %o3 = private/shared flag (saved in %g1, consumed by SET_SECCTX)
 * Out: %o0 = 1 if the hat's cnum/gnum were (or became) valid, 0 otherwise.
 * Uses the per-process SFMMU_CTX_LOCK byte lock and a cas loop on the
 * global mmu_ctxp->cnum counter.
 */
ENTRY_NP(sfmmu_alloc_ctx)
#ifdef DEBUG
sethi %hi(ksfmmup), %g1
ldx [%g1 + %lo(ksfmmup)], %g1
cmp %g1, %o0
bne,pt %xcc, 0f                         ! not the kernel hat: proceed
nop
sethi %hi(panicstr), %g1 ! if kernel as, panic
ldx [%g1 + %lo(panicstr)], %g1
tst %g1
bnz,pn %icc, 7f                         ! already panicking: just return
nop
sethi %hi(sfmmu_panic7), %o0
call panic
or %o0, %lo(sfmmu_panic7), %o0
7:
retl
mov %g0, %o0 ! %o0 = ret = 0
0:
PANIC_IF_INTR_ENABLED_PSTR(sfmmu_ei_l1, %g1)
#endif
mov %o3, %g1 ! save sfmmu pri/sh flag in %g1
! load global mmu_ctxp info
ldx [%o2 + CPU_MMU_CTXP], %o3 ! %o3 = mmu_ctx_t ptr
#ifdef sun4v
brz,a,pn %o3, 0f                        ! no ctx domain yet: bail with INVALID
nop
#endif
lduw [%o2 + CPU_MMU_IDX], %g2 ! %g2 = mmu index
! load global mmu_ctxp gnum
ldx [%o3 + MMU_CTX_GNUM], %o4 ! %o4 = mmu_ctxp->gnum
#ifdef DEBUG
cmp %o4, %g0 ! mmu_ctxp->gnum should never be 0
bne,pt %xcc, 3f
nop
sethi %hi(panicstr), %g1 ! test if panicstr is already set
ldx [%g1 + %lo(panicstr)], %g1
tst %g1
bnz,pn %icc, 1f
nop
sethi %hi(sfmmu_panic8), %o0
call panic
or %o0, %lo(sfmmu_panic8), %o0
1:
retl
mov %g0, %o0 ! %o0 = ret = 0
3:
#endif
! load HAT sfmmu_ctxs[mmuid] gnum, cnum
sllx %g2, SFMMU_MMU_CTX_SHIFT, %g2
add %o0, %g2, %g2 ! %g2 = &sfmmu_ctxs[mmuid] - SFMMU_CTXS
SFMMU_MMUID_GNUM_CNUM(%g2, %g5, %g6, %g4)
cmp %g6, INVALID_CONTEXT ! hat cnum == INVALID ??
bne,pt %icc, 1f ! valid hat cnum, check gnum
nop
! cnum == INVALID, check allocflag
mov %g0, %g4 ! %g4 = ret = 0
brz,pt %o1, 8f ! allocflag == 0, skip ctx allocation, bail
mov %g6, %o1
! (invalid HAT cnum) && (allocflag == 1)
ba,pt %icc, 2f
nop
#ifdef sun4v
0:
! no MMU ctx domain: report INVALID_CONTEXT
set INVALID_CONTEXT, %o1
membar #LoadStore|#StoreStore
ba,pt %icc, 8f
mov %g0, %g4 ! %g4 = ret = 0
#endif
1:
! valid HAT cnum, check gnum
cmp %g5, %o4
mov 1, %g4 !%g4 = ret = 1
be,a,pt %icc, 8f ! gnum unchanged, go to done
mov %g6, %o1
2:
! slow path: take the per-process lock and re-examine under it
ldstub [%o0 + SFMMU_CTX_LOCK], %g3 ! %g3 = per process (PP) lock
3:
brz %g3, 5f                             ! got the lock
nop
4:
brnz,a,pt %g3, 4b ! spin if lock is 1
ldub [%o0 + SFMMU_CTX_LOCK], %g3
ba %xcc, 3b ! retry the lock
ldstub [%o0 + SFMMU_CTX_LOCK], %g3 ! %g3 = PP lock
5:
membar #LoadLoad                        ! acquire: order loads after lock
! re-check gnum/cnum now that we hold the PP lock
SFMMU_MMUID_GNUM_CNUM(%g2, %g5, %g6, %g4)
cmp %g6, INVALID_CONTEXT ! hat cnum == INVALID ??
bne,pt %icc, 1f ! valid hat cnum, check gnum
nop
! cnum == INVALID, check allocflag
mov %g0, %g4 ! %g4 = ret = 0
brz,pt %o1, 2f ! allocflag == 0, called from resume, set hw
mov %g6, %o1
! (invalid HAT cnum) && (allocflag == 1)
ba,pt %icc, 6f
nop
1:
! valid HAT cnum, check gnum
cmp %g5, %o4
mov 1, %g4 ! %g4 = ret = 1
be,a,pt %icc, 2f ! gnum unchanged, go to done
mov %g6, %o1
ba,pt %icc, 6f
nop
2:
! release PP lock and finish with the cnum already in %o1
membar #LoadStore|#StoreStore
ba,pt %icc, 8f
clrb [%o0 + SFMMU_CTX_LOCK]
6:
! allocate a new cnum from the global counter via cas
add %o3, MMU_CTX_CNUM, %g3
ld [%o3 + MMU_CTX_NCTXS], %g4
ld [%g3], %o1
0:
cmp %o1, %g4
bl,a,pt %icc, 1f                        ! still room in this generation
add %o1, 1, %o5 ! %o5 = mmu_ctxp->cnum + 1
! cnum space exhausted: return INVALID_CONTEXT, drop the lock
set INVALID_CONTEXT, %o1
mov %g0, %g4 ! %g4 = ret = 0
membar #LoadStore|#StoreStore
ba,pt %icc, 8f
clrb [%o0 + SFMMU_CTX_LOCK]
1:
! %g3 = addr of mmu_ctxp->cnum
! %o5 = mmu_ctxp->cnum + 1
cas [%g3], %o1, %o5
cmp %o1, %o5
bne,a,pn %xcc, 0b ! cas failed
ld [%g3], %o1
#ifdef DEBUG
set MAX_SFMMU_CTX_VAL, %o5
cmp %o1, %o5
ble,pt %icc, 2f
nop
sethi %hi(sfmmu_panic9), %o0
call panic
or %o0, %lo(sfmmu_panic9), %o0
2:
#endif
! update hat gnum and cnum
sllx %o4, SFMMU_MMU_GNUM_RSHIFT, %o4
or %o4, %o1, %o4
stx %o4, [%g2 + SFMMU_CTXS]
membar #LoadStore|#StoreStore
clrb [%o0 + SFMMU_CTX_LOCK]
mov 1, %g4 ! %g4 = ret = 1
8:
! common exit: %o1 = cnum, %g4 = return value
cmp %o1, INVALID_CONTEXT
be,a,pn %icc, 9f
clr %g1                                 ! clear pri/sh flag for INVALID ctx
9:
#ifdef sun4u
ldub [%o0 + SFMMU_CEXT], %o2
sll %o2, CTXREG_EXT_SHIFT, %o2
or %o1, %o2, %o1                        ! merge ctx extension bits
#endif
SET_SECCTX(%o1, %g1, %o4, %o5, alloc_ctx_lbl1)
retl
mov %g4, %o0 ! %o0 = ret
SET_SIZE(sfmmu_alloc_ctx)
/*
 * sfmmu_modifytte: atomically replace the TTE at *%o2 with the value at
 * *%o1, retrying until the cas succeeds against the value read through
 * *%o0 (the caller's snapshot of the original).  On each failed cas the
 * new current value is written back through %o0 so the caller's snapshot
 * stays in sync.  If the target already equals the desired value, no
 * store to *%o2 is made (the annulled stx only refreshes *%o0).
 *   %o0 = addr of original (snapshot) tte
 *   %o1 = addr of modified (desired) tte
 *   %o2 = addr of the live tte to update
 */
ENTRY_NP(sfmmu_modifytte)
ldx [%o2], %g3                          ! %g3 = current value
ldx [%o0], %g1                          ! %g1 = original snapshot
2:
ldx [%o1], %g2                          ! %g2 = desired value
cmp %g2, %g3                            ! already equal?
be,a,pt %xcc,1f                         ! yes: skip the cas
stx %g3, [%o0]                          ! refresh snapshot (annulled if not taken)
casx [%o2], %g1, %g2
cmp %g1, %g2
be,pt %xcc, 1f                          ! cas succeeded
nop
ldx [%o2], %g3                          ! cas lost: re-read current value
stx %g3, [%o0]                          ! update caller's snapshot
ba,pt %xcc, 2b
mov %g3, %g1
1: retl
membar #StoreLoad
SET_SIZE(sfmmu_modifytte)
/*
 * sfmmu_modifytte_try: single-shot version of sfmmu_modifytte.
 *   %o0 = addr of original (snapshot) tte
 *   %o1 = addr of modified (desired) tte
 *   %o2 = addr of the live tte to update
 * Returns in %o0:  1 = cas succeeded, -1 = cas lost the race,
 * 0 = target already held the desired value (nothing written).
 * In all cases the value now seen at *%o2 is stored back through %o0.
 */
ENTRY_NP(sfmmu_modifytte_try)
ldx [%o1], %g2                          ! %g2 = desired value
ldx [%o2], %g3                          ! %g3 = current value
ldx [%o0], %g1                          ! %g1 = original snapshot
cmp %g3, %g2                            ! already equal?
be,a,pn %xcc,1f
mov 0, %o1                              ! ret = 0: no write needed
casx [%o2], %g1, %g2
membar #StoreLoad
cmp %g1, %g2
movne %xcc, -1, %o1                     ! cas failed
move %xcc, 1, %o1                       ! cas succeeded
1:
stx %g2, [%o0]                          ! report observed value to caller
retl
mov %o1, %o0
SET_SIZE(sfmmu_modifytte_try)
/*
 * sfmmu_copytte: copy the 8-byte TTE at *%o0 to *%o1
 * (store executes in the retl delay slot).
 */
ENTRY_NP(sfmmu_copytte)
ldx [%o0], %g1
retl
stx %g1, [%o1]
SET_SIZE(sfmmu_copytte)
/*
 * sfmmu_get_tsbe: C-callable wrapper around GET_TSBE_POINTER.
 *   %o0 = tsb base, %o1 = tag access, %o2 = vpshift, %o3 = tsb szc
 * Returns the TSB entry address in %o0.
 */
ENTRY_NP(sfmmu_get_tsbe)
GET_TSBE_POINTER(%o2, %o0, %o1, %o3, %o4)
retl
nop
SET_SIZE(sfmmu_get_tsbe)
/*
 * sfmmu_make_tsbtag: convert a vaddr (%o0) into a TSB tag-target value
 * by shifting out the low TTARGET_VA_SHIFT bits (in the delay slot).
 */
ENTRY_NP(sfmmu_make_tsbtag)
retl
srln %o0, TTARGET_VA_SHIFT, %o0
SET_SIZE(sfmmu_make_tsbtag)
/* Size of one SPARC instruction, used to step through patch targets. */
#define I_SIZE 4
/*
 * sfmmu_fix_ktlb_traptable: boot-time code patching.  Subtract %o1
 * (the TSB size code) from the immediate fields of two consecutive
 * instructions starting at %o0, flushing the I-cache after each store.
 *   %o0 = address of first instruction to patch
 *   %o1 = value to subtract from each instruction word
 * Clobbers %o3.
 */
ENTRY_NP(sfmmu_fix_ktlb_traptable)
ld [%o0], %o3
sub %o3, %o1, %o3
st %o3, [%o0]
flush %o0
add %o0, I_SIZE, %o0                    ! second instruction
ld [%o0], %o3
sub %o3, %o1, %o3
st %o3, [%o0]
retl
flush %o0
SET_SIZE(sfmmu_fix_ktlb_traptable)
/*
 * sfmmu_fixup_ktsbbase: patch a sethi/or pair at %o0 with the 32-bit
 * value in %o5 (imm22 gets bits 32:10, imm13 gets bits 9:0), then flush.
 * The existing immediate fields are assumed to be zero (values are or'd in).
 */
ENTRY_NP(sfmmu_fixup_ktsbbase)
ld [%o0], %o3
srl %o5, 10, %o2 ! offset is bits 32:10
or %o3, %o2, %o3 ! set imm22
st %o3, [%o0]
add %o0, I_SIZE, %o0 ! next instr
ld [%o0], %o3
and %o5, 0x3ff, %o2 ! set imm13 to bits 9:0
or %o3, %o2, %o3
st %o3, [%o0]
retl
flush %o0
SET_SIZE(sfmmu_fixup_ktsbbase)
/*
 * sfmmu_fixup_setx: patch a setx-style 4-instruction template
 * (sethi %hh / sethi %lm / or %hm / or %lo -- the layout emitted by
 * RUNTIME_PATCH_SETX) at %o0 with the 64-bit value in %o4, flushing
 * the final instruction.  Immediate fields are or'd in, so the template
 * must contain zeros there.  Clobbers %o2, %o3.
 */
ENTRY_NP(sfmmu_fixup_setx)
ld [%o0], %o3
srlx %o4, 42, %o2 ! bits [63:42]
or %o3, %o2, %o3 ! set imm22
st %o3, [%o0]
add %o0, I_SIZE, %o0 ! next instr
ld [%o0], %o3
sllx %o4, 32, %o2 ! clear upper bits
srlx %o2, 42, %o2 ! bits [31:10]
or %o3, %o2, %o3 ! set imm22
st %o3, [%o0]
add %o0, I_SIZE, %o0 ! next instr
ld [%o0], %o3
srlx %o4, 32, %o2 ! bits [63:32]
and %o2, 0x3ff, %o2 ! bits [41:32]
or %o3, %o2, %o3 ! set imm
st %o3, [%o0]
add %o0, I_SIZE, %o0 ! next instr
ld [%o0], %o3
and %o4, 0x3ff, %o2 ! bits [9:0]
or %o3, %o2, %o3 ! set imm
st %o3, [%o0]
retl
flush %o0
SET_SIZE(sfmmu_fixup_setx)
/*
 * sfmmu_fixup_or: or the low 10 bits of %o4 into the immediate field of
 * the single instruction at %o0, then flush.
 */
ENTRY_NP(sfmmu_fixup_or)
ld [%o0], %o3
and %o4, 0x3ff, %o2 ! bits [9:0]
or %o3, %o2, %o3 ! set imm
st %o3, [%o0]
retl
flush %o0
SET_SIZE(sfmmu_fixup_or)
/*
 * sfmmu_fixup_shiftx: adjust the 6-bit shift-count immediate of the
 * shift instruction at %o0 by the signed delta in %o4, then flush.
 */
ENTRY_NP(sfmmu_fixup_shiftx)
ld [%o0], %o3 ! %o3 = instruction to patch
and %o3, 0x3f, %o2 ! %o2 = existing imm value
add %o2, %o4, %o2 ! %o2 = new imm value
andn %o3, 0x3f, %o3 ! clear old imm value
and %o2, 0x3f, %o2 ! truncate new imm value
or %o3, %o2, %o3 ! set new imm value
st %o3, [%o0] ! store updated instruction
retl
flush %o0
SET_SIZE(sfmmu_fixup_shiftx)
/*
 * sfmmu_fixup_mmu_asi: scan %o3 instructions starting at %o0 and, for
 * every ldda instruction found, or the pre-shifted imm_asi field in %o1
 * into it.  Non-ldda instructions are left untouched.
 *   %o0 = first instruction, %o1 = asi << 5, %o3 = instruction count
 * Clobbers %o2, %o5.
 */
ENTRY_NP(sfmmu_fixup_mmu_asi)
1: ldsw [%o0], %o2 ! load instruction to %o2
brgez,pt %o2, 2f                        ! bit 31 clear: not a load/store format
srl %o2, 30, %o5
btst 1, %o5 ! test bit 30; skip if not set
bz,pt %icc, 2f
sllx %o2, 39, %o5 ! bit 24 -> bit 63
srlx %o5, 58, %o5 ! isolate op3 part of opcode
xor %o5, 0x13, %o5 ! 01 0011 binary == ldda
brnz,pt %o5, 2f ! skip if not a match
or %o2, %o1, %o2 ! or in imm_asi
st %o2, [%o0] ! write patched instruction
2: dec %o3
brnz,a,pt %o3, 1b ! loop until we're done
add %o0, I_SIZE, %o0
retl
flush %o0
SET_SIZE(sfmmu_fixup_mmu_asi)
/*
 * sfmmu_patch_mmu_asi: patch the imm_asi of the ldda instructions in the
 * four kernel TLB-miss handlers (dktsb, dktsb4m, iktsb, iktsb4m) to
 * ASI_QUAD_LDD_PHYS, or ASI_NQUAD_LD when %o0 (ktsb_phys) is zero on
 * sun4u.  On sun4v a zero %o0 panics (sfmmu_panic11).  Leaf-style:
 * the return pc is preserved across the calls in %o4.
 *   %o0 = ktsb_phys flag
 */
ENTRY_NP(sfmmu_patch_mmu_asi)
mov %o7, %o4 ! save return pc in %o4
mov ASI_QUAD_LDD_PHYS, %o3 ! set QUAD_LDD_PHYS by default
#ifdef sun4v
brnz,pt %o0, do_patch
nop
sethi %hi(sfmmu_panic11), %o0
call panic
or %o0, %lo(sfmmu_panic11), %o0
do_patch:
#else
movrz %o0, ASI_NQUAD_LD, %o3            ! virtual TSB: use nucleus quad load
#endif
sll %o3, 5, %o1 ! imm_asi offset
mov 6, %o3 ! number of instructions
sethi %hi(dktsb), %o0 ! to search
call sfmmu_fixup_mmu_asi ! patch kdtlb miss
or %o0, %lo(dktsb), %o0
mov 6, %o3 ! number of instructions
sethi %hi(dktsb4m), %o0 ! to search
call sfmmu_fixup_mmu_asi ! patch kdtlb4m miss
or %o0, %lo(dktsb4m), %o0
mov 6, %o3 ! number of instructions
sethi %hi(iktsb), %o0 ! to search
call sfmmu_fixup_mmu_asi ! patch kitlb miss
or %o0, %lo(iktsb), %o0
mov 6, %o3 ! number of instructions
sethi %hi(iktsb4m), %o0 ! to search
call sfmmu_fixup_mmu_asi ! patch kitlb4m miss
or %o0, %lo(iktsb4m), %o0
mov %o4, %o7 ! restore return pc -- leaf
retl
nop
SET_SIZE(sfmmu_patch_mmu_asi)
/*
 * sfmmu_patch_ktsb: boot-time patching of the kernel TSB parameters
 * (base addresses and size codes for the 8K and 4M kernel TSBs) into the
 * TLB-miss and protection-fault handlers.  Uses the physical bases
 * (ktsb_pbase/ktsb4m_pbase) when ktsb_phys is set, else the virtual
 * bases.  Runs in its own register window (save/restore).
 */
ENTRY_NP(sfmmu_patch_ktsb)
save %sp, -SA(MINFRAME), %sp
set ktsb_phys, %o1
ld [%o1], %o4                           ! %o4 = ktsb_phys flag
set ktsb_base, %o5
set ktsb4m_base, %l1
brz,pt %o4, 1f                          ! virtual TSB addressing?
nop
set ktsb_pbase, %o5                     ! physical addressing: use pbase
set ktsb4m_pbase, %l1
1:
! patch the 8K TSB size code into the itlb/dtlb miss handlers
sethi %hi(ktsb_szcode), %o1
ld [%o1 + %lo(ktsb_szcode)], %o1
sethi %hi(iktsb), %o0
call sfmmu_fix_ktlb_traptable
or %o0, %lo(iktsb), %o0
sethi %hi(dktsb), %o0
call sfmmu_fix_ktlb_traptable
or %o0, %lo(dktsb), %o0
! patch the 4M TSB size code
sethi %hi(ktsb4m_szcode), %o1
ld [%o1 + %lo(ktsb4m_szcode)], %o1
sethi %hi(iktsb4m), %o0
call sfmmu_fix_ktlb_traptable
or %o0, %lo(iktsb4m), %o0
sethi %hi(dktsb4m), %o0
call sfmmu_fix_ktlb_traptable
or %o0, %lo(dktsb4m), %o0
#ifndef sun4v
mov ASI_N, %o2
movrnz %o4, ASI_MEM, %o2 ! setup kernel 32bit ASI to patch
mov %o2, %o4 ! sfmmu_fixup_or needs this in %o4
sethi %hi(tsb_kernel_patch_asi), %o0
call sfmmu_fixup_or
or %o0, %lo(tsb_kernel_patch_asi), %o0
#endif
! patch the 8K TSB base address into the setx templates
ldx [%o5], %o4 ! load ktsb base addr (VA or PA)
sethi %hi(dktsbbase), %o0
call sfmmu_fixup_setx ! patch value of ktsb base addr
or %o0, %lo(dktsbbase), %o0
sethi %hi(iktsbbase), %o0
call sfmmu_fixup_setx ! patch value of ktsb base addr
or %o0, %lo(iktsbbase), %o0
sethi %hi(sfmmu_kprot_patch_ktsb_base), %o0
call sfmmu_fixup_setx ! patch value of ktsb base addr
or %o0, %lo(sfmmu_kprot_patch_ktsb_base), %o0
#ifdef sun4v
sethi %hi(sfmmu_dslow_patch_ktsb_base), %o0
call sfmmu_fixup_setx ! patch value of ktsb base addr
or %o0, %lo(sfmmu_dslow_patch_ktsb_base), %o0
#endif
! patch the 4M TSB base address
ldx [%l1], %o4 ! load ktsb4m base addr (VA or PA)
sethi %hi(dktsb4mbase), %o0
call sfmmu_fixup_setx ! patch value of ktsb4m base addr
or %o0, %lo(dktsb4mbase), %o0
sethi %hi(iktsb4mbase), %o0
call sfmmu_fixup_setx ! patch value of ktsb4m base addr
or %o0, %lo(iktsb4mbase), %o0
sethi %hi(sfmmu_kprot_patch_ktsb4m_base), %o0
call sfmmu_fixup_setx ! patch value of ktsb4m base addr
or %o0, %lo(sfmmu_kprot_patch_ktsb4m_base), %o0
#ifdef sun4v
sethi %hi(sfmmu_dslow_patch_ktsb4m_base), %o0
call sfmmu_fixup_setx ! patch value of ktsb4m base addr
or %o0, %lo(sfmmu_dslow_patch_ktsb4m_base), %o0
#endif
! patch the size codes into the protection-fault handlers
set ktsb_szcode, %o4
ld [%o4], %o4
sethi %hi(sfmmu_kprot_patch_ktsb_szcode), %o0
call sfmmu_fixup_or ! patch value of ktsb_szcode
or %o0, %lo(sfmmu_kprot_patch_ktsb_szcode), %o0
#ifdef sun4v
sethi %hi(sfmmu_dslow_patch_ktsb_szcode), %o0
call sfmmu_fixup_or ! patch value of ktsb_szcode
or %o0, %lo(sfmmu_dslow_patch_ktsb_szcode), %o0
#endif
set ktsb4m_szcode, %o4
ld [%o4], %o4
sethi %hi(sfmmu_kprot_patch_ktsb4m_szcode), %o0
call sfmmu_fixup_or ! patch value of ktsb4m_szcode
or %o0, %lo(sfmmu_kprot_patch_ktsb4m_szcode), %o0
#ifdef sun4v
sethi %hi(sfmmu_dslow_patch_ktsb4m_szcode), %o0
call sfmmu_fixup_or ! patch value of ktsb4m_szcode
or %o0, %lo(sfmmu_dslow_patch_ktsb4m_szcode), %o0
#endif
ret
restore
SET_SIZE(sfmmu_patch_ktsb)
/*
 * sfmmu_kpm_patch_tlbm: no TLB-miss handler patching is needed for kpm
 * on this platform -- deliberate no-op kept for interface compatibility.
 */
ENTRY_NP(sfmmu_kpm_patch_tlbm)
retl
nop
SET_SIZE(sfmmu_kpm_patch_tlbm)
/*
 * sfmmu_kpm_patch_tsbm: overwrite the instruction at
 * dktsb4m_kpmcheck_small with a nop, disabling that kpm check in the
 * 4M dtlb TSB-miss path.
 */
ENTRY_NP(sfmmu_kpm_patch_tsbm)
set dktsb4m_kpmcheck_small, %o0
MAKE_NOP_INSTR(%o1)
st %o1, [%o0]
flush %o0
retl
nop
SET_SIZE(sfmmu_kpm_patch_tsbm)
/*
 * sfmmu_patch_utsb: patch the user-TSB virtual base addresses
 * (utsb_vabase / utsb4m_vabase) and the TSB slab shift/mask values into
 * the user TLB-miss and protection handlers.  Not needed (no-op) when
 * user TSBs are accessed physically (UTSB_PHYS).
 */
ENTRY_NP(sfmmu_patch_utsb)
#ifdef UTSB_PHYS
retl
nop
#else
save %sp, -SA(MINFRAME), %sp
! patch 1st-TSB base address into three handlers
set utsb_vabase, %o1
ldx [%o1], %o4
sethi %hi(sfmmu_uprot_get_1st_tsbe_ptr), %o0
call sfmmu_fixup_setx
or %o0, %lo(sfmmu_uprot_get_1st_tsbe_ptr), %o0
sethi %hi(sfmmu_uitlb_get_1st_tsbe_ptr), %o0
call sfmmu_fixup_setx
or %o0, %lo(sfmmu_uitlb_get_1st_tsbe_ptr), %o0
sethi %hi(sfmmu_udtlb_get_1st_tsbe_ptr), %o0
call sfmmu_fixup_setx
or %o0, %lo(sfmmu_udtlb_get_1st_tsbe_ptr), %o0
! patch 2nd-TSB base address
set utsb4m_vabase, %o1
ldx [%o1], %o4
sethi %hi(sfmmu_uprot_get_2nd_tsb_base), %o0
call sfmmu_fixup_setx
or %o0, %lo(sfmmu_uprot_get_2nd_tsb_base), %o0
sethi %hi(sfmmu_uitlb_get_2nd_tsb_base), %o0
call sfmmu_fixup_setx
or %o0, %lo(sfmmu_uitlb_get_2nd_tsb_base), %o0
sethi %hi(sfmmu_udtlb_get_2nd_tsb_base), %o0
call sfmmu_fixup_setx
or %o0, %lo(sfmmu_udtlb_get_2nd_tsb_base), %o0
! if the slab shift differs from MMU_PAGESHIFT4M, adjust the shift
! immediates at the resv-offset patch points (two instructions each)
set tsb_slab_shift, %o1
set MMU_PAGESHIFT4M, %o4
lduw [%o1], %o3
subcc %o4, %o3, %o4                     ! %o4 = signed shift delta
bz,pt %icc, 1f                          ! no delta: skip shift patching
sethi %hi(sfmmu_tsb_1st_resv_offset), %o0
call sfmmu_fixup_shiftx
or %o0, %lo(sfmmu_tsb_1st_resv_offset), %o0
call sfmmu_fixup_shiftx
add %o0, I_SIZE, %o0
sethi %hi(sfmmu_tsb_2nd_resv_offset), %o0
call sfmmu_fixup_shiftx
or %o0, %lo(sfmmu_tsb_2nd_resv_offset), %o0
call sfmmu_fixup_shiftx
add %o0, I_SIZE, %o0
1:
! patch the slab VA mask
set tsb_slab_mask, %o1
ldx [%o1], %o4
sethi %hi(sfmmu_tsb_1st_tsbreg_vamask), %o0
call sfmmu_fixup_or
or %o0, %lo(sfmmu_tsb_1st_tsbreg_vamask), %o0
sethi %hi(sfmmu_tsb_2nd_tsbreg_vamask), %o0
call sfmmu_fixup_or
or %o0, %lo(sfmmu_tsb_2nd_tsbreg_vamask), %o0
ret
restore
#endif
SET_SIZE(sfmmu_patch_utsb)
/*
 * sfmmu_patch_shctx: enable shared-context support by patching the
 * cpu-mondo hook to a "jmp %g5" (plus a nop in its delay slot) and
 * nop'ing out the six instructions at sfmmu_shctx_user_rtt_patch.
 * On sun4u shared contexts are not supported, so this is a no-op.
 */
ENTRY_NP(sfmmu_patch_shctx)
#ifdef sun4u
retl
nop
#else
set sfmmu_shctx_cpu_mondo_patch, %o0
MAKE_JMP_INSTR(5, %o1, %o2) ! jmp %g5
st %o1, [%o0]
flush %o0
MAKE_NOP_INSTR(%o1)
add %o0, I_SIZE, %o0 ! next instr
st %o1, [%o0]
flush %o0
set sfmmu_shctx_user_rtt_patch, %o0
st %o1, [%o0] ! nop 1st instruction
flush %o0
add %o0, I_SIZE, %o0
st %o1, [%o0] ! nop 2nd instruction
flush %o0
add %o0, I_SIZE, %o0
st %o1, [%o0] ! nop 3rd instruction
flush %o0
add %o0, I_SIZE, %o0
st %o1, [%o0] ! nop 4th instruction
flush %o0
add %o0, I_SIZE, %o0
st %o1, [%o0] ! nop 5th instruction
flush %o0
add %o0, I_SIZE, %o0
st %o1, [%o0] ! nop 6th instruction
retl
flush %o0
#endif
SET_SIZE(sfmmu_patch_shctx)
/*
 * sfmmu_load_tsbe: insert a TTE into a TSB entry with interrupts
 * disabled around the update.
 *   %o0 = TSB entry pointer
 *   %o1 = tag target
 *   %o2 = virtual address of the TTE to load
 *   %o3 = flag for SETUP_TSB_ASI (physical vs virtual TSB access)
 */
ENTRY_NP(sfmmu_load_tsbe)
rdpr %pstate, %o5
#ifdef DEBUG
PANIC_IF_INTR_DISABLED_PSTR(%o5, sfmmu_di_l2, %g1)
#endif
wrpr %o5, PSTATE_IE, %pstate            ! disable interrupts
SETUP_TSB_ASI(%o3, %g3)
TSB_UPDATE(%o0, %o2, %o1, %g1, %g2, locked_tsb_l8)
wrpr %g0, %o5, %pstate                  ! restore interrupt state
retl
membar #StoreStore|#StoreLoad
SET_SIZE(sfmmu_load_tsbe)
/*
 * sfmmu_unload_tsbe: invalidate a TSB entry if its tag matches.
 *   %o0 = TSB entry pointer, %o1 = tag,
 *   %o2 = flag for SETUP_TSB_ASI (physical vs virtual TSB access)
 */
ENTRY(sfmmu_unload_tsbe)
SETUP_TSB_ASI(%o2, %g1)
TSB_INVALIDATE(%o0, %o1, %g1, %o2, %o3, unload_tsbe)
retl
membar #StoreStore|#StoreLoad
SET_SIZE(sfmmu_unload_tsbe)
/*
 * sfmmu_kpm_load_tsb: insert a TTE into the kpm TSB for a kpm vaddr,
 * with interrupts disabled around the update.
 *   %o0 = vaddr, %o1 = ttep (virtual address of TTE), %o2 = vpshift
 * On sun4u, %asi is set to ASI_MEM or ASI_N depending on ktsb_phys.
 */
ENTRY_NP(sfmmu_kpm_load_tsb)
rdpr %pstate, %o5 ! %o5 = saved pstate
#ifdef DEBUG
PANIC_IF_INTR_DISABLED_PSTR(%o5, sfmmu_di_l3, %g1)
#endif
wrpr %o5, PSTATE_IE, %pstate ! disable interrupts
#ifndef sun4v
sethi %hi(ktsb_phys), %o4
mov ASI_N, %o3
ld [%o4 + %lo(ktsb_phys)], %o4
movrnz %o4, ASI_MEM, %o3                ! physical TSB: use ASI_MEM
mov %o3, %asi
#endif
mov %o0, %g1 ! %g1 = vaddr
GET_KPM_TSBE_POINTER(%o2, %g2, %g1, %o3, %o4)
srlx %o0, TTARGET_VA_SHIFT, %g1; ! %g1 = tag target
TSB_UPDATE(%g2, %o1, %g1, %o3, %o4, locked_tsb_l9)
wrpr %g0, %o5, %pstate ! enable interrupts
retl
membar #StoreStore|#StoreLoad
SET_SIZE(sfmmu_kpm_load_tsb)
/*
 * sfmmu_kpm_unload_tsb: invalidate the kpm TSB entry for a kpm vaddr.
 *   %o0 = vaddr, %o1 = vpshift
 * On sun4u, %asi is set to ASI_MEM or ASI_N depending on ktsb_phys.
 */
ENTRY_NP(sfmmu_kpm_unload_tsb)
#ifndef sun4v
sethi %hi(ktsb_phys), %o4
mov ASI_N, %o3
ld [%o4 + %lo(ktsb_phys)], %o4
movrnz %o4, ASI_MEM, %o3                ! physical TSB: use ASI_MEM
mov %o3, %asi
#endif
mov %o0, %g1 ! %g1 = vaddr
GET_KPM_TSBE_POINTER(%o1, %g2, %g1, %o3, %o4)
srlx %o0, TTARGET_VA_SHIFT, %g1; ! %g1 = tag target
TSB_INVALIDATE(%g2, %g1, %o3, %o4, %o1, kpm_tsbinval)
retl
membar #StoreStore|#StoreLoad
SET_SIZE(sfmmu_kpm_unload_tsb)
/*
 * sfmmu_ttetopfn: return (in %o0) the pfn for the TTE at *%o0 and the
 * vaddr in %o1, via the TTETOPFN macro.
 */
ENTRY_NP(sfmmu_ttetopfn)
ldx [%o0], %g1
TTETOPFN(%g1, %o1, sfmmu_ttetopfn_l1, %g2, %g3, %g4)
retl
mov %g1, %o0
SET_SIZE(sfmmu_ttetopfn)
/*
 * Hash-search statistics macros.  Under DEBUG/SFMMU_STAT_GATHER they
 * bump the kernel or user hash search/link counters in sfmmu_global_stat,
 * choosing the counter by comparing hatid against the kernel hat cached
 * in the tsbmiss area; otherwise they expand to nothing.  The counter
 * updates are non-atomic (statistics only).
 */
#if defined(DEBUG) || defined(SFMMU_STAT_GATHER)
#define HAT_HSEARCH_DBSTAT(hatid, tsbarea, tmp1, tmp2) \
ldn [tsbarea + TSBMISS_KHATID], tmp1 ;\
mov HATSTAT_KHASH_SEARCH, tmp2 ;\
cmp tmp1, hatid ;\
movne %ncc, HATSTAT_UHASH_SEARCH, tmp2 ;\
set sfmmu_global_stat, tmp1 ;\
add tmp1, tmp2, tmp1 ;\
ld [tmp1], tmp2 ;\
inc tmp2 ;\
st tmp2, [tmp1]
#define HAT_HLINK_DBSTAT(hatid, tsbarea, tmp1, tmp2) \
ldn [tsbarea + TSBMISS_KHATID], tmp1 ;\
mov HATSTAT_KHASH_LINKS, tmp2 ;\
cmp tmp1, hatid ;\
movne %ncc, HATSTAT_UHASH_LINKS, tmp2 ;\
set sfmmu_global_stat, tmp1 ;\
add tmp1, tmp2, tmp1 ;\
ld [tmp1], tmp2 ;\
inc tmp2 ;\
st tmp2, [tmp1]
#else
#define HAT_HSEARCH_DBSTAT(hatid, tsbarea, tmp1, tmp2)
#define HAT_HLINK_DBSTAT(hatid, tsbarea, tmp1, tmp2)
#endif
/* Increment a 32-bit counter at sfmmu_global_stat + statname. */
#define HAT_GLOBAL_STAT(statname, tmp1, tmp2) \
sethi %hi(sfmmu_global_stat), tmp1 ;\
add tmp1, statname, tmp1 ;\
ld [tmp1 + %lo(sfmmu_global_stat)], tmp2 ;\
inc tmp2 ;\
st tmp2, [tmp1 + %lo(sfmmu_global_stat)]
/* Increment a 32-bit per-cpu counter in the tsbmiss area. */
#define HAT_PERCPU_STAT32(tsbarea, stat, tmp1) \
ld [tsbarea + stat], tmp1 ;\
inc tmp1 ;\
st tmp1, [tsbarea + stat]
/* Increment a 16-bit per-cpu counter in the tsbmiss area. */
#define HAT_PERCPU_STAT16(tsbarea, stat, tmp1) \
lduh [tsbarea + stat], tmp1 ;\
inc tmp1 ;\
stuh tmp1, [tsbarea + stat]
/*
 * KPM_TLBMISS_STAT_INCR(tagacc, val, tsbma, tmp1, label)
 * Optional kpm dtlb-miss statistic.  Skips to 'label' unless tagacc is a
 * kernel address (brgez: sign bit clear means user -- assumes a kernel
 * VA has the sign bit set; TODO confirm against the platform VA layout)
 * and lies within [KPMTSBM_VBASE, KPMTSBM_VEND) of this CPU's kpmtsbm
 * area, in which case the per-CPU KPMTSBM_DTLBMISS counter is bumped.
 * Expands to nothing when KPM_TLBMISS_STATS_GATHER is not defined.
 */
#if defined(KPM_TLBMISS_STATS_GATHER)
#define KPM_TLBMISS_STAT_INCR(tagacc, val, tsbma, tmp1, label) \
brgez tagacc, label ;\
nop ;\
CPU_INDEX(tmp1, tsbma) ;\
sethi %hi(kpmtsbm_area), tsbma ;\
sllx tmp1, KPMTSBM_SHIFT, tmp1 ;\
or tsbma, %lo(kpmtsbm_area), tsbma ;\
add tsbma, tmp1, tsbma ;\
;\
ldx [tsbma + KPMTSBM_VBASE], val ;\
cmp tagacc, val ;\
blu,pn %xcc, label ;\
ldx [tsbma + KPMTSBM_VEND], tmp1 ;\
cmp tagacc, tmp1 ;\
bgeu,pn %xcc, label ;\
lduw [tsbma + KPMTSBM_DTLBMISS], val ;\
inc val ;\
st val, [tsbma + KPMTSBM_DTLBMISS] ;\
label:
#else
#define KPM_TLBMISS_STAT_INCR(tagacc, val, tsbma, tmp1, label)
#endif
/*
 * PTL1_PANIC_DEBUG: a debug trigger word; when set nonzero, the next
 * trip through sfmmu_pagefault forces a ptl1_panic (see its use below).
 */
#ifdef PTL1_PANIC_DEBUG
.seg ".data"
.global test_ptl1_panic
test_ptl1_panic:
.word 0
.align 8
.seg ".text"
.align 4
#endif
/*
 * sfmmu_pagefault: entered from the TSB-miss handlers when a mapping
 * cannot be resolved in the hash.  Determines the software trap type
 * (%g3) from the hardware trap type in %tt, selects the matching tag
 * access value (%g2: instruction vs data), and vectors into sys_trap
 * with trap() as the C handler and %g4 = -1 (PIL selection is left to
 * sys_trap).  Runs on the alternate globals at trap level.
 */
ENTRY_NP(sfmmu_pagefault)
SET_GL_REG(1)
USE_ALTERNATE_GLOBALS(%g5)
GET_MMU_BOTH_TAGACC(%g5 , %g2 , %g6, %g4)
rdpr %tt, %g6
cmp %g6, FAST_IMMU_MISS_TT
be,a,pn %icc, 1f
mov T_INSTR_MMU_MISS, %g3
cmp %g6, T_INSTR_MMU_MISS
be,a,pn %icc, 1f
mov T_INSTR_MMU_MISS, %g3
mov %g5, %g2                            ! data fault: use D-tag access
mov T_DATA_PROT, %g3                    ! default: data protection fault
cmp %g6, FAST_DMMU_MISS_TT
move %icc, T_DATA_MMU_MISS, %g3
cmp %g6, T_DATA_MMU_MISS
move %icc, T_DATA_MMU_MISS, %g3
#ifdef PTL1_PANIC_DEBUG
/* debug hook: force a ptl1_panic once if test_ptl1_panic was set */
sethi %hi(test_ptl1_panic), %g4
ld [%g4 + %lo(test_ptl1_panic)], %g1
st %g0, [%g4 + %lo(test_ptl1_panic)]
cmp %g1, %g0
bne,a,pn %icc, ptl1_panic
or %g0, PTL1_BAD_DEBUG, %g1
#endif
1:
HAT_GLOBAL_STAT(HATSTAT_PAGEFAULT, %g6, %g4)
sethi %hi(trap), %g1
or %g1, %lo(trap), %g1
2:
ba,pt %xcc, sys_trap
mov -1, %g4
SET_SIZE(sfmmu_pagefault)
/*
 * sfmmu_mmu_trap: like sfmmu_pagefault, but vectors into sys_trap with
 * sfmmu_tsbmiss_exception() as the handler (used when the miss must be
 * serviced at C level rather than reported as a fault).  %g2 = tag
 * access, %g3 = software trap type, %g4 = -1.
 */
ENTRY_NP(sfmmu_mmu_trap)
SET_GL_REG(1)
USE_ALTERNATE_GLOBALS(%g5)
GET_MMU_BOTH_TAGACC(%g5 , %g2 , %g4, %g6)
rdpr %tt, %g6
cmp %g6, FAST_IMMU_MISS_TT
be,a,pn %icc, 1f
mov T_INSTR_MMU_MISS, %g3
cmp %g6, T_INSTR_MMU_MISS
be,a,pn %icc, 1f
mov T_INSTR_MMU_MISS, %g3
mov %g5, %g2                            ! data fault: use D-tag access
mov T_DATA_PROT, %g3                    ! default: data protection fault
cmp %g6, FAST_DMMU_MISS_TT
move %icc, T_DATA_MMU_MISS, %g3
cmp %g6, T_DATA_MMU_MISS
move %icc, T_DATA_MMU_MISS, %g3
1:
sethi %hi(sfmmu_tsbmiss_exception), %g1
or %g1, %lo(sfmmu_tsbmiss_exception), %g1
ba,pt %xcc, sys_trap
mov -1, %g4
SET_SIZE(sfmmu_mmu_trap)
/*
 * sfmmu_suspend_tl: taken when a TSB miss hits a suspended TTE.
 * Classifies the trap type into %g3 and vectors into sys_trap with
 * sfmmu_tsbmiss_suspended() as the handler at PIL_15.
 */
ENTRY_NP(sfmmu_suspend_tl)
SET_GL_REG(1)
USE_ALTERNATE_GLOBALS(%g5)
GET_MMU_BOTH_TAGACC(%g5 , %g2 , %g4, %g3)
rdpr %tt, %g6
cmp %g6, FAST_IMMU_MISS_TT
be,a,pn %icc, 1f
mov T_INSTR_MMU_MISS, %g3
mov %g5, %g2                            ! data fault: use D-tag access
cmp %g6, FAST_DMMU_MISS_TT
move %icc, T_DATA_MMU_MISS, %g3
movne %icc, T_DATA_PROT, %g3
1:
sethi %hi(sfmmu_tsbmiss_suspended), %g1
or %g1, %lo(sfmmu_tsbmiss_suspended), %g1
ba,pt %xcc, sys_trap
mov PIL_15, %g4
SET_SIZE(sfmmu_suspend_tl)
/*
 * sfmmu_window_trap: handle an MMU miss taken from inside a register
 * window spill/fill handler (nested trap).  If the faulting %tpc lies in
 * a recognized window-trap handler, redirect %tnpc to that handler's
 * fault entry (WTRAP_FAULTOFF past its aligned base) so the fault is
 * retried safely, then classify the underlying trap and 'done'.  On
 * sun4v a fault inside the rtt fill region is redirected to
 * fault_rtt_fn1 instead.  Unrecognized cases fall through to the CPU
 * tl1-handler hook or ptl1_panic.
 */
ENTRY_NP(sfmmu_window_trap)
rdpr %tpc, %g1
#ifdef sun4v
#ifdef DEBUG
/* on sun4v we should be on global set 1 here; panic otherwise */
rdpr %tstate, %g4
srlx %g4, TSTATE_GL_SHIFT, %g4
and %g4, TSTATE_GL_MASK, %g4
cmp %g4, 1
bne,a,pn %icc, ptl1_panic
mov PTL1_BAD_WTRAP, %g1
#endif
/* fault inside the user rtt fill region? redirect to fault_rtt_fn1 */
set rtt_fill_start, %g4
cmp %g1, %g4
blu,pn %xcc, 6f
.empty
set rtt_fill_end, %g4
cmp %g1, %g4
bgeu,pn %xcc, 6f
nop
set fault_rtt_fn1, %g1
wrpr %g0, %g1, %tnpc
ba,a 7f
6:
! must save this trap level before descending trap stack
! no need to save %tnpc, either overwritten or discarded
! already got it: rdpr %tpc, %g1
rdpr %tstate, %g6
rdpr %tt, %g7
! trap level saved, go get underlying trap type
rdpr %tl, %g5
sub %g5, 1, %g3
wrpr %g3, %tl
rdpr %tt, %g2
wrpr %g5, %tl
! restore saved trap level
wrpr %g1, %tpc
wrpr %g6, %tstate
wrpr %g7, %tt
#else
/* sun4u: peek at the previous trap level's %tt */
rdpr %tl, %g5
sub %g5, 1, %g3
wrpr %g3, %tl
rdpr %tt, %g2
wrpr %g5, %tl
#endif
/* was the previous trap a window spill/fill trap? */
and %g2, WTRAP_TTMASK, %g4
cmp %g4, WTRAP_TYPE
bne,pn %xcc, 1f
nop
/* and did we fault from within the trap table? */
set trap_table, %g4
cmp %g1, %g4
blt,pn %xcc, 1f
.empty
set etrap_table, %g4
cmp %g1, %g4
bge,pn %xcc, 1f
.empty
/* retry at the handler's fault entry point */
andn %g1, WTRAP_ALIGN, %g1
add %g1, WTRAP_FAULTOFF, %g1
wrpr %g0, %g1, %tnpc
7:
/* classify the fault for the redirected handler: %g5 = trap type */
SET_GL_REG(1)
USE_ALTERNATE_GLOBALS(%g5)
GET_MMU_D_TAGACC(%g6 , %g5 )
rdpr %tt, %g7
cmp %g7, FAST_IMMU_MISS_TT
be,a,pn %icc, ptl1_panic
mov PTL1_BAD_WTRAP, %g1
cmp %g7, T_INSTR_MMU_MISS
be,a,pn %icc, ptl1_panic
mov PTL1_BAD_WTRAP, %g1
mov T_DATA_PROT, %g5
cmp %g7, FAST_DMMU_MISS_TT
move %icc, T_DATA_MMU_MISS, %g5
cmp %g7, T_DATA_MMU_MISS
move %icc, T_DATA_MMU_MISS, %g5
! XXXQ AGS re-check out this one
done
1:
/* not a recognized window trap: give the CPU tl1 handler hook a shot */
CPU_PADDR(%g1, %g4)
add %g1, CPU_TL1_HDLR, %g1
lda [%g1]ASI_MEM, %g4
brnz,a,pt %g4, sfmmu_mmu_trap
sta %g0, [%g1]ASI_MEM
ba,pt %icc, ptl1_panic
mov PTL1_BAD_TRAP, %g1
SET_SIZE(sfmmu_window_trap)
/*
 * sfmmu_kpm_exception: a kpm address could not be resolved.  If the CPU
 * is in DTrace no-fault mode, record CPU_DTRACE_BADADDR plus the fault
 * address in the cpu_core flags and return via 'done' without trapping.
 * Otherwise report a T_DATA_MMU_MISS through sys_trap/trap().
 */
ENTRY_NP(sfmmu_kpm_exception)
CPU_INDEX(%g1, %g2)
set cpu_core, %g2
sllx %g1, CPU_CORE_SHIFT, %g1
add %g1, %g2, %g1                       ! %g1 = &cpu_core[cpuid]
lduh [%g1 + CPUC_DTRACE_FLAGS], %g2
andcc %g2, CPU_DTRACE_NOFAULT, %g0
bz 0f                                   ! not in dtrace no-fault mode
or %g2, CPU_DTRACE_BADADDR, %g2
stuh %g2, [%g1 + CPUC_DTRACE_FLAGS]
GET_MMU_D_ADDR(%g3, %g4)
stx %g3, [%g1 + CPUC_DTRACE_ILLVAL]
done                                    ! resume without faulting
0:
TSTAT_CHECK_TL1(1f, %g1, %g2)
1:
SET_GL_REG(1)
USE_ALTERNATE_GLOBALS(%g5)
GET_MMU_D_TAGACC(%g2 , %g4 )
mov T_DATA_MMU_MISS, %g3
sethi %hi(trap), %g1
or %g1, %lo(trap), %g1
ba,pt %xcc, sys_trap
mov -1, %g4
SET_SIZE(sfmmu_kpm_exception)
/* ISM_CHECK reads ism_map.ism_seg with the map pointer itself. */
#if (IMAP_SEG != 0)
#error - ism_map->ism_seg offset is not zero
#endif
/*
 * ISM_CHECK(tagacc, tsbmiss, ismseg, ismhat, tmp1, tmp2, tmp3,
 *           label, ismhit)
 * Walk the ism_blk chain hanging off the tsbmiss area (physical
 * addresses, accessed via ASI_MEM) looking for an ISM segment that
 * covers the vaddr in tagacc.  On a hit, branch to 'ismhit' with ismhat
 * pointing at the map's IMAP_ISMHAT field; otherwise fall through at
 * label##3 with the registers clobbered.  A zero ism_seg terminates a
 * block's map array early; IBLK_NEXTPA (negative when absent -- brgez
 * continues only on a valid PA) links to the next block.
 *
 * FIX(review): the original parameter list was missing the comma between
 * tmp3 and label (the backslash continuation produced "tmp3 label,"),
 * which is not a valid macro parameter list; the comma is restored here
 * with no change to the expansion.
 */
#define ISM_CHECK(tagacc, tsbmiss, ismseg, ismhat, tmp1, tmp2, tmp3, \
label, ismhit) \
ldx [tsbmiss + TSBMISS_ISMBLKPA], tmp1 ;\
brlz,pt tmp1, label##3 ;\
add tmp1, IBLK_MAPS, ismhat ;\
label##1: ;\
ldxa [ismhat]ASI_MEM, ismseg ;\
mov tmp1, tmp3 ;\
label##2: ;\
brz,pt ismseg, label##3 ;\
add ismhat, IMAP_VB_SHIFT, tmp1 ;\
lduba [tmp1]ASI_MEM, tmp1 ;\
srlx ismseg, tmp1, tmp2 ;\
srlx tagacc, tmp1, tmp1 ;\
sub tmp1, tmp2, tmp2 ;\
add ismhat, IMAP_SZ_MASK, tmp1 ;\
lda [tmp1]ASI_MEM, tmp1 ;\
and ismseg, tmp1, tmp1 ;\
cmp tmp2, tmp1 ;\
blu,a,pt %xcc, ismhit ;\
add ismhat, IMAP_ISMHAT, ismhat ;\
;\
add ismhat, ISM_MAP_SZ, ismhat ;\
add tmp3, (IBLK_MAPS + ISM_MAP_SLOTS * ISM_MAP_SZ), tmp1 ;\
cmp ismhat, tmp1 ;\
bl,pt %xcc, label##2 ;\
ldxa [ismhat]ASI_MEM, ismseg ;\
;\
add tmp3, IBLK_NEXTPA, tmp1 ;\
ldxa [tmp1]ASI_MEM, tmp1 ;\
brgez,pt tmp1, label##1 ;\
add tmp1, IBLK_MAPS, ismhat ;\
label##3:
/*
 * HMEHASH_FUNC_ASM(tagacc, hatid, tsbarea, hmeshift, hmebp, vapg,
 *                  label, tmp1, tmp2)
 * Compute the hme hash bucket address for (hatid, vaddr).  The context
 * bits of tagacc (shifted into the sign position) select the user hash
 * (nonzero ctx) or kernel hash.  hmebp receives
 *   hashstart + ((vaddr >> hmeshift) ^ hatid & hashsz) * HMEBUCK_SIZE.
 * vapg gets the vaddr page number (tagacc >> hmeshift).
 */
#define HMEHASH_FUNC_ASM(tagacc, hatid, tsbarea, hmeshift, hmebp, \
vapg, label, tmp1, tmp2) \
sllx tagacc, TAGACC_CTX_LSHIFT, tmp1 ;\
brnz,a,pt tmp1, label##1 ;\
ld [tsbarea + TSBMISS_UHASHSZ], hmebp ;\
ld [tsbarea + TSBMISS_KHASHSZ], hmebp ;\
ba,pt %xcc, label##2 ;\
ldx [tsbarea + TSBMISS_KHASHSTART], tmp1 ;\
label##1: ;\
ldx [tsbarea + TSBMISS_UHASHSTART], tmp1 ;\
label##2: ;\
srlx tagacc, hmeshift, vapg ;\
xor vapg, hatid, tmp2 ;\
and tmp2, hmebp, hmebp ;\
mulx hmebp, HMEBUCK_SIZE, hmebp ;\
add hmebp, tmp1, hmebp
/*
 * MAKE_HASHTAG(vapg, hatid, hmeshift, hashno, hblktag)
 * Build an hmeblk tag: the base vaddr (vapg << hmeshift) or'd with the
 * rehash number shifted into the HTAG_REHASH field.
 */
#define MAKE_HASHTAG(vapg, hatid, hmeshift, hashno, hblktag) \
sllx vapg, hmeshift, vapg ;\
mov hashno, hblktag ;\
sllx hblktag, HTAG_REHASH_SHIFT, hblktag ;\
or vapg, hblktag, hblktag
/*
 * HMEHASH_SEARCH(hmebp, hmeblktag, hatid, hmeblkpa, tsbarea, tmp1, label)
 * Walk the hash bucket's physically-linked hmeblk list looking for an
 * hmeblk whose tag and hatid both match (checked by xor'ing each and
 * or'ing the results: zero iff both equal).  Exits at label##2 with
 * hmeblkpa = matching hmeblk PA, or HMEBLK_ENDPA if not found.
 * Clobbers hmebp, tmp1.
 */
#define HMEHASH_SEARCH(hmebp, hmeblktag, hatid, hmeblkpa, tsbarea, \
tmp1, label) \
add hmebp, HMEBUCK_NEXTPA, hmeblkpa ;\
ldxa [hmeblkpa]ASI_MEM, hmeblkpa ;\
HAT_HSEARCH_DBSTAT(hatid, tsbarea, hmebp, tmp1) ;\
label##1: ;\
cmp hmeblkpa, HMEBLK_ENDPA ;\
be,pn %xcc, label##2 ;\
HAT_HLINK_DBSTAT(hatid, tsbarea, hmebp, tmp1) ;\
add hmeblkpa, HMEBLK_TAG, hmebp ;\
ldxa [hmebp]ASI_MEM, tmp1 ;\
add hmebp, CLONGSIZE, hmebp ;\
ldxa [hmebp]ASI_MEM, hmebp ;\
xor tmp1, hmeblktag, tmp1 ;\
xor hmebp, hatid, hmebp ;\
or hmebp, tmp1, hmebp ;\
brz,pn hmebp, label##2 ;\
add hmeblkpa, HMEBLK_NEXTPA, hmebp ;\
ba,pt %xcc, label##1 ;\
ldxa [hmebp]ASI_MEM, hmeblkpa ;\
label##2:
/*
 * HMEHASH_SEARCH_SHME(hmeblktag, hatid, hmeblkpa, tsbarea, tmp1, tmp2,
 *                     label)
 * Shared-hmeblk variant of HMEHASH_SEARCH: hmeblkpa enters pointing at
 * the first hmeblk on the bucket chain.  A tag match (tmp1 holds the
 * tag xor residue, i.e. the region id for a shared hmeblk) is accepted
 * only if the region id is below SFMMU_MAX_HME_REGIONS and the
 * corresponding bit is set in the tsbmiss area's shared-region bitmap
 * (TSBMISS_SHMERMAP, indexed by BT_ULSHIFT/BT_ULMASK word/bit math).
 * Exits at label##4 with hmeblkpa = match or HMEBLK_ENDPA.
 */
#define HMEHASH_SEARCH_SHME(hmeblktag, hatid, hmeblkpa, tsbarea, \
tmp1, tmp2, label) \
label##1: ;\
cmp hmeblkpa, HMEBLK_ENDPA ;\
be,pn %xcc, label##4 ;\
HAT_HLINK_DBSTAT(hatid, tsbarea, tmp1, tmp2) ;\
add hmeblkpa, HMEBLK_TAG, tmp2 ;\
ldxa [tmp2]ASI_MEM, tmp1 ;\
add tmp2, CLONGSIZE, tmp2 ;\
ldxa [tmp2]ASI_MEM, tmp2 ;\
xor tmp1, hmeblktag, tmp1 ;\
xor tmp2, hatid, tmp2 ;\
brz,pn tmp2, label##3 ;\
add hmeblkpa, HMEBLK_NEXTPA, tmp2 ;\
label##2: ;\
ba,pt %xcc, label##1 ;\
ldxa [tmp2]ASI_MEM, hmeblkpa ;\
label##3: ;\
cmp tmp1, SFMMU_MAX_HME_REGIONS ;\
bgeu,pt %xcc, label##2 ;\
add hmeblkpa, HMEBLK_NEXTPA, tmp2 ;\
and tmp1, BT_ULMASK, tmp2 ;\
srlx tmp1, BT_ULSHIFT, tmp1 ;\
sllx tmp1, CLONGSHIFT, tmp1 ;\
add tsbarea, tmp1, tmp1 ;\
ldx [tmp1 + TSBMISS_SHMERMAP], tmp1 ;\
srlx tmp1, tmp2, tmp1 ;\
btst 0x1, tmp1 ;\
bz,pn %xcc, label##2 ;\
add hmeblkpa, HMEBLK_NEXTPA, tmp2 ;\
label##4:
/* The hment index below is computed with a shift, so size must be 2^n. */
#if ((1 << SFHME_SHIFT) != SFHME_SIZE)
#error HMEBLK_TO_HMENT assumes sf_hment is power of 2 in size
#endif
/*
 * HMEBLK_TO_HMENT(vaddr, hmeblkpa, hmentoff, hmemisc, tmp1, label1)
 * Compute the offset (hmentoff) of the sf_hment for vaddr within an
 * hmeblk.  For a large-page hmeblk (nonzero size bits in HMEBLK_MISC)
 * there is a single hment at HMEBLK_HME1; for an 8K hmeblk the page
 * index within the block selects one of NHMENTS hments.  hmemisc
 * returns the HMEBLK_MISC word.
 */
#define HMEBLK_TO_HMENT(vaddr, hmeblkpa, hmentoff, hmemisc, tmp1, label1)\
add hmeblkpa, HMEBLK_MISC, hmentoff ;\
lda [hmentoff]ASI_MEM, hmemisc ;\
andcc hmemisc, HBLK_SZMASK, %g0 ;\
bnz,a,pn %icc, label1 ;\
or %g0, HMEBLK_HME1, hmentoff ;\
srl vaddr, MMU_PAGESHIFT, tmp1 ;\
and tmp1, NHMENTS - 1, tmp1 ;\
sllx tmp1, SFHME_SHIFT, tmp1 ;\
add tmp1, HMEBLK_HME1, hmentoff ;\
label1:
/*
 * GET_TTE: look up the TTE for (tagacc, hatid) at the page size implied
 * by hmeshift/hashno.  Hashes into the hme hash table, searches the
 * bucket (tagged with SFMMU_INVALID_SHMERID, i.e. non-shared), and on a
 * hit loads the TTE from the matching hment.
 *
 * In:   tagacc, hatid, tsbarea (tsbmiss area, also used as scratch),
 *       hmeshift/hashno select the page size.
 * Out on hit:   tte = TTE value, hmeblkpa = PA of the TTE within the
 *       hmeblk, hatid restored; branches to foundlabel (or to
 *       suspendlabel if the TTE has TTE_SUSPEND set).
 * Out on miss:  branches to exitlabel with tagacc restored.
 * tagacc and hatid are saved in / restored from the tsbmiss scratch
 * area because the search clobbers them.  Labels label##1..label##5
 * are consumed by this macro and the macros it invokes.
 */
#define GET_TTE(tagacc, hatid, tte, hmeblkpa, tsbarea, hmemisc, hmeshift, \
hashno, tmp, label, foundlabel, suspendlabel, exitlabel) \
;\
stn tagacc, [tsbarea + (TSBMISS_SCRATCH + TSB_TAGACC)] ;\
stn hatid, [tsbarea + (TSBMISS_SCRATCH + TSBMISS_HATID)] ;\
HMEHASH_FUNC_ASM(tagacc, hatid, tsbarea, hmeshift, tte, \
hmeblkpa, label##5, hmemisc, tmp) ;\
;\
;\
MAKE_HASHTAG(hmeblkpa, hatid, hmeshift, hashno, hmemisc) ;\
or hmemisc, SFMMU_INVALID_SHMERID, hmemisc ;\
;\
;\
stn tte, [tsbarea + (TSBMISS_SCRATCH + TSBMISS_HMEBP)] ;\
HMEHASH_SEARCH(tte, hmemisc, hatid, hmeblkpa, \
tsbarea, tagacc, label##1) ;\
;\
cmp hmeblkpa, HMEBLK_ENDPA ;\
bne,pn %xcc, label##4 ;\
ldn [tsbarea + (TSBMISS_SCRATCH + TSB_TAGACC)], tagacc ;\
ba,pt %xcc, exitlabel ;\
nop ;\
label##4: ;\
;\
HMEBLK_TO_HMENT(tagacc, hmeblkpa, hatid, hmemisc, tte, \
label##2) ;\
;\
;\
;\
add hatid, SFHME_TTE, hatid ;\
add hmeblkpa, hatid, hmeblkpa ;\
ldxa [hmeblkpa]ASI_MEM, tte ;\
ldn [tsbarea + (TSBMISS_SCRATCH + TSBMISS_HMEBP)], hatid ;\
set TTE_SUSPEND, hatid ;\
TTE_SUSPEND_INT_SHIFT(hatid) ;\
btst tte, hatid ;\
bz,pt %xcc, foundlabel ;\
ldn [tsbarea + (TSBMISS_SCRATCH + TSBMISS_HATID)], hatid ;\
;\
;\
ba,pt %xcc, suspendlabel ;\
nop
/*
 * GET_SHME_TTE: shared-region variant of GET_TTE.  Searches the hme
 * hash for a shared hmeblk via HMEHASH_SEARCH_SHME.  On a hit:
 *  - if the loaded TTE is valid (negative, brlz at label##6) the SCD
 *    shared-region map is consulted via GET_SCDSHMERMAP;
 *  - if the TTE is invalid and the hmeblk is an 8K one (HBLK_SZMASK
 *    clear), the search restarts (label##8) from the next chain entry
 *    with a rebuilt hash tag, since another region's block of the same
 *    tag may follow;
 *  - TTE_SUSPEND is checked as in GET_TTE, branching to suspendlabel.
 * On a miss, branches to exitlabel with tagacc restored.
 * Labels label##1..label##8 are consumed here and by sub-macros.
 */
#define GET_SHME_TTE(tagacc, hatid, tte, hmeblkpa, tsbarea, hmemisc, \
hmeshift, hashno, tmp, label, foundlabel, \
suspendlabel, exitlabel) \
;\
stn tagacc, [tsbarea + (TSBMISS_SCRATCH + TSB_TAGACC)] ;\
stn hatid, [tsbarea + (TSBMISS_SCRATCH + TSBMISS_HATID)] ;\
HMEHASH_FUNC_ASM(tagacc, hatid, tsbarea, hmeshift, tte, \
hmeblkpa, label##5, hmemisc, tmp) ;\
;\
;\
MAKE_HASHTAG(hmeblkpa, hatid, hmeshift, hashno, hmemisc) ;\
;\
;\
stn tte, [tsbarea + (TSBMISS_SCRATCH + TSBMISS_HMEBP)] ;\
;\
add tte, HMEBUCK_NEXTPA, hmeblkpa ;\
ldxa [hmeblkpa]ASI_MEM, hmeblkpa ;\
HAT_HSEARCH_DBSTAT(hatid, tsbarea, tagacc, tte) ;\
;\
label##8: ;\
HMEHASH_SEARCH_SHME(hmemisc, hatid, hmeblkpa, \
tsbarea, tagacc, tte, label##1) ;\
;\
cmp hmeblkpa, HMEBLK_ENDPA ;\
bne,pn %xcc, label##4 ;\
ldn [tsbarea + (TSBMISS_SCRATCH + TSB_TAGACC)], tagacc ;\
ba,pt %xcc, exitlabel ;\
nop ;\
label##4: ;\
;\
HMEBLK_TO_HMENT(tagacc, hmeblkpa, hatid, hmemisc, tte, \
label##2) ;\
;\
;\
;\
add hatid, SFHME_TTE, hatid ;\
add hmeblkpa, hatid, hmeblkpa ;\
ldxa [hmeblkpa]ASI_MEM, tte ;\
brlz,pt tte, label##6 ;\
nop ;\
btst HBLK_SZMASK, hmemisc ;\
bnz,a,pt %icc, label##7 ;\
ldn [tsbarea + (TSBMISS_SCRATCH + TSBMISS_HMEBP)], hatid ;\
;\
;\
sub hmeblkpa, hatid, hmeblkpa ;\
ldn [tsbarea + (TSBMISS_SCRATCH + TSBMISS_HATID)], hatid ;\
srlx tagacc, hmeshift, tte ;\
add hmeblkpa, HMEBLK_NEXTPA, hmeblkpa ;\
ldxa [hmeblkpa]ASI_MEM, hmeblkpa ;\
MAKE_HASHTAG(tte, hatid, hmeshift, hashno, hmemisc) ;\
ba,a,pt %xcc, label##8 ;\
label##6: ;\
GET_SCDSHMERMAP(tsbarea, hmeblkpa, hatid, hmemisc) ;\
ldn [tsbarea + (TSBMISS_SCRATCH + TSBMISS_HMEBP)], hatid ;\
label##7: ;\
set TTE_SUSPEND, hatid ;\
TTE_SUSPEND_INT_SHIFT(hatid) ;\
btst tte, hatid ;\
bz,pt %xcc, foundlabel ;\
ldn [tsbarea + (TSBMISS_SCRATCH + TSBMISS_HATID)], hatid ;\
;\
;\
ba,pt %xcc, suspendlabel ;\
nop
/*
 * sfmmu_kprot_trap: kernel fast protection trap handler entry.
 * Computes the kernel 8K and 4M TSB entry pointers for the faulting
 * address (%g2 = tag access) and falls into the common TSB miss path.
 * The TSB bases and size codes are hot-patched at the
 * sfmmu_kprot_patch_* labels (RUNTIME_PATCH placeholders).
 * Exit: %g1 = 8K TSB entry ptr, %g3 = 4M TSB entry ptr, %g6 =
 * per-CPU tsbmiss area; branches to sfmmu_tsb_miss_tt.
 */
ENTRY_NP(sfmmu_kprot_trap)
mov %g2, %g7 ! TSB pointer macro clobbers tagacc
sfmmu_kprot_patch_ktsb_base:
RUNTIME_PATCH_SETX(%g1, %g6) ! %g1 = ktsb base (hot patched)
sfmmu_kprot_patch_ktsb_szcode:
or %g0, RUNTIME_PATCH, %g3 ! ktsb_szcode (hot patched)
GET_TSBE_POINTER(MMU_PAGESHIFT, %g1, %g7, %g3, %g5)
! %g1 = First TSB entry pointer, as TSB miss handler expects
mov %g2, %g7 ! TSB pointer macro clobbers tagacc
sfmmu_kprot_patch_ktsb4m_base:
RUNTIME_PATCH_SETX(%g3, %g6) ! %g3 = ktsb4m base (hot patched)
sfmmu_kprot_patch_ktsb4m_szcode:
or %g0, RUNTIME_PATCH, %g6 ! ktsb4m_szcode (hot patched)
GET_TSBE_POINTER(MMU_PAGESHIFT4M, %g3, %g7, %g6, %g5)
! %g3 = 4M tsb entry pointer, as TSB miss handler expects
CPU_TSBMISS_AREA(%g6, %g7)
HAT_PERCPU_STAT16(%g6, TSBMISS_KPROTS, %g7) ! bump kprot stat
ba,pt %xcc, sfmmu_tsb_miss_tt
nop
/*
 * sfmmu_uprot_trap: user fast protection trap handler entry.
 * Computes the 1st (8K) and, when configured, 2nd (4M) user TSB entry
 * pointers, then joins the common miss path.  Three variants:
 * sun4v and sun4u/UTSB_PHYS read the 2nd TSB reg from a scratchpad
 * register and skip it if negative (brlz to 9f); the legacy sun4u
 * path derives both pointers from %g1/%g2 with -1 in %g3 meaning
 * "no 2nd TSB".
 */
ALTENTRY(sfmmu_uprot_trap)
#ifdef sun4v
GET_1ST_TSBE_PTR(%g2, %g1, %g4, %g5)
GET_UTSBREG(SCRATCHPAD_UTSBREG2, %g3)
brlz,pt %g3, 9f ! no 2nd TSB configured
nop
GET_2ND_TSBE_PTR(%g2, %g3, %g4, %g5)
#else
#ifdef UTSB_PHYS
GET_UTSBREG(SCRATCHPAD_UTSBREG2, %g3)
brlz,pt %g3, 9f ! no 2nd TSB configured
nop
GET_2ND_TSBE_PTR(%g2, %g3, %g4, %g5)
#else
brgez,pt %g1, 9f
mov -1, %g3 ! (delay slot) mark 2nd TSB absent
mov %g2, %g7 ! TSB pointer macro clobbers tagacc
GET_2ND_TSBE_PTR(%g7, %g1, %g3, %g4, %g5, sfmmu_uprot)
mov %g1, %g7
GET_1ST_TSBE_PTR(%g7, %g1, %g5, sfmmu_uprot)
#endif
#endif
9:
CPU_TSBMISS_AREA(%g6, %g7)
HAT_PERCPU_STAT16(%g6, TSBMISS_UPROTS, %g7) ! bump uprot stat
ba,pt %xcc, sfmmu_tsb_miss_tt
nop
.align 64
/*
 * sfmmu_kitlb_miss: kernel ITLB miss handler.  Probes the kernel 8K
 * TSB and then the kernel 4M TSB directly (bases, size codes and the
 * probe ASI are hot-patched at iktsbbase/iktsb/iktsb4mbase/iktsb4m).
 * On a tag hit with the exec bit set, stuffs the TTE into the ITLB
 * and retries; a hit without exec goes to exec_fault; a double miss
 * falls through to sfmmu_tsb_miss_tt.
 * In: %g2 = tag access, %g3 = context (non-zero -> tsb_tl0_noctxt),
 *     %g7 = tag compare value.
 */
ALTENTRY(sfmmu_kitlb_miss)
brnz,pn %g3, tsb_tl0_noctxt ! kernel miss must be context 0
nop
iktsbbase:
RUNTIME_PATCH_SETX(%g4, %g5) ! %g4 = ktsb base (hot patched)
iktsb: sllx %g2, 64-(TAGACC_SHIFT + TSB_START_SIZE + RUNTIME_PATCH), %g1
srlx %g1, 64-(TSB_START_SIZE + TSB_ENTRY_SHIFT + RUNTIME_PATCH), %g1
or %g4, %g1, %g1 ! form tsb ptr
ldda [%g1]RUNTIME_PATCH, %g4 ! %g4 = tag, %g5 = data
cmp %g4, %g7
bne,pn %xcc, iktsb4mbase ! check 4m ktsb
srlx %g2, MMU_PAGESHIFT4M, %g3 ! use 4m virt-page as TSB index
andcc %g5, TTE_EXECPRM_INT, %g0 ! check exec bit
bz,pn %icc, exec_fault
nop
TT_TRACE(trace_tsbhit) ! 2 instr traptrace
ITLB_STUFF(%g5, %g1, %g2, %g3, %g4)
retry
iktsb4mbase:
RUNTIME_PATCH_SETX(%g4, %g6) ! %g4 = ktsb4m base (hot patched)
iktsb4m:
sllx %g3, 64-(TSB_START_SIZE + RUNTIME_PATCH), %g3
srlx %g3, 64-(TSB_START_SIZE + TSB_ENTRY_SHIFT + RUNTIME_PATCH), %g3
add %g4, %g3, %g3 ! %g3 = 4m tsbe ptr
ldda [%g3]RUNTIME_PATCH, %g4 ! %g4 = tag, %g5 = data
cmp %g4, %g7
bne,pn %xcc, sfmmu_tsb_miss_tt ! branch on miss
andcc %g5, TTE_EXECPRM_INT, %g0 ! check exec bit
bz,pn %icc, exec_fault
nop
TT_TRACE(trace_tsbhit) ! 2 instr traptrace
ITLB_STUFF(%g5, %g1, %g2, %g3, %g4)
retry
.align 64
/*
 * sfmmu_kdtlb_miss: kernel DTLB miss handler.  Probes the kernel 8K
 * TSB, then (for non-kpm addresses) the kernel 4M TSB; hits stuff the
 * DTLB and retry.  Negative (kpm) addresses divert to the kpm
 * handlers: small-page kpm before the 4M probe, large-page kpm after
 * a 4M miss.  A full miss falls into sfmmu_tsb_miss_tt.
 * In: %g2 = tag access, %g3 = context (non-zero -> tsb_tl0_noctxt).
 */
ALTENTRY(sfmmu_kdtlb_miss)
brnz,pn %g3, tsb_tl0_noctxt ! kernel miss must be context 0
nop
KPM_TLBMISS_STAT_INCR(%g2, %g4, %g5, %g6, kpmtlbm_stat_out)
dktsbbase:
RUNTIME_PATCH_SETX(%g7, %g6) ! %g7 = ktsb base (hot patched)
dktsb: sllx %g2, 64-(TAGACC_SHIFT + TSB_START_SIZE + RUNTIME_PATCH), %g1
srlx %g1, 64-(TSB_START_SIZE + TSB_ENTRY_SHIFT + RUNTIME_PATCH), %g1
ldda [%g7 + %g1]RUNTIME_PATCH, %g4 ! %g4 = tag, %g5 = data
srlx %g2, TAG_VALO_SHIFT, %g6 ! make tag to compare
cmp %g6, %g4 ! compare tag
bne,pn %xcc, dktsb4m_kpmcheck_small
add %g7, %g1, %g1 ! (delay slot) %g1 = 8K tsbe ptr
TT_TRACE(trace_tsbhit)
DTLB_STUFF(%g5, %g1, %g2, %g3, %g4)
retry
dktsb4m_kpmcheck_small:
brlz,pn %g2, sfmmu_kpm_dtsb_miss_small ! negative VA -> kpm
srlx %g2, MMU_PAGESHIFT4M, %g3 ! (delay slot) 4m page index
dktsb4mbase:
RUNTIME_PATCH_SETX(%g7, %g6) ! %g7 = ktsb4m base (hot patched)
dktsb4m:
sllx %g3, 64-(TSB_START_SIZE + RUNTIME_PATCH), %g3
srlx %g3, 64-(TSB_START_SIZE + TSB_ENTRY_SHIFT + RUNTIME_PATCH), %g3
ldda [%g7 + %g3]RUNTIME_PATCH, %g4 ! %g4 = tag, %g5 = data
srlx %g2, TAG_VALO_SHIFT, %g6 ! make tag to compare
cmp %g6, %g4 ! compare tag
dktsb4m_tsbmiss:
bne,pn %xcc, dktsb4m_kpmcheck
add %g7, %g3, %g3 ! %g3 = kernel second TSB ptr
TT_TRACE(trace_tsbhit)
DTLB_STUFF(%g5, %g1, %g2, %g3, %g4)
retry
dktsb4m_kpmcheck:
cmp %g2, %g0 ! negative VA -> kpm large page
bl,pn %xcc, sfmmu_kpm_dtsb_miss
nop
ba,a,pt %icc, sfmmu_tsb_miss_tt
nop
/*
 * User ITLB/DTLB probe paths.
 * sun4v fastpaths: probe only the 1st (8K) TSB; on a miss fall into
 * the common handler with %g3 = -1 (no 2nd TSB pointer computed).
 * sfmmu_uitlb_slowpath probes the configured user TSBs in turn:
 *  - sun4v: 1st then 2nd TSB;
 *  - sun4u/UTSB_PHYS: 4th (shared-ctx 4M), 2nd (private 4M), then
 *    3rd (shared-ctx 8K) TSB, skipping any whose scratchpad register
 *    is negative (not configured);
 *  - legacy sun4u: 1st then 2nd TSB via the VA-based pointer macros.
 * All misses end up at sfmmu_tsb_miss_tt.
 */
#ifdef sun4v
.align 64
ALTENTRY(sfmmu_uitlb_fastpath)
PROBE_1ST_ITSB(%g1, %g7, uitlb_fast_8k_probefail)
ba,pn %xcc, sfmmu_tsb_miss_tt
mov -1, %g3 ! (delay slot) no 2nd TSB ptr
.align 64
ALTENTRY(sfmmu_udtlb_fastpath)
PROBE_1ST_DTSB(%g1, %g7, udtlb_fast_8k_probefail)
ba,pn %xcc, sfmmu_tsb_miss_tt
mov -1, %g3 ! (delay slot) no 2nd TSB ptr
.align 64
ALTENTRY(sfmmu_uitlb_slowpath)
GET_1ST_TSBE_PTR(%g2, %g1, %g4, %g5)
PROBE_1ST_ITSB(%g1, %g7, uitlb_8k_probefail)
GET_2ND_TSBE_PTR(%g2, %g3, %g4, %g5)
srlx %g2, TAG_VALO_SHIFT, %g7 ! tag to compare
PROBE_2ND_ITSB(%g3, %g7)
#else
.align 64
ALTENTRY(sfmmu_uitlb_slowpath)
#ifdef UTSB_PHYS
GET_UTSBREG(SCRATCHPAD_UTSBREG4, %g6)
brlz,pt %g6, 1f ! skip 4th TSB if not configured
nop
GET_4TH_TSBE_PTR(%g2, %g6, %g4, %g5)
PROBE_4TH_ITSB(%g6, %g7, uitlb_4m_scd_probefail)
1:
GET_UTSBREG(SCRATCHPAD_UTSBREG2, %g3)
brlz,pt %g3, 2f ! skip 2nd TSB if not configured
nop
GET_2ND_TSBE_PTR(%g2, %g3, %g4, %g5)
PROBE_2ND_ITSB(%g3, %g7, uitlb_4m_probefail)
2:
GET_UTSBREG(SCRATCHPAD_UTSBREG3, %g6)
brlz,pt %g6, sfmmu_tsb_miss_tt ! no 3rd TSB -> full miss
nop
GET_3RD_TSBE_PTR(%g2, %g6, %g4, %g5)
PROBE_3RD_ITSB(%g6, %g7, uitlb_8K_scd_probefail)
ba,pn %xcc, sfmmu_tsb_miss_tt
nop
#else
mov %g1, %g3 ! preserve tsbreg
GET_1ST_TSBE_PTR(%g3, %g1, %g5, sfmmu_uitlb)
PROBE_1ST_ITSB(%g1, %g7, uitlb_8k_probefail)
mov %g2, %g6 ! preserve tagacc
mov %g3, %g7
GET_2ND_TSBE_PTR(%g6, %g7, %g3, %g4, %g5, sfmmu_uitlb)
srlx %g2, TAG_VALO_SHIFT, %g7 ! tag to compare
PROBE_2ND_ITSB(%g3, %g7, isynth)
ba,pn %xcc, sfmmu_tsb_miss_tt
nop
#endif
#endif
/*
 * User DTLB slow paths.
 * sun4u/UTSB_PHYS splits on an ISM prediction made by the caller:
 *  - _noismpred probes 2nd (private 4M), 4th (shared 4M), then 3rd
 *    (shared 8K) TSB;
 *  - _ismpred probes 4th, 2nd, 1st (8K), then 3rd TSB.
 * In each case a scratchpad register that reads negative means that
 * TSB is not configured and is skipped.  All misses fall into
 * sfmmu_tsb_miss_tt.
 * The generic sfmmu_udtlb_slowpath orders the 1st/2nd TSB probes by
 * the PREDISM_BASESHIFT heuristic on the fault address (%g6 > 0 means
 * "probably ISM": probe the 2nd TSB first, then the 1st).
 */
#if defined(sun4u) && defined(UTSB_PHYS)
.align 64
ALTENTRY(sfmmu_udtlb_slowpath_noismpred)
GET_UTSBREG(SCRATCHPAD_UTSBREG2, %g3)
brlz,pt %g3, 1f ! skip 2nd TSB if not configured
nop
GET_2ND_TSBE_PTR(%g2, %g3, %g4, %g5)
PROBE_2ND_DTSB(%g3, %g7, udtlb_4m_probefail)
1:
GET_UTSBREG(SCRATCHPAD_UTSBREG4, %g6)
brlz,pt %g6, 2f ! skip 4th TSB if not configured
nop
GET_4TH_TSBE_PTR(%g2, %g6, %g4, %g5)
PROBE_4TH_DTSB(%g6, %g7, udtlb_4m_shctx_probefail)
2:
GET_UTSBREG(SCRATCHPAD_UTSBREG3, %g6)
brlz,pt %g6, sfmmu_tsb_miss_tt ! no 3rd TSB -> full miss
nop
GET_3RD_TSBE_PTR(%g2, %g6, %g4, %g5)
PROBE_3RD_DTSB(%g6, %g7, udtlb_8k_shctx_probefail)
ba,pn %xcc, sfmmu_tsb_miss_tt
nop
.align 64
ALTENTRY(sfmmu_udtlb_slowpath_ismpred)
GET_UTSBREG(SCRATCHPAD_UTSBREG4, %g6)
brlz,pt %g6, 4f ! skip 4th TSB if not configured
nop
GET_4TH_TSBE_PTR(%g2, %g6, %g4, %g5)
PROBE_4TH_DTSB(%g6, %g7, udtlb_4m_shctx_probefail2)
4:
GET_UTSBREG(SCRATCHPAD_UTSBREG2, %g3)
brlz,pt %g3, 5f ! skip 2nd TSB if not configured
nop
GET_2ND_TSBE_PTR(%g2, %g3, %g4, %g5)
PROBE_2ND_DTSB(%g3, %g7, udtlb_4m_probefail2)
5:
PROBE_1ST_DTSB(%g1, %g7, udtlb_8k_first_probefail2)
GET_UTSBREG(SCRATCHPAD_UTSBREG3, %g6)
brlz,pt %g6, 6f ! skip 3rd TSB if not configured
nop
GET_3RD_TSBE_PTR(%g2, %g6, %g4, %g5)
PROBE_3RD_DTSB(%g6, %g7, udtlb_8k_shctx_probefail2)
6:
ba,pn %xcc, sfmmu_tsb_miss_tt
nop
#else
.align 64
ALTENTRY(sfmmu_udtlb_slowpath)
srax %g2, PREDISM_BASESHIFT, %g6 ! %g6 > 0 -> predicted ISM
brgz,pn %g6, udtlb_miss_probesecond
mov %g1, %g3 ! (delay slot)
udtlb_miss_probefirst:
#ifdef sun4v
GET_1ST_TSBE_PTR(%g2, %g1, %g4, %g5)
PROBE_1ST_DTSB(%g1, %g7, udtlb_first_probefail)
brgz,pn %g6, sfmmu_tsb_miss_tt ! 2nd already probed on ISM path
nop
#else
mov %g1, %g4
GET_1ST_TSBE_PTR(%g4, %g1, %g5, sfmmu_udtlb)
PROBE_1ST_DTSB(%g1, %g7, udtlb_first_probefail)
brgz,pn %g6, sfmmu_tsb_miss_tt ! 2nd already probed on ISM path
nop
ldxa [%g0]ASI_DMMU_TSB_8K, %g3 ! reload tsbreg
#endif
udtlb_miss_probesecond:
#ifdef sun4v
GET_2ND_TSBE_PTR(%g2, %g3, %g4, %g5)
#else
mov %g3, %g7
GET_2ND_TSBE_PTR(%g2, %g7, %g3, %g4, %g5, sfmmu_udtlb)
mov MMU_TAG_ACCESS, %g2
ldxa [%g2]ASI_DMMU, %g2 ! reload tag access
#endif
srlx %g2, TAG_VALO_SHIFT, %g7 ! tag to compare
PROBE_2ND_DTSB(%g3, %g7, udtlb_4m_probefail)
brgz,pn %g6, udtlb_miss_probefirst ! ISM path: now try 1st TSB
nop
#endif
/*
 * sfmmu_tsb_miss: common TSB miss handler.  On entry:
 *   %g1 = 1st TSB entry ptr, %g2 = tag access, %g3 = 2nd TSB entry
 *   ptr (or -1), %g7 used as scratch.
 * First, if we came in at TL > 1 and %tpc is below KERNELBASE, %tpc
 * and %tnpc are bumped past the TSB probe sequence (trapstat
 * instrumentation; offset hot-patched at
 * tsbmiss_trapstat_patch_point).  Then the TSB pointers are stashed
 * in the per-CPU tsbmiss area, the context selects kernel vs user
 * hatid, ISM is checked for user misses, and the handler falls into
 * the per-page-size hme hash probe ladder at label 2.
 */
ALTENTRY(sfmmu_tsb_miss_tt)
TT_TRACE(trace_tsbmiss)
ALTENTRY(sfmmu_tsb_miss)
rdpr %tl, %g7
cmp %g7, 1
ble,pt %xcc, 0f ! TL <= 1: no trapstat adjustment
sethi %hi(KERNELBASE), %g6
rdpr %tpc, %g7
or %g6, %lo(KERNELBASE), %g6
cmp %g7, %g6
bgeu,pt %xcc, 0f ! %tpc in kernel: no adjustment
ALTENTRY(tsbmiss_trapstat_patch_point)
add %g7, RUNTIME_PATCH, %g7 ! skip-count hot patched
wrpr %g7, %tpc
add %g7, 4, %g7
wrpr %g7, %tnpc
0:
CPU_TSBMISS_AREA(%g6, %g7) ! %g6 = per-CPU tsbmiss area
stn %g1, [%g6 + TSBMISS_TSBPTR] ! save 1st TSB ptr
stn %g3, [%g6 + TSBMISS_TSBPTR4M] ! save 2nd TSB ptr
sllx %g2, TAGACC_CTX_LSHIFT, %g3 ! isolate context
brz,a,pn %g3, 1f ! ctx 0 => kernel
ldn [%g6 + TSBMISS_KHATID], %g7
srlx %g3, TAGACC_CTX_LSHIFT, %g3
ldn [%g6 + TSBMISS_UHATID], %g7
HAT_PERCPU_STAT32(%g6, TSBMISS_UTSBMISS, %g5)
cmp %g3, INVALID_CONTEXT
be,pn %icc, tsb_tl0_noctxt ! stolen context
stn %g7, [%g6 + (TSBMISS_SCRATCH + TSBMISS_HATID)]
#if defined(sun4v) || defined(UTSB_PHYS)
ldub [%g6 + TSBMISS_URTTEFLAGS], %g7 ! clear ctx1 flag
andn %g7, HAT_CHKCTX1_FLAG, %g7
stub %g7, [%g6 + TSBMISS_URTTEFLAGS]
#endif
ISM_CHECK(%g2, %g6, %g3, %g4, %g5, %g7, %g1, tsb_l1, tsb_ism)
ba,pt %icc, 2f
ldn [%g6 + (TSBMISS_SCRATCH + TSBMISS_HATID)], %g7
1:
HAT_PERCPU_STAT32(%g6, TSBMISS_KTSBMISS, %g5)
2:
! Probe ladder: try each supported page size in turn.  Each GET_TTE
! branches to tsb_checktte on a hit or to the next rung on a miss.
GET_TTE(%g2, %g7, %g3, %g4, %g6, %g1,
MMU_PAGESHIFT64K, TTE64K, %g5, tsb_l8K, tsb_checktte,
sfmmu_suspend_tl, tsb_512K)
/*
 * Per-page-size probe ladder continued.  For user misses (non-zero
 * context) each rung is skipped unless the corresponding HAT_*_FLAG
 * is set in TSBMISS_UTTEFLAGS; kernel misses probe every size.
 * On sun4u without UTSB_PHYS there is no shared-hme support, so a
 * 256M user miss with no flag set ends at tsb_pagefault (tsb_shme is
 * aliased to it below).
 */
tsb_512K:
sllx %g2, TAGACC_CTX_LSHIFT, %g5
brz,pn %g5, 3f ! kernel: always probe
ldub [%g6 + TSBMISS_UTTEFLAGS], %g4
and %g4, HAT_512K_FLAG, %g5
brz,pn %g5, tsb_4M ! no 512K mappings: skip
nop
3:
GET_TTE(%g2, %g7, %g3, %g4, %g6, %g1,
MMU_PAGESHIFT512K, TTE512K, %g5, tsb_l512K, tsb_checktte,
sfmmu_suspend_tl, tsb_4M)
tsb_4M:
sllx %g2, TAGACC_CTX_LSHIFT, %g5
brz,pn %g5, 4f ! kernel: always probe
ldub [%g6 + TSBMISS_UTTEFLAGS], %g4
and %g4, HAT_4M_FLAG, %g5
brz,pn %g5, tsb_32M ! no 4M mappings: skip
nop
4:
GET_TTE(%g2, %g7, %g3, %g4, %g6, %g1,
MMU_PAGESHIFT4M, TTE4M, %g5, tsb_l4M, tsb_checktte,
sfmmu_suspend_tl, tsb_32M)
tsb_32M:
sllx %g2, TAGACC_CTX_LSHIFT, %g5
#ifdef sun4v
brz,pn %g5, 6f ! sun4v kernel may use 32M/256M
#else
brz,pn %g5, tsb_pagefault ! sun4u kernel: no 32M/256M
#endif
ldub [%g6 + TSBMISS_UTTEFLAGS], %g4
and %g4, HAT_32M_FLAG, %g5
brz,pn %g5, tsb_256M ! no 32M mappings: skip
nop
5:
GET_TTE(%g2, %g7, %g3, %g4, %g6, %g1,
MMU_PAGESHIFT32M, TTE32M, %g5, tsb_l32M, tsb_checktte,
sfmmu_suspend_tl, tsb_256M)
#if defined(sun4u) && !defined(UTSB_PHYS)
#define tsb_shme tsb_pagefault
#endif
tsb_256M:
ldub [%g6 + TSBMISS_UTTEFLAGS], %g4
and %g4, HAT_256M_FLAG, %g5
brz,pn %g5, tsb_shme ! no 256M mappings: try shared hme
nop
6:
GET_TTE(%g2, %g7, %g3, %g4, %g6, %g1,
MMU_PAGESHIFT256M, TTE256M, %g5, tsb_l256M, tsb_checktte,
sfmmu_suspend_tl, tsb_shme)
/*
 * tsb_checktte: a private-hme probe found an hment.  A valid TTE is
 * negative (high bit set): go load it.  Otherwise fall into the
 * shared-hme (SRD region) ladder, which repeats the per-page-size
 * probes via GET_SHME_TTE against TSBMISS_SHARED_UHATID, gated by
 * TSBMISS_URTTEFLAGS.  tsb_shme_checktte: on a valid shared TTE,
 * %g1 != 0 means the mapping belongs to shared context 1, so the
 * HAT_CHKCTX1_FLAG is set and ctx1 is saved before falling into
 * tsb_validtte.
 */
tsb_checktte:
brlz,a,pt %g3, tsb_validtte ! TTE valid (negative)
rdpr %tt, %g7 ! (delay slot) %g7 = trap type
#if defined(sun4u) && !defined(UTSB_PHYS)
#undef tsb_shme
ba tsb_pagefault
nop
#else
tsb_shme:
sllx %g2, TAGACC_CTX_LSHIFT, %g5
brz,pn %g5, tsb_pagefault ! kernel has no shared regions
nop
ldx [%g6 + TSBMISS_SHARED_UHATID], %g7 ! g7 = srdp
brz,pn %g7, tsb_pagefault ! no srd: give up
nop
GET_SHME_TTE(%g2, %g7, %g3, %g4, %g6, %g1,
MMU_PAGESHIFT64K, TTE64K, %g5, tsb_shme_l8K, tsb_shme_checktte,
sfmmu_suspend_tl, tsb_shme_512K)
tsb_shme_512K:
ldub [%g6 + TSBMISS_URTTEFLAGS], %g4
and %g4, HAT_512K_FLAG, %g5
brz,pn %g5, tsb_shme_4M ! no shared 512K: skip
nop
GET_SHME_TTE(%g2, %g7, %g3, %g4, %g6, %g1,
MMU_PAGESHIFT512K, TTE512K, %g5, tsb_shme_l512K, tsb_shme_checktte,
sfmmu_suspend_tl, tsb_shme_4M)
tsb_shme_4M:
ldub [%g6 + TSBMISS_URTTEFLAGS], %g4
and %g4, HAT_4M_FLAG, %g5
brz,pn %g5, tsb_shme_32M ! no shared 4M: skip
nop
4:
GET_SHME_TTE(%g2, %g7, %g3, %g4, %g6, %g1,
MMU_PAGESHIFT4M, TTE4M, %g5, tsb_shme_l4M, tsb_shme_checktte,
sfmmu_suspend_tl, tsb_shme_32M)
tsb_shme_32M:
ldub [%g6 + TSBMISS_URTTEFLAGS], %g4
and %g4, HAT_32M_FLAG, %g5
brz,pn %g5, tsb_shme_256M ! no shared 32M: skip
nop
GET_SHME_TTE(%g2, %g7, %g3, %g4, %g6, %g1,
MMU_PAGESHIFT32M, TTE32M, %g5, tsb_shme_l32M, tsb_shme_checktte,
sfmmu_suspend_tl, tsb_shme_256M)
tsb_shme_256M:
ldub [%g6 + TSBMISS_URTTEFLAGS], %g4
and %g4, HAT_256M_FLAG, %g5
brz,pn %g5, tsb_pagefault ! exhausted: page fault
nop
GET_SHME_TTE(%g2, %g7, %g3, %g4, %g6, %g1,
MMU_PAGESHIFT256M, TTE256M, %g5, tsb_shme_l256M, tsb_shme_checktte,
sfmmu_suspend_tl, tsb_pagefault)
tsb_shme_checktte:
brgez,pn %g3, tsb_pagefault ! invalid TTE -> fault
rdpr %tt, %g7 ! (delay slot) %g7 = trap type
brz,pt %g1, tsb_validtte ! %g1 == 0: not a ctx1 mapping
nop
ldub [%g6 + TSBMISS_URTTEFLAGS], %g1
or %g1, HAT_CHKCTX1_FLAG, %g1 ! remember: mapping is in ctx1
stub %g1, [%g6 + TSBMISS_URTTEFLAGS]
SAVE_CTX1(%g7, %g2, %g1, tsb_shmel)
#endif
/*
 * tsb_validtte: we have a valid TTE in %g3; %g7 = trap type.
 * For a fast protection trap, atomically set ref+mod in the TTE
 * (failing to tsb_protfault if not writable) and fetch the d-side
 * tag target and fault address.  For a miss, verify exec permission
 * on i-side traps, set the ref bit, and build the TSB tag target in
 * %g2 (and fault VA in %g5) from the MMU fault status area (sun4v)
 * or the tag access registers (sun4u).  Falls into tsb_update_tl1.
 */
tsb_validtte:
cmp %g7, FAST_PROT_TT ! protection trap?
bne,pt %icc, 4f
nop
TTE_SET_REFMOD_ML(%g3, %g4, %g6, %g7, %g5, tsb_lset_refmod,
tsb_protfault)
GET_MMU_D_TTARGET(%g2, %g7) ! %g2 = tag target
#ifdef sun4v
MMU_FAULT_STATUS_AREA(%g7)
ldx [%g7 + MMFSA_D_ADDR], %g5 ! %g5 = fault VA
#else
mov MMU_TAG_ACCESS, %g5
ldxa [%g5]ASI_DMMU, %g5 ! %g5 = fault VA (tag access)
#endif
ba,pt %xcc, tsb_update_tl1
nop
4:
! Miss path: i-side traps require the exec bit.
cmp %g7, T_INSTR_MMU_MISS
be,pn %icc, 5f
andcc %g3, TTE_EXECPRM_INT, %g0 ! (delay slot) check exec bit
cmp %g7, FAST_IMMU_MISS_TT
bne,pt %icc, 3f
andcc %g3, TTE_EXECPRM_INT, %g0 ! (delay slot) check exec bit
5:
bz,pn %icc, tsb_protfault ! no exec permission
nop
3:
TTE_SET_REF_ML(%g3, %g4, %g6, %g7, %g5, tsb_lset_ref)
rdpr %tt, %g7
#ifdef sun4v
MMU_FAULT_STATUS_AREA(%g2)
cmp %g7, T_INSTR_MMU_MISS
be,a,pt %icc, 9f ! i-side: MMFSA_I_ offsets apply
nop
cmp %g7, FAST_IMMU_MISS_TT
be,a,pt %icc, 9f
nop
add %g2, MMFSA_D_, %g2 ! d-side: advance to D section
9:
ldx [%g2 + MMFSA_CTX_], %g7
sllx %g7, TTARGET_CTX_SHIFT, %g7
ldx [%g2 + MMFSA_ADDR_], %g2
mov %g2, %g5 ! load the fault addr for later use
srlx %g2, TTARGET_VA_SHIFT, %g2
or %g2, %g7, %g2 ! %g2 = tag target
#else
mov MMU_TAG_ACCESS, %g5
cmp %g7, FAST_IMMU_MISS_TT
be,a,pt %icc, 9f
ldxa [%g0]ASI_IMMU, %g2 ! (annulled-taken) i-side tag target
ldxa [%g0]ASI_DMMU, %g2 ! d-side tag target
ba,pt %icc, tsb_update_tl1
ldxa [%g5]ASI_DMMU, %g5 ! (delay slot) d-side fault VA
9:
ldxa [%g5]ASI_IMMU, %g5 ! i-side fault VA
#endif
/*
 * tsb_update_tl1: insert the TTE (%g3) into the appropriate TSB.
 * %g2 = tag target, %g5 = fault VA.  Context 0 goes to tsb_kernel;
 * otherwise %g7 = TTE size bits and we dispatch to the 8K or 4M user
 * TSB path.  tsb_user8k: pick the private 1st TSB, or the shared 3rd
 * TSB when HAT_CHKCTX1_FLAG is set (panicking if no shared TSB
 * exists), lock+update the entry, then stuff the TLB and retry.
 */
tsb_update_tl1:
srlx %g2, TTARGET_CTX_SHIFT, %g7
brz,pn %g7, tsb_kernel ! context 0 -> kernel TSB
#ifdef sun4v
and %g3, TTE_SZ_BITS, %g7 ! assumes TTE_SZ_SHFT is 0
#else
srlx %g3, TTE_SZ_SHFT, %g7 ! (delay slot) %g7 = tte size
#endif
tsb_user:
#ifdef sun4v
cmp %g7, TTE4M
bge,pn %icc, tsb_user4m ! 4M or larger
nop
#else
cmp %g7, TTESZ_VALID | TTE4M
be,pn %icc, tsb_user4m
srlx %g3, TTE_SZ2_SHFT, %g7 ! (delay slot)
andcc %g7, TTE_SZ2_BITS, %g7 ! check 32/256MB
#ifdef ITLB_32M_256M_SUPPORT
bnz,pn %icc, tsb_user4m ! 32M/256M use the 4M TSB
nop
#else
bnz,a,pn %icc, tsb_user_pn_synth ! synthesize 4M entry
nop
#endif
#endif
tsb_user8k:
#if defined(sun4v) || defined(UTSB_PHYS)
ldub [%g6 + TSBMISS_URTTEFLAGS], %g7
and %g7, HAT_CHKCTX1_FLAG, %g1
brz,a,pn %g1, 1f ! private mapping
ldn [%g6 + TSBMISS_TSBPTR], %g1 ! g1 = 1ST TSB ptr
GET_UTSBREG_SHCTX(%g6, TSBMISS_TSBSCDPTR, %g1)
brlz,a,pn %g1, ptl1_panic ! if no shared 3RD tsb
mov PTL1_NO_SCDTSB8K, %g1 ! panic
GET_3RD_TSBE_PTR(%g5, %g1, %g6, %g7)
1:
#else
ldn [%g6 + TSBMISS_TSBPTR], %g1 ! g1 = 1ST TSB ptr
#endif
#ifndef UTSB_PHYS
mov ASI_N, %g7 ! user TSBs accessed by VA
mov %g7, %asi
#endif
TSB_UPDATE_TL(%g1, %g3, %g2, %g4, %g7, %g6, locked_tsb_l3)
rdpr %tt, %g5
#ifdef sun4v
cmp %g5, T_INSTR_MMU_MISS
be,a,pn %xcc, 9f ! i-side miss -> ITLB
mov %g3, %g5
#endif
cmp %g5, FAST_IMMU_MISS_TT
be,pn %xcc, 9f ! i-side miss -> ITLB
mov %g3, %g5
DTLB_STUFF(%g5, %g1, %g2, %g3, %g4)
! trapstat wants TTE in %g5
retry
9:
ITLB_STUFF(%g5, %g1, %g2, %g3, %g4)
! trapstat wants TTE in %g5
retry
/*
 * tsb_user4m: insert a 4M-or-larger user TTE into the private 2nd
 * TSB, or the shared 4th TSB when HAT_CHKCTX1_FLAG is set.  Unlike
 * the 8K path, a missing TSB (negative pointer) is tolerated: the
 * update is simply skipped (5f) and only the TLB is loaded.
 */
tsb_user4m:
#if defined(sun4v) || defined(UTSB_PHYS)
ldub [%g6 + TSBMISS_URTTEFLAGS], %g7
and %g7, HAT_CHKCTX1_FLAG, %g1
brz,a,pn %g1, 4f ! private mapping
ldn [%g6 + TSBMISS_TSBPTR4M], %g1 ! g1 = 2ND TSB ptr
GET_UTSBREG_SHCTX(%g6, TSBMISS_TSBSCDPTR4M, %g1)! g1 = 4TH TSB ptr
brlz,a,pn %g1, 5f ! if no shared 4TH TSB
nop
GET_4TH_TSBE_PTR(%g5, %g1, %g6, %g7)
#else
ldn [%g6 + TSBMISS_TSBPTR4M], %g1 ! g1 = 2ND TSB ptr
#endif
4:
brlz,pn %g1, 5f ! no 2nd TSB: skip update
nop
#ifndef UTSB_PHYS
mov ASI_N, %g7 ! user TSBs accessed by VA
mov %g7, %asi
#endif
TSB_UPDATE_TL(%g1, %g3, %g2, %g4, %g7, %g6, locked_tsb_l4)
5:
rdpr %tt, %g5
#ifdef sun4v
cmp %g5, T_INSTR_MMU_MISS
be,a,pn %xcc, 9f ! i-side miss -> ITLB
mov %g3, %g5
#endif
cmp %g5, FAST_IMMU_MISS_TT
be,pn %xcc, 9f ! i-side miss -> ITLB
mov %g3, %g5
DTLB_STUFF(%g5, %g1, %g2, %g3, %g4)
! trapstat wants TTE in %g5
retry
9:
ITLB_STUFF(%g5, %g1, %g2, %g3, %g4)
! trapstat wants TTE in %g5
retry
/*
 * tsb_user_pn_synth / tsb_user_itlb_synth: sun4u CPUs without native
 * 32M/256M ITLB support.  A 32M/256M TTE is converted into a
 * synthetic 4M entry (GET_4M_PFN_OFF computes the 4M-page PFN from
 * the large-page TTE and the faulting VA) which is inserted into the
 * 2nd TSB and the TLB.  DTLB misses on executable pages go through
 * the synth path too (4b re-enters the normal 4M path for
 * non-executable pages).
 */
#if !defined(sun4v) && !defined(ITLB_32M_256M_SUPPORT)
tsb_user_pn_synth:
rdpr %tt, %g5
cmp %g5, FAST_IMMU_MISS_TT
be,pt %xcc, tsb_user_itlb_synth ! i-side synth
andcc %g3, TTE_EXECPRM_INT, %g0 ! (delay slot) exec bit
bz,pn %icc, 4b ! not executable: plain 4M path
ldn [%g6 + TSBMISS_TSBPTR4M], %g1 ! (delay slot) 2ND TSB ptr
brlz,a,pn %g1, 5f ! no 2nd TSB: just stuff TLB
mov %g3, %g5
mov MMU_TAG_ACCESS, %g7
ldxa [%g7]ASI_DMMU, %g6 ! %g6 = d-side fault VA
GET_4M_PFN_OFF(%g3, %g6, %g5, %g7, 1) ! synthesize 4M TTE
mov ASI_N, %g7 ! user TSBs accessed by VA
mov %g7, %asi
TSB_UPDATE_TL_PN(%g1, %g5, %g2, %g4, %g7, %g3, locked_tsb_l5)
5:
DTLB_STUFF(%g5, %g1, %g2, %g3, %g4)
retry
tsb_user_itlb_synth:
ldn [%g6 + TSBMISS_TSBPTR4M], %g1 ! g1 = 2ND TSB ptr
mov MMU_TAG_ACCESS, %g7
ldxa [%g7]ASI_IMMU, %g6 ! %g6 = i-side fault VA
GET_4M_PFN_OFF(%g3, %g6, %g5, %g7, 2) ! synthesize 4M TTE
brlz,a,pn %g1, 7f ! no 2nd TSB: just stuff TLB
or %g5, %g3, %g5 ! (annulled-taken) full TTE
mov ASI_N, %g7
mov %g7, %asi
TSB_UPDATE_TL_PN(%g1, %g5, %g2, %g4, %g7, %g3, locked_tsb_l6)
7:
SET_TTE4M_PN(%g5, %g7) ! set 4M pagesize in synth TTE
ITLB_STUFF(%g5, %g1, %g2, %g3, %g4)
retry
#endif
/*
 * tsb_kernel: insert a kernel TTE into the 8K or 4M kernel TSB
 * (selected by the TTE size in %g7; sun4u has no kernel 32M/256M).
 * On sun4u the TSB access ASI is hot-patched at
 * tsb_kernel_patch_asi.  A negative 4M TSB pointer means no 4M TSB:
 * skip the update and just stuff the TLB.
 */
tsb_kernel:
rdpr %tt, %g5
#ifdef sun4v
cmp %g7, TTE4M
bge,pn %icc, 5f ! 4M or larger -> 4M TSB
#else
cmp %g7, TTESZ_VALID | TTE4M ! no 32M or 256M support
be,pn %icc, 5f
#endif
nop
ldn [%g6 + TSBMISS_TSBPTR], %g1 ! g1 = 8K TSB ptr
ba,pt %xcc, 6f
nop
5:
ldn [%g6 + TSBMISS_TSBPTR4M], %g1 ! g1 = 4M TSB ptr
brlz,pn %g1, 3f ! no 4M TSB: skip update
nop
6:
#ifndef sun4v
tsb_kernel_patch_asi:
or %g0, RUNTIME_PATCH, %g6 ! ASI hot patched
mov %g6, %asi ! XXX avoid writing to %asi !!
#endif
TSB_UPDATE_TL(%g1, %g3, %g2, %g4, %g7, %g6, locked_tsb_l7)
3:
#ifdef sun4v
cmp %g5, T_INSTR_MMU_MISS
be,a,pn %icc, 1f ! i-side miss -> ITLB
mov %g3, %g5 ! trapstat wants TTE in %g5
#endif
cmp %g5, FAST_IMMU_MISS_TT
be,pn %icc, 1f ! i-side miss -> ITLB
mov %g3, %g5 ! trapstat wants TTE in %g5
DTLB_STUFF(%g5, %g1, %g2, %g3, %g4)
! trapstat wants TTE in %g5
retry
1:
ITLB_STUFF(%g5, %g1, %g2, %g3, %g4)
! trapstat wants TTE in %g5
retry
/*
 * tsb_ism: the fault address lies in an ISM segment.  %g4 points into
 * the ism map entry (from ISM_CHECK); %g3 holds the segment offset.
 * Rebase the tag access (%g2) to the ISM hat's address space (keeping
 * the context bits), pick up the ism hatflags, optionally switch to
 * ctx1 bookkeeping (HAT_CTX1_FLAG), then probe the hme hash with the
 * ISM hatid at the page sizes the flags allow: 4M first if
 * HAT_4M_FLAG, else 32M/256M, finally falling back to 4M then 8K/64K.
 * Each *_found label funnels valid TTEs to tsb_validtte.
 */
tsb_ism:
ldna [%g4]ASI_MEM, %g7 ! g7 = ism hatid
brz,a,pn %g7, ptl1_panic ! ism hatid must exist
mov PTL1_BAD_ISM, %g1
sub %g4, (IMAP_ISMHAT - IMAP_VB_SHIFT), %g5 ! g5 = &ism_map.imap_vb_shift
lduba [%g5]ASI_MEM, %g4 ! g4 = imap_vb_shift
srlx %g3, %g4, %g3 ! clear offset bits below vb_shift
set TAGACC_CTX_MASK, %g1
sllx %g3, %g4, %g3 ! g3 = ISM seg base offset
and %g2, %g1, %g4 ! g4 = ctx bits of tagacc
andn %g2, %g1, %g1 ! g1 = fault VA
sub %g1, %g3, %g2 ! g2 = VA rebased into ism hat
or %g2, %g4, %g2 ! restore ctx bits
sub %g5, (IMAP_VB_SHIFT - IMAP_HATFLAGS), %g5
lduha [%g5]ASI_MEM, %g4 ! g4 = imap_hatflags
#if defined(sun4v) || defined(UTSB_PHYS)
and %g4, HAT_CTX1_FLAG, %g5
brz,pt %g5, tsb_chk4M_ism
nop
ldub [%g6 + TSBMISS_URTTEFLAGS], %g5
or %g5, HAT_CHKCTX1_FLAG, %g5 ! ISM mapping lives in ctx1
stub %g5, [%g6 + TSBMISS_URTTEFLAGS]
rdpr %tt, %g5
SAVE_CTX1(%g5, %g3, %g1, tsb_shctxl)
#endif
tsb_chk4M_ism:
and %g4, HAT_4M_FLAG, %g5
brnz,pt %g5, tsb_ism_4M ! 4M pagesize ISM
nop
tsb_ism_32M:
and %g4, HAT_32M_FLAG, %g5
brz,pn %g5, tsb_ism_256M ! not 32M: try 256M
nop
GET_TTE(%g2, %g7, %g3, %g4, %g6, %g1, MMU_PAGESHIFT32M,
TTE32M, %g5, tsb_ism_l32M, tsb_ism_32M_found, sfmmu_suspend_tl,
tsb_ism_4M)
tsb_ism_32M_found:
brlz,a,pt %g3, tsb_validtte ! valid TTE found
rdpr %tt, %g7
ba,pt %xcc, tsb_ism_4M ! else fall back to 4M
nop
tsb_ism_256M:
and %g4, HAT_256M_FLAG, %g5
brz,a,pn %g5, ptl1_panic ! no pagesize flag at all
mov PTL1_BAD_ISM, %g1
GET_TTE(%g2, %g7, %g3, %g4, %g6, %g1, MMU_PAGESHIFT256M,
TTE256M, %g5, tsb_ism_l256M, tsb_ism_256M_found, sfmmu_suspend_tl,
tsb_ism_4M)
tsb_ism_256M_found:
brlz,a,pt %g3, tsb_validtte ! valid TTE found
rdpr %tt, %g7
tsb_ism_4M:
GET_TTE(%g2, %g7, %g3, %g4, %g6, %g1, MMU_PAGESHIFT4M,
TTE4M, %g5, tsb_ism_l4M, tsb_ism_4M_found, sfmmu_suspend_tl,
tsb_ism_8K)
tsb_ism_4M_found:
brlz,a,pt %g3, tsb_validtte ! valid TTE found
rdpr %tt, %g7
tsb_ism_8K:
GET_TTE(%g2, %g7, %g3, %g4, %g6, %g1, MMU_PAGESHIFT64K,
TTE64K, %g5, tsb_ism_l8K, tsb_ism_8K_found, sfmmu_suspend_tl,
tsb_pagefault)
tsb_ism_8K_found:
brlz,a,pt %g3, tsb_validtte ! valid TTE found
rdpr %tt, %g7
/*
 * tsb_pagefault: no mapping found anywhere.  A protection trap is
 * converted to a plain DMMU miss trap type before faulting.
 * tsb_protfault: fetch the fault context (MMFSA on sun4v, tag access
 * on sun4u) to distinguish kernel (ctx 0) from user faults.
 *   Kernel path: panic if the fault is on the current stack page or
 *   if at TL > 1 (after trapstat exclusion); otherwise honor
 *   CPU_DTRACE_NOFAULT (record the bad address and `done`) or go to
 *   sfmmu_pagefault.
 *   User path (3:): at TL > 1 go to sfmmu_window_trap; else the same
 *   dtrace-nofault handling (with a privilege sanity panic) or
 *   sfmmu_mmu_trap.
 */
tsb_pagefault:
rdpr %tt, %g7
cmp %g7, FAST_PROT_TT
be,a,pn %icc, tsb_protfault
wrpr %g0, FAST_DMMU_MISS_TT, %tt ! (annulled-taken) demote trap type
tsb_protfault:
cmp %g7, FAST_IMMU_MISS_TT
#ifdef sun4v
MMU_FAULT_STATUS_AREA(%g4)
ldx [%g4 + MMFSA_I_CTX], %g5
ldx [%g4 + MMFSA_D_CTX], %g4
move %icc, %g5, %g4 ! i-side: use I ctx
cmp %g7, T_INSTR_MMU_MISS
move %icc, %g5, %g4 ! i-side: use I ctx
#else
mov MMU_TAG_ACCESS, %g4
ldxa [%g4]ASI_DMMU, %g2
ldxa [%g4]ASI_IMMU, %g5
move %icc, %g5, %g2 ! i-side: use I tag access
cmp %g7, T_INSTR_MMU_MISS
move %icc, %g5, %g2 ! i-side: use I tag access
sllx %g2, TAGACC_CTX_LSHIFT, %g4 ! isolate ctx
#endif
brnz,pn %g4, 3f ! non-zero ctx: user fault
rdpr %tl, %g5 ! (delay slot)
! Kernel fault: a fault on our own stack page is fatal.
add %sp, STACK_BIAS, %g3
srlx %g3, MMU_PAGESHIFT, %g3
srlx %g2, MMU_PAGESHIFT, %g4
cmp %g3, %g4
be,a,pn %icc, ptl1_panic ! fault on current kernel stack
mov PTL1_BAD_STACK, %g1
cmp %g5, 1
ble,pt %icc, 2f ! TL <= 1: recoverable
nop
TSTAT_CHECK_TL1(2f, %g1, %g2)
rdpr %tt, %g2
cmp %g2, FAST_PROT_TT
mov PTL1_BAD_KPROT_FAULT, %g1
movne %icc, PTL1_BAD_KMISS, %g1
ba,pt %icc, ptl1_panic ! TL > 1 kernel fault: panic
nop
2:
! dtrace no-fault protocol: record bad addr, skip the access.
CPU_INDEX(%g1, %g2)
set cpu_core, %g2
sllx %g1, CPU_CORE_SHIFT, %g1
add %g1, %g2, %g1 ! %g1 = &cpu_core[cpu]
lduh [%g1 + CPUC_DTRACE_FLAGS], %g2
andcc %g2, CPU_DTRACE_NOFAULT, %g0
bz sfmmu_pagefault
or %g2, CPU_DTRACE_BADADDR, %g2 ! (delay slot)
stuh %g2, [%g1 + CPUC_DTRACE_FLAGS]
GET_MMU_D_ADDR(%g3, %g4)
stx %g3, [%g1 + CPUC_DTRACE_ILLVAL]
done
3:
! User fault.
cmp %g5, 1
ble,pt %icc, 4f ! TL <= 1
nop
TSTAT_CHECK_TL1(4f, %g1, %g2)
ba,pt %icc, sfmmu_window_trap
nop
4:
CPU_INDEX(%g1, %g2)
set cpu_core, %g2
sllx %g1, CPU_CORE_SHIFT, %g1
add %g1, %g2, %g1 ! %g1 = &cpu_core[cpu]
lduh [%g1 + CPUC_DTRACE_FLAGS], %g2
andcc %g2, CPU_DTRACE_NOFAULT, %g0
bz sfmmu_mmu_trap
or %g2, CPU_DTRACE_BADADDR, %g2 ! (delay slot)
stuh %g2, [%g1 + CPUC_DTRACE_FLAGS]
GET_MMU_D_ADDR(%g3, %g4)
stx %g3, [%g1 + CPUC_DTRACE_ILLVAL]
rdpr %tstate, %g2
btst TSTATE_PRIV, %g2
bz,a ptl1_panic ! dtrace nofault from userland is bogus
mov PTL1_BAD_DTRACE_FLAGS, %g1
done
/*
 * tsb_tl0_noctxt: a TL0 miss arrived with an unexpected (stolen or
 * invalid) context.  First honor the dtrace no-fault protocol as in
 * tsb_pagefault.  Otherwise read the faulting context from the MMFSA
 * (sun4v) or tag access registers (sun4u): a zero context here means
 * the kernel context was stolen -- panic.  A user context goes to
 * sfmmu_mmu_trap (TL <= 1) or sfmmu_window_trap (TL > 1).
 */
ALTENTRY(tsb_tl0_noctxt)
CPU_INDEX(%g5, %g6)
set cpu_core, %g6
sllx %g5, CPU_CORE_SHIFT, %g5
add %g5, %g6, %g5 ! %g5 = &cpu_core[cpu]
lduh [%g5 + CPUC_DTRACE_FLAGS], %g6
andcc %g6, CPU_DTRACE_NOFAULT, %g0
bz 1f
or %g6, CPU_DTRACE_BADADDR, %g6 ! (delay slot)
stuh %g6, [%g5 + CPUC_DTRACE_FLAGS]
GET_MMU_D_ADDR(%g3, %g4)
stx %g3, [%g5 + CPUC_DTRACE_ILLVAL]
rdpr %tstate, %g5
btst TSTATE_PRIV, %g5
bz,a ptl1_panic ! dtrace nofault from userland is bogus
mov PTL1_BAD_DTRACE_FLAGS, %g1
TSTAT_CHECK_TL1(2f, %g1, %g2);
2:
done
1:
rdpr %tt, %g5
cmp %g5, FAST_IMMU_MISS_TT ! i-side or d-side ctx?
#ifdef sun4v
MMU_FAULT_STATUS_AREA(%g2)
be,a,pt %icc, 2f
ldx [%g2 + MMFSA_I_CTX], %g3
cmp %g5, T_INSTR_MMU_MISS
be,a,pt %icc, 2f
ldx [%g2 + MMFSA_I_CTX], %g3
ldx [%g2 + MMFSA_D_CTX], %g3
2:
#else
mov MMU_TAG_ACCESS, %g2
be,a,pt %icc, 2f
ldxa [%g2]ASI_IMMU, %g3
ldxa [%g2]ASI_DMMU, %g3
2: sllx %g3, TAGACC_CTX_LSHIFT, %g3 ! isolate ctx
#endif
brz,a,pn %g3, ptl1_panic ! panic if called for kernel
mov PTL1_BAD_CTX_STEAL, %g1 ! since kernel ctx was stolen
rdpr %tl, %g5
cmp %g5, 1
ble,pt %icc, sfmmu_mmu_trap
nop
TSTAT_CHECK_TL1(sfmmu_mmu_trap, %g1, %g2)
ba,pt %icc, sfmmu_window_trap
nop
SET_SIZE(sfmmu_tsb_miss)
/*
 * sfmmu_vatopfn(vaddr, hatid, ttep)
 *   %o0 = vaddr, %o1 = hatid (must be KHATID), %o2 = ttep (out).
 * Translate a kernel VA to a PFN by searching the hme hash directly,
 * with interrupts disabled (we reuse the per-CPU tsbmiss scratch
 * area, so we must not migrate or be re-entered).  Iterates over the
 * hash numbers/page sizes (64K range first, then 512K/4M on sun4u or
 * 4M/256M on sun4v) until a TTE is found or the max hash count is
 * reached.  Returns PFN in %o0, -1 if no valid mapping, or -2 if the
 * mapping is suspended (tsb_suspend, *ttep still stored).  Panics if
 * called with a non-kernel hatid.
 */
ENTRY_NP(sfmmu_vatopfn)
rdpr %pstate, %o3 ! save current pstate
#ifdef DEBUG
PANIC_IF_INTR_DISABLED_PSTR(%o3, sfmmu_di_l5, %g1)
#endif
andn %o3, PSTATE_IE, %o5
wrpr %o5, 0, %pstate ! disable interrupts
CPU_TSBMISS_AREA(%g1, %o5)
ldn [%g1 + TSBMISS_KHATID], %o4
cmp %o4, %o1 ! only the kernel hat is supported
bne,pn %ncc, vatopfn_nokernel
mov TTE64K, %g5 ! (delay slot) initial hashno
mov %g1,%o5 ! %o5 = tsbmiss area
mov HBLK_RANGE_SHIFT, %g6 ! initial hmeshift
1:
set TAGACC_CTX_MASK, %g1
andn %o0, %g1, %o0 ! clear ctx bits from vaddr
GET_TTE(%o0, %o4, %g1, %g2, %o5, %g4, %g6, %g5, %g3,
vatopfn_l1, kvtop_hblk_found, tsb_suspend, kvtop_nohblk)
kvtop_hblk_found:
brgez,a,pn %g1, 6f ! invalid tte (non-negative)
mov -1, %o0
stx %g1,[%o2] ! *ttep = tte
TTETOPFN(%g1, %o0, vatopfn_l2, %g2, %g3, %g4) ! %g1 = pfn
ba,pt %xcc, 6f
mov %g1, %o0 ! (delay slot) return pfn
kvtop_nohblk:
! Miss at this size: restore vaddr and try the next hash level.
ldn [%o5 + (TSBMISS_SCRATCH + TSB_TAGACC)], %o0
#ifdef sun4v
cmp %g5, MAX_HASHCNT
#else
cmp %g5, DEFAULT_MAX_HASHCNT ! no 32M/256M on sun4u
#endif
be,a,pn %icc, 6f ! exhausted all sizes
mov -1, %o0
mov %o1, %o4 ! restore hatid
#ifdef sun4v
add %g5, 2, %g5 ! hashno: 1 -> 3 -> 5
cmp %g5, 3
move %icc, MMU_PAGESHIFT4M, %g6
ba,pt %icc, 1b
movne %icc, MMU_PAGESHIFT256M, %g6 ! (delay slot)
#else
inc %g5 ! hashno: 1 -> 2 -> 3
cmp %g5, 2
move %icc, MMU_PAGESHIFT512K, %g6
ba,pt %icc, 1b
movne %icc, MMU_PAGESHIFT4M, %g6 ! (delay slot)
#endif
6:
retl
wrpr %g0, %o3, %pstate ! (delay slot) re-enable interrupts
tsb_suspend:
stx %g1,[%o2] ! *ttep = tte
brgez,a,pn %g1, 8f ! invalid tte: return -1
sub %g0, 1, %o0
sub %g0, 2, %o0 ! valid but suspended: return -2
8:
retl
wrpr %g0, %o3, %pstate ! (delay slot) re-enable interrupts
vatopfn_nokernel:
! Caller passed a user hatid: not supported at this level.
wrpr %g0, %o3, %pstate ! re-enable interrupts
save %sp, -SA(MINFRAME), %sp
sethi %hi(sfmmu_panic3), %o0
call panic
or %o0, %lo(sfmmu_panic3), %o0 ! (delay slot)
SET_SIZE(sfmmu_vatopfn)
/*
 * sfmmu_kvaszc2pfn(vaddr, szc)
 *   %o0 = vaddr, %o1 = page size code.
 * Look up the PFN for a kernel VA at exactly the given page size:
 * a single GET_TTE probe with hmeshift = 3 * szc + MMU_PAGESHIFT
 * (so szc doubles as the hash number).  Interrupts are disabled
 * around the probe, as in sfmmu_vatopfn.  Returns PFN in %o0 or -1
 * when there is no valid mapping at that size (a suspended mapping
 * also takes the nohblk exit here).
 */
ENTRY_NP(sfmmu_kvaszc2pfn)
rdpr %pstate, %o3 ! save current pstate
#ifdef DEBUG
PANIC_IF_INTR_DISABLED_PSTR(%o3, sfmmu_di_l6, %g1)
#endif
andn %o3, PSTATE_IE, %o5
wrpr %o5, 0, %pstate ! disable interrupts
CPU_TSBMISS_AREA(%g1, %o5)
ldn [%g1 + TSBMISS_KHATID], %o4
sll %o1, 1, %g6 ! %g6 = 3*szc + MMU_PAGESHIFT
add %g6, %o1, %g6
add %g6, MMU_PAGESHIFT, %g6
srlx %o0, MMU_PAGESHIFT, %o0 ! page-align vaddr
sllx %o0, MMU_PAGESHIFT, %o0
GET_TTE(%o0, %o4, %g3, %g4, %g1, %o5, %g6, %o1, %g5,
kvaszc2pfn_l1, kvaszc2pfn_hblk_found, kvaszc2pfn_nohblk,
kvaszc2pfn_nohblk)
kvaszc2pfn_hblk_found:
brgez,a,pn %g3, 1f ! invalid tte (non-negative)
mov -1, %o0
TTETOPFN(%g3, %o0, kvaszc2pfn_l2, %g2, %g4, %g5) ! %g3 = pfn
ba,pt %xcc, 1f
mov %g3, %o0 ! (delay slot) return pfn
kvaszc2pfn_nohblk:
mov -1, %o0
1:
retl
wrpr %g0, %o3, %pstate ! (delay slot) re-enable interrupts
SET_SIZE(sfmmu_kvaszc2pfn)
/*
 * KPMLOCK_ENTER: spin-acquire a kpm byte lock at kpmlckp via casa in
 * the given ASI (0 = unlocked, 0xff = held); spins at label1 until
 * the cas returns zero.  membar #LoadLoad orders subsequent loads
 * after acquisition.  tmp1 is clobbered.
 * KPMLOCK_EXIT: release the lock with a store of zero, preceded by a
 * membar so all prior accesses complete before the release.
 */
#define KPMLOCK_ENTER(kpmlckp, tmp1, label1, asi) \
mov 0xff, tmp1 ;\
label1: ;\
casa [kpmlckp]asi, %g0, tmp1 ;\
brnz,pn tmp1, label1 ;\
mov 0xff, tmp1 ;\
membar #LoadLoad
#define KPMLOCK_EXIT(kpmlckp, asi) \
membar #LoadStore|#StoreStore ;\
sta %g0, [kpmlckp]asi
/*
 * PAGE_NUM2MEMSEG_NOLOCK_PA: find the memseg containing pfn, without
 * taking any locks, using physical-address loads via %asi.
 * Fast path: hash pfn (pfn / mhash_per_slot mod SFMMU_N_MEM_SLOTS)
 * into the per-tsbmiss memseg hash at KPMTSBM_MSEGPHASHPA, and accept
 * the cached memseg only if pfn is inside [pages_base, pages_end) and
 * the page_t at the computed index really has that pagenum.
 * Slow path (label##1): linearly walk the memsegspa list comparing
 * pfn against each memseg's range.
 * Out: mseg = memseg PA or MSEG_NULLPTR_PA.  Clob: tmp1-tmp3.
 */
#define PAGE_NUM2MEMSEG_NOLOCK_PA(pfn, mseg, tsbmp, tmp1, tmp2, tmp3, label) \
sethi %hi(mhash_per_slot), tmp3 ;\
ldx [tmp3 + %lo(mhash_per_slot)], mseg ;\
udivx pfn, mseg, mseg ;\
ldx [tsbmp + KPMTSBM_MSEGPHASHPA], tmp1 ;\
and mseg, SFMMU_N_MEM_SLOTS - 1, mseg ;\
sllx mseg, SFMMU_MEM_HASH_ENTRY_SHIFT, mseg ;\
add tmp1, mseg, tmp1 ;\
ldxa [tmp1]%asi, mseg ;\
cmp mseg, MSEG_NULLPTR_PA ;\
be,pn %xcc, label##1 ;\
nop ;\
ldxa [mseg + MEMSEG_PAGES_BASE]%asi, tmp1 ;\
cmp pfn, tmp1 ;\
blu,pn %xcc, label##1 ;\
ldxa [mseg + MEMSEG_PAGES_END]%asi, tmp2 ;\
cmp pfn, tmp2 ;\
bgeu,pn %xcc, label##1 ;\
sub pfn, tmp1, tmp1 ;\
mulx tmp1, PAGE_SIZE, tmp1 ;\
ldxa [mseg + MEMSEG_PAGESPA]%asi, tmp2 ;\
add tmp2, tmp1, tmp1 ;\
lduwa [tmp1 + PAGE_PAGENUM]%asi, tmp2 ;\
cmp tmp2, pfn ;\
be,pt %xcc, label##_ok ;\
label##1: ;\
;\
sethi %hi(memsegspa), tmp3 ;\
ldx [tmp3 + %lo(memsegspa)], mseg ;\
label##2: ;\
cmp mseg, MSEG_NULLPTR_PA ;\
be,pn %xcc, label##_ok ;\
nop ;\
ldxa [mseg + MEMSEG_PAGES_BASE]%asi, tmp1 ;\
cmp pfn, tmp1 ;\
blu,a,pt %xcc, label##2 ;\
ldxa [mseg + MEMSEG_NEXTPA]%asi, mseg ;\
ldxa [mseg + MEMSEG_PAGES_END]%asi, tmp2 ;\
cmp pfn, tmp2 ;\
bgeu,a,pt %xcc, label##2 ;\
ldxa [mseg + MEMSEG_NEXTPA]%asi, mseg ;\
label##_ok:
/*
 * sfmmu_kpm_dtsb_miss: kernel kpm (large-page) mapping DTLB miss.
 * In: %g2 = fault VA, %g3 = kpm TSB entry pointer.
 * Locates the per-CPU kpmtsbm area, validates that kpm is enabled and
 * the VA lies in [KPMTSBM_VBASE, KPMTSBM_VEND), bumps the miss stat,
 * converts the VA to a PFN, finds its memseg and kp_page / hash-lock,
 * synthesizes a 4M kernel TTE (CP|CV|PRIV|HWWR|VALID, PA from the
 * PFN), and -- under the kpmp hash lock -- inserts it into the kpm
 * TSB and DTLB if the page's refcntc is -1 (mapped state); otherwise
 * (or for conflict/unmapped states) exits to sfmmu_kpm_exception.
 * Before retry, the same trapstat %tpc adjustment as the main miss
 * handler is applied (tsbmiss_trapstat_patch_point_kpm).
 */
ALTENTRY(sfmmu_kpm_dtsb_miss)
TT_TRACE(trace_tsbmiss)
CPU_INDEX(%g7, %g6)
sethi %hi(kpmtsbm_area), %g6
sllx %g7, KPMTSBM_SHIFT, %g7
or %g6, %lo(kpmtsbm_area), %g6
add %g6, %g7, %g6 ! %g6 = this CPU's kpmtsbm area
ldub [%g6 + KPMTSBM_FLAGS], %g4
and %g4, KPMTSBM_ENABLE_FLAG, %g5
brz,pn %g5, sfmmu_tsb_miss ! kpm disabled: normal path
nop
ldx [%g6 + KPMTSBM_VBASE], %g7
cmp %g2, %g7
blu,pn %xcc, sfmmu_tsb_miss ! below kpm range
ldx [%g6 + KPMTSBM_VEND], %g5 ! (delay slot)
cmp %g2, %g5
bgeu,pn %xcc, sfmmu_tsb_miss ! above kpm range
stx %g3, [%g6 + KPMTSBM_TSBPTR] ! (delay slot) save tsbptr
lduw [%g6 + KPMTSBM_TSBMISS], %g5
#ifdef DEBUG
and %g4, KPMTSBM_TLTSBM_FLAG, %g3 ! TL-handling allowed?
inc %g5
brz,pn %g3, sfmmu_kpm_exception
st %g5, [%g6 + KPMTSBM_TSBMISS] ! (delay slot) bump stat
#else
inc %g5
st %g5, [%g6 + KPMTSBM_TSBMISS] ! bump stat
#endif
! VA -> PFN: offset from vbase must fit in KPMTSBM_SZSHIFT bits.
ldub [%g6 + KPMTSBM_SZSHIFT], %g3
sub %g2, %g7, %g4 ! %g4 = offset from kpm vbase
srax %g4, %g3, %g2
brnz,pn %g2, sfmmu_kpm_exception ! out-of-range offset
srlx %g4, MMU_PAGESHIFT, %g2 ! (delay slot) %g2 = pfn
mov ASI_MEM, %asi ! PA accesses from here on
PAGE_NUM2MEMSEG_NOLOCK_PA(%g2, %g3, %g6, %g4, %g5, %g7, kpmtsbmp2m)
cmp %g3, MSEG_NULLPTR_PA
be,pn %xcc, sfmmu_kpm_exception ! pfn not in any memseg
nop
! Index into the memseg's kpm_page array and its hash lock table.
ldub [%g6 + KPMTSBM_KPMP2PSHFT], %g5
ldxa [%g3 + MEMSEG_KPM_PBASE]%asi, %g7
srlx %g2, %g5, %g4 ! round pfn down to kpm page
sllx %g4, %g5, %g4
sub %g4, %g7, %g4 ! offset from kpm_pbase
srlx %g4, %g5, %g4 ! %g4 = kpm page index
#ifdef DEBUG
ldxa [%g3 + MEMSEG_KPM_NKPMPGS]%asi, %g5
cmp %g4, %g5
bgeu,pn %xcc, sfmmu_kpm_exception ! index out of bounds
ld [%g6 + KPMTSBM_KPMPTABLESZ], %g7 ! (delay slot)
#else
ld [%g6 + KPMTSBM_KPMPTABLESZ], %g7
#endif
sllx %g4, KPMPAGE_SHIFT, %g4 ! byte offset of kpm_page
ldxa [%g3 + MEMSEG_KPM_PAGES]%asi, %g5
add %g5, %g4, %g5 ! %g5 = kpm_page VA
ldub [%g6 + KPMTSBM_KPMPSHIFT], %g1
sub %g7, 1, %g7 ! hash table size mask
srlx %g5, %g1, %g1
add %g5, %g1, %g5
and %g5, %g7, %g5 ! %g5 = kpmp hash index
ldxa [%g3 + MEMSEG_KPM_PAGESPA]%asi, %g1
add %g1, %g4, %g1 ! %g1 = kpm_page PA
ldx [%g6 + KPMTSBM_KPMPTABLEPA], %g4
sllx %g5, KPMHLK_SHIFT, %g5
add %g4, %g5, %g3
add %g3, KPMHLK_LOCK, %g3 ! %g3 = PA of kpmp hash lock
! Synthesize a valid 4M kernel TTE for this pfn in %g5.
#ifdef sun4v
sethi %hi(TTE_VALID_INT), %g5
sllx %g5, 32, %g5
mov (TTE_CP_INT|TTE_CV_INT|TTE_PRIV_INT|TTE_HWWR_INT), %g4
or %g4, TTE4M, %g4
or %g5, %g4, %g5
#else
sethi %hi(TTE_VALID_INT), %g4
mov TTE4M, %g5
sllx %g5, TTE_SZ_SHFT_INT, %g5
or %g5, %g4, %g5
sllx %g5, 32, %g5
mov (TTE_CP_INT|TTE_CV_INT|TTE_PRIV_INT|TTE_HWWR_INT), %g4
or %g5, %g4, %g5
#endif
sllx %g2, MMU_PAGESHIFT, %g4 ! PA bits from pfn
or %g5, %g4, %g5 ! %g5 = complete TTE
ldx [%g6 + KPMTSBM_TSBPTR], %g4
GET_MMU_D_TTARGET(%g2, %g7) ! %g2 = tag target
KPMLOCK_ENTER(%g3, %g7, kpmtsbmhdlr1, ASI_MEM)
! Only insert if the kpm page is in the "mapped" state
! (refcntc == -1); all other states are handled at C level.
ldsha [%g1 + KPMPAGE_REFCNTC]%asi, %g7
cmp %g7, -1
bne,pn %xcc, 5f
nop
#ifdef DEBUG
ldsha [%g1 + KPMPAGE_REFCNT]%asi, %g7
brz,pn %g7, 5f ! mapped state requires refcnt != 0
nop
#endif
#ifndef sun4v
ldub [%g6 + KPMTSBM_FLAGS], %g7
mov ASI_N, %g1
andcc %g7, KPMTSBM_TSBPHYS_FLAG, %g0
movnz %icc, ASI_MEM, %g1 ! TSB accessed by PA?
mov %g1, %asi
#endif
TSB_LOCK_ENTRY(%g4, %g1, %g7, locked_tsb_l1)
TSB_INSERT_UNLOCK_ENTRY(%g4, %g5, %g2, %g7)
locked_tsb_l1:
DTLB_STUFF(%g5, %g1, %g2, %g4, %g6)
KPMLOCK_EXIT(%g3, ASI_MEM)
! Trapstat %tpc adjustment (same scheme as sfmmu_tsb_miss).
rdpr %tl, %g7
cmp %g7, 1
ble %icc, 0f
sethi %hi(KERNELBASE), %g6 ! (delay slot)
rdpr %tpc, %g7
or %g6, %lo(KERNELBASE), %g6
cmp %g7, %g6
bgeu %xcc, 0f
ALTENTRY(tsbmiss_trapstat_patch_point_kpm)
add %g7, RUNTIME_PATCH, %g7 ! skip-count hot patched
wrpr %g7, %tpc
add %g7, 4, %g7
wrpr %g7, %tnpc
0:
retry
5:
KPMLOCK_EXIT(%g3, ASI_MEM)
ba,pt %icc, sfmmu_kpm_exception ! let C code handle it
nop
SET_SIZE(sfmmu_kpm_dtsb_miss)
/*
 * sfmmu_kpm_dtsb_miss_small: kpm DTLB miss for small-page (8K) kpm
 * mappings.  In: %g2 = fault VA, %g1 = 1st TSB entry pointer.
 * Same overall structure as sfmmu_kpm_dtsb_miss, but:
 *  - the VA->PFN conversion undoes the VAC-color alias offset
 *    (vac_colors_mask arithmetic at 1:/2:) baked into small-kpm VAs;
 *  - indexes MEMSEG_KPM_SPAGES / KPMSHLK_SHIFT structures instead of
 *    the large kpm_page table;
 *  - synthesizes an 8K TTE (no size bits, TTE_CV_INT added only when
 *    the page state is KPM_MAPPEDS);
 *  - the insert is gated on KPM_MAPPED_GO in the kpm_spage mapped
 *    byte; otherwise exits to sfmmu_kpm_exception.
 */
ALTENTRY(sfmmu_kpm_dtsb_miss_small)
TT_TRACE(trace_tsbmiss)
CPU_INDEX(%g7, %g6)
sethi %hi(kpmtsbm_area), %g6
sllx %g7, KPMTSBM_SHIFT, %g7
or %g6, %lo(kpmtsbm_area), %g6
add %g6, %g7, %g6 ! %g6 = this CPU's kpmtsbm area
ldub [%g6 + KPMTSBM_FLAGS], %g4
and %g4, KPMTSBM_ENABLE_FLAG, %g5
brz,pn %g5, sfmmu_tsb_miss ! kpm disabled: normal path
nop
ldx [%g6 + KPMTSBM_VBASE], %g7
cmp %g2, %g7
blu,pn %xcc, sfmmu_tsb_miss ! below kpm range
ldx [%g6 + KPMTSBM_VEND], %g5 ! (delay slot)
cmp %g2, %g5
bgeu,pn %xcc, sfmmu_tsb_miss ! above kpm range
stx %g1, [%g6 + KPMTSBM_TSBPTR] ! (delay slot) save tsbptr
lduw [%g6 + KPMTSBM_TSBMISS], %g5
#ifdef DEBUG
and %g4, KPMTSBM_TLTSBM_FLAG, %g1 ! TL-handling allowed?
inc %g5
brz,pn %g1, sfmmu_kpm_exception
st %g5, [%g6 + KPMTSBM_TSBMISS] ! (delay slot) bump stat
#else
inc %g5
st %g5, [%g6 + KPMTSBM_TSBMISS] ! bump stat
#endif
! VA -> PFN, correcting for the VAC color alias offset.
ldub [%g6 + KPMTSBM_SZSHIFT], %g3
sub %g2, %g7, %g4 ! %g4 = offset from kpm vbase
srax %g4, %g3, %g7 ! %g7 = alias range index
brz,pt %g7, 2f ! no alias offset to undo
sethi %hi(vac_colors_mask), %g5 ! (delay slot)
ld [%g5 + %lo(vac_colors_mask)], %g5
srlx %g2, MMU_PAGESHIFT, %g1
and %g1, %g5, %g1 ! %g1 = VAC color of VA
sllx %g7, %g3, %g5
cmp %g7, %g1
bleu,pn %xcc, 1f
sub %g4, %g5, %g4 ! (delay slot) remove range offset
sub %g7, %g1, %g5
sllx %g5, MMU_PAGESHIFT, %g7
add %g4, %g7, %g4
ba 2f
nop
1:
sllx %g7, MMU_PAGESHIFT, %g5
sub %g4, %g5, %g4
2:
srlx %g4, MMU_PAGESHIFT, %g2 ! %g2 = pfn
mov ASI_MEM, %asi ! PA accesses from here on
PAGE_NUM2MEMSEG_NOLOCK_PA(%g2, %g3, %g6, %g4, %g5, %g7, kpmtsbmsp2m)
cmp %g3, MSEG_NULLPTR_PA
be,pn %xcc, sfmmu_kpm_exception ! pfn not in any memseg
nop
! Index into the memseg's kpm_spage array and hash lock table.
ldxa [%g3 + MEMSEG_KPM_PBASE]%asi, %g7
sub %g2, %g7, %g4 ! %g4 = kpm spage index
#ifdef DEBUG
ldxa [%g3 + MEMSEG_KPM_NKPMPGS]%asi, %g5
cmp %g4, %g5
bgeu,pn %xcc, sfmmu_kpm_exception ! index out of bounds
ld [%g6 + KPMTSBM_KPMPTABLESZ], %g7 ! (delay slot)
#else
ld [%g6 + KPMTSBM_KPMPTABLESZ], %g7
#endif
ldxa [%g3 + MEMSEG_KPM_SPAGES]%asi, %g5
add %g5, %g4, %g5 ! %g5 = kpm_spage VA
ldub [%g6 + KPMTSBM_KPMPSHIFT], %g1
sub %g7, 1, %g7 ! hash table size mask
sllx %g5, %g1, %g1
add %g5, %g1, %g5
and %g5, %g7, %g5 ! %g5 = kpmsp hash index
ldxa [%g3 + MEMSEG_KPM_PAGESPA]%asi, %g1
add %g1, %g4, %g1 ! %g1 = kpm_spage PA
ldx [%g6 + KPMTSBM_KPMPTABLEPA], %g4
sllx %g5, KPMSHLK_SHIFT, %g5
add %g4, %g5, %g3 ! %g3 = PA of kpmsp hash lock
! Synthesize a valid 8K kernel TTE (CV added below if allowed).
sethi %hi(TTE_VALID_INT), %g5
sllx %g5, 32, %g5
mov (TTE_CP_INT|TTE_PRIV_INT|TTE_HWWR_INT), %g4
or %g5, %g4, %g5
sllx %g2, MMU_PAGESHIFT, %g4 ! PA bits from pfn
or %g5, %g4, %g5 ! %g5 = complete TTE
ldx [%g6 + KPMTSBM_TSBPTR], %g4
GET_MMU_D_TTARGET(%g2, %g7) ! %g2 = tag target
KPMLOCK_ENTER(%g3, %g7, kpmtsbsmlock, ASI_MEM)
! Only insert when the spage's "go" bit is set.
ldsba [%g1 + KPMSPAGE_MAPPED]%asi, %g7
andcc %g7, KPM_MAPPED_GO, %g0
bz,pt %icc, 5f ! not ready: C level handles it
nop
and %g7, KPM_MAPPED_MASK, %g7
cmp %g7, KPM_MAPPEDS
be,a,pn %xcc, 3f
or %g5, TTE_CV_INT, %g5 ! (annulled-taken) cacheable
3:
#ifndef sun4v
ldub [%g6 + KPMTSBM_FLAGS], %g7
mov ASI_N, %g1
andcc %g7, KPMTSBM_TSBPHYS_FLAG, %g0
movnz %icc, ASI_MEM, %g1 ! TSB accessed by PA?
mov %g1, %asi
#endif
TSB_LOCK_ENTRY(%g4, %g1, %g7, locked_tsb_l2)
TSB_INSERT_UNLOCK_ENTRY(%g4, %g5, %g2, %g7)
locked_tsb_l2:
DTLB_STUFF(%g5, %g2, %g4, %g5, %g6)
KPMLOCK_EXIT(%g3, ASI_MEM)
! Trapstat %tpc adjustment (same scheme as sfmmu_tsb_miss).
rdpr %tl, %g7
cmp %g7, 1
ble %icc, 0f
sethi %hi(KERNELBASE), %g6 ! (delay slot)
rdpr %tpc, %g7
or %g6, %lo(KERNELBASE), %g6
cmp %g7, %g6
bgeu %xcc, 0f
ALTENTRY(tsbmiss_trapstat_patch_point_kpm_small)
add %g7, RUNTIME_PATCH, %g7 ! skip-count hot patched
wrpr %g7, %tpc
add %g7, 4, %g7
wrpr %g7, %tnpc
0:
retry
5:
KPMLOCK_EXIT(%g3, ASI_MEM)
ba,pt %icc, sfmmu_kpm_exception ! let C code handle it
nop
SET_SIZE(sfmmu_kpm_dtsb_miss_small)
/*
 * Compile-time check: the kpmtsbm handlers index the per-CPU kpmtsbm_area
 * by shifting with KPMTSBM_SHIFT, so the struct size must be exactly
 * (1 << KPMTSBM_SHIFT).
 */
#if (1<< KPMTSBM_SHIFT) != KPMTSBM_SIZE
#error - KPMTSBM_SHIFT does not correspond to size of kpmtsbm struct
#endif
/*
 * Panic message strings for the DEBUG interrupt-state checks in
 * sfmmu_kpm_tsbmtl/sfmmu_kpm_stsbmtl below (NUL-terminated by the
 * explicit .byte 0).
 */
.seg ".data"
sfmmu_kpm_tsbmtl_panic:
.ascii "sfmmu_kpm_tsbmtl: interrupts disabled"
.byte 0
sfmmu_kpm_stsbmtl_panic:
.ascii "sfmmu_kpm_stsbmtl: interrupts disabled"
.byte 0
.align 4
.seg ".text"
/*
 * sfmmu_kpm_tsbmtl: atomically set or clear a 16-bit kpm flag word under
 * the kpmsp-style lock.
 *
 * In (per the code): %o0 = address of the short flag to update,
 *                    %o1 = lock address (KPMLOCK_ENTER/EXIT target),
 *                    %o2 = selector: non-zero -> store -1, zero -> store 0.
 * Must be entered with interrupts ENABLED (DEBUG build panics otherwise);
 * interrupts are disabled for the duration of the lock and restored on
 * return. Clobbers %o3, %o4, %o5.
 */
ENTRY_NP(sfmmu_kpm_tsbmtl)
rdpr %pstate, %o3			! save caller's %pstate
#ifdef DEBUG
! Sanity: PSTATE_IE must be set on entry; panic if interrupts were
! already disabled.
andcc %o3, PSTATE_IE, %g0
bnz,pt %icc, 1f
nop
save %sp, -SA(MINFRAME), %sp
sethi %hi(sfmmu_kpm_tsbmtl_panic), %o0
call panic
or %o0, %lo(sfmmu_kpm_tsbmtl_panic), %o0	! (delay slot) panic msg
ret
restore
1:
#endif
! wrpr with two operands XORs them: this clears PSTATE_IE (disables
! interrupts) since IE was verified set above.
wrpr %o3, PSTATE_IE, %pstate
KPMLOCK_ENTER(%o1, %o4, kpmtsbmtl1, ASI_N)
! %o5 = -1 if %o2 != 0, else 0 (brz,a annuls the mov when not taken).
mov -1, %o5
brz,a %o2, 2f
mov 0, %o5				! (annulled delay slot)
2:
sth %o5, [%o0]				! publish new flag value
KPMLOCK_EXIT(%o1, ASI_N)
retl
wrpr %g0, %o3, %pstate			! (delay slot) restore interrupts
SET_SIZE(sfmmu_kpm_tsbmtl)
/*
 * sfmmu_kpm_stsbmtl: atomically exchange a kpm_spage mapped-state byte
 * under the lock and return the previous state.
 *
 * In (per the code): %o0 = address of the mapped-state byte,
 *                    %o1 = lock address,
 *                    %o2 = new byte value to store.
 * Out: %o0 = previous value & KPM_MAPPED_MASK.
 * Must be entered with interrupts ENABLED (DEBUG build panics otherwise);
 * interrupts are disabled around the lock and restored on return.
 * Clobbers %o3, %o4, %o5.
 */
ENTRY_NP(sfmmu_kpm_stsbmtl)
rdpr %pstate, %o3			! save caller's %pstate
#ifdef DEBUG
! Sanity: PSTATE_IE must be set on entry.
andcc %o3, PSTATE_IE, %g0
bnz,pt %icc, 1f
nop
save %sp, -SA(MINFRAME), %sp
sethi %hi(sfmmu_kpm_stsbmtl_panic), %o0
call panic
or %o0, %lo(sfmmu_kpm_stsbmtl_panic), %o0	! (delay slot) panic msg
ret
restore
1:
#endif
! Disable interrupts (wrpr XORs; IE was verified set above).
wrpr %o3, PSTATE_IE, %pstate
KPMLOCK_ENTER(%o1, %o4, kpmstsbmtl1, ASI_N)
ldsb [%o0], %o5				! %o5 = previous mapped state
stb %o2, [%o0]				! store new state
KPMLOCK_EXIT(%o1, ASI_N)
and %o5, KPM_MAPPED_MASK, %o0		! return old state bits only
retl
wrpr %g0, %o3, %pstate			! (delay slot) restore interrupts
SET_SIZE(sfmmu_kpm_stsbmtl)
#ifdef sun4v
/*
 * sfmmu_slow_dmmu_miss (sun4v only): slow-path dMMU miss entry.
 * Reconstructs the TSB entry pointers (%g1 = 1st TSB, %g3 = 2nd/4M TSB
 * for kernel) that the fast miss path normally provides, then joins the
 * common handler at sfmmu_tsb_miss_tt. The ktsb base/szcode values are
 * RUNTIME_PATCH'ed at boot via the sfmmu_dslow_patch_* labels.
 */
ALTENTRY(sfmmu_slow_dmmu_miss)
GET_MMU_D_PTAGACC_CTXTYPE(%g2, %g3)	! %g2 = ptagacc, %g3 = ctx type
slow_miss_common:
brnz,pt %g3, 8f				! check for user context
nop
! Kernel context: compute both kernel TSB entry pointers.
mov %g2, %g7				! TSB pointer macro clobbers tagacc
sfmmu_dslow_patch_ktsb_base:
RUNTIME_PATCH_SETX(%g1, %g6)		! %g1 = contents of ktsb_pbase
sfmmu_dslow_patch_ktsb_szcode:
or %g0, RUNTIME_PATCH, %g3		! ktsb_szcode (hot patched)
GET_TSBE_POINTER(MMU_PAGESHIFT, %g1, %g7, %g3, %g5)
! %g1 = First TSB entry pointer, as TSB miss handler expects
mov %g2, %g7				! TSB pointer macro clobbers tagacc
sfmmu_dslow_patch_ktsb4m_base:
RUNTIME_PATCH_SETX(%g3, %g6)		! %g3 = contents of ktsb4m_pbase
sfmmu_dslow_patch_ktsb4m_szcode:
or %g0, RUNTIME_PATCH, %g6		! ktsb4m_szcode (hot patched)
GET_TSBE_POINTER(MMU_PAGESHIFT4M, %g3, %g7, %g6, %g5)
! %g3 = 4M tsb entry pointer, as TSB miss handler expects
ba,a,pt %xcc, sfmmu_tsb_miss_tt
.empty
8:
! User context: 1st TSB pointer always; 2nd only if the scratchpad
! register holds a valid (non-negative) 2nd-TSB descriptor.
GET_1ST_TSBE_PTR(%g2, %g1, %g4, %g5)
GET_UTSBREG(SCRATCHPAD_UTSBREG2, %g3)
brlz,pt %g3, sfmmu_tsb_miss_tt		! no 2nd TSB configured
nop
GET_2ND_TSBE_PTR(%g2, %g3, %g4, %g5)
9:
ba,a,pt %xcc, sfmmu_tsb_miss_tt
.empty
SET_SIZE(sfmmu_slow_dmmu_miss)
/*
 * sfmmu_slow_immu_miss (sun4v only): slow-path iMMU miss entry.
 * Loads the instruction-side tagacc/ctx type, then shares the dMMU
 * slow path above (slow_miss_common) to build the TSB pointers.
 */
ALTENTRY(sfmmu_slow_immu_miss)
GET_MMU_I_PTAGACC_CTXTYPE(%g2, %g3)	! %g2 = ptagacc, %g3 = ctx type
ba,a,pt %xcc, slow_miss_common
SET_SIZE(sfmmu_slow_immu_miss)
#endif
/*
 * Per-CPU handler scratch areas, one slot per possible CPU (NCPU),
 * 64-byte aligned (cache-line alignment to avoid false sharing —
 * NOTE(review): alignment rationale inferred, confirm line size).
 *   tsbmiss_area  - per-CPU tsbmiss state (TSBMISS_SIZE each)
 *   kpmtsbm_area  - per-CPU kpm tsbmiss state (KPMTSBM_SIZE each),
 *                   indexed via KPMTSBM_SHIFT (size checked above)
 */
.seg ".data"
.align 64
.global tsbmiss_area
tsbmiss_area:
.skip (TSBMISS_SIZE * NCPU)
.align 64
.global kpmtsbm_area
kpmtsbm_area:
.skip (KPMTSBM_SIZE * NCPU)