#include "dapl.h"
#include "dapl_tavor_hw.h"
#include "dapl_tavor_wr.h"
#include "dapl_tavor_ibtf_impl.h"
/* L_Key value stamped into an unused/terminating scatter-gather entry */
#define HERMON_WQE_SGL_INVALID_LKEY 0x00000100
/* Fence bit in the send WQE next/ctrl segment */
#define HERMON_WQE_SEND_FENCE_MASK 0x40
/* Low 6 bits of WQEADDRSZ carry the descriptor size (NDS) */
#define HERMON_WQE_NDS_MASK 0x3F
/* CQ doorbell commands: request notification on solicited / any CQE */
#define HERMON_CQDB_NOTIFY_CQ_SOLICIT (0x1 << 24)
#define HERMON_CQDB_NOTIFY_CQ (0x2 << 24)
/* CQE opcodes: receive-side send completion, error, and CQ-resize marker */
#define HERMON_CQE_RCV_SEND 0x1
#define HERMON_CQE_ERR_OPCODE 0x1E
#define HERMON_CQE_RESIZE_OPCODE 0x16
/* Field extractors operating on raw CQE byte 31 (opcode/owner byte) */
#define HERMON_CQE_OPCODE_GET(cqe) (((uint8_t *)cqe)[31] & 0x1F)
#define HERMON_CQE_SENDRECV_GET(cqe) (((uint8_t *)cqe)[31] & 0x40)
/*
 * Ownership test: the CQE's owner bit must match the "lap" bit of the
 * consumer index (flips each time the consumer index wraps the CQ).
 */
#define HERMON_CQE_OWNER_IS_SW(cq, cqe) ((((uint8_t *)cqe)[31] >> 7) == \
((cq->cq_consindx & cq->cq_size) >> cq->cq_log_cqsz))
/* Encode a work-request counter as a WQE address/size token (<< 6) */
#define HERMON_QP_WQEADDRSZ(wcnt) ((uint32_t)(wcnt << 6))
/* Completion-signal and solicit-event bits of the 64-bit ctrl word */
#define HERMON_WQE_SEND_SIGNALED_MASK 0x0000000C00000000ull
#define HERMON_WQE_SEND_SOLICIT_MASK 0x0000000200000000ull
/* Store the big-endian ctrl word into the second qword of the WQE */
#define HERMON_WQE_SETCTRL(desc, ctrl) \
((uint64_t *)(desc))[1] = HTOBE_64(ctrl)
/*
 * Store opcode/size/fence into the first qword while preserving the
 * current ownership bit (top bit of byte 0) of the descriptor.
 */
#define HERMON_WQE_SETNEXT(desc, nopcode, size, fence) \
((uint64_t *)(desc))[0] = HTOBE_64((nopcode) | (size) | (fence) | \
(((uint64_t)((uint8_t *)desc)[0] &0x80) << 56))
/*
 * Build one scatter-gather data segment.  The byte count is written last,
 * after a producer barrier, so hardware never sees a length paired with a
 * stale address/key.
 */
#define HERMON_WQE_BUILD_DATA_SEG(ds, sgl) \
{ \
uint64_t *tmp; \
\
tmp = (uint64_t *)(ds); \
tmp[1] = HTOBE_64((sgl)->ds_va); \
((uint32_t *)tmp)[1] = HTOBE_32((sgl)->ds_key); \
membar_producer(); \
((uint32_t *)tmp)[0] = HTOBE_32((sgl)->ds_len); \
}
/* NOTE: captures a local variable named "cq" at each use site */
#define cq_wrap_around_mask (cq->cq_size - 1)
/* Serializes copies into the shared BlueFlame register */
pthread_spinlock_t hermon_bf_lock;
extern uint64_t dapls_tavor_wrid_get_entry(ib_cq_handle_t, tavor_hw_cqe_t *,
uint_t, uint_t, dapls_tavor_wrid_entry_t *);
extern void dapls_tavor_wrid_cq_reap(ib_cq_handle_t);
extern DAPL_OS_LOCK g_tavor_uar_lock;
#ifndef _LP64
extern void dapls_atomic_assign_64(uint64_t, uint64_t *);
#endif
static int dapli_hermon_wqe_send_build(ib_qp_handle_t, ibt_send_wr_t *,
uint64_t *, uint_t *);
static DAT_RETURN dapli_hermon_wqe_recv_build(ib_qp_handle_t, ibt_recv_wr_t *,
uint64_t *, uint_t *);
static int dapli_hermon_cq_cqe_consume(ib_cq_handle_t, uint32_t *, ibt_wc_t *);
static int dapli_hermon_cq_errcqe_consume(ib_cq_handle_t, uint32_t *,
ibt_wc_t *);
extern void dapli_tavor_wrid_add_entry(dapls_tavor_workq_hdr_t *, uint64_t,
uint32_t, uint_t);
extern void dapli_tavor_wrid_add_entry_srq(ib_srq_handle_t, uint64_t, uint32_t);
/*
 * dapli_hermon_cq_doorbell()
 * Ring the CQ doorbell in the UAR page: high 32 bits carry
 * { cmd_sn | cq_cmd | cqn }, low 32 bits carry cq_param (consumer index).
 * The 64-bit store must reach the device atomically.
 */
static void
dapli_hermon_cq_doorbell(dapls_hw_uar_t ia_uar, uint32_t cq_cmd, uint32_t cqn,
uint32_t cmd_sn, uint32_t cq_param)
{
uint64_t doorbell;
doorbell = (cmd_sn | cq_cmd | cqn);
doorbell = (doorbell << 32) | cq_param;
#ifdef _LP64
/* 64-bit: single native store to the UAR */
((tavor_hw_uar_t *)ia_uar)->cq = HTOBE_64(doorbell);
#elif defined(i386)
/* 32-bit x86: two 32-bit stores, serialized by the global UAR lock */
dapl_os_lock(&g_tavor_uar_lock);
((tavor_hw_uar32_t *)ia_uar)->cq[0] =
(uint32_t)HTOBE_32(doorbell >> 32);
((tavor_hw_uar32_t *)ia_uar)->cq[1] =
(uint32_t)HTOBE_32(doorbell & 0x00000000ffffffff);
dapl_os_unlock(&g_tavor_uar_lock);
#else
/* other 32-bit platforms: helper performs an atomic 64-bit store */
dapls_atomic_assign_64(HTOBE_64(doorbell),
&((tavor_hw_uar_t *)ia_uar)->cq);
#endif
}
/*
 * dapli_hermon_sq_dbreg()
 * Ring the send-queue doorbell for the given QP number.  The QPN is
 * shifted into bits 8..31 of the doorbell word; the store to the UAR
 * must be a single atomic 64-bit write as seen by the device.
 */
static void
dapli_hermon_sq_dbreg(dapls_hw_uar_t ia_uar, uint32_t qpn)
{
uint64_t doorbell;
doorbell = qpn << 8;
#ifdef _LP64
/* 64-bit: single native store */
((tavor_hw_uar_t *)ia_uar)->send = HTOBE_64(doorbell);
#else
#if defined(i386)
/* 32-bit x86: two 32-bit stores under the global UAR lock */
dapl_os_lock(&g_tavor_uar_lock);
((tavor_hw_uar32_t *)ia_uar)->send[0] =
(uint32_t)HTOBE_32(doorbell >> 32);
((tavor_hw_uar32_t *)ia_uar)->send[1] =
(uint32_t)HTOBE_32(doorbell & 0x00000000ffffffff);
dapl_os_unlock(&g_tavor_uar_lock);
#else
/* other 32-bit platforms: atomic 64-bit store helper */
dapls_atomic_assign_64(HTOBE_64(doorbell),
&((tavor_hw_uar_t *)ia_uar)->send);
#endif
#endif
}
/*
 * dapli_hermon_wqe_send_build()
 * Build a send WQE at *addr from the work request *wr.  Handles SEND,
 * RDMA write/read and memory-window BIND opcodes.  Small payloads may be
 * copied inline into the WQE (when qp_sq_inline is enabled); otherwise a
 * scatter-gather list is built.  On return *size is the WQE size in
 * 16-byte units when the BlueFlame fast path may be used, or 0 otherwise.
 */
static int
dapli_hermon_wqe_send_build(ib_qp_handle_t qp, ibt_send_wr_t *wr,
uint64_t *addr, uint_t *size)
{
tavor_hw_snd_wqe_remaddr_t *rc;
tavor_hw_snd_wqe_bind_t *bn;
tavor_hw_wqe_sgl_t *ds;
ibt_wr_ds_t *sgl;
uint8_t *src, *dst, *maxdst;
uint32_t nds;
int len, thislen, maxlen;
uint32_t new_rkey;
uint32_t old_rkey;
int i, num_ds;
int max_inline_bytes = -1;
uint64_t ctrl;
uint64_t nopcode;
uint_t my_size;
nds = wr->wr_nds;
sgl = wr->wr_sgl;
num_ds = 0;
/* Completion-signal and solicited-event bits for the ctrl qword */
ctrl = ((wr->wr_flags & IBT_WR_SEND_SIGNAL) ?
HERMON_WQE_SEND_SIGNALED_MASK : 0) |
((wr->wr_flags & IBT_WR_SEND_SOLICIT) ?
HERMON_WQE_SEND_SOLICIT_MASK : 0);
/* Locate the data segments and pick the opcode per work-request type */
switch (wr->wr_opcode) {
case IBT_WRC_SEND:
/* SEND: data segments follow the next/ctrl header directly */
ds = (tavor_hw_wqe_sgl_t *)((uintptr_t)addr +
sizeof (tavor_hw_snd_wqe_nextctrl_t));
if (qp->qp_sq_inline != 0)
max_inline_bytes =
qp->qp_sq_wqesz - TAVOR_INLINE_HEADER_SIZE_SEND;
nopcode = TAVOR_WQE_SEND_NOPCODE_SEND;
break;
case IBT_WRC_RDMAW:
if (qp->qp_sq_inline != 0)
max_inline_bytes =
qp->qp_sq_wqesz - TAVOR_INLINE_HEADER_SIZE_RDMAW;
nopcode = TAVOR_WQE_SEND_NOPCODE_RDMAW;
/* FALLTHROUGH: RDMA write shares the remote-address segment setup */
case IBT_WRC_RDMAR:
if (wr->wr_opcode == IBT_WRC_RDMAR) {
/* RDMA read cannot use inline data */
if (qp->qp_sq_inline < 0)
qp->qp_sq_inline = 0;
nopcode = TAVOR_WQE_SEND_NOPCODE_RDMAR;
}
/* Remote-address segment precedes the data segments */
rc = (tavor_hw_snd_wqe_remaddr_t *)((uintptr_t)addr +
sizeof (tavor_hw_snd_wqe_nextctrl_t));
TAVOR_WQE_BUILD_REMADDR(rc, &wr->wr.rc.rcwr.rdma);
ds = (tavor_hw_wqe_sgl_t *)((uintptr_t)rc +
sizeof (tavor_hw_snd_wqe_remaddr_t));
break;
case IBT_WRC_BIND:
#if 0
#endif
/* Increment the key portion (low byte) to produce the new rkey */
old_rkey = wr->wr.rc.rcwr.bind->bind_rkey;
new_rkey = old_rkey >> 8;
old_rkey = (old_rkey + 1) & 0xff;
new_rkey = (new_rkey << 8) | old_rkey;
wr->wr.rc.rcwr.bind->bind_rkey_out = new_rkey;
bn = (tavor_hw_snd_wqe_bind_t *)((uintptr_t)addr +
sizeof (tavor_hw_snd_wqe_nextctrl_t));
TAVOR_WQE_BUILD_BIND(bn, wr->wr.rc.rcwr.bind);
ds = (tavor_hw_wqe_sgl_t *)((uintptr_t)bn +
sizeof (tavor_hw_snd_wqe_bind_t));
nds = 0;
nopcode = TAVOR_WQE_SEND_NOPCODE_BIND;
break;
default:
dapl_dbg_log(DAPL_DBG_TYPE_ERR,
"dapli_hermon_wqe_send_build: invalid wr_opcode=%d\n",
wr->wr_opcode);
return (DAT_INTERNAL_ERROR);
}
/* The request may not use more SGL entries than the QP supports */
if (nds > qp->qp_sq_sgl) {
return (DAT_INVALID_PARAMETER);
}
/* Decide whether the total payload still fits inline in this WQE */
if (max_inline_bytes != -1) {
len = 0;
for (i = 0; i < nds; i++)
len += sgl[i].ds_len;
if (len == 0)
max_inline_bytes = -1;
else {
/* each 64-byte section consumes a 4-byte inline header */
max_inline_bytes -= (len / 64) * sizeof (uint32_t);
if (len > max_inline_bytes)
max_inline_bytes = -1;
}
}
if (max_inline_bytes != -1) {
/*
 * Inline path: copy payload bytes directly into the WQE in
 * 64-byte sections, each prefixed by a length word carrying
 * TAVOR_WQE_SGL_INLINE_MASK.  Length words are written after
 * a producer barrier so hardware never sees a partial copy.
 */
dst = (uint8_t *)((uint32_t *)ds + 1);
maxdst = (uint8_t *)(((uintptr_t)dst + 64) & ~(64 - 1));
maxlen = maxdst - dst;
thislen = 0;
i = 0;
src = (uint8_t *)(uintptr_t)sgl[i].ds_va;
len = sgl[i].ds_len;
do {
if (len > maxlen) {
/* current SGE spills past this 64B section */
if (maxlen)
(void) dapl_os_memcpy(dst,
src, maxlen);
membar_producer();
*(uint32_t *)ds =
HTOBE_32((thislen + maxlen) |
TAVOR_WQE_SGL_INLINE_MASK);
thislen = 0;
len -= maxlen;
src += maxlen;
dst = maxdst + sizeof (uint32_t);
ds = (tavor_hw_wqe_sgl_t *)(void *)maxdst;
maxdst += 64;
maxlen = 64 - sizeof (uint32_t);
} else {
/* SGE fits; copy it and advance to the next */
(void) dapl_os_memcpy(dst,
src, len);
maxlen -= len;
thislen += len;
dst += len;
while (++i < nds)
if (sgl[i].ds_len)
break;
if (i >= nds)
break;
src = (uint8_t *)(uintptr_t)sgl[i].ds_va;
len = sgl[i].ds_len;
}
} while (i < nds);
membar_producer();
*(uint32_t *)ds = HTOBE_32(thislen |
TAVOR_WQE_SGL_INLINE_MASK);
/* WQE size in 16-byte units, rounded up */
my_size = ((uintptr_t)dst - (uintptr_t)addr + 15) >> 4;
/* BlueFlame only handles WQEs up to 256 bytes */
if (my_size <= (256 >> 4))
*size = my_size;
else
*size = 0;
} else {
/* Gather path: one data segment per non-empty SGE */
for (i = 0; i < nds; i++) {
if (sgl[i].ds_len == 0) {
continue;
}
HERMON_WQE_BUILD_DATA_SEG(&ds[num_ds], &sgl[i]);
num_ds++;
}
my_size = ((uintptr_t)&ds[num_ds] - (uintptr_t)addr) >> 4;
/* *size == 0 disables the BlueFlame fast path for this WQE */
*size = 0;
}
/* Write ctrl first, then the opcode/size qword (ownership preserved) */
HERMON_WQE_SETCTRL(addr, ctrl);
membar_producer();
HERMON_WQE_SETNEXT(addr, nopcode << 32, my_size,
(wr->wr_flags & IBT_WR_SEND_FENCE) ?
HERMON_WQE_SEND_FENCE_MASK : 0);
return (DAT_SUCCESS);
}
/*
 * dapli_hermon_wqe_recv_build()
 * Build a receive WQE at *addr from the work request *wr: one data
 * segment per non-empty SGE, terminated (when the QP's SGL is not
 * completely used) by a segment carrying the invalid L_Key so the
 * hardware stops scanning.  *size returns the fixed RQ WQE size in
 * 16-byte units.
 */
static DAT_RETURN
dapli_hermon_wqe_recv_build(ib_qp_handle_t qp, ibt_recv_wr_t *wr,
    uint64_t *addr, uint_t *size)
{
	tavor_hw_wqe_sgl_t *ds;
	int i;
	int num_ds;

	ds = (tavor_hw_wqe_sgl_t *)addr;
	num_ds = 0;

	/* The request may not use more SGL entries than the QP supports. */
	if (wr->wr_nds > qp->qp_rq_sgl) {
		return (DAT_INVALID_PARAMETER);
	}

	/* Build a data segment for each non-empty SGE. */
	for (i = 0; i < wr->wr_nds; i++) {
		if (wr->wr_sgl[i].ds_len == 0) {
			continue;
		}
		TAVOR_WQE_BUILD_DATA_SEG(&ds[num_ds], &wr->wr_sgl[i]);
		num_ds++;
	}

	/*
	 * BUG FIX: compare the count of segments actually built (num_ds),
	 * not the loop index i.  When zero-length SGEs were skipped, i
	 * could reach qp_rq_sgl while ds[num_ds] still held stale data,
	 * so the invalid-lkey terminator was never written.  This also
	 * matches the equivalent logic in dapli_hermon_wqe_srq_build().
	 */
	if (num_ds < qp->qp_rq_sgl) {
		ibt_wr_ds_t sgl;
		sgl.ds_va = (ib_vaddr_t)0;
		sgl.ds_len = (ib_msglen_t)0;
		sgl.ds_key = (ibt_lkey_t)HERMON_WQE_SGL_INVALID_LKEY;
		TAVOR_WQE_BUILD_DATA_SEG(&ds[num_ds], &sgl);
	}

	/* Receive WQEs are fixed-size; report it in 16-byte units. */
	*size = qp->qp_rq_wqesz >> 4;
	return (DAT_SUCCESS);
}
/*
 * dapli_hermon_wqe_srq_build()
 * Build an SRQ receive WQE at *addr: one data segment per non-empty
 * SGE, followed (when the SRQ's SGL is not completely used) by an
 * invalid-lkey segment that terminates the list for the hardware.
 */
static DAT_RETURN
dapli_hermon_wqe_srq_build(ib_srq_handle_t srq, ibt_recv_wr_t *wr,
    uint64_t *addr)
{
	tavor_hw_wqe_sgl_t *seg;
	ibt_wr_ds_t terminator;
	int sgl_idx;
	int nbuilt;

	/* Data segments start right after the next/ctrl header. */
	seg = (tavor_hw_wqe_sgl_t *)((uintptr_t)addr +
	    sizeof (tavor_hw_rcv_wqe_nextctrl_t));

	/* Reject requests that exceed the SRQ's SGL capacity. */
	if (wr->wr_nds > srq->srq_wq_sgl) {
		return (DAT_INVALID_PARAMETER);
	}

	/* Copy every non-empty SGE into a hardware data segment. */
	nbuilt = 0;
	for (sgl_idx = 0; sgl_idx < wr->wr_nds; sgl_idx++) {
		if (wr->wr_sgl[sgl_idx].ds_len != 0) {
			TAVOR_WQE_BUILD_DATA_SEG(&seg[nbuilt],
			    &wr->wr_sgl[sgl_idx]);
			nbuilt++;
		}
	}

	/* Terminate a partially-filled SGL with an invalid-lkey segment. */
	if (nbuilt < srq->srq_wq_sgl) {
		terminator.ds_va = (ib_vaddr_t)0;
		terminator.ds_len = (ib_msglen_t)0;
		terminator.ds_key = (ibt_lkey_t)HERMON_WQE_SGL_INVALID_LKEY;
		TAVOR_WQE_BUILD_DATA_SEG(&seg[nbuilt], &terminator);
	}
	return (DAT_SUCCESS);
}
/*
 * dapli_hermon_cq_peek()
 * Count the CQEs currently owned by software without consuming them.
 * Error CQEs contribute their doorbell count plus one; all others count
 * as one.  The result is returned through *num_cqe.
 */
static void
dapli_hermon_cq_peek(ib_cq_handle_t cq, int *num_cqe)
{
uint32_t *cqe;
uint32_t imm_eth_pkey_cred;
uint32_t cons_indx;
int polled_cnt;
uint_t doorbell_cnt;
uint_t opcode;
/* Start scanning from the current consumer index */
cons_indx = cq->cq_consindx & cq_wrap_around_mask;
cqe = (uint32_t *)&cq->cq_addr[cons_indx];
polled_cnt = 0;
while (HERMON_CQE_OWNER_IS_SW(cq, cqe)) {
opcode = HERMON_CQE_OPCODE_GET(cqe);
if (opcode == HERMON_CQE_ERR_OPCODE) {
/* error CQE: account for the flushed doorbell count too */
imm_eth_pkey_cred =
TAVOR_CQE_IMM_ETH_PKEY_CRED_GET(cqe);
doorbell_cnt =
imm_eth_pkey_cred & TAVOR_CQE_ERR_DBDCNT_MASK;
polled_cnt += (doorbell_cnt + 1);
} else {
polled_cnt++;
}
cons_indx = (cons_indx + 1) & cq_wrap_around_mask;
cqe = (uint32_t *)&cq->cq_addr[cons_indx];
}
*num_cqe = polled_cnt;
}
/* Publish the CQ consumer index (low 24 bits) to its doorbell record */
#define dapli_hermon_cq_update_ci(cq, dbp) \
(dbp)[0] = HTOBE_32(cq->cq_consindx & 0xFFFFFF)
/*
 * dapli_hermon_cq_resize_helper()
 * Switch the CQ over to its pending resize buffer after a RESIZE CQE
 * has been seen.  Unmaps the old buffer, promotes the resize fields to
 * the active ones, recomputes log2(cq_size), and consumes the RESIZE
 * CQE by advancing the consumer index.  Returns 0 on success, 1 when
 * no resize buffer is pending or munmap() fails.
 */
static int
dapli_hermon_cq_resize_helper(ib_cq_handle_t cq)
{
	int log2;

	/* Tear down the old mapping; fail if no resize is pending. */
	if (cq->cq_resize_addr == 0 ||
	    munmap((char *)cq->cq_addr, cq->cq_map_len) < 0) {
		dapl_dbg_log(DAPL_DBG_TYPE_ERR, "cq_resize_helper: "
		    "munmap(%p:0x%llx) failed(%d)\n", cq->cq_addr,
		    cq->cq_map_len, errno);
		return (1);
	}

	/* Promote the resize buffer to be the active CQ buffer. */
	cq->cq_addr = cq->cq_resize_addr;
	cq->cq_map_offset = cq->cq_resize_map_offset;
	cq->cq_map_len = cq->cq_resize_map_len;
	cq->cq_size = cq->cq_resize_size;
	cq->cq_cqesz = cq->cq_resize_cqesz;

	/* Clear the pending-resize state. */
	cq->cq_resize_addr = 0;
	cq->cq_resize_map_offset = 0;
	cq->cq_resize_map_len = 0;
	cq->cq_resize_size = 0;
	cq->cq_resize_cqesz = 0;

	/* cq_log_cqsz = ceiling(log2(cq_size)) */
	log2 = 0;
	while ((1 << log2) < cq->cq_size) {
		log2++;
	}
	cq->cq_log_cqsz = log2;

	/* Consume the RESIZE-opcode CQE that triggered this call. */
	cq->cq_consindx++;
	return (0);
}
/*
 * dapli_hermon_cq_poll()
 * Poll up to num_wc completions from the CQ into wc_p[].  RESIZE CQEs
 * trigger a buffer switch and are not reported.  On success the
 * consumer-index doorbell record is updated; when nothing is polled,
 * pending work-request IDs are reaped and DAT_QUEUE_EMPTY is returned.
 * *num_polled (if non-NULL) receives the number of completions filled.
 */
static DAT_RETURN
dapli_hermon_cq_poll(ib_cq_handle_t cq, ibt_wc_t *wc_p, uint_t num_wc,
uint_t *num_polled)
{
uint32_t *cqe;
uint32_t cons_indx;
uint32_t polled_cnt;
DAT_RETURN dat_status;
int status;
cons_indx = cq->cq_consindx & cq_wrap_around_mask;
cqe = (uint32_t *)&cq->cq_addr[cons_indx];
polled_cnt = 0;
while (HERMON_CQE_OWNER_IS_SW(cq, cqe)) {
if (HERMON_CQE_OPCODE_GET(cqe) == HERMON_CQE_RESIZE_OPCODE) {
/* switch to the resize buffer and restart the scan */
if (dapli_hermon_cq_resize_helper(cq))
return (DAT_ERROR(DAT_INTERNAL_ERROR, 0));
cons_indx = cq->cq_consindx & cq_wrap_around_mask;
cqe = (uint32_t *)&cq->cq_addr[cons_indx];
continue;
}
status = dapli_hermon_cq_cqe_consume(cq, cqe,
&wc_p[polled_cnt++]);
if (status == TAVOR_CQ_SYNC_AND_DB) {
/* CQE fully consumed: advance the consumer index */
cq->cq_consindx++;
cons_indx = (cons_indx + 1) & cq_wrap_around_mask;
cqe = (uint32_t *)&cq->cq_addr[cons_indx];
}
if (polled_cnt >= num_wc) {
break;
}
}
dat_status = DAT_SUCCESS;
if (polled_cnt != 0) {
/* publish the new consumer index to hardware */
dapli_hermon_cq_update_ci(cq, cq->cq_poll_dbp);
} else if (polled_cnt == 0) {
/* nothing polled: reap deferred wrid entries, report empty */
if (cq->cq_wrid_reap_head)
dapls_tavor_wrid_cq_reap(cq);
dat_status = DAT_ERROR(DAT_QUEUE_EMPTY, 0);
}
if (num_polled != NULL) {
*num_polled = polled_cnt;
}
return (dat_status);
}
/*
 * dapli_hermon_cq_poll_one()
 * Poll a single completion from the CQ into *wc_p.  A RESIZE CQE causes
 * a buffer switch and a restart.  Returns DAT_QUEUE_EMPTY (after reaping
 * deferred wrid entries) when no software-owned CQE is present.
 */
static DAT_RETURN
dapli_hermon_cq_poll_one(ib_cq_handle_t cq, ibt_wc_t *wc_p)
{
uint32_t *cqe;
uint32_t cons_indx;
DAT_RETURN dat_status;
int status;
start_over:
cons_indx = cq->cq_consindx & cq_wrap_around_mask;
cqe = (uint32_t *)&cq->cq_addr[cons_indx];
if (HERMON_CQE_OWNER_IS_SW(cq, cqe)) {
if (HERMON_CQE_OPCODE_GET(cqe) == HERMON_CQE_RESIZE_OPCODE) {
/* switch to the resize buffer, then retry the poll */
if (dapli_hermon_cq_resize_helper(cq))
return (DAT_ERROR(DAT_INTERNAL_ERROR, 0));
goto start_over;
}
status = dapli_hermon_cq_cqe_consume(cq, cqe, wc_p);
if (status == TAVOR_CQ_SYNC_AND_DB) {
/* CQE consumed: advance and publish the consumer index */
cq->cq_consindx++;
dapli_hermon_cq_update_ci(cq, cq->cq_poll_dbp);
}
dat_status = DAT_SUCCESS;
} else {
/* queue empty: reap deferred wrid entries first */
if (cq->cq_wrid_reap_head)
dapls_tavor_wrid_cq_reap(cq);
dat_status = DAT_ERROR(DAT_QUEUE_EMPTY, 0);
}
return (dat_status);
}
/*
 * dapli_hermon_cq_cqe_consume()
 * Translate one successful CQE into an ibt_wc_t work completion:
 * recover the caller's work-request ID, the QP number, the operation
 * type and (where the hardware reports it) the byte count.  Error CQEs
 * are delegated to dapli_hermon_cq_errcqe_consume().  Always returns
 * TAVOR_CQ_SYNC_AND_DB so the caller advances the consumer index.
 */
static int
dapli_hermon_cq_cqe_consume(ib_cq_handle_t cqhdl, uint32_t *cqe,
ibt_wc_t *wc)
{
uint_t flags;
uint_t type;
uint_t opcode;
int status;
opcode = HERMON_CQE_OPCODE_GET(cqe);
if (opcode == HERMON_CQE_ERR_OPCODE) {
status = dapli_hermon_cq_errcqe_consume(cqhdl, cqe, wc);
return (status);
}
/* Rewrite WQEADDRSZ from the counter in dword 6, clearing NDS bits */
TAVOR_CQE_WQEADDRSZ_SET(cqe, (HTOBE_32(cqe[6]) >> 10) &
~HERMON_WQE_NDS_MASK);
/* Recover the caller's wr_id from the send or receive wrid list */
wc->wc_id = dapls_tavor_wrid_get_entry(cqhdl, (tavor_hw_cqe_t *)cqe,
HERMON_CQE_SENDRECV_GET(cqe) >> 6, 0, NULL);
wc->wc_qpn = TAVOR_CQE_QPNUM_GET(cqe);
flags = IBT_WC_NO_FLAGS;
if (HERMON_CQE_SENDRECV_GET(cqe) != TAVOR_COMPLETION_RECV) {
/* send-side completion: map hardware opcode to ibt type */
switch (opcode) {
case TAVOR_CQE_SND_RDMAWR:
type = IBT_WRC_RDMAW;
break;
case TAVOR_CQE_SND_SEND:
type = IBT_WRC_SEND;
break;
case TAVOR_CQE_SND_RDMARD:
type = IBT_WRC_RDMAR;
/* RDMA read reports the transferred byte count */
wc->wc_bytes_xfer = TAVOR_CQE_BYTECNT_GET(cqe);
break;
case TAVOR_CQE_SND_BIND_MW:
type = IBT_WRC_BIND;
break;
default:
/* unrecognized opcode: report a local channel error */
wc->wc_status = IBT_WC_LOCAL_CHAN_OP_ERR;
return (TAVOR_CQ_SYNC_AND_DB);
}
} else {
/* receive-side completion */
switch (opcode) {
case HERMON_CQE_RCV_SEND:
type = IBT_WRC_RECV;
wc->wc_bytes_xfer = TAVOR_CQE_BYTECNT_GET(cqe);
break;
default:
wc->wc_status = IBT_WC_LOCAL_CHAN_OP_ERR;
return (TAVOR_CQ_SYNC_AND_DB);
}
}
wc->wc_type = type;
wc->wc_flags = flags;
wc->wc_status = IBT_WC_SUCCESS;
return (TAVOR_CQ_SYNC_AND_DB);
}
/*
 * dapli_hermon_cq_errcqe_consume()
 * Translate an error CQE into an ibt_wc_t: recover the wr_id of the
 * failed work request and map the hardware syndrome (byte 0x1B of the
 * CQE) to an IBT completion status.  Always returns TAVOR_CQ_SYNC_AND_DB.
 */
static int
dapli_hermon_cq_errcqe_consume(ib_cq_handle_t cqhdl, uint32_t *cqe,
ibt_wc_t *wc)
{
dapls_tavor_wrid_entry_t wre;
uint_t status;
uint_t send_or_recv;
dapl_dbg_log(DAPL_DBG_TYPE_EVD, "errcqe_consume:cqe.eth=%x, wqe=%x\n",
TAVOR_CQE_IMM_ETH_PKEY_CRED_GET(cqe),
TAVOR_CQE_WQEADDRSZ_GET(cqe));
/* Hardware error syndrome lives at byte offset 0x1B of the CQE */
status = ((uint8_t *)cqe)[0x1B];
/* Rewrite WQEADDRSZ from the counter in dword 6, clearing NDS bits */
TAVOR_CQE_WQEADDRSZ_SET(cqe, (HTOBE_32(cqe[6]) >> 10) &
~HERMON_WQE_NDS_MASK);
/* 0 = send queue, 1 = receive queue */
if (HERMON_CQE_SENDRECV_GET(cqe) == 0) {
send_or_recv = 0;
} else {
send_or_recv = 1;
}
/* Recover the wr_id; "1" requests error-path wrid handling */
wc->wc_id = dapls_tavor_wrid_get_entry(cqhdl, (tavor_hw_cqe_t *)cqe,
send_or_recv, 1, &wre);
wc->wc_qpn = TAVOR_CQE_QPNUM_GET(cqe);
/* Map the hardware syndrome to an IBT status (reuses "status") */
switch (status) {
case TAVOR_CQE_LOC_LEN_ERR:
status = IBT_WC_LOCAL_LEN_ERR;
break;
case TAVOR_CQE_LOC_OP_ERR:
status = IBT_WC_LOCAL_CHAN_OP_ERR;
break;
case TAVOR_CQE_LOC_PROT_ERR:
status = IBT_WC_LOCAL_PROTECT_ERR;
break;
case TAVOR_CQE_WR_FLUSHED_ERR:
status = IBT_WC_WR_FLUSHED_ERR;
break;
case TAVOR_CQE_MW_BIND_ERR:
status = IBT_WC_MEM_WIN_BIND_ERR;
break;
case TAVOR_CQE_BAD_RESPONSE_ERR:
status = IBT_WC_BAD_RESPONSE_ERR;
break;
case TAVOR_CQE_LOCAL_ACCESS_ERR:
status = IBT_WC_LOCAL_ACCESS_ERR;
break;
case TAVOR_CQE_REM_INV_REQ_ERR:
status = IBT_WC_REMOTE_INVALID_REQ_ERR;
break;
case TAVOR_CQE_REM_ACC_ERR:
status = IBT_WC_REMOTE_ACCESS_ERR;
break;
case TAVOR_CQE_REM_OP_ERR:
status = IBT_WC_REMOTE_OP_ERR;
break;
case TAVOR_CQE_TRANS_TO_ERR:
status = IBT_WC_TRANS_TIMEOUT_ERR;
break;
case TAVOR_CQE_RNRNAK_TO_ERR:
status = IBT_WC_RNR_NAK_TIMEOUT_ERR;
break;
default:
status = IBT_WC_LOCAL_CHAN_OP_ERR;
break;
}
wc->wc_status = status;
wc->wc_type = 0;
return (TAVOR_CQ_SYNC_AND_DB);
}
/*
 * dapli_hermon_cq_notify()
 * Arm the CQ for completion notification (all completions or solicited
 * only).  The arm state lives in the CQ's arm doorbell record and is
 * updated with a compare-and-swap retry loop so concurrent arm requests
 * do not clobber each other; only a successful CAS rings the doorbell.
 * An arm is skipped when an equal-or-stronger request is already set.
 */
static DAT_RETURN
dapli_hermon_cq_notify(ib_cq_handle_t cq, int flags, uint32_t param)
{
uint32_t cqnum;
uint32_t *target;
uint32_t old_cmd, cmp, new, tmp, cmd_sn;
cqnum = cq->cq_num;
target = cq->cq_arm_dbp;
retry:
/* snapshot the doorbell record and decode command / sequence bits */
cmp = *target;
tmp = HTOBE_32(cmp);
old_cmd = tmp & (0x7 << 24);
cmd_sn = tmp & (0x3 << 28);
if (flags == IB_NOTIFY_ON_NEXT_COMP) {
/* skip if already armed for any completion */
if (old_cmd != HERMON_CQDB_NOTIFY_CQ) {
new = HTOBE_32(cmd_sn | HERMON_CQDB_NOTIFY_CQ |
(cq->cq_consindx & 0xFFFFFF));
tmp = atomic_cas_32(target, cmp, new);
if (tmp != cmp)
goto retry;
dapli_hermon_cq_doorbell(cq->cq_iauar,
HERMON_CQDB_NOTIFY_CQ, cqnum,
cmd_sn, cq->cq_consindx);
}
} else if (flags == IB_NOTIFY_ON_NEXT_SOLICITED) {
/* skip if already armed for solicited or for any completion */
if (old_cmd != HERMON_CQDB_NOTIFY_CQ &&
old_cmd != HERMON_CQDB_NOTIFY_CQ_SOLICIT) {
new = HTOBE_32(cmd_sn | HERMON_CQDB_NOTIFY_CQ_SOLICIT |
(cq->cq_consindx & 0xFFFFFF));
tmp = atomic_cas_32(target, cmp, new);
if (tmp != cmp)
goto retry;
dapli_hermon_cq_doorbell(cq->cq_iauar,
HERMON_CQDB_NOTIFY_CQ_SOLICIT, cqnum,
cmd_sn, cq->cq_consindx);
}
} else {
return (DAT_INVALID_PARAMETER);
}
return (DAT_SUCCESS);
}
/*
 * dapli_hermon_wqe_headroom()
 * Stamp the WQE that sits "headroom" entries ahead of `start` so the
 * hardware prefetcher stops there: the first 64-byte section keeps its
 * ownership bit but gets an all-ones invalid pattern below it; every
 * following section of that WQE is stamped fully invalid (0xFFFFFFFF).
 * Pointers advance by 16 uint32s (64 bytes) per section.
 */
static void
dapli_hermon_wqe_headroom(ib_qp_handle_t qp, uint32_t start)
{
uint32_t *wqe_start, *wqe_top, *wqe_base, qsize, invalue;
int hdrmwqes, wqesizebytes, sectperwqe, i, j;
qsize = qp->qp_sq_numwqe;
wqesizebytes = qp->qp_sq_wqesz;
sectperwqe = wqesizebytes >> 6;
hdrmwqes = qp->qp_sq_headroom;
wqe_base = (uint32_t *)TAVOR_QP_SQ_ENTRY(qp, 0);
wqe_top = (uint32_t *)TAVOR_QP_SQ_ENTRY(qp, qsize);
wqe_start = (uint32_t *)TAVOR_QP_SQ_ENTRY(qp, start);
/* walk forward (headroom - 1) WQEs, wrapping at the queue top */
for (i = 0; i < hdrmwqes - 1; i++) {
wqe_start += sectperwqe * 16;
if (wqe_start == wqe_top)
wqe_start = wqe_base;
}
/* first section: keep the ownership (top) bit, set all others */
invalue = HTOBE_32(*wqe_start);
invalue |= 0x7FFFFFFF;
*wqe_start = HTOBE_32(invalue);
wqe_start += 16;
/* remaining sections of this WQE: stamp fully invalid */
for (j = 1; j < sectperwqe; j++) {
*wqe_start = 0xFFFFFFFF;
wqe_start += 16;
}
}
/*
 * dapli_hermon_post_send()
 * Post one send work request to the QP: build the WQE at the SQ tail,
 * record the work-request ID, stamp the headroom WQE, flip the
 * descriptor's ownership bit, and ring the doorbell -- via the
 * BlueFlame register (WQE copied directly) when the WQE is small
 * enough, otherwise via the normal UAR doorbell.  The `ns` argument is
 * unused on hermon (kept for interface compatibility).
 */
static DAT_RETURN
dapli_hermon_post_send(DAPL_EP *ep, ibt_send_wr_t *wr, boolean_t ns)
{
	dapls_tavor_wrid_list_hdr_t *wridlist;
	dapls_tavor_wrid_entry_t *wre_last;
	uint64_t *desc;
	uint64_t *wqe_addr;
	uint32_t desc_sz;
	uint32_t wqeaddrsz, signaled_dbd;
	uint32_t head, tail, next_tail, qsize_msk;
	int status;
	ib_qp_handle_t qp;

	/* Posting a send is only legal once the QP has reached RTS. */
	if ((ep->qp_state == IBT_STATE_RESET) ||
	    (ep->qp_state == IBT_STATE_INIT) ||
	    (ep->qp_state == IBT_STATE_RTR)) {
		dapl_dbg_log(DAPL_DBG_TYPE_ERR,
		    "post_send: invalid qp_state %d\n", ep->qp_state);
		return (DAT_INVALID_STATE);
	}
	qp = ep->qp_handle;

	dapl_os_lock(&qp->qp_sq_wqhdr->wq_wrid_lock->wrl_lock);
	wridlist = qp->qp_sq_wqhdr->wq_wrid_post;
	qsize_msk = qp->qp_sq_wqhdr->wq_size - 1;
	tail = qp->qp_sq_wqhdr->wq_tail;
	head = qp->qp_sq_wqhdr->wq_head;

	/* Fail when the send queue has no room. */
	if (qp->qp_sq_wqhdr->wq_full != 0) {
		dapl_os_unlock(&qp->qp_sq_wqhdr->wq_wrid_lock->wrl_lock);
		return (DAT_INSUFFICIENT_RESOURCES);
	}
	next_tail = (tail + 1) & qsize_msk;
	if (next_tail == head) {
		qp->qp_sq_wqhdr->wq_full = 1;
	}

	/* Build the WQE in place at the current tail. */
	wqe_addr = TAVOR_QP_SQ_ENTRY(qp, tail);
	status = dapli_hermon_wqe_send_build(qp, wr, wqe_addr, &desc_sz);
	if (status != DAT_SUCCESS) {
		dapl_os_unlock(&qp->qp_sq_wqhdr->wq_wrid_lock->wrl_lock);
		return (status);
	}
	desc = TAVOR_QP_SQ_ENTRY(qp, tail);

	wqeaddrsz = HERMON_QP_WQEADDRSZ(qp->qp_sq_counter);
	/*
	 * BUG FIX: signaled_dbd was previously read uninitialized for
	 * unsignaled sends (undefined behavior); default it to 0.
	 */
	signaled_dbd = 0;
	if (wr->wr_flags & IBT_WR_SEND_SIGNAL) {
		signaled_dbd = TAVOR_WRID_ENTRY_SIGNALED;
	}
	dapli_tavor_wrid_add_entry(qp->qp_sq_wqhdr, wr->wr_id, wqeaddrsz,
	    signaled_dbd);

	/* Stamp the headroom WQE, then hand this WQE to the hardware. */
	dapli_hermon_wqe_headroom(qp, next_tail);
	*(uint8_t *)desc ^= 0x80;	/* flip the ownership bit */

	wre_last = wridlist->wl_wre_old_tail;
	if (wre_last != NULL) {
		wre_last->wr_signaled_dbd |= TAVOR_WRID_ENTRY_DOORBELLED;
	}
	qp->qp_sq_lastwqeaddr = wqe_addr;
	qp->qp_sq_wqhdr->wq_tail = next_tail;

	if (desc_sz && qp->qp_ia_bf != NULL) {
		/*
		 * BlueFlame path: patch the send counter and QPN into
		 * the descriptor header, then copy the whole WQE into
		 * the BF register, alternating between its two 256-byte
		 * buffers.  The global spinlock serializes BF users.
		 */
		uint64_t *bf_dest, *src64;
		uint8_t *src8;
		int i;

		(void) pthread_spin_lock(&hermon_bf_lock);
		src8 = (uint8_t *)desc;
		src8[1] = (uint8_t)(qp->qp_sq_counter >> 8);
		src8[2] = (uint8_t)qp->qp_sq_counter;
		src8[4] = (uint8_t)(qp->qp_num >> 16);
		src8[5] = (uint8_t)(qp->qp_num >> 8);
		src8[6] = (uint8_t)qp->qp_num;
		src64 = (uint64_t *)desc;
		bf_dest = (uint64_t *)((uintptr_t)qp->qp_ia_bf +
		    *qp->qp_ia_bf_toggle);
		*qp->qp_ia_bf_toggle ^= 256;	/* switch BF buffers */
		/* desc_sz is in 16B units; copy 64B (8 qwords) per pass */
		for (i = 0; i < desc_sz * 2; i += 8) {
			bf_dest[i] = src64[i];
			bf_dest[i + 1] = src64[i + 1];
			bf_dest[i + 2] = src64[i + 2];
			bf_dest[i + 3] = src64[i + 3];
			bf_dest[i + 4] = src64[i + 4];
			bf_dest[i + 5] = src64[i + 5];
			bf_dest[i + 6] = src64[i + 6];
			bf_dest[i + 7] = src64[i + 7];
		}
		(void) pthread_spin_unlock(&hermon_bf_lock);
	} else {
		/* Normal path: ring the send doorbell in the UAR. */
		dapli_hermon_sq_dbreg(qp->qp_iauar, qp->qp_num);
	}
	qp->qp_sq_counter++;

	dapl_os_unlock(&qp->qp_sq_wqhdr->wq_wrid_lock->wrl_lock);
	return (DAT_SUCCESS);
}
/*
 * dapli_hermon_post_recv()
 * Post one receive work request to the QP: build the WQE at the RQ
 * tail, record the work-request ID (receives are always treated as
 * signaled), and publish the new receive counter to the RQ doorbell
 * record.  The `ns` argument is unused on hermon (kept for interface
 * compatibility).
 */
static DAT_RETURN
dapli_hermon_post_recv(DAPL_EP *ep, ibt_recv_wr_t *wr, boolean_t ns)
{
	dapls_tavor_wrid_list_hdr_t *wridlist;
	dapls_tavor_wrid_entry_t *wre_last;
	ib_qp_handle_t qp;
	DAT_RETURN status;
	uint64_t *wqe_addr;
	uint32_t desc_sz;
	uint32_t wqeaddrsz;
	uint32_t head, tail, next_tail, qsize_msk;

	/* Receives may be posted in any state except RESET. */
	if (ep->qp_state == IBT_STATE_RESET) {
		dapl_dbg_log(DAPL_DBG_TYPE_ERR,
		    "post_recv: invalid qp_state %d\n", ep->qp_state);
		return (DAT_INVALID_STATE);
	}
	qp = ep->qp_handle;

	dapl_os_lock(&qp->qp_rq_wqhdr->wq_wrid_lock->wrl_lock);
	wridlist = qp->qp_rq_wqhdr->wq_wrid_post;
	qsize_msk = qp->qp_rq_wqhdr->wq_size - 1;
	tail = qp->qp_rq_wqhdr->wq_tail;
	head = qp->qp_rq_wqhdr->wq_head;

	/* Fail when the receive queue has no room. */
	if (qp->qp_rq_wqhdr->wq_full != 0) {
		dapl_os_unlock(&qp->qp_rq_wqhdr->wq_wrid_lock->wrl_lock);
		return (DAT_INSUFFICIENT_RESOURCES);
	}
	next_tail = (tail + 1) & qsize_msk;
	if (next_tail == head) {
		qp->qp_rq_wqhdr->wq_full = 1;
	}

	/* Build the WQE in place at the current tail. */
	wqe_addr = TAVOR_QP_RQ_ENTRY(qp, tail);
	status = dapli_hermon_wqe_recv_build(qp, wr, wqe_addr, &desc_sz);
	if (status != DAT_SUCCESS) {
		dapl_os_unlock(&qp->qp_rq_wqhdr->wq_wrid_lock->wrl_lock);
		/*
		 * CONSISTENCY FIX: propagate the specific build error
		 * (e.g. DAT_INVALID_PARAMETER) instead of collapsing
		 * everything to DAT_INTERNAL_ERROR, matching post_send.
		 */
		return (status);
	}

	wqeaddrsz = HERMON_QP_WQEADDRSZ(qp->qp_rq_counter);
	dapli_tavor_wrid_add_entry(qp->qp_rq_wqhdr, wr->wr_id, wqeaddrsz,
	    (uint32_t)TAVOR_WRID_ENTRY_SIGNALED);
	wre_last = wridlist->wl_wre_old_tail;
	if (wre_last != NULL) {
		wre_last->wr_signaled_dbd |= TAVOR_WRID_ENTRY_DOORBELLED;
	}
	qp->qp_rq_lastwqeaddr = wqe_addr;
	qp->qp_rq_wqhdr->wq_tail = next_tail;

	/* Publish the new receive counter to the RQ doorbell record. */
	qp->qp_rq_counter++;
	(qp->qp_rq_dbp)[0] = HTOBE_32(qp->qp_rq_counter);

	dapl_os_unlock(&qp->qp_rq_wqhdr->wq_wrid_lock->wrl_lock);
	return (DAT_SUCCESS);
}
/*
 * dapli_hermon_post_srq()
 * Post one receive work request to the shared receive queue: pick a
 * free WQE off the SRQ free list, build it, record the work-request ID
 * against the WQE index, and publish the new SRQ counter to its
 * doorbell record.  The `ns` argument is unused on hermon.
 */
static DAT_RETURN
dapli_hermon_post_srq(DAPL_SRQ *srqp, ibt_recv_wr_t *wr, boolean_t ns)
{
ib_srq_handle_t srq;
DAT_RETURN status;
uint32_t desc;
uint64_t *wqe_addr;
uint32_t head, next_head, qsize_msk;
uint32_t wqe_index;
srq = srqp->srq_handle;
dapl_os_lock(&srq->srq_wridlist->wl_lock->wrl_lock);
/* fail when the SRQ free list is exhausted */
if (srq->srq_wridlist->wl_freel_entries == 0) {
dapl_os_unlock(&srq->srq_wridlist->wl_lock->wrl_lock);
return (DAT_INSUFFICIENT_RESOURCES);
}
/* pop the next free descriptor and translate it to a WQE index */
qsize_msk = srq->srq_wridlist->wl_size - 1;
head = srq->srq_wridlist->wl_freel_head;
next_head = (head + 1) & qsize_msk;
desc = srq->srq_wridlist->wl_free_list[head];
wqe_index = TAVOR_SRQ_WQ_INDEX(srq->srq_wq_desc_addr, desc,
srq->srq_wq_wqesz);
wqe_addr = TAVOR_SRQ_WQ_ENTRY(srq, wqe_index);
status = dapli_hermon_wqe_srq_build(srq, wr, wqe_addr);
if (status != DAT_SUCCESS) {
dapl_os_unlock(&srq->srq_wridlist->wl_lock->wrl_lock);
return (status);
}
/* record the caller's wr_id against this WQE index */
dapli_tavor_wrid_add_entry_srq(srq, wr->wr_id, wqe_index);
#if 0
if (srq->srq_wq_lastwqeindex == -1) {
last_wqe_addr = NULL;
} else {
last_wqe_addr = TAVOR_SRQ_WQ_ENTRY(srq,
srq->srq_wq_lastwqeindex);
}
XXX
dapli_tavor_wqe_srq_linknext(wqe_addr, ns, desc, last_wqe_addr);
#endif
srq->srq_wq_lastwqeindex = wqe_index;
/* consume the free-list entry */
srq->srq_wridlist->wl_freel_head = next_head;
srq->srq_wridlist->wl_freel_entries--;
dapl_os_assert(srq->srq_wridlist->wl_freel_entries <=
srq->srq_wridlist->wl_size);
/* publish the new SRQ counter to the doorbell record */
srq->srq_counter++;
(srq->srq_dbp)[0] = HTOBE_32(srq->srq_counter);
dapl_os_unlock(&srq->srq_wridlist->wl_lock->wrl_lock);
return (DAT_SUCCESS);
}
/*
 * dapli_hermon_cq_srq_entries_flush()
 * Remove from the CQ all unpolled receive CQEs that belong to this
 * (SRQ-attached) QP, returning their SRQ WQEs to the free list.  CQEs
 * for other QPs are compacted toward the tail of the scanned region
 * (scanning newest to oldest); vacated slots at the head are handed
 * back to hardware and the consumer index is advanced accordingly.
 */
static void
dapli_hermon_cq_srq_entries_flush(ib_qp_handle_t qp)
{
ib_cq_handle_t cq;
dapls_tavor_workq_hdr_t *wqhdr;
tavor_hw_cqe_t *cqe;
tavor_hw_cqe_t *next_cqe;
uint32_t cons_indx, tail_cons_indx;
uint32_t new_indx, check_indx, indx;
int cqe_qpnum, cqe_type;
int outstanding_cqes, removed_cqes;
int i;
cq = qp->qp_rq_cqhdl;
wqhdr = qp->qp_rq_wqhdr;
dapl_os_assert(wqhdr->wq_wrid_post != NULL);
dapl_os_assert(wqhdr->wq_wrid_post->wl_srq_en != 0);
/* count the software-owned CQEs from the consumer index forward */
cons_indx = cq->cq_consindx;
cqe = &cq->cq_addr[cons_indx];
outstanding_cqes = 0;
tail_cons_indx = cons_indx;
while (TAVOR_CQE_OWNER_IS_SW(cqe)) {
outstanding_cqes++;
tail_cons_indx = (tail_cons_indx + 1) & cq_wrap_around_mask;
cqe = &cq->cq_addr[tail_cons_indx];
}
/* walk backward from the newest CQE, compacting survivors downward */
check_indx = new_indx = (tail_cons_indx - 1) & cq_wrap_around_mask;
for (i = 0; i < outstanding_cqes; i++) {
cqe = &cq->cq_addr[check_indx];
cqe_qpnum = TAVOR_CQE_QPNUM_GET(cqe);
cqe_type = HERMON_CQE_SENDRECV_GET(cqe);
if (cqe_qpnum == qp->qp_num &&
cqe_type == TAVOR_COMPLETION_RECV) {
/* this QP's receive CQE: recycle its SRQ WQE, drop it */
(void) dapli_tavor_wrid_find_match_srq(
wqhdr->wq_wrid_post, cqe);
} else {
/* keep: slide it down into the next surviving slot */
if (check_indx != new_indx) {
next_cqe = &cq->cq_addr[new_indx];
(void) dapl_os_memcpy(next_cqe, cqe,
sizeof (tavor_hw_cqe_t));
}
new_indx = (new_indx - 1) & cq_wrap_around_mask;
}
check_indx = (check_indx - 1) & cq_wrap_around_mask;
}
/* hand the vacated leading slots back to hardware */
removed_cqes = 0;
if (check_indx != new_indx) {
/* NOTE(review): "indx <= new_indx" assumes the vacated range
 * does not wrap around the end of the CQ -- verify */
for (indx = cons_indx; indx <= new_indx;
indx = (indx + 1) & cq_wrap_around_mask) {
removed_cqes++;
cqe = &cq->cq_addr[indx];
TAVOR_CQE_OWNER_SET_HW(cqe);
}
}
/* advance the consumer index past the removed entries */
cons_indx = (new_indx + 1) & cq_wrap_around_mask;
if ((removed_cqes != 0) && (cq->cq_consindx != cons_indx)) {
cq->cq_consindx = cons_indx;
dapli_hermon_cq_update_ci(cq, cq->cq_poll_dbp);
}
}
/*
 * dapli_hermon_rq_prelink()
 * Pre-link a receive/SRQ work queue: each WQE's header points at the
 * next WQE's descriptor offset (the last wraps back to the first), and
 * each gets an invalid-lkey data segment so unposted WQEs are inert.
 */
static void
dapli_hermon_rq_prelink(caddr_t first, uint32_t desc_off, uint32_t wqesz,
    uint32_t numwqe, uint32_t nds)
{
	uint32_t *wqe = (uint32_t *)(uintptr_t)first;
	uint32_t next_off = desc_off;
	uint32_t stride = wqesz / sizeof (uint32_t);
	int n;
	ibt_wr_ds_t inval_seg;

	/* Invalid-lkey segment stamped into every prelinked WQE. */
	inval_seg.ds_va = (ib_vaddr_t)0;
	inval_seg.ds_key = HERMON_WQE_SGL_INVALID_LKEY;
	inval_seg.ds_len = (ib_msglen_t)0;

	/* Chain WQE n to WQE n+1 for all but the last entry. */
	for (n = 0; n < numwqe - 1; n++, wqe += stride) {
		next_off += wqesz;
		wqe[0] = HTOBE_32(next_off);
		/* NOTE(review): nds stored without byte swap -- verify */
		wqe[1] = nds;
		TAVOR_WQE_BUILD_DATA_SEG((void *)&wqe[2], &inval_seg);
	}

	/* Last WQE wraps back to the first descriptor offset. */
	wqe[0] = HTOBE_32(desc_off);
	wqe[1] = nds;
	TAVOR_WQE_BUILD_DATA_SEG((void *)&wqe[2], &inval_seg);
}
/*
 * dapli_hermon_sq_init()
 * Stamp the first dword of every 64-byte section of every send WQE
 * with all-ones, marking the entire send queue invalid for hardware.
 */
static void
dapli_hermon_sq_init(caddr_t first, uint32_t wqesz, uint32_t numwqe)
{
	uint32_t wqe_idx, byte_off;
	uint64_t *chunk = (uint64_t *)(uintptr_t)first;

	for (wqe_idx = 0; wqe_idx < numwqe; wqe_idx++) {
		/* One invalid stamp per 64-byte (8-qword) section. */
		for (byte_off = 0; byte_off < wqesz;
		    byte_off += 64, chunk += 8) {
			*(uint32_t *)chunk = 0xFFFFFFFF;
		}
	}
}
/*
 * dapli_hermon_qp_init()
 * Reset the QP's software doorbell counters and stamp the send queue
 * contents invalid before first use.
 */
static void
dapli_hermon_qp_init(ib_qp_handle_t qp)
{
	qp->qp_rq_counter = 0;
	qp->qp_sq_counter = 0;
	dapli_hermon_sq_init(qp->qp_sq_buf, qp->qp_sq_wqesz,
	    qp->qp_sq_numwqe);
}
/*
 * dapli_hermon_cq_init()
 * Prepare a CQ for first use: seed the arm doorbell record with
 * command sequence number 1, compute log2 of the CQ size (used by the
 * ownership test), and reset the consumer index.
 */
static void
dapli_hermon_cq_init(ib_cq_handle_t cq)
{
	uint32_t log2;

	/* Arm doorbell record starts at command sequence number 1. */
	(cq->cq_arm_dbp)[0] = HTOBE_32(1 << 28);

	/* cq_log_cqsz = ceiling(log2(cq_size)) */
	log2 = 0;
	while ((1 << log2) < cq->cq_size) {
		log2++;
	}
	cq->cq_log_cqsz = log2;

	cq->cq_consindx = 0;
}
/*
 * dapli_hermon_srq_init()
 * Prepare an SRQ for first use: reset the software doorbell counter
 * and pre-link the whole work queue into a wrapped free chain.
 */
static void
dapli_hermon_srq_init(ib_srq_handle_t srq)
{
	srq->srq_counter = 0;
	dapli_hermon_rq_prelink(srq->srq_addr, srq->srq_wq_desc_addr,
	    srq->srq_wq_wqesz, srq->srq_wq_numwqe, 0);
}
/*
 * dapls_init_funcs_hermon()
 * Wire the hermon-specific entry points into the HCA dispatch table
 * and initialize the shared BlueFlame spinlock.
 */
void
dapls_init_funcs_hermon(DAPL_HCA *hca_ptr)
{
	/* Work-request posting entry points. */
	hca_ptr->post_send = dapli_hermon_post_send;
	hca_ptr->post_recv = dapli_hermon_post_recv;
	hca_ptr->post_srq = dapli_hermon_post_srq;

	/* Completion-queue entry points. */
	hca_ptr->cq_peek = dapli_hermon_cq_peek;
	hca_ptr->cq_poll = dapli_hermon_cq_poll;
	hca_ptr->cq_poll_one = dapli_hermon_cq_poll_one;
	hca_ptr->cq_notify = dapli_hermon_cq_notify;
	hca_ptr->srq_flush = dapli_hermon_cq_srq_entries_flush;

	/* One-time initialization hooks. */
	hca_ptr->qp_init = dapli_hermon_qp_init;
	hca_ptr->cq_init = dapli_hermon_cq_init;
	hca_ptr->srq_init = dapli_hermon_srq_init;

	/* Hermon supports in-place CQ resize. */
	hca_ptr->hermon_resize_cq = 1;

	(void) pthread_spin_init(&hermon_bf_lock, 0);
}