qedr: Add support for data path
Implement fastpath verbs like ib_post_send, ib_post_recv and ib_poll_cq.

Signed-off-by: Rajesh Borundia <rajesh.borundia@cavium.com>
Signed-off-by: Ram Amrani <Ram.Amrani@cavium.com>
Signed-off-by: Doug Ledford <dledford@redhat.com>
parent e0290cce6a
commit afa0e13be7
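The new verbs are exported through the standard kernel ib_verbs entry points registered below. As a rough illustration (not part of this patch), a kernel ULP would exercise them like this; qp, cq, dma_addr, len and lkey are assumed to come from earlier QP/CQ creation and MR registration, and the busy-wait poll is deliberately simplified:

/*
 * Hypothetical kernel-side consumer of the verbs this patch wires up.
 * qp, cq, dma_addr, len and lkey are assumed to be set up elsewhere
 * (QP/CQ creation and MR registration come from earlier patches).
 */
#include <rdma/ib_verbs.h>

static int demo_send_and_poll(struct ib_qp *qp, struct ib_cq *cq,
                              u64 dma_addr, u32 len, u32 lkey)
{
    struct ib_sge sge = { .addr = dma_addr, .length = len, .lkey = lkey };
    struct ib_send_wr wr = {}, *bad_wr;
    struct ib_wc wc;
    int rc;

    wr.wr_id = 1;
    wr.sg_list = &sge;
    wr.num_sge = 1;
    wr.opcode = IB_WR_SEND;
    wr.send_flags = IB_SEND_SIGNALED;

    rc = ib_post_send(qp, &wr, &bad_wr);   /* dispatches to qedr_post_send() */
    if (rc)
        return rc;

    while (ib_poll_cq(cq, 1, &wc) == 0)    /* dispatches to qedr_poll_cq() */
        cpu_relax();                       /* simplified busy-wait */

    return wc.status == IB_WC_SUCCESS ? 0 : -EIO;
}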
@@ -102,7 +102,10 @@ static int qedr_register_device(struct qedr_dev *dev)
          QEDR_UVERBS(QUERY_QP) |
          QEDR_UVERBS(DESTROY_QP) |
          QEDR_UVERBS(REG_MR) |
-         QEDR_UVERBS(DEREG_MR);
+         QEDR_UVERBS(DEREG_MR) |
+         QEDR_UVERBS(POLL_CQ) |
+         QEDR_UVERBS(POST_SEND) |
+         QEDR_UVERBS(POST_RECV);
 
     dev->ibdev.phys_port_cnt = 1;
     dev->ibdev.num_comp_vectors = dev->num_cnq;
@@ -141,6 +144,10 @@ static int qedr_register_device(struct qedr_dev *dev)
     dev->ibdev.alloc_mr = qedr_alloc_mr;
     dev->ibdev.map_mr_sg = qedr_map_mr_sg;
 
+    dev->ibdev.poll_cq = qedr_poll_cq;
+    dev->ibdev.post_send = qedr_post_send;
+    dev->ibdev.post_recv = qedr_post_recv;
+
     dev->ibdev.dma_device = &dev->pdev->dev;
 
     dev->ibdev.get_link_layer = qedr_link_layer;
@@ -410,6 +410,25 @@ struct qedr_mr {
    u32 npages;
};

#define SET_FIELD2(value, name, flag) ((value) |= ((flag) << (name ## _SHIFT)))

#define QEDR_RESP_IMM (RDMA_CQE_RESPONDER_IMM_FLG_MASK << \
                       RDMA_CQE_RESPONDER_IMM_FLG_SHIFT)
#define QEDR_RESP_RDMA (RDMA_CQE_RESPONDER_RDMA_FLG_MASK << \
                        RDMA_CQE_RESPONDER_RDMA_FLG_SHIFT)
#define QEDR_RESP_RDMA_IMM (QEDR_RESP_IMM | QEDR_RESP_RDMA)

static inline void qedr_inc_sw_cons(struct qedr_qp_hwq_info *info)
{
    info->cons = (info->cons + 1) % info->max_wr;
    info->wqe_cons++;
}

static inline void qedr_inc_sw_prod(struct qedr_qp_hwq_info *info)
{
    info->prod = (info->prod + 1) % info->max_wr;
}

static inline int qedr_get_dmac(struct qedr_dev *dev,
                                struct ib_ah_attr *ah_attr, u8 *mac_addr)
{
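SET_FIELD2 simply ORs a flag into place using the field's *_SHIFT define. A hedged sketch of the kind of packing the new post_send path performs with the RDMA_SQ_SEND_WQE_* defines added further down (the helper and its arguments are hypothetical, not code from this patch):

/* Illustration only: packing SQ WQE flag bits with SET_FIELD2 and the
 * RDMA_SQ_SEND_WQE_* defines from qed_hsi_rdma.h below.  "swqe" and
 * "wr" are hypothetical locals.
 */
static void demo_fill_send_flags(struct rdma_sq_send_wqe_1st *swqe,
                                 const struct ib_send_wr *wr)
{
    swqe->flags = 0;
    SET_FIELD2(swqe->flags, RDMA_SQ_SEND_WQE_COMP_FLG,
               !!(wr->send_flags & IB_SEND_SIGNALED));
    SET_FIELD2(swqe->flags, RDMA_SQ_SEND_WQE_SE_FLG,
               !!(wr->send_flags & IB_SEND_SOLICITED));
    SET_FIELD2(swqe->flags, RDMA_SQ_SEND_WQE_INLINE_FLG,
               !!(wr->send_flags & IB_SEND_INLINE));
}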
@@ -150,6 +150,12 @@ struct rdma_rq_sge {
    struct regpair addr;
    __le32 length;
    __le32 flags;
#define RDMA_RQ_SGE_L_KEY_MASK 0x3FFFFFF
#define RDMA_RQ_SGE_L_KEY_SHIFT 0
#define RDMA_RQ_SGE_NUM_SGES_MASK 0x7
#define RDMA_RQ_SGE_NUM_SGES_SHIFT 26
#define RDMA_RQ_SGE_RESERVED0_MASK 0x7
#define RDMA_RQ_SGE_RESERVED0_SHIFT 29
};

struct rdma_srq_sge {
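The flags word of an RQ SGE carries the lkey in bits 0-25 and the SGE count in bits 26-28. A sketch of how a receive SGE could be filled from an ib_sge using these masks (function and variable names are hypothetical; SET_FIELD is assumed to be the existing qed common mask/shift helper):

/* Sketch: filling one RQ SGE with the mask/shift pairs above. */
static void demo_fill_rq_sge(struct rdma_rq_sge *rqe,
                             const struct ib_sge *sg, u8 num_sges)
{
    u32 flags = 0;

    rqe->addr.lo = cpu_to_le32(lower_32_bits(sg->addr));
    rqe->addr.hi = cpu_to_le32(upper_32_bits(sg->addr));
    rqe->length = cpu_to_le32(sg->length);

    SET_FIELD(flags, RDMA_RQ_SGE_L_KEY, sg->lkey);      /* bits 0-25 */
    SET_FIELD(flags, RDMA_RQ_SGE_NUM_SGES, num_sges);    /* bits 26-28 */
    rqe->flags = cpu_to_le32(flags);
}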
@@ -183,4 +189,560 @@ struct rdma_pwm_val32_data {
    __le32 value;
};

/* DIF Block size options */
enum rdma_dif_block_size {
    RDMA_DIF_BLOCK_512 = 0,
    RDMA_DIF_BLOCK_4096 = 1,
    MAX_RDMA_DIF_BLOCK_SIZE
};

/* DIF CRC initial value */
enum rdma_dif_crc_seed {
    RDMA_DIF_CRC_SEED_0000 = 0,
    RDMA_DIF_CRC_SEED_FFFF = 1,
    MAX_RDMA_DIF_CRC_SEED
};

/* RDMA DIF Error Result Structure */
struct rdma_dif_error_result {
    __le32 error_intervals;
    __le32 dif_error_1st_interval;
    u8 flags;
#define RDMA_DIF_ERROR_RESULT_DIF_ERROR_TYPE_CRC_MASK 0x1
#define RDMA_DIF_ERROR_RESULT_DIF_ERROR_TYPE_CRC_SHIFT 0
#define RDMA_DIF_ERROR_RESULT_DIF_ERROR_TYPE_APP_TAG_MASK 0x1
#define RDMA_DIF_ERROR_RESULT_DIF_ERROR_TYPE_APP_TAG_SHIFT 1
#define RDMA_DIF_ERROR_RESULT_DIF_ERROR_TYPE_REF_TAG_MASK 0x1
#define RDMA_DIF_ERROR_RESULT_DIF_ERROR_TYPE_REF_TAG_SHIFT 2
#define RDMA_DIF_ERROR_RESULT_RESERVED0_MASK 0xF
#define RDMA_DIF_ERROR_RESULT_RESERVED0_SHIFT 3
#define RDMA_DIF_ERROR_RESULT_TOGGLE_BIT_MASK 0x1
#define RDMA_DIF_ERROR_RESULT_TOGGLE_BIT_SHIFT 7
    u8 reserved1[55];
};

/* DIF IO direction */
enum rdma_dif_io_direction_flg {
    RDMA_DIF_DIR_RX = 0,
    RDMA_DIF_DIR_TX = 1,
    MAX_RDMA_DIF_IO_DIRECTION_FLG
};

/* RDMA DIF Runt Result Structure */
struct rdma_dif_runt_result {
    __le16 guard_tag;
    __le16 reserved[3];
};

/* Memory window type enumeration */
enum rdma_mw_type {
    RDMA_MW_TYPE_1,
    RDMA_MW_TYPE_2A,
    MAX_RDMA_MW_TYPE
};

struct rdma_sq_atomic_wqe {
    __le32 reserved1;
    __le32 length;
    __le32 xrc_srq;
    u8 req_type;
    u8 flags;
#define RDMA_SQ_ATOMIC_WQE_COMP_FLG_MASK 0x1
#define RDMA_SQ_ATOMIC_WQE_COMP_FLG_SHIFT 0
#define RDMA_SQ_ATOMIC_WQE_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_ATOMIC_WQE_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_ATOMIC_WQE_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_ATOMIC_WQE_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_ATOMIC_WQE_SE_FLG_MASK 0x1
#define RDMA_SQ_ATOMIC_WQE_SE_FLG_SHIFT 3
#define RDMA_SQ_ATOMIC_WQE_INLINE_FLG_MASK 0x1
#define RDMA_SQ_ATOMIC_WQE_INLINE_FLG_SHIFT 4
#define RDMA_SQ_ATOMIC_WQE_DIF_ON_HOST_FLG_MASK 0x1
#define RDMA_SQ_ATOMIC_WQE_DIF_ON_HOST_FLG_SHIFT 5
#define RDMA_SQ_ATOMIC_WQE_RESERVED0_MASK 0x3
#define RDMA_SQ_ATOMIC_WQE_RESERVED0_SHIFT 6
    u8 wqe_size;
    u8 prev_wqe_size;
    struct regpair remote_va;
    __le32 r_key;
    __le32 reserved2;
    struct regpair cmp_data;
    struct regpair swap_data;
};

/* First element (16 bytes) of atomic wqe */
struct rdma_sq_atomic_wqe_1st {
    __le32 reserved1;
    __le32 length;
    __le32 xrc_srq;
    u8 req_type;
    u8 flags;
#define RDMA_SQ_ATOMIC_WQE_1ST_COMP_FLG_MASK 0x1
#define RDMA_SQ_ATOMIC_WQE_1ST_COMP_FLG_SHIFT 0
#define RDMA_SQ_ATOMIC_WQE_1ST_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_ATOMIC_WQE_1ST_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_ATOMIC_WQE_1ST_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_ATOMIC_WQE_1ST_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_ATOMIC_WQE_1ST_SE_FLG_MASK 0x1
#define RDMA_SQ_ATOMIC_WQE_1ST_SE_FLG_SHIFT 3
#define RDMA_SQ_ATOMIC_WQE_1ST_INLINE_FLG_MASK 0x1
#define RDMA_SQ_ATOMIC_WQE_1ST_INLINE_FLG_SHIFT 4
#define RDMA_SQ_ATOMIC_WQE_1ST_RESERVED0_MASK 0x7
#define RDMA_SQ_ATOMIC_WQE_1ST_RESERVED0_SHIFT 5
    u8 wqe_size;
    u8 prev_wqe_size;
};

/* Second element (16 bytes) of atomic wqe */
struct rdma_sq_atomic_wqe_2nd {
    struct regpair remote_va;
    __le32 r_key;
    __le32 reserved2;
};

/* Third element (16 bytes) of atomic wqe */
struct rdma_sq_atomic_wqe_3rd {
    struct regpair cmp_data;
    struct regpair swap_data;
};

struct rdma_sq_bind_wqe {
    struct regpair addr;
    __le32 l_key;
    u8 req_type;
    u8 flags;
#define RDMA_SQ_BIND_WQE_COMP_FLG_MASK 0x1
#define RDMA_SQ_BIND_WQE_COMP_FLG_SHIFT 0
#define RDMA_SQ_BIND_WQE_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_BIND_WQE_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_BIND_WQE_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_BIND_WQE_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_BIND_WQE_SE_FLG_MASK 0x1
#define RDMA_SQ_BIND_WQE_SE_FLG_SHIFT 3
#define RDMA_SQ_BIND_WQE_INLINE_FLG_MASK 0x1
#define RDMA_SQ_BIND_WQE_INLINE_FLG_SHIFT 4
#define RDMA_SQ_BIND_WQE_RESERVED0_MASK 0x7
#define RDMA_SQ_BIND_WQE_RESERVED0_SHIFT 5
    u8 wqe_size;
    u8 prev_wqe_size;
    u8 bind_ctrl;
#define RDMA_SQ_BIND_WQE_ZERO_BASED_MASK 0x1
#define RDMA_SQ_BIND_WQE_ZERO_BASED_SHIFT 0
#define RDMA_SQ_BIND_WQE_MW_TYPE_MASK 0x1
#define RDMA_SQ_BIND_WQE_MW_TYPE_SHIFT 1
#define RDMA_SQ_BIND_WQE_RESERVED1_MASK 0x3F
#define RDMA_SQ_BIND_WQE_RESERVED1_SHIFT 2
    u8 access_ctrl;
#define RDMA_SQ_BIND_WQE_REMOTE_READ_MASK 0x1
#define RDMA_SQ_BIND_WQE_REMOTE_READ_SHIFT 0
#define RDMA_SQ_BIND_WQE_REMOTE_WRITE_MASK 0x1
#define RDMA_SQ_BIND_WQE_REMOTE_WRITE_SHIFT 1
#define RDMA_SQ_BIND_WQE_ENABLE_ATOMIC_MASK 0x1
#define RDMA_SQ_BIND_WQE_ENABLE_ATOMIC_SHIFT 2
#define RDMA_SQ_BIND_WQE_LOCAL_READ_MASK 0x1
#define RDMA_SQ_BIND_WQE_LOCAL_READ_SHIFT 3
#define RDMA_SQ_BIND_WQE_LOCAL_WRITE_MASK 0x1
#define RDMA_SQ_BIND_WQE_LOCAL_WRITE_SHIFT 4
#define RDMA_SQ_BIND_WQE_RESERVED2_MASK 0x7
#define RDMA_SQ_BIND_WQE_RESERVED2_SHIFT 5
    u8 reserved3;
    u8 length_hi;
    __le32 length_lo;
    __le32 parent_l_key;
    __le32 reserved4;
};

/* First element (16 bytes) of bind wqe */
struct rdma_sq_bind_wqe_1st {
    struct regpair addr;
    __le32 l_key;
    u8 req_type;
    u8 flags;
#define RDMA_SQ_BIND_WQE_1ST_COMP_FLG_MASK 0x1
#define RDMA_SQ_BIND_WQE_1ST_COMP_FLG_SHIFT 0
#define RDMA_SQ_BIND_WQE_1ST_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_BIND_WQE_1ST_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_BIND_WQE_1ST_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_BIND_WQE_1ST_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_BIND_WQE_1ST_SE_FLG_MASK 0x1
#define RDMA_SQ_BIND_WQE_1ST_SE_FLG_SHIFT 3
#define RDMA_SQ_BIND_WQE_1ST_INLINE_FLG_MASK 0x1
#define RDMA_SQ_BIND_WQE_1ST_INLINE_FLG_SHIFT 4
#define RDMA_SQ_BIND_WQE_1ST_RESERVED0_MASK 0x7
#define RDMA_SQ_BIND_WQE_1ST_RESERVED0_SHIFT 5
    u8 wqe_size;
    u8 prev_wqe_size;
};

/* Second element (16 bytes) of bind wqe */
struct rdma_sq_bind_wqe_2nd {
    u8 bind_ctrl;
#define RDMA_SQ_BIND_WQE_2ND_ZERO_BASED_MASK 0x1
#define RDMA_SQ_BIND_WQE_2ND_ZERO_BASED_SHIFT 0
#define RDMA_SQ_BIND_WQE_2ND_MW_TYPE_MASK 0x1
#define RDMA_SQ_BIND_WQE_2ND_MW_TYPE_SHIFT 1
#define RDMA_SQ_BIND_WQE_2ND_RESERVED1_MASK 0x3F
#define RDMA_SQ_BIND_WQE_2ND_RESERVED1_SHIFT 2
    u8 access_ctrl;
#define RDMA_SQ_BIND_WQE_2ND_REMOTE_READ_MASK 0x1
#define RDMA_SQ_BIND_WQE_2ND_REMOTE_READ_SHIFT 0
#define RDMA_SQ_BIND_WQE_2ND_REMOTE_WRITE_MASK 0x1
#define RDMA_SQ_BIND_WQE_2ND_REMOTE_WRITE_SHIFT 1
#define RDMA_SQ_BIND_WQE_2ND_ENABLE_ATOMIC_MASK 0x1
#define RDMA_SQ_BIND_WQE_2ND_ENABLE_ATOMIC_SHIFT 2
#define RDMA_SQ_BIND_WQE_2ND_LOCAL_READ_MASK 0x1
#define RDMA_SQ_BIND_WQE_2ND_LOCAL_READ_SHIFT 3
#define RDMA_SQ_BIND_WQE_2ND_LOCAL_WRITE_MASK 0x1
#define RDMA_SQ_BIND_WQE_2ND_LOCAL_WRITE_SHIFT 4
#define RDMA_SQ_BIND_WQE_2ND_RESERVED2_MASK 0x7
#define RDMA_SQ_BIND_WQE_2ND_RESERVED2_SHIFT 5
    u8 reserved3;
    u8 length_hi;
    __le32 length_lo;
    __le32 parent_l_key;
    __le32 reserved4;
};

/* Structure with only the SQ WQE common
 * fields. Size is of one SQ element (16B)
 */
struct rdma_sq_common_wqe {
    __le32 reserved1[3];
    u8 req_type;
    u8 flags;
#define RDMA_SQ_COMMON_WQE_COMP_FLG_MASK 0x1
#define RDMA_SQ_COMMON_WQE_COMP_FLG_SHIFT 0
#define RDMA_SQ_COMMON_WQE_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_COMMON_WQE_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_COMMON_WQE_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_COMMON_WQE_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_COMMON_WQE_SE_FLG_MASK 0x1
#define RDMA_SQ_COMMON_WQE_SE_FLG_SHIFT 3
#define RDMA_SQ_COMMON_WQE_INLINE_FLG_MASK 0x1
#define RDMA_SQ_COMMON_WQE_INLINE_FLG_SHIFT 4
#define RDMA_SQ_COMMON_WQE_RESERVED0_MASK 0x7
#define RDMA_SQ_COMMON_WQE_RESERVED0_SHIFT 5
    u8 wqe_size;
    u8 prev_wqe_size;
};

struct rdma_sq_fmr_wqe {
    struct regpair addr;
    __le32 l_key;
    u8 req_type;
    u8 flags;
#define RDMA_SQ_FMR_WQE_COMP_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_COMP_FLG_SHIFT 0
#define RDMA_SQ_FMR_WQE_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_FMR_WQE_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_FMR_WQE_SE_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_SE_FLG_SHIFT 3
#define RDMA_SQ_FMR_WQE_INLINE_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_INLINE_FLG_SHIFT 4
#define RDMA_SQ_FMR_WQE_DIF_ON_HOST_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_DIF_ON_HOST_FLG_SHIFT 5
#define RDMA_SQ_FMR_WQE_RESERVED0_MASK 0x3
#define RDMA_SQ_FMR_WQE_RESERVED0_SHIFT 6
    u8 wqe_size;
    u8 prev_wqe_size;
    u8 fmr_ctrl;
#define RDMA_SQ_FMR_WQE_PAGE_SIZE_LOG_MASK 0x1F
#define RDMA_SQ_FMR_WQE_PAGE_SIZE_LOG_SHIFT 0
#define RDMA_SQ_FMR_WQE_ZERO_BASED_MASK 0x1
#define RDMA_SQ_FMR_WQE_ZERO_BASED_SHIFT 5
#define RDMA_SQ_FMR_WQE_BIND_EN_MASK 0x1
#define RDMA_SQ_FMR_WQE_BIND_EN_SHIFT 6
#define RDMA_SQ_FMR_WQE_RESERVED1_MASK 0x1
#define RDMA_SQ_FMR_WQE_RESERVED1_SHIFT 7
    u8 access_ctrl;
#define RDMA_SQ_FMR_WQE_REMOTE_READ_MASK 0x1
#define RDMA_SQ_FMR_WQE_REMOTE_READ_SHIFT 0
#define RDMA_SQ_FMR_WQE_REMOTE_WRITE_MASK 0x1
#define RDMA_SQ_FMR_WQE_REMOTE_WRITE_SHIFT 1
#define RDMA_SQ_FMR_WQE_ENABLE_ATOMIC_MASK 0x1
#define RDMA_SQ_FMR_WQE_ENABLE_ATOMIC_SHIFT 2
#define RDMA_SQ_FMR_WQE_LOCAL_READ_MASK 0x1
#define RDMA_SQ_FMR_WQE_LOCAL_READ_SHIFT 3
#define RDMA_SQ_FMR_WQE_LOCAL_WRITE_MASK 0x1
#define RDMA_SQ_FMR_WQE_LOCAL_WRITE_SHIFT 4
#define RDMA_SQ_FMR_WQE_RESERVED2_MASK 0x7
#define RDMA_SQ_FMR_WQE_RESERVED2_SHIFT 5
    u8 reserved3;
    u8 length_hi;
    __le32 length_lo;
    struct regpair pbl_addr;
    __le32 dif_base_ref_tag;
    __le16 dif_app_tag;
    __le16 dif_app_tag_mask;
    __le16 dif_runt_crc_value;
    __le16 dif_flags;
#define RDMA_SQ_FMR_WQE_DIF_IO_DIRECTION_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_DIF_IO_DIRECTION_FLG_SHIFT 0
#define RDMA_SQ_FMR_WQE_DIF_BLOCK_SIZE_MASK 0x1
#define RDMA_SQ_FMR_WQE_DIF_BLOCK_SIZE_SHIFT 1
#define RDMA_SQ_FMR_WQE_DIF_RUNT_VALID_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_DIF_RUNT_VALID_FLG_SHIFT 2
#define RDMA_SQ_FMR_WQE_DIF_VALIDATE_CRC_GUARD_MASK 0x1
#define RDMA_SQ_FMR_WQE_DIF_VALIDATE_CRC_GUARD_SHIFT 3
#define RDMA_SQ_FMR_WQE_DIF_VALIDATE_REF_TAG_MASK 0x1
#define RDMA_SQ_FMR_WQE_DIF_VALIDATE_REF_TAG_SHIFT 4
#define RDMA_SQ_FMR_WQE_DIF_VALIDATE_APP_TAG_MASK 0x1
#define RDMA_SQ_FMR_WQE_DIF_VALIDATE_APP_TAG_SHIFT 5
#define RDMA_SQ_FMR_WQE_DIF_CRC_SEED_MASK 0x1
#define RDMA_SQ_FMR_WQE_DIF_CRC_SEED_SHIFT 6
#define RDMA_SQ_FMR_WQE_RESERVED4_MASK 0x1FF
#define RDMA_SQ_FMR_WQE_RESERVED4_SHIFT 7
    __le32 Reserved5;
};

/* First element (16 bytes) of fmr wqe */
struct rdma_sq_fmr_wqe_1st {
    struct regpair addr;
    __le32 l_key;
    u8 req_type;
    u8 flags;
#define RDMA_SQ_FMR_WQE_1ST_COMP_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_1ST_COMP_FLG_SHIFT 0
#define RDMA_SQ_FMR_WQE_1ST_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_1ST_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_FMR_WQE_1ST_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_1ST_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_FMR_WQE_1ST_SE_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_1ST_SE_FLG_SHIFT 3
#define RDMA_SQ_FMR_WQE_1ST_INLINE_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_1ST_INLINE_FLG_SHIFT 4
#define RDMA_SQ_FMR_WQE_1ST_DIF_ON_HOST_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_1ST_DIF_ON_HOST_FLG_SHIFT 5
#define RDMA_SQ_FMR_WQE_1ST_RESERVED0_MASK 0x3
#define RDMA_SQ_FMR_WQE_1ST_RESERVED0_SHIFT 6
    u8 wqe_size;
    u8 prev_wqe_size;
};

/* Second element (16 bytes) of fmr wqe */
struct rdma_sq_fmr_wqe_2nd {
    u8 fmr_ctrl;
#define RDMA_SQ_FMR_WQE_2ND_PAGE_SIZE_LOG_MASK 0x1F
#define RDMA_SQ_FMR_WQE_2ND_PAGE_SIZE_LOG_SHIFT 0
#define RDMA_SQ_FMR_WQE_2ND_ZERO_BASED_MASK 0x1
#define RDMA_SQ_FMR_WQE_2ND_ZERO_BASED_SHIFT 5
#define RDMA_SQ_FMR_WQE_2ND_BIND_EN_MASK 0x1
#define RDMA_SQ_FMR_WQE_2ND_BIND_EN_SHIFT 6
#define RDMA_SQ_FMR_WQE_2ND_RESERVED1_MASK 0x1
#define RDMA_SQ_FMR_WQE_2ND_RESERVED1_SHIFT 7
    u8 access_ctrl;
#define RDMA_SQ_FMR_WQE_2ND_REMOTE_READ_MASK 0x1
#define RDMA_SQ_FMR_WQE_2ND_REMOTE_READ_SHIFT 0
#define RDMA_SQ_FMR_WQE_2ND_REMOTE_WRITE_MASK 0x1
#define RDMA_SQ_FMR_WQE_2ND_REMOTE_WRITE_SHIFT 1
#define RDMA_SQ_FMR_WQE_2ND_ENABLE_ATOMIC_MASK 0x1
#define RDMA_SQ_FMR_WQE_2ND_ENABLE_ATOMIC_SHIFT 2
#define RDMA_SQ_FMR_WQE_2ND_LOCAL_READ_MASK 0x1
#define RDMA_SQ_FMR_WQE_2ND_LOCAL_READ_SHIFT 3
#define RDMA_SQ_FMR_WQE_2ND_LOCAL_WRITE_MASK 0x1
#define RDMA_SQ_FMR_WQE_2ND_LOCAL_WRITE_SHIFT 4
#define RDMA_SQ_FMR_WQE_2ND_RESERVED2_MASK 0x7
#define RDMA_SQ_FMR_WQE_2ND_RESERVED2_SHIFT 5
    u8 reserved3;
    u8 length_hi;
    __le32 length_lo;
    struct regpair pbl_addr;
};

/* Third element (16 bytes) of fmr wqe */
struct rdma_sq_fmr_wqe_3rd {
    __le32 dif_base_ref_tag;
    __le16 dif_app_tag;
    __le16 dif_app_tag_mask;
    __le16 dif_runt_crc_value;
    __le16 dif_flags;
#define RDMA_SQ_FMR_WQE_3RD_DIF_IO_DIRECTION_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_3RD_DIF_IO_DIRECTION_FLG_SHIFT 0
#define RDMA_SQ_FMR_WQE_3RD_DIF_BLOCK_SIZE_MASK 0x1
#define RDMA_SQ_FMR_WQE_3RD_DIF_BLOCK_SIZE_SHIFT 1
#define RDMA_SQ_FMR_WQE_3RD_DIF_RUNT_VALID_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_3RD_DIF_RUNT_VALID_FLG_SHIFT 2
#define RDMA_SQ_FMR_WQE_3RD_DIF_VALIDATE_CRC_GUARD_MASK 0x1
#define RDMA_SQ_FMR_WQE_3RD_DIF_VALIDATE_CRC_GUARD_SHIFT 3
#define RDMA_SQ_FMR_WQE_3RD_DIF_VALIDATE_REF_TAG_MASK 0x1
#define RDMA_SQ_FMR_WQE_3RD_DIF_VALIDATE_REF_TAG_SHIFT 4
#define RDMA_SQ_FMR_WQE_3RD_DIF_VALIDATE_APP_TAG_MASK 0x1
#define RDMA_SQ_FMR_WQE_3RD_DIF_VALIDATE_APP_TAG_SHIFT 5
#define RDMA_SQ_FMR_WQE_3RD_DIF_CRC_SEED_MASK 0x1
#define RDMA_SQ_FMR_WQE_3RD_DIF_CRC_SEED_SHIFT 6
#define RDMA_SQ_FMR_WQE_3RD_RESERVED4_MASK 0x1FF
#define RDMA_SQ_FMR_WQE_3RD_RESERVED4_SHIFT 7
    __le32 Reserved5;
};

struct rdma_sq_local_inv_wqe {
    struct regpair reserved;
    __le32 inv_l_key;
    u8 req_type;
    u8 flags;
#define RDMA_SQ_LOCAL_INV_WQE_COMP_FLG_MASK 0x1
#define RDMA_SQ_LOCAL_INV_WQE_COMP_FLG_SHIFT 0
#define RDMA_SQ_LOCAL_INV_WQE_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_LOCAL_INV_WQE_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_LOCAL_INV_WQE_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_LOCAL_INV_WQE_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_LOCAL_INV_WQE_SE_FLG_MASK 0x1
#define RDMA_SQ_LOCAL_INV_WQE_SE_FLG_SHIFT 3
#define RDMA_SQ_LOCAL_INV_WQE_INLINE_FLG_MASK 0x1
#define RDMA_SQ_LOCAL_INV_WQE_INLINE_FLG_SHIFT 4
#define RDMA_SQ_LOCAL_INV_WQE_DIF_ON_HOST_FLG_MASK 0x1
#define RDMA_SQ_LOCAL_INV_WQE_DIF_ON_HOST_FLG_SHIFT 5
#define RDMA_SQ_LOCAL_INV_WQE_RESERVED0_MASK 0x3
#define RDMA_SQ_LOCAL_INV_WQE_RESERVED0_SHIFT 6
    u8 wqe_size;
    u8 prev_wqe_size;
};

struct rdma_sq_rdma_wqe {
    __le32 imm_data;
    __le32 length;
    __le32 xrc_srq;
    u8 req_type;
    u8 flags;
#define RDMA_SQ_RDMA_WQE_COMP_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_COMP_FLG_SHIFT 0
#define RDMA_SQ_RDMA_WQE_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_RDMA_WQE_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_RDMA_WQE_SE_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_SE_FLG_SHIFT 3
#define RDMA_SQ_RDMA_WQE_INLINE_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_INLINE_FLG_SHIFT 4
#define RDMA_SQ_RDMA_WQE_DIF_ON_HOST_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_DIF_ON_HOST_FLG_SHIFT 5
#define RDMA_SQ_RDMA_WQE_RESERVED0_MASK 0x3
#define RDMA_SQ_RDMA_WQE_RESERVED0_SHIFT 6
    u8 wqe_size;
    u8 prev_wqe_size;
    struct regpair remote_va;
    __le32 r_key;
    u8 dif_flags;
#define RDMA_SQ_RDMA_WQE_DIF_BLOCK_SIZE_MASK 0x1
#define RDMA_SQ_RDMA_WQE_DIF_BLOCK_SIZE_SHIFT 0
#define RDMA_SQ_RDMA_WQE_DIF_FIRST_RDMA_IN_IO_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_DIF_FIRST_RDMA_IN_IO_FLG_SHIFT 1
#define RDMA_SQ_RDMA_WQE_DIF_LAST_RDMA_IN_IO_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_DIF_LAST_RDMA_IN_IO_FLG_SHIFT 2
#define RDMA_SQ_RDMA_WQE_RESERVED1_MASK 0x1F
#define RDMA_SQ_RDMA_WQE_RESERVED1_SHIFT 3
    u8 reserved2[3];
};

/* First element (16 bytes) of rdma wqe */
struct rdma_sq_rdma_wqe_1st {
    __le32 imm_data;
    __le32 length;
    __le32 xrc_srq;
    u8 req_type;
    u8 flags;
#define RDMA_SQ_RDMA_WQE_1ST_COMP_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_1ST_COMP_FLG_SHIFT 0
#define RDMA_SQ_RDMA_WQE_1ST_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_1ST_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_RDMA_WQE_1ST_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_1ST_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_RDMA_WQE_1ST_SE_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_1ST_SE_FLG_SHIFT 3
#define RDMA_SQ_RDMA_WQE_1ST_INLINE_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_1ST_INLINE_FLG_SHIFT 4
#define RDMA_SQ_RDMA_WQE_1ST_DIF_ON_HOST_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_1ST_DIF_ON_HOST_FLG_SHIFT 5
#define RDMA_SQ_RDMA_WQE_1ST_RESERVED0_MASK 0x3
#define RDMA_SQ_RDMA_WQE_1ST_RESERVED0_SHIFT 6
    u8 wqe_size;
    u8 prev_wqe_size;
};

/* Second element (16 bytes) of rdma wqe */
struct rdma_sq_rdma_wqe_2nd {
    struct regpair remote_va;
    __le32 r_key;
    u8 dif_flags;
#define RDMA_SQ_RDMA_WQE_2ND_DIF_BLOCK_SIZE_MASK 0x1
#define RDMA_SQ_RDMA_WQE_2ND_DIF_BLOCK_SIZE_SHIFT 0
#define RDMA_SQ_RDMA_WQE_2ND_DIF_FIRST_SEGMENT_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_2ND_DIF_FIRST_SEGMENT_FLG_SHIFT 1
#define RDMA_SQ_RDMA_WQE_2ND_DIF_LAST_SEGMENT_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_2ND_DIF_LAST_SEGMENT_FLG_SHIFT 2
#define RDMA_SQ_RDMA_WQE_2ND_RESERVED1_MASK 0x1F
#define RDMA_SQ_RDMA_WQE_2ND_RESERVED1_SHIFT 3
    u8 reserved2[3];
};

/* SQ WQE req type enumeration */
enum rdma_sq_req_type {
    RDMA_SQ_REQ_TYPE_SEND,
    RDMA_SQ_REQ_TYPE_SEND_WITH_IMM,
    RDMA_SQ_REQ_TYPE_SEND_WITH_INVALIDATE,
    RDMA_SQ_REQ_TYPE_RDMA_WR,
    RDMA_SQ_REQ_TYPE_RDMA_WR_WITH_IMM,
    RDMA_SQ_REQ_TYPE_RDMA_RD,
    RDMA_SQ_REQ_TYPE_ATOMIC_CMP_AND_SWAP,
    RDMA_SQ_REQ_TYPE_ATOMIC_ADD,
    RDMA_SQ_REQ_TYPE_LOCAL_INVALIDATE,
    RDMA_SQ_REQ_TYPE_FAST_MR,
    RDMA_SQ_REQ_TYPE_BIND,
    RDMA_SQ_REQ_TYPE_INVALID,
    MAX_RDMA_SQ_REQ_TYPE
};

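The send path has to translate ib_send_wr opcodes into these request types. The mapping below is a sketch of the obvious correspondence, not the literal qedr code:

/* Sketch of the opcode translation the post_send path needs. */
static u8 demo_ib_to_rdma_req_type(enum ib_wr_opcode op)
{
    switch (op) {
    case IB_WR_SEND:                 return RDMA_SQ_REQ_TYPE_SEND;
    case IB_WR_SEND_WITH_IMM:        return RDMA_SQ_REQ_TYPE_SEND_WITH_IMM;
    case IB_WR_SEND_WITH_INV:        return RDMA_SQ_REQ_TYPE_SEND_WITH_INVALIDATE;
    case IB_WR_RDMA_WRITE:           return RDMA_SQ_REQ_TYPE_RDMA_WR;
    case IB_WR_RDMA_WRITE_WITH_IMM:  return RDMA_SQ_REQ_TYPE_RDMA_WR_WITH_IMM;
    case IB_WR_RDMA_READ:            return RDMA_SQ_REQ_TYPE_RDMA_RD;
    case IB_WR_ATOMIC_CMP_AND_SWP:   return RDMA_SQ_REQ_TYPE_ATOMIC_CMP_AND_SWAP;
    case IB_WR_ATOMIC_FETCH_AND_ADD: return RDMA_SQ_REQ_TYPE_ATOMIC_ADD;
    case IB_WR_LOCAL_INV:            return RDMA_SQ_REQ_TYPE_LOCAL_INVALIDATE;
    case IB_WR_REG_MR:               return RDMA_SQ_REQ_TYPE_FAST_MR;
    default:                         return RDMA_SQ_REQ_TYPE_INVALID;
    }
}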
struct rdma_sq_send_wqe {
    __le32 inv_key_or_imm_data;
    __le32 length;
    __le32 xrc_srq;
    u8 req_type;
    u8 flags;
#define RDMA_SQ_SEND_WQE_COMP_FLG_MASK 0x1
#define RDMA_SQ_SEND_WQE_COMP_FLG_SHIFT 0
#define RDMA_SQ_SEND_WQE_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_SEND_WQE_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_SEND_WQE_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_SEND_WQE_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_SEND_WQE_SE_FLG_MASK 0x1
#define RDMA_SQ_SEND_WQE_SE_FLG_SHIFT 3
#define RDMA_SQ_SEND_WQE_INLINE_FLG_MASK 0x1
#define RDMA_SQ_SEND_WQE_INLINE_FLG_SHIFT 4
#define RDMA_SQ_SEND_WQE_DIF_ON_HOST_FLG_MASK 0x1
#define RDMA_SQ_SEND_WQE_DIF_ON_HOST_FLG_SHIFT 5
#define RDMA_SQ_SEND_WQE_RESERVED0_MASK 0x3
#define RDMA_SQ_SEND_WQE_RESERVED0_SHIFT 6
    u8 wqe_size;
    u8 prev_wqe_size;
    __le32 reserved1[4];
};

struct rdma_sq_send_wqe_1st {
    __le32 inv_key_or_imm_data;
    __le32 length;
    __le32 xrc_srq;
    u8 req_type;
    u8 flags;
#define RDMA_SQ_SEND_WQE_1ST_COMP_FLG_MASK 0x1
#define RDMA_SQ_SEND_WQE_1ST_COMP_FLG_SHIFT 0
#define RDMA_SQ_SEND_WQE_1ST_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_SEND_WQE_1ST_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_SEND_WQE_1ST_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_SEND_WQE_1ST_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_SEND_WQE_1ST_SE_FLG_MASK 0x1
#define RDMA_SQ_SEND_WQE_1ST_SE_FLG_SHIFT 3
#define RDMA_SQ_SEND_WQE_1ST_INLINE_FLG_MASK 0x1
#define RDMA_SQ_SEND_WQE_1ST_INLINE_FLG_SHIFT 4
#define RDMA_SQ_SEND_WQE_1ST_RESERVED0_MASK 0x7
#define RDMA_SQ_SEND_WQE_1ST_RESERVED0_SHIFT 5
    u8 wqe_size;
    u8 prev_wqe_size;
};

struct rdma_sq_send_wqe_2st {
    __le32 reserved1[4];
};

#endif /* __QED_HSI_RDMA__ */
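The *_1st/*_2st structures are per-element views of the same WQEs: the send queue is consumed in 16-byte elements, and wqe_size/prev_wqe_size count those elements. A small sanity check one could write against the definitions above (helper name hypothetical, not part of this patch):

static inline void demo_wqe_layout_check(void)
{
    /* The split *_1st/*_2st views tile the full 32-byte send WQE. */
    BUILD_BUG_ON(sizeof(struct rdma_sq_send_wqe_1st) +
                 sizeof(struct rdma_sq_send_wqe_2st) !=
                 sizeof(struct rdma_sq_send_wqe));
}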
(The diff of one additional file is not shown here because it is too large.)
@@ -81,4 +81,9 @@ int qedr_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg,

struct ib_mr *qedr_alloc_mr(struct ib_pd *pd, enum ib_mr_type mr_type,
                            u32 max_num_sg);
int qedr_poll_cq(struct ib_cq *, int num_entries, struct ib_wc *wc);
int qedr_post_send(struct ib_qp *, struct ib_send_wr *,
                   struct ib_send_wr **bad_wr);
int qedr_post_recv(struct ib_qp *, struct ib_recv_wr *,
                   struct ib_recv_wr **bad_wr);
#endif