/*
 * c_ctx — usage sites in two kernel crypto drivers:
 *  - Chelsio chcr (drivers/crypto/chelsio): c_ctx(tfm) is an inline accessor
 *    that returns the driver's struct chcr_context for a tfm.
 *  - HiSilicon SEC2 (drivers/crypto/hisilicon/sec2): c_ctx is the cipher
 *    sub-context (struct sec_cipher_ctx) embedded in struct sec_ctx.
 */
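/*
 * A minimal sketch of the chcr helpers the lines below lean on, assuming the
 * definitions from chcr_algo.c and chcr_crypto.h (reconstructed, not copied
 * from a specific kernel version; ULD_CTX() similarly maps a chcr_context to
 * its enclosing uld_ctx):
 */
static inline struct chcr_context *c_ctx(struct crypto_skcipher *tfm)
{
	/* the per-tfm context the crypto API allocated for this skcipher */
	return crypto_skcipher_ctx(tfm);
}

/* ABLK_CTX picks the block-cipher part out of the shared crypto context */
#define ABLK_CTX(ctx) (&(ctx)->crypto_ctx->ablkctx)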
struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(tfm));
struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm));
struct chcr_dev *dev = c_ctx(tfm)->dev;
struct chcr_context *ctx = c_ctx(tfm);
chcr_cipher_dma_unmap(&ULD_CTX(c_ctx(tfm))->lldi.pdev->dev, req);
struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(tfm));
struct adapter *adap = padap(c_ctx(tfm)->dev);
err = chcr_cipher_dma_map(&ULD_CTX(c_ctx(tfm))->lldi.pdev->dev, req);
chcr_cipher_dma_unmap(&ULD_CTX(c_ctx(tfm))->lldi.pdev->dev, req);
struct chcr_dev *dev = c_ctx(tfm)->dev;
struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm));
struct chcr_context *ctx = c_ctx(tfm);
struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm));
struct chcr_dev *dev = c_ctx(tfm)->dev;
struct chcr_context *ctx = c_ctx(tfm);
struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(cipher));
struct chcr_context *ctx = c_ctx(tfm);
/* trailing arguments reconstructed from chcr_algo.c; treat as approximate */
create_wreq(c_ctx(tfm), chcr_req, &(wrparam->req->base), reqctx->imm, 0,
	    transhdr_len, temp,
	    ablkctx->ciph_mode == CHCR_SCMD_CIPHER_MODE_AES_CBC);
struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(cipher));
/*
 * HiSilicon SEC2: from here on, c_ctx is not a function but the cipher
 * sub-context embedded in the per-tfm struct sec_ctx.
 */
struct sec_cipher_ctx c_ctx;
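/*
 * A sketch of struct sec_cipher_ctx, reconstructed only from the fields
 * referenced in this listing (field order and any omitted members are
 * assumptions, not the authoritative sec.h layout):
 */
struct sec_cipher_ctx {
	u8 *c_key;			/* key buffer, DMA-coherent */
	dma_addr_t c_key_dma;		/* device address of c_key */
	u32 ivsize;			/* IV length for this tfm */
	u8 c_mode;			/* SEC_CMODE_CBC/CTR/XTS/CCM/GCM */
	u8 c_alg;			/* SEC_CALG_AES/SM4/3DES */
	u8 c_key_len;			/* SEC_CKEY_* encoding */
	bool fallback;			/* route requests to fbtfm */
	struct crypto_sync_skcipher *fbtfm; /* software fallback tfm */
};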
static int sec_aead_aes_set_key(struct sec_cipher_ctx *c_ctx,
				struct crypto_authenc_keys *keys)
c_ctx->c_key_len = SEC_CKEY_128BIT;
c_ctx->c_key_len = SEC_CKEY_192BIT;
c_ctx->c_key_len = SEC_CKEY_256BIT;
memcpy(c_ctx->c_key, keys->enckey, keys->enckeylen);
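/*
 * The four fragments above fit together as a key-length switch; a hedged
 * reconstruction (error handling style is an assumption):
 */
static int sec_aead_aes_set_key(struct sec_cipher_ctx *c_ctx,
				struct crypto_authenc_keys *keys)
{
	switch (keys->enckeylen) {
	case AES_KEYSIZE_128:
		c_ctx->c_key_len = SEC_CKEY_128BIT;
		break;
	case AES_KEYSIZE_192:
		c_ctx->c_key_len = SEC_CKEY_192BIT;
		break;
	case AES_KEYSIZE_256:
		c_ctx->c_key_len = SEC_CKEY_256BIT;
		break;
	default:
		return -EINVAL;
	}
	/* program the raw encryption key into the DMA-visible buffer */
	memcpy(c_ctx->c_key, keys->enckey, keys->enckeylen);

	return 0;
}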
struct sec_cipher_ctx *c_ctx = &ctx->c_ctx;
ctx->c_ctx.c_alg = c_alg;
c_ctx->c_mode = c_mode;
ret = sec_skcipher_aes_sm4_setkey(c_ctx, keylen, c_mode);
memcpy(c_ctx->c_key, key, keylen);
ret = sec_aead_aes_set_key(c_ctx, &keys);
memcpy(c_req->c_ivin, sk_req->iv, ctx->c_ctx.ivsize);
struct sec_cipher_ctx *c_ctx = &ctx->c_ctx;
sec_sqe->type2.c_key_addr = cpu_to_le64(c_ctx->c_key_dma);
sec_sqe->type2.icvw_kmode |= cpu_to_le16(((u16)c_ctx->c_mode) <<
					 SEC_CMODE_OFFSET);
sec_sqe->type2.c_alg = c_ctx->c_alg;
sec_sqe->type2.icvw_kmode |= cpu_to_le16(((u16)c_ctx->c_key_len) <<
					 SEC_CKEY_OFFSET);
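/*
 * To see the v2 packing at a glance: c_mode and c_key_len share the le16
 * icvw_kmode word. The helper below is hypothetical (it is not in the
 * driver); SEC_CMODE_OFFSET and SEC_CKEY_OFFSET are the driver's shift
 * constants (12 and 9 in sec_crypto.c, if memory serves):
 */
static inline __le16 sec_pack_icvw_kmode(u8 c_mode, u8 c_key_len)
{
	return cpu_to_le16(((u16)c_mode << SEC_CMODE_OFFSET) |
			   ((u16)c_key_len << SEC_CKEY_OFFSET));
}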
struct sec_cipher_ctx *c_ctx = &ctx->c_ctx;
sec_sqe3->c_key_addr = cpu_to_le64(c_ctx->c_key_dma);
sec_sqe3->c_mode_alg = ((u8)c_ctx->c_alg << SEC_CALG_OFFSET_V3) |
c_ctx->c_mode;
sec_sqe3->c_icv_key |= cpu_to_le16(((u16)c_ctx->c_key_len) <<
				   SEC_CKEY_OFFSET_V3);
u32 iv_size = req->ctx->c_ctx.ivsize;
if (req->ctx->c_ctx.c_mode == SEC_CMODE_CBC) {
if (!err && (ctx->c_ctx.c_mode == SEC_CMODE_CBC ||
ctx->c_ctx.c_mode == SEC_CMODE_CTR) && req->c_req.encrypt)
c_req->c_ivin[ctx->c_ctx.ivsize - cl] = 0x00;
memset(&c_req->c_ivin[ctx->c_ctx.ivsize - cl], 0, cl);
c_req->c_ivin[ctx->c_ctx.ivsize - IV_LAST_BYTE1] = IV_CTR_INIT;
memcpy(a_req->a_ivin, c_req->c_ivin, ctx->c_ctx.ivsize);
/* the last two IV bytes carry the CCM payload length; RHS reconstructed */
a_req->a_ivin[ctx->c_ctx.ivsize - IV_LAST_BYTE1] =
		data_size & IV_BYTE_MASK;
data_size >>= IV_BYTE_OFFSET;
a_req->a_ivin[ctx->c_ctx.ivsize - IV_LAST_BYTE2] =
		data_size & IV_BYTE_MASK;
memcpy(c_req->c_ivin, aead_req->iv, ctx->c_ctx.ivsize);
if (ctx->c_ctx.c_mode == SEC_CMODE_CCM) {
} else if (ctx->c_ctx.c_mode == SEC_CMODE_GCM) {
if (ctx->c_ctx.c_mode == SEC_CMODE_CCM ||
ctx->c_ctx.c_mode == SEC_CMODE_GCM)
if (c->c_ctx.c_mode == SEC_CMODE_CBC)
if (!req->c_req.encrypt && (ctx->c_ctx.c_mode == SEC_CMODE_CBC ||
ctx->c_ctx.c_mode == SEC_CMODE_CTR))
if (ctx->c_ctx.c_mode == SEC_CMODE_CBC && !req->c_req.encrypt) {
ctx->c_ctx.ivsize);
ctx->c_ctx.ivsize);
ctx->c_ctx.ivsize = crypto_aead_ivsize(tfm);
if (ctx->c_ctx.ivsize < SEC_AIV_SIZE ||
ctx->c_ctx.ivsize > SEC_IV_SIZE) {
u8 c_mode = ctx->c_ctx.c_mode;
u8 c_alg = ctx->c_ctx.c_alg;
struct sec_cipher_ctx *c_ctx = &ctx->c_ctx;
SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, c_ctx->fbtfm);
if (!c_ctx->fbtfm) {
skcipher_request_set_sync_tfm(subreq, c_ctx->fbtfm);
if (ctx->c_ctx.c_mode == SEC_CMODE_XTS)
if (unlikely(ctx->c_ctx.fallback || need_fallback))
u8 c_mode = ctx->c_ctx.c_mode;
u8 c_alg = ctx->c_ctx.c_alg;
if (unlikely(ctx->c_ctx.c_mode == SEC_CMODE_CBC &&
	     sk_req->cryptlen & (AES_BLOCK_SIZE - 1))) /* continuation assumed */
struct sec_cipher_ctx *c_ctx = &ctx->c_ctx;
c_ctx->c_key = dma_alloc_coherent(ctx->dev, SEC_MAX_KEY_SIZE,
&c_ctx->c_key_dma, GFP_KERNEL);
if (!c_ctx->c_key)
struct sec_cipher_ctx *c_ctx = &ctx->c_ctx;
memzero_explicit(c_ctx->c_key, SEC_MAX_KEY_SIZE);
dma_free_coherent(ctx->dev, SEC_MAX_KEY_SIZE,
		  c_ctx->c_key, c_ctx->c_key_dma);
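/*
 * Taken together, the alloc/clear/free fragments above form the key-buffer
 * lifecycle; a hedged sketch of the init/uninit pair (function names follow
 * the driver's sec_cipher_* convention but are assumed):
 */
static int sec_cipher_init(struct sec_ctx *ctx)
{
	struct sec_cipher_ctx *c_ctx = &ctx->c_ctx;

	c_ctx->c_key = dma_alloc_coherent(ctx->dev, SEC_MAX_KEY_SIZE,
					  &c_ctx->c_key_dma, GFP_KERNEL);
	if (!c_ctx->c_key)
		return -ENOMEM;

	return 0;
}

static void sec_cipher_uninit(struct sec_ctx *ctx)
{
	struct sec_cipher_ctx *c_ctx = &ctx->c_ctx;

	/* scrub key material before handing the buffer back */
	memzero_explicit(c_ctx->c_key, SEC_MAX_KEY_SIZE);
	dma_free_coherent(ctx->dev, SEC_MAX_KEY_SIZE,
			  c_ctx->c_key, c_ctx->c_key_dma);
}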
struct sec_cipher_ctx *c_ctx = &ctx->c_ctx;
c_ctx->fallback = false;
c_ctx->fbtfm = crypto_alloc_sync_skcipher(alg, 0,
					  CRYPTO_ALG_NEED_FALLBACK);
if (IS_ERR(c_ctx->fbtfm)) {
return PTR_ERR(c_ctx->fbtfm);
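/*
 * A hedged reconstruction of the fallback-tfm setup around the fragments
 * above (the alg-name lookup and the error message are assumptions):
 */
static int sec_skcipher_fbtfm_init(struct crypto_skcipher *tfm)
{
	const char *alg = crypto_tfm_alg_name(&tfm->base);
	struct sec_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct sec_cipher_ctx *c_ctx = &ctx->c_ctx;

	c_ctx->fallback = false;

	c_ctx->fbtfm = crypto_alloc_sync_skcipher(alg, 0,
						  CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(c_ctx->fbtfm)) {
		pr_err("failed to allocate fallback tfm for %s!\n", alg);
		return PTR_ERR(c_ctx->fbtfm);
	}

	return 0;
}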
ctx->c_ctx.ivsize = crypto_skcipher_ivsize(tfm);
if (ctx->c_ctx.ivsize > SEC_IV_SIZE) {
if (ctx->c_ctx.fbtfm)
crypto_free_sync_skcipher(ctx->c_ctx.fbtfm);
struct sec_cipher_ctx *c_ctx = &ctx->c_ctx;
c_ctx->c_key_len = SEC_CKEY_3DES_2KEY;
c_ctx->c_key_len = SEC_CKEY_3DES_3KEY;
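/*
 * The two SEC_CKEY_3DES_* fragments above come from the 3DES setkey path;
 * a minimal sketch, assuming the generic des3 key verification helper and
 * the SEC_DES3_{2,3}KEY_SIZE constants:
 */
static int sec_skcipher_3des_setkey(struct crypto_skcipher *tfm, const u8 *key,
				    const u32 keylen)
{
	struct sec_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct sec_cipher_ctx *c_ctx = &ctx->c_ctx;
	int ret;

	/* reject weak/degenerate 3DES keys via the generic helper */
	ret = verify_skcipher_des3_key(tfm, key);
	if (ret)
		return ret;

	switch (keylen) {
	case SEC_DES3_2KEY_SIZE:
		c_ctx->c_key_len = SEC_CKEY_3DES_2KEY;
		break;
	case SEC_DES3_3KEY_SIZE:
		c_ctx->c_key_len = SEC_CKEY_3DES_3KEY;
		break;
	default:
		return -EINVAL;
	}

	return 0;
}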
static int sec_skcipher_aes_sm4_setkey(struct sec_cipher_ctx *c_ctx,
				       const u32 keylen,
				       const enum sec_cmode c_mode)
c_ctx->c_key_len = SEC_CKEY_128BIT;
c_ctx->fallback = true;
c_ctx->c_key_len = SEC_CKEY_256BIT;
if (c_ctx->c_alg == SEC_CALG_SM4 &&
c_ctx->c_key_len = SEC_CKEY_128BIT;
c_ctx->c_key_len = SEC_CKEY_192BIT;
c_ctx->c_key_len = SEC_CKEY_256BIT;
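/*
 * The XTS/SM4/AES fragments above assemble into one key-length routine; a
 * hedged reconstruction (the SEC_XTS_*_KEY_SIZE names and the fall-back for
 * 192-bit XTS keys are read off the fragments, not a verbatim copy):
 */
static int sec_skcipher_aes_sm4_setkey(struct sec_cipher_ctx *c_ctx,
				       const u32 keylen,
				       const enum sec_cmode c_mode)
{
	if (c_mode == SEC_CMODE_XTS) {
		switch (keylen) {
		case SEC_XTS_MIN_KEY_SIZE:	/* two 128-bit halves */
			c_ctx->c_key_len = SEC_CKEY_128BIT;
			break;
		case SEC_XTS_MID_KEY_SIZE:	/* 192-bit halves: software */
			c_ctx->fallback = true;
			break;
		case SEC_XTS_MAX_KEY_SIZE:	/* two 256-bit halves */
			c_ctx->c_key_len = SEC_CKEY_256BIT;
			break;
		default:
			return -EINVAL;
		}
	} else {
		/* SM4 only comes in a single 128-bit key size */
		if (c_ctx->c_alg == SEC_CALG_SM4 &&
		    keylen != AES_KEYSIZE_128)
			return -EINVAL;

		switch (keylen) {
		case AES_KEYSIZE_128:
			c_ctx->c_key_len = SEC_CKEY_128BIT;
			break;
		case AES_KEYSIZE_192:
			c_ctx->c_key_len = SEC_CKEY_192BIT;
			break;
		case AES_KEYSIZE_256:
			c_ctx->c_key_len = SEC_CKEY_256BIT;
			break;
		default:
			return -EINVAL;
		}
	}

	return 0;
}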
struct sec_cipher_ctx *c_ctx = &ctx->c_ctx;
c_ctx->c_alg = c_alg;
c_ctx->c_mode = c_mode;
ret = sec_skcipher_aes_sm4_setkey(c_ctx, keylen, c_mode);
memcpy(c_ctx->c_key, key, keylen);
ret = crypto_sync_skcipher_setkey(c_ctx->fbtfm, key, keylen);
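/*
 * The setkey fragments above tie the pieces together: record alg/mode,
 * dispatch the per-algorithm key-length check, copy the key into the
 * DMA-visible buffer, and mirror it into the fallback tfm. A hedged sketch
 * of that flow (error paths and any XTS key verification are assumptions):
 */
static int sec_skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
			       const u32 keylen, const enum sec_calg c_alg,
			       const enum sec_cmode c_mode)
{
	struct sec_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct sec_cipher_ctx *c_ctx = &ctx->c_ctx;
	int ret;

	c_ctx->c_alg = c_alg;
	c_ctx->c_mode = c_mode;

	switch (c_alg) {
	case SEC_CALG_3DES:
		ret = sec_skcipher_3des_setkey(tfm, key, keylen);
		break;
	case SEC_CALG_AES:
	case SEC_CALG_SM4:
		ret = sec_skcipher_aes_sm4_setkey(c_ctx, keylen, c_mode);
		break;
	default:
		return -EINVAL;
	}
	if (ret)
		return ret;

	memcpy(c_ctx->c_key, key, keylen);

	/* keep the software fallback in sync with the hardware key */
	if (c_ctx->fbtfm) {
		ret = crypto_sync_skcipher_setkey(c_ctx->fbtfm, key, keylen);
		if (ret)
			return ret;
	}

	return 0;
}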