crypto: nx - convert AES-CBC to skcipher API

Convert the PowerPC Nest (NX) implementation of AES-CBC from the
deprecated "blkcipher" API to the "skcipher" API.  This is needed in
order for the blkcipher API to be removed.

Signed-off-by: Eric Biggers <ebiggers@google.com>
Reviewed-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
commit c1e9c386c9
parent bfd9efddf9
Author:    Eric Biggers  2019-10-12 21:39:17 -07:00
Committer: Herbert Xu

3 changed files with 41 additions and 52 deletions
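
For context, here is a minimal sketch (not part of this patch) of how a kernel caller drives a CBC skcipher through the request-based API that the driver now implements. The fields filled in by skcipher_request_set_crypt() -- src, dst, cryptlen and iv -- are exactly what cbc_aes_nx_crypt() below reads from struct skcipher_request. The helper name example_cbc_encrypt() and the in-place, single-scatterlist setup are illustrative assumptions, not code from the NX driver.

/* Illustrative caller of the skcipher API; not part of the NX driver. */
#include <crypto/aes.h>
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/gfp.h>
#include <linux/scatterlist.h>
#include <linux/types.h>

static int example_cbc_encrypt(u8 *buf, unsigned int len,
                               const u8 *key, unsigned int keylen,
                               u8 iv[AES_BLOCK_SIZE])
{
        struct crypto_skcipher *tfm;
        struct skcipher_request *req;
        struct scatterlist sg;
        DECLARE_CRYPTO_WAIT(wait);
        int err;

        tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        err = crypto_skcipher_setkey(tfm, key, keylen);
        if (err)
                goto out_free_tfm;

        req = skcipher_request_alloc(tfm, GFP_KERNEL);
        if (!req) {
                err = -ENOMEM;
                goto out_free_tfm;
        }

        /* Encrypt in place: same scatterlist as source and destination. */
        sg_init_one(&sg, buf, len);
        skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
                                      crypto_req_done, &wait);
        skcipher_request_set_crypt(req, &sg, &sg, len, iv);

        /* Wait synchronously for the (possibly asynchronous) request. */
        err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

        skcipher_request_free(req);
out_free_tfm:
        crypto_free_skcipher(tfm);
        return err;
}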

--- a/drivers/crypto/nx/nx-aes-cbc.c
+++ b/drivers/crypto/nx/nx-aes-cbc.c
@@ -18,11 +18,11 @@
 #include "nx.h"
 
 
-static int cbc_aes_nx_set_key(struct crypto_tfm *tfm,
-                              const u8          *in_key,
-                              unsigned int       key_len)
+static int cbc_aes_nx_set_key(struct crypto_skcipher *tfm,
+                              const u8               *in_key,
+                              unsigned int            key_len)
 {
-        struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(tfm);
+        struct nx_crypto_ctx *nx_ctx = crypto_skcipher_ctx(tfm);
         struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;
 
         nx_ctx_init(nx_ctx, HCOP_FC_AES);
@@ -50,13 +50,11 @@ static int cbc_aes_nx_set_key(struct crypto_tfm *tfm,
         return 0;
 }
 
-static int cbc_aes_nx_crypt(struct blkcipher_desc *desc,
-                            struct scatterlist    *dst,
-                            struct scatterlist    *src,
-                            unsigned int           nbytes,
-                            int                    enc)
+static int cbc_aes_nx_crypt(struct skcipher_request *req,
+                            int                      enc)
 {
-        struct nx_crypto_ctx *nx_ctx = crypto_blkcipher_ctx(desc->tfm);
+        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+        struct nx_crypto_ctx *nx_ctx = crypto_skcipher_ctx(tfm);
         struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;
         unsigned long irq_flags;
         unsigned int processed = 0, to_process;
@@ -70,9 +68,9 @@ static int cbc_aes_nx_crypt(struct blkcipher_desc *desc,
                 NX_CPB_FDM(csbcpb) &= ~NX_FDM_ENDE_ENCRYPT;
 
         do {
-                to_process = nbytes - processed;
+                to_process = req->cryptlen - processed;
 
-                rc = nx_build_sg_lists(nx_ctx, desc->info, dst, src,
+                rc = nx_build_sg_lists(nx_ctx, req->iv, req->dst, req->src,
                                        &to_process, processed,
                                        csbcpb->cpb.aes_cbc.iv);
                 if (rc)
@@ -84,56 +82,46 @@ static int cbc_aes_nx_crypt(struct blkcipher_desc *desc,
                 }
 
                 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op,
-                                   desc->flags & CRYPTO_TFM_REQ_MAY_SLEEP);
+                                   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP);
                 if (rc)
                         goto out;
 
-                memcpy(desc->info, csbcpb->cpb.aes_cbc.cv, AES_BLOCK_SIZE);
+                memcpy(req->iv, csbcpb->cpb.aes_cbc.cv, AES_BLOCK_SIZE);
                 atomic_inc(&(nx_ctx->stats->aes_ops));
                 atomic64_add(csbcpb->csb.processed_byte_count,
                              &(nx_ctx->stats->aes_bytes));
 
                 processed += to_process;
-        } while (processed < nbytes);
+        } while (processed < req->cryptlen);
 out:
         spin_unlock_irqrestore(&nx_ctx->lock, irq_flags);
         return rc;
 }
 
-static int cbc_aes_nx_encrypt(struct blkcipher_desc *desc,
-                              struct scatterlist    *dst,
-                              struct scatterlist    *src,
-                              unsigned int           nbytes)
+static int cbc_aes_nx_encrypt(struct skcipher_request *req)
 {
-        return cbc_aes_nx_crypt(desc, dst, src, nbytes, 1);
+        return cbc_aes_nx_crypt(req, 1);
 }
 
-static int cbc_aes_nx_decrypt(struct blkcipher_desc *desc,
-                              struct scatterlist    *dst,
-                              struct scatterlist    *src,
-                              unsigned int           nbytes)
+static int cbc_aes_nx_decrypt(struct skcipher_request *req)
 {
-        return cbc_aes_nx_crypt(desc, dst, src, nbytes, 0);
+        return cbc_aes_nx_crypt(req, 0);
 }
 
-struct crypto_alg nx_cbc_aes_alg = {
-        .cra_name        = "cbc(aes)",
-        .cra_driver_name = "cbc-aes-nx",
-        .cra_priority    = 300,
-        .cra_flags       = CRYPTO_ALG_TYPE_BLKCIPHER,
-        .cra_blocksize   = AES_BLOCK_SIZE,
-        .cra_ctxsize     = sizeof(struct nx_crypto_ctx),
-        .cra_type        = &crypto_blkcipher_type,
-        .cra_alignmask   = 0xf,
-        .cra_module      = THIS_MODULE,
-        .cra_init        = nx_crypto_ctx_aes_cbc_init,
-        .cra_exit        = nx_crypto_ctx_exit,
-        .cra_blkcipher = {
-                .min_keysize = AES_MIN_KEY_SIZE,
-                .max_keysize = AES_MAX_KEY_SIZE,
-                .ivsize      = AES_BLOCK_SIZE,
-                .setkey      = cbc_aes_nx_set_key,
-                .encrypt     = cbc_aes_nx_encrypt,
-                .decrypt     = cbc_aes_nx_decrypt,
-        }
+struct skcipher_alg nx_cbc_aes_alg = {
+        .base.cra_name        = "cbc(aes)",
+        .base.cra_driver_name = "cbc-aes-nx",
+        .base.cra_priority    = 300,
+        .base.cra_blocksize   = AES_BLOCK_SIZE,
+        .base.cra_ctxsize     = sizeof(struct nx_crypto_ctx),
+        .base.cra_alignmask   = 0xf,
+        .base.cra_module      = THIS_MODULE,
+        .init                 = nx_crypto_ctx_aes_cbc_init,
+        .exit                 = nx_crypto_ctx_skcipher_exit,
+        .min_keysize          = AES_MIN_KEY_SIZE,
+        .max_keysize          = AES_MAX_KEY_SIZE,
+        .ivsize               = AES_BLOCK_SIZE,
+        .setkey               = cbc_aes_nx_set_key,
+        .encrypt              = cbc_aes_nx_encrypt,
+        .decrypt              = cbc_aes_nx_decrypt,
 };
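
A note on the converted algorithm definition above: with struct skcipher_alg, the generic cra_* properties stay under .base, while the key-size range, ivsize and the operation callbacks move to the top level. A hypothetical sanity check (check_nx_cbc_geometry() is not a real kernel or driver function) that queries these through the standard accessors might look like:

/* Illustrative only; allocates the NX implementation by driver name. */
#include <crypto/aes.h>
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/errno.h>

static int check_nx_cbc_geometry(void)
{
        struct crypto_skcipher *tfm;
        int ok;

        tfm = crypto_alloc_skcipher("cbc-aes-nx", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        /* Geometry advertised by nx_cbc_aes_alg above. */
        ok = crypto_skcipher_ivsize(tfm) == AES_BLOCK_SIZE &&
             crypto_skcipher_blocksize(tfm) == AES_BLOCK_SIZE &&
             crypto_skcipher_min_keysize(tfm) == AES_MIN_KEY_SIZE &&
             crypto_skcipher_max_keysize(tfm) == AES_MAX_KEY_SIZE;

        crypto_free_skcipher(tfm);
        return ok ? 0 : -EINVAL;
}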

--- a/drivers/crypto/nx/nx.c
+++ b/drivers/crypto/nx/nx.c
@@ -589,7 +589,7 @@ static int nx_register_algs(void)
         if (rc)
                 goto out;
 
-        rc = nx_register_alg(&nx_cbc_aes_alg, NX_FC_AES, NX_MODE_AES_CBC);
+        rc = nx_register_skcipher(&nx_cbc_aes_alg, NX_FC_AES, NX_MODE_AES_CBC);
         if (rc)
                 goto out_unreg_ecb;
 
@@ -647,7 +647,7 @@ out_unreg_gcm:
 out_unreg_ctr3686:
         nx_unregister_alg(&nx_ctr3686_aes_alg, NX_FC_AES, NX_MODE_AES_CTR);
 out_unreg_cbc:
-        nx_unregister_alg(&nx_cbc_aes_alg, NX_FC_AES, NX_MODE_AES_CBC);
+        nx_unregister_skcipher(&nx_cbc_aes_alg, NX_FC_AES, NX_MODE_AES_CBC);
 out_unreg_ecb:
         nx_unregister_skcipher(&nx_ecb_aes_alg, NX_FC_AES, NX_MODE_AES_ECB);
 out:
@@ -722,9 +722,9 @@ int nx_crypto_ctx_aes_ctr_init(struct crypto_tfm *tfm)
                                   NX_MODE_AES_CTR);
 }
 
-int nx_crypto_ctx_aes_cbc_init(struct crypto_tfm *tfm)
+int nx_crypto_ctx_aes_cbc_init(struct crypto_skcipher *tfm)
 {
-        return nx_crypto_ctx_init(crypto_tfm_ctx(tfm), NX_FC_AES,
+        return nx_crypto_ctx_init(crypto_skcipher_ctx(tfm), NX_FC_AES,
                                   NX_MODE_AES_CBC);
 }
 
@@ -817,7 +817,8 @@ static int nx_remove(struct vio_dev *viodev)
                                   NX_FC_AES, NX_MODE_AES_GCM);
                 nx_unregister_alg(&nx_ctr3686_aes_alg,
                                   NX_FC_AES, NX_MODE_AES_CTR);
-                nx_unregister_alg(&nx_cbc_aes_alg, NX_FC_AES, NX_MODE_AES_CBC);
+                nx_unregister_skcipher(&nx_cbc_aes_alg, NX_FC_AES,
+                                       NX_MODE_AES_CBC);
                 nx_unregister_skcipher(&nx_ecb_aes_alg, NX_FC_AES,
                                        NX_MODE_AES_ECB);
         }
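
The nx_register_skcipher()/nx_unregister_skcipher() helpers used above are not part of this diff; they were introduced by the earlier ECB conversion in the same series. Assuming they follow the same pattern as the driver's existing nx_register_alg()/nx_unregister_alg() helpers, gating registration on nx_check_props(), they would look roughly like the following sketch:

/* Assumed shape, mirroring nx_register_alg(); not taken from this patch. */
static int nx_register_skcipher(struct skcipher_alg *alg, u32 fc, u32 mode)
{
        return nx_check_props(&nx_driver.viodev->dev, fc, mode) ?
               crypto_register_skcipher(alg) : 0;
}

static void nx_unregister_skcipher(struct skcipher_alg *alg, u32 fc, u32 mode)
{
        if (nx_check_props(NULL, fc, mode))
                crypto_unregister_skcipher(alg);
}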

--- a/drivers/crypto/nx/nx.h
+++ b/drivers/crypto/nx/nx.h
@@ -146,7 +146,7 @@ int nx_crypto_ctx_aes_ccm_init(struct crypto_aead *tfm);
 int nx_crypto_ctx_aes_gcm_init(struct crypto_aead *tfm);
 int nx_crypto_ctx_aes_xcbc_init(struct crypto_tfm *tfm);
 int nx_crypto_ctx_aes_ctr_init(struct crypto_tfm *tfm);
-int nx_crypto_ctx_aes_cbc_init(struct crypto_tfm *tfm);
+int nx_crypto_ctx_aes_cbc_init(struct crypto_skcipher *tfm);
 int nx_crypto_ctx_aes_ecb_init(struct crypto_skcipher *tfm);
 int nx_crypto_ctx_sha_init(struct crypto_tfm *tfm);
 void nx_crypto_ctx_exit(struct crypto_tfm *tfm);
@@ -176,7 +176,7 @@ void nx_debugfs_fini(struct nx_crypto_driver *);
 #define NX_PAGE_NUM(x)  ((u64)(x) & 0xfffffffffffff000ULL)
 
-extern struct crypto_alg nx_cbc_aes_alg;
+extern struct skcipher_alg nx_cbc_aes_alg;
 extern struct skcipher_alg nx_ecb_aes_alg;
 extern struct aead_alg nx_gcm_aes_alg;
 extern struct aead_alg nx_gcm4106_aes_alg;
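
Finally, a purely illustrative userspace smoke test, assuming the AF_ALG socket interface is enabled (CONFIG_CRYPTO_USER_API_SKCIPHER). It requests "cbc(aes)", which the kernel resolves to the highest-priority registered implementation (on NX hardware this may or may not be cbc-aes-nx, depending on what else is loaded), and encrypts one block with an all-zero key and IV. Error handling is omitted and the program is not part of the patch.

/* Illustrative AF_ALG smoke test for cbc(aes); not part of the kernel patch. */
#include <stdio.h>
#include <stddef.h>
#include <string.h>
#include <unistd.h>
#include <sys/socket.h>
#include <linux/if_alg.h>

int main(void)
{
        struct sockaddr_alg sa = {
                .salg_family = AF_ALG,
                .salg_type   = "skcipher",
                .salg_name   = "cbc(aes)",
        };
        unsigned char key[16] = { 0 };          /* all-zero test key */
        unsigned char iv[16]  = { 0 };          /* all-zero test IV */
        unsigned char pt[16]  = "0123456789abcde";
        unsigned char ct[16];
        char cbuf[CMSG_SPACE(sizeof(__u32)) +
                  CMSG_SPACE(offsetof(struct af_alg_iv, iv) + sizeof(iv))] = { 0 };
        struct iovec iov = { .iov_base = pt, .iov_len = sizeof(pt) };
        struct msghdr msg = {
                .msg_control    = cbuf,
                .msg_controllen = sizeof(cbuf),
                .msg_iov        = &iov,
                .msg_iovlen     = 1,
        };
        struct cmsghdr *cmsg;
        struct af_alg_iv *alg_iv;
        int tfmfd, opfd, i;

        tfmfd = socket(AF_ALG, SOCK_SEQPACKET, 0);
        bind(tfmfd, (struct sockaddr *)&sa, sizeof(sa));
        setsockopt(tfmfd, SOL_ALG, ALG_SET_KEY, key, sizeof(key));
        opfd = accept(tfmfd, NULL, 0);

        /* First cmsg: select the encrypt operation. */
        cmsg = CMSG_FIRSTHDR(&msg);
        cmsg->cmsg_level = SOL_ALG;
        cmsg->cmsg_type  = ALG_SET_OP;
        cmsg->cmsg_len   = CMSG_LEN(sizeof(__u32));
        *(__u32 *)CMSG_DATA(cmsg) = ALG_OP_ENCRYPT;

        /* Second cmsg: pass the IV. */
        cmsg = CMSG_NXTHDR(&msg, cmsg);
        cmsg->cmsg_level = SOL_ALG;
        cmsg->cmsg_type  = ALG_SET_IV;
        cmsg->cmsg_len   = CMSG_LEN(offsetof(struct af_alg_iv, iv) + sizeof(iv));
        alg_iv = (struct af_alg_iv *)CMSG_DATA(cmsg);
        alg_iv->ivlen = sizeof(iv);
        memcpy(alg_iv->iv, iv, sizeof(iv));

        sendmsg(opfd, &msg, 0);
        read(opfd, ct, sizeof(ct));

        for (i = 0; i < (int)sizeof(ct); i++)
                printf("%02x", ct[i]);
        printf("\n");

        close(opfd);
        close(tfmfd);
        return 0;
}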