crypto: atmel-aes - Fix saving of IV for CTR mode
The req->iv of the skcipher_request is expected to contain the
last used IV. Update the req->iv for CTR mode.
Fixes: bd3c7b5c2a ("crypto: atmel - add Atmel AES driver")
Signed-off-by: Tudor Ambarus <tudor.ambarus@microchip.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
commit 371731ec21
parent 781a08d974
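
Annotation, not commit content: the new atmel_aes_ctr_update_req_iv() helper recomputes the last used IV by incrementing the saved counter block once per processed block, via the kernel's crypto_inc(), which treats the 16-byte block as a big-endian integer. A minimal user-space sketch of that arithmetic, with a hypothetical ctr_inc() standing in for crypto_inc():

#include <stdint.h>
#include <stdio.h>

#define AES_BLOCK_SIZE 16

static void ctr_inc(uint8_t *ctr, unsigned int size)
{
	/* Big-endian increment: the carry ripples up from the last byte. */
	for (int i = (int)size - 1; i >= 0; i--)
		if (++ctr[i] != 0)
			break;
}

int main(void)
{
	/* IV whose low 32 bits are 0xfffffffe, to exercise the carry. */
	uint8_t iv[AES_BLOCK_SIZE] = {
		[12] = 0xff, [13] = 0xff, [14] = 0xff, [15] = 0xfe,
	};
	unsigned int blocks = 3;	/* blocks processed by the request */

	/* One increment per processed block, as the driver now does. */
	for (unsigned int i = 0; i < blocks; i++)
		ctr_inc(iv, AES_BLOCK_SIZE);

	/* 0xfffffffe + 3 = 0x1_00000001: the carry reaches byte 11. */
	for (int i = 0; i < AES_BLOCK_SIZE; i++)
		printf("%02x", iv[i]);
	putchar('\n');
	return 0;
}

This is exactly the value a caller chaining skcipher requests expects to find in req->iv after the request completes.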
diff --git a/drivers/crypto/atmel-aes.c b/drivers/crypto/atmel-aes.c
--- a/drivers/crypto/atmel-aes.c
+++ b/drivers/crypto/atmel-aes.c
@@ -120,6 +120,7 @@ struct atmel_aes_ctr_ctx {
 	size_t			offset;
 	struct scatterlist	src[2];
 	struct scatterlist	dst[2];
+	u16			blocks;
 };
 
 struct atmel_aes_gcm_ctx {
@@ -512,6 +513,26 @@ static void atmel_aes_set_iv_as_last_ciphertext_block(struct atmel_aes_dev *dd)
 	}
 }
 
+static inline struct atmel_aes_ctr_ctx *
+atmel_aes_ctr_ctx_cast(struct atmel_aes_base_ctx *ctx)
+{
+	return container_of(ctx, struct atmel_aes_ctr_ctx, base);
+}
+
+static void atmel_aes_ctr_update_req_iv(struct atmel_aes_dev *dd)
+{
+	struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx);
+	struct skcipher_request *req = skcipher_request_cast(dd->areq);
+	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
+	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
+	int i;
+
+	for (i = 0; i < ctx->blocks; i++)
+		crypto_inc((u8 *)ctx->iv, AES_BLOCK_SIZE);
+
+	memcpy(req->iv, ctx->iv, ivsize);
+}
+
 static inline int atmel_aes_complete(struct atmel_aes_dev *dd, int err)
 {
 	struct skcipher_request *req = skcipher_request_cast(dd->areq);
@@ -526,8 +547,12 @@ static inline int atmel_aes_complete(struct atmel_aes_dev *dd, int err)
 	dd->flags &= ~AES_FLAGS_BUSY;
 
 	if (!dd->ctx->is_aead &&
-	    (rctx->mode & AES_FLAGS_OPMODE_MASK) != AES_FLAGS_ECB)
-		atmel_aes_set_iv_as_last_ciphertext_block(dd);
+	    (rctx->mode & AES_FLAGS_OPMODE_MASK) != AES_FLAGS_ECB) {
+		if ((rctx->mode & AES_FLAGS_OPMODE_MASK) != AES_FLAGS_CTR)
+			atmel_aes_set_iv_as_last_ciphertext_block(dd);
+		else
+			atmel_aes_ctr_update_req_iv(dd);
+	}
 
 	if (dd->is_async)
 		dd->areq->complete(dd->areq, err);
@@ -1006,12 +1031,6 @@ static int atmel_aes_start(struct atmel_aes_dev *dd)
 				   atmel_aes_transfer_complete);
 }
 
-static inline struct atmel_aes_ctr_ctx *
-atmel_aes_ctr_ctx_cast(struct atmel_aes_base_ctx *ctx)
-{
-	return container_of(ctx, struct atmel_aes_ctr_ctx, base);
-}
-
 static int atmel_aes_ctr_transfer(struct atmel_aes_dev *dd)
 {
 	struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx);
@@ -1019,7 +1038,7 @@ static int atmel_aes_ctr_transfer(struct atmel_aes_dev *dd)
 	struct scatterlist *src, *dst;
 	size_t datalen;
 	u32 ctr;
-	u16 blocks, start, end;
+	u16 start, end;
 	bool use_dma, fragmented = false;
 
 	/* Check for transfer completion. */
@@ -1029,14 +1048,14 @@ static int atmel_aes_ctr_transfer(struct atmel_aes_dev *dd)
 
 	/* Compute data length. */
 	datalen = req->cryptlen - ctx->offset;
-	blocks = DIV_ROUND_UP(datalen, AES_BLOCK_SIZE);
+	ctx->blocks = DIV_ROUND_UP(datalen, AES_BLOCK_SIZE);
 	ctr = be32_to_cpu(ctx->iv[3]);
 
 	/* Check 16bit counter overflow. */
 	start = ctr & 0xffff;
-	end = start + blocks - 1;
+	end = start + ctx->blocks - 1;
 
-	if (blocks >> 16 || end < start) {
+	if (ctx->blocks >> 16 || end < start) {
 		ctr |= 0xffff;
 		datalen = AES_BLOCK_SIZE * (0x10000 - start);
 		fragmented = true;
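
Annotation on the last hunk, again not commit content: the controller's counter is only 16 bits wide, so a transfer whose block range would cross a 16-bit counter boundary is split, and only 16 * (0x10000 - start) bytes are processed before the IV is reloaded. A hypothetical, self-contained sketch of that check, with names mirroring the driver's locals:

#include <stdint.h>
#include <stddef.h>
#include <stdio.h>

#define AES_BLOCK_SIZE	16
#define DIV_ROUND_UP(n, d)	(((n) + (d) - 1) / (d))

int main(void)
{
	size_t datalen = 4096;		/* bytes left in the request */
	uint32_t ctr = 0x1234fff0;	/* low 32 bits of the IV counter */
	size_t blocks = DIV_ROUND_UP(datalen, AES_BLOCK_SIZE);	/* 256 */
	uint16_t start = ctr & 0xffff;			/* 0xfff0 */
	uint16_t end = (uint16_t)(start + blocks - 1);	/* wraps to 0x00ef */

	if (blocks >> 16 || end < start) {
		/* The 16-bit hardware counter would wrap mid-transfer:
		 * process only up to block 0xffff now, then reload the
		 * IV and continue with the remainder (fragmented). */
		datalen = AES_BLOCK_SIZE * (size_t)(0x10000 - start);
		printf("fragmented: %zu bytes this pass\n", datalen);
	} else {
		printf("single pass: %zu bytes\n", datalen);
	}
	return 0;
}

With these inputs the check fires and prints "fragmented: 256 bytes this pass": 16 blocks fit before the counter wraps, so 16 * 16 = 256 bytes go out in the first fragment.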