dm crypt: reuse eboiv skcipher for IV generation
Instead of instantiating a separate cipher to perform the encryption
needed to produce the IV, reuse the skcipher used for the block data
and invoke it one additional time for each block to encrypt a zero
vector and use the output as the IV.

For CBC mode, this is equivalent to using the bare block cipher, but
without the risk of ending up with a non-time invariant implementation
of AES when the skcipher itself is time invariant (e.g., arm64 without
Crypto Extensions has a NEON based time invariant implementation of
cbc(aes), but no time invariant implementation of the core cipher other
than aes-ti, which is not enabled by default).

This approach is a compromise between dm-crypt API flexibility and
reducing dependence on parts of the crypto API that should not usually
be exposed to other subsystems, such as the bare cipher API.

Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Tested-by: Milan Broz <gmazyland@gmail.com>
Signed-off-by: Mike Snitzer <snitzer@redhat.com>
commit 39d13a1ac4
parent 123d87d553
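The CBC equivalence claimed in the commit message is easy to check
outside the kernel: with a zero plaintext block, CBC computes
E_K(IV XOR 0) = E_K(IV), so one extra cbc(aes) invocation over a zero
vector reproduces exactly what the removed bare-cipher call produced.
Below is a minimal userspace sketch of that identity using OpenSSL;
the key and sector values are arbitrary, and none of this code is part
of the commit.

#include <assert.h>
#include <stdint.h>
#include <string.h>
#include <openssl/evp.h>

int main(void)
{
	uint8_t key[16] = { 0x2b, 0x7e, 0x15, 0x16 };	/* arbitrary demo key */
	uint8_t iv[16] = { 0 }, zeroes[16] = { 0 };
	uint8_t out_cbc[32], out_ecb[32];
	uint64_t sector = 12345, sector_size = 512;
	int len;

	/* EBOIV input: little-endian byte offset of the sector, zero
	 * padded to the cipher block size (assumes a little-endian host). */
	uint64_t off = sector * sector_size;
	memcpy(iv, &off, sizeof(off));

	EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();

	/* One cbc(aes) block over a zero vector, keyed with the data key... */
	EVP_EncryptInit_ex(ctx, EVP_aes_128_cbc(), NULL, key, iv);
	EVP_EncryptUpdate(ctx, out_cbc, &len, zeroes, sizeof(zeroes));

	/* ...matches the bare block cipher applied to the IV material. */
	EVP_EncryptInit_ex(ctx, EVP_aes_128_ecb(), NULL, key, NULL);
	EVP_EncryptUpdate(ctx, out_ecb, &len, iv, sizeof(iv));

	assert(memcmp(out_cbc, out_ecb, 16) == 0);

	EVP_CIPHER_CTX_free(ctx);
	return 0;
}

The same identity is what lets the new crypt_iv_eboiv_gen() below feed
the sector offset in as the IV of the data skcipher instead of as its
plaintext.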
diff --git a/drivers/md/dm-crypt.c b/drivers/md/dm-crypt.c
@@ -120,10 +120,6 @@ struct iv_tcw_private {
 	u8 *whitening;
 };
 
-struct iv_eboiv_private {
-	struct crypto_cipher *tfm;
-};
-
 /*
  * Crypt: maps a linear range of a block device
  * and encrypts / decrypts at the same time.
@@ -163,7 +159,6 @@ struct crypt_config {
 		struct iv_benbi_private benbi;
 		struct iv_lmk_private lmk;
 		struct iv_tcw_private tcw;
-		struct iv_eboiv_private eboiv;
 	} iv_gen_private;
 	u64 iv_offset;
 	unsigned int iv_size;
@@ -847,65 +842,47 @@ static int crypt_iv_random_gen(struct crypt_config *cc, u8 *iv,
 	return 0;
 }
 
-static void crypt_iv_eboiv_dtr(struct crypt_config *cc)
-{
-	struct iv_eboiv_private *eboiv = &cc->iv_gen_private.eboiv;
-
-	crypto_free_cipher(eboiv->tfm);
-	eboiv->tfm = NULL;
-}
-
 static int crypt_iv_eboiv_ctr(struct crypt_config *cc, struct dm_target *ti,
 			      const char *opts)
 {
-	struct iv_eboiv_private *eboiv = &cc->iv_gen_private.eboiv;
-	struct crypto_cipher *tfm;
-
-	tfm = crypto_alloc_cipher(cc->cipher, 0, 0);
-	if (IS_ERR(tfm)) {
-		ti->error = "Error allocating crypto tfm for EBOIV";
-		return PTR_ERR(tfm);
-	}
-
-	if (crypto_cipher_blocksize(tfm) != cc->iv_size) {
-		ti->error = "Block size of EBOIV cipher does "
-			    "not match IV size of block cipher";
-		crypto_free_cipher(tfm);
+	if (test_bit(CRYPT_MODE_INTEGRITY_AEAD, &cc->cipher_flags)) {
+		ti->error = "AEAD transforms not supported for EBOIV";
 		return -EINVAL;
 	}
 
-	eboiv->tfm = tfm;
-	return 0;
-}
-
-static int crypt_iv_eboiv_init(struct crypt_config *cc)
-{
-	struct iv_eboiv_private *eboiv = &cc->iv_gen_private.eboiv;
-	int err;
-
-	err = crypto_cipher_setkey(eboiv->tfm, cc->key, cc->key_size);
-	if (err)
-		return err;
+	if (crypto_skcipher_blocksize(any_tfm(cc)) != cc->iv_size) {
+		ti->error = "Block size of EBOIV cipher does "
+			    "not match IV size of block cipher";
+		return -EINVAL;
+	}
 
 	return 0;
 }
 
-static int crypt_iv_eboiv_wipe(struct crypt_config *cc)
-{
-	/* Called after cc->key is set to random key in crypt_wipe() */
-	return crypt_iv_eboiv_init(cc);
-}
-
 static int crypt_iv_eboiv_gen(struct crypt_config *cc, u8 *iv,
 			      struct dm_crypt_request *dmreq)
 {
-	struct iv_eboiv_private *eboiv = &cc->iv_gen_private.eboiv;
+	u8 buf[MAX_CIPHER_BLOCKSIZE] __aligned(__alignof__(__le64));
+	struct skcipher_request *req;
+	struct scatterlist src, dst;
+	struct crypto_wait wait;
+	int err;
 
-	memset(iv, 0, cc->iv_size);
-	*(__le64 *)iv = cpu_to_le64(dmreq->iv_sector * cc->sector_size);
-	crypto_cipher_encrypt_one(eboiv->tfm, iv, iv);
+	req = skcipher_request_alloc(any_tfm(cc), GFP_KERNEL | GFP_NOFS);
+	if (!req)
+		return -ENOMEM;
 
-	return 0;
+	memset(buf, 0, cc->iv_size);
+	*(__le64 *)buf = cpu_to_le64(dmreq->iv_sector * cc->sector_size);
+
+	sg_init_one(&src, page_address(ZERO_PAGE(0)), cc->iv_size);
+	sg_init_one(&dst, iv, cc->iv_size);
+	skcipher_request_set_crypt(req, &src, &dst, cc->iv_size, buf);
+	skcipher_request_set_callback(req, 0, crypto_req_done, &wait);
+	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
+	skcipher_request_free(req);
+
+	return err;
 }
 
 static const struct crypt_iv_operations crypt_iv_plain_ops = {
@@ -962,9 +939,6 @@ static struct crypt_iv_operations crypt_iv_random_ops = {
 
 static struct crypt_iv_operations crypt_iv_eboiv_ops = {
 	.ctr = crypt_iv_eboiv_ctr,
-	.dtr = crypt_iv_eboiv_dtr,
-	.init = crypt_iv_eboiv_init,
-	.wipe = crypt_iv_eboiv_wipe,
 	.generator = crypt_iv_eboiv_gen
 };
 
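For reference, crypt_iv_eboiv_gen() above drives a possibly
asynchronous skcipher synchronously through the crypto_req_done() /
crypto_wait_req() pair. A condensed sketch of that idiom follows;
encrypt_one_block_sync() is a hypothetical helper, not code from this
commit, and it uses DECLARE_CRYPTO_WAIT(), which also initialises the
completion that the callback signals.

#include <crypto/skcipher.h>
#include <linux/scatterlist.h>

/* Hypothetical helper: run one skcipher encryption and wait for it.
 * src/dst must be linearly mapped (not stack memory), since they are
 * handed to the crypto API through scatterlists. */
static int encrypt_one_block_sync(struct crypto_skcipher *tfm,
				  void *dst, const void *src,
				  u8 *iv, unsigned int len)
{
	DECLARE_CRYPTO_WAIT(wait);	/* completion is initialised here */
	struct skcipher_request *req;
	struct scatterlist sg_src, sg_dst;
	int err;

	req = skcipher_request_alloc(tfm, GFP_NOIO);
	if (!req)
		return -ENOMEM;

	sg_init_one(&sg_src, src, len);
	sg_init_one(&sg_dst, dst, len);
	skcipher_request_set_crypt(req, &sg_src, &sg_dst, len, iv);
	skcipher_request_set_callback(req, 0, crypto_req_done, &wait);

	/* A synchronous result is returned directly; -EINPROGRESS makes
	 * crypto_wait_req() sleep until crypto_req_done() fires. */
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
	skcipher_request_free(req);
	return err;
}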