crypto: aes_ti - disable interrupts while accessing S-box
In the "aes-fixed-time" AES implementation, disable interrupts while accessing the S-box, in order to make cache-timing attacks more difficult. Previously it was possible for the CPU to be interrupted while the S-box was loaded into L1 cache, potentially evicting the cachelines and causing later table lookups to be time-variant. In tests I did on x86 and ARM, this doesn't affect performance significantly. Responsiveness is potentially a concern, but interrupts are only disabled for a single AES block. Note that even after this change, the implementation still isn't necessarily guaranteed to be constant-time; see https://cr.yp.to/antiforgery/cachetiming-20050414.pdf for a discussion of the many difficulties involved in writing truly constant-time AES software. But it's valuable to make such attacks more difficult. Reviewed-by: Ard Biesheuvel <ard.biesheuvel@linaro.org> Signed-off-by: Eric Biggers <ebiggers@google.com> Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
commit 0a6a40c2a8
parent 9f4debe384
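The change below follows one simple pattern: pull the whole S-box into
L1, then keep interrupts off until the block is finished so nothing can
evict it mid-computation. Here is a minimal sketch of that pattern (not
the actual aes_ti.c code, which appears in the hunks below); the
prefetch_sbox() and aes_one_block() helpers are hypothetical
placeholders, while local_irq_save()/local_irq_restore() are the real
kernel primitives the patch uses:

#include <linux/irqflags.h>	/* local_irq_save() / local_irq_restore() */
#include <linux/types.h>

static void fixed_time_encrypt(const u8 *in, u8 *out)
{
	unsigned long flags;

	/*
	 * From here until the restore, no interrupt handler can run on
	 * this CPU, so nothing can execute that would evict the S-box.
	 */
	local_irq_save(flags);

	prefetch_sbox();	/* hypothetical: touch every S-box cacheline */
	aes_one_block(in, out);	/* hypothetical: table lookups now hit L1 */

	local_irq_restore(flags);	/* put the IRQ flag back as it was */
}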
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -1006,7 +1006,8 @@ config CRYPTO_AES_TI
 	  8 for decryption), this implementation only uses just two S-boxes of
 	  256 bytes each, and attempts to eliminate data dependent latencies by
 	  prefetching the entire table into the cache at the start of each
-	  block.
+	  block. Interrupts are also disabled to avoid races where cachelines
+	  are evicted when the CPU is interrupted to do something else.
 
 config CRYPTO_AES_586
 	tristate "AES cipher algorithms (i586)"
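The "prefetching" that the help text mentions is not done with a special
instruction. As the hunks below show, the state words are XORed with
S-box bytes read from fixed, key-independent indices spread across the
table, which drags the whole 256-byte table into the D-cache before the
first data-dependent lookup. A loop-based sketch of the same idea,
assuming a hypothetical prefetch_table() helper and using
L1_CACHE_BYTES from <linux/cache.h> (the real code unrolls these loads
and folds them into the state instead):

#include <linux/cache.h>
#include <linux/types.h>

/*
 * Touch one byte in every cacheline of a 256-byte table. The XOR
 * accumulator is returned (and consumed by the caller) so the compiler
 * cannot discard the loads as dead code.
 */
static u8 prefetch_table(const u8 table[256])
{
	u8 acc = 0;
	int i;

	for (i = 0; i < 256; i += L1_CACHE_BYTES)
		acc ^= table[i];	/* forces a load of this cacheline */

	return acc;
}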
--- a/crypto/aes_ti.c
+++ b/crypto/aes_ti.c
@@ -269,6 +269,7 @@ static void aesti_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 	const u32 *rkp = ctx->key_enc + 4;
 	int rounds = 6 + ctx->key_length / 4;
 	u32 st0[4], st1[4];
+	unsigned long flags;
 	int round;
 
 	st0[0] = ctx->key_enc[0] ^ get_unaligned_le32(in);
@@ -276,6 +277,12 @@ static void aesti_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 	st0[2] = ctx->key_enc[2] ^ get_unaligned_le32(in + 8);
 	st0[3] = ctx->key_enc[3] ^ get_unaligned_le32(in + 12);
 
+	/*
+	 * Temporarily disable interrupts to avoid races where cachelines are
+	 * evicted when the CPU is interrupted to do something else.
+	 */
+	local_irq_save(flags);
+
 	st0[0] ^= __aesti_sbox[ 0] ^ __aesti_sbox[128];
 	st0[1] ^= __aesti_sbox[32] ^ __aesti_sbox[160];
 	st0[2] ^= __aesti_sbox[64] ^ __aesti_sbox[192];
@@ -300,6 +307,8 @@ static void aesti_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 	put_unaligned_le32(subshift(st1, 1) ^ rkp[5], out + 4);
 	put_unaligned_le32(subshift(st1, 2) ^ rkp[6], out + 8);
 	put_unaligned_le32(subshift(st1, 3) ^ rkp[7], out + 12);
+
+	local_irq_restore(flags);
 }
 
 static void aesti_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
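Worth noting in the encrypt hunks above: the patch uses the save/restore
variants rather than local_irq_disable()/local_irq_enable().
local_irq_restore() returns the CPU to whatever interrupt state the
caller had, so the cipher stays correct even when called from a context
that already runs with interrupts off. A small sketch of why that
matters, with a hypothetical caller:

#include <linux/irqflags.h>

static void caller_with_irqs_already_off(void)
{
	unsigned long outer, inner;

	local_irq_save(outer);		/* IRQs now off, old state in outer */

	local_irq_save(inner);		/* nested: still off, state saved */
	/* ... one AES block, S-box guaranteed resident ... */
	local_irq_restore(inner);	/* IRQs stay off, as the caller expects */

	local_irq_restore(outer);	/* original interrupt state back */
}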
@@ -308,6 +317,7 @@ static void aesti_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 	const u32 *rkp = ctx->key_dec + 4;
 	int rounds = 6 + ctx->key_length / 4;
 	u32 st0[4], st1[4];
+	unsigned long flags;
 	int round;
 
 	st0[0] = ctx->key_dec[0] ^ get_unaligned_le32(in);
@@ -315,6 +325,12 @@ static void aesti_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 	st0[2] = ctx->key_dec[2] ^ get_unaligned_le32(in + 8);
 	st0[3] = ctx->key_dec[3] ^ get_unaligned_le32(in + 12);
 
+	/*
+	 * Temporarily disable interrupts to avoid races where cachelines are
+	 * evicted when the CPU is interrupted to do something else.
+	 */
+	local_irq_save(flags);
+
 	st0[0] ^= __aesti_inv_sbox[ 0] ^ __aesti_inv_sbox[128];
 	st0[1] ^= __aesti_inv_sbox[32] ^ __aesti_inv_sbox[160];
 	st0[2] ^= __aesti_inv_sbox[64] ^ __aesti_inv_sbox[192];
@@ -339,6 +355,8 @@ static void aesti_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 	put_unaligned_le32(inv_subshift(st1, 1) ^ rkp[5], out + 4);
 	put_unaligned_le32(inv_subshift(st1, 2) ^ rkp[6], out + 8);
 	put_unaligned_le32(inv_subshift(st1, 3) ^ rkp[7], out + 12);
+
+	local_irq_restore(flags);
 }
 
 static struct crypto_alg aes_alg = {
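The aes_alg definition at the last context line above is what registers
this cipher with the kernel crypto API; per the commit message, its
driver name is "aes-fixed-time". As a hedged sketch of how a kernel user
would exercise it through the standard single-block cipher interface
(the demo function is hypothetical; the crypto_* calls are the ordinary
API, error handling abbreviated):

#include <linux/crypto.h>
#include <linux/err.h>

/* Encrypt one 16-byte block, requesting this implementation by driver
 * name rather than the generic "aes". */
static int demo_encrypt_block(const u8 *key, unsigned int keylen,
			      const u8 in[16], u8 out[16])
{
	struct crypto_cipher *tfm;
	int err;

	tfm = crypto_alloc_cipher("aes-fixed-time", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_cipher_setkey(tfm, key, keylen);
	if (!err)
		crypto_cipher_encrypt_one(tfm, out, in); /* IRQs off inside */

	crypto_free_cipher(tfm);
	return err;
}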