crypto: cryptd - Use refcount_t for refcount
Reference counters should use refcount_t instead of atomic_t, because the
refcount_t implementation can prevent overflows and detect possible
use-after-free. So convert the atomic_t reference counters to refcount_t.

Signed-off-by: Chuhong Yuan <hslester96@gmail.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
This commit is contained in:
parent 03a3bb7ae6
commit 43b970fa82
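
For context: refcount_t wraps atomic_t with saturation semantics. When the
counter would overflow it is pinned at a saturation value and the kernel emits
a warning, and operating on a counter that has already dropped to zero warns
as well, so a leaked or double-dropped reference becomes a detectable
condition instead of a silent wrap that can lead to use-after-free. A minimal
sketch of the API this patch switches to (the struct and helper names are
illustrative, not from the kernel):

    #include <linux/refcount.h>

    struct obj {
            refcount_t refcnt;
    };

    static void obj_get(struct obj *o)
    {
            /* Saturates and WARNs on overflow instead of wrapping. */
            refcount_inc(&o->refcnt);
    }

    static bool obj_put(struct obj *o)
    {
            /*
             * Returns true exactly once, when the count reaches zero;
             * decrementing an already-zero counter WARNs instead of
             * wrapping, which catches use-after-free bugs.
             */
            return refcount_dec_and_test(&o->refcnt);
    }
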
crypto/cryptd.c

@@ -16,7 +16,7 @@
 #include <crypto/internal/aead.h>
 #include <crypto/internal/skcipher.h>
 #include <crypto/cryptd.h>
-#include <linux/atomic.h>
+#include <linux/refcount.h>
 #include <linux/err.h>
 #include <linux/init.h>
 #include <linux/kernel.h>
@@ -63,7 +63,7 @@ struct aead_instance_ctx {
 };
 
 struct cryptd_skcipher_ctx {
-	atomic_t refcnt;
+	refcount_t refcnt;
 	struct crypto_sync_skcipher *child;
 };
 
@@ -72,7 +72,7 @@ struct cryptd_skcipher_request_ctx {
 };
 
 struct cryptd_hash_ctx {
-	atomic_t refcnt;
+	refcount_t refcnt;
 	struct crypto_shash *child;
 };
 
@@ -82,7 +82,7 @@ struct cryptd_hash_request_ctx {
 };
 
 struct cryptd_aead_ctx {
-	atomic_t refcnt;
+	refcount_t refcnt;
 	struct crypto_aead *child;
 };
 
@@ -127,7 +127,7 @@ static int cryptd_enqueue_request(struct cryptd_queue *queue,
 {
 	int cpu, err;
 	struct cryptd_cpu_queue *cpu_queue;
-	atomic_t *refcnt;
+	refcount_t *refcnt;
 
 	cpu = get_cpu();
 	cpu_queue = this_cpu_ptr(queue->cpu_queue);
@@ -140,10 +140,10 @@ static int cryptd_enqueue_request(struct cryptd_queue *queue,
 
 	queue_work_on(cpu, cryptd_wq, &cpu_queue->work);
 
-	if (!atomic_read(refcnt))
+	if (!refcount_read(refcnt))
 		goto out_put_cpu;
 
-	atomic_inc(refcnt);
+	refcount_inc(refcnt);
 
 out_put_cpu:
 	put_cpu();
@@ -270,13 +270,13 @@ static void cryptd_skcipher_complete(struct skcipher_request *req, int err)
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
 	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
-	int refcnt = atomic_read(&ctx->refcnt);
+	int refcnt = refcount_read(&ctx->refcnt);
 
 	local_bh_disable();
 	rctx->complete(&req->base, err);
 	local_bh_enable();
 
-	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
+	if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
 		crypto_free_skcipher(tfm);
 }
 
@@ -521,13 +521,13 @@ static void cryptd_hash_complete(struct ahash_request *req, int err)
 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
 	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
 	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
-	int refcnt = atomic_read(&ctx->refcnt);
+	int refcnt = refcount_read(&ctx->refcnt);
 
 	local_bh_disable();
 	rctx->complete(&req->base, err);
 	local_bh_enable();
 
-	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
+	if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
 		crypto_free_ahash(tfm);
 }
 
@@ -772,13 +772,13 @@ static void cryptd_aead_crypt(struct aead_request *req,
 
 out:
 	ctx = crypto_aead_ctx(tfm);
-	refcnt = atomic_read(&ctx->refcnt);
+	refcnt = refcount_read(&ctx->refcnt);
 
 	local_bh_disable();
 	compl(&req->base, err);
 	local_bh_enable();
 
-	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
+	if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
 		crypto_free_aead(tfm);
 }
 
@@ -979,7 +979,7 @@ struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
 	}
 
 	ctx = crypto_skcipher_ctx(tfm);
-	atomic_set(&ctx->refcnt, 1);
+	refcount_set(&ctx->refcnt, 1);
 
 	return container_of(tfm, struct cryptd_skcipher, base);
 }
@@ -997,7 +997,7 @@ bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm)
 {
 	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);
 
-	return atomic_read(&ctx->refcnt) - 1;
+	return refcount_read(&ctx->refcnt) - 1;
 }
 EXPORT_SYMBOL_GPL(cryptd_skcipher_queued);
 
@@ -1005,7 +1005,7 @@ void cryptd_free_skcipher(struct cryptd_skcipher *tfm)
 {
 	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);
 
-	if (atomic_dec_and_test(&ctx->refcnt))
+	if (refcount_dec_and_test(&ctx->refcnt))
 		crypto_free_skcipher(&tfm->base);
 }
 EXPORT_SYMBOL_GPL(cryptd_free_skcipher);
@@ -1029,7 +1029,7 @@ struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
 	}
 
 	ctx = crypto_ahash_ctx(tfm);
-	atomic_set(&ctx->refcnt, 1);
+	refcount_set(&ctx->refcnt, 1);
 
 	return __cryptd_ahash_cast(tfm);
 }
@@ -1054,7 +1054,7 @@ bool cryptd_ahash_queued(struct cryptd_ahash *tfm)
 {
 	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);
 
-	return atomic_read(&ctx->refcnt) - 1;
+	return refcount_read(&ctx->refcnt) - 1;
 }
 EXPORT_SYMBOL_GPL(cryptd_ahash_queued);
 
@@ -1062,7 +1062,7 @@ void cryptd_free_ahash(struct cryptd_ahash *tfm)
 {
 	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);
 
-	if (atomic_dec_and_test(&ctx->refcnt))
+	if (refcount_dec_and_test(&ctx->refcnt))
 		crypto_free_ahash(&tfm->base);
 }
 EXPORT_SYMBOL_GPL(cryptd_free_ahash);
@@ -1086,7 +1086,7 @@ struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
 	}
 
 	ctx = crypto_aead_ctx(tfm);
-	atomic_set(&ctx->refcnt, 1);
+	refcount_set(&ctx->refcnt, 1);
 
 	return __cryptd_aead_cast(tfm);
 }
@@ -1104,7 +1104,7 @@ bool cryptd_aead_queued(struct cryptd_aead *tfm)
 {
 	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);
 
-	return atomic_read(&ctx->refcnt) - 1;
+	return refcount_read(&ctx->refcnt) - 1;
 }
 EXPORT_SYMBOL_GPL(cryptd_aead_queued);
 
@@ -1112,7 +1112,7 @@ void cryptd_free_aead(struct cryptd_aead *tfm)
 {
 	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);
 
-	if (atomic_dec_and_test(&ctx->refcnt))
+	if (refcount_dec_and_test(&ctx->refcnt))
 		crypto_free_aead(&tfm->base);
 }
 EXPORT_SYMBOL_GPL(cryptd_free_aead);
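
Taken together, the hunks above preserve cryptd's existing lifecycle:
cryptd_alloc_skcipher/ahash/aead() start the counter at 1 for the allocation
reference, cryptd_enqueue_request() takes an extra reference for each
in-flight request, the completion paths drop it, and cryptd_free_*() drops
the allocation reference, with the transform freed by whichever side brings
the count to zero. A minimal sketch of that pattern, using illustrative
names rather than the kernel's:

    #include <linux/refcount.h>
    #include <linux/slab.h>

    struct demo_ctx {
            refcount_t refcnt;
    };

    static struct demo_ctx *demo_alloc(void)
    {
            struct demo_ctx *ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);

            if (!ctx)
                    return NULL;
            refcount_set(&ctx->refcnt, 1);  /* allocation reference */
            return ctx;
    }

    static void demo_enqueue(struct demo_ctx *ctx)
    {
            refcount_inc(&ctx->refcnt);     /* one ref per queued request */
    }

    static void demo_complete(struct demo_ctx *ctx)
    {
            /* Frees only if the owner already dropped its reference. */
            if (refcount_dec_and_test(&ctx->refcnt))
                    kfree(ctx);
    }

    static void demo_free(struct demo_ctx *ctx)
    {
            /* Drop the allocation ref; frees only if no requests remain. */
            if (refcount_dec_and_test(&ctx->refcnt))
                    kfree(ctx);
    }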