kernel_optimize_test/crypto/rng.c
Gideon Israel Dsouza d8c34b949d crypto: Replaced gcc specific attributes with macros from compiler.h
Continuing from this commit: 52f5684c8e
("kernel: use macros from compiler.h instead of __attribute__((...))")

I have submitted four patches in total. They are part of a task I've taken
up to increase compiler portability in the kernel. I've already cleaned up
the subsystems under /kernel, /mm, /block and /security; this patch targets
/crypto.

<linux/compiler.h> provides macros for various gcc-specific constructs,
e.g. __weak for __attribute__((weak)). I've replaced all instances of
gcc-specific attributes with the corresponding macros in the crypto
subsystem.

I had to make one additional change to compiler-gcc.h for the case where
one wants to use __attribute__((aligned)) without specifying an alignment
factor. Per the gcc docs, this results in the largest alignment for that
data type on the target machine, so I've named the macro
__aligned_largest. Please advise if another name is more appropriate.
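
To make the change concrete, here is a rough sketch (not a verbatim excerpt
of the patch): the macro definitions follow the style of
<linux/compiler-gcc.h>, and the "scratch" buffers are hypothetical use
sites invented purely for illustration.

	/* Wrappers for the gcc-specific attribute syntax, in the style of
	 * <linux/compiler-gcc.h>. __aligned_largest covers an aligned
	 * attribute with no explicit alignment factor.
	 */
	#define __weak            __attribute__((weak))
	#define __aligned(x)      __attribute__((aligned(x)))
	#define __aligned_largest __attribute__((aligned))

	/* Hypothetical use site, before: the attribute spelled out directly. */
	static unsigned char scratch_before[64] __attribute__((aligned));

	/* Hypothetical use site, after: the portable macro from compiler.h. */
	static unsigned char scratch_after[64] __aligned_largest;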

Signed-off-by: Gideon Israel Dsouza <gidisrael@gmail.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
2017-01-13 00:24:39 +08:00

/*
 * Cryptographic API.
 *
 * RNG operations.
 *
 * Copyright (c) 2008 Neil Horman <nhorman@tuxdriver.com>
 * Copyright (c) 2015 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <linux/atomic.h>
#include <crypto/internal/rng.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/mutex.h>
#include <linux/random.h>
#include <linux/seq_file.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/cryptouser.h>
#include <linux/compiler.h>
#include <net/netlink.h>

#include "internal.h"

static DEFINE_MUTEX(crypto_default_rng_lock);
struct crypto_rng *crypto_default_rng;
EXPORT_SYMBOL_GPL(crypto_default_rng);
static int crypto_default_rng_refcnt;

static inline struct crypto_rng *__crypto_rng_cast(struct crypto_tfm *tfm)
{
	return container_of(tfm, struct crypto_rng, base);
}

int crypto_rng_reset(struct crypto_rng *tfm, const u8 *seed, unsigned int slen)
{
	u8 *buf = NULL;
	int err;

	/* If the caller supplies no seed, generate one with get_random_bytes(). */
	if (!seed && slen) {
		buf = kmalloc(slen, GFP_KERNEL);
		if (!buf)
			return -ENOMEM;

		get_random_bytes(buf, slen);
		seed = buf;
	}

	err = crypto_rng_alg(tfm)->seed(tfm, seed, slen);

	kzfree(buf);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_rng_reset);

static int crypto_rng_init_tfm(struct crypto_tfm *tfm)
{
	return 0;
}

static unsigned int seedsize(struct crypto_alg *alg)
{
	struct rng_alg *ralg = container_of(alg, struct rng_alg, base);

	return ralg->seedsize;
}

#ifdef CONFIG_NET
static int crypto_rng_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_rng rrng;

	strncpy(rrng.type, "rng", sizeof(rrng.type));

	rrng.seedsize = seedsize(alg);

	if (nla_put(skb, CRYPTOCFGA_REPORT_RNG,
		    sizeof(struct crypto_report_rng), &rrng))
		goto nla_put_failure;
	return 0;

nla_put_failure:
	return -EMSGSIZE;
}
#else
static int crypto_rng_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif

static void crypto_rng_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_rng_show(struct seq_file *m, struct crypto_alg *alg)
{
	seq_printf(m, "type         : rng\n");
	seq_printf(m, "seedsize     : %u\n", seedsize(alg));
}

static const struct crypto_type crypto_rng_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_rng_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_rng_show,
#endif
	.report = crypto_rng_report,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_RNG,
	.tfmsize = offsetof(struct crypto_rng, base),
};

struct crypto_rng *crypto_alloc_rng(const char *alg_name, u32 type, u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_rng_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_rng);

int crypto_get_default_rng(void)
{
	struct crypto_rng *rng;
	int err;

	mutex_lock(&crypto_default_rng_lock);
	if (!crypto_default_rng) {
		/* Lazily allocate and seed the shared "stdrng" instance. */
		rng = crypto_alloc_rng("stdrng", 0, 0);
		err = PTR_ERR(rng);
		if (IS_ERR(rng))
			goto unlock;

		err = crypto_rng_reset(rng, NULL, crypto_rng_seedsize(rng));
		if (err) {
			crypto_free_rng(rng);
			goto unlock;
		}

		crypto_default_rng = rng;
	}

	crypto_default_rng_refcnt++;
	err = 0;

unlock:
	mutex_unlock(&crypto_default_rng_lock);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_get_default_rng);

void crypto_put_default_rng(void)
{
	mutex_lock(&crypto_default_rng_lock);
	crypto_default_rng_refcnt--;
	mutex_unlock(&crypto_default_rng_lock);
}
EXPORT_SYMBOL_GPL(crypto_put_default_rng);

#if defined(CONFIG_CRYPTO_RNG) || defined(CONFIG_CRYPTO_RNG_MODULE)
int crypto_del_default_rng(void)
{
	int err = -EBUSY;

	mutex_lock(&crypto_default_rng_lock);
	if (crypto_default_rng_refcnt)
		goto out;

	crypto_free_rng(crypto_default_rng);
	crypto_default_rng = NULL;

	err = 0;

out:
	mutex_unlock(&crypto_default_rng_lock);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_del_default_rng);
#endif

int crypto_register_rng(struct rng_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->seedsize > PAGE_SIZE / 8)
		return -EINVAL;

	base->cra_type = &crypto_rng_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_RNG;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_rng);

void crypto_unregister_rng(struct rng_alg *alg)
{
	crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_rng);

int crypto_register_rngs(struct rng_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_rng(algs + i);
		if (ret)
			goto err;
	}

	return 0;

err:
	/* Unwind: unregister everything registered so far. */
	for (--i; i >= 0; --i)
		crypto_unregister_rng(algs + i);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_rngs);

void crypto_unregister_rngs(struct rng_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_rng(algs + i);
}
EXPORT_SYMBOL_GPL(crypto_unregister_rngs);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Random Number Generator");