crypto: chelsio - replace AES cipher calls with library calls
Replace a couple of occurrences where the "aes-generic" cipher is instantiated explicitly and only used for encryption of a single block. Use AES library calls instead.

Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
parent 6273fd7a5a
commit 571c47ab98
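To make the change pattern concrete, here is a minimal sketch of the before/after shape of these call sites. The helper names and the surrounding error handling are illustrative only (they are not taken from the driver); only the crypto API calls correspond to what the hunks below remove and add.

#include <crypto/aes.h>         /* aes_expandkey(), aes_encrypt() */
#include <linux/crypto.h>       /* crypto_alloc_cipher() and friends */
#include <linux/err.h>          /* IS_ERR(), PTR_ERR() */
#include <linux/string.h>       /* memzero_explicit() */
#include <linux/types.h>

/* Old pattern: a full crypto_cipher transform is allocated, keyed,
 * used to encrypt a single block, and freed again.
 */
static int derive_block_old(u8 *out, const u8 *in, const u8 *key,
                            unsigned int keylen)
{
        struct crypto_cipher *cipher;
        int ret;

        cipher = crypto_alloc_cipher("aes-generic", 0, 0);
        if (IS_ERR(cipher))
                return PTR_ERR(cipher);

        ret = crypto_cipher_setkey(cipher, key, keylen);
        if (!ret)
                crypto_cipher_encrypt_one(cipher, out, in);

        crypto_free_cipher(cipher);
        return ret;
}

/* New pattern: the key schedule lives on the stack, nothing is
 * allocated, the only failure mode is a bad key length, and the
 * expanded key is wiped before returning.
 */
static int derive_block_new(u8 *out, const u8 *in, const u8 *key,
                            unsigned int keylen)
{
        struct crypto_aes_ctx aes;
        int ret;

        ret = aes_expandkey(&aes, key, keylen);
        if (ret)
                return ret;

        aes_encrypt(&aes, out, in);
        memzero_explicit(&aes, sizeof(aes));
        return 0;
}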
@@ -2,6 +2,7 @@
 config CRYPTO_DEV_CHELSIO
         tristate "Chelsio Crypto Co-processor Driver"
         depends on CHELSIO_T4
+        select CRYPTO_LIB_AES
         select CRYPTO_SHA1
         select CRYPTO_SHA256
         select CRYPTO_SHA512
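The `select CRYPTO_LIB_AES` added above pulls in the generic AES library used throughout the rest of the patch. For reference, the library interface looks like this (declarations as found in <crypto/aes.h> in the kernel series this patch targets; quoted here for readability, not part of the diff):

int aes_expandkey(struct crypto_aes_ctx *ctx, const u8 *in_key,
                  unsigned int key_len);
void aes_encrypt(const struct crypto_aes_ctx *ctx, u8 *out, const u8 *in);
void aes_decrypt(const struct crypto_aes_ctx *ctx, u8 *out, const u8 *in);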
@@ -1023,22 +1023,21 @@ static int chcr_update_tweak(struct ablkcipher_request *req, u8 *iv,
         struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
         struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(tfm));
         struct chcr_blkcipher_req_ctx *reqctx = ablkcipher_request_ctx(req);
-        struct crypto_cipher *cipher;
+        struct crypto_aes_ctx aes;
         int ret, i;
         u8 *key;
         unsigned int keylen;
         int round = reqctx->last_req_len / AES_BLOCK_SIZE;
         int round8 = round / 8;
 
-        cipher = ablkctx->aes_generic;
         memcpy(iv, reqctx->iv, AES_BLOCK_SIZE);
 
         keylen = ablkctx->enckey_len / 2;
         key = ablkctx->key + keylen;
-        ret = crypto_cipher_setkey(cipher, key, keylen);
+        ret = aes_expandkey(&aes, key, keylen);
         if (ret)
-                goto out;
-        crypto_cipher_encrypt_one(cipher, iv, iv);
+                return ret;
+        aes_encrypt(&aes, iv, iv);
         for (i = 0; i < round8; i++)
                 gf128mul_x8_ble((le128 *)iv, (le128 *)iv);
 
@@ -1046,9 +1045,10 @@ static int chcr_update_tweak(struct ablkcipher_request *req, u8 *iv,
                 gf128mul_x_ble((le128 *)iv, (le128 *)iv);
 
         if (!isfinal)
-                crypto_cipher_decrypt_one(cipher, iv, iv);
-out:
-        return ret;
+                aes_decrypt(&aes, iv, iv);
+
+        memzero_explicit(&aes, sizeof(aes));
+        return 0;
 }
 
 static int chcr_update_cipher_iv(struct ablkcipher_request *req,
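For readers unfamiliar with the XTS code above: chcr_update_tweak() rebuilds the tweak for a partially processed request by encrypting the IV with the second half of the XTS key and then multiplying it by x in GF(2^128) once per block already consumed (gf128mul_x8_ble() batches eight such doublings). The following self-contained sketch shows the byte-wise little-endian doubling step; it illustrates the same operation as the kernel's gf128mul_x_ble() but is not that implementation, and the function name is invented here.

/* Multiply a 16-byte XTS tweak by x in GF(2^128), little-endian
 * convention (byte 0 is least significant).
 */
static void xts_tweak_double(unsigned char t[16])
{
        unsigned int carry = 0;
        int i;

        for (i = 0; i < 16; i++) {
                unsigned int next = t[i] >> 7;

                t[i] = (unsigned char)((t[i] << 1) | carry);
                carry = next;
        }
        if (carry)
                t[0] ^= 0x87;   /* reduction by x^128 + x^7 + x^2 + x + 1 */
}

Encrypting reqctx->iv with the tweak key and applying this doubling `round` times reproduces the tweak that belongs to block number `round`.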
@@ -1411,16 +1411,6 @@ static int chcr_cra_init(struct crypto_tfm *tfm)
                 return PTR_ERR(ablkctx->sw_cipher);
         }
 
-        if (get_cryptoalg_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_XTS) {
-                /* To update tweak*/
-                ablkctx->aes_generic = crypto_alloc_cipher("aes-generic", 0, 0);
-                if (IS_ERR(ablkctx->aes_generic)) {
-                        pr_err("failed to allocate aes cipher for tweak\n");
-                        return PTR_ERR(ablkctx->aes_generic);
-                }
-        } else
-                ablkctx->aes_generic = NULL;
-
         tfm->crt_ablkcipher.reqsize = sizeof(struct chcr_blkcipher_req_ctx);
         return chcr_device_init(crypto_tfm_ctx(tfm));
 }
@@ -1451,8 +1441,6 @@ static void chcr_cra_exit(struct crypto_tfm *tfm)
         struct ablk_ctx *ablkctx = ABLK_CTX(ctx);
 
         crypto_free_sync_skcipher(ablkctx->sw_cipher);
-        if (ablkctx->aes_generic)
-                crypto_free_cipher(ablkctx->aes_generic);
 }
 
 static int get_alg_config(struct algo_param *params,
@@ -3364,9 +3352,9 @@ static int chcr_gcm_setkey(struct crypto_aead *aead, const u8 *key,
 {
         struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(aead));
         struct chcr_gcm_ctx *gctx = GCM_CTX(aeadctx);
-        struct crypto_cipher *cipher;
         unsigned int ck_size;
         int ret = 0, key_ctx_size = 0;
+        struct crypto_aes_ctx aes;
 
         aeadctx->enckey_len = 0;
         crypto_aead_clear_flags(aeadctx->sw_cipher, CRYPTO_TFM_REQ_MASK);
@@ -3409,23 +3397,15 @@ static int chcr_gcm_setkey(struct crypto_aead *aead, const u8 *key,
         /* Calculate the H = CIPH(K, 0 repeated 16 times).
          * It will go in key context
          */
-        cipher = crypto_alloc_cipher("aes-generic", 0, 0);
-        if (IS_ERR(cipher)) {
-                aeadctx->enckey_len = 0;
-                ret = -ENOMEM;
-                goto out;
-        }
-
-        ret = crypto_cipher_setkey(cipher, key, keylen);
+        ret = aes_expandkey(&aes, key, keylen);
         if (ret) {
                 aeadctx->enckey_len = 0;
-                goto out1;
+                goto out;
         }
         memset(gctx->ghash_h, 0, AEAD_H_SIZE);
-        crypto_cipher_encrypt_one(cipher, gctx->ghash_h, gctx->ghash_h);
+        aes_encrypt(&aes, gctx->ghash_h, gctx->ghash_h);
+        memzero_explicit(&aes, sizeof(aes));
 
-out1:
-        crypto_free_cipher(cipher);
 out:
         return ret;
 }
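As the comment in the hunk above says, the GCM hash subkey is H = E_K(0^128); the driver stores it in the hardware key context so the device can compute GHASH. With the AES library this reduces to expanding the key, encrypting a zeroed block in place, and wiping the key schedule. A minimal sketch under those assumptions (the helper name and the GHASH_H_SIZE constant are illustrative; the driver itself uses AEAD_H_SIZE):

#include <crypto/aes.h>
#include <linux/string.h>
#include <linux/types.h>

#define GHASH_H_SIZE 16 /* illustrative stand-in for AEAD_H_SIZE */

/* Derive the GCM hash subkey H = AES-K(0^128) into ghash_h. */
static int derive_ghash_subkey(u8 ghash_h[GHASH_H_SIZE],
                               const u8 *key, unsigned int keylen)
{
        struct crypto_aes_ctx aes;
        int ret;

        ret = aes_expandkey(&aes, key, keylen);
        if (ret)
                return ret;

        memset(ghash_h, 0, GHASH_H_SIZE);
        aes_encrypt(&aes, ghash_h, ghash_h);    /* encrypt the zero block in place */
        memzero_explicit(&aes, sizeof(aes));
        return 0;
}

The same derivation appears again in the IPsec and TLS offload paths below; only the destination of H differs.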
@@ -172,7 +172,6 @@ static inline struct chcr_context *h_ctx(struct crypto_ahash *tfm)
 
 struct ablk_ctx {
         struct crypto_sync_skcipher *sw_cipher;
-        struct crypto_cipher *aes_generic;
         __be32 key_ctx_hdr;
         unsigned int enckey_len;
         unsigned char ciph_mode;
@@ -132,11 +132,11 @@ static inline int chcr_ipsec_setauthsize(struct xfrm_state *x,
 static inline int chcr_ipsec_setkey(struct xfrm_state *x,
                                     struct ipsec_sa_entry *sa_entry)
 {
-        struct crypto_cipher *cipher;
         int keylen = (x->aead->alg_key_len + 7) / 8;
         unsigned char *key = x->aead->alg_key;
         int ck_size, key_ctx_size = 0;
         unsigned char ghash_h[AEAD_H_SIZE];
+        struct crypto_aes_ctx aes;
         int ret = 0;
 
         if (keylen > 3) {
@@ -170,26 +170,19 @@ static inline int chcr_ipsec_setkey(struct xfrm_state *x,
         /* Calculate the H = CIPH(K, 0 repeated 16 times).
          * It will go in key context
          */
-        cipher = crypto_alloc_cipher("aes-generic", 0, 0);
-        if (IS_ERR(cipher)) {
-                sa_entry->enckey_len = 0;
-                ret = -ENOMEM;
-                goto out;
-        }
-
-        ret = crypto_cipher_setkey(cipher, key, keylen);
+        ret = aes_expandkey(&aes, key, keylen);
         if (ret) {
                 sa_entry->enckey_len = 0;
-                goto out1;
+                goto out;
         }
         memset(ghash_h, 0, AEAD_H_SIZE);
-        crypto_cipher_encrypt_one(cipher, ghash_h, ghash_h);
+        aes_encrypt(&aes, ghash_h, ghash_h);
+        memzero_explicit(&aes, sizeof(aes));
+
         memcpy(sa_entry->key + (DIV_ROUND_UP(sa_entry->enckey_len, 16) *
                16), ghash_h, AEAD_H_SIZE);
         sa_entry->kctx_len = ((DIV_ROUND_UP(sa_entry->enckey_len, 16)) << 4) +
                               AEAD_H_SIZE;
-out1:
-        crypto_free_cipher(cipher);
 out:
         return ret;
 }
@@ -213,8 +213,8 @@ static int chtls_key_info(struct chtls_sock *csk,
         unsigned char key[AES_KEYSIZE_128];
         struct tls12_crypto_info_aes_gcm_128 *gcm_ctx;
         unsigned char ghash_h[AEAD_H_SIZE];
-        struct crypto_cipher *cipher;
         int ck_size, key_ctx_size;
+        struct crypto_aes_ctx aes;
         int ret;
 
         gcm_ctx = (struct tls12_crypto_info_aes_gcm_128 *)
@@ -234,18 +234,13 @@ static int chtls_key_info(struct chtls_sock *csk,
         /* Calculate the H = CIPH(K, 0 repeated 16 times).
          * It will go in key context
          */
-        cipher = crypto_alloc_cipher("aes", 0, 0);
-        if (IS_ERR(cipher)) {
-                ret = -ENOMEM;
-                goto out;
-        }
-
-        ret = crypto_cipher_setkey(cipher, key, keylen);
+        ret = aes_expandkey(&aes, key, keylen);
         if (ret)
-                goto out1;
+                return ret;
 
         memset(ghash_h, 0, AEAD_H_SIZE);
-        crypto_cipher_encrypt_one(cipher, ghash_h, ghash_h);
+        aes_encrypt(&aes, ghash_h, ghash_h);
+        memzero_explicit(&aes, sizeof(aes));
         csk->tlshws.keylen = key_ctx_size;
 
         /* Copy the Key context */
@@ -269,10 +264,7 @@ static int chtls_key_info(struct chtls_sock *csk,
         /* erase key info from driver */
         memset(gcm_ctx->key, 0, keylen);
 
-out1:
-        crypto_free_cipher(cipher);
-out:
-        return ret;
+        return 0;
 }
 
 static void chtls_set_scmd(struct chtls_sock *csk)