@@ -0,0 +1,578 @@
+From ce05ffe10457bda487fa049016a6ba79934bdece Mon Sep 17 00:00:00 2001
+From: Christian Lamparter <[email protected]>
+Date: Thu, 19 Apr 2018 18:41:52 +0200
+Subject: [PATCH 3/8] crypto: crypto4xx - convert to skcipher
+
+The ablkcipher APIs have been effectively deprecated since [1].
+This patch converts the crypto4xx driver to the new skcipher APIs.
+
+[1] <https://www.spinics.net/lists/linux-crypto/msg18133.html>
+
+Signed-off-by: Christian Lamparter <[email protected]>
+Signed-off-by: Herbert Xu <[email protected]>
+---
+ drivers/crypto/amcc/crypto4xx_alg.c | 60 ++++---
+ drivers/crypto/amcc/crypto4xx_core.c | 255 +++++++++++++--------------
+ drivers/crypto/amcc/crypto4xx_core.h | 25 +--
+ 3 files changed, 163 insertions(+), 177 deletions(-)
+
+--- a/drivers/crypto/amcc/crypto4xx_alg.c
++++ b/drivers/crypto/amcc/crypto4xx_alg.c
+@@ -31,6 +31,7 @@
+ #include <crypto/gcm.h>
+ #include <crypto/sha.h>
+ #include <crypto/ctr.h>
++#include <crypto/skcipher.h>
+ #include "crypto4xx_reg_def.h"
+ #include "crypto4xx_core.h"
+ #include "crypto4xx_sa.h"
+@@ -74,36 +75,37 @@ static void set_dynamic_sa_command_1(str
+ sa->sa_command_1.bf.copy_hdr = cp_hdr;
+ }
+
+-static inline int crypto4xx_crypt(struct ablkcipher_request *req,
++static inline int crypto4xx_crypt(struct skcipher_request *req,
+ const unsigned int ivlen, bool decrypt)
+ {
+- struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
++ struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
++ struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
+ __le32 iv[ivlen];
+
+ if (ivlen)
+- crypto4xx_memcpy_to_le32(iv, req->info, ivlen);
++ crypto4xx_memcpy_to_le32(iv, req->iv, ivlen);
+
+ return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
+- req->nbytes, iv, ivlen, decrypt ? ctx->sa_in : ctx->sa_out,
++ req->cryptlen, iv, ivlen, decrypt ? ctx->sa_in : ctx->sa_out,
+ ctx->sa_len, 0);
+ }
+
+-int crypto4xx_encrypt_noiv(struct ablkcipher_request *req)
++int crypto4xx_encrypt_noiv(struct skcipher_request *req)
+ {
+ return crypto4xx_crypt(req, 0, false);
+ }
+
+-int crypto4xx_encrypt_iv(struct ablkcipher_request *req)
++int crypto4xx_encrypt_iv(struct skcipher_request *req)
+ {
+ return crypto4xx_crypt(req, AES_IV_SIZE, false);
+ }
+
+-int crypto4xx_decrypt_noiv(struct ablkcipher_request *req)
++int crypto4xx_decrypt_noiv(struct skcipher_request *req)
+ {
+ return crypto4xx_crypt(req, 0, true);
+ }
+
+-int crypto4xx_decrypt_iv(struct ablkcipher_request *req)
++int crypto4xx_decrypt_iv(struct skcipher_request *req)
+ {
+ return crypto4xx_crypt(req, AES_IV_SIZE, true);
+ }
+@@ -111,20 +113,19 @@ int crypto4xx_decrypt_iv(struct ablkciph
+ /**
+ * AES Functions
+ */
+-static int crypto4xx_setkey_aes(struct crypto_ablkcipher *cipher,
++static int crypto4xx_setkey_aes(struct crypto_skcipher *cipher,
+ const u8 *key,
+ unsigned int keylen,
+ unsigned char cm,
+ u8 fb)
+ {
+- struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
+- struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
++ struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
+ struct dynamic_sa_ctl *sa;
+ int rc;
+
+ if (keylen != AES_KEYSIZE_256 &&
+ keylen != AES_KEYSIZE_192 && keylen != AES_KEYSIZE_128) {
+- crypto_ablkcipher_set_flags(cipher,
++ crypto_skcipher_set_flags(cipher,
+ CRYPTO_TFM_RES_BAD_KEY_LEN);
+ return -EINVAL;
+ }
+@@ -164,39 +165,38 @@ static int crypto4xx_setkey_aes(struct c
+ return 0;
+ }
+
+-int crypto4xx_setkey_aes_cbc(struct crypto_ablkcipher *cipher,
++int crypto4xx_setkey_aes_cbc(struct crypto_skcipher *cipher,
+ const u8 *key, unsigned int keylen)
+ {
+ return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_CBC,
+ CRYPTO_FEEDBACK_MODE_NO_FB);
+ }
+
+-int crypto4xx_setkey_aes_cfb(struct crypto_ablkcipher *cipher,
++int crypto4xx_setkey_aes_cfb(struct crypto_skcipher *cipher,
+ const u8 *key, unsigned int keylen)
+ {
+ return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_CFB,
+ CRYPTO_FEEDBACK_MODE_128BIT_CFB);
+ }
+
+-int crypto4xx_setkey_aes_ecb(struct crypto_ablkcipher *cipher,
++int crypto4xx_setkey_aes_ecb(struct crypto_skcipher *cipher,
+ const u8 *key, unsigned int keylen)
+ {
+ return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_ECB,
+ CRYPTO_FEEDBACK_MODE_NO_FB);
+ }
+
+-int crypto4xx_setkey_aes_ofb(struct crypto_ablkcipher *cipher,
++int crypto4xx_setkey_aes_ofb(struct crypto_skcipher *cipher,
+ const u8 *key, unsigned int keylen)
+ {
+ return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_OFB,
+ CRYPTO_FEEDBACK_MODE_64BIT_OFB);
+ }
+
+-int crypto4xx_setkey_rfc3686(struct crypto_ablkcipher *cipher,
++int crypto4xx_setkey_rfc3686(struct crypto_skcipher *cipher,
+ const u8 *key, unsigned int keylen)
+ {
+- struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
+- struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
++ struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
+ int rc;
+
+ rc = crypto4xx_setkey_aes(cipher, key, keylen - CTR_RFC3686_NONCE_SIZE,
+@@ -210,31 +210,33 @@ int crypto4xx_setkey_rfc3686(struct cryp
+ return 0;
+ }
+
+-int crypto4xx_rfc3686_encrypt(struct ablkcipher_request *req)
++int crypto4xx_rfc3686_encrypt(struct skcipher_request *req)
+ {
+- struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
++ struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
++ struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
+ __le32 iv[AES_IV_SIZE / 4] = {
+ ctx->iv_nonce,
+- cpu_to_le32p((u32 *) req->info),
+- cpu_to_le32p((u32 *) (req->info + 4)),
++ cpu_to_le32p((u32 *) req->iv),
++ cpu_to_le32p((u32 *) (req->iv + 4)),
+ cpu_to_le32(1) };
+
+ return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
+- req->nbytes, iv, AES_IV_SIZE,
++ req->cryptlen, iv, AES_IV_SIZE,
+ ctx->sa_out, ctx->sa_len, 0);
+ }
+
+-int crypto4xx_rfc3686_decrypt(struct ablkcipher_request *req)
++int crypto4xx_rfc3686_decrypt(struct skcipher_request *req)
+ {
+- struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
++ struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
++ struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
+ __le32 iv[AES_IV_SIZE / 4] = {
+ ctx->iv_nonce,
+- cpu_to_le32p((u32 *) req->info),
+- cpu_to_le32p((u32 *) (req->info + 4)),
++ cpu_to_le32p((u32 *) req->iv),
++ cpu_to_le32p((u32 *) (req->iv + 4)),
+ cpu_to_le32(1) };
+
+ return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
+- req->nbytes, iv, AES_IV_SIZE,
++ req->cryptlen, iv, AES_IV_SIZE,
+ ctx->sa_out, ctx->sa_len, 0);
+ }
+
+--- a/drivers/crypto/amcc/crypto4xx_core.c
++++ b/drivers/crypto/amcc/crypto4xx_core.c
+@@ -41,6 +41,7 @@
+ #include <crypto/gcm.h>
+ #include <crypto/sha.h>
+ #include <crypto/scatterwalk.h>
++#include <crypto/skcipher.h>
+ #include <crypto/internal/aead.h>
+ #include <crypto/internal/skcipher.h>
+ #include "crypto4xx_reg_def.h"
+@@ -526,21 +527,19 @@ static void crypto4xx_ret_sg_desc(struct
+ }
+ }
+
+-static void crypto4xx_ablkcipher_done(struct crypto4xx_device *dev,
++static void crypto4xx_cipher_done(struct crypto4xx_device *dev,
+ struct pd_uinfo *pd_uinfo,
+ struct ce_pd *pd)
+ {
+- struct crypto4xx_ctx *ctx;
+- struct ablkcipher_request *ablk_req;
++ struct skcipher_request *req;
+ struct scatterlist *dst;
+ dma_addr_t addr;
+
+- ablk_req = ablkcipher_request_cast(pd_uinfo->async_req);
+- ctx = crypto_tfm_ctx(ablk_req->base.tfm);
++ req = skcipher_request_cast(pd_uinfo->async_req);
+
+ if (pd_uinfo->using_sd) {
+- crypto4xx_copy_pkt_to_dst(dev, pd, pd_uinfo, ablk_req->nbytes,
+- ablk_req->dst);
++ crypto4xx_copy_pkt_to_dst(dev, pd, pd_uinfo,
++ req->cryptlen, req->dst);
+ } else {
+ dst = pd_uinfo->dest_va;
+ addr = dma_map_page(dev->core_dev->device, sg_page(dst),
+@@ -549,8 +548,8 @@ static void crypto4xx_ablkcipher_done(st
+ crypto4xx_ret_sg_desc(dev, pd_uinfo);
+
+ if (pd_uinfo->state & PD_ENTRY_BUSY)
+- ablkcipher_request_complete(ablk_req, -EINPROGRESS);
+- ablkcipher_request_complete(ablk_req, 0);
++ skcipher_request_complete(req, -EINPROGRESS);
++ skcipher_request_complete(req, 0);
+ }
+
+ static void crypto4xx_ahash_done(struct crypto4xx_device *dev,
+@@ -641,8 +640,8 @@ static void crypto4xx_pd_done(struct cry
+ struct pd_uinfo *pd_uinfo = &dev->pdr_uinfo[idx];
+
+ switch (crypto_tfm_alg_type(pd_uinfo->async_req->tfm)) {
+- case CRYPTO_ALG_TYPE_ABLKCIPHER:
+- crypto4xx_ablkcipher_done(dev, pd_uinfo, pd);
++ case CRYPTO_ALG_TYPE_SKCIPHER:
++ crypto4xx_cipher_done(dev, pd_uinfo, pd);
+ break;
+ case CRYPTO_ALG_TYPE_AEAD:
+ crypto4xx_aead_done(dev, pd_uinfo, pd);
+@@ -936,15 +935,14 @@ static void crypto4xx_ctx_init(struct cr
+ ctx->sa_len = 0;
+ }
+
+-static int crypto4xx_ablk_init(struct crypto_tfm *tfm)
++static int crypto4xx_sk_init(struct crypto_skcipher *sk)
+ {
+- struct crypto_alg *alg = tfm->__crt_alg;
++ struct skcipher_alg *alg = crypto_skcipher_alg(sk);
+ struct crypto4xx_alg *amcc_alg;
+- struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
++ struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(sk);
+
+ amcc_alg = container_of(alg, struct crypto4xx_alg, alg.u.cipher);
+ crypto4xx_ctx_init(amcc_alg, ctx);
+- tfm->crt_ablkcipher.reqsize = sizeof(struct crypto4xx_ctx);
+ return 0;
+ }
+
+@@ -953,9 +951,11 @@ static void crypto4xx_common_exit(struct
+ crypto4xx_free_sa(ctx);
+ }
+
+-static void crypto4xx_ablk_exit(struct crypto_tfm *tfm)
++static void crypto4xx_sk_exit(struct crypto_skcipher *sk)
+ {
+- crypto4xx_common_exit(crypto_tfm_ctx(tfm));
++ struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(sk);
++
++ crypto4xx_common_exit(ctx);
+ }
+
+ static int crypto4xx_aead_init(struct crypto_aead *tfm)
+@@ -1012,7 +1012,7 @@ static int crypto4xx_register_alg(struct
+ break;
+
+ default:
+- rc = crypto_register_alg(&alg->alg.u.cipher);
++ rc = crypto_register_skcipher(&alg->alg.u.cipher);
+ break;
+ }
+
+@@ -1041,7 +1041,7 @@ static void crypto4xx_unregister_alg(str
+ break;
+
+ default:
+- crypto_unregister_alg(&alg->alg.u.cipher);
++ crypto_unregister_skcipher(&alg->alg.u.cipher);
+ }
+ kfree(alg);
+ }
+@@ -1103,126 +1103,109 @@ static irqreturn_t crypto4xx_ce_interrup
+ */
+ static struct crypto4xx_alg_common crypto4xx_alg[] = {
+ /* Crypto AES modes */
+- { .type = CRYPTO_ALG_TYPE_ABLKCIPHER, .u.cipher = {
+- .cra_name = "cbc(aes)",
+- .cra_driver_name = "cbc-aes-ppc4xx",
+- .cra_priority = CRYPTO4XX_CRYPTO_PRIORITY,
+- .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
+- CRYPTO_ALG_ASYNC |
+- CRYPTO_ALG_KERN_DRIVER_ONLY,
+- .cra_blocksize = AES_BLOCK_SIZE,
+- .cra_ctxsize = sizeof(struct crypto4xx_ctx),
+- .cra_type = &crypto_ablkcipher_type,
+- .cra_init = crypto4xx_ablk_init,
+- .cra_exit = crypto4xx_ablk_exit,
+- .cra_module = THIS_MODULE,
+- .cra_u = {
+- .ablkcipher = {
+- .min_keysize = AES_MIN_KEY_SIZE,
+- .max_keysize = AES_MAX_KEY_SIZE,
+- .ivsize = AES_IV_SIZE,
+- .setkey = crypto4xx_setkey_aes_cbc,
+- .encrypt = crypto4xx_encrypt_iv,
+- .decrypt = crypto4xx_decrypt_iv,
+- }
+- }
+- }},
+- { .type = CRYPTO_ALG_TYPE_ABLKCIPHER, .u.cipher = {
+- .cra_name = "cfb(aes)",
+- .cra_driver_name = "cfb-aes-ppc4xx",
+- .cra_priority = CRYPTO4XX_CRYPTO_PRIORITY,
+- .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
+- CRYPTO_ALG_ASYNC |
+- CRYPTO_ALG_KERN_DRIVER_ONLY,
+- .cra_blocksize = AES_BLOCK_SIZE,
+- .cra_ctxsize = sizeof(struct crypto4xx_ctx),
+- .cra_type = &crypto_ablkcipher_type,
+- .cra_init = crypto4xx_ablk_init,
+- .cra_exit = crypto4xx_ablk_exit,
+- .cra_module = THIS_MODULE,
+- .cra_u = {
+- .ablkcipher = {
+- .min_keysize = AES_MIN_KEY_SIZE,
+- .max_keysize = AES_MAX_KEY_SIZE,
+- .ivsize = AES_IV_SIZE,
+- .setkey = crypto4xx_setkey_aes_cfb,
+- .encrypt = crypto4xx_encrypt_iv,
+- .decrypt = crypto4xx_decrypt_iv,
+- }
+- }
++ { .type = CRYPTO_ALG_TYPE_SKCIPHER, .u.cipher = {
++ .base = {
++ .cra_name = "cbc(aes)",
++ .cra_driver_name = "cbc-aes-ppc4xx",
++ .cra_priority = CRYPTO4XX_CRYPTO_PRIORITY,
++ .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
++ CRYPTO_ALG_ASYNC |
++ CRYPTO_ALG_KERN_DRIVER_ONLY,
++ .cra_blocksize = AES_BLOCK_SIZE,
++ .cra_ctxsize = sizeof(struct crypto4xx_ctx),
++ .cra_module = THIS_MODULE,
++ },
++ .min_keysize = AES_MIN_KEY_SIZE,
++ .max_keysize = AES_MAX_KEY_SIZE,
++ .ivsize = AES_IV_SIZE,
++ .setkey = crypto4xx_setkey_aes_cbc,
++ .encrypt = crypto4xx_encrypt_iv,
++ .decrypt = crypto4xx_decrypt_iv,
++ .init = crypto4xx_sk_init,
++ .exit = crypto4xx_sk_exit,
+ } },
+- { .type = CRYPTO_ALG_TYPE_ABLKCIPHER, .u.cipher = {
+- .cra_name = "rfc3686(ctr(aes))",
+- .cra_driver_name = "rfc3686-ctr-aes-ppc4xx",
+- .cra_priority = CRYPTO4XX_CRYPTO_PRIORITY,
+- .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
+- CRYPTO_ALG_ASYNC |
+- CRYPTO_ALG_KERN_DRIVER_ONLY,
+- .cra_blocksize = AES_BLOCK_SIZE,
+- .cra_ctxsize = sizeof(struct crypto4xx_ctx),
+- .cra_type = &crypto_ablkcipher_type,
+- .cra_init = crypto4xx_ablk_init,
+- .cra_exit = crypto4xx_ablk_exit,
+- .cra_module = THIS_MODULE,
+- .cra_u = {
+- .ablkcipher = {
+- .min_keysize = AES_MIN_KEY_SIZE +
+- CTR_RFC3686_NONCE_SIZE,
+- .max_keysize = AES_MAX_KEY_SIZE +
+- CTR_RFC3686_NONCE_SIZE,
+- .ivsize = CTR_RFC3686_IV_SIZE,
+- .setkey = crypto4xx_setkey_rfc3686,
+- .encrypt = crypto4xx_rfc3686_encrypt,
+- .decrypt = crypto4xx_rfc3686_decrypt,
+- }
+- }
++ { .type = CRYPTO_ALG_TYPE_SKCIPHER, .u.cipher = {
++ .base = {
++ .cra_name = "cfb(aes)",
++ .cra_driver_name = "cfb-aes-ppc4xx",
++ .cra_priority = CRYPTO4XX_CRYPTO_PRIORITY,
++ .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
++ CRYPTO_ALG_ASYNC |
++ CRYPTO_ALG_KERN_DRIVER_ONLY,
++ .cra_blocksize = AES_BLOCK_SIZE,
++ .cra_ctxsize = sizeof(struct crypto4xx_ctx),
++ .cra_module = THIS_MODULE,
++ },
++ .min_keysize = AES_MIN_KEY_SIZE,
++ .max_keysize = AES_MAX_KEY_SIZE,
++ .ivsize = AES_IV_SIZE,
++ .setkey = crypto4xx_setkey_aes_cfb,
++ .encrypt = crypto4xx_encrypt_iv,
++ .decrypt = crypto4xx_decrypt_iv,
++ .init = crypto4xx_sk_init,
++ .exit = crypto4xx_sk_exit,
+ } },
+- { .type = CRYPTO_ALG_TYPE_ABLKCIPHER, .u.cipher = {
+- .cra_name = "ecb(aes)",
+- .cra_driver_name = "ecb-aes-ppc4xx",
+- .cra_priority = CRYPTO4XX_CRYPTO_PRIORITY,
+- .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
+- CRYPTO_ALG_ASYNC |
+- CRYPTO_ALG_KERN_DRIVER_ONLY,
+- .cra_blocksize = AES_BLOCK_SIZE,
+- .cra_ctxsize = sizeof(struct crypto4xx_ctx),
+- .cra_type = &crypto_ablkcipher_type,
+- .cra_init = crypto4xx_ablk_init,
+- .cra_exit = crypto4xx_ablk_exit,
+- .cra_module = THIS_MODULE,
+- .cra_u = {
+- .ablkcipher = {
+- .min_keysize = AES_MIN_KEY_SIZE,
+- .max_keysize = AES_MAX_KEY_SIZE,
+- .setkey = crypto4xx_setkey_aes_ecb,
+- .encrypt = crypto4xx_encrypt_noiv,
+- .decrypt = crypto4xx_decrypt_noiv,
+- }
+- }
++ { .type = CRYPTO_ALG_TYPE_SKCIPHER, .u.cipher = {
++ .base = {
++ .cra_name = "rfc3686(ctr(aes))",
++ .cra_driver_name = "rfc3686-ctr-aes-ppc4xx",
++ .cra_priority = CRYPTO4XX_CRYPTO_PRIORITY,
++ .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
++ CRYPTO_ALG_ASYNC |
++ CRYPTO_ALG_KERN_DRIVER_ONLY,
++ .cra_blocksize = AES_BLOCK_SIZE,
++ .cra_ctxsize = sizeof(struct crypto4xx_ctx),
++ .cra_module = THIS_MODULE,
++ },
++ .min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
++ .max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
++ .ivsize = CTR_RFC3686_IV_SIZE,
++ .setkey = crypto4xx_setkey_rfc3686,
++ .encrypt = crypto4xx_rfc3686_encrypt,
++ .decrypt = crypto4xx_rfc3686_decrypt,
++ .init = crypto4xx_sk_init,
++ .exit = crypto4xx_sk_exit,
+ } },
+- { .type = CRYPTO_ALG_TYPE_ABLKCIPHER, .u.cipher = {
+- .cra_name = "ofb(aes)",
+- .cra_driver_name = "ofb-aes-ppc4xx",
+- .cra_priority = CRYPTO4XX_CRYPTO_PRIORITY,
+- .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
+- CRYPTO_ALG_ASYNC |
+- CRYPTO_ALG_KERN_DRIVER_ONLY,
+- .cra_blocksize = AES_BLOCK_SIZE,
+- .cra_ctxsize = sizeof(struct crypto4xx_ctx),
+- .cra_type = &crypto_ablkcipher_type,
+- .cra_init = crypto4xx_ablk_init,
+- .cra_exit = crypto4xx_ablk_exit,
+- .cra_module = THIS_MODULE,
+- .cra_u = {
+- .ablkcipher = {
+- .min_keysize = AES_MIN_KEY_SIZE,
+- .max_keysize = AES_MAX_KEY_SIZE,
+- .ivsize = AES_IV_SIZE,
+- .setkey = crypto4xx_setkey_aes_ofb,
+- .encrypt = crypto4xx_encrypt_iv,
+- .decrypt = crypto4xx_decrypt_iv,
+- }
+- }
++ { .type = CRYPTO_ALG_TYPE_SKCIPHER, .u.cipher = {
++ .base = {
++ .cra_name = "ecb(aes)",
++ .cra_driver_name = "ecb-aes-ppc4xx",
++ .cra_priority = CRYPTO4XX_CRYPTO_PRIORITY,
++ .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
++ CRYPTO_ALG_ASYNC |
++ CRYPTO_ALG_KERN_DRIVER_ONLY,
++ .cra_blocksize = AES_BLOCK_SIZE,
++ .cra_ctxsize = sizeof(struct crypto4xx_ctx),
++ .cra_module = THIS_MODULE,
++ },
++ .min_keysize = AES_MIN_KEY_SIZE,
++ .max_keysize = AES_MAX_KEY_SIZE,
++ .setkey = crypto4xx_setkey_aes_ecb,
++ .encrypt = crypto4xx_encrypt_noiv,
++ .decrypt = crypto4xx_decrypt_noiv,
++ .init = crypto4xx_sk_init,
++ .exit = crypto4xx_sk_exit,
++ } },
++ { .type = CRYPTO_ALG_TYPE_SKCIPHER, .u.cipher = {
++ .base = {
++ .cra_name = "ofb(aes)",
++ .cra_driver_name = "ofb-aes-ppc4xx",
++ .cra_priority = CRYPTO4XX_CRYPTO_PRIORITY,
++ .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
++ CRYPTO_ALG_ASYNC |
++ CRYPTO_ALG_KERN_DRIVER_ONLY,
++ .cra_blocksize = AES_BLOCK_SIZE,
++ .cra_ctxsize = sizeof(struct crypto4xx_ctx),
++ .cra_module = THIS_MODULE,
++ },
++ .min_keysize = AES_MIN_KEY_SIZE,
++ .max_keysize = AES_MAX_KEY_SIZE,
++ .ivsize = AES_IV_SIZE,
++ .setkey = crypto4xx_setkey_aes_ofb,
++ .encrypt = crypto4xx_encrypt_iv,
++ .decrypt = crypto4xx_decrypt_iv,
++ .init = crypto4xx_sk_init,
++ .exit = crypto4xx_sk_exit,
+ } },
+
+ /* AEAD */
+--- a/drivers/crypto/amcc/crypto4xx_core.h
++++ b/drivers/crypto/amcc/crypto4xx_core.h
+@@ -25,6 +25,7 @@
+ #include <linux/ratelimit.h>
+ #include <crypto/internal/hash.h>
+ #include <crypto/internal/aead.h>
++#include <crypto/internal/skcipher.h>
+ #include "crypto4xx_reg_def.h"
+ #include "crypto4xx_sa.h"
+
+@@ -134,7 +135,7 @@ struct crypto4xx_ctx {
+ struct crypto4xx_alg_common {
+ u32 type;
+ union {
+- struct crypto_alg cipher;
++ struct skcipher_alg cipher;
+ struct ahash_alg hash;
+ struct aead_alg aead;
+ } u;
+@@ -158,22 +159,22 @@ int crypto4xx_build_pd(struct crypto_asy
+ const struct dynamic_sa_ctl *sa,
+ const unsigned int sa_len,
+ const unsigned int assoclen);
+-int crypto4xx_setkey_aes_cbc(struct crypto_ablkcipher *cipher,
++int crypto4xx_setkey_aes_cbc(struct crypto_skcipher *cipher,
+ const u8 *key, unsigned int keylen);
+-int crypto4xx_setkey_aes_cfb(struct crypto_ablkcipher *cipher,
++int crypto4xx_setkey_aes_cfb(struct crypto_skcipher *cipher,
+ const u8 *key, unsigned int keylen);
+-int crypto4xx_setkey_aes_ecb(struct crypto_ablkcipher *cipher,
++int crypto4xx_setkey_aes_ecb(struct crypto_skcipher *cipher,
+ const u8 *key, unsigned int keylen);
+-int crypto4xx_setkey_aes_ofb(struct crypto_ablkcipher *cipher,
++int crypto4xx_setkey_aes_ofb(struct crypto_skcipher *cipher,
+ const u8 *key, unsigned int keylen);
+-int crypto4xx_setkey_rfc3686(struct crypto_ablkcipher *cipher,
++int crypto4xx_setkey_rfc3686(struct crypto_skcipher *cipher,
+ const u8 *key, unsigned int keylen);
+-int crypto4xx_encrypt_iv(struct ablkcipher_request *req);
+-int crypto4xx_decrypt_iv(struct ablkcipher_request *req);
+-int crypto4xx_encrypt_noiv(struct ablkcipher_request *req);
+-int crypto4xx_decrypt_noiv(struct ablkcipher_request *req);
+-int crypto4xx_rfc3686_encrypt(struct ablkcipher_request *req);
+-int crypto4xx_rfc3686_decrypt(struct ablkcipher_request *req);
++int crypto4xx_encrypt_iv(struct skcipher_request *req);
++int crypto4xx_decrypt_iv(struct skcipher_request *req);
++int crypto4xx_encrypt_noiv(struct skcipher_request *req);
++int crypto4xx_decrypt_noiv(struct skcipher_request *req);
++int crypto4xx_rfc3686_encrypt(struct skcipher_request *req);
++int crypto4xx_rfc3686_decrypt(struct skcipher_request *req);
+ int crypto4xx_sha1_alg_init(struct crypto_tfm *tfm);
+ int crypto4xx_hash_digest(struct ahash_request *req);
+ int crypto4xx_hash_final(struct ahash_request *req);