Lines matching "ixp4xx-crypto" (query: +full:ixp4xx +full:- +full:crypto)
1 // SPDX-License-Identifier: GPL-2.0-only
3 * Intel IXP4xx NPE-C crypto driver
9 #include <linux/dma-mapping.h>
11 #include <linux/crypto.h>
20 #include <crypto/ctr.h>
21 #include <crypto/internal/des.h>
22 #include <crypto/aes.h>
23 #include <crypto/hmac.h>
24 #include <crypto/sha1.h>
25 #include <crypto/algapi.h>
26 #include <crypto/internal/aead.h>
27 #include <crypto/internal/skcipher.h>
28 #include <crypto/authenc.h>
29 #include <crypto/scatterwalk.h>
31 #include <linux/soc/ixp4xx/npe.h>
32 #include <linux/soc/ixp4xx/qmgr.h>
34 /* Intermittent includes, delete this after v5.14-rc1 */
35 #include <linux/soc/ixp4xx/cpu.h>
136 u32 crypto_ctx; /* NPE Crypto Param structure address */
194 struct skcipher_alg crypto; member
203 struct aead_alg crypto; member
239 return crypt_phys + (virt - crypt_virt) * sizeof(struct crypt_ctl); in crypt_virt2phys()
244 return crypt_virt + (phys - crypt_phys) / sizeof(struct crypt_ctl); in crypt_phys2virt()
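The two helpers above convert between the CPU and NPE views of the descriptor ring with plain offset arithmetic, which is only valid because the whole ring comes from a single coherent allocation (see setup_crypt_desc() further down the listing). A minimal sketch of the pattern, with a stand-in crypt_ctl type and an assumed ring length:

    #include <linux/dma-mapping.h>

    #define NPE_QLEN 64                      /* assumed ring length */
    struct crypt_ctl { u8 raw[64]; };        /* stand-in for the NPE descriptor */

    static struct crypt_ctl *crypt_virt;     /* CPU view of the ring */
    static dma_addr_t crypt_phys;            /* NPE/DMA view of the same memory */

    static int alloc_ring(struct device *dev)
    {
            /* one allocation backs both address spaces */
            crypt_virt = dma_alloc_coherent(dev,
                                            NPE_QLEN * sizeof(struct crypt_ctl),
                                            &crypt_phys, GFP_ATOMIC);
            return crypt_virt ? 0 : -ENOMEM;
    }

    static dma_addr_t virt2phys(struct crypt_ctl *virt)
    {
            /* same element index in either view, so pointer math suffices */
            return crypt_phys + (virt - crypt_virt) * sizeof(struct crypt_ctl);
    }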
249 return container_of(tfm->__crt_alg, struct ixp_alg, crypto.base)->cfg_enc; in cipher_cfg_enc()
254 return container_of(tfm->__crt_alg, struct ixp_alg, crypto.base)->cfg_dec; in cipher_cfg_dec()
259 return container_of(tfm->__crt_alg, struct ixp_alg, crypto.base)->hash; in ix_hash()
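All three accessors rely on the same trick: each algorithm is registered through a driver wrapper that embeds the generic skcipher_alg (or aead_alg), so container_of() on tfm->__crt_alg walks back to the wrapper and its NPE configuration words. A hedged sketch of the skcipher side; the field types are assumptions:

    #include <crypto/internal/skcipher.h>

    struct ixp_alg {
            struct skcipher_alg crypto;      /* what the crypto API sees */
            u32 cfg_enc;                     /* NPE config word, encrypt */
            u32 cfg_dec;                     /* NPE config word, decrypt */
    };

    static inline u32 cipher_cfg_enc(struct crypto_tfm *tfm)
    {
            /* tfm->__crt_alg points at .crypto.base inside struct ixp_alg */
            return container_of(tfm->__crt_alg, struct ixp_alg,
                                crypto.base)->cfg_enc;
    }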
264 struct device *dev = &pdev->dev; in setup_crypt_desc()
273 return -ENOMEM; in setup_crypt_desc()
340 buf1 = buf->next; in free_buf_chain()
341 phys1 = buf->phys_next; in free_buf_chain()
342 dma_unmap_single(dev, buf->phys_addr, buf->buf_len, buf->dir); in free_buf_chain()
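free_buf_chain() walks the singly linked descriptor chain, unmapping each data buffer and returning the descriptor itself to the DMA pool. The next/phys_next pair must be read out before the node is freed, which is what the two temporaries above are for. A sketch of the full loop, with a simplified buffer_desc (the real struct has endian-dependent field order and reserved words):

    #include <linux/dmapool.h>
    #include <linux/dma-mapping.h>

    struct buffer_desc {
            u32 phys_next;                   /* DMA address of next descriptor */
            u16 buf_len;
            u16 pkt_len;
            dma_addr_t phys_addr;            /* DMA address of the data */
            struct buffer_desc *next;        /* CPU address of next descriptor */
            enum dma_data_direction dir;
    };

    static struct dma_pool *buffer_pool;     /* created at probe time */

    static void free_buf_chain(struct device *dev, struct buffer_desc *buf,
                               dma_addr_t phys)
    {
            while (buf) {
                    struct buffer_desc *buf1 = buf->next;
                    u32 phys1 = buf->phys_next;

                    dma_unmap_single(dev, buf->phys_addr, buf->buf_len, buf->dir);
                    dma_pool_free(buffer_pool, buf, phys);
                    buf = buf1;
                    phys = phys1;
            }
    }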
353 struct aead_request *req = crypt->data.aead_req; in finish_scattered_hmac()
357 int decryptlen = req->assoclen + req->cryptlen - authsize; in finish_scattered_hmac()
359 if (req_ctx->encrypt) { in finish_scattered_hmac()
360 scatterwalk_map_and_copy(req_ctx->hmac_virt, req->dst, in finish_scattered_hmac()
363 dma_pool_free(buffer_pool, req_ctx->hmac_virt, crypt->icv_rev_aes); in finish_scattered_hmac()
368 struct device *dev = &pdev->dev; in one_packet()
373 failed = phys & 0x1 ? -EBADMSG : 0; in one_packet()
377 switch (crypt->ctl_flags & CTL_FLAG_MASK) { in one_packet()
379 struct aead_request *req = crypt->data.aead_req; in one_packet()
382 free_buf_chain(dev, req_ctx->src, crypt->src_buf); in one_packet()
383 free_buf_chain(dev, req_ctx->dst, crypt->dst_buf); in one_packet()
384 if (req_ctx->hmac_virt) in one_packet()
391 struct skcipher_request *req = crypt->data.ablk_req; in one_packet()
398 offset = req->cryptlen - ivsize; in one_packet()
399 if (req_ctx->encrypt) { in one_packet()
400 scatterwalk_map_and_copy(req->iv, req->dst, in one_packet()
403 memcpy(req->iv, req_ctx->iv, ivsize); in one_packet()
404 memzero_explicit(req_ctx->iv, ivsize); in one_packet()
408 if (req_ctx->dst) in one_packet()
409 free_buf_chain(dev, req_ctx->dst, crypt->dst_buf); in one_packet()
411 free_buf_chain(dev, req_ctx->src, crypt->src_buf); in one_packet()
416 ctx = crypto_tfm_ctx(crypt->data.tfm); in one_packet()
417 dma_pool_free(ctx_pool, crypt->regist_ptr, in one_packet()
418 crypt->regist_buf->phys_addr); in one_packet()
419 dma_pool_free(buffer_pool, crypt->regist_buf, crypt->src_buf); in one_packet()
420 if (atomic_dec_and_test(&ctx->configuring)) in one_packet()
421 complete(&ctx->completion); in one_packet()
424 ctx = crypto_tfm_ctx(crypt->data.tfm); in one_packet()
425 *(__be32 *)ctx->decrypt.npe_ctx &= cpu_to_be32(~CIPH_ENCR); in one_packet()
426 if (atomic_dec_and_test(&ctx->configuring)) in one_packet()
427 complete(&ctx->completion); in one_packet()
432 crypt->ctl_flags = CTL_FLAG_UNUSED; in one_packet()
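one_packet() is driven from the NPE's receive queue. In the driver this happens in a tasklet that drains a small batch per run and then reschedules itself so other softirq work is not starved; the sketch below reconstructs that loop from memory, and the batch size of 4 is an assumption:

    #include <linux/interrupt.h>
    #include <linux/soc/ixp4xx/qmgr.h>

    static int recv_qid;                     /* RX queue id, found at probe */

    static void crypto_done_action(unsigned long arg);
    static DECLARE_TASKLET_OLD(crypto_done_tasklet, crypto_done_action);

    static void crypto_done_action(unsigned long arg)
    {
            int i;

            for (i = 0; i < 4; i++) {        /* small batch per softirq run */
                    dma_addr_t phys = qmgr_get_entry(recv_qid);

                    if (!phys)
                            return;          /* queue drained */
                    one_packet(phys);
            }
            tasklet_schedule(&crypto_done_tasklet);
    }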
455 struct device_node *np = dev->of_node; in init_ixp_crypto()
457 int ret = -ENODEV; in init_ixp_crypto()
467 ret = of_parse_phandle_with_fixed_args(np, "intel,npe-handle", in init_ixp_crypto()
471 return -ENODEV; in init_ixp_crypto()
476 ret = of_parse_phandle_with_fixed_args(np, "queue-rx", 1, 0, in init_ixp_crypto()
480 return -ENODEV; in init_ixp_crypto()
485 ret = of_parse_phandle_with_fixed_args(np, "queue-txready", 1, 0, in init_ixp_crypto()
489 return -ENODEV; in init_ixp_crypto()
531 dev_err(dev, "Firmware of %s lacks crypto support\n", npe_name(npe_c)); in init_ixp_crypto()
532 ret = -ENODEV; in init_ixp_crypto()
541 ret = -ENOMEM; in init_ixp_crypto()
567 ret = -EIO; in init_ixp_crypto()
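The three of_parse_phandle_with_fixed_args() calls resolve the NPE handle and the two queue-manager queues from the device tree. Each phandle carries one argument cell holding the queue (or NPE) number, which lands in args[0]. A sketch of one lookup, assuming a recv_qid variable as above:

    #include <linux/of.h>

    struct of_phandle_args queue_spec;
    int ret;

    /* devicetree: queue-rx = <&qmgr QUEUE_NUMBER>; one argument cell */
    ret = of_parse_phandle_with_fixed_args(np, "queue-rx", 1, 0, &queue_spec);
    if (ret) {
            dev_err(dev, "no rx queue phandle\n");
            return -ENODEV;
    }
    recv_qid = queue_spec.args[0];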
596 memset(dir->npe_ctx, 0, NPE_CTX_LEN); in reset_sa_dir()
597 dir->npe_ctx_idx = 0; in reset_sa_dir()
598 dir->npe_mode = 0; in reset_sa_dir()
603 dir->npe_ctx = dma_pool_alloc(ctx_pool, GFP_KERNEL, &dir->npe_ctx_phys); in init_sa_dir()
604 if (!dir->npe_ctx) in init_sa_dir()
605 return -ENOMEM; in init_sa_dir()
613 memset(dir->npe_ctx, 0, NPE_CTX_LEN); in free_sa_dir()
614 dma_pool_free(ctx_pool, dir->npe_ctx, dir->npe_ctx_phys); in free_sa_dir()
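Both directions of a security association keep their NPE context in a fixed-size block from ctx_pool; free_sa_dir() zeroes the block before returning it so key material does not linger in the pool. A sketch of creating the pool itself, where the name string and the 32-byte alignment are assumptions:

    #include <linux/dmapool.h>

    #define NPE_CTX_LEN 80                   /* assumed driver value */

    static struct dma_pool *ctx_pool;

    static int create_ctx_pool(struct device *dev)
    {
            /* fixed-size, 32-byte aligned blocks for NPE context records */
            ctx_pool = dma_pool_create("ixp4xx-ctx", dev, NPE_CTX_LEN, 32, 0);
            return ctx_pool ? 0 : -ENOMEM;
    }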
622 atomic_set(&ctx->configuring, 0); in init_tfm()
623 ret = init_sa_dir(&ctx->encrypt); in init_tfm()
626 ret = init_sa_dir(&ctx->decrypt); in init_tfm()
628 free_sa_dir(&ctx->encrypt); in init_tfm()
639 ctx->fallback_tfm = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK); in init_tfm_ablk()
640 if (IS_ERR(ctx->fallback_tfm)) { in init_tfm_ablk()
642 name, PTR_ERR(ctx->fallback_tfm)); in init_tfm_ablk()
643 return PTR_ERR(ctx->fallback_tfm); in init_tfm_ablk()
647 crypto_tfm_alg_driver_name(&tfm->base), in init_tfm_ablk()
648 crypto_tfm_alg_driver_name(crypto_skcipher_tfm(ctx->fallback_tfm)) in init_tfm_ablk()
651 crypto_skcipher_set_reqsize(tfm, sizeof(struct ablk_ctx) + crypto_skcipher_reqsize(ctx->fallback_tfm)); in init_tfm_ablk()
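The reqsize set above must cover the driver's own per-request state plus the fallback's request, which is why the fallback sub-request has to be the last member: its true size is only known at runtime and lives in the tail that crypto_skcipher_reqsize(ctx->fallback_tfm) adds. A sketch of the assumed layout, reconstructed from the fields this listing touches:

    #define MAX_IVLEN 16                     /* assumed driver value */

    struct ablk_ctx {
            struct buffer_desc *src;         /* chained source buffers */
            struct buffer_desc *dst;         /* chained destination buffers */
            u8 iv[MAX_IVLEN];                /* stashed next-IV for CBC decrypt */
            bool encrypt;
            struct skcipher_request fallback_req;    /* keep as last member */
    };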
665 free_sa_dir(&ctx->encrypt); in exit_tfm()
666 free_sa_dir(&ctx->decrypt); in exit_tfm()
674 crypto_free_skcipher(ctx->fallback_tfm); in exit_tfm_ablk()
697 return -ENOMEM; in register_chain_var()
701 return -ENOMEM; in register_chain_var()
707 return -EAGAIN; in register_chain_var()
711 memset(pad + key_len, 0, HMAC_PAD_BLOCKLEN - key_len); in register_chain_var()
715 crypt->data.tfm = tfm; in register_chain_var()
716 crypt->regist_ptr = pad; in register_chain_var()
717 crypt->regist_buf = buf; in register_chain_var()
719 crypt->auth_offs = 0; in register_chain_var()
720 crypt->auth_len = HMAC_PAD_BLOCKLEN; in register_chain_var()
721 crypt->crypto_ctx = ctx_addr; in register_chain_var()
722 crypt->src_buf = buf_phys; in register_chain_var()
723 crypt->icv_rev_aes = target; in register_chain_var()
724 crypt->mode = NPE_OP_HASH_GEN_ICV; in register_chain_var()
725 crypt->init_len = init_len; in register_chain_var()
726 crypt->ctl_flags |= CTL_FLAG_GEN_ICV; in register_chain_var()
728 buf->next = NULL; in register_chain_var()
729 buf->buf_len = HMAC_PAD_BLOCKLEN; in register_chain_var()
730 buf->pkt_len = 0; in register_chain_var()
731 buf->phys_addr = pad_phys; in register_chain_var()
733 atomic_inc(&ctx->configuring); in register_chain_var()
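register_chain_var() feeds the NPE one HMAC pad block (key XOR ipad or opad, zero-padded to the block length) and lets the hardware hash it into a chaining variable. The pad construction, using the standard constants from <crypto/hmac.h>; HMAC_PAD_BLOCKLEN is the driver's block-size macro:

    #include <crypto/hmac.h>   /* HMAC_IPAD_VALUE 0x36, HMAC_OPAD_VALUE 0x5c */

    static void build_hmac_pad(u8 *pad, const u8 *key, int key_len, u8 xor)
    {
            int i;

            memcpy(pad, key, key_len);
            memset(pad + key_len, 0, HMAC_PAD_BLOCKLEN - key_len);
            for (i = 0; i < HMAC_PAD_BLOCKLEN; i++)
                    pad[i] ^= xor;
    }

    /* usage: build_hmac_pad(pad, key, key_len, HMAC_IPAD_VALUE); then hand
     * the block to the NPE with mode NPE_OP_HASH_GEN_ICV, as above */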
750 dir = encrypt ? &ctx->encrypt : &ctx->decrypt; in setup_auth()
751 cinfo = dir->npe_ctx + dir->npe_ctx_idx; in setup_auth()
755 cfgword = algo->cfgword | (authsize << 6); /* (authsize/4) << 8 */ in setup_auth()
763 memcpy(cinfo, algo->icv, digest_len); in setup_auth()
766 itarget = dir->npe_ctx_phys + dir->npe_ctx_idx in setup_auth()
767 + sizeof(algo->cfgword); in setup_auth()
769 init_len = cinfo - (dir->npe_ctx + dir->npe_ctx_idx); in setup_auth()
770 npe_ctx_addr = dir->npe_ctx_phys + dir->npe_ctx_idx; in setup_auth()
772 dir->npe_ctx_idx += init_len; in setup_auth()
773 dir->npe_mode |= NPE_OP_HASH_ENABLE; in setup_auth()
776 dir->npe_mode |= NPE_OP_HASH_VERIFY; in setup_auth()
790 struct ix_sa_dir *dir = &ctx->decrypt; in gen_rev_aes_key()
794 return -EAGAIN; in gen_rev_aes_key()
796 *(__be32 *)dir->npe_ctx |= cpu_to_be32(CIPH_ENCR); in gen_rev_aes_key()
798 crypt->data.tfm = tfm; in gen_rev_aes_key()
799 crypt->crypt_offs = 0; in gen_rev_aes_key()
800 crypt->crypt_len = AES_BLOCK128; in gen_rev_aes_key()
801 crypt->src_buf = 0; in gen_rev_aes_key()
802 crypt->crypto_ctx = dir->npe_ctx_phys; in gen_rev_aes_key()
803 crypt->icv_rev_aes = dir->npe_ctx_phys + sizeof(u32); in gen_rev_aes_key()
804 crypt->mode = NPE_OP_ENC_GEN_KEY; in gen_rev_aes_key()
805 crypt->init_len = dir->npe_ctx_idx; in gen_rev_aes_key()
806 crypt->ctl_flags |= CTL_FLAG_GEN_REVAES; in gen_rev_aes_key()
808 atomic_inc(&ctx->configuring); in gen_rev_aes_key()
824 dir = encrypt ? &ctx->encrypt : &ctx->decrypt; in setup_cipher()
825 cinfo = dir->npe_ctx; in setup_cipher()
829 dir->npe_mode |= NPE_OP_CRYPT_ENCRYPT; in setup_cipher()
845 return -EINVAL; in setup_cipher()
861 memset(cinfo + key_len, 0, DES3_EDE_KEY_SIZE - key_len); in setup_cipher()
864 dir->npe_ctx_idx = sizeof(cipher_cfg) + key_len; in setup_cipher()
865 dir->npe_mode |= NPE_OP_CRYPT_ENABLE; in setup_cipher()
878 unsigned int len = min(nbytes, sg->length); in chainup_buffers()
883 nbytes -= len; in chainup_buffers()
891 buf->next = next_buf; in chainup_buffers()
892 buf->phys_next = next_buf_phys; in chainup_buffers()
895 buf->phys_addr = sg_dma_address(sg); in chainup_buffers()
896 buf->buf_len = len; in chainup_buffers()
897 buf->dir = dir; in chainup_buffers()
899 buf->next = NULL; in chainup_buffers()
900 buf->phys_next = 0; in chainup_buffers()
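chainup_buffers() turns a scatterlist into the NPE's chained buffer format: each segment is DMA-mapped and a fresh descriptor from buffer_pool is linked in, with a hook descriptor passed by the caller anchoring the chain. A condensed sketch using the buffer_desc from the earlier sketch; the failure path (returning NULL and letting the caller free the partial chain) is an assumption:

    #include <linux/scatterlist.h>

    static struct buffer_desc *chainup_buffers(struct device *dev,
                    struct scatterlist *sg, unsigned int nbytes,
                    struct buffer_desc *buf, gfp_t flags,
                    enum dma_data_direction dir)
    {
            for (; nbytes > 0; sg = sg_next(sg)) {
                    unsigned int len = min(nbytes, sg->length);
                    struct buffer_desc *next_buf;
                    dma_addr_t next_buf_phys;

                    nbytes -= len;
                    next_buf = dma_pool_alloc(buffer_pool, flags, &next_buf_phys);
                    if (!next_buf)
                            return NULL;     /* caller frees the partial chain */

                    sg_dma_address(sg) = dma_map_single(dev, sg_virt(sg), len, dir);
                    buf->next = next_buf;
                    buf->phys_next = next_buf_phys;
                    buf = next_buf;

                    buf->phys_addr = sg_dma_address(sg);
                    buf->buf_len = len;
                    buf->dir = dir;
            }
            buf->next = NULL;
            buf->phys_next = 0;
            return buf;
    }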
910 init_completion(&ctx->completion); in ablk_setkey()
911 atomic_inc(&ctx->configuring); in ablk_setkey()
913 reset_sa_dir(&ctx->encrypt); in ablk_setkey()
914 reset_sa_dir(&ctx->decrypt); in ablk_setkey()
916 ctx->encrypt.npe_mode = NPE_OP_HMAC_DISABLE; in ablk_setkey()
917 ctx->decrypt.npe_mode = NPE_OP_HMAC_DISABLE; in ablk_setkey()
919 ret = setup_cipher(&tfm->base, 0, key, key_len); in ablk_setkey()
922 ret = setup_cipher(&tfm->base, 1, key, key_len); in ablk_setkey()
924 if (!atomic_dec_and_test(&ctx->configuring)) in ablk_setkey()
925 wait_for_completion(&ctx->completion); in ablk_setkey()
928 crypto_skcipher_clear_flags(ctx->fallback_tfm, CRYPTO_TFM_REQ_MASK); in ablk_setkey()
929 crypto_skcipher_set_flags(ctx->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK); in ablk_setkey()
931 return crypto_skcipher_setkey(ctx->fallback_tfm, key, key_len); in ablk_setkey()
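ablk_setkey() shows the driver's reconfiguration gate: the caller takes one reference on ctx->configuring up front, every asynchronous NPE message (register_chain_var(), gen_rev_aes_key()) takes another, and one_packet() drops one per completion. The caller finally drops its own reference and sleeps only if messages are still in flight; aead_setup() below uses the same gate. A generic sketch of the pattern, with illustrative names:

    #include <linux/atomic.h>
    #include <linux/completion.h>

    struct cfg_gate {
            atomic_t pending;                /* in-flight config messages */
            struct completion done;
    };

    static void cfg_begin(struct cfg_gate *g)
    {
            init_completion(&g->done);
            atomic_set(&g->pending, 1);      /* the waiter's own reference */
    }

    static void cfg_submit_one(struct cfg_gate *g)
    {
            atomic_inc(&g->pending);         /* before queueing to the NPE */
    }

    static void cfg_completed_one(struct cfg_gate *g)  /* completion path */
    {
            if (atomic_dec_and_test(&g->pending))
                    complete(&g->done);
    }

    static void cfg_wait(struct cfg_gate *g)
    {
            /* drop our reference; sleep only if work is still pending */
            if (!atomic_dec_and_test(&g->pending))
                    wait_for_completion(&g->done);
    }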
948 return -EINVAL; in ablk_rfc3686_setkey()
950 memcpy(ctx->nonce, key + (key_len - CTR_RFC3686_NONCE_SIZE), in ablk_rfc3686_setkey()
953 key_len -= CTR_RFC3686_NONCE_SIZE; in ablk_rfc3686_setkey()
964 skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm); in ixp4xx_cipher_fallback()
965 skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags, in ixp4xx_cipher_fallback()
966 areq->base.complete, areq->base.data); in ixp4xx_cipher_fallback()
967 skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst, in ixp4xx_cipher_fallback()
968 areq->cryptlen, areq->iv); in ixp4xx_cipher_fallback()
970 err = crypto_skcipher_encrypt(&rctx->fallback_req); in ixp4xx_cipher_fallback()
972 err = crypto_skcipher_decrypt(&rctx->fallback_req); in ixp4xx_cipher_fallback()
983 unsigned int nbytes = req->cryptlen; in ablk_perform()
987 struct device *dev = &pdev->dev; in ablk_perform()
989 gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? in ablk_perform()
992 if (sg_nents(req->src) > 1 || sg_nents(req->dst) > 1) in ablk_perform()
996 return -EAGAIN; in ablk_perform()
997 if (atomic_read(&ctx->configuring)) in ablk_perform()
998 return -EAGAIN; in ablk_perform()
1000 dir = encrypt ? &ctx->encrypt : &ctx->decrypt; in ablk_perform()
1001 req_ctx->encrypt = encrypt; in ablk_perform()
1005 return -ENOMEM; in ablk_perform()
1007 crypt->data.ablk_req = req; in ablk_perform()
1008 crypt->crypto_ctx = dir->npe_ctx_phys; in ablk_perform()
1009 crypt->mode = dir->npe_mode; in ablk_perform()
1010 crypt->init_len = dir->npe_ctx_idx; in ablk_perform()
1012 crypt->crypt_offs = 0; in ablk_perform()
1013 crypt->crypt_len = nbytes; in ablk_perform()
1015 BUG_ON(ivsize && !req->iv); in ablk_perform()
1016 memcpy(crypt->iv, req->iv, ivsize); in ablk_perform()
1018 offset = req->cryptlen - ivsize; in ablk_perform()
1019 scatterwalk_map_and_copy(req_ctx->iv, req->src, offset, ivsize, 0); in ablk_perform()
1021 if (req->src != req->dst) { in ablk_perform()
1024 crypt->mode |= NPE_OP_NOT_IN_PLACE; in ablk_perform()
1027 req_ctx->dst = NULL; in ablk_perform()
1028 if (!chainup_buffers(dev, req->dst, nbytes, &dst_hook, in ablk_perform()
1032 req_ctx->dst = dst_hook.next; in ablk_perform()
1033 crypt->dst_buf = dst_hook.phys_next; in ablk_perform()
1035 req_ctx->dst = NULL; in ablk_perform()
1037 req_ctx->src = NULL; in ablk_perform()
1038 if (!chainup_buffers(dev, req->src, nbytes, &src_hook, flags, in ablk_perform()
1042 req_ctx->src = src_hook.next; in ablk_perform()
1043 crypt->src_buf = src_hook.phys_next; in ablk_perform()
1044 crypt->ctl_flags |= CTL_FLAG_PERFORM_ABLK; in ablk_perform()
1047 return -EINPROGRESS; in ablk_perform()
1050 free_buf_chain(dev, req_ctx->src, crypt->src_buf); in ablk_perform()
1052 if (req->src != req->dst) in ablk_perform()
1053 free_buf_chain(dev, req_ctx->dst, crypt->dst_buf); in ablk_perform()
1055 crypt->ctl_flags = CTL_FLAG_UNUSED; in ablk_perform()
1056 return -ENOMEM; in ablk_perform()
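The scatterwalk copy before submission (offset = cryptlen - ivsize above) is the CBC chaining fix-up: on decryption the next IV is the last ciphertext block, which an in-place operation destroys, so it is stashed in req_ctx->iv and handed back in one_packet() (lines 398-404 of the listing), then wiped. A sketch of the pair, with illustrative helper names:

    #include <crypto/scatterwalk.h>

    /* before submitting a CBC decrypt: remember the last ciphertext block */
    static void save_next_iv(struct skcipher_request *req,
                             struct ablk_ctx *req_ctx, unsigned int ivsize)
    {
            unsigned int offset = req->cryptlen - ivsize;

            scatterwalk_map_and_copy(req_ctx->iv, req->src, offset, ivsize, 0);
    }

    /* in the completion path: hand it to the caller and wipe the copy */
    static void restore_next_iv(struct skcipher_request *req,
                                struct ablk_ctx *req_ctx, unsigned int ivsize)
    {
            memcpy(req->iv, req_ctx->iv, ivsize);
            memzero_explicit(req_ctx->iv, ivsize);
    }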
1074 u8 *info = req->iv; in ablk_rfc3686_crypt()
1078 memcpy(iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE); in ablk_rfc3686_crypt()
1085 req->iv = iv; in ablk_rfc3686_crypt()
1087 req->iv = info; in ablk_rfc3686_crypt()
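rfc3686(ctr(aes)) never hands the caller's 8-byte IV straight to the engine: the 4-byte nonce peeled off the key in ablk_rfc3686_setkey() is prepended and the 32-bit block counter is seeded with 1, giving the 16-byte counter block that temporarily replaces req->iv around the call (and is restored afterwards, as above). A sketch of the assembly, using the macros from <crypto/ctr.h>:

    #include <crypto/ctr.h>

    /* counter block layout: | nonce (4) | per-request IV (8) | counter (4) | */
    static void rfc3686_build_iv(u8 iv[CTR_RFC3686_BLOCK_SIZE],
                                 const u8 *nonce, const u8 *req_iv)
    {
            memcpy(iv, nonce, CTR_RFC3686_NONCE_SIZE);
            memcpy(iv + CTR_RFC3686_NONCE_SIZE, req_iv, CTR_RFC3686_IV_SIZE);
            /* RFC 3686: the counter starts at 1 for the first block */
            *(__be32 *)(iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
                    cpu_to_be32(1);
    }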
1103 struct device *dev = &pdev->dev; in aead_perform()
1104 gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? in aead_perform()
1110 return -EAGAIN; in aead_perform()
1111 if (atomic_read(&ctx->configuring)) in aead_perform()
1112 return -EAGAIN; in aead_perform()
1115 dir = &ctx->encrypt; in aead_perform()
1116 cryptlen = req->cryptlen; in aead_perform()
1118 dir = &ctx->decrypt; in aead_perform()
1119 /* req->cryptlen includes the authsize when decrypting */ in aead_perform()
1120 cryptlen = req->cryptlen - authsize; in aead_perform()
1121 eff_cryptlen -= authsize; in aead_perform()
1125 return -ENOMEM; in aead_perform()
1127 crypt->data.aead_req = req; in aead_perform()
1128 crypt->crypto_ctx = dir->npe_ctx_phys; in aead_perform()
1129 crypt->mode = dir->npe_mode; in aead_perform()
1130 crypt->init_len = dir->npe_ctx_idx; in aead_perform()
1132 crypt->crypt_offs = cryptoffset; in aead_perform()
1133 crypt->crypt_len = eff_cryptlen; in aead_perform()
1135 crypt->auth_offs = 0; in aead_perform()
1136 crypt->auth_len = req->assoclen + cryptlen; in aead_perform()
1137 BUG_ON(ivsize && !req->iv); in aead_perform()
1138 memcpy(crypt->iv, req->iv, ivsize); in aead_perform()
1140 buf = chainup_buffers(dev, req->src, crypt->auth_len, in aead_perform()
1142 req_ctx->src = src_hook.next; in aead_perform()
1143 crypt->src_buf = src_hook.phys_next; in aead_perform()
1147 lastlen = buf->buf_len; in aead_perform()
1149 crypt->icv_rev_aes = buf->phys_addr + in aead_perform()
1150 buf->buf_len - authsize; in aead_perform()
1152 req_ctx->dst = NULL; in aead_perform()
1154 if (req->src != req->dst) { in aead_perform()
1157 crypt->mode |= NPE_OP_NOT_IN_PLACE; in aead_perform()
1160 buf = chainup_buffers(dev, req->dst, crypt->auth_len, in aead_perform()
1162 req_ctx->dst = dst_hook.next; in aead_perform()
1163 crypt->dst_buf = dst_hook.phys_next; in aead_perform()
1169 lastlen = buf->buf_len; in aead_perform()
1171 crypt->icv_rev_aes = buf->phys_addr + in aead_perform()
1172 buf->buf_len - authsize; in aead_perform()
1180 req_ctx->hmac_virt = dma_pool_alloc(buffer_pool, flags, &dma); in aead_perform()
1181 if (unlikely(!req_ctx->hmac_virt)) in aead_perform()
1183 crypt->icv_rev_aes = dma; in aead_perform()
1185 scatterwalk_map_and_copy(req_ctx->hmac_virt, in aead_perform()
1186 req->src, cryptlen, authsize, 0); in aead_perform()
1188 req_ctx->encrypt = encrypt; in aead_perform()
1190 req_ctx->hmac_virt = NULL; in aead_perform()
1193 crypt->ctl_flags |= CTL_FLAG_PERFORM_AEAD; in aead_perform()
1196 return -EINPROGRESS; in aead_perform()
1199 free_buf_chain(dev, req_ctx->dst, crypt->dst_buf); in aead_perform()
1201 free_buf_chain(dev, req_ctx->src, crypt->src_buf); in aead_perform()
1202 crypt->ctl_flags = CTL_FLAG_UNUSED; in aead_perform()
1203 return -ENOMEM; in aead_perform()
1212 if (!ctx->enckey_len && !ctx->authkey_len) in aead_setup()
1214 init_completion(&ctx->completion); in aead_setup()
1215 atomic_inc(&ctx->configuring); in aead_setup()
1217 reset_sa_dir(&ctx->encrypt); in aead_setup()
1218 reset_sa_dir(&ctx->decrypt); in aead_setup()
1220 ret = setup_cipher(&tfm->base, 0, ctx->enckey, ctx->enckey_len); in aead_setup()
1223 ret = setup_cipher(&tfm->base, 1, ctx->enckey, ctx->enckey_len); in aead_setup()
1226 ret = setup_auth(&tfm->base, 0, authsize, ctx->authkey, in aead_setup()
1227 ctx->authkey_len, digest_len); in aead_setup()
1230 ret = setup_auth(&tfm->base, 1, authsize, ctx->authkey, in aead_setup()
1231 ctx->authkey_len, digest_len); in aead_setup()
1233 if (!atomic_dec_and_test(&ctx->configuring)) in aead_setup()
1234 wait_for_completion(&ctx->completion); in aead_setup()
1243 return -EINVAL; in aead_setauthsize()
1256 if (keys.authkeylen > sizeof(ctx->authkey)) in aead_setkey()
1259 if (keys.enckeylen > sizeof(ctx->enckey)) in aead_setkey()
1262 memcpy(ctx->authkey, keys.authkey, keys.authkeylen); in aead_setkey()
1263 memcpy(ctx->enckey, keys.enckey, keys.enckeylen); in aead_setkey()
1264 ctx->authkey_len = keys.authkeylen; in aead_setkey()
1265 ctx->enckey_len = keys.enckeylen; in aead_setkey()
1271 return -EINVAL; in aead_setkey()
1285 err = -EINVAL; in des3_aead_setkey()
1286 if (keys.authkeylen > sizeof(ctx->authkey)) in des3_aead_setkey()
1293 memcpy(ctx->authkey, keys.authkey, keys.authkeylen); in des3_aead_setkey()
1294 memcpy(ctx->enckey, keys.enckey, keys.enckeylen); in des3_aead_setkey()
1295 ctx->authkey_len = keys.authkeylen; in des3_aead_setkey()
1296 ctx->enckey_len = keys.enckeylen; in des3_aead_setkey()
1307 return aead_perform(req, 1, req->assoclen, req->cryptlen, req->iv); in aead_encrypt()
1312 return aead_perform(req, 0, req->assoclen, req->cryptlen, req->iv); in aead_decrypt()
1317 .crypto = {
1329 .crypto = {
1338 .crypto = {
1350 .crypto = {
1361 .crypto = {
1372 .crypto = {
1382 .crypto = {
1393 .crypto = {
1410 .crypto = {
1422 .crypto = {
1435 .crypto = {
1447 .crypto = {
1460 .crypto = {
1472 .crypto = {
1485 #define IXP_POSTFIX "-ixp4xx"
1489 struct device *dev = &_pdev->dev; in ixp_crypto_probe()
1500 struct skcipher_alg *cra = &ixp4xx_algos[i].crypto; in ixp_crypto_probe()
1502 if (snprintf(cra->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, in ixp_crypto_probe()
1503 "%s"IXP_POSTFIX, cra->base.cra_name) >= in ixp_crypto_probe()
1510 cra->base.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY | in ixp_crypto_probe()
1514 if (!cra->setkey) in ixp_crypto_probe()
1515 cra->setkey = ablk_setkey; in ixp_crypto_probe()
1516 if (!cra->encrypt) in ixp_crypto_probe()
1517 cra->encrypt = ablk_encrypt; in ixp_crypto_probe()
1518 if (!cra->decrypt) in ixp_crypto_probe()
1519 cra->decrypt = ablk_decrypt; in ixp_crypto_probe()
1520 cra->init = init_tfm_ablk; in ixp_crypto_probe()
1521 cra->exit = exit_tfm_ablk; in ixp_crypto_probe()
1523 cra->base.cra_ctxsize = sizeof(struct ixp_ctx); in ixp_crypto_probe()
1524 cra->base.cra_module = THIS_MODULE; in ixp_crypto_probe()
1525 cra->base.cra_alignmask = 3; in ixp_crypto_probe()
1526 cra->base.cra_priority = 300; in ixp_crypto_probe()
1528 dev_err(&pdev->dev, "Failed to register '%s'\n", in ixp_crypto_probe()
1529 cra->base.cra_name); in ixp_crypto_probe()
1535 struct aead_alg *cra = &ixp4xx_aeads[i].crypto; in ixp_crypto_probe()
1537 if (snprintf(cra->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, in ixp_crypto_probe()
1538 "%s"IXP_POSTFIX, cra->base.cra_name) >= in ixp_crypto_probe()
1545 cra->base.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY | in ixp_crypto_probe()
1548 cra->setkey = cra->setkey ?: aead_setkey; in ixp_crypto_probe()
1549 cra->setauthsize = aead_setauthsize; in ixp_crypto_probe()
1550 cra->encrypt = aead_encrypt; in ixp_crypto_probe()
1551 cra->decrypt = aead_decrypt; in ixp_crypto_probe()
1552 cra->init = init_tfm_aead; in ixp_crypto_probe()
1553 cra->exit = exit_tfm_aead; in ixp_crypto_probe()
1555 cra->base.cra_ctxsize = sizeof(struct ixp_ctx); in ixp_crypto_probe()
1556 cra->base.cra_module = THIS_MODULE; in ixp_crypto_probe()
1557 cra->base.cra_alignmask = 3; in ixp_crypto_probe()
1558 cra->base.cra_priority = 300; in ixp_crypto_probe()
1561 dev_err(&pdev->dev, "Failed to register '%s'\n", in ixp_crypto_probe()
1562 cra->base.cra_driver_name); in ixp_crypto_probe()
1576 crypto_unregister_aead(&ixp4xx_aeads[i].crypto); in ixp_crypto_remove()
1581 crypto_unregister_skcipher(&ixp4xx_algos[i].crypto); in ixp_crypto_remove()
1583 release_ixp_crypto(&pdev->dev); in ixp_crypto_remove()
1589 .compatible = "intel,ixp4xx-crypto",
1606 MODULE_DESCRIPTION("IXP4xx hardware crypto");
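The compatible string is matched by ordinary platform-driver glue. A minimal sketch of the registration, assuming the ixp_crypto_probe()/ixp_crypto_remove() pair from the listing:

    #include <linux/module.h>
    #include <linux/platform_device.h>

    static const struct of_device_id ixp4xx_crypto_of_match[] = {
            { .compatible = "intel,ixp4xx-crypto" },
            { },
    };
    MODULE_DEVICE_TABLE(of, ixp4xx_crypto_of_match);

    static struct platform_driver ixp_crypto_driver = {
            .probe = ixp_crypto_probe,
            .remove = ixp_crypto_remove,
            .driver = {
                    .name = "ixp4xx_crypto",
                    .of_match_table = ixp4xx_crypto_of_match,
            },
    };
    module_platform_driver(ixp_crypto_driver);

    MODULE_LICENSE("GPL");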