// SPDX-License-Identifier: GPL-2.0
/*
 * Crypto user configuration API.
 *
 * Copyright (C) 2017-2018 Corentin Labbe <clabbe@baylibre.com>
 *
 */

#include <linux/crypto.h>
#include <linux/cryptouser.h>
#include <linux/sched.h>
#include <net/netlink.h>
#include <crypto/internal/skcipher.h>
#include <crypto/internal/rng.h>
#include <crypto/akcipher.h>
#include <crypto/kpp.h>
#include <crypto/internal/cryptouser.h>

#include "internal.h"

#define null_terminated(x)	(strnlen(x, sizeof(x)) < sizeof(x))

static DEFINE_MUTEX(crypto_cfg_mutex);

extern struct sock *crypto_nlsk;

struct crypto_dump_info {
	struct sk_buff *in_skb;
	struct sk_buff *out_skb;
	u32 nlmsg_seq;
	u16 nlmsg_flags;
};

static int crypto_report_aead(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat_aead raead;

	memset(&raead, 0, sizeof(raead));

	strscpy(raead.type, "aead", sizeof(raead.type));

	raead.stat_encrypt_cnt = atomic64_read(&alg->stats.aead.encrypt_cnt);
	raead.stat_encrypt_tlen = atomic64_read(&alg->stats.aead.encrypt_tlen);
	raead.stat_decrypt_cnt = atomic64_read(&alg->stats.aead.decrypt_cnt);
	raead.stat_decrypt_tlen = atomic64_read(&alg->stats.aead.decrypt_tlen);
	raead.stat_err_cnt = atomic64_read(&alg->stats.aead.err_cnt);

	return nla_put(skb, CRYPTOCFGA_STAT_AEAD, sizeof(raead), &raead);
}

static int crypto_report_cipher(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat_cipher rcipher;

	memset(&rcipher, 0, sizeof(rcipher));

	strscpy(rcipher.type, "cipher", sizeof(rcipher.type));

	rcipher.stat_encrypt_cnt = atomic64_read(&alg->stats.cipher.encrypt_cnt);
	rcipher.stat_encrypt_tlen = atomic64_read(&alg->stats.cipher.encrypt_tlen);
	rcipher.stat_decrypt_cnt = atomic64_read(&alg->stats.cipher.decrypt_cnt);
	rcipher.stat_decrypt_tlen = atomic64_read(&alg->stats.cipher.decrypt_tlen);
	rcipher.stat_err_cnt = atomic64_read(&alg->stats.cipher.err_cnt);

	return nla_put(skb, CRYPTOCFGA_STAT_CIPHER, sizeof(rcipher), &rcipher);
}

static int crypto_report_comp(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat_compress rcomp;

	memset(&rcomp, 0, sizeof(rcomp));

	strscpy(rcomp.type, "compression", sizeof(rcomp.type));
	rcomp.stat_compress_cnt = atomic64_read(&alg->stats.compress.compress_cnt);
	rcomp.stat_compress_tlen = atomic64_read(&alg->stats.compress.compress_tlen);
	rcomp.stat_decompress_cnt = atomic64_read(&alg->stats.compress.decompress_cnt);
	rcomp.stat_decompress_tlen = atomic64_read(&alg->stats.compress.decompress_tlen);
	rcomp.stat_err_cnt = atomic64_read(&alg->stats.compress.err_cnt);

	return nla_put(skb, CRYPTOCFGA_STAT_COMPRESS, sizeof(rcomp), &rcomp);
}

static int crypto_report_acomp(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat_compress racomp;

	memset(&racomp, 0, sizeof(racomp));

	strscpy(racomp.type, "acomp", sizeof(racomp.type));
	racomp.stat_compress_cnt = atomic64_read(&alg->stats.compress.compress_cnt);
	racomp.stat_compress_tlen = atomic64_read(&alg->stats.compress.compress_tlen);
	racomp.stat_decompress_cnt = atomic64_read(&alg->stats.compress.decompress_cnt);
	racomp.stat_decompress_tlen = atomic64_read(&alg->stats.compress.decompress_tlen);
	racomp.stat_err_cnt = atomic64_read(&alg->stats.compress.err_cnt);

	return nla_put(skb, CRYPTOCFGA_STAT_ACOMP, sizeof(racomp), &racomp);
}

static int
crypto_report_akcipher(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat_akcipher rakcipher;

	memset(&rakcipher, 0, sizeof(rakcipher));

	strscpy(rakcipher.type, "akcipher", sizeof(rakcipher.type));
	rakcipher.stat_encrypt_cnt = atomic64_read(&alg->stats.akcipher.encrypt_cnt);
	rakcipher.stat_encrypt_tlen = atomic64_read(&alg->stats.akcipher.encrypt_tlen);
	rakcipher.stat_decrypt_cnt = atomic64_read(&alg->stats.akcipher.decrypt_cnt);
	rakcipher.stat_decrypt_tlen = atomic64_read(&alg->stats.akcipher.decrypt_tlen);
	rakcipher.stat_sign_cnt = atomic64_read(&alg->stats.akcipher.sign_cnt);
	rakcipher.stat_verify_cnt = atomic64_read(&alg->stats.akcipher.verify_cnt);
	rakcipher.stat_err_cnt = atomic64_read(&alg->stats.akcipher.err_cnt);

	return nla_put(skb, CRYPTOCFGA_STAT_AKCIPHER,
		       sizeof(rakcipher), &rakcipher);
}

static int crypto_report_kpp(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat_kpp rkpp;

	memset(&rkpp, 0, sizeof(rkpp));

	strscpy(rkpp.type, "kpp", sizeof(rkpp.type));

	rkpp.stat_setsecret_cnt = atomic64_read(&alg->stats.kpp.setsecret_cnt);
	rkpp.stat_generate_public_key_cnt = atomic64_read(&alg->stats.kpp.generate_public_key_cnt);
	rkpp.stat_compute_shared_secret_cnt = atomic64_read(&alg->stats.kpp.compute_shared_secret_cnt);
	rkpp.stat_err_cnt = atomic64_read(&alg->stats.kpp.err_cnt);

	return nla_put(skb, CRYPTOCFGA_STAT_KPP, sizeof(rkpp), &rkpp);
}

static int crypto_report_ahash(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat_hash rhash;

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "ahash", sizeof(rhash.type));

	rhash.stat_hash_cnt = atomic64_read(&alg->stats.hash.hash_cnt);
	rhash.stat_hash_tlen = atomic64_read(&alg->stats.hash.hash_tlen);
	rhash.stat_err_cnt = atomic64_read(&alg->stats.hash.err_cnt);

	return nla_put(skb, CRYPTOCFGA_STAT_HASH, sizeof(rhash), &rhash);
}

static int crypto_report_shash(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat_hash rhash;

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.stat_hash_cnt = atomic64_read(&alg->stats.hash.hash_cnt);
	rhash.stat_hash_tlen = atomic64_read(&alg->stats.hash.hash_tlen);
	rhash.stat_err_cnt = atomic64_read(&alg->stats.hash.err_cnt);

	return nla_put(skb, CRYPTOCFGA_STAT_HASH, sizeof(rhash), &rhash);
}

static int crypto_report_rng(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat_rng rrng;

	memset(&rrng, 0, sizeof(rrng));

	strscpy(rrng.type, "rng", sizeof(rrng.type));

	rrng.stat_generate_cnt = atomic64_read(&alg->stats.rng.generate_cnt);
	rrng.stat_generate_tlen = atomic64_read(&alg->stats.rng.generate_tlen);
	rrng.stat_seed_cnt = atomic64_read(&alg->stats.rng.seed_cnt);
	rrng.stat_err_cnt = atomic64_read(&alg->stats.rng.err_cnt);

	return nla_put(skb, CRYPTOCFGA_STAT_RNG, sizeof(rrng), &rrng);
}

static int crypto_reportstat_one(struct crypto_alg *alg,
				 struct crypto_user_alg *ualg,
				 struct sk_buff *skb)
{
	memset(ualg, 0, sizeof(*ualg));

	strscpy(ualg->cru_name, alg->cra_name, sizeof(ualg->cru_name));
	strscpy(ualg->cru_driver_name, alg->cra_driver_name,
		sizeof(ualg->cru_driver_name));
	strscpy(ualg->cru_module_name, module_name(alg->cra_module),
		sizeof(ualg->cru_module_name));
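
	/*
	 * cru_type and cru_mask are only used by userspace to select an
	 * algorithm in the request; they carry no information in the
	 * reply, so clear them.
	 */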
	ualg->cru_type = 0;
	ualg->cru_mask = 0;
	ualg->cru_flags = alg->cra_flags;
	ualg->cru_refcnt = refcount_read(&alg->cra_refcnt);

	if (nla_put_u32(skb, CRYPTOCFGA_PRIORITY_VAL, alg->cra_priority))
		goto nla_put_failure;
	if (alg->cra_flags & CRYPTO_ALG_LARVAL) {
		struct crypto_stat_larval rl;

		memset(&rl, 0, sizeof(rl));
		strscpy(rl.type, "larval", sizeof(rl.type));
		if (nla_put(skb, CRYPTOCFGA_STAT_LARVAL, sizeof(rl), &rl))
			goto nla_put_failure;
		goto out;
	}

	switch (alg->cra_flags & (CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_LARVAL)) {
	case CRYPTO_ALG_TYPE_AEAD:
		if (crypto_report_aead(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_SKCIPHER:
		if (crypto_report_cipher(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_BLKCIPHER:
		if (crypto_report_cipher(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_CIPHER:
		if (crypto_report_cipher(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_COMPRESS:
		if (crypto_report_comp(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_ACOMPRESS:
		if (crypto_report_acomp(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_SCOMPRESS:
		if (crypto_report_acomp(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_AKCIPHER:
		if (crypto_report_akcipher(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_KPP:
		if (crypto_report_kpp(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_AHASH:
		if (crypto_report_ahash(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_HASH:
		if (crypto_report_shash(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_RNG:
		if (crypto_report_rng(skb, alg))
			goto nla_put_failure;
		break;
	default:
		pr_err("ERROR: Unhandled alg %d in %s\n",
		       alg->cra_flags & (CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_LARVAL),
		       __func__);
	}

out:
	return 0;

nla_put_failure:
	return -EMSGSIZE;
}

static int crypto_reportstat_alg(struct crypto_alg *alg,
				 struct crypto_dump_info *info)
{
	struct sk_buff *in_skb = info->in_skb;
	struct sk_buff *skb = info->out_skb;
	struct nlmsghdr *nlh;
	struct crypto_user_alg *ualg;
	int err = 0;

	nlh = nlmsg_put(skb, NETLINK_CB(in_skb).portid, info->nlmsg_seq,
			CRYPTO_MSG_GETSTAT, sizeof(*ualg), info->nlmsg_flags);
	if (!nlh) {
		err = -EMSGSIZE;
		goto out;
	}

	ualg = nlmsg_data(nlh);

	err = crypto_reportstat_one(alg, ualg, skb);
	if (err) {
		nlmsg_cancel(skb, nlh);
		goto out;
	}

	nlmsg_end(skb, nlh);

out:
	return err;
}

int crypto_reportstat(struct sk_buff *in_skb, struct nlmsghdr *in_nlh,
		      struct nlattr **attrs)
{
	struct crypto_user_alg *p = nlmsg_data(in_nlh);
	struct crypto_alg *alg;
	struct sk_buff *skb;
	struct crypto_dump_info info;
	int err;

	if (!null_terminated(p->cru_name) || !null_terminated(p->cru_driver_name))
		return -EINVAL;

	alg = crypto_alg_match(p, 0);
	if (!alg)
		return -ENOENT;

	err = -ENOMEM;
	skb = nlmsg_new(NLMSG_DEFAULT_SIZE, GFP_ATOMIC);
	if (!skb)
		goto drop_alg;

	info.in_skb = in_skb;
	info.out_skb = skb;
	info.nlmsg_seq = in_nlh->nlmsg_seq;
	info.nlmsg_flags = 0;

	err = crypto_reportstat_alg(alg, &info);
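
	/* Success or failure, drop the reference taken by crypto_alg_match(). */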
drop_alg:
	crypto_mod_put(alg);

	if (err) {
		/* The reply skb is only consumed by nlmsg_unicast(); free it
		 * here to avoid leaking it on the error path (kfree_skb() is
		 * a no-op if allocation failed and skb is NULL).
		 */
		kfree_skb(skb);
		return err;
	}

	return nlmsg_unicast(crypto_nlsk, skb, NETLINK_CB(in_skb).portid);
}

MODULE_LICENSE("GPL");