crypto_user_stat.c

// SPDX-License-Identifier: GPL-2.0
/*
 * Crypto user configuration API.
 *
 * Copyright (C) 2017-2018 Corentin Labbe <clabbe@baylibre.com>
 *
 */

#include <linux/crypto.h>
#include <linux/cryptouser.h>
#include <linux/sched.h>
#include <net/netlink.h>
#include <crypto/internal/skcipher.h>
#include <crypto/internal/rng.h>
#include <crypto/akcipher.h>
#include <crypto/kpp.h>
#include <crypto/internal/cryptouser.h>

#include "internal.h"

#define null_terminated(x)	(strnlen(x, sizeof(x)) < sizeof(x))

static DEFINE_MUTEX(crypto_cfg_mutex);

extern struct sock *crypto_nlsk;

struct crypto_dump_info {
	struct sk_buff *in_skb;
	struct sk_buff *out_skb;
	u32 nlmsg_seq;
	u16 nlmsg_flags;
};

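/*
 * Each crypto_report_*() helper below copies the statistics counters for one
 * algorithm type out of struct crypto_alg and emits them as a single
 * CRYPTOCFGA_STAT_* netlink attribute carrying a struct crypto_stat.
 * They return 0 on success or -EMSGSIZE if the attribute does not fit in skb.
 */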
static int crypto_report_aead(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat raead;
	u64 v64;
	u32 v32;

	strncpy(raead.type, "aead", sizeof(raead.type));
	v32 = atomic_read(&alg->encrypt_cnt);
	raead.stat_encrypt_cnt = v32;
	v64 = atomic64_read(&alg->encrypt_tlen);
	raead.stat_encrypt_tlen = v64;
	v32 = atomic_read(&alg->decrypt_cnt);
	raead.stat_decrypt_cnt = v32;
	v64 = atomic64_read(&alg->decrypt_tlen);
	raead.stat_decrypt_tlen = v64;
	v32 = atomic_read(&alg->aead_err_cnt);
	raead.stat_aead_err_cnt = v32;

	if (nla_put(skb, CRYPTOCFGA_STAT_AEAD,
		    sizeof(struct crypto_stat), &raead))
		goto nla_put_failure;
	return 0;

nla_put_failure:
	return -EMSGSIZE;
}

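/* Cipher statistics: encrypt/decrypt counts, total lengths and error count. */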
static int crypto_report_cipher(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat rcipher;
	u64 v64;
	u32 v32;

	strlcpy(rcipher.type, "cipher", sizeof(rcipher.type));
	v32 = atomic_read(&alg->encrypt_cnt);
	rcipher.stat_encrypt_cnt = v32;
	v64 = atomic64_read(&alg->encrypt_tlen);
	rcipher.stat_encrypt_tlen = v64;
	v32 = atomic_read(&alg->decrypt_cnt);
	rcipher.stat_decrypt_cnt = v32;
	v64 = atomic64_read(&alg->decrypt_tlen);
	rcipher.stat_decrypt_tlen = v64;
	v32 = atomic_read(&alg->cipher_err_cnt);
	rcipher.stat_cipher_err_cnt = v32;

	if (nla_put(skb, CRYPTOCFGA_STAT_CIPHER,
		    sizeof(struct crypto_stat), &rcipher))
		goto nla_put_failure;
	return 0;

nla_put_failure:
	return -EMSGSIZE;
}

static int crypto_report_comp(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat rcomp;
	u64 v64;
	u32 v32;

	strlcpy(rcomp.type, "compression", sizeof(rcomp.type));
	v32 = atomic_read(&alg->compress_cnt);
	rcomp.stat_compress_cnt = v32;
	v64 = atomic64_read(&alg->compress_tlen);
	rcomp.stat_compress_tlen = v64;
	v32 = atomic_read(&alg->decompress_cnt);
	rcomp.stat_decompress_cnt = v32;
	v64 = atomic64_read(&alg->decompress_tlen);
	rcomp.stat_decompress_tlen = v64;
	v32 = atomic_read(&alg->cipher_err_cnt);
	rcomp.stat_compress_err_cnt = v32;

	if (nla_put(skb, CRYPTOCFGA_STAT_COMPRESS,
		    sizeof(struct crypto_stat), &rcomp))
		goto nla_put_failure;
	return 0;

nla_put_failure:
	return -EMSGSIZE;
}

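/*
 * Compression statistics for acomp algorithms; also used for scomp via the
 * CRYPTO_ALG_TYPE_SCOMPRESS case in crypto_reportstat_one().
 */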
static int crypto_report_acomp(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat racomp;
	u64 v64;
	u32 v32;

	strlcpy(racomp.type, "acomp", sizeof(racomp.type));
	v32 = atomic_read(&alg->compress_cnt);
	racomp.stat_compress_cnt = v32;
	v64 = atomic64_read(&alg->compress_tlen);
	racomp.stat_compress_tlen = v64;
	v32 = atomic_read(&alg->decompress_cnt);
	racomp.stat_decompress_cnt = v32;
	v64 = atomic64_read(&alg->decompress_tlen);
	racomp.stat_decompress_tlen = v64;
	v32 = atomic_read(&alg->cipher_err_cnt);
	racomp.stat_compress_err_cnt = v32;

	if (nla_put(skb, CRYPTOCFGA_STAT_ACOMP,
		    sizeof(struct crypto_stat), &racomp))
		goto nla_put_failure;
	return 0;

nla_put_failure:
	return -EMSGSIZE;
}

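/* Akcipher statistics: encrypt, decrypt, sign and verify counts plus errors. */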
static int crypto_report_akcipher(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat rakcipher;
	u64 v64;
	u32 v32;

	strncpy(rakcipher.type, "akcipher", sizeof(rakcipher.type));
	v32 = atomic_read(&alg->encrypt_cnt);
	rakcipher.stat_encrypt_cnt = v32;
	v64 = atomic64_read(&alg->encrypt_tlen);
	rakcipher.stat_encrypt_tlen = v64;
	v32 = atomic_read(&alg->decrypt_cnt);
	rakcipher.stat_decrypt_cnt = v32;
	v64 = atomic64_read(&alg->decrypt_tlen);
	rakcipher.stat_decrypt_tlen = v64;
	v32 = atomic_read(&alg->sign_cnt);
	rakcipher.stat_sign_cnt = v32;
	v32 = atomic_read(&alg->verify_cnt);
	rakcipher.stat_verify_cnt = v32;
	v32 = atomic_read(&alg->akcipher_err_cnt);
	rakcipher.stat_akcipher_err_cnt = v32;

	if (nla_put(skb, CRYPTOCFGA_STAT_AKCIPHER,
		    sizeof(struct crypto_stat), &rakcipher))
		goto nla_put_failure;
	return 0;

nla_put_failure:
	return -EMSGSIZE;
}

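/* KPP statistics: setsecret, generate_public_key and compute_shared_secret counts. */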
static int crypto_report_kpp(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat rkpp;
	u32 v;

	strlcpy(rkpp.type, "kpp", sizeof(rkpp.type));
	v = atomic_read(&alg->setsecret_cnt);
	rkpp.stat_setsecret_cnt = v;
	v = atomic_read(&alg->generate_public_key_cnt);
	rkpp.stat_generate_public_key_cnt = v;
	v = atomic_read(&alg->compute_shared_secret_cnt);
	rkpp.stat_compute_shared_secret_cnt = v;
	v = atomic_read(&alg->kpp_err_cnt);
	rkpp.stat_kpp_err_cnt = v;

	if (nla_put(skb, CRYPTOCFGA_STAT_KPP,
		    sizeof(struct crypto_stat), &rkpp))
		goto nla_put_failure;
	return 0;

nla_put_failure:
	return -EMSGSIZE;
}

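/*
 * Hash statistics, reported as CRYPTOCFGA_STAT_HASH for both the ahash and
 * shash helpers below; only the type string differs.
 */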
static int crypto_report_ahash(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat rhash;
	u64 v64;
	u32 v32;

	strncpy(rhash.type, "ahash", sizeof(rhash.type));
	v32 = atomic_read(&alg->hash_cnt);
	rhash.stat_hash_cnt = v32;
	v64 = atomic64_read(&alg->hash_tlen);
	rhash.stat_hash_tlen = v64;
	v32 = atomic_read(&alg->hash_err_cnt);
	rhash.stat_hash_err_cnt = v32;

	if (nla_put(skb, CRYPTOCFGA_STAT_HASH,
		    sizeof(struct crypto_stat), &rhash))
		goto nla_put_failure;
	return 0;

nla_put_failure:
	return -EMSGSIZE;
}

static int crypto_report_shash(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat rhash;
	u64 v64;
	u32 v32;

	strncpy(rhash.type, "shash", sizeof(rhash.type));
	v32 = atomic_read(&alg->hash_cnt);
	rhash.stat_hash_cnt = v32;
	v64 = atomic64_read(&alg->hash_tlen);
	rhash.stat_hash_tlen = v64;
	v32 = atomic_read(&alg->hash_err_cnt);
	rhash.stat_hash_err_cnt = v32;

	if (nla_put(skb, CRYPTOCFGA_STAT_HASH,
		    sizeof(struct crypto_stat), &rhash))
		goto nla_put_failure;
	return 0;

nla_put_failure:
	return -EMSGSIZE;
}

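/*
 * RNG statistics: generate count and total length, seed count and error
 * count.  Note the error counter is read through hash_err_cnt, which appears
 * to share storage with the other *_err_cnt fields in struct crypto_alg.
 */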
static int crypto_report_rng(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_stat rrng;
	u64 v64;
	u32 v32;

	strncpy(rrng.type, "rng", sizeof(rrng.type));
	v32 = atomic_read(&alg->generate_cnt);
	rrng.stat_generate_cnt = v32;
	v64 = atomic64_read(&alg->generate_tlen);
	rrng.stat_generate_tlen = v64;
	v32 = atomic_read(&alg->seed_cnt);
	rrng.stat_seed_cnt = v32;
	v32 = atomic_read(&alg->hash_err_cnt);
	rrng.stat_rng_err_cnt = v32;

	if (nla_put(skb, CRYPTOCFGA_STAT_RNG,
		    sizeof(struct crypto_stat), &rrng))
		goto nla_put_failure;
	return 0;

nla_put_failure:
	return -EMSGSIZE;
}

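/*
 * Fill in the generic struct crypto_user_alg fields for @alg, then dispatch
 * to the matching crypto_report_*() helper based on cra_flags.  Larval
 * (not yet finalized) algorithms get a plain "larval" entry instead.
 */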
static int crypto_reportstat_one(struct crypto_alg *alg,
				 struct crypto_user_alg *ualg,
				 struct sk_buff *skb)
{
	strlcpy(ualg->cru_name, alg->cra_name, sizeof(ualg->cru_name));
	strlcpy(ualg->cru_driver_name, alg->cra_driver_name,
		sizeof(ualg->cru_driver_name));
	strlcpy(ualg->cru_module_name, module_name(alg->cra_module),
		sizeof(ualg->cru_module_name));

	ualg->cru_type = 0;
	ualg->cru_mask = 0;
	ualg->cru_flags = alg->cra_flags;
	ualg->cru_refcnt = refcount_read(&alg->cra_refcnt);

	if (nla_put_u32(skb, CRYPTOCFGA_PRIORITY_VAL, alg->cra_priority))
		goto nla_put_failure;
	if (alg->cra_flags & CRYPTO_ALG_LARVAL) {
		struct crypto_stat rl;

		strlcpy(rl.type, "larval", sizeof(rl.type));
		if (nla_put(skb, CRYPTOCFGA_STAT_LARVAL,
			    sizeof(struct crypto_stat), &rl))
			goto nla_put_failure;
		goto out;
	}

	switch (alg->cra_flags & (CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_LARVAL)) {
	case CRYPTO_ALG_TYPE_AEAD:
		if (crypto_report_aead(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_SKCIPHER:
		if (crypto_report_cipher(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_BLKCIPHER:
		if (crypto_report_cipher(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_CIPHER:
		if (crypto_report_cipher(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_COMPRESS:
		if (crypto_report_comp(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_ACOMPRESS:
		if (crypto_report_acomp(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_SCOMPRESS:
		if (crypto_report_acomp(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_AKCIPHER:
		if (crypto_report_akcipher(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_KPP:
		if (crypto_report_kpp(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_AHASH:
		if (crypto_report_ahash(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_HASH:
		if (crypto_report_shash(skb, alg))
			goto nla_put_failure;
		break;
	case CRYPTO_ALG_TYPE_RNG:
		if (crypto_report_rng(skb, alg))
			goto nla_put_failure;
		break;
	default:
		pr_err("ERROR: Unhandled alg %d in %s\n",
		       alg->cra_flags & (CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_LARVAL),
		       __func__);
	}

out:
	return 0;

nla_put_failure:
	return -EMSGSIZE;
}

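/*
 * Wrap one algorithm's statistics in a CRYPTO_MSG_GETSTAT netlink message on
 * the output skb, cancelling the message if the payload cannot be built.
 */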
static int crypto_reportstat_alg(struct crypto_alg *alg,
				 struct crypto_dump_info *info)
{
	struct sk_buff *in_skb = info->in_skb;
	struct sk_buff *skb = info->out_skb;
	struct nlmsghdr *nlh;
	struct crypto_user_alg *ualg;
	int err = 0;

	nlh = nlmsg_put(skb, NETLINK_CB(in_skb).portid, info->nlmsg_seq,
			CRYPTO_MSG_GETSTAT, sizeof(*ualg), info->nlmsg_flags);
	if (!nlh) {
		err = -EMSGSIZE;
		goto out;
	}

	ualg = nlmsg_data(nlh);

	err = crypto_reportstat_one(alg, ualg, skb);
	if (err) {
		nlmsg_cancel(skb, nlh);
		goto out;
	}

	nlmsg_end(skb, nlh);

out:
	return err;
}

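/*
 * Handle a CRYPTO_MSG_GETSTAT request for a single algorithm: look it up by
 * name/driver name, build the reply in a freshly allocated skb and unicast
 * it back to the requesting netlink port.
 */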
int crypto_reportstat(struct sk_buff *in_skb, struct nlmsghdr *in_nlh,
		      struct nlattr **attrs)
{
	struct crypto_user_alg *p = nlmsg_data(in_nlh);
	struct crypto_alg *alg;
	struct sk_buff *skb;
	struct crypto_dump_info info;
	int err;

	if (!null_terminated(p->cru_name) || !null_terminated(p->cru_driver_name))
		return -EINVAL;

	alg = crypto_alg_match(p, 0);
	if (!alg)
		return -ENOENT;

	err = -ENOMEM;
	skb = nlmsg_new(NLMSG_DEFAULT_SIZE, GFP_ATOMIC);
	if (!skb)
		goto drop_alg;

	info.in_skb = in_skb;
	info.out_skb = skb;
	info.nlmsg_seq = in_nlh->nlmsg_seq;
	info.nlmsg_flags = 0;

	err = crypto_reportstat_alg(alg, &info);

drop_alg:
	crypto_mod_put(alg);

	if (err)
		return err;

	return nlmsg_unicast(crypto_nlsk, skb, NETLINK_CB(in_skb).portid);
}

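/*
 * Netlink dump callback: emit one NLM_F_MULTI statistics message per
 * registered algorithm.  cb->args[0] marks the dump as finished so the
 * netlink core does not invoke the callback again.
 */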
int crypto_dump_reportstat(struct sk_buff *skb, struct netlink_callback *cb)
{
	struct crypto_alg *alg;
	struct crypto_dump_info info;
	int err;

	if (cb->args[0])
		goto out;

	cb->args[0] = 1;

	info.in_skb = cb->skb;
	info.out_skb = skb;
	info.nlmsg_seq = cb->nlh->nlmsg_seq;
	info.nlmsg_flags = NLM_F_MULTI;

	list_for_each_entry(alg, &crypto_alg_list, cra_list) {
		err = crypto_reportstat_alg(alg, &info);
		if (err)
			goto out_err;
	}

out:
	return skb->len;
out_err:
	return err;
}

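/* Dump-done callback: nothing to clean up after a statistics dump. */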
int crypto_dump_reportstat_done(struct netlink_callback *cb)
{
	return 0;
}

MODULE_LICENSE("GPL");