/* speck.c */
  1. // SPDX-License-Identifier: GPL-2.0
  2. /*
  3. * Speck: a lightweight block cipher
  4. *
  5. * Copyright (c) 2018 Google, Inc
  6. *
  7. * Speck has 10 variants, including 5 block sizes. For now we only implement
  8. * the variants Speck128/128, Speck128/192, Speck128/256, Speck64/96, and
  9. * Speck64/128. Speck${B}/${K} denotes the variant with a block size of B bits
  10. * and a key size of K bits. The Speck128 variants are believed to be the most
  11. * secure variants, and they use the same block size and key sizes as AES. The
  12. * Speck64 variants are less secure, but on 32-bit processors are usually
  13. * faster. The remaining variants (Speck32, Speck48, and Speck96) are even less
  14. * secure and/or not as well suited for implementation on either 32-bit or
  15. * 64-bit processors, so are omitted.
  16. *
  17. * Reference: "The Simon and Speck Families of Lightweight Block Ciphers"
  18. * https://eprint.iacr.org/2013/404.pdf
  19. *
  20. * In a correspondence, the Speck designers have also clarified that the words
  21. * should be interpreted in little-endian format, and the words should be
  22. * ordered such that the first word of each block is 'y' rather than 'x', and
  23. * the first key word (rather than the last) becomes the first round key.
  24. */
  25. #include <asm/unaligned.h>
  26. #include <crypto/speck.h>
  27. #include <linux/bitops.h>
  28. #include <linux/crypto.h>
  29. #include <linux/init.h>
  30. #include <linux/module.h>
  31. /* Speck128 */
  32. static __always_inline void speck128_round(u64 *x, u64 *y, u64 k)
  33. {
  34. *x = ror64(*x, 8);
  35. *x += *y;
  36. *x ^= k;
  37. *y = rol64(*y, 3);
  38. *y ^= *x;
  39. }
  40. static __always_inline void speck128_unround(u64 *x, u64 *y, u64 k)
  41. {
  42. *y ^= *x;
  43. *y = ror64(*y, 3);
  44. *x ^= k;
  45. *x -= *y;
  46. *x = rol64(*x, 8);
  47. }
  48. void crypto_speck128_encrypt(const struct speck128_tfm_ctx *ctx,
  49. u8 *out, const u8 *in)
  50. {
  51. u64 y = get_unaligned_le64(in);
  52. u64 x = get_unaligned_le64(in + 8);
  53. int i;
  54. for (i = 0; i < ctx->nrounds; i++)
  55. speck128_round(&x, &y, ctx->round_keys[i]);
  56. put_unaligned_le64(y, out);
  57. put_unaligned_le64(x, out + 8);
  58. }
  59. EXPORT_SYMBOL_GPL(crypto_speck128_encrypt);
  60. static void speck128_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
  61. {
  62. crypto_speck128_encrypt(crypto_tfm_ctx(tfm), out, in);
  63. }
  64. void crypto_speck128_decrypt(const struct speck128_tfm_ctx *ctx,
  65. u8 *out, const u8 *in)
  66. {
  67. u64 y = get_unaligned_le64(in);
  68. u64 x = get_unaligned_le64(in + 8);
  69. int i;
  70. for (i = ctx->nrounds - 1; i >= 0; i--)
  71. speck128_unround(&x, &y, ctx->round_keys[i]);
  72. put_unaligned_le64(y, out);
  73. put_unaligned_le64(x, out + 8);
  74. }
  75. EXPORT_SYMBOL_GPL(crypto_speck128_decrypt);
  76. static void speck128_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
  77. {
  78. crypto_speck128_decrypt(crypto_tfm_ctx(tfm), out, in);
  79. }
  80. int crypto_speck128_setkey(struct speck128_tfm_ctx *ctx, const u8 *key,
  81. unsigned int keylen)
  82. {
  83. u64 l[3];
  84. u64 k;
  85. int i;
  86. switch (keylen) {
  87. case SPECK128_128_KEY_SIZE:
  88. k = get_unaligned_le64(key);
  89. l[0] = get_unaligned_le64(key + 8);
  90. ctx->nrounds = SPECK128_128_NROUNDS;
  91. for (i = 0; i < ctx->nrounds; i++) {
  92. ctx->round_keys[i] = k;
  93. speck128_round(&l[0], &k, i);
  94. }
  95. break;
  96. case SPECK128_192_KEY_SIZE:
  97. k = get_unaligned_le64(key);
  98. l[0] = get_unaligned_le64(key + 8);
  99. l[1] = get_unaligned_le64(key + 16);
  100. ctx->nrounds = SPECK128_192_NROUNDS;
  101. for (i = 0; i < ctx->nrounds; i++) {
  102. ctx->round_keys[i] = k;
  103. speck128_round(&l[i % 2], &k, i);
  104. }
  105. break;
  106. case SPECK128_256_KEY_SIZE:
  107. k = get_unaligned_le64(key);
  108. l[0] = get_unaligned_le64(key + 8);
  109. l[1] = get_unaligned_le64(key + 16);
  110. l[2] = get_unaligned_le64(key + 24);
  111. ctx->nrounds = SPECK128_256_NROUNDS;
  112. for (i = 0; i < ctx->nrounds; i++) {
  113. ctx->round_keys[i] = k;
  114. speck128_round(&l[i % 3], &k, i);
  115. }
  116. break;
  117. default:
  118. return -EINVAL;
  119. }
  120. return 0;
  121. }
  122. EXPORT_SYMBOL_GPL(crypto_speck128_setkey);
  123. static int speck128_setkey(struct crypto_tfm *tfm, const u8 *key,
  124. unsigned int keylen)
  125. {
  126. return crypto_speck128_setkey(crypto_tfm_ctx(tfm), key, keylen);
  127. }
  128. /* Speck64 */
  129. static __always_inline void speck64_round(u32 *x, u32 *y, u32 k)
  130. {
  131. *x = ror32(*x, 8);
  132. *x += *y;
  133. *x ^= k;
  134. *y = rol32(*y, 3);
  135. *y ^= *x;
  136. }
  137. static __always_inline void speck64_unround(u32 *x, u32 *y, u32 k)
  138. {
  139. *y ^= *x;
  140. *y = ror32(*y, 3);
  141. *x ^= k;
  142. *x -= *y;
  143. *x = rol32(*x, 8);
  144. }
  145. void crypto_speck64_encrypt(const struct speck64_tfm_ctx *ctx,
  146. u8 *out, const u8 *in)
  147. {
  148. u32 y = get_unaligned_le32(in);
  149. u32 x = get_unaligned_le32(in + 4);
  150. int i;
  151. for (i = 0; i < ctx->nrounds; i++)
  152. speck64_round(&x, &y, ctx->round_keys[i]);
  153. put_unaligned_le32(y, out);
  154. put_unaligned_le32(x, out + 4);
  155. }
  156. EXPORT_SYMBOL_GPL(crypto_speck64_encrypt);
  157. static void speck64_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
  158. {
  159. crypto_speck64_encrypt(crypto_tfm_ctx(tfm), out, in);
  160. }
  161. void crypto_speck64_decrypt(const struct speck64_tfm_ctx *ctx,
  162. u8 *out, const u8 *in)
  163. {
  164. u32 y = get_unaligned_le32(in);
  165. u32 x = get_unaligned_le32(in + 4);
  166. int i;
  167. for (i = ctx->nrounds - 1; i >= 0; i--)
  168. speck64_unround(&x, &y, ctx->round_keys[i]);
  169. put_unaligned_le32(y, out);
  170. put_unaligned_le32(x, out + 4);
  171. }
  172. EXPORT_SYMBOL_GPL(crypto_speck64_decrypt);
  173. static void speck64_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
  174. {
  175. crypto_speck64_decrypt(crypto_tfm_ctx(tfm), out, in);
  176. }
  177. int crypto_speck64_setkey(struct speck64_tfm_ctx *ctx, const u8 *key,
  178. unsigned int keylen)
  179. {
  180. u32 l[3];
  181. u32 k;
  182. int i;
  183. switch (keylen) {
  184. case SPECK64_96_KEY_SIZE:
  185. k = get_unaligned_le32(key);
  186. l[0] = get_unaligned_le32(key + 4);
  187. l[1] = get_unaligned_le32(key + 8);
  188. ctx->nrounds = SPECK64_96_NROUNDS;
  189. for (i = 0; i < ctx->nrounds; i++) {
  190. ctx->round_keys[i] = k;
  191. speck64_round(&l[i % 2], &k, i);
  192. }
  193. break;
  194. case SPECK64_128_KEY_SIZE:
  195. k = get_unaligned_le32(key);
  196. l[0] = get_unaligned_le32(key + 4);
  197. l[1] = get_unaligned_le32(key + 8);
  198. l[2] = get_unaligned_le32(key + 12);
  199. ctx->nrounds = SPECK64_128_NROUNDS;
  200. for (i = 0; i < ctx->nrounds; i++) {
  201. ctx->round_keys[i] = k;
  202. speck64_round(&l[i % 3], &k, i);
  203. }
  204. break;
  205. default:
  206. return -EINVAL;
  207. }
  208. return 0;
  209. }
  210. EXPORT_SYMBOL_GPL(crypto_speck64_setkey);
  211. static int speck64_setkey(struct crypto_tfm *tfm, const u8 *key,
  212. unsigned int keylen)
  213. {
  214. return crypto_speck64_setkey(crypto_tfm_ctx(tfm), key, keylen);
  215. }
  216. /* Algorithm definitions */
/*
 * Registrations for the generic Speck128 and Speck64 single-block
 * ciphers.  Priority 100 leaves room for architecture-accelerated
 * implementations to override these generic ones.
 */
static struct crypto_alg speck_algs[] = {
	{
		.cra_name = "speck128",
		.cra_driver_name = "speck128-generic",
		.cra_priority = 100,
		.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
		.cra_blocksize = SPECK128_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct speck128_tfm_ctx),
		.cra_module = THIS_MODULE,
		.cra_u = {
			.cipher = {
				/* Accepts 128-, 192-, and 256-bit keys */
				.cia_min_keysize = SPECK128_128_KEY_SIZE,
				.cia_max_keysize = SPECK128_256_KEY_SIZE,
				.cia_setkey = speck128_setkey,
				.cia_encrypt = speck128_encrypt,
				.cia_decrypt = speck128_decrypt
			}
		}
	}, {
		.cra_name = "speck64",
		.cra_driver_name = "speck64-generic",
		.cra_priority = 100,
		.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
		.cra_blocksize = SPECK64_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct speck64_tfm_ctx),
		.cra_module = THIS_MODULE,
		.cra_u = {
			.cipher = {
				/* Accepts 96- and 128-bit keys */
				.cia_min_keysize = SPECK64_96_KEY_SIZE,
				.cia_max_keysize = SPECK64_128_KEY_SIZE,
				.cia_setkey = speck64_setkey,
				.cia_encrypt = speck64_encrypt,
				.cia_decrypt = speck64_decrypt
			}
		}
	}
};
  254. static int __init speck_module_init(void)
  255. {
  256. return crypto_register_algs(speck_algs, ARRAY_SIZE(speck_algs));
  257. }
/* Unregister everything registered by speck_module_init(). */
static void __exit speck_module_exit(void)
{
	crypto_unregister_algs(speck_algs, ARRAY_SIZE(speck_algs));
}
module_init(speck_module_init);
module_exit(speck_module_exit);
MODULE_DESCRIPTION("Speck block cipher (generic)");
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Eric Biggers <ebiggers@google.com>");
/* Aliases allow module auto-loading by either cra_name or cra_driver_name. */
MODULE_ALIAS_CRYPTO("speck128");
MODULE_ALIAS_CRYPTO("speck128-generic");
MODULE_ALIAS_CRYPTO("speck64");
MODULE_ALIAS_CRYPTO("speck64-generic");