/*
 * The AEGIS-256 Authenticated-Encryption Algorithm
 *   Glue for AES-NI + SSE2 implementation
 *
 * Copyright (c) 2017-2018 Ondrej Mosnacek <omosnacek@gmail.com>
 * Copyright (C) 2017-2018 Red Hat, Inc. All rights reserved.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#include <crypto/cryptd.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/module.h>
#include <asm/fpu/api.h>
#include <asm/cpu_device_id.h>

#define AEGIS256_BLOCK_ALIGN 16
#define AEGIS256_BLOCK_SIZE 16
#define AEGIS256_NONCE_SIZE 32
#define AEGIS256_STATE_BLOCKS 6
#define AEGIS256_KEY_SIZE 32
#define AEGIS256_MIN_AUTH_SIZE 8
#define AEGIS256_MAX_AUTH_SIZE 16

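/*
 * Assembly routines (AES-NI + SSE2). They operate on SIMD registers, so
 * every call must be made inside a kernel_fpu_begin()/kernel_fpu_end()
 * section.
 */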
asmlinkage void crypto_aegis256_aesni_init(void *state, void *key, void *iv);

asmlinkage void crypto_aegis256_aesni_ad(
		void *state, unsigned int length, const void *data);

asmlinkage void crypto_aegis256_aesni_enc(
		void *state, unsigned int length, const void *src, void *dst);

asmlinkage void crypto_aegis256_aesni_dec(
		void *state, unsigned int length, const void *src, void *dst);

asmlinkage void crypto_aegis256_aesni_enc_tail(
		void *state, unsigned int length, const void *src, void *dst);

asmlinkage void crypto_aegis256_aesni_dec_tail(
		void *state, unsigned int length, const void *src, void *dst);

asmlinkage void crypto_aegis256_aesni_final(
		void *state, void *tag_xor, unsigned int assoclen,
		unsigned int cryptlen);

struct aegis_block {
	u8 bytes[AEGIS256_BLOCK_SIZE] __aligned(AEGIS256_BLOCK_ALIGN);
};

struct aegis_state {
	struct aegis_block blocks[AEGIS256_STATE_BLOCKS];
};

struct aegis_ctx {
	struct aegis_block key[AEGIS256_KEY_SIZE / AEGIS256_BLOCK_SIZE];
};

struct aegis_crypt_ops {
	int (*skcipher_walk_init)(struct skcipher_walk *walk,
				  struct aead_request *req, bool atomic);

	void (*crypt_blocks)(void *state, unsigned int length, const void *src,
			     void *dst);
	void (*crypt_tail)(void *state, unsigned int length, const void *src,
			   void *dst);
};

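/*
 * Absorb the associated data into the state. The data is walked via a
 * scatterlist; partial pieces are collected in a stack buffer until a
 * full 16-byte block is available for the assembly routine.
 */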
static void crypto_aegis256_aesni_process_ad(
		struct aegis_state *state, struct scatterlist *sg_src,
		unsigned int assoclen)
{
	struct scatter_walk walk;
	struct aegis_block buf;
	unsigned int pos = 0;

	scatterwalk_start(&walk, sg_src);
	while (assoclen != 0) {
		unsigned int size = scatterwalk_clamp(&walk, assoclen);
		unsigned int left = size;
		void *mapped = scatterwalk_map(&walk);
		const u8 *src = mapped;

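		/*
		 * Top up a previously buffered partial block first, then
		 * feed all remaining whole blocks straight from the source.
		 */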
		if (pos + size >= AEGIS256_BLOCK_SIZE) {
			if (pos > 0) {
				unsigned int fill = AEGIS256_BLOCK_SIZE - pos;
				memcpy(buf.bytes + pos, src, fill);
				crypto_aegis256_aesni_ad(state,
							 AEGIS256_BLOCK_SIZE,
							 buf.bytes);
				pos = 0;
				left -= fill;
				src += fill;
			}

			crypto_aegis256_aesni_ad(state, left, src);

			src += left & ~(AEGIS256_BLOCK_SIZE - 1);
			left &= AEGIS256_BLOCK_SIZE - 1;
		}

		memcpy(buf.bytes + pos, src, left);
		pos += left;
		assoclen -= size;

		scatterwalk_unmap(mapped);
		scatterwalk_advance(&walk, size);
		scatterwalk_done(&walk, 0, assoclen);
	}

	if (pos > 0) {
		memset(buf.bytes + pos, 0, AEGIS256_BLOCK_SIZE - pos);
		crypto_aegis256_aesni_ad(state, AEGIS256_BLOCK_SIZE, buf.bytes);
	}
}

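/*
 * En/decrypt the walk data: whole blocks in bulk via crypt_blocks, then
 * any sub-block remainder via the dedicated tail routine.
 */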
static void crypto_aegis256_aesni_process_crypt(
		struct aegis_state *state, struct skcipher_walk *walk,
		const struct aegis_crypt_ops *ops)
{
	while (walk->nbytes >= AEGIS256_BLOCK_SIZE) {
		ops->crypt_blocks(state,
				  round_down(walk->nbytes, AEGIS256_BLOCK_SIZE),
				  walk->src.virt.addr, walk->dst.virt.addr);
		skcipher_walk_done(walk, walk->nbytes % AEGIS256_BLOCK_SIZE);
	}

	if (walk->nbytes) {
		ops->crypt_tail(state, walk->nbytes, walk->src.virt.addr,
				walk->dst.virt.addr);
		skcipher_walk_done(walk, 0);
	}
}

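/*
 * cra_alignmask is 0, so the context is aligned by hand: cra_ctxsize
 * reserves __alignof__ extra bytes and the pointer is rounded up here.
 */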
static struct aegis_ctx *crypto_aegis256_aesni_ctx(struct crypto_aead *aead)
{
	u8 *ctx = crypto_aead_ctx(aead);
	ctx = PTR_ALIGN(ctx, __alignof__(struct aegis_ctx));
	return (void *)ctx;
}

static int crypto_aegis256_aesni_setkey(struct crypto_aead *aead, const u8 *key,
					unsigned int keylen)
{
	struct aegis_ctx *ctx = crypto_aegis256_aesni_ctx(aead);

	if (keylen != AEGIS256_KEY_SIZE) {
		crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	memcpy(ctx->key, key, AEGIS256_KEY_SIZE);

	return 0;
}

static int crypto_aegis256_aesni_setauthsize(struct crypto_aead *tfm,
					     unsigned int authsize)
{
	if (authsize > AEGIS256_MAX_AUTH_SIZE)
		return -EINVAL;
	if (authsize < AEGIS256_MIN_AUTH_SIZE)
		return -EINVAL;
	return 0;
}

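/*
 * Common encrypt/decrypt path. The walk is initialized with atomic == true
 * because everything after it runs inside a single FPU section, where
 * sleeping is not allowed.
 */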
static void crypto_aegis256_aesni_crypt(struct aead_request *req,
					struct aegis_block *tag_xor,
					unsigned int cryptlen,
					const struct aegis_crypt_ops *ops)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aegis_ctx *ctx = crypto_aegis256_aesni_ctx(tfm);
	struct skcipher_walk walk;
	struct aegis_state state;

	ops->skcipher_walk_init(&walk, req, true);

	kernel_fpu_begin();

	crypto_aegis256_aesni_init(&state, ctx->key, req->iv);
	crypto_aegis256_aesni_process_ad(&state, req->src, req->assoclen);
	crypto_aegis256_aesni_process_crypt(&state, &walk, ops);
	crypto_aegis256_aesni_final(&state, tag_xor, req->assoclen, cryptlen);

	kernel_fpu_end();
}

static int crypto_aegis256_aesni_encrypt(struct aead_request *req)
{
	static const struct aegis_crypt_ops OPS = {
		.skcipher_walk_init = skcipher_walk_aead_encrypt,
		.crypt_blocks = crypto_aegis256_aesni_enc,
		.crypt_tail = crypto_aegis256_aesni_enc_tail,
	};

	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aegis_block tag = {};
	unsigned int authsize = crypto_aead_authsize(tfm);
	unsigned int cryptlen = req->cryptlen;

	crypto_aegis256_aesni_crypt(req, &tag, cryptlen, &OPS);

	scatterwalk_map_and_copy(tag.bytes, req->dst,
				 req->assoclen + cryptlen, authsize, 1);
	return 0;
}

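/*
 * Decryption passes the expected tag as tag_xor, so the computed tag is
 * XORed against it; a valid message therefore yields all-zero bytes,
 * verified with crypto_memneq() in constant time.
 */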
static int crypto_aegis256_aesni_decrypt(struct aead_request *req)
{
	static const struct aegis_block zeros = {};

	static const struct aegis_crypt_ops OPS = {
		.skcipher_walk_init = skcipher_walk_aead_decrypt,
		.crypt_blocks = crypto_aegis256_aesni_dec,
		.crypt_tail = crypto_aegis256_aesni_dec_tail,
	};

	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aegis_block tag;
	unsigned int authsize = crypto_aead_authsize(tfm);
	unsigned int cryptlen = req->cryptlen - authsize;

	scatterwalk_map_and_copy(tag.bytes, req->src,
				 req->assoclen + cryptlen, authsize, 0);

	crypto_aegis256_aesni_crypt(req, &tag, cryptlen, &OPS);

	return crypto_memneq(tag.bytes, zeros.bytes, authsize) ? -EBADMSG : 0;
}

static int crypto_aegis256_aesni_init_tfm(struct crypto_aead *aead)
{
	return 0;
}

static void crypto_aegis256_aesni_exit_tfm(struct crypto_aead *aead)
{
}

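/*
 * cryptd wrappers: the inner algorithm requires the FPU, so requests
 * arriving in contexts where the FPU is unusable are deferred to a
 * cryptd worker instead of being handled inline.
 */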
static int cryptd_aegis256_aesni_setkey(struct crypto_aead *aead,
					const u8 *key, unsigned int keylen)
{
	struct cryptd_aead **ctx = crypto_aead_ctx(aead);
	struct cryptd_aead *cryptd_tfm = *ctx;

	return crypto_aead_setkey(&cryptd_tfm->base, key, keylen);
}

static int cryptd_aegis256_aesni_setauthsize(struct crypto_aead *aead,
					     unsigned int authsize)
{
	struct cryptd_aead **ctx = crypto_aead_ctx(aead);
	struct cryptd_aead *cryptd_tfm = *ctx;

	return crypto_aead_setauthsize(&cryptd_tfm->base, authsize);
}

static int cryptd_aegis256_aesni_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct cryptd_aead **ctx = crypto_aead_ctx(aead);
	struct cryptd_aead *cryptd_tfm = *ctx;

	aead = &cryptd_tfm->base;
	if (irq_fpu_usable() && (!in_atomic() ||
				 !cryptd_aead_queued(cryptd_tfm)))
		aead = cryptd_aead_child(cryptd_tfm);

	aead_request_set_tfm(req, aead);

	return crypto_aead_encrypt(req);
}

static int cryptd_aegis256_aesni_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct cryptd_aead **ctx = crypto_aead_ctx(aead);
	struct cryptd_aead *cryptd_tfm = *ctx;

	aead = &cryptd_tfm->base;
	if (irq_fpu_usable() && (!in_atomic() ||
				 !cryptd_aead_queued(cryptd_tfm)))
		aead = cryptd_aead_child(cryptd_tfm);

	aead_request_set_tfm(req, aead);

	return crypto_aead_decrypt(req);
}

static int cryptd_aegis256_aesni_init_tfm(struct crypto_aead *aead)
{
	struct cryptd_aead *cryptd_tfm;
	struct cryptd_aead **ctx = crypto_aead_ctx(aead);

	cryptd_tfm = cryptd_alloc_aead("__aegis256-aesni", CRYPTO_ALG_INTERNAL,
				       CRYPTO_ALG_INTERNAL);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);

	*ctx = cryptd_tfm;
	crypto_aead_set_reqsize(aead, crypto_aead_reqsize(&cryptd_tfm->base));
	return 0;
}

static void cryptd_aegis256_aesni_exit_tfm(struct crypto_aead *aead)
{
	struct cryptd_aead **ctx = crypto_aead_ctx(aead);

	cryptd_free_aead(*ctx);
}

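/*
 * Two algorithm instances: the internal FPU-only implementation
 * ("__aegis256") and the async cryptd-backed wrapper that is actually
 * exposed to users as "aegis256".
 */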
static struct aead_alg crypto_aegis256_aesni_alg[] = {
	{
		.setkey = crypto_aegis256_aesni_setkey,
		.setauthsize = crypto_aegis256_aesni_setauthsize,
		.encrypt = crypto_aegis256_aesni_encrypt,
		.decrypt = crypto_aegis256_aesni_decrypt,
		.init = crypto_aegis256_aesni_init_tfm,
		.exit = crypto_aegis256_aesni_exit_tfm,

		.ivsize = AEGIS256_NONCE_SIZE,
		.maxauthsize = AEGIS256_MAX_AUTH_SIZE,
		.chunksize = AEGIS256_BLOCK_SIZE,

		.base = {
			.cra_flags = CRYPTO_ALG_INTERNAL,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct aegis_ctx) +
				       __alignof__(struct aegis_ctx),
			.cra_alignmask = 0,

			.cra_name = "__aegis256",
			.cra_driver_name = "__aegis256-aesni",

			.cra_module = THIS_MODULE,
		}
	}, {
		.setkey = cryptd_aegis256_aesni_setkey,
		.setauthsize = cryptd_aegis256_aesni_setauthsize,
		.encrypt = cryptd_aegis256_aesni_encrypt,
		.decrypt = cryptd_aegis256_aesni_decrypt,
		.init = cryptd_aegis256_aesni_init_tfm,
		.exit = cryptd_aegis256_aesni_exit_tfm,

		.ivsize = AEGIS256_NONCE_SIZE,
		.maxauthsize = AEGIS256_MAX_AUTH_SIZE,
		.chunksize = AEGIS256_BLOCK_SIZE,

		.base = {
			.cra_flags = CRYPTO_ALG_ASYNC,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct cryptd_aead *),
			.cra_alignmask = 0,

			.cra_priority = 400,

			.cra_name = "aegis256",
			.cra_driver_name = "aegis256-aesni",

			.cra_module = THIS_MODULE,
		}
	}
};

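/*
 * The implementation needs both SSE2 and AES-NI, and the kernel must be
 * able to save the SSE register state (cpu_has_xfeatures).
 */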
static int __init crypto_aegis256_aesni_module_init(void)
{
	if (!boot_cpu_has(X86_FEATURE_XMM2) ||
	    !boot_cpu_has(X86_FEATURE_AES) ||
	    !cpu_has_xfeatures(XFEATURE_MASK_SSE, NULL))
		return -ENODEV;

	return crypto_register_aeads(crypto_aegis256_aesni_alg,
				     ARRAY_SIZE(crypto_aegis256_aesni_alg));
}

static void __exit crypto_aegis256_aesni_module_exit(void)
{
	crypto_unregister_aeads(crypto_aegis256_aesni_alg,
				ARRAY_SIZE(crypto_aegis256_aesni_alg));
}

module_init(crypto_aegis256_aesni_module_init);
module_exit(crypto_aegis256_aesni_module_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Ondrej Mosnacek <omosnacek@gmail.com>");
MODULE_DESCRIPTION("AEGIS-256 AEAD algorithm -- AESNI+SSE2 implementation");
MODULE_ALIAS_CRYPTO("aegis256");
MODULE_ALIAS_CRYPTO("aegis256-aesni");