/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/string.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

static inline struct crypto_alg *crypto_alg_get(struct crypto_alg *alg)
{
	atomic_inc(&alg->cra_refcnt);
	return alg;
}

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
	struct module *module = alg->cra_module;

	crypto_alg_put(alg);
	module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

static inline int crypto_is_test_larval(struct crypto_larval *larval)
{
	return larval->alg.cra_driver_name[0];
}

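/*
 * Find the best match for @name on crypto_alg_list: an exact
 * cra_driver_name match wins outright, otherwise the highest-priority
 * cra_name match is taken.  The caller must hold crypto_alg_sem.
 */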
static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		if (crypto_is_larval(q) &&
		    !crypto_is_test_larval((struct crypto_larval *)q) &&
		    ((struct crypto_larval *)q)->mask != mask)
			continue;

		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}

static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (larval->adult)
		crypto_mod_put(larval->adult);
	kfree(larval);
}

struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

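/*
 * Register a larval (placeholder) entry for @name so that concurrent
 * lookups of the same algorithm wait on a single instantiation attempt.
 * If another entry was registered first, wait on that one instead.
 */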
static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval))
		return ERR_CAST(larval);

	atomic_set(&larval->alg.cra_refcnt, 2);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	if (alg != &larval->alg) {
		kfree(larval);
		if (crypto_is_larval(alg))
			alg = crypto_larval_wait(alg);
	}

	return alg;
}

void crypto_larval_kill(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	down_write(&crypto_alg_sem);
	list_del(&alg->cra_list);
	up_write(&crypto_alg_sem);
	complete_all(&larval->completion);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

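/*
 * Wait (up to 60 seconds) for a larval entry to be replaced by a real
 * algorithm and return a reference to it, or an ERR_PTR() if the wait
 * was interrupted, timed out or the instantiation failed.
 */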
static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;
	long timeout;

	timeout = wait_for_completion_interruptible_timeout(
		&larval->completion, 60 * HZ);

	alg = larval->adult;
	if (timeout < 0)
		alg = ERR_PTR(-EINTR);
	else if (!timeout)
		alg = ERR_PTR(-ETIMEDOUT);
	else if (!alg)
		alg = ERR_PTR(-ENOENT);
	else if (crypto_is_test_larval(larval) &&
		 !(alg->cra_flags & CRYPTO_ALG_TESTED))
		alg = ERR_PTR(-EAGAIN);
	else if (!crypto_mod_get(alg))
		alg = ERR_PTR(-EAGAIN);
	crypto_mod_put(&larval->alg);

	return alg;
}

struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	up_read(&crypto_alg_sem);

	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_lookup);

struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	if (!name)
		return ERR_PTR(-ENOENT);

	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	type &= mask;

	alg = crypto_alg_lookup(name, type, mask);
	if (!alg) {
		request_module("%s", name);

		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
		      CRYPTO_ALG_NEED_FALLBACK))
			request_module("%s-all", name);

		alg = crypto_alg_lookup(name, type, mask);
	}

	if (alg)
		return crypto_is_larval(alg) ? crypto_larval_wait(alg) : alg;

	return crypto_larval_add(name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_larval_lookup);

int crypto_probing_notify(unsigned long val, void *v)
{
	int ok;

	ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	if (ok == NOTIFY_DONE) {
		request_module("cryptomgr");
		ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	}

	return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

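/*
 * Full lookup path: find (or request) an algorithm by name.  If only a
 * larval entry could be created, ask any loaded crypto manager via the
 * notifier chain to instantiate the algorithm, then wait for the result.
 */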
struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	if (!((type | mask) & CRYPTO_ALG_TESTED)) {
		type |= CRYPTO_ALG_TESTED;
		mask |= CRYPTO_ALG_TESTED;
	}

	larval = crypto_larval_lookup(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(larval);
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);

static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

	if (type_obj)
		return type_obj->init(tfm, type, mask);

	switch (crypto_tfm_alg_type(tfm)) {
	case CRYPTO_ALG_TYPE_CIPHER:
		return crypto_init_cipher_ops(tfm);

	case CRYPTO_ALG_TYPE_COMPRESS:
		return crypto_init_compress_ops(tfm);

	default:
		break;
	}

	BUG();
	return -EINVAL;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type) {
		if (tfm->exit)
			tfm->exit(tfm);
		return;
	}

	switch (crypto_tfm_alg_type(tfm)) {
	case CRYPTO_ALG_TYPE_CIPHER:
		crypto_exit_cipher_ops(tfm);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		crypto_exit_compress_ops(tfm);
		break;

	default:
		BUG();
	}
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}

void crypto_shoot_alg(struct crypto_alg *alg)
{
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	struct crypto_tfm *tfm = NULL;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, GFP_KERNEL);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;

	err = crypto_init_ops(tfm, type, mask);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 *	crypto_alloc_base - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	This function should not be used by new algorithm types.
 *	Please use crypto_alloc_tfm instead.
 *
 *	crypto_alloc_base() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of a non-determinate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_blkcipher.
 *
 *	In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);
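
/*
 * Example (illustrative sketch): a legacy caller would allocate and
 * release a transform roughly as follows; "cbc(aes)" is only an example
 * algorithm name.
 *
 *	struct crypto_tfm *tfm;
 *
 *	tfm = crypto_alloc_base("cbc(aes)", CRYPTO_ALG_TYPE_BLKCIPHER,
 *				CRYPTO_ALG_TYPE_MASK);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_tfm(tfm);
 */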

void *crypto_create_tfm(struct crypto_alg *alg,
			const struct crypto_type *frontend)
{
	char *mem;
	struct crypto_tfm *tfm = NULL;
	unsigned int tfmsize;
	unsigned int total;
	int err = -ENOMEM;

	tfmsize = frontend->tfmsize;
	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

	mem = kzalloc(total, GFP_KERNEL);
	if (mem == NULL)
		goto out_err;

	tfm = (struct crypto_tfm *)(mem + tfmsize);
	tfm->__crt_alg = alg;

	err = frontend->init_tfm(tfm);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(mem);
out_err:
	mem = ERR_PTR(err);
out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm);

struct crypto_alg *crypto_find_alg(const char *alg_name,
				   const struct crypto_type *frontend,
				   u32 type, u32 mask)
{
	struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask) =
		crypto_alg_mod_lookup;

	if (frontend) {
		type &= frontend->maskclear;
		mask &= frontend->maskclear;
		type |= frontend->type;
		mask |= frontend->maskset;

		if (frontend->lookup)
			lookup = frontend->lookup;
	}

	return lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/*
 *	crypto_alloc_tfm - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@frontend: Frontend algorithm type
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	crypto_alloc_tfm() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of a non-determinate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_blkcipher.
 *
 *	In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm(const char *alg_name,
		       const struct crypto_type *frontend, u32 type, u32 mask)
{
	void *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_find_alg(alg_name, frontend, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = crypto_create_tfm(alg, frontend);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm);
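
/*
 * Example (illustrative sketch): type-specific allocators are thin
 * wrappers around crypto_alloc_tfm().  A shash-style frontend, for
 * instance, looks roughly like this:
 *
 *	struct crypto_shash *crypto_alloc_shash(const char *alg_name,
 *						u32 type, u32 mask)
 *	{
 *		return crypto_alloc_tfm(alg_name, &crypto_shash_type,
 *					type, mask);
 *	}
 */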

/*
 *	crypto_destroy_tfm - Free crypto transform
 *	@mem: Start of tfm slab
 *	@tfm: Transform to free
 *
 *	This function frees up the transform and any associated resources,
 *	then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;

	if (unlikely(!mem))
		return;

	alg = tfm->__crt_alg;

	if (!tfm->exit && alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	kzfree(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);
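
/*
 * Example (illustrative sketch): the type-specific free helpers pass both
 * the start of the allocation and the embedded crypto_tfm, e.g.:
 *
 *	static inline void crypto_free_tfm(struct crypto_tfm *tfm)
 *	{
 *		return crypto_destroy_tfm(tfm, tfm);
 *	}
 */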

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);

MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");