/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/completion.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
	struct module *module = alg->cra_module;

	crypto_alg_put(alg);
	module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

static inline int crypto_is_test_larval(struct crypto_larval *larval)
{
	return larval->alg.cra_driver_name[0];
}

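/*
 * Find the best matching registered algorithm: an exact cra_driver_name
 * match wins outright, otherwise the highest-priority cra_name match is
 * returned with a reference held.  Moribund entries and larvals created
 * with a different mask are skipped.  Must be called with crypto_alg_sem
 * held.
 */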
static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		if (crypto_is_larval(q) &&
		    !crypto_is_test_larval((struct crypto_larval *)q) &&
		    ((struct crypto_larval *)q)->mask != mask)
			continue;

		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}

static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (larval->adult)
		crypto_mod_put(larval->adult);
	kfree(larval);
}

struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval))
		return ERR_CAST(larval);

	atomic_set(&larval->alg.cra_refcnt, 2);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	if (alg != &larval->alg) {
		kfree(larval);
		if (crypto_is_larval(alg))
			alg = crypto_larval_wait(alg);
	}

	return alg;
}

void crypto_larval_kill(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	down_write(&crypto_alg_sem);
	list_del(&alg->cra_list);
	up_write(&crypto_alg_sem);
	complete_all(&larval->completion);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

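/*
 * Wait for a larval to mature into a real algorithm (or die).  The wait
 * is killable and bounded to 60 seconds; on success a reference to the
 * adult algorithm is returned and the larval reference is dropped.
 */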
static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;
	long timeout;

	timeout = wait_for_completion_killable_timeout(
		&larval->completion, 60 * HZ);

	alg = larval->adult;
	if (timeout < 0)
		alg = ERR_PTR(-EINTR);
	else if (!timeout)
		alg = ERR_PTR(-ETIMEDOUT);
	else if (!alg)
		alg = ERR_PTR(-ENOENT);
	else if (crypto_is_test_larval(larval) &&
		 !(alg->cra_flags & CRYPTO_ALG_TESTED))
		alg = ERR_PTR(-EAGAIN);
	else if (!crypto_mod_get(alg))
		alg = ERR_PTR(-EAGAIN);
	crypto_mod_put(&larval->alg);

	return alg;
}

struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	up_read(&crypto_alg_sem);

	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_lookup);

struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	if (!name)
		return ERR_PTR(-ENOENT);

	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	type &= mask;

	alg = crypto_alg_lookup(name, type, mask);
	if (!alg && !(mask & CRYPTO_NOLOAD)) {
		request_module("crypto-%s", name);

		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
		      CRYPTO_ALG_NEED_FALLBACK))
			request_module("crypto-%s-all", name);

		alg = crypto_alg_lookup(name, type, mask);
	}

	if (alg)
		return crypto_is_larval(alg) ? crypto_larval_wait(alg) : alg;

	return crypto_larval_add(name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_larval_lookup);

int crypto_probing_notify(unsigned long val, void *v)
{
	int ok;

	ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	if (ok == NOTIFY_DONE) {
		request_module("cryptomgr");
		ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	}

	return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

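/*
 * Top-level lookup used by the allocation helpers below: find the named
 * algorithm, loading modules and asking the crypto manager to instantiate
 * templates on demand, and return it with a reference held (or an error
 * pointer).
 */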
struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	if (!((type | mask) & CRYPTO_ALG_TESTED)) {
		type |= CRYPTO_ALG_TESTED;
		mask |= CRYPTO_ALG_TESTED;
	}

	/*
	 * If the internal flag is set for a cipher, require a caller to
	 * invoke the cipher with the internal flag to use that cipher.
	 * Also, if a caller wants to allocate a cipher that may or may
	 * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
	 * !(mask & CRYPTO_ALG_INTERNAL).
	 */
	if (!((type | mask) & CRYPTO_ALG_INTERNAL))
		mask |= CRYPTO_ALG_INTERNAL;

	larval = crypto_larval_lookup(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(larval);
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);

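/*
 * Initialise the per-transform operations.  Newer algorithm types provide
 * a crypto_type with an init hook; the legacy cipher and compression types
 * are handled by the switch below.
 */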
static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

	if (type_obj)
		return type_obj->init(tfm, type, mask);

	switch (crypto_tfm_alg_type(tfm)) {
	case CRYPTO_ALG_TYPE_CIPHER:
		return crypto_init_cipher_ops(tfm);

	case CRYPTO_ALG_TYPE_COMPRESS:
		return crypto_init_compress_ops(tfm);

	default:
		break;
	}

	BUG();
	return -EINVAL;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type) {
		if (tfm->exit)
			tfm->exit(tfm);
		return;
	}

	switch (crypto_tfm_alg_type(tfm)) {
	case CRYPTO_ALG_TYPE_CIPHER:
		crypto_exit_cipher_ops(tfm);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		crypto_exit_compress_ops(tfm);
		break;

	default:
		BUG();
	}
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}

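/*
 * Mark an algorithm as dying so that subsequent lookups skip it.  Called
 * below when initialising a transform fails with -EAGAIN.
 */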
void crypto_shoot_alg(struct crypto_alg *alg)
{
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	struct crypto_tfm *tfm = NULL;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, GFP_KERNEL);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;

	err = crypto_init_ops(tfm, type, mask);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 * crypto_alloc_base - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 *
 * This function should not be used by new algorithm types.
 * Please use crypto_alloc_tfm instead.
 *
 * crypto_alloc_base() will first attempt to locate an already loaded
 * algorithm. If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias. If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly. A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type. Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_blkcipher.
 *
 * In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);
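
/*
 * Illustrative sketch (not part of the original file): a caller of this
 * legacy interface typically looks up a transform by name and releases it
 * with crypto_free_tfm() from <linux/crypto.h>; the algorithm name below
 * is only an example.
 *
 *	struct crypto_tfm *tfm;
 *
 *	tfm = crypto_alloc_base("cbc(aes)", CRYPTO_ALG_TYPE_BLKCIPHER,
 *				CRYPTO_ALG_TYPE_MASK);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_tfm(tfm);
 */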

void *crypto_create_tfm(struct crypto_alg *alg,
			const struct crypto_type *frontend)
{
	char *mem;
	struct crypto_tfm *tfm = NULL;
	unsigned int tfmsize;
	unsigned int total;
	int err = -ENOMEM;

	tfmsize = frontend->tfmsize;
	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

	mem = kzalloc(total, GFP_KERNEL);
	if (mem == NULL)
		goto out_err;

	tfm = (struct crypto_tfm *)(mem + tfmsize);
	tfm->__crt_alg = alg;

	err = frontend->init_tfm(tfm);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(mem);
out_err:
	mem = ERR_PTR(err);
out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm);

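/*
 * Look up an algorithm on behalf of a frontend type, applying the
 * frontend's type and mask constraints and delegating to its own lookup
 * hook if it has one.
 */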
struct crypto_alg *crypto_find_alg(const char *alg_name,
				   const struct crypto_type *frontend,
				   u32 type, u32 mask)
{
	struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask) =
		crypto_alg_mod_lookup;

	if (frontend) {
		type &= frontend->maskclear;
		mask &= frontend->maskclear;
		type |= frontend->type;
		mask |= frontend->maskset;

		if (frontend->lookup)
			lookup = frontend->lookup;
	}

	return lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/*
 * crypto_alloc_tfm - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @frontend: Frontend algorithm type
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 *
 * crypto_alloc_tfm() will first attempt to locate an already loaded
 * algorithm. If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias. If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly. A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type. Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_blkcipher.
 *
 * In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm(const char *alg_name,
		       const struct crypto_type *frontend, u32 type, u32 mask)
{
	void *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_find_alg(alg_name, frontend, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = crypto_create_tfm(alg, frontend);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm);
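
/*
 * Illustrative sketch (not part of the original file): type-specific
 * allocators are thin wrappers that pass their &struct crypto_type to
 * crypto_alloc_tfm(), so callers normally use those instead, e.g.
 *
 *	struct crypto_shash *tfm;
 *
 *	tfm = crypto_alloc_shash("sha256", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_shash(tfm);
 */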

/*
 * crypto_destroy_tfm - Free crypto transform
 * @mem: Start of tfm slab
 * @tfm: Transform to free
 *
 * This function frees up the transform and any associated resources,
 * then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;

	if (unlikely(!mem))
		return;

	alg = tfm->__crt_alg;

	if (!tfm->exit && alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	kzfree(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);
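
/*
 * Illustrative sketch (not part of the original file): crypto_has_alg()
 * is typically used to probe for optional algorithm support before
 * committing to a code path, e.g.
 *
 *	if (!crypto_has_alg("ghash", 0, 0))
 *		return -ENOENT;
 */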

void crypto_req_done(struct crypto_async_request *req, int err)
{
	struct crypto_wait *wait = req->data;

	if (err == -EINPROGRESS)
		return;

	wait->err = err;
	complete(&wait->completion);
}
EXPORT_SYMBOL_GPL(crypto_req_done);
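
/*
 * Illustrative sketch (not part of the original file): crypto_req_done()
 * is meant to be installed as the request completion callback and paired
 * with DECLARE_CRYPTO_WAIT() and crypto_wait_req() from <linux/crypto.h>
 * to wait synchronously for an asynchronous request, e.g.
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				   crypto_req_done, &wait);
 *	err = crypto_wait_req(crypto_ahash_digest(req), &wait);
 */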

MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");