cryptd.h

/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Software async crypto daemon
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */

#ifndef _CRYPTO_CRYPT_H
#define _CRYPTO_CRYPT_H

#include <linux/kernel.h>
#include <crypto/aead.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>

struct cryptd_ablkcipher {
	struct crypto_ablkcipher base;
};

static inline struct cryptd_ablkcipher *__cryptd_ablkcipher_cast(
	struct crypto_ablkcipher *tfm)
{
	return (struct cryptd_ablkcipher *)tfm;
}

/* alg_name should be algorithm to be cryptd-ed */
struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
						  u32 type, u32 mask);
struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm);
bool cryptd_ablkcipher_queued(struct cryptd_ablkcipher *tfm);
void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm);
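
/*
 * Note (added commentary, not original header text): this is the older
 * ablkcipher-based wrapper.  It follows the same alloc/child/free pattern
 * sketched for the skcipher wrapper below, except that the child accessor
 * returns the synchronous struct crypto_blkcipher.
 */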

struct cryptd_skcipher {
	struct crypto_skcipher base;
};

struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask);
struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm);
/* Must be called without moving CPUs. */
bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm);
void cryptd_free_skcipher(struct cryptd_skcipher *tfm);
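
/*
 * Illustrative sketch (added commentary, not original header text):
 * cryptd_alloc_skcipher() asks the crypto API for "cryptd(<alg_name>)" and
 * returns an ERR_PTR value on failure.  Callers submit asynchronous requests
 * to &ctfm->base (processed from a workqueue) and can reach the wrapped
 * synchronous transform via cryptd_skcipher_child().  "cbc(aes)" is only a
 * placeholder algorithm name; key, keylen and req are assumed to be in scope:
 *
 *	struct cryptd_skcipher *ctfm;
 *
 *	ctfm = cryptd_alloc_skcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(ctfm))
 *		return PTR_ERR(ctfm);
 *
 *	crypto_skcipher_setkey(&ctfm->base, key, keylen);
 *	skcipher_request_set_tfm(req, &ctfm->base);
 *	...
 *	cryptd_free_skcipher(ctfm);
 */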

struct cryptd_ahash {
	struct crypto_ahash base;
};

static inline struct cryptd_ahash *__cryptd_ahash_cast(
	struct crypto_ahash *tfm)
{
	return (struct cryptd_ahash *)tfm;
}

/* alg_name should be algorithm to be cryptd-ed */
struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask);
struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm);
struct shash_desc *cryptd_shash_desc(struct ahash_request *req);
/* Must be called without moving CPUs. */
bool cryptd_ahash_queued(struct cryptd_ahash *tfm);
void cryptd_free_ahash(struct cryptd_ahash *tfm);
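
/*
 * Illustrative sketch (added commentary, not original header text), loosely
 * modelled on how a SIMD hash driver might use this: when it is safe to run
 * synchronously, the caller pulls the shash_desc out of the request that
 * targets the cryptd transform and drives the child shash directly;
 * otherwise it hands the request to &cryptd_tfm->base for deferred
 * processing.  "ghash" is only a placeholder algorithm name; req, data, len
 * and out are assumed to be in scope:
 *
 *	struct cryptd_ahash *cryptd_tfm;
 *
 *	cryptd_tfm = cryptd_alloc_ahash("ghash", 0, 0);
 *	if (IS_ERR(cryptd_tfm))
 *		return PTR_ERR(cryptd_tfm);
 *
 *	... later, per request, on the synchronous path ...
 *	struct shash_desc *desc = cryptd_shash_desc(req);
 *
 *	desc->tfm = cryptd_ahash_child(cryptd_tfm);
 *	return crypto_shash_digest(desc, data, len, out);
 */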

struct cryptd_aead {
	struct crypto_aead base;
};

static inline struct cryptd_aead *__cryptd_aead_cast(
	struct crypto_aead *tfm)
{
	return (struct cryptd_aead *)tfm;
}

struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
				      u32 type, u32 mask);
struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm);
/* Must be called without moving CPUs. */
bool cryptd_aead_queued(struct cryptd_aead *tfm);
void cryptd_free_aead(struct cryptd_aead *tfm);
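
/*
 * Illustrative sketch (added commentary, not original header text), loosely
 * based on how x86 AEAD glue code can use the queued helper: to preserve
 * request ordering, the caller only bypasses the cryptd queue and calls the
 * child directly when nothing is already queued on the current CPU, which is
 * why the check must run without migrating CPUs.  cryptd_tfm and req are
 * assumed to be in scope; irq_fpu_usable() is x86-specific:
 *
 *	struct crypto_aead *tfm = &cryptd_tfm->base;
 *
 *	if (irq_fpu_usable() && (!in_atomic() ||
 *				 !cryptd_aead_queued(cryptd_tfm)))
 *		tfm = cryptd_aead_child(cryptd_tfm);
 *
 *	aead_request_set_tfm(req, tfm);
 *	return crypto_aead_encrypt(req);
 */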

#endif