mirror of
https://github.com/Qortal/Brooklyn.git
synced 2025-02-12 10:15:54 +00:00
2a709f28fa
* 0-day exploit mitigation * Memory corruption prevention * Privilege escalation prevention * Buffer overflow prevention * File system corruption defense * Thread escape prevention This may very well be the most intensive inclusion to BrooklynR. This will not be part of an x86 suite, nor will it be released as a toolkit. The security core toolkit will remain part of the kernel base.
76 lines
2.1 KiB
C
76 lines
2.1 KiB
C
/*
 * Software async crypto daemon
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */

#ifndef _CRYPTO_CRYPT_H
#define _CRYPTO_CRYPT_H

#include <linux/crypto.h>
#include <linux/kernel.h>
#include <crypto/aead.h>
#include <crypto/hash.h>

struct cryptd_ablkcipher {
|
|
struct crypto_ablkcipher base;
|
|
};
|
|
|
|
/*
 * Convert a generic crypto_ablkcipher handle back to its cryptd
 * wrapper.  Only valid for transforms allocated through
 * cryptd_alloc_ablkcipher(); the cast is safe because 'base' is the
 * first (and only) member of struct cryptd_ablkcipher.
 */
static inline struct cryptd_ablkcipher *__cryptd_ablkcipher_cast(
	struct crypto_ablkcipher *tfm)
{
	return (struct cryptd_ablkcipher *)tfm;
}

/* alg_name should be algorithm to be cryptd-ed */
|
|
struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
|
|
u32 type, u32 mask);
|
|
struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm);
|
|
bool cryptd_ablkcipher_queued(struct cryptd_ablkcipher *tfm);
|
|
void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm);
|
|
|
|
struct cryptd_ahash {
|
|
struct crypto_ahash base;
|
|
};
|
|
|
|
/*
 * Convert a generic crypto_ahash handle back to its cryptd wrapper.
 * Only valid for transforms allocated through cryptd_alloc_ahash();
 * the cast is safe because 'base' is the sole member of
 * struct cryptd_ahash.
 */
static inline struct cryptd_ahash *__cryptd_ahash_cast(
	struct crypto_ahash *tfm)
{
	return (struct cryptd_ahash *)tfm;
}

/* alg_name should be algorithm to be cryptd-ed */
|
|
struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
|
|
u32 type, u32 mask);
|
|
struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm);
|
|
struct shash_desc *cryptd_shash_desc(struct ahash_request *req);
|
|
/* Must be called without moving CPUs. */
|
|
bool cryptd_ahash_queued(struct cryptd_ahash *tfm);
|
|
void cryptd_free_ahash(struct cryptd_ahash *tfm);
|
|
|
|
struct cryptd_aead {
|
|
struct crypto_aead base;
|
|
};
|
|
|
|
/*
 * Convert a generic crypto_aead handle back to its cryptd wrapper.
 * Only valid for transforms allocated through cryptd_alloc_aead();
 * the cast is safe because 'base' is the sole member of
 * struct cryptd_aead.
 */
static inline struct cryptd_aead *__cryptd_aead_cast(
	struct crypto_aead *tfm)
{
	return (struct cryptd_aead *)tfm;
}

struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
|
|
u32 type, u32 mask);
|
|
|
|
struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm);
|
|
/* Must be called without moving CPUs. */
|
|
bool cryptd_aead_queued(struct cryptd_aead *tfm);
|
|
|
|
void cryptd_free_aead(struct cryptd_aead *tfm);
|
|
|
|
#endif /* _CRYPTO_CRYPT_H */