Merge git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
* git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (35 commits)
  hwrng: timeriomem - Fix potential oops (request_mem_region/__devinit)
  crypto: api - Use formatting of module name
  crypto: testmgr - Allow hash test vectors longer than a page
  crypto: testmgr - Check all test vector lengths
  crypto: hifn_795x - fix __dev{init,exit} markings
  crypto: tcrypt - Do not exit on success in fips mode
  crypto: compress - Return produced bytes in crypto_{,de}compress_{update,final}
  hwrng: via_rng - Support VIA Nano hardware RNG on X86_64 builds
  hwrng: via_rng - Support VIA Nano hardware RNG
  hwrng: via_rng - The VIA Hardware RNG driver is for the CPU, not Chipset
  crypto: testmgr - Skip algs not flagged fips_allowed in fips mode
  crypto: testmgr - Mark algs allowed in fips mode
  crypto: testmgr - Add ctr(aes) test vectors
  crypto: testmgr - Dynamically allocate xbuf and axbuf
  crypto: testmgr - Print self-test pass notices in fips mode
  crypto: testmgr - Catch base cipher self-test failures in fips mode
  crypto: testmgr - Add ansi_cprng test vectors
  crypto: testmgr - Add infrastructure for ansi_cprng self-tests
  crypto: testmgr - Add self-tests for rfc4309(ccm(aes))
  crypto: testmgr - Handle AEAD test vectors expected to fail verification
  ...
commit 32f44d62e4
arch/x86/crypto/Makefile
@@ -2,6 +2,8 @@
# Arch-specific CryptoAPI modules.
#

obj-$(CONFIG_CRYPTO_FPU) += fpu.o

obj-$(CONFIG_CRYPTO_AES_586) += aes-i586.o
obj-$(CONFIG_CRYPTO_TWOFISH_586) += twofish-i586.o
obj-$(CONFIG_CRYPTO_SALSA20_586) += salsa20-i586.o
arch/x86/crypto/aesni-intel_glue.c
@@ -21,6 +21,22 @@
#include <asm/i387.h>
#include <asm/aes.h>

#if defined(CONFIG_CRYPTO_CTR) || defined(CONFIG_CRYPTO_CTR_MODULE)
#define HAS_CTR
#endif

#if defined(CONFIG_CRYPTO_LRW) || defined(CONFIG_CRYPTO_LRW_MODULE)
#define HAS_LRW
#endif

#if defined(CONFIG_CRYPTO_PCBC) || defined(CONFIG_CRYPTO_PCBC_MODULE)
#define HAS_PCBC
#endif

#if defined(CONFIG_CRYPTO_XTS) || defined(CONFIG_CRYPTO_XTS_MODULE)
#define HAS_XTS
#endif

struct async_aes_ctx {
	struct cryptd_ablkcipher *cryptd_tfm;
};
@@ -137,6 +153,41 @@ static struct crypto_alg aesni_alg = {
	}
};

static void __aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	aesni_enc(ctx, dst, src);
}

static void __aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	aesni_dec(ctx, dst, src);
}

static struct crypto_alg __aesni_alg = {
	.cra_name = "__aes-aesni",
	.cra_driver_name = "__driver-aes-aesni",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
	.cra_alignmask = 0,
	.cra_module = THIS_MODULE,
	.cra_list = LIST_HEAD_INIT(__aesni_alg.cra_list),
	.cra_u = {
		.cipher = {
			.cia_min_keysize = AES_MIN_KEY_SIZE,
			.cia_max_keysize = AES_MAX_KEY_SIZE,
			.cia_setkey = aes_set_key,
			.cia_encrypt = __aes_encrypt,
			.cia_decrypt = __aes_decrypt
		}
	}
};

static int ecb_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
@@ -277,8 +328,16 @@ static int ablk_set_key(struct crypto_ablkcipher *tfm, const u8 *key,
			unsigned int key_len)
{
	struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct crypto_ablkcipher *child = &ctx->cryptd_tfm->base;
	int err;

	return crypto_ablkcipher_setkey(&ctx->cryptd_tfm->base, key, key_len);
	crypto_ablkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_ablkcipher_set_flags(child, crypto_ablkcipher_get_flags(tfm)
				    & CRYPTO_TFM_REQ_MASK);
	err = crypto_ablkcipher_setkey(child, key, key_len);
	crypto_ablkcipher_set_flags(tfm, crypto_ablkcipher_get_flags(child)
				    & CRYPTO_TFM_RES_MASK);
	return err;
}

static int ablk_encrypt(struct ablkcipher_request *req)
@@ -411,6 +470,163 @@ static struct crypto_alg ablk_cbc_alg = {
	},
};

#ifdef HAS_CTR
static int ablk_ctr_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("fpu(ctr(__driver-aes-aesni))",
					     0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

static struct crypto_alg ablk_ctr_alg = {
	.cra_name = "ctr(aes)",
	.cra_driver_name = "ctr-aes-aesni",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize = 1,
	.cra_ctxsize = sizeof(struct async_aes_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_list = LIST_HEAD_INIT(ablk_ctr_alg.cra_list),
	.cra_init = ablk_ctr_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_decrypt,
			.geniv = "chainiv",
		},
	},
};
#endif

#ifdef HAS_LRW
static int ablk_lrw_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("fpu(lrw(__driver-aes-aesni))",
					     0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

static struct crypto_alg ablk_lrw_alg = {
	.cra_name = "lrw(aes)",
	.cra_driver_name = "lrw-aes-aesni",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_aes_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_list = LIST_HEAD_INIT(ablk_lrw_alg.cra_list),
	.cra_init = ablk_lrw_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = AES_MIN_KEY_SIZE + AES_BLOCK_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE + AES_BLOCK_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
};
#endif

#ifdef HAS_PCBC
static int ablk_pcbc_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("fpu(pcbc(__driver-aes-aesni))",
					     0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

static struct crypto_alg ablk_pcbc_alg = {
	.cra_name = "pcbc(aes)",
	.cra_driver_name = "pcbc-aes-aesni",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_aes_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_list = LIST_HEAD_INIT(ablk_pcbc_alg.cra_list),
	.cra_init = ablk_pcbc_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
};
#endif

#ifdef HAS_XTS
static int ablk_xts_init(struct crypto_tfm *tfm)
{
	struct cryptd_ablkcipher *cryptd_tfm;

	cryptd_tfm = cryptd_alloc_ablkcipher("fpu(xts(__driver-aes-aesni))",
					     0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ablk_init_common(tfm, cryptd_tfm);
	return 0;
}

static struct crypto_alg ablk_xts_alg = {
	.cra_name = "xts(aes)",
	.cra_driver_name = "xts-aes-aesni",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_aes_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_list = LIST_HEAD_INIT(ablk_xts_alg.cra_list),
	.cra_init = ablk_xts_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = 2 * AES_MIN_KEY_SIZE,
			.max_keysize = 2 * AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
};
#endif

static int __init aesni_init(void)
{
	int err;
@@ -421,6 +637,8 @@ static int __init aesni_init(void)
	}
	if ((err = crypto_register_alg(&aesni_alg)))
		goto aes_err;
	if ((err = crypto_register_alg(&__aesni_alg)))
		goto __aes_err;
	if ((err = crypto_register_alg(&blk_ecb_alg)))
		goto blk_ecb_err;
	if ((err = crypto_register_alg(&blk_cbc_alg)))
@@ -429,9 +647,41 @@ static int __init aesni_init(void)
		goto ablk_ecb_err;
	if ((err = crypto_register_alg(&ablk_cbc_alg)))
		goto ablk_cbc_err;
#ifdef HAS_CTR
	if ((err = crypto_register_alg(&ablk_ctr_alg)))
		goto ablk_ctr_err;
#endif
#ifdef HAS_LRW
	if ((err = crypto_register_alg(&ablk_lrw_alg)))
		goto ablk_lrw_err;
#endif
#ifdef HAS_PCBC
	if ((err = crypto_register_alg(&ablk_pcbc_alg)))
		goto ablk_pcbc_err;
#endif
#ifdef HAS_XTS
	if ((err = crypto_register_alg(&ablk_xts_alg)))
		goto ablk_xts_err;
#endif

	return err;

#ifdef HAS_XTS
ablk_xts_err:
#endif
#ifdef HAS_PCBC
	crypto_unregister_alg(&ablk_pcbc_alg);
ablk_pcbc_err:
#endif
#ifdef HAS_LRW
	crypto_unregister_alg(&ablk_lrw_alg);
ablk_lrw_err:
#endif
#ifdef HAS_CTR
	crypto_unregister_alg(&ablk_ctr_alg);
ablk_ctr_err:
#endif
	crypto_unregister_alg(&ablk_cbc_alg);
ablk_cbc_err:
	crypto_unregister_alg(&ablk_ecb_alg);
ablk_ecb_err:
@@ -439,6 +689,8 @@ ablk_ecb_err:
blk_cbc_err:
	crypto_unregister_alg(&blk_ecb_alg);
blk_ecb_err:
	crypto_unregister_alg(&__aesni_alg);
__aes_err:
	crypto_unregister_alg(&aesni_alg);
aes_err:
	return err;
@@ -446,10 +698,23 @@ aes_err:

static void __exit aesni_exit(void)
{
#ifdef HAS_XTS
	crypto_unregister_alg(&ablk_xts_alg);
#endif
#ifdef HAS_PCBC
	crypto_unregister_alg(&ablk_pcbc_alg);
#endif
#ifdef HAS_LRW
	crypto_unregister_alg(&ablk_lrw_alg);
#endif
#ifdef HAS_CTR
	crypto_unregister_alg(&ablk_ctr_alg);
#endif
	crypto_unregister_alg(&ablk_cbc_alg);
	crypto_unregister_alg(&ablk_ecb_alg);
	crypto_unregister_alg(&blk_cbc_alg);
	crypto_unregister_alg(&blk_ecb_alg);
	crypto_unregister_alg(&__aesni_alg);
	crypto_unregister_alg(&aesni_alg);
}
arch/x86/crypto/fpu.c | 166 (new file)
@@ -0,0 +1,166 @@
/*
 * FPU: Wrapper for blkcipher touching fpu
 *
 * Copyright (c) Intel Corp.
 * Author: Huang Ying <ying.huang@intel.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <asm/i387.h>

struct crypto_fpu_ctx {
	struct crypto_blkcipher *child;
};

static int crypto_fpu_setkey(struct crypto_tfm *parent, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_fpu_ctx *ctx = crypto_tfm_ctx(parent);
	struct crypto_blkcipher *child = ctx->child;
	int err;

	crypto_blkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_blkcipher_set_flags(child, crypto_tfm_get_flags(parent) &
				   CRYPTO_TFM_REQ_MASK);
	err = crypto_blkcipher_setkey(child, key, keylen);
	crypto_tfm_set_flags(parent, crypto_blkcipher_get_flags(child) &
			     CRYPTO_TFM_RES_MASK);
	return err;
}

static int crypto_fpu_encrypt(struct blkcipher_desc *desc_in,
			      struct scatterlist *dst, struct scatterlist *src,
			      unsigned int nbytes)
{
	int err;
	struct crypto_fpu_ctx *ctx = crypto_blkcipher_ctx(desc_in->tfm);
	struct crypto_blkcipher *child = ctx->child;
	struct blkcipher_desc desc = {
		.tfm = child,
		.info = desc_in->info,
		.flags = desc_in->flags,
	};

	kernel_fpu_begin();
	err = crypto_blkcipher_crt(desc.tfm)->encrypt(&desc, dst, src, nbytes);
	kernel_fpu_end();
	return err;
}

static int crypto_fpu_decrypt(struct blkcipher_desc *desc_in,
			      struct scatterlist *dst, struct scatterlist *src,
			      unsigned int nbytes)
{
	int err;
	struct crypto_fpu_ctx *ctx = crypto_blkcipher_ctx(desc_in->tfm);
	struct crypto_blkcipher *child = ctx->child;
	struct blkcipher_desc desc = {
		.tfm = child,
		.info = desc_in->info,
		.flags = desc_in->flags,
	};

	kernel_fpu_begin();
	err = crypto_blkcipher_crt(desc.tfm)->decrypt(&desc, dst, src, nbytes);
	kernel_fpu_end();
	return err;
}

static int crypto_fpu_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
	struct crypto_fpu_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_blkcipher *cipher;

	cipher = crypto_spawn_blkcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	return 0;
}

static void crypto_fpu_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_fpu_ctx *ctx = crypto_tfm_ctx(tfm);
	crypto_free_blkcipher(ctx->child);
}

static struct crypto_instance *crypto_fpu_alloc(struct rtattr **tb)
{
	struct crypto_instance *inst;
	struct crypto_alg *alg;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
	if (err)
		return ERR_PTR(err);

	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_BLKCIPHER,
				  CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	inst = crypto_alloc_instance("fpu", alg);
	if (IS_ERR(inst))
		goto out_put_alg;

	inst->alg.cra_flags = alg->cra_flags;
	inst->alg.cra_priority = alg->cra_priority;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;
	inst->alg.cra_type = alg->cra_type;
	inst->alg.cra_blkcipher.ivsize = alg->cra_blkcipher.ivsize;
	inst->alg.cra_blkcipher.min_keysize = alg->cra_blkcipher.min_keysize;
	inst->alg.cra_blkcipher.max_keysize = alg->cra_blkcipher.max_keysize;
	inst->alg.cra_ctxsize = sizeof(struct crypto_fpu_ctx);
	inst->alg.cra_init = crypto_fpu_init_tfm;
	inst->alg.cra_exit = crypto_fpu_exit_tfm;
	inst->alg.cra_blkcipher.setkey = crypto_fpu_setkey;
	inst->alg.cra_blkcipher.encrypt = crypto_fpu_encrypt;
	inst->alg.cra_blkcipher.decrypt = crypto_fpu_decrypt;

out_put_alg:
	crypto_mod_put(alg);
	return inst;
}

static void crypto_fpu_free(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(inst);
}

static struct crypto_template crypto_fpu_tmpl = {
	.name = "fpu",
	.alloc = crypto_fpu_alloc,
	.free = crypto_fpu_free,
	.module = THIS_MODULE,
};

static int __init crypto_fpu_module_init(void)
{
	return crypto_register_template(&crypto_fpu_tmpl);
}

static void __exit crypto_fpu_module_exit(void)
{
	crypto_unregister_template(&crypto_fpu_tmpl);
}

module_init(crypto_fpu_module_init);
module_exit(crypto_fpu_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("FPU block cipher wrapper");
crypto/Kconfig
@@ -241,6 +241,11 @@ config CRYPTO_XTS
	  key size 256, 384 or 512 bits. This implementation currently
	  can't handle a sectorsize which is not a multiple of 16 bytes.

config CRYPTO_FPU
	tristate
	select CRYPTO_BLKCIPHER
	select CRYPTO_MANAGER

comment "Hash modes"

config CRYPTO_HMAC
@@ -486,6 +491,7 @@ config CRYPTO_AES_NI_INTEL
	select CRYPTO_AES_X86_64
	select CRYPTO_CRYPTD
	select CRYPTO_ALGAPI
	select CRYPTO_FPU
	help
	  Use Intel AES-NI instructions for AES algorithm.

@@ -505,6 +511,10 @@ config CRYPTO_AES_NI_INTEL

	  See <http://csrc.nist.gov/encryption/aes/> for more information.

	  In addition to AES cipher algorithm support, the
	  acceleration for some popular block cipher mode is supported
	  too, including ECB, CBC, CTR, LRW, PCBC, XTS.

config CRYPTO_ANUBIS
	tristate "Anubis cipher algorithm"
	select CRYPTO_ALGAPI
crypto/algboss.c
@@ -280,29 +280,13 @@ static struct notifier_block cryptomgr_notifier = {

static int __init cryptomgr_init(void)
{
	int err;

	err = testmgr_init();
	if (err)
		return err;

	err = crypto_register_notifier(&cryptomgr_notifier);
	if (err)
		goto free_testmgr;

	return 0;

free_testmgr:
	testmgr_exit();
	return err;
	return crypto_register_notifier(&cryptomgr_notifier);
}

static void __exit cryptomgr_exit(void)
{
	int err = crypto_unregister_notifier(&cryptomgr_notifier);
	BUG_ON(err);

	testmgr_exit();
}

subsys_initcall(cryptomgr_init);
crypto/api.c | 14
@@ -217,14 +217,11 @@ struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask)

	alg = crypto_alg_lookup(name, type, mask);
	if (!alg) {
		char tmp[CRYPTO_MAX_ALG_NAME];

		request_module(name);
		request_module("%s", name);

		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
		      CRYPTO_ALG_NEED_FALLBACK) &&
		    snprintf(tmp, sizeof(tmp), "%s-all", name) < sizeof(tmp))
			request_module(tmp);
		      CRYPTO_ALG_NEED_FALLBACK))
			request_module("%s-all", name);

		alg = crypto_alg_lookup(name, type, mask);
	}
@@ -580,20 +577,17 @@ EXPORT_SYMBOL_GPL(crypto_alloc_tfm);
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;
	int size;

	if (unlikely(!mem))
		return;

	alg = tfm->__crt_alg;
	size = ksize(mem);

	if (!tfm->exit && alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	memset(mem, 0, size);
	kfree(mem);
	kzfree(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);
crypto/cryptd.c
@@ -586,20 +586,24 @@ struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
						  u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct crypto_ablkcipher *tfm;
	struct crypto_tfm *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_ablkcipher(cryptd_alg_name, type, mask);
	type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
	mask &= ~CRYPTO_ALG_TYPE_MASK;
	mask |= (CRYPTO_ALG_GENIV | CRYPTO_ALG_TYPE_BLKCIPHER_MASK);
	tfm = crypto_alloc_base(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (crypto_ablkcipher_tfm(tfm)->__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_ablkcipher(tfm);
	if (tfm->__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_tfm(tfm);
		return ERR_PTR(-EINVAL);
	}

	return __cryptd_ablkcipher_cast(tfm);
	return __cryptd_ablkcipher_cast(__crypto_ablkcipher_cast(tfm));
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ablkcipher);
crypto/internal.h
@@ -121,9 +121,6 @@ int crypto_register_notifier(struct notifier_block *nb);
int crypto_unregister_notifier(struct notifier_block *nb);
int crypto_probing_notify(unsigned long val, void *v);

int __init testmgr_init(void);
void testmgr_exit(void);

static inline void crypto_alg_put(struct crypto_alg *alg)
{
	if (atomic_dec_and_test(&alg->cra_refcnt) && alg->cra_destroy)
@@ -26,6 +26,7 @@
#include <linux/string.h>

#include <crypto/compress.h>
#include <crypto/internal/compress.h>

#include "internal.h"
crypto/tcrypt.c | 183
@ -27,6 +27,7 @@
|
||||
#include <linux/timex.h>
|
||||
#include <linux/interrupt.h>
|
||||
#include "tcrypt.h"
|
||||
#include "internal.h"
|
||||
|
||||
/*
|
||||
* Need slab memory for testing (size in number of pages).
|
||||
@ -396,16 +397,16 @@ static void test_hash_speed(const char *algo, unsigned int sec,
|
||||
struct scatterlist sg[TVMEMSIZE];
|
||||
struct crypto_hash *tfm;
|
||||
struct hash_desc desc;
|
||||
char output[1024];
|
||||
static char output[1024];
|
||||
int i;
|
||||
int ret;
|
||||
|
||||
printk("\ntesting speed of %s\n", algo);
|
||||
printk(KERN_INFO "\ntesting speed of %s\n", algo);
|
||||
|
||||
tfm = crypto_alloc_hash(algo, 0, CRYPTO_ALG_ASYNC);
|
||||
|
||||
if (IS_ERR(tfm)) {
|
||||
printk("failed to load transform for %s: %ld\n", algo,
|
||||
printk(KERN_ERR "failed to load transform for %s: %ld\n", algo,
|
||||
PTR_ERR(tfm));
|
||||
return;
|
||||
}
|
||||
@ -414,7 +415,7 @@ static void test_hash_speed(const char *algo, unsigned int sec,
|
||||
desc.flags = 0;
|
||||
|
||||
if (crypto_hash_digestsize(tfm) > sizeof(output)) {
|
||||
printk("digestsize(%u) > outputbuffer(%zu)\n",
|
||||
printk(KERN_ERR "digestsize(%u) > outputbuffer(%zu)\n",
|
||||
crypto_hash_digestsize(tfm), sizeof(output));
|
||||
goto out;
|
||||
}
|
||||
@ -427,12 +428,14 @@ static void test_hash_speed(const char *algo, unsigned int sec,
|
||||
|
||||
for (i = 0; speed[i].blen != 0; i++) {
|
||||
if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
|
||||
printk("template (%u) too big for tvmem (%lu)\n",
|
||||
printk(KERN_ERR
|
||||
"template (%u) too big for tvmem (%lu)\n",
|
||||
speed[i].blen, TVMEMSIZE * PAGE_SIZE);
|
||||
goto out;
|
||||
}
|
||||
|
||||
printk("test%3u (%5u byte blocks,%5u bytes per update,%4u updates): ",
|
||||
printk(KERN_INFO "test%3u "
|
||||
"(%5u byte blocks,%5u bytes per update,%4u updates): ",
|
||||
i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);
|
||||
|
||||
if (sec)
|
||||
@ -443,7 +446,7 @@ static void test_hash_speed(const char *algo, unsigned int sec,
|
||||
speed[i].plen, output);
|
||||
|
||||
if (ret) {
|
||||
printk("hashing failed ret=%d\n", ret);
|
||||
printk(KERN_ERR "hashing failed ret=%d\n", ret);
|
||||
break;
|
||||
}
|
||||
}
|
||||
@ -466,239 +469,255 @@ static void test_available(void)
|
||||
|
||||
static inline int tcrypt_test(const char *alg)
|
||||
{
|
||||
return alg_test(alg, alg, 0, 0);
|
||||
int ret;
|
||||
|
||||
ret = alg_test(alg, alg, 0, 0);
|
||||
/* non-fips algs return -EINVAL in fips mode */
|
||||
if (fips_enabled && ret == -EINVAL)
|
||||
ret = 0;
|
||||
return ret;
|
||||
}
|
||||
|
||||
static void do_test(int m)
|
||||
static int do_test(int m)
|
||||
{
|
||||
int i;
|
||||
int ret = 0;
|
||||
|
||||
switch (m) {
|
||||
case 0:
|
||||
for (i = 1; i < 200; i++)
|
||||
do_test(i);
|
||||
ret += do_test(i);
|
||||
break;
|
||||
|
||||
case 1:
|
||||
tcrypt_test("md5");
|
||||
ret += tcrypt_test("md5");
|
||||
break;
|
||||
|
||||
case 2:
|
||||
tcrypt_test("sha1");
|
||||
ret += tcrypt_test("sha1");
|
||||
break;
|
||||
|
||||
case 3:
|
||||
tcrypt_test("ecb(des)");
|
||||
tcrypt_test("cbc(des)");
|
||||
ret += tcrypt_test("ecb(des)");
|
||||
ret += tcrypt_test("cbc(des)");
|
||||
break;
|
||||
|
||||
case 4:
|
||||
tcrypt_test("ecb(des3_ede)");
|
||||
tcrypt_test("cbc(des3_ede)");
|
||||
ret += tcrypt_test("ecb(des3_ede)");
|
||||
ret += tcrypt_test("cbc(des3_ede)");
|
||||
break;
|
||||
|
||||
case 5:
|
||||
tcrypt_test("md4");
|
||||
ret += tcrypt_test("md4");
|
||||
break;
|
||||
|
||||
case 6:
|
||||
tcrypt_test("sha256");
|
||||
ret += tcrypt_test("sha256");
|
||||
break;
|
||||
|
||||
case 7:
|
||||
tcrypt_test("ecb(blowfish)");
|
||||
tcrypt_test("cbc(blowfish)");
|
||||
ret += tcrypt_test("ecb(blowfish)");
|
||||
ret += tcrypt_test("cbc(blowfish)");
|
||||
break;
|
||||
|
||||
case 8:
|
||||
tcrypt_test("ecb(twofish)");
|
||||
tcrypt_test("cbc(twofish)");
|
||||
ret += tcrypt_test("ecb(twofish)");
|
||||
ret += tcrypt_test("cbc(twofish)");
|
||||
break;
|
||||
|
||||
case 9:
|
||||
tcrypt_test("ecb(serpent)");
|
||||
ret += tcrypt_test("ecb(serpent)");
|
||||
break;
|
||||
|
||||
case 10:
|
||||
tcrypt_test("ecb(aes)");
|
||||
tcrypt_test("cbc(aes)");
|
||||
tcrypt_test("lrw(aes)");
|
||||
tcrypt_test("xts(aes)");
|
||||
tcrypt_test("rfc3686(ctr(aes))");
|
||||
ret += tcrypt_test("ecb(aes)");
|
||||
ret += tcrypt_test("cbc(aes)");
|
||||
ret += tcrypt_test("lrw(aes)");
|
||||
ret += tcrypt_test("xts(aes)");
|
||||
ret += tcrypt_test("ctr(aes)");
|
||||
ret += tcrypt_test("rfc3686(ctr(aes))");
|
||||
break;
|
||||
|
||||
case 11:
|
||||
tcrypt_test("sha384");
|
||||
ret += tcrypt_test("sha384");
|
||||
break;
|
||||
|
||||
case 12:
|
||||
tcrypt_test("sha512");
|
||||
ret += tcrypt_test("sha512");
|
||||
break;
|
||||
|
||||
case 13:
|
||||
tcrypt_test("deflate");
|
||||
ret += tcrypt_test("deflate");
|
||||
break;
|
||||
|
||||
case 14:
|
||||
tcrypt_test("ecb(cast5)");
|
||||
ret += tcrypt_test("ecb(cast5)");
|
||||
break;
|
||||
|
||||
case 15:
|
||||
tcrypt_test("ecb(cast6)");
|
||||
ret += tcrypt_test("ecb(cast6)");
|
||||
break;
|
||||
|
||||
case 16:
|
||||
tcrypt_test("ecb(arc4)");
|
||||
ret += tcrypt_test("ecb(arc4)");
|
||||
break;
|
||||
|
||||
case 17:
|
||||
tcrypt_test("michael_mic");
|
||||
ret += tcrypt_test("michael_mic");
|
||||
break;
|
||||
|
||||
case 18:
|
||||
tcrypt_test("crc32c");
|
||||
ret += tcrypt_test("crc32c");
|
||||
break;
|
||||
|
||||
case 19:
|
||||
tcrypt_test("ecb(tea)");
|
||||
ret += tcrypt_test("ecb(tea)");
|
||||
break;
|
||||
|
||||
case 20:
|
||||
tcrypt_test("ecb(xtea)");
|
||||
ret += tcrypt_test("ecb(xtea)");
|
||||
break;
|
||||
|
||||
case 21:
|
||||
tcrypt_test("ecb(khazad)");
|
||||
ret += tcrypt_test("ecb(khazad)");
|
||||
break;
|
||||
|
||||
case 22:
|
||||
tcrypt_test("wp512");
|
||||
ret += tcrypt_test("wp512");
|
||||
break;
|
||||
|
||||
case 23:
|
||||
tcrypt_test("wp384");
|
||||
ret += tcrypt_test("wp384");
|
||||
break;
|
||||
|
||||
case 24:
|
||||
tcrypt_test("wp256");
|
||||
ret += tcrypt_test("wp256");
|
||||
break;
|
||||
|
||||
case 25:
|
||||
tcrypt_test("ecb(tnepres)");
|
||||
ret += tcrypt_test("ecb(tnepres)");
|
||||
break;
|
||||
|
||||
case 26:
|
||||
tcrypt_test("ecb(anubis)");
|
||||
tcrypt_test("cbc(anubis)");
|
||||
ret += tcrypt_test("ecb(anubis)");
|
||||
ret += tcrypt_test("cbc(anubis)");
|
||||
break;
|
||||
|
||||
case 27:
|
||||
tcrypt_test("tgr192");
|
||||
ret += tcrypt_test("tgr192");
|
||||
break;
|
||||
|
||||
case 28:
|
||||
|
||||
tcrypt_test("tgr160");
|
||||
ret += tcrypt_test("tgr160");
|
||||
break;
|
||||
|
||||
case 29:
|
||||
tcrypt_test("tgr128");
|
||||
ret += tcrypt_test("tgr128");
|
||||
break;
|
||||
|
||||
case 30:
|
||||
tcrypt_test("ecb(xeta)");
|
||||
ret += tcrypt_test("ecb(xeta)");
|
||||
break;
|
||||
|
||||
case 31:
|
||||
tcrypt_test("pcbc(fcrypt)");
|
||||
ret += tcrypt_test("pcbc(fcrypt)");
|
||||
break;
|
||||
|
||||
case 32:
|
||||
tcrypt_test("ecb(camellia)");
|
||||
tcrypt_test("cbc(camellia)");
|
||||
ret += tcrypt_test("ecb(camellia)");
|
||||
ret += tcrypt_test("cbc(camellia)");
|
||||
break;
|
||||
case 33:
|
||||
tcrypt_test("sha224");
|
||||
ret += tcrypt_test("sha224");
|
||||
break;
|
||||
|
||||
case 34:
|
||||
tcrypt_test("salsa20");
|
||||
ret += tcrypt_test("salsa20");
|
||||
break;
|
||||
|
||||
case 35:
|
||||
tcrypt_test("gcm(aes)");
|
||||
ret += tcrypt_test("gcm(aes)");
|
||||
break;
|
||||
|
||||
case 36:
|
||||
tcrypt_test("lzo");
|
||||
ret += tcrypt_test("lzo");
|
||||
break;
|
||||
|
||||
case 37:
|
||||
tcrypt_test("ccm(aes)");
|
||||
ret += tcrypt_test("ccm(aes)");
|
||||
break;
|
||||
|
||||
case 38:
|
||||
tcrypt_test("cts(cbc(aes))");
|
||||
ret += tcrypt_test("cts(cbc(aes))");
|
||||
break;
|
||||
|
||||
case 39:
|
||||
tcrypt_test("rmd128");
|
||||
ret += tcrypt_test("rmd128");
|
||||
break;
|
||||
|
||||
case 40:
|
||||
tcrypt_test("rmd160");
|
||||
ret += tcrypt_test("rmd160");
|
||||
break;
|
||||
|
||||
case 41:
|
||||
tcrypt_test("rmd256");
|
||||
ret += tcrypt_test("rmd256");
|
||||
break;
|
||||
|
||||
case 42:
|
||||
tcrypt_test("rmd320");
|
||||
ret += tcrypt_test("rmd320");
|
||||
break;
|
||||
|
||||
case 43:
|
||||
tcrypt_test("ecb(seed)");
|
||||
ret += tcrypt_test("ecb(seed)");
|
||||
break;
|
||||
|
||||
case 44:
|
||||
tcrypt_test("zlib");
|
||||
ret += tcrypt_test("zlib");
|
||||
break;
|
||||
|
||||
case 45:
|
||||
ret += tcrypt_test("rfc4309(ccm(aes))");
|
||||
break;
|
||||
|
||||
case 100:
|
||||
tcrypt_test("hmac(md5)");
|
||||
ret += tcrypt_test("hmac(md5)");
|
||||
break;
|
||||
|
||||
case 101:
|
||||
tcrypt_test("hmac(sha1)");
|
||||
ret += tcrypt_test("hmac(sha1)");
|
||||
break;
|
||||
|
||||
case 102:
|
||||
tcrypt_test("hmac(sha256)");
|
||||
ret += tcrypt_test("hmac(sha256)");
|
||||
break;
|
||||
|
||||
case 103:
|
||||
tcrypt_test("hmac(sha384)");
|
||||
ret += tcrypt_test("hmac(sha384)");
|
||||
break;
|
||||
|
||||
case 104:
|
||||
tcrypt_test("hmac(sha512)");
|
||||
ret += tcrypt_test("hmac(sha512)");
|
||||
break;
|
||||
|
||||
case 105:
|
||||
tcrypt_test("hmac(sha224)");
|
||||
ret += tcrypt_test("hmac(sha224)");
|
||||
break;
|
||||
|
||||
case 106:
|
||||
tcrypt_test("xcbc(aes)");
|
||||
ret += tcrypt_test("xcbc(aes)");
|
||||
break;
|
||||
|
||||
case 107:
|
||||
tcrypt_test("hmac(rmd128)");
|
||||
ret += tcrypt_test("hmac(rmd128)");
|
||||
break;
|
||||
|
||||
case 108:
|
||||
tcrypt_test("hmac(rmd160)");
|
||||
ret += tcrypt_test("hmac(rmd160)");
|
||||
break;
|
||||
|
||||
case 150:
|
||||
ret += tcrypt_test("ansi_cprng");
|
||||
break;
|
||||
|
||||
case 200:
|
||||
@ -862,6 +881,8 @@ static void do_test(int m)
|
||||
test_available();
|
||||
break;
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
static int __init tcrypt_mod_init(void)
|
||||
@ -875,15 +896,21 @@ static int __init tcrypt_mod_init(void)
|
||||
goto err_free_tv;
|
||||
}
|
||||
|
||||
do_test(mode);
|
||||
err = do_test(mode);
|
||||
if (err) {
|
||||
printk(KERN_ERR "tcrypt: one or more tests failed!\n");
|
||||
goto err_free_tv;
|
||||
}
|
||||
|
||||
/* We intentionaly return -EAGAIN to prevent keeping
|
||||
* the module. It does all its work from init()
|
||||
* and doesn't offer any runtime functionality
|
||||
/* We intentionaly return -EAGAIN to prevent keeping the module,
|
||||
* unless we're running in fips mode. It does all its work from
|
||||
* init() and doesn't offer any runtime functionality, but in
|
||||
* the fips case, checking for a successful load is helpful.
|
||||
* => we don't need it in the memory, do we?
|
||||
* -- mludvig
|
||||
*/
|
||||
err = -EAGAIN;
|
||||
if (!fips_enabled)
|
||||
err = -EAGAIN;
|
||||
|
||||
err_free_tv:
|
||||
for (i = 0; i < TVMEMSIZE && tvmem[i]; i++)
|
||||
|
crypto/testmgr.c | 470
@ -19,6 +19,7 @@
|
||||
#include <linux/scatterlist.h>
|
||||
#include <linux/slab.h>
|
||||
#include <linux/string.h>
|
||||
#include <crypto/rng.h>
|
||||
|
||||
#include "internal.h"
|
||||
#include "testmgr.h"
|
||||
@ -84,10 +85,16 @@ struct hash_test_suite {
|
||||
unsigned int count;
|
||||
};
|
||||
|
||||
struct cprng_test_suite {
|
||||
struct cprng_testvec *vecs;
|
||||
unsigned int count;
|
||||
};
|
||||
|
||||
struct alg_test_desc {
|
||||
const char *alg;
|
||||
int (*test)(const struct alg_test_desc *desc, const char *driver,
|
||||
u32 type, u32 mask);
|
||||
int fips_allowed; /* set if alg is allowed in fips mode */
|
||||
|
||||
union {
|
||||
struct aead_test_suite aead;
|
||||
@ -95,14 +102,12 @@ struct alg_test_desc {
|
||||
struct comp_test_suite comp;
|
||||
struct pcomp_test_suite pcomp;
|
||||
struct hash_test_suite hash;
|
||||
struct cprng_test_suite cprng;
|
||||
} suite;
|
||||
};
|
||||
|
||||
static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
|
||||
|
||||
static char *xbuf[XBUFSIZE];
|
||||
static char *axbuf[XBUFSIZE];
|
||||
|
||||
static void hexdump(unsigned char *buf, unsigned int len)
|
||||
{
|
||||
print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
|
||||
@ -121,6 +126,33 @@ static void tcrypt_complete(struct crypto_async_request *req, int err)
|
||||
complete(&res->completion);
|
||||
}
|
||||
|
||||
static int testmgr_alloc_buf(char *buf[XBUFSIZE])
|
||||
{
|
||||
int i;
|
||||
|
||||
for (i = 0; i < XBUFSIZE; i++) {
|
||||
buf[i] = (void *)__get_free_page(GFP_KERNEL);
|
||||
if (!buf[i])
|
||||
goto err_free_buf;
|
||||
}
|
||||
|
||||
return 0;
|
||||
|
||||
err_free_buf:
|
||||
while (i-- > 0)
|
||||
free_page((unsigned long)buf[i]);
|
||||
|
||||
return -ENOMEM;
|
||||
}
|
||||
|
||||
static void testmgr_free_buf(char *buf[XBUFSIZE])
|
||||
{
|
||||
int i;
|
||||
|
||||
for (i = 0; i < XBUFSIZE; i++)
|
||||
free_page((unsigned long)buf[i]);
|
||||
}
|
||||
|
||||
static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
|
||||
unsigned int tcount)
|
||||
{
|
||||
@ -130,8 +162,12 @@ static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
|
||||
char result[64];
|
||||
struct ahash_request *req;
|
||||
struct tcrypt_result tresult;
|
||||
int ret;
|
||||
void *hash_buff;
|
||||
char *xbuf[XBUFSIZE];
|
||||
int ret = -ENOMEM;
|
||||
|
||||
if (testmgr_alloc_buf(xbuf))
|
||||
goto out_nobuf;
|
||||
|
||||
init_completion(&tresult.completion);
|
||||
|
||||
@ -139,17 +175,25 @@ static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
|
||||
if (!req) {
|
||||
printk(KERN_ERR "alg: hash: Failed to allocate request for "
|
||||
"%s\n", algo);
|
||||
ret = -ENOMEM;
|
||||
goto out_noreq;
|
||||
}
|
||||
ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
|
||||
tcrypt_complete, &tresult);
|
||||
|
||||
j = 0;
|
||||
for (i = 0; i < tcount; i++) {
|
||||
if (template[i].np)
|
||||
continue;
|
||||
|
||||
j++;
|
||||
memset(result, 0, 64);
|
||||
|
||||
hash_buff = xbuf[0];
|
||||
|
||||
ret = -EINVAL;
|
||||
if (WARN_ON(template[i].psize > PAGE_SIZE))
|
||||
goto out;
|
||||
|
||||
memcpy(hash_buff, template[i].plaintext, template[i].psize);
|
||||
sg_init_one(&sg[0], hash_buff, template[i].psize);
|
||||
|
||||
@ -159,7 +203,7 @@ static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
|
||||
template[i].ksize);
|
||||
if (ret) {
|
||||
printk(KERN_ERR "alg: hash: setkey failed on "
|
||||
"test %d for %s: ret=%d\n", i + 1, algo,
|
||||
"test %d for %s: ret=%d\n", j, algo,
|
||||
-ret);
|
||||
goto out;
|
||||
}
|
||||
@ -181,14 +225,14 @@ static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
|
||||
/* fall through */
|
||||
default:
|
||||
printk(KERN_ERR "alg: hash: digest failed on test %d "
|
||||
"for %s: ret=%d\n", i + 1, algo, -ret);
|
||||
"for %s: ret=%d\n", j, algo, -ret);
|
||||
goto out;
|
||||
}
|
||||
|
||||
if (memcmp(result, template[i].digest,
|
||||
crypto_ahash_digestsize(tfm))) {
|
||||
printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
|
||||
i + 1, algo);
|
||||
j, algo);
|
||||
hexdump(result, crypto_ahash_digestsize(tfm));
|
||||
ret = -EINVAL;
|
||||
goto out;
|
||||
@ -203,7 +247,11 @@ static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
|
||||
|
||||
temp = 0;
|
||||
sg_init_table(sg, template[i].np);
|
||||
ret = -EINVAL;
|
||||
for (k = 0; k < template[i].np; k++) {
|
||||
if (WARN_ON(offset_in_page(IDX[k]) +
|
||||
template[i].tap[k] > PAGE_SIZE))
|
||||
goto out;
|
||||
sg_set_buf(&sg[k],
|
||||
memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
|
||||
offset_in_page(IDX[k]),
|
||||
@ -265,6 +313,8 @@ static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
|
||||
out:
|
||||
ahash_request_free(req);
|
||||
out_noreq:
|
||||
testmgr_free_buf(xbuf);
|
||||
out_nobuf:
|
||||
return ret;
|
||||
}
|
||||
|
||||
@ -273,7 +323,7 @@ static int test_aead(struct crypto_aead *tfm, int enc,
|
||||
{
|
||||
const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
|
||||
unsigned int i, j, k, n, temp;
|
||||
int ret = 0;
|
||||
int ret = -ENOMEM;
|
||||
char *q;
|
||||
char *key;
|
||||
struct aead_request *req;
|
||||
@ -285,6 +335,13 @@ static int test_aead(struct crypto_aead *tfm, int enc,
|
||||
void *input;
|
||||
void *assoc;
|
||||
char iv[MAX_IVLEN];
|
||||
char *xbuf[XBUFSIZE];
|
||||
char *axbuf[XBUFSIZE];
|
||||
|
||||
if (testmgr_alloc_buf(xbuf))
|
||||
goto out_noxbuf;
|
||||
if (testmgr_alloc_buf(axbuf))
|
||||
goto out_noaxbuf;
|
||||
|
||||
if (enc == ENCRYPT)
|
||||
e = "encryption";
|
||||
@ -297,7 +354,6 @@ static int test_aead(struct crypto_aead *tfm, int enc,
|
||||
if (!req) {
|
||||
printk(KERN_ERR "alg: aead: Failed to allocate request for "
|
||||
"%s\n", algo);
|
||||
ret = -ENOMEM;
|
||||
goto out;
|
||||
}
|
||||
|
||||
@ -314,6 +370,11 @@ static int test_aead(struct crypto_aead *tfm, int enc,
|
||||
input = xbuf[0];
|
||||
assoc = axbuf[0];
|
||||
|
||||
ret = -EINVAL;
|
||||
if (WARN_ON(template[i].ilen > PAGE_SIZE ||
|
||||
template[i].alen > PAGE_SIZE))
|
||||
goto out;
|
||||
|
||||
memcpy(input, template[i].input, template[i].ilen);
|
||||
memcpy(assoc, template[i].assoc, template[i].alen);
|
||||
if (template[i].iv)
|
||||
@ -363,6 +424,16 @@ static int test_aead(struct crypto_aead *tfm, int enc,
|
||||
|
||||
switch (ret) {
|
||||
case 0:
|
||||
if (template[i].novrfy) {
|
||||
/* verification was supposed to fail */
|
||||
printk(KERN_ERR "alg: aead: %s failed "
|
||||
"on test %d for %s: ret was 0, "
|
||||
"expected -EBADMSG\n",
|
||||
e, j, algo);
|
||||
/* so really, we got a bad message */
|
||||
ret = -EBADMSG;
|
||||
goto out;
|
||||
}
|
||||
break;
|
||||
case -EINPROGRESS:
|
||||
case -EBUSY:
|
||||
@ -372,6 +443,10 @@ static int test_aead(struct crypto_aead *tfm, int enc,
|
||||
INIT_COMPLETION(result.completion);
|
||||
break;
|
||||
}
|
||||
case -EBADMSG:
|
||||
if (template[i].novrfy)
|
||||
/* verification failure was expected */
|
||||
continue;
|
||||
/* fall through */
|
||||
default:
|
||||
printk(KERN_ERR "alg: aead: %s failed on test "
|
||||
@ -459,7 +534,11 @@ static int test_aead(struct crypto_aead *tfm, int enc,
|
||||
}
|
||||
|
||||
sg_init_table(asg, template[i].anp);
|
||||
ret = -EINVAL;
|
||||
for (k = 0, temp = 0; k < template[i].anp; k++) {
|
||||
if (WARN_ON(offset_in_page(IDX[k]) +
|
||||
template[i].atap[k] > PAGE_SIZE))
|
||||
goto out;
|
||||
sg_set_buf(&asg[k],
|
||||
memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
|
||||
offset_in_page(IDX[k]),
|
||||
@ -481,6 +560,16 @@ static int test_aead(struct crypto_aead *tfm, int enc,
|
||||
|
||||
switch (ret) {
|
||||
case 0:
|
||||
if (template[i].novrfy) {
|
||||
/* verification was supposed to fail */
|
||||
printk(KERN_ERR "alg: aead: %s failed "
|
||||
"on chunk test %d for %s: ret "
|
||||
"was 0, expected -EBADMSG\n",
|
||||
e, j, algo);
|
||||
/* so really, we got a bad message */
|
||||
ret = -EBADMSG;
|
||||
goto out;
|
||||
}
|
||||
break;
|
||||
case -EINPROGRESS:
|
||||
case -EBUSY:
|
||||
@ -490,6 +579,10 @@ static int test_aead(struct crypto_aead *tfm, int enc,
|
||||
INIT_COMPLETION(result.completion);
|
||||
break;
|
||||
}
|
||||
case -EBADMSG:
|
||||
if (template[i].novrfy)
|
||||
/* verification failure was expected */
|
||||
continue;
|
||||
/* fall through */
|
||||
default:
|
||||
printk(KERN_ERR "alg: aead: %s failed on "
|
||||
@ -546,6 +639,10 @@ static int test_aead(struct crypto_aead *tfm, int enc,
|
||||
|
||||
out:
|
||||
aead_request_free(req);
|
||||
testmgr_free_buf(axbuf);
|
||||
out_noaxbuf:
|
||||
testmgr_free_buf(xbuf);
|
||||
out_noxbuf:
|
||||
return ret;
|
||||
}
|
||||
|
||||
@ -554,10 +651,14 @@ static int test_cipher(struct crypto_cipher *tfm, int enc,
|
||||
{
|
||||
const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
|
||||
unsigned int i, j, k;
|
||||
int ret;
|
||||
char *q;
|
||||
const char *e;
|
||||
void *data;
|
||||
char *xbuf[XBUFSIZE];
|
||||
int ret = -ENOMEM;
|
||||
|
||||
if (testmgr_alloc_buf(xbuf))
|
||||
goto out_nobuf;
|
||||
|
||||
if (enc == ENCRYPT)
|
||||
e = "encryption";
|
||||
@ -571,6 +672,10 @@ static int test_cipher(struct crypto_cipher *tfm, int enc,
|
||||
|
||||
j++;
|
||||
|
||||
ret = -EINVAL;
|
||||
if (WARN_ON(template[i].ilen > PAGE_SIZE))
|
||||
goto out;
|
||||
|
||||
data = xbuf[0];
|
||||
memcpy(data, template[i].input, template[i].ilen);
|
||||
|
||||
@ -611,6 +716,8 @@ static int test_cipher(struct crypto_cipher *tfm, int enc,
|
||||
ret = 0;
|
||||
|
||||
out:
|
||||
testmgr_free_buf(xbuf);
|
||||
out_nobuf:
|
||||
return ret;
|
||||
}
|
||||
|
||||
@ -620,7 +727,6 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
|
||||
const char *algo =
|
||||
crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
|
||||
unsigned int i, j, k, n, temp;
|
||||
int ret;
|
||||
char *q;
|
||||
struct ablkcipher_request *req;
|
||||
struct scatterlist sg[8];
|
||||
@ -628,6 +734,11 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
|
||||
struct tcrypt_result result;
|
||||
void *data;
|
||||
char iv[MAX_IVLEN];
|
||||
char *xbuf[XBUFSIZE];
|
||||
int ret = -ENOMEM;
|
||||
|
||||
if (testmgr_alloc_buf(xbuf))
|
||||
goto out_nobuf;
|
||||
|
||||
if (enc == ENCRYPT)
|
||||
e = "encryption";
|
||||
@ -640,7 +751,6 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
|
||||
if (!req) {
|
||||
printk(KERN_ERR "alg: skcipher: Failed to allocate request "
|
||||
"for %s\n", algo);
|
||||
ret = -ENOMEM;
|
||||
goto out;
|
||||
}
|
||||
|
||||
@ -657,6 +767,10 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
|
||||
if (!(template[i].np)) {
|
||||
j++;
|
||||
|
||||
ret = -EINVAL;
|
||||
if (WARN_ON(template[i].ilen > PAGE_SIZE))
|
||||
goto out;
|
||||
|
||||
data = xbuf[0];
|
||||
memcpy(data, template[i].input, template[i].ilen);
|
||||
|
||||
@ -825,6 +939,8 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
|
||||
|
||||
out:
|
||||
ablkcipher_request_free(req);
|
||||
testmgr_free_buf(xbuf);
|
||||
out_nobuf:
|
||||
return ret;
|
||||
}
|
||||
|
||||
@ -837,7 +953,8 @@ static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
|
||||
int ret;
|
||||
|
||||
for (i = 0; i < ctcount; i++) {
|
||||
int ilen, dlen = COMP_BUF_SIZE;
|
||||
int ilen;
|
||||
unsigned int dlen = COMP_BUF_SIZE;
|
||||
|
||||
memset(result, 0, sizeof (result));
|
||||
|
||||
@ -869,7 +986,8 @@ static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
|
||||
}
|
||||
|
||||
for (i = 0; i < dtcount; i++) {
|
||||
int ilen, dlen = COMP_BUF_SIZE;
|
||||
int ilen;
|
||||
unsigned int dlen = COMP_BUF_SIZE;
|
||||
|
||||
memset(result, 0, sizeof (result));
|
||||
|
||||
@ -914,24 +1032,25 @@ static int test_pcomp(struct crypto_pcomp *tfm,
|
||||
const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm));
|
||||
unsigned int i;
|
||||
char result[COMP_BUF_SIZE];
|
||||
int error;
|
||||
int res;
|
||||
|
||||
for (i = 0; i < ctcount; i++) {
|
||||
struct comp_request req;
|
||||
unsigned int produced = 0;
|
||||
|
||||
error = crypto_compress_setup(tfm, ctemplate[i].params,
|
||||
ctemplate[i].paramsize);
|
||||
if (error) {
|
||||
res = crypto_compress_setup(tfm, ctemplate[i].params,
|
||||
ctemplate[i].paramsize);
|
||||
if (res) {
|
||||
pr_err("alg: pcomp: compression setup failed on test "
|
||||
"%d for %s: error=%d\n", i + 1, algo, error);
|
||||
return error;
|
||||
"%d for %s: error=%d\n", i + 1, algo, res);
|
||||
return res;
|
||||
}
|
||||
|
||||
error = crypto_compress_init(tfm);
|
||||
if (error) {
|
||||
res = crypto_compress_init(tfm);
|
||||
if (res) {
|
||||
pr_err("alg: pcomp: compression init failed on test "
|
||||
"%d for %s: error=%d\n", i + 1, algo, error);
|
||||
return error;
|
||||
"%d for %s: error=%d\n", i + 1, algo, res);
|
||||
return res;
|
||||
}
|
||||
|
||||
memset(result, 0, sizeof(result));
|
||||
@ -941,32 +1060,37 @@ static int test_pcomp(struct crypto_pcomp *tfm,
|
||||
req.next_out = result;
|
||||
req.avail_out = ctemplate[i].outlen / 2;
|
||||
|
||||
error = crypto_compress_update(tfm, &req);
|
||||
if (error && (error != -EAGAIN || req.avail_in)) {
|
||||
res = crypto_compress_update(tfm, &req);
|
||||
if (res < 0 && (res != -EAGAIN || req.avail_in)) {
|
||||
pr_err("alg: pcomp: compression update failed on test "
|
||||
"%d for %s: error=%d\n", i + 1, algo, error);
|
||||
return error;
|
||||
"%d for %s: error=%d\n", i + 1, algo, res);
|
||||
return res;
|
||||
}
|
||||
if (res > 0)
|
||||
produced += res;
|
||||
|
||||
/* Add remaining input data */
|
||||
req.avail_in += (ctemplate[i].inlen + 1) / 2;
|
||||
|
||||
error = crypto_compress_update(tfm, &req);
|
||||
if (error && (error != -EAGAIN || req.avail_in)) {
|
||||
res = crypto_compress_update(tfm, &req);
|
||||
if (res < 0 && (res != -EAGAIN || req.avail_in)) {
|
||||
pr_err("alg: pcomp: compression update failed on test "
|
||||
"%d for %s: error=%d\n", i + 1, algo, error);
|
||||
return error;
|
||||
"%d for %s: error=%d\n", i + 1, algo, res);
|
||||
return res;
|
||||
}
|
||||
if (res > 0)
|
||||
produced += res;
|
||||
|
||||
/* Provide remaining output space */
|
||||
req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2;
|
||||
|
||||
error = crypto_compress_final(tfm, &req);
|
||||
if (error) {
|
||||
res = crypto_compress_final(tfm, &req);
|
||||
if (res < 0) {
|
||||
pr_err("alg: pcomp: compression final failed on test "
|
||||
"%d for %s: error=%d\n", i + 1, algo, error);
|
||||
return error;
|
||||
"%d for %s: error=%d\n", i + 1, algo, res);
|
||||
return res;
|
||||
}
|
||||
produced += res;
|
||||
|
||||
if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) {
|
||||
pr_err("alg: comp: Compression test %d failed for %s: "
|
||||
@ -976,6 +1100,13 @@ static int test_pcomp(struct crypto_pcomp *tfm,
|
||||
return -EINVAL;
|
||||
}
|
||||
|
||||
if (produced != ctemplate[i].outlen) {
|
||||
pr_err("alg: comp: Compression test %d failed for %s: "
|
||||
"returned len = %u (expected %d)\n", i + 1,
|
||||
algo, produced, ctemplate[i].outlen);
|
||||
return -EINVAL;
|
||||
}
|
||||
|
||||
if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) {
|
||||
pr_err("alg: pcomp: Compression test %d failed for "
|
||||
"%s\n", i + 1, algo);
|
||||
@ -986,21 +1117,21 @@ static int test_pcomp(struct crypto_pcomp *tfm,
|
||||
|
||||
for (i = 0; i < dtcount; i++) {
|
||||
struct comp_request req;
|
||||
unsigned int produced = 0;
|
||||
|
||||
error = crypto_decompress_setup(tfm, dtemplate[i].params,
|
||||
dtemplate[i].paramsize);
|
||||
if (error) {
|
||||
res = crypto_decompress_setup(tfm, dtemplate[i].params,
|
||||
dtemplate[i].paramsize);
|
||||
if (res) {
|
||||
pr_err("alg: pcomp: decompression setup failed on "
|
||||
"test %d for %s: error=%d\n", i + 1, algo,
|
||||
error);
|
||||
return error;
|
||||
"test %d for %s: error=%d\n", i + 1, algo, res);
|
||||
return res;
|
||||
}
|
||||
|
||||
error = crypto_decompress_init(tfm);
|
||||
if (error) {
|
||||
res = crypto_decompress_init(tfm);
|
||||
if (res) {
|
||||
pr_err("alg: pcomp: decompression init failed on test "
|
||||
"%d for %s: error=%d\n", i + 1, algo, error);
|
||||
return error;
|
||||
"%d for %s: error=%d\n", i + 1, algo, res);
|
||||
return res;
|
||||
}
|
||||
|
||||
memset(result, 0, sizeof(result));
|
||||
@ -1010,35 +1141,38 @@ static int test_pcomp(struct crypto_pcomp *tfm,
|
||||
req.next_out = result;
|
||||
req.avail_out = dtemplate[i].outlen / 2;
|
||||
|
||||
error = crypto_decompress_update(tfm, &req);
|
||||
if (error && (error != -EAGAIN || req.avail_in)) {
|
||||
res = crypto_decompress_update(tfm, &req);
|
||||
if (res < 0 && (res != -EAGAIN || req.avail_in)) {
|
||||
pr_err("alg: pcomp: decompression update failed on "
|
||||
"test %d for %s: error=%d\n", i + 1, algo,
|
||||
error);
|
||||
return error;
|
||||
"test %d for %s: error=%d\n", i + 1, algo, res);
|
||||
return res;
|
||||
}
|
||||
if (res > 0)
|
||||
produced += res;
|
||||
|
||||
/* Add remaining input data */
|
||||
req.avail_in += (dtemplate[i].inlen + 1) / 2;
|
||||
|
||||
error = crypto_decompress_update(tfm, &req);
|
||||
if (error && (error != -EAGAIN || req.avail_in)) {
|
||||
res = crypto_decompress_update(tfm, &req);
|
||||
if (res < 0 && (res != -EAGAIN || req.avail_in)) {
|
||||
pr_err("alg: pcomp: decompression update failed on "
|
||||
"test %d for %s: error=%d\n", i + 1, algo,
|
||||
error);
|
||||
return error;
|
||||
"test %d for %s: error=%d\n", i + 1, algo, res);
|
||||
return res;
|
||||
}
|
||||
if (res > 0)
|
||||
produced += res;
|
||||
|
||||
/* Provide remaining output space */
|
||||
req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2;
|
||||
|
||||
error = crypto_decompress_final(tfm, &req);
|
||||
if (error && (error != -EAGAIN || req.avail_in)) {
|
||||
res = crypto_decompress_final(tfm, &req);
|
||||
if (res < 0 && (res != -EAGAIN || req.avail_in)) {
|
||||
pr_err("alg: pcomp: decompression final failed on "
|
||||
"test %d for %s: error=%d\n", i + 1, algo,
|
||||
error);
|
||||
return error;
|
||||
"test %d for %s: error=%d\n", i + 1, algo, res);
|
||||
return res;
|
||||
}
|
||||
if (res > 0)
|
||||
produced += res;
|
||||
|
||||
if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) {
|
||||
pr_err("alg: comp: Decompression test %d failed for "
|
||||
@ -1048,6 +1182,13 @@ static int test_pcomp(struct crypto_pcomp *tfm,
|
||||
return -EINVAL;
|
||||
}
|
||||
|
||||
if (produced != dtemplate[i].outlen) {
|
||||
pr_err("alg: comp: Decompression test %d failed for "
|
||||
"%s: returned len = %u (expected %d)\n", i + 1,
|
||||
algo, produced, dtemplate[i].outlen);
|
||||
return -EINVAL;
|
||||
}
|
||||
|
||||
if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) {
|
||||
pr_err("alg: pcomp: Decompression test %d failed for "
|
||||
"%s\n", i + 1, algo);
|
||||
@ -1059,6 +1200,68 @@ static int test_pcomp(struct crypto_pcomp *tfm,
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
|
||||
unsigned int tcount)
|
||||
{
|
||||
const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
|
||||
int err, i, j, seedsize;
|
||||
u8 *seed;
|
||||
char result[32];
|
||||
|
||||
seedsize = crypto_rng_seedsize(tfm);
|
||||
|
||||
seed = kmalloc(seedsize, GFP_KERNEL);
|
||||
if (!seed) {
|
||||
printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
|
||||
"for %s\n", algo);
|
||||
return -ENOMEM;
|
||||
}
|
||||
|
||||
for (i = 0; i < tcount; i++) {
|
||||
memset(result, 0, 32);
|
||||
|
||||
memcpy(seed, template[i].v, template[i].vlen);
|
||||
memcpy(seed + template[i].vlen, template[i].key,
|
||||
template[i].klen);
|
||||
memcpy(seed + template[i].vlen + template[i].klen,
|
||||
template[i].dt, template[i].dtlen);
|
||||
|
||||
err = crypto_rng_reset(tfm, seed, seedsize);
|
||||
if (err) {
|
||||
printk(KERN_ERR "alg: cprng: Failed to reset rng "
|
||||
"for %s\n", algo);
|
||||
goto out;
|
||||
}
|
||||
|
||||
for (j = 0; j < template[i].loops; j++) {
|
||||
err = crypto_rng_get_bytes(tfm, result,
|
||||
template[i].rlen);
|
||||
if (err != template[i].rlen) {
|
||||
printk(KERN_ERR "alg: cprng: Failed to obtain "
|
||||
"the correct amount of random data for "
|
||||
"%s (requested %d, got %d)\n", algo,
|
||||
template[i].rlen, err);
|
||||
goto out;
|
||||
}
|
||||
}
|
||||
|
||||
err = memcmp(result, template[i].result,
|
||||
template[i].rlen);
|
||||
if (err) {
|
||||
printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
|
||||
i, algo);
|
||||
hexdump(result, template[i].rlen);
|
||||
err = -EINVAL;
|
||||
goto out;
|
||||
}
|
||||
}
|
||||
|
||||
out:
|
||||
kfree(seed);
|
||||
return err;
|
||||
}
|
||||
|
||||
static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
|
||||
u32 type, u32 mask)
|
||||
{
|
||||
@ -1258,11 +1461,42 @@ out:
|
||||
return err;
|
||||
}
|
||||
|
||||
static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
|
||||
u32 type, u32 mask)
|
||||
{
|
||||
struct crypto_rng *rng;
|
||||
int err;
|
||||
|
||||
rng = crypto_alloc_rng(driver, type, mask);
|
||||
if (IS_ERR(rng)) {
|
||||
printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
|
||||
"%ld\n", driver, PTR_ERR(rng));
|
||||
return PTR_ERR(rng);
|
||||
}
|
||||
|
||||
err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
|
||||
|
||||
crypto_free_rng(rng);
|
||||
|
||||
return err;
|
||||
}
|
||||
|
||||
/* Please keep this list sorted by algorithm name. */
|
||||
static const struct alg_test_desc alg_test_descs[] = {
|
||||
{
|
||||
.alg = "ansi_cprng",
|
||||
.test = alg_test_cprng,
|
||||
.fips_allowed = 1,
|
||||
.suite = {
|
||||
.cprng = {
|
||||
.vecs = ansi_cprng_aes_tv_template,
|
||||
.count = ANSI_CPRNG_AES_TEST_VECTORS
|
||||
}
|
||||
}
|
||||
}, {
|
||||
.alg = "cbc(aes)",
|
||||
.test = alg_test_skcipher,
|
||||
.fips_allowed = 1,
|
||||
.suite = {
|
||||
.cipher = {
|
||||
.enc = {
|
||||
@ -1338,6 +1572,7 @@ static const struct alg_test_desc alg_test_descs[] = {
|
||||
}, {
|
||||
.alg = "cbc(des3_ede)",
|
||||
.test = alg_test_skcipher,
|
||||
.fips_allowed = 1,
|
||||
.suite = {
|
||||
.cipher = {
|
||||
.enc = {
|
||||
@ -1368,6 +1603,7 @@ static const struct alg_test_desc alg_test_descs[] = {
|
||||
}, {
|
||||
.alg = "ccm(aes)",
|
||||
.test = alg_test_aead,
|
||||
.fips_allowed = 1,
|
||||
.suite = {
|
||||
.aead = {
|
||||
.enc = {
|
||||
@ -1383,12 +1619,29 @@ static const struct alg_test_desc alg_test_descs[] = {
|
||||
}, {
|
||||
.alg = "crc32c",
|
||||
.test = alg_test_crc32c,
|
||||
.fips_allowed = 1,
|
||||
.suite = {
|
||||
.hash = {
|
||||
.vecs = crc32c_tv_template,
|
||||
.count = CRC32C_TEST_VECTORS
|
||||
}
|
||||
}
|
||||
}, {
|
||||
.alg = "ctr(aes)",
|
||||
.test = alg_test_skcipher,
|
||||
.fips_allowed = 1,
|
||||
.suite = {
|
||||
.cipher = {
|
||||
.enc = {
|
||||
.vecs = aes_ctr_enc_tv_template,
|
||||
.count = AES_CTR_ENC_TEST_VECTORS
|
||||
},
|
||||
.dec = {
|
||||
.vecs = aes_ctr_dec_tv_template,
|
||||
.count = AES_CTR_DEC_TEST_VECTORS
|
||||
}
|
||||
}
|
||||
}
|
||||
}, {
|
||||
.alg = "cts(cbc(aes))",
|
||||
.test = alg_test_skcipher,
|
||||
@ -1422,6 +1675,7 @@ static const struct alg_test_desc alg_test_descs[] = {
|
||||
}, {
|
||||
.alg = "ecb(aes)",
|
||||
.test = alg_test_skcipher,
|
||||
.fips_allowed = 1,
|
||||
.suite = {
|
||||
.cipher = {
|
||||
.enc = {
|
||||
@ -1527,6 +1781,7 @@ static const struct alg_test_desc alg_test_descs[] = {
|
||||
}, {
|
||||
.alg = "ecb(des)",
|
||||
.test = alg_test_skcipher,
|
||||
.fips_allowed = 1,
|
||||
.suite = {
|
||||
.cipher = {
|
||||
.enc = {
|
||||
@ -1542,6 +1797,7 @@ static const struct alg_test_desc alg_test_descs[] = {
|
||||
}, {
|
||||
.alg = "ecb(des3_ede)",
|
||||
.test = alg_test_skcipher,
|
||||
.fips_allowed = 1,
|
||||
.suite = {
|
||||
.cipher = {
|
||||
.enc = {
|
||||
@ -1677,6 +1933,7 @@ static const struct alg_test_desc alg_test_descs[] = {
|
||||
}, {
|
||||
.alg = "gcm(aes)",
|
||||
.test = alg_test_aead,
|
||||
.fips_allowed = 1,
|
||||
.suite = {
|
||||
.aead = {
|
||||
.enc = {
|
||||
@ -1719,6 +1976,7 @@ static const struct alg_test_desc alg_test_descs[] = {
|
||||
}, {
|
||||
.alg = "hmac(sha1)",
|
||||
.test = alg_test_hash,
|
||||
.fips_allowed = 1,
|
||||
.suite = {
|
||||
.hash = {
|
||||
.vecs = hmac_sha1_tv_template,
|
||||
@ -1728,6 +1986,7 @@ static const struct alg_test_desc alg_test_descs[] = {
|
||||
}, {
|
||||
.alg = "hmac(sha224)",
|
||||
.test = alg_test_hash,
|
||||
.fips_allowed = 1,
|
||||
.suite = {
|
||||
.hash = {
|
||||
.vecs = hmac_sha224_tv_template,
|
||||
@ -1737,6 +1996,7 @@ static const struct alg_test_desc alg_test_descs[] = {
|
||||
}, {
|
||||
.alg = "hmac(sha256)",
|
||||
.test = alg_test_hash,
|
||||
.fips_allowed = 1,
|
||||
.suite = {
|
||||
.hash = {
|
||||
.vecs = hmac_sha256_tv_template,
|
||||
@ -1746,6 +2006,7 @@ static const struct alg_test_desc alg_test_descs[] = {
|
||||
}, {
|
||||
.alg = "hmac(sha384)",
|
||||
.test = alg_test_hash,
|
||||
.fips_allowed = 1,
|
||||
.suite = {
|
||||
.hash = {
|
||||
.vecs = hmac_sha384_tv_template,
|
||||
@ -1755,6 +2016,7 @@ static const struct alg_test_desc alg_test_descs[] = {
|
||||
}, {
|
||||
.alg = "hmac(sha512)",
|
||||
.test = alg_test_hash,
|
||||
.fips_allowed = 1,
|
||||
.suite = {
|
||||
.hash = {
|
||||
.vecs = hmac_sha512_tv_template,
|
||||
@ -1836,15 +2098,32 @@ static const struct alg_test_desc alg_test_descs[] = {
|
||||
}, {
|
||||
.alg = "rfc3686(ctr(aes))",
|
||||
.test = alg_test_skcipher,
|
||||
.fips_allowed = 1,
|
||||
.suite = {
|
||||
.cipher = {
|
||||
.enc = {
|
||||
.vecs = aes_ctr_enc_tv_template,
|
||||
.count = AES_CTR_ENC_TEST_VECTORS
|
||||
.vecs = aes_ctr_rfc3686_enc_tv_template,
|
||||
.count = AES_CTR_3686_ENC_TEST_VECTORS
|
||||
},
|
||||
.dec = {
|
||||
.vecs = aes_ctr_dec_tv_template,
|
||||
.count = AES_CTR_DEC_TEST_VECTORS
|
||||
.vecs = aes_ctr_rfc3686_dec_tv_template,
|
||||
.count = AES_CTR_3686_DEC_TEST_VECTORS
|
||||
}
|
||||
}
|
||||
}
|
||||
}, {
|
||||
.alg = "rfc4309(ccm(aes))",
|
||||
.test = alg_test_aead,
|
||||
.fips_allowed = 1,
|
||||
.suite = {
|
||||
.aead = {
|
||||
.enc = {
|
||||
.vecs = aes_ccm_rfc4309_enc_tv_template,
|
||||
.count = AES_CCM_4309_ENC_TEST_VECTORS
|
||||
},
|
||||
.dec = {
|
||||
.vecs = aes_ccm_rfc4309_dec_tv_template,
|
||||
.count = AES_CCM_4309_DEC_TEST_VECTORS
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1898,6 +2177,7 @@ static const struct alg_test_desc alg_test_descs[] = {
|
||||
}, {
|
||||
.alg = "sha1",
|
||||
.test = alg_test_hash,
|
||||
.fips_allowed = 1,
|
||||
.suite = {
|
||||
.hash = {
|
||||
.vecs = sha1_tv_template,
|
||||
@ -1907,6 +2187,7 @@ static const struct alg_test_desc alg_test_descs[] = {
|
||||
}, {
|
||||
.alg = "sha224",
|
||||
.test = alg_test_hash,
|
||||
.fips_allowed = 1,
|
||||
.suite = {
|
||||
.hash = {
|
||||
.vecs = sha224_tv_template,
|
||||
@ -1916,6 +2197,7 @@ static const struct alg_test_desc alg_test_descs[] = {
|
||||
}, {
|
||||
.alg = "sha256",
|
||||
.test = alg_test_hash,
|
||||
.fips_allowed = 1,
|
||||
.suite = {
|
||||
.hash = {
|
||||
.vecs = sha256_tv_template,
|
||||
@ -1925,6 +2207,7 @@ static const struct alg_test_desc alg_test_descs[] = {
|
||||
}, {
|
||||
.alg = "sha384",
|
||||
.test = alg_test_hash,
|
||||
.fips_allowed = 1,
|
||||
.suite = {
|
||||
.hash = {
|
||||
.vecs = sha384_tv_template,
|
||||
@ -1934,6 +2217,7 @@ static const struct alg_test_desc alg_test_descs[] = {
|
||||
}, {
|
||||
.alg = "sha512",
|
||||
.test = alg_test_hash,
|
||||
.fips_allowed = 1,
|
||||
.suite = {
|
||||
.hash = {
|
||||
.vecs = sha512_tv_template,
|
||||
@@ -2077,60 +2361,36 @@ int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
		if (i < 0)
			goto notest;

		return alg_test_cipher(alg_test_descs + i, driver, type, mask);
		if (fips_enabled && !alg_test_descs[i].fips_allowed)
			goto non_fips_alg;

		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
		goto test_done;
	}

	i = alg_find_test(alg);
	if (i < 0)
		goto notest;

	if (fips_enabled && !alg_test_descs[i].fips_allowed)
		goto non_fips_alg;

	rc = alg_test_descs[i].test(alg_test_descs + i, driver,
				    type, mask);
test_done:
	if (fips_enabled && rc)
		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);

	if (fips_enabled && !rc)
		printk(KERN_INFO "alg: self-tests for %s (%s) passed\n",
		       driver, alg);

	return rc;

notest:
	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
	return 0;
non_fips_alg:
	return -EINVAL;
}
EXPORT_SYMBOL_GPL(alg_test);

int __init testmgr_init(void)
{
	int i;

	for (i = 0; i < XBUFSIZE; i++) {
		xbuf[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!xbuf[i])
			goto err_free_xbuf;
	}

	for (i = 0; i < XBUFSIZE; i++) {
		axbuf[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!axbuf[i])
			goto err_free_axbuf;
	}

	return 0;

err_free_axbuf:
	for (i = 0; i < XBUFSIZE && axbuf[i]; i++)
		free_page((unsigned long)axbuf[i]);
err_free_xbuf:
	for (i = 0; i < XBUFSIZE && xbuf[i]; i++)
		free_page((unsigned long)xbuf[i]);

	return -ENOMEM;
}

void testmgr_exit(void)
{
	int i;

	for (i = 0; i < XBUFSIZE; i++)
		free_page((unsigned long)axbuf[i]);
	for (i = 0; i < XBUFSIZE; i++)
		free_page((unsigned long)xbuf[i]);
}

crypto/testmgr.h | 645
@@ -62,6 +62,7 @@ struct aead_testvec {
	int np;
	int anp;
	unsigned char fail;
	unsigned char novrfy;	/* ccm dec verification failure expected */
	unsigned char wk;	/* weak key flag */
	unsigned char klen;
	unsigned short ilen;
@@ -69,6 +70,18 @@ struct aead_testvec {
	unsigned short rlen;
};

struct cprng_testvec {
	char *key;
	char *dt;
	char *v;
	char *result;
	unsigned char klen;
	unsigned short dtlen;
	unsigned short vlen;
	unsigned short rlen;
	unsigned short loops;
};

static char zeroed_string[48];

/*
@@ -2841,12 +2854,16 @@ static struct cipher_testvec cast6_dec_tv_template[] = {
#define AES_LRW_DEC_TEST_VECTORS 8
#define AES_XTS_ENC_TEST_VECTORS 4
#define AES_XTS_DEC_TEST_VECTORS 4
#define AES_CTR_ENC_TEST_VECTORS 7
#define AES_CTR_DEC_TEST_VECTORS 6
#define AES_CTR_ENC_TEST_VECTORS 3
#define AES_CTR_DEC_TEST_VECTORS 3
#define AES_CTR_3686_ENC_TEST_VECTORS 7
#define AES_CTR_3686_DEC_TEST_VECTORS 6
#define AES_GCM_ENC_TEST_VECTORS 9
#define AES_GCM_DEC_TEST_VECTORS 8
#define AES_CCM_ENC_TEST_VECTORS 7
#define AES_CCM_DEC_TEST_VECTORS 7
#define AES_CCM_4309_ENC_TEST_VECTORS 7
#define AES_CCM_4309_DEC_TEST_VECTORS 10

static struct cipher_testvec aes_enc_tv_template[] = {
{ /* From FIPS-197 */
@@ -3983,6 +4000,164 @@ static struct cipher_testvec aes_xts_dec_tv_template[] = {


static struct cipher_testvec aes_ctr_enc_tv_template[] = {
{ /* From NIST Special Publication 800-38A, Appendix F.5 */
.key = "\x2b\x7e\x15\x16\x28\xae\xd2\xa6"
|
||||
"\xab\xf7\x15\x88\x09\xcf\x4f\x3c",
|
||||
.klen = 16,
|
||||
.iv = "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
|
||||
"\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
|
||||
.input = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96"
|
||||
"\xe9\x3d\x7e\x11\x73\x93\x17\x2a"
|
||||
"\xae\x2d\x8a\x57\x1e\x03\xac\x9c"
|
||||
"\x9e\xb7\x6f\xac\x45\xaf\x8e\x51"
|
||||
"\x30\xc8\x1c\x46\xa3\x5c\xe4\x11"
|
||||
"\xe5\xfb\xc1\x19\x1a\x0a\x52\xef"
|
||||
"\xf6\x9f\x24\x45\xdf\x4f\x9b\x17"
|
||||
"\xad\x2b\x41\x7b\xe6\x6c\x37\x10",
|
||||
.ilen = 64,
|
||||
.result = "\x87\x4d\x61\x91\xb6\x20\xe3\x26"
|
||||
"\x1b\xef\x68\x64\x99\x0d\xb6\xce"
|
||||
"\x98\x06\xf6\x6b\x79\x70\xfd\xff"
|
||||
"\x86\x17\x18\x7b\xb9\xff\xfd\xff"
|
||||
"\x5a\xe4\xdf\x3e\xdb\xd5\xd3\x5e"
|
||||
"\x5b\x4f\x09\x02\x0d\xb0\x3e\xab"
|
||||
"\x1e\x03\x1d\xda\x2f\xbe\x03\xd1"
|
||||
"\x79\x21\x70\xa0\xf3\x00\x9c\xee",
|
||||
.rlen = 64,
|
||||
}, {
|
||||
.key = "\x8e\x73\xb0\xf7\xda\x0e\x64\x52"
|
||||
"\xc8\x10\xf3\x2b\x80\x90\x79\xe5"
|
||||
"\x62\xf8\xea\xd2\x52\x2c\x6b\x7b",
|
||||
.klen = 24,
|
||||
.iv = "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
|
||||
"\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
|
||||
.input = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96"
|
||||
"\xe9\x3d\x7e\x11\x73\x93\x17\x2a"
|
||||
"\xae\x2d\x8a\x57\x1e\x03\xac\x9c"
|
||||
"\x9e\xb7\x6f\xac\x45\xaf\x8e\x51"
|
||||
"\x30\xc8\x1c\x46\xa3\x5c\xe4\x11"
|
||||
"\xe5\xfb\xc1\x19\x1a\x0a\x52\xef"
|
||||
"\xf6\x9f\x24\x45\xdf\x4f\x9b\x17"
|
||||
"\xad\x2b\x41\x7b\xe6\x6c\x37\x10",
|
||||
.ilen = 64,
|
||||
.result = "\x1a\xbc\x93\x24\x17\x52\x1c\xa2"
|
||||
"\x4f\x2b\x04\x59\xfe\x7e\x6e\x0b"
|
||||
"\x09\x03\x39\xec\x0a\xa6\xfa\xef"
|
||||
"\xd5\xcc\xc2\xc6\xf4\xce\x8e\x94"
|
||||
"\x1e\x36\xb2\x6b\xd1\xeb\xc6\x70"
|
||||
"\xd1\xbd\x1d\x66\x56\x20\xab\xf7"
|
||||
"\x4f\x78\xa7\xf6\xd2\x98\x09\x58"
|
||||
"\x5a\x97\xda\xec\x58\xc6\xb0\x50",
|
||||
.rlen = 64,
|
||||
}, {
|
||||
.key = "\x60\x3d\xeb\x10\x15\xca\x71\xbe"
|
||||
"\x2b\x73\xae\xf0\x85\x7d\x77\x81"
|
||||
"\x1f\x35\x2c\x07\x3b\x61\x08\xd7"
|
||||
"\x2d\x98\x10\xa3\x09\x14\xdf\xf4",
|
||||
.klen = 32,
|
||||
.iv = "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
|
||||
"\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
|
||||
.input = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96"
|
||||
"\xe9\x3d\x7e\x11\x73\x93\x17\x2a"
|
||||
"\xae\x2d\x8a\x57\x1e\x03\xac\x9c"
|
||||
"\x9e\xb7\x6f\xac\x45\xaf\x8e\x51"
|
||||
"\x30\xc8\x1c\x46\xa3\x5c\xe4\x11"
|
||||
"\xe5\xfb\xc1\x19\x1a\x0a\x52\xef"
|
||||
"\xf6\x9f\x24\x45\xdf\x4f\x9b\x17"
|
||||
"\xad\x2b\x41\x7b\xe6\x6c\x37\x10",
|
||||
.ilen = 64,
|
||||
.result = "\x60\x1e\xc3\x13\x77\x57\x89\xa5"
|
||||
"\xb7\xa7\xf5\x04\xbb\xf3\xd2\x28"
|
||||
"\xf4\x43\xe3\xca\x4d\x62\xb5\x9a"
|
||||
"\xca\x84\xe9\x90\xca\xca\xf5\xc5"
|
||||
"\x2b\x09\x30\xda\xa2\x3d\xe9\x4c"
|
||||
"\xe8\x70\x17\xba\x2d\x84\x98\x8d"
|
||||
"\xdf\xc9\xc5\x8d\xb6\x7a\xad\xa6"
|
||||
"\x13\xc2\xdd\x08\x45\x79\x41\xa6",
|
||||
.rlen = 64,
|
||||
}
|
||||
};
|
||||
|
||||
static struct cipher_testvec aes_ctr_dec_tv_template[] = {
|
||||
{ /* From NIST Special Publication 800-38A, Appendix F.5 */
|
||||
.key = "\x2b\x7e\x15\x16\x28\xae\xd2\xa6"
|
||||
"\xab\xf7\x15\x88\x09\xcf\x4f\x3c",
|
||||
.klen = 16,
|
||||
.iv = "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
|
||||
"\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
|
||||
.input = "\x87\x4d\x61\x91\xb6\x20\xe3\x26"
|
||||
"\x1b\xef\x68\x64\x99\x0d\xb6\xce"
|
||||
"\x98\x06\xf6\x6b\x79\x70\xfd\xff"
|
||||
"\x86\x17\x18\x7b\xb9\xff\xfd\xff"
|
||||
"\x5a\xe4\xdf\x3e\xdb\xd5\xd3\x5e"
|
||||
"\x5b\x4f\x09\x02\x0d\xb0\x3e\xab"
|
||||
"\x1e\x03\x1d\xda\x2f\xbe\x03\xd1"
|
||||
"\x79\x21\x70\xa0\xf3\x00\x9c\xee",
|
||||
.ilen = 64,
|
||||
.result = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96"
|
||||
"\xe9\x3d\x7e\x11\x73\x93\x17\x2a"
|
||||
"\xae\x2d\x8a\x57\x1e\x03\xac\x9c"
|
||||
"\x9e\xb7\x6f\xac\x45\xaf\x8e\x51"
|
||||
"\x30\xc8\x1c\x46\xa3\x5c\xe4\x11"
|
||||
"\xe5\xfb\xc1\x19\x1a\x0a\x52\xef"
|
||||
"\xf6\x9f\x24\x45\xdf\x4f\x9b\x17"
|
||||
"\xad\x2b\x41\x7b\xe6\x6c\x37\x10",
|
||||
.rlen = 64,
|
||||
}, {
|
||||
.key = "\x8e\x73\xb0\xf7\xda\x0e\x64\x52"
|
||||
"\xc8\x10\xf3\x2b\x80\x90\x79\xe5"
|
||||
"\x62\xf8\xea\xd2\x52\x2c\x6b\x7b",
|
||||
.klen = 24,
|
||||
.iv = "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
|
||||
"\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
|
||||
.input = "\x1a\xbc\x93\x24\x17\x52\x1c\xa2"
|
||||
"\x4f\x2b\x04\x59\xfe\x7e\x6e\x0b"
|
||||
"\x09\x03\x39\xec\x0a\xa6\xfa\xef"
|
||||
"\xd5\xcc\xc2\xc6\xf4\xce\x8e\x94"
|
||||
"\x1e\x36\xb2\x6b\xd1\xeb\xc6\x70"
|
||||
"\xd1\xbd\x1d\x66\x56\x20\xab\xf7"
|
||||
"\x4f\x78\xa7\xf6\xd2\x98\x09\x58"
|
||||
"\x5a\x97\xda\xec\x58\xc6\xb0\x50",
|
||||
.ilen = 64,
|
||||
.result = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96"
|
||||
"\xe9\x3d\x7e\x11\x73\x93\x17\x2a"
|
||||
"\xae\x2d\x8a\x57\x1e\x03\xac\x9c"
|
||||
"\x9e\xb7\x6f\xac\x45\xaf\x8e\x51"
|
||||
"\x30\xc8\x1c\x46\xa3\x5c\xe4\x11"
|
||||
"\xe5\xfb\xc1\x19\x1a\x0a\x52\xef"
|
||||
"\xf6\x9f\x24\x45\xdf\x4f\x9b\x17"
|
||||
"\xad\x2b\x41\x7b\xe6\x6c\x37\x10",
|
||||
.rlen = 64,
|
||||
}, {
|
||||
.key = "\x60\x3d\xeb\x10\x15\xca\x71\xbe"
|
||||
"\x2b\x73\xae\xf0\x85\x7d\x77\x81"
|
||||
"\x1f\x35\x2c\x07\x3b\x61\x08\xd7"
|
||||
"\x2d\x98\x10\xa3\x09\x14\xdf\xf4",
|
||||
.klen = 32,
|
||||
.iv = "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
|
||||
"\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
|
||||
.input = "\x60\x1e\xc3\x13\x77\x57\x89\xa5"
|
||||
"\xb7\xa7\xf5\x04\xbb\xf3\xd2\x28"
|
||||
"\xf4\x43\xe3\xca\x4d\x62\xb5\x9a"
|
||||
"\xca\x84\xe9\x90\xca\xca\xf5\xc5"
|
||||
"\x2b\x09\x30\xda\xa2\x3d\xe9\x4c"
|
||||
"\xe8\x70\x17\xba\x2d\x84\x98\x8d"
|
||||
"\xdf\xc9\xc5\x8d\xb6\x7a\xad\xa6"
|
||||
"\x13\xc2\xdd\x08\x45\x79\x41\xa6",
|
||||
.ilen = 64,
|
||||
.result = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96"
|
||||
"\xe9\x3d\x7e\x11\x73\x93\x17\x2a"
|
||||
"\xae\x2d\x8a\x57\x1e\x03\xac\x9c"
|
||||
"\x9e\xb7\x6f\xac\x45\xaf\x8e\x51"
|
||||
"\x30\xc8\x1c\x46\xa3\x5c\xe4\x11"
|
||||
"\xe5\xfb\xc1\x19\x1a\x0a\x52\xef"
|
||||
"\xf6\x9f\x24\x45\xdf\x4f\x9b\x17"
|
||||
"\xad\x2b\x41\x7b\xe6\x6c\x37\x10",
|
||||
.rlen = 64,
|
||||
}
|
||||
};
|
||||
|
||||
static struct cipher_testvec aes_ctr_rfc3686_enc_tv_template[] = {
|
||||
{ /* From RFC 3686 */
|
||||
.key = "\xae\x68\x52\xf8\x12\x10\x67\xcc"
|
||||
"\x4b\xf7\xa5\x76\x55\x77\xf3\x9e"
|
||||
@ -5114,7 +5289,7 @@ static struct cipher_testvec aes_ctr_enc_tv_template[] = {
|
||||
},
|
||||
};
|
||||
|
||||
static struct cipher_testvec aes_ctr_dec_tv_template[] = {
|
||||
static struct cipher_testvec aes_ctr_rfc3686_dec_tv_template[] = {
|
||||
{ /* From RFC 3686 */
|
||||
.key = "\xae\x68\x52\xf8\x12\x10\x67\xcc"
|
||||
"\x4b\xf7\xa5\x76\x55\x77\xf3\x9e"
|
||||
@ -5825,6 +6000,470 @@ static struct aead_testvec aes_ccm_dec_tv_template[] = {
|
||||
},
|
||||
};
|
||||
|
||||
/*
 * rfc4309 refers to section 8 of rfc3610 for test vectors, but they all
 * use a 13-byte nonce, while we only support an 11-byte nonce.  Similarly,
 * all of Special Publication 800-38C's test vectors use nonce lengths our
 * implementation doesn't support.  The following are taken from fips cavs
 * fax files on hand at Red Hat.
 *
 * nb: actual key lengths are (klen - 3); the last 3 bytes are actually
 * part of the nonce, which combines with the iv, but they need to be
 * input this way.
 */
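As a worked illustration of the (klen - 3) note above, a hypothetical helper (not the kernel's actual rfc4309 setkey path) would split each .key field like this:

#include <string.h>

/*
 * Hypothetical illustration: the last 3 bytes of each .key field are not
 * AES key material but the RFC 4309 salt, later prepended to the 8-byte
 * per-packet IV to build the 11-byte CCM nonce.
 */
static int split_rfc4309_key(const unsigned char *key, unsigned int klen,
			     unsigned char *aes_key, unsigned char salt[3])
{
	if (klen < 3 + 16)
		return -1;			/* too short for salt + AES-128 key */

	memcpy(aes_key, key, klen - 3);		/* real AES key: 16, 24 or 32 bytes */
	memcpy(salt, key + klen - 3, 3);	/* implicit nonce bytes */
	return (int)(klen - 3);			/* effective AES key length */
}
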
static struct aead_testvec aes_ccm_rfc4309_enc_tv_template[] = {
|
||||
{
|
||||
.key = "\x83\xac\x54\x66\xc2\xeb\xe5\x05"
|
||||
"\x2e\x01\xd1\xfc\x5d\x82\x66\x2e"
|
||||
"\x96\xac\x59",
|
||||
.klen = 19,
|
||||
.iv = "\x30\x07\xa1\xe2\xa2\xc7\x55\x24",
|
||||
.alen = 0,
|
||||
.input = "\x19\xc8\x81\xf6\xe9\x86\xff\x93"
|
||||
"\x0b\x78\x67\xe5\xbb\xb7\xfc\x6e"
|
||||
"\x83\x77\xb3\xa6\x0c\x8c\x9f\x9c"
|
||||
"\x35\x2e\xad\xe0\x62\xf9\x91\xa1",
|
||||
.ilen = 32,
|
||||
.result = "\xab\x6f\xe1\x69\x1d\x19\x99\xa8"
|
||||
"\x92\xa0\xc4\x6f\x7e\xe2\x8b\xb1"
|
||||
"\x70\xbb\x8c\xa6\x4c\x6e\x97\x8a"
|
||||
"\x57\x2b\xbe\x5d\x98\xa6\xb1\x32"
|
||||
"\xda\x24\xea\xd9\xa1\x39\x98\xfd"
|
||||
"\xa4\xbe\xd9\xf2\x1a\x6d\x22\xa8",
|
||||
.rlen = 48,
|
||||
}, {
|
||||
.key = "\x1e\x2c\x7e\x01\x41\x9a\xef\xc0"
|
||||
"\x0d\x58\x96\x6e\x5c\xa2\x4b\xd3"
|
||||
"\x4f\xa3\x19",
|
||||
.klen = 19,
|
||||
.iv = "\xd3\x01\x5a\xd8\x30\x60\x15\x56",
|
||||
.assoc = "\xda\xe6\x28\x9c\x45\x2d\xfd\x63"
|
||||
"\x5e\xda\x4c\xb6\xe6\xfc\xf9\xb7"
|
||||
"\x0c\x56\xcb\xe4\xe0\x05\x7a\xe1"
|
||||
"\x0a\x63\x09\x78\xbc\x2c\x55\xde",
|
||||
.alen = 32,
|
||||
.input = "\x87\xa3\x36\xfd\x96\xb3\x93\x78"
|
||||
"\xa9\x28\x63\xba\x12\xa3\x14\x85"
|
||||
"\x57\x1e\x06\xc9\x7b\x21\xef\x76"
|
||||
"\x7f\x38\x7e\x8e\x29\xa4\x3e\x7e",
|
||||
.ilen = 32,
|
||||
.result = "\x8a\x1e\x11\xf0\x02\x6b\xe2\x19"
|
||||
"\xfc\x70\xc4\x6d\x8e\xb7\x99\xab"
|
||||
"\xc5\x4b\xa2\xac\xd3\xf3\x48\xff"
|
||||
"\x3b\xb5\xce\x53\xef\xde\xbb\x02"
|
||||
"\xa9\x86\x15\x6c\x13\xfe\xda\x0a"
|
||||
"\x22\xb8\x29\x3d\xd8\x39\x9a\x23",
|
||||
.rlen = 48,
|
||||
}, {
|
||||
.key = "\xf4\x6b\xc2\x75\x62\xfe\xb4\xe1"
|
||||
"\xa3\xf0\xff\xdd\x4e\x4b\x12\x75"
|
||||
"\x53\x14\x73\x66\x8d\x88\xf6\x80"
|
||||
"\xa0\x20\x35",
|
||||
.klen = 27,
|
||||
.iv = "\x26\xf2\x21\x8d\x50\x20\xda\xe2",
|
||||
.assoc = "\x5b\x9e\x13\x67\x02\x5e\xef\xc1"
|
||||
"\x6c\xf9\xd7\x1e\x52\x8f\x7a\x47"
|
||||
"\xe9\xd4\xcf\x20\x14\x6e\xf0\x2d"
|
||||
"\xd8\x9e\x2b\x56\x10\x23\x56\xe7",
|
||||
.alen = 32,
|
||||
.ilen = 0,
|
||||
.result = "\x36\xea\x7a\x70\x08\xdc\x6a\xbc"
|
||||
"\xad\x0c\x7a\x63\xf6\x61\xfd\x9b",
|
||||
.rlen = 16,
|
||||
}, {
|
||||
.key = "\x56\xdf\x5c\x8f\x26\x3f\x0e\x42"
|
||||
"\xef\x7a\xd3\xce\xfc\x84\x60\x62"
|
||||
"\xca\xb4\x40\xaf\x5f\xc9\xc9\x01"
|
||||
"\xd6\x3c\x8c",
|
||||
.klen = 27,
|
||||
.iv = "\x86\x84\xb6\xcd\xef\x09\x2e\x94",
|
||||
.assoc = "\x02\x65\x78\x3c\xe9\x21\x30\x91"
|
||||
"\xb1\xb9\xda\x76\x9a\x78\x6d\x95"
|
||||
"\xf2\x88\x32\xa3\xf2\x50\xcb\x4c"
|
||||
"\xe3\x00\x73\x69\x84\x69\x87\x79",
|
||||
.alen = 32,
|
||||
.input = "\x9f\xd2\x02\x4b\x52\x49\x31\x3c"
|
||||
"\x43\x69\x3a\x2d\x8e\x70\xad\x7e"
|
||||
"\xe0\xe5\x46\x09\x80\x89\x13\xb2"
|
||||
"\x8c\x8b\xd9\x3f\x86\xfb\xb5\x6b",
|
||||
.ilen = 32,
|
||||
.result = "\x39\xdf\x7c\x3c\x5a\x29\xb9\x62"
|
||||
"\x5d\x51\xc2\x16\xd8\xbd\x06\x9f"
|
||||
"\x9b\x6a\x09\x70\xc1\x51\x83\xc2"
|
||||
"\x66\x88\x1d\x4f\x9a\xda\xe0\x1e"
|
||||
"\xc7\x79\x11\x58\xe5\x6b\x20\x40"
|
||||
"\x7a\xea\x46\x42\x8b\xe4\x6f\xe1",
|
||||
.rlen = 48,
|
||||
}, {
|
||||
.key = "\xe0\x8d\x99\x71\x60\xd7\x97\x1a"
|
||||
"\xbd\x01\x99\xd5\x8a\xdf\x71\x3a"
|
||||
"\xd3\xdf\x24\x4b\x5e\x3d\x4b\x4e"
|
||||
"\x30\x7a\xb9\xd8\x53\x0a\x5e\x2b"
|
||||
"\x1e\x29\x91",
|
||||
.klen = 35,
|
||||
.iv = "\xad\x8e\xc1\x53\x0a\xcf\x2d\xbe",
|
||||
.assoc = "\x19\xb6\x1f\x57\xc4\xf3\xf0\x8b"
|
||||
"\x78\x2b\x94\x02\x29\x0f\x42\x27"
|
||||
"\x6b\x75\xcb\x98\x34\x08\x7e\x79"
|
||||
"\xe4\x3e\x49\x0d\x84\x8b\x22\x87",
|
||||
.alen = 32,
|
||||
.input = "\xe1\xd9\xd8\x13\xeb\x3a\x75\x3f"
|
||||
"\x9d\xbd\x5f\x66\xbe\xdc\xbb\x66"
|
||||
"\xbf\x17\x99\x62\x4a\x39\x27\x1f"
|
||||
"\x1d\xdc\x24\xae\x19\x2f\x98\x4c",
|
||||
.ilen = 32,
|
||||
.result = "\x19\xb8\x61\x33\x45\x2b\x43\x96"
|
||||
"\x6f\x51\xd0\x20\x30\x7d\x9b\xc6"
|
||||
"\x26\x3d\xf8\xc9\x65\x16\xa8\x9f"
|
||||
"\xf0\x62\x17\x34\xf2\x1e\x8d\x75"
|
||||
"\x4e\x13\xcc\xc0\xc3\x2a\x54\x2d",
|
||||
.rlen = 40,
|
||||
}, {
|
||||
.key = "\x7c\xc8\x18\x3b\x8d\x99\xe0\x7c"
|
||||
"\x45\x41\xb8\xbd\x5c\xa7\xc2\x32"
|
||||
"\x8a\xb8\x02\x59\xa4\xfe\xa9\x2c"
|
||||
"\x09\x75\x9a\x9b\x3c\x9b\x27\x39"
|
||||
"\xf9\xd9\x4e",
|
||||
.klen = 35,
|
||||
.iv = "\x63\xb5\x3d\x9d\x43\xf6\x1e\x50",
|
||||
.assoc = "\x57\xf5\x6b\x8b\x57\x5c\x3d\x3b"
|
||||
"\x13\x02\x01\x0c\x83\x4c\x96\x35"
|
||||
"\x8e\xd6\x39\xcf\x7d\x14\x9b\x94"
|
||||
"\xb0\x39\x36\xe6\x8f\x57\xe0\x13",
|
||||
.alen = 32,
|
||||
.input = "\x3b\x6c\x29\x36\xb6\xef\x07\xa6"
|
||||
"\x83\x72\x07\x4f\xcf\xfa\x66\x89"
|
||||
"\x5f\xca\xb1\xba\xd5\x8f\x2c\x27"
|
||||
"\x30\xdb\x75\x09\x93\xd4\x65\xe4",
|
||||
.ilen = 32,
|
||||
.result = "\xb0\x88\x5a\x33\xaa\xe5\xc7\x1d"
|
||||
"\x85\x23\xc7\xc6\x2f\xf4\x1e\x3d"
|
||||
"\xcc\x63\x44\x25\x07\x78\x4f\x9e"
|
||||
"\x96\xb8\x88\xeb\xbc\x48\x1f\x06"
|
||||
"\x39\xaf\x39\xac\xd8\x4a\x80\x39"
|
||||
"\x7b\x72\x8a\xf7",
|
||||
.rlen = 44,
|
||||
}, {
|
||||
.key = "\xab\xd0\xe9\x33\x07\x26\xe5\x83"
|
||||
"\x8c\x76\x95\xd4\xb6\xdc\xf3\x46"
|
||||
"\xf9\x8f\xad\xe3\x02\x13\x83\x77"
|
||||
"\x3f\xb0\xf1\xa1\xa1\x22\x0f\x2b"
|
||||
"\x24\xa7\x8b",
|
||||
.klen = 35,
|
||||
.iv = "\x07\xcb\xcc\x0e\xe6\x33\xbf\xf5",
|
||||
.assoc = "\xd4\xdb\x30\x1d\x03\xfe\xfd\x5f"
|
||||
"\x87\xd4\x8c\xb6\xb6\xf1\x7a\x5d"
|
||||
"\xab\x90\x65\x8d\x8e\xca\x4d\x4f"
|
||||
"\x16\x0c\x40\x90\x4b\xc7\x36\x73",
|
||||
.alen = 32,
|
||||
.input = "\xf5\xc6\x7d\x48\xc1\xb7\xe6\x92"
|
||||
"\x97\x5a\xca\xc4\xa9\x6d\xf9\x3d"
|
||||
"\x6c\xde\xbc\xf1\x90\xea\x6a\xb2"
|
||||
"\x35\x86\x36\xaf\x5c\xfe\x4b\x3a",
|
||||
.ilen = 32,
|
||||
.result = "\x83\x6f\x40\x87\x72\xcf\xc1\x13"
|
||||
"\xef\xbb\x80\x21\x04\x6c\x58\x09"
|
||||
"\x07\x1b\xfc\xdf\xc0\x3f\x5b\xc7"
|
||||
"\xe0\x79\xa8\x6e\x71\x7c\x3f\xcf"
|
||||
"\x5c\xda\xb2\x33\xe5\x13\xe2\x0d"
|
||||
"\x74\xd1\xef\xb5\x0f\x3a\xb5\xf8",
|
||||
.rlen = 48,
|
||||
},
|
||||
};
|
||||
|
||||
static struct aead_testvec aes_ccm_rfc4309_dec_tv_template[] = {
|
||||
{
|
||||
.key = "\xab\x2f\x8a\x74\xb7\x1c\xd2\xb1"
|
||||
"\xff\x80\x2e\x48\x7d\x82\xf8\xb9"
|
||||
"\xc6\xfb\x7d",
|
||||
.klen = 19,
|
||||
.iv = "\x80\x0d\x13\xab\xd8\xa6\xb2\xd8",
|
||||
.alen = 0,
|
||||
.input = "\xd5\xe8\x93\x9f\xc7\x89\x2e\x2b",
|
||||
.ilen = 8,
|
||||
.result = "\x00",
|
||||
.rlen = 0,
|
||||
.novrfy = 1,
|
||||
}, {
|
||||
.key = "\xab\x2f\x8a\x74\xb7\x1c\xd2\xb1"
|
||||
"\xff\x80\x2e\x48\x7d\x82\xf8\xb9"
|
||||
"\xaf\x94\x87",
|
||||
.klen = 19,
|
||||
.iv = "\x78\x35\x82\x81\x7f\x88\x94\x68",
|
||||
.alen = 0,
|
||||
.input = "\x41\x3c\xb8\x87\x73\xcb\xf3\xf3",
|
||||
.ilen = 8,
|
||||
.result = "\x00",
|
||||
.rlen = 0,
|
||||
}, {
|
||||
.key = "\x61\x0e\x8c\xae\xe3\x23\xb6\x38"
|
||||
"\x76\x1c\xf6\x3a\x67\xa3\x9c\xd8"
|
||||
"\xc6\xfb\x7d",
|
||||
.klen = 19,
|
||||
.iv = "\x80\x0d\x13\xab\xd8\xa6\xb2\xd8",
|
||||
.assoc = "\xf3\x94\x87\x78\x35\x82\x81\x7f"
|
||||
"\x88\x94\x68\xb1\x78\x6b\x2b\xd6"
|
||||
"\x04\x1f\x4e\xed\x78\xd5\x33\x66"
|
||||
"\xd8\x94\x99\x91\x81\x54\x62\x57",
|
||||
.alen = 32,
|
||||
.input = "\xf0\x7c\x29\x02\xae\x1c\x2f\x55"
|
||||
"\xd0\xd1\x3d\x1a\xa3\x6d\xe4\x0a"
|
||||
"\x86\xb0\x87\x6b\x62\x33\x8c\x34"
|
||||
"\xce\xab\x57\xcc\x79\x0b\xe0\x6f"
|
||||
"\x5c\x3e\x48\x1f\x6c\x46\xf7\x51"
|
||||
"\x8b\x84\x83\x2a\xc1\x05\xb8\xc5",
|
||||
.ilen = 48,
|
||||
.result = "\x50\x82\x3e\x07\xe2\x1e\xb6\xfb"
|
||||
"\x33\xe4\x73\xce\xd2\xfb\x95\x79"
|
||||
"\xe8\xb4\xb5\x77\x11\x10\x62\x6f"
|
||||
"\x6a\x82\xd1\x13\xec\xf5\xd0\x48",
|
||||
.rlen = 32,
|
||||
.novrfy = 1,
|
||||
}, {
|
||||
.key = "\x61\x0e\x8c\xae\xe3\x23\xb6\x38"
|
||||
"\x76\x1c\xf6\x3a\x67\xa3\x9c\xd8"
|
||||
"\x05\xe0\xc9",
|
||||
.klen = 19,
|
||||
.iv = "\x0f\xed\x34\xea\x97\xd4\x3b\xdf",
|
||||
.assoc = "\x49\x5c\x50\x1f\x1d\x94\xcc\x81"
|
||||
"\xba\xb7\xb6\x03\xaf\xa5\xc1\xa1"
|
||||
"\xd8\x5c\x42\x68\xe0\x6c\xda\x89"
|
||||
"\x05\xac\x56\xac\x1b\x2a\xd3\x86",
|
||||
.alen = 32,
|
||||
.input = "\x39\xbe\x7d\x15\x62\x77\xf3\x3c"
|
||||
"\xad\x83\x52\x6d\x71\x03\x25\x1c"
|
||||
"\xed\x81\x3a\x9a\x16\x7d\x19\x80"
|
||||
"\x72\x04\x72\xd0\xf6\xff\x05\x0f"
|
||||
"\xb7\x14\x30\x00\x32\x9e\xa0\xa6"
|
||||
"\x9e\x5a\x18\xa1\xb8\xfe\xdb\xd3",
|
||||
.ilen = 48,
|
||||
.result = "\x75\x05\xbe\xc2\xd9\x1e\xde\x60"
|
||||
"\x47\x3d\x8c\x7d\xbd\xb5\xd9\xb7"
|
||||
"\xf2\xae\x61\x05\x8f\x82\x24\x3f"
|
||||
"\x9c\x67\x91\xe1\x38\x4f\xe4\x0c",
|
||||
.rlen = 32,
|
||||
}, {
|
||||
.key = "\x39\xbb\xa7\xbe\x59\x97\x9e\x73"
|
||||
"\xa2\xbc\x6b\x98\xd7\x75\x7f\xe3"
|
||||
"\xa4\x48\x93\x39\x26\x71\x4a\xc6"
|
||||
"\xee\x49\x83",
|
||||
.klen = 27,
|
||||
.iv = "\xe9\xa9\xff\xe9\x57\xba\xfd\x9e",
|
||||
.assoc = "\x44\xa6\x2c\x05\xe9\xe1\x43\xb1"
|
||||
"\x58\x7c\xf2\x5c\x6d\x39\x0a\x64"
|
||||
"\xa4\xf0\x13\x05\xd1\x77\x99\x67"
|
||||
"\x11\xc4\xc6\xdb\x00\x56\x36\x61",
|
||||
.alen = 32,
|
||||
.input = "\x71\x99\xfa\xf4\x44\x12\x68\x9b",
|
||||
.ilen = 8,
|
||||
.result = "\x00",
|
||||
.rlen = 0,
|
||||
}, {
|
||||
.key = "\x58\x5d\xa0\x96\x65\x1a\x04\xd7"
|
||||
"\x96\xe5\xc5\x68\xaa\x95\x35\xe0"
|
||||
"\x29\xa0\xba\x9e\x48\x78\xd1\xba"
|
||||
"\xee\x49\x83",
|
||||
.klen = 27,
|
||||
.iv = "\xe9\xa9\xff\xe9\x57\xba\xfd\x9e",
|
||||
.assoc = "\x44\xa6\x2c\x05\xe9\xe1\x43\xb1"
|
||||
"\x58\x7c\xf2\x5c\x6d\x39\x0a\x64"
|
||||
"\xa4\xf0\x13\x05\xd1\x77\x99\x67"
|
||||
"\x11\xc4\xc6\xdb\x00\x56\x36\x61",
|
||||
.alen = 32,
|
||||
.input = "\xfb\xe5\x5d\x34\xbe\xe5\xe8\xe7"
|
||||
"\x5a\xef\x2f\xbf\x1f\x7f\xd4\xb2"
|
||||
"\x66\xca\x61\x1e\x96\x7a\x61\xb3"
|
||||
"\x1c\x16\x45\x52\xba\x04\x9c\x9f"
|
||||
"\xb1\xd2\x40\xbc\x52\x7c\x6f\xb1",
|
||||
.ilen = 40,
|
||||
.result = "\x85\x34\x66\x42\xc8\x92\x0f\x36"
|
||||
"\x58\xe0\x6b\x91\x3c\x98\x5c\xbb"
|
||||
"\x0a\x85\xcc\x02\xad\x7a\x96\xe9"
|
||||
"\x65\x43\xa4\xc3\x0f\xdc\x55\x81",
|
||||
.rlen = 32,
|
||||
}, {
|
||||
.key = "\x58\x5d\xa0\x96\x65\x1a\x04\xd7"
|
||||
"\x96\xe5\xc5\x68\xaa\x95\x35\xe0"
|
||||
"\x29\xa0\xba\x9e\x48\x78\xd1\xba"
|
||||
"\xd1\xfc\x57",
|
||||
.klen = 27,
|
||||
.iv = "\x9c\xfe\xb8\x9c\xad\x71\xaa\x1f",
|
||||
.assoc = "\x86\x67\xa5\xa9\x14\x5f\x0d\xc6"
|
||||
"\xff\x14\xc7\x44\xbf\x6c\x3a\xc3"
|
||||
"\xff\xb6\x81\xbd\xe2\xd5\x06\xc7"
|
||||
"\x3c\xa1\x52\x13\x03\x8a\x23\x3a",
|
||||
.alen = 32,
|
||||
.input = "\x3f\x66\xb0\x9d\xe5\x4b\x38\x00"
|
||||
"\xc6\x0e\x6e\xe5\xd6\x98\xa6\x37"
|
||||
"\x8c\x26\x33\xc6\xb2\xa2\x17\xfa"
|
||||
"\x64\x19\xc0\x30\xd7\xfc\x14\x6b"
|
||||
"\xe3\x33\xc2\x04\xb0\x37\xbe\x3f"
|
||||
"\xa9\xb4\x2d\x68\x03\xa3\x44\xef",
|
||||
.ilen = 48,
|
||||
.result = "\x02\x87\x4d\x28\x80\x6e\xb2\xed"
|
||||
"\x99\x2a\xa8\xca\x04\x25\x45\x90"
|
||||
"\x1d\xdd\x5a\xd9\xe4\xdb\x9c\x9c"
|
||||
"\x49\xe9\x01\xfe\xa7\x80\x6d\x6b",
|
||||
.rlen = 32,
|
||||
.novrfy = 1,
|
||||
}, {
|
||||
.key = "\xa4\x4b\x54\x29\x0a\xb8\x6d\x01"
|
||||
"\x5b\x80\x2a\xcf\x25\xc4\xb7\x5c"
|
||||
"\x20\x2c\xad\x30\xc2\x2b\x41\xfb"
|
||||
"\x0e\x85\xbc\x33\xad\x0f\x2b\xff"
|
||||
"\xee\x49\x83",
|
||||
.klen = 35,
|
||||
.iv = "\xe9\xa9\xff\xe9\x57\xba\xfd\x9e",
|
||||
.alen = 0,
|
||||
.input = "\x1f\xb8\x8f\xa3\xdd\x54\x00\xf2",
|
||||
.ilen = 8,
|
||||
.result = "\x00",
|
||||
.rlen = 0,
|
||||
}, {
|
||||
.key = "\x39\xbb\xa7\xbe\x59\x97\x9e\x73"
|
||||
"\xa2\xbc\x6b\x98\xd7\x75\x7f\xe3"
|
||||
"\xa4\x48\x93\x39\x26\x71\x4a\xc6"
|
||||
"\xae\x8f\x11\x4c\xc2\x9c\x4a\xbb"
|
||||
"\x85\x34\x66",
|
||||
.klen = 35,
|
||||
.iv = "\x42\xc8\x92\x0f\x36\x58\xe0\x6b",
|
||||
.alen = 0,
|
||||
.input = "\x48\x01\x5e\x02\x24\x04\x66\x47"
|
||||
"\xa1\xea\x6f\xaf\xe8\xfc\xfb\xdd"
|
||||
"\xa5\xa9\x87\x8d\x84\xee\x2e\x77"
|
||||
"\xbb\x86\xb9\xf5\x5c\x6c\xff\xf6"
|
||||
"\x72\xc3\x8e\xf7\x70\xb1\xb2\x07"
|
||||
"\xbc\xa8\xa3\xbd\x83\x7c\x1d\x2a",
|
||||
.ilen = 48,
|
||||
.result = "\xdc\x56\xf2\x71\xb0\xb1\xa0\x6c"
|
||||
"\xf0\x97\x3a\xfb\x6d\xe7\x32\x99"
|
||||
"\x3e\xaf\x70\x5e\xb2\x4d\xea\x39"
|
||||
"\x89\xd4\x75\x7a\x63\xb1\xda\x93",
|
||||
.rlen = 32,
|
||||
.novrfy = 1,
|
||||
}, {
|
||||
.key = "\x58\x5d\xa0\x96\x65\x1a\x04\xd7"
|
||||
"\x96\xe5\xc5\x68\xaa\x95\x35\xe0"
|
||||
"\x29\xa0\xba\x9e\x48\x78\xd1\xba"
|
||||
"\x0d\x1a\x53\x3b\xb5\xe3\xf8\x8b"
|
||||
"\xcf\x76\x3f",
|
||||
.klen = 35,
|
||||
.iv = "\xd9\x95\x75\x8f\x44\x89\x40\x7b",
|
||||
.assoc = "\x8f\x86\x6c\x4d\x1d\xc5\x39\x88"
|
||||
"\xc8\xf3\x5c\x52\x10\x63\x6f\x2b"
|
||||
"\x8a\x2a\xc5\x6f\x30\x23\x58\x7b"
|
||||
"\xfb\x36\x03\x11\xb4\xd9\xf2\xfe",
|
||||
.alen = 32,
|
||||
.input = "\x48\x58\xd6\xf3\xad\x63\x58\xbf"
|
||||
"\xae\xc7\x5e\xae\x83\x8f\x7b\xe4"
|
||||
"\x78\x5c\x4c\x67\x71\x89\x94\xbf"
|
||||
"\x47\xf1\x63\x7e\x1c\x59\xbd\xc5"
|
||||
"\x7f\x44\x0a\x0c\x01\x18\x07\x92"
|
||||
"\xe1\xd3\x51\xce\x32\x6d\x0c\x5b",
|
||||
.ilen = 48,
|
||||
.result = "\xc2\x54\xc8\xde\x78\x87\x77\x40"
|
||||
"\x49\x71\xe4\xb7\xe7\xcb\x76\x61"
|
||||
"\x0a\x41\xb9\xe9\xc0\x76\x54\xab"
|
||||
"\x04\x49\x3b\x19\x93\x57\x25\x5d",
|
||||
.rlen = 32,
|
||||
},
|
||||
};
|
||||
|
||||
/*
 * ANSI X9.31 Continuous Pseudo-Random Number Generator (AES mode)
 * test vectors, taken from Appendix B.2.9 and B.2.10:
 * http://csrc.nist.gov/groups/STM/cavp/documents/rng/RNGVS.pdf
 * Only AES-128 is supported at this time.
 */
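For reference, one step of the X9.31 AES-mode generator these vectors exercise computes I = E_K(DT), R = E_K(I ^ V), V' = E_K(R ^ I); the sketch below assumes a hypothetical single-block aes128_ecb_encrypt() helper rather than any real kernel API.

/*
 * One X9.31 (AES-128 mode) generator step, matching the key/dt/v/result
 * fields of cprng_testvec above.  aes128_ecb_encrypt(key, in, out) is an
 * assumed primitive encrypting a single 16-byte block.
 */
static void x931_aes_step(const unsigned char key[16],
			  const unsigned char dt[16],
			  unsigned char v[16],		/* updated in place */
			  unsigned char out[16])
{
	unsigned char i_blk[16], tmp[16];
	int n;

	aes128_ecb_encrypt(key, dt, i_blk);		/* I  = E_K(DT)    */

	for (n = 0; n < 16; n++)
		tmp[n] = i_blk[n] ^ v[n];
	aes128_ecb_encrypt(key, tmp, out);		/* R  = E_K(I ^ V) */

	for (n = 0; n < 16; n++)
		tmp[n] = out[n] ^ i_blk[n];
	aes128_ecb_encrypt(key, tmp, v);		/* V' = E_K(R ^ I) */
}
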
#define ANSI_CPRNG_AES_TEST_VECTORS 6
|
||||
|
||||
static struct cprng_testvec ansi_cprng_aes_tv_template[] = {
|
||||
{
|
||||
.key = "\xf3\xb1\x66\x6d\x13\x60\x72\x42"
|
||||
"\xed\x06\x1c\xab\xb8\xd4\x62\x02",
|
||||
.klen = 16,
|
||||
.dt = "\xe6\xb3\xbe\x78\x2a\x23\xfa\x62"
|
||||
"\xd7\x1d\x4a\xfb\xb0\xe9\x22\xf9",
|
||||
.dtlen = 16,
|
||||
.v = "\x80\x00\x00\x00\x00\x00\x00\x00"
|
||||
"\x00\x00\x00\x00\x00\x00\x00\x00",
|
||||
.vlen = 16,
|
||||
.result = "\x59\x53\x1e\xd1\x3b\xb0\xc0\x55"
|
||||
"\x84\x79\x66\x85\xc1\x2f\x76\x41",
|
||||
.rlen = 16,
|
||||
.loops = 1,
|
||||
}, {
|
||||
.key = "\xf3\xb1\x66\x6d\x13\x60\x72\x42"
|
||||
"\xed\x06\x1c\xab\xb8\xd4\x62\x02",
|
||||
.klen = 16,
|
||||
.dt = "\xe6\xb3\xbe\x78\x2a\x23\xfa\x62"
|
||||
"\xd7\x1d\x4a\xfb\xb0\xe9\x22\xfa",
|
||||
.dtlen = 16,
|
||||
.v = "\xc0\x00\x00\x00\x00\x00\x00\x00"
|
||||
"\x00\x00\x00\x00\x00\x00\x00\x00",
|
||||
.vlen = 16,
|
||||
.result = "\x7c\x22\x2c\xf4\xca\x8f\xa2\x4c"
|
||||
"\x1c\x9c\xb6\x41\xa9\xf3\x22\x0d",
|
||||
.rlen = 16,
|
||||
.loops = 1,
|
||||
}, {
|
||||
.key = "\xf3\xb1\x66\x6d\x13\x60\x72\x42"
|
||||
"\xed\x06\x1c\xab\xb8\xd4\x62\x02",
|
||||
.klen = 16,
|
||||
.dt = "\xe6\xb3\xbe\x78\x2a\x23\xfa\x62"
|
||||
"\xd7\x1d\x4a\xfb\xb0\xe9\x22\xfb",
|
||||
.dtlen = 16,
|
||||
.v = "\xe0\x00\x00\x00\x00\x00\x00\x00"
|
||||
"\x00\x00\x00\x00\x00\x00\x00\x00",
|
||||
.vlen = 16,
|
||||
.result = "\x8a\xaa\x00\x39\x66\x67\x5b\xe5"
|
||||
"\x29\x14\x28\x81\xa9\x4d\x4e\xc7",
|
||||
.rlen = 16,
|
||||
.loops = 1,
|
||||
}, {
|
||||
.key = "\xf3\xb1\x66\x6d\x13\x60\x72\x42"
|
||||
"\xed\x06\x1c\xab\xb8\xd4\x62\x02",
|
||||
.klen = 16,
|
||||
.dt = "\xe6\xb3\xbe\x78\x2a\x23\xfa\x62"
|
||||
"\xd7\x1d\x4a\xfb\xb0\xe9\x22\xfc",
|
||||
.dtlen = 16,
|
||||
.v = "\xf0\x00\x00\x00\x00\x00\x00\x00"
|
||||
"\x00\x00\x00\x00\x00\x00\x00\x00",
|
||||
.vlen = 16,
|
||||
.result = "\x88\xdd\xa4\x56\x30\x24\x23\xe5"
|
||||
"\xf6\x9d\xa5\x7e\x7b\x95\xc7\x3a",
|
||||
.rlen = 16,
|
||||
.loops = 1,
|
||||
}, {
|
||||
.key = "\xf3\xb1\x66\x6d\x13\x60\x72\x42"
|
||||
"\xed\x06\x1c\xab\xb8\xd4\x62\x02",
|
||||
.klen = 16,
|
||||
.dt = "\xe6\xb3\xbe\x78\x2a\x23\xfa\x62"
|
||||
"\xd7\x1d\x4a\xfb\xb0\xe9\x22\xfd",
|
||||
.dtlen = 16,
|
||||
.v = "\xf8\x00\x00\x00\x00\x00\x00\x00"
|
||||
"\x00\x00\x00\x00\x00\x00\x00\x00",
|
||||
.vlen = 16,
|
||||
.result = "\x05\x25\x92\x46\x61\x79\xd2\xcb"
|
||||
"\x78\xc4\x0b\x14\x0a\x5a\x9a\xc8",
|
||||
.rlen = 16,
|
||||
.loops = 1,
|
||||
}, { /* Monte Carlo Test */
|
||||
.key = "\x9f\x5b\x51\x20\x0b\xf3\x34\xb5"
|
||||
"\xd8\x2b\xe8\xc3\x72\x55\xc8\x48",
|
||||
.klen = 16,
|
||||
.dt = "\x63\x76\xbb\xe5\x29\x02\xba\x3b"
|
||||
"\x67\xc9\x25\xfa\x70\x1f\x11\xac",
|
||||
.dtlen = 16,
|
||||
.v = "\x57\x2c\x8e\x76\x87\x26\x47\x97"
|
||||
"\x7e\x74\xfb\xdd\xc4\x95\x01\xd1",
|
||||
.vlen = 16,
|
||||
.result = "\x48\xe9\xbd\x0d\x06\xee\x18\xfb"
|
||||
"\xe4\x57\x90\xd5\xc3\xfc\x9b\x73",
|
||||
.rlen = 16,
|
||||
.loops = 10000,
|
||||
},
|
||||
};
|
||||
|
||||
/* Cast5 test vectors from RFC 2144 */
|
||||
#define CAST5_ENC_TEST_VECTORS 3
|
||||
#define CAST5_DEC_TEST_VECTORS 3
|
||||
|
@@ -165,15 +165,15 @@ static int zlib_compress_update(struct crypto_pcomp *tfm,
		return -EINVAL;
	}

	ret = req->avail_out - stream->avail_out;
	pr_debug("avail_in %u, avail_out %u (consumed %u, produced %u)\n",
		 stream->avail_in, stream->avail_out,
		 req->avail_in - stream->avail_in,
		 req->avail_out - stream->avail_out);
		 req->avail_in - stream->avail_in, ret);
	req->next_in = stream->next_in;
	req->avail_in = stream->avail_in;
	req->next_out = stream->next_out;
	req->avail_out = stream->avail_out;
	return 0;
	return ret;
}
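With this change crypto_compress_update() and crypto_compress_final() report the number of bytes produced instead of returning 0 on success; a rough caller-side sketch (illustrative only, assuming tfm and req were set up elsewhere):

/* Sketch: accumulate the produced byte counts now returned by the pcomp
 * helpers.  Negative values still signal errors. */
static int compress_all(struct crypto_pcomp *tfm, struct comp_request *req)
{
	int n, total = 0;

	n = crypto_compress_update(tfm, req);
	if (n < 0)
		return n;
	total += n;

	n = crypto_compress_final(tfm, req);
	if (n < 0)
		return n;
	total += n;

	return total;		/* bytes written to the output buffer */
}
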
|
||||
static int zlib_compress_final(struct crypto_pcomp *tfm,
|
||||
@ -195,15 +195,15 @@ static int zlib_compress_final(struct crypto_pcomp *tfm,
|
||||
return -EINVAL;
|
||||
}
|
||||
|
||||
ret = req->avail_out - stream->avail_out;
|
||||
pr_debug("avail_in %u, avail_out %u (consumed %u, produced %u)\n",
|
||||
stream->avail_in, stream->avail_out,
|
||||
req->avail_in - stream->avail_in,
|
||||
req->avail_out - stream->avail_out);
|
||||
req->avail_in - stream->avail_in, ret);
|
||||
req->next_in = stream->next_in;
|
||||
req->avail_in = stream->avail_in;
|
||||
req->next_out = stream->next_out;
|
||||
req->avail_out = stream->avail_out;
|
||||
return 0;
|
||||
return ret;
|
||||
}
|
||||
|
||||
|
||||
@ -280,15 +280,15 @@ static int zlib_decompress_update(struct crypto_pcomp *tfm,
|
||||
return -EINVAL;
|
||||
}
|
||||
|
||||
ret = req->avail_out - stream->avail_out;
|
||||
pr_debug("avail_in %u, avail_out %u (consumed %u, produced %u)\n",
|
||||
stream->avail_in, stream->avail_out,
|
||||
req->avail_in - stream->avail_in,
|
||||
req->avail_out - stream->avail_out);
|
||||
req->avail_in - stream->avail_in, ret);
|
||||
req->next_in = stream->next_in;
|
||||
req->avail_in = stream->avail_in;
|
||||
req->next_out = stream->next_out;
|
||||
req->avail_out = stream->avail_out;
|
||||
return 0;
|
||||
return ret;
|
||||
}
|
||||
|
||||
static int zlib_decompress_final(struct crypto_pcomp *tfm,
|
||||
@ -328,15 +328,15 @@ static int zlib_decompress_final(struct crypto_pcomp *tfm,
|
||||
return -EINVAL;
|
||||
}
|
||||
|
||||
ret = req->avail_out - stream->avail_out;
|
||||
pr_debug("avail_in %u, avail_out %u (consumed %u, produced %u)\n",
|
||||
stream->avail_in, stream->avail_out,
|
||||
req->avail_in - stream->avail_in,
|
||||
req->avail_out - stream->avail_out);
|
||||
req->avail_in - stream->avail_in, ret);
|
||||
req->next_in = stream->next_in;
|
||||
req->avail_in = stream->avail_in;
|
||||
req->next_out = stream->next_out;
|
||||
req->avail_out = stream->avail_out;
|
||||
return 0;
|
||||
return ret;
|
||||
}
|
||||
|
||||
|
||||
|
@@ -88,7 +88,7 @@ config HW_RANDOM_N2RNG

config HW_RANDOM_VIA
	tristate "VIA HW Random Number Generator support"
	depends on HW_RANDOM && X86_32
	depends on HW_RANDOM && X86
	default HW_RANDOM
	---help---
	  This driver provides kernel-side support for the Random Number
@@ -89,7 +89,7 @@ static struct hwrng omap_rng_ops = {
	.data_read	= omap_rng_data_read,
};

static int __init omap_rng_probe(struct platform_device *pdev)
static int __devinit omap_rng_probe(struct platform_device *pdev)
{
	struct resource *res, *mem;
	int ret;
@ -88,9 +88,9 @@ static struct hwrng timeriomem_rng_ops = {
|
||||
.priv = 0,
|
||||
};
|
||||
|
||||
static int __init timeriomem_rng_probe(struct platform_device *pdev)
|
||||
static int __devinit timeriomem_rng_probe(struct platform_device *pdev)
|
||||
{
|
||||
struct resource *res, *mem;
|
||||
struct resource *res;
|
||||
int ret;
|
||||
|
||||
res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
|
||||
@ -98,21 +98,12 @@ static int __init timeriomem_rng_probe(struct platform_device *pdev)
|
||||
if (!res)
|
||||
return -ENOENT;
|
||||
|
||||
mem = request_mem_region(res->start, res->end - res->start + 1,
|
||||
pdev->name);
|
||||
if (mem == NULL)
|
||||
return -EBUSY;
|
||||
|
||||
dev_set_drvdata(&pdev->dev, mem);
|
||||
|
||||
timeriomem_rng_data = pdev->dev.platform_data;
|
||||
|
||||
timeriomem_rng_data->address = ioremap(res->start,
|
||||
res->end - res->start + 1);
|
||||
if (!timeriomem_rng_data->address) {
|
||||
ret = -ENOMEM;
|
||||
goto err_ioremap;
|
||||
}
|
||||
if (!timeriomem_rng_data->address)
|
||||
return -EIO;
|
||||
|
||||
if (timeriomem_rng_data->period != 0
|
||||
&& usecs_to_jiffies(timeriomem_rng_data->period) > 0) {
|
||||
@ -125,7 +116,7 @@ static int __init timeriomem_rng_probe(struct platform_device *pdev)
|
||||
|
||||
ret = hwrng_register(&timeriomem_rng_ops);
|
||||
if (ret)
|
||||
goto err_register;
|
||||
goto failed;
|
||||
|
||||
dev_info(&pdev->dev, "32bits from 0x%p @ %dus\n",
|
||||
timeriomem_rng_data->address,
|
||||
@ -133,24 +124,19 @@ static int __init timeriomem_rng_probe(struct platform_device *pdev)
|
||||
|
||||
return 0;
|
||||
|
||||
err_register:
|
||||
failed:
|
||||
dev_err(&pdev->dev, "problem registering\n");
|
||||
iounmap(timeriomem_rng_data->address);
|
||||
err_ioremap:
|
||||
release_resource(mem);
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
static int __devexit timeriomem_rng_remove(struct platform_device *pdev)
|
||||
{
|
||||
struct resource *mem = dev_get_drvdata(&pdev->dev);
|
||||
|
||||
del_timer_sync(&timeriomem_rng_timer);
|
||||
hwrng_unregister(&timeriomem_rng_ops);
|
||||
|
||||
iounmap(timeriomem_rng_data->address);
|
||||
release_resource(mem);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
@@ -132,6 +132,19 @@ static int via_rng_init(struct hwrng *rng)
	struct cpuinfo_x86 *c = &cpu_data(0);
	u32 lo, hi, old_lo;

	/* VIA Nano CPUs don't have the MSR_VIA_RNG anymore.  The RNG
	 * is always enabled if CPUID rng_en is set.  There is no
	 * RNG configuration in this register as there used to be. */
	if ((c->x86 == 6) && (c->x86_model >= 0x0f)) {
		if (!cpu_has_xstore_enabled) {
			printk(KERN_ERR PFX "can't enable hardware RNG "
				"if XSTORE is not enabled\n");
			return -ENODEV;
		}
		return 0;
	}

	/* Control the RNG via MSR.  Tread lightly and pay very close
	 * attention to values written, as the reserved fields
	 * are documented to be "undefined and unpredictable"; but it
@@ -205,5 +218,5 @@ static void __exit mod_exit(void)
module_init(mod_init);
module_exit(mod_exit);

MODULE_DESCRIPTION("H/W RNG driver for VIA chipsets");
MODULE_DESCRIPTION("H/W RNG driver for VIA CPU with PadLock");
MODULE_LICENSE("GPL");
@@ -12,7 +12,7 @@ if CRYPTO_HW

config CRYPTO_DEV_PADLOCK
	tristate "Support for VIA PadLock ACE"
	depends on X86_32 && !UML
	depends on X86 && !UML
	select CRYPTO_ALGAPI
	help
	  Some VIA processors come with an integrated crypto engine
@@ -2564,7 +2564,7 @@ static void hifn_tasklet_callback(unsigned long data)
	hifn_process_queue(dev);
}

static int hifn_probe(struct pci_dev *pdev, const struct pci_device_id *id)
static int __devinit hifn_probe(struct pci_dev *pdev, const struct pci_device_id *id)
{
	int err, i;
	struct hifn_device *dev;
@@ -2696,7 +2696,7 @@ err_out_disable_pci_device:
	return err;
}

static void hifn_remove(struct pci_dev *pdev)
static void __devexit hifn_remove(struct pci_dev *pdev)
{
	int i;
	struct hifn_device *dev;
@@ -2744,7 +2744,7 @@ static struct pci_driver hifn_pci_driver = {
	.remove   = __devexit_p(hifn_remove),
};

static int __devinit hifn_init(void)
static int __init hifn_init(void)
{
	unsigned int freq;
	int err;
@@ -2789,7 +2789,7 @@ static int __devinit hifn_init(void)
	return 0;
}

static void __devexit hifn_fini(void)
static void __exit hifn_fini(void)
{
	pci_unregister_driver(&hifn_pci_driver);

@@ -154,7 +154,11 @@ static inline void padlock_reset_key(struct cword *cword)
	int cpu = raw_smp_processor_id();

	if (cword != per_cpu(last_cword, cpu))
#ifndef CONFIG_X86_64
		asm volatile ("pushfl; popfl");
#else
		asm volatile ("pushfq; popfq");
#endif
}

static inline void padlock_store_cword(struct cword *cword)
@@ -208,10 +212,19 @@ static inline void padlock_xcrypt_ecb(const u8 *input, u8 *output, void *key,

	asm volatile ("test $1, %%cl;"
		      "je 1f;"
#ifndef CONFIG_X86_64
		      "lea -1(%%ecx), %%eax;"
		      "mov $1, %%ecx;"
#else
		      "lea -1(%%rcx), %%rax;"
		      "mov $1, %%rcx;"
#endif
		      ".byte 0xf3,0x0f,0xa7,0xc8;"	/* rep xcryptecb */
#ifndef CONFIG_X86_64
		      "mov %%eax, %%ecx;"
#else
		      "mov %%rax, %%rcx;"
#endif
		      "1:"
		      ".byte 0xf3,0x0f,0xa7,0xc8"	/* rep xcryptecb */
		      : "+S"(input), "+D"(output)
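The control flow of the extended inline asm above, expressed as a C sketch with a hypothetical xcryptecb_blocks() stand-in for one "rep xcryptecb" issue: an odd block count is split into a single-block operation followed by the even remainder.

/* Sketch only; xcryptecb_blocks() is assumed to advance *in and *out,
 * mimicking the pointer updates done by "rep xcryptecb". */
static void padlock_ecb_split(const unsigned char **in, unsigned char **out,
			      void *key, void *ctrl, unsigned long count)
{
	if (count & 1) {			/* the "test $1, %%cl" path */
		xcryptecb_blocks(in, out, key, ctrl, 1);
		count--;			/* remainder is now even */
	}
	if (count)
		xcryptecb_blocks(in, out, key, ctrl, count);
}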