Merge tag 'libcrypto-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/ebiggers/linux

Pull crypto library updates from Eric Biggers:

 - Add a RISC-V optimized implementation of Poly1305. This code was
   written by Andy Polyakov and contributed by Zhihang Shao.

 - Migrate the MD5 code into lib/crypto/, and add KUnit tests for MD5.

   Yes, it's still the 90s, and several kernel subsystems are still
   using MD5 for legacy use cases. As long as that remains the case,
   it's helpful to clean it up in the same way as I've been doing for
   other algorithms.

   Later, I plan to convert most of these users of MD5 to use the new
   MD5 library API instead of the generic crypto API (see the usage
   sketch after this list).

 - Simplify the organization of the ChaCha, Poly1305, BLAKE2s, and
   Curve25519 code.

   Consolidate these into one module per algorithm, and centralize the
   configuration and build process. This is the same reorganization
   that has already been successful for SHA-1 and SHA-2; a short sketch
   of the resulting single-entry-point library call follows this list.

 - Remove the unused crypto_kpp API for Curve25519.

 - Migrate the BLAKE2s and Curve25519 self-tests to KUnit.

 - Always enable the architecture-optimized BLAKE2s code.

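For reference, the new MD5 library API mentioned above follows the same init/update/final pattern as the other lib/crypto hashes; the calls are visible in the rewritten crypto/md5.c wrapper further down. A minimal caller sketch, assuming the <crypto/md5.h> prototypes used there (the helper name example_md5_digest and the size_t length parameter are illustrative assumptions, not kernel code):

    #include <crypto/md5.h>

    /* Illustrative helper, not part of the tree. */
    static void example_md5_digest(const u8 *data, size_t len,
                                   u8 out[MD5_DIGEST_SIZE])
    {
            struct md5_ctx ctx;

            /* Incremental use: no tfm allocation, no error paths. */
            md5_init(&ctx);
            md5_update(&ctx, data, len);
            md5_final(&ctx, out);
    }

    /* One-shot forms also appear in the wrapper below:
     * md5(data, len, out) and, for HMAC,
     * hmac_md5_preparekey() followed by hmac_md5(). */

Converting the remaining in-kernel MD5 users is then mostly a matter of replacing their crypto_shash calls with these direct library calls.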
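In the same vein, the consolidation leaves library users with a single entry point per algorithm: chacha_crypt() and hchacha_block() dispatch to the arch-optimized code internally when it is available, which is why the crypto/chacha.c wrapper below drops its duplicate "-generic" registrations and renames the remaining ones to "-lib". A hedged sketch of a direct library caller, reusing the calls the wrapper makes (the helper name and exact prototypes are assumptions):

    #include <crypto/chacha.h>

    /* Illustrative helper; the key is CHACHA_KEY_SIZE bytes taken as
     * eight 32-bit words, matching the chacha_ctx usage in the diff. */
    static void example_chacha20_xor(const u32 key[8],
                                     const u8 iv[CHACHA_IV_SIZE],
                                     u8 *dst, const u8 *src,
                                     unsigned int len)
    {
            struct chacha_state state;

            chacha_init(&state, key, iv);
            /* One call; the library picks SIMD/arch code when available. */
            chacha_crypt(&state, dst, src, len, 20 /* nrounds */);
    }

Build-wise, a user now selects CRYPTO_LIB_CHACHA (or CRYPTO_LIB_POLY1305, and so on) and the generic and arch implementations come in one module per algorithm, as the crypto/Kconfig hunks below show.
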
* tag 'libcrypto-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/ebiggers/linux: (38 commits)
  crypto: md5 - Implement export_core() and import_core()
  wireguard: kconfig: simplify crypto kconfig selections
  lib/crypto: tests: Enable Curve25519 test when CRYPTO_SELFTESTS
  lib/crypto: curve25519: Consolidate into single module
  lib/crypto: curve25519: Move a couple functions out-of-line
  lib/crypto: tests: Add Curve25519 benchmark
  lib/crypto: tests: Migrate Curve25519 self-test to KUnit
  crypto: curve25519 - Remove unused kpp support
  crypto: testmgr - Remove curve25519 kpp tests
  crypto: x86/curve25519 - Remove unused kpp support
  crypto: powerpc/curve25519 - Remove unused kpp support
  crypto: arm/curve25519 - Remove unused kpp support
  crypto: hisilicon/hpre - Remove unused curve25519 kpp support
  lib/crypto: tests: Add KUnit tests for BLAKE2s
  lib/crypto: blake2s: Consolidate into single C translation unit
  lib/crypto: blake2s: Move generic code into blake2s.c
  lib/crypto: blake2s: Always enable arch-optimized BLAKE2s code
  lib/crypto: blake2s: Remove obsolete self-test
  lib/crypto: x86/blake2s: Reduce size of BLAKE2S_SIGMA2
  lib/crypto: chacha: Consolidate into single module
  ...
Linus Torvalds committed 2025-09-29 15:48:56 -07:00
143 changed files with 3395 additions and 5041 deletions

crypto/Kconfig

@@ -344,14 +344,6 @@ config CRYPTO_ECRDSA
One of the Russian cryptographic standard algorithms (called GOST
algorithms). Only signature verification is implemented.
config CRYPTO_CURVE25519
tristate "Curve25519"
select CRYPTO_KPP
select CRYPTO_LIB_CURVE25519_GENERIC
select CRYPTO_LIB_CURVE25519_INTERNAL
help
Curve25519 elliptic curve (RFC7748)
endmenu
menu "Block ciphers"
@@ -609,6 +601,7 @@ menu "Length-preserving ciphers and modes"
config CRYPTO_ADIANTUM
tristate "Adiantum"
select CRYPTO_CHACHA20
select CRYPTO_LIB_POLY1305
select CRYPTO_LIB_POLY1305_GENERIC
select CRYPTO_NHPOLY1305
select CRYPTO_MANAGER
@@ -647,7 +640,6 @@ config CRYPTO_ARC4
config CRYPTO_CHACHA20
tristate "ChaCha"
select CRYPTO_LIB_CHACHA
select CRYPTO_LIB_CHACHA_GENERIC
select CRYPTO_SKCIPHER
help
The ChaCha20, XChaCha20, and XChaCha12 stream cipher algorithms
@@ -770,6 +762,7 @@ config CRYPTO_XTS
config CRYPTO_NHPOLY1305
tristate
select CRYPTO_HASH
select CRYPTO_LIB_POLY1305
select CRYPTO_LIB_POLY1305_GENERIC
endmenu
@@ -938,8 +931,9 @@ config CRYPTO_MD4
config CRYPTO_MD5
tristate "MD5"
select CRYPTO_HASH
select CRYPTO_LIB_MD5
help
MD5 message digest algorithm (RFC1321)
MD5 message digest algorithm (RFC1321), including HMAC support.
config CRYPTO_MICHAEL_MIC
tristate "Michael MIC"

crypto/Makefile

@@ -182,7 +182,6 @@ obj-$(CONFIG_CRYPTO_USER_API_AEAD) += algif_aead.o
obj-$(CONFIG_CRYPTO_ZSTD) += zstd.o
obj-$(CONFIG_CRYPTO_ECC) += ecc.o
obj-$(CONFIG_CRYPTO_ESSIV) += essiv.o
obj-$(CONFIG_CRYPTO_CURVE25519) += curve25519-generic.o
ecdh_generic-y += ecdh.o
ecdh_generic-y += ecdh_helper.o

crypto/chacha.c

@@ -47,7 +47,7 @@ static int chacha12_setkey(struct crypto_skcipher *tfm,
static int chacha_stream_xor(struct skcipher_request *req,
const struct chacha_ctx *ctx,
const u8 iv[CHACHA_IV_SIZE], bool arch)
const u8 iv[CHACHA_IV_SIZE])
{
struct skcipher_walk walk;
struct chacha_state state;
@@ -63,36 +63,23 @@ static int chacha_stream_xor(struct skcipher_request *req,
if (nbytes < walk.total)
nbytes = round_down(nbytes, CHACHA_BLOCK_SIZE);
if (arch)
chacha_crypt(&state, walk.dst.virt.addr,
walk.src.virt.addr, nbytes, ctx->nrounds);
else
chacha_crypt_generic(&state, walk.dst.virt.addr,
walk.src.virt.addr, nbytes,
ctx->nrounds);
chacha_crypt(&state, walk.dst.virt.addr, walk.src.virt.addr,
nbytes, ctx->nrounds);
err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
}
return err;
}
static int crypto_chacha_crypt_generic(struct skcipher_request *req)
static int crypto_chacha_crypt(struct skcipher_request *req)
{
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
const struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
return chacha_stream_xor(req, ctx, req->iv, false);
return chacha_stream_xor(req, ctx, req->iv);
}
static int crypto_chacha_crypt_arch(struct skcipher_request *req)
{
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
const struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
return chacha_stream_xor(req, ctx, req->iv, true);
}
static int crypto_xchacha_crypt(struct skcipher_request *req, bool arch)
static int crypto_xchacha_crypt(struct skcipher_request *req)
{
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
const struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
@@ -102,10 +89,7 @@ static int crypto_xchacha_crypt(struct skcipher_request *req, bool arch)
/* Compute the subkey given the original key and first 128 nonce bits */
chacha_init(&state, ctx->key, req->iv);
if (arch)
hchacha_block(&state, subctx.key, ctx->nrounds);
else
hchacha_block_generic(&state, subctx.key, ctx->nrounds);
hchacha_block(&state, subctx.key, ctx->nrounds);
subctx.nrounds = ctx->nrounds;
/* Build the real IV */
@@ -113,71 +97,13 @@ static int crypto_xchacha_crypt(struct skcipher_request *req, bool arch)
memcpy(&real_iv[8], req->iv + 16, 8); /* remaining 64 nonce bits */
/* Generate the stream and XOR it with the data */
return chacha_stream_xor(req, &subctx, real_iv, arch);
}
static int crypto_xchacha_crypt_generic(struct skcipher_request *req)
{
return crypto_xchacha_crypt(req, false);
}
static int crypto_xchacha_crypt_arch(struct skcipher_request *req)
{
return crypto_xchacha_crypt(req, true);
return chacha_stream_xor(req, &subctx, real_iv);
}
static struct skcipher_alg algs[] = {
{
.base.cra_name = "chacha20",
.base.cra_driver_name = "chacha20-generic",
.base.cra_priority = 100,
.base.cra_blocksize = 1,
.base.cra_ctxsize = sizeof(struct chacha_ctx),
.base.cra_module = THIS_MODULE,
.min_keysize = CHACHA_KEY_SIZE,
.max_keysize = CHACHA_KEY_SIZE,
.ivsize = CHACHA_IV_SIZE,
.chunksize = CHACHA_BLOCK_SIZE,
.setkey = chacha20_setkey,
.encrypt = crypto_chacha_crypt_generic,
.decrypt = crypto_chacha_crypt_generic,
},
{
.base.cra_name = "xchacha20",
.base.cra_driver_name = "xchacha20-generic",
.base.cra_priority = 100,
.base.cra_blocksize = 1,
.base.cra_ctxsize = sizeof(struct chacha_ctx),
.base.cra_module = THIS_MODULE,
.min_keysize = CHACHA_KEY_SIZE,
.max_keysize = CHACHA_KEY_SIZE,
.ivsize = XCHACHA_IV_SIZE,
.chunksize = CHACHA_BLOCK_SIZE,
.setkey = chacha20_setkey,
.encrypt = crypto_xchacha_crypt_generic,
.decrypt = crypto_xchacha_crypt_generic,
},
{
.base.cra_name = "xchacha12",
.base.cra_driver_name = "xchacha12-generic",
.base.cra_priority = 100,
.base.cra_blocksize = 1,
.base.cra_ctxsize = sizeof(struct chacha_ctx),
.base.cra_module = THIS_MODULE,
.min_keysize = CHACHA_KEY_SIZE,
.max_keysize = CHACHA_KEY_SIZE,
.ivsize = XCHACHA_IV_SIZE,
.chunksize = CHACHA_BLOCK_SIZE,
.setkey = chacha12_setkey,
.encrypt = crypto_xchacha_crypt_generic,
.decrypt = crypto_xchacha_crypt_generic,
},
{
.base.cra_name = "chacha20",
.base.cra_driver_name = "chacha20-" __stringify(ARCH),
.base.cra_driver_name = "chacha20-lib",
.base.cra_priority = 300,
.base.cra_blocksize = 1,
.base.cra_ctxsize = sizeof(struct chacha_ctx),
@@ -188,12 +114,12 @@ static struct skcipher_alg algs[] = {
.ivsize = CHACHA_IV_SIZE,
.chunksize = CHACHA_BLOCK_SIZE,
.setkey = chacha20_setkey,
.encrypt = crypto_chacha_crypt_arch,
.decrypt = crypto_chacha_crypt_arch,
.encrypt = crypto_chacha_crypt,
.decrypt = crypto_chacha_crypt,
},
{
.base.cra_name = "xchacha20",
.base.cra_driver_name = "xchacha20-" __stringify(ARCH),
.base.cra_driver_name = "xchacha20-lib",
.base.cra_priority = 300,
.base.cra_blocksize = 1,
.base.cra_ctxsize = sizeof(struct chacha_ctx),
@@ -204,12 +130,12 @@ static struct skcipher_alg algs[] = {
.ivsize = XCHACHA_IV_SIZE,
.chunksize = CHACHA_BLOCK_SIZE,
.setkey = chacha20_setkey,
.encrypt = crypto_xchacha_crypt_arch,
.decrypt = crypto_xchacha_crypt_arch,
.encrypt = crypto_xchacha_crypt,
.decrypt = crypto_xchacha_crypt,
},
{
.base.cra_name = "xchacha12",
.base.cra_driver_name = "xchacha12-" __stringify(ARCH),
.base.cra_driver_name = "xchacha12-lib",
.base.cra_priority = 300,
.base.cra_blocksize = 1,
.base.cra_ctxsize = sizeof(struct chacha_ctx),
@@ -220,27 +146,19 @@ static struct skcipher_alg algs[] = {
.ivsize = XCHACHA_IV_SIZE,
.chunksize = CHACHA_BLOCK_SIZE,
.setkey = chacha12_setkey,
.encrypt = crypto_xchacha_crypt_arch,
.decrypt = crypto_xchacha_crypt_arch,
.encrypt = crypto_xchacha_crypt,
.decrypt = crypto_xchacha_crypt,
}
};
static unsigned int num_algs;
static int __init crypto_chacha_mod_init(void)
{
/* register the arch flavours only if they differ from generic */
num_algs = ARRAY_SIZE(algs);
BUILD_BUG_ON(ARRAY_SIZE(algs) % 2 != 0);
if (!chacha_is_arch_optimized())
num_algs /= 2;
return crypto_register_skciphers(algs, num_algs);
return crypto_register_skciphers(algs, ARRAY_SIZE(algs));
}
static void __exit crypto_chacha_mod_fini(void)
{
crypto_unregister_skciphers(algs, num_algs);
crypto_unregister_skciphers(algs, ARRAY_SIZE(algs));
}
module_init(crypto_chacha_mod_init);
@@ -250,11 +168,8 @@ MODULE_LICENSE("GPL");
MODULE_AUTHOR("Martin Willi <martin@strongswan.org>");
MODULE_DESCRIPTION("Crypto API wrappers for the ChaCha20, XChaCha20, and XChaCha12 stream ciphers");
MODULE_ALIAS_CRYPTO("chacha20");
MODULE_ALIAS_CRYPTO("chacha20-generic");
MODULE_ALIAS_CRYPTO("chacha20-" __stringify(ARCH));
MODULE_ALIAS_CRYPTO("chacha20-lib");
MODULE_ALIAS_CRYPTO("xchacha20");
MODULE_ALIAS_CRYPTO("xchacha20-generic");
MODULE_ALIAS_CRYPTO("xchacha20-" __stringify(ARCH));
MODULE_ALIAS_CRYPTO("xchacha20-lib");
MODULE_ALIAS_CRYPTO("xchacha12");
MODULE_ALIAS_CRYPTO("xchacha12-generic");
MODULE_ALIAS_CRYPTO("xchacha12-" __stringify(ARCH));
MODULE_ALIAS_CRYPTO("xchacha12-lib");

crypto/curve25519-generic.c (deleted)

@@ -1,91 +0,0 @@
// SPDX-License-Identifier: GPL-2.0-or-later
#include <crypto/curve25519.h>
#include <crypto/internal/kpp.h>
#include <crypto/kpp.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
static int curve25519_set_secret(struct crypto_kpp *tfm, const void *buf,
unsigned int len)
{
u8 *secret = kpp_tfm_ctx(tfm);
if (!len)
curve25519_generate_secret(secret);
else if (len == CURVE25519_KEY_SIZE &&
crypto_memneq(buf, curve25519_null_point, CURVE25519_KEY_SIZE))
memcpy(secret, buf, CURVE25519_KEY_SIZE);
else
return -EINVAL;
return 0;
}
static int curve25519_compute_value(struct kpp_request *req)
{
struct crypto_kpp *tfm = crypto_kpp_reqtfm(req);
const u8 *secret = kpp_tfm_ctx(tfm);
u8 public_key[CURVE25519_KEY_SIZE];
u8 buf[CURVE25519_KEY_SIZE];
int copied, nbytes;
u8 const *bp;
if (req->src) {
copied = sg_copy_to_buffer(req->src,
sg_nents_for_len(req->src,
CURVE25519_KEY_SIZE),
public_key, CURVE25519_KEY_SIZE);
if (copied != CURVE25519_KEY_SIZE)
return -EINVAL;
bp = public_key;
} else {
bp = curve25519_base_point;
}
curve25519_generic(buf, secret, bp);
/* might want less than we've got */
nbytes = min_t(size_t, CURVE25519_KEY_SIZE, req->dst_len);
copied = sg_copy_from_buffer(req->dst, sg_nents_for_len(req->dst,
nbytes),
buf, nbytes);
if (copied != nbytes)
return -EINVAL;
return 0;
}
static unsigned int curve25519_max_size(struct crypto_kpp *tfm)
{
return CURVE25519_KEY_SIZE;
}
static struct kpp_alg curve25519_alg = {
.base.cra_name = "curve25519",
.base.cra_driver_name = "curve25519-generic",
.base.cra_priority = 100,
.base.cra_module = THIS_MODULE,
.base.cra_ctxsize = CURVE25519_KEY_SIZE,
.set_secret = curve25519_set_secret,
.generate_public_key = curve25519_compute_value,
.compute_shared_secret = curve25519_compute_value,
.max_size = curve25519_max_size,
};
static int __init curve25519_init(void)
{
return crypto_register_kpp(&curve25519_alg);
}
static void __exit curve25519_exit(void)
{
crypto_unregister_kpp(&curve25519_alg);
}
module_init(curve25519_init);
module_exit(curve25519_exit);
MODULE_ALIAS_CRYPTO("curve25519");
MODULE_ALIAS_CRYPTO("curve25519-generic");
MODULE_DESCRIPTION("Curve25519 elliptic curve (RFC7748)");
MODULE_LICENSE("GPL");

crypto/md5.c

@@ -1,25 +1,62 @@
/*
* Cryptographic API.
*
* MD5 Message Digest Algorithm (RFC1321).
*
* Derived from cryptoapi implementation, originally based on the
* public domain implementation written by Colin Plumb in 1993.
*
* Copyright (c) Cryptoapi developers.
* Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* any later version.
// SPDX-License-Identifier: GPL-2.0-or-later
/*
* Crypto API support for MD5 and HMAC-MD5
*
* Copyright 2025 Google LLC
*/
#include <crypto/internal/hash.h>
#include <crypto/md5.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/string.h>
/*
* Export and import functions. crypto_shash wants a particular format that
* matches that used by some legacy drivers. It currently is the same as the
* library MD5 context, except the value in bytecount must be block-aligned and
* the remainder must be stored in an extra u8 appended to the struct.
*/
#define MD5_SHASH_STATE_SIZE (sizeof(struct md5_ctx) + 1)
static_assert(sizeof(struct md5_ctx) == sizeof(struct md5_state));
static_assert(offsetof(struct md5_ctx, state) == offsetof(struct md5_state, hash));
static_assert(offsetof(struct md5_ctx, bytecount) == offsetof(struct md5_state, byte_count));
static_assert(offsetof(struct md5_ctx, buf) == offsetof(struct md5_state, block));
static int __crypto_md5_export(const struct md5_ctx *ctx0, void *out)
{
struct md5_ctx ctx = *ctx0;
unsigned int partial;
u8 *p = out;
partial = ctx.bytecount % MD5_BLOCK_SIZE;
ctx.bytecount -= partial;
memcpy(p, &ctx, sizeof(ctx));
p += sizeof(ctx);
*p = partial;
return 0;
}
static int __crypto_md5_import(struct md5_ctx *ctx, const void *in)
{
const u8 *p = in;
memcpy(ctx, p, sizeof(*ctx));
p += sizeof(*ctx);
ctx->bytecount += *p;
return 0;
}
static int __crypto_md5_export_core(const struct md5_ctx *ctx, void *out)
{
memcpy(out, ctx, offsetof(struct md5_ctx, buf));
return 0;
}
static int __crypto_md5_import_core(struct md5_ctx *ctx, const void *in)
{
memcpy(ctx, in, offsetof(struct md5_ctx, buf));
return 0;
}
const u8 md5_zero_message_hash[MD5_DIGEST_SIZE] = {
0xd4, 0x1d, 0x8c, 0xd9, 0x8f, 0x00, 0xb2, 0x04,
@@ -27,198 +64,173 @@ const u8 md5_zero_message_hash[MD5_DIGEST_SIZE] = {
};
EXPORT_SYMBOL_GPL(md5_zero_message_hash);
#define F1(x, y, z) (z ^ (x & (y ^ z)))
#define F2(x, y, z) F1(z, x, y)
#define F3(x, y, z) (x ^ y ^ z)
#define F4(x, y, z) (y ^ (x | ~z))
#define MD5_CTX(desc) ((struct md5_ctx *)shash_desc_ctx(desc))
#define MD5STEP(f, w, x, y, z, in, s) \
(w += f(x, y, z) + in, w = (w<<s | w>>(32-s)) + x)
static void md5_transform(__u32 *hash, __u32 const *in)
static int crypto_md5_init(struct shash_desc *desc)
{
u32 a, b, c, d;
a = hash[0];
b = hash[1];
c = hash[2];
d = hash[3];
MD5STEP(F1, a, b, c, d, in[0] + 0xd76aa478, 7);
MD5STEP(F1, d, a, b, c, in[1] + 0xe8c7b756, 12);
MD5STEP(F1, c, d, a, b, in[2] + 0x242070db, 17);
MD5STEP(F1, b, c, d, a, in[3] + 0xc1bdceee, 22);
MD5STEP(F1, a, b, c, d, in[4] + 0xf57c0faf, 7);
MD5STEP(F1, d, a, b, c, in[5] + 0x4787c62a, 12);
MD5STEP(F1, c, d, a, b, in[6] + 0xa8304613, 17);
MD5STEP(F1, b, c, d, a, in[7] + 0xfd469501, 22);
MD5STEP(F1, a, b, c, d, in[8] + 0x698098d8, 7);
MD5STEP(F1, d, a, b, c, in[9] + 0x8b44f7af, 12);
MD5STEP(F1, c, d, a, b, in[10] + 0xffff5bb1, 17);
MD5STEP(F1, b, c, d, a, in[11] + 0x895cd7be, 22);
MD5STEP(F1, a, b, c, d, in[12] + 0x6b901122, 7);
MD5STEP(F1, d, a, b, c, in[13] + 0xfd987193, 12);
MD5STEP(F1, c, d, a, b, in[14] + 0xa679438e, 17);
MD5STEP(F1, b, c, d, a, in[15] + 0x49b40821, 22);
MD5STEP(F2, a, b, c, d, in[1] + 0xf61e2562, 5);
MD5STEP(F2, d, a, b, c, in[6] + 0xc040b340, 9);
MD5STEP(F2, c, d, a, b, in[11] + 0x265e5a51, 14);
MD5STEP(F2, b, c, d, a, in[0] + 0xe9b6c7aa, 20);
MD5STEP(F2, a, b, c, d, in[5] + 0xd62f105d, 5);
MD5STEP(F2, d, a, b, c, in[10] + 0x02441453, 9);
MD5STEP(F2, c, d, a, b, in[15] + 0xd8a1e681, 14);
MD5STEP(F2, b, c, d, a, in[4] + 0xe7d3fbc8, 20);
MD5STEP(F2, a, b, c, d, in[9] + 0x21e1cde6, 5);
MD5STEP(F2, d, a, b, c, in[14] + 0xc33707d6, 9);
MD5STEP(F2, c, d, a, b, in[3] + 0xf4d50d87, 14);
MD5STEP(F2, b, c, d, a, in[8] + 0x455a14ed, 20);
MD5STEP(F2, a, b, c, d, in[13] + 0xa9e3e905, 5);
MD5STEP(F2, d, a, b, c, in[2] + 0xfcefa3f8, 9);
MD5STEP(F2, c, d, a, b, in[7] + 0x676f02d9, 14);
MD5STEP(F2, b, c, d, a, in[12] + 0x8d2a4c8a, 20);
MD5STEP(F3, a, b, c, d, in[5] + 0xfffa3942, 4);
MD5STEP(F3, d, a, b, c, in[8] + 0x8771f681, 11);
MD5STEP(F3, c, d, a, b, in[11] + 0x6d9d6122, 16);
MD5STEP(F3, b, c, d, a, in[14] + 0xfde5380c, 23);
MD5STEP(F3, a, b, c, d, in[1] + 0xa4beea44, 4);
MD5STEP(F3, d, a, b, c, in[4] + 0x4bdecfa9, 11);
MD5STEP(F3, c, d, a, b, in[7] + 0xf6bb4b60, 16);
MD5STEP(F3, b, c, d, a, in[10] + 0xbebfbc70, 23);
MD5STEP(F3, a, b, c, d, in[13] + 0x289b7ec6, 4);
MD5STEP(F3, d, a, b, c, in[0] + 0xeaa127fa, 11);
MD5STEP(F3, c, d, a, b, in[3] + 0xd4ef3085, 16);
MD5STEP(F3, b, c, d, a, in[6] + 0x04881d05, 23);
MD5STEP(F3, a, b, c, d, in[9] + 0xd9d4d039, 4);
MD5STEP(F3, d, a, b, c, in[12] + 0xe6db99e5, 11);
MD5STEP(F3, c, d, a, b, in[15] + 0x1fa27cf8, 16);
MD5STEP(F3, b, c, d, a, in[2] + 0xc4ac5665, 23);
MD5STEP(F4, a, b, c, d, in[0] + 0xf4292244, 6);
MD5STEP(F4, d, a, b, c, in[7] + 0x432aff97, 10);
MD5STEP(F4, c, d, a, b, in[14] + 0xab9423a7, 15);
MD5STEP(F4, b, c, d, a, in[5] + 0xfc93a039, 21);
MD5STEP(F4, a, b, c, d, in[12] + 0x655b59c3, 6);
MD5STEP(F4, d, a, b, c, in[3] + 0x8f0ccc92, 10);
MD5STEP(F4, c, d, a, b, in[10] + 0xffeff47d, 15);
MD5STEP(F4, b, c, d, a, in[1] + 0x85845dd1, 21);
MD5STEP(F4, a, b, c, d, in[8] + 0x6fa87e4f, 6);
MD5STEP(F4, d, a, b, c, in[15] + 0xfe2ce6e0, 10);
MD5STEP(F4, c, d, a, b, in[6] + 0xa3014314, 15);
MD5STEP(F4, b, c, d, a, in[13] + 0x4e0811a1, 21);
MD5STEP(F4, a, b, c, d, in[4] + 0xf7537e82, 6);
MD5STEP(F4, d, a, b, c, in[11] + 0xbd3af235, 10);
MD5STEP(F4, c, d, a, b, in[2] + 0x2ad7d2bb, 15);
MD5STEP(F4, b, c, d, a, in[9] + 0xeb86d391, 21);
hash[0] += a;
hash[1] += b;
hash[2] += c;
hash[3] += d;
}
static inline void md5_transform_helper(struct md5_state *ctx,
u32 block[MD5_BLOCK_WORDS])
{
le32_to_cpu_array(block, MD5_BLOCK_WORDS);
md5_transform(ctx->hash, block);
}
static int md5_init(struct shash_desc *desc)
{
struct md5_state *mctx = shash_desc_ctx(desc);
mctx->hash[0] = MD5_H0;
mctx->hash[1] = MD5_H1;
mctx->hash[2] = MD5_H2;
mctx->hash[3] = MD5_H3;
mctx->byte_count = 0;
md5_init(MD5_CTX(desc));
return 0;
}
static int md5_update(struct shash_desc *desc, const u8 *data, unsigned int len)
static int crypto_md5_update(struct shash_desc *desc,
const u8 *data, unsigned int len)
{
struct md5_state *mctx = shash_desc_ctx(desc);
u32 block[MD5_BLOCK_WORDS];
mctx->byte_count += len;
do {
memcpy(block, data, sizeof(block));
md5_transform_helper(mctx, block);
data += sizeof(block);
len -= sizeof(block);
} while (len >= sizeof(block));
memzero_explicit(block, sizeof(block));
mctx->byte_count -= len;
return len;
}
static int md5_finup(struct shash_desc *desc, const u8 *data, unsigned int len,
u8 *out)
{
struct md5_state *mctx = shash_desc_ctx(desc);
u32 block[MD5_BLOCK_WORDS];
unsigned int offset;
int padding;
char *p;
memcpy(block, data, len);
offset = len;
p = (char *)block + offset;
padding = 56 - (offset + 1);
*p++ = 0x80;
if (padding < 0) {
memset(p, 0x00, padding + sizeof (u64));
md5_transform_helper(mctx, block);
p = (char *)block;
padding = 56;
}
memset(p, 0, padding);
mctx->byte_count += len;
block[14] = mctx->byte_count << 3;
block[15] = mctx->byte_count >> 29;
le32_to_cpu_array(block, (sizeof(block) - sizeof(u64)) / sizeof(u32));
md5_transform(mctx->hash, block);
memzero_explicit(block, sizeof(block));
cpu_to_le32_array(mctx->hash, sizeof(mctx->hash) / sizeof(u32));
memcpy(out, mctx->hash, sizeof(mctx->hash));
md5_update(MD5_CTX(desc), data, len);
return 0;
}
static struct shash_alg alg = {
.digestsize = MD5_DIGEST_SIZE,
.init = md5_init,
.update = md5_update,
.finup = md5_finup,
.descsize = MD5_STATE_SIZE,
.base = {
.cra_name = "md5",
.cra_driver_name = "md5-generic",
.cra_flags = CRYPTO_AHASH_ALG_BLOCK_ONLY,
.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
.cra_module = THIS_MODULE,
}
static int crypto_md5_final(struct shash_desc *desc, u8 *out)
{
md5_final(MD5_CTX(desc), out);
return 0;
}
static int crypto_md5_digest(struct shash_desc *desc,
const u8 *data, unsigned int len, u8 *out)
{
md5(data, len, out);
return 0;
}
static int crypto_md5_export(struct shash_desc *desc, void *out)
{
return __crypto_md5_export(MD5_CTX(desc), out);
}
static int crypto_md5_import(struct shash_desc *desc, const void *in)
{
return __crypto_md5_import(MD5_CTX(desc), in);
}
static int crypto_md5_export_core(struct shash_desc *desc, void *out)
{
return __crypto_md5_export_core(MD5_CTX(desc), out);
}
static int crypto_md5_import_core(struct shash_desc *desc, const void *in)
{
return __crypto_md5_import_core(MD5_CTX(desc), in);
}
#define HMAC_MD5_KEY(tfm) ((struct hmac_md5_key *)crypto_shash_ctx(tfm))
#define HMAC_MD5_CTX(desc) ((struct hmac_md5_ctx *)shash_desc_ctx(desc))
static int crypto_hmac_md5_setkey(struct crypto_shash *tfm,
const u8 *raw_key, unsigned int keylen)
{
hmac_md5_preparekey(HMAC_MD5_KEY(tfm), raw_key, keylen);
return 0;
}
static int crypto_hmac_md5_init(struct shash_desc *desc)
{
hmac_md5_init(HMAC_MD5_CTX(desc), HMAC_MD5_KEY(desc->tfm));
return 0;
}
static int crypto_hmac_md5_update(struct shash_desc *desc,
const u8 *data, unsigned int len)
{
hmac_md5_update(HMAC_MD5_CTX(desc), data, len);
return 0;
}
static int crypto_hmac_md5_final(struct shash_desc *desc, u8 *out)
{
hmac_md5_final(HMAC_MD5_CTX(desc), out);
return 0;
}
static int crypto_hmac_md5_digest(struct shash_desc *desc,
const u8 *data, unsigned int len, u8 *out)
{
hmac_md5(HMAC_MD5_KEY(desc->tfm), data, len, out);
return 0;
}
static int crypto_hmac_md5_export(struct shash_desc *desc, void *out)
{
return __crypto_md5_export(&HMAC_MD5_CTX(desc)->hash_ctx, out);
}
static int crypto_hmac_md5_import(struct shash_desc *desc, const void *in)
{
struct hmac_md5_ctx *ctx = HMAC_MD5_CTX(desc);
ctx->ostate = HMAC_MD5_KEY(desc->tfm)->ostate;
return __crypto_md5_import(&ctx->hash_ctx, in);
}
static int crypto_hmac_md5_export_core(struct shash_desc *desc, void *out)
{
return __crypto_md5_export_core(&HMAC_MD5_CTX(desc)->hash_ctx, out);
}
static int crypto_hmac_md5_import_core(struct shash_desc *desc, const void *in)
{
struct hmac_md5_ctx *ctx = HMAC_MD5_CTX(desc);
ctx->ostate = HMAC_MD5_KEY(desc->tfm)->ostate;
return __crypto_md5_import_core(&ctx->hash_ctx, in);
}
static struct shash_alg algs[] = {
{
.base.cra_name = "md5",
.base.cra_driver_name = "md5-lib",
.base.cra_priority = 300,
.base.cra_blocksize = MD5_BLOCK_SIZE,
.base.cra_module = THIS_MODULE,
.digestsize = MD5_DIGEST_SIZE,
.init = crypto_md5_init,
.update = crypto_md5_update,
.final = crypto_md5_final,
.digest = crypto_md5_digest,
.export = crypto_md5_export,
.import = crypto_md5_import,
.export_core = crypto_md5_export_core,
.import_core = crypto_md5_import_core,
.descsize = sizeof(struct md5_ctx),
.statesize = MD5_SHASH_STATE_SIZE,
},
{
.base.cra_name = "hmac(md5)",
.base.cra_driver_name = "hmac-md5-lib",
.base.cra_priority = 300,
.base.cra_blocksize = MD5_BLOCK_SIZE,
.base.cra_ctxsize = sizeof(struct hmac_md5_key),
.base.cra_module = THIS_MODULE,
.digestsize = MD5_DIGEST_SIZE,
.setkey = crypto_hmac_md5_setkey,
.init = crypto_hmac_md5_init,
.update = crypto_hmac_md5_update,
.final = crypto_hmac_md5_final,
.digest = crypto_hmac_md5_digest,
.export = crypto_hmac_md5_export,
.import = crypto_hmac_md5_import,
.export_core = crypto_hmac_md5_export_core,
.import_core = crypto_hmac_md5_import_core,
.descsize = sizeof(struct hmac_md5_ctx),
.statesize = MD5_SHASH_STATE_SIZE,
},
};
static int __init md5_mod_init(void)
static int __init crypto_md5_mod_init(void)
{
return crypto_register_shash(&alg);
return crypto_register_shashes(algs, ARRAY_SIZE(algs));
}
module_init(crypto_md5_mod_init);
static void __exit md5_mod_fini(void)
static void __exit crypto_md5_mod_exit(void)
{
crypto_unregister_shash(&alg);
crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
}
module_init(md5_mod_init);
module_exit(md5_mod_fini);
module_exit(crypto_md5_mod_exit);
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("MD5 Message Digest Algorithm");
MODULE_DESCRIPTION("Crypto API support for MD5 and HMAC-MD5");
MODULE_ALIAS_CRYPTO("md5");
MODULE_ALIAS_CRYPTO("md5-lib");
MODULE_ALIAS_CRYPTO("hmac(md5)");
MODULE_ALIAS_CRYPTO("hmac-md5-lib");

crypto/testmgr.c

@@ -4152,14 +4152,14 @@ static int alg_test_null(const struct alg_test_desc *desc,
static const struct alg_test_desc alg_test_descs[] = {
{
.alg = "adiantum(xchacha12,aes)",
.generic_driver = "adiantum(xchacha12-generic,aes-generic,nhpoly1305-generic)",
.generic_driver = "adiantum(xchacha12-lib,aes-generic,nhpoly1305-generic)",
.test = alg_test_skcipher,
.suite = {
.cipher = __VECS(adiantum_xchacha12_aes_tv_template)
},
}, {
.alg = "adiantum(xchacha20,aes)",
.generic_driver = "adiantum(xchacha20-generic,aes-generic,nhpoly1305-generic)",
.generic_driver = "adiantum(xchacha20-lib,aes-generic,nhpoly1305-generic)",
.test = alg_test_skcipher,
.suite = {
.cipher = __VECS(adiantum_xchacha20_aes_tv_template)
@@ -4178,6 +4178,7 @@ static const struct alg_test_desc alg_test_descs[] = {
}
}, {
.alg = "authenc(hmac(md5),ecb(cipher_null))",
.generic_driver = "authenc(hmac-md5-lib,ecb-cipher_null)",
.test = alg_test_aead,
.suite = {
.aead = __VECS(hmac_md5_ecb_cipher_null_tv_template)
@@ -4484,6 +4485,7 @@ static const struct alg_test_desc alg_test_descs[] = {
}
}, {
.alg = "chacha20",
.generic_driver = "chacha20-lib",
.test = alg_test_skcipher,
.suite = {
.cipher = __VECS(chacha20_tv_template)
@@ -4639,12 +4641,6 @@ static const struct alg_test_desc alg_test_descs[] = {
.suite = {
.cipher = __VECS(sm4_cts_tv_template)
}
}, {
.alg = "curve25519",
.test = alg_test_kpp,
.suite = {
.kpp = __VECS(curve25519_tv_template)
}
}, {
.alg = "deflate",
.test = alg_test_comp,
@@ -5064,6 +5060,7 @@ static const struct alg_test_desc alg_test_descs[] = {
}
}, {
.alg = "hmac(md5)",
.generic_driver = "hmac-md5-lib",
.test = alg_test_hash,
.suite = {
.hash = __VECS(hmac_md5_tv_template)
@@ -5250,6 +5247,7 @@ static const struct alg_test_desc alg_test_descs[] = {
}
}, {
.alg = "md5",
.generic_driver = "md5-lib",
.test = alg_test_hash,
.suite = {
.hash = __VECS(md5_tv_template)
@@ -5417,12 +5415,14 @@ static const struct alg_test_desc alg_test_descs[] = {
}
}, {
.alg = "rfc7539(chacha20,poly1305)",
.generic_driver = "rfc7539(chacha20-lib,poly1305-generic)",
.test = alg_test_aead,
.suite = {
.aead = __VECS(rfc7539_tv_template)
}
}, {
.alg = "rfc7539esp(chacha20,poly1305)",
.generic_driver = "rfc7539esp(chacha20-lib,poly1305-generic)",
.test = alg_test_aead,
.suite = {
.aead = {
@@ -5588,12 +5588,14 @@ static const struct alg_test_desc alg_test_descs[] = {
}
}, {
.alg = "xchacha12",
.generic_driver = "xchacha12-lib",
.test = alg_test_skcipher,
.suite = {
.cipher = __VECS(xchacha12_tv_template)
},
}, {
.alg = "xchacha20",
.generic_driver = "xchacha20-lib",
.test = alg_test_skcipher,
.suite = {
.cipher = __VECS(xchacha20_tv_template)

File diff suppressed because it is too large.