lib/crypto: aes: Drop 'volatile' from aes_sbox and aes_inv_sbox
The volatile keyword is no longer necessary or useful on aes_sbox and
aes_inv_sbox, since the table prefetching is now done using a helper
function that casts to volatile itself and also includes an optimization
barrier. Since it prevents some compiler optimizations, remove it.

Acked-by: Ard Biesheuvel <ardb@kernel.org>
Link: https://lore.kernel.org/r/20260112192035.10427-36-ebiggers@kernel.org
Signed-off-by: Eric Biggers <ebiggers@kernel.org>
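For context, here is a minimal sketch of the kind of prefetch helper the message describes. The name prefetch_table_sketch and the exact loop are illustrative assumptions, not the kernel's actual aes_prefetch() implementation; the point is that the volatile cast and the optimization barrier live inside the helper, which is what makes the volatile qualifier on the tables themselves redundant.

/*
 * Illustrative sketch only: touch one byte of the table per cache line
 * through a volatile pointer so the reads cannot be optimized away,
 * then emit a compiler barrier.  The real aes_prefetch() may differ.
 */
#include <linux/cache.h>	/* L1_CACHE_BYTES */
#include <linux/compiler.h>	/* barrier() */
#include <linux/types.h>

static inline void prefetch_table_sketch(const void *table, size_t len)
{
	const volatile u8 *p = table;	/* the volatile cast lives here, not on the table */
	size_t i;

	for (i = 0; i < len; i += L1_CACHE_BYTES)
		(void)p[i];		/* one dummy read per cache line */
	(void)p[len - 1];		/* cover a partial trailing line */

	barrier();	/* keep the compiler from moving table accesses above the reads */
}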
@@ -11,11 +11,7 @@
 #include <linux/module.h>
 #include <linux/unaligned.h>
 
-/*
- * Emit the sbox as volatile const to prevent the compiler from doing
- * constant folding on sbox references involving fixed indexes.
- */
-static volatile const u8 ____cacheline_aligned aes_sbox[] = {
+static const u8 ____cacheline_aligned aes_sbox[] = {
 	0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5,
 	0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76,
 	0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0,
@@ -50,7 +46,7 @@ static volatile const u8 ____cacheline_aligned aes_sbox[] = {
 	0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16,
 };
 
-static volatile const u8 ____cacheline_aligned aes_inv_sbox[] = {
+static const u8 ____cacheline_aligned aes_inv_sbox[] = {
 	0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38,
 	0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb,
 	0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87,
@@ -430,7 +426,7 @@ static void __maybe_unused aes_decrypt_generic(const u32 inv_rndkeys[],
 		w[3] = w3;
 	} while (--n);
 
-	aes_prefetch((const void *)aes_inv_sbox, sizeof(aes_inv_sbox));
+	aes_prefetch(aes_inv_sbox, sizeof(aes_inv_sbox));
 	put_unaligned_le32(declast_quarterround(w, 0, *rkp++), &out[0]);
 	put_unaligned_le32(declast_quarterround(w, 1, *rkp++), &out[4]);
 	put_unaligned_le32(declast_quarterround(w, 2, *rkp++), &out[8]);
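On the last hunk: once aes_inv_sbox is no longer volatile-qualified, its address converts implicitly to const void *, so the explicit cast at the aes_prefetch() call can be dropped as well. The stand-alone snippet below illustrates the qualifier rule involved; take_table() is an invented placeholder, and the const void * parameter type is only presumed from the cast in the old call site, not taken from the kernel headers.

#include <linux/types.h>

static const u8 tbl_plain[4] = { 1, 2, 3, 4 };
static volatile const u8 tbl_volatile[4] = { 1, 2, 3, 4 };

/* hypothetical consumer with a const void * parameter */
static void take_table(const void *table, size_t len)
{
	(void)table;
	(void)len;
}

static void __maybe_unused qualifier_example(void)
{
	take_table(tbl_plain, sizeof(tbl_plain));			/* OK: implicit conversion */
	take_table((const void *)tbl_volatile, sizeof(tbl_volatile));	/* cast needed to drop volatile */
}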