Files
linux/lib/crypto/arm64/aes-ce.S
Eric Biggers 4b90840320 lib/crypto: arm64/aes: Move assembly code for AES modes into libaes
To migrate the support for CBC-based MACs into libaes, the corresponding
arm64 assembly code needs to be moved there.  However, the arm64 AES
assembly code groups many AES modes together; individual modes aren't
easily separable.  (This isn't unique to arm64; other architectures
organize their AES modes similarly.)

Since the other AES modes will be migrated into the library eventually
too, just move the full assembly files for the AES modes into the
library.  (This is similar to what I already did for PowerPC and SPARC.)

Specifically: move the assembly files aes-ce.S, aes-modes.S, and
aes-neon.S and their build rules; declare the assembly functions in
<crypto/aes.h>; and export the assembly functions from libaes.

Note that the exports and public declarations of the assembly functions
are temporary.  They exist only to keep arch/arm64/crypto/ working until
the AES modes are fully moved into the library.
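
As a rough illustration of the temporary glue described above (not taken from the patch itself), the pattern is usually a prototype in the shared header plus an export next to the code that now owns the symbol. The prototype of ce_aes_ecb_encrypt below is an assumption for illustration only; the actual declarations and their placement are whatever the patch adds:

/* Illustrative sketch only; prototype and placement are assumptions. */

/* include/crypto/aes.h: declare one of the arm64 assembly routines so
 * arch/arm64/crypto/ can keep calling it for now. */
asmlinkage void ce_aes_ecb_encrypt(u8 out[], u8 const in[], u32 const rk[],
                                   int rounds, int blocks);

/* libaes C code (e.g. lib/crypto/aes.c): export the symbol temporarily,
 * until the AES modes themselves move into the library. */
EXPORT_SYMBOL_GPL(ce_aes_ecb_encrypt);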

Reviewed-by: Ard Biesheuvel <ardb@kernel.org>
Link: https://lore.kernel.org/r/20260218213501.136844-5-ebiggers@kernel.org
Signed-off-by: Eric Biggers <ebiggers@kernel.org>
2026-03-09 13:27:20 -07:00


/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * AES cipher for ARMv8 with Crypto Extensions
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <linux/linkage.h>
#include <asm/assembler.h>

#define AES_FUNC_START(func)    SYM_FUNC_START(ce_ ## func)
#define AES_FUNC_END(func)      SYM_FUNC_END(ce_ ## func)

        .arch           armv8-a+crypto
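
        /*
         * aes-modes.S refers to the per-mode IV/tweak/counter register by
         * name.  Here all three aliases map to v16, since the round keys
         * occupy v17-v31 and only one of the three is needed by any given
         * mode routine.
         */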
        xtsmask         .req    v16
        cbciv           .req    v16
        vctr            .req    v16
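
        /*
         * Hooks expected by aes-modes.S.  The Crypto Extensions code does
         * not need either of them, so both expand to nothing.
         */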
        .macro          xts_reload_mask, tmp
        .endm

        .macro          xts_cts_skip_tw, reg, lbl
        .endm

        /* preload all round keys */
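        /*
         * v17-v20 always hold the first four round keys; the remaining keys
         * are loaded relative to the end of the schedule (\rk + \nr * 16 -
         * 160), so v30 and v31 always hold the last two round keys,
         * whatever the key size.
         */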
        .macro          load_round_keys, rk, nr, tmp
        add             \tmp, \rk, \nr, sxtw #4
        sub             \tmp, \tmp, #160
        ld1             {v17.4s-v20.4s}, [\rk]
        ld1             {v21.4s-v24.4s}, [\tmp], #64
        ld1             {v25.4s-v28.4s}, [\tmp], #64
        ld1             {v29.4s-v31.4s}, [\tmp]
        .endm

        /* prepare for encryption with key in rk[] */
        .macro          enc_prepare, rounds, rk, temp
        load_round_keys \rk, \rounds, \temp
        .endm

        /* prepare for encryption (again) but with new key in rk[] */
        .macro          enc_switch_key, rounds, rk, temp
        load_round_keys \rk, \rounds, \temp
        .endm

        /* prepare for decryption with key in rk[] */
        .macro          dec_prepare, rounds, rk, temp
        load_round_keys \rk, \rounds, \temp
        .endm
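
        /*
         * One AES round on up to five blocks in parallel: aese/aesd folds
         * in AddRoundKey with \k before (Inv)SubBytes/(Inv)ShiftRows, and
         * aesmc/aesimc applies (Inv)MixColumns.  The nested .ifnb
         * directives emit code only for the block registers that were
         * actually passed.
         */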
        .macro          do_enc_Nx, de, mc, k, i0, i1, i2, i3, i4
        aes\de          \i0\().16b, \k\().16b
        aes\mc          \i0\().16b, \i0\().16b
        .ifnb           \i1
        aes\de          \i1\().16b, \k\().16b
        aes\mc          \i1\().16b, \i1\().16b
        .ifnb           \i3
        aes\de          \i2\().16b, \k\().16b
        aes\mc          \i2\().16b, \i2\().16b
        aes\de          \i3\().16b, \k\().16b
        aes\mc          \i3\().16b, \i3\().16b
        .ifnb           \i4
        aes\de          \i4\().16b, \k\().16b
        aes\mc          \i4\().16b, \i4\().16b
        .endif
        .endif
        .endif
        .endm

        /* up to 5 interleaved encryption rounds with the same round key */
        .macro          round_Nx, enc, k, i0, i1, i2, i3, i4
        .ifc            \enc, e
        do_enc_Nx       e, mc, \k, \i0, \i1, \i2, \i3, \i4
        .else
        do_enc_Nx       d, imc, \k, \i0, \i1, \i2, \i3, \i4
        .endif
        .endm
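
        /*
         * Final round: aese/aesd consumes the next-to-last round key (\k)
         * and, since the last round has no (Inv)MixColumns, the last round
         * key (\k2) is applied with a plain eor.
         */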
        /* up to 5 interleaved final rounds */
        .macro          fin_round_Nx, de, k, k2, i0, i1, i2, i3, i4
        aes\de          \i0\().16b, \k\().16b
        .ifnb           \i1
        aes\de          \i1\().16b, \k\().16b
        .ifnb           \i3
        aes\de          \i2\().16b, \k\().16b
        aes\de          \i3\().16b, \k\().16b
        .ifnb           \i4
        aes\de          \i4\().16b, \k\().16b
        .endif
        .endif
        .endif
        eor             \i0\().16b, \i0\().16b, \k2\().16b
        .ifnb           \i1
        eor             \i1\().16b, \i1\().16b, \k2\().16b
        .ifnb           \i3
        eor             \i2\().16b, \i2\().16b, \k2\().16b
        eor             \i3\().16b, \i3\().16b, \k2\().16b
        .ifnb           \i4
        eor             \i4\().16b, \i4\().16b, \k2\().16b
        .endif
        .endif
        .endif
        .endm
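
        /*
         * \rounds is the AES round count (10, 12 or 14).  Testing bits 2
         * and 1 skips the extra leading rounds for 128- and 192-bit keys,
         * so all key sizes share the same tail of round keys in v21-v31.
         */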
        /* up to 5 interleaved blocks */
        .macro          do_block_Nx, enc, rounds, i0, i1, i2, i3, i4
        tbz             \rounds, #2, .L\@       /* 128 bits */
        round_Nx        \enc, v17, \i0, \i1, \i2, \i3, \i4
        round_Nx        \enc, v18, \i0, \i1, \i2, \i3, \i4
        tbz             \rounds, #1, .L\@       /* 192 bits */
        round_Nx        \enc, v19, \i0, \i1, \i2, \i3, \i4
        round_Nx        \enc, v20, \i0, \i1, \i2, \i3, \i4
.L\@:   .irp            key, v21, v22, v23, v24, v25, v26, v27, v28, v29
        round_Nx        \enc, \key, \i0, \i1, \i2, \i3, \i4
        .endr
        fin_round_Nx    \enc, v30, v31, \i0, \i1, \i2, \i3, \i4
        .endm

        .macro          encrypt_block, in, rounds, t0, t1, t2
        do_block_Nx     e, \rounds, \in
        .endm

        .macro          encrypt_block4x, i0, i1, i2, i3, rounds, t0, t1, t2
        do_block_Nx     e, \rounds, \i0, \i1, \i2, \i3
        .endm

        .macro          encrypt_block5x, i0, i1, i2, i3, i4, rounds, t0, t1, t2
        do_block_Nx     e, \rounds, \i0, \i1, \i2, \i3, \i4
        .endm

        .macro          decrypt_block, in, rounds, t0, t1, t2
        do_block_Nx     d, \rounds, \in
        .endm

        .macro          decrypt_block4x, i0, i1, i2, i3, rounds, t0, t1, t2
        do_block_Nx     d, \rounds, \i0, \i1, \i2, \i3
        .endm

        .macro          decrypt_block5x, i0, i1, i2, i3, i4, rounds, t0, t1, t2
        do_block_Nx     d, \rounds, \i0, \i1, \i2, \i3, \i4
        .endm
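
        /*
         * The generic mode implementations in aes-modes.S are built on top
         * of the block macros above; MAX_STRIDE 5 lets them interleave up
         * to five blocks per iteration, and AES_FUNC_START prefixes every
         * resulting function with ce_.
         */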
#define MAX_STRIDE      5

#include "aes-modes.S"