mirror of https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
Merge branch 'tls-expand-tls_cipher_size_desc-to-simplify-getsockopt-setsockopt'
Sabrina Dubroca says:

====================
tls: expand tls_cipher_size_desc to simplify getsockopt/setsockopt

Commit 2d2c5ea242 ("net/tls: Describe ciphers sizes by const structs") introduced
tls_cipher_size_desc to describe the size of the fields of the per-cipher
crypto_info structs, and commit ea7a9d88ba ("net/tls: Use cipher sizes structs")
used it, but only in tls_device.c and tls_device_fallback.c, and skipped
converting similar code in tls_main.c and tls_sw.c.

This series expands tls_cipher_size_desc (renamed to tls_cipher_desc to better
fit this expansion) to fully describe a cipher:
 - offset of the fields within the per-cipher crypto_info
 - size of the full struct (for copies to/from userspace)
 - offload flag
 - algorithm name used by SW crypto

With these additions, we can remove ~350 lines of
"switch (crypto_info->cipher_type) { ... }" from tls_set_device_offload,
tls_sw_fallback_init, do_tls_getsockopt_conf, do_tls_setsockopt_conf and
tls_set_sw_offload (mainly do_tls_getsockopt_conf and tls_set_sw_offload).

This series also adds the ARIA ciphers to the tls selftests, and some more
getsockopt/setsockopt tests to cover more of the code changed by this series.
====================

Link: https://lore.kernel.org/r/cover.1692977948.git.sd@queasysnail.net
Signed-off-by: Jakub Kicinski <kuba@kernel.org>
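The idea the series relies on is simple: instead of switching on cipher_type at every call site, each cipher gets one descriptor that records both the field sizes and the offsetof() of each field inside that cipher's crypto_info struct, so generic code can reach into any cipher's struct through the table. The sketch below is not part of the kernel patch; it is a standalone userspace C illustration of that table-driven pattern under simplified assumptions (the toy_* names and struct layouts are invented stand-ins, only the descriptor fields mirror what tls_cipher_desc records).

```c
/*
 * Minimal userspace sketch (not the kernel code) of the table-driven
 * lookup this series introduces: one descriptor per cipher records the
 * field sizes and the offsetof() of each field inside that cipher's
 * crypto_info struct, so callers index a table instead of switching on
 * cipher_type. All toy_* identifiers are hypothetical.
 */
#include <stddef.h>
#include <stdio.h>
#include <string.h>

enum { TOY_CIPHER_A = 51, TOY_CIPHER_B = 52 };   /* hypothetical cipher IDs */
#define TOY_CIPHER_MIN TOY_CIPHER_A
#define TOY_CIPHER_MAX TOY_CIPHER_B

struct toy_info_a { unsigned short cipher_type; char iv[8];  char key[16]; };
struct toy_info_b { unsigned short cipher_type; char iv[12]; char key[32]; };

struct toy_cipher_desc {
	unsigned int iv, key;                 /* field sizes */
	unsigned int iv_offset, key_offset;   /* offsets inside the per-cipher struct */
	size_t crypto_info;                   /* size of the whole struct */
};

#define TOY_DESC(id, st) [id - TOY_CIPHER_MIN] = {	\
	.iv = sizeof(((struct st *)0)->iv),		\
	.key = sizeof(((struct st *)0)->key),		\
	.iv_offset = offsetof(struct st, iv),		\
	.key_offset = offsetof(struct st, key),		\
	.crypto_info = sizeof(struct st),		\
}

static const struct toy_cipher_desc toy_cipher_desc[TOY_CIPHER_MAX + 1 - TOY_CIPHER_MIN] = {
	TOY_DESC(TOY_CIPHER_A, toy_info_a),
	TOY_DESC(TOY_CIPHER_B, toy_info_b),
};

static const struct toy_cipher_desc *toy_get_cipher_desc(unsigned short cipher_type)
{
	if (cipher_type < TOY_CIPHER_MIN || cipher_type > TOY_CIPHER_MAX)
		return NULL;
	return &toy_cipher_desc[cipher_type - TOY_CIPHER_MIN];
}

/* Generic accessor: works for any cipher because the offset lives in the table. */
static char *toy_info_iv(void *crypto_info, const struct toy_cipher_desc *desc)
{
	return (char *)crypto_info + desc->iv_offset;
}

int main(void)
{
	struct toy_info_b info = { .cipher_type = TOY_CIPHER_B };
	const struct toy_cipher_desc *desc = toy_get_cipher_desc(info.cipher_type);

	memcpy(toy_info_iv(&info, desc), "0123456789ab", desc->iv);
	printf("iv size %u, struct size %zu\n", desc->iv, desc->crypto_info);
	return 0;
}
```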
@@ -51,16 +51,6 @@
struct tls_rec;

struct tls_cipher_size_desc {
	unsigned int iv;
	unsigned int key;
	unsigned int salt;
	unsigned int tag;
	unsigned int rec_seq;
};

extern const struct tls_cipher_size_desc tls_cipher_size_desc[];

/* Maximum data size carried in a TLS record */
#define TLS_MAX_PAYLOAD_SIZE ((size_t)1 << 14)
@@ -51,6 +51,59 @@
#define TLS_DEC_STATS(net, field) \
	SNMP_DEC_STATS((net)->mib.tls_statistics, field)

struct tls_cipher_desc {
	unsigned int nonce;
	unsigned int iv;
	unsigned int key;
	unsigned int salt;
	unsigned int tag;
	unsigned int rec_seq;
	unsigned int iv_offset;
	unsigned int key_offset;
	unsigned int salt_offset;
	unsigned int rec_seq_offset;
	char *cipher_name;
	bool offloadable;
	size_t crypto_info;
};

#define TLS_CIPHER_MIN TLS_CIPHER_AES_GCM_128
#define TLS_CIPHER_MAX TLS_CIPHER_ARIA_GCM_256
extern const struct tls_cipher_desc tls_cipher_desc[TLS_CIPHER_MAX + 1 - TLS_CIPHER_MIN];

static inline const struct tls_cipher_desc *get_cipher_desc(u16 cipher_type)
{
	if (cipher_type < TLS_CIPHER_MIN || cipher_type > TLS_CIPHER_MAX)
		return NULL;

	return &tls_cipher_desc[cipher_type - TLS_CIPHER_MIN];
}

static inline char *crypto_info_iv(struct tls_crypto_info *crypto_info,
				   const struct tls_cipher_desc *cipher_desc)
{
	return (char *)crypto_info + cipher_desc->iv_offset;
}

static inline char *crypto_info_key(struct tls_crypto_info *crypto_info,
				    const struct tls_cipher_desc *cipher_desc)
{
	return (char *)crypto_info + cipher_desc->key_offset;
}

static inline char *crypto_info_salt(struct tls_crypto_info *crypto_info,
				     const struct tls_cipher_desc *cipher_desc)
{
	return (char *)crypto_info + cipher_desc->salt_offset;
}

static inline char *crypto_info_rec_seq(struct tls_crypto_info *crypto_info,
					const struct tls_cipher_desc *cipher_desc)
{
	return (char *)crypto_info + cipher_desc->rec_seq_offset;
}

/* TLS records are maintained in 'struct tls_rec'. It stores the memory pages
 * allocated or mapped for each TLS record. After encryption, the records are
 * stores in a linked list.
@@ -884,7 +884,7 @@ static int
tls_device_reencrypt(struct sock *sk, struct tls_context *tls_ctx)
{
	struct tls_sw_context_rx *sw_ctx = tls_sw_ctx_rx(tls_ctx);
	const struct tls_cipher_size_desc *cipher_sz;
	const struct tls_cipher_desc *cipher_desc;
	int err, offset, copy, data_len, pos;
	struct sk_buff *skb, *skb_iter;
	struct scatterlist sg[1];
@@ -898,10 +898,10 @@ tls_device_reencrypt(struct sock *sk, struct tls_context *tls_ctx)
	default:
		return -EINVAL;
	}
	cipher_sz = &tls_cipher_size_desc[tls_ctx->crypto_recv.info.cipher_type];
	cipher_desc = get_cipher_desc(tls_ctx->crypto_recv.info.cipher_type);

	rxm = strp_msg(tls_strp_msg(sw_ctx));
	orig_buf = kmalloc(rxm->full_len + TLS_HEADER_SIZE + cipher_sz->iv,
	orig_buf = kmalloc(rxm->full_len + TLS_HEADER_SIZE + cipher_desc->iv,
			   sk->sk_allocation);
	if (!orig_buf)
		return -ENOMEM;
@@ -917,8 +917,8 @@ tls_device_reencrypt(struct sock *sk, struct tls_context *tls_ctx)
	sg_init_table(sg, 1);
	sg_set_buf(&sg[0], buf,
		   rxm->full_len + TLS_HEADER_SIZE + cipher_sz->iv);
	err = skb_copy_bits(skb, offset, buf, TLS_HEADER_SIZE + cipher_sz->iv);
		   rxm->full_len + TLS_HEADER_SIZE + cipher_desc->iv);
	err = skb_copy_bits(skb, offset, buf, TLS_HEADER_SIZE + cipher_desc->iv);
	if (err)
		goto free_buf;
@@ -929,7 +929,7 @@ tls_device_reencrypt(struct sock *sk, struct tls_context *tls_ctx)
	else
		err = 0;

	data_len = rxm->full_len - cipher_sz->tag;
	data_len = rxm->full_len - cipher_desc->tag;

	if (skb_pagelen(skb) > offset) {
		copy = min_t(int, skb_pagelen(skb) - offset, data_len);
@@ -1046,7 +1046,7 @@ int tls_set_device_offload(struct sock *sk, struct tls_context *ctx)
{
	struct tls_context *tls_ctx = tls_get_ctx(sk);
	struct tls_prot_info *prot = &tls_ctx->prot_info;
	const struct tls_cipher_size_desc *cipher_sz;
	const struct tls_cipher_desc *cipher_desc;
	struct tls_record_info *start_marker_record;
	struct tls_offload_context_tx *offload_ctx;
	struct tls_crypto_info *crypto_info;
@@ -1079,46 +1079,32 @@ int tls_set_device_offload(struct sock *sk, struct tls_context *ctx)
		goto release_netdev;
	}

	switch (crypto_info->cipher_type) {
	case TLS_CIPHER_AES_GCM_128:
		iv = ((struct tls12_crypto_info_aes_gcm_128 *)crypto_info)->iv;
		rec_seq =
		 ((struct tls12_crypto_info_aes_gcm_128 *)crypto_info)->rec_seq;
		break;
	case TLS_CIPHER_AES_GCM_256:
		iv = ((struct tls12_crypto_info_aes_gcm_256 *)crypto_info)->iv;
		rec_seq =
		 ((struct tls12_crypto_info_aes_gcm_256 *)crypto_info)->rec_seq;
		break;
	default:
	cipher_desc = get_cipher_desc(crypto_info->cipher_type);
	if (!cipher_desc || !cipher_desc->offloadable) {
		rc = -EINVAL;
		goto release_netdev;
	}
	cipher_sz = &tls_cipher_size_desc[crypto_info->cipher_type];

	/* Sanity-check the rec_seq_size for stack allocations */
	if (cipher_sz->rec_seq > TLS_MAX_REC_SEQ_SIZE) {
		rc = -EINVAL;
		goto release_netdev;
	}
	iv = crypto_info_iv(crypto_info, cipher_desc);
	rec_seq = crypto_info_rec_seq(crypto_info, cipher_desc);

	prot->version = crypto_info->version;
	prot->cipher_type = crypto_info->cipher_type;
	prot->prepend_size = TLS_HEADER_SIZE + cipher_sz->iv;
	prot->tag_size = cipher_sz->tag;
	prot->prepend_size = TLS_HEADER_SIZE + cipher_desc->iv;
	prot->tag_size = cipher_desc->tag;
	prot->overhead_size = prot->prepend_size + prot->tag_size;
	prot->iv_size = cipher_sz->iv;
	prot->salt_size = cipher_sz->salt;
	ctx->tx.iv = kmalloc(cipher_sz->iv + cipher_sz->salt, GFP_KERNEL);
	prot->iv_size = cipher_desc->iv;
	prot->salt_size = cipher_desc->salt;
	ctx->tx.iv = kmalloc(cipher_desc->iv + cipher_desc->salt, GFP_KERNEL);
	if (!ctx->tx.iv) {
		rc = -ENOMEM;
		goto release_netdev;
	}

	memcpy(ctx->tx.iv + cipher_sz->salt, iv, cipher_sz->iv);
	memcpy(ctx->tx.iv + cipher_desc->salt, iv, cipher_desc->iv);

	prot->rec_seq_size = cipher_sz->rec_seq;
	ctx->tx.rec_seq = kmemdup(rec_seq, cipher_sz->rec_seq, GFP_KERNEL);
	prot->rec_seq_size = cipher_desc->rec_seq;
	ctx->tx.rec_seq = kmemdup(rec_seq, cipher_desc->rec_seq, GFP_KERNEL);
	if (!ctx->tx.rec_seq) {
		rc = -ENOMEM;
		goto free_iv;
@@ -55,7 +55,7 @@ static int tls_enc_record(struct aead_request *aead_req,
			  struct tls_prot_info *prot)
{
	unsigned char buf[TLS_HEADER_SIZE + MAX_IV_SIZE];
	const struct tls_cipher_size_desc *cipher_sz;
	const struct tls_cipher_desc *cipher_desc;
	struct scatterlist sg_in[3];
	struct scatterlist sg_out[3];
	unsigned int buf_size;
@@ -69,9 +69,9 @@ static int tls_enc_record(struct aead_request *aead_req,
	default:
		return -EINVAL;
	}
	cipher_sz = &tls_cipher_size_desc[prot->cipher_type];
	cipher_desc = get_cipher_desc(prot->cipher_type);

	buf_size = TLS_HEADER_SIZE + cipher_sz->iv;
	buf_size = TLS_HEADER_SIZE + cipher_desc->iv;
	len = min_t(int, *in_len, buf_size);

	scatterwalk_copychunks(buf, in, len, 0);
@@ -85,11 +85,11 @@ static int tls_enc_record(struct aead_request *aead_req,
	scatterwalk_pagedone(out, 1, 1);

	len = buf[4] | (buf[3] << 8);
	len -= cipher_sz->iv;
	len -= cipher_desc->iv;

	tls_make_aad(aad, len - cipher_sz->tag, (char *)&rcd_sn, buf[0], prot);
	tls_make_aad(aad, len - cipher_desc->tag, (char *)&rcd_sn, buf[0], prot);

	memcpy(iv + cipher_sz->salt, buf + TLS_HEADER_SIZE, cipher_sz->iv);
	memcpy(iv + cipher_desc->salt, buf + TLS_HEADER_SIZE, cipher_desc->iv);

	sg_init_table(sg_in, ARRAY_SIZE(sg_in));
	sg_init_table(sg_out, ARRAY_SIZE(sg_out));
@@ -100,7 +100,7 @@ static int tls_enc_record(struct aead_request *aead_req,
	*in_len -= len;
	if (*in_len < 0) {
		*in_len += cipher_sz->tag;
		*in_len += cipher_desc->tag;
		/* the input buffer doesn't contain the entire record.
		 * trim len accordingly. The resulting authentication tag
		 * will contain garbage, but we don't care, so we won't
@@ -121,7 +121,7 @@ static int tls_enc_record(struct aead_request *aead_req,
		scatterwalk_pagedone(out, 1, 1);
	}

	len -= cipher_sz->tag;
	len -= cipher_desc->tag;
	aead_request_set_crypt(aead_req, sg_in, sg_out, len, iv);

	rc = crypto_aead_encrypt(aead_req);
@@ -309,14 +309,14 @@ static void fill_sg_out(struct scatterlist sg_out[3], void *buf,
			int sync_size,
			void *dummy_buf)
{
	const struct tls_cipher_size_desc *cipher_sz =
		&tls_cipher_size_desc[tls_ctx->crypto_send.info.cipher_type];
	const struct tls_cipher_desc *cipher_desc =
		get_cipher_desc(tls_ctx->crypto_send.info.cipher_type);

	sg_set_buf(&sg_out[0], dummy_buf, sync_size);
	sg_set_buf(&sg_out[1], nskb->data + tcp_payload_offset, payload_len);
	/* Add room for authentication tag produced by crypto */
	dummy_buf += sync_size;
	sg_set_buf(&sg_out[2], dummy_buf, cipher_sz->tag);
	sg_set_buf(&sg_out[2], dummy_buf, cipher_desc->tag);
}

static struct sk_buff *tls_enc_skb(struct tls_context *tls_ctx,
@@ -328,7 +328,7 @@ static struct sk_buff *tls_enc_skb(struct tls_context *tls_ctx,
	struct tls_offload_context_tx *ctx = tls_offload_ctx_tx(tls_ctx);
	int tcp_payload_offset = skb_tcp_all_headers(skb);
	int payload_len = skb->len - tcp_payload_offset;
	const struct tls_cipher_size_desc *cipher_sz;
	const struct tls_cipher_desc *cipher_desc;
	void *buf, *iv, *aad, *dummy_buf, *salt;
	struct aead_request *aead_req;
	struct sk_buff *nskb = NULL;
@@ -348,16 +348,16 @@ static struct sk_buff *tls_enc_skb(struct tls_context *tls_ctx,
	default:
		goto free_req;
	}
	cipher_sz = &tls_cipher_size_desc[tls_ctx->crypto_send.info.cipher_type];
	buf_len = cipher_sz->salt + cipher_sz->iv + TLS_AAD_SPACE_SIZE +
		  sync_size + cipher_sz->tag;
	cipher_desc = get_cipher_desc(tls_ctx->crypto_send.info.cipher_type);
	buf_len = cipher_desc->salt + cipher_desc->iv + TLS_AAD_SPACE_SIZE +
		  sync_size + cipher_desc->tag;
	buf = kmalloc(buf_len, GFP_ATOMIC);
	if (!buf)
		goto free_req;

	iv = buf;
	memcpy(iv, salt, cipher_sz->salt);
	aad = buf + cipher_sz->salt + cipher_sz->iv;
	memcpy(iv, salt, cipher_desc->salt);
	aad = buf + cipher_desc->salt + cipher_desc->iv;
	dummy_buf = aad + TLS_AAD_SPACE_SIZE;

	nskb = alloc_skb(skb_headroom(skb) + skb->len, GFP_ATOMIC);
@@ -471,12 +471,15 @@ int tls_sw_fallback_init(struct sock *sk,
			 struct tls_offload_context_tx *offload_ctx,
			 struct tls_crypto_info *crypto_info)
{
	const struct tls_cipher_size_desc *cipher_sz;
	const u8 *key;
	const struct tls_cipher_desc *cipher_desc;
	int rc;

	cipher_desc = get_cipher_desc(crypto_info->cipher_type);
	if (!cipher_desc || !cipher_desc->offloadable)
		return -EINVAL;

	offload_ctx->aead_send =
	    crypto_alloc_aead("gcm(aes)", 0, CRYPTO_ALG_ASYNC);
	    crypto_alloc_aead(cipher_desc->cipher_name, 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(offload_ctx->aead_send)) {
		rc = PTR_ERR(offload_ctx->aead_send);
		pr_err_ratelimited("crypto_alloc_aead failed rc=%d\n", rc);
@@ -484,24 +487,13 @@ int tls_sw_fallback_init(struct sock *sk,
		goto err_out;
	}

	switch (crypto_info->cipher_type) {
	case TLS_CIPHER_AES_GCM_128:
		key = ((struct tls12_crypto_info_aes_gcm_128 *)crypto_info)->key;
		break;
	case TLS_CIPHER_AES_GCM_256:
		key = ((struct tls12_crypto_info_aes_gcm_256 *)crypto_info)->key;
		break;
	default:
		rc = -EINVAL;
		goto free_aead;
	}
	cipher_sz = &tls_cipher_size_desc[crypto_info->cipher_type];

	rc = crypto_aead_setkey(offload_ctx->aead_send, key, cipher_sz->key);
	rc = crypto_aead_setkey(offload_ctx->aead_send,
				crypto_info_key(crypto_info, cipher_desc),
				cipher_desc->key);
	if (rc)
		goto free_aead;

	rc = crypto_aead_setauthsize(offload_ctx->aead_send, cipher_sz->tag);
	rc = crypto_aead_setauthsize(offload_ctx->aead_send, cipher_desc->tag);
	if (rc)
		goto free_aead;
@@ -58,23 +58,66 @@ enum {
	TLS_NUM_PROTS,
};

#define CIPHER_SIZE_DESC(cipher) [cipher] = { \
#define CHECK_CIPHER_DESC(cipher,ci) \
	static_assert(cipher ## _IV_SIZE <= MAX_IV_SIZE); \
	static_assert(cipher ## _REC_SEQ_SIZE <= TLS_MAX_REC_SEQ_SIZE); \
	static_assert(cipher ## _TAG_SIZE == TLS_TAG_SIZE); \
	static_assert(sizeof_field(struct ci, iv) == cipher ## _IV_SIZE); \
	static_assert(sizeof_field(struct ci, key) == cipher ## _KEY_SIZE); \
	static_assert(sizeof_field(struct ci, salt) == cipher ## _SALT_SIZE); \
	static_assert(sizeof_field(struct ci, rec_seq) == cipher ## _REC_SEQ_SIZE);

#define __CIPHER_DESC(ci) \
	.iv_offset = offsetof(struct ci, iv), \
	.key_offset = offsetof(struct ci, key), \
	.salt_offset = offsetof(struct ci, salt), \
	.rec_seq_offset = offsetof(struct ci, rec_seq), \
	.crypto_info = sizeof(struct ci)

#define CIPHER_DESC(cipher,ci,algname,_offloadable) [cipher - TLS_CIPHER_MIN] = { \
	.nonce = cipher ## _IV_SIZE, \
	.iv = cipher ## _IV_SIZE, \
	.key = cipher ## _KEY_SIZE, \
	.salt = cipher ## _SALT_SIZE, \
	.tag = cipher ## _TAG_SIZE, \
	.rec_seq = cipher ## _REC_SEQ_SIZE, \
	.cipher_name = algname, \
	.offloadable = _offloadable, \
	__CIPHER_DESC(ci), \
}

const struct tls_cipher_size_desc tls_cipher_size_desc[] = {
	CIPHER_SIZE_DESC(TLS_CIPHER_AES_GCM_128),
	CIPHER_SIZE_DESC(TLS_CIPHER_AES_GCM_256),
	CIPHER_SIZE_DESC(TLS_CIPHER_AES_CCM_128),
	CIPHER_SIZE_DESC(TLS_CIPHER_CHACHA20_POLY1305),
	CIPHER_SIZE_DESC(TLS_CIPHER_SM4_GCM),
	CIPHER_SIZE_DESC(TLS_CIPHER_SM4_CCM),
#define CIPHER_DESC_NONCE0(cipher,ci,algname,_offloadable) [cipher - TLS_CIPHER_MIN] = { \
	.nonce = 0, \
	.iv = cipher ## _IV_SIZE, \
	.key = cipher ## _KEY_SIZE, \
	.salt = cipher ## _SALT_SIZE, \
	.tag = cipher ## _TAG_SIZE, \
	.rec_seq = cipher ## _REC_SEQ_SIZE, \
	.cipher_name = algname, \
	.offloadable = _offloadable, \
	__CIPHER_DESC(ci), \
}

const struct tls_cipher_desc tls_cipher_desc[TLS_CIPHER_MAX + 1 - TLS_CIPHER_MIN] = {
	CIPHER_DESC(TLS_CIPHER_AES_GCM_128, tls12_crypto_info_aes_gcm_128, "gcm(aes)", true),
	CIPHER_DESC(TLS_CIPHER_AES_GCM_256, tls12_crypto_info_aes_gcm_256, "gcm(aes)", true),
	CIPHER_DESC(TLS_CIPHER_AES_CCM_128, tls12_crypto_info_aes_ccm_128, "ccm(aes)", false),
	CIPHER_DESC_NONCE0(TLS_CIPHER_CHACHA20_POLY1305, tls12_crypto_info_chacha20_poly1305, "rfc7539(chacha20,poly1305)", false),
	CIPHER_DESC(TLS_CIPHER_SM4_GCM, tls12_crypto_info_sm4_gcm, "gcm(sm4)", false),
	CIPHER_DESC(TLS_CIPHER_SM4_CCM, tls12_crypto_info_sm4_ccm, "ccm(sm4)", false),
	CIPHER_DESC(TLS_CIPHER_ARIA_GCM_128, tls12_crypto_info_aria_gcm_128, "gcm(aria)", false),
	CIPHER_DESC(TLS_CIPHER_ARIA_GCM_256, tls12_crypto_info_aria_gcm_256, "gcm(aria)", false),
};

CHECK_CIPHER_DESC(TLS_CIPHER_AES_GCM_128, tls12_crypto_info_aes_gcm_128);
CHECK_CIPHER_DESC(TLS_CIPHER_AES_GCM_256, tls12_crypto_info_aes_gcm_256);
CHECK_CIPHER_DESC(TLS_CIPHER_AES_CCM_128, tls12_crypto_info_aes_ccm_128);
CHECK_CIPHER_DESC(TLS_CIPHER_CHACHA20_POLY1305, tls12_crypto_info_chacha20_poly1305);
CHECK_CIPHER_DESC(TLS_CIPHER_SM4_GCM, tls12_crypto_info_sm4_gcm);
CHECK_CIPHER_DESC(TLS_CIPHER_SM4_CCM, tls12_crypto_info_sm4_ccm);
CHECK_CIPHER_DESC(TLS_CIPHER_ARIA_GCM_128, tls12_crypto_info_aria_gcm_128);
CHECK_CIPHER_DESC(TLS_CIPHER_ARIA_GCM_256, tls12_crypto_info_aria_gcm_256);

static const struct proto *saved_tcpv6_prot;
static DEFINE_MUTEX(tcpv6_prot_mutex);
static const struct proto *saved_tcpv4_prot;
@@ -392,6 +435,7 @@ static int do_tls_getsockopt_conf(struct sock *sk, char __user *optval,
				  int __user *optlen, int tx)
{
	int rc = 0;
	const struct tls_cipher_desc *cipher_desc;
	struct tls_context *ctx = tls_get_ctx(sk);
	struct tls_crypto_info *crypto_info;
	struct cipher_context *cctx;
@@ -430,173 +474,20 @@ static int do_tls_getsockopt_conf(struct sock *sk, char __user *optval,
		goto out;
	}

	switch (crypto_info->cipher_type) {
	case TLS_CIPHER_AES_GCM_128: {
		struct tls12_crypto_info_aes_gcm_128 *
		  crypto_info_aes_gcm_128 =
		  container_of(crypto_info,
			       struct tls12_crypto_info_aes_gcm_128,
			       info);

		if (len != sizeof(*crypto_info_aes_gcm_128)) {
			rc = -EINVAL;
			goto out;
		}
		memcpy(crypto_info_aes_gcm_128->iv,
		       cctx->iv + TLS_CIPHER_AES_GCM_128_SALT_SIZE,
		       TLS_CIPHER_AES_GCM_128_IV_SIZE);
		memcpy(crypto_info_aes_gcm_128->rec_seq, cctx->rec_seq,
		       TLS_CIPHER_AES_GCM_128_REC_SEQ_SIZE);
		if (copy_to_user(optval,
				 crypto_info_aes_gcm_128,
				 sizeof(*crypto_info_aes_gcm_128)))
			rc = -EFAULT;
		break;
	}
	case TLS_CIPHER_AES_GCM_256: {
		struct tls12_crypto_info_aes_gcm_256 *
		  crypto_info_aes_gcm_256 =
		  container_of(crypto_info,
			       struct tls12_crypto_info_aes_gcm_256,
			       info);

		if (len != sizeof(*crypto_info_aes_gcm_256)) {
			rc = -EINVAL;
			goto out;
		}
		memcpy(crypto_info_aes_gcm_256->iv,
		       cctx->iv + TLS_CIPHER_AES_GCM_256_SALT_SIZE,
		       TLS_CIPHER_AES_GCM_256_IV_SIZE);
		memcpy(crypto_info_aes_gcm_256->rec_seq, cctx->rec_seq,
		       TLS_CIPHER_AES_GCM_256_REC_SEQ_SIZE);
		if (copy_to_user(optval,
				 crypto_info_aes_gcm_256,
				 sizeof(*crypto_info_aes_gcm_256)))
			rc = -EFAULT;
		break;
	}
	case TLS_CIPHER_AES_CCM_128: {
		struct tls12_crypto_info_aes_ccm_128 *aes_ccm_128 =
			container_of(crypto_info,
				     struct tls12_crypto_info_aes_ccm_128, info);

		if (len != sizeof(*aes_ccm_128)) {
			rc = -EINVAL;
			goto out;
		}
		memcpy(aes_ccm_128->iv,
		       cctx->iv + TLS_CIPHER_AES_CCM_128_SALT_SIZE,
		       TLS_CIPHER_AES_CCM_128_IV_SIZE);
		memcpy(aes_ccm_128->rec_seq, cctx->rec_seq,
		       TLS_CIPHER_AES_CCM_128_REC_SEQ_SIZE);
		if (copy_to_user(optval, aes_ccm_128, sizeof(*aes_ccm_128)))
			rc = -EFAULT;
		break;
	}
	case TLS_CIPHER_CHACHA20_POLY1305: {
		struct tls12_crypto_info_chacha20_poly1305 *chacha20_poly1305 =
			container_of(crypto_info,
				     struct tls12_crypto_info_chacha20_poly1305,
				     info);

		if (len != sizeof(*chacha20_poly1305)) {
			rc = -EINVAL;
			goto out;
		}
		memcpy(chacha20_poly1305->iv,
		       cctx->iv + TLS_CIPHER_CHACHA20_POLY1305_SALT_SIZE,
		       TLS_CIPHER_CHACHA20_POLY1305_IV_SIZE);
		memcpy(chacha20_poly1305->rec_seq, cctx->rec_seq,
		       TLS_CIPHER_CHACHA20_POLY1305_REC_SEQ_SIZE);
		if (copy_to_user(optval, chacha20_poly1305,
				 sizeof(*chacha20_poly1305)))
			rc = -EFAULT;
		break;
	}
	case TLS_CIPHER_SM4_GCM: {
		struct tls12_crypto_info_sm4_gcm *sm4_gcm_info =
			container_of(crypto_info,
				     struct tls12_crypto_info_sm4_gcm, info);

		if (len != sizeof(*sm4_gcm_info)) {
			rc = -EINVAL;
			goto out;
		}
		memcpy(sm4_gcm_info->iv,
		       cctx->iv + TLS_CIPHER_SM4_GCM_SALT_SIZE,
		       TLS_CIPHER_SM4_GCM_IV_SIZE);
		memcpy(sm4_gcm_info->rec_seq, cctx->rec_seq,
		       TLS_CIPHER_SM4_GCM_REC_SEQ_SIZE);
		if (copy_to_user(optval, sm4_gcm_info, sizeof(*sm4_gcm_info)))
			rc = -EFAULT;
		break;
	}
	case TLS_CIPHER_SM4_CCM: {
		struct tls12_crypto_info_sm4_ccm *sm4_ccm_info =
			container_of(crypto_info,
				     struct tls12_crypto_info_sm4_ccm, info);

		if (len != sizeof(*sm4_ccm_info)) {
			rc = -EINVAL;
			goto out;
		}
		memcpy(sm4_ccm_info->iv,
		       cctx->iv + TLS_CIPHER_SM4_CCM_SALT_SIZE,
		       TLS_CIPHER_SM4_CCM_IV_SIZE);
		memcpy(sm4_ccm_info->rec_seq, cctx->rec_seq,
		       TLS_CIPHER_SM4_CCM_REC_SEQ_SIZE);
		if (copy_to_user(optval, sm4_ccm_info, sizeof(*sm4_ccm_info)))
			rc = -EFAULT;
		break;
	}
	case TLS_CIPHER_ARIA_GCM_128: {
		struct tls12_crypto_info_aria_gcm_128 *
		  crypto_info_aria_gcm_128 =
		  container_of(crypto_info,
			       struct tls12_crypto_info_aria_gcm_128,
			       info);

		if (len != sizeof(*crypto_info_aria_gcm_128)) {
			rc = -EINVAL;
			goto out;
		}
		memcpy(crypto_info_aria_gcm_128->iv,
		       cctx->iv + TLS_CIPHER_ARIA_GCM_128_SALT_SIZE,
		       TLS_CIPHER_ARIA_GCM_128_IV_SIZE);
		memcpy(crypto_info_aria_gcm_128->rec_seq, cctx->rec_seq,
		       TLS_CIPHER_ARIA_GCM_128_REC_SEQ_SIZE);
		if (copy_to_user(optval,
				 crypto_info_aria_gcm_128,
				 sizeof(*crypto_info_aria_gcm_128)))
			rc = -EFAULT;
		break;
	}
	case TLS_CIPHER_ARIA_GCM_256: {
		struct tls12_crypto_info_aria_gcm_256 *
		  crypto_info_aria_gcm_256 =
		  container_of(crypto_info,
			       struct tls12_crypto_info_aria_gcm_256,
			       info);

		if (len != sizeof(*crypto_info_aria_gcm_256)) {
			rc = -EINVAL;
			goto out;
		}
		memcpy(crypto_info_aria_gcm_256->iv,
		       cctx->iv + TLS_CIPHER_ARIA_GCM_256_SALT_SIZE,
		       TLS_CIPHER_ARIA_GCM_256_IV_SIZE);
		memcpy(crypto_info_aria_gcm_256->rec_seq, cctx->rec_seq,
		       TLS_CIPHER_ARIA_GCM_256_REC_SEQ_SIZE);
		if (copy_to_user(optval,
				 crypto_info_aria_gcm_256,
				 sizeof(*crypto_info_aria_gcm_256)))
			rc = -EFAULT;
		break;
	}
	default:
	cipher_desc = get_cipher_desc(crypto_info->cipher_type);
	if (!cipher_desc || len != cipher_desc->crypto_info) {
		rc = -EINVAL;
		goto out;
	}

	memcpy(crypto_info_iv(crypto_info, cipher_desc),
	       cctx->iv + cipher_desc->salt, cipher_desc->iv);
	memcpy(crypto_info_rec_seq(crypto_info, cipher_desc),
	       cctx->rec_seq, cipher_desc->rec_seq);

	if (copy_to_user(optval, crypto_info, cipher_desc->crypto_info))
		rc = -EFAULT;

out:
	return rc;
}
@@ -696,7 +587,7 @@ static int do_tls_setsockopt_conf(struct sock *sk, sockptr_t optval,
	struct tls_crypto_info *crypto_info;
	struct tls_crypto_info *alt_crypto_info;
	struct tls_context *ctx = tls_get_ctx(sk);
	size_t optsize;
	const struct tls_cipher_desc *cipher_desc;
	int rc = 0;
	int conf;
@@ -737,46 +628,23 @@ static int do_tls_setsockopt_conf(struct sock *sk, sockptr_t optval,
		}
	}

	switch (crypto_info->cipher_type) {
	case TLS_CIPHER_AES_GCM_128:
		optsize = sizeof(struct tls12_crypto_info_aes_gcm_128);
		break;
	case TLS_CIPHER_AES_GCM_256: {
		optsize = sizeof(struct tls12_crypto_info_aes_gcm_256);
		break;
	cipher_desc = get_cipher_desc(crypto_info->cipher_type);
	if (!cipher_desc) {
		rc = -EINVAL;
		goto err_crypto_info;
	}
	case TLS_CIPHER_AES_CCM_128:
		optsize = sizeof(struct tls12_crypto_info_aes_ccm_128);
		break;
	case TLS_CIPHER_CHACHA20_POLY1305:
		optsize = sizeof(struct tls12_crypto_info_chacha20_poly1305);
		break;
	case TLS_CIPHER_SM4_GCM:
		optsize = sizeof(struct tls12_crypto_info_sm4_gcm);
		break;
	case TLS_CIPHER_SM4_CCM:
		optsize = sizeof(struct tls12_crypto_info_sm4_ccm);
		break;

	switch (crypto_info->cipher_type) {
	case TLS_CIPHER_ARIA_GCM_128:
		if (crypto_info->version != TLS_1_2_VERSION) {
			rc = -EINVAL;
			goto err_crypto_info;
		}
		optsize = sizeof(struct tls12_crypto_info_aria_gcm_128);
		break;
	case TLS_CIPHER_ARIA_GCM_256:
		if (crypto_info->version != TLS_1_2_VERSION) {
			rc = -EINVAL;
			goto err_crypto_info;
		}
		optsize = sizeof(struct tls12_crypto_info_aria_gcm_256);
		break;
	default:
		rc = -EINVAL;
		goto err_crypto_info;
	}

	if (optlen != optsize) {
	if (optlen != cipher_desc->crypto_info) {
		rc = -EINVAL;
		goto err_crypto_info;
	}
net/tls/tls_sw.c
@@ -2590,10 +2590,10 @@ int tls_set_sw_offload(struct sock *sk, struct tls_context *ctx, int tx)
	struct tls_sw_context_rx *sw_ctx_rx = NULL;
	struct cipher_context *cctx;
	struct crypto_aead **aead;
	u16 nonce_size, tag_size, iv_size, rec_seq_size, salt_size;
	struct crypto_tfm *tfm;
	char *iv, *rec_seq, *key, *salt, *cipher_name;
	size_t keysize;
	char *iv, *rec_seq, *key, *salt;
	const struct tls_cipher_desc *cipher_desc;
	u16 nonce_size;
	int rc = 0;

	if (!ctx) {
@@ -2647,148 +2647,19 @@ int tls_set_sw_offload(struct sock *sk, struct tls_context *ctx, int tx)
		aead = &sw_ctx_rx->aead_recv;
	}

	switch (crypto_info->cipher_type) {
	case TLS_CIPHER_AES_GCM_128: {
		struct tls12_crypto_info_aes_gcm_128 *gcm_128_info;

		gcm_128_info = (void *)crypto_info;
		nonce_size = TLS_CIPHER_AES_GCM_128_IV_SIZE;
		tag_size = TLS_CIPHER_AES_GCM_128_TAG_SIZE;
		iv_size = TLS_CIPHER_AES_GCM_128_IV_SIZE;
		iv = gcm_128_info->iv;
		rec_seq_size = TLS_CIPHER_AES_GCM_128_REC_SEQ_SIZE;
		rec_seq = gcm_128_info->rec_seq;
		keysize = TLS_CIPHER_AES_GCM_128_KEY_SIZE;
		key = gcm_128_info->key;
		salt = gcm_128_info->salt;
		salt_size = TLS_CIPHER_AES_GCM_128_SALT_SIZE;
		cipher_name = "gcm(aes)";
		break;
	}
	case TLS_CIPHER_AES_GCM_256: {
		struct tls12_crypto_info_aes_gcm_256 *gcm_256_info;

		gcm_256_info = (void *)crypto_info;
		nonce_size = TLS_CIPHER_AES_GCM_256_IV_SIZE;
		tag_size = TLS_CIPHER_AES_GCM_256_TAG_SIZE;
		iv_size = TLS_CIPHER_AES_GCM_256_IV_SIZE;
		iv = gcm_256_info->iv;
		rec_seq_size = TLS_CIPHER_AES_GCM_256_REC_SEQ_SIZE;
		rec_seq = gcm_256_info->rec_seq;
		keysize = TLS_CIPHER_AES_GCM_256_KEY_SIZE;
		key = gcm_256_info->key;
		salt = gcm_256_info->salt;
		salt_size = TLS_CIPHER_AES_GCM_256_SALT_SIZE;
		cipher_name = "gcm(aes)";
		break;
	}
	case TLS_CIPHER_AES_CCM_128: {
		struct tls12_crypto_info_aes_ccm_128 *ccm_128_info;

		ccm_128_info = (void *)crypto_info;
		nonce_size = TLS_CIPHER_AES_CCM_128_IV_SIZE;
		tag_size = TLS_CIPHER_AES_CCM_128_TAG_SIZE;
		iv_size = TLS_CIPHER_AES_CCM_128_IV_SIZE;
		iv = ccm_128_info->iv;
		rec_seq_size = TLS_CIPHER_AES_CCM_128_REC_SEQ_SIZE;
		rec_seq = ccm_128_info->rec_seq;
		keysize = TLS_CIPHER_AES_CCM_128_KEY_SIZE;
		key = ccm_128_info->key;
		salt = ccm_128_info->salt;
		salt_size = TLS_CIPHER_AES_CCM_128_SALT_SIZE;
		cipher_name = "ccm(aes)";
		break;
	}
	case TLS_CIPHER_CHACHA20_POLY1305: {
		struct tls12_crypto_info_chacha20_poly1305 *chacha20_poly1305_info;

		chacha20_poly1305_info = (void *)crypto_info;
		nonce_size = 0;
		tag_size = TLS_CIPHER_CHACHA20_POLY1305_TAG_SIZE;
		iv_size = TLS_CIPHER_CHACHA20_POLY1305_IV_SIZE;
		iv = chacha20_poly1305_info->iv;
		rec_seq_size = TLS_CIPHER_CHACHA20_POLY1305_REC_SEQ_SIZE;
		rec_seq = chacha20_poly1305_info->rec_seq;
		keysize = TLS_CIPHER_CHACHA20_POLY1305_KEY_SIZE;
		key = chacha20_poly1305_info->key;
		salt = chacha20_poly1305_info->salt;
		salt_size = TLS_CIPHER_CHACHA20_POLY1305_SALT_SIZE;
		cipher_name = "rfc7539(chacha20,poly1305)";
		break;
	}
	case TLS_CIPHER_SM4_GCM: {
		struct tls12_crypto_info_sm4_gcm *sm4_gcm_info;

		sm4_gcm_info = (void *)crypto_info;
		nonce_size = TLS_CIPHER_SM4_GCM_IV_SIZE;
		tag_size = TLS_CIPHER_SM4_GCM_TAG_SIZE;
		iv_size = TLS_CIPHER_SM4_GCM_IV_SIZE;
		iv = sm4_gcm_info->iv;
		rec_seq_size = TLS_CIPHER_SM4_GCM_REC_SEQ_SIZE;
		rec_seq = sm4_gcm_info->rec_seq;
		keysize = TLS_CIPHER_SM4_GCM_KEY_SIZE;
		key = sm4_gcm_info->key;
		salt = sm4_gcm_info->salt;
		salt_size = TLS_CIPHER_SM4_GCM_SALT_SIZE;
		cipher_name = "gcm(sm4)";
		break;
	}
	case TLS_CIPHER_SM4_CCM: {
		struct tls12_crypto_info_sm4_ccm *sm4_ccm_info;

		sm4_ccm_info = (void *)crypto_info;
		nonce_size = TLS_CIPHER_SM4_CCM_IV_SIZE;
		tag_size = TLS_CIPHER_SM4_CCM_TAG_SIZE;
		iv_size = TLS_CIPHER_SM4_CCM_IV_SIZE;
		iv = sm4_ccm_info->iv;
		rec_seq_size = TLS_CIPHER_SM4_CCM_REC_SEQ_SIZE;
		rec_seq = sm4_ccm_info->rec_seq;
		keysize = TLS_CIPHER_SM4_CCM_KEY_SIZE;
		key = sm4_ccm_info->key;
		salt = sm4_ccm_info->salt;
		salt_size = TLS_CIPHER_SM4_CCM_SALT_SIZE;
		cipher_name = "ccm(sm4)";
		break;
	}
	case TLS_CIPHER_ARIA_GCM_128: {
		struct tls12_crypto_info_aria_gcm_128 *aria_gcm_128_info;

		aria_gcm_128_info = (void *)crypto_info;
		nonce_size = TLS_CIPHER_ARIA_GCM_128_IV_SIZE;
		tag_size = TLS_CIPHER_ARIA_GCM_128_TAG_SIZE;
		iv_size = TLS_CIPHER_ARIA_GCM_128_IV_SIZE;
		iv = aria_gcm_128_info->iv;
		rec_seq_size = TLS_CIPHER_ARIA_GCM_128_REC_SEQ_SIZE;
		rec_seq = aria_gcm_128_info->rec_seq;
		keysize = TLS_CIPHER_ARIA_GCM_128_KEY_SIZE;
		key = aria_gcm_128_info->key;
		salt = aria_gcm_128_info->salt;
		salt_size = TLS_CIPHER_ARIA_GCM_128_SALT_SIZE;
		cipher_name = "gcm(aria)";
		break;
	}
	case TLS_CIPHER_ARIA_GCM_256: {
		struct tls12_crypto_info_aria_gcm_256 *gcm_256_info;

		gcm_256_info = (void *)crypto_info;
		nonce_size = TLS_CIPHER_ARIA_GCM_256_IV_SIZE;
		tag_size = TLS_CIPHER_ARIA_GCM_256_TAG_SIZE;
		iv_size = TLS_CIPHER_ARIA_GCM_256_IV_SIZE;
		iv = gcm_256_info->iv;
		rec_seq_size = TLS_CIPHER_ARIA_GCM_256_REC_SEQ_SIZE;
		rec_seq = gcm_256_info->rec_seq;
		keysize = TLS_CIPHER_ARIA_GCM_256_KEY_SIZE;
		key = gcm_256_info->key;
		salt = gcm_256_info->salt;
		salt_size = TLS_CIPHER_ARIA_GCM_256_SALT_SIZE;
		cipher_name = "gcm(aria)";
		break;
	}
	default:
	cipher_desc = get_cipher_desc(crypto_info->cipher_type);
	if (!cipher_desc) {
		rc = -EINVAL;
		goto free_priv;
	}

	nonce_size = cipher_desc->nonce;

	iv = crypto_info_iv(crypto_info, cipher_desc);
	key = crypto_info_key(crypto_info, cipher_desc);
	salt = crypto_info_salt(crypto_info, cipher_desc);
	rec_seq = crypto_info_rec_seq(crypto_info, cipher_desc);

	if (crypto_info->version == TLS_1_3_VERSION) {
		nonce_size = 0;
		prot->aad_size = TLS_HEADER_SIZE;
@@ -2799,9 +2670,7 @@ int tls_set_sw_offload(struct sock *sk, struct tls_context *ctx, int tx)
	}

	/* Sanity-check the sizes for stack allocations. */
	if (iv_size > MAX_IV_SIZE || nonce_size > MAX_IV_SIZE ||
	    rec_seq_size > TLS_MAX_REC_SEQ_SIZE || tag_size != TLS_TAG_SIZE ||
	    prot->aad_size > TLS_MAX_AAD_SIZE) {
	if (nonce_size > MAX_IV_SIZE || prot->aad_size > TLS_MAX_AAD_SIZE) {
		rc = -EINVAL;
		goto free_priv;
	}
@@ -2809,28 +2678,29 @@ int tls_set_sw_offload(struct sock *sk, struct tls_context *ctx, int tx)
	prot->version = crypto_info->version;
	prot->cipher_type = crypto_info->cipher_type;
	prot->prepend_size = TLS_HEADER_SIZE + nonce_size;
	prot->tag_size = tag_size;
	prot->tag_size = cipher_desc->tag;
	prot->overhead_size = prot->prepend_size +
			      prot->tag_size + prot->tail_size;
	prot->iv_size = iv_size;
	prot->salt_size = salt_size;
	cctx->iv = kmalloc(iv_size + salt_size, GFP_KERNEL);
	prot->iv_size = cipher_desc->iv;
	prot->salt_size = cipher_desc->salt;
	cctx->iv = kmalloc(cipher_desc->iv + cipher_desc->salt, GFP_KERNEL);
	if (!cctx->iv) {
		rc = -ENOMEM;
		goto free_priv;
	}
	/* Note: 128 & 256 bit salt are the same size */
	prot->rec_seq_size = rec_seq_size;
	memcpy(cctx->iv, salt, salt_size);
	memcpy(cctx->iv + salt_size, iv, iv_size);
	cctx->rec_seq = kmemdup(rec_seq, rec_seq_size, GFP_KERNEL);
	prot->rec_seq_size = cipher_desc->rec_seq;
	memcpy(cctx->iv, salt, cipher_desc->salt);
	memcpy(cctx->iv + cipher_desc->salt, iv, cipher_desc->iv);

	cctx->rec_seq = kmemdup(rec_seq, cipher_desc->rec_seq, GFP_KERNEL);
	if (!cctx->rec_seq) {
		rc = -ENOMEM;
		goto free_iv;
	}

	if (!*aead) {
		*aead = crypto_alloc_aead(cipher_name, 0, 0);
		*aead = crypto_alloc_aead(cipher_desc->cipher_name, 0, 0);
		if (IS_ERR(*aead)) {
			rc = PTR_ERR(*aead);
			*aead = NULL;
@@ -2840,8 +2710,7 @@ int tls_set_sw_offload(struct sock *sk, struct tls_context *ctx, int tx)
	ctx->push_pending_record = tls_sw_push_pending_record;

	rc = crypto_aead_setkey(*aead, key, keysize);

	rc = crypto_aead_setkey(*aead, key, cipher_desc->key);
	if (rc)
		goto free_aead;
@@ -51,3 +51,4 @@ CONFIG_AMT=m
CONFIG_VXLAN=m
CONFIG_IP_SCTP=m
CONFIG_NETFILTER_XT_MATCH_POLICY=m
CONFIG_CRYPTO_ARIA=y
@@ -30,12 +30,15 @@ static int fips_enabled;
struct tls_crypto_info_keys {
	union {
		struct tls_crypto_info crypto_info;
		struct tls12_crypto_info_aes_gcm_128 aes128;
		struct tls12_crypto_info_chacha20_poly1305 chacha20;
		struct tls12_crypto_info_sm4_gcm sm4gcm;
		struct tls12_crypto_info_sm4_ccm sm4ccm;
		struct tls12_crypto_info_aes_ccm_128 aesccm128;
		struct tls12_crypto_info_aes_gcm_256 aesgcm256;
		struct tls12_crypto_info_aria_gcm_128 ariagcm128;
		struct tls12_crypto_info_aria_gcm_256 ariagcm256;
	};
	size_t len;
};
@@ -76,6 +79,16 @@ static void tls_crypto_info_init(uint16_t tls_version, uint16_t cipher_type,
		tls12->aesgcm256.info.version = tls_version;
		tls12->aesgcm256.info.cipher_type = cipher_type;
		break;
	case TLS_CIPHER_ARIA_GCM_128:
		tls12->len = sizeof(struct tls12_crypto_info_aria_gcm_128);
		tls12->ariagcm128.info.version = tls_version;
		tls12->ariagcm128.info.cipher_type = cipher_type;
		break;
	case TLS_CIPHER_ARIA_GCM_256:
		tls12->len = sizeof(struct tls12_crypto_info_aria_gcm_256);
		tls12->ariagcm256.info.version = tls_version;
		tls12->ariagcm256.info.cipher_type = cipher_type;
		break;
	default:
		break;
	}
@@ -228,6 +241,31 @@ TEST_F(tls_basic, base_base)
	EXPECT_EQ(memcmp(buf, test_str, send_len), 0);
};

TEST_F(tls_basic, bad_cipher)
{
	struct tls_crypto_info_keys tls12;

	tls12.crypto_info.version = 200;
	tls12.crypto_info.cipher_type = TLS_CIPHER_AES_GCM_128;
	EXPECT_EQ(setsockopt(self->fd, SOL_TLS, TLS_TX, &tls12, sizeof(struct tls12_crypto_info_aes_gcm_128)), -1);

	tls12.crypto_info.version = TLS_1_2_VERSION;
	tls12.crypto_info.cipher_type = 50;
	EXPECT_EQ(setsockopt(self->fd, SOL_TLS, TLS_TX, &tls12, sizeof(struct tls12_crypto_info_aes_gcm_128)), -1);

	tls12.crypto_info.version = TLS_1_2_VERSION;
	tls12.crypto_info.cipher_type = 59;
	EXPECT_EQ(setsockopt(self->fd, SOL_TLS, TLS_TX, &tls12, sizeof(struct tls12_crypto_info_aes_gcm_128)), -1);

	tls12.crypto_info.version = TLS_1_2_VERSION;
	tls12.crypto_info.cipher_type = 10;
	EXPECT_EQ(setsockopt(self->fd, SOL_TLS, TLS_TX, &tls12, sizeof(struct tls12_crypto_info_aes_gcm_128)), -1);

	tls12.crypto_info.version = TLS_1_2_VERSION;
	tls12.crypto_info.cipher_type = 70;
	EXPECT_EQ(setsockopt(self->fd, SOL_TLS, TLS_TX, &tls12, sizeof(struct tls12_crypto_info_aes_gcm_128)), -1);
}

FIXTURE(tls)
{
	int fd, cfd;
@@ -312,6 +350,18 @@ FIXTURE_VARIANT_ADD(tls, 13_nopad)
	.nopad = true,
};

FIXTURE_VARIANT_ADD(tls, 12_aria_gcm)
{
	.tls_version = TLS_1_2_VERSION,
	.cipher_type = TLS_CIPHER_ARIA_GCM_128,
};

FIXTURE_VARIANT_ADD(tls, 12_aria_gcm_256)
{
	.tls_version = TLS_1_2_VERSION,
	.cipher_type = TLS_CIPHER_ARIA_GCM_256,
};

FIXTURE_SETUP(tls)
{
	struct tls_crypto_info_keys tls12;
@@ -1472,6 +1522,40 @@ TEST_F(tls, shutdown_reuse)
	EXPECT_EQ(errno, EISCONN);
}

TEST_F(tls, getsockopt)
{
	struct tls_crypto_info_keys expect, get;
	socklen_t len;

	/* get only the version/cipher */
	len = sizeof(struct tls_crypto_info);
	memrnd(&get, sizeof(get));
	EXPECT_EQ(getsockopt(self->fd, SOL_TLS, TLS_TX, &get, &len), 0);
	EXPECT_EQ(len, sizeof(struct tls_crypto_info));
	EXPECT_EQ(get.crypto_info.version, variant->tls_version);
	EXPECT_EQ(get.crypto_info.cipher_type, variant->cipher_type);

	/* get the full crypto_info */
	tls_crypto_info_init(variant->tls_version, variant->cipher_type, &expect);
	len = expect.len;
	memrnd(&get, sizeof(get));
	EXPECT_EQ(getsockopt(self->fd, SOL_TLS, TLS_TX, &get, &len), 0);
	EXPECT_EQ(len, expect.len);
	EXPECT_EQ(get.crypto_info.version, variant->tls_version);
	EXPECT_EQ(get.crypto_info.cipher_type, variant->cipher_type);
	EXPECT_EQ(memcmp(&get, &expect, expect.len), 0);

	/* short get should fail */
	len = sizeof(struct tls_crypto_info) - 1;
	EXPECT_EQ(getsockopt(self->fd, SOL_TLS, TLS_TX, &get, &len), -1);
	EXPECT_EQ(errno, EINVAL);

	/* partial get of the cipher data should fail */
	len = expect.len - 1;
	EXPECT_EQ(getsockopt(self->fd, SOL_TLS, TLS_TX, &get, &len), -1);
	EXPECT_EQ(errno, EINVAL);
}

FIXTURE(tls_err)
{
	int fd, cfd;