aboutsummaryrefslogtreecommitdiffstats
path: root/target/linux/generic/backport-5.4/080-wireguard-0033-crypto-lib-chacha20poly1305-reimplement-crypt_from_s.patch
diff options
context:
space:
mode:
authorJason A. Donenfeld <Jason@zx2c4.com>2021-02-19 14:29:04 +0100
committerDavid Bauer <mail@david-bauer.net>2021-02-26 20:41:01 +0100
commit3888fa78802354ab7bbd19b7d061fd80a16ce06b (patch)
tree2225a6313cb6482f0cb9c09df662a0d44197350e /target/linux/generic/backport-5.4/080-wireguard-0033-crypto-lib-chacha20poly1305-reimplement-crypt_from_s.patch
parent7d4143234c4dfdd050ebc64ec8231f9d81ea65af (diff)
downloadupstream-3888fa78802354ab7bbd19b7d061fd80a16ce06b.tar.gz
upstream-3888fa78802354ab7bbd19b7d061fd80a16ce06b.tar.bz2
upstream-3888fa78802354ab7bbd19b7d061fd80a16ce06b.zip
kernel: 5.4: import wireguard backport
Rather than using the clunky, old, slower wireguard-linux-compat out of tree module, this commit does a patch-by-patch backport of upstream's wireguard to 5.4. This specific backport is in widespread use, being part of SUSE's enterprise kernel, Oracle's enterprise kernel, Google's Android kernel, Gentoo's distro kernel, and probably more I've forgotten about. It's definitely the "more proper" way of adding wireguard to a kernel than the ugly compat.h hell of the wireguard-linux-compat repo. And most importantly for OpenWrt, it allows using the same module configuration code for 5.10 as for 5.4, with no need for bifurcation. These patches are from the backport tree which is maintained in the open here: https://git.zx2c4.com/wireguard-linux/log/?h=backport-5.4.y I'll be sending PRs to update this as needed. Signed-off-by: Jason A. Donenfeld <Jason@zx2c4.com>
Diffstat (limited to 'target/linux/generic/backport-5.4/080-wireguard-0033-crypto-lib-chacha20poly1305-reimplement-crypt_from_s.patch')
-rw-r--r--target/linux/generic/backport-5.4/080-wireguard-0033-crypto-lib-chacha20poly1305-reimplement-crypt_from_s.patch295
1 files changed, 295 insertions, 0 deletions
diff --git a/target/linux/generic/backport-5.4/080-wireguard-0033-crypto-lib-chacha20poly1305-reimplement-crypt_from_s.patch b/target/linux/generic/backport-5.4/080-wireguard-0033-crypto-lib-chacha20poly1305-reimplement-crypt_from_s.patch
new file mode 100644
index 0000000000..a7811eb26a
--- /dev/null
+++ b/target/linux/generic/backport-5.4/080-wireguard-0033-crypto-lib-chacha20poly1305-reimplement-crypt_from_s.patch
@@ -0,0 +1,295 @@
+From b7af0c213ba3afe27da21845419756aec63b43b4 Mon Sep 17 00:00:00 2001
+From: Ard Biesheuvel <ardb@kernel.org>
+Date: Fri, 8 Nov 2019 13:22:40 +0100
+Subject: [PATCH 033/124] crypto: lib/chacha20poly1305 - reimplement
+ crypt_from_sg() routine
+
+commit d95312a3ccc0cd544d374be2fc45aeaa803e5fd9 upstream.
+
+Reimplement the library routines to perform chacha20poly1305 en/decryption
+on scatterlists, without [ab]using the [deprecated] blkcipher interface,
+which is rather heavyweight and does things we don't really need.
+
+Instead, we use the sg_miter API in a novel and clever way, to iterate
+over the scatterlist in-place (i.e., source == destination, which is the
+only way this library is expected to be used). That way, we don't have to
+iterate over two scatterlists in parallel.
+
+Another optimization is that, instead of relying on the blkcipher walker
+to present the input in suitable chunks, we recognize that ChaCha is a
+streamcipher, and so we can simply deal with partial blocks by keeping a
+block of cipherstream on the stack and use crypto_xor() to mix it with
+the in/output.
+
+Finally, we omit the scatterwalk_and_copy() call if the last element of
+the scatterlist covers the MAC as well (which is the common case),
+avoiding the need to walk the scatterlist and kmap() the page twice.
+
+Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
+Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
+Signed-off-by: Jason A. Donenfeld <Jason@zx2c4.com>
+---
+ include/crypto/chacha20poly1305.h | 11 ++
+ lib/crypto/chacha20poly1305-selftest.c | 45 ++++++++
+ lib/crypto/chacha20poly1305.c | 150 +++++++++++++++++++++++++
+ 3 files changed, 206 insertions(+)
+
+--- a/include/crypto/chacha20poly1305.h
++++ b/include/crypto/chacha20poly1305.h
+@@ -7,6 +7,7 @@
+ #define __CHACHA20POLY1305_H
+
+ #include <linux/types.h>
++#include <linux/scatterlist.h>
+
+ enum chacha20poly1305_lengths {
+ XCHACHA20POLY1305_NONCE_SIZE = 24,
+@@ -34,4 +35,14 @@ bool __must_check xchacha20poly1305_decr
+ const size_t ad_len, const u8 nonce[XCHACHA20POLY1305_NONCE_SIZE],
+ const u8 key[CHACHA20POLY1305_KEY_SIZE]);
+
++bool chacha20poly1305_encrypt_sg_inplace(struct scatterlist *src, size_t src_len,
++ const u8 *ad, const size_t ad_len,
++ const u64 nonce,
++ const u8 key[CHACHA20POLY1305_KEY_SIZE]);
++
++bool chacha20poly1305_decrypt_sg_inplace(struct scatterlist *src, size_t src_len,
++ const u8 *ad, const size_t ad_len,
++ const u64 nonce,
++ const u8 key[CHACHA20POLY1305_KEY_SIZE]);
++
+ #endif /* __CHACHA20POLY1305_H */
+--- a/lib/crypto/chacha20poly1305-selftest.c
++++ b/lib/crypto/chacha20poly1305-selftest.c
+@@ -7250,6 +7250,7 @@ bool __init chacha20poly1305_selftest(vo
+ enum { MAXIMUM_TEST_BUFFER_LEN = 1UL << 12 };
+ size_t i;
+ u8 *computed_output = NULL, *heap_src = NULL;
++ struct scatterlist sg_src;
+ bool success = true, ret;
+
+ heap_src = kmalloc(MAXIMUM_TEST_BUFFER_LEN, GFP_KERNEL);
+@@ -7280,6 +7281,29 @@ bool __init chacha20poly1305_selftest(vo
+ }
+ }
+
++ for (i = 0; i < ARRAY_SIZE(chacha20poly1305_enc_vectors); ++i) {
++ if (chacha20poly1305_enc_vectors[i].nlen != 8)
++ continue;
++ memcpy(heap_src, chacha20poly1305_enc_vectors[i].input,
++ chacha20poly1305_enc_vectors[i].ilen);
++ sg_init_one(&sg_src, heap_src,
++ chacha20poly1305_enc_vectors[i].ilen + POLY1305_DIGEST_SIZE);
++ chacha20poly1305_encrypt_sg_inplace(&sg_src,
++ chacha20poly1305_enc_vectors[i].ilen,
++ chacha20poly1305_enc_vectors[i].assoc,
++ chacha20poly1305_enc_vectors[i].alen,
++ get_unaligned_le64(chacha20poly1305_enc_vectors[i].nonce),
++ chacha20poly1305_enc_vectors[i].key);
++ if (memcmp(heap_src,
++ chacha20poly1305_enc_vectors[i].output,
++ chacha20poly1305_enc_vectors[i].ilen +
++ POLY1305_DIGEST_SIZE)) {
++ pr_err("chacha20poly1305 sg encryption self-test %zu: FAIL\n",
++ i + 1);
++ success = false;
++ }
++ }
++
+ for (i = 0; i < ARRAY_SIZE(chacha20poly1305_dec_vectors); ++i) {
+ memset(computed_output, 0, MAXIMUM_TEST_BUFFER_LEN);
+ ret = chacha20poly1305_decrypt(computed_output,
+@@ -7301,6 +7325,27 @@ bool __init chacha20poly1305_selftest(vo
+ }
+ }
+
++ for (i = 0; i < ARRAY_SIZE(chacha20poly1305_dec_vectors); ++i) {
++ memcpy(heap_src, chacha20poly1305_dec_vectors[i].input,
++ chacha20poly1305_dec_vectors[i].ilen);
++ sg_init_one(&sg_src, heap_src,
++ chacha20poly1305_dec_vectors[i].ilen);
++ ret = chacha20poly1305_decrypt_sg_inplace(&sg_src,
++ chacha20poly1305_dec_vectors[i].ilen,
++ chacha20poly1305_dec_vectors[i].assoc,
++ chacha20poly1305_dec_vectors[i].alen,
++ get_unaligned_le64(chacha20poly1305_dec_vectors[i].nonce),
++ chacha20poly1305_dec_vectors[i].key);
++ if (!decryption_success(ret,
++ chacha20poly1305_dec_vectors[i].failure,
++ memcmp(heap_src, chacha20poly1305_dec_vectors[i].output,
++ chacha20poly1305_dec_vectors[i].ilen -
++ POLY1305_DIGEST_SIZE))) {
++ pr_err("chacha20poly1305 sg decryption self-test %zu: FAIL\n",
++ i + 1);
++ success = false;
++ }
++ }
+
+ for (i = 0; i < ARRAY_SIZE(xchacha20poly1305_enc_vectors); ++i) {
+ memset(computed_output, 0, MAXIMUM_TEST_BUFFER_LEN);
+--- a/lib/crypto/chacha20poly1305.c
++++ b/lib/crypto/chacha20poly1305.c
+@@ -11,6 +11,7 @@
+ #include <crypto/chacha20poly1305.h>
+ #include <crypto/chacha.h>
+ #include <crypto/poly1305.h>
++#include <crypto/scatterwalk.h>
+
+ #include <asm/unaligned.h>
+ #include <linux/kernel.h>
+@@ -205,6 +206,155 @@ bool xchacha20poly1305_decrypt(u8 *dst,
+ }
+ EXPORT_SYMBOL(xchacha20poly1305_decrypt);
+
++static
++bool chacha20poly1305_crypt_sg_inplace(struct scatterlist *src,
++ const size_t src_len,
++ const u8 *ad, const size_t ad_len,
++ const u64 nonce,
++ const u8 key[CHACHA20POLY1305_KEY_SIZE],
++ int encrypt)
++{
++ const u8 *pad0 = page_address(ZERO_PAGE(0));
++ struct poly1305_desc_ctx poly1305_state;
++ u32 chacha_state[CHACHA_STATE_WORDS];
++ struct sg_mapping_iter miter;
++ size_t partial = 0;
++ unsigned int flags;
++ bool ret = true;
++ int sl;
++ union {
++ struct {
++ u32 k[CHACHA_KEY_WORDS];
++ __le64 iv[2];
++ };
++ u8 block0[POLY1305_KEY_SIZE];
++ u8 chacha_stream[CHACHA_BLOCK_SIZE];
++ struct {
++ u8 mac[2][POLY1305_DIGEST_SIZE];
++ };
++ __le64 lens[2];
++ } b __aligned(16);
++
++ chacha_load_key(b.k, key);
++
++ b.iv[0] = 0;
++ b.iv[1] = cpu_to_le64(nonce);
++
++ chacha_init(chacha_state, b.k, (u8 *)b.iv);
++ chacha_crypt(chacha_state, b.block0, pad0, sizeof(b.block0), 20);
++ poly1305_init(&poly1305_state, b.block0);
++
++ if (unlikely(ad_len)) {
++ poly1305_update(&poly1305_state, ad, ad_len);
++ if (ad_len & 0xf)
++ poly1305_update(&poly1305_state, pad0, 0x10 - (ad_len & 0xf));
++ }
++
++ flags = SG_MITER_TO_SG;
++ if (!preemptible())
++ flags |= SG_MITER_ATOMIC;
++
++ sg_miter_start(&miter, src, sg_nents(src), flags);
++
++ for (sl = src_len; sl > 0 && sg_miter_next(&miter); sl -= miter.length) {
++ u8 *addr = miter.addr;
++ size_t length = min_t(size_t, sl, miter.length);
++
++ if (!encrypt)
++ poly1305_update(&poly1305_state, addr, length);
++
++ if (unlikely(partial)) {
++ size_t l = min(length, CHACHA_BLOCK_SIZE - partial);
++
++ crypto_xor(addr, b.chacha_stream + partial, l);
++ partial = (partial + l) & (CHACHA_BLOCK_SIZE - 1);
++
++ addr += l;
++ length -= l;
++ }
++
++ if (likely(length >= CHACHA_BLOCK_SIZE || length == sl)) {
++ size_t l = length;
++
++ if (unlikely(length < sl))
++ l &= ~(CHACHA_BLOCK_SIZE - 1);
++ chacha_crypt(chacha_state, addr, addr, l, 20);
++ addr += l;
++ length -= l;
++ }
++
++ if (unlikely(length > 0)) {
++ chacha_crypt(chacha_state, b.chacha_stream, pad0,
++ CHACHA_BLOCK_SIZE, 20);
++ crypto_xor(addr, b.chacha_stream, length);
++ partial = length;
++ }
++
++ if (encrypt)
++ poly1305_update(&poly1305_state, miter.addr,
++ min_t(size_t, sl, miter.length));
++ }
++
++ if (src_len & 0xf)
++ poly1305_update(&poly1305_state, pad0, 0x10 - (src_len & 0xf));
++
++ b.lens[0] = cpu_to_le64(ad_len);
++ b.lens[1] = cpu_to_le64(src_len);
++ poly1305_update(&poly1305_state, (u8 *)b.lens, sizeof(b.lens));
++
++ if (likely(sl <= -POLY1305_DIGEST_SIZE)) {
++ if (encrypt) {
++ poly1305_final(&poly1305_state,
++ miter.addr + miter.length + sl);
++ ret = true;
++ } else {
++ poly1305_final(&poly1305_state, b.mac[0]);
++ ret = !crypto_memneq(b.mac[0],
++ miter.addr + miter.length + sl,
++ POLY1305_DIGEST_SIZE);
++ }
++ }
++
++ sg_miter_stop(&miter);
++
++ if (unlikely(sl > -POLY1305_DIGEST_SIZE)) {
++ poly1305_final(&poly1305_state, b.mac[1]);
++ scatterwalk_map_and_copy(b.mac[encrypt], src, src_len,
++ sizeof(b.mac[1]), encrypt);
++ ret = encrypt ||
++ !crypto_memneq(b.mac[0], b.mac[1], POLY1305_DIGEST_SIZE);
++ }
++
++ memzero_explicit(chacha_state, sizeof(chacha_state));
++ memzero_explicit(&b, sizeof(b));
++
++ return ret;
++}
++
++bool chacha20poly1305_encrypt_sg_inplace(struct scatterlist *src, size_t src_len,
++ const u8 *ad, const size_t ad_len,
++ const u64 nonce,
++ const u8 key[CHACHA20POLY1305_KEY_SIZE])
++{
++ return chacha20poly1305_crypt_sg_inplace(src, src_len, ad, ad_len,
++ nonce, key, 1);
++}
++EXPORT_SYMBOL(chacha20poly1305_encrypt_sg_inplace);
++
++bool chacha20poly1305_decrypt_sg_inplace(struct scatterlist *src, size_t src_len,
++ const u8 *ad, const size_t ad_len,
++ const u64 nonce,
++ const u8 key[CHACHA20POLY1305_KEY_SIZE])
++{
++ if (unlikely(src_len < POLY1305_DIGEST_SIZE))
++ return false;
++
++ return chacha20poly1305_crypt_sg_inplace(src,
++ src_len - POLY1305_DIGEST_SIZE,
++ ad, ad_len, nonce, key, 0);
++}
++EXPORT_SYMBOL(chacha20poly1305_decrypt_sg_inplace);
++
+ static int __init mod_init(void)
+ {
+ if (!IS_ENABLED(CONFIG_CRYPTO_MANAGER_DISABLE_TESTS) &&