author     Linus Torvalds <torvalds@linux-foundation.org>   2025-01-24 07:48:10 -0800
committer  Linus Torvalds <torvalds@linux-foundation.org>   2025-01-24 07:48:10 -0800
commit     454cb97726fe62a04b187a0d631ec0a69f6b713a (patch)
tree       16dae08192b97960398b9cb9cc92439fb1b7e4a6 /lib
parent     ae2d4fc540cd27d667d10597b6ad8cc4c6ce622a (diff)
parent     9d4f8e54cef2c42e23ef258833dbd06a1eaff89b (diff)
Merge tag 'v6.14-p1' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
Pull crypto updates from Herbert Xu:
"API:
- Remove physical address skcipher walking
- Fix boot-up self-test race
Algorithms:
- Optimisations for x86/aes-gcm
- Optimisations for x86/aes-xts
- Remove VMAC
- Remove keywrap
Drivers:
- Remove n2
Others:
- Fixes for padata UAF
- Fix potential rhashtable deadlock by moving schedule_work outside
lock"
* tag 'v6.14-p1' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (75 commits)
rhashtable: Fix rhashtable_try_insert test
dt-bindings: crypto: qcom,inline-crypto-engine: Document the SM8750 ICE
dt-bindings: crypto: qcom,prng: Document SM8750 RNG
dt-bindings: crypto: qcom-qce: Document the SM8750 crypto engine
crypto: asymmetric_keys - Remove unused key_being_used_for[]
padata: avoid UAF for reorder_work
padata: fix UAF in padata_reorder
padata: add pd get/put refcnt helper
crypto: skcipher - call cond_resched() directly
crypto: skcipher - optimize initializing skcipher_walk fields
crypto: skcipher - clean up initialization of skcipher_walk::flags
crypto: skcipher - fold skcipher_walk_skcipher() into skcipher_walk_virt()
crypto: skcipher - remove redundant check for SKCIPHER_WALK_SLOW
crypto: skcipher - remove redundant clamping to page size
crypto: skcipher - remove unnecessary page alignment of bounce buffer
crypto: skcipher - document skcipher_walk_done() and rename some vars
crypto: omap - switch from scatter_walk to plain offset
crypto: powerpc/p10-aes-gcm - simplify handling of linear associated data
crypto: bcm - Drop unused setting of local 'ptr' variable
crypto: hisilicon/qm - support new function communication
...
Diffstat (limited to 'lib')
-rw-r--r--  lib/crypto/aesgcm.c   |  2
-rw-r--r--  lib/crypto/gf128mul.c | 75
-rw-r--r--  lib/rhashtable.c      | 12
3 files changed, 9 insertions, 80 deletions
diff --git a/lib/crypto/aesgcm.c b/lib/crypto/aesgcm.c
index 6bba6473fdf3..902e49410aaf 100644
--- a/lib/crypto/aesgcm.c
+++ b/lib/crypto/aesgcm.c
@@ -697,7 +697,7 @@ static int __init libaesgcm_init(void)
                 u8 tagbuf[AES_BLOCK_SIZE];
                 int plen = aesgcm_tv[i].plen;
                 struct aesgcm_ctx ctx;
-                u8 buf[sizeof(ptext12)];
+                static u8 buf[sizeof(ptext12)];

                 if (aesgcm_expandkey(&ctx, aesgcm_tv[i].key, aesgcm_tv[i].klen,
                                      aesgcm_tv[i].clen - plen)) {
diff --git a/lib/crypto/gf128mul.c b/lib/crypto/gf128mul.c
index 8f8c45e0cdcf..fbe72cb3453a 100644
--- a/lib/crypto/gf128mul.c
+++ b/lib/crypto/gf128mul.c
@@ -225,44 +225,6 @@ void gf128mul_lle(be128 *r, const be128 *b)
 }
 EXPORT_SYMBOL(gf128mul_lle);

-void gf128mul_bbe(be128 *r, const be128 *b)
-{
-        be128 p[8];
-        int i;
-
-        p[0] = *r;
-        for (i = 0; i < 7; ++i)
-                gf128mul_x_bbe(&p[i + 1], &p[i]);
-
-        memset(r, 0, sizeof(*r));
-        for (i = 0;;) {
-                u8 ch = ((u8 *)b)[i];
-
-                if (ch & 0x80)
-                        be128_xor(r, r, &p[7]);
-                if (ch & 0x40)
-                        be128_xor(r, r, &p[6]);
-                if (ch & 0x20)
-                        be128_xor(r, r, &p[5]);
-                if (ch & 0x10)
-                        be128_xor(r, r, &p[4]);
-                if (ch & 0x08)
-                        be128_xor(r, r, &p[3]);
-                if (ch & 0x04)
-                        be128_xor(r, r, &p[2]);
-                if (ch & 0x02)
-                        be128_xor(r, r, &p[1]);
-                if (ch & 0x01)
-                        be128_xor(r, r, &p[0]);
-
-                if (++i >= 16)
-                        break;
-
-                gf128mul_x8_bbe(r);
-        }
-}
-EXPORT_SYMBOL(gf128mul_bbe);
-
 /* This version uses 64k bytes of table space.
    A 16 byte buffer has to be multiplied by a 16 byte key
    value in GF(2^128). If we consider a GF(2^128) value in
@@ -380,28 +342,6 @@ out:
 }
 EXPORT_SYMBOL(gf128mul_init_4k_lle);

-struct gf128mul_4k *gf128mul_init_4k_bbe(const be128 *g)
-{
-        struct gf128mul_4k *t;
-        int j, k;
-
-        t = kzalloc(sizeof(*t), GFP_KERNEL);
-        if (!t)
-                goto out;
-
-        t->t[1] = *g;
-        for (j = 1; j <= 64; j <<= 1)
-                gf128mul_x_bbe(&t->t[j + j], &t->t[j]);
-
-        for (j = 2; j < 256; j += j)
-                for (k = 1; k < j; ++k)
-                        be128_xor(&t->t[j + k], &t->t[j], &t->t[k]);
-
-out:
-        return t;
-}
-EXPORT_SYMBOL(gf128mul_init_4k_bbe);
-
 void gf128mul_4k_lle(be128 *a, const struct gf128mul_4k *t)
 {
         u8 *ap = (u8 *)a;
@@ -417,20 +357,5 @@ void gf128mul_4k_lle(be128 *a, const struct gf128mul_4k *t)
 }
 EXPORT_SYMBOL(gf128mul_4k_lle);

-void gf128mul_4k_bbe(be128 *a, const struct gf128mul_4k *t)
-{
-        u8 *ap = (u8 *)a;
-        be128 r[1];
-        int i = 0;
-
-        *r = t->t[ap[0]];
-        while (++i < 16) {
-                gf128mul_x8_bbe(r);
-                be128_xor(r, r, &t->t[ap[i]]);
-        }
-        *a = *r;
-}
-EXPORT_SYMBOL(gf128mul_4k_bbe);
-
 MODULE_LICENSE("GPL");
 MODULE_DESCRIPTION("Functions for multiplying elements of GF(2^128)");
diff --git a/lib/rhashtable.c b/lib/rhashtable.c
index 6c902639728b..0e9a1d4cf89b 100644
--- a/lib/rhashtable.c
+++ b/lib/rhashtable.c
@@ -584,10 +584,6 @@ static struct bucket_table *rhashtable_insert_one(
          */
         rht_assign_locked(bkt, obj);

-        atomic_inc(&ht->nelems);
-        if (rht_grow_above_75(ht, tbl))
-                schedule_work(&ht->run_work);
-
         return NULL;
 }

@@ -615,15 +611,23 @@ static void *rhashtable_try_insert(struct rhashtable *ht, const void *key,
                         new_tbl = rht_dereference_rcu(tbl->future_tbl, ht);
                         data = ERR_PTR(-EAGAIN);
                 } else {
+                        bool inserted;
+
                         flags = rht_lock(tbl, bkt);
                         data = rhashtable_lookup_one(ht, bkt, tbl, hash, key,
                                                      obj);
                         new_tbl = rhashtable_insert_one(ht, bkt, tbl, hash, obj,
                                                         data);
+                        inserted = data && !new_tbl;
+                        if (inserted)
+                                atomic_inc(&ht->nelems);
                         if (PTR_ERR(new_tbl) != -EEXIST)
                                 data = ERR_CAST(new_tbl);

                         rht_unlock(tbl, bkt, flags);
+
+                        if (inserted && rht_grow_above_75(ht, tbl))
+                                schedule_work(&ht->run_work);
                 }
         } while (!IS_ERR_OR_NULL(new_tbl));

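The rhashtable hunks above are the "move schedule_work outside lock" fix named in the merge message: the element count is still updated while the bucket lock is held, but the growth decision is acted on only after rht_unlock(), so queueing the resize work can no longer interact badly with the bucket lock. The following is a minimal user-space sketch of that pattern, not kernel code; toy_table, toy_insert and kick_worker are invented names standing in for the real bucket lock, insert path and schedule_work(&ht->run_work).

/*
 * Minimal sketch of the deadlock-avoidance pattern from the rhashtable fix:
 * do the bookkeeping under the lock, kick the worker only after unlocking.
 * All names here are hypothetical; this is not the kernel API.
 */
#include <pthread.h>
#include <stdbool.h>
#include <stdio.h>

struct toy_table {
        pthread_mutex_t lock;   /* stands in for the bucket lock */
        unsigned int nelems;
        unsigned int size;
};

/* Stand-in for schedule_work(): must never be called while holding t->lock. */
static void kick_worker(struct toy_table *t)
{
        printf("grow requested at %u/%u elements\n", t->nelems, t->size);
}

static void toy_insert(struct toy_table *t)
{
        bool inserted;

        pthread_mutex_lock(&t->lock);
        t->nelems++;            /* cheap bookkeeping stays in the critical section */
        inserted = true;        /* in the kernel: derived from the insert result */
        pthread_mutex_unlock(&t->lock);

        /*
         * Only after the lock is dropped do we act on the growth decision,
         * mirroring the move of schedule_work() past rht_unlock().
         */
        if (inserted && t->nelems > t->size - t->size / 4)
                kick_worker(t);
}

int main(void)
{
        struct toy_table t = { PTHREAD_MUTEX_INITIALIZER, 0, 8 };

        for (int i = 0; i < 10; i++)
                toy_insert(&t);
        return 0;
}

The point of the pattern is that anything which may wake other contexts or take further locks happens strictly after the hot-path lock is released; only the counter update remains inside the critical section.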