Commit 3846c01

crypto: arm/ghash-ce - Remove SIMD fallback code path
Remove the obsolete fallback code path that was used when SIMD was not usable, and remove the cryptd-based "ghash-ce" ahash algorithm. Rename the remaining shash algorithm from "ghash-ce-sync" to "ghash-ce".

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
1 parent fdd3058 commit 3846c01
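
As a quick illustration (not part of the commit): with the rename in place, a kernel-side caller reaches the PMULL/NEON implementation directly through the synchronous shash API. A minimal sketch, assuming the in-kernel crypto API and a 16-byte GHASH key; the function and variable names here are hypothetical:

#include <crypto/hash.h>
#include <linux/err.h>

/* Hypothetical demo: one-shot GHASH digest via the "ghash-ce" driver. */
static int ghash_ce_demo(const u8 *key, unsigned int keylen,
			 const u8 *data, unsigned int len, u8 *digest)
{
	struct crypto_shash *tfm;
	int err;

	/* Request the renamed driver explicitly; "ghash" also matches it. */
	tfm = crypto_alloc_shash("ghash-ce", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_shash_setkey(tfm, key, keylen);	/* keylen must be 16 */
	if (!err) {
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		err = crypto_shash_digest(desc, data, len, digest);
	}

	crypto_free_shash(tfm);
	return err;
}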

1 file changed: 8 additions, 189 deletions

arch/arm/crypto/ghash-ce-glue.c

@@ -55,10 +55,6 @@ struct ghash_desc_ctx {
 	u32 count;
 };
 
-struct ghash_async_ctx {
-	struct cryptd_ahash *cryptd_tfm;
-};
-
 asmlinkage void pmull_ghash_update_p64(int blocks, u64 dg[], const char *src,
 				       u64 const h[][2], const char *head);
 
@@ -78,34 +74,12 @@ static int ghash_init(struct shash_desc *desc)
 static void ghash_do_update(int blocks, u64 dg[], const char *src,
 			    struct ghash_key *key, const char *head)
 {
-	if (likely(crypto_simd_usable())) {
-		kernel_neon_begin();
-		if (static_branch_likely(&use_p64))
-			pmull_ghash_update_p64(blocks, dg, src, key->h, head);
-		else
-			pmull_ghash_update_p8(blocks, dg, src, key->h, head);
-		kernel_neon_end();
-	} else {
-		be128 dst = { cpu_to_be64(dg[1]), cpu_to_be64(dg[0]) };
-
-		do {
-			const u8 *in = src;
-
-			if (head) {
-				in = head;
-				blocks++;
-				head = NULL;
-			} else {
-				src += GHASH_BLOCK_SIZE;
-			}
-
-			crypto_xor((u8 *)&dst, in, GHASH_BLOCK_SIZE);
-			gf128mul_lle(&dst, &key->k);
-		} while (--blocks);
-
-		dg[0] = be64_to_cpu(dst.b);
-		dg[1] = be64_to_cpu(dst.a);
-	}
+	kernel_neon_begin();
+	if (static_branch_likely(&use_p64))
+		pmull_ghash_update_p64(blocks, dg, src, key->h, head);
+	else
+		pmull_ghash_update_p8(blocks, dg, src, key->h, head);
+	kernel_neon_end();
 }
 
 static int ghash_update(struct shash_desc *desc, const u8 *src,
@@ -206,162 +180,13 @@ static struct shash_alg ghash_alg = {
 	.descsize		= sizeof(struct ghash_desc_ctx),
 
 	.base.cra_name		= "ghash",
-	.base.cra_driver_name	= "ghash-ce-sync",
-	.base.cra_priority	= 300 - 1,
+	.base.cra_driver_name	= "ghash-ce",
+	.base.cra_priority	= 300,
 	.base.cra_blocksize	= GHASH_BLOCK_SIZE,
 	.base.cra_ctxsize	= sizeof(struct ghash_key) + sizeof(u64[2]),
 	.base.cra_module	= THIS_MODULE,
 };
 
-static int ghash_async_init(struct ahash_request *req)
-{
-	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
-	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
-	struct ahash_request *cryptd_req = ahash_request_ctx(req);
-	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
-	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
-	struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);
-
-	desc->tfm = child;
-	return crypto_shash_init(desc);
-}
-
-static int ghash_async_update(struct ahash_request *req)
-{
-	struct ahash_request *cryptd_req = ahash_request_ctx(req);
-	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
-	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
-	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
-
-	if (!crypto_simd_usable() ||
-	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
-		memcpy(cryptd_req, req, sizeof(*req));
-		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
-		return crypto_ahash_update(cryptd_req);
-	} else {
-		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
-		return shash_ahash_update(req, desc);
-	}
-}
-
-static int ghash_async_final(struct ahash_request *req)
-{
-	struct ahash_request *cryptd_req = ahash_request_ctx(req);
-	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
-	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
-	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
-
-	if (!crypto_simd_usable() ||
-	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
-		memcpy(cryptd_req, req, sizeof(*req));
-		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
-		return crypto_ahash_final(cryptd_req);
-	} else {
-		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
-		return crypto_shash_final(desc, req->result);
-	}
-}
-
-static int ghash_async_digest(struct ahash_request *req)
-{
-	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
-	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
-	struct ahash_request *cryptd_req = ahash_request_ctx(req);
-	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
-
-	if (!crypto_simd_usable() ||
-	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
-		memcpy(cryptd_req, req, sizeof(*req));
-		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
-		return crypto_ahash_digest(cryptd_req);
-	} else {
-		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
-		struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);
-
-		desc->tfm = child;
-		return shash_ahash_digest(req, desc);
-	}
-}
-
-static int ghash_async_import(struct ahash_request *req, const void *in)
-{
-	struct ahash_request *cryptd_req = ahash_request_ctx(req);
-	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
-	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
-	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
-
-	desc->tfm = cryptd_ahash_child(ctx->cryptd_tfm);
-
-	return crypto_shash_import(desc, in);
-}
-
-static int ghash_async_export(struct ahash_request *req, void *out)
-{
-	struct ahash_request *cryptd_req = ahash_request_ctx(req);
-	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
-
-	return crypto_shash_export(desc, out);
-}
-
-static int ghash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
-			      unsigned int keylen)
-{
-	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
-	struct crypto_ahash *child = &ctx->cryptd_tfm->base;
-
-	crypto_ahash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
-	crypto_ahash_set_flags(child, crypto_ahash_get_flags(tfm)
-			       & CRYPTO_TFM_REQ_MASK);
-	return crypto_ahash_setkey(child, key, keylen);
-}
-
-static int ghash_async_init_tfm(struct crypto_tfm *tfm)
-{
-	struct cryptd_ahash *cryptd_tfm;
-	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);
-
-	cryptd_tfm = cryptd_alloc_ahash("ghash-ce-sync", 0, 0);
-	if (IS_ERR(cryptd_tfm))
-		return PTR_ERR(cryptd_tfm);
-	ctx->cryptd_tfm = cryptd_tfm;
-	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
-				 sizeof(struct ahash_request) +
-				 crypto_ahash_reqsize(&cryptd_tfm->base));
-
-	return 0;
-}
-
-static void ghash_async_exit_tfm(struct crypto_tfm *tfm)
-{
-	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);
-
-	cryptd_free_ahash(ctx->cryptd_tfm);
-}
-
-static struct ahash_alg ghash_async_alg = {
-	.init			= ghash_async_init,
-	.update			= ghash_async_update,
-	.final			= ghash_async_final,
-	.setkey			= ghash_async_setkey,
-	.digest			= ghash_async_digest,
-	.import			= ghash_async_import,
-	.export			= ghash_async_export,
-	.halg.digestsize	= GHASH_DIGEST_SIZE,
-	.halg.statesize		= sizeof(struct ghash_desc_ctx),
-	.halg.base		= {
-		.cra_name		= "ghash",
-		.cra_driver_name	= "ghash-ce",
-		.cra_priority		= 300,
-		.cra_flags		= CRYPTO_ALG_ASYNC,
-		.cra_blocksize		= GHASH_BLOCK_SIZE,
-		.cra_ctxsize		= sizeof(struct ghash_async_ctx),
-		.cra_module		= THIS_MODULE,
-		.cra_init		= ghash_async_init_tfm,
-		.cra_exit		= ghash_async_exit_tfm,
-	},
-};
-
-
 void pmull_gcm_encrypt(int blocks, u64 dg[], const char *src,
 		       struct gcm_key const *k, char *dst,
 		       const char *iv, int rounds, u32 counter);
@@ -759,14 +584,9 @@ static int __init ghash_ce_mod_init(void)
 	err = crypto_register_shash(&ghash_alg);
 	if (err)
 		goto err_aead;
-	err = crypto_register_ahash(&ghash_async_alg);
-	if (err)
-		goto err_shash;
 
 	return 0;
 
-err_shash:
-	crypto_unregister_shash(&ghash_alg);
 err_aead:
 	if (elf_hwcap2 & HWCAP2_PMULL)
 		crypto_unregister_aeads(gcm_aes_algs,
@@ -776,7 +596,6 @@ static int __init ghash_ce_mod_init(void)
 
 static void __exit ghash_ce_mod_exit(void)
 {
-	crypto_unregister_ahash(&ghash_async_alg);
 	crypto_unregister_shash(&ghash_alg);
 	if (elf_hwcap2 & HWCAP2_PMULL)
 		crypto_unregister_aeads(gcm_aes_algs,
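
Note that dropping ghash_async_alg does not strand ahash users: the crypto API automatically exposes every registered shash through the ahash interface, so asynchronous-style callers still resolve "ghash" to this driver. A minimal sketch of such a caller (hypothetical names, assuming the standard crypto_wait_req() completion helpers):

#include <crypto/hash.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

/* Hypothetical demo: GHASH via the ahash interface over the shash. */
static int ghash_ahash_demo(const u8 *key, unsigned int keylen,
			    const u8 *data, unsigned int len, u8 *digest)
{
	struct crypto_ahash *tfm;
	struct ahash_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	tfm = crypto_alloc_ahash("ghash", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_ahash_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	sg_init_one(&sg, data, len);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
				   CRYPTO_TFM_REQ_MAY_SLEEP,
				   crypto_req_done, &wait);
	ahash_request_set_crypt(req, &sg, digest, len);

	/* Completes synchronously here, since the backend is a shash. */
	err = crypto_wait_req(crypto_ahash_digest(req), &wait);

	ahash_request_free(req);
out_free_tfm:
	crypto_free_ahash(tfm);
	return err;
}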
