Skip to content

Commit f656fa4

Browse files
ebiggers authored and Mikulas Patocka committed
dm-crypt: switch to using the crc32 library
Now that the crc32() library function takes advantage of architecture-specific optimizations, it is unnecessary to go through the crypto API. Just use crc32(). This is much simpler, and it improves performance due to eliminating the crypto API overhead. (However, this only affects the TCW IV mode of dm-crypt, which is a compatibility mode that is rarely used compared to other dm-crypt modes.) Signed-off-by: Eric Biggers <ebiggers@google.com> Signed-off-by: Mikulas Patocka <mpatocka@redhat.com>
1 parent 2014c95 commit f656fa4

File tree

2 files changed

+11
-31
lines changed

2 files changed

+11
-31
lines changed

drivers/md/Kconfig

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -267,6 +267,7 @@ config DM_CRYPT
267267
depends on BLK_DEV_DM
268268
depends on (ENCRYPTED_KEYS || ENCRYPTED_KEYS=n)
269269
depends on (TRUSTED_KEYS || TRUSTED_KEYS=n)
270+
select CRC32
270271
select CRYPTO
271272
select CRYPTO_CBC
272273
select CRYPTO_ESSIV

drivers/md/dm-crypt.c

Lines changed: 10 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@
1717
#include <linux/bio.h>
1818
#include <linux/blkdev.h>
1919
#include <linux/blk-integrity.h>
20+
#include <linux/crc32.h>
2021
#include <linux/mempool.h>
2122
#include <linux/slab.h>
2223
#include <linux/crypto.h>
@@ -125,7 +126,6 @@ struct iv_lmk_private {
125126

126127
#define TCW_WHITENING_SIZE 16
127128
struct iv_tcw_private {
128-
struct crypto_shash *crc32_tfm;
129129
u8 *iv_seed;
130130
u8 *whitening;
131131
};
@@ -607,10 +607,6 @@ static void crypt_iv_tcw_dtr(struct crypt_config *cc)
607607
tcw->iv_seed = NULL;
608608
kfree_sensitive(tcw->whitening);
609609
tcw->whitening = NULL;
610-
611-
if (tcw->crc32_tfm && !IS_ERR(tcw->crc32_tfm))
612-
crypto_free_shash(tcw->crc32_tfm);
613-
tcw->crc32_tfm = NULL;
614610
}
615611

616612
static int crypt_iv_tcw_ctr(struct crypt_config *cc, struct dm_target *ti,
@@ -628,13 +624,6 @@ static int crypt_iv_tcw_ctr(struct crypt_config *cc, struct dm_target *ti,
628624
return -EINVAL;
629625
}
630626

631-
tcw->crc32_tfm = crypto_alloc_shash("crc32", 0,
632-
CRYPTO_ALG_ALLOCATES_MEMORY);
633-
if (IS_ERR(tcw->crc32_tfm)) {
634-
ti->error = "Error initializing CRC32 in TCW";
635-
return PTR_ERR(tcw->crc32_tfm);
636-
}
637-
638627
tcw->iv_seed = kzalloc(cc->iv_size, GFP_KERNEL);
639628
tcw->whitening = kzalloc(TCW_WHITENING_SIZE, GFP_KERNEL);
640629
if (!tcw->iv_seed || !tcw->whitening) {
@@ -668,36 +657,28 @@ static int crypt_iv_tcw_wipe(struct crypt_config *cc)
668657
return 0;
669658
}
670659

671-
static int crypt_iv_tcw_whitening(struct crypt_config *cc,
672-
struct dm_crypt_request *dmreq,
673-
u8 *data)
660+
static void crypt_iv_tcw_whitening(struct crypt_config *cc,
661+
struct dm_crypt_request *dmreq, u8 *data)
674662
{
675663
struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw;
676664
__le64 sector = cpu_to_le64(dmreq->iv_sector);
677665
u8 buf[TCW_WHITENING_SIZE];
678-
SHASH_DESC_ON_STACK(desc, tcw->crc32_tfm);
679-
int i, r;
666+
int i;
680667

681668
/* xor whitening with sector number */
682669
crypto_xor_cpy(buf, tcw->whitening, (u8 *)&sector, 8);
683670
crypto_xor_cpy(&buf[8], tcw->whitening + 8, (u8 *)&sector, 8);
684671

685672
/* calculate crc32 for every 32bit part and xor it */
686-
desc->tfm = tcw->crc32_tfm;
687-
for (i = 0; i < 4; i++) {
688-
r = crypto_shash_digest(desc, &buf[i * 4], 4, &buf[i * 4]);
689-
if (r)
690-
goto out;
691-
}
673+
for (i = 0; i < 4; i++)
674+
put_unaligned_le32(crc32(0, &buf[i * 4], 4), &buf[i * 4]);
692675
crypto_xor(&buf[0], &buf[12], 4);
693676
crypto_xor(&buf[4], &buf[8], 4);
694677

695678
/* apply whitening (8 bytes) to whole sector */
696679
for (i = 0; i < ((1 << SECTOR_SHIFT) / 8); i++)
697680
crypto_xor(data + i * 8, buf, 8);
698-
out:
699681
memzero_explicit(buf, sizeof(buf));
700-
return r;
701682
}
702683

703684
static int crypt_iv_tcw_gen(struct crypt_config *cc, u8 *iv,
@@ -707,13 +688,12 @@ static int crypt_iv_tcw_gen(struct crypt_config *cc, u8 *iv,
707688
struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw;
708689
__le64 sector = cpu_to_le64(dmreq->iv_sector);
709690
u8 *src;
710-
int r = 0;
711691

712692
/* Remove whitening from ciphertext */
713693
if (bio_data_dir(dmreq->ctx->bio_in) != WRITE) {
714694
sg = crypt_get_sg_data(cc, dmreq->sg_in);
715695
src = kmap_local_page(sg_page(sg));
716-
r = crypt_iv_tcw_whitening(cc, dmreq, src + sg->offset);
696+
crypt_iv_tcw_whitening(cc, dmreq, src + sg->offset);
717697
kunmap_local(src);
718698
}
719699

@@ -723,26 +703,25 @@ static int crypt_iv_tcw_gen(struct crypt_config *cc, u8 *iv,
723703
crypto_xor_cpy(&iv[8], tcw->iv_seed + 8, (u8 *)&sector,
724704
cc->iv_size - 8);
725705

726-
return r;
706+
return 0;
727707
}
728708

729709
static int crypt_iv_tcw_post(struct crypt_config *cc, u8 *iv,
730710
struct dm_crypt_request *dmreq)
731711
{
732712
struct scatterlist *sg;
733713
u8 *dst;
734-
int r;
735714

736715
if (bio_data_dir(dmreq->ctx->bio_in) != WRITE)
737716
return 0;
738717

739718
/* Apply whitening on ciphertext */
740719
sg = crypt_get_sg_data(cc, dmreq->sg_out);
741720
dst = kmap_local_page(sg_page(sg));
742-
r = crypt_iv_tcw_whitening(cc, dmreq, dst + sg->offset);
721+
crypt_iv_tcw_whitening(cc, dmreq, dst + sg->offset);
743722
kunmap_local(dst);
744723

745-
return r;
724+
return 0;
746725
}
747726

748727
static int crypt_iv_random_gen(struct crypt_config *cc, u8 *iv,

0 commit comments

Comments
 (0)