Skip to content

Commit

Permalink
crypto: x86/cast6 - switch to XTS template
Browse files Browse the repository at this point in the history
Now that the XTS template can wrap accelerated ECB modes, it can be
used to implement CAST6 in XTS mode as well, which turns out to
be at least as fast, and sometimes even faster.

Acked-by: Eric Biggers <[email protected]>
Signed-off-by: Ard Biesheuvel <[email protected]>
Signed-off-by: Herbert Xu <[email protected]>
  • Loading branch information
ardbiesheuvel authored and herbertx committed Jan 14, 2021
1 parent 55a7e88 commit 2cc0fed
Show file tree
Hide file tree
Showing 3 changed files with 1 addition and 155 deletions.
56 changes: 0 additions & 56 deletions arch/x86/crypto/cast6-avx-x86_64-asm_64.S
Original file line number Diff line number Diff line change
Expand Up @@ -212,8 +212,6 @@

.section .rodata.cst16, "aM", @progbits, 16
.align 16
.Lxts_gf128mul_and_shl1_mask:
.byte 0x87, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0
.Lbswap_mask:
.byte 3, 2, 1, 0, 7, 6, 5, 4, 11, 10, 9, 8, 15, 14, 13, 12
.Lbswap128_mask:
Expand Down Expand Up @@ -440,57 +438,3 @@ SYM_FUNC_START(cast6_ctr_8way)
FRAME_END
ret;
SYM_FUNC_END(cast6_ctr_8way)

SYM_FUNC_START(cast6_xts_enc_8way)
/* input:
* %rdi: ctx, CTX
* %rsi: dst
* %rdx: src
* %rcx: iv (t ⊕ αⁿ ∈ GF(2¹²⁸))
*/
FRAME_BEGIN
pushq %r15;

movq %rdi, CTX
movq %rsi, %r11;

/* regs <= src, dst <= IVs, regs <= regs xor IVs */
load_xts_8way(%rcx, %rdx, %rsi, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2,
RX, RKR, RKM, .Lxts_gf128mul_and_shl1_mask);

call __cast6_enc_blk8;

/* dst <= regs xor IVs(in dst) */
store_xts_8way(%r11, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);

popq %r15;
FRAME_END
ret;
SYM_FUNC_END(cast6_xts_enc_8way)

SYM_FUNC_START(cast6_xts_dec_8way)
/* input:
* %rdi: ctx, CTX
* %rsi: dst
* %rdx: src
* %rcx: iv (t ⊕ αⁿ ∈ GF(2¹²⁸))
*/
FRAME_BEGIN
pushq %r15;

movq %rdi, CTX
movq %rsi, %r11;

/* regs <= src, dst <= IVs, regs <= regs xor IVs */
load_xts_8way(%rcx, %rdx, %rsi, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2,
RX, RKR, RKM, .Lxts_gf128mul_and_shl1_mask);

call __cast6_dec_blk8;

/* dst <= regs xor IVs(in dst) */
store_xts_8way(%r11, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);

popq %r15;
FRAME_END
ret;
SYM_FUNC_END(cast6_xts_dec_8way)
98 changes: 0 additions & 98 deletions arch/x86/crypto/cast6_avx_glue.c
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,6 @@
#include <crypto/algapi.h>
#include <crypto/cast6.h>
#include <crypto/internal/simd.h>
#include <crypto/xts.h>
#include <asm/crypto/glue_helper.h>

#define CAST6_PARALLEL_BLOCKS 8
Expand All @@ -27,27 +26,12 @@ asmlinkage void cast6_cbc_dec_8way(const void *ctx, u8 *dst, const u8 *src);
asmlinkage void cast6_ctr_8way(const void *ctx, u8 *dst, const u8 *src,
le128 *iv);

asmlinkage void cast6_xts_enc_8way(const void *ctx, u8 *dst, const u8 *src,
le128 *iv);
asmlinkage void cast6_xts_dec_8way(const void *ctx, u8 *dst, const u8 *src,
le128 *iv);

static int cast6_setkey_skcipher(struct crypto_skcipher *tfm,
const u8 *key, unsigned int keylen)
{
return cast6_setkey(&tfm->base, key, keylen);
}

static void cast6_xts_enc(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
{
glue_xts_crypt_128bit_one(ctx, dst, src, iv, __cast6_encrypt);
}

static void cast6_xts_dec(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
{
glue_xts_crypt_128bit_one(ctx, dst, src, iv, __cast6_decrypt);
}

static void cast6_crypt_ctr(const void *ctx, u8 *d, const u8 *s, le128 *iv)
{
be128 ctrblk;
Expand Down Expand Up @@ -87,19 +71,6 @@ static const struct common_glue_ctx cast6_ctr = {
} }
};

static const struct common_glue_ctx cast6_enc_xts = {
.num_funcs = 2,
.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

.funcs = { {
.num_blocks = CAST6_PARALLEL_BLOCKS,
.fn_u = { .xts = cast6_xts_enc_8way }
}, {
.num_blocks = 1,
.fn_u = { .xts = cast6_xts_enc }
} }
};

static const struct common_glue_ctx cast6_dec = {
.num_funcs = 2,
.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,
Expand All @@ -126,19 +97,6 @@ static const struct common_glue_ctx cast6_dec_cbc = {
} }
};

static const struct common_glue_ctx cast6_dec_xts = {
.num_funcs = 2,
.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

.funcs = { {
.num_blocks = CAST6_PARALLEL_BLOCKS,
.fn_u = { .xts = cast6_xts_dec_8way }
}, {
.num_blocks = 1,
.fn_u = { .xts = cast6_xts_dec }
} }
};

static int ecb_encrypt(struct skcipher_request *req)
{
return glue_ecb_req_128bit(&cast6_enc, req);
Expand All @@ -164,48 +122,6 @@ static int ctr_crypt(struct skcipher_request *req)
return glue_ctr_req_128bit(&cast6_ctr, req);
}

struct cast6_xts_ctx {
struct cast6_ctx tweak_ctx;
struct cast6_ctx crypt_ctx;
};

static int xts_cast6_setkey(struct crypto_skcipher *tfm, const u8 *key,
unsigned int keylen)
{
struct cast6_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
int err;

err = xts_verify_key(tfm, key, keylen);
if (err)
return err;

/* first half of xts-key is for crypt */
err = __cast6_setkey(&ctx->crypt_ctx, key, keylen / 2);
if (err)
return err;

/* second half of xts-key is for tweak */
return __cast6_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2);
}

static int xts_encrypt(struct skcipher_request *req)
{
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
struct cast6_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

return glue_xts_req_128bit(&cast6_enc_xts, req, __cast6_encrypt,
&ctx->tweak_ctx, &ctx->crypt_ctx, false);
}

static int xts_decrypt(struct skcipher_request *req)
{
struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
struct cast6_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

return glue_xts_req_128bit(&cast6_dec_xts, req, __cast6_encrypt,
&ctx->tweak_ctx, &ctx->crypt_ctx, true);
}

static struct skcipher_alg cast6_algs[] = {
{
.base.cra_name = "__ecb(cast6)",
Expand Down Expand Up @@ -249,20 +165,6 @@ static struct skcipher_alg cast6_algs[] = {
.setkey = cast6_setkey_skcipher,
.encrypt = ctr_crypt,
.decrypt = ctr_crypt,
}, {
.base.cra_name = "__xts(cast6)",
.base.cra_driver_name = "__xts-cast6-avx",
.base.cra_priority = 200,
.base.cra_flags = CRYPTO_ALG_INTERNAL,
.base.cra_blocksize = CAST6_BLOCK_SIZE,
.base.cra_ctxsize = sizeof(struct cast6_xts_ctx),
.base.cra_module = THIS_MODULE,
.min_keysize = 2 * CAST6_MIN_KEY_SIZE,
.max_keysize = 2 * CAST6_MAX_KEY_SIZE,
.ivsize = CAST6_BLOCK_SIZE,
.setkey = xts_cast6_setkey,
.encrypt = xts_encrypt,
.decrypt = xts_decrypt,
},
};

Expand Down
2 changes: 1 addition & 1 deletion crypto/Kconfig
Original file line number Diff line number Diff line change
Expand Up @@ -1394,7 +1394,7 @@ config CRYPTO_CAST6_AVX_X86_64
select CRYPTO_CAST_COMMON
select CRYPTO_GLUE_HELPER_X86
select CRYPTO_SIMD
select CRYPTO_XTS
imply CRYPTO_XTS
help
The CAST6 encryption algorithm (synonymous with CAST-256) is
described in RFC2612.
Expand Down

0 comments on commit 2cc0fed

Please sign in to comment.