Diffstat (limited to 'crypto/serpent_generic.c')
-rw-r--r--  crypto/serpent_generic.c  165
1 file changed, 47 insertions(+), 118 deletions(-)
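The patch below removes the kerneli-era "tnepres" (byte-reversed Serpent) variant and converts the remaining generic Serpent code from cast-based __le32 loads and stores to the unaligned accessors in <asm/unaligned.h>, which in turn lets it drop the .cra_alignmask. A minimal sketch of the idiom change (my illustration, not code from the patch; the helper name is made up):

	#include <asm/unaligned.h>

	/*
	 * Old idiom -- only safe when src is 32-bit aligned, so the driver
	 * had to advertise .cra_alignmask = 3:
	 *
	 *	r0 = le32_to_cpu(*(const __le32 *)src);
	 *
	 * New idiom -- compiles to byte loads (or a plain load where the
	 * CPU tolerates misalignment), so no alignment promise is needed:
	 */
	static u32 load_le32_any_alignment(const u8 *src)
	{
		return get_unaligned_le32(src);
	}
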
diff --git a/crypto/serpent_generic.c b/crypto/serpent_generic.c
index 492c1d0bfe06..45f98b750053 100644
--- a/crypto/serpent_generic.c
+++ b/crypto/serpent_generic.c
@@ -5,17 +5,12 @@
* Serpent Cipher Algorithm.
*
* Copyright (C) 2002 Dag Arne Osvik <osvik@ii.uib.no>
- * 2003 Herbert Valerio Riedel <hvr@gnu.org>
- *
- * Added tnepres support:
- * Ruben Jesus Garcia Hernandez <ruben@ugr.es>, 18.10.2004
- * Based on code by hvr
*/
#include <linux/init.h>
#include <linux/module.h>
#include <linux/errno.h>
-#include <asm/byteorder.h>
+#include <asm/unaligned.h>
#include <linux/crypto.h>
#include <linux/types.h>
#include <crypto/serpent.h>
@@ -277,6 +272,7 @@ int __serpent_setkey(struct serpent_ctx *ctx, const u8 *key,
u32 *k = ctx->expkey;
u8 *k8 = (u8 *)k;
u32 r0, r1, r2, r3, r4;
+ __le32 *lk;
int i;
/* Copy key, add padding */
@@ -288,22 +284,32 @@ int __serpent_setkey(struct serpent_ctx *ctx, const u8 *key,
while (i < SERPENT_MAX_KEY_SIZE)
k8[i++] = 0;
+ lk = (__le32 *)k;
+ k[0] = le32_to_cpu(lk[0]);
+ k[1] = le32_to_cpu(lk[1]);
+ k[2] = le32_to_cpu(lk[2]);
+ k[3] = le32_to_cpu(lk[3]);
+ k[4] = le32_to_cpu(lk[4]);
+ k[5] = le32_to_cpu(lk[5]);
+ k[6] = le32_to_cpu(lk[6]);
+ k[7] = le32_to_cpu(lk[7]);
+
/* Expand key using polynomial */
- r0 = le32_to_cpu(k[3]);
- r1 = le32_to_cpu(k[4]);
- r2 = le32_to_cpu(k[5]);
- r3 = le32_to_cpu(k[6]);
- r4 = le32_to_cpu(k[7]);
-
- keyiter(le32_to_cpu(k[0]), r0, r4, r2, 0, 0);
- keyiter(le32_to_cpu(k[1]), r1, r0, r3, 1, 1);
- keyiter(le32_to_cpu(k[2]), r2, r1, r4, 2, 2);
- keyiter(le32_to_cpu(k[3]), r3, r2, r0, 3, 3);
- keyiter(le32_to_cpu(k[4]), r4, r3, r1, 4, 4);
- keyiter(le32_to_cpu(k[5]), r0, r4, r2, 5, 5);
- keyiter(le32_to_cpu(k[6]), r1, r0, r3, 6, 6);
- keyiter(le32_to_cpu(k[7]), r2, r1, r4, 7, 7);
+ r0 = k[3];
+ r1 = k[4];
+ r2 = k[5];
+ r3 = k[6];
+ r4 = k[7];
+
+ keyiter(k[0], r0, r4, r2, 0, 0);
+ keyiter(k[1], r1, r0, r3, 1, 1);
+ keyiter(k[2], r2, r1, r4, 2, 2);
+ keyiter(k[3], r3, r2, r0, 3, 3);
+ keyiter(k[4], r4, r3, r1, 4, 4);
+ keyiter(k[5], r0, r4, r2, 5, 5);
+ keyiter(k[6], r1, r0, r3, 6, 6);
+ keyiter(k[7], r2, r1, r4, 7, 7);
keyiter(k[0], r3, r2, r0, 8, 8);
keyiter(k[1], r4, r3, r1, 9, 9);
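
The setkey hunk above byte-swaps the eight 32-bit key words in place once, so every later keyiter() call reads a plain u32 instead of invoking le32_to_cpu() at each use site. A loop-form sketch of the same transformation (the patch unrolls it; the helper name is mine):

	#include <linux/types.h>
	#include <asm/byteorder.h>

	/* ctx->expkey starts out holding the little-endian user key bytes
	 * (padded earlier); rewrite the first eight words in CPU byte
	 * order, in place. Each lk[i] is read before k[i] is written, so
	 * aliasing the same storage is fine. */
	static void serpent_key_words_to_cpu(u32 *k)
	{
		__le32 *lk = (__le32 *)k;
		int i;

		for (i = 0; i < 8; i++)
			k[i] = le32_to_cpu(lk[i]);
	}
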
@@ -453,19 +459,12 @@ void __serpent_encrypt(const void *c, u8 *dst, const u8 *src)
{
const struct serpent_ctx *ctx = c;
const u32 *k = ctx->expkey;
- const __le32 *s = (const __le32 *)src;
- __le32 *d = (__le32 *)dst;
u32 r0, r1, r2, r3, r4;
-/*
- * Note: The conversions between u8* and u32* might cause trouble
- * on architectures with stricter alignment rules than x86
- */
-
- r0 = le32_to_cpu(s[0]);
- r1 = le32_to_cpu(s[1]);
- r2 = le32_to_cpu(s[2]);
- r3 = le32_to_cpu(s[3]);
+ r0 = get_unaligned_le32(src);
+ r1 = get_unaligned_le32(src + 4);
+ r2 = get_unaligned_le32(src + 8);
+ r3 = get_unaligned_le32(src + 12);
K(r0, r1, r2, r3, 0);
S0(r0, r1, r2, r3, r4); LK(r2, r1, r3, r0, r4, 1);
@@ -501,10 +500,10 @@ void __serpent_encrypt(const void *c, u8 *dst, const u8 *src)
S6(r0, r1, r3, r2, r4); LK(r3, r4, r1, r2, r0, 31);
S7(r3, r4, r1, r2, r0); K(r0, r1, r2, r3, 32);
- d[0] = cpu_to_le32(r0);
- d[1] = cpu_to_le32(r1);
- d[2] = cpu_to_le32(r2);
- d[3] = cpu_to_le32(r3);
+ put_unaligned_le32(r0, dst);
+ put_unaligned_le32(r1, dst + 4);
+ put_unaligned_le32(r2, dst + 8);
+ put_unaligned_le32(r3, dst + 12);
}
EXPORT_SYMBOL_GPL(__serpent_encrypt);
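
The store side is symmetric: put_unaligned_le32() emits a CPU-order word as four little-endian bytes at any address. Because both __serpent_encrypt() and __serpent_decrypt() now use these helpers exclusively, the .cra_alignmask = 3 entries removed further down become unnecessary, and the crypto API no longer has to realign caller buffers before invoking this cipher. Sketch (mine, mirroring the load sketch above):

	#include <asm/unaligned.h>

	/* Write r out as little-endian bytes regardless of dst's alignment. */
	static void store_le32_any_alignment(u8 *dst, u32 r)
	{
		put_unaligned_le32(r, dst);
	}
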
@@ -519,14 +518,12 @@ void __serpent_decrypt(const void *c, u8 *dst, const u8 *src)
{
const struct serpent_ctx *ctx = c;
const u32 *k = ctx->expkey;
- const __le32 *s = (const __le32 *)src;
- __le32 *d = (__le32 *)dst;
u32 r0, r1, r2, r3, r4;
- r0 = le32_to_cpu(s[0]);
- r1 = le32_to_cpu(s[1]);
- r2 = le32_to_cpu(s[2]);
- r3 = le32_to_cpu(s[3]);
+ r0 = get_unaligned_le32(src);
+ r1 = get_unaligned_le32(src + 4);
+ r2 = get_unaligned_le32(src + 8);
+ r3 = get_unaligned_le32(src + 12);
K(r0, r1, r2, r3, 32);
SI7(r0, r1, r2, r3, r4); KL(r1, r3, r0, r4, r2, 31);
@@ -562,10 +559,10 @@ void __serpent_decrypt(const void *c, u8 *dst, const u8 *src)
SI1(r3, r1, r2, r0, r4); KL(r4, r1, r2, r0, r3, 1);
SI0(r4, r1, r2, r0, r3); K(r2, r3, r1, r4, 0);
- d[0] = cpu_to_le32(r2);
- d[1] = cpu_to_le32(r3);
- d[2] = cpu_to_le32(r1);
- d[3] = cpu_to_le32(r4);
+ put_unaligned_le32(r2, dst);
+ put_unaligned_le32(r3, dst + 4);
+ put_unaligned_le32(r1, dst + 8);
+ put_unaligned_le32(r4, dst + 12);
}
EXPORT_SYMBOL_GPL(__serpent_decrypt);
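
The remaining hunks delete "tnepres" ("serpent" spelled backwards): a kerneli-compatibility variant that ran Serpent on byte-reversed keys and on word-reversed, byte-swapped blocks, as the deleted wrappers below show. Reconstructed from that deleted code as a single helper, for reference (the name is mine):

	#include <linux/swab.h>

	/* What the deleted tnepres wrappers did on either side of
	 * serpent_{en,de}crypt(): reverse the order of the four block
	 * words and byte-swap each one. */
	static void tnepres_swap_block(u32 *dst, const u32 *src)
	{
		dst[0] = swab32(src[3]);
		dst[1] = swab32(src[2]);
		dst[2] = swab32(src[1]);
		dst[3] = swab32(src[0]);
	}
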
@@ -576,66 +573,13 @@ static void serpent_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
__serpent_decrypt(ctx, dst, src);
}
-static int tnepres_setkey(struct crypto_tfm *tfm, const u8 *key,
- unsigned int keylen)
-{
- u8 rev_key[SERPENT_MAX_KEY_SIZE];
- int i;
-
- for (i = 0; i < keylen; ++i)
- rev_key[keylen - i - 1] = key[i];
-
- return serpent_setkey(tfm, rev_key, keylen);
-}
-
-static void tnepres_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
-{
- const u32 * const s = (const u32 * const)src;
- u32 * const d = (u32 * const)dst;
-
- u32 rs[4], rd[4];
-
- rs[0] = swab32(s[3]);
- rs[1] = swab32(s[2]);
- rs[2] = swab32(s[1]);
- rs[3] = swab32(s[0]);
-
- serpent_encrypt(tfm, (u8 *)rd, (u8 *)rs);
-
- d[0] = swab32(rd[3]);
- d[1] = swab32(rd[2]);
- d[2] = swab32(rd[1]);
- d[3] = swab32(rd[0]);
-}
-
-static void tnepres_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
-{
- const u32 * const s = (const u32 * const)src;
- u32 * const d = (u32 * const)dst;
-
- u32 rs[4], rd[4];
-
- rs[0] = swab32(s[3]);
- rs[1] = swab32(s[2]);
- rs[2] = swab32(s[1]);
- rs[3] = swab32(s[0]);
-
- serpent_decrypt(tfm, (u8 *)rd, (u8 *)rs);
-
- d[0] = swab32(rd[3]);
- d[1] = swab32(rd[2]);
- d[2] = swab32(rd[1]);
- d[3] = swab32(rd[0]);
-}
-
-static struct crypto_alg srp_algs[2] = { {
+static struct crypto_alg srp_alg = {
.cra_name = "serpent",
.cra_driver_name = "serpent-generic",
.cra_priority = 100,
.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
.cra_blocksize = SERPENT_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct serpent_ctx),
- .cra_alignmask = 3,
.cra_module = THIS_MODULE,
.cra_u = { .cipher = {
.cia_min_keysize = SERPENT_MIN_KEY_SIZE,
@@ -643,38 +587,23 @@ static struct crypto_alg srp_algs[2] = { {
.cia_setkey = serpent_setkey,
.cia_encrypt = serpent_encrypt,
.cia_decrypt = serpent_decrypt } }
-}, {
- .cra_name = "tnepres",
- .cra_driver_name = "tnepres-generic",
- .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
- .cra_blocksize = SERPENT_BLOCK_SIZE,
- .cra_ctxsize = sizeof(struct serpent_ctx),
- .cra_alignmask = 3,
- .cra_module = THIS_MODULE,
- .cra_u = { .cipher = {
- .cia_min_keysize = SERPENT_MIN_KEY_SIZE,
- .cia_max_keysize = SERPENT_MAX_KEY_SIZE,
- .cia_setkey = tnepres_setkey,
- .cia_encrypt = tnepres_encrypt,
- .cia_decrypt = tnepres_decrypt } }
-} };
+};
static int __init serpent_mod_init(void)
{
- return crypto_register_algs(srp_algs, ARRAY_SIZE(srp_algs));
+ return crypto_register_alg(&srp_alg);
}
static void __exit serpent_mod_fini(void)
{
- crypto_unregister_algs(srp_algs, ARRAY_SIZE(srp_algs));
+ crypto_unregister_alg(&srp_alg);
}
subsys_initcall(serpent_mod_init);
module_exit(serpent_mod_fini);
MODULE_LICENSE("GPL");
-MODULE_DESCRIPTION("Serpent and tnepres (kerneli compatible serpent reversed) Cipher Algorithm");
+MODULE_DESCRIPTION("Serpent Cipher Algorithm");
MODULE_AUTHOR("Dag Arne Osvik <osvik@ii.uib.no>");
-MODULE_ALIAS_CRYPTO("tnepres");
MODULE_ALIAS_CRYPTO("serpent");
MODULE_ALIAS_CRYPTO("serpent-generic");