/*
 * Cryptographic API.
 *
 * TEA, XTEA, and XETA crypto algorithms
 *
 * The TEA and Xtended TEA algorithms were developed by David Wheeler
 * and Roger Needham at the Computer Laboratory of Cambridge University.
 *
 * Due to the order of evaluation in XTEA many people have incorrectly
 * implemented it.  XETA (XTEA in the wrong order) exists for
 * compatibility with these implementations.
 *
 * Copyright (c) 2004 Aaron Grothe ajgrothe@yahoo.com
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 */
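
/*
 * For reference, one correct XTEA half-round updates y as
 *
 *	y += ((z << 4 ^ z >> 5) + z) ^ (sum + key[sum & 3]);
 *
 * while the common mis-implementation that XETA preserves computes
 *
 *	y += (z << 4 ^ z >> 5) + (z ^ sum) + key[sum & 3];
 *
 * i.e. the XOR and additions are grouped differently, so the two
 * variants produce incompatible ciphertexts from the same key and
 * plaintext.
 */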

#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <asm/byteorder.h>
#include <linux/crypto.h>
#include <linux/types.h>

#define TEA_KEY_SIZE		16
#define TEA_BLOCK_SIZE		8
#define TEA_ROUNDS		32
#define TEA_DELTA		0x9e3779b9

#define XTEA_KEY_SIZE		16
#define XTEA_BLOCK_SIZE		8
#define XTEA_ROUNDS		32
#define XTEA_DELTA		0x9e3779b9

struct tea_ctx {
	u32 KEY[4];
};

struct xtea_ctx {
	u32 KEY[4];
};

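/*
 * Load the 128-bit TEA key as four little-endian 32-bit words.
 * key_len is fixed to TEA_KEY_SIZE by the cia_min/max_keysize fields
 * in the cipher template below, so it is not validated here.
 */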
static int tea_setkey(struct crypto_tfm *tfm, const u8 *in_key,
		      unsigned int key_len)
{
	struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *key = (const __le32 *)in_key;

	ctx->KEY[0] = le32_to_cpu(key[0]);
	ctx->KEY[1] = le32_to_cpu(key[1]);
	ctx->KEY[2] = le32_to_cpu(key[2]);
	ctx->KEY[3] = le32_to_cpu(key[3]);

	return 0;
}

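/*
 * TEA encryption: TEA_ROUNDS (32) rounds over one 64-bit block held in
 * (y, z), with sum accumulating TEA_DELTA each round.
 */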
static void tea_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, n, sum = 0;
	u32 k0, k1, k2, k3;
	struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	k0 = ctx->KEY[0];
	k1 = ctx->KEY[1];
	k2 = ctx->KEY[2];
	k3 = ctx->KEY[3];

	n = TEA_ROUNDS;

	while (n-- > 0) {
		sum += TEA_DELTA;
		y += ((z << 4) + k0) ^ (z + sum) ^ ((z >> 5) + k1);
		z += ((y << 4) + k2) ^ (y + sum) ^ ((y >> 5) + k3);
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

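/*
 * TEA decryption: run the rounds in reverse, starting from the final
 * sum value TEA_DELTA << 5 (i.e. TEA_DELTA * TEA_ROUNDS) and winding
 * it back down to zero.
 */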
static void tea_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, n, sum;
	u32 k0, k1, k2, k3;
	struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	k0 = ctx->KEY[0];
	k1 = ctx->KEY[1];
	k2 = ctx->KEY[2];
	k3 = ctx->KEY[3];

	sum = TEA_DELTA << 5;

	n = TEA_ROUNDS;

	while (n-- > 0) {
		z -= ((y << 4) + k2) ^ (y + sum) ^ ((y >> 5) + k3);
		y -= ((z << 4) + k0) ^ (z + sum) ^ ((z >> 5) + k1);
		sum -= TEA_DELTA;
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

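/*
 * XTEA and XETA use the same key layout as TEA: a 128-bit key taken
 * as four little-endian 32-bit words.
 */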
static int xtea_setkey(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *key = (const __le32 *)in_key;

	ctx->KEY[0] = le32_to_cpu(key[0]);
	ctx->KEY[1] = le32_to_cpu(key[1]);
	ctx->KEY[2] = le32_to_cpu(key[2]);
	ctx->KEY[3] = le32_to_cpu(key[3]);

	return 0;
}

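/*
 * XTEA encryption: the key word for each half-round is selected by
 * bits of sum (sum & 3 before the delta is added, sum >> 11 & 3 after).
 */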
static void xtea_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, sum = 0;
	u32 limit = XTEA_DELTA * XTEA_ROUNDS;
	struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	while (sum != limit) {
		y += ((z << 4 ^ z >> 5) + z) ^ (sum + ctx->KEY[sum & 3]);
		sum += XTEA_DELTA;
		z += ((y << 4 ^ y >> 5) + y) ^ (sum + ctx->KEY[sum >> 11 & 3]);
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

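/*
 * XTEA decryption: start from the final sum value
 * (XTEA_DELTA * XTEA_ROUNDS) and undo the half-rounds in reverse
 * order until sum reaches zero.
 */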
static void xtea_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, sum;
	struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	sum = XTEA_DELTA * XTEA_ROUNDS;

	while (sum) {
		z -= ((y << 4 ^ y >> 5) + y) ^ (sum + ctx->KEY[sum >> 11 & 3]);
		sum -= XTEA_DELTA;
		y -= ((z << 4 ^ z >> 5) + z) ^ (sum + ctx->KEY[sum & 3]);
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

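/*
 * XETA encryption: the same structure as XTEA, but with the XOR and
 * additions grouped differently.  The shifted term is added to
 * (z ^ sum) + key, rather than being added to z and then XORed with
 * (sum + key) as in correct XTEA.  This matches the widespread
 * incorrect implementations mentioned in the header comment.
 */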
static void xeta_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, sum = 0;
	u32 limit = XTEA_DELTA * XTEA_ROUNDS;
	struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	while (sum != limit) {
		y += (z << 4 ^ z >> 5) + (z ^ sum) + ctx->KEY[sum & 3];
		sum += XTEA_DELTA;
		z += (y << 4 ^ y >> 5) + (y ^ sum) + ctx->KEY[sum >> 11 & 3];
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

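/* XETA decryption: the inverse of xeta_encrypt, winding sum back to zero. */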
static void xeta_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, sum;
	struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	sum = XTEA_DELTA * XTEA_ROUNDS;

	while (sum) {
		z -= (y << 4 ^ y >> 5) + (y ^ sum) + ctx->KEY[sum >> 11 & 3];
		sum -= XTEA_DELTA;
		y -= (z << 4 ^ z >> 5) + (z ^ sum) + ctx->KEY[sum & 3];
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

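/*
 * All three variants are exposed to the crypto core as plain
 * single-block ciphers with a fixed 128-bit key and 64-bit block.
 * XETA reuses xtea_setkey since the key schedule is identical.
 */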
static struct crypto_alg tea_algs[3] = { {
	.cra_name		=	"tea",
	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		=	TEA_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct tea_ctx),
	.cra_alignmask		=	3,
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{ .cipher = {
	.cia_min_keysize	=	TEA_KEY_SIZE,
	.cia_max_keysize	=	TEA_KEY_SIZE,
	.cia_setkey		=	tea_setkey,
	.cia_encrypt		=	tea_encrypt,
	.cia_decrypt		=	tea_decrypt } }
}, {
	.cra_name		=	"xtea",
	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		=	XTEA_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct xtea_ctx),
	.cra_alignmask		=	3,
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{ .cipher = {
	.cia_min_keysize	=	XTEA_KEY_SIZE,
	.cia_max_keysize	=	XTEA_KEY_SIZE,
	.cia_setkey		=	xtea_setkey,
	.cia_encrypt		=	xtea_encrypt,
	.cia_decrypt		=	xtea_decrypt } }
}, {
	.cra_name		=	"xeta",
	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		=	XTEA_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct xtea_ctx),
	.cra_alignmask		=	3,
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{ .cipher = {
	.cia_min_keysize	=	XTEA_KEY_SIZE,
	.cia_max_keysize	=	XTEA_KEY_SIZE,
	.cia_setkey		=	xtea_setkey,
	.cia_encrypt		=	xeta_encrypt,
	.cia_decrypt		=	xeta_decrypt } }
} };
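
/*
 * A rough usage sketch, not compiled here, assuming the caller has
 * access to the usual single-block cipher helpers (crypto_alloc_cipher
 * and friends):
 *
 *	struct crypto_cipher *tfm;
 *	u8 key[XTEA_KEY_SIZE], in[XTEA_BLOCK_SIZE], out[XTEA_BLOCK_SIZE];
 *
 *	tfm = crypto_alloc_cipher("xtea", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	crypto_cipher_setkey(tfm, key, XTEA_KEY_SIZE);
 *	crypto_cipher_encrypt_one(tfm, out, in);
 *	crypto_free_cipher(tfm);
 */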

static int __init tea_mod_init(void)
{
	return crypto_register_algs(tea_algs, ARRAY_SIZE(tea_algs));
}

static void __exit tea_mod_fini(void)
{
	crypto_unregister_algs(tea_algs, ARRAY_SIZE(tea_algs));
}

MODULE_ALIAS("xtea");
MODULE_ALIAS("xeta");

module_init(tea_mod_init);
module_exit(tea_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("TEA, XTEA & XETA Cryptographic Algorithms");