/*
 * Glue Code for AVX assembler versions of Serpent Cipher
 *
 * Copyright (C) 2012 Johannes Goetzfried
 *     <Johannes.Goetzfried@informatik.stud.uni-erlangen.de>
 *
 * Copyright © 2011-2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
 * 02110-1301, USA.
 *
 */

#include <linux/module.h>
#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/ablk_helper.h>
#include <crypto/algapi.h>
#include <crypto/serpent.h>
#include <crypto/cryptd.h>
#include <crypto/b128ops.h>
#include <crypto/ctr.h>
#include <crypto/lrw.h>
#include <crypto/xts.h>
#include <asm/xcr.h>
#include <asm/xsave.h>
#include <asm/crypto/serpent-avx.h>
#include <asm/crypto/glue_helper.h>

/* 8-way parallel cipher functions */
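/*
 * These assembler routines are exported rather than kept file-local,
 * presumably so that related Serpent glue code (such as an AVX2
 * variant) can reuse them; that reuse is an assumption, not something
 * documented in this file.
 */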
asmlinkage void serpent_ecb_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst,
					 const u8 *src);
EXPORT_SYMBOL_GPL(serpent_ecb_enc_8way_avx);

asmlinkage void serpent_ecb_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
					 const u8 *src);
EXPORT_SYMBOL_GPL(serpent_ecb_dec_8way_avx);

asmlinkage void serpent_cbc_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
					 const u8 *src);
EXPORT_SYMBOL_GPL(serpent_cbc_dec_8way_avx);

asmlinkage void serpent_ctr_8way_avx(struct serpent_ctx *ctx, u8 *dst,
				     const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(serpent_ctr_8way_avx);

asmlinkage void serpent_xts_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst,
					 const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(serpent_xts_enc_8way_avx);

asmlinkage void serpent_xts_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
					 const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(serpent_xts_dec_8way_avx);

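/*
 * Scalar CTR fallback.  The counter is kept little-endian in *iv so it
 * can be incremented cheaply, and is byte-swapped to the big-endian
 * wire format only for the encryption itself.  Per block, roughly:
 *
 *	ctrblk = be128(*iv); (*iv)++;
 *	*dst = *src ^ E_K(ctrblk);
 */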
void __serpent_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	be128 ctrblk;

	le128_to_be128(&ctrblk, iv);
	le128_inc(iv);

	__serpent_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk);
	u128_xor(dst, src, (u128 *)&ctrblk);
}
EXPORT_SYMBOL_GPL(__serpent_crypt_ctr);

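/*
 * Single-block XTS helpers: glue_xts_crypt_128bit_one() wraps the raw
 * cipher call in the standard XTS whitening and advances the tweak by
 * a GF(2^128) multiplication by x:
 *
 *	C = E_K1(P ^ T) ^ T;  T <- T * x
 */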
void serpent_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(__serpent_encrypt));
}
EXPORT_SYMBOL_GPL(serpent_xts_enc);

void serpent_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(__serpent_decrypt));
}
EXPORT_SYMBOL_GPL(serpent_xts_dec);

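/*
 * Dispatch tables for the shared glue code.  Entries must be listed in
 * descending num_blocks order: the glue helper uses the widest routine
 * that still fits the remaining data and falls back to the one-block
 * path for the tail.  fpu_blocks_limit is the smallest batch for which
 * turning on the FPU (and thus AVX) is considered worthwhile.
 */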
static const struct common_glue_ctx serpent_enc = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_enc_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_encrypt) }
	} }
};

static const struct common_glue_ctx serpent_ctr = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(serpent_ctr_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(__serpent_crypt_ctr) }
	} }
};

static const struct common_glue_ctx serpent_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc) }
	} }
};

static const struct common_glue_ctx serpent_dec = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_dec_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_decrypt) }
	} }
};

static const struct common_glue_ctx serpent_dec_cbc = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(serpent_cbc_dec_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(__serpent_decrypt) }
	} }
};

static const struct common_glue_ctx serpent_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec) }
	} }
};

static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&serpent_enc, desc, dst, src, nbytes);
}

static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&serpent_dec, desc, dst, src, nbytes);
}

static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_encrypt_128bit(GLUE_FUNC_CAST(__serpent_encrypt), desc,
				       dst, src, nbytes);
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_decrypt_128bit(&serpent_dec_cbc, desc, dst, src,
				       nbytes);
}

static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		     struct scatterlist *src, unsigned int nbytes)
{
	return glue_ctr_crypt_128bit(&serpent_ctr, desc, dst, src, nbytes);
}

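/*
 * kernel_fpu_begin()/kernel_fpu_end() are costly and disable
 * preemption, so the FPU section is opened lazily on the first large
 * enough chunk and kept open across calls rather than toggled per
 * block.
 */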
static inline bool serpent_fpu_begin(bool fpu_enabled, unsigned int nbytes)
{
	return glue_fpu_begin(SERPENT_BLOCK_SIZE, SERPENT_PARALLEL_BLOCKS,
			      NULL, fpu_enabled, nbytes);
}

static inline void serpent_fpu_end(bool fpu_enabled)
{
	glue_fpu_end(fpu_enabled);
}

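/*
 * Carries the cipher context and the current FPU state across the
 * lrw_crypt() callbacks below, so that a single FPU section can span
 * several callback invocations.
 */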
struct crypt_priv {
	struct serpent_ctx *ctx;
	bool fpu_enabled;
};

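/*
 * ECB callbacks for lrw_crypt(): use the 8-way AVX routine when handed
 * a full parallel chunk, otherwise fall back to one block at a time.
 */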
static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = SERPENT_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes == bsize * SERPENT_PARALLEL_BLOCKS) {
		serpent_ecb_enc_8way_avx(ctx->ctx, srcdst, srcdst);
		return;
	}

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		__serpent_encrypt(ctx->ctx, srcdst, srcdst);
}

static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = SERPENT_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes == bsize * SERPENT_PARALLEL_BLOCKS) {
		serpent_ecb_dec_8way_avx(ctx->ctx, srcdst, srcdst);
		return;
	}

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		__serpent_decrypt(ctx->ctx, srcdst, srcdst);
}

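/*
 * LRW key layout: the Serpent key proper, followed by one block
 * (16 bytes) of tweak key used to build the multiplication table.
 */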
int lrw_serpent_setkey(struct crypto_tfm *tfm, const u8 *key,
		       unsigned int keylen)
{
	struct serpent_lrw_ctx *ctx = crypto_tfm_ctx(tfm);
	int err;

	err = __serpent_setkey(&ctx->serpent_ctx, key, keylen -
							SERPENT_BLOCK_SIZE);
	if (err)
		return err;

	return lrw_init_table(&ctx->lrw_table, key + keylen -
						SERPENT_BLOCK_SIZE);
}
EXPORT_SYMBOL_GPL(lrw_serpent_setkey);

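/*
 * Sleeping is not allowed while an FPU section is active, so
 * CRYPTO_TFM_REQ_MAY_SLEEP is cleared before lrw_crypt() invokes the
 * callbacks.
 */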
static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[SERPENT_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->serpent_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = encrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	serpent_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[SERPENT_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->serpent_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = decrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	serpent_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

void lrw_serpent_exit_tfm(struct crypto_tfm *tfm)
{
	struct serpent_lrw_ctx *ctx = crypto_tfm_ctx(tfm);

	lrw_free_table(&ctx->lrw_table);
}
EXPORT_SYMBOL_GPL(lrw_serpent_exit_tfm);

int xts_serpent_setkey(struct crypto_tfm *tfm, const u8 *key,
		       unsigned int keylen)
{
	struct serpent_xts_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	int err;

	/* key consists of keys of equal size concatenated, therefore
	 * the length must be even
	 */
	if (keylen % 2) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	/* first half of xts-key is for crypt */
	err = __serpent_setkey(&ctx->crypt_ctx, key, keylen / 2);
	if (err)
		return err;

	/* second half of xts-key is for tweak */
	return __serpent_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2);
}
EXPORT_SYMBOL_GPL(xts_serpent_setkey);

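/*
 * Note that xts_decrypt() below also passes __serpent_encrypt as the
 * tweak function: the XTS tweak is always produced by *encrypting* the
 * IV with the second key, even when decrypting the data.
 */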
static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&serpent_enc_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(__serpent_encrypt),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&serpent_dec_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(__serpent_encrypt),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}

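/*
 * Registered in two layers: the "__"-prefixed synchronous blkciphers
 * (priority 0) do the actual work and must only run where the FPU is
 * usable; the ablkcipher wrappers further below (priority 500) go
 * through the ablk helper and defer to cryptd when necessary.
 */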
static struct crypto_alg serpent_algs[10] = { {
	.cra_name		= "__ecb-serpent-avx",
	.cra_driver_name	= "__driver-ecb-serpent-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.setkey		= serpent_setkey,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
}, {
	.cra_name		= "__cbc-serpent-avx",
	.cra_driver_name	= "__driver-cbc-serpent-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.setkey		= serpent_setkey,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
}, {
	.cra_name		= "__ctr-serpent-avx",
	.cra_driver_name	= "__driver-ctr-serpent-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct serpent_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= serpent_setkey,
			.encrypt	= ctr_crypt,
			.decrypt	= ctr_crypt,
		},
	},
}, {
	.cra_name		= "__lrw-serpent-avx",
	.cra_driver_name	= "__driver-lrw-serpent-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_lrw_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_exit		= lrw_serpent_exit_tfm,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= lrw_serpent_setkey,
			.encrypt	= lrw_encrypt,
			.decrypt	= lrw_decrypt,
		},
	},
}, {
	.cra_name		= "__xts-serpent-avx",
	.cra_driver_name	= "__driver-xts-serpent-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_xts_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE * 2,
			.max_keysize	= SERPENT_MAX_KEY_SIZE * 2,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= xts_serpent_setkey,
			.encrypt	= xts_encrypt,
			.decrypt	= xts_decrypt,
		},
	},
}, {
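	/* User-visible async wrappers around the internal algorithms. */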
	.cra_name		= "ecb(serpent)",
	.cra_driver_name	= "ecb-serpent-avx",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "cbc(serpent)",
	.cra_driver_name	= "cbc-serpent-avx",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= __ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "ctr(serpent)",
	.cra_driver_name	= "ctr-serpent-avx",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_encrypt,
			.geniv		= "chainiv",
		},
	},
}, {
	.cra_name		= "lrw(serpent)",
	.cra_driver_name	= "lrw-serpent-avx",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "xts(serpent)",
	.cra_driver_name	= "xts-serpent-avx",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE * 2,
			.max_keysize	= SERPENT_MAX_KEY_SIZE * 2,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
} };

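/*
 * AVX is only usable if the CPU advertises it *and* the OS has enabled
 * saving of SSE and YMM state in XCR0, hence the XGETBV check below.
 */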
static int __init serpent_init(void)
{
	u64 xcr0;

	if (!cpu_has_avx || !cpu_has_osxsave) {
		printk(KERN_INFO "AVX instructions are not detected.\n");
		return -ENODEV;
	}

	xcr0 = xgetbv(XCR_XFEATURE_ENABLED_MASK);
	if ((xcr0 & (XSTATE_SSE | XSTATE_YMM)) != (XSTATE_SSE | XSTATE_YMM)) {
		printk(KERN_INFO "AVX detected but unusable.\n");
		return -ENODEV;
	}

	return crypto_register_algs(serpent_algs, ARRAY_SIZE(serpent_algs));
}

static void __exit serpent_exit(void)
{
	crypto_unregister_algs(serpent_algs, ARRAY_SIZE(serpent_algs));
}

module_init(serpent_init);
module_exit(serpent_exit);

MODULE_DESCRIPTION("Serpent Cipher Algorithm, AVX optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS("serpent");