/*
 * Glue Code for x86_64/AVX/AES-NI assembler optimized version of Camellia
 *
 * Copyright © 2012 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 */

#include <linux/module.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/ctr.h>
#include <crypto/lrw.h>
#include <crypto/xts.h>
#include <asm/xcr.h>
#include <asm/xsave.h>
#include <asm/crypto/camellia.h>
#include <asm/crypto/ablk_helper.h>
#include <asm/crypto/glue_helper.h>

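/*
 * Number of 16-byte blocks handled per call by the 16-way assembler
 * routines below; it is also used as fpu_blocks_limit, so the SIMD unit is
 * only claimed for requests of at least one full 16-block chunk.
 */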
#define CAMELLIA_AESNI_PARALLEL_BLOCKS 16

/* 16-way AES-NI parallel cipher functions */
asmlinkage void camellia_ecb_enc_16way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src);
asmlinkage void camellia_ecb_dec_16way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src);

asmlinkage void camellia_cbc_dec_16way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src);
asmlinkage void camellia_ctr_16way(struct camellia_ctx *ctx, u8 *dst,
				   const u8 *src, le128 *iv);

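/*
 * Dispatch tables for the common glue helper.  Entries are ordered widest
 * first; glue_*_crypt_128bit() uses the first routine whose block count
 * still fits the remaining data, falling back from the 16-way AVX/AES-NI
 * code to the 2-way and 1-way x86_64 assembler implementations.
 */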
static const struct common_glue_ctx camellia_enc = {
	.num_funcs = 3,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_enc_16way) }
	}, {
		.num_blocks = 2,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk_2way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk) }
	} }
};

static const struct common_glue_ctx camellia_ctr = {
	.num_funcs = 3,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_ctr_16way) }
	}, {
		.num_blocks = 2,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_crypt_ctr_2way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_crypt_ctr) }
	} }
};

static const struct common_glue_ctx camellia_dec = {
	.num_funcs = 3,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_dec_16way) }
	}, {
		.num_blocks = 2,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk_2way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk) }
	} }
};

static const struct common_glue_ctx camellia_dec_cbc = {
	.num_funcs = 3,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_cbc_dec_16way) }
	}, {
		.num_blocks = 2,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_decrypt_cbc_2way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_dec_blk) }
	} }
};

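/*
 * The blkcipher entry points are thin wrappers: scatterlist walking, IV
 * handling and FPU management are all done by the 128-bit glue helpers
 * driven by the dispatch tables above.
 */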
static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&camellia_enc, desc, dst, src, nbytes);
}

static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&camellia_dec, desc, dst, src, nbytes);
}

static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_encrypt_128bit(GLUE_FUNC_CAST(camellia_enc_blk), desc,
				       dst, src, nbytes);
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_decrypt_128bit(&camellia_dec_cbc, desc, dst, src,
				       nbytes);
}

static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		     struct scatterlist *src, unsigned int nbytes)
{
	return glue_ctr_crypt_128bit(&camellia_ctr, desc, dst, src, nbytes);
}

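/*
 * kernel_fpu_begin()/end() is costly, so the FPU is claimed only once at
 * least a full 16-block chunk is to be processed, and it stays claimed
 * until the caller runs camellia_fpu_end().
 */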
static inline bool camellia_fpu_begin(bool fpu_enabled, unsigned int nbytes)
{
	return glue_fpu_begin(CAMELLIA_BLOCK_SIZE,
			      CAMELLIA_AESNI_PARALLEL_BLOCKS, NULL, fpu_enabled,
			      nbytes);
}

static inline void camellia_fpu_end(bool fpu_enabled)
{
	glue_fpu_end(fpu_enabled);
}

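/* Key expansion is shared with the plain x86_64 Camellia implementation. */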
static int camellia_setkey(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	return __camellia_setkey(crypto_tfm_ctx(tfm), in_key, key_len,
				 &tfm->crt_flags);
}

struct crypt_priv {
	struct camellia_ctx *ctx;
	bool fpu_enabled;
};

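/*
 * LRW/XTS callbacks: encrypt or decrypt a run of pre-tweaked blocks in
 * place, preferring the 16-way routine and falling back to the 2-way and
 * single-block code for the tail.  crypt_priv carries the FPU state across
 * the calls.
 */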
static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = CAMELLIA_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = camellia_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes >= CAMELLIA_AESNI_PARALLEL_BLOCKS * bsize) {
		camellia_ecb_enc_16way(ctx->ctx, srcdst, srcdst);
		srcdst += bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
		nbytes -= bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
	}

	while (nbytes >= CAMELLIA_PARALLEL_BLOCKS * bsize) {
		camellia_enc_blk_2way(ctx->ctx, srcdst, srcdst);
		srcdst += bsize * CAMELLIA_PARALLEL_BLOCKS;
		nbytes -= bsize * CAMELLIA_PARALLEL_BLOCKS;
	}

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		camellia_enc_blk(ctx->ctx, srcdst, srcdst);
}

static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = CAMELLIA_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = camellia_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes >= CAMELLIA_AESNI_PARALLEL_BLOCKS * bsize) {
		camellia_ecb_dec_16way(ctx->ctx, srcdst, srcdst);
		srcdst += bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
		nbytes -= bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
	}

	while (nbytes >= CAMELLIA_PARALLEL_BLOCKS * bsize) {
		camellia_dec_blk_2way(ctx->ctx, srcdst, srcdst);
		srcdst += bsize * CAMELLIA_PARALLEL_BLOCKS;
		nbytes -= bsize * CAMELLIA_PARALLEL_BLOCKS;
	}

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		camellia_dec_blk(ctx->ctx, srcdst, srcdst);
}

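/*
 * lrw_crypt() needs a tweak scratch buffer; sizing it for 16 blocks matches
 * the widest routine above.  CRYPTO_TFM_REQ_MAY_SLEEP is cleared because
 * the callbacks may run with the FPU context held.
 */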
static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct camellia_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[CAMELLIA_AESNI_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->camellia_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = encrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	camellia_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct camellia_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[CAMELLIA_AESNI_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->camellia_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = decrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	camellia_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

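/*
 * XTS mirrors the LRW path, except that the tweak is generated by
 * encrypting the IV with the single-block routine through tweak_fn.
 */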
static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct camellia_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[CAMELLIA_AESNI_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->crypt_ctx,
		.fpu_enabled = false,
	};
	struct xts_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.tweak_ctx = &ctx->tweak_ctx,
		.tweak_fn = XTS_TWEAK_CAST(camellia_enc_blk),
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = encrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = xts_crypt(desc, dst, src, nbytes, &req);
	camellia_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct camellia_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[CAMELLIA_AESNI_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->crypt_ctx,
		.fpu_enabled = false,
	};
	struct xts_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.tweak_ctx = &ctx->tweak_ctx,
		.tweak_fn = XTS_TWEAK_CAST(camellia_enc_blk),
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = decrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = xts_crypt(desc, dst, src, nbytes, &req);
	camellia_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

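/*
 * The first five entries are the internal, synchronous "__driver"
 * algorithms (priority 0); the last five are the async ablkcipher wrappers
 * built on ablk_helper/cryptd that users actually select for
 * ecb/cbc/ctr/lrw/xts(camellia).
 */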
static struct crypto_alg cmll_algs[10] = { {
	.cra_name		= "__ecb-camellia-aesni",
	.cra_driver_name	= "__driver-ecb-camellia-aesni",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct camellia_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.setkey		= camellia_setkey,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
}, {
	.cra_name		= "__cbc-camellia-aesni",
	.cra_driver_name	= "__driver-cbc-camellia-aesni",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct camellia_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.setkey		= camellia_setkey,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
}, {
	.cra_name		= "__ctr-camellia-aesni",
	.cra_driver_name	= "__driver-ctr-camellia-aesni",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct camellia_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.ivsize		= CAMELLIA_BLOCK_SIZE,
			.setkey		= camellia_setkey,
			.encrypt	= ctr_crypt,
			.decrypt	= ctr_crypt,
		},
	},
}, {
	.cra_name		= "__lrw-camellia-aesni",
	.cra_driver_name	= "__driver-lrw-camellia-aesni",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct camellia_lrw_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_exit		= lrw_camellia_exit_tfm,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE +
					  CAMELLIA_BLOCK_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE +
					  CAMELLIA_BLOCK_SIZE,
			.ivsize		= CAMELLIA_BLOCK_SIZE,
			.setkey		= lrw_camellia_setkey,
			.encrypt	= lrw_encrypt,
			.decrypt	= lrw_decrypt,
		},
	},
}, {
	.cra_name		= "__xts-camellia-aesni",
	.cra_driver_name	= "__driver-xts-camellia-aesni",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct camellia_xts_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE * 2,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE * 2,
			.ivsize		= CAMELLIA_BLOCK_SIZE,
			.setkey		= xts_camellia_setkey,
			.encrypt	= xts_encrypt,
			.decrypt	= xts_decrypt,
		},
	},
}, {
	.cra_name		= "ecb(camellia)",
	.cra_driver_name	= "ecb-camellia-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "cbc(camellia)",
	.cra_driver_name	= "cbc-camellia-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.ivsize		= CAMELLIA_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= __ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "ctr(camellia)",
	.cra_driver_name	= "ctr-camellia-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.ivsize		= CAMELLIA_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_encrypt,
			.geniv		= "chainiv",
		},
	},
}, {
	.cra_name		= "lrw(camellia)",
	.cra_driver_name	= "lrw-camellia-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE +
					  CAMELLIA_BLOCK_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE +
					  CAMELLIA_BLOCK_SIZE,
			.ivsize		= CAMELLIA_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "xts(camellia)",
	.cra_driver_name	= "xts-camellia-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE * 2,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE * 2,
			.ivsize		= CAMELLIA_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
} };

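/*
 * Only register if AVX, AES-NI and OSXSAVE are available and XCR0 shows
 * that the OS saves/restores both SSE and YMM state.
 */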
static int __init camellia_aesni_init(void)
{
	u64 xcr0;

	if (!cpu_has_avx || !cpu_has_aes || !cpu_has_osxsave) {
		pr_info("AVX or AES-NI instructions are not detected.\n");
		return -ENODEV;
	}

	xcr0 = xgetbv(XCR_XFEATURE_ENABLED_MASK);
	if ((xcr0 & (XSTATE_SSE | XSTATE_YMM)) != (XSTATE_SSE | XSTATE_YMM)) {
		pr_info("AVX detected but unusable.\n");
		return -ENODEV;
	}

	return crypto_register_algs(cmll_algs, ARRAY_SIZE(cmll_algs));
}

static void __exit camellia_aesni_fini(void)
{
	crypto_unregister_algs(cmll_algs, ARRAY_SIZE(cmll_algs));
}

module_init(camellia_aesni_init);
module_exit(camellia_aesni_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Camellia Cipher Algorithm, AES-NI/AVX optimized");
MODULE_ALIAS("camellia");
MODULE_ALIAS("camellia-asm");