xref: /illumos-gate/usr/src/uts/common/crypto/io/aes.c (revision bf5d9f18edeb77c14df996d367853599bdd43fd1)
1 /*
2  * CDDL HEADER START
3  *
4  * The contents of this file are subject to the terms of the
5  * Common Development and Distribution License (the "License").
6  * You may not use this file except in compliance with the License.
7  *
8  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9  * or http://www.opensolaris.org/os/licensing.
10  * See the License for the specific language governing permissions
11  * and limitations under the License.
12  *
13  * When distributing Covered Code, include this CDDL HEADER in each
14  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15  * If applicable, add the following below this CDDL HEADER, with the
16  * fields enclosed by brackets "[]" replaced with your own identifying
17  * information: Portions Copyright [yyyy] [name of copyright owner]
18  *
19  * CDDL HEADER END
20  */
21 /*
22  * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
23  * Copyright 2017 Nexenta Systems, Inc.  All rights reserved.
24  */
25 
26 /*
27  * AES provider for the Kernel Cryptographic Framework (KCF)
28  */
29 
30 #include <sys/types.h>
31 #include <sys/systm.h>
32 #include <sys/modctl.h>
33 #include <sys/cmn_err.h>
34 #include <sys/ddi.h>
35 #include <sys/crypto/common.h>
36 #include <sys/crypto/impl.h>
37 #include <sys/crypto/spi.h>
38 #include <sys/sysmacros.h>
39 #include <sys/strsun.h>
40 #include <modes/modes.h>
41 #define	_AES_IMPL
42 #include <aes/aes_impl.h>
43 
44 extern struct mod_ops mod_cryptoops;
45 
46 /*
47  * Module linkage information for the kernel.
48  */
/* Crypto-module linkage record handed to the module framework. */
static struct modlcrypto modlcrypto = {
	&mod_cryptoops,
	"AES Kernel SW Provider"
};
53 
/* Module linkage: single modlcrypto entry, NULL-terminated. */
static struct modlinkage modlinkage = {
	MODREV_1,
	(void *)&modlcrypto,
	NULL
};
59 
60 /*
61  * Mechanism info structure passed to KCF during registration.
62  */
63 static crypto_mech_info_t aes_mech_info_tab[] = {
64 	/* AES_ECB */
65 	{SUN_CKM_AES_ECB, AES_ECB_MECH_INFO_TYPE,
66 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
67 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
68 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
69 	/* AES_CBC */
70 	{SUN_CKM_AES_CBC, AES_CBC_MECH_INFO_TYPE,
71 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
72 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
73 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
74 	/* AES_CMAC */
75 	{SUN_CKM_AES_CMAC, AES_CMAC_MECH_INFO_TYPE,
76 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
77 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC |
78 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
79 	/* AES_CTR */
80 	{SUN_CKM_AES_CTR, AES_CTR_MECH_INFO_TYPE,
81 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
82 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
83 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
84 	/* AES_CCM */
85 	{SUN_CKM_AES_CCM, AES_CCM_MECH_INFO_TYPE,
86 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
87 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
88 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
89 	/* AES_GCM */
90 	{SUN_CKM_AES_GCM, AES_GCM_MECH_INFO_TYPE,
91 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
92 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
93 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
94 	/* AES_GMAC */
95 	{SUN_CKM_AES_GMAC, AES_GMAC_MECH_INFO_TYPE,
96 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
97 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC |
98 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC |
99 	    CRYPTO_FG_SIGN | CRYPTO_FG_SIGN_ATOMIC |
100 	    CRYPTO_FG_VERIFY | CRYPTO_FG_VERIFY_ATOMIC,
101 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES}
102 };
103 
104 /* operations are in-place if the output buffer is NULL */
/*
 * Operations are performed in-place when the output buffer is NULL:
 * alias the output argument to the input.  Wrapped in do/while (0) so
 * the macro expands to exactly one statement and composes safely with
 * unbraced if/else at call sites.
 */
#define	AES_ARG_INPLACE(input, output)				\
	do {							\
		if ((output) == NULL)				\
			(output) = (input);			\
	} while (0)
108 
static void aes_provider_status(crypto_provider_handle_t, uint_t *);

/* Control entry points: only the provider-status callback is needed. */
static crypto_control_ops_t aes_control_ops = {
	aes_provider_status
};
114 
115 static int aes_encrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
116     crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
117 static int aes_decrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
118     crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
119 static int aes_common_init(crypto_ctx_t *, crypto_mechanism_t *,
120     crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t, boolean_t);
121 static int aes_common_init_ctx(aes_ctx_t *, crypto_spi_ctx_template_t *,
122     crypto_mechanism_t *, crypto_key_t *, int, boolean_t);
123 static int aes_encrypt_final(crypto_ctx_t *, crypto_data_t *,
124     crypto_req_handle_t);
125 static int aes_decrypt_final(crypto_ctx_t *, crypto_data_t *,
126     crypto_req_handle_t);
127 
128 static int aes_encrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
129     crypto_req_handle_t);
130 static int aes_encrypt_update(crypto_ctx_t *, crypto_data_t *,
131     crypto_data_t *, crypto_req_handle_t);
132 static int aes_encrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
133     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
134     crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
135 
136 static int aes_decrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
137     crypto_req_handle_t);
138 static int aes_decrypt_update(crypto_ctx_t *, crypto_data_t *,
139     crypto_data_t *, crypto_req_handle_t);
140 static int aes_decrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
141     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
142     crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
143 
/* Cipher entry points: init/single-part/update/final/atomic for both directions. */
static crypto_cipher_ops_t aes_cipher_ops = {
	aes_encrypt_init,
	aes_encrypt,
	aes_encrypt_update,
	aes_encrypt_final,
	aes_encrypt_atomic,
	aes_decrypt_init,
	aes_decrypt,
	aes_decrypt_update,
	aes_decrypt_final,
	aes_decrypt_atomic
};
156 
157 static int aes_mac_init(crypto_ctx_t *, crypto_mechanism_t *,
158     crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
159 static int aes_mac(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
160     crypto_req_handle_t);
161 static int aes_mac_update(crypto_ctx_t *, crypto_data_t *,
162     crypto_req_handle_t);
163 static int aes_mac_final(crypto_ctx_t *, crypto_data_t *,
164     crypto_req_handle_t);
165 static int aes_mac_atomic(crypto_provider_handle_t, crypto_session_id_t,
166     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
167     crypto_spi_ctx_template_t, crypto_req_handle_t);
168 static int aes_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t,
169     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
170     crypto_spi_ctx_template_t, crypto_req_handle_t);
171 
/* MAC entry points (used by the CMAC and GMAC mechanisms). */
static crypto_mac_ops_t aes_mac_ops = {
	aes_mac_init,
	aes_mac,
	aes_mac_update,
	aes_mac_final,
	aes_mac_atomic,
	aes_mac_verify_atomic
};
180 
181 static int aes_create_ctx_template(crypto_provider_handle_t,
182     crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
183     size_t *, crypto_req_handle_t);
184 static int aes_free_context(crypto_ctx_t *);
185 
/* Context-management entry points: template creation and context teardown. */
static crypto_ctx_ops_t aes_ctx_ops = {
	aes_create_ctx_template,
	aes_free_context
};
190 
/*
 * Provider operations vector.  Only the control, cipher, MAC and context
 * op groups are implemented here; the NULL slots are operation groups
 * this provider does not support.
 */
static crypto_ops_t aes_crypto_ops = {
	&aes_control_ops,
	NULL,
	&aes_cipher_ops,
	&aes_mac_ops,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	&aes_ctx_ops,
	NULL,
	NULL,
	NULL,
};
210 
/*
 * Provider descriptor passed to crypto_register_provider() in _init().
 * The mechanism count is derived from aes_mech_info_tab so the two stay
 * in sync automatically.
 */
static crypto_provider_info_t aes_prov_info = {
	CRYPTO_SPI_VERSION_4,
	"AES Software Provider",
	CRYPTO_SW_PROVIDER,
	{&modlinkage},
	NULL,
	&aes_crypto_ops,
	sizeof (aes_mech_info_tab)/sizeof (crypto_mech_info_t),
	aes_mech_info_tab
};
221 
/* Handle returned by KCF at registration; NULL while unregistered (see _fini()). */
static crypto_kcf_provider_handle_t aes_prov_handle = NULL;
/*
 * Empty RAW crypto_data_t.  NOTE(review): no consumer is visible in this
 * part of the file; presumably used as a placeholder output by the
 * MAC/atomic routines further down — confirm.
 */
static crypto_data_t null_crypto_data = { CRYPTO_DATA_RAW };
224 
225 int
226 _init(void)
227 {
228 	int ret;
229 
230 	if ((ret = mod_install(&modlinkage)) != 0)
231 		return (ret);
232 
233 	/* Register with KCF.  If the registration fails, remove the module. */
234 	if (crypto_register_provider(&aes_prov_info, &aes_prov_handle)) {
235 		(void) mod_remove(&modlinkage);
236 		return (EACCES);
237 	}
238 
239 	return (0);
240 }
241 
242 int
243 _fini(void)
244 {
245 	/* Unregister from KCF if module is registered */
246 	if (aes_prov_handle != NULL) {
247 		if (crypto_unregister_provider(aes_prov_handle))
248 			return (EBUSY);
249 
250 		aes_prov_handle = NULL;
251 	}
252 
253 	return (mod_remove(&modlinkage));
254 }
255 
/* Report module information; delegates entirely to the module framework. */
int
_info(struct modinfo *modinfop)
{
	return (mod_info(&modlinkage, modinfop));
}
261 
262 
/*
 * Validate the mechanism parameter length for the given AES mechanism
 * and, when 'ctx' is non-NULL, allocate the matching mode-specific
 * context via the mode's *_alloc_ctx() routine.
 *
 * Returns CRYPTO_SUCCESS, CRYPTO_MECHANISM_INVALID for an unknown
 * mechanism type, or CRYPTO_MECHANISM_PARAM_INVALID when a supplied
 * parameter has the wrong length.
 */
static int
aes_check_mech_param(crypto_mechanism_t *mechanism, aes_ctx_t **ctx, int kmflag)
{
	void *p = NULL;
	boolean_t param_required = B_TRUE;
	size_t param_len;
	void *(*alloc_fun)(int);
	int rv = CRYPTO_SUCCESS;

	/* param_len is only meaningful when param_required stays B_TRUE */
	switch (mechanism->cm_type) {
	case AES_ECB_MECH_INFO_TYPE:
		param_required = B_FALSE;
		alloc_fun = ecb_alloc_ctx;
		break;
	case AES_CBC_MECH_INFO_TYPE:
		param_len = AES_BLOCK_LEN;
		alloc_fun = cbc_alloc_ctx;
		break;
	case AES_CMAC_MECH_INFO_TYPE:
		param_required = B_FALSE;
		alloc_fun = cmac_alloc_ctx;
		break;
	case AES_CTR_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_CTR_PARAMS);
		alloc_fun = ctr_alloc_ctx;
		break;
	case AES_CCM_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_CCM_PARAMS);
		alloc_fun = ccm_alloc_ctx;
		break;
	case AES_GCM_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_GCM_PARAMS);
		alloc_fun = gcm_alloc_ctx;
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_GMAC_PARAMS);
		alloc_fun = gmac_alloc_ctx;
		break;
	default:
		rv = CRYPTO_MECHANISM_INVALID;
		return (rv);
	}
	/*
	 * NOTE(review): a required parameter that is absent (cm_param ==
	 * NULL) is not rejected here; presumably later mode-init code
	 * catches that case — confirm.
	 */
	if (param_required && mechanism->cm_param != NULL &&
	    mechanism->cm_param_len != param_len) {
		rv = CRYPTO_MECHANISM_PARAM_INVALID;
	}
	/*
	 * Allocation happens even if rv is already an error; a failed
	 * allocation leaves *ctx NULL for the caller to handle.
	 */
	if (ctx != NULL) {
		p = (alloc_fun)(kmflag);
		*ctx = p;
	}
	return (rv);
}
315 
316 /*
317  * Initialize key schedules for AES
318  */
319 static int
320 init_keysched(crypto_key_t *key, void *newbie)
321 {
322 	/*
323 	 * Only keys by value are supported by this module.
324 	 */
325 	switch (key->ck_format) {
326 	case CRYPTO_KEY_RAW:
327 		if (key->ck_length < AES_MINBITS ||
328 		    key->ck_length > AES_MAXBITS) {
329 			return (CRYPTO_KEY_SIZE_RANGE);
330 		}
331 
332 		/* key length must be either 128, 192, or 256 */
333 		if ((key->ck_length & 63) != 0)
334 			return (CRYPTO_KEY_SIZE_RANGE);
335 		break;
336 	default:
337 		return (CRYPTO_KEY_TYPE_INCONSISTENT);
338 	}
339 
340 	aes_init_keysched(key->ck_data, key->ck_length, newbie);
341 	return (CRYPTO_SUCCESS);
342 }
343 
344 /*
345  * KCF software provider control entry points.
346  */
/* This software provider is always ready; report that unconditionally. */
/* ARGSUSED */
static void
aes_provider_status(crypto_provider_handle_t provider, uint_t *status)
{
	*status = CRYPTO_PROVIDER_READY;
}
353 
/* Encrypt-init entry point: common init with is_encrypt_init = B_TRUE. */
static int
aes_encrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req)
{
	return (aes_common_init(ctx, mechanism, key, template, req, B_TRUE));
}
361 
/* Decrypt-init entry point: common init with is_encrypt_init = B_FALSE. */
static int
aes_decrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req)
{
	return (aes_common_init(ctx, mechanism, key, template, req, B_FALSE));
}
369 
370 
371 
372 /*
373  * KCF software provider encrypt entry points.
374  */
375 static int
376 aes_common_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
377     crypto_key_t *key, crypto_spi_ctx_template_t template,
378     crypto_req_handle_t req, boolean_t is_encrypt_init)
379 {
380 	aes_ctx_t *aes_ctx;
381 	int rv;
382 	int kmflag;
383 
384 	/*
385 	 * Only keys by value are supported by this module.
386 	 */
387 	if (key->ck_format != CRYPTO_KEY_RAW) {
388 		return (CRYPTO_KEY_TYPE_INCONSISTENT);
389 	}
390 
391 	kmflag = crypto_kmflag(req);
392 	if ((rv = aes_check_mech_param(mechanism, &aes_ctx, kmflag))
393 	    != CRYPTO_SUCCESS)
394 		return (rv);
395 
396 	rv = aes_common_init_ctx(aes_ctx, template, mechanism, key, kmflag,
397 	    is_encrypt_init);
398 	if (rv != CRYPTO_SUCCESS) {
399 		crypto_free_mode_ctx(aes_ctx);
400 		return (rv);
401 	}
402 
403 	ctx->cc_provider_private = aes_ctx;
404 
405 	return (CRYPTO_SUCCESS);
406 }
407 
/*
 * Copy a 16-byte AES block from 'in' into the two 64-bit words at 'out'.
 * Uses two direct 64-bit loads when 'in' is 8-byte aligned; otherwise
 * falls back to the byte-wise AES_COPY_BLOCK() macro to avoid a
 * misaligned access.  Assumes 'out' itself is 64-bit aligned — TODO
 * confirm callers guarantee this.
 */
static void
aes_copy_block64(uint8_t *in, uint64_t *out)
{
	if (IS_P2ALIGNED(in, sizeof (uint64_t))) {
		/* LINTED: pointer alignment */
		out[0] = *(uint64_t *)&in[0];
		/* LINTED: pointer alignment */
		out[1] = *(uint64_t *)&in[8];
	} else {
		uint8_t *iv8 = (uint8_t *)&out[0];

		AES_COPY_BLOCK(in, iv8);
	}
}
422 
423 
/*
 * Single-part encrypt entry point.  Computes the output length required
 * by the active mode (returning CRYPTO_BUFFER_TOO_SMALL without
 * destroying the context if the caller's buffer is short), runs a full
 * update over the input, performs the mode-specific final step for
 * CCM/GCM/GMAC/CMAC, and frees the context on the way out.
 */
static int
aes_encrypt(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_req_handle_t req)
{
	int ret = CRYPTO_FAILED;

	aes_ctx_t *aes_ctx;
	size_t saved_length, saved_offset, length_needed;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	/*
	 * For block ciphers, plaintext must be a multiple of AES block size.
	 * This test is only valid for ciphers whose blocksize is a power of 2.
	 */
	if (((aes_ctx->ac_flags & (CMAC_MODE|CTR_MODE|CCM_MODE|
	    GCM_MODE|GMAC_MODE)) == 0) &&
	    (plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
		return (CRYPTO_DATA_LEN_RANGE);

	AES_ARG_INPLACE(plaintext, ciphertext);

	/*
	 * We need to just return the length needed to store the output.
	 * We should not destroy the context for the following case.
	 *
	 * CCM/GCM append a MAC/tag to the ciphertext; CMAC emits exactly
	 * one block; GMAC takes no plaintext and emits only the tag.
	 */
	switch (aes_ctx->ac_flags & (CMAC_MODE|CCM_MODE|GCM_MODE|GMAC_MODE)) {
	case CCM_MODE:
		length_needed = plaintext->cd_length + aes_ctx->ac_mac_len;
		break;
	case GCM_MODE:
		length_needed = plaintext->cd_length + aes_ctx->ac_tag_len;
		break;
	case CMAC_MODE:
		length_needed = AES_BLOCK_LEN;
		break;
	case GMAC_MODE:
		if (plaintext->cd_length != 0)
			return (CRYPTO_ARGUMENTS_BAD);

		length_needed = aes_ctx->ac_tag_len;
		break;
	default:
		length_needed = plaintext->cd_length;
	}

	if (ciphertext->cd_length < length_needed) {
		ciphertext->cd_length = length_needed;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_length = ciphertext->cd_length;
	saved_offset = ciphertext->cd_offset;

	/*
	 * Do an update on the specified input data.
	 */
	ret = aes_encrypt_update(ctx, plaintext, ciphertext, req);
	if (ret != CRYPTO_SUCCESS) {
		return (ret);
	}

	/*
	 * For CCM mode, aes_ccm_encrypt_final() will take care of any
	 * left-over unprocessed data, and compute the MAC
	 */
	if (aes_ctx->ac_flags & CCM_MODE) {
		/*
		 * ccm_encrypt_final() will compute the MAC and append
		 * it to existing ciphertext. So, need to adjust the left over
		 * length value accordingly
		 */

		/* order of following 2 lines MUST not be reversed */
		ciphertext->cd_offset = ciphertext->cd_length;
		ciphertext->cd_length = saved_length - ciphertext->cd_length;
		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, ciphertext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
		ciphertext->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/*
		 * gcm_encrypt_final() will compute the MAC and append
		 * it to existing ciphertext. So, need to adjust the left over
		 * length value accordingly
		 */

		/* order of following 2 lines MUST not be reversed */
		ciphertext->cd_offset = ciphertext->cd_length;
		ciphertext->cd_length = saved_length - ciphertext->cd_length;
		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, ciphertext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
		ciphertext->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & CMAC_MODE) {
		/* cmac_update doesn't store data */
		ciphertext->cd_length = saved_length;
		ret = cmac_mode_final((cbc_ctx_t *)aes_ctx, ciphertext,
		    aes_encrypt_block, aes_xor_block);
		aes_ctx->ac_remainder_len = 0;
	}

	ASSERT(aes_ctx->ac_remainder_len == 0);
	/* single-part operation: the context is always consumed here */
	(void) aes_free_context(ctx);

	return (ret);
}
547 
548 
/*
 * Single-part decrypt entry point.  Mirrors aes_encrypt(): computes the
 * plaintext length the active mode will produce (returning
 * CRYPTO_BUFFER_TOO_SMALL without destroying the context if the buffer
 * is short), runs a full update, performs the CCM/GCM/GMAC final step
 * (which also verifies the MAC/tag), and frees the context.
 */
static int
aes_decrypt(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_req_handle_t req)
{
	int ret = CRYPTO_FAILED;

	aes_ctx_t *aes_ctx;
	off_t saved_offset;
	size_t saved_length, length_needed;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	/*
	 * For block ciphers, plaintext must be a multiple of AES block size.
	 * This test is only valid for ciphers whose blocksize is a power of 2.
	 */
	if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
	    == 0) && (ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0) {
		return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
	}

	AES_ARG_INPLACE(ciphertext, plaintext);

	/*
	 * Return length needed to store the output.
	 * Do not destroy context when plaintext buffer is too small.
	 *
	 * CCM:  plaintext is MAC len smaller than cipher text
	 * GCM:  plaintext is TAG len smaller than cipher text
	 * GMAC: plaintext length must be zero
	 */
	switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
	case CCM_MODE:
		length_needed = aes_ctx->ac_processed_data_len;
		break;
	case GCM_MODE:
		length_needed = ciphertext->cd_length - aes_ctx->ac_tag_len;
		break;
	case GMAC_MODE:
		if (plaintext->cd_length != 0)
			return (CRYPTO_ARGUMENTS_BAD);

		length_needed = 0;
		break;
	default:
		length_needed = ciphertext->cd_length;
	}

	if (plaintext->cd_length < length_needed) {
		plaintext->cd_length = length_needed;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	/*
	 * Do an update on the specified input data.
	 */
	ret = aes_decrypt_update(ctx, ciphertext, plaintext, req);
	if (ret != CRYPTO_SUCCESS) {
		goto cleanup;
	}

	if (aes_ctx->ac_flags & CCM_MODE) {
		ASSERT(aes_ctx->ac_processed_data_len == aes_ctx->ac_data_len);
		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);

		/* order of following 2 lines MUST not be reversed */
		plaintext->cd_offset = plaintext->cd_length;
		plaintext->cd_length = saved_length - plaintext->cd_length;

		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, plaintext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			if (plaintext != ciphertext) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			}
		} else {
			/* restore the caller's view on MAC failure */
			plaintext->cd_length = saved_length;
		}

		plaintext->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/* order of following 2 lines MUST not be reversed */
		plaintext->cd_offset = plaintext->cd_length;
		plaintext->cd_length = saved_length - plaintext->cd_length;

		ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, plaintext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			if (plaintext != ciphertext) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			}
		} else {
			/* restore the caller's view on tag failure */
			plaintext->cd_length = saved_length;
		}

		plaintext->cd_offset = saved_offset;
	}

	ASSERT(aes_ctx->ac_remainder_len == 0);

cleanup:
	/* single-part operation: the context is always consumed here */
	(void) aes_free_context(ctx);

	return (ret);
}
661 
662 
/*
 * Multi-part encrypt update.  Processes as many whole AES blocks as the
 * accumulated input allows, dispatching on the crypto_data_t format
 * (RAW/UIO/MBLK).  For CTR mode any trailing partial block is flushed
 * immediately via ctr_mode_final(), since CTR is a stream cipher.
 */
/* ARGSUSED */
static int
aes_encrypt_update(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_req_handle_t req)
{
	off_t saved_offset;
	size_t saved_length, out_len;
	int ret = CRYPTO_SUCCESS;
	aes_ctx_t *aes_ctx;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	AES_ARG_INPLACE(plaintext, ciphertext);

	/* compute number of bytes that will hold the ciphertext */
	out_len = aes_ctx->ac_remainder_len;
	out_len += plaintext->cd_length;
	out_len &= ~(AES_BLOCK_LEN - 1);

	/*
	 * return length needed to store the output.
	 * CMAC stores its output in a local buffer until *_final.
	 */
	if ((aes_ctx->ac_flags & CMAC_MODE) == 0 &&
	    ciphertext->cd_length < out_len) {
		ciphertext->cd_length = out_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_offset = ciphertext->cd_offset;
	saved_length = ciphertext->cd_length;

	/*
	 * Do the AES update on the specified input data.
	 */
	switch (plaintext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_MBLK:
		ret = crypto_update_mp(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/*
	 * Since AES counter mode is a stream cipher, we call
	 * ctr_mode_final() to pick up any remaining bytes.
	 * It is an internal function that does not destroy
	 * the context like *normal* final routines.
	 */
	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx,
		    ciphertext, aes_encrypt_block);
	}

	/* on success report bytes produced; on failure restore cd_length */
	if (ret == CRYPTO_SUCCESS) {
		if (plaintext != ciphertext)
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
	} else {
		ciphertext->cd_length = saved_length;
	}
	ciphertext->cd_offset = saved_offset;

	return (ret);
}
741 
742 
/*
 * Multi-part decrypt update.  Mirrors aes_encrypt_update(), but for
 * CCM/GCM/GMAC no plaintext is emitted here (those modes buffer until
 * the final call, where the MAC/tag is verified), so the output-size
 * check is skipped for them.
 */
static int
aes_decrypt_update(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_req_handle_t req)
{
	off_t saved_offset;
	size_t saved_length, out_len;
	int ret = CRYPTO_SUCCESS;
	aes_ctx_t *aes_ctx;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	AES_ARG_INPLACE(ciphertext, plaintext);

	/*
	 * Compute number of bytes that will hold the plaintext.
	 * This is not necessary for CCM, GCM, and GMAC since these
	 * mechanisms never return plaintext for update operations.
	 */
	if ((aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
		out_len = aes_ctx->ac_remainder_len;
		out_len += ciphertext->cd_length;
		out_len &= ~(AES_BLOCK_LEN - 1);

		/* return length needed to store the output */
		if (plaintext->cd_length < out_len) {
			plaintext->cd_length = out_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	/* GCM/GMAC buffer internally and need the allocation flag now */
	if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE))
		gcm_set_kmflag((gcm_ctx_t *)aes_ctx, crypto_kmflag(req));

	/*
	 * Do the AES update on the specified input data.
	 */
	switch (ciphertext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_MBLK:
		ret = crypto_update_mp(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/*
	 * Since AES counter mode is a stream cipher, we call
	 * ctr_mode_final() to pick up any remaining bytes.
	 * It is an internal function that does not destroy
	 * the context like *normal* final routines.
	 */
	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, plaintext,
		    aes_encrypt_block);
		/* decrypt paths report the encrypted-data variant */
		if (ret == CRYPTO_DATA_LEN_RANGE)
			ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
	}

	/* on success report bytes produced; on failure restore cd_length */
	if (ret == CRYPTO_SUCCESS) {
		if (ciphertext != plaintext)
			plaintext->cd_length =
			    plaintext->cd_offset - saved_offset;
	} else {
		plaintext->cd_length = saved_length;
	}
	plaintext->cd_offset = saved_offset;


	return (ret);
}
828 
/*
 * Multi-part encrypt final.  Flushes any buffered input through the
 * mode-specific final routine (CTR remainder, CCM/GCM tag emission,
 * CMAC single-block MAC), rejects leftover partial blocks for plain
 * block modes, and frees the context on success.
 */
/* ARGSUSED */
static int
aes_encrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
    crypto_req_handle_t req)
{
	aes_ctx_t *aes_ctx;
	int ret;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	if (data->cd_format != CRYPTO_DATA_RAW &&
	    data->cd_format != CRYPTO_DATA_UIO &&
	    data->cd_format != CRYPTO_DATA_MBLK) {
		return (CRYPTO_ARGUMENTS_BAD);
	}

	if (aes_ctx->ac_flags & CTR_MODE) {
		if (aes_ctx->ac_remainder_len > 0) {
			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
			    aes_encrypt_block);
			if (ret != CRYPTO_SUCCESS)
				return (ret);
		}
	} else if (aes_ctx->ac_flags & CCM_MODE) {
		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		size_t saved_offset = data->cd_offset;

		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
		data->cd_length = data->cd_offset - saved_offset;
		data->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & CMAC_MODE) {
		ret = cmac_mode_final((cbc_ctx_t *)aes_ctx, data,
		    aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS)
			return (ret);
		data->cd_length = AES_BLOCK_LEN;
	} else {
		/*
		 * There must be no unprocessed plaintext.
		 * This happens if the length of the last data is
		 * not a multiple of the AES block length.
		 */
		if (aes_ctx->ac_remainder_len > 0) {
			return (CRYPTO_DATA_LEN_RANGE);
		}
		data->cd_length = 0;
	}

	(void) aes_free_context(ctx);

	return (CRYPTO_SUCCESS);
}
892 
/*
 * Multi-part decrypt final.  Flushes any CTR remainder, then for
 * CCM/GCM/GMAC releases the buffered plaintext and verifies the
 * MAC/tag (returning CRYPTO_BUFFER_TOO_SMALL, without destroying the
 * context, if the caller's buffer cannot hold it).  Frees the context
 * on success.
 */
/* ARGSUSED */
static int
aes_decrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
    crypto_req_handle_t req)
{
	aes_ctx_t *aes_ctx;
	int ret;
	off_t saved_offset;
	size_t saved_length;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	if (data->cd_format != CRYPTO_DATA_RAW &&
	    data->cd_format != CRYPTO_DATA_UIO &&
	    data->cd_format != CRYPTO_DATA_MBLK) {
		return (CRYPTO_ARGUMENTS_BAD);
	}

	/*
	 * There must be no unprocessed ciphertext.
	 * This happens if the length of the last ciphertext is
	 * not a multiple of the AES block length.
	 */
	if (aes_ctx->ac_remainder_len > 0) {
		if ((aes_ctx->ac_flags & CTR_MODE) == 0)
			return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
		else {
			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
			    aes_encrypt_block);
			if (ret == CRYPTO_DATA_LEN_RANGE)
				ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
			if (ret != CRYPTO_SUCCESS)
				return (ret);
		}
	}

	if (aes_ctx->ac_flags & CCM_MODE) {
		/*
		 * This is where all the plaintext is returned, make sure
		 * the plaintext buffer is big enough
		 */
		size_t pt_len = aes_ctx->ac_data_len;
		if (data->cd_length < pt_len) {
			data->cd_length = pt_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}

		ASSERT(aes_ctx->ac_processed_data_len == pt_len);
		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);
		saved_offset = data->cd_offset;
		saved_length = data->cd_length;
		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			data->cd_length = data->cd_offset - saved_offset;
		} else {
			data->cd_length = saved_length;
		}

		data->cd_offset = saved_offset;
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/*
		 * This is where all the plaintext is returned, make sure
		 * the plaintext buffer is big enough
		 *
		 * (note: this local 'ctx' shadows the function parameter
		 * for the remainder of this block)
		 */
		gcm_ctx_t *ctx = (gcm_ctx_t *)aes_ctx;
		size_t pt_len = ctx->gcm_processed_data_len - ctx->gcm_tag_len;

		if (data->cd_length < pt_len) {
			data->cd_length = pt_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}

		saved_offset = data->cd_offset;
		saved_length = data->cd_length;
		ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			data->cd_length = data->cd_offset - saved_offset;
		} else {
			data->cd_length = saved_length;
		}

		data->cd_offset = saved_offset;
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	}


	/* plain block modes produce no data at final time */
	if ((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
		data->cd_length = 0;
	}

	(void) aes_free_context(ctx);

	return (CRYPTO_SUCCESS);
}
996 
997 /* ARGSUSED */
998 static int
999 aes_encrypt_atomic(crypto_provider_handle_t provider,
1000     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1001     crypto_key_t *key, crypto_data_t *plaintext, crypto_data_t *ciphertext,
1002     crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1003 {
1004 	aes_ctx_t aes_ctx;	/* on the stack */
1005 	off_t saved_offset;
1006 	size_t saved_length;
1007 	size_t length_needed;
1008 	int ret;
1009 
1010 	AES_ARG_INPLACE(plaintext, ciphertext);
1011 
1012 	/*
1013 	 * CTR, CCM, CMAC, GCM, and GMAC modes do not require that plaintext
1014 	 * be a multiple of AES block size.
1015 	 */
1016 	switch (mechanism->cm_type) {
1017 	case AES_CTR_MECH_INFO_TYPE:
1018 	case AES_CCM_MECH_INFO_TYPE:
1019 	case AES_GCM_MECH_INFO_TYPE:
1020 	case AES_GMAC_MECH_INFO_TYPE:
1021 	case AES_CMAC_MECH_INFO_TYPE:
1022 		break;
1023 	default:
1024 		if ((plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
1025 			return (CRYPTO_DATA_LEN_RANGE);
1026 	}
1027 
1028 	if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
1029 		return (ret);
1030 
1031 	bzero(&aes_ctx, sizeof (aes_ctx_t));
1032 
1033 	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
1034 	    crypto_kmflag(req), B_TRUE);
1035 	if (ret != CRYPTO_SUCCESS)
1036 		return (ret);
1037 
1038 	switch (mechanism->cm_type) {
1039 	case AES_CCM_MECH_INFO_TYPE:
1040 		length_needed = plaintext->cd_length + aes_ctx.ac_mac_len;
1041 		break;
1042 	case AES_GMAC_MECH_INFO_TYPE:
1043 		if (plaintext->cd_length != 0)
1044 			return (CRYPTO_ARGUMENTS_BAD);
1045 		/* FALLTHRU */
1046 	case AES_GCM_MECH_INFO_TYPE:
1047 		length_needed = plaintext->cd_length + aes_ctx.ac_tag_len;
1048 		break;
1049 	case AES_CMAC_MECH_INFO_TYPE:
1050 		length_needed = AES_BLOCK_LEN;
1051 		break;
1052 	default:
1053 		length_needed = plaintext->cd_length;
1054 	}
1055 
1056 	/* return size of buffer needed to store output */
1057 	if (ciphertext->cd_length < length_needed) {
1058 		ciphertext->cd_length = length_needed;
1059 		ret = CRYPTO_BUFFER_TOO_SMALL;
1060 		goto out;
1061 	}
1062 
1063 	saved_offset = ciphertext->cd_offset;
1064 	saved_length = ciphertext->cd_length;
1065 
1066 	/*
1067 	 * Do an update on the specified input data.
1068 	 */
1069 	switch (plaintext->cd_format) {
1070 	case CRYPTO_DATA_RAW:
1071 		ret = crypto_update_iov(&aes_ctx, plaintext, ciphertext,
1072 		    aes_encrypt_contiguous_blocks, aes_copy_block64);
1073 		break;
1074 	case CRYPTO_DATA_UIO:
1075 		ret = crypto_update_uio(&aes_ctx, plaintext, ciphertext,
1076 		    aes_encrypt_contiguous_blocks, aes_copy_block64);
1077 		break;
1078 	case CRYPTO_DATA_MBLK:
1079 		ret = crypto_update_mp(&aes_ctx, plaintext, ciphertext,
1080 		    aes_encrypt_contiguous_blocks, aes_copy_block64);
1081 		break;
1082 	default:
1083 		ret = CRYPTO_ARGUMENTS_BAD;
1084 	}
1085 
1086 	if (ret == CRYPTO_SUCCESS) {
1087 		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
1088 			ret = ccm_encrypt_final((ccm_ctx_t *)&aes_ctx,
1089 			    ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
1090 			    aes_xor_block);
1091 			if (ret != CRYPTO_SUCCESS)
1092 				goto out;
1093 			ASSERT(aes_ctx.ac_remainder_len == 0);
1094 		} else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
1095 		    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
1096 			ret = gcm_encrypt_final((gcm_ctx_t *)&aes_ctx,
1097 			    ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
1098 			    aes_copy_block, aes_xor_block);
1099 			if (ret != CRYPTO_SUCCESS)
1100 				goto out;
1101 			ASSERT(aes_ctx.ac_remainder_len == 0);
1102 		} else if (mechanism->cm_type == AES_CTR_MECH_INFO_TYPE) {
1103 			if (aes_ctx.ac_remainder_len > 0) {
1104 				ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
1105 				    ciphertext, aes_encrypt_block);
1106 				if (ret != CRYPTO_SUCCESS)
1107 					goto out;
1108 			}
1109 		} else if (mechanism->cm_type == AES_CMAC_MECH_INFO_TYPE) {
1110 			ret = cmac_mode_final((cbc_ctx_t *)&aes_ctx,
1111 			    ciphertext, aes_encrypt_block,
1112 			    aes_xor_block);
1113 			if (ret != CRYPTO_SUCCESS)
1114 				goto out;
1115 		} else {
1116 			ASSERT(aes_ctx.ac_remainder_len == 0);
1117 		}
1118 
1119 		if (plaintext != ciphertext) {
1120 			ciphertext->cd_length =
1121 			    ciphertext->cd_offset - saved_offset;
1122 		}
1123 	} else {
1124 		ciphertext->cd_length = saved_length;
1125 	}
1126 	ciphertext->cd_offset = saved_offset;
1127 
1128 out:
1129 	if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1130 		bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1131 		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1132 	}
1133 
1134 	return (ret);
1135 }
1136 
1137 /* ARGSUSED */
1138 static int
1139 aes_decrypt_atomic(crypto_provider_handle_t provider,
1140     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1141     crypto_key_t *key, crypto_data_t *ciphertext, crypto_data_t *plaintext,
1142     crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1143 {
1144 	aes_ctx_t aes_ctx;	/* on the stack */
1145 	off_t saved_offset;
1146 	size_t saved_length;
1147 	size_t length_needed;
1148 	int ret;
1149 
1150 	AES_ARG_INPLACE(ciphertext, plaintext);
1151 
1152 	/*
1153 	 * CCM, GCM, CTR, and GMAC modes do not require that ciphertext
1154 	 * be a multiple of AES block size.
1155 	 */
1156 	switch (mechanism->cm_type) {
1157 	case AES_CTR_MECH_INFO_TYPE:
1158 	case AES_CCM_MECH_INFO_TYPE:
1159 	case AES_GCM_MECH_INFO_TYPE:
1160 	case AES_GMAC_MECH_INFO_TYPE:
1161 		break;
1162 	default:
1163 		if ((ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
1164 			return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
1165 	}
1166 
1167 	if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
1168 		return (ret);
1169 
1170 	bzero(&aes_ctx, sizeof (aes_ctx_t));
1171 
1172 	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
1173 	    crypto_kmflag(req), B_FALSE);
1174 	if (ret != CRYPTO_SUCCESS)
1175 		return (ret);
1176 
1177 	switch (mechanism->cm_type) {
1178 	case AES_CCM_MECH_INFO_TYPE:
1179 		length_needed = aes_ctx.ac_data_len;
1180 		break;
1181 	case AES_GCM_MECH_INFO_TYPE:
1182 		length_needed = ciphertext->cd_length - aes_ctx.ac_tag_len;
1183 		break;
1184 	case AES_GMAC_MECH_INFO_TYPE:
1185 		if (plaintext->cd_length != 0)
1186 			return (CRYPTO_ARGUMENTS_BAD);
1187 		length_needed = 0;
1188 		break;
1189 	default:
1190 		length_needed = ciphertext->cd_length;
1191 	}
1192 
1193 	/* return size of buffer needed to store output */
1194 	if (plaintext->cd_length < length_needed) {
1195 		plaintext->cd_length = length_needed;
1196 		ret = CRYPTO_BUFFER_TOO_SMALL;
1197 		goto out;
1198 	}
1199 
1200 	saved_offset = plaintext->cd_offset;
1201 	saved_length = plaintext->cd_length;
1202 
1203 	if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
1204 	    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE)
1205 		gcm_set_kmflag((gcm_ctx_t *)&aes_ctx, crypto_kmflag(req));
1206 
1207 	/*
1208 	 * Do an update on the specified input data.
1209 	 */
1210 	switch (ciphertext->cd_format) {
1211 	case CRYPTO_DATA_RAW:
1212 		ret = crypto_update_iov(&aes_ctx, ciphertext, plaintext,
1213 		    aes_decrypt_contiguous_blocks, aes_copy_block64);
1214 		break;
1215 	case CRYPTO_DATA_UIO:
1216 		ret = crypto_update_uio(&aes_ctx, ciphertext, plaintext,
1217 		    aes_decrypt_contiguous_blocks, aes_copy_block64);
1218 		break;
1219 	case CRYPTO_DATA_MBLK:
1220 		ret = crypto_update_mp(&aes_ctx, ciphertext, plaintext,
1221 		    aes_decrypt_contiguous_blocks, aes_copy_block64);
1222 		break;
1223 	default:
1224 		ret = CRYPTO_ARGUMENTS_BAD;
1225 	}
1226 
1227 	if (ret == CRYPTO_SUCCESS) {
1228 		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
1229 			ASSERT(aes_ctx.ac_processed_data_len
1230 			    == aes_ctx.ac_data_len);
1231 			ASSERT(aes_ctx.ac_processed_mac_len
1232 			    == aes_ctx.ac_mac_len);
1233 			ret = ccm_decrypt_final((ccm_ctx_t *)&aes_ctx,
1234 			    plaintext, AES_BLOCK_LEN, aes_encrypt_block,
1235 			    aes_copy_block, aes_xor_block);
1236 			ASSERT(aes_ctx.ac_remainder_len == 0);
1237 			if ((ret == CRYPTO_SUCCESS) &&
1238 			    (ciphertext != plaintext)) {
1239 				plaintext->cd_length =
1240 				    plaintext->cd_offset - saved_offset;
1241 			} else {
1242 				plaintext->cd_length = saved_length;
1243 			}
1244 		} else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
1245 		    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
1246 			ret = gcm_decrypt_final((gcm_ctx_t *)&aes_ctx,
1247 			    plaintext, AES_BLOCK_LEN, aes_encrypt_block,
1248 			    aes_xor_block);
1249 			ASSERT(aes_ctx.ac_remainder_len == 0);
1250 			if ((ret == CRYPTO_SUCCESS) &&
1251 			    (ciphertext != plaintext)) {
1252 				plaintext->cd_length =
1253 				    plaintext->cd_offset - saved_offset;
1254 			} else {
1255 				plaintext->cd_length = saved_length;
1256 			}
1257 		} else if (mechanism->cm_type != AES_CTR_MECH_INFO_TYPE) {
1258 			ASSERT(aes_ctx.ac_remainder_len == 0);
1259 			if (ciphertext != plaintext)
1260 				plaintext->cd_length =
1261 				    plaintext->cd_offset - saved_offset;
1262 		} else {
1263 			if (aes_ctx.ac_remainder_len > 0) {
1264 				ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
1265 				    plaintext, aes_encrypt_block);
1266 				if (ret == CRYPTO_DATA_LEN_RANGE)
1267 					ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
1268 				if (ret != CRYPTO_SUCCESS)
1269 					goto out;
1270 			}
1271 			if (ciphertext != plaintext)
1272 				plaintext->cd_length =
1273 				    plaintext->cd_offset - saved_offset;
1274 		}
1275 	} else {
1276 		plaintext->cd_length = saved_length;
1277 	}
1278 	plaintext->cd_offset = saved_offset;
1279 
1280 out:
1281 	if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1282 		bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1283 		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1284 	}
1285 
1286 	if (aes_ctx.ac_flags & CCM_MODE) {
1287 		if (aes_ctx.ac_pt_buf != NULL) {
1288 			kmem_free(aes_ctx.ac_pt_buf, aes_ctx.ac_data_len);
1289 		}
1290 	} else if (aes_ctx.ac_flags & (GCM_MODE|GMAC_MODE)) {
1291 		if (((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf != NULL) {
1292 			kmem_free(((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf,
1293 			    ((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf_len);
1294 		}
1295 	}
1296 
1297 	return (ret);
1298 }
1299 
1300 /*
1301  * KCF software provider context template entry points.
1302  */
1303 /* ARGSUSED */
1304 static int
1305 aes_create_ctx_template(crypto_provider_handle_t provider,
1306     crypto_mechanism_t *mechanism, crypto_key_t *key,
1307     crypto_spi_ctx_template_t *tmpl, size_t *tmpl_size, crypto_req_handle_t req)
1308 {
1309 	void *keysched;
1310 	size_t size;
1311 	int rv;
1312 
1313 	if (mechanism->cm_type != AES_ECB_MECH_INFO_TYPE &&
1314 	    mechanism->cm_type != AES_CBC_MECH_INFO_TYPE &&
1315 	    mechanism->cm_type != AES_CMAC_MECH_INFO_TYPE &&
1316 	    mechanism->cm_type != AES_CTR_MECH_INFO_TYPE &&
1317 	    mechanism->cm_type != AES_CCM_MECH_INFO_TYPE &&
1318 	    mechanism->cm_type != AES_GCM_MECH_INFO_TYPE &&
1319 	    mechanism->cm_type != AES_GMAC_MECH_INFO_TYPE)
1320 		return (CRYPTO_MECHANISM_INVALID);
1321 
1322 	if ((keysched = aes_alloc_keysched(&size,
1323 	    crypto_kmflag(req))) == NULL) {
1324 		return (CRYPTO_HOST_MEMORY);
1325 	}
1326 
1327 	/*
1328 	 * Initialize key schedule.  Key length information is stored
1329 	 * in the key.
1330 	 */
1331 	if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
1332 		bzero(keysched, size);
1333 		kmem_free(keysched, size);
1334 		return (rv);
1335 	}
1336 
1337 	*tmpl = keysched;
1338 	*tmpl_size = size;
1339 
1340 	return (CRYPTO_SUCCESS);
1341 }
1342 
1343 
1344 static int
1345 aes_free_context(crypto_ctx_t *ctx)
1346 {
1347 	aes_ctx_t *aes_ctx = ctx->cc_provider_private;
1348 
1349 	if (aes_ctx != NULL) {
1350 		if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1351 			ASSERT(aes_ctx->ac_keysched_len != 0);
1352 			bzero(aes_ctx->ac_keysched, aes_ctx->ac_keysched_len);
1353 			kmem_free(aes_ctx->ac_keysched,
1354 			    aes_ctx->ac_keysched_len);
1355 		}
1356 		crypto_free_mode_ctx(aes_ctx);
1357 		ctx->cc_provider_private = NULL;
1358 	}
1359 
1360 	return (CRYPTO_SUCCESS);
1361 }
1362 
1363 
1364 static int
1365 aes_common_init_ctx(aes_ctx_t *aes_ctx, crypto_spi_ctx_template_t *template,
1366     crypto_mechanism_t *mechanism, crypto_key_t *key, int kmflag,
1367     boolean_t is_encrypt_init)
1368 {
1369 	int rv = CRYPTO_SUCCESS;
1370 	void *keysched;
1371 	size_t size;
1372 
1373 	if (template == NULL) {
1374 		if ((keysched = aes_alloc_keysched(&size, kmflag)) == NULL)
1375 			return (CRYPTO_HOST_MEMORY);
1376 		/*
1377 		 * Initialize key schedule.
1378 		 * Key length is stored in the key.
1379 		 */
1380 		if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
1381 			kmem_free(keysched, size);
1382 			return (rv);
1383 		}
1384 
1385 		aes_ctx->ac_flags |= PROVIDER_OWNS_KEY_SCHEDULE;
1386 		aes_ctx->ac_keysched_len = size;
1387 	} else {
1388 		keysched = template;
1389 	}
1390 	aes_ctx->ac_keysched = keysched;
1391 
1392 	switch (mechanism->cm_type) {
1393 	case AES_CBC_MECH_INFO_TYPE:
1394 		rv = cbc_init_ctx((cbc_ctx_t *)aes_ctx, mechanism->cm_param,
1395 		    mechanism->cm_param_len, AES_BLOCK_LEN, aes_copy_block64);
1396 		break;
1397 	case AES_CMAC_MECH_INFO_TYPE:
1398 		rv = cmac_init_ctx((cbc_ctx_t *)aes_ctx, AES_BLOCK_LEN);
1399 		break;
1400 	case AES_CTR_MECH_INFO_TYPE: {
1401 		CK_AES_CTR_PARAMS *pp;
1402 
1403 		if (mechanism->cm_param == NULL ||
1404 		    mechanism->cm_param_len != sizeof (CK_AES_CTR_PARAMS)) {
1405 			return (CRYPTO_MECHANISM_PARAM_INVALID);
1406 		}
1407 		pp = (CK_AES_CTR_PARAMS *)(void *)mechanism->cm_param;
1408 		rv = ctr_init_ctx((ctr_ctx_t *)aes_ctx, pp->ulCounterBits,
1409 		    pp->cb, aes_copy_block);
1410 		break;
1411 	}
1412 	case AES_CCM_MECH_INFO_TYPE:
1413 		if (mechanism->cm_param == NULL ||
1414 		    mechanism->cm_param_len != sizeof (CK_AES_CCM_PARAMS)) {
1415 			return (CRYPTO_MECHANISM_PARAM_INVALID);
1416 		}
1417 		rv = ccm_init_ctx((ccm_ctx_t *)aes_ctx, mechanism->cm_param,
1418 		    kmflag, is_encrypt_init, AES_BLOCK_LEN, aes_encrypt_block,
1419 		    aes_xor_block);
1420 		break;
1421 	case AES_GCM_MECH_INFO_TYPE:
1422 		if (mechanism->cm_param == NULL ||
1423 		    mechanism->cm_param_len != sizeof (CK_AES_GCM_PARAMS)) {
1424 			return (CRYPTO_MECHANISM_PARAM_INVALID);
1425 		}
1426 		rv = gcm_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
1427 		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
1428 		    aes_xor_block);
1429 		break;
1430 	case AES_GMAC_MECH_INFO_TYPE:
1431 		if (mechanism->cm_param == NULL ||
1432 		    mechanism->cm_param_len != sizeof (CK_AES_GMAC_PARAMS)) {
1433 			return (CRYPTO_MECHANISM_PARAM_INVALID);
1434 		}
1435 		rv = gmac_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
1436 		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
1437 		    aes_xor_block);
1438 		break;
1439 	case AES_ECB_MECH_INFO_TYPE:
1440 		aes_ctx->ac_flags |= ECB_MODE;
1441 	}
1442 
1443 	if (rv != CRYPTO_SUCCESS) {
1444 		if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1445 			bzero(keysched, size);
1446 			kmem_free(keysched, size);
1447 		}
1448 	}
1449 
1450 	return (rv);
1451 }
1452 
1453 static int
1454 process_gmac_mech(crypto_mechanism_t *mech, crypto_data_t *data,
1455     CK_AES_GCM_PARAMS *gcm_params)
1456 {
1457 	/* LINTED: pointer alignment */
1458 	CK_AES_GMAC_PARAMS *params = (CK_AES_GMAC_PARAMS *)mech->cm_param;
1459 
1460 	if (mech->cm_type != AES_GMAC_MECH_INFO_TYPE)
1461 		return (CRYPTO_MECHANISM_INVALID);
1462 
1463 	if (mech->cm_param_len != sizeof (CK_AES_GMAC_PARAMS))
1464 		return (CRYPTO_MECHANISM_PARAM_INVALID);
1465 
1466 	if (params->pIv == NULL)
1467 		return (CRYPTO_MECHANISM_PARAM_INVALID);
1468 
1469 	gcm_params->pIv = params->pIv;
1470 	gcm_params->ulIvLen = AES_GMAC_IV_LEN;
1471 	gcm_params->ulTagBits = AES_GMAC_TAG_BITS;
1472 
1473 	if (data == NULL)
1474 		return (CRYPTO_SUCCESS);
1475 
1476 	if (data->cd_format != CRYPTO_DATA_RAW)
1477 		return (CRYPTO_ARGUMENTS_BAD);
1478 
1479 	gcm_params->pAAD = (uchar_t *)data->cd_raw.iov_base;
1480 	gcm_params->ulAADLen = data->cd_length;
1481 	return (CRYPTO_SUCCESS);
1482 }
1483 
1484 static int
1485 aes_mac_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
1486     crypto_key_t *key, crypto_spi_ctx_template_t template,
1487     crypto_req_handle_t req)
1488 {
1489 	return (aes_encrypt_init(ctx, mechanism,
1490 	    key, template, req));
1491 }
1492 
1493 static int
1494 aes_mac(crypto_ctx_t *ctx, crypto_data_t *plaintext, crypto_data_t *ciphertext,
1495     crypto_req_handle_t req)
1496 {
1497 	return (aes_encrypt(ctx, plaintext, ciphertext, req));
1498 }
1499 
1500 static int
1501 aes_mac_update(crypto_ctx_t *ctx, crypto_data_t *data,
1502     crypto_req_handle_t req)
1503 {
1504 	crypto_data_t out;
1505 	uint8_t block[AES_BLOCK_LEN];
1506 	out.cd_format = CRYPTO_DATA_RAW;
1507 	out.cd_offset = 0;
1508 	out.cd_length = sizeof (block);
1509 	out.cd_miscdata = NULL;
1510 	out.cd_raw.iov_base = (void *)block;
1511 	out.cd_raw.iov_len = sizeof (block);
1512 
1513 	return (aes_encrypt_update(ctx, data, &out, req));
1514 }
1515 
1516 static int
1517 aes_mac_final(crypto_ctx_t *ctx, crypto_data_t *mac, crypto_req_handle_t req)
1518 {
1519 	return (aes_encrypt_final(ctx, mac, req));
1520 }
1521 
1522 static int
1523 aes_mac_atomic(crypto_provider_handle_t provider,
1524     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1525     crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1526     crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1527 {
1528 	CK_AES_GCM_PARAMS gcm_params;
1529 	crypto_mechanism_t gcm_mech;
1530 	int rv;
1531 
1532 	if (mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
1533 		if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
1534 		    != CRYPTO_SUCCESS)
1535 			return (rv);
1536 
1537 		gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
1538 		gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
1539 		gcm_mech.cm_param = (char *)&gcm_params;
1540 
1541 		return (aes_encrypt_atomic(provider, session_id, &gcm_mech,
1542 		    key, &null_crypto_data, mac, template, req));
1543 	}
1544 	/* CMAC */
1545 	return (aes_encrypt_atomic(provider, session_id, mechanism,
1546 	    key, data, mac, template, req));
1547 }
1548 
1549 static int
1550 aes_mac_verify_atomic(crypto_provider_handle_t provider,
1551     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1552     crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1553     crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1554 {
1555 	CK_AES_GCM_PARAMS gcm_params;
1556 	crypto_mechanism_t gcm_mech;
1557 	crypto_data_t data_mac;
1558 	char buf[AES_BLOCK_LEN];
1559 	int rv;
1560 
1561 	if (mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
1562 		if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
1563 		    != CRYPTO_SUCCESS)
1564 			return (rv);
1565 
1566 		gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
1567 		gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
1568 		gcm_mech.cm_param = (char *)&gcm_params;
1569 
1570 		return (aes_decrypt_atomic(provider, session_id, &gcm_mech,
1571 		    key, mac, &null_crypto_data, template, req));
1572 	}
1573 
1574 	/* CMAC */
1575 
1576 	data_mac.cd_format = CRYPTO_DATA_RAW;
1577 	data_mac.cd_offset = 0;
1578 	data_mac.cd_length = AES_BLOCK_LEN;
1579 	data_mac.cd_miscdata = NULL;
1580 	data_mac.cd_raw.iov_base = (void *) buf;
1581 	data_mac.cd_raw.iov_len = AES_BLOCK_LEN;
1582 
1583 	rv = aes_encrypt_atomic(provider, session_id, &gcm_mech,
1584 	    key, data, &data_mac, template, req);
1585 
1586 	if (rv != CRYPTO_SUCCESS)
1587 		return (rv);
1588 
1589 	/* should use get_input_data for mac? */
1590 	if (bcmp(buf, mac->cd_raw.iov_base + mac->cd_offset,
1591 	    AES_BLOCK_LEN) != 0)
1592 		return (CRYPTO_INVALID_MAC);
1593 
1594 	return (CRYPTO_SUCCESS);
1595 }
1596