5072963 Need an optimized AES implementation for amd64
6699938 CCM max payload computation is off by one
--- old/usr/src/common/crypto/aes/aes_cbc_crypt.c
+++ new/usr/src/common/crypto/aes/aes_cbc_crypt.c
1 1 /*
2 2 * CDDL HEADER START
3 3 *
4 4 * The contents of this file are subject to the terms of the
5 5 * Common Development and Distribution License (the "License").
6 6 * You may not use this file except in compliance with the License.
7 7 *
8 8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9 9 * or http://www.opensolaris.org/os/licensing.
10 10 * See the License for the specific language governing permissions
11 11 * and limitations under the License.
12 12 *
13 13 * When distributing Covered Code, include this CDDL HEADER in each
14 14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15 15 * If applicable, add the following below this CDDL HEADER, with the
16 16 * fields enclosed by brackets "[]" replaced with your own identifying
17 17 * information: Portions Copyright [yyyy] [name of copyright owner]
18 18 *
19 19 * CDDL HEADER END
20 20 */
21 21 /*
22 22 * Copyright 2008 Sun Microsystems, Inc. All rights reserved.
23 23 * Use is subject to license terms.
24 24 */
25 25
26 -#pragma ident "@(#)aes_cbc_crypt.c 1.9 08/05/09 SMI"
26 +#pragma ident "@(#)aes_cbc_crypt.c 1.10 08/06/13 SMI"
27 27
28 28
29 29 #include <sys/sysmacros.h>
30 30 #include <sys/systm.h>
31 31 #include <sys/crypto/common.h>
32 32 #include <sys/strsun.h>
33 33 #include "aes_cbc_crypt.h"
34 34 #include "aes_impl.h"
35 35 #ifndef _KERNEL
36 36 #include <limits.h>
37 37 #include <strings.h>
38 38 #endif /* !_KERNEL */
39 39
40 40 static int aes_ctr_ccm_mode_contiguous_blocks(aes_ctx_t *, char *, size_t,
41 41 crypto_data_t *);
42 42 static void
43 43 encode_adata_len(ulong_t auth_data_len, uint8_t *encoded, size_t *encoded_len);
44 44 static void
45 45 aes_ccm_format_initial_blocks(uchar_t *nonce, ulong_t nonceSize,
46 46 ulong_t authDataSize, uint8_t *b0, aes_ctx_t *aes_ctx);
47 47 static int
48 48 aes_ccm_decrypt_contiguous_blocks(aes_ctx_t *ctx, char *data, size_t length,
49 49 crypto_data_t *out);
50 50
51 51 /*
52 52 * Initialize by setting iov_or_mp to point to the current iovec or mp,
53 - * and by setting current_offset to an offset within the current iovec or mp .
53 + * and by setting current_offset to an offset within the current iovec or mp.
54 54 */
55 55 static void
56 56 aes_init_ptrs(crypto_data_t *out, void **iov_or_mp, offset_t *current_offset)
57 57 {
58 58 offset_t offset;
59 59
60 60 switch (out->cd_format) {
61 61 case CRYPTO_DATA_RAW:
62 62 *current_offset = out->cd_offset;
63 63 break;
64 64
65 65 case CRYPTO_DATA_UIO: {
66 66 uio_t *uiop = out->cd_uio;
67 67 uintptr_t vec_idx;
68 68
69 69 offset = out->cd_offset;
70 70 for (vec_idx = 0; vec_idx < uiop->uio_iovcnt &&
71 71 offset >= uiop->uio_iov[vec_idx].iov_len;
72 72 offset -= uiop->uio_iov[vec_idx++].iov_len)
73 73 ;
74 74
75 75 *current_offset = offset;
76 76 *iov_or_mp = (void *)vec_idx;
77 77 break;
78 78 }
79 79
80 80 case CRYPTO_DATA_MBLK: {
81 81 mblk_t *mp;
82 82
83 83 offset = out->cd_offset;
84 84 for (mp = out->cd_mp; mp != NULL && offset >= MBLKL(mp);
85 85 offset -= MBLKL(mp), mp = mp->b_cont)
86 86 ;
87 87
88 88 *current_offset = offset;
89 89 *iov_or_mp = mp;
90 90 break;
91 91
92 92 }
93 93 } /* end switch */
94 94 }
95 95
96 96 /*
97 97 * Get pointers for where in the output to copy a block of encrypted or
98 98 * decrypted data. The iov_or_mp argument stores a pointer to the current
99 99 * iovec or mp, and offset stores an offset into the current iovec or mp.
100 100 */
101 101 static void
102 102 aes_get_ptrs(crypto_data_t *out, void **iov_or_mp, offset_t *current_offset,
103 103 uint8_t **out_data_1, size_t *out_data_1_len, uint8_t **out_data_2,
104 104 size_t amt)
105 105 {
106 106 offset_t offset;
107 107
108 108 switch (out->cd_format) {
109 109 case CRYPTO_DATA_RAW: {
110 110 iovec_t *iov;
111 111
112 112 offset = *current_offset;
113 113 iov = &out->cd_raw;
114 114 if ((offset + amt) <= iov->iov_len) {
115 115 /* one AES block fits */
116 116 *out_data_1 = (uint8_t *)iov->iov_base + offset;
117 117 *out_data_1_len = amt;
118 118 *out_data_2 = NULL;
119 119 *current_offset = offset + amt;
120 120 }
121 121 break;
122 122 }
123 123
124 124 case CRYPTO_DATA_UIO: {
125 125 uio_t *uio = out->cd_uio;
126 126 iovec_t *iov;
127 127 offset_t offset;
128 128 uintptr_t vec_idx;
129 129 uint8_t *p;
130 130
131 131 offset = *current_offset;
132 132 vec_idx = (uintptr_t)(*iov_or_mp);
133 133 iov = &uio->uio_iov[vec_idx];
134 134 p = (uint8_t *)iov->iov_base + offset;
135 135 *out_data_1 = p;
136 136
137 137 if (offset + amt <= iov->iov_len) {
138 138 /* can fit one AES block into this iov */
139 139 *out_data_1_len = amt;
140 140 *out_data_2 = NULL;
141 141 *current_offset = offset + amt;
142 142 } else {
143 143 /* one AES block spans two iovecs */
144 144 *out_data_1_len = iov->iov_len - offset;
145 145 if (vec_idx == uio->uio_iovcnt)
146 146 return;
147 147 vec_idx++;
148 148 iov = &uio->uio_iov[vec_idx];
149 149 *out_data_2 = (uint8_t *)iov->iov_base;
150 150 *current_offset = amt - *out_data_1_len;
151 151 }
152 152 *iov_or_mp = (void *)vec_idx;
153 153 break;
154 154 }
155 155
156 156 case CRYPTO_DATA_MBLK: {
157 157 mblk_t *mp;
158 158 uint8_t *p;
159 159
160 160 offset = *current_offset;
161 161 mp = (mblk_t *)*iov_or_mp;
162 162 p = mp->b_rptr + offset;
163 163 *out_data_1 = p;
164 164 if ((p + amt) <= mp->b_wptr) {
165 165 /* can fit one AES block into this mblk */
166 166 *out_data_1_len = amt;
167 167 *out_data_2 = NULL;
168 168 *current_offset = offset + amt;
169 169 } else {
170 170 /* one AES block spans two mblks */
171 171 *out_data_1_len = mp->b_wptr - p;
172 172 if ((mp = mp->b_cont) == NULL)
173 173 return;
174 174 *out_data_2 = mp->b_rptr;
175 175 *current_offset = (amt - *out_data_1_len);
176 176 }
177 177 *iov_or_mp = mp;
178 178 break;
179 179 }
180 180 } /* end switch */
181 181 }
182 182
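
Note: a minimal sketch of how the two helpers above are driven (a
hypothetical caller, not part of this change). aes_init_ptrs() seeds the
cursor; each aes_get_ptrs() call then hands back one output fragment, or
two when an AES block straddles an iovec/mblk boundary:

    /* Write one AES block ("block", assumed) into crypto_data_t *out. */
    void *iov_or_mp;
    offset_t off;
    uint8_t *p1, *p2;
    size_t p1_len;

    aes_init_ptrs(out, &iov_or_mp, &off);
    aes_get_ptrs(out, &iov_or_mp, &off, &p1, &p1_len, &p2, AES_BLOCK_LEN);
    bcopy(block, p1, p1_len);             /* first (possibly only) piece */
    if (p2 != NULL)                       /* spilled into the next buffer */
            bcopy(block + p1_len, p2, AES_BLOCK_LEN - p1_len);
    out->cd_offset += AES_BLOCK_LEN;
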
183 183 static int
184 184 aes_cbc_encrypt_contiguous_blocks(aes_ctx_t *ctx, char *data, size_t length,
185 185 crypto_data_t *out)
186 186 {
187 187
188 188 /* EXPORT DELETE START */
189 189
190 190 size_t remainder = length;
191 191 size_t need;
192 192 uint8_t *datap = (uint8_t *)data;
193 193 uint8_t *blockp;
194 194 uint8_t *lastp;
195 195 uint32_t tmp[4];
196 196 void *iov_or_mp;
197 197 offset_t offset;
198 198 uint8_t *out_data_1;
199 199 uint8_t *out_data_2;
200 200 size_t out_data_1_len;
201 201
202 202 if (length + ctx->ac_remainder_len < AES_BLOCK_LEN) {
203 203 /* accumulate bytes here and return */
204 204 bcopy(datap,
205 205 (uint8_t *)ctx->ac_remainder + ctx->ac_remainder_len,
206 206 length);
207 207 ctx->ac_remainder_len += length;
208 208 ctx->ac_copy_to = datap;
209 209 return (0);
210 210 }
211 211
212 212 lastp = (uint8_t *)ctx->ac_iv;
213 213 if (out != NULL)
214 214 aes_init_ptrs(out, &iov_or_mp, &offset);
215 215
216 216 do {
217 217 /* Unprocessed data from last call. */
218 218 if (ctx->ac_remainder_len > 0) {
219 219 need = AES_BLOCK_LEN - ctx->ac_remainder_len;
220 220
221 221 if (need > remainder)
222 222 return (1);
223 223
224 224 bcopy(datap, &((uint8_t *)ctx->ac_remainder)
225 225 [ctx->ac_remainder_len], need);
226 226
227 227 blockp = (uint8_t *)ctx->ac_remainder;
228 228 } else {
229 229 blockp = datap;
230 230 }
231 231
232 232 /* don't write on the plaintext */
233 233 if (out != NULL) {
234 234 if (IS_P2ALIGNED(blockp, sizeof (uint32_t))) {
235 235 /* LINTED: pointer alignment */
236 236 tmp[0] = *(uint32_t *)blockp;
237 237 /* LINTED: pointer alignment */
238 238 tmp[1] = *(uint32_t *)&blockp[4];
239 239 /* LINTED: pointer alignment */
240 240 tmp[2] = *(uint32_t *)&blockp[8];
241 241 /* LINTED: pointer alignment */
242 242 tmp[3] = *(uint32_t *)&blockp[12];
243 243 } else {
244 244 uint8_t *tmp8 = (uint8_t *)tmp;
245 245
246 246 AES_COPY_BLOCK(blockp, tmp8);
247 247 }
248 248 blockp = (uint8_t *)tmp;
249 249 }
250 250
251 251 if (ctx->ac_flags & AES_CBC_MODE) {
252 252 /*
253 253 * XOR the previous cipher block or IV with the
254 254 * current clear block. Check for alignment.
255 255 */
256 - if (IS_P2ALIGNED(blockp, sizeof (uint32_t)) &&
257 - IS_P2ALIGNED(lastp, sizeof (uint32_t))) {
256 + if (IS_P2ALIGNED2(blockp, lastp, sizeof (uint32_t))) {
258 257 /* LINTED: pointer alignment */
259 258 *(uint32_t *)&blockp[0] ^=
260 259 /* LINTED: pointer alignment */
261 260 *(uint32_t *)&lastp[0];
262 261 /* LINTED: pointer alignment */
263 262 *(uint32_t *)&blockp[4] ^=
264 263 /* LINTED: pointer alignment */
265 264 *(uint32_t *)&lastp[4];
266 265 /* LINTED: pointer alignment */
267 266 *(uint32_t *)&blockp[8] ^=
268 267 /* LINTED: pointer alignment */
269 268 *(uint32_t *)&lastp[8];
270 269 /* LINTED: pointer alignment */
271 270 *(uint32_t *)&blockp[12] ^=
272 271 /* LINTED: pointer alignment */
273 272 *(uint32_t *)&lastp[12];
274 273 } else {
275 274 AES_XOR_BLOCK(lastp, blockp);
276 275 }
277 276 }
278 277
279 278 if (out == NULL) {
280 279 aes_encrypt_block(ctx->ac_keysched, blockp, blockp);
281 280
282 281 ctx->ac_lastp = blockp;
283 282 lastp = blockp;
284 283
285 284 if (ctx->ac_remainder_len > 0) {
286 285 bcopy(blockp, ctx->ac_copy_to,
287 286 ctx->ac_remainder_len);
288 287 bcopy(blockp + ctx->ac_remainder_len, datap,
289 288 need);
290 289 }
291 290 } else {
292 291 aes_encrypt_block(ctx->ac_keysched, blockp, lastp);
293 292 aes_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
294 293 &out_data_1_len, &out_data_2, AES_BLOCK_LEN);
295 294
296 295 /* copy block to where it belongs */
297 - bcopy(lastp, out_data_1, out_data_1_len);
296 + if ((out_data_1_len == AES_BLOCK_LEN) &&
297 + (IS_P2ALIGNED2(lastp, out_data_1,
298 + sizeof (uint32_t)))) {
299 + /* LINTED: pointer alignment */
300 + uint32_t *d = (uint32_t *)out_data_1;
301 + /* LINTED: pointer alignment */
302 + d[0] = *(uint32_t *)lastp;
303 + /* LINTED: pointer alignment */
304 + d[1] = *(uint32_t *)&lastp[4];
305 + /* LINTED: pointer alignment */
306 + d[2] = *(uint32_t *)&lastp[8];
307 + /* LINTED: pointer alignment */
308 + d[3] = *(uint32_t *)&lastp[12];
309 + } else {
310 + bcopy(lastp, out_data_1, out_data_1_len);
311 + }
298 312 if (out_data_2 != NULL) {
299 313 bcopy(lastp + out_data_1_len, out_data_2,
300 314 AES_BLOCK_LEN - out_data_1_len);
301 315 }
316 +
302 317 /* update offset */
303 318 out->cd_offset += AES_BLOCK_LEN;
304 319 }
305 320
306 321 /* Update pointer to next block of data to be processed. */
307 322 if (ctx->ac_remainder_len != 0) {
308 323 datap += need;
309 324 ctx->ac_remainder_len = 0;
310 325 } else {
311 326 datap += AES_BLOCK_LEN;
312 327 }
313 328
314 329 remainder = (size_t)&data[length] - (size_t)datap;
315 330
316 331 /* Incomplete last block. */
317 332 if (remainder > 0 && remainder < AES_BLOCK_LEN) {
318 333 bcopy(datap, ctx->ac_remainder, remainder);
319 334 ctx->ac_remainder_len = remainder;
320 335 ctx->ac_copy_to = datap;
321 336 goto out;
322 337 }
323 338 ctx->ac_copy_to = NULL;
324 339
325 340 } while (remainder > 0);
326 341
327 342 out:
328 343 /*
329 344 * Save the last encrypted block in the context - but only for
330 345 * the CBC mode of operation.
331 346 */
332 347 if ((ctx->ac_flags & AES_CBC_MODE) && (ctx->ac_lastp != NULL)) {
333 348 uint8_t *iv8 = (uint8_t *)ctx->ac_iv;
334 349 uint8_t *last8 = (uint8_t *)ctx->ac_lastp;
335 350
336 351 if (IS_P2ALIGNED(ctx->ac_lastp, sizeof (uint32_t))) {
337 352 /* LINTED: pointer alignment */
338 353 *(uint32_t *)iv8 = *(uint32_t *)last8;
339 354 /* LINTED: pointer alignment */
340 355 *(uint32_t *)&iv8[4] = *(uint32_t *)&last8[4];
341 356 /* LINTED: pointer alignment */
342 357 *(uint32_t *)&iv8[8] = *(uint32_t *)&last8[8];
343 358 /* LINTED: pointer alignment */
344 359 *(uint32_t *)&iv8[12] = *(uint32_t *)&last8[12];
345 360 } else {
346 361 AES_COPY_BLOCK(last8, iv8);
347 362 }
348 363 ctx->ac_lastp = (uint8_t *)ctx->ac_iv;
349 364 }
350 365
351 366 /* EXPORT DELETE END */
352 367
353 368 return (0);
354 369 }
355 370
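Note: IS_P2ALIGNED2() is introduced by this change but its definition is
not part of this webrev; presumably it sits next to IS_P2ALIGNED() in
<sys/sysmacros.h> and folds the two alignment tests into a single mask
check, along these lines:

    /* Assumed definition: OR the two addresses so one test covers both. */
    #define IS_P2ALIGNED2(v, w, a) \
            ((((uintptr_t)(v) | (uintptr_t)(w)) & ((uintptr_t)(a) - 1)) == 0)

A misaligned bit in either address survives the OR, so the macro is false
exactly when IS_P2ALIGNED() would be false for either argument.
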
356 371 #define OTHER(a, ctx) \
357 372 (((a) == (ctx)->ac_lastblock) ? (ctx)->ac_iv : (ctx)->ac_lastblock)
358 373
359 374 /*
360 375 * Encrypt multiple blocks of data.
361 376 */
362 377 /* ARGSUSED */
363 378 int
364 379 aes_encrypt_contiguous_blocks(aes_ctx_t *ctx, char *data, size_t length,
365 380 crypto_data_t *out)
366 381 {
367 382 if (ctx->ac_flags & AES_CTR_MODE)
368 383 return (aes_ctr_ccm_mode_contiguous_blocks(ctx, data, length,
369 384 out));
370 385 else if (ctx->ac_flags & AES_CCM_MODE)
371 386 return (aes_ctr_ccm_mode_contiguous_blocks(ctx, data, length,
372 387 out));
373 388 return (aes_cbc_encrypt_contiguous_blocks(ctx, data, length, out));
374 389 }
375 390
376 391 /* ARGSUSED */
377 392 static int
378 393 aes_cbc_decrypt_contiguous_blocks(aes_ctx_t *ctx, char *data, size_t length,
379 394 crypto_data_t *out)
380 395 {
381 396
382 397 /* EXPORT DELETE START */
383 398
384 399 size_t remainder = length;
385 400 size_t need;
386 401 uint8_t *datap = (uint8_t *)data;
387 402 uint8_t *blockp;
388 403 uint8_t *lastp;
389 404 uint32_t tmp[4];
390 405 void *iov_or_mp;
391 406 offset_t offset;
392 407 uint8_t *out_data_1;
393 408 uint8_t *out_data_2;
394 409 size_t out_data_1_len;
395 410
396 411 if (length + ctx->ac_remainder_len < AES_BLOCK_LEN) {
397 412 /* accumulate bytes here and return */
398 413 bcopy(datap,
399 414 (uint8_t *)ctx->ac_remainder + ctx->ac_remainder_len,
400 415 length);
401 416 ctx->ac_remainder_len += length;
402 417 ctx->ac_copy_to = datap;
403 418 return (0);
404 419 }
405 420
406 421 lastp = ctx->ac_lastp;
407 422 if (out != NULL)
408 423 aes_init_ptrs(out, &iov_or_mp, &offset);
409 424
410 425 do {
411 426 /* Unprocessed data from last call. */
412 427 if (ctx->ac_remainder_len > 0) {
413 428 need = AES_BLOCK_LEN - ctx->ac_remainder_len;
414 429
415 430 if (need > remainder)
416 431 return (1);
417 432
418 433 bcopy(datap, &((uint8_t *)ctx->ac_remainder)
419 434 [ctx->ac_remainder_len], need);
420 435
421 436 blockp = (uint8_t *)ctx->ac_remainder;
422 437 } else {
423 438 blockp = datap;
424 439 }
425 440
426 441 if (ctx->ac_flags & AES_CBC_MODE) {
427 442
428 443 /* Save current ciphertext block */
429 444 if (IS_P2ALIGNED(blockp, sizeof (uint32_t))) {
430 445 uint32_t *tmp32;
431 446
432 447 /* LINTED: pointer alignment */
433 448 tmp32 = (uint32_t *)OTHER((uint64_t *)lastp,
434 449 ctx);
435 450
436 451 /* LINTED: pointer alignment */
437 452 *tmp32++ = *(uint32_t *)blockp;
438 453 /* LINTED: pointer alignment */
439 454 *tmp32++ = *(uint32_t *)&blockp[4];
440 455 /* LINTED: pointer alignment */
441 456 *tmp32++ = *(uint32_t *)&blockp[8];
442 457 /* LINTED: pointer alignment */
443 458 *tmp32++ = *(uint32_t *)&blockp[12];
444 459 } else {
445 460 uint8_t *tmp8;
446 461 /* LINTED: pointer alignment */
447 462 tmp8 = (uint8_t *)OTHER((uint64_t *)lastp, ctx);
448 463
449 464 AES_COPY_BLOCK(blockp, tmp8);
450 465 }
451 466 }
452 467
453 468 if (out != NULL) {
454 469 aes_decrypt_block(ctx->ac_keysched, blockp,
455 470 (uint8_t *)tmp);
456 471 blockp = (uint8_t *)tmp;
457 472 } else {
458 473 aes_decrypt_block(ctx->ac_keysched, blockp, blockp);
459 474 }
460 475
461 476 if (ctx->ac_flags & AES_CBC_MODE) {
462 477 /*
463 478 * XOR the previous cipher block or IV with the
464 479 * currently decrypted block. Check for alignment.
465 480 */
466 - if (IS_P2ALIGNED(blockp, sizeof (uint32_t)) &&
467 - IS_P2ALIGNED(lastp, sizeof (uint32_t))) {
481 + if (IS_P2ALIGNED2(blockp, lastp, sizeof (uint32_t))) {
468 482 /* LINTED: pointer alignment */
469 483 *(uint32_t *)blockp ^= *(uint32_t *)lastp;
470 484 /* LINTED: pointer alignment */
471 485 *(uint32_t *)&blockp[4] ^=
472 486 /* LINTED: pointer alignment */
473 487 *(uint32_t *)&lastp[4];
474 488 /* LINTED: pointer alignment */
475 489 *(uint32_t *)&blockp[8] ^=
476 490 /* LINTED: pointer alignment */
477 491 *(uint32_t *)&lastp[8];
478 492 /* LINTED: pointer alignment */
479 493 *(uint32_t *)&blockp[12] ^=
480 494 /* LINTED: pointer alignment */
481 495 *(uint32_t *)&lastp[12];
482 496 } else {
483 497 AES_XOR_BLOCK(lastp, blockp);
484 498 }
485 499
486 500 /* LINTED: pointer alignment */
487 501 lastp = (uint8_t *)OTHER((uint64_t *)lastp, ctx);
488 502 }
489 503
490 504 if (out != NULL) {
491 505 aes_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
492 506 &out_data_1_len, &out_data_2, AES_BLOCK_LEN);
493 507
494 508 /* copy temporary block to where it belongs */
495 - bcopy(&tmp, out_data_1, out_data_1_len);
509 + if ((out_data_1_len == AES_BLOCK_LEN) &&
510 + (IS_P2ALIGNED(out_data_1, sizeof (uint32_t)))) {
511 + /* LINTED: pointer alignment */
512 + uint32_t *d = (uint32_t *)out_data_1;
513 + d[0] = tmp[0];
514 + d[1] = tmp[1];
515 + d[2] = tmp[2];
516 + d[3] = tmp[3];
517 + } else {
518 + bcopy(&tmp, out_data_1, out_data_1_len);
519 + }
496 520 if (out_data_2 != NULL) {
497 521 bcopy((uint8_t *)&tmp + out_data_1_len,
498 522 out_data_2, AES_BLOCK_LEN - out_data_1_len);
499 523 }
500 524
501 525 /* update offset */
502 526 out->cd_offset += AES_BLOCK_LEN;
503 527
504 528 } else if (ctx->ac_remainder_len > 0) {
505 529 /* copy temporary block to where it belongs */
506 530 bcopy(blockp, ctx->ac_copy_to, ctx->ac_remainder_len);
507 531 bcopy(blockp + ctx->ac_remainder_len, datap, need);
508 532 }
509 533
510 534 /* Update pointer to next block of data to be processed. */
511 535 if (ctx->ac_remainder_len != 0) {
512 536 datap += need;
513 537 ctx->ac_remainder_len = 0;
514 538 } else {
515 539 datap += AES_BLOCK_LEN;
516 540 }
517 541
518 542 remainder = (size_t)&data[length] - (size_t)datap;
519 543
520 544 /* Incomplete last block. */
521 545 if (remainder > 0 && remainder < AES_BLOCK_LEN) {
522 546 bcopy(datap, ctx->ac_remainder, remainder);
523 547 ctx->ac_remainder_len = remainder;
524 548 ctx->ac_lastp = lastp;
525 549 ctx->ac_copy_to = datap;
526 550 return (0);
527 551 }
528 552 ctx->ac_copy_to = NULL;
529 553
530 554 } while (remainder > 0);
531 555
532 556 ctx->ac_lastp = lastp;
533 557
534 558 /* EXPORT DELETE END */
535 559
536 560 return (0);
537 561 }
538 562
539 563 /*
540 564 * Decrypt multiple blocks of data.
541 565 */
542 566 int
543 567 aes_decrypt_contiguous_blocks(aes_ctx_t *ctx, char *data, size_t length,
544 568 crypto_data_t *out)
545 569 {
546 570 if (ctx->ac_flags & AES_CTR_MODE)
547 571 return (aes_ctr_ccm_mode_contiguous_blocks(ctx, data, length,
548 572 out));
549 573 else if (ctx->ac_flags & AES_CCM_MODE)
550 574 return (aes_ccm_decrypt_contiguous_blocks(ctx, data, length,
551 575 out));
552 576 return (aes_cbc_decrypt_contiguous_blocks(ctx, data, length, out));
553 577 }
554 578
555 579 /* ARGSUSED */
556 580 int
557 581 aes_counter_final(aes_ctx_t *ctx, crypto_data_t *out)
558 582 {
559 583 /* EXPORT DELETE START */
560 584
561 585 uint8_t *lastp;
562 586 uint32_t counter_block[4];
563 587 uint8_t tmp[AES_BLOCK_LEN];
564 588 int i;
565 589 void *iov_or_mp;
566 590 offset_t offset;
567 591 uint8_t *out_data_1;
568 592 uint8_t *out_data_2;
569 593 size_t out_data_1_len;
570 594
571 595 if (out->cd_length < ctx->ac_remainder_len)
572 596 return (CRYPTO_ARGUMENTS_BAD);
573 597
574 598 /* ac_iv is the counter block */
575 599 aes_encrypt_block(ctx->ac_keysched, (uint8_t *)ctx->ac_iv,
576 600 (uint8_t *)counter_block);
577 601
578 602 lastp = (uint8_t *)counter_block;
579 603
580 604 /* copy remainder to temporary buffer */
581 605 bcopy(ctx->ac_remainder, tmp, ctx->ac_remainder_len);
582 606
583 607 /* XOR with counter block */
584 608 for (i = 0; i < ctx->ac_remainder_len; i++) {
585 609 tmp[i] ^= lastp[i];
586 610 }
587 611
588 612 aes_init_ptrs(out, &iov_or_mp, &offset);
589 613 aes_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
590 614 &out_data_1_len, &out_data_2, ctx->ac_remainder_len);
591 615
592 616 /* copy temporary block to where it belongs */
593 617 bcopy(tmp, out_data_1, out_data_1_len);
594 618 if (out_data_2 != NULL) {
595 619 bcopy((uint8_t *)tmp + out_data_1_len,
596 620 out_data_2, ctx->ac_remainder_len - out_data_1_len);
597 621 }
598 622 out->cd_offset += ctx->ac_remainder_len;
599 623 ctx->ac_remainder_len = 0;
600 624
601 625 /* EXPORT DELETE END */
602 626
603 627 return (0);
604 628 }
605 629
606 630 /*
607 631 * Encrypt and decrypt multiple blocks of data in counter mode.
608 632 * Encrypt multiple blocks of data in CCM mode. Decrypt for CCM mode
609 633 * is done in another function.
610 634 */
611 635 /* ARGSUSED */
612 636 int
613 637 aes_ctr_ccm_mode_contiguous_blocks(aes_ctx_t *ctx, char *data, size_t length,
614 638 crypto_data_t *out)
615 639 {
616 640
617 641 /* EXPORT DELETE START */
618 642
619 643 size_t remainder = length;
620 644 size_t need;
621 645 uint8_t *datap = (uint8_t *)data;
622 646 uint8_t *blockp;
623 647 uint8_t *lastp;
624 648 uint32_t tmp[4];
625 649 uint32_t counter_block[4];
626 650 void *iov_or_mp;
627 651 offset_t offset;
628 652 uint8_t *out_data_1;
629 653 uint8_t *out_data_2;
630 654 size_t out_data_1_len;
631 655 uint64_t counter;
632 656 uint8_t *mac_buf;
633 657 #ifdef _LITTLE_ENDIAN
634 658 uint8_t *p;
635 659 #endif
636 660
637 661 if (length + ctx->ac_remainder_len < AES_BLOCK_LEN) {
638 662 /* accumulate bytes here and return */
639 663 bcopy(datap,
640 664 (uint8_t *)ctx->ac_remainder + ctx->ac_remainder_len,
641 665 length);
642 666 ctx->ac_remainder_len += length;
643 667 ctx->ac_copy_to = datap;
644 668 return (0);
645 669 }
646 670
647 671 lastp = (uint8_t *)ctx->ac_cb;
648 672 if (out != NULL)
649 673 aes_init_ptrs(out, &iov_or_mp, &offset);
650 674
651 675 if (ctx->ac_flags & AES_CCM_MODE) {
652 676 mac_buf = (uint8_t *)ctx->ac_ccm_mac_buf;
653 677 }
654 678
655 679 do {
656 680 /* Unprocessed data from last call. */
657 681 if (ctx->ac_remainder_len > 0) {
658 682 need = AES_BLOCK_LEN - ctx->ac_remainder_len;
659 683
660 684 if (need > remainder)
661 685 return (1);
662 686
663 687 bcopy(datap, &((uint8_t *)ctx->ac_remainder)
664 688 [ctx->ac_remainder_len], need);
665 689
666 690 blockp = (uint8_t *)ctx->ac_remainder;
667 691 } else {
668 692 blockp = datap;
669 693 }
670 694
671 695 /* don't write on the plaintext */
672 696 if (out != NULL) {
673 697 if (IS_P2ALIGNED(blockp, sizeof (uint32_t))) {
674 698 /* LINTED: pointer alignment */
675 699 tmp[0] = *(uint32_t *)blockp;
676 700 /* LINTED: pointer alignment */
677 701 tmp[1] = *(uint32_t *)&blockp[4];
678 702 /* LINTED: pointer alignment */
679 703 tmp[2] = *(uint32_t *)&blockp[8];
680 704 /* LINTED: pointer alignment */
681 705 tmp[3] = *(uint32_t *)&blockp[12];
682 706 } else {
683 707 uint8_t *tmp8 = (uint8_t *)tmp;
684 708
685 709 AES_COPY_BLOCK(blockp, tmp8);
686 710 }
687 711 blockp = (uint8_t *)tmp;
688 712 }
689 713
690 714 if (ctx->ac_flags & AES_CCM_MODE) {
691 715 /*
692 716 * do CBC MAC
693 717 *
694 718 * XOR the previous cipher block with the current clear block.
695 719 * mac_buf always contains the previous cipher block.
696 720 */
697 - if (IS_P2ALIGNED(blockp, sizeof (uint32_t)) &&
698 - IS_P2ALIGNED(mac_buf, sizeof (uint32_t))) {
721 + if (IS_P2ALIGNED2(blockp, mac_buf, sizeof (uint32_t))) {
699 722 /* LINTED: pointer alignment */
700 723 *(uint32_t *)&mac_buf[0] ^=
701 724 /* LINTED: pointer alignment */
702 725 *(uint32_t *)&blockp[0];
703 726 /* LINTED: pointer alignment */
704 727 *(uint32_t *)&mac_buf[4] ^=
705 728 /* LINTED: pointer alignment */
706 729 *(uint32_t *)&blockp[4];
707 730 /* LINTED: pointer alignment */
708 731 *(uint32_t *)&mac_buf[8] ^=
709 732 /* LINTED: pointer alignment */
710 733 *(uint32_t *)&blockp[8];
711 734 /* LINTED: pointer alignment */
712 735 *(uint32_t *)&mac_buf[12] ^=
713 736 /* LINTED: pointer alignment */
714 737 *(uint32_t *)&blockp[12];
715 738 } else {
716 739 AES_XOR_BLOCK(blockp, mac_buf);
717 740 }
718 741 aes_encrypt_block(ctx->ac_keysched, mac_buf, mac_buf);
719 742 }
720 743
721 744
722 745 /* ac_cb is the counter block */
723 746 aes_encrypt_block(ctx->ac_keysched, (uint8_t *)ctx->ac_cb,
724 747 (uint8_t *)counter_block);
725 748
726 749 lastp = (uint8_t *)counter_block;
727 750
728 751 /*
729 752 * Increment counter. Counter bits are confined
730 753 * to the bottom 64 bits of the counter block.
731 754 */
732 755 counter = ctx->ac_cb[1] & ctx->ac_counter_mask;
733 756 #ifdef _LITTLE_ENDIAN
734 757 p = (uint8_t *)&counter;
735 758 counter = (((uint64_t)p[0] << 56) |
736 759 ((uint64_t)p[1] << 48) |
737 760 ((uint64_t)p[2] << 40) |
738 761 ((uint64_t)p[3] << 32) |
739 762 ((uint64_t)p[4] << 24) |
740 763 ((uint64_t)p[5] << 16) |
741 764 ((uint64_t)p[6] << 8) |
742 765 (uint64_t)p[7]);
743 766 #endif
744 767 counter++;
745 768 #ifdef _LITTLE_ENDIAN
746 769 counter = (((uint64_t)p[0] << 56) |
747 770 ((uint64_t)p[1] << 48) |
748 771 ((uint64_t)p[2] << 40) |
749 772 ((uint64_t)p[3] << 32) |
750 773 ((uint64_t)p[4] << 24) |
751 774 ((uint64_t)p[5] << 16) |
752 775 ((uint64_t)p[6] << 8) |
753 776 (uint64_t)p[7]);
754 777 #endif
755 778 counter &= ctx->ac_counter_mask;
756 779 ctx->ac_cb[1] =
757 780 (ctx->ac_cb[1] & ~(ctx->ac_counter_mask)) | counter;
758 781
759 782 /*
760 783 * XOR the previous cipher block or IV with the
761 784 * current clear block. Check for alignment.
762 785 */
763 - if (IS_P2ALIGNED(blockp, sizeof (uint32_t)) &&
764 - IS_P2ALIGNED(lastp, sizeof (uint32_t))) {
786 + if (IS_P2ALIGNED2(blockp, lastp, sizeof (uint32_t))) {
765 787 /* LINTED: pointer alignment */
766 788 *(uint32_t *)&blockp[0] ^=
767 789 /* LINTED: pointer alignment */
768 790 *(uint32_t *)&lastp[0];
769 791 /* LINTED: pointer alignment */
770 792 *(uint32_t *)&blockp[4] ^=
771 793 /* LINTED: pointer alignment */
772 794 *(uint32_t *)&lastp[4];
773 795 /* LINTED: pointer alignment */
774 796 *(uint32_t *)&blockp[8] ^=
775 797 /* LINTED: pointer alignment */
776 798 *(uint32_t *)&lastp[8];
777 799 /* LINTED: pointer alignment */
778 800 *(uint32_t *)&blockp[12] ^=
779 801 /* LINTED: pointer alignment */
780 802 *(uint32_t *)&lastp[12];
781 803 } else {
782 804 AES_XOR_BLOCK(lastp, blockp);
783 805 }
784 806
785 807 ctx->ac_lastp = blockp;
786 808 lastp = blockp;
787 809 if (ctx->ac_flags & AES_CCM_MODE) {
788 810 ctx->ac_ccm_processed_data_len += AES_BLOCK_LEN;
789 811 }
790 812
791 813 if (out == NULL) {
792 814 if (ctx->ac_remainder_len > 0) {
793 815 bcopy(blockp, ctx->ac_copy_to,
794 816 ctx->ac_remainder_len);
795 817 bcopy(blockp + ctx->ac_remainder_len, datap,
796 818 need);
797 819 }
798 820 } else {
799 821 aes_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
800 822 &out_data_1_len, &out_data_2, AES_BLOCK_LEN);
801 823
802 824 /* copy block to where it belongs */
803 - bcopy(lastp, out_data_1, out_data_1_len);
825 + if ((out_data_1_len == AES_BLOCK_LEN) &&
826 + (IS_P2ALIGNED2(lastp, out_data_1,
827 + sizeof (uint32_t)))) {
828 + /* LINTED: pointer alignment */
829 + uint32_t *d = (uint32_t *)out_data_1;
830 + /* LINTED: pointer alignment */
831 + d[0] = *(uint32_t *)lastp;
832 + /* LINTED: pointer alignment */
833 + d[1] = *(uint32_t *)&lastp[4];
834 + /* LINTED: pointer alignment */
835 + d[2] = *(uint32_t *)&lastp[8];
836 + /* LINTED: pointer alignment */
837 + d[3] = *(uint32_t *)&lastp[12];
838 + } else {
839 + bcopy(lastp, out_data_1, out_data_1_len);
840 + }
804 841 if (out_data_2 != NULL) {
805 842 bcopy(lastp + out_data_1_len, out_data_2,
806 843 AES_BLOCK_LEN - out_data_1_len);
807 844 }
845 +
808 846 /* update offset */
809 847 out->cd_offset += AES_BLOCK_LEN;
810 848 }
811 849
812 850 /* Update pointer to next block of data to be processed. */
813 851 if (ctx->ac_remainder_len != 0) {
814 852 datap += need;
815 853 ctx->ac_remainder_len = 0;
816 854 } else {
817 855 datap += AES_BLOCK_LEN;
818 856 }
819 857
820 858 remainder = (size_t)&data[length] - (size_t)datap;
821 859
822 860 /* Incomplete last block. */
823 861 if (remainder > 0 && remainder < AES_BLOCK_LEN) {
824 862 bcopy(datap, ctx->ac_remainder, remainder);
825 863 ctx->ac_remainder_len = remainder;
826 864 ctx->ac_copy_to = datap;
827 865 goto out;
828 866 }
829 867 ctx->ac_copy_to = NULL;
830 868
831 869 } while (remainder > 0);
832 870
833 871 out:
834 872
835 873 /* EXPORT DELETE END */
836 874
837 875 return (0);
838 876 }
839 877
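Note: the _LITTLE_ENDIAN byte-swap blocks around counter++ above amount
to a 64-bit increment of a big-endian counter field. Equivalent logic,
written with a hypothetical ntohll()/htonll() pair (the driver open-codes
the swaps instead of assuming such helpers exist):

    /* Sketch only: bump the big-endian counter bytes held in ac_cb[1]. */
    uint64_t ctr = ctx->ac_cb[1] & ctx->ac_counter_mask;
    ctr = htonll(ntohll(ctr) + 1) & ctx->ac_counter_mask;
    ctx->ac_cb[1] = (ctx->ac_cb[1] & ~ctx->ac_counter_mask) | ctr;

On big-endian machines both swaps are identities, which is why the real
code compiles them out with #ifdef.
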
840 878 /*
841 879 * The following function should be called at encrypt or decrypt init time
842 880 * for AES CCM mode.
843 881 */
844 882 int
845 883 aes_ccm_init(aes_ctx_t *ctx, unsigned char *nonce, size_t nonce_len,
846 884 unsigned char *auth_data, size_t auth_data_len)
847 885 {
848 886 /* EXPORT DELETE START */
849 887 uint8_t *mac_buf, *datap, *ivp, *authp;
850 888 uint32_t iv[4], tmp[4];
851 889 size_t remainder, processed;
852 890 uint8_t encoded_a[10]; /* max encoded auth data length is 10 octets */
853 891 size_t encoded_a_len = 0;
854 892
855 893 mac_buf = (uint8_t *)&(ctx->ac_ccm_mac_buf);
856 894
857 895 /*
858 896 * Format the 1st block for CBC-MAC and construct the
859 897 * 1st counter block.
860 898 *
861 899 * aes_ctx->ac_iv is used for storing the counter block
862 900 * mac_buf will store b0 at this time.
863 901 */
864 902 aes_ccm_format_initial_blocks(nonce, nonce_len,
865 903 auth_data_len, mac_buf, ctx);
866 904
867 905 /* The IV for CBC MAC for AES CCM mode is always zero */
868 906 bzero(iv, AES_BLOCK_LEN);
869 907 ivp = (uint8_t *)iv;
870 908
871 - if (IS_P2ALIGNED(ivp, sizeof (uint32_t)) &&
872 - IS_P2ALIGNED(mac_buf, sizeof (uint32_t))) {
909 + if (IS_P2ALIGNED2(ivp, mac_buf, sizeof (uint32_t))) {
873 910 /* LINTED: pointer alignment */
874 911 *(uint32_t *)&mac_buf[0] ^= *(uint32_t *)&ivp[0];
875 912 /* LINTED: pointer alignment */
876 913 *(uint32_t *)&mac_buf[4] ^= *(uint32_t *)&ivp[4];
877 914 /* LINTED: pointer alignment */
878 915 *(uint32_t *)&mac_buf[8] ^= *(uint32_t *)&ivp[8];
879 916 /* LINTED: pointer alignment */
880 917 *(uint32_t *)&mac_buf[12] ^= *(uint32_t *)&ivp[12];
881 918 } else {
882 919 AES_XOR_BLOCK(ivp, mac_buf);
883 920 }
884 921
885 922 /* encrypt the nonce */
886 923 aes_encrypt_block(ctx->ac_keysched, mac_buf, mac_buf);
887 924
888 925 /* take care of the associated data, if any */
889 926 if (auth_data_len == 0) {
890 927 return (0);
891 928 }
892 929
893 930 encode_adata_len(auth_data_len, encoded_a, &encoded_a_len);
894 931
895 932 remainder = auth_data_len;
896 933
897 934 /* 1st block: it contains encoded associated data, and some data */
898 935 authp = (uint8_t *)tmp;
899 936 bzero(authp, AES_BLOCK_LEN);
900 937 bcopy(encoded_a, authp, encoded_a_len);
901 938 processed = AES_BLOCK_LEN - encoded_a_len;
902 939 if (processed > auth_data_len) {
903 940 /* in case auth_data is very small */
904 941 processed = auth_data_len;
905 942 }
906 943 bcopy(auth_data, authp+encoded_a_len, processed);
907 944 /* xor with previous buffer */
908 - if (IS_P2ALIGNED(authp, sizeof (uint32_t)) &&
909 - IS_P2ALIGNED(mac_buf, sizeof (uint32_t))) {
945 + if (IS_P2ALIGNED2(authp, mac_buf, sizeof (uint32_t))) {
910 946 /* LINTED: pointer alignment */
911 947 *(uint32_t *)&mac_buf[0] ^= *(uint32_t *)&authp[0];
912 948 /* LINTED: pointer alignment */
913 949 *(uint32_t *)&mac_buf[4] ^= *(uint32_t *)&authp[4];
914 950 /* LINTED: pointer alignment */
915 951 *(uint32_t *)&mac_buf[8] ^= *(uint32_t *)&authp[8];
916 952 /* LINTED: pointer alignment */
917 953 *(uint32_t *)&mac_buf[12] ^= *(uint32_t *)&authp[12];
918 954 } else {
919 955 AES_XOR_BLOCK(authp, mac_buf);
920 956 }
921 957 aes_encrypt_block(ctx->ac_keysched, mac_buf, mac_buf);
922 958 remainder -= processed;
923 959 if (remainder == 0) {
924 960 /* a small amount of associated data; it's all done now */
925 961 return (0);
926 962 }
927 963
928 964 do {
929 965 if (remainder < AES_BLOCK_LEN) {
930 966 /*
931 967 * There isn't a full block of data; pad the rest of
932 968 * the buffer with zeros
933 969 */
934 970 bzero(authp, AES_BLOCK_LEN);
935 971 bcopy(&(auth_data[processed]), authp, remainder);
936 972 datap = (uint8_t *)authp;
937 973 remainder = 0;
938 974 } else {
939 975 datap = (uint8_t *)(&(auth_data[processed]));
940 976 processed += AES_BLOCK_LEN;
941 977 remainder -= AES_BLOCK_LEN;
942 978 }
943 979
944 980 /* xor with previous buffer */
945 - if (IS_P2ALIGNED(datap, sizeof (uint32_t)) &&
946 - IS_P2ALIGNED(mac_buf, sizeof (uint32_t))) {
981 + if (IS_P2ALIGNED2(datap, mac_buf, sizeof (uint32_t))) {
947 982 /* LINTED: pointer alignment */
948 983 *(uint32_t *)&mac_buf[0] ^= *(uint32_t *)&datap[0];
949 984 /* LINTED: pointer alignment */
950 985 *(uint32_t *)&mac_buf[4] ^= *(uint32_t *)&datap[4];
951 986 /* LINTED: pointer alignment */
952 987 *(uint32_t *)&mac_buf[8] ^= *(uint32_t *)&datap[8];
953 988 /* LINTED: pointer alignment */
954 989 *(uint32_t *)&mac_buf[12] ^= *(uint32_t *)&datap[12];
955 990 } else {
956 991 AES_XOR_BLOCK(datap, mac_buf);
957 992 }
958 993
959 994 aes_encrypt_block(ctx->ac_keysched, mac_buf, mac_buf);
960 995
961 996 } while (remainder > 0);
962 997
963 998 /* EXPORT DELETE END */
964 999 return (0);
965 1000 }
966 1001
967 1002 void
968 1003 calculate_ccm_mac(aes_ctx_t *ctx, uint8_t **ccm_mac)
969 1004 {
970 1005 /* EXPORT DELETE START */
971 1006 uint64_t counter;
972 1007 uint32_t counter_block[4];
973 1008 uint8_t *counterp, *mac_buf;
974 1009 int i;
975 1010
976 1011 mac_buf = (uint8_t *)ctx->ac_ccm_mac_buf;
977 1012
978 1013 /* first counter block starts with index 0 */
979 1014 counter = 0;
980 1015 ctx->ac_cb[1] = (ctx->ac_cb[1] & ~(ctx->ac_counter_mask)) | counter;
981 1016
982 1017 aes_encrypt_block(ctx->ac_keysched, (uint8_t *)ctx->ac_cb,
983 1018 (uint8_t *)counter_block);
984 1019
985 1020 counterp = (uint8_t *)counter_block;
986 1021
987 1022 /* calculate XOR of MAC with first counter block */
988 1023 for (i = 0; i < ctx->ac_ccm_mac_len; i++) {
989 1024 (*ccm_mac)[i] = mac_buf[i] ^ counterp[i];
990 1025 }
991 1026 /* EXPORT DELETE END */
992 1027 }
993 1028
994 1029 /* ARGSUSED */
995 1030 int
996 1031 aes_ccm_encrypt_final(aes_ctx_t *ctx, crypto_data_t *out)
997 1032 {
998 1033 /* EXPORT DELETE START */
999 1034
1000 1035 uint8_t *lastp, *mac_buf, *ccm_mac_p, *macp;
1001 1036 uint32_t counter_block[4];
1002 1037 uint32_t tmp[4];
1003 1038 uint8_t ccm_mac[AES_BLOCK_LEN];
1004 1039 void *iov_or_mp;
1005 1040 offset_t offset;
1006 1041 uint8_t *out_data_1;
1007 1042 uint8_t *out_data_2;
1008 1043 size_t out_data_1_len;
1009 1044 int i;
1010 1045
1011 1046 if (out->cd_length < (ctx->ac_remainder_len + ctx->ac_ccm_mac_len)) {
1012 1047 return (CRYPTO_ARGUMENTS_BAD);
1013 1048 }
1014 1049
1015 1050 /*
1016 1051 * When we get here, the number of bytes of payload processed
1017 1052 * plus whatever data remains, if any,
1018 1053 * should be the same as the number of bytes that was
1019 1054 * passed in at init time.
1020 1055 */
1021 1056 if ((ctx->ac_ccm_processed_data_len + ctx->ac_remainder_len)
1022 1057 != (ctx->ac_ccm_data_len)) {
1023 1058 return (CRYPTO_DATA_LEN_RANGE);
1024 1059 }
1025 1060
1026 1061 mac_buf = (uint8_t *)ctx->ac_ccm_mac_buf;
1027 1062
1028 1063 if (ctx->ac_remainder_len > 0) {
1029 1064
1030 1065 macp = (uint8_t *)tmp;
1031 1066 bzero(macp, AES_BLOCK_LEN);
1032 1067
1033 1068 /* copy remainder to temporary buffer */
1034 1069 bcopy(ctx->ac_remainder, macp, ctx->ac_remainder_len);
1035 1070
1036 1071 /* calculate the CBC MAC */
1037 - if (IS_P2ALIGNED(macp, sizeof (uint32_t)) &&
1038 - IS_P2ALIGNED(mac_buf, sizeof (uint32_t))) {
1072 + if (IS_P2ALIGNED2(macp, mac_buf, sizeof (uint32_t))) {
1039 1073 /* LINTED: pointer alignment */
1040 1074 *(uint32_t *)&mac_buf[0] ^= *(uint32_t *)&macp[0];
1041 1075 /* LINTED: pointer alignment */
1042 1076 *(uint32_t *)&mac_buf[4] ^= *(uint32_t *)&macp[4];
1043 1077 /* LINTED: pointer alignment */
1044 1078 *(uint32_t *)&mac_buf[8] ^= *(uint32_t *)&macp[8];
1045 1079 /* LINTED: pointer alignment */
1046 1080 *(uint32_t *)&mac_buf[12] ^= *(uint32_t *)&macp[12];
1047 1081 } else {
1048 1082 AES_XOR_BLOCK(macp, mac_buf);
1049 1083 }
1050 1084 aes_encrypt_block(ctx->ac_keysched, mac_buf, mac_buf);
1051 1085
1052 1086 /* calculate the counter mode */
1053 1087 aes_encrypt_block(ctx->ac_keysched, (uint8_t *)ctx->ac_cb,
1054 1088 (uint8_t *)counter_block);
1055 1089
1056 1090 lastp = (uint8_t *)counter_block;
1057 1091
1058 1092 /* copy remainder to temporary buffer */
1059 1093 bcopy(ctx->ac_remainder, macp, ctx->ac_remainder_len);
1060 1094
1061 1095 /* XOR with counter block */
1062 1096 for (i = 0; i < ctx->ac_remainder_len; i++) {
1063 1097 macp[i] ^= lastp[i];
1064 1098 }
1065 1099 ctx->ac_ccm_processed_data_len += ctx->ac_remainder_len;
1066 1100 }
1067 1101
1068 1102 /* Calculate the CCM MAC */
1069 1103 ccm_mac_p = ccm_mac;
1070 1104 calculate_ccm_mac(ctx, &ccm_mac_p);
1071 1105
1072 1106 aes_init_ptrs(out, &iov_or_mp, &offset);
1073 1107 aes_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
1074 1108 &out_data_1_len, &out_data_2,
1075 1109 ctx->ac_remainder_len + ctx->ac_ccm_mac_len);
1076 1110
1077 1111 if (ctx->ac_remainder_len > 0) {
1078 1112
1079 1113 /* copy temporary block to where it belongs */
1080 1114 if (out_data_2 == NULL) {
1081 1115 /* everything will fit in out_data_1 */
1082 1116 bcopy(macp, out_data_1, ctx->ac_remainder_len);
1083 1117 bcopy(ccm_mac, out_data_1 + ctx->ac_remainder_len,
1084 1118 ctx->ac_ccm_mac_len);
1085 1119 } else {
1086 1120
1087 1121 if (out_data_1_len < ctx->ac_remainder_len) {
1088 1122
1089 1123 size_t data_2_len_used;
1090 1124
1091 1125 bcopy(macp, out_data_1, out_data_1_len);
1092 1126
1093 1127 data_2_len_used = ctx->ac_remainder_len
1094 1128 - out_data_1_len;
1095 1129
1096 1130 bcopy((uint8_t *)macp + out_data_1_len,
1097 1131 out_data_2, data_2_len_used);
1098 1132 bcopy(ccm_mac, out_data_2 + data_2_len_used,
1099 1133 ctx->ac_ccm_mac_len);
1100 1134 } else {
1101 1135 bcopy(macp, out_data_1, out_data_1_len);
1102 1136 if (out_data_1_len == ctx->ac_remainder_len) {
1103 1137 /* mac will be in out_data_2 */
1104 1138 bcopy(ccm_mac, out_data_2,
1105 1139 ctx->ac_ccm_mac_len);
1106 1140 } else {
1107 1141 size_t len_not_used
1108 1142 = out_data_1_len -
1109 1143 ctx->ac_remainder_len;
1110 1144 /*
1111 1145 * part of the mac will be in
1112 1146 * out_data_1, part of the mac will be
1113 1147 * in out_data_2
1114 1148 */
1115 1149 bcopy(ccm_mac,
1116 1150 out_data_1 + ctx->ac_remainder_len,
1117 1151 len_not_used);
1118 1152 bcopy(ccm_mac+len_not_used, out_data_2,
1119 1153 ctx->ac_ccm_mac_len - len_not_used);
1120 1154
1121 1155 }
1122 1156 }
1123 1157 }
1124 1158 } else {
1125 1159 /* copy block to where it belongs */
1126 1160 bcopy(ccm_mac, out_data_1, out_data_1_len);
1127 1161 if (out_data_2 != NULL) {
1128 1162 bcopy(ccm_mac + out_data_1_len, out_data_2,
1129 1163 AES_BLOCK_LEN - out_data_1_len);
1130 1164 }
1131 1165 }
1132 1166 out->cd_offset += ctx->ac_remainder_len + ctx->ac_ccm_mac_len;
1133 1167 ctx->ac_remainder_len = 0;
1134 1168
1135 1169 /* EXPORT DELETE END */
1136 1170
1137 1171 return (0);
1138 1172 }
1139 1173
1140 1174 int
1141 1175 aes_ccm_validate_args(CK_AES_CCM_PARAMS *ccm_param, boolean_t is_encrypt_init)
1142 1176 {
1143 1177
1144 1178 /* EXPORT DELETE START */
1145 1179 size_t macSize, nonceSize;
1146 1180 uint8_t q;
1147 1181 uint64_t maxValue;
1148 1182
1149 1183 /*
1150 - * Check the length of the MAC. Only valid length
1184 + * Check the byte length of the MAC. The only valid
1151 1185 * lengths for the MAC are: 4, 6, 8, 10, 12, 14, 16
1152 1186 */
1153 1187 macSize = ccm_param->ulMACSize;
1154 1188 if ((macSize < 4) || (macSize > 16) || ((macSize % 2) != 0)) {
1155 1189 return (CRYPTO_MECHANISM_PARAM_INVALID);
1156 1190 }
1157 1191
1158 - /* Check the nonce value. Valid values are 7, 8, 9, 10, 11, 12, 13 */
1192 + /* Check the nonce length. Valid values are 7, 8, 9, 10, 11, 12, 13 */
1159 1193 nonceSize = ccm_param->ulNonceSize;
1160 1194 if ((nonceSize < 7) || (nonceSize > 13)) {
1161 1195 return (CRYPTO_MECHANISM_PARAM_INVALID);
1162 1196 }
1163 1197
1198 + /* q is the length of the field storing the length, in bytes */
1164 1199 q = (uint8_t)((15 - nonceSize) & 0xFF);
1165 1200
1166 1201
1167 1202 /*
1168 1203 * For decrypt, make sure the size of the ciphertext is at least
1169 1204 * as large as the MAC length
1170 1205 */
1171 1206 if ((!is_encrypt_init) && (ccm_param->ulDataSize < macSize)) {
1172 1207 return (CRYPTO_MECHANISM_PARAM_INVALID);
1173 1208 }
1174 1209
1175 1210 /*
1176 1211 * Check to make sure the length of the payload is within the
1177 1212 * range of values allowed by q
1178 1213 */
1179 1214 if (q < 8) {
1180 - maxValue = 1ULL << (q * 8);
1215 + maxValue = (1ULL << (q * 8)) - 1;
1181 1216 } else {
1182 1217 maxValue = ULONG_MAX;
1183 1218 }
1184 1219
1185 1220 if (ccm_param->ulDataSize > maxValue) {
1186 1221 return (CRYPTO_MECHANISM_PARAM_INVALID);
1187 1222 }
1188 1223
1189 1224 /* EXPORT DELETE END */
1190 1225 return (0);
1191 1226 }
1192 1227
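
Note: the maxValue change above is the fix for 6699938. The payload
length must fit in q octets, so the largest legal value is 2^(8q) - 1,
not 2^(8q); the old bound admitted exactly one value too many. As a
sketch (hypothetical helper name):

    /* Largest CCM payload for a given q, where q = 15 - nonceSize. */
    uint64_t
    ccm_max_payload(uint8_t q)
    {
            /* 1ULL << 64 is undefined C, hence the q == 8 special case */
            return ((q < 8) ? (1ULL << (q * 8)) - 1 : ULONG_MAX);
    }
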
1193 1228 /*
1194 1229 * Format the first block used in CBC-MAC (B0) and the initial counter
1195 - * block based on formating functions and counter generation functions
1230 + * block based on formatting functions and counter generation functions
1196 1231 * specified in RFC 3610 and NIST publication 800-38C, appendix A
1197 1232 *
1198 1233 * b0 is the first block used in CBC-MAC
1199 1234 * cb0 is the first counter block
1200 1235 *
1201 1236 * It's assumed that the arguments b0 and cb0 are preallocated AES blocks
1202 1237 *
1203 1238 */
1204 1239 static void
1205 1240 aes_ccm_format_initial_blocks(uchar_t *nonce, ulong_t nonceSize,
1206 1241 ulong_t authDataSize, uint8_t *b0, aes_ctx_t *aes_ctx)
1207 1242 {
1208 1243 /* EXPORT DELETE START */
1209 1244 uint64_t payloadSize;
1210 1245 uint8_t t, q, have_adata = 0;
1211 1246 size_t limit;
1212 1247 int i, j, k;
1213 1248 uint64_t mask = 0;
1214 1249 uint8_t *cb;
1215 1250 #ifdef _LITTLE_ENDIAN
1216 1251 uint8_t *p8;
1217 1252 #endif /* _LITTLE_ENDIAN */
1218 1253
1219 1254 q = (uint8_t)((15 - nonceSize) & 0xFF);
1220 1255 t = (uint8_t)((aes_ctx->ac_ccm_mac_len) & 0xFF);
1221 1256
1222 - /* Construct the first octect of b0 */
1257 + /* Construct the first octet of b0 */
1223 1258 if (authDataSize > 0) {
1224 1259 have_adata = 1;
1225 1260 }
1226 1261 b0[0] = (have_adata << 6) | (((t - 2) / 2) << 3) | (q - 1);
1227 1262
1228 1263 /* copy the nonce value into b0 */
1229 1264 bcopy(nonce, &(b0[1]), nonceSize);
1230 1265
1231 1266 /* store the length of the payload into b0 */
1232 1267 bzero(&(b0[1+nonceSize]), q);
1233 1268
1234 1269 payloadSize = aes_ctx->ac_ccm_data_len;
1235 1270 limit = 8 < q ? 8 : q;
1236 1271
1237 1272 for (i = 0, j = 0, k = 15; i < limit; i++, j += 8, k--) {
1238 1273 b0[k] = (uint8_t)((payloadSize >> j) & 0xFF);
1239 1274 }
1240 1275
1241 1276 /* format the counter block */
1242 1277
1243 1278 cb = (uint8_t *)aes_ctx->ac_cb;
1244 1279
1245 1280 cb[0] = 0x07 & (q-1); /* first byte */
1246 1281
1247 1282 /* copy the nonce value into the counter block */
1248 1283 bcopy(nonce, &(cb[1]), nonceSize);
1249 1284
1250 1285 bzero(&(cb[1+nonceSize]), q);
1251 1286
1252 1287 /* Create the mask for the counter field based on the size of nonce */
1253 1288 q <<= 3;
1254 1289 while (q-- > 0) {
1255 1290 mask |= (1ULL << q);
1256 1291 }
1257 1292
1258 1293 #ifdef _LITTLE_ENDIAN
1259 1294 p8 = (uint8_t *)&mask;
1260 1295 mask = (((uint64_t)p8[0] << 56) |
1261 1296 ((uint64_t)p8[1] << 48) |
1262 1297 ((uint64_t)p8[2] << 40) |
1263 1298 ((uint64_t)p8[3] << 32) |
1264 1299 ((uint64_t)p8[4] << 24) |
1265 1300 ((uint64_t)p8[5] << 16) |
1266 1301 ((uint64_t)p8[6] << 8) |
1267 1302 (uint64_t)p8[7]);
1268 1303 #endif
1269 1304 aes_ctx->ac_counter_mask = mask;
1270 1305
1271 1306 /*
1272 1307 * During calculation we start with counter block 1, so we
1273 1308 * set it up right here.
1274 1309 * We can just set the last byte to have the value 1, because
1275 - * even with the bigest nonce of 13, the last byte of the
1310 + * even with the biggest nonce of 13, the last byte of the
1276 1311 * counter block will be used for the counter value.
1277 1312 */
1278 1313 cb[15] = 0x01;
1279 1314
1280 1315 /* EXPORT DELETE END */
1281 1316
1282 1317 }
1283 1318
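Note: a worked example of the b0 flags octet built above (numbers mine,
not from the diff). A 13-byte nonce with a 16-byte MAC and associated
data present gives q = 15 - 13 = 2 and t = 16, so

    b0[0] = (1 << 6) | (((16 - 2) / 2) << 3) | (2 - 1)
          = 0x40 | 0x38 | 0x01
          = 0x79

and the q = 2 payload-length field occupies b0[14..15].
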
1284 1319 /*
1285 1320 * Encode the length of the associated data as
1286 1321 * specified in RFC 3610 and NIST publication 800-38C, appendix A
1287 1322 */
1288 1323 static void
1289 1324 encode_adata_len(ulong_t auth_data_len, uint8_t *encoded, size_t *encoded_len)
1290 1325 {
1291 1326
1292 1327 /* EXPORT DELETE START */
1293 1328
1294 1329 if (auth_data_len < ((1ULL<<16) - (1ULL<<8))) {
1295 1330 /* 0 < a < (2^16-2^8) */
1296 1331 *encoded_len = 2;
1297 1332 encoded[0] = (auth_data_len & 0xff00) >> 8;
1298 1333 encoded[1] = auth_data_len & 0xff;
1299 1334
1300 1335 } else if ((auth_data_len >= ((1ULL<<16) - (1ULL<<8))) &&
1301 1336 (auth_data_len < (1ULL << 31))) {
1302 1337 /* (2^16-2^8) <= a < 2^32 */
1303 1338 *encoded_len = 6;
1304 1339 encoded[0] = 0xff;
1305 1340 encoded[1] = 0xfe;
1306 1341 encoded[2] = (auth_data_len & 0xff000000) >> 24;
1307 1342 encoded[3] = (auth_data_len & 0xff0000) >> 16;
1308 1343 encoded[4] = (auth_data_len & 0xff00) >> 8;
1309 1344 encoded[5] = auth_data_len & 0xff;
1310 1345 #ifdef _LP64
1311 1346 } else {
1312 1347 /* 2^32 <= a < 2^64 */
1313 1348 *encoded_len = 10;
1314 1349 encoded[0] = 0xff;
1315 1350 encoded[1] = 0xff;
1316 1351 encoded[2] = (auth_data_len & 0xff00000000000000) >> 56;
1317 1352 encoded[3] = (auth_data_len & 0xff000000000000) >> 48;
1318 1353 encoded[4] = (auth_data_len & 0xff0000000000) >> 40;
1319 1354 encoded[5] = (auth_data_len & 0xff00000000) >> 32;
1320 1355 encoded[6] = (auth_data_len & 0xff000000) >> 24;
1321 1356 encoded[7] = (auth_data_len & 0xff0000) >> 16;
1322 1357 encoded[8] = (auth_data_len & 0xff00) >> 8;
1323 1358 encoded[9] = auth_data_len & 0xff;
1324 1359 #endif /* _LP64 */
1325 1360 }
1326 1361 /* EXPORT DELETE END */
1327 1362 }
1328 1363
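Note: a quick check of the first branch above (values mine). An
associated-data length of 300 is below 2^16 - 2^8, so it encodes in two
octets:

    uint8_t enc[10];
    size_t enc_len;

    encode_adata_len(300, enc, &enc_len);
    /* enc_len == 2, enc[0] == 0x01, enc[1] == 0x2C (300 == 0x012C) */
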
1329 1364 /*
1330 1365 * This will only deal with decrypting the last block of the input that
1331 1366 * might not be a multiple of AES_BLOCK_LEN
1332 1367 */
1333 1368 static void
1334 1369 aes_ccm_decrypt_incomplete_block(aes_ctx_t *ctx)
1335 1370 {
1336 1371
1337 1372 /* EXPORT DELETE START */
1338 1373 uint8_t *datap, counter_block[AES_BLOCK_LEN], *outp, *counterp;
1339 1374 int i;
1340 1375
1341 1376 datap = (uint8_t *)ctx->ac_remainder;
1342 1377 outp = &((ctx->ac_ccm_pt_buf)[ctx->ac_ccm_processed_data_len]);
1343 1378
1344 1379 aes_encrypt_block(ctx->ac_keysched, (uint8_t *)ctx->ac_cb,
1345 1380 counter_block);
1346 1381
1347 1382 counterp = (uint8_t *)counter_block;
1348 1383
1349 1384 /* XOR with counter block */
1350 1385 for (i = 0; i < ctx->ac_remainder_len; i++) {
1351 1386 outp[i] = datap[i] ^ counterp[i];
1352 1387 }
1353 1388 /* EXPORT DELETE END */
1354 1389 }
1355 1390
1356 1391 /*
1357 1392 * This will decrypt the cipher text. However, the plaintext won't be
1358 1393 * returned to the caller. It will be returned when decrypt_final() is
1359 1394 * called if the MAC matches
1360 1395 */
1361 1396 /* ARGSUSED */
1362 1397 static int
1363 1398 aes_ccm_decrypt_contiguous_blocks(aes_ctx_t *ctx, char *data, size_t length,
1364 1399 crypto_data_t *out)
1365 1400 {
1366 1401
1367 1402 /* EXPORT DELETE START */
1368 1403
1369 1404 size_t remainder = length;
1370 1405 size_t need;
1371 1406 uint8_t *datap = (uint8_t *)data;
1372 1407 uint8_t *blockp;
1373 1408 uint32_t counter_block[4];
1374 1409 uint8_t *cbp;
1375 1410 uint64_t counter;
1376 1411 size_t pt_len, total_decrypted_len, mac_len, pm_len, pd_len;
1377 1412 uint32_t tmp[4];
1378 1413 uint8_t *resultp;
1379 1414 #ifdef _LITTLE_ENDIAN
1380 1415 uint8_t *p;
1381 1416 #endif /* _LITTLE_ENDIAN */
1382 1417
1383 1418
1384 1419 pm_len = ctx->ac_ccm_processed_mac_len;
1385 1420
1386 1421 if (pm_len > 0) {
1387 1422 uint8_t *tmp;
1388 1423 /*
1389 1424 * all ciphertext has been processed; we are just waiting
1390 1425 * for the rest of the mac
1391 1426 */
1392 1427 if ((pm_len + length) > ctx->ac_ccm_mac_len) {
1393 1428 return (CRYPTO_DATA_LEN_RANGE);
1394 1429 }
1395 1430 tmp = (uint8_t *)ctx->ac_ccm_mac_input_buf;
1396 1431
1397 1432 bcopy(datap, tmp + pm_len, length);
1398 1433
1399 1434 ctx->ac_ccm_processed_mac_len += length;
1400 1435 return (0);
1401 1436 }
1402 1437
1403 1438 /*
1404 1439 * If we decrypt the given data, what total amount of data would
1405 1440 * have been decrypted?
1406 1441 */
1407 1442 pd_len = ctx->ac_ccm_processed_data_len;
1408 1443 total_decrypted_len = pd_len + length + ctx->ac_remainder_len;
1409 1444
1410 1445 if (total_decrypted_len >
1411 1446 (ctx->ac_ccm_data_len + ctx->ac_ccm_mac_len)) {
1412 1447 return (CRYPTO_DATA_LEN_RANGE);
1413 1448 }
1414 1449
1415 1450 pt_len = ctx->ac_ccm_data_len;
1416 1451
1417 1452 if (total_decrypted_len > pt_len) {
1418 1453 /*
1419 1454 * part of the input will be the MAC, need to isolate that
1420 1455 * to be dealt with later. The left-over data in
1421 1456 * ac_remainder_len from last time will not be part of the
1422 1457 * MAC; otherwise, it would already have been taken out
1423 1458 * during the previous call.
1424 1459 */
1425 1460 size_t pt_part = pt_len - pd_len - ctx->ac_remainder_len;
1426 1461
1427 1462 mac_len = length - pt_part;
1428 1463
1429 1464 ctx->ac_ccm_processed_mac_len = mac_len;
1430 1465 bcopy(data + pt_part, ctx->ac_ccm_mac_input_buf, mac_len);
1431 1466
1432 1467 if (pt_part + ctx->ac_remainder_len < AES_BLOCK_LEN) {
1433 1468 /*
1434 1469 * since this is last of the ciphertext, will
1435 1470 * just decrypt with it here
1436 1471 */
1437 1472 bcopy(datap, &((uint8_t *)ctx->ac_remainder)
1438 1473 [ctx->ac_remainder_len], pt_part);
1439 1474 ctx->ac_remainder_len += pt_part;
1440 1475 aes_ccm_decrypt_incomplete_block(ctx);
1441 1476 ctx->ac_remainder_len = 0;
1442 1477 ctx->ac_ccm_processed_data_len += pt_part;
1443 1478 return (0);
1444 1479 } else {
1445 1480 /* let rest of the code handle this */
1446 1481 length = pt_part;
1447 1482 }
1448 1483 } else if (length + ctx->ac_remainder_len < AES_BLOCK_LEN) {
1449 1484 /* accumulate bytes here and return */
1450 1485 bcopy(datap,
1451 1486 (uint8_t *)ctx->ac_remainder + ctx->ac_remainder_len,
1452 1487 length);
1453 1488 ctx->ac_remainder_len += length;
1454 1489 ctx->ac_copy_to = datap;
1455 1490 return (0);
1456 1491 }
1457 1492
1458 1493 do {
1459 1494 /* Unprocessed data from last call. */
1460 1495 if (ctx->ac_remainder_len > 0) {
1461 1496 need = AES_BLOCK_LEN - ctx->ac_remainder_len;
1462 1497
1463 1498 if (need > remainder)
1464 1499 return (1);
1465 1500
1466 1501 bcopy(datap, &((uint8_t *)ctx->ac_remainder)
1467 1502 [ctx->ac_remainder_len], need);
1468 1503
1469 1504 blockp = (uint8_t *)ctx->ac_remainder;
1470 1505 } else {
1471 1506 blockp = datap;
1472 1507 }
1473 1508
1474 1509 /* don't write on the plaintext */
1475 1510 if (IS_P2ALIGNED(blockp, sizeof (uint32_t))) {
1476 1511 /* LINTED: pointer alignment */
1477 1512 tmp[0] = *(uint32_t *)blockp;
1478 1513 /* LINTED: pointer alignment */
1479 1514 tmp[1] = *(uint32_t *)&blockp[4];
1480 1515 /* LINTED: pointer alignment */
1481 1516 tmp[2] = *(uint32_t *)&blockp[8];
1482 1517 /* LINTED: pointer alignment */
1483 1518 tmp[3] = *(uint32_t *)&blockp[12];
1484 1519 } else {
1485 1520 uint8_t *tmp8 = (uint8_t *)tmp;
1486 1521
1487 1522 AES_COPY_BLOCK(blockp, tmp8);
1488 1523 }
1489 1524 blockp = (uint8_t *)tmp;
1490 1525
1491 1526 /* Calculate the counter mode, ac_cb is the counter block */
1492 1527 aes_encrypt_block(ctx->ac_keysched, (uint8_t *)ctx->ac_cb,
1493 1528 (uint8_t *)counter_block);
1494 1529 cbp = (uint8_t *)counter_block;
1495 1530
1496 1531 /*
1497 1532 * Increment counter.
1498 1533 * Counter bits are confined to the bottom 64 bits
1499 1534 */
1500 1535 counter = ctx->ac_cb[1] & ctx->ac_counter_mask;
1501 1536 #ifdef _LITTLE_ENDIAN
1502 1537 p = (uint8_t *)&counter;
1503 1538 counter = (((uint64_t)p[0] << 56) |
1504 1539 ((uint64_t)p[1] << 48) |
1505 1540 ((uint64_t)p[2] << 40) |
1506 1541 ((uint64_t)p[3] << 32) |
1507 1542 ((uint64_t)p[4] << 24) |
1508 1543 ((uint64_t)p[5] << 16) |
1509 1544 ((uint64_t)p[6] << 8) |
1510 1545 (uint64_t)p[7]);
1511 1546 #endif
1512 1547 counter++;
1513 1548 #ifdef _LITTLE_ENDIAN
1514 1549 counter = (((uint64_t)p[0] << 56) |
1515 1550 ((uint64_t)p[1] << 48) |
1516 1551 ((uint64_t)p[2] << 40) |
1517 1552 ((uint64_t)p[3] << 32) |
1518 1553 ((uint64_t)p[4] << 24) |
1519 1554 ((uint64_t)p[5] << 16) |
1520 1555 ((uint64_t)p[6] << 8) |
1521 1556 (uint64_t)p[7]);
1522 1557 #endif
1523 1558 counter &= ctx->ac_counter_mask;
1524 1559 ctx->ac_cb[1] =
1525 1560 (ctx->ac_cb[1] & ~(ctx->ac_counter_mask)) | counter;
1526 1561
1527 1562 /* XOR with the ciphertext */
1528 - if (IS_P2ALIGNED(blockp, sizeof (uint32_t)) &&
1529 - IS_P2ALIGNED(cbp, sizeof (uint32_t))) {
1563 + if (IS_P2ALIGNED2(blockp, cbp, sizeof (uint32_t))) {
1530 1564 /* LINTED: pointer alignment */
1531 1565 *(uint32_t *)&blockp[0] ^= *(uint32_t *)&cbp[0];
1532 1566 /* LINTED: pointer alignment */
1533 1567 *(uint32_t *)&blockp[4] ^= *(uint32_t *)&cbp[4];
1534 1568 /* LINTED: pointer alignment */
1535 1569 *(uint32_t *)&blockp[8] ^= *(uint32_t *)&cbp[8];
1536 1570 /* LINTED: pointer alignment */
1537 1571 *(uint32_t *)&blockp[12] ^= *(uint32_t *)&cbp[12];
1538 1572 } else {
1539 1573 AES_XOR_BLOCK(cbp, blockp);
1540 1574 }
1541 1575
1542 1576 /* Copy the plaintext to the "holding buffer" */
1543 1577 resultp = (uint8_t *)ctx->ac_ccm_pt_buf +
1544 1578 ctx->ac_ccm_processed_data_len;
1545 - if (IS_P2ALIGNED(blockp, sizeof (uint32_t)) &&
1546 - IS_P2ALIGNED(resultp, sizeof (uint32_t))) {
1579 + if (IS_P2ALIGNED2(blockp, resultp, sizeof (uint32_t))) {
1547 1580 /* LINTED: pointer alignment */
1548 1581 *(uint32_t *)&resultp[0] = *(uint32_t *)blockp;
1549 1582 /* LINTED: pointer alignment */
1550 1583 *(uint32_t *)&resultp[4] = *(uint32_t *)&blockp[4];
1551 1584 /* LINTED: pointer alignment */
1552 1585 *(uint32_t *)&resultp[8] = *(uint32_t *)&blockp[8];
1553 1586 /* LINTED: pointer alignment */
1554 1587 *(uint32_t *)&resultp[12] = *(uint32_t *)&blockp[12];
1555 1588 } else {
1556 1589 AES_COPY_BLOCK(blockp, resultp);
1557 1590 }
1558 1591
1559 1592 ctx->ac_ccm_processed_data_len += AES_BLOCK_LEN;
1560 1593
1561 1594 ctx->ac_lastp = blockp;
1562 1595
1563 1596 /* Update pointer to next block of data to be processed. */
1564 1597 if (ctx->ac_remainder_len != 0) {
1565 1598 datap += need;
1566 1599 ctx->ac_remainder_len = 0;
1567 1600 } else {
1568 1601 datap += AES_BLOCK_LEN;
1569 1602 }
1570 1603
1571 1604 remainder = (size_t)&data[length] - (size_t)datap;
1572 1605
1573 1606 /* Incomplete last block */
1574 1607 if (remainder > 0 && remainder < AES_BLOCK_LEN) {
1575 1608 bcopy(datap, ctx->ac_remainder, remainder);
1576 1609 ctx->ac_remainder_len = remainder;
1577 1610 ctx->ac_copy_to = datap;
1578 1611 if (ctx->ac_ccm_processed_mac_len > 0) {
1579 1612 /*
1580 1613 * not expecting any more ciphertext, just
1581 1614 * compute plaintext for the remaining input
1582 1615 */
1583 1616 aes_ccm_decrypt_incomplete_block(ctx);
1584 1617 ctx->ac_ccm_processed_data_len += remainder;
1585 1618 ctx->ac_remainder_len = 0;
1586 1619 }
1587 1620 goto out;
1588 1621 }
1589 1622 ctx->ac_copy_to = NULL;
1590 1623
1591 1624 } while (remainder > 0);
1592 1625
1593 1626 out:
1594 1627 /* EXPORT DELETE END */
1595 1628
1596 1629 return (0);
1597 1630 }
1598 1631
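Note: restating the length bookkeeping in the function above. The total
input to CCM decrypt is ac_ccm_data_len payload bytes followed by
ac_ccm_mac_len MAC bytes; anything past the payload boundary is stashed
in ac_ccm_mac_input_buf and only compared in aes_ccm_decrypt_final().

    /* Bounds enforced above:
     * - once MAC collection has started (pm_len > 0):
     *       pm_len + length <= ac_ccm_mac_len
     * - otherwise:
     *       pd_len + length + ac_remainder_len <=
     *           ac_ccm_data_len + ac_ccm_mac_len
     * Exceeding either bound returns CRYPTO_DATA_LEN_RANGE.
     */
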
1599 1632 int
1600 1633 aes_ccm_decrypt_final(aes_ctx_t *ctx, crypto_data_t *out)
1601 1634 {
1602 1635 /* EXPORT DELETE START */
1603 1636 size_t mac_remain, pt_len;
1604 1637 uint8_t *pt, *mac_buf, *macp, *ccm_mac_p;
1605 1638 uint8_t ccm_mac[AES_BLOCK_LEN];
1606 1639 void *iov_or_mp;
1607 1640 offset_t offset;
1608 1641 uint8_t *out_data_1, *out_data_2;
1609 1642 size_t out_data_1_len;
1610 1643 uint32_t tmp[4];
1611 1644
1612 1645 pt_len = ctx->ac_ccm_data_len;
1613 1646
1614 1647 /* Make sure output buffer can fit all of the plaintext */
1615 1648 if (out->cd_length < pt_len) {
1616 1649 return (CRYPTO_ARGUMENTS_BAD);
1617 1650 }
1618 1651
1619 1652 pt = ctx->ac_ccm_pt_buf;
1620 1653 mac_remain = ctx->ac_ccm_processed_data_len;
1621 1654 mac_buf = (uint8_t *)ctx->ac_ccm_mac_buf;
1622 1655
1623 1656 macp = (uint8_t *)tmp;
1624 1657
1625 1658 while (mac_remain > 0) {
1626 1659
1627 1660 if (mac_remain < AES_BLOCK_LEN) {
1628 1661 bzero(tmp, AES_BLOCK_LEN);
1629 1662 bcopy(pt, tmp, mac_remain);
1630 1663 mac_remain = 0;
1631 1664 } else {
1632 - if (IS_P2ALIGNED(pt, sizeof (uint32_t)) &&
1633 - IS_P2ALIGNED(macp, sizeof (uint32_t))) {
1665 + if (IS_P2ALIGNED2(pt, macp, sizeof (uint32_t))) {
1634 1666 /* LINTED: pointer alignment */
1635 1667 *(uint32_t *)&macp[0] = *(uint32_t *)pt;
1636 1668 /* LINTED: pointer alignment */
1637 1669 *(uint32_t *)&macp[4] = *(uint32_t *)&pt[4];
1638 1670 /* LINTED: pointer alignment */
1639 1671 *(uint32_t *)&macp[8] = *(uint32_t *)&pt[8];
1640 1672 /* LINTED: pointer alignment */
1641 1673 *(uint32_t *)&macp[12] = *(uint32_t *)&pt[12];
1642 1674 } else {
1643 1675 AES_COPY_BLOCK(pt, macp);
1644 1676 }
1645 1677 mac_remain -= AES_BLOCK_LEN;
1646 1678 pt += AES_BLOCK_LEN;
1647 1679 }
1648 1680
1649 1681 /* calculate the CBC MAC */
1650 - if (IS_P2ALIGNED(macp, sizeof (uint32_t)) &&
1651 - IS_P2ALIGNED(mac_buf, sizeof (uint32_t))) {
1682 + if (IS_P2ALIGNED2(macp, mac_buf, sizeof (uint32_t))) {
1652 1683 /* LINTED: pointer alignment */
1653 1684 *(uint32_t *)&mac_buf[0] ^= *(uint32_t *)&macp[0];
1654 1685 /* LINTED: pointer alignment */
1655 1686 *(uint32_t *)&mac_buf[4] ^= *(uint32_t *)&macp[4];
1656 1687 /* LINTED: pointer alignment */
1657 1688 *(uint32_t *)&mac_buf[8] ^= *(uint32_t *)&macp[8];
1658 1689 /* LINTED: pointer alignment */
1659 1690 *(uint32_t *)&mac_buf[12] ^= *(uint32_t *)&macp[12];
1660 1691 } else {
1661 1692 AES_XOR_BLOCK(macp, mac_buf);
1662 1693 }
1663 1694 aes_encrypt_block(ctx->ac_keysched, mac_buf, mac_buf);
1664 1695 }
1665 1696
1666 1697 /* Calculate the CCM MAC */
1667 1698 ccm_mac_p = ccm_mac;
1668 1699 calculate_ccm_mac(ctx, &ccm_mac_p);
1669 1700
1670 1701 /* compare the input CCM MAC value with what we calculated */
1671 1702 if (bcmp(ctx->ac_ccm_mac_input_buf, ccm_mac, ctx->ac_ccm_mac_len)) {
1672 1703 /* They don't match */
1673 1704 return (CRYPTO_DATA_LEN_RANGE);
1674 1705 } else {
1675 1706 aes_init_ptrs(out, &iov_or_mp, &offset);
1676 1707 aes_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
1677 1708 &out_data_1_len, &out_data_2, pt_len);
1678 1709 bcopy(ctx->ac_ccm_pt_buf, out_data_1, out_data_1_len);
1679 1710 if (out_data_2 != NULL) {
1680 1711 bcopy((ctx->ac_ccm_pt_buf) + out_data_1_len,
1681 1712 out_data_2, pt_len - out_data_1_len);
1682 1713 }
1683 1714 out->cd_offset += pt_len;
1684 1715 }
1685 1716
1686 1717 /* EXPORT DELETE END */
1687 1718 return (0);
1688 1719 }