/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright 2008 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

#pragma ident	"@(#)aes_cbc_crypt.c	1.9	08/05/09 SMI"


#include <sys/sysmacros.h>
#include <sys/systm.h>
#include <sys/crypto/common.h>
#include <sys/strsun.h>
#include "aes_cbc_crypt.h"
#include "aes_impl.h"
#ifndef	_KERNEL
#include <limits.h>
#include <strings.h>
#endif	/* !_KERNEL */

static int aes_ctr_ccm_mode_contiguous_blocks(aes_ctx_t *, char *, size_t,
    crypto_data_t *);
static void
encode_adata_len(ulong_t auth_data_len, uint8_t *encoded, size_t *encoded_len);
static void
aes_ccm_format_initial_blocks(uchar_t *nonce, ulong_t nonceSize,
    ulong_t authDataSize, uint8_t *b0, aes_ctx_t *aes_ctx);
static int
aes_ccm_decrypt_contiguous_blocks(aes_ctx_t *ctx, char *data, size_t length,
    crypto_data_t *out);

/*
 * Initialize by setting iov_or_mp to point to the current iovec or mp,
 * and by setting current_offset to an offset within the current iovec or mp.
 */
static void
aes_init_ptrs(crypto_data_t *out, void **iov_or_mp, offset_t *current_offset)
{
	offset_t offset;

	switch (out->cd_format) {
	case CRYPTO_DATA_RAW:
		*current_offset = out->cd_offset;
		break;

	case CRYPTO_DATA_UIO: {
		uio_t *uiop = out->cd_uio;
		uintptr_t vec_idx;

		offset = out->cd_offset;
		for (vec_idx = 0; vec_idx < uiop->uio_iovcnt &&
		    offset >= uiop->uio_iov[vec_idx].iov_len;
		    offset -= uiop->uio_iov[vec_idx++].iov_len)
			;

		*current_offset = offset;
		*iov_or_mp = (void *)vec_idx;
		break;
	}

	case CRYPTO_DATA_MBLK: {
		mblk_t *mp;

		offset = out->cd_offset;
		for (mp = out->cd_mp; mp != NULL && offset >= MBLKL(mp);
		    offset -= MBLKL(mp), mp = mp->b_cont)
			;

		*current_offset = offset;
		*iov_or_mp = mp;
		break;

	}
	} /* end switch */
}
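/*
 * Note on usage (illustrative only, not from the original sources):
 * aes_init_ptrs() and aes_get_ptrs() are meant to be used as a pair.
 * A caller first positions the cursor at out->cd_offset, then repeatedly
 * asks for the destination of each processed block.  A minimal sketch of
 * the calling pattern, assuming one 16-byte AES block per iteration:
 *
 *	void *iov_or_mp;
 *	offset_t offset;
 *	uint8_t *d1, *d2;
 *	size_t d1_len;
 *
 *	aes_init_ptrs(out, &iov_or_mp, &offset);
 *	aes_get_ptrs(out, &iov_or_mp, &offset, &d1, &d1_len, &d2,
 *	    AES_BLOCK_LEN);
 *	(copy d1_len bytes to d1; if d2 != NULL, copy the rest to d2)
 */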
/*
 * Get pointers for where in the output to copy a block of encrypted or
 * decrypted data.  The iov_or_mp argument stores a pointer to the current
 * iovec or mp, and offset stores an offset into the current iovec or mp.
 */
static void
aes_get_ptrs(crypto_data_t *out, void **iov_or_mp, offset_t *current_offset,
    uint8_t **out_data_1, size_t *out_data_1_len, uint8_t **out_data_2,
    size_t amt)
{
	offset_t offset;

	switch (out->cd_format) {
	case CRYPTO_DATA_RAW: {
		iovec_t *iov;

		offset = *current_offset;
		iov = &out->cd_raw;
		if ((offset + amt) <= iov->iov_len) {
			/* one AES block fits */
			*out_data_1 = (uint8_t *)iov->iov_base + offset;
			*out_data_1_len = amt;
			*out_data_2 = NULL;
			*current_offset = offset + amt;
		}
		break;
	}

	case CRYPTO_DATA_UIO: {
		uio_t *uio = out->cd_uio;
		iovec_t *iov;
		offset_t offset;
		uintptr_t vec_idx;
		uint8_t *p;

		offset = *current_offset;
		vec_idx = (uintptr_t)(*iov_or_mp);
		iov = &uio->uio_iov[vec_idx];
		p = (uint8_t *)iov->iov_base + offset;
		*out_data_1 = p;

		if (offset + amt <= iov->iov_len) {
			/* can fit one AES block into this iov */
			*out_data_1_len = amt;
			*out_data_2 = NULL;
			*current_offset = offset + amt;
		} else {
			/* one AES block spans two iovecs */
			*out_data_1_len = iov->iov_len - offset;
			if (vec_idx == uio->uio_iovcnt)
				return;
			vec_idx++;
			iov = &uio->uio_iov[vec_idx];
			*out_data_2 = (uint8_t *)iov->iov_base;
			*current_offset = amt - *out_data_1_len;
		}
		*iov_or_mp = (void *)vec_idx;
		break;
	}

	case CRYPTO_DATA_MBLK: {
		mblk_t *mp;
		uint8_t *p;

		offset = *current_offset;
		mp = (mblk_t *)*iov_or_mp;
		p = mp->b_rptr + offset;
		*out_data_1 = p;
		if ((p + amt) <= mp->b_wptr) {
			/* can fit one AES block into this mblk */
			*out_data_1_len = amt;
			*out_data_2 = NULL;
			*current_offset = offset + amt;
		} else {
			/* one AES block spans two mblks */
			*out_data_1_len = mp->b_wptr - p;
			if ((mp = mp->b_cont) == NULL)
				return;
			*out_data_2 = mp->b_rptr;
			*current_offset = (amt - *out_data_1_len);
		}
		*iov_or_mp = mp;
		break;
	}
	} /* end switch */
}
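/*
 * Worked example for aes_get_ptrs() (values hypothetical): suppose the
 * output uio has two iovecs and the cursor sits 5 bytes before the end
 * of the first one.  A request for amt = AES_BLOCK_LEN (16) bytes then
 * returns:
 *
 *	out_data_1     -> last 5 bytes of iovec 0    (out_data_1_len = 5)
 *	out_data_2     -> start of iovec 1           (remaining 11 bytes)
 *	current_offset -> 11, i.e. amt - out_data_1_len
 *
 * Callers therefore always copy out_data_1_len bytes to out_data_1 and,
 * when out_data_2 is non-NULL, the remaining amt - out_data_1_len bytes
 * to out_data_2.
 */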
static int
aes_cbc_encrypt_contiguous_blocks(aes_ctx_t *ctx, char *data, size_t length,
    crypto_data_t *out)
{

/* EXPORT DELETE START */

	size_t remainder = length;
	size_t need;
	uint8_t *datap = (uint8_t *)data;
	uint8_t *blockp;
	uint8_t *lastp;
	uint32_t tmp[4];
	void *iov_or_mp;
	offset_t offset;
	uint8_t *out_data_1;
	uint8_t *out_data_2;
	size_t out_data_1_len;

	if (length + ctx->ac_remainder_len < AES_BLOCK_LEN) {
		/* accumulate bytes here and return */
		bcopy(datap,
		    (uint8_t *)ctx->ac_remainder + ctx->ac_remainder_len,
		    length);
		ctx->ac_remainder_len += length;
		ctx->ac_copy_to = datap;
		return (0);
	}

	lastp = (uint8_t *)ctx->ac_iv;
	if (out != NULL)
		aes_init_ptrs(out, &iov_or_mp, &offset);

	do {
		/* Unprocessed data from last call. */
		if (ctx->ac_remainder_len > 0) {
			need = AES_BLOCK_LEN - ctx->ac_remainder_len;

			if (need > remainder)
				return (1);

			bcopy(datap, &((uint8_t *)ctx->ac_remainder)
			    [ctx->ac_remainder_len], need);

			blockp = (uint8_t *)ctx->ac_remainder;
		} else {
			blockp = datap;
		}

		/* don't write on the plaintext */
		if (out != NULL) {
			if (IS_P2ALIGNED(blockp, sizeof (uint32_t))) {
				/* LINTED: pointer alignment */
				tmp[0] = *(uint32_t *)blockp;
				/* LINTED: pointer alignment */
				tmp[1] = *(uint32_t *)&blockp[4];
				/* LINTED: pointer alignment */
				tmp[2] = *(uint32_t *)&blockp[8];
				/* LINTED: pointer alignment */
				tmp[3] = *(uint32_t *)&blockp[12];
			} else {
				uint8_t *tmp8 = (uint8_t *)tmp;

				AES_COPY_BLOCK(blockp, tmp8);
			}
			blockp = (uint8_t *)tmp;
		}

		if (ctx->ac_flags & AES_CBC_MODE) {
			/*
			 * XOR the previous cipher block or IV with the
			 * current clear block.  Check for alignment.
			 */
			if (IS_P2ALIGNED(blockp, sizeof (uint32_t)) &&
			    IS_P2ALIGNED(lastp, sizeof (uint32_t))) {
				/* LINTED: pointer alignment */
				*(uint32_t *)&blockp[0] ^=
				    /* LINTED: pointer alignment */
				    *(uint32_t *)&lastp[0];
				/* LINTED: pointer alignment */
				*(uint32_t *)&blockp[4] ^=
				    /* LINTED: pointer alignment */
				    *(uint32_t *)&lastp[4];
				/* LINTED: pointer alignment */
				*(uint32_t *)&blockp[8] ^=
				    /* LINTED: pointer alignment */
				    *(uint32_t *)&lastp[8];
				/* LINTED: pointer alignment */
				*(uint32_t *)&blockp[12] ^=
				    /* LINTED: pointer alignment */
				    *(uint32_t *)&lastp[12];
			} else {
				AES_XOR_BLOCK(lastp, blockp);
			}
		}

		if (out == NULL) {
			aes_encrypt_block(ctx->ac_keysched, blockp, blockp);

			ctx->ac_lastp = blockp;
			lastp = blockp;

			if (ctx->ac_remainder_len > 0) {
				bcopy(blockp, ctx->ac_copy_to,
				    ctx->ac_remainder_len);
				bcopy(blockp + ctx->ac_remainder_len, datap,
				    need);
			}
		} else {
			aes_encrypt_block(ctx->ac_keysched, blockp, lastp);
			aes_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
			    &out_data_1_len, &out_data_2, AES_BLOCK_LEN);

			/* copy block to where it belongs */
			bcopy(lastp, out_data_1, out_data_1_len);
			if (out_data_2 != NULL) {
				bcopy(lastp + out_data_1_len, out_data_2,
				    AES_BLOCK_LEN - out_data_1_len);
			}
			/* update offset */
			out->cd_offset += AES_BLOCK_LEN;
		}

		/* Update pointer to next block of data to be processed. */
		if (ctx->ac_remainder_len != 0) {
			datap += need;
			ctx->ac_remainder_len = 0;
		} else {
			datap += AES_BLOCK_LEN;
		}

		remainder = (size_t)&data[length] - (size_t)datap;

		/* Incomplete last block. */
		if (remainder > 0 && remainder < AES_BLOCK_LEN) {
			bcopy(datap, ctx->ac_remainder, remainder);
			ctx->ac_remainder_len = remainder;
			ctx->ac_copy_to = datap;
			goto out;
		}
		ctx->ac_copy_to = NULL;

	} while (remainder > 0);

out:
	/*
	 * Save the last encrypted block in the context - but only for
	 * the CBC mode of operation.
	 */
	if ((ctx->ac_flags & AES_CBC_MODE) && (ctx->ac_lastp != NULL)) {
		uint8_t *iv8 = (uint8_t *)ctx->ac_iv;
		uint8_t *last8 = (uint8_t *)ctx->ac_lastp;

		if (IS_P2ALIGNED(ctx->ac_lastp, sizeof (uint32_t))) {
			/* LINTED: pointer alignment */
			*(uint32_t *)iv8 = *(uint32_t *)last8;
			/* LINTED: pointer alignment */
			*(uint32_t *)&iv8[4] = *(uint32_t *)&last8[4];
			/* LINTED: pointer alignment */
			*(uint32_t *)&iv8[8] = *(uint32_t *)&last8[8];
			/* LINTED: pointer alignment */
			*(uint32_t *)&iv8[12] = *(uint32_t *)&last8[12];
		} else {
			AES_COPY_BLOCK(last8, iv8);
		}
		ctx->ac_lastp = (uint8_t *)ctx->ac_iv;
	}

/* EXPORT DELETE END */

	return (0);
}
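/*
 * For reference, the function above implements the standard CBC
 * recurrence C_i = E_K(P_i ^ C_{i-1}), with C_0 = IV, while buffering
 * any trailing partial block in ctx->ac_remainder.  As an illustrative
 * example (sizes hypothetical): a call with length = 37 encrypts two
 * full 16-byte blocks and leaves ac_remainder_len = 5; a later call
 * first tops that remainder up to a full block before continuing.
 */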
#define	OTHER(a, ctx) \
	(((a) == (ctx)->ac_lastblock) ? (ctx)->ac_iv : (ctx)->ac_lastblock)

/*
 * Encrypt multiple blocks of data.
 */
/* ARGSUSED */
int
aes_encrypt_contiguous_blocks(aes_ctx_t *ctx, char *data, size_t length,
    crypto_data_t *out)
{
	if (ctx->ac_flags & (AES_CTR_MODE | AES_CCM_MODE))
		return (aes_ctr_ccm_mode_contiguous_blocks(ctx, data, length,
		    out));
	return (aes_cbc_encrypt_contiguous_blocks(ctx, data, length, out));
}
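/*
 * Note on the OTHER() macro above (explanatory, not from the original
 * sources): CBC decryption must remember the previous ciphertext block
 * even when it decrypts in place, so the context keeps two spare AES
 * blocks, ac_lastblock and ac_iv.  OTHER(a, ctx) simply returns
 * whichever of the two buffers `a' is not currently pointing at,
 * letting the decrypt loop below ping-pong between them:
 *
 *	save ciphertext C_i into OTHER(lastp, ctx);
 *	decrypt, then XOR with *lastp (which still holds C_{i-1});
 *	lastp = OTHER(lastp, ctx);	(now points at the saved C_i)
 */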
/* ARGSUSED */
static int
aes_cbc_decrypt_contiguous_blocks(aes_ctx_t *ctx, char *data, size_t length,
    crypto_data_t *out)
{

/* EXPORT DELETE START */

	size_t remainder = length;
	size_t need;
	uint8_t *datap = (uint8_t *)data;
	uint8_t *blockp;
	uint8_t *lastp;
	uint32_t tmp[4];
	void *iov_or_mp;
	offset_t offset;
	uint8_t *out_data_1;
	uint8_t *out_data_2;
	size_t out_data_1_len;

	if (length + ctx->ac_remainder_len < AES_BLOCK_LEN) {
		/* accumulate bytes here and return */
		bcopy(datap,
		    (uint8_t *)ctx->ac_remainder + ctx->ac_remainder_len,
		    length);
		ctx->ac_remainder_len += length;
		ctx->ac_copy_to = datap;
		return (0);
	}

	lastp = ctx->ac_lastp;
	if (out != NULL)
		aes_init_ptrs(out, &iov_or_mp, &offset);

	do {
		/* Unprocessed data from last call. */
		if (ctx->ac_remainder_len > 0) {
			need = AES_BLOCK_LEN - ctx->ac_remainder_len;

			if (need > remainder)
				return (1);

			bcopy(datap, &((uint8_t *)ctx->ac_remainder)
			    [ctx->ac_remainder_len], need);

			blockp = (uint8_t *)ctx->ac_remainder;
		} else {
			blockp = datap;
		}

		if (ctx->ac_flags & AES_CBC_MODE) {

			/* Save current ciphertext block */
			if (IS_P2ALIGNED(blockp, sizeof (uint32_t))) {
				uint32_t *tmp32;

				/* LINTED: pointer alignment */
				tmp32 = (uint32_t *)OTHER((uint64_t *)lastp,
				    ctx);

				/* LINTED: pointer alignment */
				*tmp32++ = *(uint32_t *)blockp;
				/* LINTED: pointer alignment */
				*tmp32++ = *(uint32_t *)&blockp[4];
				/* LINTED: pointer alignment */
				*tmp32++ = *(uint32_t *)&blockp[8];
				/* LINTED: pointer alignment */
				*tmp32++ = *(uint32_t *)&blockp[12];
			} else {
				uint8_t *tmp8;
				/* LINTED: pointer alignment */
				tmp8 = (uint8_t *)OTHER((uint64_t *)lastp, ctx);

				AES_COPY_BLOCK(blockp, tmp8);
			}
		}

		if (out != NULL) {
			aes_decrypt_block(ctx->ac_keysched, blockp,
			    (uint8_t *)tmp);
			blockp = (uint8_t *)tmp;
		} else {
			aes_decrypt_block(ctx->ac_keysched, blockp, blockp);
		}

		if (ctx->ac_flags & AES_CBC_MODE) {
			/*
			 * XOR the previous cipher block or IV with the
			 * currently decrypted block.  Check for alignment.
			 */
			if (IS_P2ALIGNED(blockp, sizeof (uint32_t)) &&
			    IS_P2ALIGNED(lastp, sizeof (uint32_t))) {
				/* LINTED: pointer alignment */
				*(uint32_t *)blockp ^= *(uint32_t *)lastp;
				/* LINTED: pointer alignment */
				*(uint32_t *)&blockp[4] ^=
				    /* LINTED: pointer alignment */
				    *(uint32_t *)&lastp[4];
				/* LINTED: pointer alignment */
				*(uint32_t *)&blockp[8] ^=
				    /* LINTED: pointer alignment */
				    *(uint32_t *)&lastp[8];
				/* LINTED: pointer alignment */
				*(uint32_t *)&blockp[12] ^=
				    /* LINTED: pointer alignment */
				    *(uint32_t *)&lastp[12];
			} else {
				AES_XOR_BLOCK(lastp, blockp);
			}

			/* LINTED: pointer alignment */
			lastp = (uint8_t *)OTHER((uint64_t *)lastp, ctx);
		}

		if (out != NULL) {
			aes_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
			    &out_data_1_len, &out_data_2, AES_BLOCK_LEN);

			/* copy temporary block to where it belongs */
			bcopy(&tmp, out_data_1, out_data_1_len);
			if (out_data_2 != NULL) {
				bcopy((uint8_t *)&tmp + out_data_1_len,
				    out_data_2, AES_BLOCK_LEN - out_data_1_len);
			}

			/* update offset */
			out->cd_offset += AES_BLOCK_LEN;

		} else if (ctx->ac_remainder_len > 0) {
			/* copy temporary block to where it belongs */
			bcopy(blockp, ctx->ac_copy_to, ctx->ac_remainder_len);
			bcopy(blockp + ctx->ac_remainder_len, datap, need);
		}

		/* Update pointer to next block of data to be processed. */
		if (ctx->ac_remainder_len != 0) {
			datap += need;
			ctx->ac_remainder_len = 0;
		} else {
			datap += AES_BLOCK_LEN;
		}

		remainder = (size_t)&data[length] - (size_t)datap;

		/* Incomplete last block. */
		if (remainder > 0 && remainder < AES_BLOCK_LEN) {
			bcopy(datap, ctx->ac_remainder, remainder);
			ctx->ac_remainder_len = remainder;
			ctx->ac_lastp = lastp;
			ctx->ac_copy_to = datap;
			return (0);
		}
		ctx->ac_copy_to = NULL;

	} while (remainder > 0);

	ctx->ac_lastp = lastp;

/* EXPORT DELETE END */

	return (0);
}
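/*
 * For reference, the function above implements the CBC decryption
 * recurrence P_i = D_K(C_i) ^ C_{i-1}, with C_0 = IV.  Because the
 * ciphertext may be overwritten when decrypting in place (out == NULL),
 * each C_i is first saved into one of the two context buffers via
 * OTHER() before it is destroyed, so that it is still available as the
 * chaining value for the next block.
 */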
/*
 * Decrypt multiple blocks of data.
 */
int
aes_decrypt_contiguous_blocks(aes_ctx_t *ctx, char *data, size_t length,
    crypto_data_t *out)
{
	if (ctx->ac_flags & AES_CTR_MODE)
		return (aes_ctr_ccm_mode_contiguous_blocks(ctx, data, length,
		    out));
	else if (ctx->ac_flags & AES_CCM_MODE)
		return (aes_ccm_decrypt_contiguous_blocks(ctx, data, length,
		    out));
	return (aes_cbc_decrypt_contiguous_blocks(ctx, data, length, out));
}

/* ARGSUSED */
int
aes_counter_final(aes_ctx_t *ctx, crypto_data_t *out)
{
/* EXPORT DELETE START */

	uint8_t *lastp;
	uint32_t counter_block[4];
	uint8_t tmp[AES_BLOCK_LEN];
	int i;
	void *iov_or_mp;
	offset_t offset;
	uint8_t *out_data_1;
	uint8_t *out_data_2;
	size_t out_data_1_len;

	if (out->cd_length < ctx->ac_remainder_len)
		return (CRYPTO_ARGUMENTS_BAD);

	/* ac_iv is the counter block */
	aes_encrypt_block(ctx->ac_keysched, (uint8_t *)ctx->ac_iv,
	    (uint8_t *)counter_block);

	lastp = (uint8_t *)counter_block;

	/* copy remainder to temporary buffer */
	bcopy(ctx->ac_remainder, tmp, ctx->ac_remainder_len);

	/* XOR with counter block */
	for (i = 0; i < ctx->ac_remainder_len; i++) {
		tmp[i] ^= lastp[i];
	}

	aes_init_ptrs(out, &iov_or_mp, &offset);
	aes_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
	    &out_data_1_len, &out_data_2, ctx->ac_remainder_len);

	/* copy temporary block to where it belongs */
	bcopy(tmp, out_data_1, out_data_1_len);
	if (out_data_2 != NULL) {
		bcopy((uint8_t *)tmp + out_data_1_len,
		    out_data_2, ctx->ac_remainder_len - out_data_1_len);
	}
	out->cd_offset += ctx->ac_remainder_len;
	ctx->ac_remainder_len = 0;

/* EXPORT DELETE END */

	return (0);
}
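/*
 * Illustration for aes_counter_final() (hypothetical sizes): counter
 * mode needs no padding, so if 5 bytes of input are still buffered at
 * the end of an operation, the current counter block is encrypted once
 * and only the first 5 keystream bytes are XORed with the buffered
 * data:
 *
 *	keystream = E_K(counter block)
 *	out[i]    = remainder[i] ^ keystream[i],  for i = 0 .. 4
 */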
/*
 * Encrypt and decrypt multiple blocks of data in counter mode.
 * Encrypt multiple blocks of data in CCM mode.  Decrypt for CCM mode
 * is done in another function.
 */
/* ARGSUSED */
int
aes_ctr_ccm_mode_contiguous_blocks(aes_ctx_t *ctx, char *data, size_t length,
    crypto_data_t *out)
{

/* EXPORT DELETE START */

	size_t remainder = length;
	size_t need;
	uint8_t *datap = (uint8_t *)data;
	uint8_t *blockp;
	uint8_t *lastp;
	uint32_t tmp[4];
	uint32_t counter_block[4];
	void *iov_or_mp;
	offset_t offset;
	uint8_t *out_data_1;
	uint8_t *out_data_2;
	size_t out_data_1_len;
	uint64_t counter;
	uint8_t *mac_buf;
#ifdef _LITTLE_ENDIAN
	uint8_t *p;
#endif

	if (length + ctx->ac_remainder_len < AES_BLOCK_LEN) {
		/* accumulate bytes here and return */
		bcopy(datap,
		    (uint8_t *)ctx->ac_remainder + ctx->ac_remainder_len,
		    length);
		ctx->ac_remainder_len += length;
		ctx->ac_copy_to = datap;
		return (0);
	}

	lastp = (uint8_t *)ctx->ac_cb;
	if (out != NULL)
		aes_init_ptrs(out, &iov_or_mp, &offset);

	if (ctx->ac_flags & AES_CCM_MODE) {
		mac_buf = (uint8_t *)ctx->ac_ccm_mac_buf;
	}

	do {
		/* Unprocessed data from last call. */
		if (ctx->ac_remainder_len > 0) {
			need = AES_BLOCK_LEN - ctx->ac_remainder_len;

			if (need > remainder)
				return (1);

			bcopy(datap, &((uint8_t *)ctx->ac_remainder)
			    [ctx->ac_remainder_len], need);

			blockp = (uint8_t *)ctx->ac_remainder;
		} else {
			blockp = datap;
		}

		/* don't write on the plaintext */
		if (out != NULL) {
			if (IS_P2ALIGNED(blockp, sizeof (uint32_t))) {
				/* LINTED: pointer alignment */
				tmp[0] = *(uint32_t *)blockp;
				/* LINTED: pointer alignment */
				tmp[1] = *(uint32_t *)&blockp[4];
				/* LINTED: pointer alignment */
				tmp[2] = *(uint32_t *)&blockp[8];
				/* LINTED: pointer alignment */
				tmp[3] = *(uint32_t *)&blockp[12];
			} else {
				uint8_t *tmp8 = (uint8_t *)tmp;

				AES_COPY_BLOCK(blockp, tmp8);
			}
			blockp = (uint8_t *)tmp;
		}

		if (ctx->ac_flags & AES_CCM_MODE) {
			/*
			 * do CBC MAC
			 *
			 * XOR the previous cipher block with the current
			 * clear block.  mac_buf always contains the
			 * previous cipher block.
			 */
			if (IS_P2ALIGNED(blockp, sizeof (uint32_t)) &&
			    IS_P2ALIGNED(mac_buf, sizeof (uint32_t))) {
				/* LINTED: pointer alignment */
				*(uint32_t *)&mac_buf[0] ^=
				    /* LINTED: pointer alignment */
				    *(uint32_t *)&blockp[0];
				/* LINTED: pointer alignment */
				*(uint32_t *)&mac_buf[4] ^=
				    /* LINTED: pointer alignment */
				    *(uint32_t *)&blockp[4];
				/* LINTED: pointer alignment */
				*(uint32_t *)&mac_buf[8] ^=
				    /* LINTED: pointer alignment */
				    *(uint32_t *)&blockp[8];
				/* LINTED: pointer alignment */
				*(uint32_t *)&mac_buf[12] ^=
				    /* LINTED: pointer alignment */
				    *(uint32_t *)&blockp[12];
			} else {
				AES_XOR_BLOCK(blockp, mac_buf);
			}
			aes_encrypt_block(ctx->ac_keysched, mac_buf, mac_buf);
		}


		/* ac_cb is the counter block */
		aes_encrypt_block(ctx->ac_keysched, (uint8_t *)ctx->ac_cb,
		    (uint8_t *)counter_block);

		lastp = (uint8_t *)counter_block;

		/*
		 * Increment counter.  Counter bits are confined
		 * to the bottom 64 bits of the counter block.
		 */
		counter = ctx->ac_cb[1] & ctx->ac_counter_mask;
#ifdef _LITTLE_ENDIAN
		p = (uint8_t *)&counter;
		counter = (((uint64_t)p[0] << 56) |
		    ((uint64_t)p[1] << 48) |
		    ((uint64_t)p[2] << 40) |
		    ((uint64_t)p[3] << 32) |
		    ((uint64_t)p[4] << 24) |
		    ((uint64_t)p[5] << 16) |
		    ((uint64_t)p[6] << 8) |
		    (uint64_t)p[7]);
#endif
		counter++;
#ifdef _LITTLE_ENDIAN
		counter = (((uint64_t)p[0] << 56) |
		    ((uint64_t)p[1] << 48) |
		    ((uint64_t)p[2] << 40) |
		    ((uint64_t)p[3] << 32) |
		    ((uint64_t)p[4] << 24) |
		    ((uint64_t)p[5] << 16) |
		    ((uint64_t)p[6] << 8) |
		    (uint64_t)p[7]);
#endif
		counter &= ctx->ac_counter_mask;
		ctx->ac_cb[1] =
		    (ctx->ac_cb[1] & ~(ctx->ac_counter_mask)) | counter;
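		/*
		 * Explanatory note (not in the original sources): the
		 * counter lives in the counter block in big-endian byte
		 * order.  On little-endian machines the two byte-swap
		 * blocks above convert it to host order, increment it,
		 * and convert it back -- logically the same as the
		 * hypothetical sequence
		 *
		 *	counter = ntohll(cb[1] & mask);
		 *	counter = htonll(counter + 1) & mask;
		 *
		 * where ntohll()/htonll() stand for 64-bit byte-swap
		 * helpers.
		 */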
		/*
		 * XOR the encrypted counter block (the keystream) with
		 * the current clear block.  Check for alignment.
		 */
		if (IS_P2ALIGNED(blockp, sizeof (uint32_t)) &&
		    IS_P2ALIGNED(lastp, sizeof (uint32_t))) {
			/* LINTED: pointer alignment */
			*(uint32_t *)&blockp[0] ^=
			    /* LINTED: pointer alignment */
			    *(uint32_t *)&lastp[0];
			/* LINTED: pointer alignment */
			*(uint32_t *)&blockp[4] ^=
			    /* LINTED: pointer alignment */
			    *(uint32_t *)&lastp[4];
			/* LINTED: pointer alignment */
			*(uint32_t *)&blockp[8] ^=
			    /* LINTED: pointer alignment */
			    *(uint32_t *)&lastp[8];
			/* LINTED: pointer alignment */
			*(uint32_t *)&blockp[12] ^=
			    /* LINTED: pointer alignment */
			    *(uint32_t *)&lastp[12];
		} else {
			AES_XOR_BLOCK(lastp, blockp);
		}

		ctx->ac_lastp = blockp;
		lastp = blockp;
		if (ctx->ac_flags & AES_CCM_MODE) {
			ctx->ac_ccm_processed_data_len += AES_BLOCK_LEN;
		}

		if (out == NULL) {
			if (ctx->ac_remainder_len > 0) {
				bcopy(blockp, ctx->ac_copy_to,
				    ctx->ac_remainder_len);
				bcopy(blockp + ctx->ac_remainder_len, datap,
				    need);
			}
		} else {
			aes_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
			    &out_data_1_len, &out_data_2, AES_BLOCK_LEN);

			/* copy block to where it belongs */
			bcopy(lastp, out_data_1, out_data_1_len);
			if (out_data_2 != NULL) {
				bcopy(lastp + out_data_1_len, out_data_2,
				    AES_BLOCK_LEN - out_data_1_len);
			}
			/* update offset */
			out->cd_offset += AES_BLOCK_LEN;
		}

		/* Update pointer to next block of data to be processed. */
		if (ctx->ac_remainder_len != 0) {
			datap += need;
			ctx->ac_remainder_len = 0;
		} else {
			datap += AES_BLOCK_LEN;
		}

		remainder = (size_t)&data[length] - (size_t)datap;

		/* Incomplete last block. */
		if (remainder > 0 && remainder < AES_BLOCK_LEN) {
			bcopy(datap, ctx->ac_remainder, remainder);
			ctx->ac_remainder_len = remainder;
			ctx->ac_copy_to = datap;
			goto out;
		}
		ctx->ac_copy_to = NULL;

	} while (remainder > 0);

out:

/* EXPORT DELETE END */

	return (0);
}

/*
 * The following function should be called at encrypt or decrypt init time
 * for AES CCM mode.
 */
int
aes_ccm_init(aes_ctx_t *ctx, unsigned char *nonce, size_t nonce_len,
    unsigned char *auth_data, size_t auth_data_len)
{
/* EXPORT DELETE START */
	uint8_t *mac_buf, *datap, *ivp, *authp;
	uint32_t iv[4], tmp[4];
	size_t remainder, processed;
	uint8_t encoded_a[10];	/* max encoded auth data length is 10 octets */
	size_t encoded_a_len = 0;

	mac_buf = (uint8_t *)&(ctx->ac_ccm_mac_buf);

	/*
	 * Format the 1st block for CBC-MAC and construct the
	 * 1st counter block.
	 *
	 * aes_ctx->ac_iv is used for storing the counter block
	 * mac_buf will store b0 at this time.
	 */
	aes_ccm_format_initial_blocks(nonce, nonce_len,
	    auth_data_len, mac_buf, ctx);

	/* The IV for CBC MAC for AES CCM mode is always zero */
	bzero(iv, AES_BLOCK_LEN);
	ivp = (uint8_t *)iv;

	if (IS_P2ALIGNED(ivp, sizeof (uint32_t)) &&
	    IS_P2ALIGNED(mac_buf, sizeof (uint32_t))) {
		/* LINTED: pointer alignment */
		*(uint32_t *)&mac_buf[0] ^= *(uint32_t *)&ivp[0];
		/* LINTED: pointer alignment */
		*(uint32_t *)&mac_buf[4] ^= *(uint32_t *)&ivp[4];
		/* LINTED: pointer alignment */
		*(uint32_t *)&mac_buf[8] ^= *(uint32_t *)&ivp[8];
		/* LINTED: pointer alignment */
		*(uint32_t *)&mac_buf[12] ^= *(uint32_t *)&ivp[12];
	} else {
		AES_XOR_BLOCK(ivp, mac_buf);
	}

	/* encrypt b0 (holds the flags, the nonce, and the payload length) */
	aes_encrypt_block(ctx->ac_keysched, mac_buf, mac_buf);

	/* take care of the associated data, if any */
	if (auth_data_len == 0) {
		return (0);
	}

	encode_adata_len(auth_data_len, encoded_a, &encoded_a_len);

	remainder = auth_data_len;

	/* 1st block: it contains the encoded associated data length and */
	/* some of the data itself */
	authp = (uint8_t *)tmp;
	bzero(authp, AES_BLOCK_LEN);
	bcopy(encoded_a, authp, encoded_a_len);
	processed = AES_BLOCK_LEN - encoded_a_len;
	if (processed > auth_data_len) {
		/* in case auth_data is very small */
		processed = auth_data_len;
	}
	bcopy(auth_data, authp+encoded_a_len, processed);
	/* xor with previous buffer */
	if (IS_P2ALIGNED(authp, sizeof (uint32_t)) &&
	    IS_P2ALIGNED(mac_buf, sizeof (uint32_t))) {
		/* LINTED: pointer alignment */
		*(uint32_t *)&mac_buf[0] ^= *(uint32_t *)&authp[0];
		/* LINTED: pointer alignment */
		*(uint32_t *)&mac_buf[4] ^= *(uint32_t *)&authp[4];
		/* LINTED: pointer alignment */
		*(uint32_t *)&mac_buf[8] ^= *(uint32_t *)&authp[8];
		/* LINTED: pointer alignment */
		*(uint32_t *)&mac_buf[12] ^= *(uint32_t *)&authp[12];
	} else {
		AES_XOR_BLOCK(authp, mac_buf);
	}
	aes_encrypt_block(ctx->ac_keysched, mac_buf, mac_buf);
	remainder -= processed;
	if (remainder == 0) {
		/* a small amount of associated data, it's all done now */
		return (0);
	}

	do {
		if (remainder < AES_BLOCK_LEN) {
			/*
			 * There's not a block full of data, pad the rest
			 * of the buffer with zeros.
			 */
			bzero(authp, AES_BLOCK_LEN);
			bcopy(&(auth_data[processed]), authp, remainder);
			datap = (uint8_t *)authp;
			remainder = 0;
		} else {
			datap = (uint8_t *)(&(auth_data[processed]));
			processed += AES_BLOCK_LEN;
			remainder -= AES_BLOCK_LEN;
		}

		/* xor with previous buffer */
		if (IS_P2ALIGNED(datap, sizeof (uint32_t)) &&
		    IS_P2ALIGNED(mac_buf, sizeof (uint32_t))) {
			/* LINTED: pointer alignment */
			*(uint32_t *)&mac_buf[0] ^= *(uint32_t *)&datap[0];
			/* LINTED: pointer alignment */
			*(uint32_t *)&mac_buf[4] ^= *(uint32_t *)&datap[4];
			/* LINTED: pointer alignment */
			*(uint32_t *)&mac_buf[8] ^= *(uint32_t *)&datap[8];
			/* LINTED: pointer alignment */
			*(uint32_t *)&mac_buf[12] ^= *(uint32_t *)&datap[12];
		} else {
			AES_XOR_BLOCK(datap, mac_buf);
		}

		aes_encrypt_block(ctx->ac_keysched, mac_buf, mac_buf);

	} while (remainder > 0);

/* EXPORT DELETE END */
	return (0);
}
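/*
 * Worked example for the CBC-MAC prologue above (values hypothetical):
 * with nonce_len = 12 and a 16-byte MAC, aes_ccm_format_initial_blocks()
 * builds b0 as
 *
 *	b0[0]      flags: Adata<<6 | ((t-2)/2)<<3 | (q-1), here 0x7a
 *	b0[1..12]  the 12-byte nonce
 *	b0[13..15] the payload length, big-endian, in q = 15 - 12 = 3 bytes
 *
 * and a 2-byte-encoded auth_data_len is prepended to the first block of
 * associated data before it enters the CBC-MAC (assuming auth_data_len
 * is below 2^16 - 2^8).
 */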
void
calculate_ccm_mac(aes_ctx_t *ctx, uint8_t **ccm_mac)
{
/* EXPORT DELETE START */
	uint64_t counter;
	uint32_t counter_block[4];
	uint8_t *counterp, *mac_buf;
	int i;

	mac_buf = (uint8_t *)ctx->ac_ccm_mac_buf;

	/* first counter block starts with index 0 */
	counter = 0;
	ctx->ac_cb[1] = (ctx->ac_cb[1] & ~(ctx->ac_counter_mask)) | counter;

	aes_encrypt_block(ctx->ac_keysched, (uint8_t *)ctx->ac_cb,
	    (uint8_t *)counter_block);

	counterp = (uint8_t *)counter_block;

	/* calculate XOR of MAC with first counter block */
	for (i = 0; i < ctx->ac_ccm_mac_len; i++) {
		(*ccm_mac)[i] = mac_buf[i] ^ counterp[i];
	}
/* EXPORT DELETE END */
}

/* ARGSUSED */
int
aes_ccm_encrypt_final(aes_ctx_t *ctx, crypto_data_t *out)
{
/* EXPORT DELETE START */

	uint8_t *lastp, *mac_buf, *ccm_mac_p, *macp;
	uint32_t counter_block[4];
	uint32_t tmp[4];
	uint8_t ccm_mac[AES_BLOCK_LEN];
	void *iov_or_mp;
	offset_t offset;
	uint8_t *out_data_1;
	uint8_t *out_data_2;
	size_t out_data_1_len;
	int i;

	if (out->cd_length < (ctx->ac_remainder_len + ctx->ac_ccm_mac_len)) {
		return (CRYPTO_ARGUMENTS_BAD);
	}

	/*
	 * When we get here, the number of bytes of payload processed,
	 * plus whatever data remains, should be the same as the payload
	 * length that was passed in as an argument at init time.
	 */
	if ((ctx->ac_ccm_processed_data_len + ctx->ac_remainder_len)
	    != (ctx->ac_ccm_data_len)) {
		return (CRYPTO_DATA_LEN_RANGE);
	}

	mac_buf = (uint8_t *)ctx->ac_ccm_mac_buf;

	if (ctx->ac_remainder_len > 0) {

		macp = (uint8_t *)tmp;
		bzero(macp, AES_BLOCK_LEN);

		/* copy remainder to temporary buffer */
		bcopy(ctx->ac_remainder, macp, ctx->ac_remainder_len);

		/* calculate the CBC MAC */
		if (IS_P2ALIGNED(macp, sizeof (uint32_t)) &&
		    IS_P2ALIGNED(mac_buf, sizeof (uint32_t))) {
			/* LINTED: pointer alignment */
			*(uint32_t *)&mac_buf[0] ^= *(uint32_t *)&macp[0];
			/* LINTED: pointer alignment */
			*(uint32_t *)&mac_buf[4] ^= *(uint32_t *)&macp[4];
			/* LINTED: pointer alignment */
			*(uint32_t *)&mac_buf[8] ^= *(uint32_t *)&macp[8];
			/* LINTED: pointer alignment */
			*(uint32_t *)&mac_buf[12] ^= *(uint32_t *)&macp[12];
		} else {
			AES_XOR_BLOCK(macp, mac_buf);
		}
		aes_encrypt_block(ctx->ac_keysched, mac_buf, mac_buf);

		/* compute the counter-mode keystream */
		aes_encrypt_block(ctx->ac_keysched, (uint8_t *)ctx->ac_cb,
		    (uint8_t *)counter_block);

		lastp = (uint8_t *)counter_block;

		/* copy remainder to temporary buffer */
		bcopy(ctx->ac_remainder, macp, ctx->ac_remainder_len);

		/* XOR with counter block */
		for (i = 0; i < ctx->ac_remainder_len; i++) {
			macp[i] ^= lastp[i];
		}
		ctx->ac_ccm_processed_data_len += ctx->ac_remainder_len;
	}

	/* Calculate the CCM MAC */
	ccm_mac_p = ccm_mac;
	calculate_ccm_mac(ctx, &ccm_mac_p);

	aes_init_ptrs(out, &iov_or_mp, &offset);
	aes_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
	    &out_data_1_len, &out_data_2,
	    ctx->ac_remainder_len + ctx->ac_ccm_mac_len);

	if (ctx->ac_remainder_len > 0) {

		/* copy temporary block to where it belongs */
		if (out_data_2 == NULL) {
			/* everything will fit in out_data_1 */
			bcopy(macp, out_data_1, ctx->ac_remainder_len);
			bcopy(ccm_mac, out_data_1 + ctx->ac_remainder_len,
			    ctx->ac_ccm_mac_len);
		} else {

			if (out_data_1_len < ctx->ac_remainder_len) {

				size_t data_2_len_used;

				bcopy(macp, out_data_1, out_data_1_len);

				data_2_len_used = ctx->ac_remainder_len
				    - out_data_1_len;

				bcopy((uint8_t *)macp + out_data_1_len,
				    out_data_2, data_2_len_used);
				bcopy(ccm_mac, out_data_2 + data_2_len_used,
				    ctx->ac_ccm_mac_len);
			} else {
				bcopy(macp, out_data_1, out_data_1_len);
				if (out_data_1_len == ctx->ac_remainder_len) {
					/* mac will be in out_data_2 */
					bcopy(ccm_mac, out_data_2,
					    ctx->ac_ccm_mac_len);
				} else {
					size_t len_not_used
					    = out_data_1_len -
					    ctx->ac_remainder_len;
					/*
					 * part of the mac will be in
					 * out_data_1, and part of the mac
					 * will be in out_data_2
					 */
					bcopy(ccm_mac,
					    out_data_1 + ctx->ac_remainder_len,
					    len_not_used);
					bcopy(ccm_mac + len_not_used,
					    out_data_2,
					    ctx->ac_ccm_mac_len - len_not_used);

				}
			}
		}
	} else {
		/* copy block to where it belongs */
		bcopy(ccm_mac, out_data_1, out_data_1_len);
		if (out_data_2 != NULL) {
			bcopy(ccm_mac + out_data_1_len, out_data_2,
			    AES_BLOCK_LEN - out_data_1_len);
		}
	}
	out->cd_offset += ctx->ac_remainder_len + ctx->ac_ccm_mac_len;
	ctx->ac_remainder_len = 0;

/* EXPORT DELETE END */

	return (0);
}

int
aes_ccm_validate_args(CK_AES_CCM_PARAMS *ccm_param, boolean_t is_encrypt_init)
{

/* EXPORT DELETE START */
	size_t macSize, nonceSize;
	uint8_t q;
	uint64_t maxValue;

	/*
	 * Check the length of the MAC.  The only valid lengths
	 * for the MAC are: 4, 6, 8, 10, 12, 14, 16
	 */
	macSize = ccm_param->ulMACSize;
	if ((macSize < 4) || (macSize > 16) || ((macSize % 2) != 0)) {
		return (CRYPTO_MECHANISM_PARAM_INVALID);
	}

	/* Check the nonce length.  Valid values are 7, 8, 9, 10, 11, 12, 13 */
	nonceSize = ccm_param->ulNonceSize;
	if ((nonceSize < 7) || (nonceSize > 13)) {
		return (CRYPTO_MECHANISM_PARAM_INVALID);
	}

	q = (uint8_t)((15 - nonceSize) & 0xFF);


	/*
	 * For decryption, make sure the ciphertext is at least as long
	 * as the MAC.
	 */
	if ((!is_encrypt_init) && (ccm_param->ulDataSize < macSize)) {
		return (CRYPTO_MECHANISM_PARAM_INVALID);
	}

	/*
	 * Check to make sure the length of the payload is within the
	 * range of values allowed by q
	 */
	if (q < 8) {
		maxValue = 1ULL << (q * 8);
	} else {
		maxValue = ULONG_MAX;
	}

	if (ccm_param->ulDataSize > maxValue) {
		return (CRYPTO_MECHANISM_PARAM_INVALID);
	}

/* EXPORT DELETE END */
	return (0);
}
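/*
 * Illustration of the constraints above (hypothetical parameters): a
 * 12-byte nonce gives q = 15 - 12 = 3, so the payload length must fit
 * in 3 bytes, i.e. roughly 2^24 bytes.  A CK_AES_CCM_PARAMS with
 * ulNonceSize = 12, ulMACSize = 16 and ulDataSize = 1000 would pass;
 * the same parameters with ulNonceSize = 6 or ulMACSize = 5 would be
 * rejected as CRYPTO_MECHANISM_PARAM_INVALID.
 */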
/*
 * Format the first block used in CBC-MAC (B0) and the initial counter
 * block based on the formatting functions and counter generation
 * functions specified in RFC 3610 and NIST publication 800-38C,
 * appendix A.
 *
 * b0 is the first block used in CBC-MAC
 * cb0 is the first counter block
 *
 * It's assumed that the arguments b0 and cb0 are preallocated AES blocks
 *
 */
static void
aes_ccm_format_initial_blocks(uchar_t *nonce, ulong_t nonceSize,
    ulong_t authDataSize, uint8_t *b0, aes_ctx_t *aes_ctx)
{
/* EXPORT DELETE START */
	uint64_t payloadSize;
	uint8_t t, q, have_adata = 0;
	size_t limit;
	int i, j, k;
	uint64_t mask = 0;
	uint8_t *cb;
#ifdef _LITTLE_ENDIAN
	uint8_t *p8;
#endif	/* _LITTLE_ENDIAN */

	q = (uint8_t)((15 - nonceSize) & 0xFF);
	t = (uint8_t)((aes_ctx->ac_ccm_mac_len) & 0xFF);

	/* Construct the first octet of b0 */
	if (authDataSize > 0) {
		have_adata = 1;
	}
	b0[0] = (have_adata << 6) | (((t - 2) / 2) << 3) | (q - 1);

	/* copy the nonce value into b0 */
	bcopy(nonce, &(b0[1]), nonceSize);

	/* store the length of the payload into b0 */
	bzero(&(b0[1+nonceSize]), q);

	payloadSize = aes_ctx->ac_ccm_data_len;
	limit = 8 < q ? 8 : q;

	for (i = 0, j = 0, k = 15; i < limit; i++, j += 8, k--) {
		b0[k] = (uint8_t)((payloadSize >> j) & 0xFF);
	}

	/* format the counter block */

	cb = (uint8_t *)aes_ctx->ac_cb;

	cb[0] = 0x07 & (q-1);	/* first byte */

	/* copy the nonce value into the counter block */
	bcopy(nonce, &(cb[1]), nonceSize);

	bzero(&(cb[1+nonceSize]), q);

	/* Create the mask for the counter field based on the size of nonce */
	q <<= 3;
	while (q-- > 0) {
		mask |= (1ULL << q);
	}

#ifdef _LITTLE_ENDIAN
	p8 = (uint8_t *)&mask;
	mask = (((uint64_t)p8[0] << 56) |
	    ((uint64_t)p8[1] << 48) |
	    ((uint64_t)p8[2] << 40) |
	    ((uint64_t)p8[3] << 32) |
	    ((uint64_t)p8[4] << 24) |
	    ((uint64_t)p8[5] << 16) |
	    ((uint64_t)p8[6] << 8) |
	    (uint64_t)p8[7]);
#endif
	aes_ctx->ac_counter_mask = mask;

	/*
	 * During calculation we start with counter block 1, so set it
	 * up right here.  We can just set the last byte to the value 1,
	 * because even with the biggest nonce of 13, the last byte of
	 * the counter block will be used for the counter value.
	 */
	cb[15] = 0x01;

/* EXPORT DELETE END */

}

/*
 * Encode the length of the associated data as
 * specified in RFC 3610 and NIST publication 800-38C, appendix A
 */
static void
encode_adata_len(ulong_t auth_data_len, uint8_t *encoded, size_t *encoded_len)
{

/* EXPORT DELETE START */

	if (auth_data_len < ((1ULL<<16) - (1ULL<<8))) {
		/* 0 < a < (2^16-2^8) */
		*encoded_len = 2;
		encoded[0] = (auth_data_len & 0xff00) >> 8;
		encoded[1] = auth_data_len & 0xff;

	} else if ((auth_data_len >= ((1ULL<<16) - (1ULL<<8))) &&
	    (auth_data_len <= ((1ULL << 32) - 1))) {
		/* (2^16-2^8) <= a < 2^32 */
		*encoded_len = 6;
		encoded[0] = 0xff;
		encoded[1] = 0xfe;
		encoded[2] = (auth_data_len & 0xff000000) >> 24;
		encoded[3] = (auth_data_len & 0xff0000) >> 16;
		encoded[4] = (auth_data_len & 0xff00) >> 8;
		encoded[5] = auth_data_len & 0xff;
#ifdef _LP64
	} else {
		/* 2^32 <= a < 2^64 */
		*encoded_len = 10;
		encoded[0] = 0xff;
		encoded[1] = 0xff;
		encoded[2] = (auth_data_len & 0xff00000000000000) >> 56;
		encoded[3] = (auth_data_len & 0xff000000000000) >> 48;
		encoded[4] = (auth_data_len & 0xff0000000000) >> 40;
		encoded[5] = (auth_data_len & 0xff00000000) >> 32;
		encoded[6] = (auth_data_len & 0xff000000) >> 24;
		encoded[7] = (auth_data_len & 0xff0000) >> 16;
		encoded[8] = (auth_data_len & 0xff00) >> 8;
		encoded[9] = auth_data_len & 0xff;
#endif	/* _LP64 */
	}
/* EXPORT DELETE END */
}
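/*
 * Examples of the encoding above (values hypothetical):
 *
 *	auth_data_len = 300    -> 0x01 0x2c                       (2 octets)
 *	auth_data_len = 100000 -> 0xff 0xfe 0x00 0x01 0x86 0xa0   (6 octets)
 *
 * i.e. short lengths are a plain 16-bit big-endian value, while lengths
 * of 2^16-2^8 and above are prefixed with the marker 0xff 0xfe and
 * encoded in 32 bits (or prefixed with 0xff 0xff and encoded in 64 bits
 * on _LP64 for lengths of 2^32 and above), as laid out in RFC 3610,
 * section 2.2.
 */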
/*
 * This will only deal with decrypting the last block of the input that
 * might not be a multiple of AES_BLOCK_LEN.
 */
static void
aes_ccm_decrypt_incomplete_block(aes_ctx_t *ctx)
{

/* EXPORT DELETE START */
	uint8_t *datap, counter_block[AES_BLOCK_LEN], *outp, *counterp;
	int i;

	datap = (uint8_t *)ctx->ac_remainder;
	outp = &((ctx->ac_ccm_pt_buf)[ctx->ac_ccm_processed_data_len]);

	aes_encrypt_block(ctx->ac_keysched, (uint8_t *)ctx->ac_cb,
	    counter_block);

	counterp = (uint8_t *)counter_block;

	/* XOR with counter block */
	for (i = 0; i < ctx->ac_remainder_len; i++) {
		outp[i] = datap[i] ^ counterp[i];
	}
/* EXPORT DELETE END */
}

/*
 * This will decrypt the ciphertext.  However, the plaintext won't be
 * returned to the caller.  It will be returned when decrypt_final() is
 * called if the MAC matches.
 */
/* ARGSUSED */
static int
aes_ccm_decrypt_contiguous_blocks(aes_ctx_t *ctx, char *data, size_t length,
    crypto_data_t *out)
{

/* EXPORT DELETE START */

	size_t remainder = length;
	size_t need;
	uint8_t *datap = (uint8_t *)data;
	uint8_t *blockp;
	uint32_t counter_block[4];
	uint8_t *cbp;
	uint64_t counter;
	size_t pt_len, total_decrypted_len, mac_len, pm_len, pd_len;
	uint32_t tmp[4];
	uint8_t *resultp;
#ifdef _LITTLE_ENDIAN
	uint8_t *p;
#endif	/* _LITTLE_ENDIAN */


	pm_len = ctx->ac_ccm_processed_mac_len;

	if (pm_len > 0) {
		uint8_t *tmp;
		/*
		 * All ciphertext has been processed; we are just waiting
		 * for the rest of the MAC value.
		 */
		if ((pm_len + length) > ctx->ac_ccm_mac_len) {
			return (CRYPTO_DATA_LEN_RANGE);
		}
		tmp = (uint8_t *)ctx->ac_ccm_mac_input_buf;

		bcopy(datap, tmp + pm_len, length);

		ctx->ac_ccm_processed_mac_len += length;
		return (0);
	}

	/*
	 * If we decrypt the given data, what total amount of data would
	 * have been decrypted?
	 */
	pd_len = ctx->ac_ccm_processed_data_len;
	total_decrypted_len = pd_len + length + ctx->ac_remainder_len;

	if (total_decrypted_len >
	    (ctx->ac_ccm_data_len + ctx->ac_ccm_mac_len)) {
		return (CRYPTO_DATA_LEN_RANGE);
	}

	pt_len = ctx->ac_ccm_data_len;

	if (total_decrypted_len > pt_len) {
		/*
		 * Part of the input will be the MAC; we need to isolate
		 * that to be dealt with later.  The left-over data in
		 * ac_remainder_len from last time will not be part of the
		 * MAC.  Otherwise, it would have already been taken out
		 * when this call was made last time.
		 */
		size_t pt_part = pt_len - pd_len - ctx->ac_remainder_len;

		mac_len = length - pt_part;

		ctx->ac_ccm_processed_mac_len = mac_len;
		bcopy(data + pt_part, ctx->ac_ccm_mac_input_buf, mac_len);

		if (pt_part + ctx->ac_remainder_len < AES_BLOCK_LEN) {
			/*
			 * Since this is the last of the ciphertext, just
			 * decrypt it here.
			 */
			bcopy(datap, &((uint8_t *)ctx->ac_remainder)
			    [ctx->ac_remainder_len], pt_part);
			ctx->ac_remainder_len += pt_part;
			aes_ccm_decrypt_incomplete_block(ctx);
			ctx->ac_remainder_len = 0;
			ctx->ac_ccm_processed_data_len += pt_part;
			return (0);
		} else {
			/* let the rest of the code handle this */
			length = pt_part;
		}
	} else if (length + ctx->ac_remainder_len < AES_BLOCK_LEN) {
		/* accumulate bytes here and return */
		bcopy(datap,
		    (uint8_t *)ctx->ac_remainder + ctx->ac_remainder_len,
		    length);
		ctx->ac_remainder_len += length;
		ctx->ac_copy_to = datap;
		return (0);
	}

	do {
		/* Unprocessed data from last call. */
		if (ctx->ac_remainder_len > 0) {
			need = AES_BLOCK_LEN - ctx->ac_remainder_len;

			if (need > remainder)
				return (1);

			bcopy(datap, &((uint8_t *)ctx->ac_remainder)
			    [ctx->ac_remainder_len], need);

			blockp = (uint8_t *)ctx->ac_remainder;
		} else {
			blockp = datap;
		}

		/* don't write on the plaintext */
		if (IS_P2ALIGNED(blockp, sizeof (uint32_t))) {
			/* LINTED: pointer alignment */
			tmp[0] = *(uint32_t *)blockp;
			/* LINTED: pointer alignment */
			tmp[1] = *(uint32_t *)&blockp[4];
			/* LINTED: pointer alignment */
			tmp[2] = *(uint32_t *)&blockp[8];
			/* LINTED: pointer alignment */
			tmp[3] = *(uint32_t *)&blockp[12];
		} else {
			uint8_t *tmp8 = (uint8_t *)tmp;

			AES_COPY_BLOCK(blockp, tmp8);
		}
		blockp = (uint8_t *)tmp;

		/* Calculate the counter mode, ac_cb is the counter block */
		aes_encrypt_block(ctx->ac_keysched, (uint8_t *)ctx->ac_cb,
		    (uint8_t *)counter_block);
		cbp = (uint8_t *)counter_block;

		/*
		 * Increment counter.  Counter bits are confined to the
		 * bottom 64 bits.
		 */
		counter = ctx->ac_cb[1] & ctx->ac_counter_mask;
#ifdef _LITTLE_ENDIAN
		p = (uint8_t *)&counter;
		counter = (((uint64_t)p[0] << 56) |
		    ((uint64_t)p[1] << 48) |
		    ((uint64_t)p[2] << 40) |
		    ((uint64_t)p[3] << 32) |
		    ((uint64_t)p[4] << 24) |
		    ((uint64_t)p[5] << 16) |
		    ((uint64_t)p[6] << 8) |
		    (uint64_t)p[7]);
#endif
		counter++;
#ifdef _LITTLE_ENDIAN
		counter = (((uint64_t)p[0] << 56) |
		    ((uint64_t)p[1] << 48) |
		    ((uint64_t)p[2] << 40) |
		    ((uint64_t)p[3] << 32) |
		    ((uint64_t)p[4] << 24) |
		    ((uint64_t)p[5] << 16) |
		    ((uint64_t)p[6] << 8) |
		    (uint64_t)p[7]);
#endif
		counter &= ctx->ac_counter_mask;
		ctx->ac_cb[1] =
		    (ctx->ac_cb[1] & ~(ctx->ac_counter_mask)) | counter;

		/* XOR with the ciphertext */
		if (IS_P2ALIGNED(blockp, sizeof (uint32_t)) &&
		    IS_P2ALIGNED(cbp, sizeof (uint32_t))) {
			/* LINTED: pointer alignment */
			*(uint32_t *)&blockp[0] ^= *(uint32_t *)&cbp[0];
			/* LINTED: pointer alignment */
			*(uint32_t *)&blockp[4] ^= *(uint32_t *)&cbp[4];
			/* LINTED: pointer alignment */
			*(uint32_t *)&blockp[8] ^= *(uint32_t *)&cbp[8];
			/* LINTED: pointer alignment */
			*(uint32_t *)&blockp[12] ^= *(uint32_t *)&cbp[12];
		} else {
			AES_XOR_BLOCK(cbp, blockp);
		}

		/* Copy the plaintext to the "holding buffer" */
		resultp = (uint8_t *)ctx->ac_ccm_pt_buf +
		    ctx->ac_ccm_processed_data_len;
		if (IS_P2ALIGNED(blockp, sizeof (uint32_t)) &&
		    IS_P2ALIGNED(resultp, sizeof (uint32_t))) {
			/* LINTED: pointer alignment */
			*(uint32_t *)&resultp[0] = *(uint32_t *)blockp;
			/* LINTED: pointer alignment */
			*(uint32_t *)&resultp[4] = *(uint32_t *)&blockp[4];
			/* LINTED: pointer alignment */
			*(uint32_t *)&resultp[8] = *(uint32_t *)&blockp[8];
			/* LINTED: pointer alignment */
			*(uint32_t *)&resultp[12] = *(uint32_t *)&blockp[12];
		} else {
			AES_COPY_BLOCK(blockp, resultp);
		}

		ctx->ac_ccm_processed_data_len += AES_BLOCK_LEN;

		ctx->ac_lastp = blockp;

		/* Update pointer to next block of data to be processed. */
		if (ctx->ac_remainder_len != 0) {
			datap += need;
			ctx->ac_remainder_len = 0;
		} else {
			datap += AES_BLOCK_LEN;
		}

		remainder = (size_t)&data[length] - (size_t)datap;

		/* Incomplete last block */
		if (remainder > 0 && remainder < AES_BLOCK_LEN) {
			bcopy(datap, ctx->ac_remainder, remainder);
			ctx->ac_remainder_len = remainder;
			ctx->ac_copy_to = datap;
			if (ctx->ac_ccm_processed_mac_len > 0) {
				/*
				 * Not expecting any more ciphertext, just
				 * compute the plaintext for the remaining
				 * input.
				 */
				aes_ccm_decrypt_incomplete_block(ctx);
				ctx->ac_ccm_processed_data_len += remainder;
				ctx->ac_remainder_len = 0;
			}
			goto out;
		}
		ctx->ac_copy_to = NULL;

	} while (remainder > 0);

out:
/* EXPORT DELETE END */

	return (0);
}
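/*
 * Worked example for the MAC/ciphertext split above (hypothetical
 * sizes): with ac_ccm_data_len = 100, ac_ccm_processed_data_len = 96,
 * no buffered remainder and a 16-byte MAC, a final call with
 * length = 20 is split into pt_part = 100 - 96 - 0 = 4 bytes of
 * ciphertext and mac_len = 16 bytes that are stashed in
 * ac_ccm_mac_input_buf for verification in aes_ccm_decrypt_final().
 */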
int
aes_ccm_decrypt_final(aes_ctx_t *ctx, crypto_data_t *out)
{
/* EXPORT DELETE START */
	size_t mac_remain, pt_len;
	uint8_t *pt, *mac_buf, *macp, *ccm_mac_p;
	uint8_t ccm_mac[AES_BLOCK_LEN];
	void *iov_or_mp;
	offset_t offset;
	uint8_t *out_data_1, *out_data_2;
	size_t out_data_1_len;
	uint32_t tmp[4];

	pt_len = ctx->ac_ccm_data_len;

	/* Make sure the output buffer can fit all of the plaintext */
	if (out->cd_length < pt_len) {
		return (CRYPTO_ARGUMENTS_BAD);
	}

	pt = ctx->ac_ccm_pt_buf;
	mac_remain = ctx->ac_ccm_processed_data_len;
	mac_buf = (uint8_t *)ctx->ac_ccm_mac_buf;

	macp = (uint8_t *)tmp;

	while (mac_remain > 0) {

		if (mac_remain < AES_BLOCK_LEN) {
			bzero(tmp, AES_BLOCK_LEN);
			bcopy(pt, tmp, mac_remain);
			mac_remain = 0;
		} else {
			if (IS_P2ALIGNED(pt, sizeof (uint32_t)) &&
			    IS_P2ALIGNED(macp, sizeof (uint32_t))) {
				/* LINTED: pointer alignment */
				*(uint32_t *)&macp[0] = *(uint32_t *)pt;
				/* LINTED: pointer alignment */
				*(uint32_t *)&macp[4] = *(uint32_t *)&pt[4];
				/* LINTED: pointer alignment */
				*(uint32_t *)&macp[8] = *(uint32_t *)&pt[8];
				/* LINTED: pointer alignment */
				*(uint32_t *)&macp[12] = *(uint32_t *)&pt[12];
			} else {
				AES_COPY_BLOCK(pt, macp);
			}
			mac_remain -= AES_BLOCK_LEN;
			pt += AES_BLOCK_LEN;
		}

		/* calculate the CBC MAC */
		if (IS_P2ALIGNED(macp, sizeof (uint32_t)) &&
		    IS_P2ALIGNED(mac_buf, sizeof (uint32_t))) {
			/* LINTED: pointer alignment */
			*(uint32_t *)&mac_buf[0] ^= *(uint32_t *)&macp[0];
			/* LINTED: pointer alignment */
			*(uint32_t *)&mac_buf[4] ^= *(uint32_t *)&macp[4];
			/* LINTED: pointer alignment */
			*(uint32_t *)&mac_buf[8] ^= *(uint32_t *)&macp[8];
			/* LINTED: pointer alignment */
			*(uint32_t *)&mac_buf[12] ^= *(uint32_t *)&macp[12];
		} else {
			AES_XOR_BLOCK(macp, mac_buf);
		}
		aes_encrypt_block(ctx->ac_keysched, mac_buf, mac_buf);
	}

	/* Calculate the CCM MAC */
	ccm_mac_p = ccm_mac;
	calculate_ccm_mac(ctx, &ccm_mac_p);

	/* compare the input CCM MAC value with what we calculated */
	if (bcmp(ctx->ac_ccm_mac_input_buf, ccm_mac, ctx->ac_ccm_mac_len)) {
		/* They don't match */
		return (CRYPTO_DATA_LEN_RANGE);
	} else {
		aes_init_ptrs(out, &iov_or_mp, &offset);
		aes_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
		    &out_data_1_len, &out_data_2, pt_len);
		bcopy(ctx->ac_ccm_pt_buf, out_data_1, out_data_1_len);
		if (out_data_2 != NULL) {
			bcopy((ctx->ac_ccm_pt_buf) + out_data_1_len,
			    out_data_2, pt_len - out_data_1_len);
		}
		out->cd_offset += pt_len;
	}

/* EXPORT DELETE END */
	return (0);
}
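/*
 * End-to-end sketch of the CCM decrypt flow implemented in this file
 * (illustrative pseudo-usage only; the surrounding crypto framework
 * normally drives these calls, and error handling is omitted):
 *
 *	aes_ccm_init(ctx, nonce, nonce_len, aad, aad_len);
 *	while (more ciphertext chunks)
 *		aes_decrypt_contiguous_blocks(ctx, chunk, chunk_len, out);
 *	aes_ccm_decrypt_final(ctx, out);
 *
 * With AES_CCM_MODE set in ac_flags, the decrypt calls route to
 * aes_ccm_decrypt_contiguous_blocks() above; the plaintext accumulates
 * in ctx->ac_ccm_pt_buf and is released to `out' only after the
 * CBC-MAC recomputed over it matches the MAC that trailed the
 * ciphertext.
 */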