/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright 2008 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

#pragma ident	"@(#)aes_cbc_crypt.c	1.10	08/06/13 SMI"


#include <sys/sysmacros.h>
#include <sys/systm.h>
#include <sys/crypto/common.h>
#include <sys/strsun.h>
#include "aes_cbc_crypt.h"
#include "aes_impl.h"
#ifndef	_KERNEL
#include <limits.h>
#include <strings.h>
#endif	/* !_KERNEL */

static int aes_ctr_ccm_mode_contiguous_blocks(aes_ctx_t *, char *, size_t,
    crypto_data_t *);
static void
encode_adata_len(ulong_t auth_data_len, uint8_t *encoded, size_t *encoded_len);
static void
aes_ccm_format_initial_blocks(uchar_t *nonce, ulong_t nonceSize,
    ulong_t authDataSize, uint8_t *b0, aes_ctx_t *aes_ctx);
static int
aes_ccm_decrypt_contiguous_blocks(aes_ctx_t *ctx, char *data, size_t length,
    crypto_data_t *out);

/*
 * Initialize by setting iov_or_mp to point to the current iovec or mp,
 * and by setting current_offset to an offset within the current iovec or mp.
 */
static void
aes_init_ptrs(crypto_data_t *out, void **iov_or_mp, offset_t *current_offset)
{
	offset_t offset;

	switch (out->cd_format) {
	case CRYPTO_DATA_RAW:
		*current_offset = out->cd_offset;
		break;

	case CRYPTO_DATA_UIO: {
		uio_t *uiop = out->cd_uio;
		uintptr_t vec_idx;

		offset = out->cd_offset;
		for (vec_idx = 0; vec_idx < uiop->uio_iovcnt &&
		    offset >= uiop->uio_iov[vec_idx].iov_len;
		    offset -= uiop->uio_iov[vec_idx++].iov_len)
			;

		*current_offset = offset;
		*iov_or_mp = (void *)vec_idx;
		break;
	}

	case CRYPTO_DATA_MBLK: {
		mblk_t *mp;

		offset = out->cd_offset;
		for (mp = out->cd_mp; mp != NULL && offset >= MBLKL(mp);
		    offset -= MBLKL(mp), mp = mp->b_cont)
			;

		*current_offset = offset;
		*iov_or_mp = mp;
		break;
	}
	} /* end switch */
}
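/*
 * The helper above and the one below are used as a pair when walking a
 * possibly scattered output buffer one AES block at a time.  A minimal
 * sketch of the calling pattern (illustrative only, not code from this
 * file):
 *
 *	void *iov_or_mp;
 *	offset_t offset;
 *	uint8_t *o1, *o2;
 *	size_t o1_len;
 *
 *	aes_init_ptrs(out, &iov_or_mp, &offset);
 *	aes_get_ptrs(out, &iov_or_mp, &offset, &o1, &o1_len, &o2,
 *	    AES_BLOCK_LEN);
 *	...copy o1_len bytes to o1; if o2 != NULL, copy the remaining
 *	AES_BLOCK_LEN - o1_len bytes to o2 (the block spans two buffer
 *	fragments)...
 */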
/*
 * Get pointers for where in the output to copy a block of encrypted or
 * decrypted data.  The iov_or_mp argument stores a pointer to the current
 * iovec or mp, and offset stores an offset into the current iovec or mp.
 */
static void
aes_get_ptrs(crypto_data_t *out, void **iov_or_mp, offset_t *current_offset,
    uint8_t **out_data_1, size_t *out_data_1_len, uint8_t **out_data_2,
    size_t amt)
{
	offset_t offset;

	switch (out->cd_format) {
	case CRYPTO_DATA_RAW: {
		iovec_t *iov;

		offset = *current_offset;
		iov = &out->cd_raw;
		if ((offset + amt) <= iov->iov_len) {
			/* one AES block fits */
			*out_data_1 = (uint8_t *)iov->iov_base + offset;
			*out_data_1_len = amt;
			*out_data_2 = NULL;
			*current_offset = offset + amt;
		}
		break;
	}

	case CRYPTO_DATA_UIO: {
		uio_t *uio = out->cd_uio;
		iovec_t *iov;
		offset_t offset;
		uintptr_t vec_idx;
		uint8_t *p;

		offset = *current_offset;
		vec_idx = (uintptr_t)(*iov_or_mp);
		iov = &uio->uio_iov[vec_idx];
		p = (uint8_t *)iov->iov_base + offset;
		*out_data_1 = p;

		if (offset + amt <= iov->iov_len) {
			/* can fit one AES block into this iov */
			*out_data_1_len = amt;
			*out_data_2 = NULL;
			*current_offset = offset + amt;
		} else {
			/* one AES block spans two iovecs */
			*out_data_1_len = iov->iov_len - offset;
			if (vec_idx == uio->uio_iovcnt)
				return;
			vec_idx++;
			iov = &uio->uio_iov[vec_idx];
			*out_data_2 = (uint8_t *)iov->iov_base;
			*current_offset = amt - *out_data_1_len;
		}
		*iov_or_mp = (void *)vec_idx;
		break;
	}

	case CRYPTO_DATA_MBLK: {
		mblk_t *mp;
		uint8_t *p;

		offset = *current_offset;
		mp = (mblk_t *)*iov_or_mp;
		p = mp->b_rptr + offset;
		*out_data_1 = p;
		if ((p + amt) <= mp->b_wptr) {
			/* can fit one AES block into this mblk */
			*out_data_1_len = amt;
			*out_data_2 = NULL;
			*current_offset = offset + amt;
		} else {
			/* one AES block spans two mblks */
			*out_data_1_len = mp->b_wptr - p;
			if ((mp = mp->b_cont) == NULL)
				return;
			*out_data_2 = mp->b_rptr;
			*current_offset = (amt - *out_data_1_len);
		}
		*iov_or_mp = mp;
		break;
	}
	} /* end switch */
}

static int
aes_cbc_encrypt_contiguous_blocks(aes_ctx_t *ctx, char *data, size_t length,
    crypto_data_t *out)
{

/* EXPORT DELETE START */

	size_t remainder = length;
	size_t need;
	uint8_t *datap = (uint8_t *)data;
	uint8_t *blockp;
	uint8_t *lastp;
	uint32_t tmp[4];
	void *iov_or_mp;
	offset_t offset;
	uint8_t *out_data_1;
	uint8_t *out_data_2;
	size_t out_data_1_len;

	if (length + ctx->ac_remainder_len < AES_BLOCK_LEN) {
		/* accumulate bytes here and return */
		bcopy(datap,
		    (uint8_t *)ctx->ac_remainder + ctx->ac_remainder_len,
		    length);
		ctx->ac_remainder_len += length;
		ctx->ac_copy_to = datap;
		return (0);
	}

	lastp = (uint8_t *)ctx->ac_iv;
	if (out != NULL)
		aes_init_ptrs(out, &iov_or_mp, &offset);
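	/*
	 * Main streaming loop.  The model used throughout this file: data
	 * arrives in arbitrary-sized chunks, so partial blocks are buffered
	 * in ctx->ac_remainder until a full AES_BLOCK_LEN block can be
	 * assembled.  ac_copy_to remembers where an in-place caller's
	 * leftover bytes came from so the corresponding ciphertext can be
	 * copied back on the next call.
	 */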
	do {
		/* Unprocessed data from last call. */
		if (ctx->ac_remainder_len > 0) {
			need = AES_BLOCK_LEN - ctx->ac_remainder_len;

			if (need > remainder)
				return (1);

			bcopy(datap, &((uint8_t *)ctx->ac_remainder)
			    [ctx->ac_remainder_len], need);

			blockp = (uint8_t *)ctx->ac_remainder;
		} else {
			blockp = datap;
		}

		/* don't write on the plaintext */
		if (out != NULL) {
			if (IS_P2ALIGNED(blockp, sizeof (uint32_t))) {
				/* LINTED: pointer alignment */
				tmp[0] = *(uint32_t *)blockp;
				/* LINTED: pointer alignment */
				tmp[1] = *(uint32_t *)&blockp[4];
				/* LINTED: pointer alignment */
				tmp[2] = *(uint32_t *)&blockp[8];
				/* LINTED: pointer alignment */
				tmp[3] = *(uint32_t *)&blockp[12];
			} else {
				uint8_t *tmp8 = (uint8_t *)tmp;

				AES_COPY_BLOCK(blockp, tmp8);
			}
			blockp = (uint8_t *)tmp;
		}

		if (ctx->ac_flags & AES_CBC_MODE) {
			/*
			 * XOR the previous cipher block or IV with the
			 * current clear block.  Check for alignment.
			 */
			if (IS_P2ALIGNED2(blockp, lastp, sizeof (uint32_t))) {
				/* LINTED: pointer alignment */
				*(uint32_t *)&blockp[0] ^=
				    /* LINTED: pointer alignment */
				    *(uint32_t *)&lastp[0];
				/* LINTED: pointer alignment */
				*(uint32_t *)&blockp[4] ^=
				    /* LINTED: pointer alignment */
				    *(uint32_t *)&lastp[4];
				/* LINTED: pointer alignment */
				*(uint32_t *)&blockp[8] ^=
				    /* LINTED: pointer alignment */
				    *(uint32_t *)&lastp[8];
				/* LINTED: pointer alignment */
				*(uint32_t *)&blockp[12] ^=
				    /* LINTED: pointer alignment */
				    *(uint32_t *)&lastp[12];
			} else {
				AES_XOR_BLOCK(lastp, blockp);
			}
		}

		if (out == NULL) {
			aes_encrypt_block(ctx->ac_keysched, blockp, blockp);

			ctx->ac_lastp = blockp;
			lastp = blockp;

			if (ctx->ac_remainder_len > 0) {
				bcopy(blockp, ctx->ac_copy_to,
				    ctx->ac_remainder_len);
				bcopy(blockp + ctx->ac_remainder_len, datap,
				    need);
			}
		} else {
			aes_encrypt_block(ctx->ac_keysched, blockp, lastp);
			aes_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
			    &out_data_1_len, &out_data_2, AES_BLOCK_LEN);

			/* copy block to where it belongs */
			if ((out_data_1_len == AES_BLOCK_LEN) &&
			    (IS_P2ALIGNED2(lastp, out_data_1,
			    sizeof (uint32_t)))) {
				/* LINTED: pointer alignment */
				uint32_t *d = (uint32_t *)out_data_1;
				/* LINTED: pointer alignment */
				d[0] = *(uint32_t *)lastp;
				/* LINTED: pointer alignment */
				d[1] = *(uint32_t *)&lastp[4];
				/* LINTED: pointer alignment */
				d[2] = *(uint32_t *)&lastp[8];
				/* LINTED: pointer alignment */
				d[3] = *(uint32_t *)&lastp[12];
			} else {
				bcopy(lastp, out_data_1, out_data_1_len);
			}
			if (out_data_2 != NULL) {
				bcopy(lastp + out_data_1_len, out_data_2,
				    AES_BLOCK_LEN - out_data_1_len);
			}

			/* update offset */
			out->cd_offset += AES_BLOCK_LEN;
		}

		/* Update pointer to next block of data to be processed. */
		if (ctx->ac_remainder_len != 0) {
			datap += need;
			ctx->ac_remainder_len = 0;
		} else {
			datap += AES_BLOCK_LEN;
		}

		remainder = (size_t)&data[length] - (size_t)datap;

		/* Incomplete last block. */
		if (remainder > 0 && remainder < AES_BLOCK_LEN) {
			bcopy(datap, ctx->ac_remainder, remainder);
			ctx->ac_remainder_len = remainder;
			ctx->ac_copy_to = datap;
			goto out;
		}
		ctx->ac_copy_to = NULL;

	} while (remainder > 0);

out:
	/*
	 * Save the last encrypted block in the context - but only for
	 * the CBC mode of operation.
	 */
	if ((ctx->ac_flags & AES_CBC_MODE) && (ctx->ac_lastp != NULL)) {
		uint8_t *iv8 = (uint8_t *)ctx->ac_iv;
		uint8_t *last8 = (uint8_t *)ctx->ac_lastp;

		if (IS_P2ALIGNED(ctx->ac_lastp, sizeof (uint32_t))) {
			/* LINTED: pointer alignment */
			*(uint32_t *)iv8 = *(uint32_t *)last8;
			/* LINTED: pointer alignment */
			*(uint32_t *)&iv8[4] = *(uint32_t *)&last8[4];
			/* LINTED: pointer alignment */
			*(uint32_t *)&iv8[8] = *(uint32_t *)&last8[8];
			/* LINTED: pointer alignment */
			*(uint32_t *)&iv8[12] = *(uint32_t *)&last8[12];
		} else {
			AES_COPY_BLOCK(last8, iv8);
		}
		ctx->ac_lastp = (uint8_t *)ctx->ac_iv;
	}

/* EXPORT DELETE END */

	return (0);
}
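/*
 * CBC decryption below alternates between two block-sized buffers in the
 * context, ctx->ac_lastblock and ctx->ac_iv, so that the previous
 * ciphertext block (needed for the XOR step) survives while the current
 * ciphertext block is being saved.  OTHER() simply returns whichever of
 * the two buffers `a' is not currently pointing at.
 */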
#define	OTHER(a, ctx) \
	(((a) == (ctx)->ac_lastblock) ? (ctx)->ac_iv : (ctx)->ac_lastblock)

/*
 * Encrypt multiple blocks of data.
 */
/* ARGSUSED */
int
aes_encrypt_contiguous_blocks(aes_ctx_t *ctx, char *data, size_t length,
    crypto_data_t *out)
{
	/* CTR and CCM encryption share the same block routine. */
	if (ctx->ac_flags & (AES_CTR_MODE | AES_CCM_MODE))
		return (aes_ctr_ccm_mode_contiguous_blocks(ctx, data, length,
		    out));
	return (aes_cbc_encrypt_contiguous_blocks(ctx, data, length, out));
}

/* ARGSUSED */
static int
aes_cbc_decrypt_contiguous_blocks(aes_ctx_t *ctx, char *data, size_t length,
    crypto_data_t *out)
{

/* EXPORT DELETE START */

	size_t remainder = length;
	size_t need;
	uint8_t *datap = (uint8_t *)data;
	uint8_t *blockp;
	uint8_t *lastp;
	uint32_t tmp[4];
	void *iov_or_mp;
	offset_t offset;
	uint8_t *out_data_1;
	uint8_t *out_data_2;
	size_t out_data_1_len;

	if (length + ctx->ac_remainder_len < AES_BLOCK_LEN) {
		/* accumulate bytes here and return */
		bcopy(datap,
		    (uint8_t *)ctx->ac_remainder + ctx->ac_remainder_len,
		    length);
		ctx->ac_remainder_len += length;
		ctx->ac_copy_to = datap;
		return (0);
	}

	lastp = ctx->ac_lastp;
	if (out != NULL)
		aes_init_ptrs(out, &iov_or_mp, &offset);
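	/*
	 * Decryption order of operations for CBC: first save the incoming
	 * ciphertext block (into the OTHER() buffer), then decrypt it, then
	 * XOR the result with the previous ciphertext block (or the IV for
	 * the first block).  The saved copy becomes the "previous
	 * ciphertext" for the next iteration, which is why two alternating
	 * save buffers are needed when decrypting in place.
	 */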
	do {
		/* Unprocessed data from last call. */
		if (ctx->ac_remainder_len > 0) {
			need = AES_BLOCK_LEN - ctx->ac_remainder_len;

			if (need > remainder)
				return (1);

			bcopy(datap, &((uint8_t *)ctx->ac_remainder)
			    [ctx->ac_remainder_len], need);

			blockp = (uint8_t *)ctx->ac_remainder;
		} else {
			blockp = datap;
		}

		if (ctx->ac_flags & AES_CBC_MODE) {

			/* Save current ciphertext block */
			if (IS_P2ALIGNED(blockp, sizeof (uint32_t))) {
				uint32_t *tmp32;

				/* LINTED: pointer alignment */
				tmp32 = (uint32_t *)OTHER((uint64_t *)lastp,
				    ctx);

				/* LINTED: pointer alignment */
				*tmp32++ = *(uint32_t *)blockp;
				/* LINTED: pointer alignment */
				*tmp32++ = *(uint32_t *)&blockp[4];
				/* LINTED: pointer alignment */
				*tmp32++ = *(uint32_t *)&blockp[8];
				/* LINTED: pointer alignment */
				*tmp32++ = *(uint32_t *)&blockp[12];
			} else {
				uint8_t *tmp8;
				/* LINTED: pointer alignment */
				tmp8 = (uint8_t *)OTHER((uint64_t *)lastp, ctx);

				AES_COPY_BLOCK(blockp, tmp8);
			}
		}

		if (out != NULL) {
			aes_decrypt_block(ctx->ac_keysched, blockp,
			    (uint8_t *)tmp);
			blockp = (uint8_t *)tmp;
		} else {
			aes_decrypt_block(ctx->ac_keysched, blockp, blockp);
		}

		if (ctx->ac_flags & AES_CBC_MODE) {
			/*
			 * XOR the previous cipher block or IV with the
			 * currently decrypted block.  Check for alignment.
			 */
			if (IS_P2ALIGNED2(blockp, lastp, sizeof (uint32_t))) {
				/* LINTED: pointer alignment */
				*(uint32_t *)blockp ^= *(uint32_t *)lastp;
				/* LINTED: pointer alignment */
				*(uint32_t *)&blockp[4] ^=
				    /* LINTED: pointer alignment */
				    *(uint32_t *)&lastp[4];
				/* LINTED: pointer alignment */
				*(uint32_t *)&blockp[8] ^=
				    /* LINTED: pointer alignment */
				    *(uint32_t *)&lastp[8];
				/* LINTED: pointer alignment */
				*(uint32_t *)&blockp[12] ^=
				    /* LINTED: pointer alignment */
				    *(uint32_t *)&lastp[12];
			} else {
				AES_XOR_BLOCK(lastp, blockp);
			}

			/* LINTED: pointer alignment */
			lastp = (uint8_t *)OTHER((uint64_t *)lastp, ctx);
		}

		if (out != NULL) {
			aes_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
			    &out_data_1_len, &out_data_2, AES_BLOCK_LEN);

			/* copy temporary block to where it belongs */
			if ((out_data_1_len == AES_BLOCK_LEN) &&
			    (IS_P2ALIGNED(out_data_1, sizeof (uint32_t)))) {
				/* LINTED: pointer alignment */
				uint32_t *d = (uint32_t *)out_data_1;
				d[0] = tmp[0];
				d[1] = tmp[1];
				d[2] = tmp[2];
				d[3] = tmp[3];
			} else {
				bcopy(&tmp, out_data_1, out_data_1_len);
			}
			if (out_data_2 != NULL) {
				bcopy((uint8_t *)&tmp + out_data_1_len,
				    out_data_2, AES_BLOCK_LEN - out_data_1_len);
			}

			/* update offset */
			out->cd_offset += AES_BLOCK_LEN;

		} else if (ctx->ac_remainder_len > 0) {
			/* copy temporary block to where it belongs */
			bcopy(blockp, ctx->ac_copy_to, ctx->ac_remainder_len);
			bcopy(blockp + ctx->ac_remainder_len, datap, need);
		}

		/* Update pointer to next block of data to be processed. */
		if (ctx->ac_remainder_len != 0) {
			datap += need;
			ctx->ac_remainder_len = 0;
		} else {
			datap += AES_BLOCK_LEN;
		}

		remainder = (size_t)&data[length] - (size_t)datap;

		/* Incomplete last block. */
		if (remainder > 0 && remainder < AES_BLOCK_LEN) {
			bcopy(datap, ctx->ac_remainder, remainder);
			ctx->ac_remainder_len = remainder;
			ctx->ac_lastp = lastp;
			ctx->ac_copy_to = datap;
			return (0);
		}
		ctx->ac_copy_to = NULL;

	} while (remainder > 0);

	ctx->ac_lastp = lastp;

/* EXPORT DELETE END */

	return (0);
}

/*
 * Decrypt multiple blocks of data.
 */
int
aes_decrypt_contiguous_blocks(aes_ctx_t *ctx, char *data, size_t length,
    crypto_data_t *out)
{
	if (ctx->ac_flags & AES_CTR_MODE)
		return (aes_ctr_ccm_mode_contiguous_blocks(ctx, data, length,
		    out));
	else if (ctx->ac_flags & AES_CCM_MODE)
		return (aes_ccm_decrypt_contiguous_blocks(ctx, data, length,
		    out));
	return (aes_cbc_decrypt_contiguous_blocks(ctx, data, length, out));
}
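/*
 * CTR final handling: a trailing partial block is encrypted by generating
 * one more keystream block E_k(counter) and XORing only the remaining
 * ctx->ac_remainder_len bytes of it with the buffered plaintext.  No
 * padding is needed, which is why CTR mode can produce ciphertext of the
 * same length as the plaintext.
 */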
/* ARGSUSED */
int
aes_counter_final(aes_ctx_t *ctx, crypto_data_t *out)
{
/* EXPORT DELETE START */

	uint8_t *lastp;
	uint32_t counter_block[4];
	uint8_t tmp[AES_BLOCK_LEN];
	int i;
	void *iov_or_mp;
	offset_t offset;
	uint8_t *out_data_1;
	uint8_t *out_data_2;
	size_t out_data_1_len;

	if (out->cd_length < ctx->ac_remainder_len)
		return (CRYPTO_ARGUMENTS_BAD);

	/* ac_iv is the counter block */
	aes_encrypt_block(ctx->ac_keysched, (uint8_t *)ctx->ac_iv,
	    (uint8_t *)counter_block);

	lastp = (uint8_t *)counter_block;

	/* copy remainder to temporary buffer */
	bcopy(ctx->ac_remainder, tmp, ctx->ac_remainder_len);

	/* XOR with counter block */
	for (i = 0; i < ctx->ac_remainder_len; i++) {
		tmp[i] ^= lastp[i];
	}

	aes_init_ptrs(out, &iov_or_mp, &offset);
	aes_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
	    &out_data_1_len, &out_data_2, ctx->ac_remainder_len);

	/* copy temporary block to where it belongs */
	bcopy(tmp, out_data_1, out_data_1_len);
	if (out_data_2 != NULL) {
		bcopy((uint8_t *)tmp + out_data_1_len,
		    out_data_2, ctx->ac_remainder_len - out_data_1_len);
	}
	out->cd_offset += ctx->ac_remainder_len;
	ctx->ac_remainder_len = 0;

/* EXPORT DELETE END */

	return (0);
}
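/*
 * In both CTR and CCM, each ciphertext block below is produced as
 *
 *	C_i = P_i XOR E_k(CB_i)
 *
 * where CB_i is the counter block for block i.  CCM additionally runs a
 * CBC-MAC over the plaintext blocks (accumulated in ac_ccm_mac_buf) as
 * they go by.
 */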
/*
 * Encrypt and decrypt multiple blocks of data in counter mode.
 * Encrypt multiple blocks of data in CCM mode.  Decrypt for CCM mode
 * is done in another function.
 */
/* ARGSUSED */
int
aes_ctr_ccm_mode_contiguous_blocks(aes_ctx_t *ctx, char *data, size_t length,
    crypto_data_t *out)
{

/* EXPORT DELETE START */

	size_t remainder = length;
	size_t need;
	uint8_t *datap = (uint8_t *)data;
	uint8_t *blockp;
	uint8_t *lastp;
	uint32_t tmp[4];
	uint32_t counter_block[4];
	void *iov_or_mp;
	offset_t offset;
	uint8_t *out_data_1;
	uint8_t *out_data_2;
	size_t out_data_1_len;
	uint64_t counter;
	uint8_t *mac_buf;
#ifdef _LITTLE_ENDIAN
	uint8_t *p;
#endif

	if (length + ctx->ac_remainder_len < AES_BLOCK_LEN) {
		/* accumulate bytes here and return */
		bcopy(datap,
		    (uint8_t *)ctx->ac_remainder + ctx->ac_remainder_len,
		    length);
		ctx->ac_remainder_len += length;
		ctx->ac_copy_to = datap;
		return (0);
	}

	lastp = (uint8_t *)ctx->ac_cb;
	if (out != NULL)
		aes_init_ptrs(out, &iov_or_mp, &offset);

	if (ctx->ac_flags & AES_CCM_MODE) {
		mac_buf = (uint8_t *)ctx->ac_ccm_mac_buf;
	}

	do {
		/* Unprocessed data from last call. */
		if (ctx->ac_remainder_len > 0) {
			need = AES_BLOCK_LEN - ctx->ac_remainder_len;

			if (need > remainder)
				return (1);

			bcopy(datap, &((uint8_t *)ctx->ac_remainder)
			    [ctx->ac_remainder_len], need);

			blockp = (uint8_t *)ctx->ac_remainder;
		} else {
			blockp = datap;
		}

		/* don't write on the plaintext */
		if (out != NULL) {
			if (IS_P2ALIGNED(blockp, sizeof (uint32_t))) {
				/* LINTED: pointer alignment */
				tmp[0] = *(uint32_t *)blockp;
				/* LINTED: pointer alignment */
				tmp[1] = *(uint32_t *)&blockp[4];
				/* LINTED: pointer alignment */
				tmp[2] = *(uint32_t *)&blockp[8];
				/* LINTED: pointer alignment */
				tmp[3] = *(uint32_t *)&blockp[12];
			} else {
				uint8_t *tmp8 = (uint8_t *)tmp;

				AES_COPY_BLOCK(blockp, tmp8);
			}
			blockp = (uint8_t *)tmp;
		}

		if (ctx->ac_flags & AES_CCM_MODE) {
			/*
			 * Do the CBC MAC: XOR the previous cipher block
			 * with the current clear block.  mac_buf always
			 * contains the previous cipher block.
			 */
			if (IS_P2ALIGNED2(blockp, mac_buf,
			    sizeof (uint32_t))) {
				/* LINTED: pointer alignment */
				*(uint32_t *)&mac_buf[0] ^=
				    /* LINTED: pointer alignment */
				    *(uint32_t *)&blockp[0];
				/* LINTED: pointer alignment */
				*(uint32_t *)&mac_buf[4] ^=
				    /* LINTED: pointer alignment */
				    *(uint32_t *)&blockp[4];
				/* LINTED: pointer alignment */
				*(uint32_t *)&mac_buf[8] ^=
				    /* LINTED: pointer alignment */
				    *(uint32_t *)&blockp[8];
				/* LINTED: pointer alignment */
				*(uint32_t *)&mac_buf[12] ^=
				    /* LINTED: pointer alignment */
				    *(uint32_t *)&blockp[12];
			} else {
				AES_XOR_BLOCK(blockp, mac_buf);
			}
			aes_encrypt_block(ctx->ac_keysched, mac_buf, mac_buf);
		}

		/* ac_cb is the counter block */
		aes_encrypt_block(ctx->ac_keysched, (uint8_t *)ctx->ac_cb,
		    (uint8_t *)counter_block);

		lastp = (uint8_t *)counter_block;
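		/*
		 * On little-endian machines the counter block bytes sit in
		 * memory in big-endian (wire) order, so the 64-bit load
		 * below yields a byte-swapped value.  The sequence is:
		 * byte-swap to host order, increment, then the second swap
		 * (reading the same bytes through `p') restores big-endian
		 * order before the counter is masked back into ac_cb[1].
		 * For example, a stored counter of 00 ... 00 01 is swapped
		 * to 1, incremented to 2, and stored back as 00 ... 00 02.
		 */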
		/*
		 * Increment the counter.  Counter bits are confined
		 * to the bottom 64 bits of the counter block.
		 */
		counter = ctx->ac_cb[1] & ctx->ac_counter_mask;
#ifdef _LITTLE_ENDIAN
		p = (uint8_t *)&counter;
		counter = (((uint64_t)p[0] << 56) |
		    ((uint64_t)p[1] << 48) |
		    ((uint64_t)p[2] << 40) |
		    ((uint64_t)p[3] << 32) |
		    ((uint64_t)p[4] << 24) |
		    ((uint64_t)p[5] << 16) |
		    ((uint64_t)p[6] << 8) |
		    (uint64_t)p[7]);
#endif
		counter++;
#ifdef _LITTLE_ENDIAN
		counter = (((uint64_t)p[0] << 56) |
		    ((uint64_t)p[1] << 48) |
		    ((uint64_t)p[2] << 40) |
		    ((uint64_t)p[3] << 32) |
		    ((uint64_t)p[4] << 24) |
		    ((uint64_t)p[5] << 16) |
		    ((uint64_t)p[6] << 8) |
		    (uint64_t)p[7]);
#endif
		counter &= ctx->ac_counter_mask;
		ctx->ac_cb[1] =
		    (ctx->ac_cb[1] & ~(ctx->ac_counter_mask)) | counter;

		/*
		 * XOR the encrypted counter block with the current
		 * clear block.  Check for alignment.
		 */
		if (IS_P2ALIGNED2(blockp, lastp, sizeof (uint32_t))) {
			/* LINTED: pointer alignment */
			*(uint32_t *)&blockp[0] ^=
			    /* LINTED: pointer alignment */
			    *(uint32_t *)&lastp[0];
			/* LINTED: pointer alignment */
			*(uint32_t *)&blockp[4] ^=
			    /* LINTED: pointer alignment */
			    *(uint32_t *)&lastp[4];
			/* LINTED: pointer alignment */
			*(uint32_t *)&blockp[8] ^=
			    /* LINTED: pointer alignment */
			    *(uint32_t *)&lastp[8];
			/* LINTED: pointer alignment */
			*(uint32_t *)&blockp[12] ^=
			    /* LINTED: pointer alignment */
			    *(uint32_t *)&lastp[12];
		} else {
			AES_XOR_BLOCK(lastp, blockp);
		}

		ctx->ac_lastp = blockp;
		lastp = blockp;
		if (ctx->ac_flags & AES_CCM_MODE) {
			ctx->ac_ccm_processed_data_len += AES_BLOCK_LEN;
		}

		if (out == NULL) {
			if (ctx->ac_remainder_len > 0) {
				bcopy(blockp, ctx->ac_copy_to,
				    ctx->ac_remainder_len);
				bcopy(blockp + ctx->ac_remainder_len, datap,
				    need);
			}
		} else {
			aes_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
			    &out_data_1_len, &out_data_2, AES_BLOCK_LEN);

			/* copy block to where it belongs */
			if ((out_data_1_len == AES_BLOCK_LEN) &&
			    (IS_P2ALIGNED2(lastp, out_data_1,
			    sizeof (uint32_t)))) {
				/* LINTED: pointer alignment */
				uint32_t *d = (uint32_t *)out_data_1;
				/* LINTED: pointer alignment */
				d[0] = *(uint32_t *)lastp;
				/* LINTED: pointer alignment */
				d[1] = *(uint32_t *)&lastp[4];
				/* LINTED: pointer alignment */
				d[2] = *(uint32_t *)&lastp[8];
				/* LINTED: pointer alignment */
				d[3] = *(uint32_t *)&lastp[12];
			} else {
				bcopy(lastp, out_data_1, out_data_1_len);
			}
			if (out_data_2 != NULL) {
				bcopy(lastp + out_data_1_len, out_data_2,
				    AES_BLOCK_LEN - out_data_1_len);
			}

			/* update offset */
			out->cd_offset += AES_BLOCK_LEN;
		}

		/* Update pointer to next block of data to be processed. */
		if (ctx->ac_remainder_len != 0) {
			datap += need;
			ctx->ac_remainder_len = 0;
		} else {
			datap += AES_BLOCK_LEN;
		}

		remainder = (size_t)&data[length] - (size_t)datap;

		/* Incomplete last block. */
		if (remainder > 0 && remainder < AES_BLOCK_LEN) {
			bcopy(datap, ctx->ac_remainder, remainder);
			ctx->ac_remainder_len = remainder;
			ctx->ac_copy_to = datap;
			goto out;
		}
		ctx->ac_copy_to = NULL;

	} while (remainder > 0);

out:

/* EXPORT DELETE END */

	return (0);
}
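/*
 * CCM overview (RFC 3610, NIST SP 800-38C): the mode combines a CBC-MAC
 * for authentication with CTR mode for confidentiality.  aes_ccm_init()
 * below formats block B0 and MACs the associated data;
 * aes_ctr_ccm_mode_contiguous_blocks() then MACs and encrypts the payload;
 * and aes_ccm_encrypt_final() encrypts the accumulated CBC-MAC with
 * counter block A0 to produce the tag.
 */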
/*
 * The following function should be called at encrypt or decrypt init time
 * for AES CCM mode.
 */
int
aes_ccm_init(aes_ctx_t *ctx, unsigned char *nonce, size_t nonce_len,
    unsigned char *auth_data, size_t auth_data_len)
{
/* EXPORT DELETE START */
	uint8_t *mac_buf, *datap, *ivp, *authp;
	uint32_t iv[4], tmp[4];
	size_t remainder, processed;
	uint8_t encoded_a[10];	/* max encoded auth data length is 10 octets */
	size_t encoded_a_len = 0;

	mac_buf = (uint8_t *)&(ctx->ac_ccm_mac_buf);

	/*
	 * Format the 1st block for CBC-MAC and construct the
	 * 1st counter block.
	 *
	 * aes_ctx->ac_iv is used for storing the counter block;
	 * mac_buf will store b0 at this time.
	 */
	aes_ccm_format_initial_blocks(nonce, nonce_len,
	    auth_data_len, mac_buf, ctx);

	/* The IV for CBC MAC for AES CCM mode is always zero */
	bzero(iv, AES_BLOCK_LEN);
	ivp = (uint8_t *)iv;

	if (IS_P2ALIGNED2(ivp, mac_buf, sizeof (uint32_t))) {
		/* LINTED: pointer alignment */
		*(uint32_t *)&mac_buf[0] ^= *(uint32_t *)&ivp[0];
		/* LINTED: pointer alignment */
		*(uint32_t *)&mac_buf[4] ^= *(uint32_t *)&ivp[4];
		/* LINTED: pointer alignment */
		*(uint32_t *)&mac_buf[8] ^= *(uint32_t *)&ivp[8];
		/* LINTED: pointer alignment */
		*(uint32_t *)&mac_buf[12] ^= *(uint32_t *)&ivp[12];
	} else {
		AES_XOR_BLOCK(ivp, mac_buf);
	}

	/* encrypt b0, the first block of the CBC MAC */
	aes_encrypt_block(ctx->ac_keysched, mac_buf, mac_buf);

	/* take care of the associated data, if any */
	if (auth_data_len == 0) {
		return (0);
	}

	encode_adata_len(auth_data_len, encoded_a, &encoded_a_len);

	remainder = auth_data_len;

	/*
	 * The first block contains the encoded length of the associated
	 * data, followed by as much of the data itself as fits.
	 */
	authp = (uint8_t *)tmp;
	bzero(authp, AES_BLOCK_LEN);
	bcopy(encoded_a, authp, encoded_a_len);
	processed = AES_BLOCK_LEN - encoded_a_len;
	if (processed > auth_data_len) {
		/* in case auth_data is very small */
		processed = auth_data_len;
	}
	bcopy(auth_data, authp + encoded_a_len, processed);
	/* xor with previous buffer */
	if (IS_P2ALIGNED2(authp, mac_buf, sizeof (uint32_t))) {
		/* LINTED: pointer alignment */
		*(uint32_t *)&mac_buf[0] ^= *(uint32_t *)&authp[0];
		/* LINTED: pointer alignment */
		*(uint32_t *)&mac_buf[4] ^= *(uint32_t *)&authp[4];
		/* LINTED: pointer alignment */
		*(uint32_t *)&mac_buf[8] ^= *(uint32_t *)&authp[8];
		/* LINTED: pointer alignment */
		*(uint32_t *)&mac_buf[12] ^= *(uint32_t *)&authp[12];
	} else {
		AES_XOR_BLOCK(authp, mac_buf);
	}
	aes_encrypt_block(ctx->ac_keysched, mac_buf, mac_buf);
	remainder -= processed;
	if (remainder == 0) {
		/* a small amount of associated data, it's all done now */
		return (0);
	}

	do {
		if (remainder < AES_BLOCK_LEN) {
			/*
			 * There is less than a full block of data left;
			 * pad the rest of the buffer with zeros.
			 */
			bzero(authp, AES_BLOCK_LEN);
			bcopy(&(auth_data[processed]), authp, remainder);
			datap = (uint8_t *)authp;
			remainder = 0;
		} else {
			datap = (uint8_t *)(&(auth_data[processed]));
			processed += AES_BLOCK_LEN;
			remainder -= AES_BLOCK_LEN;
		}

		/* xor with previous buffer */
		if (IS_P2ALIGNED2(datap, mac_buf, sizeof (uint32_t))) {
			/* LINTED: pointer alignment */
			*(uint32_t *)&mac_buf[0] ^= *(uint32_t *)&datap[0];
			/* LINTED: pointer alignment */
			*(uint32_t *)&mac_buf[4] ^= *(uint32_t *)&datap[4];
			/* LINTED: pointer alignment */
			*(uint32_t *)&mac_buf[8] ^= *(uint32_t *)&datap[8];
			/* LINTED: pointer alignment */
			*(uint32_t *)&mac_buf[12] ^= *(uint32_t *)&datap[12];
		} else {
			AES_XOR_BLOCK(datap, mac_buf);
		}

		aes_encrypt_block(ctx->ac_keysched, mac_buf, mac_buf);

	} while (remainder > 0);

/* EXPORT DELETE END */
	return (0);
}
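/*
 * The CCM tag is the CBC-MAC encrypted under counter block A0 (counter
 * value 0): the tag is the first ac_ccm_mac_len bytes of
 * (mac_buf XOR E_k(A0)).  Encrypting the MAC this way is what
 * distinguishes A0 from the counter blocks A1, A2, ... used for the
 * payload.
 */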
void
calculate_ccm_mac(aes_ctx_t *ctx, uint8_t **ccm_mac)
{
/* EXPORT DELETE START */
	uint64_t counter;
	uint32_t counter_block[4];
	uint8_t *counterp, *mac_buf;
	int i;

	mac_buf = (uint8_t *)ctx->ac_ccm_mac_buf;

	/* the first counter block starts with index 0 */
	counter = 0;
	ctx->ac_cb[1] =
	    (ctx->ac_cb[1] & ~(ctx->ac_counter_mask)) | counter;

	aes_encrypt_block(ctx->ac_keysched, (uint8_t *)ctx->ac_cb,
	    (uint8_t *)counter_block);

	counterp = (uint8_t *)counter_block;

	/* calculate XOR of MAC with the first counter block */
	for (i = 0; i < ctx->ac_ccm_mac_len; i++) {
		(*ccm_mac)[i] = mac_buf[i] ^ counterp[i];
	}
/* EXPORT DELETE END */
}
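/*
 * Output layout produced by aes_ccm_encrypt_final(): any buffered partial
 * plaintext block is CTR-encrypted and written first, immediately followed
 * by the ac_ccm_mac_len-byte tag, so the total ciphertext length is
 * payload length + MAC length.  The bulk of the function is bookkeeping
 * for the case where that output spans two buffer fragments.
 */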
/* ARGSUSED */
int
aes_ccm_encrypt_final(aes_ctx_t *ctx, crypto_data_t *out)
{
/* EXPORT DELETE START */

	uint8_t *lastp, *mac_buf, *ccm_mac_p, *macp;
	uint32_t counter_block[4];
	uint32_t tmp[4];
	uint8_t ccm_mac[AES_BLOCK_LEN];
	void *iov_or_mp;
	offset_t offset;
	uint8_t *out_data_1;
	uint8_t *out_data_2;
	size_t out_data_1_len;
	int i;

	if (out->cd_length < (ctx->ac_remainder_len + ctx->ac_ccm_mac_len)) {
		return (CRYPTO_ARGUMENTS_BAD);
	}

	/*
	 * When we get here, the number of bytes of payload processed,
	 * plus whatever data remains, should equal the payload length
	 * that was passed in as an argument at init time.
	 */
	if ((ctx->ac_ccm_processed_data_len + ctx->ac_remainder_len)
	    != (ctx->ac_ccm_data_len)) {
		return (CRYPTO_DATA_LEN_RANGE);
	}

	mac_buf = (uint8_t *)ctx->ac_ccm_mac_buf;

	if (ctx->ac_remainder_len > 0) {

		macp = (uint8_t *)tmp;
		bzero(macp, AES_BLOCK_LEN);

		/* copy remainder to temporary buffer */
		bcopy(ctx->ac_remainder, macp, ctx->ac_remainder_len);

		/* calculate the CBC MAC */
		if (IS_P2ALIGNED2(macp, mac_buf, sizeof (uint32_t))) {
			/* LINTED: pointer alignment */
			*(uint32_t *)&mac_buf[0] ^= *(uint32_t *)&macp[0];
			/* LINTED: pointer alignment */
			*(uint32_t *)&mac_buf[4] ^= *(uint32_t *)&macp[4];
			/* LINTED: pointer alignment */
			*(uint32_t *)&mac_buf[8] ^= *(uint32_t *)&macp[8];
			/* LINTED: pointer alignment */
			*(uint32_t *)&mac_buf[12] ^= *(uint32_t *)&macp[12];
		} else {
			AES_XOR_BLOCK(macp, mac_buf);
		}
		aes_encrypt_block(ctx->ac_keysched, mac_buf, mac_buf);

		/* calculate the counter mode */
		aes_encrypt_block(ctx->ac_keysched, (uint8_t *)ctx->ac_cb,
		    (uint8_t *)counter_block);

		lastp = (uint8_t *)counter_block;

		/* copy remainder to temporary buffer */
		bcopy(ctx->ac_remainder, macp, ctx->ac_remainder_len);

		/* XOR with counter block */
		for (i = 0; i < ctx->ac_remainder_len; i++) {
			macp[i] ^= lastp[i];
		}
		ctx->ac_ccm_processed_data_len += ctx->ac_remainder_len;
	}

	/* Calculate the CCM MAC */
	ccm_mac_p = ccm_mac;
	calculate_ccm_mac(ctx, &ccm_mac_p);

	aes_init_ptrs(out, &iov_or_mp, &offset);
	aes_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
	    &out_data_1_len, &out_data_2,
	    ctx->ac_remainder_len + ctx->ac_ccm_mac_len);

	if (ctx->ac_remainder_len > 0) {

		/* copy temporary block to where it belongs */
		if (out_data_2 == NULL) {
			/* everything will fit in out_data_1 */
			bcopy(macp, out_data_1, ctx->ac_remainder_len);
			bcopy(ccm_mac, out_data_1 + ctx->ac_remainder_len,
			    ctx->ac_ccm_mac_len);
		} else {
			if (out_data_1_len < ctx->ac_remainder_len) {
				size_t data_2_len_used;

				bcopy(macp, out_data_1, out_data_1_len);

				data_2_len_used = ctx->ac_remainder_len
				    - out_data_1_len;

				bcopy((uint8_t *)macp + out_data_1_len,
				    out_data_2, data_2_len_used);
				bcopy(ccm_mac, out_data_2 + data_2_len_used,
				    ctx->ac_ccm_mac_len);
			} else {
				bcopy(macp, out_data_1, out_data_1_len);
				if (out_data_1_len == ctx->ac_remainder_len) {
					/* mac will be in out_data_2 */
					bcopy(ccm_mac, out_data_2,
					    ctx->ac_ccm_mac_len);
				} else {
					size_t len_not_used
					    = out_data_1_len -
					    ctx->ac_remainder_len;
					/*
					 * Part of the MAC will be in
					 * out_data_1; the rest of the MAC
					 * will be in out_data_2.
					 */
					bcopy(ccm_mac,
					    out_data_1 + ctx->ac_remainder_len,
					    len_not_used);
					bcopy(ccm_mac + len_not_used,
					    out_data_2,
					    ctx->ac_ccm_mac_len - len_not_used);
				}
			}
		}
	} else {
		/* copy block to where it belongs */
		bcopy(ccm_mac, out_data_1, out_data_1_len);
		if (out_data_2 != NULL) {
			bcopy(ccm_mac + out_data_1_len, out_data_2,
			    AES_BLOCK_LEN - out_data_1_len);
		}
	}
	out->cd_offset += ctx->ac_remainder_len + ctx->ac_ccm_mac_len;
	ctx->ac_remainder_len = 0;

/* EXPORT DELETE END */

	return (0);
}

int
aes_ccm_validate_args(CK_AES_CCM_PARAMS *ccm_param, boolean_t is_encrypt_init)
{

/* EXPORT DELETE START */
	size_t macSize, nonceSize;
	uint8_t q;
	uint64_t maxValue;

	/*
	 * Check the byte length of the MAC.  The only valid
	 * lengths for the MAC are: 4, 6, 8, 10, 12, 14, 16
	 */
	macSize = ccm_param->ulMACSize;
	if ((macSize < 4) || (macSize > 16) || ((macSize % 2) != 0)) {
		return (CRYPTO_MECHANISM_PARAM_INVALID);
	}

	/* Check the nonce length.  Valid values are 7, 8, 9, 10, 11, 12, 13 */
	nonceSize = ccm_param->ulNonceSize;
	if ((nonceSize < 7) || (nonceSize > 13)) {
		return (CRYPTO_MECHANISM_PARAM_INVALID);
	}

	/* q is the length of the field storing the length, in bytes */
	q = (uint8_t)((15 - nonceSize) & 0xFF);

	/*
	 * For decryption, make sure the size of the ciphertext is at
	 * least as large as the MAC length.
	 */
	if ((!is_encrypt_init) && (ccm_param->ulDataSize < macSize)) {
		return (CRYPTO_MECHANISM_PARAM_INVALID);
	}

	/*
	 * Check to make sure the length of the payload is within the
	 * range of values allowed by q
	 */
	if (q < 8) {
		maxValue = (1ULL << (q * 8)) - 1;
	} else {
		maxValue = ULONG_MAX;
	}

	if (ccm_param->ulDataSize > maxValue) {
		return (CRYPTO_MECHANISM_PARAM_INVALID);
	}

/* EXPORT DELETE END */
	return (0);
}
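/*
 * Worked example of the B0 flags octet constructed below, for a 13-byte
 * nonce (so q = 15 - 13 = 2), a 16-byte MAC (t = 16), and non-empty
 * associated data:
 *
 *	b0[0] = (1 << 6) | (((16 - 2) / 2) << 3) | (2 - 1)
 *	      = 0x40 | 0x38 | 0x01 = 0x79
 *
 * followed by the 13 nonce bytes and a 2-byte big-endian payload length.
 */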
/*
 * Format the first block used in CBC-MAC (B0) and the initial counter
 * block based on the formatting functions and counter generation functions
 * specified in RFC 3610 and NIST publication 800-38C, appendix A.
 *
 * b0 is the first block used in CBC-MAC
 * cb0 is the first counter block
 *
 * It's assumed that the arguments b0 and cb0 are preallocated AES blocks.
 */
static void
aes_ccm_format_initial_blocks(uchar_t *nonce, ulong_t nonceSize,
    ulong_t authDataSize, uint8_t *b0, aes_ctx_t *aes_ctx)
{
/* EXPORT DELETE START */
	uint64_t payloadSize;
	uint8_t t, q, have_adata = 0;
	size_t limit;
	int i, j, k;
	uint64_t mask = 0;
	uint8_t *cb;
#ifdef _LITTLE_ENDIAN
	uint8_t *p8;
#endif	/* _LITTLE_ENDIAN */

	q = (uint8_t)((15 - nonceSize) & 0xFF);
	t = (uint8_t)((aes_ctx->ac_ccm_mac_len) & 0xFF);

	/* Construct the first octet of b0 */
	if (authDataSize > 0) {
		have_adata = 1;
	}
	b0[0] = (have_adata << 6) | (((t - 2) / 2) << 3) | (q - 1);

	/* copy the nonce value into b0 */
	bcopy(nonce, &(b0[1]), nonceSize);

	/* store the length of the payload into b0 */
	bzero(&(b0[1 + nonceSize]), q);

	payloadSize = aes_ctx->ac_ccm_data_len;
	limit = 8 < q ? 8 : q;

	for (i = 0, j = 0, k = 15; i < limit; i++, j += 8, k--) {
		b0[k] = (uint8_t)((payloadSize >> j) & 0xFF);
	}

	/* format the counter block */

	cb = (uint8_t *)aes_ctx->ac_cb;

	cb[0] = 0x07 & (q - 1);	/* first byte */

	/* copy the nonce value into the counter block */
	bcopy(nonce, &(cb[1]), nonceSize);

	bzero(&(cb[1 + nonceSize]), q);

	/* Create the mask for the counter field based on the size of nonce */
	q <<= 3;
	while (q-- > 0) {
		mask |= (1ULL << q);
	}

#ifdef _LITTLE_ENDIAN
	p8 = (uint8_t *)&mask;
	mask = (((uint64_t)p8[0] << 56) |
	    ((uint64_t)p8[1] << 48) |
	    ((uint64_t)p8[2] << 40) |
	    ((uint64_t)p8[3] << 32) |
	    ((uint64_t)p8[4] << 24) |
	    ((uint64_t)p8[5] << 16) |
	    ((uint64_t)p8[6] << 8) |
	    (uint64_t)p8[7]);
#endif
	aes_ctx->ac_counter_mask = mask;

	/*
	 * During the calculation we start from counter block 1, so set
	 * it up right here.
	 * We can just set the last byte to have the value 1, because
	 * even with the biggest nonce of 13, the last byte of the
	 * counter block will be used for the counter value.
	 */
	cb[15] = 0x01;

/* EXPORT DELETE END */
}
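/*
 * Encoding of the associated data length `a' per RFC 3610, section 2.2:
 *
 *	0 < a < 2^16 - 2^8	two bytes:  a (big-endian)
 *	2^16 - 2^8 <= a < 2^32	six bytes:  0xff 0xfe || a (4 bytes BE)
 *	2^32 <= a < 2^64	ten bytes:  0xff 0xff || a (8 bytes BE)
 */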
/*
 * Encode the length of the associated data as
 * specified in RFC 3610 and NIST publication 800-38C, appendix A.
 */
static void
encode_adata_len(ulong_t auth_data_len, uint8_t *encoded, size_t *encoded_len)
{

/* EXPORT DELETE START */

	if (auth_data_len < ((1ULL << 16) - (1ULL << 8))) {
		/* 0 < a < (2^16-2^8) */
		*encoded_len = 2;
		encoded[0] = (auth_data_len & 0xff00) >> 8;
		encoded[1] = auth_data_len & 0xff;

	} else if ((auth_data_len >= ((1ULL << 16) - (1ULL << 8))) &&
	    (auth_data_len < (1ULL << 32))) {
		/* (2^16-2^8) <= a < 2^32 */
		*encoded_len = 6;
		encoded[0] = 0xff;
		encoded[1] = 0xfe;
		encoded[2] = (auth_data_len & 0xff000000) >> 24;
		encoded[3] = (auth_data_len & 0xff0000) >> 16;
		encoded[4] = (auth_data_len & 0xff00) >> 8;
		encoded[5] = auth_data_len & 0xff;
#ifdef _LP64
	} else {
		/* 2^32 <= a < 2^64 */
		*encoded_len = 10;
		encoded[0] = 0xff;
		encoded[1] = 0xff;
		encoded[2] = (auth_data_len & 0xff00000000000000) >> 56;
		encoded[3] = (auth_data_len & 0xff000000000000) >> 48;
		encoded[4] = (auth_data_len & 0xff0000000000) >> 40;
		encoded[5] = (auth_data_len & 0xff00000000) >> 32;
		encoded[6] = (auth_data_len & 0xff000000) >> 24;
		encoded[7] = (auth_data_len & 0xff0000) >> 16;
		encoded[8] = (auth_data_len & 0xff00) >> 8;
		encoded[9] = auth_data_len & 0xff;
#endif	/* _LP64 */
	}
/* EXPORT DELETE END */
}

/*
 * This handles only the decryption of the last block of input, which
 * might not be a multiple of AES_BLOCK_LEN in length.
 */
static void
aes_ccm_decrypt_incomplete_block(aes_ctx_t *ctx)
{

/* EXPORT DELETE START */
	uint8_t *datap, counter_block[AES_BLOCK_LEN], *outp, *counterp;
	int i;

	datap = (uint8_t *)ctx->ac_remainder;
	outp = &((ctx->ac_ccm_pt_buf)[ctx->ac_ccm_processed_data_len]);

	aes_encrypt_block(ctx->ac_keysched, (uint8_t *)ctx->ac_cb,
	    counter_block);

	counterp = (uint8_t *)counter_block;

	/* XOR with counter block */
	for (i = 0; i < ctx->ac_remainder_len; i++) {
		outp[i] = datap[i] ^ counterp[i];
	}
/* EXPORT DELETE END */
}
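/*
 * CCM decryption design note: because the tag can only be verified after
 * all ciphertext has been processed, decrypted plaintext is staged in
 * ctx->ac_ccm_pt_buf rather than handed to the caller, and the trailing
 * ac_ccm_mac_len bytes of the input stream are diverted into
 * ac_ccm_mac_input_buf as they arrive.  aes_ccm_decrypt_final() releases
 * the plaintext only if the recomputed MAC matches the received one.
 */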
/*
 * This decrypts the ciphertext.  However, the plaintext won't be
 * returned to the caller; it is returned when decrypt_final() is
 * called, and only if the MAC matches.
 */
/* ARGSUSED */
static int
aes_ccm_decrypt_contiguous_blocks(aes_ctx_t *ctx, char *data, size_t length,
    crypto_data_t *out)
{

/* EXPORT DELETE START */

	size_t remainder = length;
	size_t need;
	uint8_t *datap = (uint8_t *)data;
	uint8_t *blockp;
	uint32_t counter_block[4];
	uint8_t *cbp;
	uint64_t counter;
	size_t pt_len, total_decrypted_len, mac_len, pm_len, pd_len;
	uint32_t tmp[4];
	uint8_t *resultp;
#ifdef _LITTLE_ENDIAN
	uint8_t *p;
#endif	/* _LITTLE_ENDIAN */

	pm_len = ctx->ac_ccm_processed_mac_len;

	if (pm_len > 0) {
		uint8_t *tmp;
		/*
		 * All ciphertext has been processed; we are just waiting
		 * for the rest of the MAC value.
		 */
		if ((pm_len + length) > ctx->ac_ccm_mac_len) {
			return (CRYPTO_DATA_LEN_RANGE);
		}
		tmp = (uint8_t *)ctx->ac_ccm_mac_input_buf;

		bcopy(datap, tmp + pm_len, length);

		ctx->ac_ccm_processed_mac_len += length;
		return (0);
	}

	/*
	 * If we decrypt the given data, what total amount of data would
	 * have been decrypted?
	 */
	pd_len = ctx->ac_ccm_processed_data_len;
	total_decrypted_len = pd_len + length + ctx->ac_remainder_len;

	if (total_decrypted_len >
	    (ctx->ac_ccm_data_len + ctx->ac_ccm_mac_len)) {
		return (CRYPTO_DATA_LEN_RANGE);
	}

	pt_len = ctx->ac_ccm_data_len;

	if (total_decrypted_len > pt_len) {
		/*
		 * Part of the input will be the MAC, which needs to be
		 * isolated and dealt with later.  The left-over data in
		 * ac_remainder_len from last time will not be part of the
		 * MAC; otherwise, it would have already been taken out
		 * when this call was made last time.
		 */
		size_t pt_part = pt_len - pd_len - ctx->ac_remainder_len;

		mac_len = length - pt_part;

		ctx->ac_ccm_processed_mac_len = mac_len;
		bcopy(data + pt_part, ctx->ac_ccm_mac_input_buf, mac_len);

		if (pt_part + ctx->ac_remainder_len < AES_BLOCK_LEN) {
			/*
			 * Since this is the last of the ciphertext, just
			 * decrypt with it here.
			 */
			bcopy(datap, &((uint8_t *)ctx->ac_remainder)
			    [ctx->ac_remainder_len], pt_part);
			ctx->ac_remainder_len += pt_part;
			aes_ccm_decrypt_incomplete_block(ctx);
			ctx->ac_remainder_len = 0;
			ctx->ac_ccm_processed_data_len += pt_part;
			return (0);
		} else {
			/* let the rest of the code handle this */
			length = pt_part;
		}
	} else if (length + ctx->ac_remainder_len < AES_BLOCK_LEN) {
		/* accumulate bytes here and return */
		bcopy(datap,
		    (uint8_t *)ctx->ac_remainder + ctx->ac_remainder_len,
		    length);
		ctx->ac_remainder_len += length;
		ctx->ac_copy_to = datap;
		return (0);
	}

	do {
		/* Unprocessed data from last call. */
		if (ctx->ac_remainder_len > 0) {
			need = AES_BLOCK_LEN - ctx->ac_remainder_len;

			if (need > remainder)
				return (1);

			bcopy(datap, &((uint8_t *)ctx->ac_remainder)
			    [ctx->ac_remainder_len], need);

			blockp = (uint8_t *)ctx->ac_remainder;
		} else {
			blockp = datap;
		}

		/* don't write on the plaintext */
		if (IS_P2ALIGNED(blockp, sizeof (uint32_t))) {
			/* LINTED: pointer alignment */
			tmp[0] = *(uint32_t *)blockp;
			/* LINTED: pointer alignment */
			tmp[1] = *(uint32_t *)&blockp[4];
			/* LINTED: pointer alignment */
			tmp[2] = *(uint32_t *)&blockp[8];
			/* LINTED: pointer alignment */
			tmp[3] = *(uint32_t *)&blockp[12];
		} else {
			uint8_t *tmp8 = (uint8_t *)tmp;

			AES_COPY_BLOCK(blockp, tmp8);
		}
		blockp = (uint8_t *)tmp;

		/* Calculate the counter mode; ac_cb is the counter block */
		aes_encrypt_block(ctx->ac_keysched, (uint8_t *)ctx->ac_cb,
		    (uint8_t *)counter_block);
		cbp = (uint8_t *)counter_block;

		/*
		 * Increment the counter.
		 * Counter bits are confined to the bottom 64 bits.
		 */
		counter = ctx->ac_cb[1] & ctx->ac_counter_mask;
#ifdef _LITTLE_ENDIAN
		p = (uint8_t *)&counter;
		counter = (((uint64_t)p[0] << 56) |
		    ((uint64_t)p[1] << 48) |
		    ((uint64_t)p[2] << 40) |
		    ((uint64_t)p[3] << 32) |
		    ((uint64_t)p[4] << 24) |
		    ((uint64_t)p[5] << 16) |
		    ((uint64_t)p[6] << 8) |
		    (uint64_t)p[7]);
#endif
		counter++;
#ifdef _LITTLE_ENDIAN
		counter = (((uint64_t)p[0] << 56) |
		    ((uint64_t)p[1] << 48) |
		    ((uint64_t)p[2] << 40) |
		    ((uint64_t)p[3] << 32) |
		    ((uint64_t)p[4] << 24) |
		    ((uint64_t)p[5] << 16) |
		    ((uint64_t)p[6] << 8) |
		    (uint64_t)p[7]);
#endif
		counter &= ctx->ac_counter_mask;
		ctx->ac_cb[1] =
		    (ctx->ac_cb[1] & ~(ctx->ac_counter_mask)) | counter;

		/* XOR with the ciphertext */
		if (IS_P2ALIGNED2(blockp, cbp, sizeof (uint32_t))) {
			/* LINTED: pointer alignment */
			*(uint32_t *)&blockp[0] ^= *(uint32_t *)&cbp[0];
			/* LINTED: pointer alignment */
			*(uint32_t *)&blockp[4] ^= *(uint32_t *)&cbp[4];
			/* LINTED: pointer alignment */
			*(uint32_t *)&blockp[8] ^= *(uint32_t *)&cbp[8];
			/* LINTED: pointer alignment */
			*(uint32_t *)&blockp[12] ^= *(uint32_t *)&cbp[12];
		} else {
			AES_XOR_BLOCK(cbp, blockp);
		}

		/* Copy the plaintext to the "holding buffer" */
		resultp = (uint8_t *)ctx->ac_ccm_pt_buf +
		    ctx->ac_ccm_processed_data_len;
		if (IS_P2ALIGNED2(blockp, resultp, sizeof (uint32_t))) {
			/* LINTED: pointer alignment */
			*(uint32_t *)&resultp[0] = *(uint32_t *)blockp;
			/* LINTED: pointer alignment */
			*(uint32_t *)&resultp[4] = *(uint32_t *)&blockp[4];
			/* LINTED: pointer alignment */
			*(uint32_t *)&resultp[8] = *(uint32_t *)&blockp[8];
			/* LINTED: pointer alignment */
			*(uint32_t *)&resultp[12] = *(uint32_t *)&blockp[12];
		} else {
			AES_COPY_BLOCK(blockp, resultp);
		}

		ctx->ac_ccm_processed_data_len += AES_BLOCK_LEN;

		ctx->ac_lastp = blockp;
		/* Update pointer to next block of data to be processed. */
		if (ctx->ac_remainder_len != 0) {
			datap += need;
			ctx->ac_remainder_len = 0;
		} else {
			datap += AES_BLOCK_LEN;
		}

		remainder = (size_t)&data[length] - (size_t)datap;

		/* Incomplete last block */
		if (remainder > 0 && remainder < AES_BLOCK_LEN) {
			bcopy(datap, ctx->ac_remainder, remainder);
			ctx->ac_remainder_len = remainder;
			ctx->ac_copy_to = datap;
			if (ctx->ac_ccm_processed_mac_len > 0) {
				/*
				 * We are not expecting any more ciphertext,
				 * so just compute the plaintext for the
				 * remaining input.
				 */
				aes_ccm_decrypt_incomplete_block(ctx);
				ctx->ac_ccm_processed_data_len += remainder;
				ctx->ac_remainder_len = 0;
			}
			goto out;
		}
		ctx->ac_copy_to = NULL;

	} while (remainder > 0);

out:
/* EXPORT DELETE END */

	return (0);
}
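/*
 * Final step of CCM decryption: recompute the CBC-MAC over the staged
 * plaintext in ac_ccm_pt_buf, derive the expected tag via
 * calculate_ccm_mac(), and compare it with the tag collected in
 * ac_ccm_mac_input_buf.  Plaintext is copied to the caller's buffer only
 * on a match.
 */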
int
aes_ccm_decrypt_final(aes_ctx_t *ctx, crypto_data_t *out)
{
/* EXPORT DELETE START */
	size_t mac_remain, pt_len;
	uint8_t *pt, *mac_buf, *macp, *ccm_mac_p;
	uint8_t ccm_mac[AES_BLOCK_LEN];
	void *iov_or_mp;
	offset_t offset;
	uint8_t *out_data_1, *out_data_2;
	size_t out_data_1_len;
	uint32_t tmp[4];

	pt_len = ctx->ac_ccm_data_len;

	/* Make sure the output buffer can fit all of the plaintext */
	if (out->cd_length < pt_len) {
		return (CRYPTO_ARGUMENTS_BAD);
	}

	pt = ctx->ac_ccm_pt_buf;
	mac_remain = ctx->ac_ccm_processed_data_len;
	mac_buf = (uint8_t *)ctx->ac_ccm_mac_buf;

	macp = (uint8_t *)tmp;

	while (mac_remain > 0) {

		if (mac_remain < AES_BLOCK_LEN) {
			bzero(tmp, AES_BLOCK_LEN);
			bcopy(pt, tmp, mac_remain);
			mac_remain = 0;
		} else {
			if (IS_P2ALIGNED2(pt, macp, sizeof (uint32_t))) {
				/* LINTED: pointer alignment */
				*(uint32_t *)&macp[0] = *(uint32_t *)pt;
				/* LINTED: pointer alignment */
				*(uint32_t *)&macp[4] = *(uint32_t *)&pt[4];
				/* LINTED: pointer alignment */
				*(uint32_t *)&macp[8] = *(uint32_t *)&pt[8];
				/* LINTED: pointer alignment */
				*(uint32_t *)&macp[12] = *(uint32_t *)&pt[12];
			} else {
				AES_COPY_BLOCK(pt, macp);
			}
			mac_remain -= AES_BLOCK_LEN;
			pt += AES_BLOCK_LEN;
		}

		/* calculate the CBC MAC */
		if (IS_P2ALIGNED2(macp, mac_buf, sizeof (uint32_t))) {
			/* LINTED: pointer alignment */
			*(uint32_t *)&mac_buf[0] ^= *(uint32_t *)&macp[0];
			/* LINTED: pointer alignment */
			*(uint32_t *)&mac_buf[4] ^= *(uint32_t *)&macp[4];
			/* LINTED: pointer alignment */
			*(uint32_t *)&mac_buf[8] ^= *(uint32_t *)&macp[8];
			/* LINTED: pointer alignment */
			*(uint32_t *)&mac_buf[12] ^= *(uint32_t *)&macp[12];
		} else {
			AES_XOR_BLOCK(macp, mac_buf);
		}
		aes_encrypt_block(ctx->ac_keysched, mac_buf, mac_buf);
	}

	/* Calculate the CCM MAC */
	ccm_mac_p = ccm_mac;
	calculate_ccm_mac(ctx, &ccm_mac_p);

	/* compare the input CCM MAC value with what we calculated */
	if (bcmp(ctx->ac_ccm_mac_input_buf, ccm_mac, ctx->ac_ccm_mac_len)) {
		/* They don't match */
		return (CRYPTO_DATA_LEN_RANGE);
	} else {
		aes_init_ptrs(out, &iov_or_mp, &offset);
		aes_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
		    &out_data_1_len, &out_data_2, pt_len);
		bcopy(ctx->ac_ccm_pt_buf, out_data_1, out_data_1_len);
		if (out_data_2 != NULL) {
			bcopy((ctx->ac_ccm_pt_buf) + out_data_1_len,
			    out_data_2, pt_len - out_data_1_len);
		}
		out->cd_offset += pt_len;
	}

/* EXPORT DELETE END */
	return (0);
}