5072963 Need an optimized AES implementation for amd64
6699938 CCM max payload computation is off by one


   6  * You may not use this file except in compliance with the License.
   7  *
   8  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
   9  * or http://www.opensolaris.org/os/licensing.
  10  * See the License for the specific language governing permissions
  11  * and limitations under the License.
  12  *
  13  * When distributing Covered Code, include this CDDL HEADER in each
  14  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
  15  * If applicable, add the following below this CDDL HEADER, with the
  16  * fields enclosed by brackets "[]" replaced with your own identifying
  17  * information: Portions Copyright [yyyy] [name of copyright owner]
  18  *
  19  * CDDL HEADER END
  20  */
  21 /*
  22  * Copyright 2008 Sun Microsystems, Inc.  All rights reserved.
  23  * Use is subject to license terms.
  24  */
  25 
  26 #pragma ident   "@(#)aes_cbc_crypt.c    1.9     08/05/09 SMI"
  27 
  28 
  29 #include <sys/sysmacros.h>
  30 #include <sys/systm.h>
  31 #include <sys/crypto/common.h>
  32 #include <sys/strsun.h>
  33 #include "aes_cbc_crypt.h"
  34 #include "aes_impl.h"
  35 #ifndef _KERNEL
  36 #include <limits.h>
  37 #include <strings.h>
  38 #endif  /* !_KERNEL */
  39 
  40 static int aes_ctr_ccm_mode_contiguous_blocks(aes_ctx_t *, char *, size_t,
  41     crypto_data_t *);
  42 static void
  43 encode_adata_len(ulong_t auth_data_len, uint8_t *encoded, size_t *encoded_len);
  44 static void
  45 aes_ccm_format_initial_blocks(uchar_t *nonce, ulong_t nonceSize,
  46     ulong_t authDataSize, uint8_t *b0, aes_ctx_t *aes_ctx);
  47 static int
  48 aes_ccm_decrypt_contiguous_blocks(aes_ctx_t *ctx, char *data, size_t length,
  49     crypto_data_t *out);
  50 
  51 /*
  52  * Initialize by setting iov_or_mp to point to the current iovec or mp,
   53  * and by setting current_offset to an offset within the current iovec or mp.
  54  */
  55 static void
  56 aes_init_ptrs(crypto_data_t *out, void **iov_or_mp, offset_t *current_offset)
  57 {
  58         offset_t offset;
  59 
  60         switch (out->cd_format) {
  61         case CRYPTO_DATA_RAW:
  62                 *current_offset = out->cd_offset;
  63                 break;
  64 
  65         case CRYPTO_DATA_UIO: {
  66                 uio_t *uiop = out->cd_uio;
  67                 uintptr_t vec_idx;
  68 
  69                 offset = out->cd_offset;
  70                 for (vec_idx = 0; vec_idx < uiop->uio_iovcnt &&
  71                     offset >= uiop->uio_iov[vec_idx].iov_len;
  72                     offset -= uiop->uio_iov[vec_idx++].iov_len)
  73                         ;

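A stand-alone sketch of the iovec walk above: the loop subtracts each full
vector's length from the offset until the offset falls inside the current
vector.  my_iovec and find_vec below are illustrative names, not part of
this file.

#include <stddef.h>

struct my_iovec {                       /* simplified stand-in for iovec */
        void    *iov_base;
        size_t  iov_len;
};

/*
 * Return the index of the vector containing `offset', storing the
 * residual offset within that vector in *residual; -1 if past the end.
 */
static int
find_vec(const struct my_iovec *iov, int iovcnt, size_t offset,
    size_t *residual)
{
        int vec_idx;

        for (vec_idx = 0; vec_idx < iovcnt &&
            offset >= iov[vec_idx].iov_len;
            offset -= iov[vec_idx++].iov_len)
                ;
        if (vec_idx == iovcnt)
                return (-1);
        *residual = offset;
        return (vec_idx);
}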

 236                                 tmp[0] = *(uint32_t *)blockp;
 237                                 /* LINTED: pointer alignment */
 238                                 tmp[1] = *(uint32_t *)&blockp[4];
 239                                 /* LINTED: pointer alignment */
 240                                 tmp[2] = *(uint32_t *)&blockp[8];
 241                                 /* LINTED: pointer alignment */
 242                                 tmp[3] = *(uint32_t *)&blockp[12];
 243                         } else {
 244                                 uint8_t *tmp8 = (uint8_t *)tmp;
 245 
 246                                 AES_COPY_BLOCK(blockp, tmp8);
 247                         }
 248                         blockp = (uint8_t *)tmp;
 249                 }
 250 
 251                 if (ctx->ac_flags & AES_CBC_MODE) {
 252                         /*
 253                          * XOR the previous cipher block or IV with the
 254                          * current clear block. Check for alignment.
 255                          */
 256                         if (IS_P2ALIGNED(blockp, sizeof (uint32_t)) &&
 257                             IS_P2ALIGNED(lastp, sizeof (uint32_t))) {
 258                                 /* LINTED: pointer alignment */
 259                                 *(uint32_t *)&blockp[0] ^=
 260                                 /* LINTED: pointer alignment */
 261                                     *(uint32_t *)&lastp[0];
 262                                 /* LINTED: pointer alignment */
 263                                 *(uint32_t *)&blockp[4] ^=
 264                                 /* LINTED: pointer alignment */
 265                                     *(uint32_t *)&lastp[4];
 266                                 /* LINTED: pointer alignment */
 267                                 *(uint32_t *)&blockp[8] ^=
 268                                 /* LINTED: pointer alignment */
 269                                     *(uint32_t *)&lastp[8];
 270                                 /* LINTED: pointer alignment */
 271                                 *(uint32_t *)&blockp[12] ^=
 272                                 /* LINTED: pointer alignment */
 273                                     *(uint32_t *)&lastp[12];
 274                         } else {
 275                                 AES_XOR_BLOCK(lastp, blockp);
 276                         }
 277                 }
 278 
 279                 if (out == NULL) {
 280                         aes_encrypt_block(ctx->ac_keysched, blockp, blockp);
 281 
 282                         ctx->ac_lastp = blockp;
 283                         lastp = blockp;
 284 
 285                         if (ctx->ac_remainder_len > 0) {
 286                                 bcopy(blockp, ctx->ac_copy_to,
 287                                     ctx->ac_remainder_len);
 288                                 bcopy(blockp + ctx->ac_remainder_len, datap,
 289                                     need);
 290                         }
 291                 } else {
 292                         aes_encrypt_block(ctx->ac_keysched, blockp, lastp);
 293                         aes_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
 294                             &out_data_1_len, &out_data_2, AES_BLOCK_LEN);
 295 
 296                         /* copy block to where it belongs */
 297                         bcopy(lastp, out_data_1, out_data_1_len);
 298                         if (out_data_2 != NULL) {
 299                                 bcopy(lastp + out_data_1_len, out_data_2,
 300                                     AES_BLOCK_LEN - out_data_1_len);
 301                         }
 302                         /* update offset */
 303                         out->cd_offset += AES_BLOCK_LEN;
 304                 }
 305 
 306                 /* Update pointer to next block of data to be processed. */
 307                 if (ctx->ac_remainder_len != 0) {
 308                         datap += need;
 309                         ctx->ac_remainder_len = 0;
 310                 } else {
 311                         datap += AES_BLOCK_LEN;
 312                 }
 313 
 314                 remainder = (size_t)&data[length] - (size_t)datap;
 315 
 316                 /* Incomplete last block. */
 317                 if (remainder > 0 && remainder < AES_BLOCK_LEN) {
 318                         bcopy(datap, ctx->ac_remainder, remainder);
 319                         ctx->ac_remainder_len = remainder;
 320                         ctx->ac_copy_to = datap;
 321                         goto out;

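When fewer than AES_BLOCK_LEN bytes remain, the code above parks them in
ctx->ac_remainder and records the source pointer in ac_copy_to so a later
call can complete the block.  The same idiom in isolation; part_ctx and
stash_tail are illustrative names:

#include <string.h>

#define BLK     16                      /* stands in for AES_BLOCK_LEN */

struct part_ctx {
        unsigned char   rem[BLK];       /* holds the incomplete block */
        size_t          rem_len;
};

/* Stash the sub-block tail of (data, len); returns the bytes stashed. */
static size_t
stash_tail(struct part_ctx *c, const unsigned char *data, size_t len)
{
        size_t tail = len % BLK;

        memcpy(c->rem, data + (len - tail), tail);
        c->rem_len = tail;
        return (tail);
}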

 446                                 /* LINTED: pointer alignment */
 447                                 tmp8 = (uint8_t *)OTHER((uint64_t *)lastp, ctx);
 448 
 449                                 AES_COPY_BLOCK(blockp, tmp8);
 450                         }
 451                 }
 452 
 453                 if (out != NULL) {
 454                         aes_decrypt_block(ctx->ac_keysched, blockp,
 455                             (uint8_t *)tmp);
 456                         blockp = (uint8_t *)tmp;
 457                 } else {
 458                         aes_decrypt_block(ctx->ac_keysched, blockp, blockp);
 459                 }
 460 
 461                 if (ctx->ac_flags & AES_CBC_MODE) {
 462                         /*
 463                          * XOR the previous cipher block or IV with the
 464                          * currently decrypted block.  Check for alignment.
 465                          */
 466                         if (IS_P2ALIGNED(blockp, sizeof (uint32_t)) &&
 467                             IS_P2ALIGNED(lastp, sizeof (uint32_t))) {
 468                                 /* LINTED: pointer alignment */
 469                                 *(uint32_t *)blockp ^= *(uint32_t *)lastp;
 470                                 /* LINTED: pointer alignment */
 471                                 *(uint32_t *)&blockp[4] ^=
 472                                 /* LINTED: pointer alignment */
 473                                     *(uint32_t *)&lastp[4];
 474                                 /* LINTED: pointer alignment */
 475                                 *(uint32_t *)&blockp[8] ^=
 476                                 /* LINTED: pointer alignment */
 477                                     *(uint32_t *)&lastp[8];
 478                                 /* LINTED: pointer alignment */
 479                                 *(uint32_t *)&blockp[12] ^=
 480                                 /* LINTED: pointer alignment */
 481                                     *(uint32_t *)&lastp[12];
 482                         } else {
 483                                 AES_XOR_BLOCK(lastp, blockp);
 484                         }
 485 
 486                         /* LINTED: pointer alignment */
 487                         lastp = (uint8_t *)OTHER((uint64_t *)lastp, ctx);
 488                 }
 489 
 490                 if (out != NULL) {
 491                         aes_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
 492                             &out_data_1_len, &out_data_2, AES_BLOCK_LEN);
 493 
 494                         /* copy temporary block to where it belongs */
 495                         bcopy(&tmp, out_data_1, out_data_1_len);
 496                         if (out_data_2 != NULL) {
 497                                 bcopy((uint8_t *)&tmp + out_data_1_len,
 498                                     out_data_2, AES_BLOCK_LEN - out_data_1_len);
 499                         }
 500 
 501                         /* update offset */
 502                         out->cd_offset += AES_BLOCK_LEN;
 503 
 504                 } else if (ctx->ac_remainder_len > 0) {
 505                         /* copy temporary block to where it belongs */
 506                         bcopy(blockp, ctx->ac_copy_to, ctx->ac_remainder_len);
 507                         bcopy(blockp + ctx->ac_remainder_len, datap, need);
 508                 }
 509 
 510                 /* Update pointer to next block of data to be processed. */
 511                 if (ctx->ac_remainder_len != 0) {
 512                         datap += need;
 513                         ctx->ac_remainder_len = 0;
 514                 } else {
 515                         datap += AES_BLOCK_LEN;

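The OTHER() macro used above ping-pongs between two block buffers in the
context: CBC decryption still needs the previous ciphertext block after
the current block has been decrypted in place, so two saved blocks
alternate roles.  A minimal sketch of the presumed idiom (two_bufs and
other_buf are illustrative; this is not the macro's actual definition):

#include <stdint.h>

struct two_bufs {
        uint64_t        iv[2];          /* one saved ciphertext block */
        uint64_t        lastblock[2];   /* the other saved block */
};

/* Return whichever of the two buffers `p' is not currently pointing at. */
static uint64_t *
other_buf(uint64_t *p, struct two_bufs *c)
{
        return ((p == c->lastblock) ? c->iv : c->lastblock);
}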

 677                                 tmp[1] = *(uint32_t *)&blockp[4];
 678                                 /* LINTED: pointer alignment */
 679                                 tmp[2] = *(uint32_t *)&blockp[8];
 680                                 /* LINTED: pointer alignment */
 681                                 tmp[3] = *(uint32_t *)&blockp[12];
 682                         } else {
 683                                 uint8_t *tmp8 = (uint8_t *)tmp;
 684 
 685                                 AES_COPY_BLOCK(blockp, tmp8);
 686                         }
 687                         blockp = (uint8_t *)tmp;
 688                 }
 689 
 690                 if (ctx->ac_flags & AES_CCM_MODE) {
 691                         /*
 692                          * do CBC MAC
 693                          *
 694                          * XOR the previous cipher block with the current
 695                          * clear block; mac_buf always contains the previous cipher block.
 696                          */
 697                         if (IS_P2ALIGNED(blockp, sizeof (uint32_t)) &&
 698                             IS_P2ALIGNED(mac_buf, sizeof (uint32_t))) {
 699                                 /* LINTED: pointer alignment */
 700                                 *(uint32_t *)&mac_buf[0] ^=
 701                                 /* LINTED: pointer alignment */
 702                                     *(uint32_t *)&blockp[0];
 703                                 /* LINTED: pointer alignment */
 704                                 *(uint32_t *)&mac_buf[4] ^=
 705                                 /* LINTED: pointer alignment */
 706                                     *(uint32_t *)&blockp[4];
 707                                 /* LINTED: pointer alignment */
 708                                 *(uint32_t *)&mac_buf[8] ^=
 709                                 /* LINTED: pointer alignment */
 710                                     *(uint32_t *)&blockp[8];
 711                                 /* LINTED: pointer alignment */
 712                                 *(uint32_t *)&mac_buf[12] ^=
 713                                 /* LINTED: pointer alignment */
 714                                     *(uint32_t *)&blockp[12];
 715                         } else {
 716                                 AES_XOR_BLOCK(blockp, mac_buf);
 717                         }
 718                         aes_encrypt_block(ctx->ac_keysched, mac_buf, mac_buf);

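The CCM branch above is one step of the CBC-MAC recurrence T_i =
E_K(T_{i-1} XOR B_i), with mac_buf holding the running tag T.  Reduced to
its essentials (cbc_mac_step and the encrypt callback are illustrative):

/*
 * One CBC-MAC step: fold the next block into the running MAC, then
 * encrypt the MAC in place with the block cipher.  (Illustrative sketch.)
 */
static void
cbc_mac_step(void (*encrypt_in_place)(unsigned char *),
    unsigned char mac[16], const unsigned char block[16])
{
        int i;

        for (i = 0; i < 16; i++)
                mac[i] ^= block[i];
        encrypt_in_place(mac);          /* mac = E_K(mac ^ block) */
}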

 743 #endif
 744                 counter++;
 745 #ifdef _LITTLE_ENDIAN
 746                 counter = (((uint64_t)p[0] << 56) |
 747                     ((uint64_t)p[1] << 48) |
 748                     ((uint64_t)p[2] << 40) |
 749                     ((uint64_t)p[3] << 32) |
 750                     ((uint64_t)p[4] << 24) |
 751                     ((uint64_t)p[5] << 16) |
 752                     ((uint64_t)p[6] << 8) |
 753                     (uint64_t)p[7]);
 754 #endif
 755                 counter &= ctx->ac_counter_mask;
 756                 ctx->ac_cb[1] =
 757                     (ctx->ac_cb[1] & ~(ctx->ac_counter_mask)) | counter;
 758 
 759                 /*
 760                  * XOR the previous cipher block or IV with the
 761                  * current clear block. Check for alignment.
 762                  */
 763                 if (IS_P2ALIGNED(blockp, sizeof (uint32_t)) &&
 764                     IS_P2ALIGNED(lastp, sizeof (uint32_t))) {
 765                         /* LINTED: pointer alignment */
 766                         *(uint32_t *)&blockp[0] ^=
 767                         /* LINTED: pointer alignment */
 768                             *(uint32_t *)&lastp[0];
 769                         /* LINTED: pointer alignment */
 770                         *(uint32_t *)&blockp[4] ^=
 771                         /* LINTED: pointer alignment */
 772                             *(uint32_t *)&lastp[4];
 773                         /* LINTED: pointer alignment */
 774                         *(uint32_t *)&blockp[8] ^=
 775                         /* LINTED: pointer alignment */
 776                             *(uint32_t *)&lastp[8];
 777                         /* LINTED: pointer alignment */
 778                         *(uint32_t *)&blockp[12] ^=
 779                         /* LINTED: pointer alignment */
 780                             *(uint32_t *)&lastp[12];
 781                 } else {
 782                         AES_XOR_BLOCK(lastp, blockp);
 783                 }
 784 
 785                 ctx->ac_lastp = blockp;
 786                 lastp = blockp;
 787                 if (ctx->ac_flags & AES_CCM_MODE) {
 788                         ctx->ac_ccm_processed_data_len += AES_BLOCK_LEN;
 789                 }
 790 
 791                 if (out == NULL) {
 792                         if (ctx->ac_remainder_len > 0) {
 793                                 bcopy(blockp, ctx->ac_copy_to,
 794                                     ctx->ac_remainder_len);
 795                                 bcopy(blockp + ctx->ac_remainder_len, datap,
 796                                     need);
 797                         }
 798                 } else {
 799                         aes_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
 800                             &out_data_1_len, &out_data_2, AES_BLOCK_LEN);
 801 
 802                         /* copy block to where it belongs */
 803                         bcopy(lastp, out_data_1, out_data_1_len);
 804                         if (out_data_2 != NULL) {
 805                                 bcopy(lastp + out_data_1_len, out_data_2,
 806                                     AES_BLOCK_LEN - out_data_1_len);
 807                         }
 808                         /* update offset */
 809                         out->cd_offset += AES_BLOCK_LEN;
 810                 }
 811 
 812                 /* Update pointer to next block of data to be processed. */
 813                 if (ctx->ac_remainder_len != 0) {
 814                         datap += need;
 815                         ctx->ac_remainder_len = 0;
 816                 } else {
 817                         datap += AES_BLOCK_LEN;
 818                 }
 819 
 820                 remainder = (size_t)&data[length] - (size_t)datap;
 821 
 822                 /* Incomplete last block. */
 823                 if (remainder > 0 && remainder < AES_BLOCK_LEN) {
 824                         bcopy(datap, ctx->ac_remainder, remainder);
 825                         ctx->ac_remainder_len = remainder;
 826                         ctx->ac_copy_to = datap;
 827                         goto out;

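The swap/increment/swap sequence above exists because ac_cb[1] holds the
low half of the counter block as a big-endian byte image; little-endian
machines convert it to host order, increment, and convert back before
masking.  In host byte order, with a right-aligned mask, the whole update
collapses to one expression (ctr_next is illustrative):

#include <stdint.h>

/*
 * Advance the counter field of a counter block: bits outside `mask'
 * (flags and nonce octets) are preserved; bits inside wrap modulo the
 * field width.  (Illustrative sketch in host byte order.)
 */
static uint64_t
ctr_next(uint64_t cb_lo, uint64_t mask)
{
        return ((cb_lo & ~mask) | ((cb_lo + 1) & mask));
}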

 851         size_t remainder, processed;
 852         uint8_t encoded_a[10]; /* max encoded auth data length is 10 octets */
 853         size_t encoded_a_len = 0;
 854 
 855         mac_buf = (uint8_t *)&(ctx->ac_ccm_mac_buf);
 856 
 857         /*
 858          * Format the 1st block for CBC-MAC and construct the
 859          * 1st counter block.
 860          *
 861          * aes_ctx->ac_iv is used for storing the counter block
 862          * mac_buf will store b0 at this time.
 863          */
 864         aes_ccm_format_initial_blocks(nonce, nonce_len,
 865             auth_data_len, mac_buf, ctx);
 866 
 867         /* The IV for CBC MAC for AES CCM mode is always zero */
 868         bzero(iv, AES_BLOCK_LEN);
 869         ivp = (uint8_t *)iv;
 870 
 871         if (IS_P2ALIGNED(ivp, sizeof (uint32_t)) &&
 872             IS_P2ALIGNED(mac_buf, sizeof (uint32_t))) {
 873                 /* LINTED: pointer alignment */
 874                 *(uint32_t *)&mac_buf[0] ^= *(uint32_t *)&ivp[0];
 875                 /* LINTED: pointer alignment */
 876                 *(uint32_t *)&mac_buf[4] ^= *(uint32_t *)&ivp[4];
 877                 /* LINTED: pointer alignment */
 878                 *(uint32_t *)&mac_buf[8] ^= *(uint32_t *)&ivp[8];
 879                 /* LINTED: pointer alignment */
 880                 *(uint32_t *)&mac_buf[12] ^= *(uint32_t *)&ivp[12];
 881         } else {
 882                 AES_XOR_BLOCK(ivp, mac_buf);
 883         }
 884 
 885         /* encrypt the nonce */
 886         aes_encrypt_block(ctx->ac_keysched, mac_buf, mac_buf);
 887 
 888         /* take care of the associated data, if any */
 889         if (auth_data_len == 0) {
 890                 return (0);
 891         }
 892 
 893         encode_adata_len(auth_data_len, encoded_a, &encoded_a_len);
 894 
 895         remainder = auth_data_len;
 896 
 897         /* 1st block: contains the encoded adata length plus some adata */
 898         authp = (uint8_t *)tmp;
 899         bzero(authp, AES_BLOCK_LEN);
 900         bcopy(encoded_a, authp, encoded_a_len);
 901         processed = AES_BLOCK_LEN - encoded_a_len;
 902         if (processed > auth_data_len) {
 903                 /* in case auth_data is very small */
 904                 processed = auth_data_len;
 905         }
 906         bcopy(auth_data, authp+encoded_a_len, processed);
 907         /* xor with previous buffer */
 908         if (IS_P2ALIGNED(authp, sizeof (uint32_t)) &&
 909             IS_P2ALIGNED(mac_buf, sizeof (uint32_t))) {
 910                 /* LINTED: pointer alignment */
 911                 *(uint32_t *)&mac_buf[0] ^= *(uint32_t *)&authp[0];
 912                 /* LINTED: pointer alignment */
 913                 *(uint32_t *)&mac_buf[4] ^= *(uint32_t *)&authp[4];
 914                 /* LINTED: pointer alignment */
 915                 *(uint32_t *)&mac_buf[8] ^= *(uint32_t *)&authp[8];
 916                 /* LINTED: pointer alignment */
 917                 *(uint32_t *)&mac_buf[12] ^= *(uint32_t *)&authp[12];
 918         } else {
 919                 AES_XOR_BLOCK(authp, mac_buf);
 920         }
 921         aes_encrypt_block(ctx->ac_keysched, mac_buf, mac_buf);
 922         remainder -= processed;
 923         if (remainder == 0) {
 924                 /* a small amount of associated data; it's all done now */
 925                 return (0);
 926         }
 927 
 928         do {
 929                 if (remainder < AES_BLOCK_LEN) {
 930                         /*
 931                          * There isn't a full block of data; pad the rest
 932                          * of the buffer with zeros.
 933                          */
 934                         bzero(authp, AES_BLOCK_LEN);
 935                         bcopy(&(auth_data[processed]), authp, remainder);
 936                         datap = (uint8_t *)authp;
 937                         remainder = 0;
 938                 } else {
 939                         datap = (uint8_t *)(&(auth_data[processed]));
 940                         processed += AES_BLOCK_LEN;
 941                         remainder -= AES_BLOCK_LEN;
 942                 }
 943 
 944                 /* xor with previous buffer */
 945                 if (IS_P2ALIGNED(datap, sizeof (uint32_t)) &&
 946                     IS_P2ALIGNED(mac_buf, sizeof (uint32_t))) {
 947                         /* LINTED: pointer alignment */
 948                         *(uint32_t *)&mac_buf[0] ^= *(uint32_t *)&datap[0];
 949                         /* LINTED: pointer alignment */
 950                         *(uint32_t *)&mac_buf[4] ^= *(uint32_t *)&datap[4];
 951                         /* LINTED: pointer alignment */
 952                         *(uint32_t *)&mac_buf[8] ^= *(uint32_t *)&datap[8];
 953                         /* LINTED: pointer alignment */
 954                         *(uint32_t *)&mac_buf[12] ^= *(uint32_t *)&datap[12];
 955                 } else {
 956                         AES_XOR_BLOCK(datap, mac_buf);
 957                 }
 958 
 959                 aes_encrypt_block(ctx->ac_keysched, mac_buf, mac_buf);
 960 
 961         } while (remainder > 0);
 962 
 963 /* EXPORT DELETE END */
 964         return (0);
 965 }
 966 


1017          * plus whatever data remains, if any,
1018          * should be the same as the number of bytes that was
1019          * passed in as an argument at init time.
1020          */
1021         if ((ctx->ac_ccm_processed_data_len + ctx->ac_remainder_len)
1022             != (ctx->ac_ccm_data_len)) {
1023                 return (CRYPTO_DATA_LEN_RANGE);
1024         }
1025 
1026         mac_buf = (uint8_t *)ctx->ac_ccm_mac_buf;
1027 
1028         if (ctx->ac_remainder_len > 0) {
1029 
1030                 macp = (uint8_t *)tmp;
1031                 bzero(macp, AES_BLOCK_LEN);
1032 
1033                 /* copy remainder to temporary buffer */
1034                 bcopy(ctx->ac_remainder, macp, ctx->ac_remainder_len);
1035 
1036                 /* calculate the CBC MAC */
1037                 if (IS_P2ALIGNED(macp, sizeof (uint32_t)) &&
1038                     IS_P2ALIGNED(mac_buf, sizeof (uint32_t))) {
1039                         /* LINTED: pointer alignment */
1040                         *(uint32_t *)&mac_buf[0] ^= *(uint32_t *)&macp[0];
1041                         /* LINTED: pointer alignment */
1042                         *(uint32_t *)&mac_buf[4] ^= *(uint32_t *)&macp[4];
1043                         /* LINTED: pointer alignment */
1044                         *(uint32_t *)&mac_buf[8] ^= *(uint32_t *)&macp[8];
1045                         /* LINTED: pointer alignment */
1046                         *(uint32_t *)&mac_buf[12] ^= *(uint32_t *)&macp[12];
1047                 } else {
1048                         AES_XOR_BLOCK(macp, mac_buf);
1049                 }
1050                 aes_encrypt_block(ctx->ac_keysched, mac_buf, mac_buf);
1051 
1052                 /* calculate the counter mode */
1053                 aes_encrypt_block(ctx->ac_keysched, (uint8_t *)ctx->ac_cb,
1054                     (uint8_t *)counter_block);
1055 
1056                 lastp = (uint8_t *)counter_block;
1057 
1058                 /* copy remainder to temporary buffer */


1130                 }
1131         }
1132         out->cd_offset += ctx->ac_remainder_len + ctx->ac_ccm_mac_len;
1133         ctx->ac_remainder_len = 0;
1134 
1135 /* EXPORT DELETE END */
1136 
1137         return (0);
1138 }
1139 
1140 int
1141 aes_ccm_validate_args(CK_AES_CCM_PARAMS *ccm_param, boolean_t is_encrypt_init)
1142 {
1143 
1144 /* EXPORT DELETE START */
1145         size_t macSize, nonceSize;
1146         uint8_t q;
1147         uint64_t maxValue;
1148 
1149         /*
1150          * Check the length of the MAC.  The only valid
1151          * lengths for the MAC are: 4, 6, 8, 10, 12, 14, and 16.
1152          */
1153         macSize = ccm_param->ulMACSize;
1154         if ((macSize < 4) || (macSize > 16) || ((macSize % 2) != 0)) {
1155                 return (CRYPTO_MECHANISM_PARAM_INVALID);
1156         }
1157 
1158         /* Check the nonce length.  Valid lengths are 7, 8, 9, 10, 11, 12, 13 */
1159         nonceSize = ccm_param->ulNonceSize;
1160         if ((nonceSize < 7) || (nonceSize > 13)) {
1161                 return (CRYPTO_MECHANISM_PARAM_INVALID);
1162         }
1163 

1164         q = (uint8_t)((15 - nonceSize) & 0xFF);
1165 
1166 
1167         /*
1168          * For decryption, make sure the size of the ciphertext is
1169          * at least as large as the MAC length.
1170          */
1171         if ((!is_encrypt_init) && (ccm_param->ulDataSize < macSize)) {
1172                 return (CRYPTO_MECHANISM_PARAM_INVALID);
1173         }
1174 
1175         /*
1176          * Check to make sure the length of the payload is within the
1177          * range of values allowed by q
1178          */
1179         if (q < 8) {
1180                 maxValue = 1ULL << (q * 8);
1181         } else {
1182                 maxValue = ULONG_MAX;
1183         }
1184 
1185         if (ccm_param->ulDataSize > maxValue) {
1186                 return (CRYPTO_MECHANISM_PARAM_INVALID);
1187         }
1188 
1189 /* EXPORT DELETE END */
1190         return (0);
1191 }
1192 
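A worked example of the q check above: q = 15 - nonceSize is the width in
octets of the CCM length field, so a q-octet field can encode payloads of
at most 2^(8q) - 1 bytes.  1ULL << (q * 8) is one more than that, which
appears to be the off-by-one flagged as bug 6699938 at the top of this
page.  A small illustrative table generator:

#include <stdio.h>

int
main(void)
{
        int nonce_len;

        for (nonce_len = 7; nonce_len <= 13; nonce_len++) {
                int q = 15 - nonce_len;

                printf("nonce %2d octets -> q = %d -> max payload "
                    "2^%d - 1 bytes\n", nonce_len, q, q * 8);
        }
        return (0);
}
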
1193 /*
1194  * Format the first block used in CBC-MAC (B0) and the initial counter
1195  * block based on the formatting and counter generation functions
1196  * specified in RFC 3610 and NIST publication 800-38C, appendix A
1197  *
1198  * b0 is the first block used in CBC-MAC
1199  * cb0 is the first counter block
1200  *
1201  * It's assumed that the arguments b0 and cb0 are preallocated AES blocks
1202  *
1203  */
1204 static void
1205 aes_ccm_format_initial_blocks(uchar_t *nonce, ulong_t nonceSize,
1206     ulong_t authDataSize, uint8_t *b0, aes_ctx_t *aes_ctx)
1207 {
1208 /* EXPORT DELETE START */
1209         uint64_t payloadSize;
1210         uint8_t t, q, have_adata = 0;
1211         size_t limit;
1212         int i, j, k;
1213         uint64_t mask = 0;
1214         uint8_t *cb;
1215 #ifdef _LITTLE_ENDIAN
1216         uint8_t *p8;
1217 #endif  /* _LITTLE_ENDIAN */
1218 
1219         q = (uint8_t)((15 - nonceSize) & 0xFF);
1220         t = (uint8_t)((aes_ctx->ac_ccm_mac_len) & 0xFF);
1221 
1222         /* Construct the first octet of b0 */
1223         if (authDataSize > 0) {
1224                 have_adata = 1;
1225         }
1226         b0[0] = (have_adata << 6) | (((t - 2)  / 2) << 3) | (q - 1);
1227 
1228         /* copy the nonce value into b0 */
1229         bcopy(nonce, &(b0[1]), nonceSize);
1230 
1231         /* store the length of the payload into b0 */
1232         bzero(&(b0[1+nonceSize]), q);
1233 
1234         payloadSize = aes_ctx->ac_ccm_data_len;
1235         limit = 8 < q ? 8 : q;
1236 
1237         for (i = 0, j = 0, k = 15; i < limit; i++, j += 8, k--) {
1238                 b0[k] = (uint8_t)((payloadSize >> j) & 0xFF);
1239         }
1240 
1241         /* format the counter block */
1242 


1255                 mask |= (1ULL << q);
1256         }
1257 
1258 #ifdef _LITTLE_ENDIAN
1259         p8 = (uint8_t *)&mask;
1260         mask = (((uint64_t)p8[0] << 56) |
1261             ((uint64_t)p8[1] << 48) |
1262             ((uint64_t)p8[2] << 40) |
1263             ((uint64_t)p8[3] << 32) |
1264             ((uint64_t)p8[4] << 24) |
1265             ((uint64_t)p8[5] << 16) |
1266             ((uint64_t)p8[6] << 8) |
1267             (uint64_t)p8[7]);
1268 #endif
1269         aes_ctx->ac_counter_mask = mask;
1270 
1271         /*
1272          * During calculation, we start with counter block 1, so we
1273          * set it up right here.
1274          * We can just set the last byte to the value 1, because
1275          * even with the biggest nonce of 13, the last byte of the
1276          * counter block is still used for the counter value.
1277          */
1278         cb[15] = 0x01;
1279 
1280 /* EXPORT DELETE END */
1281 
1282 }
1283 
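The first octet of B0 assembled above packs three fields, per NIST SP
800-38C: the Adata flag in bit 6, (t-2)/2 in bits 3..5, and q-1 in bits
0..2.  Stand-alone (b0_flags is an illustrative name):

#include <stdint.h>

/* Flags octet of B0: 0 | Adata | (t-2)/2 | q-1.  (Illustrative.) */
static uint8_t
b0_flags(int have_adata, uint8_t t, uint8_t q)
{
        return ((uint8_t)((have_adata ? 1 : 0) << 6) |
            (uint8_t)(((t - 2) / 2) << 3) | (uint8_t)(q - 1));
}

/* Example: t = 16, q = 2, with adata: 0x40 | 0x38 | 0x01 = 0x79. */
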
1284 /*
1285  * Encode the length of the associated data as
1286  * specified in RFC 3610 and NIST publication 800-38C, appendix A
1287  */
1288 static void
1289 encode_adata_len(ulong_t auth_data_len, uint8_t *encoded, size_t *encoded_len)
1290 {
1291 
1292 /* EXPORT DELETE START */
1293 
1294         if (auth_data_len < ((1ULL<<16) - (1ULL<<8))) {
1295                 /* 0 < a < (2^16-2^8) */

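The branch above is the first of RFC 3610's three cases for encoding the
associated-data length l(a), which is why encoded_a above is sized at 10
octets.  All three cases stand-alone (encode_alen is an illustrative
reimplementation, not this file's encode_adata_len):

#include <stddef.h>
#include <stdint.h>

static void
encode_alen(uint64_t a, uint8_t *out, size_t *outlen)
{
        if (a < ((1ULL << 16) - (1ULL << 8))) {
                /* 0 < a < 2^16 - 2^8: two octets, big-endian */
                out[0] = (a >> 8) & 0xFF;
                out[1] = a & 0xFF;
                *outlen = 2;
        } else if (a <= 0xFFFFFFFFULL) {
                /* 2^16 - 2^8 <= a < 2^32: 0xFF 0xFE plus four octets */
                out[0] = 0xFF;
                out[1] = 0xFE;
                out[2] = (a >> 24) & 0xFF;
                out[3] = (a >> 16) & 0xFF;
                out[4] = (a >> 8) & 0xFF;
                out[5] = a & 0xFF;
                *outlen = 6;
        } else {
                /* a >= 2^32: 0xFF 0xFF plus eight octets */
                int i;

                out[0] = 0xFF;
                out[1] = 0xFF;
                for (i = 0; i < 8; i++)
                        out[2 + i] = (a >> (56 - 8 * i)) & 0xFF;
                *outlen = 10;
        }
}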

1508                     ((uint64_t)p[5] << 16) |
1509                     ((uint64_t)p[6] << 8) |
1510                     (uint64_t)p[7]);
1511 #endif
1512                 counter++;
1513 #ifdef _LITTLE_ENDIAN
1514                 counter = (((uint64_t)p[0] << 56) |
1515                     ((uint64_t)p[1] << 48) |
1516                     ((uint64_t)p[2] << 40) |
1517                     ((uint64_t)p[3] << 32) |
1518                     ((uint64_t)p[4] << 24) |
1519                     ((uint64_t)p[5] << 16) |
1520                     ((uint64_t)p[6] << 8) |
1521                     (uint64_t)p[7]);
1522 #endif
1523                 counter &= ctx->ac_counter_mask;
1524                 ctx->ac_cb[1] =
1525                     (ctx->ac_cb[1] & ~(ctx->ac_counter_mask)) | counter;
1526 
1527                 /* XOR with the ciphertext */
1528                 if (IS_P2ALIGNED(blockp, sizeof (uint32_t)) &&
1529                     IS_P2ALIGNED(cbp, sizeof (uint32_t))) {
1530                         /* LINTED: pointer alignment */
1531                         *(uint32_t *)&blockp[0] ^= *(uint32_t *)&cbp[0];
1532                         /* LINTED: pointer alignment */
1533                         *(uint32_t *)&blockp[4] ^= *(uint32_t *)&cbp[4];
1534                         /* LINTED: pointer alignment */
1535                         *(uint32_t *)&blockp[8] ^= *(uint32_t *)&cbp[8];
1536                         /* LINTED: pointer alignment */
1537                         *(uint32_t *)&blockp[12] ^= *(uint32_t *)&cbp[12];
1538                 } else {
1539                         AES_XOR_BLOCK(cbp, blockp);
1540                 }
1541 
1542                 /* Copy the plaintext to the "holding buffer" */
1543                 resultp = (uint8_t *)ctx->ac_ccm_pt_buf +
1544                     ctx->ac_ccm_processed_data_len;
1545                 if (IS_P2ALIGNED(blockp, sizeof (uint32_t)) &&
1546                     IS_P2ALIGNED(resultp, sizeof (uint32_t))) {
1547                         /* LINTED: pointer alignment */
1548                         *(uint32_t *)&resultp[0] = *(uint32_t *)blockp;
1549                         /* LINTED: pointer alignment */
1550                         *(uint32_t *)&resultp[4] = *(uint32_t *)&blockp[4];
1551                         /* LINTED: pointer alignment */
1552                         *(uint32_t *)&resultp[8] = *(uint32_t *)&blockp[8];
1553                         /* LINTED: pointer alignment */
1554                         *(uint32_t *)&resultp[12] = *(uint32_t *)&blockp[12];
1555                 } else {
1556                         AES_COPY_BLOCK(blockp, resultp);
1557                 }
1558 
1559                 ctx->ac_ccm_processed_data_len += AES_BLOCK_LEN;
1560 
1561                 ctx->ac_lastp = blockp;
1562 
1563                 /* Update pointer to next block of data to be processed. */
1564                 if (ctx->ac_remainder_len != 0) {
1565                         datap += need;
1566                         ctx->ac_remainder_len = 0;

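Unlike the encrypt path, the CCM decrypt path above writes each decrypted
block into a holding buffer (ac_ccm_pt_buf) rather than the caller's
output; plaintext is released only after the MAC computed over the whole
buffer matches the received one (the bcmp further below).  The shape of
that pattern, simplified (release_if_valid is illustrative):

#include <string.h>

/* Expose the held plaintext only if the computed MAC matches. */
static int
release_if_valid(const unsigned char *pt, size_t pt_len,
    const unsigned char *mac_in, const unsigned char *mac_calc,
    size_t mac_len, unsigned char *out)
{
        if (memcmp(mac_in, mac_calc, mac_len) != 0)
                return (-1);            /* reject; release nothing */
        memcpy(out, pt, pt_len);
        return (0);
}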

1612         pt_len = ctx->ac_ccm_data_len;
1613 
1614         /* Make sure output buffer can fit all of the plaintext */
1615         if (out->cd_length < pt_len) {
1616                 return (CRYPTO_ARGUMENTS_BAD);
1617         }
1618 
1619         pt = ctx->ac_ccm_pt_buf;
1620         mac_remain = ctx->ac_ccm_processed_data_len;
1621         mac_buf = (uint8_t *)ctx->ac_ccm_mac_buf;
1622 
1623         macp = (uint8_t *)tmp;
1624 
1625         while (mac_remain > 0) {
1626 
1627                 if (mac_remain < AES_BLOCK_LEN) {
1628                         bzero(tmp, AES_BLOCK_LEN);
1629                         bcopy(pt, tmp, mac_remain);
1630                         mac_remain = 0;
1631                 } else {
1632                         if (IS_P2ALIGNED(pt, sizeof (uint32_t)) &&
1633                             IS_P2ALIGNED(macp, sizeof (uint32_t))) {
1634                                 /* LINTED: pointer alignment */
1635                                 *(uint32_t *)&macp[0] = *(uint32_t *)pt;
1636                                 /* LINTED: pointer alignment */
1637                                 *(uint32_t *)&macp[4] = *(uint32_t *)&pt[4];
1638                                 /* LINTED: pointer alignment */
1639                                 *(uint32_t *)&macp[8] = *(uint32_t *)&pt[8];
1640                                 /* LINTED: pointer alignment */
1641                                 *(uint32_t *)&macp[12] = *(uint32_t *)&pt[12];
1642                         } else {
1643                                 AES_COPY_BLOCK(pt, macp);
1644                         }
1645                         mac_remain -= AES_BLOCK_LEN;
1646                         pt += AES_BLOCK_LEN;
1647                 }
1648 
1649                 /* calculate the CBC MAC */
1650                 if (IS_P2ALIGNED(macp, sizeof (uint32_t)) &&
1651                     IS_P2ALIGNED(mac_buf, sizeof (uint32_t))) {
1652                         /* LINTED: pointer alignment */
1653                         *(uint32_t *)&mac_buf[0] ^= *(uint32_t *)&macp[0];
1654                         /* LINTED: pointer alignment */
1655                         *(uint32_t *)&mac_buf[4] ^= *(uint32_t *)&macp[4];
1656                         /* LINTED: pointer alignment */
1657                         *(uint32_t *)&mac_buf[8] ^= *(uint32_t *)&macp[8];
1658                         /* LINTED: pointer alignment */
1659                         *(uint32_t *)&mac_buf[12] ^= *(uint32_t *)&macp[12];
1660                 } else {
1661                         AES_XOR_BLOCK(macp, mac_buf);
1662                 }
1663                 aes_encrypt_block(ctx->ac_keysched, mac_buf, mac_buf);
1664         }
1665 
1666         /* Calculate the CCM MAC */
1667         ccm_mac_p = ccm_mac;
1668         calculate_ccm_mac(ctx, &ccm_mac_p);
1669 
1670         /* compare the input CCM MAC value with what we calculated */
1671         if (bcmp(ctx->ac_ccm_mac_input_buf, ccm_mac, ctx->ac_ccm_mac_len)) {


   6  * You may not use this file except in compliance with the License.
   7  *
   8  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
   9  * or http://www.opensolaris.org/os/licensing.
  10  * See the License for the specific language governing permissions
  11  * and limitations under the License.
  12  *
  13  * When distributing Covered Code, include this CDDL HEADER in each
  14  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
  15  * If applicable, add the following below this CDDL HEADER, with the
  16  * fields enclosed by brackets "[]" replaced with your own identifying
  17  * information: Portions Copyright [yyyy] [name of copyright owner]
  18  *
  19  * CDDL HEADER END
  20  */
  21 /*
  22  * Copyright 2008 Sun Microsystems, Inc.  All rights reserved.
  23  * Use is subject to license terms.
  24  */
  25 
  26 #pragma ident   "@(#)aes_cbc_crypt.c    1.10    08/06/13 SMI"
  27 
  28 
  29 #include <sys/sysmacros.h>
  30 #include <sys/systm.h>
  31 #include <sys/crypto/common.h>
  32 #include <sys/strsun.h>
  33 #include "aes_cbc_crypt.h"
  34 #include "aes_impl.h"
  35 #ifndef _KERNEL
  36 #include <limits.h>
  37 #include <strings.h>
  38 #endif  /* !_KERNEL */
  39 
  40 static int aes_ctr_ccm_mode_contiguous_blocks(aes_ctx_t *, char *, size_t,
  41     crypto_data_t *);
  42 static void
  43 encode_adata_len(ulong_t auth_data_len, uint8_t *encoded, size_t *encoded_len);
  44 static void
  45 aes_ccm_format_initial_blocks(uchar_t *nonce, ulong_t nonceSize,
  46     ulong_t authDataSize, uint8_t *b0, aes_ctx_t *aes_ctx);
  47 static int
  48 aes_ccm_decrypt_contiguous_blocks(aes_ctx_t *ctx, char *data, size_t length,
  49     crypto_data_t *out);
  50 
  51 /*
  52  * Initialize by setting iov_or_mp to point to the current iovec or mp,
  53  * and by setting current_offset to an offset within the current iovec or mp.
  54  */
  55 static void
  56 aes_init_ptrs(crypto_data_t *out, void **iov_or_mp, offset_t *current_offset)
  57 {
  58         offset_t offset;
  59 
  60         switch (out->cd_format) {
  61         case CRYPTO_DATA_RAW:
  62                 *current_offset = out->cd_offset;
  63                 break;
  64 
  65         case CRYPTO_DATA_UIO: {
  66                 uio_t *uiop = out->cd_uio;
  67                 uintptr_t vec_idx;
  68 
  69                 offset = out->cd_offset;
  70                 for (vec_idx = 0; vec_idx < uiop->uio_iovcnt &&
  71                     offset >= uiop->uio_iov[vec_idx].iov_len;
  72                     offset -= uiop->uio_iov[vec_idx++].iov_len)
  73                         ;


 236                                 tmp[0] = *(uint32_t *)blockp;
 237                                 /* LINTED: pointer alignment */
 238                                 tmp[1] = *(uint32_t *)&blockp[4];
 239                                 /* LINTED: pointer alignment */
 240                                 tmp[2] = *(uint32_t *)&blockp[8];
 241                                 /* LINTED: pointer alignment */
 242                                 tmp[3] = *(uint32_t *)&blockp[12];
 243                         } else {
 244                                 uint8_t *tmp8 = (uint8_t *)tmp;
 245 
 246                                 AES_COPY_BLOCK(blockp, tmp8);
 247                         }
 248                         blockp = (uint8_t *)tmp;
 249                 }
 250 
 251                 if (ctx->ac_flags & AES_CBC_MODE) {
 252                         /*
 253                          * XOR the previous cipher block or IV with the
 254                          * current clear block. Check for alignment.
 255                          */
 256                         if (IS_P2ALIGNED2(blockp, lastp, sizeof (uint32_t))) {
 257                                 /* LINTED: pointer alignment */
 258                                 *(uint32_t *)&blockp[0] ^=
 259                                 /* LINTED: pointer alignment */
 260                                     *(uint32_t *)&lastp[0];
 261                                 /* LINTED: pointer alignment */
 262                                 *(uint32_t *)&blockp[4] ^=
 263                                 /* LINTED: pointer alignment */
 264                                     *(uint32_t *)&lastp[4];
 265                                 /* LINTED: pointer alignment */
 266                                 *(uint32_t *)&blockp[8] ^=
 267                                 /* LINTED: pointer alignment */
 268                                     *(uint32_t *)&lastp[8];
 269                                 /* LINTED: pointer alignment */
 270                                 *(uint32_t *)&blockp[12] ^=
 271                                 /* LINTED: pointer alignment */
 272                                     *(uint32_t *)&lastp[12];
 273                         } else {
 274                                 AES_XOR_BLOCK(lastp, blockp);
 275                         }
 276                 }
 277 
 278                 if (out == NULL) {
 279                         aes_encrypt_block(ctx->ac_keysched, blockp, blockp);
 280 
 281                         ctx->ac_lastp = blockp;
 282                         lastp = blockp;
 283 
 284                         if (ctx->ac_remainder_len > 0) {
 285                                 bcopy(blockp, ctx->ac_copy_to,
 286                                     ctx->ac_remainder_len);
 287                                 bcopy(blockp + ctx->ac_remainder_len, datap,
 288                                     need);
 289                         }
 290                 } else {
 291                         aes_encrypt_block(ctx->ac_keysched, blockp, lastp);
 292                         aes_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
 293                             &out_data_1_len, &out_data_2, AES_BLOCK_LEN);
 294 
 295                         /* copy block to where it belongs */
 296                         if ((out_data_1_len == AES_BLOCK_LEN) &&
 297                             (IS_P2ALIGNED2(lastp, out_data_1,
 298                             sizeof (uint32_t)))) {
 299                                 /* LINTED: pointer alignment */
 300                                 uint32_t *d = (uint32_t *)out_data_1;
 301                                 /* LINTED: pointer alignment */
 302                                 d[0] = *(uint32_t *)lastp;
 303                                 /* LINTED: pointer alignment */
 304                                 d[1] = *(uint32_t *)&lastp[4];
 305                                 /* LINTED: pointer alignment */
 306                                 d[2] = *(uint32_t *)&lastp[8];
 307                                 /* LINTED: pointer alignment */
 308                                 d[3] = *(uint32_t *)&lastp[12];
 309                         } else {
 310                                 bcopy(lastp, out_data_1, out_data_1_len);
 311                         }
 312                         if (out_data_2 != NULL) {
 313                                 bcopy(lastp + out_data_1_len, out_data_2,
 314                                     AES_BLOCK_LEN - out_data_1_len);
 315                         }
 316 
 317                         /* update offset */
 318                         out->cd_offset += AES_BLOCK_LEN;
 319                 }
 320 
 321                 /* Update pointer to next block of data to be processed. */
 322                 if (ctx->ac_remainder_len != 0) {
 323                         datap += need;
 324                         ctx->ac_remainder_len = 0;
 325                 } else {
 326                         datap += AES_BLOCK_LEN;
 327                 }
 328 
 329                 remainder = (size_t)&data[length] - (size_t)datap;
 330 
 331                 /* Incomplete last block. */
 332                 if (remainder > 0 && remainder < AES_BLOCK_LEN) {
 333                         bcopy(datap, ctx->ac_remainder, remainder);
 334                         ctx->ac_remainder_len = remainder;
 335                         ctx->ac_copy_to = datap;
 336                         goto out;

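This revision replaces the paired IS_P2ALIGNED() tests with
IS_P2ALIGNED2(), which checks both pointers at once, and adds a word-wise
copy when a full, aligned block is being written out.  Assuming the macro
follows the usual sysmacros.h pattern, it reduces to OR-ing the two
addresses before masking (MY_P2ALIGNED2 is an illustrative
reconstruction, not the actual definition):

#include <stdint.h>

/*
 * Both a and b are align-byte aligned iff the OR of their addresses has
 * no low bits set; align must be a power of two.  (Illustrative.)
 */
#define MY_P2ALIGNED2(a, b, align) \
        ((((uintptr_t)(a) | (uintptr_t)(b)) & ((uintptr_t)(align) - 1)) == 0)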

 461                                 /* LINTED: pointer alignment */
 462                                 tmp8 = (uint8_t *)OTHER((uint64_t *)lastp, ctx);
 463 
 464                                 AES_COPY_BLOCK(blockp, tmp8);
 465                         }
 466                 }
 467 
 468                 if (out != NULL) {
 469                         aes_decrypt_block(ctx->ac_keysched, blockp,
 470                             (uint8_t *)tmp);
 471                         blockp = (uint8_t *)tmp;
 472                 } else {
 473                         aes_decrypt_block(ctx->ac_keysched, blockp, blockp);
 474                 }
 475 
 476                 if (ctx->ac_flags & AES_CBC_MODE) {
 477                         /*
 478                          * XOR the previous cipher block or IV with the
 479                          * currently decrypted block.  Check for alignment.
 480                          */
 481                         if (IS_P2ALIGNED2(blockp, lastp, sizeof (uint32_t))) {
 482                                 /* LINTED: pointer alignment */
 483                                 *(uint32_t *)blockp ^= *(uint32_t *)lastp;
 484                                 /* LINTED: pointer alignment */
 485                                 *(uint32_t *)&blockp[4] ^=
 486                                 /* LINTED: pointer alignment */
 487                                     *(uint32_t *)&lastp[4];
 488                                 /* LINTED: pointer alignment */
 489                                 *(uint32_t *)&blockp[8] ^=
 490                                 /* LINTED: pointer alignment */
 491                                     *(uint32_t *)&lastp[8];
 492                                 /* LINTED: pointer alignment */
 493                                 *(uint32_t *)&blockp[12] ^=
 494                                 /* LINTED: pointer alignment */
 495                                     *(uint32_t *)&lastp[12];
 496                         } else {
 497                                 AES_XOR_BLOCK(lastp, blockp);
 498                         }
 499 
 500                         /* LINTED: pointer alignment */
 501                         lastp = (uint8_t *)OTHER((uint64_t *)lastp, ctx);
 502                 }
 503 
 504                 if (out != NULL) {
 505                         aes_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
 506                             &out_data_1_len, &out_data_2, AES_BLOCK_LEN);
 507 
 508                         /* copy temporary block to where it belongs */
 509                         if ((out_data_1_len == AES_BLOCK_LEN) &&
 510                             (IS_P2ALIGNED(out_data_1, sizeof (uint32_t)))) {
 511                                 /* LINTED: pointer alignment */
 512                                 uint32_t *d = (uint32_t *)out_data_1;
 513                                 d[0] = tmp[0];
 514                                 d[1] = tmp[1];
 515                                 d[2] = tmp[2];
 516                                 d[3] = tmp[3];
 517                         } else {
 518                                 bcopy(&tmp, out_data_1, out_data_1_len);
 519                         }
 520                         if (out_data_2 != NULL) {
 521                                 bcopy((uint8_t *)&tmp + out_data_1_len,
 522                                     out_data_2, AES_BLOCK_LEN - out_data_1_len);
 523                         }
 524 
 525                         /* update offset */
 526                         out->cd_offset += AES_BLOCK_LEN;
 527 
 528                 } else if (ctx->ac_remainder_len > 0) {
 529                         /* copy temporary block to where it belongs */
 530                         bcopy(blockp, ctx->ac_copy_to, ctx->ac_remainder_len);
 531                         bcopy(blockp + ctx->ac_remainder_len, datap, need);
 532                 }
 533 
 534                 /* Update pointer to next block of data to be processed. */
 535                 if (ctx->ac_remainder_len != 0) {
 536                         datap += need;
 537                         ctx->ac_remainder_len = 0;
 538                 } else {
 539                         datap += AES_BLOCK_LEN;


 701                                 tmp[1] = *(uint32_t *)&blockp[4];
 702                                 /* LINTED: pointer alignment */
 703                                 tmp[2] = *(uint32_t *)&blockp[8];
 704                                 /* LINTED: pointer alignment */
 705                                 tmp[3] = *(uint32_t *)&blockp[12];
 706                         } else {
 707                                 uint8_t *tmp8 = (uint8_t *)tmp;
 708 
 709                                 AES_COPY_BLOCK(blockp, tmp8);
 710                         }
 711                         blockp = (uint8_t *)tmp;
 712                 }
 713 
 714                 if (ctx->ac_flags & AES_CCM_MODE) {
 715                         /*
 716                          * do CBC MAC
 717                          *
 718                          * XOR the previous cipher block with the current
 719                          * clear block; mac_buf always contains the previous cipher block.
 720                          */
 721                         if (IS_P2ALIGNED2(blockp, mac_buf, sizeof (uint32_t))) {
 722                                 /* LINTED: pointer alignment */
 723                                 *(uint32_t *)&mac_buf[0] ^=
 724                                 /* LINTED: pointer alignment */
 725                                     *(uint32_t *)&blockp[0];
 726                                 /* LINTED: pointer alignment */
 727                                 *(uint32_t *)&mac_buf[4] ^=
 728                                 /* LINTED: pointer alignment */
 729                                     *(uint32_t *)&blockp[4];
 730                                 /* LINTED: pointer alignment */
 731                                 *(uint32_t *)&mac_buf[8] ^=
 732                                 /* LINTED: pointer alignment */
 733                                     *(uint32_t *)&blockp[8];
 734                                 /* LINTED: pointer alignment */
 735                                 *(uint32_t *)&mac_buf[12] ^=
 736                                 /* LINTED: pointer alignment */
 737                                     *(uint32_t *)&blockp[12];
 738                         } else {
 739                                 AES_XOR_BLOCK(blockp, mac_buf);
 740                         }
 741                         aes_encrypt_block(ctx->ac_keysched, mac_buf, mac_buf);


 766 #endif
 767                 counter++;
 768 #ifdef _LITTLE_ENDIAN
 769                 counter = (((uint64_t)p[0] << 56) |
 770                     ((uint64_t)p[1] << 48) |
 771                     ((uint64_t)p[2] << 40) |
 772                     ((uint64_t)p[3] << 32) |
 773                     ((uint64_t)p[4] << 24) |
 774                     ((uint64_t)p[5] << 16) |
 775                     ((uint64_t)p[6] << 8) |
 776                     (uint64_t)p[7]);
 777 #endif
 778                 counter &= ctx->ac_counter_mask;
 779                 ctx->ac_cb[1] =
 780                     (ctx->ac_cb[1] & ~(ctx->ac_counter_mask)) | counter;
 781 
 782                 /*
 783                  * XOR the previous cipher block or IV with the
 784                  * current clear block. Check for alignment.
 785                  */
 786                 if (IS_P2ALIGNED2(blockp, lastp, sizeof (uint32_t))) {
 787                         /* LINTED: pointer alignment */
 788                         *(uint32_t *)&blockp[0] ^=
 789                         /* LINTED: pointer alignment */
 790                             *(uint32_t *)&lastp[0];
 791                         /* LINTED: pointer alignment */
 792                         *(uint32_t *)&blockp[4] ^=
 793                         /* LINTED: pointer alignment */
 794                             *(uint32_t *)&lastp[4];
 795                         /* LINTED: pointer alignment */
 796                         *(uint32_t *)&blockp[8] ^=
 797                         /* LINTED: pointer alignment */
 798                             *(uint32_t *)&lastp[8];
 799                         /* LINTED: pointer alignment */
 800                         *(uint32_t *)&blockp[12] ^=
 801                         /* LINTED: pointer alignment */
 802                             *(uint32_t *)&lastp[12];
 803                 } else {
 804                         AES_XOR_BLOCK(lastp, blockp);
 805                 }
 806 
 807                 ctx->ac_lastp = blockp;
 808                 lastp = blockp;
 809                 if (ctx->ac_flags & AES_CCM_MODE) {
 810                         ctx->ac_ccm_processed_data_len += AES_BLOCK_LEN;
 811                 }
 812 
 813                 if (out == NULL) {
 814                         if (ctx->ac_remainder_len > 0) {
 815                                 bcopy(blockp, ctx->ac_copy_to,
 816                                     ctx->ac_remainder_len);
 817                                 bcopy(blockp + ctx->ac_remainder_len, datap,
 818                                     need);
 819                         }
 820                 } else {
 821                         aes_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
 822                             &out_data_1_len, &out_data_2, AES_BLOCK_LEN);
 823 
 824                         /* copy block to where it belongs */
 825                         if ((out_data_1_len == AES_BLOCK_LEN) &&
 826                             (IS_P2ALIGNED2(lastp, out_data_1,
 827                             sizeof (uint32_t)))) {
 828                                 /* LINTED: pointer alignment */
 829                                 uint32_t *d = (uint32_t *)out_data_1;
 830                                 /* LINTED: pointer alignment */
 831                                 d[0] = *(uint32_t *)lastp;
 832                                 /* LINTED: pointer alignment */
 833                                 d[1] = *(uint32_t *)&lastp[4];
 834                                 /* LINTED: pointer alignment */
 835                                 d[2] = *(uint32_t *)&lastp[8];
 836                                 /* LINTED: pointer alignment */
 837                                 d[3] = *(uint32_t *)&lastp[12];
 838                         } else {
 839                                 bcopy(lastp, out_data_1, out_data_1_len);
 840                         }
 841                         if (out_data_2 != NULL) {
 842                                 bcopy(lastp + out_data_1_len, out_data_2,
 843                                     AES_BLOCK_LEN - out_data_1_len);
 844                         }
 845 
 846                         /* update offset */
 847                         out->cd_offset += AES_BLOCK_LEN;
 848                 }
 849 
 850                 /* Update pointer to next block of data to be processed. */
 851                 if (ctx->ac_remainder_len != 0) {
 852                         datap += need;
 853                         ctx->ac_remainder_len = 0;
 854                 } else {
 855                         datap += AES_BLOCK_LEN;
 856                 }
 857 
 858                 remainder = (size_t)&data[length] - (size_t)datap;
 859 
 860                 /* Incomplete last block. */
 861                 if (remainder > 0 && remainder < AES_BLOCK_LEN) {
 862                         bcopy(datap, ctx->ac_remainder, remainder);
 863                         ctx->ac_remainder_len = remainder;
 864                         ctx->ac_copy_to = datap;
 865                         goto out;


 889         size_t remainder, processed;
 890         uint8_t encoded_a[10]; /* max encoded auth data length is 10 octets */
 891         size_t encoded_a_len = 0;
 892 
 893         mac_buf = (uint8_t *)&(ctx->ac_ccm_mac_buf);
 894 
 895         /*
 896          * Format the 1st block for CBC-MAC and construct the
 897          * 1st counter block.
 898          *
 899          * aes_ctx->ac_iv is used for storing the counter block
 900          * mac_buf will store b0 at this time.
 901          */
 902         aes_ccm_format_initial_blocks(nonce, nonce_len,
 903             auth_data_len, mac_buf, ctx);
 904 
 905         /* The IV for CBC MAC for AES CCM mode is always zero */
 906         bzero(iv, AES_BLOCK_LEN);
 907         ivp = (uint8_t *)iv;
 908 
 909         if (IS_P2ALIGNED2(ivp, mac_buf, sizeof (uint32_t))) {
 910                 /* LINTED: pointer alignment */
 911                 *(uint32_t *)&mac_buf[0] ^= *(uint32_t *)&ivp[0];
 912                 /* LINTED: pointer alignment */
 913                 *(uint32_t *)&mac_buf[4] ^= *(uint32_t *)&ivp[4];
 914                 /* LINTED: pointer alignment */
 915                 *(uint32_t *)&mac_buf[8] ^= *(uint32_t *)&ivp[8];
 916                 /* LINTED: pointer alignment */
 917                 *(uint32_t *)&mac_buf[12] ^= *(uint32_t *)&ivp[12];
 918         } else {
 919                 AES_XOR_BLOCK(ivp, mac_buf);
 920         }
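        /*
         * Sketch (an assumption -- the real macro lives in a header not
         * shown here): IS_P2ALIGNED2() presumably tests both pointers for
         * alignment in a single mask operation, along the lines of
         *
         *   #define IS_P2ALIGNED2(a, b, align) \
         *       ((((uintptr_t)(a) | (uintptr_t)(b)) & ((align) - 1)) == 0)
         *
         * When both pointers are 32-bit aligned, the four word-sized XORs
         * above are safe; otherwise AES_XOR_BLOCK() performs the same
         * 16-byte XOR without the alignment requirement.
         */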
 921 
 922         /* encrypt b0, the first block of the CBC-MAC */
 923         aes_encrypt_block(ctx->ac_keysched, mac_buf, mac_buf);
 924 
 925         /* take care of the associated data, if any */
 926         if (auth_data_len == 0) {
 927                 return (0);
 928         }
 929 
 930         encode_adata_len(auth_data_len, encoded_a, &encoded_a_len);
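        /*
         * Worked example (per RFC 3610): auth_data_len == 300 is below
         * 2^16 - 2^8, so it is encoded big-endian in two octets:
         * encoded_a == { 0x01, 0x2C }, encoded_a_len == 2.  Larger values
         * take a 0xFF 0xFE (32-bit) or 0xFF 0xFF (64-bit) prefix, for a
         * maximum of 10 octets -- hence the size of encoded_a[] above.
         */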
 931 
 932         remainder = auth_data_len;
 933 
 934         /* 1st block: encoded auth data length, then some auth data */
 935         authp = (uint8_t *)tmp;
 936         bzero(authp, AES_BLOCK_LEN);
 937         bcopy(encoded_a, authp, encoded_a_len);
 938         processed = AES_BLOCK_LEN - encoded_a_len;
 939         if (processed > auth_data_len) {
 940                 /* in case auth_data is very small */
 941                 processed = auth_data_len;
 942         }
 943         bcopy(auth_data, authp+encoded_a_len, processed);
 944         /* XOR into the running CBC-MAC value */
 945         if (IS_P2ALIGNED2(authp, mac_buf, sizeof (uint32_t))) {
 946                 /* LINTED: pointer alignment */
 947                 *(uint32_t *)&mac_buf[0] ^= *(uint32_t *)&authp[0];
 948                 /* LINTED: pointer alignment */
 949                 *(uint32_t *)&mac_buf[4] ^= *(uint32_t *)&authp[4];
 950                 /* LINTED: pointer alignment */
 951                 *(uint32_t *)&mac_buf[8] ^= *(uint32_t *)&authp[8];
 952                 /* LINTED: pointer alignment */
 953                 *(uint32_t *)&mac_buf[12] ^= *(uint32_t *)&authp[12];
 954         } else {
 955                 AES_XOR_BLOCK(authp, mac_buf);
 956         }
 957         aes_encrypt_block(ctx->ac_keysched, mac_buf, mac_buf);
 958         remainder -= processed;
 959         if (remainder == 0) {
 960                 /* all of the associated data fit in the 1st block; done */
 961                 return (0);
 962         }
 963 
 964         do {
 965                 if (remainder < AES_BLOCK_LEN) {
 966                         /*
 967                          * Less than a full block of data remains; zero-pad
 968                          * the rest of the buffer.
 969                          */
 970                         bzero(authp, AES_BLOCK_LEN);
 971                         bcopy(&(auth_data[processed]), authp, remainder);
 972                         datap = (uint8_t *)authp;
 973                         remainder = 0;
 974                 } else {
 975                         datap = (uint8_t *)(&(auth_data[processed]));
 976                         processed += AES_BLOCK_LEN;
 977                         remainder -= AES_BLOCK_LEN;
 978                 }
 979 
 980                 /* XOR into the running CBC-MAC value */
 981                 if (IS_P2ALIGNED2(datap, mac_buf, sizeof (uint32_t))) {
 982                         /* LINTED: pointer alignment */
 983                         *(uint32_t *)&mac_buf[0] ^= *(uint32_t *)&datap[0];
 984                         /* LINTED: pointer alignment */
 985                         *(uint32_t *)&mac_buf[4] ^= *(uint32_t *)&datap[4];
 986                         /* LINTED: pointer alignment */
 987                         *(uint32_t *)&mac_buf[8] ^= *(uint32_t *)&datap[8];
 988                         /* LINTED: pointer alignment */
 989                         *(uint32_t *)&mac_buf[12] ^= *(uint32_t *)&datap[12];
 990                 } else {
 991                         AES_XOR_BLOCK(datap, mac_buf);
 992                 }
 993 
 994                 aes_encrypt_block(ctx->ac_keysched, mac_buf, mac_buf);
 995 
 996         } while (remainder > 0);
 997 
 998 /* EXPORT DELETE END */
 999         return (0);
1000 }
1001 


1052          * plus whatever data remains, if any,
1053          * must equal the total number of bytes that was
1054          * passed in as an argument at init time.
1055          */
1056         if ((ctx->ac_ccm_processed_data_len + ctx->ac_remainder_len)
1057             != (ctx->ac_ccm_data_len)) {
1058                 return (CRYPTO_DATA_LEN_RANGE);
1059         }
1060 
1061         mac_buf = (uint8_t *)ctx->ac_ccm_mac_buf;
1062 
1063         if (ctx->ac_remainder_len > 0) {
1064 
1065                 macp = (uint8_t *)tmp;
1066                 bzero(macp, AES_BLOCK_LEN);
1067 
1068                 /* copy remainder to temporary buffer */
1069                 bcopy(ctx->ac_remainder, macp, ctx->ac_remainder_len);
1070 
1071                 /* calculate the CBC MAC */
1072                 if (IS_P2ALIGNED2(macp, mac_buf, sizeof (uint32_t))) {
1073                         /* LINTED: pointer alignment */
1074                         *(uint32_t *)&mac_buf[0] ^= *(uint32_t *)&macp[0];
1075                         /* LINTED: pointer alignment */
1076                         *(uint32_t *)&mac_buf[4] ^= *(uint32_t *)&macp[4];
1077                         /* LINTED: pointer alignment */
1078                         *(uint32_t *)&mac_buf[8] ^= *(uint32_t *)&macp[8];
1079                         /* LINTED: pointer alignment */
1080                         *(uint32_t *)&mac_buf[12] ^= *(uint32_t *)&macp[12];
1081                 } else {
1082                         AES_XOR_BLOCK(macp, mac_buf);
1083                 }
1084                 aes_encrypt_block(ctx->ac_keysched, mac_buf, mac_buf);
1085 
1086                 /* generate the counter-mode keystream block */
1087                 aes_encrypt_block(ctx->ac_keysched, (uint8_t *)ctx->ac_cb,
1088                     (uint8_t *)counter_block);
1089 
1090                 lastp = (uint8_t *)counter_block;
1091 
1092                 /* copy remainder to temporary buffer */


1164                 }
1165         }
1166         out->cd_offset += ctx->ac_remainder_len + ctx->ac_ccm_mac_len;
1167         ctx->ac_remainder_len = 0;
1168 
1169 /* EXPORT DELETE END */
1170 
1171         return (0);
1172 }
1173 
1174 int
1175 aes_ccm_validate_args(CK_AES_CCM_PARAMS *ccm_param, boolean_t is_encrypt_init)
1176 {
1177 
1178 /* EXPORT DELETE START */
1179         size_t macSize, nonceSize;
1180         uint8_t q;
1181         uint64_t maxValue;
1182 
1183         /*
1184          * Check the byte length of the MAC.  The only valid
1185          * lengths for the MAC are: 4, 6, 8, 10, 12, 14, 16
1186          */
1187         macSize = ccm_param->ulMACSize;
1188         if ((macSize < 4) || (macSize > 16) || ((macSize % 2) != 0)) {
1189                 return (CRYPTO_MECHANISM_PARAM_INVALID);
1190         }
1191 
1192         /* Check the nonce length.  Valid values are 7, 8, 9, 10, 11, 12, 13 */
1193         nonceSize = ccm_param->ulNonceSize;
1194         if ((nonceSize < 7) || (nonceSize > 13)) {
1195                 return (CRYPTO_MECHANISM_PARAM_INVALID);
1196         }
1197 
1198         /* q is the size, in octets, of the payload-length field */
1199         q = (uint8_t)((15 - nonceSize) & 0xFF);
1200 
1201 
1202         /*
1203          * For decryption, the ciphertext must be at least as long
1204          * as the MAC.
1205          */
1206         if ((!is_encrypt_init) && (ccm_param->ulDataSize < macSize)) {
1207                 return (CRYPTO_MECHANISM_PARAM_INVALID);
1208         }
1209 
1210         /*
1211          * Make sure the payload length fits in the q-octet
1212          * length field.
1213          */
1214         if (q < 8) {
1215                 maxValue = (1ULL << (q * 8)) - 1;
1216         } else {
1217                 maxValue = ULONG_MAX;
1218         }
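        /*
         * Worked example: a 13-byte nonce gives q == 2, so the payload
         * length must fit in two octets and maxValue == 2^16 - 1 == 65535.
         * A 7-byte nonce gives q == 8, and the limit is instead capped
         * at ULONG_MAX.
         */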
1219 
1220         if (ccm_param->ulDataSize > maxValue) {
1221                 return (CRYPTO_MECHANISM_PARAM_INVALID);
1222         }
1223 
1224 /* EXPORT DELETE END */
1225         return (0);
1226 }
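/*
 * Minimal usage sketch (illustrative only, not part of this file; the
 * AES_CCM_EXAMPLE guard is hypothetical).  Only the three
 * CK_AES_CCM_PARAMS fields checked above are set; real callers also
 * fill in the nonce and auth data fields.
 */
#ifdef AES_CCM_EXAMPLE
static int
example_ccm_param_check(void)
{
        CK_AES_CCM_PARAMS p;

        bzero(&p, sizeof (p));
        p.ulMACSize = 12;       /* even, within [4, 16] */
        p.ulNonceSize = 13;     /* within [7, 13]; q = 15 - 13 = 2 */
        p.ulDataSize = 1024;    /* must fit in q octets: <= 65535 */

        return (aes_ccm_validate_args(&p, B_TRUE));
}
#endif  /* AES_CCM_EXAMPLE */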
1227 
1228 /*
1229  * Format the first block used in CBC-MAC (B0) and the initial counter
1230  * block based on formatting functions and counter generation functions
1231  * specified in RFC 3610 and NIST publication 800-38C, appendix A
1232  *
1233  * b0 is the first block used in CBC-MAC
1234  * cb0 is the first counter block
1235  *
1236  * It's assumed that the arguments b0 and cb0 are preallocated AES blocks
1237  *
1238  */
1239 static void
1240 aes_ccm_format_initial_blocks(uchar_t *nonce, ulong_t nonceSize,
1241     ulong_t authDataSize, uint8_t *b0, aes_ctx_t *aes_ctx)
1242 {
1243 /* EXPORT DELETE START */
1244         uint64_t payloadSize;
1245         uint8_t t, q, have_adata = 0;
1246         size_t limit;
1247         int i, j, k;
1248         uint64_t mask = 0;
1249         uint8_t *cb;
1250 #ifdef _LITTLE_ENDIAN
1251         uint8_t *p8;
1252 #endif  /* _LITTLE_ENDIAN */
1253 
1254         q = (uint8_t)((15 - nonceSize) & 0xFF);
1255         t = (uint8_t)((aes_ctx->ac_ccm_mac_len) & 0xFF);
1256 
1257         /* Construct the first octet of b0 */
1258         if (authDataSize > 0) {
1259                 have_adata = 1;
1260         }
1261         b0[0] = (have_adata << 6) | (((t - 2)  / 2) << 3) | (q - 1);
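        /*
         * Worked example: with a 16-byte MAC (t == 16), a 13-byte nonce
         * (q == 2), and associated data present, the flags octet is
         * (1 << 6) | (((16 - 2) / 2) << 3) | (2 - 1) == 0x79.
         */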
1262 
1263         /* copy the nonce value into b0 */
1264         bcopy(nonce, &(b0[1]), nonceSize);
1265 
1266         /* store the length of the payload into b0 */
1267         bzero(&(b0[1+nonceSize]), q);
1268 
1269         payloadSize = aes_ctx->ac_ccm_data_len;
1270         limit = 8 < q ? 8 : q;
1271 
1272         for (i = 0, j = 0, k = 15; i < limit; i++, j += 8, k--) {
1273                 b0[k] = (uint8_t)((payloadSize >> j) & 0xFF);
1274         }
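        /*
         * Worked example: payloadSize == 0x012345 with q == 3 stores
         * b0[13] == 0x01, b0[14] == 0x23, b0[15] == 0x45; the length
         * occupies the last q octets of b0, big-endian.
         */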
1275 
1276         /* format the counter block */
1277 


1290                 mask |= (1ULL << q);
1291         }
1292 
1293 #ifdef _LITTLE_ENDIAN
1294         p8 = (uint8_t *)&mask;
1295         mask = (((uint64_t)p8[0] << 56) |
1296             ((uint64_t)p8[1] << 48) |
1297             ((uint64_t)p8[2] << 40) |
1298             ((uint64_t)p8[3] << 32) |
1299             ((uint64_t)p8[4] << 24) |
1300             ((uint64_t)p8[5] << 16) |
1301             ((uint64_t)p8[6] << 8) |
1302             (uint64_t)p8[7]);
1303 #endif
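        /*
         * Illustrative note: the #ifdef _LITTLE_ENDIAN block above is a
         * plain 64-bit byte reversal, equivalent to a hypothetical helper
         * such as
         *
         *   static uint64_t
         *   example_bswap64(uint64_t v)
         *   {
         *           uint64_t r = 0;
         *           int i;
         *
         *           for (i = 0; i < 8; i++)
         *                   r = (r << 8) | ((v >> (i * 8)) & 0xFF);
         *           return (r);
         *   }
         *
         * so that the mask matches the counter block's big-endian byte
         * order regardless of host endianness.
         */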
1304         aes_ctx->ac_counter_mask = mask;
1305 
1306         /*
1307          * Processing starts with counter block 1, so set it up here.
1308          * Setting only the last byte to 1 is sufficient: even with
1309          * the largest nonce (13 bytes), the last byte of the counter
1310          * block still falls inside the counter field, so no nonce
1311          * octet is overwritten.
1312          */
1313         cb[15] = 0x01;
1314 
1315 /* EXPORT DELETE END */
1316 
1317 }
1318 
1319 /*
1320  * Encode the length of the associated data as
1321  * specified in RFC 3610 and NIST publication 800-38C, appendix A
1322  */
1323 static void
1324 encode_adata_len(ulong_t auth_data_len, uint8_t *encoded, size_t *encoded_len)
1325 {
1326 
1327 /* EXPORT DELETE START */
1328 
1329         if (auth_data_len < ((1ULL<<16) - (1ULL<<8))) {
1330                 /* 0 < a < (2^16-2^8) */


1543                     ((uint64_t)p[5] << 16) |
1544                     ((uint64_t)p[6] << 8) |
1545                     (uint64_t)p[7]);
1546 #endif
1547                 counter++;
1548 #ifdef _LITTLE_ENDIAN
1549                 counter = (((uint64_t)p[0] << 56) |
1550                     ((uint64_t)p[1] << 48) |
1551                     ((uint64_t)p[2] << 40) |
1552                     ((uint64_t)p[3] << 32) |
1553                     ((uint64_t)p[4] << 24) |
1554                     ((uint64_t)p[5] << 16) |
1555                     ((uint64_t)p[6] << 8) |
1556                     (uint64_t)p[7]);
1557 #endif
1558                 counter &= ctx->ac_counter_mask;
1559                 ctx->ac_cb[1] =
1560                     (ctx->ac_cb[1] & ~(ctx->ac_counter_mask)) | counter;
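                /*
                 * Illustrative example: with q == 2 the counter occupies
                 * only the last two octets of the counter block, so
                 * ac_counter_mask selects just those 16 bits (0xFFFF in
                 * the block's byte order); the increment above wraps
                 * within them while the flags and nonce octets of ac_cb
                 * are preserved by the ~mask merge.
                 */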
1561 
1562                 /* XOR the keystream with the ciphertext to recover plaintext */
1563                 if (IS_P2ALIGNED2(blockp, cbp, sizeof (uint32_t))) {
1564                         /* LINTED: pointer alignment */
1565                         *(uint32_t *)&blockp[0] ^= *(uint32_t *)&cbp[0];
1566                         /* LINTED: pointer alignment */
1567                         *(uint32_t *)&blockp[4] ^= *(uint32_t *)&cbp[4];
1568                         /* LINTED: pointer alignment */
1569                         *(uint32_t *)&blockp[8] ^= *(uint32_t *)&cbp[8];
1570                         /* LINTED: pointer alignment */
1571                         *(uint32_t *)&blockp[12] ^= *(uint32_t *)&cbp[12];
1572                 } else {
1573                         AES_XOR_BLOCK(cbp, blockp);
1574                 }
1575 
1576                 /* Copy the plaintext to the "holding buffer" */
1577                 resultp = (uint8_t *)ctx->ac_ccm_pt_buf +
1578                     ctx->ac_ccm_processed_data_len;
1579                 if (IS_P2ALIGNED2(blockp, resultp, sizeof (uint32_t))) {
1580                         /* LINTED: pointer alignment */
1581                         *(uint32_t *)&resultp[0] = *(uint32_t *)blockp;
1582                         /* LINTED: pointer alignment */
1583                         *(uint32_t *)&resultp[4] = *(uint32_t *)&blockp[4];
1584                         /* LINTED: pointer alignment */
1585                         *(uint32_t *)&resultp[8] = *(uint32_t *)&blockp[8];
1586                         /* LINTED: pointer alignment */
1587                         *(uint32_t *)&resultp[12] = *(uint32_t *)&blockp[12];
1588                 } else {
1589                         AES_COPY_BLOCK(blockp, resultp);
1590                 }
1591 
1592                 ctx->ac_ccm_processed_data_len += AES_BLOCK_LEN;
1593 
1594                 ctx->ac_lastp = blockp;
1595 
1596                 /* Update pointer to next block of data to be processed. */
1597                 if (ctx->ac_remainder_len != 0) {
1598                         datap += need;
1599                         ctx->ac_remainder_len = 0;


1645         pt_len = ctx->ac_ccm_data_len;
1646 
1647         /* Make sure output buffer can fit all of the plaintext */
1648         if (out->cd_length < pt_len) {
1649                 return (CRYPTO_ARGUMENTS_BAD);
1650         }
1651 
1652         pt = ctx->ac_ccm_pt_buf;
1653         mac_remain = ctx->ac_ccm_processed_data_len;
1654         mac_buf = (uint8_t *)ctx->ac_ccm_mac_buf;
1655 
1656         macp = (uint8_t *)tmp;
1657 
1658         while (mac_remain > 0) {
1659 
1660                 if (mac_remain < AES_BLOCK_LEN) {
1661                         bzero(tmp, AES_BLOCK_LEN);
1662                         bcopy(pt, tmp, mac_remain);
1663                         mac_remain = 0;
1664                 } else {
1665                         if (IS_P2ALIGNED2(pt, macp, sizeof (uint32_t))) {
1666                                 /* LINTED: pointer alignment */
1667                                 *(uint32_t *)&macp[0] = *(uint32_t *)pt;
1668                                 /* LINTED: pointer alignment */
1669                                 *(uint32_t *)&macp[4] = *(uint32_t *)&pt[4];
1670                                 /* LINTED: pointer alignment */
1671                                 *(uint32_t *)&macp[8] = *(uint32_t *)&pt[8];
1672                                 /* LINTED: pointer alignment */
1673                                 *(uint32_t *)&macp[12] = *(uint32_t *)&pt[12];
1674                         } else {
1675                                 AES_COPY_BLOCK(pt, macp);
1676                         }
1677                         mac_remain -= AES_BLOCK_LEN;
1678                         pt += AES_BLOCK_LEN;
1679                 }
1680 
1681                 /* calculate the CBC MAC */
1682                 if (IS_P2ALIGNED2(macp, mac_buf, sizeof (uint32_t))) {
1683                         /* LINTED: pointer alignment */
1684                         *(uint32_t *)&mac_buf[0] ^= *(uint32_t *)&macp[0];
1685                         /* LINTED: pointer alignment */
1686                         *(uint32_t *)&mac_buf[4] ^= *(uint32_t *)&macp[4];
1687                         /* LINTED: pointer alignment */
1688                         *(uint32_t *)&mac_buf[8] ^= *(uint32_t *)&macp[8];
1689                         /* LINTED: pointer alignment */
1690                         *(uint32_t *)&mac_buf[12] ^= *(uint32_t *)&macp[12];
1691                 } else {
1692                         AES_XOR_BLOCK(macp, mac_buf);
1693                 }
1694                 aes_encrypt_block(ctx->ac_keysched, mac_buf, mac_buf);
1695         }
1696 
1697         /* Calculate the CCM MAC */
1698         ccm_mac_p = ccm_mac;
1699         calculate_ccm_mac(ctx, &ccm_mac_p);
1700 
1701         /* compare the input CCM MAC value with what we calculated */
1702         if (bcmp(ctx->ac_ccm_mac_input_buf, ccm_mac, ctx->ac_ccm_mac_len)) {