5072963 Need an optimized AES implementation for amd64
6699938 CCM max payload computation is off by one

--- old/usr/src/common/crypto/aes/aes_cbc_crypt.c
+++ new/usr/src/common/crypto/aes/aes_cbc_crypt.c
[ 15 lines elided ]
  16   16   * fields enclosed by brackets "[]" replaced with your own identifying
  17   17   * information: Portions Copyright [yyyy] [name of copyright owner]
  18   18   *
  19   19   * CDDL HEADER END
  20   20   */
  21   21  /*
  22   22   * Copyright 2008 Sun Microsystems, Inc.  All rights reserved.
  23   23   * Use is subject to license terms.
  24   24   */
  25   25  
  26      -#pragma ident   "@(#)aes_cbc_crypt.c    1.9     08/05/09 SMI"
       26 +#pragma ident   "@(#)aes_cbc_crypt.c    1.10    08/06/13 SMI"
  27   27  
  28   28  
  29   29  #include <sys/sysmacros.h>
  30   30  #include <sys/systm.h>
  31   31  #include <sys/crypto/common.h>
  32   32  #include <sys/strsun.h>
  33   33  #include "aes_cbc_crypt.h"
  34   34  #include "aes_impl.h"
  35   35  #ifndef _KERNEL
  36   36  #include <limits.h>
[ 6 lines elided ]
  43   43  encode_adata_len(ulong_t auth_data_len, uint8_t *encoded, size_t *encoded_len);
  44   44  static void
  45   45  aes_ccm_format_initial_blocks(uchar_t *nonce, ulong_t nonceSize,
  46   46      ulong_t authDataSize, uint8_t *b0, aes_ctx_t *aes_ctx);
  47   47  static int
  48   48  aes_ccm_decrypt_contiguous_blocks(aes_ctx_t *ctx, char *data, size_t length,
  49   49      crypto_data_t *out);
  50   50  
  51   51  /*
  52   52   * Initialize by setting iov_or_mp to point to the current iovec or mp,
  53      - * and by setting current_offset to an offset within the current iovec or mp .
       53 + * and by setting current_offset to an offset within the current iovec or mp.
  54   54   */
  55   55  static void
  56   56  aes_init_ptrs(crypto_data_t *out, void **iov_or_mp, offset_t *current_offset)
  57   57  {
  58   58          offset_t offset;
  59   59  
  60   60          switch (out->cd_format) {
  61   61          case CRYPTO_DATA_RAW:
  62   62                  *current_offset = out->cd_offset;
  63   63                  break;
[ 182 lines elided ]
 246  246                                  AES_COPY_BLOCK(blockp, tmp8);
 247  247                          }
 248  248                          blockp = (uint8_t *)tmp;
 249  249                  }
 250  250  
 251  251                  if (ctx->ac_flags & AES_CBC_MODE) {
 252  252                          /*
 253  253                           * XOR the previous cipher block or IV with the
 254  254                           * current clear block. Check for alignment.
 255  255                           */
 256      -                        if (IS_P2ALIGNED(blockp, sizeof (uint32_t)) &&
 257      -                            IS_P2ALIGNED(lastp, sizeof (uint32_t))) {
      256 +                        if (IS_P2ALIGNED2(blockp, lastp, sizeof (uint32_t))) {
 258  257                                  /* LINTED: pointer alignment */
 259  258                                  *(uint32_t *)&blockp[0] ^=
 260  259                                  /* LINTED: pointer alignment */
 261  260                                      *(uint32_t *)&lastp[0];
 262  261                                  /* LINTED: pointer alignment */
 263  262                                  *(uint32_t *)&blockp[4] ^=
 264  263                                  /* LINTED: pointer alignment */
 265  264                                      *(uint32_t *)&lastp[4];
 266  265                                  /* LINTED: pointer alignment */
 267  266                                  *(uint32_t *)&blockp[8] ^=
[ 19 lines elided ]
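
The two IS_P2ALIGNED() tests here are collapsed into the new
IS_P2ALIGNED2() macro, which checks both pointers with a single mask
test. A minimal sketch of such a macro, assuming it uses the usual
power-of-two trick of OR-ing the two addresses (the actual definition
presumably sits alongside IS_P2ALIGNED in sys/sysmacros.h):

    /*
     * True when both v and w are aligned on the power-of-two boundary
     * a: a low-order bit set in either address survives the OR and
     * fails the mask test.
     */
    #define IS_P2ALIGNED2(v, w, a) \
            ((((uintptr_t)(v) | (uintptr_t)(w)) & ((uintptr_t)(a) - 1)) == 0)
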
 287  286                                      ctx->ac_remainder_len);
 288  287                                  bcopy(blockp + ctx->ac_remainder_len, datap,
 289  288                                      need);
 290  289                          }
 291  290                  } else {
 292  291                          aes_encrypt_block(ctx->ac_keysched, blockp, lastp);
 293  292                          aes_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
 294  293                              &out_data_1_len, &out_data_2, AES_BLOCK_LEN);
 295  294  
 296  295                          /* copy block to where it belongs */
 297      -                        bcopy(lastp, out_data_1, out_data_1_len);
      296 +                        if ((out_data_1_len == AES_BLOCK_LEN) &&
      297 +                            (IS_P2ALIGNED2(lastp, out_data_1,
      298 +                            sizeof (uint32_t)))) {
      299 +                                /* LINTED: pointer alignment */
      300 +                                uint32_t *d = (uint32_t *)out_data_1;
      301 +                                /* LINTED: pointer alignment */
      302 +                                d[0] = *(uint32_t *)lastp;
      303 +                                /* LINTED: pointer alignment */
      304 +                                d[1] = *(uint32_t *)&lastp[4];
      305 +                                /* LINTED: pointer alignment */
      306 +                                d[2] = *(uint32_t *)&lastp[8];
      307 +                                /* LINTED: pointer alignment */
      308 +                                d[3] = *(uint32_t *)&lastp[12];
      309 +                        } else {
      310 +                                bcopy(lastp, out_data_1, out_data_1_len);
      311 +                        }
 298  312                          if (out_data_2 != NULL) {
 299  313                                  bcopy(lastp + out_data_1_len, out_data_2,
 300  314                                      AES_BLOCK_LEN - out_data_1_len);
 301  315                          }
      316 +
 302  317                          /* update offset */
 303  318                          out->cd_offset += AES_BLOCK_LEN;
 304  319                  }
 305  320  
 306  321                  /* Update pointer to next block of data to be processed. */
 307  322                  if (ctx->ac_remainder_len != 0) {
 308  323                          datap += need;
 309  324                          ctx->ac_remainder_len = 0;
 310  325                  } else {
 311  326                          datap += AES_BLOCK_LEN;
[ 144 lines elided ]
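
The new output path above replaces an unconditional bcopy() with four
32-bit stores whenever a whole block lands in out_data_1 and both
pointers are word aligned, avoiding a function call per block in the
hot loop. A self-contained sketch of the same idea; the helper name
copy_block() is hypothetical:

    #include <strings.h>
    #include <sys/types.h>

    #define AES_BLOCK_LEN   16

    /* Copy one AES block; use word stores when alignment allows. */
    static void
    copy_block(const uint8_t *src, uint8_t *dst, size_t dst_len)
    {
            if (dst_len == AES_BLOCK_LEN &&
                (((uintptr_t)src | (uintptr_t)dst) &
                (sizeof (uint32_t) - 1)) == 0) {
                    const uint32_t *s = (const uint32_t *)src;
                    uint32_t *d = (uint32_t *)dst;
                    d[0] = s[0];
                    d[1] = s[1];
                    d[2] = s[2];
                    d[3] = s[3];
            } else {
                    /* Partial block or unaligned: fall back to bcopy. */
                    bcopy(src, dst, dst_len);
            }
    }
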
 456  471                          blockp = (uint8_t *)tmp;
 457  472                  } else {
 458  473                          aes_decrypt_block(ctx->ac_keysched, blockp, blockp);
 459  474                  }
 460  475  
 461  476                  if (ctx->ac_flags & AES_CBC_MODE) {
 462  477                          /*
 463  478                           * XOR the previous cipher block or IV with the
 464  479                           * currently decrypted block.  Check for alignment.
 465  480                           */
 466      -                        if (IS_P2ALIGNED(blockp, sizeof (uint32_t)) &&
 467      -                            IS_P2ALIGNED(lastp, sizeof (uint32_t))) {
      481 +                        if (IS_P2ALIGNED2(blockp, lastp, sizeof (uint32_t))) {
 468  482                                  /* LINTED: pointer alignment */
 469  483                                  *(uint32_t *)blockp ^= *(uint32_t *)lastp;
 470  484                                  /* LINTED: pointer alignment */
 471  485                                  *(uint32_t *)&blockp[4] ^=
 472  486                                  /* LINTED: pointer alignment */
 473  487                                      *(uint32_t *)&lastp[4];
 474  488                                  /* LINTED: pointer alignment */
 475  489                                  *(uint32_t *)&blockp[8] ^=
 476  490                                  /* LINTED: pointer alignment */
 477  491                                      *(uint32_t *)&lastp[8];
[ 7 lines elided ]
 485  499  
 486  500                          /* LINTED: pointer alignment */
 487  501                          lastp = (uint8_t *)OTHER((uint64_t *)lastp, ctx);
 488  502                  }
 489  503  
 490  504                  if (out != NULL) {
 491  505                          aes_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
 492  506                              &out_data_1_len, &out_data_2, AES_BLOCK_LEN);
 493  507  
 494  508                          /* copy temporary block to where it belongs */
 495      -                        bcopy(&tmp, out_data_1, out_data_1_len);
      509 +                        if ((out_data_1_len == AES_BLOCK_LEN) &&
      510 +                            (IS_P2ALIGNED(out_data_1, sizeof (uint32_t)))) {
      511 +                                /* LINTED: pointer alignment */
      512 +                                uint32_t *d = (uint32_t *)out_data_1;
      513 +                                d[0] = tmp[0];
      514 +                                d[1] = tmp[1];
      515 +                                d[2] = tmp[2];
      516 +                                d[3] = tmp[3];
      517 +                        } else {
      518 +                                bcopy(&tmp, out_data_1, out_data_1_len);
      519 +                        }
 496  520                          if (out_data_2 != NULL) {
 497  521                                  bcopy((uint8_t *)&tmp + out_data_1_len,
 498  522                                      out_data_2, AES_BLOCK_LEN - out_data_1_len);
 499  523                          }
 500  524  
 501  525                          /* update offset */
 502  526                          out->cd_offset += AES_BLOCK_LEN;
 503  527  
 504  528                  } else if (ctx->ac_remainder_len > 0) {
 505  529                          /* copy temporary block to where it belongs */
[ 181 lines elided ]
 687  711                          blockp = (uint8_t *)tmp;
 688  712                  }
 689  713  
 690  714                  if (ctx->ac_flags & AES_CCM_MODE) {
 691  715                          /*
 692  716                           * do CBC MAC
 693  717                           *
  694  718                           * XOR the previous cipher block with the current clear block.
  695  719                           * mac_buf always contains the previous cipher block.
 696  720                           */
 697      -                        if (IS_P2ALIGNED(blockp, sizeof (uint32_t)) &&
 698      -                            IS_P2ALIGNED(mac_buf, sizeof (uint32_t))) {
      721 +                        if (IS_P2ALIGNED2(blockp, mac_buf, sizeof (uint32_t))) {
 699  722                                  /* LINTED: pointer alignment */
 700  723                                  *(uint32_t *)&mac_buf[0] ^=
 701  724                                  /* LINTED: pointer alignment */
 702  725                                      *(uint32_t *)&blockp[0];
 703  726                                  /* LINTED: pointer alignment */
 704  727                                  *(uint32_t *)&mac_buf[4] ^=
 705  728                                  /* LINTED: pointer alignment */
 706  729                                      *(uint32_t *)&blockp[4];
 707  730                                  /* LINTED: pointer alignment */
 708  731                                  *(uint32_t *)&mac_buf[8] ^=
[ 44 lines elided ]
 753  776                      (uint64_t)p[7]);
 754  777  #endif
 755  778                  counter &= ctx->ac_counter_mask;
 756  779                  ctx->ac_cb[1] =
 757  780                      (ctx->ac_cb[1] & ~(ctx->ac_counter_mask)) | counter;
 758  781  
 759  782                  /*
 760  783                   * XOR the previous cipher block or IV with the
 761  784                   * current clear block. Check for alignment.
 762  785                   */
 763      -                if (IS_P2ALIGNED(blockp, sizeof (uint32_t)) &&
 764      -                    IS_P2ALIGNED(lastp, sizeof (uint32_t))) {
      786 +                if (IS_P2ALIGNED2(blockp, lastp, sizeof (uint32_t))) {
 765  787                          /* LINTED: pointer alignment */
 766  788                          *(uint32_t *)&blockp[0] ^=
 767  789                          /* LINTED: pointer alignment */
 768  790                              *(uint32_t *)&lastp[0];
 769  791                          /* LINTED: pointer alignment */
 770  792                          *(uint32_t *)&blockp[4] ^=
 771  793                          /* LINTED: pointer alignment */
 772  794                              *(uint32_t *)&lastp[4];
 773  795                          /* LINTED: pointer alignment */
 774  796                          *(uint32_t *)&blockp[8] ^=
[ 18 lines elided ]
 793  815                                  bcopy(blockp, ctx->ac_copy_to,
 794  816                                      ctx->ac_remainder_len);
 795  817                                  bcopy(blockp + ctx->ac_remainder_len, datap,
 796  818                                      need);
 797  819                          }
 798  820                  } else {
 799  821                          aes_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
 800  822                              &out_data_1_len, &out_data_2, AES_BLOCK_LEN);
 801  823  
 802  824                          /* copy block to where it belongs */
 803      -                        bcopy(lastp, out_data_1, out_data_1_len);
      825 +                        if ((out_data_1_len == AES_BLOCK_LEN) &&
      826 +                            (IS_P2ALIGNED2(lastp, out_data_1,
      827 +                            sizeof (uint32_t)))) {
      828 +                                /* LINTED: pointer alignment */
      829 +                                uint32_t *d = (uint32_t *)out_data_1;
      830 +                                /* LINTED: pointer alignment */
      831 +                                d[0] = *(uint32_t *)lastp;
      832 +                                /* LINTED: pointer alignment */
      833 +                                d[1] = *(uint32_t *)&lastp[4];
      834 +                                /* LINTED: pointer alignment */
      835 +                                d[2] = *(uint32_t *)&lastp[8];
      836 +                                /* LINTED: pointer alignment */
      837 +                                d[3] = *(uint32_t *)&lastp[12];
      838 +                        } else {
      839 +                                bcopy(lastp, out_data_1, out_data_1_len);
      840 +                        }
 804  841                          if (out_data_2 != NULL) {
 805  842                                  bcopy(lastp + out_data_1_len, out_data_2,
 806  843                                      AES_BLOCK_LEN - out_data_1_len);
 807  844                          }
      845 +
 808  846                          /* update offset */
 809  847                          out->cd_offset += AES_BLOCK_LEN;
 810  848                  }
 811  849  
 812  850                  /* Update pointer to next block of data to be processed. */
 813  851                  if (ctx->ac_remainder_len != 0) {
 814  852                          datap += need;
 815  853                          ctx->ac_remainder_len = 0;
 816  854                  } else {
 817  855                          datap += AES_BLOCK_LEN;
[ 43 lines elided ]
  861  899           * aes_ctx->ac_iv is used for storing the counter block;
  862  900           * mac_buf will store b0 at this time.
 863  901           */
 864  902          aes_ccm_format_initial_blocks(nonce, nonce_len,
 865  903              auth_data_len, mac_buf, ctx);
 866  904  
 867  905          /* The IV for CBC MAC for AES CCM mode is always zero */
 868  906          bzero(iv, AES_BLOCK_LEN);
 869  907          ivp = (uint8_t *)iv;
 870  908  
 871      -        if (IS_P2ALIGNED(ivp, sizeof (uint32_t)) &&
 872      -            IS_P2ALIGNED(mac_buf, sizeof (uint32_t))) {
      909 +        if (IS_P2ALIGNED2(ivp, mac_buf, sizeof (uint32_t))) {
 873  910                  /* LINTED: pointer alignment */
 874  911                  *(uint32_t *)&mac_buf[0] ^= *(uint32_t *)&ivp[0];
 875  912                  /* LINTED: pointer alignment */
 876  913                  *(uint32_t *)&mac_buf[4] ^= *(uint32_t *)&ivp[4];
 877  914                  /* LINTED: pointer alignment */
 878  915                  *(uint32_t *)&mac_buf[8] ^= *(uint32_t *)&ivp[8];
 879  916                  /* LINTED: pointer alignment */
 880  917                  *(uint32_t *)&mac_buf[12] ^= *(uint32_t *)&ivp[12];
 881  918          } else {
 882  919                  AES_XOR_BLOCK(ivp, mac_buf);
[ 15 lines elided ]
 898  935          authp = (uint8_t *)tmp;
 899  936          bzero(authp, AES_BLOCK_LEN);
 900  937          bcopy(encoded_a, authp, encoded_a_len);
 901  938          processed = AES_BLOCK_LEN - encoded_a_len;
 902  939          if (processed > auth_data_len) {
 903  940                  /* in case auth_data is very small */
 904  941                  processed = auth_data_len;
 905  942          }
 906  943          bcopy(auth_data, authp+encoded_a_len, processed);
 907  944          /* xor with previous buffer */
 908      -        if (IS_P2ALIGNED(authp, sizeof (uint32_t)) &&
 909      -            IS_P2ALIGNED(mac_buf, sizeof (uint32_t))) {
      945 +        if (IS_P2ALIGNED2(authp, mac_buf, sizeof (uint32_t))) {
 910  946                  /* LINTED: pointer alignment */
 911  947                  *(uint32_t *)&mac_buf[0] ^= *(uint32_t *)&authp[0];
 912  948                  /* LINTED: pointer alignment */
 913  949                  *(uint32_t *)&mac_buf[4] ^= *(uint32_t *)&authp[4];
 914  950                  /* LINTED: pointer alignment */
 915  951                  *(uint32_t *)&mac_buf[8] ^= *(uint32_t *)&authp[8];
 916  952                  /* LINTED: pointer alignment */
 917  953                  *(uint32_t *)&mac_buf[12] ^= *(uint32_t *)&authp[12];
 918  954          } else {
 919  955                  AES_XOR_BLOCK(authp, mac_buf);
[ 15 lines elided ]
 935  971                          bcopy(&(auth_data[processed]), authp, remainder);
 936  972                          datap = (uint8_t *)authp;
 937  973                          remainder = 0;
 938  974                  } else {
 939  975                          datap = (uint8_t *)(&(auth_data[processed]));
 940  976                          processed += AES_BLOCK_LEN;
 941  977                          remainder -= AES_BLOCK_LEN;
 942  978                  }
 943  979  
 944  980                  /* xor with previous buffer */
 945      -                if (IS_P2ALIGNED(datap, sizeof (uint32_t)) &&
 946      -                    IS_P2ALIGNED(mac_buf, sizeof (uint32_t))) {
      981 +                if (IS_P2ALIGNED2(datap, mac_buf, sizeof (uint32_t))) {
 947  982                          /* LINTED: pointer alignment */
 948  983                          *(uint32_t *)&mac_buf[0] ^= *(uint32_t *)&datap[0];
 949  984                          /* LINTED: pointer alignment */
 950  985                          *(uint32_t *)&mac_buf[4] ^= *(uint32_t *)&datap[4];
 951  986                          /* LINTED: pointer alignment */
 952  987                          *(uint32_t *)&mac_buf[8] ^= *(uint32_t *)&datap[8];
 953  988                          /* LINTED: pointer alignment */
 954  989                          *(uint32_t *)&mac_buf[12] ^= *(uint32_t *)&datap[12];
 955  990                  } else {
 956  991                          AES_XOR_BLOCK(datap, mac_buf);
[ 70 lines elided ]
1027 1062  
1028 1063          if (ctx->ac_remainder_len > 0) {
1029 1064  
1030 1065                  macp = (uint8_t *)tmp;
1031 1066                  bzero(macp, AES_BLOCK_LEN);
1032 1067  
1033 1068                  /* copy remainder to temporary buffer */
1034 1069                  bcopy(ctx->ac_remainder, macp, ctx->ac_remainder_len);
1035 1070  
1036 1071                  /* calculate the CBC MAC */
1037      -                if (IS_P2ALIGNED(macp, sizeof (uint32_t)) &&
1038      -                    IS_P2ALIGNED(mac_buf, sizeof (uint32_t))) {
     1072 +                if (IS_P2ALIGNED2(macp, mac_buf, sizeof (uint32_t))) {
1039 1073                          /* LINTED: pointer alignment */
1040 1074                          *(uint32_t *)&mac_buf[0] ^= *(uint32_t *)&macp[0];
1041 1075                          /* LINTED: pointer alignment */
1042 1076                          *(uint32_t *)&mac_buf[4] ^= *(uint32_t *)&macp[4];
1043 1077                          /* LINTED: pointer alignment */
1044 1078                          *(uint32_t *)&mac_buf[8] ^= *(uint32_t *)&macp[8];
1045 1079                          /* LINTED: pointer alignment */
1046 1080                          *(uint32_t *)&mac_buf[12] ^= *(uint32_t *)&macp[12];
1047 1081                  } else {
1048 1082                          AES_XOR_BLOCK(macp, mac_buf);
[ 91 lines elided ]
1140 1174  int
1141 1175  aes_ccm_validate_args(CK_AES_CCM_PARAMS *ccm_param, boolean_t is_encrypt_init)
1142 1176  {
1143 1177  
1144 1178  /* EXPORT DELETE START */
1145 1179          size_t macSize, nonceSize;
1146 1180          uint8_t q;
1147 1181          uint64_t maxValue;
1148 1182  
1149 1183          /*
1150      -         * Check the length of the MAC.  Only valid length
     1184 +         * Check the byte length of the MAC.  The only valid
1151 1185           * lengths for the MAC are: 4, 6, 8, 10, 12, 14, 16
1152 1186           */
1153 1187          macSize = ccm_param->ulMACSize;
1154 1188          if ((macSize < 4) || (macSize > 16) || ((macSize % 2) != 0)) {
1155 1189                  return (CRYPTO_MECHANISM_PARAM_INVALID);
1156 1190          }
1157 1191  
1158      -        /* Check the nonce value.  Valid values are 7, 8, 9, 10, 11, 12, 13 */
     1192 +        /* Check the nonce length.  Valid values are 7, 8, 9, 10, 11, 12, 13 */
1159 1193          nonceSize = ccm_param->ulNonceSize;
1160 1194          if ((nonceSize < 7) || (nonceSize > 13)) {
1161 1195                  return (CRYPTO_MECHANISM_PARAM_INVALID);
1162 1196          }
1163 1197  
     1198 +        /* q is the length of the field storing the length, in bytes */
1164 1199          q = (uint8_t)((15 - nonceSize) & 0xFF);
1165 1200  
1166 1201  
1167 1202          /*
 1168 1203           * For decryption, make sure the ciphertext is at least as
 1169 1204           * long as the MAC
1170 1205           */
1171 1206          if ((!is_encrypt_init) && (ccm_param->ulDataSize < macSize)) {
1172 1207                  return (CRYPTO_MECHANISM_PARAM_INVALID);
1173 1208          }
1174 1209  
1175 1210          /*
1176 1211           * Check to make sure the length of the payload is within the
1177 1212           * range of values allowed by q
1178 1213           */
1179 1214          if (q < 8) {
1180      -                maxValue = 1ULL << (q * 8);
     1215 +                maxValue = (1ULL << (q * 8)) - 1;
1181 1216          } else {
1182 1217                  maxValue = ULONG_MAX;
1183 1218          }
1184 1219  
1185 1220          if (ccm_param->ulDataSize > maxValue) {
1186 1221                  return (CRYPTO_MECHANISM_PARAM_INVALID);
1187 1222          }
1188 1223  
1189 1224  /* EXPORT DELETE END */
1190 1225          return (0);
1191 1226  }
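
The maxValue change above is the substance of the 6699938 fix: the
payload length must fit in a q-octet field, so the largest legal size
is 2^(8q) - 1, not 2^(8q) as the old bound allowed. A quick worked
check of the corrected computation:

    /*
     * With the largest nonce (13 bytes), q = 15 - 13 = 2, so the
     * length field is two octets wide.
     */
    uint8_t q = 2;
    uint64_t maxValue = (1ULL << (q * 8)) - 1;      /* 65535 */

    /* The old bound, 1ULL << (q * 8), wrongly admitted 65536. */
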
1192 1227  
1193 1228  /*
1194 1229   * Format the first block used in CBC-MAC (B0) and the initial counter
1195      - * block based on formating functions and counter generation functions
     1230 + * block based on formatting functions and counter generation functions
1196 1231   * specified in RFC 3610 and NIST publication 800-38C, appendix A
1197 1232   *
1198 1233   * b0 is the first block used in CBC-MAC
1199 1234   * cb0 is the first counter block
1200 1235   *
1201 1236   * It's assumed that the arguments b0 and cb0 are preallocated AES blocks
1202 1237   *
1203 1238   */
1204 1239  static void
1205 1240  aes_ccm_format_initial_blocks(uchar_t *nonce, ulong_t nonceSize,
[ 6 lines elided ]
1212 1247          int i, j, k;
1213 1248          uint64_t mask = 0;
1214 1249          uint8_t *cb;
1215 1250  #ifdef _LITTLE_ENDIAN
1216 1251          uint8_t *p8;
1217 1252  #endif  /* _LITTLE_ENDIAN */
1218 1253  
1219 1254          q = (uint8_t)((15 - nonceSize) & 0xFF);
1220 1255          t = (uint8_t)((aes_ctx->ac_ccm_mac_len) & 0xFF);
1221 1256  
1222      -        /* Construct the first octect of b0 */
     1257 +        /* Construct the first octet of b0 */
1223 1258          if (authDataSize > 0) {
1224 1259                  have_adata = 1;
1225 1260          }
1226 1261          b0[0] = (have_adata << 6) | (((t - 2)  / 2) << 3) | (q - 1);
1227 1262  
1228 1263          /* copy the nonce value into b0 */
1229 1264          bcopy(nonce, &(b0[1]), nonceSize);
1230 1265  
1231 1266          /* store the length of the payload into b0 */
1232 1267          bzero(&(b0[1+nonceSize]), q);
[ 32 lines elided ]
1265 1300              ((uint64_t)p8[5] << 16) |
1266 1301              ((uint64_t)p8[6] << 8) |
1267 1302              (uint64_t)p8[7]);
1268 1303  #endif
1269 1304          aes_ctx->ac_counter_mask = mask;
1270 1305  
1271 1306          /*
 1272 1307           * During the calculation we start with counter block 1, so we
 1273 1308           * set it up right here.
1274 1309           * We can just set the last byte to have the value 1, because
1275      -         * even with the bigest nonce of 13, the last byte of the
     1310 +         * even with the biggest nonce of 13, the last byte of the
1276 1311           * counter block will be used for the counter value.
1277 1312           */
1278 1313          cb[15] = 0x01;
1279 1314  
1280 1315  /* EXPORT DELETE END */
1281 1316  
1282 1317  }
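
For reference, the first octet of b0 constructed above follows the
flags layout of RFC 3610 section 2.2 (and NIST SP 800-38C appendix A);
a restatement of the same expression with the bit fields spelled out:

    /*
     * B0 flags octet:
     *   bit  7     reserved, always 0
     *   bit  6     Adata: 1 if associated data is present
     *   bits 5-3   (t - 2) / 2, where t is the MAC length in bytes
     *   bits 2-0   q - 1, where q = 15 - nonce length
     */
    b0[0] = (have_adata << 6) | (((t - 2) / 2) << 3) | (q - 1);
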
1283 1318  
1284 1319  /*
1285 1320   * Encode the length of the associated data as
[ 232 lines elided ]
1518 1553                      ((uint64_t)p[4] << 24) |
1519 1554                      ((uint64_t)p[5] << 16) |
1520 1555                      ((uint64_t)p[6] << 8) |
1521 1556                      (uint64_t)p[7]);
1522 1557  #endif
1523 1558                  counter &= ctx->ac_counter_mask;
1524 1559                  ctx->ac_cb[1] =
1525 1560                      (ctx->ac_cb[1] & ~(ctx->ac_counter_mask)) | counter;
1526 1561  
1527 1562                  /* XOR with the ciphertext */
1528      -                if (IS_P2ALIGNED(blockp, sizeof (uint32_t)) &&
1529      -                    IS_P2ALIGNED(cbp, sizeof (uint32_t))) {
     1563 +                if (IS_P2ALIGNED2(blockp, cbp, sizeof (uint32_t))) {
1530 1564                          /* LINTED: pointer alignment */
1531 1565                          *(uint32_t *)&blockp[0] ^= *(uint32_t *)&cbp[0];
1532 1566                          /* LINTED: pointer alignment */
1533 1567                          *(uint32_t *)&blockp[4] ^= *(uint32_t *)&cbp[4];
1534 1568                          /* LINTED: pointer alignment */
1535 1569                          *(uint32_t *)&blockp[8] ^= *(uint32_t *)&cbp[8];
1536 1570                          /* LINTED: pointer alignment */
1537 1571                          *(uint32_t *)&blockp[12] ^= *(uint32_t *)&cbp[12];
1538 1572                  } else {
1539 1573                          AES_XOR_BLOCK(cbp, blockp);
1540 1574                  }
1541 1575  
1542 1576                  /* Copy the plaintext to the "holding buffer" */
1543 1577                  resultp = (uint8_t *)ctx->ac_ccm_pt_buf +
1544 1578                      ctx->ac_ccm_processed_data_len;
1545      -                if (IS_P2ALIGNED(blockp, sizeof (uint32_t)) &&
1546      -                    IS_P2ALIGNED(resultp, sizeof (uint32_t))) {
     1579 +                if (IS_P2ALIGNED2(blockp, resultp, sizeof (uint32_t))) {
1547 1580                          /* LINTED: pointer alignment */
1548 1581                          *(uint32_t *)&resultp[0] = *(uint32_t *)blockp;
1549 1582                          /* LINTED: pointer alignment */
1550 1583                          *(uint32_t *)&resultp[4] = *(uint32_t *)&blockp[4];
1551 1584                          /* LINTED: pointer alignment */
1552 1585                          *(uint32_t *)&resultp[8] = *(uint32_t *)&blockp[8];
1553 1586                          /* LINTED: pointer alignment */
1554 1587                          *(uint32_t *)&resultp[12] = *(uint32_t *)&blockp[12];
1555 1588                  } else {
1556 1589                          AES_COPY_BLOCK(blockp, resultp);
[ 65 lines elided ]
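
The "holding buffer" above is deliberate: CCM forbids releasing
plaintext before the MAC has been verified, so decrypted blocks
accumulate in ac_ccm_pt_buf rather than being written to the caller's
output. A sketch of the eventual release step; the names computed_mac,
received_mac, out_buf, mac_len and pt_len are all hypothetical:

    /* Hand plaintext to the caller only once the tag checks out. */
    if (bcmp(computed_mac, received_mac, mac_len) == 0) {
            bcopy(ctx->ac_ccm_pt_buf, out_buf, pt_len);
            return (CRYPTO_SUCCESS);
    }
    /* On mismatch, scrub the held plaintext before failing. */
    bzero(ctx->ac_ccm_pt_buf, pt_len);
    return (CRYPTO_INVALID_MAC);
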
1622 1655  
1623 1656          macp = (uint8_t *)tmp;
1624 1657  
1625 1658          while (mac_remain > 0) {
1626 1659  
1627 1660                  if (mac_remain < AES_BLOCK_LEN) {
1628 1661                          bzero(tmp, AES_BLOCK_LEN);
1629 1662                          bcopy(pt, tmp, mac_remain);
1630 1663                          mac_remain = 0;
1631 1664                  } else {
1632      -                        if (IS_P2ALIGNED(pt, sizeof (uint32_t)) &&
1633      -                            IS_P2ALIGNED(macp, sizeof (uint32_t))) {
     1665 +                        if (IS_P2ALIGNED2(pt, macp, sizeof (uint32_t))) {
1634 1666                                  /* LINTED: pointer alignment */
1635 1667                                  *(uint32_t *)&macp[0] = *(uint32_t *)pt;
1636 1668                                  /* LINTED: pointer alignment */
1637 1669                                  *(uint32_t *)&macp[4] = *(uint32_t *)&pt[4];
1638 1670                                  /* LINTED: pointer alignment */
1639 1671                                  *(uint32_t *)&macp[8] = *(uint32_t *)&pt[8];
1640 1672                                  /* LINTED: pointer alignment */
1641 1673                                  *(uint32_t *)&macp[12] = *(uint32_t *)&pt[12];
1642 1674                          } else {
1643 1675                                  AES_COPY_BLOCK(pt, macp);
1644 1676                          }
1645 1677                          mac_remain -= AES_BLOCK_LEN;
1646 1678                          pt += AES_BLOCK_LEN;
1647 1679                  }
1648 1680  
1649 1681                  /* calculate the CBC MAC */
1650      -                if (IS_P2ALIGNED(macp, sizeof (uint32_t)) &&
1651      -                    IS_P2ALIGNED(mac_buf, sizeof (uint32_t))) {
     1682 +                if (IS_P2ALIGNED2(macp, mac_buf, sizeof (uint32_t))) {
1652 1683                          /* LINTED: pointer alignment */
1653 1684                          *(uint32_t *)&mac_buf[0] ^= *(uint32_t *)&macp[0];
1654 1685                          /* LINTED: pointer alignment */
1655 1686                          *(uint32_t *)&mac_buf[4] ^= *(uint32_t *)&macp[4];
1656 1687                          /* LINTED: pointer alignment */
1657 1688                          *(uint32_t *)&mac_buf[8] ^= *(uint32_t *)&macp[8];
1658 1689                          /* LINTED: pointer alignment */
1659 1690                          *(uint32_t *)&mac_buf[12] ^= *(uint32_t *)&macp[12];
1660 1691                  } else {
1661 1692                          AES_XOR_BLOCK(macp, mac_buf);
[ 27 lines elided ]