5007142 Add ntohll and htonll to sys/byteorder.h
6717509 Need to use bswap/bswapq for byte swap of 64-bit integer on x32/x64
PSARC 2008/474
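
For reference, the 64-bit conversions this change builds on (5007142, PSARC 2008/474) behave like the sketch below: a no-op on big-endian systems and a full 64-bit byte swap on little-endian ones, which the compiler reduces to a single bswap/bswapq on x86/x64. This fallback is illustrative only; my_htonll/my_ntohll are hypothetical names, not the <sys/byteorder.h> implementation.

#include <sys/types.h>

static uint64_t
my_htonll(uint64_t value)
{
#ifdef _LITTLE_ENDIAN
	/* host (little-endian) to network (big-endian): swap all eight bytes */
	return (((value & 0x00000000000000ffULL) << 56) |
	    ((value & 0x000000000000ff00ULL) << 40) |
	    ((value & 0x0000000000ff0000ULL) << 24) |
	    ((value & 0x00000000ff000000ULL) << 8) |
	    ((value & 0x000000ff00000000ULL) >> 8) |
	    ((value & 0x0000ff0000000000ULL) >> 24) |
	    ((value & 0x00ff000000000000ULL) >> 40) |
	    ((value & 0xff00000000000000ULL) >> 56));
#else
	/* big-endian host order is already network order */
	return (value);
#endif
}

/* ntohll is the inverse of htonll; a pure byte swap is its own inverse */
#define	my_ntohll(x)	my_htonll(x)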

          --- old/usr/src/common/crypto/modes/ccm.c
          +++ new/usr/src/common/crypto/modes/ccm.c
... 28 lines elided ...
  29   29  #include <assert.h>
  30   30  #include <security/cryptoki.h>
  31   31  #endif
  32   32  
  33   33  #include <sys/types.h>
  34   34  #include <sys/kmem.h>
  35   35  #include <modes/modes.h>
  36   36  #include <sys/crypto/common.h>
  37   37  #include <sys/crypto/impl.h>
  38   38  
       39 +#if defined(__i386) || defined(__amd64)
       40 +#include <sys/byteorder.h>
       41 +#define UNALIGNED_POINTERS_PERMITTED
       42 +#endif
       43 +
  39   44  /*
  40   45   * Encrypt multiple blocks of data in CCM mode.  Decrypt for CCM mode
  41   46   * is done in another function.
  42   47   */
  43   48  int
  44   49  ccm_mode_encrypt_contiguous_blocks(ccm_ctx_t *ctx, char *data, size_t length,
  45   50      crypto_data_t *out, size_t block_size,
  46   51      int (*encrypt_block)(const void *, const uint8_t *, uint8_t *),
  47   52      void (*copy_block)(uint8_t *, uint8_t *),
  48   53      void (*xor_block)(uint8_t *, uint8_t *))
... 3 lines elided ...
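
The UNALIGNED_POINTERS_PERMITTED guard added above is only defined for x86/amd64, where misaligned loads and stores are legal. A minimal sketch of what it enables, assuming a hypothetical store_be32() helper that is not part of this file: a possibly misaligned 32-bit big-endian store can be issued in one step instead of four byte stores.

#include <sys/types.h>
#include <sys/byteorder.h>

static void
store_be32(uint8_t *dst, uint32_t val)
{
#ifdef UNALIGNED_POINTERS_PERMITTED
	/* x86/amd64: a single, possibly unaligned, 32-bit store */
	*(uint32_t *)dst = htonl(val);
#else
	/* strict-alignment platforms: assemble the value byte by byte */
	dst[0] = (val >> 24) & 0xff;
	dst[1] = (val >> 16) & 0xff;
	dst[2] = (val >> 8) & 0xff;
	dst[3] = val & 0xff;
#endif
}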
  52   57          uint8_t *datap = (uint8_t *)data;
  53   58          uint8_t *blockp;
  54   59          uint8_t *lastp;
  55   60          void *iov_or_mp;
  56   61          offset_t offset;
  57   62          uint8_t *out_data_1;
  58   63          uint8_t *out_data_2;
  59   64          size_t out_data_1_len;
  60   65          uint64_t counter;
  61   66          uint8_t *mac_buf;
  62      -#ifdef _LITTLE_ENDIAN
  63      -        uint8_t *p;
  64      -#endif
  65   67  
  66   68          if (length + ctx->ccm_remainder_len < block_size) {
  67   69                  /* accumulate bytes here and return */
  68   70                  bcopy(datap,
  69   71                      (uint8_t *)ctx->ccm_remainder + ctx->ccm_remainder_len,
  70   72                      length);
  71   73                  ctx->ccm_remainder_len += length;
  72   74                  ctx->ccm_copy_to = datap;
  73   75                  return (CRYPTO_SUCCESS);
  74   76          }
... 32 lines elided ...
 107  109                  /* ccm_cb is the counter block */
 108  110                  encrypt_block(ctx->ccm_keysched, (uint8_t *)ctx->ccm_cb,
 109  111                      (uint8_t *)ctx->ccm_tmp);
 110  112  
 111  113                  lastp = (uint8_t *)ctx->ccm_tmp;
 112  114  
 113  115                  /*
 114  116                   * Increment counter. Counter bits are confined
 115  117                   * to the bottom 64 bits of the counter block.
 116  118                   */
 117      -                counter = ctx->ccm_cb[1] & ctx->ccm_counter_mask;
 118  119  #ifdef _LITTLE_ENDIAN
 119      -                p = (uint8_t *)&counter;
 120      -                counter = (((uint64_t)p[0] << 56) |
 121      -                    ((uint64_t)p[1] << 48) |
 122      -                    ((uint64_t)p[2] << 40) |
 123      -                    ((uint64_t)p[3] << 32) |
 124      -                    ((uint64_t)p[4] << 24) |
 125      -                    ((uint64_t)p[5] << 16) |
 126      -                    ((uint64_t)p[6] << 8) |
 127      -                    (uint64_t)p[7]);
 128      -#endif
      120 +                counter = ntohll(ctx->ccm_cb[1] & ctx->ccm_counter_mask);
      121 +                counter = htonll(counter + 1);
      122 +#else
      123 +                counter = ctx->ccm_cb[1] & ctx->ccm_counter_mask;
 129  124                  counter++;
 130      -#ifdef _LITTLE_ENDIAN
 131      -                counter = (((uint64_t)p[0] << 56) |
 132      -                    ((uint64_t)p[1] << 48) |
 133      -                    ((uint64_t)p[2] << 40) |
 134      -                    ((uint64_t)p[3] << 32) |
 135      -                    ((uint64_t)p[4] << 24) |
 136      -                    ((uint64_t)p[5] << 16) |
 137      -                    ((uint64_t)p[6] << 8) |
 138      -                    (uint64_t)p[7]);
 139      -#endif
      125 +#endif  /* _LITTLE_ENDIAN */
 140  126                  counter &= ctx->ccm_counter_mask;
 141  127                  ctx->ccm_cb[1] =
 142  128                      (ctx->ccm_cb[1] & ~(ctx->ccm_counter_mask)) | counter;
 143  129  
 144  130                  /*
 145  131                   * XOR encrypted counter block with the current clear block.
 146  132                   */
 147  133                  xor_block(blockp, lastp);
 148  134  
 149  135                  ctx->ccm_processed_data_len += block_size;
... 159 lines elided ...
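
The counter handling in the hunk above replaces the old byte-by-byte swap with ntohll()/htonll(). A standalone restatement, for illustration only: increment_counter() is a hypothetical helper, and counter_mask is assumed to be stored in counter-block byte order, as set up in ccm_format_initial_blocks().

#include <sys/types.h>
#include <sys/byteorder.h>

static uint64_t
increment_counter(uint64_t cb1, uint64_t counter_mask)
{
	uint64_t counter;

#ifdef _LITTLE_ENDIAN
	counter = ntohll(cb1 & counter_mask);	/* big-endian memory -> host order */
	counter = htonll(counter + 1);		/* increment, convert back */
#else
	counter = cb1 & counter_mask;		/* already in host order */
	counter++;
#endif	/* _LITTLE_ENDIAN */
	counter &= counter_mask;		/* confine to the counter field */
	return ((cb1 & ~counter_mask) | counter);
}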
 309  295                                      out_data_2, data_2_len_used);
 310  296                                  bcopy(ccm_mac_p, out_data_2 + data_2_len_used,
 311  297                                      ctx->ccm_mac_len);
 312  298                          } else {
 313  299                                  bcopy(macp, out_data_1, out_data_1_len);
 314  300                                  if (out_data_1_len == ctx->ccm_remainder_len) {
 315  301                                          /* mac will be in out_data_2 */
 316  302                                          bcopy(ccm_mac_p, out_data_2,
 317  303                                              ctx->ccm_mac_len);
 318  304                                  } else {
 319      -                                        size_t len_not_used
 320      -                                            = out_data_1_len -
      305 +                                        size_t len_not_used = out_data_1_len -
 321  306                                              ctx->ccm_remainder_len;
 322  307                                          /*
  323  308                                           * part of the mac will be in
 324  309                                           * out_data_1, part of the mac will be
 325  310                                           * in out_data_2
 326  311                                           */
 327  312                                          bcopy(ccm_mac_p,
 328  313                                              out_data_1 + ctx->ccm_remainder_len,
 329  314                                              len_not_used);
 330  315                                          bcopy(ccm_mac_p + len_not_used,
... 156 lines elided ...
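
The branches above only decide where the remaining ciphertext and the MAC land when the output is split across two regions. The underlying copy pattern, restated as a hedged sketch (scatter_two() is a hypothetical helper, not code from this file):

#include <sys/types.h>
#include <strings.h>

static void
scatter_two(uint8_t *src, size_t src_len,
    uint8_t *out1, size_t out1_len, uint8_t *out2)
{
	if (src_len <= out1_len) {
		/* everything fits in the first output region */
		bcopy(src, out1, src_len);
	} else {
		/* fill the first region, spill the rest into the second */
		bcopy(src, out1, out1_len);
		bcopy(src + out1_len, out2, src_len - out1_len);
	}
}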
 487  472                  }
 488  473  
 489  474                  /* Calculate the counter mode, ccm_cb is the counter block */
 490  475                  cbp = (uint8_t *)ctx->ccm_tmp;
 491  476                  encrypt_block(ctx->ccm_keysched, (uint8_t *)ctx->ccm_cb, cbp);
 492  477  
 493  478                  /*
 494  479                   * Increment counter.
 495  480                   * Counter bits are confined to the bottom 64 bits
 496  481                   */
 497      -                counter = ctx->ccm_cb[1] & ctx->ccm_counter_mask;
 498  482  #ifdef _LITTLE_ENDIAN
 499      -                p = (uint8_t *)&counter;
 500      -                counter = (((uint64_t)p[0] << 56) |
 501      -                    ((uint64_t)p[1] << 48) |
 502      -                    ((uint64_t)p[2] << 40) |
 503      -                    ((uint64_t)p[3] << 32) |
 504      -                    ((uint64_t)p[4] << 24) |
 505      -                    ((uint64_t)p[5] << 16) |
 506      -                    ((uint64_t)p[6] << 8) |
 507      -                    (uint64_t)p[7]);
 508      -#endif
      483 +                counter = ntohll(ctx->ccm_cb[1] & ctx->ccm_counter_mask);
      484 +                counter = htonll(counter + 1);
      485 +#else
      486 +                counter = ctx->ccm_cb[1] & ctx->ccm_counter_mask;
 509  487                  counter++;
 510      -#ifdef _LITTLE_ENDIAN
 511      -                counter = (((uint64_t)p[0] << 56) |
 512      -                    ((uint64_t)p[1] << 48) |
 513      -                    ((uint64_t)p[2] << 40) |
 514      -                    ((uint64_t)p[3] << 32) |
 515      -                    ((uint64_t)p[4] << 24) |
 516      -                    ((uint64_t)p[5] << 16) |
 517      -                    ((uint64_t)p[6] << 8) |
 518      -                    (uint64_t)p[7]);
 519      -#endif
      488 +#endif  /* _LITTLE_ENDIAN */
 520  489                  counter &= ctx->ccm_counter_mask;
 521  490                  ctx->ccm_cb[1] =
 522  491                      (ctx->ccm_cb[1] & ~(ctx->ccm_counter_mask)) | counter;
 523  492  
 524  493                  /* XOR with the ciphertext */
 525  494                  xor_block(blockp, cbp);
 526  495  
 527  496                  /* Copy the plaintext to the "holding buffer" */
 528  497                  resultp = (uint8_t *)ctx->ccm_pt_buf +
 529  498                      ctx->ccm_processed_data_len;
... 167 lines elided ...
 697  666  static void
 698  667  ccm_format_initial_blocks(uchar_t *nonce, ulong_t nonceSize,
 699  668      ulong_t authDataSize, uint8_t *b0, ccm_ctx_t *aes_ctx)
 700  669  {
 701  670          uint64_t payloadSize;
 702  671          uint8_t t, q, have_adata = 0;
 703  672          size_t limit;
 704  673          int i, j, k;
 705  674          uint64_t mask = 0;
 706  675          uint8_t *cb;
 707      -#ifdef _LITTLE_ENDIAN
 708      -        uint8_t *p8;
 709      -#endif  /* _LITTLE_ENDIAN */
 710  676  
 711  677          q = (uint8_t)((15 - nonceSize) & 0xFF);
 712  678          t = (uint8_t)((aes_ctx->ccm_mac_len) & 0xFF);
 713  679  
 714  680          /* Construct the first octet of b0 */
 715  681          if (authDataSize > 0) {
 716  682                  have_adata = 1;
 717  683          }
 718  684          b0[0] = (have_adata << 6) | (((t - 2)  / 2) << 3) | (q - 1);
 719  685  
↓ open down ↓ 21 lines elided ↑ open up ↑
 741  707  
 742  708          bzero(&(cb[1+nonceSize]), q);
 743  709  
 744  710          /* Create the mask for the counter field based on the size of nonce */
 745  711          q <<= 3;
 746  712          while (q-- > 0) {
 747  713                  mask |= (1ULL << q);
 748  714          }
 749  715  
 750  716  #ifdef _LITTLE_ENDIAN
 751      -        p8 = (uint8_t *)&mask;
 752      -        mask = (((uint64_t)p8[0] << 56) |
 753      -            ((uint64_t)p8[1] << 48) |
 754      -            ((uint64_t)p8[2] << 40) |
 755      -            ((uint64_t)p8[3] << 32) |
 756      -            ((uint64_t)p8[4] << 24) |
 757      -            ((uint64_t)p8[5] << 16) |
 758      -            ((uint64_t)p8[6] << 8) |
 759      -            (uint64_t)p8[7]);
      717 +        mask = htonll(mask);
 760  718  #endif
 761  719          aes_ctx->ccm_counter_mask = mask;
 762  720  
 763  721          /*
 764  722           * During calculation, we start using counter block 1, we will
 765  723           * set it up right here.
 766  724           * We can just set the last byte to have the value 1, because
 767  725           * even with the biggest nonce of 13, the last byte of the
 768  726           * counter block will be used for the counter value.
 769  727           */
 770  728          cb[15] = 0x01;
 771  729  }
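
ccm_format_initial_blocks() above now byte-swaps the counter mask with htonll() instead of the open-coded shifts. A worked illustration under a hypothetical helper name, counter_mask_for(): with a 12-byte nonce, q = 3, the host-order mask is 0x0000000000ffffff, and on little-endian hardware htonll() turns it into 0xffffff0000000000 so it still selects the last three bytes of the counter block when ccm_cb[1] is read as a raw uint64_t.

#include <sys/types.h>
#include <sys/byteorder.h>

static uint64_t
counter_mask_for(ulong_t nonce_size)
{
	uint64_t mask = 0;
	uint8_t q = (uint8_t)(15 - nonce_size);	/* bytes left for the counter */

	q <<= 3;				/* bytes -> bits */
	while (q-- > 0)
		mask |= (1ULL << q);

#ifdef _LITTLE_ENDIAN
	mask = htonll(mask);	/* keep the mask in counter-block byte order */
#endif
	return (mask);
}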
 772  730  
 773  731  /*
 774  732   * Encode the length of the associated data as
 775  733   * specified in RFC 3610 and NIST publication 800-38C, appendix A
 776  734   */
 777  735  static void
 778  736  encode_adata_len(ulong_t auth_data_len, uint8_t *encoded, size_t *encoded_len)
 779  737  {
      738 +#ifdef UNALIGNED_POINTERS_PERMITTED
      739 +        uint32_t        *lencoded_ptr;
      740 +#ifdef _LP64
      741 +        uint64_t        *llencoded_ptr;
      742 +#endif
      743 +#endif  /* UNALIGNED_POINTERS_PERMITTED */
      744 +
 780  745          if (auth_data_len < ((1ULL<<16) - (1ULL<<8))) {
 781  746                  /* 0 < a < (2^16-2^8) */
 782  747                  *encoded_len = 2;
 783  748                  encoded[0] = (auth_data_len & 0xff00) >> 8;
 784  749                  encoded[1] = auth_data_len & 0xff;
 785  750  
 786  751          } else if ((auth_data_len >= ((1ULL<<16) - (1ULL<<8))) &&
 787  752              (auth_data_len < (1ULL << 31))) {
 788  753                  /* (2^16-2^8) <= a < 2^32 */
 789  754                  *encoded_len = 6;
 790  755                  encoded[0] = 0xff;
 791  756                  encoded[1] = 0xfe;
      757 +#ifdef UNALIGNED_POINTERS_PERMITTED
      758 +                lencoded_ptr = (uint32_t *)&encoded[2];
      759 +                *lencoded_ptr = htonl(auth_data_len);
      760 +#else
 792  761                  encoded[2] = (auth_data_len & 0xff000000) >> 24;
 793  762                  encoded[3] = (auth_data_len & 0xff0000) >> 16;
 794  763                  encoded[4] = (auth_data_len & 0xff00) >> 8;
 795  764                  encoded[5] = auth_data_len & 0xff;
      765 +#endif  /* UNALIGNED_POINTERS_PERMITTED */
      766 +
 796  767  #ifdef _LP64
 797  768          } else {
 798  769                  /* 2^32 <= a < 2^64 */
 799  770                  *encoded_len = 10;
 800  771                  encoded[0] = 0xff;
 801  772                  encoded[1] = 0xff;
      773 +#ifdef UNALIGNED_POINTERS_PERMITTED
      774 +                llencoded_ptr = (uint64_t *)&encoded[2];
       775 +                *llencoded_ptr = htonll(auth_data_len);
      776 +#else
 802  777                  encoded[2] = (auth_data_len & 0xff00000000000000) >> 56;
 803  778                  encoded[3] = (auth_data_len & 0xff000000000000) >> 48;
 804  779                  encoded[4] = (auth_data_len & 0xff0000000000) >> 40;
 805  780                  encoded[5] = (auth_data_len & 0xff00000000) >> 32;
 806  781                  encoded[6] = (auth_data_len & 0xff000000) >> 24;
 807  782                  encoded[7] = (auth_data_len & 0xff0000) >> 16;
 808  783                  encoded[8] = (auth_data_len & 0xff00) >> 8;
 809  784                  encoded[9] = auth_data_len & 0xff;
      785 +#endif  /* UNALIGNED_POINTERS_PERMITTED */
 810  786  #endif  /* _LP64 */
 811  787          }
 812  788  }
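
encode_adata_len() implements the a-field encoding from RFC 3610 and NIST SP 800-38C, appendix A. A small, self-contained restatement with a couple of sample values; encode_len() and the test harness are hypothetical user-land illustrations only, and the ranges follow the RFC rather than the exact bound used above.

#include <assert.h>
#include <stdint.h>
#include <stddef.h>
#include <string.h>

static size_t
encode_len(uint64_t a, uint8_t *out)
{
	if (a < 0xff00) {			/* 0 < a < 2^16 - 2^8: 2 bytes */
		out[0] = (a >> 8) & 0xff;
		out[1] = a & 0xff;
		return (2);
	} else if (a < (1ULL << 32)) {		/* up to 2^32: ff fe + 4 bytes */
		int i;
		out[0] = 0xff;
		out[1] = 0xfe;
		for (i = 0; i < 4; i++)
			out[2 + i] = (a >> (24 - 8 * i)) & 0xff;
		return (6);
	} else {				/* up to 2^64: ff ff + 8 bytes */
		int i;
		out[0] = out[1] = 0xff;
		for (i = 0; i < 8; i++)
			out[2 + i] = (a >> (56 - 8 * i)) & 0xff;
		return (10);
	}
}

int
main(void)
{
	uint8_t buf[10];

	/* 0x1234 fits the short form: 12 34 */
	assert(encode_len(0x1234, buf) == 2 && buf[0] == 0x12 && buf[1] == 0x34);
	/* 1000000 == 0x000f4240: ff fe 00 0f 42 40 */
	assert(encode_len(1000000, buf) == 6 &&
	    memcmp(buf, "\xff\xfe\x00\x0f\x42\x40", 6) == 0);
	return (0);
}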
 813  789  
 814  790  /*
  815  791   * The following function should be called at encrypt or decrypt init time
 816  792   * for AES CCM mode.
 817  793   */
 818  794  int
 819  795  ccm_init(ccm_ctx_t *ctx, unsigned char *nonce, size_t nonce_len,
... 152 lines elided ...