5007142 Add ntohll and htonll to sys/byteorder.h
6717509 Need to use bswap/bswapq for byte swap of 64-bit integer on x32/x64
PSARC 2008/474
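
For reviewers unfamiliar with the new interfaces: htonll()/ntohll() from PSARC 2008/474 convert a 64-bit value between host and network (big-endian) byte order, and per 6717509 they reduce to a single bswap/bswapq instruction on x86. The sketch below shows the semantics only; it is not the actual sys/byteorder.h definition, and _BIG_ENDIAN is assumed to come from sys/isa_defs.h as on Solaris.

#include <stdint.h>

/*
 * Rough portable sketch of what htonll() does: a no-op on big-endian
 * hosts, a full 8-byte swap on little-endian ones.  ntohll() is the
 * identical operation in the other direction (the swap is its own
 * inverse).
 */
static uint64_t
my_htonll(uint64_t x)
{
#if defined(_BIG_ENDIAN)
	return (x);
#else
	return (((x & 0x00000000000000ffULL) << 56) |
	    ((x & 0x000000000000ff00ULL) << 40) |
	    ((x & 0x0000000000ff0000ULL) << 24) |
	    ((x & 0x00000000ff000000ULL) << 8) |
	    ((x & 0x000000ff00000000ULL) >> 8) |
	    ((x & 0x0000ff0000000000ULL) >> 24) |
	    ((x & 0x00ff000000000000ULL) >> 40) |
	    ((x & 0xff00000000000000ULL) >> 56));
#endif
}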

          --- old/usr/src/common/crypto/des/des_impl.c
          +++ new/usr/src/common/crypto/des/des_impl.c
[ 15 lines elided ]
  16   16   * fields enclosed by brackets "[]" replaced with your own identifying
  17   17   * information: Portions Copyright [yyyy] [name of copyright owner]
  18   18   *
  19   19   * CDDL HEADER END
  20   20   */
  21   21  /*
  22   22   * Copyright 2008 Sun Microsystems, Inc.  All rights reserved.
  23   23   * Use is subject to license terms.
  24   24   */
  25   25  
  26      -#pragma ident   "%Z%%M% %I%     %E% SMI"
  27      -
  28   26  #include <sys/types.h>
  29   27  #include <sys/systm.h>
  30   28  #include <sys/ddi.h>
  31   29  #include <sys/sysmacros.h>
  32   30  #include <sys/strsun.h>
  33   31  #include <sys/crypto/spi.h>
  34   32  #include <modes/modes.h>
  35   33  #include <sys/crypto/common.h>
  36   34  #include "des_impl.h"
  37   35  #ifndef _KERNEL
  38   36  #include <strings.h>
  39   37  #include <stdlib.h>
  40   38  #endif  /* !_KERNEL */
  41   39  
       40 +#if defined(__i386) || defined(__amd64)
       41 +#include <sys/byteorder.h>
       42 +#define UNALIGNED_POINTERS_PERMITTED
       43 +#endif
       44 +
  42   45  /* EXPORT DELETE START */
  43   46  
  44   47  typedef struct keysched_s {
  45   48          uint64_t ksch_encrypt[16];
  46   49          uint64_t ksch_decrypt[16];
  47   50  } keysched_t;
  48   51  
  49   52  typedef struct keysched3_s {
  50   53          uint64_t ksch_encrypt[48];
  51   54          uint64_t ksch_decrypt[48];
[ 459 lines elided ]
 511  514           * The code below, that is always executed on LITTLE_ENDIAN machines,
 512  515           * reverses bytes in the block.  On BIG_ENDIAN, the same code
 513  516           * copies the block without reversing bytes.
 514  517           */
 515  518  #ifdef _BIG_ENDIAN
 516  519          if (IS_P2ALIGNED(block, sizeof (uint64_t)) &&
 517  520              IS_P2ALIGNED(out_block, sizeof (uint64_t))) {
 518  521                  if (decrypt == B_TRUE)
 519  522                          /* LINTED */
 520  523                          *(uint64_t *)out_block = des_crypt_impl(
 521      -                            ksch->ksch_decrypt,
 522      -                            /* LINTED */
      524 +                            ksch->ksch_decrypt, /* LINTED */
 523  525                              *(uint64_t *)block, 3);
 524  526                  else
 525  527                          /* LINTED */
 526  528                          *(uint64_t *)out_block = des_crypt_impl(
 527      -                            ksch->ksch_encrypt,
 528      -                            /* LINTED */
      529 +                            ksch->ksch_encrypt, /* LINTED */
 529  530                              *(uint64_t *)block, 3);
 530      -        } else {
 531      -#endif
      531 +        } else
      532 +#endif  /* _BIG_ENDIAN */
      533 +        {
 532  534                  uint64_t tmp;
 533  535  
      536 +#ifdef UNALIGNED_POINTERS_PERMITTED
      537 +                tmp = htonll(*(uint64_t *)&block[0]);
      538 +#else
 534  539                  tmp = (((uint64_t)block[0] << 56) | ((uint64_t)block[1] << 48) |
 535  540                      ((uint64_t)block[2] << 40) | ((uint64_t)block[3] << 32) |
 536  541                      ((uint64_t)block[4] << 24) | ((uint64_t)block[5] << 16) |
 537  542                      ((uint64_t)block[6] << 8) | (uint64_t)block[7]);
      543 +#endif  /* UNALIGNED_POINTERS_PERMITTED */
 538  544  
 539  545                  if (decrypt == B_TRUE)
 540  546                          tmp = des_crypt_impl(ksch->ksch_decrypt, tmp, 3);
 541  547                  else
 542  548                          tmp = des_crypt_impl(ksch->ksch_encrypt, tmp, 3);
 543  549  
      550 +#ifdef UNALIGNED_POINTERS_PERMITTED
      551 +                *(uint64_t *)&out_block[0] = htonll(tmp);
      552 +#else
 544  553                  out_block[0] = tmp >> 56;
 545  554                  out_block[1] = tmp >> 48;
 546  555                  out_block[2] = tmp >> 40;
 547  556                  out_block[3] = tmp >> 32;
 548  557                  out_block[4] = tmp >> 24;
 549  558                  out_block[5] = tmp >> 16;
 550  559                  out_block[6] = tmp >> 8;
 551  560                  out_block[7] = (uint8_t)tmp;
 552      -#ifdef _BIG_ENDIAN
      561 +#endif  /* UNALIGNED_POINTERS_PERMITTED */
 553  562          }
 554      -#endif
 555  563  /* EXPORT DELETE END */
 556  564          return (CRYPTO_SUCCESS);
 557  565  }
 558  566  
 559  567  int
 560  568  des_crunch_block(const void *cookie, const uint8_t block[DES_BLOCK_LEN],
 561  569      uint8_t out_block[DES_BLOCK_LEN], boolean_t decrypt)
 562  570  {
 563  571  /* EXPORT DELETE START */
 564  572          keysched_t *ksch = (keysched_t *)cookie;
[ 2 lines elided ]
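
The hunks above and below replace the open-coded byte shuffles with htonll() wherever the new UNALIGNED_POINTERS_PERMITTED guard allows a direct 64-bit access. A minimal host-side check that the two formulations agree, using a GCC-style __builtin_bswap64 as a stand-in for htonll() (an assumption for the test, not the gate's definition):

#include <assert.h>
#include <stdint.h>
#include <string.h>

/* The pre-existing shift-based big-endian load, kept as the reference. */
static uint64_t
load_be64_shift(const uint8_t *p)
{
	return (((uint64_t)p[0] << 56) | ((uint64_t)p[1] << 48) |
	    ((uint64_t)p[2] << 40) | ((uint64_t)p[3] << 32) |
	    ((uint64_t)p[4] << 24) | ((uint64_t)p[5] << 16) |
	    ((uint64_t)p[6] << 8) | (uint64_t)p[7]);
}

int
main(void)
{
	uint8_t block[8] = { 0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef };
	uint64_t direct;

	/* 64-bit read; memcpy sidesteps alignment for the test itself. */
	memcpy(&direct, block, sizeof (direct));
#if !defined(_BIG_ENDIAN)
	direct = __builtin_bswap64(direct);	/* stand-in for htonll() */
#endif
	assert(direct == load_be64_shift(block));
	assert(direct == 0x0123456789abcdefULL);
	return (0);
}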
 567  575           * The code below, that is always executed on LITTLE_ENDIAN machines,
 568  576           * reverses bytes in the block.  On BIG_ENDIAN, the same code
 569  577           * copies the block without reversing bytes.
 570  578           */
 571  579  #ifdef _BIG_ENDIAN
 572  580          if (IS_P2ALIGNED(block, sizeof (uint64_t)) &&
 573  581              IS_P2ALIGNED(out_block, sizeof (uint64_t))) {
 574  582                  if (decrypt == B_TRUE)
 575  583                          /* LINTED */
 576  584                          *(uint64_t *)out_block = des_crypt_impl(
 577      -                            ksch->ksch_decrypt,
 578      -                            /* LINTED */
      585 +                            ksch->ksch_decrypt, /* LINTED */
 579  586                              *(uint64_t *)block, 1);
 580  587                  else
 581  588                          /* LINTED */
 582  589                          *(uint64_t *)out_block = des_crypt_impl(
 583      -                            ksch->ksch_encrypt,
 584      -                            /* LINTED */
      590 +                            ksch->ksch_encrypt, /* LINTED */
 585  591                              *(uint64_t *)block, 1);
 586  592  
 587      -        } else {
 588      -#endif
      593 +        } else
      594 +#endif  /* _BIG_ENDIAN */
      595 +        {
 589  596                  uint64_t tmp;
 590  597  
      598 +#ifdef UNALIGNED_POINTERS_PERMITTED
      599 +                tmp = htonll(*(uint64_t *)&block[0]);
      600 +#else
 591  601                  tmp = (((uint64_t)block[0] << 56) | ((uint64_t)block[1] << 48) |
 592  602                      ((uint64_t)block[2] << 40) | ((uint64_t)block[3] << 32) |
 593  603                      ((uint64_t)block[4] << 24) | ((uint64_t)block[5] << 16) |
 594  604                      ((uint64_t)block[6] << 8) | (uint64_t)block[7]);
      605 +#endif  /* UNALIGNED_POINTERS_PERMITTED */
 595  606  
      607 +
 596  608                  if (decrypt == B_TRUE)
 597  609                          tmp = des_crypt_impl(ksch->ksch_decrypt, tmp, 1);
 598  610                  else
 599  611                          tmp = des_crypt_impl(ksch->ksch_encrypt, tmp, 1);
 600  612  
      613 +#ifdef UNALIGNED_POINTERS_PERMITTED
      614 +                *(uint64_t *)&out_block[0] = htonll(tmp);
      615 +#else
 601  616                  out_block[0] = tmp >> 56;
 602  617                  out_block[1] = tmp >> 48;
 603  618                  out_block[2] = tmp >> 40;
 604  619                  out_block[3] = tmp >> 32;
 605  620                  out_block[4] = tmp >> 24;
 606  621                  out_block[5] = tmp >> 16;
 607  622                  out_block[6] = tmp >> 8;
 608  623                  out_block[7] = (uint8_t)tmp;
 609      -#ifdef _BIG_ENDIAN
      624 +#endif  /* UNALIGNED_POINTERS_PERMITTED */
 610  625          }
 611      -#endif
 612  626  /* EXPORT DELETE END */
 613  627          return (CRYPTO_SUCCESS);
 614  628  }
 615  629  
 616  630  static boolean_t
 617  631  keycheck(uint8_t *key, uint8_t *corrected_key)
 618  632  {
 619  633  /* EXPORT DELETE START */
 620  634          uint64_t key_so_far;
 621  635          uint_t i;
[ 21 lines elided ]
 643  657                  0x01f101f101e001e0ULL,  0xf101f101e001e001ULL,
 644  658                  0x0efe0efe1ffe1ffeULL,  0xfe0efe0efe1ffe1fULL,
 645  659                  0x010e010e011f011fULL,  0x0e010e011f011f01ULL,
 646  660                  0xf1fef1fee0fee0feULL,  0xfef1fef1fee0fee0ULL,
 647  661  #else   /* Big endian */
 648  662                  0x1fe01fe00ef10ef1ULL,  0xe01fe01ff10ef10eULL,
 649  663                  0x01e001e001f101f1ULL,  0xe001e001f101f101ULL,
 650  664                  0x1ffe1ffe0efe0efeULL,  0xfe1ffe1ffe0efe0eULL,
 651  665                  0x011f011f010e010eULL,  0x1f011f010e010e01ULL,
 652  666                  0xe0fee0fef1fef1feULL,  0xfee0fee0fef1fef1ULL,
 653      -#endif
      667 +#endif  /* _LITTLE_ENDIAN */
 654  668  
 655  669                  /* We'll save the other possibly-weak keys for the future. */
 656  670          };
 657  671  
 658  672          if (key == NULL)
 659  673                  return (B_FALSE);
 660  674  
      675 +#ifdef UNALIGNED_POINTERS_PERMITTED
      676 +        key_so_far = htonll(*(uint64_t *)&key[0]);
      677 +#else
 661  678          /*
 662  679           * The code below reverses the bytes on LITTLE_ENDIAN machines.
 663  680           * On BIG_ENDIAN, the same code copies without reversing
 664  681           * the bytes.
 665  682           */
 666  683          key_so_far = (((uint64_t)key[0] << 56) | ((uint64_t)key[1] << 48) |
 667  684              ((uint64_t)key[2] << 40) | ((uint64_t)key[3] << 32) |
 668  685              ((uint64_t)key[4] << 24) | ((uint64_t)key[5] << 16) |
 669  686              ((uint64_t)key[6] << 8) | (uint64_t)key[7]);
      687 +#endif  /* UNALIGNED_POINTERS_PERMITTED */
 670  688  
 671  689          /*
 672  690           * Fix parity.
 673  691           */
 674  692          fix_des_parity(&key_so_far);
 675  693  
 676  694          /* Do weak key check itself. */
 677  695          for (i = 0; i < (sizeof (des_weak_keys) / sizeof (uint64_t)); i++)
 678  696                  if (key_so_far == des_weak_keys[i]) {
 679  697                          return (B_FALSE);
 680  698                  }
 681  699  
 682  700          if (corrected_key != NULL) {
      701 +#ifdef UNALIGNED_POINTERS_PERMITTED
      702 +                *(uint64_t *)&corrected_key[0] = htonll(key_so_far);
      703 +#else
 683  704                  /*
 684  705                   * The code below reverses the bytes on LITTLE_ENDIAN machines.
 685  706                   * On BIG_ENDIAN, the same code copies without reversing
 686  707                   * the bytes.
 687  708                   */
 688  709                  corrected_key[0] = key_so_far >> 56;
 689  710                  corrected_key[1] = key_so_far >> 48;
 690  711                  corrected_key[2] = key_so_far >> 40;
 691  712                  corrected_key[3] = key_so_far >> 32;
 692  713                  corrected_key[4] = key_so_far >> 24;
 693  714                  corrected_key[5] = key_so_far >> 16;
 694  715                  corrected_key[6] = key_so_far >> 8;
 695  716                  corrected_key[7] = (uint8_t)key_so_far;
      717 +#endif  /* UNALIGNED_POINTERS_PERMITTED */
 696  718          }
 697  719  /* EXPORT DELETE END */
 698  720          return (B_TRUE);
 699  721  }
 700  722  
 701  723  static boolean_t
 702  724  des3_keycheck(uint8_t *key, uint8_t *corrected_key)
 703  725  {
 704  726  /* EXPORT DELETE START */
 705  727          uint64_t aligned_key[DES3_KEYSIZE / sizeof (uint64_t)];
[ 30 lines elided ]
 736  758                           * continue on.
 737  759                           */
 738  760                  }
 739  761  
 740  762                  currentkey[j] = scratch;
 741  763          }
 742  764  
 743  765          /*
 744  766           * Perform key equivalence checks, now that parity is properly set.
 745  767           * 1st and 2nd keys must be unique, the 3rd key can be the same as
 746      -         * the 1st key for the 2 key varient of 3DES.
      768 +         * the 1st key for the 2 key variant of 3DES.
 747  769           */
 748  770          if (currentkey[0] == currentkey[1] || currentkey[1] == currentkey[2])
 749  771                  return (B_FALSE);
 750  772  
 751  773          if (corrected_key != NULL) {
 752  774                  bcopy(currentkey, corrected_key, DES3_KEYSIZE);
 753  775          }
 754  776  
 755  777  /* EXPORT DELETE END */
 756  778          return (B_TRUE);
[ 21 lines elided ]
 778  800          int i = 0, offset = 0;
 779  801  
 780  802          if (strength == DES)
 781  803                  bcopy(key, aligned_key, DES_KEYSIZE);
 782  804          else
 783  805                  bcopy(key, aligned_key, DES3_KEYSIZE);
 784  806  
 785  807          paritied_key = (uint8_t *)aligned_key;
 786  808          while (strength > i) {
 787  809                  offset = 8 * i;
      810 +#ifdef UNALIGNED_POINTERS_PERMITTED
      811 +                key_so_far = htonll(*(uint64_t *)&paritied_key[offset]);
      812 +#else
 788  813                  key_so_far = (((uint64_t)paritied_key[offset + 0] << 56) |
 789  814                      ((uint64_t)paritied_key[offset + 1] << 48) |
 790  815                      ((uint64_t)paritied_key[offset + 2] << 40) |
 791  816                      ((uint64_t)paritied_key[offset + 3] << 32) |
 792  817                      ((uint64_t)paritied_key[offset + 4] << 24) |
 793  818                      ((uint64_t)paritied_key[offset + 5] << 16) |
 794  819                      ((uint64_t)paritied_key[offset + 6] << 8) |
 795  820                      (uint64_t)paritied_key[offset + 7]);
      821 +#endif  /* UNALIGNED_POINTERS_PERMITTED */
 796  822  
 797  823                  fix_des_parity(&key_so_far);
 798  824  
      825 +#ifdef UNALIGNED_POINTERS_PERMITTED
      826 +                *(uint64_t *)&paritied_key[offset] = htonll(key_so_far);
      827 +#else
 799  828                  paritied_key[offset + 0] = key_so_far >> 56;
 800  829                  paritied_key[offset + 1] = key_so_far >> 48;
 801  830                  paritied_key[offset + 2] = key_so_far >> 40;
 802  831                  paritied_key[offset + 3] = key_so_far >> 32;
 803  832                  paritied_key[offset + 4] = key_so_far >> 24;
 804  833                  paritied_key[offset + 5] = key_so_far >> 16;
 805  834                  paritied_key[offset + 6] = key_so_far >> 8;
 806  835                  paritied_key[offset + 7] = (uint8_t)key_so_far;
      836 +#endif  /* UNALIGNED_POINTERS_PERMITTED */
 807  837  
 808  838                  i++;
 809  839          }
 810  840  
 811  841          bcopy(paritied_key, corrected_key, DES_KEYSIZE * strength);
 812  842  /* EXPORT DELETE END */
 813  843  }
 814  844  
 815  845  
 816  846  /*
[ 31 lines elided ]
 848  878           * The code below, that is always executed on LITTLE_ENDIAN machines,
 849  879           * reverses every 8 bytes in the key.  On BIG_ENDIAN, the same code
 850  880           * copies the key without reversing bytes.
 851  881           */
 852  882  #ifdef _BIG_ENDIAN
 853  883          if (IS_P2ALIGNED(cipherKey, sizeof (uint64_t))) {
 854  884                  for (i = 0, j = 0; j < keysize; i++, j += 8) {
 855  885                          /* LINTED: pointer alignment */
 856  886                          key_uint64[i] = *((uint64_t *)&cipherKey[j]);
 857  887                  }
 858      -        } else {
 859      -#endif
      888 +        } else
      889 +#endif  /* _BIG_ENDIAN */
 860  890          {
 861  891                  for (i = 0, j = 0; j < keysize; i++, j += 8) {
      892 +#ifdef UNALIGNED_POINTERS_PERMITTED
      893 +                        key_uint64[i] = htonll(*(uint64_t *)&cipherKey[j]);
      894 +#else
 862  895                          key_uint64[i] = (((uint64_t)cipherKey[j] << 56) |
 863  896                              ((uint64_t)cipherKey[j + 1] << 48) |
 864  897                              ((uint64_t)cipherKey[j + 2] << 40) |
 865  898                              ((uint64_t)cipherKey[j + 3] << 32) |
 866  899                              ((uint64_t)cipherKey[j + 4] << 24) |
 867  900                              ((uint64_t)cipherKey[j + 5] << 16) |
 868  901                              ((uint64_t)cipherKey[j + 6] << 8) |
 869  902                              (uint64_t)cipherKey[j + 7]);
      903 +#endif  /* UNALIGNED_POINTERS_PERMITTED */
 870  904                  }
 871  905          }
 872      -#ifdef _BIG_ENDIAN
 873      -        }
 874      -#endif
 875  906  
 876  907          switch (strength) {
 877  908          case DES:
 878  909                  des_ks(keysched, key_uint64[0]);
 879  910                  break;
 880  911  
 881  912          case DES2:
 882  913                  /* DES2 is just DES3 with the first and third keys the same */
 883  914                  bcopy(key_uint64, key_uint64 + 2, DES_KEYSIZE);
 884  915                  /* FALLTHRU */
[ 59 lines elided ]
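
des_init_keysched(), in the hunk above, packs the raw key into uint64_t words eight bytes at a time before building the key schedule. A hedged stand-alone sketch of just that packing loop; be64_load() is an illustrative name for the htonll()-or-shifts choice the diff makes per platform:

#include <assert.h>
#include <stddef.h>
#include <stdint.h>

/* Stand-in for the htonll()/shift choice made in the diff. */
static uint64_t
be64_load(const uint8_t *p)
{
	return (((uint64_t)p[0] << 56) | ((uint64_t)p[1] << 48) |
	    ((uint64_t)p[2] << 40) | ((uint64_t)p[3] << 32) |
	    ((uint64_t)p[4] << 24) | ((uint64_t)p[5] << 16) |
	    ((uint64_t)p[6] << 8) | (uint64_t)p[7]);
}

/* Every eight key bytes become one big-endian uint64_t word. */
static void
pack_key(const uint8_t *cipherKey, size_t keysize, uint64_t *key_uint64)
{
	size_t i, j;

	for (i = 0, j = 0; j < keysize; i++, j += 8)
		key_uint64[i] = be64_load(&cipherKey[j]);
}

int
main(void)
{
	uint8_t key[24] = { 0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef };
	uint64_t words[3];

	pack_key(key, sizeof (key), words);
	assert(words[0] == 0x0123456789abcdefULL);
	assert(words[1] == 0 && words[2] == 0);
	return (0);
}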
 944  975                  *keysched_size = size;
 945  976  
 946  977  /* EXPORT DELETE END */
 947  978  
 948  979          return (keysched);
 949  980  }
 950  981  
 951  982  /*
 952  983   * Replace the LSB of each byte by the xor of the other
 953  984   * 7 bits.  The tricky thing is that the original contents of the LSBs
 954      - * are nullifed by including them twice in the xor computation.
      985 + * are nullified by including them twice in the xor computation.
 955  986   */
 956  987  static void
 957  988  fix_des_parity(uint64_t *keyp)
 958  989  {
 959  990  /* EXPORT DELETE START */
 960  991          uint64_t k = *keyp;
 961  992          k ^= k >> 1;
 962  993          k ^= k >> 2;
 963  994          k ^= k >> 4;
 964  995          *keyp ^= (k & 0x0101010101010101ULL);
[ 16 lines elided ]
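
fix_des_parity(), shown above, relies on an xor-folding trick: after the three folds, bit 0 of every byte holds the xor of all eight bits of that byte, so xor-ing that mask back in leaves each LSB equal to the xor of the byte's other seven bits. A small stand-alone check against a naive per-byte reference (names here are illustrative, not from the gate):

#include <assert.h>
#include <stdint.h>

/* Same folding as fix_des_parity() in the diff. */
static void
fold_parity(uint64_t *keyp)
{
	uint64_t k = *keyp;

	k ^= k >> 1;
	k ^= k >> 2;
	k ^= k >> 4;
	*keyp ^= (k & 0x0101010101010101ULL);
}

/* Naive reference: set each byte's LSB to the xor of its other 7 bits. */
static uint8_t
naive_byte(uint8_t b)
{
	uint8_t x = 0;
	int i;

	for (i = 1; i < 8; i++)
		x ^= (b >> i) & 1;
	return ((uint8_t)((b & ~1) | x));
}

int
main(void)
{
	uint64_t key = 0x0123456789abcdefULL;
	uint64_t folded = key;
	int i;

	fold_parity(&folded);
	for (i = 0; i < 8; i++) {
		assert(((folded >> (8 * i)) & 0xff) ==
		    naive_byte((key >> (8 * i)) & 0xff));
	}
	return (0);
}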
 981 1012  }
 982 1013  
 983 1014  /* XOR block of data into dest */
 984 1015  void
 985 1016  des_xor_block(uint8_t *data, uint8_t *dst)
 986 1017  {
 987 1018          if (IS_P2ALIGNED(dst, sizeof (uint32_t)) &&
 988 1019              IS_P2ALIGNED(data, sizeof (uint32_t))) {
 989 1020                  /* LINTED: pointer alignment */
 990 1021                  *(uint32_t *)&dst[0] ^=
 991      -                /* LINTED: pointer alignment */
     1022 +                    /* LINTED: pointer alignment */
 992 1023                      *(uint32_t *)&data[0];
 993      -                /* LINTED: pointer alignment */
     1024 +                    /* LINTED: pointer alignment */
 994 1025                  *(uint32_t *)&dst[4] ^=
 995      -                /* LINTED: pointer alignment */
     1026 +                    /* LINTED: pointer alignment */
 996 1027                      *(uint32_t *)&data[4];
 997 1028          } else {
 998 1029                  DES_XOR_BLOCK(data, dst);
 999 1030          }
1000 1031  }
1001 1032  
1002 1033  int
1003 1034  des_encrypt_block(const void *keysched, const uint8_t *in, uint8_t *out)
1004 1035  {
1005 1036          return (des_crunch_block(keysched, in, out, B_FALSE));
[ 87 lines elided ]