/* ... */
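
        /*
         * Fast path: on big-endian machines an aligned block can be handed
         * to des_crypt_impl() as a direct 64-bit load, since that load
         * already yields the big-endian value the implementation operates
         * on.  Every other case takes the byte-gathering path below.
         */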
#ifdef _BIG_ENDIAN
        if (IS_P2ALIGNED(block, sizeof (uint64_t)) &&
            IS_P2ALIGNED(out_block, sizeof (uint64_t))) {
                if (decrypt == B_TRUE)
                        /* LINTED */
                        *(uint64_t *)out_block = des_crypt_impl(
                            ksch->ksch_decrypt, /* LINTED */
                            *(uint64_t *)block, 3);
                else
                        /* LINTED */
                        *(uint64_t *)out_block = des_crypt_impl(
                            ksch->ksch_encrypt, /* LINTED */
                            *(uint64_t *)block, 3);
        } else
#endif /* _BIG_ENDIAN */
        {
                uint64_t tmp;

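                /*
                 * Gather the eight input bytes into tmp in big-endian order:
                 * htonll() of a straight 64-bit load where unaligned loads
                 * are permitted, explicit shifts otherwise.
                 */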
#ifdef UNALIGNED_POINTERS_PERMITTED
                tmp = htonll(*(uint64_t *)(void *)&block[0]);
#else
                tmp = (((uint64_t)block[0] << 56) | ((uint64_t)block[1] << 48) |
                    ((uint64_t)block[2] << 40) | ((uint64_t)block[3] << 32) |
                    ((uint64_t)block[4] << 24) | ((uint64_t)block[5] << 16) |
                    ((uint64_t)block[6] << 8) | (uint64_t)block[7]);
#endif /* UNALIGNED_POINTERS_PERMITTED */

                if (decrypt == B_TRUE)
                        tmp = des_crypt_impl(ksch->ksch_decrypt, tmp, 3);
                else
                        tmp = des_crypt_impl(ksch->ksch_encrypt, tmp, 3);

#ifdef UNALIGNED_POINTERS_PERMITTED
                *(uint64_t *)(void *)&out_block[0] = htonll(tmp);
#else
                out_block[0] = tmp >> 56;
                out_block[1] = tmp >> 48;
                out_block[2] = tmp >> 40;
                out_block[3] = tmp >> 32;
                out_block[4] = tmp >> 24;
                out_block[5] = tmp >> 16;
                out_block[6] = tmp >> 8;
                out_block[7] = (uint8_t)tmp;
#endif /* UNALIGNED_POINTERS_PERMITTED */
        }
/* EXPORT DELETE END */
        return (CRYPTO_SUCCESS);
}

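/*
 * Single-DES variant of the block routine above: identical byte handling,
 * but des_crypt_impl() is asked for one DES pass instead of three.
 */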
int
des_crunch_block(const void *cookie, const uint8_t block[DES_BLOCK_LEN],
    uint8_t out_block[DES_BLOCK_LEN], boolean_t decrypt)
{
/* EXPORT DELETE START */
#ifdef _BIG_ENDIAN
        if (IS_P2ALIGNED(block, sizeof (uint64_t)) &&
            IS_P2ALIGNED(out_block, sizeof (uint64_t))) {
                if (decrypt == B_TRUE)
                        /* LINTED */
                        *(uint64_t *)out_block = des_crypt_impl(
                            ksch->ksch_decrypt, /* LINTED */
                            *(uint64_t *)block, 1);
                else
                        /* LINTED */
                        *(uint64_t *)out_block = des_crypt_impl(
                            ksch->ksch_encrypt, /* LINTED */
                            *(uint64_t *)block, 1);
        } else
#endif /* _BIG_ENDIAN */
        {
                uint64_t tmp;

#ifdef UNALIGNED_POINTERS_PERMITTED
                tmp = htonll(*(uint64_t *)(void *)&block[0]);
#else
                tmp = (((uint64_t)block[0] << 56) | ((uint64_t)block[1] << 48) |
                    ((uint64_t)block[2] << 40) | ((uint64_t)block[3] << 32) |
                    ((uint64_t)block[4] << 24) | ((uint64_t)block[5] << 16) |
                    ((uint64_t)block[6] << 8) | (uint64_t)block[7]);
#endif /* UNALIGNED_POINTERS_PERMITTED */

                if (decrypt == B_TRUE)
                        tmp = des_crypt_impl(ksch->ksch_decrypt, tmp, 1);
                else
                        tmp = des_crypt_impl(ksch->ksch_encrypt, tmp, 1);

#ifdef UNALIGNED_POINTERS_PERMITTED
                *(uint64_t *)(void *)&out_block[0] = htonll(tmp);
#else
                out_block[0] = tmp >> 56;
                out_block[1] = tmp >> 48;
                out_block[2] = tmp >> 40;
                out_block[3] = tmp >> 32;
                out_block[4] = tmp >> 24;
                out_block[5] = tmp >> 16;
                out_block[6] = tmp >> 8;
                out_block[7] = (uint8_t)tmp;
#endif /* UNALIGNED_POINTERS_PERMITTED */
        }
/* EXPORT DELETE END */
        return (CRYPTO_SUCCESS);
}

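/*
 * keycheck() normalizes the parity bits of an 8-byte DES key and rejects
 * keys found on the weak/semi-weak list.  When corrected_key is non-NULL,
 * the parity-fixed key is written back for the caller.
 */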
static boolean_t
keycheck(uint8_t *key, uint8_t *corrected_key)
{
/* EXPORT DELETE START */
        uint64_t key_so_far;
        int i;
        static uint64_t des_weak_keys[] = {
                /* ... */
#ifdef _LITTLE_ENDIAN
                0xf10ef10ee01fe01fULL, 0x0ef10ef11fe01fe0ULL,
                0x01f101f101e001e0ULL, 0xf101f101e001e001ULL,
                0x0efe0efe1ffe1ffeULL, 0xfe0efe0efe1ffe1fULL,
                0x010e010e011f011fULL, 0x0e010e011f011f01ULL,
                0xf1fef1fee0fee0feULL, 0xfef1fef1fee0fee0ULL,
#else /* Big endian */
                0x1fe01fe00ef10ef1ULL, 0xe01fe01ff10ef10eULL,
                0x01e001e001f101f1ULL, 0xe001e001f101f101ULL,
                0x1ffe1ffe0efe0efeULL, 0xfe1ffe1ffe0efe0eULL,
                0x011f011f010e010eULL, 0x1f011f010e010e01ULL,
                0xe0fee0fef1fef1feULL, 0xfee0fee0fef1fef1ULL,
#endif /* _LITTLE_ENDIAN */

                /* We'll save the other possibly-weak keys for the future. */
        };

        if (key == NULL)
                return (B_FALSE);

#ifdef UNALIGNED_POINTERS_PERMITTED
        key_so_far = htonll(*(uint64_t *)(void *)&key[0]);
#else
        /*
         * The code below reverses the bytes on LITTLE_ENDIAN machines.
         * On BIG_ENDIAN, the same code copies the bytes without
         * reversing them.
         */
        key_so_far = (((uint64_t)key[0] << 56) | ((uint64_t)key[1] << 48) |
            ((uint64_t)key[2] << 40) | ((uint64_t)key[3] << 32) |
            ((uint64_t)key[4] << 24) | ((uint64_t)key[5] << 16) |
            ((uint64_t)key[6] << 8) | (uint64_t)key[7]);
#endif /* UNALIGNED_POINTERS_PERMITTED */

        /*
         * Fix parity.
         */
        fix_des_parity(&key_so_far);

        /* Do the weak-key check itself. */
        for (i = 0; i < (sizeof (des_weak_keys) / sizeof (uint64_t)); i++)
                if (key_so_far == des_weak_keys[i]) {
                        return (B_FALSE);
                }

        if (corrected_key != NULL) {
#ifdef UNALIGNED_POINTERS_PERMITTED
                *(uint64_t *)(void *)&corrected_key[0] = htonll(key_so_far);
#else
                /*
                 * The code below reverses the bytes on LITTLE_ENDIAN
                 * machines.  On BIG_ENDIAN, the same code copies the bytes
                 * without reversing them.
                 */
                corrected_key[0] = key_so_far >> 56;
                corrected_key[1] = key_so_far >> 48;
                corrected_key[2] = key_so_far >> 40;
                corrected_key[3] = key_so_far >> 32;
                corrected_key[4] = key_so_far >> 24;
                corrected_key[5] = key_so_far >> 16;
                corrected_key[6] = key_so_far >> 8;
                corrected_key[7] = (uint8_t)key_so_far;
#endif /* UNALIGNED_POINTERS_PERMITTED */
        }
/* EXPORT DELETE END */
        return (B_TRUE);
}
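
/*
 * Usage sketch (illustrative only; "rawkey" and "fixed" are hypothetical
 * buffers, not defined in this file):
 *
 *      uint8_t rawkey[DES_KEYSIZE], fixed[DES_KEYSIZE];
 *
 *      if (keycheck(rawkey, fixed))
 *              ... use the parity-corrected key in "fixed" ...
 *      else
 *              ... reject rawkey: it is NULL or a weak/semi-weak key ...
 */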

/* ... */

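/*
 * des_parity_fix() rewrites the DES parity bits of a single-, double-, or
 * triple-length key.  "strength" doubles as the number of 8-byte key parts
 * to process, and the result is written to corrected_key.
 */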
void
des_parity_fix(uint8_t *key, des_strength_t strength, uint8_t *corrected_key)
{
/* EXPORT DELETE START */
        uint64_t aligned_key[DES3_KEYSIZE / sizeof (uint64_t)];
        uint8_t *paritied_key;
        uint64_t key_so_far;
        int i = 0, offset = 0;

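        /*
         * Work on a local, uint64_t-aligned copy of the key; the
         * parity-corrected bytes are copied back out at the end.
         */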
        if (strength == DES)
                bcopy(key, aligned_key, DES_KEYSIZE);
        else
                bcopy(key, aligned_key, DES3_KEYSIZE);

        paritied_key = (uint8_t *)aligned_key;
        while (strength > i) {
                offset = 8 * i;
#ifdef UNALIGNED_POINTERS_PERMITTED
                key_so_far = htonll(*(uint64_t *)(void *)&paritied_key[offset]);
#else
                key_so_far = (((uint64_t)paritied_key[offset + 0] << 56) |
                    ((uint64_t)paritied_key[offset + 1] << 48) |
                    ((uint64_t)paritied_key[offset + 2] << 40) |
                    ((uint64_t)paritied_key[offset + 3] << 32) |
                    ((uint64_t)paritied_key[offset + 4] << 24) |
                    ((uint64_t)paritied_key[offset + 5] << 16) |
                    ((uint64_t)paritied_key[offset + 6] << 8) |
                    (uint64_t)paritied_key[offset + 7]);
#endif /* UNALIGNED_POINTERS_PERMITTED */

                fix_des_parity(&key_so_far);

#ifdef UNALIGNED_POINTERS_PERMITTED
                *(uint64_t *)(void *)&paritied_key[offset] = htonll(key_so_far);
#else
                paritied_key[offset + 0] = key_so_far >> 56;
                paritied_key[offset + 1] = key_so_far >> 48;
                paritied_key[offset + 2] = key_so_far >> 40;
                paritied_key[offset + 3] = key_so_far >> 32;
                paritied_key[offset + 4] = key_so_far >> 24;
                paritied_key[offset + 5] = key_so_far >> 16;
                paritied_key[offset + 6] = key_so_far >> 8;
                paritied_key[offset + 7] = (uint8_t)key_so_far;
#endif /* UNALIGNED_POINTERS_PERMITTED */

                i++;
        }

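        /*
         * Copy the corrected parts back to the caller; strength is also the
         * count of 8-byte parts, so DES_KEYSIZE * strength covers the key.
         */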
        bcopy(paritied_key, corrected_key, DES_KEYSIZE * strength);
/* EXPORT DELETE END */
}

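/*
 * Usage sketch (illustrative only; "rawkey" and "fixed" are hypothetical
 * buffers, not defined in this file):
 *
 *      uint8_t rawkey[DES_KEYSIZE], fixed[DES_KEYSIZE];
 *
 *      des_parity_fix(rawkey, DES, fixed);
 *
 * After the call, "fixed" holds the single-DES key with its per-byte parity
 * bits normalized; DES2/DES3 keys are handled the same way, one 8-byte part
 * at a time.
 */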

/* ... */
                encryption_ks = ((keysched3_t *)ks)->ksch_encrypt;
                decryption_ks = ((keysched3_t *)ks)->ksch_decrypt;
        }

        /*
         * The code below, which is always executed on LITTLE_ENDIAN machines,
         * reverses every 8 bytes of the key.  On BIG_ENDIAN, the same code
         * copies the key without reversing bytes.
         */
#ifdef _BIG_ENDIAN
        if (IS_P2ALIGNED(cipherKey, sizeof (uint64_t))) {
                for (i = 0, j = 0; j < keysize; i++, j += 8) {
                        /* LINTED: pointer alignment */
                        key_uint64[i] = *((uint64_t *)&cipherKey[j]);
                }
        } else
#endif /* _BIG_ENDIAN */
        {
                for (i = 0, j = 0; j < keysize; i++, j += 8) {
#ifdef UNALIGNED_POINTERS_PERMITTED
                        key_uint64[i] =
                            htonll(*(uint64_t *)(void *)&cipherKey[j]);
#else
                        key_uint64[i] = (((uint64_t)cipherKey[j] << 56) |
                            ((uint64_t)cipherKey[j + 1] << 48) |
                            ((uint64_t)cipherKey[j + 2] << 40) |
                            ((uint64_t)cipherKey[j + 3] << 32) |
                            ((uint64_t)cipherKey[j + 4] << 24) |
                            ((uint64_t)cipherKey[j + 5] << 16) |
                            ((uint64_t)cipherKey[j + 6] << 8) |
                            (uint64_t)cipherKey[j + 7]);
#endif /* UNALIGNED_POINTERS_PERMITTED */
                }
        }

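        /*
         * Expand the assembled 64-bit key word(s) into the key schedule for
         * the requested strength.
         */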
        switch (strength) {
        case DES:
                des_ks(keysched, key_uint64[0]);
                break;

        case DES2:
                /* DES2 is just DES3 with the first and third keys the same */