6717509 Need to use bswap/bswapq for byte swap of 64-bit integer on x32/x64 (fix lint)
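
For context: the synopsis refers to the x86 byte-swap instruction, which
reverses a 64-bit integer in a single bswapq on x64 (on 32-bit x86 it takes
two 32-bit bswaps plus an exchange of the halves).  GCC and clang expose the
operation as __builtin_bswap64().  A minimal sketch, outside this change,
with swap64 as a hypothetical name:

	#include <stdint.h>

	static inline uint64_t
	swap64(uint64_t v)
	{
	#if defined(__GNUC__)
		/* builtin; compiles to a single bswapq on x64 */
		return (__builtin_bswap64(v));
	#else
		/* portable shift-based equivalent */
		return (((v & 0xff00000000000000ULL) >> 56) |
		    ((v & 0x00ff000000000000ULL) >> 40) |
		    ((v & 0x0000ff0000000000ULL) >> 24) |
		    ((v & 0x000000ff00000000ULL) >> 8) |
		    ((v & 0x00000000ff000000ULL) << 8) |
		    ((v & 0x0000000000ff0000ULL) << 24) |
		    ((v & 0x000000000000ff00ULL) << 40) |
		    ((v & 0x00000000000000ffULL) << 56));
	#endif
	}

The original code follows; the revised version, with the lint fixes applied,
appears after it.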


1471          * For _LITTLE_ENDIAN machines (except AMD64), reverse every
1472          * 4 bytes in the key.  On _BIG_ENDIAN and AMD64, copy the key
1473          * without reversing bytes; the AMD64 aes_setupkeys() expects
1474          * the key in native byte order, so no swap is done for it.
1475          *
1476          * SPARCv8/v9 uses a key schedule array with 64-bit elements.
1477          * X86/AMD64  uses a key schedule array with 32-bit elements.
1478          */
1479 #ifndef AES_BYTE_SWAP
1480         if (IS_P2ALIGNED(cipherKey, sizeof (uint64_t))) {
1481                 for (i = 0, j = 0; j < keysize; i++, j += 8) {
1482                         /* LINTED: pointer alignment */
1483                         keyarr.ka64[i] = *((uint64_t *)&cipherKey[j]);
1484                 }
1485         } else {
1486                 bcopy(cipherKey, keyarr.ka32, keysize);
1487         }
1488 
1489 #else   /* byte swap */
1490         for (i = 0, j = 0; j < keysize; i++, j += 4) {
1491                 keyarr.ka32[i] = htonl(*(uint32_t *)&cipherKey[j]);
1492         }
1493 #endif
1494 
1495         aes_setupkeys(newbie, keyarr.ka32, keyBits);
1496 /* EXPORT DELETE END */
1497 }
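
For reference, IS_P2ALIGNED() above is the sys/sysmacros.h power-of-two
helper that masks a pointer against the alignment minus one; IS_P2ALIGNED2(),
used by the block routines below, presumably ORs two pointers together so a
single mask test covers both.  A sketch under those assumptions:

	#include <sys/types.h>	/* uintptr_t */

	#define	IS_P2ALIGNED(v, a)	\
		((((uintptr_t)(v)) & ((uintptr_t)(a) - 1)) == 0)
	#define	IS_P2ALIGNED2(v, w, a)	\
		((((uintptr_t)(v) | (uintptr_t)(w)) & ((uintptr_t)(a) - 1)) == 0)

So the 64-bit copy loop runs only when cipherKey is 8-byte aligned, and the
direct AES_ENCRYPT_IMPL/AES_DECRYPT_IMPL paths below run only when both
blocks are 4-byte aligned; otherwise the code falls back to bounce-buffer
copies.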
1498 
1499 /*
1500  * Encrypt one block using AES.
1501  * Align if needed and (for x86 32-bit only) byte-swap.
1502  *
1503  * Parameters:
1504  * ks   Key schedule, of type aes_key_t
1505  * pt   Input block (plain text)
1506  * ct   Output block (crypto text).  Can overlap with pt
1507  */
1508 int
1509 aes_encrypt_block(const void *ks, const uint8_t *pt, uint8_t *ct)
1510 {
1511 /* EXPORT DELETE START */
1512         aes_key_t       *ksch = (aes_key_t *)ks;
1513 
1514 #ifndef AES_BYTE_SWAP
1515         if (IS_P2ALIGNED2(pt, ct, sizeof (uint32_t))) {
1516                 AES_ENCRYPT_IMPL(&ksch->encr_ks.ks32[0], ksch->nr,
1517                     /* LINTED:  pointer alignment */
1518                     (uint32_t *)pt, (uint32_t *)ct);
1519         } else {
1520 #endif
1521                 uint32_t buffer[AES_BLOCK_LEN / sizeof (uint32_t)];
1522 
1523                 /* Copy input block into buffer */
1524 #ifndef AES_BYTE_SWAP
1525                 bcopy(pt, &buffer, AES_BLOCK_LEN);
1526 
1527 #else   /* byte swap */
1528                 buffer[0] = htonl(*(uint32_t *)&pt[0]);
1529                 buffer[1] = htonl(*(uint32_t *)&pt[4]);
1530                 buffer[2] = htonl(*(uint32_t *)&pt[8]);
1531                 buffer[3] = htonl(*(uint32_t *)&pt[12]);
1532 #endif
1533 
1534                 AES_ENCRYPT_IMPL(&ksch->encr_ks.ks32[0], ksch->nr,
1535                     buffer, buffer);
1536 
1537                 /* Copy result from buffer to output block */
1538 #ifndef AES_BYTE_SWAP
1539                 bcopy(&buffer, ct, AES_BLOCK_LEN);
1540         }
1541 
1542 #else   /* byte swap */
1543                 *(uint32_t *)&ct[0] = htonl(buffer[0]);
1544                 *(uint32_t *)&ct[4] = htonl(buffer[1]);
1545                 *(uint32_t *)&ct[8] = htonl(buffer[2]);
1546                 *(uint32_t *)&ct[12] = htonl(buffer[3]);
1547 #endif
1548 /* EXPORT DELETE END */
1549         return (CRYPTO_SUCCESS);
1550 }
1551 
1552 /*
1553  * Decrypt one block using AES.
1554  * Align if needed and (for x86 32-bit only) byte-swap.
1555  *
1556  * Parameters:
1557  * ks   Key schedule, of type aes_key_t
1558  * ct   Input block (crypto text)
1559  * pt   Output block (plain text).  Can overlap with ct
1560  */
1561 int
1562 aes_decrypt_block(const void *ks, const uint8_t *ct, uint8_t *pt)
1563 {
1564 /* EXPORT DELETE START */
1565         aes_key_t       *ksch = (aes_key_t *)ks;
1566 
1567 #ifndef AES_BYTE_SWAP
1568         if (IS_P2ALIGNED2(ct, pt, sizeof (uint32_t))) {
1569                 AES_DECRYPT_IMPL(&ksch->decr_ks.ks32[0], ksch->nr,
1570                     /* LINTED:  pointer alignment */
1571                     (uint32_t *)ct, (uint32_t *)pt);
1572         } else {
1573 #endif
1574                 uint32_t buffer[AES_BLOCK_LEN / sizeof (uint32_t)];
1575 
1576                 /* Copy input block into buffer */
1577 #ifndef AES_BYTE_SWAP
1578                 bcopy(ct, &buffer, AES_BLOCK_LEN);
1579 
1580 #else   /* byte swap */
1581                 buffer[0] = htonl(*(uint32_t *)&ct[0]);
1582                 buffer[1] = htonl(*(uint32_t *)&ct[4]);
1583                 buffer[2] = htonl(*(uint32_t *)&ct[8]);
1584                 buffer[3] = htonl(*(uint32_t *)&ct[12]);
1585 #endif
1586 
1587                 AES_DECRYPT_IMPL(&ksch->decr_ks.ks32[0], ksch->nr,
1588                     buffer, buffer);
1589 
1590                 /* Copy result from buffer to output block */
1591 #ifndef AES_BYTE_SWAP
1592                 bcopy(&buffer, pt, AES_BLOCK_LEN);
1593         }
1594 
1595 #else   /* byte swap */
1596                 *(uint32_t *)&pt[0] = htonl(buffer[0]);
1597                 *(uint32_t *)&pt[4] = htonl(buffer[1]);
1598                 *(uint32_t *)&pt[8] = htonl(buffer[2]);
1599                 *(uint32_t *)&pt[12] = htonl(buffer[3]);
1600 #endif
1601 
1602 /* EXPORT DELETE END */
1603         return (CRYPTO_SUCCESS);
1604 }
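
Taken together, a hypothetical caller of these routines looks like the
sketch below.  The aes_init_keysched() signature is assumed from aes_impl.h
(it is not shown in this hunk), and example_roundtrip is an illustrative
name only:

	void
	example_roundtrip(const uint8_t key[16], const uint8_t pt[16])
	{
		size_t size;
		uint8_t ct[16], out[16];
		void *ks = aes_alloc_keysched(&size, 0); /* kmflag: ignored in userland */

		if (ks == NULL)
			return;
		aes_init_keysched(key, 128, ks);	/* 128-bit key */
		(void) aes_encrypt_block(ks, pt, ct);
		(void) aes_decrypt_block(ks, ct, out);	/* out matches pt */
		/* free the schedule (free()/kmem_free()) when done */
	}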
1605 
1606 
1607 /*
1608  * Allocate key schedule for AES.
1609  *
1610  * Return the pointer and set size to the number of bytes allocated.
1611  * Memory allocated must be freed by the caller when done.
1612  *
1613  * Parameters:
1614  * size         Size of key schedule allocated, in bytes
1615  * kmflag       Flag passed to kmem_alloc(9F); ignored in userland.
1616  */
1617 /* ARGSUSED */
1618 void *
1619 aes_alloc_keysched(size_t *size, int kmflag)


The revised version of the same hunk, with the lint fixes applied, follows.

1471          * For _LITTLE_ENDIAN machines (except AMD64), reverse every
1472          * 4 bytes in the key.  On _BIG_ENDIAN and AMD64, copy the key
1473          * without reversing bytes; the AMD64 aes_setupkeys() expects
1474          * the key in native byte order, so no swap is done for it.
1475          *
1476          * SPARCv8/v9 uses a key schedule array with 64-bit elements.
1477          * X86/AMD64  uses a key schedule array with 32-bit elements.
1478          */
1479 #ifndef AES_BYTE_SWAP
1480         if (IS_P2ALIGNED(cipherKey, sizeof (uint64_t))) {
1481                 for (i = 0, j = 0; j < keysize; i++, j += 8) {
1482                         /* LINTED: pointer alignment */
1483                         keyarr.ka64[i] = *((uint64_t *)&cipherKey[j]);
1484                 }
1485         } else {
1486                 bcopy(cipherKey, keyarr.ka32, keysize);
1487         }
1488 
1489 #else   /* byte swap */
1490         for (i = 0, j = 0; j < keysize; i++, j += 4) {
1491                 keyarr.ka32[i] = htonl(*(uint32_t *)(void *)&cipherKey[j]);
1492         }
1493 #endif
1494 
1495         aes_setupkeys(newbie, keyarr.ka32, keyBits);
1496 /* EXPORT DELETE END */
1497 }
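
The substantive change in this revision is visible above: every 32-bit load
of key material through a uint8_t pointer now casts through (void *) first.
Sun lint flags a direct (uint32_t *) cast of a byte pointer as a possible
alignment violation; routing the cast through (void *) marks the conversion
as deliberate, silences the warning, and changes no generated code.  The
pattern in isolation, given some uint8_t *p:

	uint32_t w;

	w = htonl(*(uint32_t *)&p[4]);		/* lint: improper alignment */
	w = htonl(*(uint32_t *)(void *)&p[4]);	/* clean */

The same treatment is applied to the block loads and stores in
aes_encrypt_block() and aes_decrypt_block() below.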
1498 
1499 /*
1500  * Encrypt one block using AES.
1501  * Align if needed and (for x86 32-bit only) byte-swap.
1502  *
1503  * Parameters:
1504  * ks   Key schedule, of type aes_key_t
1505  * pt   Input block (plain text)
1506  * ct   Output block (crypto text).  Can overlap with pt
1507  */
1508 int
1509 aes_encrypt_block(const void *ks, const uint8_t *pt, uint8_t *ct)
1510 {
1511 /* EXPORT DELETE START */
1512         aes_key_t       *ksch = (aes_key_t *)ks;
1513 
1514 #ifndef AES_BYTE_SWAP
1515         if (IS_P2ALIGNED2(pt, ct, sizeof (uint32_t))) {
1516                 AES_ENCRYPT_IMPL(&ksch->encr_ks.ks32[0], ksch->nr,
1517                     /* LINTED:  pointer alignment */
1518                     (uint32_t *)pt, (uint32_t *)ct);
1519         } else {
1520 #endif
1521                 uint32_t buffer[AES_BLOCK_LEN / sizeof (uint32_t)];
1522 
1523                 /* Copy input block into buffer */
1524 #ifndef AES_BYTE_SWAP
1525                 bcopy(pt, &buffer, AES_BLOCK_LEN);
1526 
1527 #else   /* byte swap */
1528                 buffer[0] = htonl(*(uint32_t *)(void *)&pt[0]);
1529                 buffer[1] = htonl(*(uint32_t *)(void *)&pt[4]);
1530                 buffer[2] = htonl(*(uint32_t *)(void *)&pt[8]);
1531                 buffer[3] = htonl(*(uint32_t *)(void *)&pt[12]);
1532 #endif
1533 
1534                 AES_ENCRYPT_IMPL(&ksch->encr_ks.ks32[0], ksch->nr,
1535                     buffer, buffer);
1536 
1537                 /* Copy result from buffer to output block */
1538 #ifndef AES_BYTE_SWAP
1539                 bcopy(&buffer, ct, AES_BLOCK_LEN);
1540         }
1541 
1542 #else   /* byte swap */
1543                 *(uint32_t *)(void *)&ct[0] = htonl(buffer[0]);
1544                 *(uint32_t *)(void *)&ct[4] = htonl(buffer[1]);
1545                 *(uint32_t *)(void *)&ct[8] = htonl(buffer[2]);
1546                 *(uint32_t *)(void *)&ct[12] = htonl(buffer[3]);
1547 #endif
1548 /* EXPORT DELETE END */
1549         return (CRYPTO_SUCCESS);
1550 }
1551 
1552 /*
1553  * Decrypt one block using AES.
1554  * Align if needed and (for x86 32-bit only) byte-swap.
1555  *
1556  * Parameters:
1557  * ks   Key schedule, of type aes_key_t
1558  * ct   Input block (crypto text)
1559  * pt   Output block (plain text).  Can overlap with ct
1560  */
1561 int
1562 aes_decrypt_block(const void *ks, const uint8_t *ct, uint8_t *pt)
1563 {
1564 /* EXPORT DELETE START */
1565         aes_key_t       *ksch = (aes_key_t *)ks;
1566 
1567 #ifndef AES_BYTE_SWAP
1568         if (IS_P2ALIGNED2(ct, pt, sizeof (uint32_t))) {
1569                 AES_DECRYPT_IMPL(&ksch->decr_ks.ks32[0], ksch->nr,
1570                     /* LINTED:  pointer alignment */
1571                     (uint32_t *)ct, (uint32_t *)pt);
1572         } else {
1573 #endif
1574                 uint32_t buffer[AES_BLOCK_LEN / sizeof (uint32_t)];
1575 
1576                 /* Copy input block into buffer */
1577 #ifndef AES_BYTE_SWAP
1578                 bcopy(ct, &buffer, AES_BLOCK_LEN);
1579 
1580 #else   /* byte swap */
1581                 buffer[0] = htonl(*(uint32_t *)(void *)&ct[0]);
1582                 buffer[1] = htonl(*(uint32_t *)(void *)&ct[4]);
1583                 buffer[2] = htonl(*(uint32_t *)(void *)&ct[8]);
1584                 buffer[3] = htonl(*(uint32_t *)(void *)&ct[12]);
1585 #endif
1586 
1587                 AES_DECRYPT_IMPL(&ksch->decr_ks.ks32[0], ksch->nr,
1588                     buffer, buffer);
1589 
1590                 /* Copy result from buffer to output block */
1591 #ifndef AES_BYTE_SWAP
1592                 bcopy(&buffer, pt, AES_BLOCK_LEN);
1593         }
1594 
1595 #else   /* byte swap */
1596                 *(uint32_t *)(void *)&pt[0] = htonl(buffer[0]);
1597                 *(uint32_t *)(void *)&pt[4] = htonl(buffer[1]);
1598                 *(uint32_t *)(void *)&pt[8] = htonl(buffer[2]);
1599                 *(uint32_t *)(void *)&pt[12] = htonl(buffer[3]);
1600 #endif
1601 
1602 /* EXPORT DELETE END */
1603         return (CRYPTO_SUCCESS);
1604 }
1605 
1606 
1607 /*
1608  * Allocate key schedule for AES.
1609  *
1610  * Return the pointer and set size to the number of bytes allocated.
1611  * Memory allocated must be freed by the caller when done.
1612  *
1613  * Parameters:
1614  * size         Size of key schedule allocated, in bytes
1615  * kmflag       Flag passed to kmem_alloc(9F); ignored in userland.
1616  */
1617 /* ARGSUSED */
1618 void *
1619 aes_alloc_keysched(size_t *size, int kmflag)
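
The listing ends at the aes_alloc_keysched() signature.  Given its block
comment (allocate the schedule, report the size, caller frees, kmflag
forwarded to kmem_alloc(9F) in the kernel and ignored in userland), a
plausible body is the usual kernel/userland allocation pattern; this is a
sketch under those assumptions, not the file's actual source:

	void *
	aes_alloc_keysched(size_t *size, int kmflag)
	{
		aes_key_t *keysched;

	#ifdef	_KERNEL
		keysched = kmem_alloc(sizeof (aes_key_t), kmflag);
	#else
		keysched = malloc(sizeof (aes_key_t));
	#endif
		if (keysched != NULL) {
			*size = sizeof (aes_key_t);
			return (keysched);
		}
		return (NULL);
	}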