/*
 * NOTE(review): incomplete excerpt.  The enclosing function's signature
 * (apparently blowfish_encrypt_block(), per the comment on the following
 * function) lies above this view, and the embedded original line numbers
 * jump 423 -> 430, so the lines invoking rounds 8..13 are absent from this
 * excerpt (not necessarily from the real file).  Code left byte-identical;
 * comments only.
 */
382 keysched_t *ksch = (keysched_t *)cookie;
383
384 uint32_t left, right, tmp;
/* Subkey array from the expanded key schedule; P[17] is the highest index used below. */
385 uint32_t *P = ksch->ksch_P;
/* S-boxes — presumably consumed inside the ROUND() macro (definition not visible here; TODO confirm). */
386 uint32_t *S = ksch->ksch_S;
387 #ifdef _BIG_ENDIAN
388 uint32_t *b32;
389
/* Fast path (big-endian builds only): 4-byte-aligned input loads as two native words. */
390 if (IS_P2ALIGNED(block, sizeof (uint32_t))) {
391 /* LINTED: pointer alignment */
392 b32 = (uint32_t *)block;
393 left = b32[0];
394 right = b32[1];
395 } else
396 #endif
397 {
398 /*
399 * Read input block and place in left/right in big-endian order.
400 */
401 #ifdef UNALIGNED_POINTERS_PERMITTED
402 left = htonl(*(uint32_t *)&block[0]);
403 right = htonl(*(uint32_t *)&block[4]);
404 #else
/* Byte-at-a-time assembly: correct regardless of alignment or host endianness. */
405 left = ((uint32_t)block[0] << 24)
406 | ((uint32_t)block[1] << 16)
407 | ((uint32_t)block[2] << 8)
408 | (uint32_t)block[3];
409 right = ((uint32_t)block[4] << 24)
410 | ((uint32_t)block[5] << 16)
411 | ((uint32_t)block[6] << 8)
412 | (uint32_t)block[7];
413 #endif /* UNALIGNED_POINTERS_PERMITTED */
414 }
415
/*
 * Encryption rounds, subkey indices ascending (0, 1, ... 15).
 * NOTE(review): rounds 8..13 fall inside the 423 -> 430 numbering gap
 * of this excerpt.
 */
416 ROUND(left, right, 0);
417 ROUND(left, right, 1);
418 ROUND(left, right, 2);
419 ROUND(left, right, 3);
420 ROUND(left, right, 4);
421 ROUND(left, right, 5);
422 ROUND(left, right, 6);
423 ROUND(left, right, 7);
430 ROUND(left, right, 14);
431 ROUND(left, right, 15);
432
/* Undo the last round's half-swap, then XOR in the final two subkeys. */
433 tmp = left;
434 left = right;
435 right = tmp;
436 right ^= P[16];
437 left ^= P[17];
438
439 #ifdef _BIG_ENDIAN
/* Aligned fast path for the output block, mirroring the input path above. */
440 if (IS_P2ALIGNED(out_block, sizeof (uint32_t))) {
441 /* LINTED: pointer alignment */
442 b32 = (uint32_t *)out_block;
443 b32[0] = left;
444 b32[1] = right;
445 } else
446 #endif
447 {
448 /* Put the block back into the user's block with final swap */
449 #ifdef UNALIGNED_POINTERS_PERMITTED
450 *(uint32_t *)&out_block[0] = htonl(left);
451 *(uint32_t *)&out_block[4] = htonl(right);
452 #else
453 out_block[0] = left >> 24;
454 out_block[1] = left >> 16;
455 out_block[2] = left >> 8;
456 out_block[3] = left;
457 out_block[4] = right >> 24;
458 out_block[5] = right >> 16;
459 out_block[6] = right >> 8;
460 out_block[7] = right;
461 #endif /* UNALIGNED_POINTERS_PERMITTED */
462 }
463 /* EXPORT DELETE END */
464 return (CRYPTO_SUCCESS);
465 }
466
467 /*
468 * Decrypt a block of data. Because of addition operations, convert blocks
469 * to their big-endian representation, even on Intel boxen.
470 * It should look like the blowfish_encrypt_block() operation
471 * except for the order in which the S/P boxes are accessed.
 *
 * NOTE(review): the embedded numbering jumps 471 -> 479 here — the end of
 * this comment and the function's signature are missing from this excerpt;
 * the comment is closed here so the fragment below is not swallowed by it.
 */
/*
 * NOTE(review): incomplete excerpt — the signature of this decrypt
 * function sits in the 471 -> 479 numbering gap above, and the
 * 520 -> 527 gap below omits the lines invoking rounds 9..4.
 * Code left byte-identical; comments only.
 */
479 keysched_t *ksch = (keysched_t *)cookie;
480
481 uint32_t left, right, tmp;
/* Subkey array from the expanded key schedule; consumed in DESCENDING order for decryption. */
482 uint32_t *P = ksch->ksch_P;
/* S-boxes — presumably consumed inside the ROUND() macro (definition not visible here; TODO confirm). */
483 uint32_t *S = ksch->ksch_S;
484 #ifdef _BIG_ENDIAN
485 uint32_t *b32;
486
/* Fast path (big-endian builds only): 4-byte-aligned input loads as two native words. */
487 if (IS_P2ALIGNED(block, sizeof (uint32_t))) {
488 /* LINTED: pointer alignment */
489 b32 = (uint32_t *)block;
490 left = b32[0];
491 right = b32[1];
492 } else
493 #endif
494 {
495 /*
496 * Read input block and place in left/right in big-endian order.
497 */
498 #ifdef UNALIGNED_POINTERS_PERMITTED
499 left = htonl(*(uint32_t *)&block[0]);
500 right = htonl(*(uint32_t *)&block[4]);
501 #else
/* Byte-at-a-time assembly: correct regardless of alignment or host endianness. */
502 left = ((uint32_t)block[0] << 24)
503 | ((uint32_t)block[1] << 16)
504 | ((uint32_t)block[2] << 8)
505 | (uint32_t)block[3];
506 right = ((uint32_t)block[4] << 24)
507 | ((uint32_t)block[5] << 16)
508 | ((uint32_t)block[6] << 8)
509 | (uint32_t)block[7];
510 #endif /* UNALIGNED_POINTERS_PERMITTED */
511 }
512
/*
 * Decryption rounds: same structure as encryption, but the subkey
 * indices DESCEND (17, 16, ... 2), inverting the encrypt pass.
 * NOTE(review): rounds 9..4 fall inside the 520 -> 527 numbering gap
 * of this excerpt.
 */
513 ROUND(left, right, 17);
514 ROUND(left, right, 16);
515 ROUND(left, right, 15);
516 ROUND(left, right, 14);
517 ROUND(left, right, 13);
518 ROUND(left, right, 12);
519 ROUND(left, right, 11);
520 ROUND(left, right, 10);
527 ROUND(left, right, 3);
528 ROUND(left, right, 2);
529
/* Undo the last round's half-swap, then XOR in the first two subkeys (mirror of encrypt's P[16]/P[17]). */
530 tmp = left;
531 left = right;
532 right = tmp;
533 right ^= P[1];
534 left ^= P[0];
535
536 #ifdef _BIG_ENDIAN
/* Aligned fast path for the output block, mirroring the input path above. */
537 if (IS_P2ALIGNED(out_block, sizeof (uint32_t))) {
538 /* LINTED: pointer alignment */
539 b32 = (uint32_t *)out_block;
540 b32[0] = left;
541 b32[1] = right;
542 } else
543 #endif
544 {
545 /* Put the block back into the user's block with final swap */
546 #ifdef UNALIGNED_POINTERS_PERMITTED
547 *(uint32_t *)&out_block[0] = htonl(left);
548 *(uint32_t *)&out_block[4] = htonl(right);
549 #else
550 out_block[0] = left >> 24;
551 out_block[1] = left >> 16;
552 out_block[2] = left >> 8;
553 out_block[3] = left;
554 out_block[4] = right >> 24;
555 out_block[5] = right >> 16;
556 out_block[6] = right >> 8;
557 out_block[7] = right;
558 #endif /* UNALIGNED_POINTERS_PERMITTED */
559 }
560 /* EXPORT DELETE END */
561 return (CRYPTO_SUCCESS);
562 }
563
564 static void
565 bitrepeat(uint8_t *pattern, uint_t len_bytes, uint_t len_bits, uint8_t *dst,
566 uint_t dst_len_bytes)
567 {
568 /* EXPORT DELETE START */
|
/*
 * NOTE(review): this is the SECOND copy of the encrypt fragment in this
 * chunk (after the '|' divider).  It is byte-identical to the first copy
 * except that the unaligned-access casts at embedded lines 402/403 and
 * 450/451 insert an intermediate (void *) — presumably the lint-clean
 * revision of the same code; confirm against repository history.  As with
 * the first copy, the function signature and rounds 8..13 (423 -> 430
 * gap) are missing from the excerpt.  Code left byte-identical.
 */
382 keysched_t *ksch = (keysched_t *)cookie;
383
384 uint32_t left, right, tmp;
/* Subkey array from the expanded key schedule; P[17] is the highest index used below. */
385 uint32_t *P = ksch->ksch_P;
/* S-boxes — presumably consumed inside the ROUND() macro (definition not visible here; TODO confirm). */
386 uint32_t *S = ksch->ksch_S;
387 #ifdef _BIG_ENDIAN
388 uint32_t *b32;
389
/* Fast path (big-endian builds only): 4-byte-aligned input loads as two native words. */
390 if (IS_P2ALIGNED(block, sizeof (uint32_t))) {
391 /* LINTED: pointer alignment */
392 b32 = (uint32_t *)block;
393 left = b32[0];
394 right = b32[1];
395 } else
396 #endif
397 {
398 /*
399 * Read input block and place in left/right in big-endian order.
400 */
401 #ifdef UNALIGNED_POINTERS_PERMITTED
402 left = htonl(*(uint32_t *)(void *)&block[0]);
403 right = htonl(*(uint32_t *)(void *)&block[4]);
404 #else
/* Byte-at-a-time assembly: correct regardless of alignment or host endianness. */
405 left = ((uint32_t)block[0] << 24)
406 | ((uint32_t)block[1] << 16)
407 | ((uint32_t)block[2] << 8)
408 | (uint32_t)block[3];
409 right = ((uint32_t)block[4] << 24)
410 | ((uint32_t)block[5] << 16)
411 | ((uint32_t)block[6] << 8)
412 | (uint32_t)block[7];
413 #endif /* UNALIGNED_POINTERS_PERMITTED */
414 }
415
/*
 * Encryption rounds, subkey indices ascending (0, 1, ... 15).
 * NOTE(review): rounds 8..13 fall inside the 423 -> 430 numbering gap.
 */
416 ROUND(left, right, 0);
417 ROUND(left, right, 1);
418 ROUND(left, right, 2);
419 ROUND(left, right, 3);
420 ROUND(left, right, 4);
421 ROUND(left, right, 5);
422 ROUND(left, right, 6);
423 ROUND(left, right, 7);
430 ROUND(left, right, 14);
431 ROUND(left, right, 15);
432
/* Undo the last round's half-swap, then XOR in the final two subkeys. */
433 tmp = left;
434 left = right;
435 right = tmp;
436 right ^= P[16];
437 left ^= P[17];
438
439 #ifdef _BIG_ENDIAN
/* Aligned fast path for the output block, mirroring the input path above. */
440 if (IS_P2ALIGNED(out_block, sizeof (uint32_t))) {
441 /* LINTED: pointer alignment */
442 b32 = (uint32_t *)out_block;
443 b32[0] = left;
444 b32[1] = right;
445 } else
446 #endif
447 {
448 /* Put the block back into the user's block with final swap */
449 #ifdef UNALIGNED_POINTERS_PERMITTED
450 *(uint32_t *)(void *)&out_block[0] = htonl(left);
451 *(uint32_t *)(void *)&out_block[4] = htonl(right);
452 #else
453 out_block[0] = left >> 24;
454 out_block[1] = left >> 16;
455 out_block[2] = left >> 8;
456 out_block[3] = left;
457 out_block[4] = right >> 24;
458 out_block[5] = right >> 16;
459 out_block[6] = right >> 8;
460 out_block[7] = right;
461 #endif /* UNALIGNED_POINTERS_PERMITTED */
462 }
463 /* EXPORT DELETE END */
464 return (CRYPTO_SUCCESS);
465 }
466
467 /*
468 * Decrypt a block of data. Because of addition operations, convert blocks
469 * to their big-endian representation, even on Intel boxen.
470 * It should look like the blowfish_encrypt_block() operation
471 * except for the order in which the S/P boxes are accessed.
 *
 * NOTE(review): the embedded numbering jumps 471 -> 479 here — the end of
 * this comment and the function's signature are missing from this excerpt;
 * the comment is closed here so the fragment below is not swallowed by it.
 */
/*
 * NOTE(review): SECOND copy of the decrypt fragment (after the '|'
 * divider).  Identical to the first copy except the unaligned-access
 * casts at embedded lines 499/500 and 547/548 insert an intermediate
 * (void *) — presumably the lint-clean revision; confirm against
 * repository history.  The function signature (471 -> 479 gap) and
 * rounds 9..4 (520 -> 527 gap) are missing from the excerpt.
 * Code left byte-identical; comments only.
 */
479 keysched_t *ksch = (keysched_t *)cookie;
480
481 uint32_t left, right, tmp;
/* Subkey array from the expanded key schedule; consumed in DESCENDING order for decryption. */
482 uint32_t *P = ksch->ksch_P;
/* S-boxes — presumably consumed inside the ROUND() macro (definition not visible here; TODO confirm). */
483 uint32_t *S = ksch->ksch_S;
484 #ifdef _BIG_ENDIAN
485 uint32_t *b32;
486
/* Fast path (big-endian builds only): 4-byte-aligned input loads as two native words. */
487 if (IS_P2ALIGNED(block, sizeof (uint32_t))) {
488 /* LINTED: pointer alignment */
489 b32 = (uint32_t *)block;
490 left = b32[0];
491 right = b32[1];
492 } else
493 #endif
494 {
495 /*
496 * Read input block and place in left/right in big-endian order.
497 */
498 #ifdef UNALIGNED_POINTERS_PERMITTED
499 left = htonl(*(uint32_t *)(void *)&block[0]);
500 right = htonl(*(uint32_t *)(void *)&block[4]);
501 #else
/* Byte-at-a-time assembly: correct regardless of alignment or host endianness. */
502 left = ((uint32_t)block[0] << 24)
503 | ((uint32_t)block[1] << 16)
504 | ((uint32_t)block[2] << 8)
505 | (uint32_t)block[3];
506 right = ((uint32_t)block[4] << 24)
507 | ((uint32_t)block[5] << 16)
508 | ((uint32_t)block[6] << 8)
509 | (uint32_t)block[7];
510 #endif /* UNALIGNED_POINTERS_PERMITTED */
511 }
512
/*
 * Decryption rounds: same structure as encryption, but the subkey
 * indices DESCEND (17, 16, ... 2), inverting the encrypt pass.
 * NOTE(review): rounds 9..4 fall inside the 520 -> 527 numbering gap.
 */
513 ROUND(left, right, 17);
514 ROUND(left, right, 16);
515 ROUND(left, right, 15);
516 ROUND(left, right, 14);
517 ROUND(left, right, 13);
518 ROUND(left, right, 12);
519 ROUND(left, right, 11);
520 ROUND(left, right, 10);
527 ROUND(left, right, 3);
528 ROUND(left, right, 2);
529
/* Undo the last round's half-swap, then XOR in the first two subkeys (mirror of encrypt's P[16]/P[17]). */
530 tmp = left;
531 left = right;
532 right = tmp;
533 right ^= P[1];
534 left ^= P[0];
535
536 #ifdef _BIG_ENDIAN
/* Aligned fast path for the output block, mirroring the input path above. */
537 if (IS_P2ALIGNED(out_block, sizeof (uint32_t))) {
538 /* LINTED: pointer alignment */
539 b32 = (uint32_t *)out_block;
540 b32[0] = left;
541 b32[1] = right;
542 } else
543 #endif
544 {
545 /* Put the block back into the user's block with final swap */
546 #ifdef UNALIGNED_POINTERS_PERMITTED
547 *(uint32_t *)(void *)&out_block[0] = htonl(left);
548 *(uint32_t *)(void *)&out_block[4] = htonl(right);
549 #else
550 out_block[0] = left >> 24;
551 out_block[1] = left >> 16;
552 out_block[2] = left >> 8;
553 out_block[3] = left;
554 out_block[4] = right >> 24;
555 out_block[5] = right >> 16;
556 out_block[6] = right >> 8;
557 out_block[7] = right;
558 #endif /* UNALIGNED_POINTERS_PERMITTED */
559 }
560 /* EXPORT DELETE END */
561 return (CRYPTO_SUCCESS);
562 }
563
564 static void
565 bitrepeat(uint8_t *pattern, uint_t len_bytes, uint_t len_bits, uint8_t *dst,
566 uint_t dst_len_bytes)
567 {
568 /* EXPORT DELETE START */
|