2024-02-16 13:15:14 -05:00
|
|
|
//! Crypto Accelerator (CRYP)
|
2024-02-29 19:09:44 -05:00
|
|
|
#[cfg(any(cryp_v2, cryp_v3))]
|
2024-02-20 11:54:39 -05:00
|
|
|
use core::cmp::min;
|
2024-02-18 21:40:18 -05:00
|
|
|
use core::marker::PhantomData;
|
2024-03-12 12:01:14 -04:00
|
|
|
use core::ptr;
|
2024-02-24 16:14:44 -05:00
|
|
|
|
2024-02-13 10:11:54 -05:00
|
|
|
use embassy_hal_internal::{into_ref, PeripheralRef};
|
2024-03-05 11:25:56 -05:00
|
|
|
use embassy_sync::waitqueue::AtomicWaker;
|
2024-02-13 10:11:54 -05:00
|
|
|
|
2024-03-12 12:01:14 -04:00
|
|
|
use crate::dma::{NoDma, Priority, Transfer, TransferOptions};
|
2024-03-05 11:25:56 -05:00
|
|
|
use crate::interrupt::typelevel::Interrupt;
|
2024-03-12 12:01:14 -04:00
|
|
|
use crate::{interrupt, pac, peripherals, Peripheral};
|
2024-02-14 20:24:52 -05:00
|
|
|
|
|
|
|
/// DES/TDES processing block size in bytes.
const DES_BLOCK_SIZE: usize = 8; // 64 bits

/// AES processing block size in bytes.
const AES_BLOCK_SIZE: usize = 16; // 128 bits

// Waker shared by the input- and output-FIFO interrupt sources; the
// interrupt handler wakes it whenever either FIFO raises an interrupt.
static CRYP_WAKER: AtomicWaker = AtomicWaker::new();
|
|
|
|
|
|
|
|
/// CRYP interrupt handler.
///
/// Bind this to the CRYP interrupt so DMA-driven transfers can be
/// awaited; it only wakes `CRYP_WAKER`, doing no processing itself.
pub struct InterruptHandler<T: Instance> {
    // Zero-sized marker tying the handler to a specific CRYP instance.
    _phantom: PhantomData<T>,
}
|
|
|
|
|
|
|
|
impl<T: Instance> interrupt::typelevel::Handler<T::Interrupt> for InterruptHandler<T> {
    unsafe fn on_interrupt() {
        // Read the *masked* interrupt status to see which FIFO fired.
        let bits = T::regs().misr().read();
        if bits.inmis() {
            // Mask the input-FIFO interrupt so it does not refire before the
            // woken task services it (presumably re-armed by the driver —
            // confirm against the transfer paths).
            T::regs().imscr().modify(|w| w.set_inim(false));
            CRYP_WAKER.wake();
        }
        if bits.outmis() {
            // Same for the output-FIFO interrupt.
            T::regs().imscr().modify(|w| w.set_outim(false));
            CRYP_WAKER.wake();
        }
    }
}
|
|
|
|
|
2024-02-18 21:40:18 -05:00
|
|
|
/// This trait encapsulates all cipher-specific behavior.
pub trait Cipher<'c> {
    /// Processing block size. Determined by the processor and the algorithm.
    const BLOCK_SIZE: usize;

    /// Indicates whether the cipher requires the application to provide padding.
    /// If `true`, no partial blocks will be accepted (a panic will occur).
    const REQUIRES_PADDING: bool = false;

    /// Returns the symmetric key.
    fn key(&self) -> &[u8];

    /// Returns the initialization vector.
    fn iv(&self) -> &[u8];

    /// Sets the processor algorithm mode according to the associated cipher.
    fn set_algomode(&self, p: &pac::cryp::Cryp);

    /// Performs any key preparation within the processor, if necessary.
    ///
    /// Default is a no-op; AES ECB/CBC override this to run the key schedule.
    fn prepare_key(&self, _p: &pac::cryp::Cryp) {}

    /// Performs any cipher-specific initialization.
    ///
    /// Default is a no-op; authenticated modes (GCM/GMAC/CCM) override it.
    fn init_phase_blocking<T: Instance, DmaIn, DmaOut>(&self, _p: &pac::cryp::Cryp, _cryp: &Cryp<T, DmaIn, DmaOut>) {}

    /// Performs any cipher-specific initialization.
    ///
    /// DMA-driven counterpart of [`Self::init_phase_blocking`].
    async fn init_phase<T: Instance, DmaIn, DmaOut>(&self, _p: &pac::cryp::Cryp, _cryp: &mut Cryp<'_, T, DmaIn, DmaOut>)
    where
        DmaIn: crate::cryp::DmaIn<T>,
        DmaOut: crate::cryp::DmaOut<T>,
    {}

    /// Called prior to processing the last data block for cipher-specific operations.
    ///
    /// Returns scratch state forwarded to the `post_final*` hooks.
    fn pre_final(&self, _p: &pac::cryp::Cryp, _dir: Direction, _padding_len: usize) -> [u32; 4] {
        return [0; 4];
    }

    /// Called after processing the last data block for cipher-specific operations.
    fn post_final_blocking<T: Instance, DmaIn, DmaOut>(
        &self,
        _p: &pac::cryp::Cryp,
        _cryp: &Cryp<T, DmaIn, DmaOut>,
        _dir: Direction,
        _int_data: &mut [u8; AES_BLOCK_SIZE],
        _temp1: [u32; 4],
        _padding_mask: [u8; 16],
    ) {
    }

    /// Called after processing the last data block for cipher-specific operations.
    ///
    /// DMA-driven counterpart of [`Self::post_final_blocking`].
    async fn post_final<T: Instance, DmaIn, DmaOut>(
        &self,
        _p: &pac::cryp::Cryp,
        _cryp: &mut Cryp<'_, T, DmaIn, DmaOut>,
        _dir: Direction,
        _int_data: &mut [u8; AES_BLOCK_SIZE],
        _temp1: [u32; 4],
        _padding_mask: [u8; 16],
    )
    where
        DmaIn: crate::cryp::DmaIn<T>,
        DmaOut: crate::cryp::DmaOut<T>,
    {}

    /// Returns the AAD header block as required by the cipher.
    ///
    /// Default is an empty slice; CCM overrides it with its encoded AAD length.
    fn get_header_block(&self) -> &[u8] {
        return [0; 0].as_slice();
    }
}
|
|
|
|
|
|
|
|
/// This trait enables restriction of ciphers to specific key sizes.
pub trait CipherSized {}

/// This trait enables restriction of initialization vectors to sizes compatible with a cipher mode.
pub trait IVSized {}

/// This trait enables restriction of a header phase to authenticated ciphers only.
pub trait CipherAuthenticated<const TAG_SIZE: usize> {
    /// Defines the authentication tag size.
    const TAG_SIZE: usize = TAG_SIZE;
}
|
|
|
|
|
|
|
|
/// TDES-ECB Cipher Mode
pub struct TdesEcb<'c, const KEY_SIZE: usize> {
    // ECB has no IV; an empty array lets `Cipher::iv()` be implemented uniformly.
    iv: &'c [u8; 0],
    key: &'c [u8; KEY_SIZE],
}

impl<'c, const KEY_SIZE: usize> TdesEcb<'c, KEY_SIZE> {
    /// Constructs a new TDES-ECB cipher for a cryptographic operation.
    pub fn new(key: &'c [u8; KEY_SIZE]) -> Self {
        Self { key, iv: &[0; 0] }
    }
}
|
|
|
|
|
|
|
|
impl<'c, const KEY_SIZE: usize> Cipher<'c> for TdesEcb<'c, KEY_SIZE> {
    const BLOCK_SIZE: usize = DES_BLOCK_SIZE;
    // ECB is a block-only mode: callers must supply whole 8-byte blocks.
    const REQUIRES_PADDING: bool = true;

    fn key(&self) -> &'c [u8] {
        self.key
    }

    fn iv(&self) -> &'c [u8] {
        self.iv
    }

    fn set_algomode(&self, p: &pac::cryp::Cryp) {
        // cryp_v1 encodes the algorithm in a single ALGOMODE field; v2/v3
        // split it into ALGOMODE0 (low bits) and the ALGOMODE3 flag.
        #[cfg(cryp_v1)]
        {
            p.cr().modify(|w| w.set_algomode(0));
        }
        #[cfg(any(cryp_v2, cryp_v3))]
        {
            p.cr().modify(|w| w.set_algomode0(0));
            p.cr().modify(|w| w.set_algomode3(false));
        }
    }
}
|
|
|
|
|
|
|
|
// 3DES supports two-key (112-bit) and three-key (168-bit) keying options.
impl<'c> CipherSized for TdesEcb<'c, { 112 / 8 }> {}
impl<'c> CipherSized for TdesEcb<'c, { 168 / 8 }> {}
// ECB takes no IV, so the (empty) IV is acceptable for any key size.
impl<'c, const KEY_SIZE: usize> IVSized for TdesEcb<'c, KEY_SIZE> {}
|
|
|
|
|
|
|
|
/// TDES-CBC Cipher Mode
pub struct TdesCbc<'c, const KEY_SIZE: usize> {
    // CBC uses a 64-bit IV, matching the DES block size.
    iv: &'c [u8; 8],
    key: &'c [u8; KEY_SIZE],
}

impl<'c, const KEY_SIZE: usize> TdesCbc<'c, KEY_SIZE> {
    /// Constructs a new TDES-CBC cipher for a cryptographic operation.
    pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; 8]) -> Self {
        Self { key, iv }
    }
}
|
|
|
|
|
|
|
|
impl<'c, const KEY_SIZE: usize> Cipher<'c> for TdesCbc<'c, KEY_SIZE> {
    const BLOCK_SIZE: usize = DES_BLOCK_SIZE;
    // CBC here accepts only whole 8-byte blocks; the application pads.
    const REQUIRES_PADDING: bool = true;

    fn key(&self) -> &'c [u8] {
        self.key
    }

    fn iv(&self) -> &'c [u8] {
        self.iv
    }

    fn set_algomode(&self, p: &pac::cryp::Cryp) {
        // Algorithm mode 1 selects TDES-CBC on this peripheral.
        #[cfg(cryp_v1)]
        {
            p.cr().modify(|w| w.set_algomode(1));
        }
        #[cfg(any(cryp_v2, cryp_v3))]
        {
            p.cr().modify(|w| w.set_algomode0(1));
            p.cr().modify(|w| w.set_algomode3(false));
        }
    }
}
|
|
|
|
|
|
|
|
// 3DES supports two-key (112-bit) and three-key (168-bit) keying options.
impl<'c> CipherSized for TdesCbc<'c, { 112 / 8 }> {}
impl<'c> CipherSized for TdesCbc<'c, { 168 / 8 }> {}
// IV size is fixed at 8 bytes by the struct definition itself.
impl<'c, const KEY_SIZE: usize> IVSized for TdesCbc<'c, KEY_SIZE> {}
|
|
|
|
|
|
|
|
/// DES-ECB Cipher Mode
pub struct DesEcb<'c, const KEY_SIZE: usize> {
    // ECB has no IV; an empty array lets `Cipher::iv()` be implemented uniformly.
    iv: &'c [u8; 0],
    key: &'c [u8; KEY_SIZE],
}

impl<'c, const KEY_SIZE: usize> DesEcb<'c, KEY_SIZE> {
    /// Constructs a new DES-ECB cipher for a cryptographic operation.
    pub fn new(key: &'c [u8; KEY_SIZE]) -> Self {
        Self { key, iv: &[0; 0] }
    }
}
|
|
|
|
|
|
|
|
impl<'c, const KEY_SIZE: usize> Cipher<'c> for DesEcb<'c, KEY_SIZE> {
    const BLOCK_SIZE: usize = DES_BLOCK_SIZE;
    // ECB is a block-only mode: callers must supply whole 8-byte blocks.
    const REQUIRES_PADDING: bool = true;

    fn key(&self) -> &'c [u8] {
        self.key
    }

    fn iv(&self) -> &'c [u8] {
        self.iv
    }

    fn set_algomode(&self, p: &pac::cryp::Cryp) {
        // Algorithm mode 2 selects DES-ECB on this peripheral.
        #[cfg(cryp_v1)]
        {
            p.cr().modify(|w| w.set_algomode(2));
        }
        #[cfg(any(cryp_v2, cryp_v3))]
        {
            p.cr().modify(|w| w.set_algomode0(2));
            p.cr().modify(|w| w.set_algomode3(false));
        }
    }
}
|
|
|
|
|
|
|
|
// Single-DES uses a 56-bit key (7 bytes, parity bits excluded).
impl<'c> CipherSized for DesEcb<'c, { 56 / 8 }> {}
// ECB takes no IV, so the (empty) IV is acceptable for any key size.
impl<'c, const KEY_SIZE: usize> IVSized for DesEcb<'c, KEY_SIZE> {}
|
|
|
|
|
|
|
|
/// DES-CBC Cipher Mode
pub struct DesCbc<'c, const KEY_SIZE: usize> {
    // CBC uses a 64-bit IV, matching the DES block size.
    iv: &'c [u8; 8],
    key: &'c [u8; KEY_SIZE],
}

impl<'c, const KEY_SIZE: usize> DesCbc<'c, KEY_SIZE> {
    /// Constructs a new DES-CBC cipher for a cryptographic operation.
    pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; 8]) -> Self {
        Self { key, iv }
    }
}
|
|
|
|
|
|
|
|
impl<'c, const KEY_SIZE: usize> Cipher<'c> for DesCbc<'c, KEY_SIZE> {
    const BLOCK_SIZE: usize = DES_BLOCK_SIZE;
    // CBC here accepts only whole 8-byte blocks; the application pads.
    const REQUIRES_PADDING: bool = true;

    fn key(&self) -> &'c [u8] {
        self.key
    }

    fn iv(&self) -> &'c [u8] {
        self.iv
    }

    fn set_algomode(&self, p: &pac::cryp::Cryp) {
        // Algorithm mode 3 selects DES-CBC on this peripheral.
        #[cfg(cryp_v1)]
        {
            p.cr().modify(|w| w.set_algomode(3));
        }
        #[cfg(any(cryp_v2, cryp_v3))]
        {
            p.cr().modify(|w| w.set_algomode0(3));
            p.cr().modify(|w| w.set_algomode3(false));
        }
    }
}
|
|
|
|
|
|
|
|
// Single-DES uses a 56-bit key (7 bytes, parity bits excluded).
impl<'c> CipherSized for DesCbc<'c, { 56 / 8 }> {}
// IV size is fixed at 8 bytes by the struct definition itself.
impl<'c, const KEY_SIZE: usize> IVSized for DesCbc<'c, KEY_SIZE> {}
|
2024-02-18 21:40:18 -05:00
|
|
|
|
|
|
|
/// AES-ECB Cipher Mode
pub struct AesEcb<'c, const KEY_SIZE: usize> {
    // ECB has no IV; an empty array lets `Cipher::iv()` be implemented uniformly.
    iv: &'c [u8; 0],
    key: &'c [u8; KEY_SIZE],
}

impl<'c, const KEY_SIZE: usize> AesEcb<'c, KEY_SIZE> {
    /// Constructs a new AES-ECB cipher for a cryptographic operation.
    pub fn new(key: &'c [u8; KEY_SIZE]) -> Self {
        Self { key, iv: &[0; 0] }
    }
}
|
|
|
|
|
|
|
|
impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesEcb<'c, KEY_SIZE> {
    const BLOCK_SIZE: usize = AES_BLOCK_SIZE;
    // ECB is a block-only mode: callers must supply whole 16-byte blocks.
    const REQUIRES_PADDING: bool = true;

    fn key(&self) -> &'c [u8] {
        self.key
    }

    fn iv(&self) -> &'c [u8] {
        self.iv
    }

    fn prepare_key(&self, p: &pac::cryp::Cryp) {
        // Mode 7 runs the AES key-preparation step (presumably deriving the
        // decryption key schedule — confirm against the reference manual),
        // then waits for the peripheral to finish before returning.
        #[cfg(cryp_v1)]
        {
            p.cr().modify(|w| w.set_algomode(7));
        }
        #[cfg(any(cryp_v2, cryp_v3))]
        {
            p.cr().modify(|w| w.set_algomode0(7));
            p.cr().modify(|w| w.set_algomode3(false));
        }
        p.cr().modify(|w| w.set_crypen(true));
        // Busy-wait until the key preparation completes.
        while p.sr().read().busy() {}
    }

    fn set_algomode(&self, p: &pac::cryp::Cryp) {
        // Algorithm mode 2 selects AES-ECB on this peripheral.
        #[cfg(cryp_v1)]
        {
            p.cr().modify(|w| w.set_algomode(2));
        }
        #[cfg(any(cryp_v2, cryp_v3))]
        {
            p.cr().modify(|w| w.set_algomode0(2));
            p.cr().modify(|w| w.set_algomode3(false));
        }
    }
}
|
|
|
|
|
|
|
|
// AES accepts 128-, 192-, and 256-bit keys.
impl<'c> CipherSized for AesEcb<'c, { 128 / 8 }> {}
impl<'c> CipherSized for AesEcb<'c, { 192 / 8 }> {}
impl<'c> CipherSized for AesEcb<'c, { 256 / 8 }> {}
// ECB takes no IV, so the (empty) IV is acceptable for any key size.
impl<'c, const KEY_SIZE: usize> IVSized for AesEcb<'c, KEY_SIZE> {}
|
2024-02-18 21:40:18 -05:00
|
|
|
|
|
|
|
/// AES-CBC Cipher Mode
pub struct AesCbc<'c, const KEY_SIZE: usize> {
    // CBC uses a 128-bit IV, matching the AES block size.
    iv: &'c [u8; 16],
    key: &'c [u8; KEY_SIZE],
}

impl<'c, const KEY_SIZE: usize> AesCbc<'c, KEY_SIZE> {
    /// Constructs a new AES-CBC cipher for a cryptographic operation.
    pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; 16]) -> Self {
        Self { key, iv }
    }
}
|
|
|
|
|
|
|
|
impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesCbc<'c, KEY_SIZE> {
    const BLOCK_SIZE: usize = AES_BLOCK_SIZE;
    // CBC here accepts only whole 16-byte blocks; the application pads.
    const REQUIRES_PADDING: bool = true;

    fn key(&self) -> &'c [u8] {
        self.key
    }

    fn iv(&self) -> &'c [u8] {
        self.iv
    }

    fn prepare_key(&self, p: &pac::cryp::Cryp) {
        // Mode 7 runs the AES key-preparation step (presumably deriving the
        // decryption key schedule — confirm against the reference manual),
        // then waits for the peripheral to finish before returning.
        #[cfg(cryp_v1)]
        {
            p.cr().modify(|w| w.set_algomode(7));
        }
        #[cfg(any(cryp_v2, cryp_v3))]
        {
            p.cr().modify(|w| w.set_algomode0(7));
            p.cr().modify(|w| w.set_algomode3(false));
        }
        p.cr().modify(|w| w.set_crypen(true));
        // Busy-wait until the key preparation completes.
        while p.sr().read().busy() {}
    }

    fn set_algomode(&self, p: &pac::cryp::Cryp) {
        // Algorithm mode 5 selects AES-CBC on this peripheral.
        #[cfg(cryp_v1)]
        {
            p.cr().modify(|w| w.set_algomode(5));
        }
        #[cfg(any(cryp_v2, cryp_v3))]
        {
            p.cr().modify(|w| w.set_algomode0(5));
            p.cr().modify(|w| w.set_algomode3(false));
        }
    }
}
|
|
|
|
|
|
|
|
// AES accepts 128-, 192-, and 256-bit keys.
impl<'c> CipherSized for AesCbc<'c, { 128 / 8 }> {}
impl<'c> CipherSized for AesCbc<'c, { 192 / 8 }> {}
impl<'c> CipherSized for AesCbc<'c, { 256 / 8 }> {}
// IV size is fixed at 16 bytes by the struct definition itself.
impl<'c, const KEY_SIZE: usize> IVSized for AesCbc<'c, KEY_SIZE> {}
|
2024-02-18 21:40:18 -05:00
|
|
|
|
|
|
|
/// AES-CTR Cipher Mode
pub struct AesCtr<'c, const KEY_SIZE: usize> {
    // Initial counter block (nonce + counter), 128 bits.
    iv: &'c [u8; 16],
    key: &'c [u8; KEY_SIZE],
}

impl<'c, const KEY_SIZE: usize> AesCtr<'c, KEY_SIZE> {
    /// Constructs a new AES-CTR cipher for a cryptographic operation.
    pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; 16]) -> Self {
        Self { key, iv }
    }
}
|
|
|
|
|
|
|
|
impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesCtr<'c, KEY_SIZE> {
    const BLOCK_SIZE: usize = AES_BLOCK_SIZE;
    // CTR is a stream-like mode: partial final blocks are allowed, so the
    // default `REQUIRES_PADDING = false` is kept.

    fn key(&self) -> &'c [u8] {
        self.key
    }

    fn iv(&self) -> &'c [u8] {
        self.iv
    }

    fn set_algomode(&self, p: &pac::cryp::Cryp) {
        // Algorithm mode 6 selects AES-CTR on this peripheral.
        #[cfg(cryp_v1)]
        {
            p.cr().modify(|w| w.set_algomode(6));
        }
        #[cfg(any(cryp_v2, cryp_v3))]
        {
            p.cr().modify(|w| w.set_algomode0(6));
            p.cr().modify(|w| w.set_algomode3(false));
        }
    }
}
|
|
|
|
|
|
|
|
// AES accepts 128-, 192-, and 256-bit keys.
impl<'c> CipherSized for AesCtr<'c, { 128 / 8 }> {}
impl<'c> CipherSized for AesCtr<'c, { 192 / 8 }> {}
impl<'c> CipherSized for AesCtr<'c, { 256 / 8 }> {}
// IV size is fixed at 16 bytes by the struct definition itself.
impl<'c, const KEY_SIZE: usize> IVSized for AesCtr<'c, KEY_SIZE> {}
|
2024-02-18 21:40:18 -05:00
|
|
|
|
2024-02-29 19:09:44 -05:00
|
|
|
#[cfg(any(cryp_v2, cryp_v3))]
///AES-GCM Cipher Mode
pub struct AesGcm<'c, const KEY_SIZE: usize> {
    // Full 16-byte initial counter block: 12-byte nonce || 32-bit counter.
    iv: [u8; 16],
    key: &'c [u8; KEY_SIZE],
}

#[cfg(any(cryp_v2, cryp_v3))]
impl<'c, const KEY_SIZE: usize> AesGcm<'c, KEY_SIZE> {
    /// Constructs a new AES-GCM cipher for a cryptographic operation.
    ///
    /// The 96-bit nonce is expanded to the initial counter block with the
    /// counter's low byte set to 2 (the value the peripheral's payload
    /// phase expects — counter 1 is presumably reserved for the tag;
    /// confirm against the reference manual).
    pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; 12]) -> Self {
        let mut new_gcm = Self { key, iv: [0; 16] };
        new_gcm.iv[..12].copy_from_slice(iv);
        new_gcm.iv[15] = 2;
        new_gcm
    }
}
|
|
|
|
|
2024-02-29 19:09:44 -05:00
|
|
|
#[cfg(any(cryp_v2, cryp_v3))]
impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesGcm<'c, KEY_SIZE> {
    const BLOCK_SIZE: usize = AES_BLOCK_SIZE;

    fn key(&self) -> &'c [u8] {
        self.key
    }

    fn iv(&self) -> &[u8] {
        self.iv.as_slice()
    }

    fn set_algomode(&self, p: &pac::cryp::Cryp) {
        // ALGOMODE3 = 1 with ALGOMODE0 = 0 selects AES-GCM on v2/v3.
        p.cr().modify(|w| w.set_algomode0(0));
        p.cr().modify(|w| w.set_algomode3(true));
    }

    fn init_phase_blocking<T: Instance, DmaIn, DmaOut>(&self, p: &pac::cryp::Cryp, _cryp: &Cryp<T, DmaIn, DmaOut>) {
        // GCM_CCMPH = 0 selects the init phase; the peripheral clears
        // CRYPEN itself when that phase completes, so we spin on it.
        p.cr().modify(|w| w.set_gcm_ccmph(0));
        p.cr().modify(|w| w.set_crypen(true));
        while p.cr().read().crypen() {}
    }

    async fn init_phase<T: Instance, DmaIn, DmaOut>(&self, p: &pac::cryp::Cryp, _cryp: &mut Cryp<'_, T, DmaIn, DmaOut>) {
        // Same as the blocking variant: the init phase is register-driven
        // and involves no data transfer, so there is nothing to await.
        p.cr().modify(|w| w.set_gcm_ccmph(0));
        p.cr().modify(|w| w.set_crypen(true));
        while p.cr().read().crypen() {}
    }

    #[cfg(cryp_v2)]
    fn pre_final(&self, p: &pac::cryp::Cryp, dir: Direction, _padding_len: usize) -> [u32; 4] {
        //Handle special GCM partial block process.
        // v2 cannot pad the final encrypt block natively: temporarily switch
        // to CTR mode (ALGOMODE0 = 6) using the saved counter value
        // (CSGCMCCM7 - 1) so the last partial block can be processed.
        if dir == Direction::Encrypt {
            p.cr().modify(|w| w.set_crypen(false));
            p.cr().modify(|w| w.set_algomode3(false));
            p.cr().modify(|w| w.set_algomode0(6));
            let iv1r = p.csgcmccmr(7).read() - 1;
            p.init(1).ivrr().write_value(iv1r);
            p.cr().modify(|w| w.set_crypen(true));
        }
        [0; 4]
    }

    #[cfg(cryp_v3)]
    fn pre_final(&self, p: &pac::cryp::Cryp, _dir: Direction, padding_len: usize) -> [u32; 4] {
        //Handle special GCM partial block process.
        // v3 pads natively: just program the number-of-padding-bytes field.
        p.cr().modify(|w| w.set_npblb(padding_len as u8));
        [0; 4]
    }

    #[cfg(cryp_v2)]
    fn post_final_blocking<T: Instance, DmaIn, DmaOut>(
        &self,
        p: &pac::cryp::Cryp,
        cryp: &Cryp<T, DmaIn, DmaOut>,
        dir: Direction,
        int_data: &mut [u8; AES_BLOCK_SIZE],
        _temp1: [u32; 4],
        padding_mask: [u8; AES_BLOCK_SIZE],
    ) {
        if dir == Direction::Encrypt {
            //Handle special GCM partial block process.
            // Switch back to GCM, zero the padding bytes of the CTR-produced
            // ciphertext, and feed the masked block through the final
            // (tag) phase so GHASH sees the correctly padded block.
            p.cr().modify(|w| w.set_crypen(false));
            p.cr().modify(|w| w.set_algomode3(true));
            p.cr().modify(|w| w.set_algomode0(0));
            for i in 0..AES_BLOCK_SIZE {
                int_data[i] = int_data[i] & padding_mask[i];
            }
            p.cr().modify(|w| w.set_crypen(true));
            p.cr().modify(|w| w.set_gcm_ccmph(3));

            cryp.write_bytes_blocking(Self::BLOCK_SIZE, int_data);
            cryp.read_bytes_blocking(Self::BLOCK_SIZE, int_data);
        }
    }

    #[cfg(cryp_v2)]
    async fn post_final<T: Instance, DmaIn, DmaOut>(
        &self,
        p: &pac::cryp::Cryp,
        cryp: &mut Cryp<'_, T, DmaIn, DmaOut>,
        dir: Direction,
        int_data: &mut [u8; AES_BLOCK_SIZE],
        _temp1: [u32; 4],
        padding_mask: [u8; AES_BLOCK_SIZE],
    )
    where
        DmaIn: crate::cryp::DmaIn<T>,
        DmaOut: crate::cryp::DmaOut<T>,
    {
        if dir == Direction::Encrypt {
            // Handle special GCM partial block process.
            // DMA counterpart of `post_final_blocking`: mask the padding
            // bytes, rerun the block through the final phase, and copy the
            // reprocessed ciphertext back into the caller's buffer.
            p.cr().modify(|w| w.set_crypen(false));
            p.cr().modify(|w| w.set_algomode3(true));
            p.cr().modify(|w| w.set_algomode0(0));
            for i in 0..AES_BLOCK_SIZE {
                int_data[i] = int_data[i] & padding_mask[i];
            }
            p.cr().modify(|w| w.set_crypen(true));
            p.cr().modify(|w| w.set_gcm_ccmph(3));

            let mut out_data: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE];

            // Issue the read and write transfers together: the peripheral
            // produces output as input is consumed.
            let read = Cryp::<T, DmaIn, DmaOut>::read_bytes(&mut cryp.outdma, Self::BLOCK_SIZE, &mut out_data);
            let write = Cryp::<T, DmaIn, DmaOut>::write_bytes(&mut cryp.indma, Self::BLOCK_SIZE, int_data);

            embassy_futures::join::join(read, write).await;

            int_data.copy_from_slice(&out_data);
        }
    }
}
|
|
|
|
|
2024-02-29 19:09:44 -05:00
|
|
|
// AES accepts 128-, 192-, and 256-bit keys.
#[cfg(any(cryp_v2, cryp_v3))]
impl<'c> CipherSized for AesGcm<'c, { 128 / 8 }> {}
#[cfg(any(cryp_v2, cryp_v3))]
impl<'c> CipherSized for AesGcm<'c, { 192 / 8 }> {}
#[cfg(any(cryp_v2, cryp_v3))]
impl<'c> CipherSized for AesGcm<'c, { 256 / 8 }> {}
// GCM produces a 128-bit (16-byte) authentication tag.
#[cfg(any(cryp_v2, cryp_v3))]
impl<'c, const KEY_SIZE: usize> CipherAuthenticated<16> for AesGcm<'c, KEY_SIZE> {}
// The constructor constrains the nonce to 12 bytes already.
#[cfg(any(cryp_v2, cryp_v3))]
impl<'c, const KEY_SIZE: usize> IVSized for AesGcm<'c, KEY_SIZE> {}
|
2024-02-18 21:40:18 -05:00
|
|
|
|
2024-02-29 19:09:44 -05:00
|
|
|
#[cfg(any(cryp_v2, cryp_v3))]
/// AES-GMAC Cipher Mode
pub struct AesGmac<'c, const KEY_SIZE: usize> {
    // Full 16-byte initial counter block: 12-byte nonce || 32-bit counter.
    iv: [u8; 16],
    key: &'c [u8; KEY_SIZE],
}

#[cfg(any(cryp_v2, cryp_v3))]
impl<'c, const KEY_SIZE: usize> AesGmac<'c, KEY_SIZE> {
    /// Constructs a new AES-GMAC cipher for a cryptographic operation.
    ///
    /// The 96-bit nonce is expanded to the initial counter block with the
    /// counter's low byte set to 2, mirroring the AES-GCM construction
    /// (GMAC is GCM with an empty payload).
    pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; 12]) -> Self {
        let mut new_gmac = Self { key, iv: [0; 16] };
        new_gmac.iv[..12].copy_from_slice(iv);
        new_gmac.iv[15] = 2;
        new_gmac
    }
}
|
|
|
|
|
2024-02-29 19:09:44 -05:00
|
|
|
#[cfg(any(cryp_v2, cryp_v3))]
impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesGmac<'c, KEY_SIZE> {
    const BLOCK_SIZE: usize = AES_BLOCK_SIZE;

    fn key(&self) -> &'c [u8] {
        self.key
    }

    fn iv(&self) -> &[u8] {
        self.iv.as_slice()
    }

    fn set_algomode(&self, p: &pac::cryp::Cryp) {
        // GMAC reuses the GCM engine: ALGOMODE3 = 1 with ALGOMODE0 = 0.
        p.cr().modify(|w| w.set_algomode0(0));
        p.cr().modify(|w| w.set_algomode3(true));
    }

    fn init_phase_blocking<T: Instance, DmaIn, DmaOut>(&self, p: &pac::cryp::Cryp, _cryp: &Cryp<T, DmaIn, DmaOut>) {
        // GCM_CCMPH = 0 selects the init phase; the peripheral clears
        // CRYPEN itself when that phase completes, so we spin on it.
        p.cr().modify(|w| w.set_gcm_ccmph(0));
        p.cr().modify(|w| w.set_crypen(true));
        while p.cr().read().crypen() {}
    }

    async fn init_phase<T: Instance, DmaIn, DmaOut>(&self, p: &pac::cryp::Cryp, _cryp: &mut Cryp<'_, T, DmaIn, DmaOut>) {
        // Same as the blocking variant: the init phase is register-driven
        // and involves no data transfer, so there is nothing to await.
        p.cr().modify(|w| w.set_gcm_ccmph(0));
        p.cr().modify(|w| w.set_crypen(true));
        while p.cr().read().crypen() {}
    }

    #[cfg(cryp_v2)]
    fn pre_final(&self, p: &pac::cryp::Cryp, dir: Direction, _padding_len: usize) -> [u32; 4] {
        //Handle special GCM partial block process.
        // v2 cannot pad the final encrypt block natively: temporarily switch
        // to CTR mode (ALGOMODE0 = 6) using the saved counter value
        // (CSGCMCCM7 - 1) so the last partial block can be processed.
        if dir == Direction::Encrypt {
            p.cr().modify(|w| w.set_crypen(false));
            p.cr().modify(|w| w.set_algomode3(false));
            p.cr().modify(|w| w.set_algomode0(6));
            let iv1r = p.csgcmccmr(7).read() - 1;
            p.init(1).ivrr().write_value(iv1r);
            p.cr().modify(|w| w.set_crypen(true));
        }
        [0; 4]
    }

    #[cfg(cryp_v3)]
    fn pre_final(&self, p: &pac::cryp::Cryp, _dir: Direction, padding_len: usize) -> [u32; 4] {
        //Handle special GCM partial block process.
        // v3 pads natively: just program the number-of-padding-bytes field.
        p.cr().modify(|w| w.set_npblb(padding_len as u8));
        [0; 4]
    }

    #[cfg(cryp_v2)]
    fn post_final_blocking<T: Instance, DmaIn, DmaOut>(
        &self,
        p: &pac::cryp::Cryp,
        cryp: &Cryp<T, DmaIn, DmaOut>,
        dir: Direction,
        int_data: &mut [u8; AES_BLOCK_SIZE],
        _temp1: [u32; 4],
        padding_mask: [u8; AES_BLOCK_SIZE],
    ) {
        if dir == Direction::Encrypt {
            //Handle special GCM partial block process.
            // Switch back to GCM, zero the padding bytes, and feed the
            // masked block through the final phase so the tag computation
            // sees the correctly padded block.
            p.cr().modify(|w| w.set_crypen(false));
            p.cr().modify(|w| w.set_algomode3(true));
            p.cr().modify(|w| w.set_algomode0(0));
            for i in 0..AES_BLOCK_SIZE {
                int_data[i] = int_data[i] & padding_mask[i];
            }
            p.cr().modify(|w| w.set_crypen(true));
            p.cr().modify(|w| w.set_gcm_ccmph(3));

            cryp.write_bytes_blocking(Self::BLOCK_SIZE, int_data);
            cryp.read_bytes_blocking(Self::BLOCK_SIZE, int_data);
        }
    }

    #[cfg(cryp_v2)]
    async fn post_final<T: Instance, DmaIn, DmaOut>(
        &self,
        p: &pac::cryp::Cryp,
        cryp: &mut Cryp<'_, T, DmaIn, DmaOut>,
        dir: Direction,
        int_data: &mut [u8; AES_BLOCK_SIZE],
        _temp1: [u32; 4],
        padding_mask: [u8; AES_BLOCK_SIZE],
    )
    where
        DmaIn: crate::cryp::DmaIn<T>,
        DmaOut: crate::cryp::DmaOut<T>,
    {
        if dir == Direction::Encrypt {
            // Handle special GCM partial block process.
            // DMA counterpart of `post_final_blocking`. Unlike AES-GCM, the
            // reprocessed block is not copied back to `int_data` here —
            // presumably because GMAC produces no ciphertext output; confirm.
            p.cr().modify(|w| w.set_crypen(false));
            p.cr().modify(|w| w.set_algomode3(true));
            p.cr().modify(|w| w.set_algomode0(0));
            for i in 0..AES_BLOCK_SIZE {
                int_data[i] = int_data[i] & padding_mask[i];
            }
            p.cr().modify(|w| w.set_crypen(true));
            p.cr().modify(|w| w.set_gcm_ccmph(3));

            let mut out_data: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE];

            // Issue the read and write transfers together: the peripheral
            // produces output as input is consumed.
            let read = Cryp::<T, DmaIn, DmaOut>::read_bytes(&mut cryp.outdma, Self::BLOCK_SIZE, &mut out_data);
            let write = Cryp::<T, DmaIn, DmaOut>::write_bytes(&mut cryp.indma, Self::BLOCK_SIZE, int_data);

            embassy_futures::join::join(read, write).await;
        }
    }
}
|
|
|
|
|
2024-02-29 19:09:44 -05:00
|
|
|
// AES accepts 128-, 192-, and 256-bit keys.
#[cfg(any(cryp_v2, cryp_v3))]
impl<'c> CipherSized for AesGmac<'c, { 128 / 8 }> {}
#[cfg(any(cryp_v2, cryp_v3))]
impl<'c> CipherSized for AesGmac<'c, { 192 / 8 }> {}
#[cfg(any(cryp_v2, cryp_v3))]
impl<'c> CipherSized for AesGmac<'c, { 256 / 8 }> {}
// GMAC produces a 128-bit (16-byte) authentication tag.
#[cfg(any(cryp_v2, cryp_v3))]
impl<'c, const KEY_SIZE: usize> CipherAuthenticated<16> for AesGmac<'c, KEY_SIZE> {}
// The constructor constrains the nonce to 12 bytes already.
#[cfg(any(cryp_v2, cryp_v3))]
impl<'c, const KEY_SIZE: usize> IVSized for AesGmac<'c, KEY_SIZE> {}
|
2024-02-18 21:40:18 -05:00
|
|
|
|
2024-02-29 19:09:44 -05:00
|
|
|
#[cfg(any(cryp_v2, cryp_v3))]
/// AES-CCM Cipher Mode
pub struct AesCcm<'c, const KEY_SIZE: usize, const TAG_SIZE: usize, const IV_SIZE: usize> {
    key: &'c [u8; KEY_SIZE],
    // Encoded AAD length prefix (2 or 6 bytes used; see `new`).
    aad_header: [u8; 6],
    // Bytes of `aad_header` in use, plus padding accounted by `new`.
    aad_header_len: usize,
    // CCM B0 block: flags, nonce, and encoded message length.
    block0: [u8; 16],
    // Initial counter block (A0-style) derived from the nonce.
    ctr: [u8; 16],
}
|
|
|
|
|
2024-02-29 19:09:44 -05:00
|
|
|
#[cfg(any(cryp_v2, cryp_v3))]
impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize, const IV_SIZE: usize> AesCcm<'c, KEY_SIZE, TAG_SIZE, IV_SIZE> {
    /// Constructs a new AES-CCM cipher for a cryptographic operation.
    ///
    /// Precomputes the encoded AAD length header, the B0 block (flags,
    /// nonce, message length), and the initial counter block from the
    /// supplied nonce and lengths.
    pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; IV_SIZE], aad_len: usize, payload_len: usize) -> Self {
        let mut aad_header: [u8; 6] = [0; 6];
        let mut aad_header_len = 0;
        let mut block0: [u8; 16] = [0; 16];
        if aad_len != 0 {
            if aad_len < 65280 {
                // Short form: AAD length fits in two bytes (< 0xFF00).
                aad_header[0] = (aad_len >> 8) as u8 & 0xFF;
                aad_header[1] = aad_len as u8 & 0xFF;
                aad_header_len = 2;
            } else {
                // Long form: 0xFF 0xFE marker followed by a 32-bit length.
                aad_header[0] = 0xFF;
                aad_header[1] = 0xFE;
                // NOTE(review): `usize::to_be_bytes()` is `[u8; 4]` only on
                // 32-bit targets — fine for stm32, but non-portable.
                let aad_len_bytes: [u8; 4] = aad_len.to_be_bytes();
                aad_header[2] = aad_len_bytes[0];
                aad_header[3] = aad_len_bytes[1];
                aad_header[4] = aad_len_bytes[2];
                aad_header[5] = aad_len_bytes[3];
                aad_header_len = 6;
            }
        }
        // Account for zero-padding needed to align header + AAD to a block.
        let total_aad_len = aad_header_len + aad_len;
        let mut aad_padding_len = 16 - (total_aad_len % 16);
        if aad_padding_len == 16 {
            aad_padding_len = 0;
        }
        aad_header_len += aad_padding_len;
        let total_aad_len_padded = aad_header_len + aad_len;
        if total_aad_len_padded > 0 {
            // Adata flag: set when any associated data is present.
            block0[0] = 0x40;
        }
        // Encode (t-2)/2 into flag bits 5..3 (t = tag length in bytes).
        block0[0] |= ((((TAG_SIZE as u8) - 2) >> 1) & 0x07) << 3;
        // Encode q-1 into flag bits 2..0, where q = 15 - nonce length.
        block0[0] |= ((15 - (iv.len() as u8)) - 1) & 0x07;
        block0[1..1 + iv.len()].copy_from_slice(iv);
        // Message length occupies the trailing q bytes of B0; reject
        // payloads whose length cannot be represented in q bytes.
        let payload_len_bytes: [u8; 4] = payload_len.to_be_bytes();
        if iv.len() <= 11 {
            block0[12] = payload_len_bytes[0];
        } else if payload_len_bytes[0] > 0 {
            panic!("Message is too large for given IV size.");
        }
        if iv.len() <= 12 {
            block0[13] = payload_len_bytes[1];
        } else if payload_len_bytes[1] > 0 {
            panic!("Message is too large for given IV size.");
        }
        block0[14] = payload_len_bytes[2];
        block0[15] = payload_len_bytes[3];
        // Initial counter block: same flags' q field and nonce as B0, with
        // the counter itself set to 1.
        let mut ctr: [u8; 16] = [0; 16];
        ctr[0] = block0[0] & 0x07;
        ctr[1..1 + iv.len()].copy_from_slice(&block0[1..1 + iv.len()]);
        ctr[15] = 0x01;

        return Self {
            key: key,
            aad_header: aad_header,
            aad_header_len: aad_header_len,
            block0: block0,
            ctr: ctr,
        };
    }
}
|
|
|
|
|
2024-02-29 19:09:44 -05:00
|
|
|
#[cfg(any(cryp_v2, cryp_v3))]
|
2024-02-21 12:07:53 -05:00
|
|
|
impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize, const IV_SIZE: usize> Cipher<'c>
|
|
|
|
for AesCcm<'c, KEY_SIZE, TAG_SIZE, IV_SIZE>
|
|
|
|
{
|
2024-02-20 11:54:39 -05:00
|
|
|
const BLOCK_SIZE: usize = AES_BLOCK_SIZE;
|
|
|
|
|
|
|
|
fn key(&self) -> &'c [u8] {
|
|
|
|
self.key
|
|
|
|
}
|
|
|
|
|
|
|
|
fn iv(&self) -> &[u8] {
|
|
|
|
self.ctr.as_slice()
|
|
|
|
}
|
|
|
|
|
|
|
|
fn set_algomode(&self, p: &pac::cryp::Cryp) {
|
|
|
|
p.cr().modify(|w| w.set_algomode0(1));
|
|
|
|
p.cr().modify(|w| w.set_algomode3(true));
|
|
|
|
}
|
|
|
|
|
2024-03-12 12:01:14 -04:00
|
|
|
fn init_phase_blocking<T: Instance, DmaIn, DmaOut>(&self, p: &pac::cryp::Cryp, cryp: &Cryp<T, DmaIn, DmaOut>) {
|
2024-02-20 11:54:39 -05:00
|
|
|
p.cr().modify(|w| w.set_gcm_ccmph(0));
|
|
|
|
|
2024-03-05 11:25:56 -05:00
|
|
|
cryp.write_bytes_blocking(Self::BLOCK_SIZE, &self.block0);
|
|
|
|
|
2024-02-20 11:54:39 -05:00
|
|
|
p.cr().modify(|w| w.set_crypen(true));
|
|
|
|
while p.cr().read().crypen() {}
|
|
|
|
}
|
|
|
|
|
2024-03-12 12:01:14 -04:00
|
|
|
async fn init_phase<T: Instance, DmaIn, DmaOut>(&self, p: &pac::cryp::Cryp, cryp: &mut Cryp<'_, T, DmaIn, DmaOut>)
|
|
|
|
where
|
|
|
|
DmaIn: crate::cryp::DmaIn<T>,
|
|
|
|
DmaOut: crate::cryp::DmaOut<T>,
|
|
|
|
{
|
|
|
|
p.cr().modify(|w| w.set_gcm_ccmph(0));
|
|
|
|
|
|
|
|
Cryp::<T, DmaIn, DmaOut>::write_bytes(&mut cryp.indma, Self::BLOCK_SIZE, &self.block0).await;
|
|
|
|
|
|
|
|
p.cr().modify(|w| w.set_crypen(true));
|
|
|
|
while p.cr().read().crypen() {}
|
|
|
|
}
|
|
|
|
|
2024-02-20 11:54:39 -05:00
|
|
|
fn get_header_block(&self) -> &[u8] {
|
|
|
|
return &self.aad_header[0..self.aad_header_len];
|
|
|
|
}
|
|
|
|
|
2024-02-29 19:09:44 -05:00
|
|
|
#[cfg(cryp_v2)]
|
2024-03-12 12:01:14 -04:00
|
|
|
fn pre_final(&self, p: &pac::cryp::Cryp, dir: Direction, _padding_len: usize) -> [u32; 4] {
|
2024-02-20 11:54:39 -05:00
|
|
|
//Handle special CCM partial block process.
|
|
|
|
let mut temp1 = [0; 4];
|
|
|
|
if dir == Direction::Decrypt {
|
|
|
|
p.cr().modify(|w| w.set_crypen(false));
|
|
|
|
let iv1temp = p.init(1).ivrr().read();
|
2024-02-20 15:26:31 -05:00
|
|
|
temp1[0] = p.csgcmccmr(0).read().swap_bytes();
|
|
|
|
temp1[1] = p.csgcmccmr(1).read().swap_bytes();
|
|
|
|
temp1[2] = p.csgcmccmr(2).read().swap_bytes();
|
|
|
|
temp1[3] = p.csgcmccmr(3).read().swap_bytes();
|
2024-02-20 11:54:39 -05:00
|
|
|
p.init(1).ivrr().write_value(iv1temp);
|
|
|
|
p.cr().modify(|w| w.set_algomode3(false));
|
|
|
|
p.cr().modify(|w| w.set_algomode0(6));
|
|
|
|
p.cr().modify(|w| w.set_crypen(true));
|
|
|
|
}
|
|
|
|
return temp1;
|
|
|
|
}
|
|
|
|
|
2024-02-29 19:09:44 -05:00
|
|
|
#[cfg(cryp_v3)]
|
2024-03-12 14:52:34 -04:00
|
|
|
fn pre_final(&self, p: &pac::cryp::Cryp, _dir: Direction, padding_len: usize) -> [u32; 4] {
|
2024-02-29 19:09:44 -05:00
|
|
|
//Handle special GCM partial block process.
|
|
|
|
p.cr().modify(|w| w.set_npblb(padding_len as u8));
|
|
|
|
[0; 4]
|
|
|
|
}
|
|
|
|
|
|
|
|
#[cfg(cryp_v2)]
|
2024-03-12 12:01:14 -04:00
|
|
|
fn post_final_blocking<T: Instance, DmaIn, DmaOut>(
|
2024-02-20 11:54:39 -05:00
|
|
|
&self,
|
|
|
|
p: &pac::cryp::Cryp,
|
2024-03-12 12:01:14 -04:00
|
|
|
cryp: &Cryp<T, DmaIn, DmaOut>,
|
2024-02-20 11:54:39 -05:00
|
|
|
dir: Direction,
|
2024-02-20 14:27:37 -05:00
|
|
|
int_data: &mut [u8; AES_BLOCK_SIZE],
|
2024-02-20 11:54:39 -05:00
|
|
|
temp1: [u32; 4],
|
|
|
|
padding_mask: [u8; 16],
|
|
|
|
) {
|
|
|
|
if dir == Direction::Decrypt {
|
|
|
|
//Handle special CCM partial block process.
|
|
|
|
let mut temp2 = [0; 4];
|
2024-02-20 15:26:31 -05:00
|
|
|
temp2[0] = p.csgcmccmr(0).read().swap_bytes();
|
|
|
|
temp2[1] = p.csgcmccmr(1).read().swap_bytes();
|
|
|
|
temp2[2] = p.csgcmccmr(2).read().swap_bytes();
|
|
|
|
temp2[3] = p.csgcmccmr(3).read().swap_bytes();
|
|
|
|
p.cr().modify(|w| w.set_algomode3(true));
|
|
|
|
p.cr().modify(|w| w.set_algomode0(1));
|
2024-02-20 11:54:39 -05:00
|
|
|
p.cr().modify(|w| w.set_gcm_ccmph(3));
|
|
|
|
// Header phase
|
|
|
|
p.cr().modify(|w| w.set_gcm_ccmph(1));
|
2024-02-20 15:26:31 -05:00
|
|
|
for i in 0..AES_BLOCK_SIZE {
|
|
|
|
int_data[i] = int_data[i] & padding_mask[i];
|
|
|
|
}
|
2024-02-20 11:54:39 -05:00
|
|
|
let mut in_data: [u32; 4] = [0; 4];
|
|
|
|
for i in 0..in_data.len() {
|
2024-02-20 15:26:31 -05:00
|
|
|
let mut int_bytes: [u8; 4] = [0; 4];
|
|
|
|
int_bytes.copy_from_slice(&int_data[(i * 4)..(i * 4) + 4]);
|
|
|
|
let int_word = u32::from_le_bytes(int_bytes);
|
|
|
|
in_data[i] = int_word;
|
2024-02-20 11:54:39 -05:00
|
|
|
in_data[i] = in_data[i] ^ temp1[i] ^ temp2[i];
|
|
|
|
}
|
2024-03-05 11:25:56 -05:00
|
|
|
cryp.write_words_blocking(Self::BLOCK_SIZE, &in_data);
|
2024-02-20 11:54:39 -05:00
|
|
|
}
|
|
|
|
}
|
2024-03-12 12:01:14 -04:00
|
|
|
|
|
|
|
#[cfg(cryp_v2)]
|
|
|
|
async fn post_final<T: Instance, DmaIn, DmaOut>(
|
|
|
|
&self,
|
|
|
|
p: &pac::cryp::Cryp,
|
|
|
|
cryp: &mut Cryp<'_, T, DmaIn, DmaOut>,
|
|
|
|
dir: Direction,
|
|
|
|
int_data: &mut [u8; AES_BLOCK_SIZE],
|
|
|
|
temp1: [u32; 4],
|
|
|
|
padding_mask: [u8; 16],
|
|
|
|
)
|
|
|
|
where
|
|
|
|
DmaIn: crate::cryp::DmaIn<T>,
|
|
|
|
DmaOut: crate::cryp::DmaOut<T>,
|
|
|
|
{
|
|
|
|
if dir == Direction::Decrypt {
|
|
|
|
//Handle special CCM partial block process.
|
|
|
|
let mut temp2 = [0; 4];
|
|
|
|
temp2[0] = p.csgcmccmr(0).read().swap_bytes();
|
|
|
|
temp2[1] = p.csgcmccmr(1).read().swap_bytes();
|
|
|
|
temp2[2] = p.csgcmccmr(2).read().swap_bytes();
|
|
|
|
temp2[3] = p.csgcmccmr(3).read().swap_bytes();
|
|
|
|
p.cr().modify(|w| w.set_algomode3(true));
|
|
|
|
p.cr().modify(|w| w.set_algomode0(1));
|
|
|
|
p.cr().modify(|w| w.set_gcm_ccmph(3));
|
|
|
|
// Header phase
|
|
|
|
p.cr().modify(|w| w.set_gcm_ccmph(1));
|
|
|
|
for i in 0..AES_BLOCK_SIZE {
|
|
|
|
int_data[i] = int_data[i] & padding_mask[i];
|
|
|
|
}
|
|
|
|
let mut in_data: [u32; 4] = [0; 4];
|
|
|
|
for i in 0..in_data.len() {
|
|
|
|
let mut int_bytes: [u8; 4] = [0; 4];
|
|
|
|
int_bytes.copy_from_slice(&int_data[(i * 4)..(i * 4) + 4]);
|
|
|
|
let int_word = u32::from_le_bytes(int_bytes);
|
|
|
|
in_data[i] = int_word;
|
|
|
|
in_data[i] = in_data[i] ^ temp1[i] ^ temp2[i];
|
|
|
|
}
|
|
|
|
Cryp::<T, DmaIn, DmaOut>::write_words(&mut cryp.indma, Self::BLOCK_SIZE, &in_data).await;
|
|
|
|
}
|
|
|
|
}
|
2024-02-20 11:54:39 -05:00
|
|
|
}
|
|
|
|
|
2024-02-29 19:09:44 -05:00
|
|
|
#[cfg(any(cryp_v2, cryp_v3))]
|
2024-02-21 12:07:53 -05:00
|
|
|
impl<'c, const TAG_SIZE: usize, const IV_SIZE: usize> CipherSized for AesCcm<'c, { 128 / 8 }, TAG_SIZE, IV_SIZE> {}
|
2024-02-29 19:09:44 -05:00
|
|
|
#[cfg(any(cryp_v2, cryp_v3))]
|
2024-02-21 12:07:53 -05:00
|
|
|
impl<'c, const TAG_SIZE: usize, const IV_SIZE: usize> CipherSized for AesCcm<'c, { 192 / 8 }, TAG_SIZE, IV_SIZE> {}
|
2024-02-29 19:09:44 -05:00
|
|
|
#[cfg(any(cryp_v2, cryp_v3))]
|
2024-02-21 12:07:53 -05:00
|
|
|
impl<'c, const TAG_SIZE: usize, const IV_SIZE: usize> CipherSized for AesCcm<'c, { 256 / 8 }, TAG_SIZE, IV_SIZE> {}
|
2024-02-29 19:09:44 -05:00
|
|
|
#[cfg(any(cryp_v2, cryp_v3))]
|
2024-02-21 12:07:53 -05:00
|
|
|
impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<4> for AesCcm<'c, KEY_SIZE, 4, IV_SIZE> {}
|
2024-02-29 19:09:44 -05:00
|
|
|
#[cfg(any(cryp_v2, cryp_v3))]
|
2024-02-21 12:07:53 -05:00
|
|
|
impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<6> for AesCcm<'c, KEY_SIZE, 6, IV_SIZE> {}
|
2024-02-29 19:09:44 -05:00
|
|
|
#[cfg(any(cryp_v2, cryp_v3))]
|
2024-02-21 12:07:53 -05:00
|
|
|
impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<8> for AesCcm<'c, KEY_SIZE, 8, IV_SIZE> {}
|
2024-02-29 19:09:44 -05:00
|
|
|
#[cfg(any(cryp_v2, cryp_v3))]
|
2024-02-21 12:07:53 -05:00
|
|
|
impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<10> for AesCcm<'c, KEY_SIZE, 10, IV_SIZE> {}
|
2024-02-29 19:09:44 -05:00
|
|
|
#[cfg(any(cryp_v2, cryp_v3))]
|
2024-02-21 12:07:53 -05:00
|
|
|
impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<12> for AesCcm<'c, KEY_SIZE, 12, IV_SIZE> {}
|
2024-02-29 19:09:44 -05:00
|
|
|
#[cfg(any(cryp_v2, cryp_v3))]
|
2024-02-21 12:07:53 -05:00
|
|
|
impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<14> for AesCcm<'c, KEY_SIZE, 14, IV_SIZE> {}
|
2024-02-29 19:09:44 -05:00
|
|
|
#[cfg(any(cryp_v2, cryp_v3))]
|
2024-02-21 12:07:53 -05:00
|
|
|
impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<16> for AesCcm<'c, KEY_SIZE, 16, IV_SIZE> {}
|
2024-02-29 19:09:44 -05:00
|
|
|
#[cfg(any(cryp_v2, cryp_v3))]
|
2024-02-21 12:07:53 -05:00
|
|
|
impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 7> {}
|
2024-02-29 19:09:44 -05:00
|
|
|
#[cfg(any(cryp_v2, cryp_v3))]
|
2024-02-21 12:07:53 -05:00
|
|
|
impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 8> {}
|
2024-02-29 19:09:44 -05:00
|
|
|
#[cfg(any(cryp_v2, cryp_v3))]
|
2024-02-21 12:07:53 -05:00
|
|
|
impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 9> {}
|
2024-02-29 19:09:44 -05:00
|
|
|
#[cfg(any(cryp_v2, cryp_v3))]
|
2024-02-21 12:07:53 -05:00
|
|
|
impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 10> {}
|
2024-02-29 19:09:44 -05:00
|
|
|
#[cfg(any(cryp_v2, cryp_v3))]
|
2024-02-21 12:07:53 -05:00
|
|
|
impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 11> {}
|
2024-02-29 19:09:44 -05:00
|
|
|
#[cfg(any(cryp_v2, cryp_v3))]
|
2024-02-21 12:07:53 -05:00
|
|
|
impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 12> {}
|
2024-02-29 19:09:44 -05:00
|
|
|
#[cfg(any(cryp_v2, cryp_v3))]
|
2024-02-21 12:07:53 -05:00
|
|
|
impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 13> {}
|
2024-02-18 21:40:18 -05:00
|
|
|
|
2024-02-21 12:39:10 -05:00
|
|
|
#[allow(dead_code)]
|
2024-02-16 13:15:14 -05:00
|
|
|
/// Holds the state information for a cipher operation.
|
|
|
|
/// Allows suspending/resuming of cipher operations.
|
2024-02-18 21:40:18 -05:00
|
|
|
pub struct Context<'c, C: Cipher<'c> + CipherSized> {
|
|
|
|
phantom_data: PhantomData<&'c C>,
|
|
|
|
cipher: &'c C,
|
2024-02-14 20:24:52 -05:00
|
|
|
dir: Direction,
|
|
|
|
last_block_processed: bool,
|
2024-02-20 11:54:39 -05:00
|
|
|
header_processed: bool,
|
2024-02-14 20:24:52 -05:00
|
|
|
aad_complete: bool,
|
|
|
|
cr: u32,
|
|
|
|
iv: [u32; 4],
|
|
|
|
csgcmccm: [u32; 8],
|
|
|
|
csgcm: [u32; 8],
|
2024-02-16 13:15:14 -05:00
|
|
|
header_len: u64,
|
|
|
|
payload_len: u64,
|
2024-02-20 11:54:39 -05:00
|
|
|
aad_buffer: [u8; 16],
|
|
|
|
aad_buffer_len: usize,
|
2024-02-13 10:11:54 -05:00
|
|
|
}
|
|
|
|
|
2024-02-16 13:15:14 -05:00
|
|
|
/// Selects whether the crypto processor operates in encryption or decryption mode.
|
2024-02-14 20:24:52 -05:00
|
|
|
#[derive(PartialEq, Clone, Copy)]
|
2024-02-13 10:11:54 -05:00
|
|
|
pub enum Direction {
|
2024-02-16 13:15:14 -05:00
|
|
|
/// Encryption mode
|
2024-02-13 10:11:54 -05:00
|
|
|
Encrypt,
|
2024-02-16 13:15:14 -05:00
|
|
|
/// Decryption mode
|
2024-02-13 10:11:54 -05:00
|
|
|
Decrypt,
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Crypto Accelerator Driver
|
2024-03-12 12:01:14 -04:00
|
|
|
pub struct Cryp<'d, T: Instance, DmaIn = NoDma, DmaOut = NoDma> {
|
2024-02-13 10:11:54 -05:00
|
|
|
_peripheral: PeripheralRef<'d, T>,
|
2024-03-12 12:01:14 -04:00
|
|
|
indma: PeripheralRef<'d, DmaIn>,
|
|
|
|
outdma: PeripheralRef<'d, DmaOut>,
|
2024-02-13 10:11:54 -05:00
|
|
|
}
|
|
|
|
|
2024-03-12 12:01:14 -04:00
|
|
|
impl<'d, T: Instance, DmaIn, DmaOut> Cryp<'d, T, DmaIn, DmaOut> {
|
2024-02-13 10:11:54 -05:00
|
|
|
/// Create a new CRYP driver.
|
2024-03-05 11:25:56 -05:00
|
|
|
pub fn new(
|
|
|
|
peri: impl Peripheral<P = T> + 'd,
|
2024-03-12 12:01:14 -04:00
|
|
|
indma: impl Peripheral<P = DmaIn> + 'd,
|
|
|
|
outdma: impl Peripheral<P = DmaOut> + 'd,
|
2024-03-05 11:25:56 -05:00
|
|
|
_irq: impl interrupt::typelevel::Binding<T::Interrupt, InterruptHandler<T>> + 'd,
|
|
|
|
) -> Self {
|
2024-02-24 16:14:44 -05:00
|
|
|
T::enable_and_reset();
|
2024-03-05 11:25:56 -05:00
|
|
|
into_ref!(peri, indma, outdma);
|
|
|
|
let instance = Self {
|
|
|
|
_peripheral: peri,
|
|
|
|
indma: indma,
|
|
|
|
outdma: outdma,
|
|
|
|
};
|
|
|
|
|
|
|
|
T::Interrupt::unpend();
|
|
|
|
unsafe { T::Interrupt::enable() };
|
|
|
|
|
2024-02-13 10:11:54 -05:00
|
|
|
instance
|
|
|
|
}
|
|
|
|
|
2024-03-12 14:52:34 -04:00
|
|
|
/// Start a new encrypt or decrypt operation for the given cipher.
|
2024-03-12 12:01:14 -04:00
|
|
|
pub fn start_blocking<'c, C: Cipher<'c> + CipherSized + IVSized>(&self, cipher: &'c C, dir: Direction) -> Context<'c, C> {
|
|
|
|
let mut ctx: Context<'c, C> = Context {
|
|
|
|
dir,
|
|
|
|
last_block_processed: false,
|
|
|
|
cr: 0,
|
|
|
|
iv: [0; 4],
|
|
|
|
csgcmccm: [0; 8],
|
|
|
|
csgcm: [0; 8],
|
|
|
|
aad_complete: false,
|
|
|
|
header_len: 0,
|
|
|
|
payload_len: 0,
|
|
|
|
cipher: cipher,
|
|
|
|
phantom_data: PhantomData,
|
|
|
|
header_processed: false,
|
|
|
|
aad_buffer: [0; 16],
|
|
|
|
aad_buffer_len: 0,
|
|
|
|
};
|
|
|
|
|
|
|
|
T::regs().cr().modify(|w| w.set_crypen(false));
|
|
|
|
|
|
|
|
let key = ctx.cipher.key();
|
|
|
|
|
|
|
|
if key.len() == (128 / 8) {
|
|
|
|
T::regs().cr().modify(|w| w.set_keysize(0));
|
|
|
|
} else if key.len() == (192 / 8) {
|
|
|
|
T::regs().cr().modify(|w| w.set_keysize(1));
|
|
|
|
} else if key.len() == (256 / 8) {
|
|
|
|
T::regs().cr().modify(|w| w.set_keysize(2));
|
|
|
|
}
|
|
|
|
|
|
|
|
self.load_key(key);
|
|
|
|
|
|
|
|
// Set data type to 8-bit. This will match software implementations.
|
|
|
|
T::regs().cr().modify(|w| w.set_datatype(2));
|
|
|
|
|
|
|
|
ctx.cipher.prepare_key(&T::regs());
|
|
|
|
|
|
|
|
ctx.cipher.set_algomode(&T::regs());
|
|
|
|
|
|
|
|
// Set encrypt/decrypt
|
|
|
|
if dir == Direction::Encrypt {
|
|
|
|
T::regs().cr().modify(|w| w.set_algodir(false));
|
|
|
|
} else {
|
|
|
|
T::regs().cr().modify(|w| w.set_algodir(true));
|
|
|
|
}
|
|
|
|
|
|
|
|
// Load the IV into the registers.
|
|
|
|
let iv = ctx.cipher.iv();
|
|
|
|
let mut full_iv: [u8; 16] = [0; 16];
|
|
|
|
full_iv[0..iv.len()].copy_from_slice(iv);
|
|
|
|
let mut iv_idx = 0;
|
|
|
|
let mut iv_word: [u8; 4] = [0; 4];
|
|
|
|
iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]);
|
|
|
|
iv_idx += 4;
|
|
|
|
T::regs().init(0).ivlr().write_value(u32::from_be_bytes(iv_word));
|
|
|
|
iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]);
|
|
|
|
iv_idx += 4;
|
|
|
|
T::regs().init(0).ivrr().write_value(u32::from_be_bytes(iv_word));
|
|
|
|
iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]);
|
|
|
|
iv_idx += 4;
|
|
|
|
T::regs().init(1).ivlr().write_value(u32::from_be_bytes(iv_word));
|
|
|
|
iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]);
|
|
|
|
T::regs().init(1).ivrr().write_value(u32::from_be_bytes(iv_word));
|
|
|
|
|
|
|
|
// Flush in/out FIFOs
|
|
|
|
T::regs().cr().modify(|w| w.fflush());
|
|
|
|
|
|
|
|
ctx.cipher.init_phase_blocking(&T::regs(), self);
|
|
|
|
|
|
|
|
self.store_context(&mut ctx);
|
|
|
|
|
|
|
|
ctx
|
|
|
|
}
|
|
|
|
|
2024-03-12 14:52:34 -04:00
|
|
|
/// Start a new encrypt or decrypt operation for the given cipher.
|
2024-03-12 12:01:14 -04:00
|
|
|
pub async fn start<'c, C: Cipher<'c> + CipherSized + IVSized>(&mut self, cipher: &'c C, dir: Direction) -> Context<'c, C>
|
|
|
|
where
|
|
|
|
DmaIn: crate::cryp::DmaIn<T>,
|
|
|
|
DmaOut: crate::cryp::DmaOut<T>,
|
|
|
|
{
|
2024-02-18 21:40:18 -05:00
|
|
|
let mut ctx: Context<'c, C> = Context {
|
2024-02-14 20:24:52 -05:00
|
|
|
dir,
|
|
|
|
last_block_processed: false,
|
|
|
|
cr: 0,
|
|
|
|
iv: [0; 4],
|
|
|
|
csgcmccm: [0; 8],
|
|
|
|
csgcm: [0; 8],
|
|
|
|
aad_complete: false,
|
2024-02-16 13:15:14 -05:00
|
|
|
header_len: 0,
|
|
|
|
payload_len: 0,
|
2024-02-18 21:40:18 -05:00
|
|
|
cipher: cipher,
|
|
|
|
phantom_data: PhantomData,
|
2024-02-20 11:54:39 -05:00
|
|
|
header_processed: false,
|
|
|
|
aad_buffer: [0; 16],
|
|
|
|
aad_buffer_len: 0,
|
2024-02-14 20:24:52 -05:00
|
|
|
};
|
2024-02-13 10:11:54 -05:00
|
|
|
|
2024-02-14 20:24:52 -05:00
|
|
|
T::regs().cr().modify(|w| w.set_crypen(false));
|
2024-02-13 10:11:54 -05:00
|
|
|
|
2024-02-18 21:40:18 -05:00
|
|
|
let key = ctx.cipher.key();
|
2024-02-13 10:11:54 -05:00
|
|
|
|
2024-02-18 21:40:18 -05:00
|
|
|
if key.len() == (128 / 8) {
|
|
|
|
T::regs().cr().modify(|w| w.set_keysize(0));
|
|
|
|
} else if key.len() == (192 / 8) {
|
|
|
|
T::regs().cr().modify(|w| w.set_keysize(1));
|
|
|
|
} else if key.len() == (256 / 8) {
|
|
|
|
T::regs().cr().modify(|w| w.set_keysize(2));
|
2024-02-13 10:11:54 -05:00
|
|
|
}
|
|
|
|
|
2024-02-14 20:24:52 -05:00
|
|
|
self.load_key(key);
|
2024-02-13 10:11:54 -05:00
|
|
|
|
|
|
|
// Set data type to 8-bit. This will match software implementations.
|
|
|
|
T::regs().cr().modify(|w| w.set_datatype(2));
|
|
|
|
|
2024-02-18 21:40:18 -05:00
|
|
|
ctx.cipher.prepare_key(&T::regs());
|
2024-02-13 10:11:54 -05:00
|
|
|
|
2024-02-18 21:40:18 -05:00
|
|
|
ctx.cipher.set_algomode(&T::regs());
|
2024-02-13 10:11:54 -05:00
|
|
|
|
|
|
|
// Set encrypt/decrypt
|
|
|
|
if dir == Direction::Encrypt {
|
|
|
|
T::regs().cr().modify(|w| w.set_algodir(false));
|
|
|
|
} else {
|
|
|
|
T::regs().cr().modify(|w| w.set_algodir(true));
|
|
|
|
}
|
|
|
|
|
|
|
|
// Load the IV into the registers.
|
2024-02-18 21:40:18 -05:00
|
|
|
let iv = ctx.cipher.iv();
|
|
|
|
let mut full_iv: [u8; 16] = [0; 16];
|
|
|
|
full_iv[0..iv.len()].copy_from_slice(iv);
|
|
|
|
let mut iv_idx = 0;
|
|
|
|
let mut iv_word: [u8; 4] = [0; 4];
|
|
|
|
iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]);
|
|
|
|
iv_idx += 4;
|
|
|
|
T::regs().init(0).ivlr().write_value(u32::from_be_bytes(iv_word));
|
|
|
|
iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]);
|
|
|
|
iv_idx += 4;
|
|
|
|
T::regs().init(0).ivrr().write_value(u32::from_be_bytes(iv_word));
|
|
|
|
iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]);
|
|
|
|
iv_idx += 4;
|
|
|
|
T::regs().init(1).ivlr().write_value(u32::from_be_bytes(iv_word));
|
|
|
|
iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]);
|
|
|
|
T::regs().init(1).ivrr().write_value(u32::from_be_bytes(iv_word));
|
2024-02-13 10:11:54 -05:00
|
|
|
|
|
|
|
// Flush in/out FIFOs
|
|
|
|
T::regs().cr().modify(|w| w.fflush());
|
|
|
|
|
2024-03-12 12:01:14 -04:00
|
|
|
ctx.cipher.init_phase(&T::regs(), self).await;
|
2024-02-14 20:24:52 -05:00
|
|
|
|
|
|
|
self.store_context(&mut ctx);
|
2024-02-13 10:11:54 -05:00
|
|
|
|
|
|
|
ctx
|
|
|
|
}
|
2024-02-14 20:24:52 -05:00
|
|
|
|
2024-02-29 19:09:44 -05:00
|
|
|
#[cfg(any(cryp_v2, cryp_v3))]
|
2024-02-16 13:15:14 -05:00
|
|
|
/// Controls the header phase of cipher processing.
|
2024-03-12 14:52:34 -04:00
|
|
|
/// This function is only valid for authenticated ciphers including GCM, CCM, and GMAC.
|
|
|
|
/// All additional associated data (AAD) must be supplied to this function prior to starting the payload phase with `payload_blocking`.
|
|
|
|
/// The AAD must be supplied in multiples of the block size (128-bits for AES, 64-bits for DES), except when supplying the last block.
|
2024-02-16 13:15:14 -05:00
|
|
|
/// When supplying the last block of AAD, `last_aad_block` must be `true`.
|
2024-02-21 12:07:53 -05:00
|
|
|
pub fn aad_blocking<
|
|
|
|
'c,
|
|
|
|
const TAG_SIZE: usize,
|
|
|
|
C: Cipher<'c> + CipherSized + IVSized + CipherAuthenticated<TAG_SIZE>,
|
|
|
|
>(
|
2024-02-18 21:40:18 -05:00
|
|
|
&self,
|
|
|
|
ctx: &mut Context<'c, C>,
|
|
|
|
aad: &[u8],
|
|
|
|
last_aad_block: bool,
|
|
|
|
) {
|
2024-02-16 13:15:14 -05:00
|
|
|
self.load_context(ctx);
|
2024-02-14 20:24:52 -05:00
|
|
|
|
2024-02-16 13:15:14 -05:00
|
|
|
// Perform checks for correctness.
|
|
|
|
if ctx.aad_complete {
|
2024-02-20 11:54:39 -05:00
|
|
|
panic!("Cannot update AAD after starting payload!")
|
2024-02-16 13:15:14 -05:00
|
|
|
}
|
2024-02-14 20:24:52 -05:00
|
|
|
|
2024-02-16 13:15:14 -05:00
|
|
|
ctx.header_len += aad.len() as u64;
|
2024-02-14 20:24:52 -05:00
|
|
|
|
2024-02-18 21:40:18 -05:00
|
|
|
// Header phase
|
2024-02-16 13:15:14 -05:00
|
|
|
T::regs().cr().modify(|w| w.set_crypen(false));
|
|
|
|
T::regs().cr().modify(|w| w.set_gcm_ccmph(1));
|
|
|
|
T::regs().cr().modify(|w| w.set_crypen(true));
|
2024-02-14 20:24:52 -05:00
|
|
|
|
2024-02-20 11:54:39 -05:00
|
|
|
// First write the header B1 block if not yet written.
|
|
|
|
if !ctx.header_processed {
|
|
|
|
ctx.header_processed = true;
|
|
|
|
let header = ctx.cipher.get_header_block();
|
|
|
|
ctx.aad_buffer[0..header.len()].copy_from_slice(header);
|
|
|
|
ctx.aad_buffer_len += header.len();
|
|
|
|
}
|
|
|
|
|
|
|
|
// Fill the header block to make a full block.
|
|
|
|
let len_to_copy = min(aad.len(), C::BLOCK_SIZE - ctx.aad_buffer_len);
|
|
|
|
ctx.aad_buffer[ctx.aad_buffer_len..ctx.aad_buffer_len + len_to_copy].copy_from_slice(&aad[..len_to_copy]);
|
|
|
|
ctx.aad_buffer_len += len_to_copy;
|
|
|
|
ctx.aad_buffer[ctx.aad_buffer_len..].fill(0);
|
|
|
|
let mut aad_len_remaining = aad.len() - len_to_copy;
|
|
|
|
|
|
|
|
if ctx.aad_buffer_len < C::BLOCK_SIZE {
|
|
|
|
// The buffer isn't full and this is the last buffer, so process it as is (already padded).
|
|
|
|
if last_aad_block {
|
2024-03-05 11:25:56 -05:00
|
|
|
self.write_bytes_blocking(C::BLOCK_SIZE, &ctx.aad_buffer);
|
2024-02-20 11:54:39 -05:00
|
|
|
// Block until input FIFO is empty.
|
|
|
|
while !T::regs().sr().read().ifem() {}
|
|
|
|
|
|
|
|
// Switch to payload phase.
|
|
|
|
ctx.aad_complete = true;
|
|
|
|
T::regs().cr().modify(|w| w.set_crypen(false));
|
|
|
|
T::regs().cr().modify(|w| w.set_gcm_ccmph(2));
|
|
|
|
T::regs().cr().modify(|w| w.fflush());
|
|
|
|
} else {
|
|
|
|
// Just return because we don't yet have a full block to process.
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
// Load the full block from the buffer.
|
2024-03-05 11:25:56 -05:00
|
|
|
self.write_bytes_blocking(C::BLOCK_SIZE, &ctx.aad_buffer);
|
2024-02-16 13:15:14 -05:00
|
|
|
// Block until input FIFO is empty.
|
|
|
|
while !T::regs().sr().read().ifem() {}
|
|
|
|
}
|
2024-02-14 20:24:52 -05:00
|
|
|
|
2024-02-20 11:54:39 -05:00
|
|
|
// Handle a partial block that is passed in.
|
|
|
|
ctx.aad_buffer_len = 0;
|
|
|
|
let leftovers = aad_len_remaining % C::BLOCK_SIZE;
|
|
|
|
ctx.aad_buffer[..leftovers].copy_from_slice(&aad[aad.len() - leftovers..aad.len()]);
|
2024-02-24 15:55:20 -05:00
|
|
|
ctx.aad_buffer_len += leftovers;
|
|
|
|
ctx.aad_buffer[ctx.aad_buffer_len..].fill(0);
|
2024-02-20 11:54:39 -05:00
|
|
|
aad_len_remaining -= leftovers;
|
|
|
|
assert_eq!(aad_len_remaining % C::BLOCK_SIZE, 0);
|
|
|
|
|
|
|
|
// Load full data blocks into core.
|
|
|
|
let num_full_blocks = aad_len_remaining / C::BLOCK_SIZE;
|
2024-03-05 11:25:56 -05:00
|
|
|
let start_index = len_to_copy;
|
|
|
|
let end_index = start_index + (C::BLOCK_SIZE * num_full_blocks);
|
|
|
|
self.write_bytes_blocking(C::BLOCK_SIZE, &aad[start_index..end_index]);
|
2024-02-16 13:15:14 -05:00
|
|
|
|
|
|
|
if last_aad_block {
|
2024-02-24 15:55:20 -05:00
|
|
|
if leftovers > 0 {
|
2024-03-05 11:25:56 -05:00
|
|
|
self.write_bytes_blocking(C::BLOCK_SIZE, &ctx.aad_buffer);
|
2024-02-24 15:55:20 -05:00
|
|
|
}
|
2024-02-16 13:15:14 -05:00
|
|
|
// Switch to payload phase.
|
|
|
|
ctx.aad_complete = true;
|
|
|
|
T::regs().cr().modify(|w| w.set_crypen(false));
|
|
|
|
T::regs().cr().modify(|w| w.set_gcm_ccmph(2));
|
|
|
|
T::regs().cr().modify(|w| w.fflush());
|
|
|
|
}
|
|
|
|
|
|
|
|
self.store_context(ctx);
|
|
|
|
}
|
|
|
|
|
2024-03-12 12:01:14 -04:00
|
|
|
#[cfg(any(cryp_v2, cryp_v3))]
|
|
|
|
/// Controls the header phase of cipher processing.
|
2024-03-12 14:52:34 -04:00
|
|
|
/// This function is only valid for authenticated ciphers including GCM, CCM, and GMAC.
|
|
|
|
/// All additional associated data (AAD) must be supplied to this function prior to starting the payload phase with `payload`.
|
|
|
|
/// The AAD must be supplied in multiples of the block size (128-bits for AES, 64-bits for DES), except when supplying the last block.
|
2024-03-12 12:01:14 -04:00
|
|
|
/// When supplying the last block of AAD, `last_aad_block` must be `true`.
|
|
|
|
pub async fn aad<
|
|
|
|
'c,
|
|
|
|
const TAG_SIZE: usize,
|
|
|
|
C: Cipher<'c> + CipherSized + IVSized + CipherAuthenticated<TAG_SIZE>,
|
|
|
|
>(
|
|
|
|
&mut self,
|
|
|
|
ctx: &mut Context<'c, C>,
|
|
|
|
aad: &[u8],
|
|
|
|
last_aad_block: bool,
|
|
|
|
)
|
|
|
|
where
|
|
|
|
DmaIn: crate::cryp::DmaIn<T>,
|
|
|
|
DmaOut: crate::cryp::DmaOut<T>,
|
|
|
|
{
|
|
|
|
self.load_context(ctx);
|
|
|
|
|
|
|
|
// Perform checks for correctness.
|
|
|
|
if ctx.aad_complete {
|
|
|
|
panic!("Cannot update AAD after starting payload!")
|
|
|
|
}
|
|
|
|
|
|
|
|
ctx.header_len += aad.len() as u64;
|
|
|
|
|
|
|
|
// Header phase
|
|
|
|
T::regs().cr().modify(|w| w.set_crypen(false));
|
|
|
|
T::regs().cr().modify(|w| w.set_gcm_ccmph(1));
|
|
|
|
T::regs().cr().modify(|w| w.set_crypen(true));
|
|
|
|
|
|
|
|
// First write the header B1 block if not yet written.
|
|
|
|
if !ctx.header_processed {
|
|
|
|
ctx.header_processed = true;
|
|
|
|
let header = ctx.cipher.get_header_block();
|
|
|
|
ctx.aad_buffer[0..header.len()].copy_from_slice(header);
|
|
|
|
ctx.aad_buffer_len += header.len();
|
|
|
|
}
|
|
|
|
|
|
|
|
// Fill the header block to make a full block.
|
|
|
|
let len_to_copy = min(aad.len(), C::BLOCK_SIZE - ctx.aad_buffer_len);
|
|
|
|
ctx.aad_buffer[ctx.aad_buffer_len..ctx.aad_buffer_len + len_to_copy].copy_from_slice(&aad[..len_to_copy]);
|
|
|
|
ctx.aad_buffer_len += len_to_copy;
|
|
|
|
ctx.aad_buffer[ctx.aad_buffer_len..].fill(0);
|
|
|
|
let mut aad_len_remaining = aad.len() - len_to_copy;
|
|
|
|
|
|
|
|
if ctx.aad_buffer_len < C::BLOCK_SIZE {
|
|
|
|
// The buffer isn't full and this is the last buffer, so process it as is (already padded).
|
|
|
|
if last_aad_block {
|
|
|
|
Self::write_bytes(&mut self.indma, C::BLOCK_SIZE, &ctx.aad_buffer).await;
|
|
|
|
assert_eq!(T::regs().sr().read().ifem(), true);
|
|
|
|
|
|
|
|
// Switch to payload phase.
|
|
|
|
ctx.aad_complete = true;
|
|
|
|
T::regs().cr().modify(|w| w.set_crypen(false));
|
|
|
|
T::regs().cr().modify(|w| w.set_gcm_ccmph(2));
|
|
|
|
T::regs().cr().modify(|w| w.fflush());
|
|
|
|
} else {
|
|
|
|
// Just return because we don't yet have a full block to process.
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
// Load the full block from the buffer.
|
|
|
|
Self::write_bytes(&mut self.indma, C::BLOCK_SIZE, &ctx.aad_buffer).await;
|
|
|
|
assert_eq!(T::regs().sr().read().ifem(), true);
|
|
|
|
}
|
|
|
|
|
|
|
|
// Handle a partial block that is passed in.
|
|
|
|
ctx.aad_buffer_len = 0;
|
|
|
|
let leftovers = aad_len_remaining % C::BLOCK_SIZE;
|
|
|
|
ctx.aad_buffer[..leftovers].copy_from_slice(&aad[aad.len() - leftovers..aad.len()]);
|
|
|
|
ctx.aad_buffer_len += leftovers;
|
|
|
|
ctx.aad_buffer[ctx.aad_buffer_len..].fill(0);
|
|
|
|
aad_len_remaining -= leftovers;
|
|
|
|
assert_eq!(aad_len_remaining % C::BLOCK_SIZE, 0);
|
|
|
|
|
|
|
|
// Load full data blocks into core.
|
|
|
|
let num_full_blocks = aad_len_remaining / C::BLOCK_SIZE;
|
|
|
|
let start_index = len_to_copy;
|
|
|
|
let end_index = start_index + (C::BLOCK_SIZE * num_full_blocks);
|
|
|
|
Self::write_bytes(&mut self.indma, C::BLOCK_SIZE, &aad[start_index..end_index]).await;
|
|
|
|
|
|
|
|
if last_aad_block {
|
|
|
|
if leftovers > 0 {
|
|
|
|
Self::write_bytes(&mut self.indma, C::BLOCK_SIZE, &ctx.aad_buffer).await;
|
|
|
|
assert_eq!(T::regs().sr().read().ifem(), true);
|
|
|
|
}
|
|
|
|
// Switch to payload phase.
|
|
|
|
ctx.aad_complete = true;
|
|
|
|
T::regs().cr().modify(|w| w.set_crypen(false));
|
|
|
|
T::regs().cr().modify(|w| w.set_gcm_ccmph(2));
|
|
|
|
T::regs().cr().modify(|w| w.fflush());
|
|
|
|
}
|
|
|
|
|
|
|
|
self.store_context(ctx);
|
|
|
|
}
|
|
|
|
|
2024-02-16 13:15:14 -05:00
|
|
|
/// Performs encryption/decryption on the provided context.
|
|
|
|
/// The context determines algorithm, mode, and state of the crypto accelerator.
|
|
|
|
/// When the last piece of data is supplied, `last_block` should be `true`.
|
|
|
|
/// This function panics under various mismatches of parameters.
|
2024-03-12 14:52:34 -04:00
|
|
|
/// Output buffer must be at least as long as the input buffer.
|
2024-02-16 13:15:14 -05:00
|
|
|
/// Data must be a multiple of block size (128-bits for AES, 64-bits for DES) for CBC and ECB modes.
|
|
|
|
/// Padding or ciphertext stealing must be managed by the application for these modes.
|
|
|
|
/// Data must also be a multiple of block size unless `last_block` is `true`.
|
2024-02-21 12:07:53 -05:00
|
|
|
pub fn payload_blocking<'c, C: Cipher<'c> + CipherSized + IVSized>(
|
2024-02-18 21:40:18 -05:00
|
|
|
&self,
|
|
|
|
ctx: &mut Context<'c, C>,
|
|
|
|
input: &[u8],
|
|
|
|
output: &mut [u8],
|
|
|
|
last_block: bool,
|
|
|
|
) {
|
2024-02-16 13:15:14 -05:00
|
|
|
self.load_context(ctx);
|
2024-02-14 20:24:52 -05:00
|
|
|
|
2024-02-18 21:40:18 -05:00
|
|
|
let last_block_remainder = input.len() % C::BLOCK_SIZE;
|
2024-02-14 20:24:52 -05:00
|
|
|
|
|
|
|
// Perform checks for correctness.
|
2024-02-16 13:15:14 -05:00
|
|
|
if !ctx.aad_complete && ctx.header_len > 0 {
|
|
|
|
panic!("Additional associated data must be processed first!");
|
|
|
|
} else if !ctx.aad_complete {
|
2024-02-29 19:09:44 -05:00
|
|
|
#[cfg(any(cryp_v2, cryp_v3))]
|
2024-02-21 12:39:10 -05:00
|
|
|
{
|
|
|
|
ctx.aad_complete = true;
|
|
|
|
T::regs().cr().modify(|w| w.set_crypen(false));
|
|
|
|
T::regs().cr().modify(|w| w.set_gcm_ccmph(2));
|
|
|
|
T::regs().cr().modify(|w| w.fflush());
|
|
|
|
T::regs().cr().modify(|w| w.set_crypen(true));
|
|
|
|
}
|
2024-02-16 13:15:14 -05:00
|
|
|
}
|
2024-02-14 20:24:52 -05:00
|
|
|
if ctx.last_block_processed {
|
|
|
|
panic!("The last block has already been processed!");
|
|
|
|
}
|
2024-02-24 15:55:20 -05:00
|
|
|
if input.len() > output.len() {
|
2024-02-14 20:24:52 -05:00
|
|
|
panic!("Output buffer length must match input length.");
|
|
|
|
}
|
|
|
|
if !last_block {
|
|
|
|
if last_block_remainder != 0 {
|
2024-02-18 21:40:18 -05:00
|
|
|
panic!("Input length must be a multiple of {} bytes.", C::BLOCK_SIZE);
|
2024-02-14 20:24:52 -05:00
|
|
|
}
|
|
|
|
}
|
2024-02-18 21:40:18 -05:00
|
|
|
if C::REQUIRES_PADDING {
|
2024-02-14 20:24:52 -05:00
|
|
|
if last_block_remainder != 0 {
|
2024-02-18 21:40:18 -05:00
|
|
|
panic!("Input must be a multiple of {} bytes in ECB and CBC modes. Consider padding or ciphertext stealing.", C::BLOCK_SIZE);
|
2024-02-14 20:24:52 -05:00
|
|
|
}
|
|
|
|
}
|
2024-02-14 22:11:38 -05:00
|
|
|
if last_block {
|
|
|
|
ctx.last_block_processed = true;
|
|
|
|
}
|
|
|
|
|
2024-02-14 20:24:52 -05:00
|
|
|
// Load data into core, block by block.
|
2024-02-18 21:40:18 -05:00
|
|
|
let num_full_blocks = input.len() / C::BLOCK_SIZE;
|
2024-02-14 20:24:52 -05:00
|
|
|
for block in 0..num_full_blocks {
|
2024-03-05 11:25:56 -05:00
|
|
|
let index = block * C::BLOCK_SIZE;
|
2024-02-14 20:24:52 -05:00
|
|
|
// Write block in
|
2024-03-12 14:52:34 -04:00
|
|
|
self.write_bytes_blocking(C::BLOCK_SIZE, &input[index..index + C::BLOCK_SIZE]);
|
2024-02-14 20:24:52 -05:00
|
|
|
// Read block out
|
2024-03-12 14:52:34 -04:00
|
|
|
self.read_bytes_blocking(C::BLOCK_SIZE, &mut output[index..index + C::BLOCK_SIZE]);
|
2024-02-14 20:24:52 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
// Handle the final block, which is incomplete.
|
|
|
|
if last_block_remainder > 0 {
|
2024-02-29 19:09:44 -05:00
|
|
|
let padding_len = C::BLOCK_SIZE - last_block_remainder;
|
2024-03-12 12:01:14 -04:00
|
|
|
let temp1 = ctx.cipher.pre_final(&T::regs(), ctx.dir, padding_len);
|
2024-02-14 20:24:52 -05:00
|
|
|
|
2024-02-16 13:15:14 -05:00
|
|
|
let mut intermediate_data: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE];
|
|
|
|
let mut last_block: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE];
|
2024-02-14 22:11:38 -05:00
|
|
|
last_block[..last_block_remainder].copy_from_slice(&input[input.len() - last_block_remainder..input.len()]);
|
2024-03-05 11:25:56 -05:00
|
|
|
self.write_bytes_blocking(C::BLOCK_SIZE, &last_block);
|
|
|
|
self.read_bytes_blocking(C::BLOCK_SIZE, &mut intermediate_data);
|
2024-02-14 20:24:52 -05:00
|
|
|
|
|
|
|
// Handle the last block depending on mode.
|
2024-02-14 22:11:38 -05:00
|
|
|
let output_len = output.len();
|
|
|
|
output[output_len - last_block_remainder..output_len]
|
2024-02-14 20:24:52 -05:00
|
|
|
.copy_from_slice(&intermediate_data[0..last_block_remainder]);
|
|
|
|
|
2024-02-20 11:54:39 -05:00
|
|
|
let mut mask: [u8; 16] = [0; 16];
|
|
|
|
mask[..last_block_remainder].fill(0xFF);
|
|
|
|
ctx.cipher
|
2024-03-12 12:01:14 -04:00
|
|
|
.post_final_blocking(&T::regs(), self, ctx.dir, &mut intermediate_data, temp1, mask);
|
|
|
|
}
|
|
|
|
|
|
|
|
ctx.payload_len += input.len() as u64;
|
|
|
|
|
|
|
|
self.store_context(ctx);
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Performs encryption/decryption on the provided context.
/// The context determines algorithm, mode, and state of the crypto accelerator.
/// When the last piece of data is supplied, `last_block` should be `true`.
/// This function panics under various mismatches of parameters.
/// Output buffer must be at least as long as the input buffer.
/// Data must be a multiple of block size (128-bits for AES, 64-bits for DES) for CBC and ECB modes.
/// Padding or ciphertext stealing must be managed by the application for these modes.
/// Data must also be a multiple of block size unless `last_block` is `true`.
pub async fn payload<'c, C: Cipher<'c> + CipherSized + IVSized>(
    &mut self,
    ctx: &mut Context<'c, C>,
    input: &[u8],
    output: &mut [u8],
    last_block: bool,
)
where
    DmaIn: crate::cryp::DmaIn<T>,
    DmaOut: crate::cryp::DmaOut<T>,
{
    // Restore the peripheral state saved for this context (key, IV, CR, GCM/CCM state).
    self.load_context(ctx);

    // Bytes left over after the last whole block; nonzero only allowed on the final call.
    let last_block_remainder = input.len() % C::BLOCK_SIZE;

    // Perform checks for correctness.
    if !ctx.aad_complete && ctx.header_len > 0 {
        panic!("Additional associated data must be processed first!");
    } else if !ctx.aad_complete {
        // No AAD was supplied: advance the GCM/CCM engine directly to the
        // payload phase (GCM_CCMPH = 2), flushing the FIFOs with the core
        // disabled before re-enabling it.
        #[cfg(any(cryp_v2, cryp_v3))]
        {
            ctx.aad_complete = true;
            T::regs().cr().modify(|w| w.set_crypen(false));
            T::regs().cr().modify(|w| w.set_gcm_ccmph(2));
            T::regs().cr().modify(|w| w.fflush());
            T::regs().cr().modify(|w| w.set_crypen(true));
        }
    }
    if ctx.last_block_processed {
        panic!("The last block has already been processed!");
    }
    // Output may be longer than input; it must never be shorter.
    if input.len() > output.len() {
        panic!("Output buffer length must match input length.");
    }
    if !last_block {
        if last_block_remainder != 0 {
            panic!("Input length must be a multiple of {} bytes.", C::BLOCK_SIZE);
        }
    }
    if C::REQUIRES_PADDING {
        if last_block_remainder != 0 {
            panic!("Input must be a multiple of {} bytes in ECB and CBC modes. Consider padding or ciphertext stealing.", C::BLOCK_SIZE);
        }
    }
    if last_block {
        ctx.last_block_processed = true;
    }

    // Load data into core, block by block.
    let num_full_blocks = input.len() / C::BLOCK_SIZE;
    for block in 0..num_full_blocks {
        let index = block * C::BLOCK_SIZE;
        // Read block out
        let read = Self::read_bytes(&mut self.outdma, C::BLOCK_SIZE, &mut output[index..index + C::BLOCK_SIZE]);
        // Write block in
        let write = Self::write_bytes(&mut self.indma, C::BLOCK_SIZE, &input[index..index + C::BLOCK_SIZE]);
        // Run both DMA transfers concurrently; the core consumes input and
        // produces output in lockstep, so neither side can complete alone.
        embassy_futures::join::join(read, write).await;
    }

    // Handle the final block, which is incomplete.
    if last_block_remainder > 0 {
        let padding_len = C::BLOCK_SIZE - last_block_remainder;
        // Cipher-specific preparation before a partial block (returns data the
        // post-final step needs, e.g. for GCM/CCM — see the Cipher impls).
        let temp1 = ctx.cipher.pre_final(&T::regs(), ctx.dir, padding_len);

        let mut intermediate_data: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE];
        // Zero-padded copy of the trailing partial block.
        let mut last_block: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE];
        last_block[..last_block_remainder].copy_from_slice(&input[input.len() - last_block_remainder..input.len()]);
        let read = Self::read_bytes(&mut self.outdma, C::BLOCK_SIZE, &mut intermediate_data);
        let write = Self::write_bytes(&mut self.indma, C::BLOCK_SIZE, &last_block);
        embassy_futures::join::join(read, write).await;

        // Handle the last block depending on mode.
        // Only the meaningful (non-padding) bytes are copied to the output.
        let output_len = output.len();
        output[output_len - last_block_remainder..output_len]
            .copy_from_slice(&intermediate_data[0..last_block_remainder]);

        // Mask marking which bytes of the final block are valid payload.
        let mut mask: [u8; 16] = [0; 16];
        mask[..last_block_remainder].fill(0xFF);
        // Cipher-specific fixup after the partial block (e.g. correcting the
        // GCM/CCM authentication state for the padded bytes).
        ctx.cipher
            .post_final(&T::regs(), self, ctx.dir, &mut intermediate_data, temp1, mask).await;
    }

    // Track total payload length for the final tag computation.
    ctx.payload_len += input.len() as u64;

    // Save peripheral state back into the context so another context can run.
    self.store_context(ctx);
}
|
|
|
|
|
2024-02-29 19:09:44 -05:00
|
|
|
#[cfg(any(cryp_v2, cryp_v3))]
/// Generates an authentication tag for authenticated ciphers including GCM, CCM, and GMAC.
/// Called after all the data has been encrypted/decrypted by `payload`.
pub fn finish_blocking<
    'c,
    const TAG_SIZE: usize,
    C: Cipher<'c> + CipherSized + IVSized + CipherAuthenticated<TAG_SIZE>,
>(
    &self,
    mut ctx: Context<'c, C>,
) -> [u8; TAG_SIZE] {
    // Restore the peripheral state for this context.
    self.load_context(&mut ctx);

    // Enter the final (tag) phase: GCM_CCMPH = 3 with the core briefly disabled.
    T::regs().cr().modify(|w| w.set_crypen(false));
    T::regs().cr().modify(|w| w.set_gcm_ccmph(3));
    T::regs().cr().modify(|w| w.set_crypen(true));

    // Bit lengths of the AAD and payload, split into high/low 32-bit halves.
    let headerlen1: u32 = ((ctx.header_len * 8) >> 32) as u32;
    let headerlen2: u32 = (ctx.header_len * 8) as u32;
    let payloadlen1: u32 = ((ctx.payload_len * 8) >> 32) as u32;
    let payloadlen2: u32 = (ctx.payload_len * 8) as u32;

    // v2 hardware takes the length words byte-swapped; v3 takes them as-is.
    #[cfg(cryp_v2)]
    let footer: [u32; 4] = [
        headerlen1.swap_bytes(),
        headerlen2.swap_bytes(),
        payloadlen1.swap_bytes(),
        payloadlen2.swap_bytes(),
    ];
    #[cfg(cryp_v3)]
    let footer: [u32; 4] = [headerlen1, headerlen2, payloadlen1, payloadlen2];

    // Feed the length block into the core to trigger tag computation.
    self.write_words_blocking(C::BLOCK_SIZE, &footer);

    // Wait until the computed tag appears in the output FIFO.
    while !T::regs().sr().read().ofne() {}

    // The hardware always produces a 128-bit tag; truncate to the cipher's tag size.
    let mut full_tag: [u8; 16] = [0; 16];
    self.read_bytes_blocking(C::BLOCK_SIZE, &mut full_tag);
    let mut tag: [u8; TAG_SIZE] = [0; TAG_SIZE];
    tag.copy_from_slice(&full_tag[0..TAG_SIZE]);

    T::regs().cr().modify(|w| w.set_crypen(false));

    tag
}
|
|
|
|
|
2024-03-12 12:01:14 -04:00
|
|
|
#[cfg(any(cryp_v2, cryp_v3))]
/// Generates an authentication tag for authenticated ciphers including GCM, CCM, and GMAC.
/// Called after all the data has been encrypted/decrypted by `payload`.
pub async fn finish<'c, const TAG_SIZE: usize, C: Cipher<'c> + CipherSized + IVSized + CipherAuthenticated<TAG_SIZE>>(&mut self, mut ctx: Context<'c, C>) -> [u8; TAG_SIZE]
where
    DmaIn: crate::cryp::DmaIn<T>,
    DmaOut: crate::cryp::DmaOut<T>,
{
    // Restore the peripheral state for this context.
    self.load_context(&mut ctx);

    // Enter the final (tag) phase: GCM_CCMPH = 3 with the core briefly disabled.
    T::regs().cr().modify(|w| w.set_crypen(false));
    T::regs().cr().modify(|w| w.set_gcm_ccmph(3));
    T::regs().cr().modify(|w| w.set_crypen(true));

    // Bit lengths of the AAD and payload, split into high/low 32-bit halves.
    let headerlen1: u32 = ((ctx.header_len * 8) >> 32) as u32;
    let headerlen2: u32 = (ctx.header_len * 8) as u32;
    let payloadlen1: u32 = ((ctx.payload_len * 8) >> 32) as u32;
    let payloadlen2: u32 = (ctx.payload_len * 8) as u32;

    // v2 hardware takes the length words byte-swapped; v3 takes them as-is.
    #[cfg(cryp_v2)]
    let footer: [u32; 4] = [
        headerlen1.swap_bytes(),
        headerlen2.swap_bytes(),
        payloadlen1.swap_bytes(),
        payloadlen2.swap_bytes(),
    ];
    #[cfg(cryp_v3)]
    let footer: [u32; 4] = [headerlen1, headerlen2, payloadlen1, payloadlen2];

    // Push the length block in while simultaneously draining the tag out.
    let write = Self::write_words(&mut self.indma, C::BLOCK_SIZE, &footer);

    let mut full_tag: [u8; 16] = [0; 16];
    let read = Self::read_bytes(&mut self.outdma, C::BLOCK_SIZE, &mut full_tag);

    embassy_futures::join::join(read, write).await;

    // The hardware always produces a 128-bit tag; truncate to the cipher's tag size.
    let mut tag: [u8; TAG_SIZE] = [0; TAG_SIZE];
    tag.copy_from_slice(&full_tag[0..TAG_SIZE]);

    T::regs().cr().modify(|w| w.set_crypen(false));

    tag
}
|
|
|
|
|
2024-02-14 20:24:52 -05:00
|
|
|
fn load_key(&self, key: &[u8]) {
|
|
|
|
// Load the key into the registers.
|
|
|
|
let mut keyidx = 0;
|
|
|
|
let mut keyword: [u8; 4] = [0; 4];
|
|
|
|
let keylen = key.len() * 8;
|
|
|
|
if keylen > 192 {
|
|
|
|
keyword.copy_from_slice(&key[keyidx..keyidx + 4]);
|
|
|
|
keyidx += 4;
|
|
|
|
T::regs().key(0).klr().write_value(u32::from_be_bytes(keyword));
|
|
|
|
keyword.copy_from_slice(&key[keyidx..keyidx + 4]);
|
|
|
|
keyidx += 4;
|
|
|
|
T::regs().key(0).krr().write_value(u32::from_be_bytes(keyword));
|
|
|
|
}
|
|
|
|
if keylen > 128 {
|
|
|
|
keyword.copy_from_slice(&key[keyidx..keyidx + 4]);
|
|
|
|
keyidx += 4;
|
|
|
|
T::regs().key(1).klr().write_value(u32::from_be_bytes(keyword));
|
|
|
|
keyword.copy_from_slice(&key[keyidx..keyidx + 4]);
|
|
|
|
keyidx += 4;
|
|
|
|
T::regs().key(1).krr().write_value(u32::from_be_bytes(keyword));
|
|
|
|
}
|
|
|
|
if keylen > 64 {
|
|
|
|
keyword.copy_from_slice(&key[keyidx..keyidx + 4]);
|
|
|
|
keyidx += 4;
|
|
|
|
T::regs().key(2).klr().write_value(u32::from_be_bytes(keyword));
|
|
|
|
keyword.copy_from_slice(&key[keyidx..keyidx + 4]);
|
|
|
|
keyidx += 4;
|
|
|
|
T::regs().key(2).krr().write_value(u32::from_be_bytes(keyword));
|
|
|
|
}
|
|
|
|
keyword.copy_from_slice(&key[keyidx..keyidx + 4]);
|
|
|
|
keyidx += 4;
|
|
|
|
T::regs().key(3).klr().write_value(u32::from_be_bytes(keyword));
|
2024-02-21 12:07:53 -05:00
|
|
|
keyword = [0; 4];
|
|
|
|
keyword[0..key.len() - keyidx].copy_from_slice(&key[keyidx..key.len()]);
|
2024-02-14 20:24:52 -05:00
|
|
|
T::regs().key(3).krr().write_value(u32::from_be_bytes(keyword));
|
|
|
|
}
|
|
|
|
|
2024-02-18 21:40:18 -05:00
|
|
|
fn store_context<'c, C: Cipher<'c> + CipherSized>(&self, ctx: &mut Context<'c, C>) {
|
2024-02-14 20:24:52 -05:00
|
|
|
// Wait for data block processing to finish.
|
|
|
|
while !T::regs().sr().read().ifem() {}
|
|
|
|
while T::regs().sr().read().ofne() {}
|
|
|
|
while T::regs().sr().read().busy() {}
|
|
|
|
|
|
|
|
// Disable crypto processor.
|
|
|
|
T::regs().cr().modify(|w| w.set_crypen(false));
|
|
|
|
|
|
|
|
// Save the peripheral state.
|
|
|
|
ctx.cr = T::regs().cr().read().0;
|
|
|
|
ctx.iv[0] = T::regs().init(0).ivlr().read();
|
|
|
|
ctx.iv[1] = T::regs().init(0).ivrr().read();
|
|
|
|
ctx.iv[2] = T::regs().init(1).ivlr().read();
|
|
|
|
ctx.iv[3] = T::regs().init(1).ivrr().read();
|
2024-02-21 12:39:10 -05:00
|
|
|
|
2024-02-29 19:09:44 -05:00
|
|
|
#[cfg(any(cryp_v2, cryp_v3))]
|
2024-02-14 20:24:52 -05:00
|
|
|
for i in 0..8 {
|
|
|
|
ctx.csgcmccm[i] = T::regs().csgcmccmr(i).read();
|
|
|
|
ctx.csgcm[i] = T::regs().csgcmr(i).read();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2024-02-18 21:40:18 -05:00
|
|
|
fn load_context<'c, C: Cipher<'c> + CipherSized>(&self, ctx: &Context<'c, C>) {
|
2024-02-14 20:24:52 -05:00
|
|
|
// Reload state registers.
|
|
|
|
T::regs().cr().write(|w| w.0 = ctx.cr);
|
|
|
|
T::regs().init(0).ivlr().write_value(ctx.iv[0]);
|
|
|
|
T::regs().init(0).ivrr().write_value(ctx.iv[1]);
|
|
|
|
T::regs().init(1).ivlr().write_value(ctx.iv[2]);
|
|
|
|
T::regs().init(1).ivrr().write_value(ctx.iv[3]);
|
2024-02-21 12:39:10 -05:00
|
|
|
|
2024-02-29 19:09:44 -05:00
|
|
|
#[cfg(any(cryp_v2, cryp_v3))]
|
2024-02-14 20:24:52 -05:00
|
|
|
for i in 0..8 {
|
|
|
|
T::regs().csgcmccmr(i).write_value(ctx.csgcmccm[i]);
|
|
|
|
T::regs().csgcmr(i).write_value(ctx.csgcm[i]);
|
|
|
|
}
|
2024-02-18 21:40:18 -05:00
|
|
|
self.load_key(ctx.cipher.key());
|
2024-02-14 20:24:52 -05:00
|
|
|
|
|
|
|
// Prepare key if applicable.
|
2024-02-18 21:40:18 -05:00
|
|
|
ctx.cipher.prepare_key(&T::regs());
|
2024-02-14 22:11:38 -05:00
|
|
|
T::regs().cr().write(|w| w.0 = ctx.cr);
|
2024-02-14 20:24:52 -05:00
|
|
|
|
|
|
|
// Enable crypto processor.
|
|
|
|
T::regs().cr().modify(|w| w.set_crypen(true));
|
|
|
|
}
|
2024-03-05 11:25:56 -05:00
|
|
|
|
|
|
|
fn write_bytes_blocking(&self, block_size: usize, blocks: &[u8]) {
|
|
|
|
// Ensure input is a multiple of block size.
|
|
|
|
assert_eq!(blocks.len() % block_size, 0);
|
|
|
|
let mut index = 0;
|
|
|
|
let end_index = blocks.len();
|
|
|
|
while index < end_index {
|
|
|
|
let mut in_word: [u8; 4] = [0; 4];
|
|
|
|
in_word.copy_from_slice(&blocks[index..index + 4]);
|
|
|
|
T::regs().din().write_value(u32::from_ne_bytes(in_word));
|
|
|
|
index += 4;
|
|
|
|
if index % block_size == 0 {
|
|
|
|
// Block until input FIFO is empty.
|
|
|
|
while !T::regs().sr().read().ifem() {}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2024-03-12 12:01:14 -04:00
|
|
|
/// DMA-driven counterpart of `write_bytes_blocking`: transfers `blocks` into
/// the core's input FIFO (DIN) word-by-word via the input DMA channel.
async fn write_bytes(dma: &mut PeripheralRef<'_, DmaIn>, block_size: usize, blocks: &[u8])
where
    DmaIn: crate::cryp::DmaIn<T>,
{
    // Nothing to transfer; skip starting a zero-length DMA.
    if blocks.len() == 0 {
        return;
    }
    // Ensure input is a multiple of block size.
    assert_eq!(blocks.len() % block_size, 0);
    // Configure DMA to transfer input to crypto core.
    let dma_request = dma.request();
    let dst_ptr = T::regs().din().as_ptr();
    let num_words = blocks.len() / 4;
    // Reinterpret the byte buffer as `num_words` 32-bit words for the transfer.
    let src_ptr = ptr::slice_from_raw_parts(blocks.as_ptr().cast(), num_words);
    let options = TransferOptions {
        priority: Priority::High,
        ..Default::default()
    };
    // SAFETY: `src_ptr` spans exactly the word-aligned length of `blocks`,
    // which outlives the transfer because it is awaited below.
    let dma_transfer = unsafe { Transfer::new_write_raw(dma, dma_request, src_ptr, dst_ptr, options) };
    // Enable DMA input requests from the peripheral.
    T::regs().dmacr().modify(|w| w.set_dien(true));
    // Wait for the transfer to complete.
    dma_transfer.await;
}
|
|
|
|
|
2024-03-05 11:25:56 -05:00
|
|
|
fn write_words_blocking(&self, block_size: usize, blocks: &[u32]) {
|
|
|
|
assert_eq!((blocks.len() * 4) % block_size, 0);
|
|
|
|
let mut byte_counter: usize = 0;
|
|
|
|
for word in blocks {
|
|
|
|
T::regs().din().write_value(*word);
|
|
|
|
byte_counter += 4;
|
|
|
|
if byte_counter % block_size == 0 {
|
|
|
|
// Block until input FIFO is empty.
|
|
|
|
while !T::regs().sr().read().ifem() {}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2024-03-12 12:01:14 -04:00
|
|
|
/// DMA-driven counterpart of `write_words_blocking`: transfers pre-packed
/// 32-bit words into the core's input FIFO (DIN) via the input DMA channel.
async fn write_words(dma: &mut PeripheralRef<'_, DmaIn>, block_size: usize, blocks: &[u32])
where
    DmaIn: crate::cryp::DmaIn<T>,
{
    // Nothing to transfer; skip starting a zero-length DMA.
    if blocks.len() == 0 {
        return;
    }
    // Ensure input is a multiple of block size.
    assert_eq!((blocks.len() * 4) % block_size, 0);
    // Configure DMA to transfer input to crypto core.
    let dma_request = dma.request();
    let dst_ptr = T::regs().din().as_ptr();
    let num_words = blocks.len();
    let src_ptr = ptr::slice_from_raw_parts(blocks.as_ptr().cast(), num_words);
    let options = TransferOptions {
        priority: Priority::High,
        ..Default::default()
    };
    // SAFETY: `src_ptr` covers exactly `blocks`, which outlives the transfer
    // because it is awaited below.
    let dma_transfer = unsafe { Transfer::new_write_raw(dma, dma_request, src_ptr, dst_ptr, options) };
    // Enable DMA input requests from the peripheral.
    T::regs().dmacr().modify(|w| w.set_dien(true));
    // Wait for the transfer to complete.
    dma_transfer.await;
}
|
|
|
|
|
2024-03-05 11:25:56 -05:00
|
|
|
fn read_bytes_blocking(&self, block_size: usize, blocks: &mut [u8]) {
|
|
|
|
// Block until there is output to read.
|
|
|
|
while !T::regs().sr().read().ofne() {}
|
|
|
|
// Ensure input is a multiple of block size.
|
|
|
|
assert_eq!(blocks.len() % block_size, 0);
|
|
|
|
// Read block out
|
|
|
|
let mut index = 0;
|
|
|
|
let end_index = blocks.len();
|
|
|
|
while index < end_index {
|
|
|
|
let out_word: u32 = T::regs().dout().read();
|
|
|
|
blocks[index..index + 4].copy_from_slice(u32::to_ne_bytes(out_word).as_slice());
|
|
|
|
index += 4;
|
|
|
|
}
|
|
|
|
}
|
2024-03-12 12:01:14 -04:00
|
|
|
|
|
|
|
/// DMA-driven counterpart of `read_bytes_blocking`: transfers output from the
/// core's output FIFO (DOUT) into `blocks` via the output DMA channel.
async fn read_bytes(dma: &mut PeripheralRef<'_, DmaOut>, block_size: usize, blocks: &mut [u8])
where
    DmaOut: crate::cryp::DmaOut<T>,
{
    // Nothing to transfer; skip starting a zero-length DMA.
    if blocks.len() == 0 {
        return;
    }
    // Ensure input is a multiple of block size.
    assert_eq!(blocks.len() % block_size, 0);
    // Configure DMA to get output from crypto core.
    let dma_request = dma.request();
    let src_ptr = T::regs().dout().as_ptr();
    let num_words = blocks.len() / 4;
    // Reinterpret the byte buffer as `num_words` 32-bit words for the transfer.
    let dst_ptr = ptr::slice_from_raw_parts_mut(blocks.as_mut_ptr().cast(), num_words);
    let options = TransferOptions {
        // Output side runs at higher priority than the input side —
        // NOTE(review): presumably to keep the output FIFO from stalling the
        // core; confirm against the reference manual.
        priority: Priority::VeryHigh,
        ..Default::default()
    };
    // SAFETY: `dst_ptr` spans exactly the word-aligned length of `blocks`,
    // which outlives the transfer because it is awaited below.
    let dma_transfer = unsafe { Transfer::new_read_raw(dma, dma_request, src_ptr, dst_ptr, options) };
    // Enable DMA output requests from the peripheral.
    T::regs().dmacr().modify(|w| w.set_doen(true));
    // Wait for the transfer to complete.
    dma_transfer.await;
}
|
2024-02-13 10:11:54 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
pub(crate) mod sealed {
    use super::*;

    /// Sealed half of the public `Instance` trait; kept in a crate-private
    /// module so downstream crates cannot implement `Instance` themselves.
    pub trait Instance {
        /// Returns this instance's CRYP register block.
        fn regs() -> pac::cryp::Cryp;
    }
}
|
|
|
|
|
2024-02-14 22:11:38 -05:00
|
|
|
/// CRYP instance trait.
///
/// Implemented (via `foreach_interrupt!`) for each CRYP peripheral singleton;
/// the sealed supertrait prevents implementations outside this crate.
pub trait Instance: sealed::Instance + Peripheral<P = Self> + crate::rcc::RccPeripheral + 'static + Send {
    /// Interrupt for this CRYP instance.
    type Interrupt: interrupt::typelevel::Interrupt;
}
|
|
|
|
|
|
|
|
// Implement `Instance` (and its sealed counterpart) for every CRYP peripheral
// that has a GLOBAL interrupt declared in the chip metadata.
foreach_interrupt!(
    ($inst:ident, cryp, CRYP, GLOBAL, $irq:ident) => {
        impl Instance for peripherals::$inst {
            type Interrupt = crate::interrupt::typelevel::$irq;
        }

        impl sealed::Instance for peripherals::$inst {
            fn regs() -> crate::pac::cryp::Cryp {
                crate::pac::$inst
            }
        }
    };
);
|
2024-03-12 12:01:14 -04:00
|
|
|
|
|
|
|
// DMA request marker traits for the CRYP input (DIN) and output (DOUT) channels.
dma_trait!(DmaIn, Instance);
dma_trait!(DmaOut, Instance);
|