From 79e5e8b052b56f8c6fc07d8407fcfc3aaf39bab3 Mon Sep 17 00:00:00 2001 From: Caleb Garrett <47389035+caleb-garrett@users.noreply.github.com> Date: Tue, 13 Feb 2024 10:11:54 -0500 Subject: [PATCH 01/23] Add cryp configuration. --- embassy-stm32/src/cryp/mod.rs | 227 ++++++++++++++++++++++++++++++++++ embassy-stm32/src/lib.rs | 2 + 2 files changed, 229 insertions(+) create mode 100644 embassy-stm32/src/cryp/mod.rs diff --git a/embassy-stm32/src/cryp/mod.rs b/embassy-stm32/src/cryp/mod.rs new file mode 100644 index 000000000..dedc6ddc5 --- /dev/null +++ b/embassy-stm32/src/cryp/mod.rs @@ -0,0 +1,227 @@ +use embassy_hal_internal::{into_ref, PeripheralRef}; +use pac::cryp::Init; + +use crate::pac; +use crate::peripherals::CRYP; +use crate::rcc::sealed::RccPeripheral; +use crate::{interrupt, peripherals, Peripheral}; + +pub struct Context<'c> { + key: &'c [u8], +} + +#[derive(PartialEq)] +pub enum Algorithm { + AES, + DES, + TDES, +} + +#[derive(PartialEq)] +pub enum Mode { + ECB, + CBC, + CTR, + GCM, + GMAC, + CCM, +} + +#[derive(PartialEq)] +pub enum Direction { + Encrypt, + Decrypt, +} + +/// Crypto Accelerator Driver +pub struct Cryp<'d, T: Instance, In, Out> { + _peripheral: PeripheralRef<'d, T>, + indma: PeripheralRef<'d, In>, + outdma: PeripheralRef<'d, Out>, +} + +type InitVector<'v> = Option<&'v [u8]>; + +impl<'d, T: Instance, In, Out> Cryp<'d, T, In, Out> { + /// Create a new CRYP driver. + pub fn new( + peri: impl Peripheral<P = T> + 'd, + indma: impl Peripheral<P = In> + 'd, + outdma: impl Peripheral<P = Out> + 'd, + ) -> Self { + CRYP::enable_and_reset(); + into_ref!(peri, indma, outdma); + let instance = Self { + _peripheral: peri, + indma: indma, + outdma: outdma, + }; + instance + } + + /// Start a new cipher operation. + /// Key size must be 128, 192, or 256 bits. + pub fn start(key: &[u8], iv: InitVector, algo: Algorithm, mode: Mode, dir: Direction) -> Context { + T::regs().cr().modify(|w| w.set_crypen(false)); + + let keylen = key.len() * 8; + let ivlen; + if let Some(iv) = iv { + ivlen = iv.len() * 8; + } else { + ivlen = 0; + } + + // Checks for correctness + if algo == Algorithm::AES { + match keylen { + 128 => T::regs().cr().write(|w| w.set_keysize(0)), + 192 => T::regs().cr().write(|w| w.set_keysize(1)), + 256 => T::regs().cr().write(|w| w.set_keysize(2)), + _ => panic!("Key length must be 128, 192, or 256 bits."), + } + + if (mode == Mode::GCM) && (ivlen != 96) { + panic!("IV length must be 96 bits for GCM."); + } else if (mode == Mode::CBC) && (ivlen != 128) { + panic!("IV length must be 128 bits for CBC."); + } else if (mode == Mode::CCM) && (ivlen != 128) { + panic!("IV length must be 128 bits for CCM."); + } else if (mode == Mode::CTR) && (ivlen != 64) { + panic!("IV length must be 64 bits for CTR."); + } else if (mode == Mode::GCM) && (ivlen != 96) { + panic!("IV length must be 96 bits for GCM."); + } else if (mode == Mode::GMAC) && (ivlen != 96) { + panic!("IV length must be 96 bits for GMAC."); + } + } + + // Load the key into the registers. 
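+        // The key registers are right-aligned: a 256-bit key fills K0..K3,
+        // a 192-bit key fills K1..K3, and a 128-bit key fills K2..K3 only.
+        // Each 32-bit word is loaded most-significant byte first, hence the
+        // big-endian conversions below.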
+ let mut keyidx = 0; + let mut keyword: [u8; 4] = [0; 4]; + if keylen > 192 { + keyword.copy_from_slice(&key[keyidx..keyidx + 4]); + keyidx += 4; + T::regs().key(0).klr().write_value(u32::from_be_bytes(keyword)); + keyword.copy_from_slice(&key[keyidx..keyidx + 4]); + keyidx += 4; + T::regs().key(0).krr().write_value(u32::from_be_bytes(keyword)); + } + if keylen > 128 { + keyword.copy_from_slice(&key[keyidx..keyidx + 4]); + keyidx += 4; + T::regs().key(1).klr().write_value(u32::from_be_bytes(keyword)); + keyword.copy_from_slice(&key[keyidx..keyidx + 4]); + keyidx += 4; + T::regs().key(1).krr().write_value(u32::from_be_bytes(keyword)); + } + if keylen > 64 { + keyword.copy_from_slice(&key[keyidx..keyidx + 4]); + keyidx += 4; + T::regs().key(2).klr().write_value(u32::from_be_bytes(keyword)); + keyword.copy_from_slice(&key[keyidx..keyidx + 4]); + keyidx += 4; + T::regs().key(2).krr().write_value(u32::from_be_bytes(keyword)); + } + keyword.copy_from_slice(&key[keyidx..keyidx + 4]); + keyidx += 4; + T::regs().key(3).klr().write_value(u32::from_be_bytes(keyword)); + keyword.copy_from_slice(&key[keyidx..keyidx + 4]); + T::regs().key(3).krr().write_value(u32::from_be_bytes(keyword)); + + // Set data type to 8-bit. This will match software implementations. + T::regs().cr().modify(|w| w.set_datatype(2)); + + if algo == Algorithm::AES { + if (mode == Mode::ECB) || (mode == Mode::CBC) { + T::regs().cr().modify(|w| w.set_algomode0(7)); + T::regs().cr().modify(|w| w.set_crypen(true)); + while T::regs().sr().read().busy() {} + } + + match mode { + Mode::ECB => T::regs().cr().modify(|w| w.set_algomode0(4)), + Mode::CBC => T::regs().cr().modify(|w| w.set_algomode0(5)), + Mode::CTR => T::regs().cr().modify(|w| w.set_algomode0(6)), + Mode::GCM => T::regs().cr().modify(|w| w.set_algomode0(8)), + Mode::GMAC => T::regs().cr().modify(|w| w.set_algomode0(8)), + Mode::CCM => T::regs().cr().modify(|w| w.set_algomode0(9)), + } + } else if algo == Algorithm::DES { + match mode { + Mode::ECB => T::regs().cr().modify(|w| w.set_algomode0(2)), + Mode::CBC => T::regs().cr().modify(|w| w.set_algomode0(3)), + _ => panic!("Only ECB and CBC modes are valid for DES."), + } + } else if algo == Algorithm::TDES { + match mode { + Mode::ECB => T::regs().cr().modify(|w| w.set_algomode0(0)), + Mode::CBC => T::regs().cr().modify(|w| w.set_algomode0(1)), + _ => panic!("Only ECB and CBC modes are valid for TDES."), + } + } + + // Set encrypt/decrypt + if dir == Direction::Encrypt { + T::regs().cr().modify(|w| w.set_algodir(false)); + } else { + T::regs().cr().modify(|w| w.set_algodir(true)); + } + + // Load the IV into the registers. 
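+        // IV0L/IV0R/IV1L/IV1R hold the vector left to right; a shorter IV
+        // (e.g. a 64-bit DES IV or a 96-bit nonce) occupies only the
+        // leading registers.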
+ if let Some(iv) = iv { + let mut iv_idx = 0; + let mut iv_word: [u8; 4] = [0; 4]; + iv_word.copy_from_slice(&iv[iv_idx..iv_idx + 4]); + iv_idx += 4; + T::regs().init(0).ivlr().write_value(u32::from_be_bytes(iv_word)); + iv_word.copy_from_slice(&iv[iv_idx..iv_idx + 4]); + iv_idx += 4; + if iv.len() >= 12 { + T::regs().init(0).ivrr().write_value(u32::from_be_bytes(iv_word)); + iv_word.copy_from_slice(&iv[iv_idx..iv_idx + 4]); + iv_idx += 4; + } + if iv.len() >= 16 { + T::regs().init(1).ivlr().write_value(u32::from_be_bytes(iv_word)); + iv_word.copy_from_slice(&iv[iv_idx..iv_idx + 4]); + T::regs().init(1).ivrr().write_value(u32::from_be_bytes(iv_word)); + } + } + + // Flush in/out FIFOs + T::regs().cr().modify(|w| w.fflush()); + + let ctx = Context { key: key }; + + ctx + } +} + +pub(crate) mod sealed { + use super::*; + + pub trait Instance { + fn regs() -> pac::cryp::Cryp; + } +} + +/// RNG instance trait. +pub trait Instance: sealed::Instance + Peripheral<P = Self> + crate::rcc::RccPeripheral + 'static + Send { + /// Interrupt for this RNG instance. + type Interrupt: interrupt::typelevel::Interrupt; +} + +foreach_interrupt!( + ($inst:ident, rng, CRYP, GLOBAL, $irq:ident) => { + impl Instance for peripherals::$inst { + type Interrupt = crate::interrupt::typelevel::$irq; + } + + impl sealed::Instance for peripherals::$inst { + fn regs() -> crate::pac::cryp::Cryp { + crate::pac::$inst + } + } + }; +); diff --git a/embassy-stm32/src/lib.rs b/embassy-stm32/src/lib.rs index cd1ede0fa..6859eef6c 100644 --- a/embassy-stm32/src/lib.rs +++ b/embassy-stm32/src/lib.rs @@ -34,6 +34,8 @@ pub mod adc; pub mod can; #[cfg(crc)] pub mod crc; +#[cfg(cryp)] +pub mod cryp; #[cfg(dac)] pub mod dac; #[cfg(dcmi)] From a0a8a4ec864763948d4a965ccf8ec11ca91cb15f Mon Sep 17 00:00:00 2001 From: Caleb Garrett <47389035+caleb-garrett@users.noreply.github.com> Date: Wed, 14 Feb 2024 20:24:52 -0500 Subject: [PATCH 02/23] Support CBC, ECB, CTR modes. --- embassy-stm32/src/cryp/mod.rs | 350 +++++++++++++++++++++++++++------- 1 file changed, 282 insertions(+), 68 deletions(-) diff --git a/embassy-stm32/src/cryp/mod.rs b/embassy-stm32/src/cryp/mod.rs index dedc6ddc5..f266313c1 100644 --- a/embassy-stm32/src/cryp/mod.rs +++ b/embassy-stm32/src/cryp/mod.rs @@ -1,23 +1,34 @@ use embassy_hal_internal::{into_ref, PeripheralRef}; -use pac::cryp::Init; use crate::pac; use crate::peripherals::CRYP; use crate::rcc::sealed::RccPeripheral; -use crate::{interrupt, peripherals, Peripheral}; +use crate::{interrupt, Peripheral}; + +const DES_BLOCK_SIZE: usize = 8; // 64 bits +const AES_BLOCK_SIZE: usize = 16; // 128 bits pub struct Context<'c> { + algo: Algorithm, + mode: Mode, + dir: Direction, + last_block_processed: bool, + aad_complete: bool, + cr: u32, + iv: [u32; 4], key: &'c [u8], + csgcmccm: [u32; 8], + csgcm: [u32; 8], } -#[derive(PartialEq)] +#[derive(PartialEq, Clone, Copy)] pub enum Algorithm { AES, DES, TDES, } -#[derive(PartialEq)] +#[derive(PartialEq, Clone, Copy)] pub enum Mode { ECB, CBC, @@ -27,53 +38,55 @@ pub enum Mode { CCM, } -#[derive(PartialEq)] +#[derive(PartialEq, Clone, Copy)] pub enum Direction { Encrypt, Decrypt, } /// Crypto Accelerator Driver -pub struct Cryp<'d, T: Instance, In, Out> { +pub struct Cryp<'d, T: Instance> { _peripheral: PeripheralRef<'d, T>, - indma: PeripheralRef<'d, In>, - outdma: PeripheralRef<'d, Out>, } type InitVector<'v> = Option<&'v [u8]>; -impl<'d, T: Instance, In, Out> Cryp<'d, T, In, Out> { +impl<'d, T: Instance> Cryp<'d, T> { /// Create a new CRYP driver. 
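+    /// This enables and resets the peripheral via RCC before use.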
- pub fn new( - peri: impl Peripheral<P = T> + 'd, - indma: impl Peripheral<P = In> + 'd, - outdma: impl Peripheral<P = Out> + 'd, - ) -> Self { + pub fn new(peri: impl Peripheral<P = T> + 'd) -> Self { CRYP::enable_and_reset(); - into_ref!(peri, indma, outdma); - let instance = Self { - _peripheral: peri, - indma: indma, - outdma: outdma, - }; + into_ref!(peri); + let instance = Self { _peripheral: peri }; instance } /// Start a new cipher operation. /// Key size must be 128, 192, or 256 bits. - pub fn start(key: &[u8], iv: InitVector, algo: Algorithm, mode: Mode, dir: Direction) -> Context { - T::regs().cr().modify(|w| w.set_crypen(false)); + pub fn start<'c>(&self, key: &'c [u8], iv: InitVector, algo: Algorithm, mode: Mode, dir: Direction) -> Context<'c> { + let mut ctx = Context { + algo, + mode, + dir, + last_block_processed: false, + cr: 0, + iv: [0; 4], + key, + csgcmccm: [0; 8], + csgcm: [0; 8], + aad_complete: false, + }; - let keylen = key.len() * 8; - let ivlen; - if let Some(iv) = iv { - ivlen = iv.len() * 8; - } else { - ivlen = 0; - } + T::regs().cr().modify(|w| w.set_crypen(false)); // Checks for correctness if algo == Algorithm::AES { + let keylen = key.len() * 8; + let ivlen; + if let Some(iv) = iv { + ivlen = iv.len() * 8; + } else { + ivlen = 0; + } match keylen { 128 => T::regs().cr().write(|w| w.set_keysize(0)), 192 => T::regs().cr().write(|w| w.set_keysize(1)), @@ -96,49 +109,14 @@ impl<'d, T: Instance, In, Out> Cryp<'d, T, In, Out> { } } - // Load the key into the registers. - let mut keyidx = 0; - let mut keyword: [u8; 4] = [0; 4]; - if keylen > 192 { - keyword.copy_from_slice(&key[keyidx..keyidx + 4]); - keyidx += 4; - T::regs().key(0).klr().write_value(u32::from_be_bytes(keyword)); - keyword.copy_from_slice(&key[keyidx..keyidx + 4]); - keyidx += 4; - T::regs().key(0).krr().write_value(u32::from_be_bytes(keyword)); - } - if keylen > 128 { - keyword.copy_from_slice(&key[keyidx..keyidx + 4]); - keyidx += 4; - T::regs().key(1).klr().write_value(u32::from_be_bytes(keyword)); - keyword.copy_from_slice(&key[keyidx..keyidx + 4]); - keyidx += 4; - T::regs().key(1).krr().write_value(u32::from_be_bytes(keyword)); - } - if keylen > 64 { - keyword.copy_from_slice(&key[keyidx..keyidx + 4]); - keyidx += 4; - T::regs().key(2).klr().write_value(u32::from_be_bytes(keyword)); - keyword.copy_from_slice(&key[keyidx..keyidx + 4]); - keyidx += 4; - T::regs().key(2).krr().write_value(u32::from_be_bytes(keyword)); - } - keyword.copy_from_slice(&key[keyidx..keyidx + 4]); - keyidx += 4; - T::regs().key(3).klr().write_value(u32::from_be_bytes(keyword)); - keyword.copy_from_slice(&key[keyidx..keyidx + 4]); - T::regs().key(3).krr().write_value(u32::from_be_bytes(keyword)); + self.load_key(key); // Set data type to 8-bit. This will match software implementations. 
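+        // DATATYPE 0b10 selects byte-level swapping, so each word written to
+        // DIN carries the byte stream in memory order on this little-endian core.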
T::regs().cr().modify(|w| w.set_datatype(2)); - if algo == Algorithm::AES { - if (mode == Mode::ECB) || (mode == Mode::CBC) { - T::regs().cr().modify(|w| w.set_algomode0(7)); - T::regs().cr().modify(|w| w.set_crypen(true)); - while T::regs().sr().read().busy() {} - } + self.prepare_key(&ctx); + if algo == Algorithm::AES { match mode { Mode::ECB => T::regs().cr().modify(|w| w.set_algomode0(4)), Mode::CBC => T::regs().cr().modify(|w| w.set_algomode0(5)), @@ -192,10 +170,246 @@ impl<'d, T: Instance, In, Out> Cryp<'d, T, In, Out> { // Flush in/out FIFOs T::regs().cr().modify(|w| w.fflush()); - let ctx = Context { key: key }; + if mode == Mode::GCM { + // GCM init phase + T::regs().cr().modify(|w| w.set_gcm_ccmph(0)); + T::regs().cr().modify(|w| w.set_crypen(true)); + while T::regs().cr().read().crypen() {} + } + + self.store_context(&mut ctx); ctx } + + // pub fn aad_blocking(&self, ctx: &mut Context, aad: &[u8]) { + // if ctx.aad_complete { + // panic!("Cannot update AAD after calling 'update'!") + // } + // if (ctx.mode != Mode::GCM) && (ctx.mode != Mode::GMAC) && (ctx.mode != Mode::CCM) { + // panic!("Associated data only valid for GCM, GMAC, and CCM modes.") + // } + + // let mut header_size = 0; + // let mut header: [u8;] + + // if aad.len() < 65280 { + + // } + + // // GCM header phase + // T::regs().cr().modify(|w| w.set_gcm_ccmph(1)); + // T::regs().cr().modify(|w| w.set_crypen(true)); + // } + + pub fn update_blocking(&self, ctx: &mut Context, input: &[u8], output: &mut [u8], last_block: bool) { + self.load_context(ctx); + + ctx.aad_complete = true; + if last_block { + ctx.last_block_processed = true; + } + + let block_size; + if ctx.algo == Algorithm::DES { + block_size = 8; + } else { + block_size = 16; + } + let last_block_remainder = input.len() % block_size; + + // Perform checks for correctness. + + if ctx.mode == Mode::GMAC { + panic!("GMAC works on header data only. Do not call this function for GMAC."); + } + if ctx.last_block_processed { + panic!("The last block has already been processed!"); + } + if input.len() != output.len() { + panic!("Output buffer length must match input length."); + } + if !last_block { + if last_block_remainder != 0 { + panic!("Input length must be a multiple of {block_size} bytes."); + } + } + if (ctx.mode == Mode::ECB) || (ctx.mode == Mode::CBC) { + if last_block_remainder != 0 { + panic!("Input must be a multiple of {block_size} bytes in ECB and CBC modes. Consider padding or ciphertext stealing."); + } + } + + // Load data into core, block by block. + let num_full_blocks = input.len() / block_size; + for block in 0..num_full_blocks { + let mut index = block * block_size; + let end_index = index + block_size; + // Write block in + while index < end_index { + let mut in_word: [u8; 4] = [0; 4]; + in_word.copy_from_slice(&input[index..index + 4]); + T::regs().din().write_value(u32::from_ne_bytes(in_word)); + index += 4; + } + let mut index = block * block_size; + let end_index = index + block_size; + // Block until there is output to read. + while !T::regs().sr().read().ofne() {} + // Read block out + while index < end_index { + let out_word: u32 = T::regs().dout().read(); + output[index..index + 4].copy_from_slice(u32::to_ne_bytes(out_word).as_slice()); + index += 4; + } + } + + // Handle the final block, which is incomplete. + if last_block_remainder > 0 { + if ctx.mode == Mode::GCM && ctx.dir == Direction::Encrypt { + //Handle special GCM partial block process. 
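+            // The core only processes whole 128-bit blocks in GCM mode. Per
+            // the workaround described in the reference manual, the final
+            // partial block is run through CTR mode with the counter wound
+            // back by one, and the GHASH is corrected afterwards.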
+ T::regs().cr().modify(|w| w.set_crypen(false)); + T::regs().cr().write(|w| w.set_algomode0(6)); + let iv1r = T::regs().csgcmccmr(7).read() - 1; + T::regs().init(1).ivrr().write_value(iv1r); + T::regs().cr().modify(|w| w.set_crypen(true)); + } + + let mut intermediate_data: [u8; 16] = [0; 16]; + let mut last_block: [u8; 16] = [0; 16]; + last_block.copy_from_slice(&input[input.len() - last_block_remainder..input.len()]); + let mut index = 0; + let end_index = block_size; + // Write block in + while index < end_index { + let mut in_word: [u8; 4] = [0; 4]; + in_word.copy_from_slice(&last_block[index..index + 4]); + T::regs().din().write_value(u32::from_ne_bytes(in_word)); + index += 4; + } + let mut index = 0; + let end_index = block_size; + // Block until there is output to read. + while !T::regs().sr().read().ofne() {} + // Read block out + while index < end_index { + let out_word: u32 = T::regs().dout().read(); + intermediate_data[index..index + 4].copy_from_slice(u32::to_ne_bytes(out_word).as_slice()); + index += 4; + } + + // Handle the last block depending on mode. + output[output.len() - last_block_remainder..output.len()] + .copy_from_slice(&intermediate_data[0..last_block_remainder]); + + if ctx.mode == Mode::GCM && ctx.dir == Direction::Encrypt { + //Handle special GCM partial block process. + T::regs().cr().modify(|w| w.set_crypen(false)); + T::regs().cr().write(|w| w.set_algomode0(8)); + T::regs().init(1).ivrr().write_value(2); + T::regs().cr().modify(|w| w.set_crypen(true)); + T::regs().cr().modify(|w| w.set_gcm_ccmph(3)); + let mut index = 0; + let end_index = block_size; + while index < end_index { + let mut in_word: [u8; 4] = [0; 4]; + in_word.copy_from_slice(&intermediate_data[index..index + 4]); + T::regs().din().write_value(u32::from_ne_bytes(in_word)); + index += 4; + } + for _ in 0..4 { + T::regs().dout().read(); + } + } + } + } + + fn prepare_key(&self, ctx: &Context) { + if ctx.algo == Algorithm::AES { + if (ctx.mode == Mode::ECB) || (ctx.mode == Mode::CBC) { + T::regs().cr().modify(|w| w.set_algomode0(7)); + T::regs().cr().modify(|w| w.set_crypen(true)); + while T::regs().sr().read().busy() {} + } + } + } + + fn load_key(&self, key: &[u8]) { + // Load the key into the registers. 
+ let mut keyidx = 0; + let mut keyword: [u8; 4] = [0; 4]; + let keylen = key.len() * 8; + if keylen > 192 { + keyword.copy_from_slice(&key[keyidx..keyidx + 4]); + keyidx += 4; + T::regs().key(0).klr().write_value(u32::from_be_bytes(keyword)); + keyword.copy_from_slice(&key[keyidx..keyidx + 4]); + keyidx += 4; + T::regs().key(0).krr().write_value(u32::from_be_bytes(keyword)); + } + if keylen > 128 { + keyword.copy_from_slice(&key[keyidx..keyidx + 4]); + keyidx += 4; + T::regs().key(1).klr().write_value(u32::from_be_bytes(keyword)); + keyword.copy_from_slice(&key[keyidx..keyidx + 4]); + keyidx += 4; + T::regs().key(1).krr().write_value(u32::from_be_bytes(keyword)); + } + if keylen > 64 { + keyword.copy_from_slice(&key[keyidx..keyidx + 4]); + keyidx += 4; + T::regs().key(2).klr().write_value(u32::from_be_bytes(keyword)); + keyword.copy_from_slice(&key[keyidx..keyidx + 4]); + keyidx += 4; + T::regs().key(2).krr().write_value(u32::from_be_bytes(keyword)); + } + keyword.copy_from_slice(&key[keyidx..keyidx + 4]); + keyidx += 4; + T::regs().key(3).klr().write_value(u32::from_be_bytes(keyword)); + keyword.copy_from_slice(&key[keyidx..keyidx + 4]); + T::regs().key(3).krr().write_value(u32::from_be_bytes(keyword)); + } + + fn store_context(&self, ctx: &mut Context) { + // Wait for data block processing to finish. + while !T::regs().sr().read().ifem() {} + while T::regs().sr().read().ofne() {} + while T::regs().sr().read().busy() {} + + // Disable crypto processor. + T::regs().cr().modify(|w| w.set_crypen(false)); + + // Save the peripheral state. + ctx.cr = T::regs().cr().read().0; + ctx.iv[0] = T::regs().init(0).ivlr().read(); + ctx.iv[1] = T::regs().init(0).ivrr().read(); + ctx.iv[2] = T::regs().init(1).ivlr().read(); + ctx.iv[3] = T::regs().init(1).ivrr().read(); + for i in 0..8 { + ctx.csgcmccm[i] = T::regs().csgcmccmr(i).read(); + ctx.csgcm[i] = T::regs().csgcmr(i).read(); + } + } + + fn load_context(&self, ctx: &Context) { + // Reload state registers. + T::regs().cr().write(|w| w.0 = ctx.cr); + T::regs().init(0).ivlr().write_value(ctx.iv[0]); + T::regs().init(0).ivrr().write_value(ctx.iv[1]); + T::regs().init(1).ivlr().write_value(ctx.iv[2]); + T::regs().init(1).ivrr().write_value(ctx.iv[3]); + for i in 0..8 { + T::regs().csgcmccmr(i).write_value(ctx.csgcmccm[i]); + T::regs().csgcmr(i).write_value(ctx.csgcm[i]); + } + self.load_key(ctx.key); + + // Prepare key if applicable. + self.prepare_key(ctx); + + // Enable crypto processor. + T::regs().cr().modify(|w| w.set_crypen(true)); + } } pub(crate) mod sealed { From 72e4cacd914195352c9760856e8b8e40a7851752 Mon Sep 17 00:00:00 2001 From: Caleb Garrett <47389035+caleb-garrett@users.noreply.github.com> Date: Wed, 14 Feb 2024 22:11:38 -0500 Subject: [PATCH 03/23] CBC and ECB AES modes functional. 
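
With ECB and CBC passing, a full blocking round trip looks like the sketch
below (placeholder key/IV values; the `embassy_stm32` init and `CRYP`
peripheral binding are assumed):

    use embassy_stm32::cryp::{Algorithm, Cryp, Direction, Mode};

    let p = embassy_stm32::init(Default::default());
    let cryp = Cryp::new(p.CRYP);

    let key = [0u8; 16]; // AES-128 key (placeholder)
    let iv = [0u8; 16]; // 128-bit IV (placeholder)
    let plaintext = [0u8; 32]; // CBC requires a multiple of 16 bytes
    let mut ciphertext = [0u8; 32];

    // Decryption is the same flow with Direction::Decrypt and the
    // buffers swapped.
    let mut ctx = cryp.start(&key, Some(&iv), Algorithm::AES, Mode::CBC, Direction::Encrypt);
    cryp.update_blocking(&mut ctx, &plaintext, &mut ciphertext, true);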
--- embassy-stm32/src/cryp/mod.rs | 39 +++++++++++++++++++---------------- 1 file changed, 21 insertions(+), 18 deletions(-) diff --git a/embassy-stm32/src/cryp/mod.rs b/embassy-stm32/src/cryp/mod.rs index f266313c1..b368930da 100644 --- a/embassy-stm32/src/cryp/mod.rs +++ b/embassy-stm32/src/cryp/mod.rs @@ -3,7 +3,7 @@ use embassy_hal_internal::{into_ref, PeripheralRef}; use crate::pac; use crate::peripherals::CRYP; use crate::rcc::sealed::RccPeripheral; -use crate::{interrupt, Peripheral}; +use crate::{interrupt, peripherals, Peripheral}; const DES_BLOCK_SIZE: usize = 8; // 64 bits const AES_BLOCK_SIZE: usize = 16; // 128 bits @@ -49,7 +49,7 @@ pub struct Cryp<'d, T: Instance> { _peripheral: PeripheralRef<'d, T>, } -type InitVector<'v> = Option<&'v [u8]>; +pub type InitVector<'v> = Option<&'v [u8]>; impl<'d, T: Instance> Cryp<'d, T> { /// Create a new CRYP driver. @@ -88,9 +88,9 @@ impl<'d, T: Instance> Cryp<'d, T> { ivlen = 0; } match keylen { - 128 => T::regs().cr().write(|w| w.set_keysize(0)), - 192 => T::regs().cr().write(|w| w.set_keysize(1)), - 256 => T::regs().cr().write(|w| w.set_keysize(2)), + 128 => T::regs().cr().modify(|w| w.set_keysize(0)), + 192 => T::regs().cr().modify(|w| w.set_keysize(1)), + 256 => T::regs().cr().modify(|w| w.set_keysize(2)), _ => panic!("Key length must be 128, 192, or 256 bits."), } @@ -155,13 +155,13 @@ impl<'d, T: Instance> Cryp<'d, T> { T::regs().init(0).ivlr().write_value(u32::from_be_bytes(iv_word)); iv_word.copy_from_slice(&iv[iv_idx..iv_idx + 4]); iv_idx += 4; + T::regs().init(0).ivrr().write_value(u32::from_be_bytes(iv_word)); if iv.len() >= 12 { - T::regs().init(0).ivrr().write_value(u32::from_be_bytes(iv_word)); iv_word.copy_from_slice(&iv[iv_idx..iv_idx + 4]); iv_idx += 4; + T::regs().init(1).ivlr().write_value(u32::from_be_bytes(iv_word)); } if iv.len() >= 16 { - T::regs().init(1).ivlr().write_value(u32::from_be_bytes(iv_word)); iv_word.copy_from_slice(&iv[iv_idx..iv_idx + 4]); T::regs().init(1).ivrr().write_value(u32::from_be_bytes(iv_word)); } @@ -206,9 +206,6 @@ impl<'d, T: Instance> Cryp<'d, T> { self.load_context(ctx); ctx.aad_complete = true; - if last_block { - ctx.last_block_processed = true; - } let block_size; if ctx.algo == Algorithm::DES { @@ -231,15 +228,19 @@ impl<'d, T: Instance> Cryp<'d, T> { } if !last_block { if last_block_remainder != 0 { - panic!("Input length must be a multiple of {block_size} bytes."); + panic!("Input length must be a multiple of {} bytes.", block_size); } } if (ctx.mode == Mode::ECB) || (ctx.mode == Mode::CBC) { if last_block_remainder != 0 { - panic!("Input must be a multiple of {block_size} bytes in ECB and CBC modes. Consider padding or ciphertext stealing."); + panic!("Input must be a multiple of {} bytes in ECB and CBC modes. Consider padding or ciphertext stealing.", block_size); } } + if last_block { + ctx.last_block_processed = true; + } + // Load data into core, block by block. let num_full_blocks = input.len() / block_size; for block in 0..num_full_blocks { @@ -277,7 +278,7 @@ impl<'d, T: Instance> Cryp<'d, T> { let mut intermediate_data: [u8; 16] = [0; 16]; let mut last_block: [u8; 16] = [0; 16]; - last_block.copy_from_slice(&input[input.len() - last_block_remainder..input.len()]); + last_block[..last_block_remainder].copy_from_slice(&input[input.len() - last_block_remainder..input.len()]); let mut index = 0; let end_index = block_size; // Write block in @@ -299,7 +300,8 @@ impl<'d, T: Instance> Cryp<'d, T> { } // Handle the last block depending on mode. 
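+            // Only the bytes corresponding to the partial input are copied
+            // out; the rest of the engine's final block output is discarded.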
- output[output.len() - last_block_remainder..output.len()] + let output_len = output.len(); + output[output_len - last_block_remainder..output_len] .copy_from_slice(&intermediate_data[0..last_block_remainder]); if ctx.mode == Mode::GCM && ctx.dir == Direction::Encrypt { @@ -325,7 +327,7 @@ impl<'d, T: Instance> Cryp<'d, T> { } fn prepare_key(&self, ctx: &Context) { - if ctx.algo == Algorithm::AES { + if ctx.algo == Algorithm::AES && ctx.dir == Direction::Decrypt { if (ctx.mode == Mode::ECB) || (ctx.mode == Mode::CBC) { T::regs().cr().modify(|w| w.set_algomode0(7)); T::regs().cr().modify(|w| w.set_crypen(true)); @@ -406,6 +408,7 @@ impl<'d, T: Instance> Cryp<'d, T> { // Prepare key if applicable. self.prepare_key(ctx); + T::regs().cr().write(|w| w.0 = ctx.cr); // Enable crypto processor. T::regs().cr().modify(|w| w.set_crypen(true)); @@ -420,14 +423,14 @@ pub(crate) mod sealed { } } -/// RNG instance trait. +/// CRYP instance trait. pub trait Instance: sealed::Instance + Peripheral<P = Self> + crate::rcc::RccPeripheral + 'static + Send { - /// Interrupt for this RNG instance. + /// Interrupt for this CRYP instance. type Interrupt: interrupt::typelevel::Interrupt; } foreach_interrupt!( - ($inst:ident, rng, CRYP, GLOBAL, $irq:ident) => { + ($inst:ident, cryp, CRYP, GLOBAL, $irq:ident) => { impl Instance for peripherals::$inst { type Interrupt = crate::interrupt::typelevel::$irq; } From 565acdf24301a72fe084aa18b7c55a6110609374 Mon Sep 17 00:00:00 2001 From: Caleb Garrett <47389035+caleb-garrett@users.noreply.github.com> Date: Wed, 14 Feb 2024 22:38:05 -0500 Subject: [PATCH 04/23] CTR mode functional. --- embassy-stm32/src/cryp/mod.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/embassy-stm32/src/cryp/mod.rs b/embassy-stm32/src/cryp/mod.rs index b368930da..4db95d55c 100644 --- a/embassy-stm32/src/cryp/mod.rs +++ b/embassy-stm32/src/cryp/mod.rs @@ -100,8 +100,8 @@ impl<'d, T: Instance> Cryp<'d, T> { panic!("IV length must be 128 bits for CBC."); } else if (mode == Mode::CCM) && (ivlen != 128) { panic!("IV length must be 128 bits for CCM."); - } else if (mode == Mode::CTR) && (ivlen != 64) { - panic!("IV length must be 64 bits for CTR."); + } else if (mode == Mode::CTR) && (ivlen != 128) { + panic!("IV length must be 128 bits for CTR."); } else if (mode == Mode::GCM) && (ivlen != 96) { panic!("IV length must be 96 bits for GCM."); } else if (mode == Mode::GMAC) && (ivlen != 96) { From c2b03eff62245bd325a781e1e260c150e0a5040c Mon Sep 17 00:00:00 2001 From: Caleb Garrett <47389035+caleb-garrett@users.noreply.github.com> Date: Fri, 16 Feb 2024 13:15:14 -0500 Subject: [PATCH 05/23] GCM mode functional. --- embassy-stm32/src/cryp/mod.rs | 244 +++++++++++++++++++++++++++------- 1 file changed, 198 insertions(+), 46 deletions(-) diff --git a/embassy-stm32/src/cryp/mod.rs b/embassy-stm32/src/cryp/mod.rs index 4db95d55c..447bcf2f8 100644 --- a/embassy-stm32/src/cryp/mod.rs +++ b/embassy-stm32/src/cryp/mod.rs @@ -1,3 +1,4 @@ +//! Crypto Accelerator (CRYP) use embassy_hal_internal::{into_ref, PeripheralRef}; use crate::pac; @@ -8,6 +9,8 @@ use crate::{interrupt, peripherals, Peripheral}; const DES_BLOCK_SIZE: usize = 8; // 64 bits const AES_BLOCK_SIZE: usize = 16; // 128 bits +/// Holds the state information for a cipher operation. +/// Allows suspending/resuming of cipher operations. 
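+/// A context is created by `start` and must be passed to every subsequent
+/// call that operates on the same cipher stream.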
pub struct Context<'c> { algo: Algorithm, mode: Mode, @@ -19,28 +22,44 @@ pub struct Context<'c> { key: &'c [u8], csgcmccm: [u32; 8], csgcm: [u32; 8], + header_len: u64, + payload_len: u64, } +/// Selects the encryption algorithm. #[derive(PartialEq, Clone, Copy)] pub enum Algorithm { + /// Advanced Encryption Standard AES, + /// Data Encryption Standard DES, + /// Triple-DES TDES, } +/// Selects the cipher mode. #[derive(PartialEq, Clone, Copy)] pub enum Mode { + /// Electronic Codebook ECB, + /// Cipher Block Chaining CBC, + /// Counter Mode CTR, + /// Galois Counter Mode GCM, + /// Galois Message Authentication Code GMAC, + /// Counter with CBC-MAC CCM, } +/// Selects whether the crypto processor operates in encryption or decryption mode. #[derive(PartialEq, Clone, Copy)] pub enum Direction { + /// Encryption mode Encrypt, + /// Decryption mode Decrypt, } @@ -49,6 +68,8 @@ pub struct Cryp<'d, T: Instance> { _peripheral: PeripheralRef<'d, T>, } +/// Initialization vector of arbitrary length. +/// When an initialization vector is not needed, `None` may be supplied. pub type InitVector<'v> = Option<&'v [u8]>; impl<'d, T: Instance> Cryp<'d, T> { @@ -62,6 +83,8 @@ impl<'d, T: Instance> Cryp<'d, T> { /// Start a new cipher operation. /// Key size must be 128, 192, or 256 bits. + /// Initialization vector must only be supplied if necessary. + /// Panics if there is any mismatch in parameters, such as an incorrect IV length or invalid mode. pub fn start<'c>(&self, key: &'c [u8], iv: InitVector, algo: Algorithm, mode: Mode, dir: Direction) -> Context<'c> { let mut ctx = Context { algo, @@ -74,6 +97,8 @@ impl<'d, T: Instance> Cryp<'d, T> { csgcmccm: [0; 8], csgcm: [0; 8], aad_complete: false, + header_len: 0, + payload_len: 0, }; T::regs().cr().modify(|w| w.set_crypen(false)); @@ -102,8 +127,6 @@ impl<'d, T: Instance> Cryp<'d, T> { panic!("IV length must be 128 bits for CCM."); } else if (mode == Mode::CTR) && (ivlen != 128) { panic!("IV length must be 128 bits for CTR."); - } else if (mode == Mode::GCM) && (ivlen != 96) { - panic!("IV length must be 96 bits for GCM."); } else if (mode == Mode::GMAC) && (ivlen != 96) { panic!("IV length must be 96 bits for GMAC."); } @@ -121,17 +144,27 @@ impl<'d, T: Instance> Cryp<'d, T> { Mode::ECB => T::regs().cr().modify(|w| w.set_algomode0(4)), Mode::CBC => T::regs().cr().modify(|w| w.set_algomode0(5)), Mode::CTR => T::regs().cr().modify(|w| w.set_algomode0(6)), - Mode::GCM => T::regs().cr().modify(|w| w.set_algomode0(8)), - Mode::GMAC => T::regs().cr().modify(|w| w.set_algomode0(8)), - Mode::CCM => T::regs().cr().modify(|w| w.set_algomode0(9)), + Mode::GCM => T::regs().cr().modify(|w| w.set_algomode0(0)), + Mode::GMAC => T::regs().cr().modify(|w| w.set_algomode0(0)), + Mode::CCM => T::regs().cr().modify(|w| w.set_algomode0(1)), + } + match mode { + Mode::ECB => T::regs().cr().modify(|w| w.set_algomode3(false)), + Mode::CBC => T::regs().cr().modify(|w| w.set_algomode3(false)), + Mode::CTR => T::regs().cr().modify(|w| w.set_algomode3(false)), + Mode::GCM => T::regs().cr().modify(|w| w.set_algomode3(true)), + Mode::GMAC => T::regs().cr().modify(|w| w.set_algomode3(true)), + Mode::CCM => T::regs().cr().modify(|w| w.set_algomode3(true)), } } else if algo == Algorithm::DES { + T::regs().cr().modify(|w| w.set_algomode3(false)); match mode { Mode::ECB => T::regs().cr().modify(|w| w.set_algomode0(2)), Mode::CBC => T::regs().cr().modify(|w| w.set_algomode0(3)), _ => panic!("Only ECB and CBC modes are valid for DES."), } } else if algo == Algorithm::TDES { + 
T::regs().cr().modify(|w| w.set_algomode3(false)); match mode { Mode::ECB => T::regs().cr().modify(|w| w.set_algomode0(0)), Mode::CBC => T::regs().cr().modify(|w| w.set_algomode0(1)), @@ -148,23 +181,26 @@ impl<'d, T: Instance> Cryp<'d, T> { // Load the IV into the registers. if let Some(iv) = iv { + let mut full_iv: [u8; 16] = [0; 16]; + full_iv[0..iv.len()].copy_from_slice(iv); + + if (mode == Mode::GCM) || (mode == Mode::GMAC) { + full_iv[15] = 2; + } + let mut iv_idx = 0; let mut iv_word: [u8; 4] = [0; 4]; - iv_word.copy_from_slice(&iv[iv_idx..iv_idx + 4]); + iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]); iv_idx += 4; T::regs().init(0).ivlr().write_value(u32::from_be_bytes(iv_word)); - iv_word.copy_from_slice(&iv[iv_idx..iv_idx + 4]); + iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]); iv_idx += 4; T::regs().init(0).ivrr().write_value(u32::from_be_bytes(iv_word)); - if iv.len() >= 12 { - iv_word.copy_from_slice(&iv[iv_idx..iv_idx + 4]); - iv_idx += 4; - T::regs().init(1).ivlr().write_value(u32::from_be_bytes(iv_word)); - } - if iv.len() >= 16 { - iv_word.copy_from_slice(&iv[iv_idx..iv_idx + 4]); - T::regs().init(1).ivrr().write_value(u32::from_be_bytes(iv_word)); - } + iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]); + iv_idx += 4; + T::regs().init(1).ivlr().write_value(u32::from_be_bytes(iv_word)); + iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]); + T::regs().init(1).ivrr().write_value(u32::from_be_bytes(iv_word)); } // Flush in/out FIFOs @@ -182,41 +218,116 @@ impl<'d, T: Instance> Cryp<'d, T> { ctx } - // pub fn aad_blocking(&self, ctx: &mut Context, aad: &[u8]) { - // if ctx.aad_complete { - // panic!("Cannot update AAD after calling 'update'!") - // } - // if (ctx.mode != Mode::GCM) && (ctx.mode != Mode::GMAC) && (ctx.mode != Mode::CCM) { - // panic!("Associated data only valid for GCM, GMAC, and CCM modes.") - // } - - // let mut header_size = 0; - // let mut header: [u8;] - - // if aad.len() < 65280 { - - // } - - // // GCM header phase - // T::regs().cr().modify(|w| w.set_gcm_ccmph(1)); - // T::regs().cr().modify(|w| w.set_crypen(true)); - // } - - pub fn update_blocking(&self, ctx: &mut Context, input: &[u8], output: &mut [u8], last_block: bool) { + /// Controls the header phase of cipher processing. + /// This function is only valid for GCM, CCM, and GMAC modes. + /// It only needs to be called if using one of these modes and there is associated data. + /// All AAD must be supplied to this function prior to starting the payload phase with `payload_blocking`. + /// The AAD must be supplied in multiples of the block size (128 bits), except when supplying the last block. + /// When supplying the last block of AAD, `last_aad_block` must be `true`. + pub fn aad_blocking(&self, ctx: &mut Context, aad: &[u8], last_aad_block: bool) { self.load_context(ctx); - ctx.aad_complete = true; - let block_size; if ctx.algo == Algorithm::DES { - block_size = 8; + block_size = DES_BLOCK_SIZE; } else { - block_size = 16; + block_size = AES_BLOCK_SIZE; + } + let last_block_remainder = aad.len() % block_size; + + // Perform checks for correctness. 
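+        // The engine cannot return to the header phase once payload
+        // processing begins, so all AAD must be supplied up front.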
+ if ctx.aad_complete { + panic!("Cannot update AAD after calling 'update'!") + } + if (ctx.mode != Mode::GCM) && (ctx.mode != Mode::GMAC) && (ctx.mode != Mode::CCM) { + panic!("Associated data only valid for GCM, GMAC, and CCM modes.") + } + if !last_aad_block { + if last_block_remainder != 0 { + panic!("Input length must be a multiple of {} bytes.", block_size); + } + } + + ctx.header_len += aad.len() as u64; + + // GCM header phase + T::regs().cr().modify(|w| w.set_crypen(false)); + T::regs().cr().modify(|w| w.set_gcm_ccmph(1)); + T::regs().cr().modify(|w| w.set_crypen(true)); + + // Load data into core, block by block. + let num_full_blocks = aad.len() / block_size; + for block in 0..num_full_blocks { + let mut index = block * block_size; + let end_index = index + block_size; + // Write block in + while index < end_index { + let mut in_word: [u8; 4] = [0; 4]; + in_word.copy_from_slice(&aad[index..index + 4]); + T::regs().din().write_value(u32::from_ne_bytes(in_word)); + index += 4; + } + // Block until input FIFO is empty. + while !T::regs().sr().read().ifem() {} + } + + // Handle the final block, which is incomplete. + if last_block_remainder > 0 { + let mut last_block: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE]; + last_block[..last_block_remainder].copy_from_slice(&aad[aad.len() - last_block_remainder..aad.len()]); + let mut index = 0; + let end_index = block_size; + // Write block in + while index < end_index { + let mut in_word: [u8; 4] = [0; 4]; + in_word.copy_from_slice(&last_block[index..index + 4]); + T::regs().din().write_value(u32::from_ne_bytes(in_word)); + index += 4; + } + // Block until input FIFO is empty + while !T::regs().sr().read().ifem() {} + } + + if last_aad_block { + // Switch to payload phase. + ctx.aad_complete = true; + T::regs().cr().modify(|w| w.set_crypen(false)); + T::regs().cr().modify(|w| w.set_gcm_ccmph(2)); + T::regs().cr().modify(|w| w.fflush()); + } + + self.store_context(ctx); + } + + /// Performs encryption/decryption on the provided context. + /// The context determines algorithm, mode, and state of the crypto accelerator. + /// When the last piece of data is supplied, `last_block` should be `true`. + /// This function panics under various mismatches of parameters. + /// Input and output buffer lengths must match. + /// Data must be a multiple of block size (128-bits for AES, 64-bits for DES) for CBC and ECB modes. + /// Padding or ciphertext stealing must be managed by the application for these modes. + /// Data must also be a multiple of block size unless `last_block` is `true`. + pub fn payload_blocking(&self, ctx: &mut Context, input: &[u8], output: &mut [u8], last_block: bool) { + self.load_context(ctx); + + let block_size; + if ctx.algo == Algorithm::DES { + block_size = DES_BLOCK_SIZE; + } else { + block_size = AES_BLOCK_SIZE; } let last_block_remainder = input.len() % block_size; // Perform checks for correctness. - + if !ctx.aad_complete && ctx.header_len > 0 { + panic!("Additional associated data must be processed first!"); + } else if !ctx.aad_complete { + ctx.aad_complete = true; + T::regs().cr().modify(|w| w.set_crypen(false)); + T::regs().cr().modify(|w| w.set_gcm_ccmph(2)); + T::regs().cr().modify(|w| w.fflush()); + T::regs().cr().modify(|w| w.set_crypen(true)); + } if ctx.mode == Mode::GMAC { panic!("GMAC works on header data only. Do not call this function for GMAC."); } @@ -270,14 +381,15 @@ impl<'d, T: Instance> Cryp<'d, T> { if ctx.mode == Mode::GCM && ctx.dir == Direction::Encrypt { //Handle special GCM partial block process. 
T::regs().cr().modify(|w| w.set_crypen(false)); - T::regs().cr().write(|w| w.set_algomode0(6)); + T::regs().cr().modify(|w| w.set_algomode3(false)); + T::regs().cr().modify(|w| w.set_algomode0(6)); let iv1r = T::regs().csgcmccmr(7).read() - 1; T::regs().init(1).ivrr().write_value(iv1r); T::regs().cr().modify(|w| w.set_crypen(true)); } - let mut intermediate_data: [u8; 16] = [0; 16]; - let mut last_block: [u8; 16] = [0; 16]; + let mut intermediate_data: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE]; + let mut last_block: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE]; last_block[..last_block_remainder].copy_from_slice(&input[input.len() - last_block_remainder..input.len()]); let mut index = 0; let end_index = block_size; @@ -307,7 +419,8 @@ impl<'d, T: Instance> Cryp<'d, T> { if ctx.mode == Mode::GCM && ctx.dir == Direction::Encrypt { //Handle special GCM partial block process. T::regs().cr().modify(|w| w.set_crypen(false)); - T::regs().cr().write(|w| w.set_algomode0(8)); + T::regs().cr().write(|w| w.set_algomode3(true)); + T::regs().cr().write(|w| w.set_algomode0(0)); T::regs().init(1).ivrr().write_value(2); T::regs().cr().modify(|w| w.set_crypen(true)); T::regs().cr().modify(|w| w.set_gcm_ccmph(3)); @@ -324,12 +437,51 @@ impl<'d, T: Instance> Cryp<'d, T> { } } } + + ctx.payload_len += input.len() as u64; + } + + /// This function only needs to be called for GCM, CCM, and GMAC modes to + /// generate an authentication tag. Calling this function on any other mode + /// does nothing except consumes the context. A buffer for the authentication + /// tag must be supplied. + pub fn finish_blocking(&self, mut ctx: Context, tag: &mut [u8; 16]) { + // Just consume the context if called for any other mode. + if (ctx.mode != Mode::GCM) || (ctx.mode != Mode::CCM) || (ctx.mode != Mode::GMAC) { + return; + } + + self.load_context(&mut ctx); + + T::regs().cr().modify(|w| w.set_crypen(false)); + T::regs().cr().modify(|w| w.set_gcm_ccmph(3)); + T::regs().cr().modify(|w| w.set_crypen(true)); + + let headerlen1: u32 = (ctx.header_len >> 32) as u32; + let headerlen2: u32 = ctx.header_len as u32; + let payloadlen1: u32 = (ctx.payload_len >> 32) as u32; + let payloadlen2: u32 = ctx.payload_len as u32; + + T::regs().din().write_value(headerlen1.swap_bytes()); + T::regs().din().write_value(headerlen2.swap_bytes()); + T::regs().din().write_value(payloadlen1.swap_bytes()); + T::regs().din().write_value(payloadlen2.swap_bytes()); + + while !T::regs().sr().read().ofne() {} + + tag[0..4].copy_from_slice(T::regs().dout().read().to_ne_bytes().as_slice()); + tag[4..8].copy_from_slice(T::regs().dout().read().to_ne_bytes().as_slice()); + tag[8..12].copy_from_slice(T::regs().dout().read().to_ne_bytes().as_slice()); + tag[12..16].copy_from_slice(T::regs().dout().read().to_ne_bytes().as_slice()); + + T::regs().cr().modify(|w| w.set_crypen(false)); } fn prepare_key(&self, ctx: &Context) { if ctx.algo == Algorithm::AES && ctx.dir == Direction::Decrypt { if (ctx.mode == Mode::ECB) || (ctx.mode == Mode::CBC) { T::regs().cr().modify(|w| w.set_algomode0(7)); + T::regs().cr().modify(|w| w.set_algomode3(false)); T::regs().cr().modify(|w| w.set_crypen(true)); while T::regs().sr().read().busy() {} } From fec26e896052cc0eac6bfa6415a4ebad5352d1d9 Mon Sep 17 00:00:00 2001 From: Caleb Garrett <47389035+caleb-garrett@users.noreply.github.com> Date: Sun, 18 Feb 2024 21:40:18 -0500 Subject: [PATCH 06/23] Refactored ciphers into traits. 
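
Each cipher/mode pair is now its own type implementing the `Cipher` trait,
so key and IV lengths are checked by the type system instead of at runtime.
An AES-GCM round under the new API might look like this sketch (placeholder
values; assumes `finish_blocking` keeps the shape it had in the previous
commit, with the new cipher generics):

    use embassy_stm32::cryp::{AesGcm, Cryp, Direction};

    let p = embassy_stm32::init(Default::default());
    let cryp = Cryp::new(p.CRYP);

    let key = [0u8; 16]; // AES-128 key (placeholder)
    let nonce = [0u8; 12]; // 96-bit GCM nonce (placeholder)
    let aad = [0u8; 16]; // authenticated, but not encrypted
    let payload = [0u8; 32];
    let mut ciphertext = [0u8; 32];
    let mut tag = [0u8; 16];

    let gcm = AesGcm::new(&key, &nonce);
    let mut ctx = cryp.start(&gcm, Direction::Encrypt);
    cryp.aad_blocking(&mut ctx, &aad, true);
    cryp.payload_blocking(&mut ctx, &payload, &mut ciphertext, true);
    cryp.finish_blocking(ctx, &mut tag);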
--- embassy-stm32/src/cryp/mod.rs | 651 ++++++++++++++++++++++------------ 1 file changed, 431 insertions(+), 220 deletions(-) diff --git a/embassy-stm32/src/cryp/mod.rs b/embassy-stm32/src/cryp/mod.rs index 447bcf2f8..29c1db12e 100644 --- a/embassy-stm32/src/cryp/mod.rs +++ b/embassy-stm32/src/cryp/mod.rs @@ -1,4 +1,6 @@ //! Crypto Accelerator (CRYP) +use core::marker::PhantomData; + use embassy_hal_internal::{into_ref, PeripheralRef}; use crate::pac; @@ -9,51 +11,375 @@ use crate::{interrupt, peripherals, Peripheral}; const DES_BLOCK_SIZE: usize = 8; // 64 bits const AES_BLOCK_SIZE: usize = 16; // 128 bits +/// This trait encapsulates all cipher-specific behavior/ +pub trait Cipher<'c> { + /// Processing block size. Determined by the processor and the algorithm. + const BLOCK_SIZE: usize; + + /// Indicates whether the cipher requires the application to provide padding. + /// If `true`, no partial blocks will be accepted (a panic will occur). + const REQUIRES_PADDING: bool = false; + + /// Returns the symmetric key. + fn key(&self) -> &'c [u8]; + + /// Returns the initialization vector. + fn iv(&self) -> &[u8]; + + /// Sets the processor algorithm mode according to the associated cipher. + fn set_algomode(&self, p: &pac::cryp::Cryp); + + /// Performs any key preparation within the processor, if necessary. + fn prepare_key(&self, _p: &pac::cryp::Cryp) {} + + /// Performs any cipher-specific initialization. + fn init_phase(&self, _p: &pac::cryp::Cryp) {} + + /// Called prior to processing the last data block for cipher-specific operations. + fn pre_final_block(&self, _p: &pac::cryp::Cryp) {} + + /// Called after processing the last data block for cipher-specific operations. + fn post_final_block(&self, _p: &pac::cryp::Cryp, _dir: Direction, _int_data: &[u8; AES_BLOCK_SIZE]) {} +} + +/// This trait enables restriction of ciphers to specific key sizes. +pub trait CipherSized {} + +/// This trait enables restriction of a header phase to authenticated ciphers only. +pub trait CipherAuthenticated {} + +/// AES-ECB Cipher Mode +pub struct AesEcb<'c, const KEY_SIZE: usize> { + iv: &'c [u8; 0], + key: &'c [u8; KEY_SIZE], +} + +impl<'c, const KEY_SIZE: usize> AesEcb<'c, KEY_SIZE> { + /// Constructs a new AES-ECB cipher for a cryptographic operation. + pub fn new(key: &'c [u8; KEY_SIZE]) -> Self { + return Self { key: key, iv: &[0; 0] }; + } +} + +impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesEcb<'c, KEY_SIZE> { + const BLOCK_SIZE: usize = AES_BLOCK_SIZE; + const REQUIRES_PADDING: bool = true; + + fn key(&self) -> &'c [u8] { + self.key + } + + fn iv(&self) -> &'c [u8] { + self.iv + } + + fn prepare_key(&self, p: &pac::cryp::Cryp) { + p.cr().modify(|w| w.set_algomode0(7)); + p.cr().modify(|w| w.set_algomode3(false)); + p.cr().modify(|w| w.set_crypen(true)); + while p.sr().read().busy() {} + } + + fn set_algomode(&self, p: &pac::cryp::Cryp) { + p.cr().modify(|w| w.set_algomode0(4)); + p.cr().modify(|w| w.set_algomode3(false)); + } +} + +impl<'c> CipherSized for AesEcb<'c, { 128 / 8 }> {} +impl<'c> CipherSized for AesEcb<'c, { 192 / 8 }> {} +impl<'c> CipherSized for AesEcb<'c, { 256 / 8 }> {} + +/// AES-CBC Cipher Mode +pub struct AesCbc<'c, const KEY_SIZE: usize> { + iv: &'c [u8; 16], + key: &'c [u8; KEY_SIZE], +} + +impl<'c, const KEY_SIZE: usize> AesCbc<'c, KEY_SIZE> { + /// Constructs a new AES-CBC cipher for a cryptographic operation. 
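+    /// Key and IV lengths are enforced by the array types; only 128-, 192-,
+    /// and 256-bit keys implement `CipherSized`.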
+ pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; 16]) -> Self { + return Self { key: key, iv: iv }; + } +} + +impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesCbc<'c, KEY_SIZE> { + const BLOCK_SIZE: usize = AES_BLOCK_SIZE; + const REQUIRES_PADDING: bool = true; + + fn key(&self) -> &'c [u8] { + self.key + } + + fn iv(&self) -> &'c [u8] { + self.iv + } + + fn prepare_key(&self, p: &pac::cryp::Cryp) { + p.cr().modify(|w| w.set_algomode0(7)); + p.cr().modify(|w| w.set_algomode3(false)); + p.cr().modify(|w| w.set_crypen(true)); + while p.sr().read().busy() {} + } + + fn set_algomode(&self, p: &pac::cryp::Cryp) { + p.cr().modify(|w| w.set_algomode0(5)); + p.cr().modify(|w| w.set_algomode3(false)); + } +} + +impl<'c> CipherSized for AesCbc<'c, { 128 / 8 }> {} +impl<'c> CipherSized for AesCbc<'c, { 192 / 8 }> {} +impl<'c> CipherSized for AesCbc<'c, { 256 / 8 }> {} + +/// AES-CTR Cipher Mode +pub struct AesCtr<'c, const KEY_SIZE: usize> { + iv: &'c [u8; 16], + key: &'c [u8; KEY_SIZE], +} + +impl<'c, const KEY_SIZE: usize> AesCtr<'c, KEY_SIZE> { + /// Constructs a new AES-CTR cipher for a cryptographic operation. + pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; 16]) -> Self { + return Self { key: key, iv: iv }; + } +} + +impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesCtr<'c, KEY_SIZE> { + const BLOCK_SIZE: usize = AES_BLOCK_SIZE; + + fn key(&self) -> &'c [u8] { + self.key + } + + fn iv(&self) -> &'c [u8] { + self.iv + } + + fn set_algomode(&self, p: &pac::cryp::Cryp) { + p.cr().modify(|w| w.set_algomode0(6)); + p.cr().modify(|w| w.set_algomode3(false)); + } +} + +impl<'c> CipherSized for AesCtr<'c, { 128 / 8 }> {} +impl<'c> CipherSized for AesCtr<'c, { 192 / 8 }> {} +impl<'c> CipherSized for AesCtr<'c, { 256 / 8 }> {} + +///AES-GCM Cipher Mode +pub struct AesGcm<'c, const KEY_SIZE: usize> { + iv: [u8; 16], + key: &'c [u8; KEY_SIZE], +} + +impl<'c, const KEY_SIZE: usize> AesGcm<'c, KEY_SIZE> { + /// Constucts a new AES-GCM cipher for a cryptographic operation. + pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; 12]) -> Self { + let mut new_gcm = Self { key: key, iv: [0; 16] }; + new_gcm.iv[..12].copy_from_slice(iv); + new_gcm.iv[15] = 2; + new_gcm + } +} + +impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesGcm<'c, KEY_SIZE> { + const BLOCK_SIZE: usize = AES_BLOCK_SIZE; + + fn key(&self) -> &'c [u8] { + self.key + } + + fn iv(&self) -> &[u8] { + self.iv.as_slice() + } + + fn set_algomode(&self, p: &pac::cryp::Cryp) { + p.cr().modify(|w| w.set_algomode0(0)); + p.cr().modify(|w| w.set_algomode3(true)); + } + + fn init_phase(&self, p: &pac::cryp::Cryp) { + p.cr().modify(|w| w.set_gcm_ccmph(0)); + p.cr().modify(|w| w.set_crypen(true)); + while p.cr().read().crypen() {} + } + + fn pre_final_block(&self, p: &pac::cryp::Cryp) { + //Handle special GCM partial block process. + p.cr().modify(|w| w.set_crypen(false)); + p.cr().modify(|w| w.set_algomode3(false)); + p.cr().modify(|w| w.set_algomode0(6)); + let iv1r = p.csgcmccmr(7).read() - 1; + p.init(1).ivrr().write_value(iv1r); + p.cr().modify(|w| w.set_crypen(true)); + } + + fn post_final_block(&self, p: &pac::cryp::Cryp, dir: Direction, int_data: &[u8; AES_BLOCK_SIZE]) { + if dir == Direction::Encrypt { + //Handle special GCM partial block process. 
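+            // Feed the partial-block ciphertext back through the final phase
+            // so it is folded into the GHASH; the DOUT words are discarded.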
+ p.cr().modify(|w| w.set_crypen(false)); + p.cr().write(|w| w.set_algomode3(true)); + p.cr().write(|w| w.set_algomode0(0)); + p.init(1).ivrr().write_value(2); + p.cr().modify(|w| w.set_crypen(true)); + p.cr().modify(|w| w.set_gcm_ccmph(3)); + let mut index = 0; + let end_index = Self::BLOCK_SIZE; + while index < end_index { + let mut in_word: [u8; 4] = [0; 4]; + in_word.copy_from_slice(&int_data[index..index + 4]); + p.din().write_value(u32::from_ne_bytes(in_word)); + index += 4; + } + for _ in 0..4 { + p.dout().read(); + } + } + } +} + +impl<'c> CipherSized for AesGcm<'c, { 128 / 8 }> {} +impl<'c> CipherSized for AesGcm<'c, { 192 / 8 }> {} +impl<'c> CipherSized for AesGcm<'c, { 256 / 8 }> {} +impl<'c, const KEY_SIZE: usize> CipherAuthenticated for AesGcm<'c, KEY_SIZE> {} + +/// AES-GMAC Cipher Mode +pub struct AesGmac<'c, const KEY_SIZE: usize> { + iv: [u8; 16], + key: &'c [u8; KEY_SIZE], +} + +impl<'c, const KEY_SIZE: usize> AesGmac<'c, KEY_SIZE> { + /// Constructs a new AES-GMAC cipher for a cryptographic operation. + pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; 12]) -> Self { + let mut new_gmac = Self { key: key, iv: [0; 16] }; + new_gmac.iv[..12].copy_from_slice(iv); + new_gmac.iv[15] = 2; + new_gmac + } +} + +impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesGmac<'c, KEY_SIZE> { + const BLOCK_SIZE: usize = AES_BLOCK_SIZE; + + fn key(&self) -> &'c [u8] { + self.key + } + + fn iv(&self) -> &[u8] { + self.iv.as_slice() + } + + fn set_algomode(&self, p: &pac::cryp::Cryp) { + p.cr().modify(|w| w.set_algomode0(0)); + p.cr().modify(|w| w.set_algomode3(true)); + } + + fn init_phase(&self, p: &pac::cryp::Cryp) { + p.cr().modify(|w| w.set_gcm_ccmph(0)); + p.cr().modify(|w| w.set_crypen(true)); + while p.cr().read().crypen() {} + } + + fn pre_final_block(&self, p: &pac::cryp::Cryp) { + //Handle special GCM partial block process. + p.cr().modify(|w| w.set_crypen(false)); + p.cr().modify(|w| w.set_algomode3(false)); + p.cr().modify(|w| w.set_algomode0(6)); + let iv1r = p.csgcmccmr(7).read() - 1; + p.init(1).ivrr().write_value(iv1r); + p.cr().modify(|w| w.set_crypen(true)); + } + + fn post_final_block(&self, p: &pac::cryp::Cryp, dir: Direction, int_data: &[u8; AES_BLOCK_SIZE]) { + if dir == Direction::Encrypt { + //Handle special GCM partial block process. 
+ p.cr().modify(|w| w.set_crypen(false)); + p.cr().write(|w| w.set_algomode3(true)); + p.cr().write(|w| w.set_algomode0(0)); + p.init(1).ivrr().write_value(2); + p.cr().modify(|w| w.set_crypen(true)); + p.cr().modify(|w| w.set_gcm_ccmph(3)); + let mut index = 0; + let end_index = Self::BLOCK_SIZE; + while index < end_index { + let mut in_word: [u8; 4] = [0; 4]; + in_word.copy_from_slice(&int_data[index..index + 4]); + p.din().write_value(u32::from_ne_bytes(in_word)); + index += 4; + } + for _ in 0..4 { + p.dout().read(); + } + } + } +} + +impl<'c> CipherSized for AesGmac<'c, { 128 / 8 }> {} +impl<'c> CipherSized for AesGmac<'c, { 192 / 8 }> {} +impl<'c> CipherSized for AesGmac<'c, { 256 / 8 }> {} +impl<'c, const KEY_SIZE: usize> CipherAuthenticated for AesGmac<'c, KEY_SIZE> {} + +// struct AesCcm<'c, const KEY_SIZE: usize> { +// iv: &'c [u8], +// key: &'c [u8; KEY_SIZE], +// aad_len: usize, +// payload_len: usize, +// } + +// impl<'c, const KEY_SIZE: usize> AesCcm<'c, KEY_SIZE> { +// pub fn new(&self, key: &[u8; KEY_SIZE], iv: &[u8], aad_len: usize, payload_len: usize) { +// if iv.len() > 13 { +// panic!("CCM IV length must be 13 bytes or less."); +// } +// self.key = key; +// self.iv = iv; +// self.aad_len = aad_len; +// self.payload_len = payload_len; +// } +// } + +// impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesCcm<'c, KEY_SIZE> { +// const BLOCK_SIZE: usize = AES_BLOCK_SIZE; + +// fn key(&self) -> &'c [u8] { +// self.key +// } + +// fn iv(&self) -> &'c [u8] { +// self.iv +// } + +// fn set_algomode(&self, p: &pac::cryp::Cryp) { +// p.cr().modify(|w| w.set_algomode0(1)); +// p.cr().modify(|w| w.set_algomode3(true)); +// } + +// fn init_phase(&self, p: &pac::cryp::Cryp) { +// todo!(); +// } +// } + +// impl<'c> CipherSized for AesCcm<'c, { 128 / 8 }> {} +// impl<'c> CipherSized for AesCcm<'c, { 192 / 8 }> {} +// impl<'c> CipherSized for AesCcm<'c, { 256 / 8 }> {} + /// Holds the state information for a cipher operation. /// Allows suspending/resuming of cipher operations. -pub struct Context<'c> { - algo: Algorithm, - mode: Mode, +pub struct Context<'c, C: Cipher<'c> + CipherSized> { + phantom_data: PhantomData<&'c C>, + cipher: &'c C, dir: Direction, last_block_processed: bool, aad_complete: bool, cr: u32, iv: [u32; 4], - key: &'c [u8], csgcmccm: [u32; 8], csgcm: [u32; 8], header_len: u64, payload_len: u64, } -/// Selects the encryption algorithm. -#[derive(PartialEq, Clone, Copy)] -pub enum Algorithm { - /// Advanced Encryption Standard - AES, - /// Data Encryption Standard - DES, - /// Triple-DES - TDES, -} - -/// Selects the cipher mode. -#[derive(PartialEq, Clone, Copy)] -pub enum Mode { - /// Electronic Codebook - ECB, - /// Cipher Block Chaining - CBC, - /// Counter Mode - CTR, - /// Galois Counter Mode - GCM, - /// Galois Message Authentication Code - GMAC, - /// Counter with CBC-MAC - CCM, -} - /// Selects whether the crypto processor operates in encryption or decryption mode. #[derive(PartialEq, Clone, Copy)] pub enum Direction { @@ -68,10 +394,6 @@ pub struct Cryp<'d, T: Instance> { _peripheral: PeripheralRef<'d, T>, } -/// Initialization vector of arbitrary length. -/// When an initialization vector is not needed, `None` may be supplied. -pub type InitVector<'v> = Option<&'v [u8]>; - impl<'d, T: Instance> Cryp<'d, T> { /// Create a new CRYP driver. pub fn new(peri: impl Peripheral<P = T> + 'd) -> Self { @@ -85,51 +407,31 @@ impl<'d, T: Instance> Cryp<'d, T> { /// Key size must be 128, 192, or 256 bits. /// Initialization vector must only be supplied if necessary. 
/// Panics if there is any mismatch in parameters, such as an incorrect IV length or invalid mode. - pub fn start<'c>(&self, key: &'c [u8], iv: InitVector, algo: Algorithm, mode: Mode, dir: Direction) -> Context<'c> { - let mut ctx = Context { - algo, - mode, + pub fn start<'c, C: Cipher<'c> + CipherSized>(&self, cipher: &'c C, dir: Direction) -> Context<'c, C> { + let mut ctx: Context<'c, C> = Context { dir, last_block_processed: false, cr: 0, iv: [0; 4], - key, csgcmccm: [0; 8], csgcm: [0; 8], aad_complete: false, header_len: 0, payload_len: 0, + cipher: cipher, + phantom_data: PhantomData, }; T::regs().cr().modify(|w| w.set_crypen(false)); - // Checks for correctness - if algo == Algorithm::AES { - let keylen = key.len() * 8; - let ivlen; - if let Some(iv) = iv { - ivlen = iv.len() * 8; - } else { - ivlen = 0; - } - match keylen { - 128 => T::regs().cr().modify(|w| w.set_keysize(0)), - 192 => T::regs().cr().modify(|w| w.set_keysize(1)), - 256 => T::regs().cr().modify(|w| w.set_keysize(2)), - _ => panic!("Key length must be 128, 192, or 256 bits."), - } + let key = ctx.cipher.key(); - if (mode == Mode::GCM) && (ivlen != 96) { - panic!("IV length must be 96 bits for GCM."); - } else if (mode == Mode::CBC) && (ivlen != 128) { - panic!("IV length must be 128 bits for CBC."); - } else if (mode == Mode::CCM) && (ivlen != 128) { - panic!("IV length must be 128 bits for CCM."); - } else if (mode == Mode::CTR) && (ivlen != 128) { - panic!("IV length must be 128 bits for CTR."); - } else if (mode == Mode::GMAC) && (ivlen != 96) { - panic!("IV length must be 96 bits for GMAC."); - } + if key.len() == (128 / 8) { + T::regs().cr().modify(|w| w.set_keysize(0)); + } else if key.len() == (192 / 8) { + T::regs().cr().modify(|w| w.set_keysize(1)); + } else if key.len() == (256 / 8) { + T::regs().cr().modify(|w| w.set_keysize(2)); } self.load_key(key); @@ -137,40 +439,9 @@ impl<'d, T: Instance> Cryp<'d, T> { // Set data type to 8-bit. This will match software implementations. 
T::regs().cr().modify(|w| w.set_datatype(2)); - self.prepare_key(&ctx); + ctx.cipher.prepare_key(&T::regs()); - if algo == Algorithm::AES { - match mode { - Mode::ECB => T::regs().cr().modify(|w| w.set_algomode0(4)), - Mode::CBC => T::regs().cr().modify(|w| w.set_algomode0(5)), - Mode::CTR => T::regs().cr().modify(|w| w.set_algomode0(6)), - Mode::GCM => T::regs().cr().modify(|w| w.set_algomode0(0)), - Mode::GMAC => T::regs().cr().modify(|w| w.set_algomode0(0)), - Mode::CCM => T::regs().cr().modify(|w| w.set_algomode0(1)), - } - match mode { - Mode::ECB => T::regs().cr().modify(|w| w.set_algomode3(false)), - Mode::CBC => T::regs().cr().modify(|w| w.set_algomode3(false)), - Mode::CTR => T::regs().cr().modify(|w| w.set_algomode3(false)), - Mode::GCM => T::regs().cr().modify(|w| w.set_algomode3(true)), - Mode::GMAC => T::regs().cr().modify(|w| w.set_algomode3(true)), - Mode::CCM => T::regs().cr().modify(|w| w.set_algomode3(true)), - } - } else if algo == Algorithm::DES { - T::regs().cr().modify(|w| w.set_algomode3(false)); - match mode { - Mode::ECB => T::regs().cr().modify(|w| w.set_algomode0(2)), - Mode::CBC => T::regs().cr().modify(|w| w.set_algomode0(3)), - _ => panic!("Only ECB and CBC modes are valid for DES."), - } - } else if algo == Algorithm::TDES { - T::regs().cr().modify(|w| w.set_algomode3(false)); - match mode { - Mode::ECB => T::regs().cr().modify(|w| w.set_algomode0(0)), - Mode::CBC => T::regs().cr().modify(|w| w.set_algomode0(1)), - _ => panic!("Only ECB and CBC modes are valid for TDES."), - } - } + ctx.cipher.set_algomode(&T::regs()); // Set encrypt/decrypt if dir == Direction::Encrypt { @@ -180,38 +451,27 @@ impl<'d, T: Instance> Cryp<'d, T> { } // Load the IV into the registers. - if let Some(iv) = iv { - let mut full_iv: [u8; 16] = [0; 16]; - full_iv[0..iv.len()].copy_from_slice(iv); - - if (mode == Mode::GCM) || (mode == Mode::GMAC) { - full_iv[15] = 2; - } - - let mut iv_idx = 0; - let mut iv_word: [u8; 4] = [0; 4]; - iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]); - iv_idx += 4; - T::regs().init(0).ivlr().write_value(u32::from_be_bytes(iv_word)); - iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]); - iv_idx += 4; - T::regs().init(0).ivrr().write_value(u32::from_be_bytes(iv_word)); - iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]); - iv_idx += 4; - T::regs().init(1).ivlr().write_value(u32::from_be_bytes(iv_word)); - iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]); - T::regs().init(1).ivrr().write_value(u32::from_be_bytes(iv_word)); - } + let iv = ctx.cipher.iv(); + let mut full_iv: [u8; 16] = [0; 16]; + full_iv[0..iv.len()].copy_from_slice(iv); + let mut iv_idx = 0; + let mut iv_word: [u8; 4] = [0; 4]; + iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]); + iv_idx += 4; + T::regs().init(0).ivlr().write_value(u32::from_be_bytes(iv_word)); + iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]); + iv_idx += 4; + T::regs().init(0).ivrr().write_value(u32::from_be_bytes(iv_word)); + iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]); + iv_idx += 4; + T::regs().init(1).ivlr().write_value(u32::from_be_bytes(iv_word)); + iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]); + T::regs().init(1).ivrr().write_value(u32::from_be_bytes(iv_word)); // Flush in/out FIFOs T::regs().cr().modify(|w| w.fflush()); - if mode == Mode::GCM { - // GCM init phase - T::regs().cr().modify(|w| w.set_gcm_ccmph(0)); - T::regs().cr().modify(|w| w.set_crypen(true)); - while T::regs().cr().read().crypen() {} - } + ctx.cipher.init_phase(&T::regs()); self.store_context(&mut 
ctx); @@ -224,42 +484,38 @@ impl<'d, T: Instance> Cryp<'d, T> { /// All AAD must be supplied to this function prior to starting the payload phase with `payload_blocking`. /// The AAD must be supplied in multiples of the block size (128 bits), except when supplying the last block. /// When supplying the last block of AAD, `last_aad_block` must be `true`. - pub fn aad_blocking(&self, ctx: &mut Context, aad: &[u8], last_aad_block: bool) { + pub fn aad_blocking<'c, C: Cipher<'c> + CipherSized + CipherAuthenticated>( + &self, + ctx: &mut Context<'c, C>, + aad: &[u8], + last_aad_block: bool, + ) { self.load_context(ctx); - let block_size; - if ctx.algo == Algorithm::DES { - block_size = DES_BLOCK_SIZE; - } else { - block_size = AES_BLOCK_SIZE; - } - let last_block_remainder = aad.len() % block_size; + let last_block_remainder = aad.len() % C::BLOCK_SIZE; // Perform checks for correctness. if ctx.aad_complete { panic!("Cannot update AAD after calling 'update'!") } - if (ctx.mode != Mode::GCM) && (ctx.mode != Mode::GMAC) && (ctx.mode != Mode::CCM) { - panic!("Associated data only valid for GCM, GMAC, and CCM modes.") - } if !last_aad_block { if last_block_remainder != 0 { - panic!("Input length must be a multiple of {} bytes.", block_size); + panic!("Input length must be a multiple of {} bytes.", C::BLOCK_SIZE); } } ctx.header_len += aad.len() as u64; - // GCM header phase + // Header phase T::regs().cr().modify(|w| w.set_crypen(false)); T::regs().cr().modify(|w| w.set_gcm_ccmph(1)); T::regs().cr().modify(|w| w.set_crypen(true)); // Load data into core, block by block. - let num_full_blocks = aad.len() / block_size; + let num_full_blocks = aad.len() / C::BLOCK_SIZE; for block in 0..num_full_blocks { - let mut index = block * block_size; - let end_index = index + block_size; + let mut index = block * C::BLOCK_SIZE; + let end_index = index + C::BLOCK_SIZE; // Write block in while index < end_index { let mut in_word: [u8; 4] = [0; 4]; @@ -276,7 +532,7 @@ impl<'d, T: Instance> Cryp<'d, T> { let mut last_block: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE]; last_block[..last_block_remainder].copy_from_slice(&aad[aad.len() - last_block_remainder..aad.len()]); let mut index = 0; - let end_index = block_size; + let end_index = C::BLOCK_SIZE; // Write block in while index < end_index { let mut in_word: [u8; 4] = [0; 4]; @@ -307,16 +563,16 @@ impl<'d, T: Instance> Cryp<'d, T> { /// Data must be a multiple of block size (128-bits for AES, 64-bits for DES) for CBC and ECB modes. /// Padding or ciphertext stealing must be managed by the application for these modes. /// Data must also be a multiple of block size unless `last_block` is `true`. - pub fn payload_blocking(&self, ctx: &mut Context, input: &[u8], output: &mut [u8], last_block: bool) { + pub fn payload_blocking<'c, C: Cipher<'c> + CipherSized>( + &self, + ctx: &mut Context<'c, C>, + input: &[u8], + output: &mut [u8], + last_block: bool, + ) { self.load_context(ctx); - let block_size; - if ctx.algo == Algorithm::DES { - block_size = DES_BLOCK_SIZE; - } else { - block_size = AES_BLOCK_SIZE; - } - let last_block_remainder = input.len() % block_size; + let last_block_remainder = input.len() % C::BLOCK_SIZE; // Perform checks for correctness. if !ctx.aad_complete && ctx.header_len > 0 { @@ -328,9 +584,6 @@ impl<'d, T: Instance> Cryp<'d, T> { T::regs().cr().modify(|w| w.fflush()); T::regs().cr().modify(|w| w.set_crypen(true)); } - if ctx.mode == Mode::GMAC { - panic!("GMAC works on header data only. 
Do not call this function for GMAC."); - } if ctx.last_block_processed { panic!("The last block has already been processed!"); } @@ -339,24 +592,23 @@ impl<'d, T: Instance> Cryp<'d, T> { } if !last_block { if last_block_remainder != 0 { - panic!("Input length must be a multiple of {} bytes.", block_size); + panic!("Input length must be a multiple of {} bytes.", C::BLOCK_SIZE); } } - if (ctx.mode == Mode::ECB) || (ctx.mode == Mode::CBC) { + if C::REQUIRES_PADDING { if last_block_remainder != 0 { - panic!("Input must be a multiple of {} bytes in ECB and CBC modes. Consider padding or ciphertext stealing.", block_size); + panic!("Input must be a multiple of {} bytes in ECB and CBC modes. Consider padding or ciphertext stealing.", C::BLOCK_SIZE); } } - if last_block { ctx.last_block_processed = true; } // Load data into core, block by block. - let num_full_blocks = input.len() / block_size; + let num_full_blocks = input.len() / C::BLOCK_SIZE; for block in 0..num_full_blocks { - let mut index = block * block_size; - let end_index = index + block_size; + let mut index = block * C::BLOCK_SIZE; + let end_index = index + C::BLOCK_SIZE; // Write block in while index < end_index { let mut in_word: [u8; 4] = [0; 4]; @@ -364,8 +616,8 @@ impl<'d, T: Instance> Cryp<'d, T> { T::regs().din().write_value(u32::from_ne_bytes(in_word)); index += 4; } - let mut index = block * block_size; - let end_index = index + block_size; + let mut index = block * C::BLOCK_SIZE; + let end_index = index + C::BLOCK_SIZE; // Block until there is output to read. while !T::regs().sr().read().ofne() {} // Read block out @@ -378,21 +630,13 @@ impl<'d, T: Instance> Cryp<'d, T> { // Handle the final block, which is incomplete. if last_block_remainder > 0 { - if ctx.mode == Mode::GCM && ctx.dir == Direction::Encrypt { - //Handle special GCM partial block process. - T::regs().cr().modify(|w| w.set_crypen(false)); - T::regs().cr().modify(|w| w.set_algomode3(false)); - T::regs().cr().modify(|w| w.set_algomode0(6)); - let iv1r = T::regs().csgcmccmr(7).read() - 1; - T::regs().init(1).ivrr().write_value(iv1r); - T::regs().cr().modify(|w| w.set_crypen(true)); - } + ctx.cipher.pre_final_block(&T::regs()); let mut intermediate_data: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE]; let mut last_block: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE]; last_block[..last_block_remainder].copy_from_slice(&input[input.len() - last_block_remainder..input.len()]); let mut index = 0; - let end_index = block_size; + let end_index = C::BLOCK_SIZE; // Write block in while index < end_index { let mut in_word: [u8; 4] = [0; 4]; @@ -401,7 +645,7 @@ impl<'d, T: Instance> Cryp<'d, T> { index += 4; } let mut index = 0; - let end_index = block_size; + let end_index = C::BLOCK_SIZE; // Block until there is output to read. while !T::regs().sr().read().ofne() {} // Read block out @@ -416,41 +660,19 @@ impl<'d, T: Instance> Cryp<'d, T> { output[output_len - last_block_remainder..output_len] .copy_from_slice(&intermediate_data[0..last_block_remainder]); - if ctx.mode == Mode::GCM && ctx.dir == Direction::Encrypt { - //Handle special GCM partial block process. 
- T::regs().cr().modify(|w| w.set_crypen(false)); - T::regs().cr().write(|w| w.set_algomode3(true)); - T::regs().cr().write(|w| w.set_algomode0(0)); - T::regs().init(1).ivrr().write_value(2); - T::regs().cr().modify(|w| w.set_crypen(true)); - T::regs().cr().modify(|w| w.set_gcm_ccmph(3)); - let mut index = 0; - let end_index = block_size; - while index < end_index { - let mut in_word: [u8; 4] = [0; 4]; - in_word.copy_from_slice(&intermediate_data[index..index + 4]); - T::regs().din().write_value(u32::from_ne_bytes(in_word)); - index += 4; - } - for _ in 0..4 { - T::regs().dout().read(); - } - } + ctx.cipher.post_final_block(&T::regs(), ctx.dir, &intermediate_data); } ctx.payload_len += input.len() as u64; } /// This function only needs to be called for GCM, CCM, and GMAC modes to - /// generate an authentication tag. Calling this function on any other mode - /// does nothing except consumes the context. A buffer for the authentication - /// tag must be supplied. - pub fn finish_blocking(&self, mut ctx: Context, tag: &mut [u8; 16]) { - // Just consume the context if called for any other mode. - if (ctx.mode != Mode::GCM) || (ctx.mode != Mode::CCM) || (ctx.mode != Mode::GMAC) { - return; - } - + /// generate an authentication tag. + pub fn finish_blocking<'c, C: Cipher<'c> + CipherSized + CipherAuthenticated>( + &self, + mut ctx: Context<'c, C>, + tag: &mut [u8; 16], + ) { self.load_context(&mut ctx); T::regs().cr().modify(|w| w.set_crypen(false)); @@ -477,17 +699,6 @@ impl<'d, T: Instance> Cryp<'d, T> { T::regs().cr().modify(|w| w.set_crypen(false)); } - fn prepare_key(&self, ctx: &Context) { - if ctx.algo == Algorithm::AES && ctx.dir == Direction::Decrypt { - if (ctx.mode == Mode::ECB) || (ctx.mode == Mode::CBC) { - T::regs().cr().modify(|w| w.set_algomode0(7)); - T::regs().cr().modify(|w| w.set_algomode3(false)); - T::regs().cr().modify(|w| w.set_crypen(true)); - while T::regs().sr().read().busy() {} - } - } - } - fn load_key(&self, key: &[u8]) { // Load the key into the registers. let mut keyidx = 0; @@ -524,7 +735,7 @@ impl<'d, T: Instance> Cryp<'d, T> { T::regs().key(3).krr().write_value(u32::from_be_bytes(keyword)); } - fn store_context(&self, ctx: &mut Context) { + fn store_context<'c, C: Cipher<'c> + CipherSized>(&self, ctx: &mut Context<'c, C>) { // Wait for data block processing to finish. while !T::regs().sr().read().ifem() {} while T::regs().sr().read().ofne() {} @@ -545,7 +756,7 @@ impl<'d, T: Instance> Cryp<'d, T> { } } - fn load_context(&self, ctx: &Context) { + fn load_context<'c, C: Cipher<'c> + CipherSized>(&self, ctx: &Context<'c, C>) { // Reload state registers. T::regs().cr().write(|w| w.0 = ctx.cr); T::regs().init(0).ivlr().write_value(ctx.iv[0]); @@ -556,10 +767,10 @@ impl<'d, T: Instance> Cryp<'d, T> { T::regs().csgcmccmr(i).write_value(ctx.csgcmccm[i]); T::regs().csgcmr(i).write_value(ctx.csgcm[i]); } - self.load_key(ctx.key); + self.load_key(ctx.cipher.key()); // Prepare key if applicable. - self.prepare_key(ctx); + ctx.cipher.prepare_key(&T::regs()); T::regs().cr().write(|w| w.0 = ctx.cr); // Enable crypto processor. From 690b2118c6fdad88bf1e595b6a0c0afdb0583d28 Mon Sep 17 00:00:00 2001 From: Caleb Garrett <47389035+caleb-garrett@users.noreply.github.com> Date: Tue, 20 Feb 2024 11:54:39 -0500 Subject: [PATCH 07/23] CCM mode functional. 
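
CCM assembles the B0 block and the initial counter in software before the
payload touches the peripheral. For review: the flags byte placed in
block0[0] follows RFC 3610 / NIST SP 800-38C -- an Adata bit (0x40) when
associated data is present, the tag length t encoded as (t - 2) / 2 in bits
5..3, and the length-field width encoded as q - 1 in bits 2..0, where
q = 15 - nonce length. A host-side sketch of that encoding (illustrative
names only, not driver code):

    fn ccm_b0_flags(has_aad: bool, tag_len: u8, nonce_len: u8) -> u8 {
        let mut flags = 0u8;
        if has_aad {
            flags |= 0x40; // Adata: associated data follows B0
        }
        flags |= (((tag_len - 2) >> 1) & 0x07) << 3; // t encoded as (t - 2) / 2
        flags |= (15 - nonce_len - 1) & 0x07; // q - 1, with q = 15 - n
        flags
    }

    fn main() {
        // 16-byte tag, 12-byte nonce, AAD present: 0x40 | 0x38 | 0x02.
        assert_eq!(ccm_b0_flags(true, 16, 12), 0x7A);
    }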
--- embassy-stm32/src/cryp/mod.rs | 372 ++++++++++++++++++++++++++-------- 1 file changed, 293 insertions(+), 79 deletions(-) diff --git a/embassy-stm32/src/cryp/mod.rs b/embassy-stm32/src/cryp/mod.rs index 29c1db12e..fe248def1 100644 --- a/embassy-stm32/src/cryp/mod.rs +++ b/embassy-stm32/src/cryp/mod.rs @@ -1,6 +1,6 @@ //! Crypto Accelerator (CRYP) +use core::cmp::min; use core::marker::PhantomData; - use embassy_hal_internal::{into_ref, PeripheralRef}; use crate::pac; @@ -21,7 +21,7 @@ pub trait Cipher<'c> { const REQUIRES_PADDING: bool = false; /// Returns the symmetric key. - fn key(&self) -> &'c [u8]; + fn key(&self) -> &[u8]; /// Returns the initialization vector. fn iv(&self) -> &[u8]; @@ -36,10 +36,25 @@ pub trait Cipher<'c> { fn init_phase(&self, _p: &pac::cryp::Cryp) {} /// Called prior to processing the last data block for cipher-specific operations. - fn pre_final_block(&self, _p: &pac::cryp::Cryp) {} + fn pre_final_block(&self, _p: &pac::cryp::Cryp, _dir: Direction) -> [u32; 4] { + return [0; 4]; + } /// Called after processing the last data block for cipher-specific operations. - fn post_final_block(&self, _p: &pac::cryp::Cryp, _dir: Direction, _int_data: &[u8; AES_BLOCK_SIZE]) {} + fn post_final_block( + &self, + _p: &pac::cryp::Cryp, + _dir: Direction, + _int_data: &[u8; AES_BLOCK_SIZE], + _temp1: [u32; 4], + _padding_mask: [u8; 16], + ) { + } + + /// Called prior to processing the first associated data block for cipher-specific operations. + fn get_header_block(&self) -> &[u8] { + return [0; 0].as_slice(); + } } /// This trait enables restriction of ciphers to specific key sizes. @@ -204,17 +219,27 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesGcm<'c, KEY_SIZE> { while p.cr().read().crypen() {} } - fn pre_final_block(&self, p: &pac::cryp::Cryp) { + fn pre_final_block(&self, p: &pac::cryp::Cryp, dir: Direction) -> [u32; 4] { //Handle special GCM partial block process. - p.cr().modify(|w| w.set_crypen(false)); - p.cr().modify(|w| w.set_algomode3(false)); - p.cr().modify(|w| w.set_algomode0(6)); - let iv1r = p.csgcmccmr(7).read() - 1; - p.init(1).ivrr().write_value(iv1r); - p.cr().modify(|w| w.set_crypen(true)); + if dir == Direction::Encrypt { + p.cr().modify(|w| w.set_crypen(false)); + p.cr().modify(|w| w.set_algomode3(false)); + p.cr().modify(|w| w.set_algomode0(6)); + let iv1r = p.csgcmccmr(7).read() - 1; + p.init(1).ivrr().write_value(iv1r); + p.cr().modify(|w| w.set_crypen(true)); + } + [0; 4] } - fn post_final_block(&self, p: &pac::cryp::Cryp, dir: Direction, int_data: &[u8; AES_BLOCK_SIZE]) { + fn post_final_block( + &self, + p: &pac::cryp::Cryp, + dir: Direction, + int_data: &[u8; AES_BLOCK_SIZE], + _temp1: [u32; 4], + _padding_mask: [u8; 16], + ) { if dir == Direction::Encrypt { //Handle special GCM partial block process. p.cr().modify(|w| w.set_crypen(false)); @@ -281,17 +306,27 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesGmac<'c, KEY_SIZE> { while p.cr().read().crypen() {} } - fn pre_final_block(&self, p: &pac::cryp::Cryp) { + fn pre_final_block(&self, p: &pac::cryp::Cryp, dir: Direction) -> [u32; 4] { //Handle special GCM partial block process. 
- p.cr().modify(|w| w.set_crypen(false)); - p.cr().modify(|w| w.set_algomode3(false)); - p.cr().modify(|w| w.set_algomode0(6)); - let iv1r = p.csgcmccmr(7).read() - 1; - p.init(1).ivrr().write_value(iv1r); - p.cr().modify(|w| w.set_crypen(true)); + if dir == Direction::Encrypt { + p.cr().modify(|w| w.set_crypen(false)); + p.cr().modify(|w| w.set_algomode3(false)); + p.cr().modify(|w| w.set_algomode0(6)); + let iv1r = p.csgcmccmr(7).read() - 1; + p.init(1).ivrr().write_value(iv1r); + p.cr().modify(|w| w.set_crypen(true)); + } + [0; 4] } - fn post_final_block(&self, p: &pac::cryp::Cryp, dir: Direction, int_data: &[u8; AES_BLOCK_SIZE]) { + fn post_final_block( + &self, + p: &pac::cryp::Cryp, + dir: Direction, + int_data: &[u8; AES_BLOCK_SIZE], + _temp1: [u32; 4], + _padding_mask: [u8; 16], + ) { if dir == Direction::Encrypt { //Handle special GCM partial block process. p.cr().modify(|w| w.set_crypen(false)); @@ -320,49 +355,180 @@ impl<'c> CipherSized for AesGmac<'c, { 192 / 8 }> {} impl<'c> CipherSized for AesGmac<'c, { 256 / 8 }> {} impl<'c, const KEY_SIZE: usize> CipherAuthenticated for AesGmac<'c, KEY_SIZE> {} -// struct AesCcm<'c, const KEY_SIZE: usize> { -// iv: &'c [u8], -// key: &'c [u8; KEY_SIZE], -// aad_len: usize, -// payload_len: usize, -// } +pub struct AesCcm<'c, const KEY_SIZE: usize> { + key: &'c [u8; KEY_SIZE], + aad_header: [u8; 6], + aad_header_len: usize, + block0: [u8; 16], + ctr: [u8; 16], +} -// impl<'c, const KEY_SIZE: usize> AesCcm<'c, KEY_SIZE> { -// pub fn new(&self, key: &[u8; KEY_SIZE], iv: &[u8], aad_len: usize, payload_len: usize) { -// if iv.len() > 13 { -// panic!("CCM IV length must be 13 bytes or less."); -// } -// self.key = key; -// self.iv = iv; -// self.aad_len = aad_len; -// self.payload_len = payload_len; -// } -// } +impl<'c, const KEY_SIZE: usize> AesCcm<'c, KEY_SIZE> { + pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8], aad_len: usize, payload_len: usize, tag_len: u8) -> Self { + if (iv.len()) > 13 || (iv.len() < 7) { + panic!("CCM IV length must be 7-13 bytes."); + } + if (tag_len < 4) || (tag_len > 16) { + panic!("Tag length must be between 4 and 16 bytes."); + } + if tag_len % 2 > 0 { + panic!("Tag length must be a multiple of 2 bytes."); + } -// impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesCcm<'c, KEY_SIZE> { -// const BLOCK_SIZE: usize = AES_BLOCK_SIZE; + let mut aad_header: [u8; 6] = [0; 6]; + let mut aad_header_len = 0; + let mut block0: [u8; 16] = [0; 16]; + if aad_len != 0 { + if aad_len < 65280 { + aad_header[0] = (aad_len >> 8) as u8 & 0xFF; + aad_header[1] = aad_len as u8 & 0xFF; + aad_header_len = 2; + } else { + aad_header[0] = 0xFF; + aad_header[1] = 0xFE; + let aad_len_bytes: [u8; 4] = aad_len.to_be_bytes(); + aad_header[2] = aad_len_bytes[0]; + aad_header[3] = aad_len_bytes[1]; + aad_header[4] = aad_len_bytes[2]; + aad_header[5] = aad_len_bytes[3]; + aad_header_len = 6; + } + } + let total_aad_len = aad_header_len + aad_len; + let mut aad_padding_len = 16 - (total_aad_len % 16); + if aad_padding_len == 16 { + aad_padding_len = 0; + } + aad_header_len += aad_padding_len; + let total_aad_len_padded = aad_header_len + aad_len; + if total_aad_len_padded > 0 { + block0[0] = 0x40; + } + block0[0] |= (((tag_len - 2) >> 1) & 0x07) << 3; + block0[0] |= ((15 - (iv.len() as u8)) - 1) & 0x07; + block0[1..1 + iv.len()].copy_from_slice(iv); + let payload_len_bytes: [u8; 4] = payload_len.to_be_bytes(); + if iv.len() <= 11 { + block0[12] = payload_len_bytes[0]; + } else if payload_len_bytes[0] > 0 { + panic!("Message is too large for given IV 
size."); + } + if iv.len() <= 12 { + block0[13] = payload_len_bytes[1]; + } else if payload_len_bytes[1] > 0 { + panic!("Message is too large for given IV size."); + } + block0[14] = payload_len_bytes[2]; + block0[15] = payload_len_bytes[3]; + let mut ctr: [u8; 16] = [0; 16]; + ctr[0] = block0[0] & 0x07; + ctr[1..1 + iv.len()].copy_from_slice(&block0[1..1 + iv.len()]); + ctr[15] = 0x01; -// fn key(&self) -> &'c [u8] { -// self.key -// } + return Self { + key: key, + aad_header: aad_header, + aad_header_len: aad_header_len, + block0: block0, + ctr: ctr, + }; + } +} -// fn iv(&self) -> &'c [u8] { -// self.iv -// } +impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesCcm<'c, KEY_SIZE> { + const BLOCK_SIZE: usize = AES_BLOCK_SIZE; -// fn set_algomode(&self, p: &pac::cryp::Cryp) { -// p.cr().modify(|w| w.set_algomode0(1)); -// p.cr().modify(|w| w.set_algomode3(true)); -// } + fn key(&self) -> &'c [u8] { + self.key + } -// fn init_phase(&self, p: &pac::cryp::Cryp) { -// todo!(); -// } -// } + fn iv(&self) -> &[u8] { + self.ctr.as_slice() + } -// impl<'c> CipherSized for AesCcm<'c, { 128 / 8 }> {} -// impl<'c> CipherSized for AesCcm<'c, { 192 / 8 }> {} -// impl<'c> CipherSized for AesCcm<'c, { 256 / 8 }> {} + fn set_algomode(&self, p: &pac::cryp::Cryp) { + p.cr().modify(|w| w.set_algomode0(1)); + p.cr().modify(|w| w.set_algomode3(true)); + } + + fn init_phase(&self, p: &pac::cryp::Cryp) { + p.cr().modify(|w| w.set_gcm_ccmph(0)); + + let mut index = 0; + let end_index = index + Self::BLOCK_SIZE; + // Write block in + while index < end_index { + let mut in_word: [u8; 4] = [0; 4]; + in_word.copy_from_slice(&self.block0[index..index + 4]); + p.din().write_value(u32::from_ne_bytes(in_word)); + index += 4; + } + p.cr().modify(|w| w.set_crypen(true)); + while p.cr().read().crypen() {} + } + + fn get_header_block(&self) -> &[u8] { + return &self.aad_header[0..self.aad_header_len]; + } + + fn pre_final_block(&self, p: &pac::cryp::Cryp, dir: Direction) -> [u32; 4] { + //Handle special CCM partial block process. + let mut temp1 = [0; 4]; + if dir == Direction::Decrypt { + p.cr().modify(|w| w.set_crypen(false)); + let iv1temp = p.init(1).ivrr().read(); + temp1[0] = p.csgcmccmr(0).read(); + temp1[1] = p.csgcmccmr(1).read(); + temp1[2] = p.csgcmccmr(2).read(); + temp1[3] = p.csgcmccmr(3).read(); + p.init(1).ivrr().write_value(iv1temp); + p.cr().modify(|w| w.set_algomode3(false)); + p.cr().modify(|w| w.set_algomode0(6)); + p.cr().modify(|w| w.set_crypen(true)); + } + return temp1; + } + + fn post_final_block( + &self, + p: &pac::cryp::Cryp, + dir: Direction, + int_data: &[u8; AES_BLOCK_SIZE], + temp1: [u32; 4], + padding_mask: [u8; 16], + ) { + if dir == Direction::Decrypt { + //Handle special CCM partial block process. 
+ let mut intdata_o: [u32; 4] = [0; 4]; + for i in 0..intdata_o.len() { + intdata_o[i] = p.dout().read(); + } + let mut temp2 = [0; 4]; + temp2[0] = p.csgcmccmr(0).read(); + temp2[1] = p.csgcmccmr(1).read(); + temp2[2] = p.csgcmccmr(2).read(); + temp2[3] = p.csgcmccmr(3).read(); + p.cr().write(|w| w.set_algomode3(true)); + p.cr().write(|w| w.set_algomode0(1)); + p.cr().modify(|w| w.set_gcm_ccmph(3)); + // Header phase + p.cr().modify(|w| w.set_gcm_ccmph(1)); + let mut in_data: [u32; 4] = [0; 4]; + for i in 0..in_data.len() { + let mut mask_bytes: [u8; 4] = [0; 4]; + mask_bytes.copy_from_slice(&padding_mask[(i * 4)..(i * 4) + 4]); + let mask_word = u32::from_le_bytes(mask_bytes); + in_data[i] = intdata_o[i] & mask_word; + in_data[i] = in_data[i] ^ temp1[i] ^ temp2[i]; + } + } + } +} + +impl<'c> CipherSized for AesCcm<'c, { 128 / 8 }> {} +impl<'c> CipherSized for AesCcm<'c, { 192 / 8 }> {} +impl<'c> CipherSized for AesCcm<'c, { 256 / 8 }> {} +impl<'c, const KEY_SIZE: usize> CipherAuthenticated for AesCcm<'c, KEY_SIZE> {} /// Holds the state information for a cipher operation. /// Allows suspending/resuming of cipher operations. @@ -371,6 +537,7 @@ pub struct Context<'c, C: Cipher<'c> + CipherSized> { cipher: &'c C, dir: Direction, last_block_processed: bool, + header_processed: bool, aad_complete: bool, cr: u32, iv: [u32; 4], @@ -378,6 +545,8 @@ pub struct Context<'c, C: Cipher<'c> + CipherSized> { csgcm: [u32; 8], header_len: u64, payload_len: u64, + aad_buffer: [u8; 16], + aad_buffer_len: usize, } /// Selects whether the crypto processor operates in encryption or decryption mode. @@ -420,6 +589,9 @@ impl<'d, T: Instance> Cryp<'d, T> { payload_len: 0, cipher: cipher, phantom_data: PhantomData, + header_processed: false, + aad_buffer: [0; 16], + aad_buffer_len: 0, }; T::regs().cr().modify(|w| w.set_crypen(false)); @@ -492,16 +664,9 @@ impl<'d, T: Instance> Cryp<'d, T> { ) { self.load_context(ctx); - let last_block_remainder = aad.len() % C::BLOCK_SIZE; - // Perform checks for correctness. if ctx.aad_complete { - panic!("Cannot update AAD after calling 'update'!") - } - if !last_aad_block { - if last_block_remainder != 0 { - panic!("Input length must be a multiple of {} bytes.", C::BLOCK_SIZE); - } + panic!("Cannot update AAD after starting payload!") } ctx.header_len += aad.len() as u64; @@ -511,11 +676,49 @@ impl<'d, T: Instance> Cryp<'d, T> { T::regs().cr().modify(|w| w.set_gcm_ccmph(1)); T::regs().cr().modify(|w| w.set_crypen(true)); - // Load data into core, block by block. - let num_full_blocks = aad.len() / C::BLOCK_SIZE; - for block in 0..num_full_blocks { - let mut index = block * C::BLOCK_SIZE; - let end_index = index + C::BLOCK_SIZE; + // First write the header B1 block if not yet written. + if !ctx.header_processed { + ctx.header_processed = true; + let header = ctx.cipher.get_header_block(); + ctx.aad_buffer[0..header.len()].copy_from_slice(header); + ctx.aad_buffer_len += header.len(); + } + + // Fill the header block to make a full block. + let len_to_copy = min(aad.len(), C::BLOCK_SIZE - ctx.aad_buffer_len); + ctx.aad_buffer[ctx.aad_buffer_len..ctx.aad_buffer_len + len_to_copy].copy_from_slice(&aad[..len_to_copy]); + ctx.aad_buffer_len += len_to_copy; + ctx.aad_buffer[ctx.aad_buffer_len..].fill(0); + let mut aad_len_remaining = aad.len() - len_to_copy; + + if ctx.aad_buffer_len < C::BLOCK_SIZE { + // The buffer isn't full and this is the last buffer, so process it as is (already padded). 
+ if last_aad_block { + let mut index = 0; + let end_index = C::BLOCK_SIZE; + // Write block in + while index < end_index { + let mut in_word: [u8; 4] = [0; 4]; + in_word.copy_from_slice(&aad[index..index + 4]); + T::regs().din().write_value(u32::from_ne_bytes(in_word)); + index += 4; + } + // Block until input FIFO is empty. + while !T::regs().sr().read().ifem() {} + + // Switch to payload phase. + ctx.aad_complete = true; + T::regs().cr().modify(|w| w.set_crypen(false)); + T::regs().cr().modify(|w| w.set_gcm_ccmph(2)); + T::regs().cr().modify(|w| w.fflush()); + } else { + // Just return because we don't yet have a full block to process. + return; + } + } else { + // Load the full block from the buffer. + let mut index = 0; + let end_index = C::BLOCK_SIZE; // Write block in while index < end_index { let mut in_word: [u8; 4] = [0; 4]; @@ -527,20 +730,26 @@ impl<'d, T: Instance> Cryp<'d, T> { while !T::regs().sr().read().ifem() {} } - // Handle the final block, which is incomplete. - if last_block_remainder > 0 { - let mut last_block: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE]; - last_block[..last_block_remainder].copy_from_slice(&aad[aad.len() - last_block_remainder..aad.len()]); - let mut index = 0; - let end_index = C::BLOCK_SIZE; + // Handle a partial block that is passed in. + ctx.aad_buffer_len = 0; + let leftovers = aad_len_remaining % C::BLOCK_SIZE; + ctx.aad_buffer[..leftovers].copy_from_slice(&aad[aad.len() - leftovers..aad.len()]); + aad_len_remaining -= leftovers; + assert_eq!(aad_len_remaining % C::BLOCK_SIZE, 0); + + // Load full data blocks into core. + let num_full_blocks = aad_len_remaining / C::BLOCK_SIZE; + for _ in 0..num_full_blocks { + let mut index = len_to_copy; + let end_index = len_to_copy + C::BLOCK_SIZE; // Write block in while index < end_index { let mut in_word: [u8; 4] = [0; 4]; - in_word.copy_from_slice(&last_block[index..index + 4]); + in_word.copy_from_slice(&aad[index..index + 4]); T::regs().din().write_value(u32::from_ne_bytes(in_word)); index += 4; } - // Block until input FIFO is empty + // Block until input FIFO is empty. while !T::regs().sr().read().ifem() {} } @@ -630,7 +839,7 @@ impl<'d, T: Instance> Cryp<'d, T> { // Handle the final block, which is incomplete. if last_block_remainder > 0 { - ctx.cipher.pre_final_block(&T::regs()); + let temp1 = ctx.cipher.pre_final_block(&T::regs(), ctx.dir); let mut intermediate_data: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE]; let mut last_block: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE]; @@ -660,10 +869,15 @@ impl<'d, T: Instance> Cryp<'d, T> { output[output_len - last_block_remainder..output_len] .copy_from_slice(&intermediate_data[0..last_block_remainder]); - ctx.cipher.post_final_block(&T::regs(), ctx.dir, &intermediate_data); + let mut mask: [u8; 16] = [0; 16]; + mask[..last_block_remainder].fill(0xFF); + ctx.cipher + .post_final_block(&T::regs(), ctx.dir, &intermediate_data, temp1, mask); } ctx.payload_len += input.len() as u64; + + self.store_context(ctx); } /// This function only needs to be called for GCM, CCM, and GMAC modes to From 1e21b758f795b5cc8a2331aacbc2a9a39bb7a7fb Mon Sep 17 00:00:00 2001 From: Caleb Garrett <47389035+caleb-garrett@users.noreply.github.com> Date: Tue, 20 Feb 2024 14:27:37 -0500 Subject: [PATCH 08/23] Corrected GCM tag generation. 
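
The GCM tag is computed over the ciphertext, so when the last block is
partial, the bytes past the message length must be zeroed before the block
is fed back in during the final phase; the old code instead rewrote IVRR
and used write(), which clobbers the other CR fields (key size, data type,
direction), while modify() preserves them. A minimal host-side sketch of
the masking step (illustrative, not driver code):

    fn mask_partial_block(block: &mut [u8; 16], valid_len: usize) {
        let mut mask = [0u8; 16];
        mask[..valid_len].fill(0xFF);
        for (b, m) in block.iter_mut().zip(mask.iter()) {
            *b &= m; // bytes at and past valid_len become zero
        }
    }

    fn main() {
        let mut block = [0xAB; 16];
        mask_partial_block(&mut block, 5);
        assert_eq!(block[..5], [0xAB; 5]);
        assert_eq!(block[5..], [0u8; 11]);
    }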
--- embassy-stm32/src/cryp/mod.rs | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/embassy-stm32/src/cryp/mod.rs b/embassy-stm32/src/cryp/mod.rs index fe248def1..81446e39e 100644 --- a/embassy-stm32/src/cryp/mod.rs +++ b/embassy-stm32/src/cryp/mod.rs @@ -45,7 +45,7 @@ pub trait Cipher<'c> { &self, _p: &pac::cryp::Cryp, _dir: Direction, - _int_data: &[u8; AES_BLOCK_SIZE], + _int_data: &mut [u8; AES_BLOCK_SIZE], _temp1: [u32; 4], _padding_mask: [u8; 16], ) { @@ -236,16 +236,18 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesGcm<'c, KEY_SIZE> { &self, p: &pac::cryp::Cryp, dir: Direction, - int_data: &[u8; AES_BLOCK_SIZE], + int_data: &mut [u8; AES_BLOCK_SIZE], _temp1: [u32; 4], - _padding_mask: [u8; 16], + padding_mask: [u8; AES_BLOCK_SIZE], ) { if dir == Direction::Encrypt { //Handle special GCM partial block process. p.cr().modify(|w| w.set_crypen(false)); - p.cr().write(|w| w.set_algomode3(true)); - p.cr().write(|w| w.set_algomode0(0)); - p.init(1).ivrr().write_value(2); + p.cr().modify(|w| w.set_algomode3(true)); + p.cr().modify(|w| w.set_algomode0(0)); + for i in 0..AES_BLOCK_SIZE { + int_data[i] = int_data[i] & padding_mask[i]; + } p.cr().modify(|w| w.set_crypen(true)); p.cr().modify(|w| w.set_gcm_ccmph(3)); let mut index = 0; @@ -323,7 +325,7 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesGmac<'c, KEY_SIZE> { &self, p: &pac::cryp::Cryp, dir: Direction, - int_data: &[u8; AES_BLOCK_SIZE], + int_data: &mut [u8; AES_BLOCK_SIZE], _temp1: [u32; 4], _padding_mask: [u8; 16], ) { @@ -493,7 +495,7 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesCcm<'c, KEY_SIZE> { &self, p: &pac::cryp::Cryp, dir: Direction, - int_data: &[u8; AES_BLOCK_SIZE], + int_data: &mut [u8; AES_BLOCK_SIZE], temp1: [u32; 4], padding_mask: [u8; 16], ) { @@ -872,7 +874,7 @@ impl<'d, T: Instance> Cryp<'d, T> { let mut mask: [u8; 16] = [0; 16]; mask[..last_block_remainder].fill(0xFF); ctx.cipher - .post_final_block(&T::regs(), ctx.dir, &intermediate_data, temp1, mask); + .post_final_block(&T::regs(), ctx.dir, &mut intermediate_data, temp1, mask); } ctx.payload_len += input.len() as u64; From f64a62149e423f6fdb643f7343d971eedc4a3a12 Mon Sep 17 00:00:00 2001 From: Caleb Garrett <47389035+caleb-garrett@users.noreply.github.com> Date: Tue, 20 Feb 2024 15:26:31 -0500 Subject: [PATCH 09/23] Corrected CCM partial block ops. --- embassy-stm32/src/cryp/mod.rs | 46 ++++++++++++++++++----------------- 1 file changed, 24 insertions(+), 22 deletions(-) diff --git a/embassy-stm32/src/cryp/mod.rs b/embassy-stm32/src/cryp/mod.rs index 81446e39e..634c85883 100644 --- a/embassy-stm32/src/cryp/mod.rs +++ b/embassy-stm32/src/cryp/mod.rs @@ -327,14 +327,16 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesGmac<'c, KEY_SIZE> { dir: Direction, int_data: &mut [u8; AES_BLOCK_SIZE], _temp1: [u32; 4], - _padding_mask: [u8; 16], + padding_mask: [u8; AES_BLOCK_SIZE], ) { if dir == Direction::Encrypt { //Handle special GCM partial block process. 
p.cr().modify(|w| w.set_crypen(false)); - p.cr().write(|w| w.set_algomode3(true)); - p.cr().write(|w| w.set_algomode0(0)); - p.init(1).ivrr().write_value(2); + p.cr().modify(|w| w.set_algomode3(true)); + p.cr().modify(|w| w.set_algomode0(0)); + for i in 0..AES_BLOCK_SIZE { + int_data[i] = int_data[i] & padding_mask[i]; + } p.cr().modify(|w| w.set_crypen(true)); p.cr().modify(|w| w.set_gcm_ccmph(3)); let mut index = 0; @@ -479,10 +481,10 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesCcm<'c, KEY_SIZE> { if dir == Direction::Decrypt { p.cr().modify(|w| w.set_crypen(false)); let iv1temp = p.init(1).ivrr().read(); - temp1[0] = p.csgcmccmr(0).read(); - temp1[1] = p.csgcmccmr(1).read(); - temp1[2] = p.csgcmccmr(2).read(); - temp1[3] = p.csgcmccmr(3).read(); + temp1[0] = p.csgcmccmr(0).read().swap_bytes(); + temp1[1] = p.csgcmccmr(1).read().swap_bytes(); + temp1[2] = p.csgcmccmr(2).read().swap_bytes(); + temp1[3] = p.csgcmccmr(3).read().swap_bytes(); p.init(1).ivrr().write_value(iv1temp); p.cr().modify(|w| w.set_algomode3(false)); p.cr().modify(|w| w.set_algomode0(6)); @@ -501,27 +503,27 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesCcm<'c, KEY_SIZE> { ) { if dir == Direction::Decrypt { //Handle special CCM partial block process. - let mut intdata_o: [u32; 4] = [0; 4]; - for i in 0..intdata_o.len() { - intdata_o[i] = p.dout().read(); - } let mut temp2 = [0; 4]; - temp2[0] = p.csgcmccmr(0).read(); - temp2[1] = p.csgcmccmr(1).read(); - temp2[2] = p.csgcmccmr(2).read(); - temp2[3] = p.csgcmccmr(3).read(); - p.cr().write(|w| w.set_algomode3(true)); - p.cr().write(|w| w.set_algomode0(1)); + temp2[0] = p.csgcmccmr(0).read().swap_bytes(); + temp2[1] = p.csgcmccmr(1).read().swap_bytes(); + temp2[2] = p.csgcmccmr(2).read().swap_bytes(); + temp2[3] = p.csgcmccmr(3).read().swap_bytes(); + p.cr().modify(|w| w.set_algomode3(true)); + p.cr().modify(|w| w.set_algomode0(1)); p.cr().modify(|w| w.set_gcm_ccmph(3)); // Header phase p.cr().modify(|w| w.set_gcm_ccmph(1)); + for i in 0..AES_BLOCK_SIZE { + int_data[i] = int_data[i] & padding_mask[i]; + } let mut in_data: [u32; 4] = [0; 4]; for i in 0..in_data.len() { - let mut mask_bytes: [u8; 4] = [0; 4]; - mask_bytes.copy_from_slice(&padding_mask[(i * 4)..(i * 4) + 4]); - let mask_word = u32::from_le_bytes(mask_bytes); - in_data[i] = intdata_o[i] & mask_word; + let mut int_bytes: [u8; 4] = [0; 4]; + int_bytes.copy_from_slice(&int_data[(i * 4)..(i * 4) + 4]); + let int_word = u32::from_le_bytes(int_bytes); + in_data[i] = int_word; in_data[i] = in_data[i] ^ temp1[i] ^ temp2[i]; + p.din().write_value(in_data[i]); } } } From 14c2c28e068d6e506c372611800e6dded8d8f440 Mon Sep 17 00:00:00 2001 From: Caleb Garrett <47389035+caleb-garrett@users.noreply.github.com> Date: Tue, 20 Feb 2024 18:05:35 -0500 Subject: [PATCH 10/23] Corrected additional associated data operation. 
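
When the internal AAD buffer held a complete 16-byte block, the flush loop
indexed the caller's aad slice instead of ctx.aad_buffer. Once the CCM
header-length bytes are prepended, the two no longer line up, so misaligned
words reached DIN. A host-side sketch of the intended accumulate-then-flush
pattern (illustrative types; the driver writes each full block to DIN):

    struct Accumulator {
        buf: [u8; 16],
        len: usize,
    }

    impl Accumulator {
        fn push(&mut self, mut aad: &[u8], mut flush: impl FnMut(&[u8; 16])) {
            while !aad.is_empty() {
                let take = (16 - self.len).min(aad.len());
                self.buf[self.len..self.len + take].copy_from_slice(&aad[..take]);
                self.len += take;
                aad = &aad[take..];
                if self.len == 16 {
                    flush(&self.buf); // read the buffer, not the caller's slice
                    self.len = 0;
                }
            }
        }
    }

    fn main() {
        // Two CCM header-length bytes already buffered, then 16 bytes of AAD:
        let mut acc = Accumulator { buf: [0; 16], len: 2 };
        let mut blocks = 0;
        acc.push(&[0u8; 16], |_| blocks += 1);
        assert_eq!((blocks, acc.len), (1, 2)); // one full block; 2 bytes pending
    }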
--- embassy-stm32/src/cryp/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/embassy-stm32/src/cryp/mod.rs b/embassy-stm32/src/cryp/mod.rs index 634c85883..d53252a6a 100644 --- a/embassy-stm32/src/cryp/mod.rs +++ b/embassy-stm32/src/cryp/mod.rs @@ -703,7 +703,7 @@ impl<'d, T: Instance> Cryp<'d, T> { // Write block in while index < end_index { let mut in_word: [u8; 4] = [0; 4]; - in_word.copy_from_slice(&aad[index..index + 4]); + in_word.copy_from_slice(&ctx.aad_buffer[index..index + 4]); T::regs().din().write_value(u32::from_ne_bytes(in_word)); index += 4; } From 29d8b459568b53f1e281d0914b5c897206c9bd4b Mon Sep 17 00:00:00 2001 From: Caleb Garrett <47389035+caleb-garrett@users.noreply.github.com> Date: Wed, 21 Feb 2024 12:07:53 -0500 Subject: [PATCH 11/23] Add DES and TDES support. Support variable tag sizes. --- embassy-stm32/src/cryp/mod.rs | 237 +++++++++++++++++++++++++++++----- 1 file changed, 203 insertions(+), 34 deletions(-) diff --git a/embassy-stm32/src/cryp/mod.rs b/embassy-stm32/src/cryp/mod.rs index d53252a6a..a4f1e42dc 100644 --- a/embassy-stm32/src/cryp/mod.rs +++ b/embassy-stm32/src/cryp/mod.rs @@ -60,8 +60,152 @@ pub trait Cipher<'c> { /// This trait enables restriction of ciphers to specific key sizes. pub trait CipherSized {} +/// This trait enables restriction of initialization vectors to sizes compatible with a cipher mode. +pub trait IVSized {} + /// This trait enables restriction of a header phase to authenticated ciphers only. -pub trait CipherAuthenticated {} +pub trait CipherAuthenticated<const TAG_SIZE: usize> { + /// Defines the authentication tag size. + const TAG_SIZE: usize = TAG_SIZE; +} + +/// TDES-ECB Cipher Mode +pub struct TdesEcb<'c, const KEY_SIZE: usize> { + iv: &'c [u8; 0], + key: &'c [u8; KEY_SIZE], +} + +impl<'c, const KEY_SIZE: usize> TdesEcb<'c, KEY_SIZE> { + /// Constructs a new TDES-ECB cipher for a cryptographic operation. + pub fn new(key: &'c [u8; KEY_SIZE]) -> Self { + return Self { key: key, iv: &[0; 0] }; + } +} + +impl<'c, const KEY_SIZE: usize> Cipher<'c> for TdesEcb<'c, KEY_SIZE> { + const BLOCK_SIZE: usize = DES_BLOCK_SIZE; + const REQUIRES_PADDING: bool = true; + + fn key(&self) -> &'c [u8] { + self.key + } + + fn iv(&self) -> &'c [u8] { + self.iv + } + + fn set_algomode(&self, p: &pac::cryp::Cryp) { + p.cr().modify(|w| w.set_algomode0(0)); + p.cr().modify(|w| w.set_algomode3(false)); + } +} + +impl<'c> CipherSized for TdesEcb<'c, { 112 / 8 }> {} +impl<'c> CipherSized for TdesEcb<'c, { 168 / 8 }> {} +impl<'c, const KEY_SIZE: usize> IVSized for TdesEcb<'c, KEY_SIZE> {} + +/// TDES-CBC Cipher Mode +pub struct TdesCbc<'c, const KEY_SIZE: usize> { + iv: &'c [u8; 8], + key: &'c [u8; KEY_SIZE], +} + +impl<'c, const KEY_SIZE: usize> TdesCbc<'c, KEY_SIZE> { + /// Constructs a new TDES-CBC cipher for a cryptographic operation.
+ pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; 8]) -> Self { + return Self { key: key, iv: iv }; + } +} + +impl<'c, const KEY_SIZE: usize> Cipher<'c> for TdesCbc<'c, KEY_SIZE> { + const BLOCK_SIZE: usize = DES_BLOCK_SIZE; + const REQUIRES_PADDING: bool = true; + + fn key(&self) -> &'c [u8] { + self.key + } + + fn iv(&self) -> &'c [u8] { + self.iv + } + + fn set_algomode(&self, p: &pac::cryp::Cryp) { + p.cr().modify(|w| w.set_algomode0(1)); + p.cr().modify(|w| w.set_algomode3(false)); + } +} + +impl<'c> CipherSized for TdesCbc<'c, { 112 / 8 }> {} +impl<'c> CipherSized for TdesCbc<'c, { 168 / 8 }> {} +impl<'c, const KEY_SIZE: usize> IVSized for TdesCbc<'c, KEY_SIZE> {} + +/// DES-ECB Cipher Mode +pub struct DesEcb<'c, const KEY_SIZE: usize> { + iv: &'c [u8; 0], + key: &'c [u8; KEY_SIZE], +} + +impl<'c, const KEY_SIZE: usize> DesEcb<'c, KEY_SIZE> { + /// Constructs a new DES-ECB cipher for a cryptographic operation. + pub fn new(key: &'c [u8; KEY_SIZE]) -> Self { + return Self { key: key, iv: &[0; 0] }; + } +} + +impl<'c, const KEY_SIZE: usize> Cipher<'c> for DesEcb<'c, KEY_SIZE> { + const BLOCK_SIZE: usize = DES_BLOCK_SIZE; + const REQUIRES_PADDING: bool = true; + + fn key(&self) -> &'c [u8] { + self.key + } + + fn iv(&self) -> &'c [u8] { + self.iv + } + + fn set_algomode(&self, p: &pac::cryp::Cryp) { + p.cr().modify(|w| w.set_algomode0(2)); + p.cr().modify(|w| w.set_algomode3(false)); + } +} + +impl<'c> CipherSized for DesEcb<'c, { 56 / 8 }> {} +impl<'c, const KEY_SIZE: usize> IVSized for DesEcb<'c, KEY_SIZE> {} + +/// DES-CBC Cipher Mode +pub struct DesCbc<'c, const KEY_SIZE: usize> { + iv: &'c [u8; 8], + key: &'c [u8; KEY_SIZE], +} + +impl<'c, const KEY_SIZE: usize> DesCbc<'c, KEY_SIZE> { + /// Constructs a new DES-CBC cipher for a cryptographic operation.
+ pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; 8]) -> Self { + return Self { key: key, iv: iv }; + } +} + +impl<'c, const KEY_SIZE: usize> Cipher<'c> for DesCbc<'c, KEY_SIZE> { + const BLOCK_SIZE: usize = DES_BLOCK_SIZE; + const REQUIRES_PADDING: bool = true; + + fn key(&self) -> &'c [u8] { + self.key + } + + fn iv(&self) -> &'c [u8] { + self.iv + } + + fn set_algomode(&self, p: &pac::cryp::Cryp) { + p.cr().modify(|w| w.set_algomode0(3)); + p.cr().modify(|w| w.set_algomode3(false)); + } +} + +impl<'c> CipherSized for DesCbc<'c, { 56 / 8 }> {} +impl<'c, const KEY_SIZE: usize> IVSized for DesCbc<'c, KEY_SIZE> {} /// AES-ECB Cipher Mode pub struct AesEcb<'c, const KEY_SIZE: usize> { @@ -96,7 +240,7 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesEcb<'c, KEY_SIZE> { } fn set_algomode(&self, p: &pac::cryp::Cryp) { - p.cr().modify(|w| w.set_algomode0(4)); + p.cr().modify(|w| w.set_algomode0(2)); p.cr().modify(|w| w.set_algomode3(false)); } } @@ -104,6 +248,7 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesEcb<'c, KEY_SIZE> { impl<'c> CipherSized for AesEcb<'c, { 128 / 8 }> {} impl<'c> CipherSized for AesEcb<'c, { 192 / 8 }> {} impl<'c> CipherSized for AesEcb<'c, { 256 / 8 }> {} +impl<'c, const KEY_SIZE: usize> IVSized for AesEcb<'c, KEY_SIZE> {} /// AES-CBC Cipher Mode pub struct AesCbc<'c, const KEY_SIZE: usize> { @@ -146,6 +291,7 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesCbc<'c, KEY_SIZE> { impl<'c> CipherSized for AesCbc<'c, { 128 / 8 }> {} impl<'c> CipherSized for AesCbc<'c, { 192 / 8 }> {} impl<'c> CipherSized for AesCbc<'c, { 256 / 8 }> {} +impl<'c, const KEY_SIZE: usize> IVSized for AesCbc<'c, KEY_SIZE> {} /// AES-CTR Cipher Mode pub struct AesCtr<'c, const KEY_SIZE: usize> { @@ -180,6 +326,7 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesCtr<'c, KEY_SIZE> { impl<'c> CipherSized for AesCtr<'c, { 128 / 8 }> {} impl<'c> CipherSized for AesCtr<'c, { 192 / 8 }> {} impl<'c> CipherSized for AesCtr<'c, { 256 / 8 }> {} +impl<'c, const KEY_SIZE: usize> IVSized for AesCtr<'c, KEY_SIZE> {} ///AES-GCM Cipher Mode pub struct AesGcm<'c, const KEY_SIZE: usize> { @@ -268,7 +415,8 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesGcm<'c, KEY_SIZE> { impl<'c> CipherSized for AesGcm<'c, { 128 / 8 }> {} impl<'c> CipherSized for AesGcm<'c, { 192 / 8 }> {} impl<'c> CipherSized for AesGcm<'c, { 256 / 8 }> {} -impl<'c, const KEY_SIZE: usize> CipherAuthenticated for AesGcm<'c, KEY_SIZE> {} +impl<'c, const KEY_SIZE: usize> CipherAuthenticated<16> for AesGcm<'c, KEY_SIZE> {} +impl<'c, const KEY_SIZE: usize> IVSized for AesGcm<'c, KEY_SIZE> {} /// AES-GMAC Cipher Mode pub struct AesGmac<'c, const KEY_SIZE: usize> { @@ -357,9 +505,11 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesGmac<'c, KEY_SIZE> { impl<'c> CipherSized for AesGmac<'c, { 128 / 8 }> {} impl<'c> CipherSized for AesGmac<'c, { 192 / 8 }> {} impl<'c> CipherSized for AesGmac<'c, { 256 / 8 }> {} -impl<'c, const KEY_SIZE: usize> CipherAuthenticated for AesGmac<'c, KEY_SIZE> {} +impl<'c, const KEY_SIZE: usize> CipherAuthenticated<16> for AesGmac<'c, KEY_SIZE> {} +impl<'c, const KEY_SIZE: usize> IVSized for AesGmac<'c, KEY_SIZE> {} -pub struct AesCcm<'c, const KEY_SIZE: usize> { +/// AES-CCM Cipher Mode +pub struct AesCcm<'c, const KEY_SIZE: usize, const TAG_SIZE: usize, const IV_SIZE: usize> { key: &'c [u8; KEY_SIZE], aad_header: [u8; 6], aad_header_len: usize, @@ -367,18 +517,9 @@ pub struct AesCcm<'c, const KEY_SIZE: usize> { ctr: [u8; 16], } -impl<'c, const KEY_SIZE: usize> AesCcm<'c, KEY_SIZE> { - pub fn new(key: 
&'c [u8; KEY_SIZE], iv: &'c [u8], aad_len: usize, payload_len: usize, tag_len: u8) -> Self { - if (iv.len()) > 13 || (iv.len() < 7) { - panic!("CCM IV length must be 7-13 bytes."); - } - if (tag_len < 4) || (tag_len > 16) { - panic!("Tag length must be between 4 and 16 bytes."); - } - if tag_len % 2 > 0 { - panic!("Tag length must be a multiple of 2 bytes."); - } - +impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize, const IV_SIZE: usize> AesCcm<'c, KEY_SIZE, TAG_SIZE, IV_SIZE> { + /// Constructs a new AES-CCM cipher for a cryptographic operation. + pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; IV_SIZE], aad_len: usize, payload_len: usize) -> Self { let mut aad_header: [u8; 6] = [0; 6]; let mut aad_header_len = 0; let mut block0: [u8; 16] = [0; 16]; @@ -408,7 +549,7 @@ impl<'c, const KEY_SIZE: usize> AesCcm<'c, KEY_SIZE> { if total_aad_len_padded > 0 { block0[0] = 0x40; } - block0[0] |= (((tag_len - 2) >> 1) & 0x07) << 3; + block0[0] |= ((((TAG_SIZE as u8) - 2) >> 1) & 0x07) << 3; block0[0] |= ((15 - (iv.len() as u8)) - 1) & 0x07; block0[1..1 + iv.len()].copy_from_slice(iv); let payload_len_bytes: [u8; 4] = payload_len.to_be_bytes(); @@ -439,7 +580,9 @@ impl<'c, const KEY_SIZE: usize> AesCcm<'c, KEY_SIZE> { } } -impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesCcm<'c, KEY_SIZE> { +impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize, const IV_SIZE: usize> Cipher<'c> + for AesCcm<'c, KEY_SIZE, TAG_SIZE, IV_SIZE> +{ const BLOCK_SIZE: usize = AES_BLOCK_SIZE; fn key(&self) -> &'c [u8] { @@ -529,10 +672,23 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesCcm<'c, KEY_SIZE> { } } -impl<'c> CipherSized for AesCcm<'c, { 128 / 8 }> {} -impl<'c> CipherSized for AesCcm<'c, { 192 / 8 }> {} -impl<'c> CipherSized for AesCcm<'c, { 256 / 8 }> {} -impl<'c, const KEY_SIZE: usize> CipherAuthenticated for AesCcm<'c, KEY_SIZE> {} +impl<'c, const TAG_SIZE: usize, const IV_SIZE: usize> CipherSized for AesCcm<'c, { 128 / 8 }, TAG_SIZE, IV_SIZE> {} +impl<'c, const TAG_SIZE: usize, const IV_SIZE: usize> CipherSized for AesCcm<'c, { 192 / 8 }, TAG_SIZE, IV_SIZE> {} +impl<'c, const TAG_SIZE: usize, const IV_SIZE: usize> CipherSized for AesCcm<'c, { 256 / 8 }, TAG_SIZE, IV_SIZE> {} +impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<4> for AesCcm<'c, KEY_SIZE, 4, IV_SIZE> {} +impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<6> for AesCcm<'c, KEY_SIZE, 6, IV_SIZE> {} +impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<8> for AesCcm<'c, KEY_SIZE, 8, IV_SIZE> {} +impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<10> for AesCcm<'c, KEY_SIZE, 10, IV_SIZE> {} +impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<12> for AesCcm<'c, KEY_SIZE, 12, IV_SIZE> {} +impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<14> for AesCcm<'c, KEY_SIZE, 14, IV_SIZE> {} +impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<16> for AesCcm<'c, KEY_SIZE, 16, IV_SIZE> {} +impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 7> {} +impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 8> {} +impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 9> {} +impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 10> {} +impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, 
TAG_SIZE, 11> {} +impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 12> {} +impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 13> {} /// Holds the state information for a cipher operation. /// Allows suspending/resuming of cipher operations. @@ -580,7 +736,7 @@ impl<'d, T: Instance> Cryp<'d, T> { /// Key size must be 128, 192, or 256 bits. /// Initialization vector must only be supplied if necessary. /// Panics if there is any mismatch in parameters, such as an incorrect IV length or invalid mode. - pub fn start<'c, C: Cipher<'c> + CipherSized>(&self, cipher: &'c C, dir: Direction) -> Context<'c, C> { + pub fn start<'c, C: Cipher<'c> + CipherSized + IVSized>(&self, cipher: &'c C, dir: Direction) -> Context<'c, C> { let mut ctx: Context<'c, C> = Context { dir, last_block_processed: false, @@ -660,7 +816,11 @@ impl<'d, T: Instance> Cryp<'d, T> { /// All AAD must be supplied to this function prior to starting the payload phase with `payload_blocking`. /// The AAD must be supplied in multiples of the block size (128 bits), except when supplying the last block. /// When supplying the last block of AAD, `last_aad_block` must be `true`. - pub fn aad_blocking<'c, C: Cipher<'c> + CipherSized + CipherAuthenticated>( + pub fn aad_blocking< + 'c, + const TAG_SIZE: usize, + C: Cipher<'c> + CipherSized + IVSized + CipherAuthenticated<TAG_SIZE>, + >( &self, ctx: &mut Context<'c, C>, aad: &[u8], @@ -776,7 +936,7 @@ impl<'d, T: Instance> Cryp<'d, T> { /// Data must be a multiple of block size (128-bits for AES, 64-bits for DES) for CBC and ECB modes. /// Padding or ciphertext stealing must be managed by the application for these modes. /// Data must also be a multiple of block size unless `last_block` is `true`. - pub fn payload_blocking<'c, C: Cipher<'c> + CipherSized>( + pub fn payload_blocking<'c, C: Cipher<'c> + CipherSized + IVSized>( &self, ctx: &mut Context<'c, C>, input: &[u8], @@ -886,11 +1046,14 @@ impl<'d, T: Instance> Cryp<'d, T> { /// This function only needs to be called for GCM, CCM, and GMAC modes to /// generate an authentication tag. 
- pub fn finish_blocking<'c, C: Cipher<'c> + CipherSized + CipherAuthenticated>( + pub fn finish_blocking< + 'c, + const TAG_SIZE: usize, + C: Cipher<'c> + CipherSized + IVSized + CipherAuthenticated<TAG_SIZE>, + >( &self, mut ctx: Context<'c, C>, - tag: &mut [u8; 16], - ) { + ) -> [u8; TAG_SIZE] { self.load_context(&mut ctx); T::regs().cr().modify(|w| w.set_crypen(false)); @@ -909,12 +1072,17 @@ impl<'d, T: Instance> Cryp<'d, T> { while !T::regs().sr().read().ofne() {} - tag[0..4].copy_from_slice(T::regs().dout().read().to_ne_bytes().as_slice()); - tag[4..8].copy_from_slice(T::regs().dout().read().to_ne_bytes().as_slice()); - tag[8..12].copy_from_slice(T::regs().dout().read().to_ne_bytes().as_slice()); - tag[12..16].copy_from_slice(T::regs().dout().read().to_ne_bytes().as_slice()); + let mut full_tag: [u8; 16] = [0; 16]; + full_tag[0..4].copy_from_slice(T::regs().dout().read().to_ne_bytes().as_slice()); + full_tag[4..8].copy_from_slice(T::regs().dout().read().to_ne_bytes().as_slice()); + full_tag[8..12].copy_from_slice(T::regs().dout().read().to_ne_bytes().as_slice()); + full_tag[12..16].copy_from_slice(T::regs().dout().read().to_ne_bytes().as_slice()); + let mut tag: [u8; TAG_SIZE] = [0; TAG_SIZE]; + tag.copy_from_slice(&full_tag[0..TAG_SIZE]); T::regs().cr().modify(|w| w.set_crypen(false)); + + tag } fn load_key(&self, key: &[u8]) { @@ -949,7 +1117,8 @@ impl<'d, T: Instance> Cryp<'d, T> { keyword.copy_from_slice(&key[keyidx..keyidx + 4]); keyidx += 4; T::regs().key(3).klr().write_value(u32::from_be_bytes(keyword)); - keyword.copy_from_slice(&key[keyidx..keyidx + 4]); + keyword = [0; 4]; + keyword[0..key.len() - keyidx].copy_from_slice(&key[keyidx..key.len()]); T::regs().key(3).krr().write_value(u32::from_be_bytes(keyword)); } From cbca3a5c9f8f46582287b88db173ad6876686141 Mon Sep 17 00:00:00 2001 From: Caleb Garrett <47389035+caleb-garrett@users.noreply.github.com> Date: Wed, 21 Feb 2024 12:39:10 -0500 Subject: [PATCH 12/23] Support v1 and v2 cryp variants. --- embassy-stm32/src/cryp/mod.rs | 156 +++++++++++++++++++++++++++++----- 1 file changed, 133 insertions(+), 23 deletions(-) diff --git a/embassy-stm32/src/cryp/mod.rs b/embassy-stm32/src/cryp/mod.rs index a4f1e42dc..965e4a35d 100644 --- a/embassy-stm32/src/cryp/mod.rs +++ b/embassy-stm32/src/cryp/mod.rs @@ -1,4 +1,5 @@ //! 
Crypto Accelerator (CRYP) +#[cfg(cryp_v2)] use core::cmp::min; use core::marker::PhantomData; use embassy_hal_internal::{into_ref, PeripheralRef}; @@ -95,8 +96,15 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for TdesEcb<'c, KEY_SIZE> { } fn set_algomode(&self, p: &pac::cryp::Cryp) { - p.cr().modify(|w| w.set_algomode0(0)); - p.cr().modify(|w| w.set_algomode3(false)); + #[cfg(cryp_v1)] + { + p.cr().modify(|w| w.set_algomode(0)); + } + #[cfg(cryp_v2)] + { + p.cr().modify(|w| w.set_algomode0(0)); + p.cr().modify(|w| w.set_algomode3(false)); + } } } @@ -130,8 +138,15 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for TdesCbc<'c, KEY_SIZE> { } fn set_algomode(&self, p: &pac::cryp::Cryp) { - p.cr().modify(|w| w.set_algomode0(1)); - p.cr().modify(|w| w.set_algomode3(false)); + #[cfg(cryp_v1)] + { + p.cr().modify(|w| w.set_algomode(1)); + } + #[cfg(cryp_v2)] + { + p.cr().modify(|w| w.set_algomode0(1)); + p.cr().modify(|w| w.set_algomode3(false)); + } } } @@ -165,8 +180,15 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for DesEcb<'c, KEY_SIZE> { } fn set_algomode(&self, p: &pac::cryp::Cryp) { - p.cr().modify(|w| w.set_algomode0(2)); - p.cr().modify(|w| w.set_algomode3(false)); + #[cfg(cryp_v1)] + { + p.cr().modify(|w| w.set_algomode(2)); + } + #[cfg(cryp_v2)] + { + p.cr().modify(|w| w.set_algomode0(2)); + p.cr().modify(|w| w.set_algomode3(false)); + } } } @@ -199,8 +221,15 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for DesCbc<'c, KEY_SIZE> { } fn set_algomode(&self, p: &pac::cryp::Cryp) { - p.cr().modify(|w| w.set_algomode0(3)); - p.cr().modify(|w| w.set_algomode3(false)); + #[cfg(cryp_v1)] + { + p.cr().modify(|w| w.set_algomode(3)); + } + #[cfg(cryp_v2)] + { + p.cr().modify(|w| w.set_algomode0(3)); + p.cr().modify(|w| w.set_algomode3(false)); + } } } @@ -233,15 +262,29 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesEcb<'c, KEY_SIZE> { } fn prepare_key(&self, p: &pac::cryp::Cryp) { - p.cr().modify(|w| w.set_algomode0(7)); - p.cr().modify(|w| w.set_algomode3(false)); + #[cfg(cryp_v1)] + { + p.cr().modify(|w| w.set_algomode(7)); + } + #[cfg(cryp_v2)] + { + p.cr().modify(|w| w.set_algomode0(7)); + p.cr().modify(|w| w.set_algomode3(false)); + } p.cr().modify(|w| w.set_crypen(true)); while p.sr().read().busy() {} } fn set_algomode(&self, p: &pac::cryp::Cryp) { - p.cr().modify(|w| w.set_algomode0(2)); - p.cr().modify(|w| w.set_algomode3(false)); + #[cfg(cryp_v1)] + { + p.cr().modify(|w| w.set_algomode(2)); + } + #[cfg(cryp_v2)] + { + p.cr().modify(|w| w.set_algomode0(2)); + p.cr().modify(|w| w.set_algomode3(false)); + } } } @@ -276,15 +319,29 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesCbc<'c, KEY_SIZE> { } fn prepare_key(&self, p: &pac::cryp::Cryp) { - p.cr().modify(|w| w.set_algomode0(7)); - p.cr().modify(|w| w.set_algomode3(false)); + #[cfg(cryp_v1)] + { + p.cr().modify(|w| w.set_algomode(7)); + } + #[cfg(cryp_v2)] + { + p.cr().modify(|w| w.set_algomode0(7)); + p.cr().modify(|w| w.set_algomode3(false)); + } p.cr().modify(|w| w.set_crypen(true)); while p.sr().read().busy() {} } fn set_algomode(&self, p: &pac::cryp::Cryp) { - p.cr().modify(|w| w.set_algomode0(5)); - p.cr().modify(|w| w.set_algomode3(false)); + #[cfg(cryp_v1)] + { + p.cr().modify(|w| w.set_algomode(5)); + } + #[cfg(cryp_v2)] + { + p.cr().modify(|w| w.set_algomode0(5)); + p.cr().modify(|w| w.set_algomode3(false)); + } } } @@ -318,8 +375,15 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesCtr<'c, KEY_SIZE> { } fn set_algomode(&self, p: &pac::cryp::Cryp) { - p.cr().modify(|w| w.set_algomode0(6)); - p.cr().modify(|w| 
w.set_algomode3(false)); + #[cfg(cryp_v1)] + { + p.cr().modify(|w| w.set_algomode(6)); + } + #[cfg(cryp_v2)] + { + p.cr().modify(|w| w.set_algomode0(6)); + p.cr().modify(|w| w.set_algomode3(false)); + } } } @@ -328,12 +392,14 @@ impl<'c> CipherSized for AesCtr<'c, { 192 / 8 }> {} impl<'c> CipherSized for AesCtr<'c, { 256 / 8 }> {} impl<'c, const KEY_SIZE: usize> IVSized for AesCtr<'c, KEY_SIZE> {} +#[cfg(cryp_v2)] ///AES-GCM Cipher Mode pub struct AesGcm<'c, const KEY_SIZE: usize> { iv: [u8; 16], key: &'c [u8; KEY_SIZE], } +#[cfg(cryp_v2)] impl<'c, const KEY_SIZE: usize> AesGcm<'c, KEY_SIZE> { /// Constucts a new AES-GCM cipher for a cryptographic operation. pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; 12]) -> Self { @@ -344,6 +410,7 @@ impl<'c, const KEY_SIZE: usize> AesGcm<'c, KEY_SIZE> { } } +#[cfg(cryp_v2)] impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesGcm<'c, KEY_SIZE> { const BLOCK_SIZE: usize = AES_BLOCK_SIZE; @@ -412,18 +479,25 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesGcm<'c, KEY_SIZE> { } } +#[cfg(cryp_v2)] impl<'c> CipherSized for AesGcm<'c, { 128 / 8 }> {} +#[cfg(cryp_v2)] impl<'c> CipherSized for AesGcm<'c, { 192 / 8 }> {} +#[cfg(cryp_v2)] impl<'c> CipherSized for AesGcm<'c, { 256 / 8 }> {} +#[cfg(cryp_v2)] impl<'c, const KEY_SIZE: usize> CipherAuthenticated<16> for AesGcm<'c, KEY_SIZE> {} +#[cfg(cryp_v2)] impl<'c, const KEY_SIZE: usize> IVSized for AesGcm<'c, KEY_SIZE> {} +#[cfg(cryp_v2)] /// AES-GMAC Cipher Mode pub struct AesGmac<'c, const KEY_SIZE: usize> { iv: [u8; 16], key: &'c [u8; KEY_SIZE], } +#[cfg(cryp_v2)] impl<'c, const KEY_SIZE: usize> AesGmac<'c, KEY_SIZE> { /// Constructs a new AES-GMAC cipher for a cryptographic operation. pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; 12]) -> Self { @@ -434,6 +508,7 @@ impl<'c, const KEY_SIZE: usize> AesGmac<'c, KEY_SIZE> { } } +#[cfg(cryp_v2)] impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesGmac<'c, KEY_SIZE> { const BLOCK_SIZE: usize = AES_BLOCK_SIZE; @@ -502,12 +577,18 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesGmac<'c, KEY_SIZE> { } } +#[cfg(cryp_v2)] impl<'c> CipherSized for AesGmac<'c, { 128 / 8 }> {} +#[cfg(cryp_v2)] impl<'c> CipherSized for AesGmac<'c, { 192 / 8 }> {} +#[cfg(cryp_v2)] impl<'c> CipherSized for AesGmac<'c, { 256 / 8 }> {} +#[cfg(cryp_v2)] impl<'c, const KEY_SIZE: usize> CipherAuthenticated<16> for AesGmac<'c, KEY_SIZE> {} +#[cfg(cryp_v2)] impl<'c, const KEY_SIZE: usize> IVSized for AesGmac<'c, KEY_SIZE> {} +#[cfg(cryp_v2)] /// AES-CCM Cipher Mode pub struct AesCcm<'c, const KEY_SIZE: usize, const TAG_SIZE: usize, const IV_SIZE: usize> { key: &'c [u8; KEY_SIZE], @@ -517,6 +598,7 @@ pub struct AesCcm<'c, const KEY_SIZE: usize, const TAG_SIZE: usize, const IV_SIZ ctr: [u8; 16], } +#[cfg(cryp_v2)] impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize, const IV_SIZE: usize> AesCcm<'c, KEY_SIZE, TAG_SIZE, IV_SIZE> { /// Constructs a new AES-CCM cipher for a cryptographic operation. 
pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; IV_SIZE], aad_len: usize, payload_len: usize) -> Self { @@ -580,6 +662,7 @@ impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize, const IV_SIZE: usize> Aes } } +#[cfg(cryp_v2)] impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize, const IV_SIZE: usize> Cipher<'c> for AesCcm<'c, KEY_SIZE, TAG_SIZE, IV_SIZE> { @@ -672,24 +755,42 @@ impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize, const IV_SIZE: usize> Cip } } +#[cfg(cryp_v2)] impl<'c, const TAG_SIZE: usize, const IV_SIZE: usize> CipherSized for AesCcm<'c, { 128 / 8 }, TAG_SIZE, IV_SIZE> {} +#[cfg(cryp_v2)] impl<'c, const TAG_SIZE: usize, const IV_SIZE: usize> CipherSized for AesCcm<'c, { 192 / 8 }, TAG_SIZE, IV_SIZE> {} +#[cfg(cryp_v2)] impl<'c, const TAG_SIZE: usize, const IV_SIZE: usize> CipherSized for AesCcm<'c, { 256 / 8 }, TAG_SIZE, IV_SIZE> {} +#[cfg(cryp_v2)] impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<4> for AesCcm<'c, KEY_SIZE, 4, IV_SIZE> {} +#[cfg(cryp_v2)] impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<6> for AesCcm<'c, KEY_SIZE, 6, IV_SIZE> {} +#[cfg(cryp_v2)] impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<8> for AesCcm<'c, KEY_SIZE, 8, IV_SIZE> {} +#[cfg(cryp_v2)] impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<10> for AesCcm<'c, KEY_SIZE, 10, IV_SIZE> {} +#[cfg(cryp_v2)] impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<12> for AesCcm<'c, KEY_SIZE, 12, IV_SIZE> {} +#[cfg(cryp_v2)] impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<14> for AesCcm<'c, KEY_SIZE, 14, IV_SIZE> {} +#[cfg(cryp_v2)] impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<16> for AesCcm<'c, KEY_SIZE, 16, IV_SIZE> {} +#[cfg(cryp_v2)] impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 7> {} +#[cfg(cryp_v2)] impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 8> {} +#[cfg(cryp_v2)] impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 9> {} +#[cfg(cryp_v2)] impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 10> {} +#[cfg(cryp_v2)] impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 11> {} +#[cfg(cryp_v2)] impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 12> {} +#[cfg(cryp_v2)] impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 13> {} +#[allow(dead_code)] /// Holds the state information for a cipher operation. /// Allows suspending/resuming of cipher operations. pub struct Context<'c, C: Cipher<'c> + CipherSized> { @@ -810,6 +911,7 @@ impl<'d, T: Instance> Cryp<'d, T> { ctx } + #[cfg(cryp_v2)] /// Controls the header phase of cipher processing. /// This function is only valid for GCM, CCM, and GMAC modes. /// It only needs to be called if using one of these modes and there is associated data. 
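
[Reviewer note, not part of this patch: on cryp_v2 parts the header phase
documented above is driven as sketched here, assuming an initialized driver
`cryp` and an `AesGcm` cipher from this series; the chunk names are
illustrative. AAD may arrive in arbitrary-sized pieces and is buffered into
128-bit blocks internally:

    let mut ctx = cryp.start(&aes_gcm, Direction::Encrypt);
    cryp.aad_blocking(&mut ctx, first_aad_chunk, false); // more AAD follows
    cryp.aad_blocking(&mut ctx, last_aad_chunk, true);   // ends the header phase
    // payload_blocking() may only be called once all AAD has been supplied.
]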
@@ -951,11 +1053,14 @@ impl<'d, T: Instance> Cryp<'d, T> { if !ctx.aad_complete && ctx.header_len > 0 { panic!("Additional associated data must be processed first!"); } else if !ctx.aad_complete { - ctx.aad_complete = true; - T::regs().cr().modify(|w| w.set_crypen(false)); - T::regs().cr().modify(|w| w.set_gcm_ccmph(2)); - T::regs().cr().modify(|w| w.fflush()); - T::regs().cr().modify(|w| w.set_crypen(true)); + #[cfg(cryp_v2)] + { + ctx.aad_complete = true; + T::regs().cr().modify(|w| w.set_crypen(false)); + T::regs().cr().modify(|w| w.set_gcm_ccmph(2)); + T::regs().cr().modify(|w| w.fflush()); + T::regs().cr().modify(|w| w.set_crypen(true)); + } } if ctx.last_block_processed { panic!("The last block has already been processed!"); @@ -1044,6 +1149,7 @@ impl<'d, T: Instance> Cryp<'d, T> { self.store_context(ctx); } + #[cfg(cryp_v2)] /// This function only needs to be called for GCM, CCM, and GMAC modes to /// generate an authentication tag. pub fn finish_blocking< @@ -1137,6 +1243,8 @@ impl<'d, T: Instance> Cryp<'d, T> { ctx.iv[1] = T::regs().init(0).ivrr().read(); ctx.iv[2] = T::regs().init(1).ivlr().read(); ctx.iv[3] = T::regs().init(1).ivrr().read(); + + #[cfg(cryp_v2)] for i in 0..8 { ctx.csgcmccm[i] = T::regs().csgcmccmr(i).read(); ctx.csgcm[i] = T::regs().csgcmr(i).read(); @@ -1150,6 +1258,8 @@ impl<'d, T: Instance> Cryp<'d, T> { T::regs().init(0).ivrr().write_value(ctx.iv[1]); T::regs().init(1).ivlr().write_value(ctx.iv[2]); T::regs().init(1).ivrr().write_value(ctx.iv[3]); + + #[cfg(cryp_v2)] for i in 0..8 { T::regs().csgcmccmr(i).write_value(ctx.csgcmccm[i]); T::regs().csgcmr(i).write_value(ctx.csgcm[i]); From bf4cbd75779b230e9e33a9d2a849f67335a68cf9 Mon Sep 17 00:00:00 2001 From: Caleb Garrett <47389035+caleb-garrett@users.noreply.github.com> Date: Thu, 22 Feb 2024 15:47:36 -0500 Subject: [PATCH 13/23] Add CRYP example. --- examples/stm32f7/Cargo.toml | 1 + examples/stm32f7/src/bin/cryp.rs | 69 ++++++++++++++++++++++++++++++++ 2 files changed, 70 insertions(+) create mode 100644 examples/stm32f7/src/bin/cryp.rs diff --git a/examples/stm32f7/Cargo.toml b/examples/stm32f7/Cargo.toml index 736e81723..305816a2b 100644 --- a/examples/stm32f7/Cargo.toml +++ b/examples/stm32f7/Cargo.toml @@ -30,6 +30,7 @@ embedded-storage = "0.3.1" static_cell = "2" sha2 = { version = "0.10.8", default-features = false } hmac = "0.12.1" +aes-gcm = {version = "0.10.3", default-features = false, features = ["aes", "heapless"] } [profile.release] debug = 2 diff --git a/examples/stm32f7/src/bin/cryp.rs b/examples/stm32f7/src/bin/cryp.rs new file mode 100644 index 000000000..c1b80ddc3 --- /dev/null +++ b/examples/stm32f7/src/bin/cryp.rs @@ -0,0 +1,69 @@ +#![no_std] +#![no_main] + +use aes_gcm::{ + aead::{heapless::Vec, AeadInPlace, KeyInit}, + Aes128Gcm, +}; +use defmt::info; +use embassy_executor::Spawner; +use embassy_stm32::cryp::*; +use embassy_stm32::Config; +use embassy_time::Instant; +use {defmt_rtt as _, panic_probe as _}; + +#[embassy_executor::main] +async fn main(_spawner: Spawner) -> ! 
{ + let config = Config::default(); + let p = embassy_stm32::init(config); + + let payload: &[u8] = b"hello world"; + let aad: &[u8] = b"additional data"; + + let hw_cryp = Cryp::new(p.CRYP); + let key: [u8; 16] = [0; 16]; + let mut ciphertext: [u8; 11] = [0; 11]; + let mut plaintext: [u8; 11] = [0; 11]; + let iv: [u8; 12] = [0; 12]; + + let hw_start_time = Instant::now(); + + // Encrypt in hardware using AES-GCM 128-bit + let aes_gcm = AesGcm::new(&key, &iv); + let mut gcm_encrypt = hw_cryp.start(&aes_gcm, Direction::Encrypt); + hw_cryp.aad_blocking(&mut gcm_encrypt, aad, true); + hw_cryp.payload_blocking(&mut gcm_encrypt, payload, &mut ciphertext, true); + let encrypt_tag = hw_cryp.finish_blocking(gcm_encrypt); + + // Decrypt in hardware using AES-GCM 128-bit + let mut gcm_decrypt = hw_cryp.start(&aes_gcm, Direction::Decrypt); + hw_cryp.aad_blocking(&mut gcm_decrypt, aad, true); + hw_cryp.payload_blocking(&mut gcm_decrypt, &ciphertext, &mut plaintext, true); + let decrypt_tag = hw_cryp.finish_blocking(gcm_decrypt); + + let hw_end_time = Instant::now(); + let hw_execution_time = hw_end_time - hw_start_time; + + info!("AES-GCM Ciphertext: {:?}", ciphertext); + info!("AES-GCM Plaintext: {:?}", plaintext); + assert_eq!(payload, plaintext); + assert_eq!(encrypt_tag, decrypt_tag); + + let sw_start_time = Instant::now(); + + //Encrypt in software using AES-GCM 128-bit + let mut payload_vec: Vec<u8, 32> = Vec::from_slice(&payload).unwrap(); + let cipher = Aes128Gcm::new(&key.into()); + let _ = cipher.encrypt_in_place(&iv.into(), aad.into(), &mut payload_vec); + + //Decrypt in software using AES-GCM 128-bit + let _ = cipher.encrypt_in_place(&iv.into(), aad.into(), &mut payload_vec); + + let sw_end_time = Instant::now(); + let sw_execution_time = sw_end_time - sw_start_time; + + info!("Hardware Execution Time: {:?}", hw_execution_time); + info!("Software Execution Time: {:?}", sw_execution_time); + + loop {} +} From 967b4927b002dbcdcfbe968bf9c15014fc1de2a0 Mon Sep 17 00:00:00 2001 From: Caleb Garrett <47389035+caleb-garrett@users.noreply.github.com> Date: Fri, 23 Feb 2024 16:05:18 -0500 Subject: [PATCH 14/23] Correct tag generation. --- embassy-stm32/src/cryp/mod.rs | 8 ++++---- examples/stm32f7/src/bin/cryp.rs | 9 ++++++--- 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/embassy-stm32/src/cryp/mod.rs b/embassy-stm32/src/cryp/mod.rs index 965e4a35d..038923870 100644 --- a/embassy-stm32/src/cryp/mod.rs +++ b/embassy-stm32/src/cryp/mod.rs @@ -1166,10 +1166,10 @@ impl<'d, T: Instance> Cryp<'d, T> { T::regs().cr().modify(|w| w.set_gcm_ccmph(3)); T::regs().cr().modify(|w| w.set_crypen(true)); - let headerlen1: u32 = (ctx.header_len >> 32) as u32; - let headerlen2: u32 = ctx.header_len as u32; - let payloadlen1: u32 = (ctx.payload_len >> 32) as u32; - let payloadlen2: u32 = ctx.payload_len as u32; + let headerlen1: u32 = ((ctx.header_len * 8) >> 32) as u32; + let headerlen2: u32 = (ctx.header_len * 8) as u32; + let payloadlen1: u32 = ((ctx.payload_len * 8) >> 32) as u32; + let payloadlen2: u32 = (ctx.payload_len * 8) as u32; T::regs().din().write_value(headerlen1.swap_bytes()); T::regs().din().write_value(headerlen2.swap_bytes()); diff --git a/examples/stm32f7/src/bin/cryp.rs b/examples/stm32f7/src/bin/cryp.rs index c1b80ddc3..be41955c5 100644 --- a/examples/stm32f7/src/bin/cryp.rs +++ b/examples/stm32f7/src/bin/cryp.rs @@ -51,13 +51,16 @@ async fn main(_spawner: Spawner) -> ! 
{ let sw_start_time = Instant::now(); - //Encrypt in software using AES-GCM 128-bit + // Encrypt in software using AES-GCM 128-bit let mut payload_vec: Vec<u8, 32> = Vec::from_slice(&payload).unwrap(); let cipher = Aes128Gcm::new(&key.into()); let _ = cipher.encrypt_in_place(&iv.into(), aad.into(), &mut payload_vec); + + assert_eq!(ciphertext, payload_vec[0..ciphertext.len()]); + assert_eq!(encrypt_tag, payload_vec[ciphertext.len()..ciphertext.len() + encrypt_tag.len()]); - //Decrypt in software using AES-GCM 128-bit - let _ = cipher.encrypt_in_place(&iv.into(), aad.into(), &mut payload_vec); + // Decrypt in software using AES-GCM 128-bit + let _ = cipher.decrypt_in_place(&iv.into(), aad.into(), &mut payload_vec); let sw_end_time = Instant::now(); let sw_execution_time = sw_end_time - sw_start_time; From 25ec838af597cc2e39c530b44f1a101c80b24260 Mon Sep 17 00:00:00 2001 From: Caleb Garrett <47389035+caleb-garrett@users.noreply.github.com> Date: Sat, 24 Feb 2024 15:55:20 -0500 Subject: [PATCH 15/23] Correct AAD ingest. --- embassy-stm32/src/cryp/mod.rs | 25 ++++++++++++++++++++----- 1 file changed, 20 insertions(+), 5 deletions(-) diff --git a/embassy-stm32/src/cryp/mod.rs b/embassy-stm32/src/cryp/mod.rs index 038923870..9d1a62905 100644 --- a/embassy-stm32/src/cryp/mod.rs +++ b/embassy-stm32/src/cryp/mod.rs @@ -988,7 +988,7 @@ impl<'d, T: Instance> Cryp<'d, T> { // Write block in while index < end_index { let mut in_word: [u8; 4] = [0; 4]; - in_word.copy_from_slice(&aad[index..index + 4]); + in_word.copy_from_slice(&ctx.aad_buffer[index..index + 4]); T::regs().din().write_value(u32::from_ne_bytes(in_word)); index += 4; } @@ -1000,14 +1000,16 @@ impl<'d, T: Instance> Cryp<'d, T> { ctx.aad_buffer_len = 0; let leftovers = aad_len_remaining % C::BLOCK_SIZE; ctx.aad_buffer[..leftovers].copy_from_slice(&aad[aad.len() - leftovers..aad.len()]); + ctx.aad_buffer_len += leftovers; + ctx.aad_buffer[ctx.aad_buffer_len..].fill(0); aad_len_remaining -= leftovers; assert_eq!(aad_len_remaining % C::BLOCK_SIZE, 0); // Load full data blocks into core. let num_full_blocks = aad_len_remaining / C::BLOCK_SIZE; - for _ in 0..num_full_blocks { - let mut index = len_to_copy; - let end_index = len_to_copy + C::BLOCK_SIZE; + for block in 0..num_full_blocks { + let mut index = len_to_copy + (block * C::BLOCK_SIZE); + let end_index = index + C::BLOCK_SIZE; // Write block in while index < end_index { let mut in_word: [u8; 4] = [0; 4]; @@ -1020,6 +1022,19 @@ impl<'d, T: Instance> Cryp<'d, T> { } if last_aad_block { + if leftovers > 0 { + let mut index = 0; + let end_index = C::BLOCK_SIZE; + // Write block in + while index < end_index { + let mut in_word: [u8; 4] = [0; 4]; + in_word.copy_from_slice(&ctx.aad_buffer[index..index + 4]); + T::regs().din().write_value(u32::from_ne_bytes(in_word)); + index += 4; + } + // Block until input FIFO is empty. + while !T::regs().sr().read().ifem() {} + } // Switch to payload phase. ctx.aad_complete = true; T::regs().cr().modify(|w| w.set_crypen(false)); @@ -1065,7 +1080,7 @@ impl<'d, T: Instance> Cryp<'d, T> { if ctx.last_block_processed { panic!("The last block has already been processed!"); } - if input.len() != output.len() { + if input.len() > output.len() { panic!("Output buffer length must be at least the input length."); } if !last_block { From f352b6d68b17fee886af58494b7e793cea3ea383 Mon Sep 17 00:00:00 2001 From: Caleb Garrett <47389035+caleb-garrett@users.noreply.github.com> Date: Sat, 24 Feb 2024 16:14:44 -0500 Subject: [PATCH 16/23] Address CI build issues.
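Beyond the metapac pin bump and import cleanup below, the constructor now enables the peripheral through the generic instance rather than the concrete CRYP singleton, so Cryp::new resolves for any T: Instance. In brief:

    // Before: hard-coded to the CRYP peripheral type.
    CRYP::enable_and_reset();
    // After: resolved through the generic parameter of Cryp<'d, T>.
    T::enable_and_reset();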
--- embassy-stm32/Cargo.toml | 4 ++-- embassy-stm32/src/cryp/mod.rs | 7 +++---- examples/stm32f7/src/bin/cryp.rs | 14 ++++++++------ 3 files changed, 13 insertions(+), 12 deletions(-) diff --git a/embassy-stm32/Cargo.toml b/embassy-stm32/Cargo.toml index d585d2cd6..4c856141b 100644 --- a/embassy-stm32/Cargo.toml +++ b/embassy-stm32/Cargo.toml @@ -70,7 +70,7 @@ rand_core = "0.6.3" sdio-host = "0.5.0" critical-section = "1.1" #stm32-metapac = { version = "15" } -stm32-metapac = { git = "https://github.com/embassy-rs/stm32-data-generated", tag = "stm32-data-6097928f720646c73d6483a3245f922bd5faee2f" } +stm32-metapac = { git = "https://github.com/embassy-rs/stm32-data-generated", tag = "stm32-data-ca48d946840840c5b311c96ff17cf4f8a865f9fb" } vcell = "0.1.3" bxcan = "0.7.0" nb = "1.0.0" @@ -94,7 +94,7 @@ critical-section = { version = "1.1", features = ["std"] } proc-macro2 = "1.0.36" quote = "1.0.15" #stm32-metapac = { version = "15", default-features = false, features = ["metadata"]} -stm32-metapac = { git = "https://github.com/embassy-rs/stm32-data-generated", tag = "stm32-data-6097928f720646c73d6483a3245f922bd5faee2f", default-features = false, features = ["metadata"]} +stm32-metapac = { git = "https://github.com/embassy-rs/stm32-data-generated", tag = "stm32-data-ca48d946840840c5b311c96ff17cf4f8a865f9fb", default-features = false, features = ["metadata"]} [features] diff --git a/embassy-stm32/src/cryp/mod.rs b/embassy-stm32/src/cryp/mod.rs index 9d1a62905..fef5def6a 100644 --- a/embassy-stm32/src/cryp/mod.rs +++ b/embassy-stm32/src/cryp/mod.rs @@ -2,12 +2,11 @@ #[cfg(cryp_v2)] use core::cmp::min; use core::marker::PhantomData; + use embassy_hal_internal::{into_ref, PeripheralRef}; -use crate::pac; -use crate::peripherals::CRYP; use crate::rcc::sealed::RccPeripheral; -use crate::{interrupt, peripherals, Peripheral}; +use crate::{interrupt, pac, peripherals, Peripheral}; const DES_BLOCK_SIZE: usize = 8; // 64 bits const AES_BLOCK_SIZE: usize = 16; // 128 bits @@ -827,7 +826,7 @@ pub struct Cryp<'d, T: Instance> { impl<'d, T: Instance> Cryp<'d, T> { /// Create a new CRYP driver. pub fn new(peri: impl Peripheral<P = T> + 'd) -> Self { - CRYP::enable_and_reset(); + T::enable_and_reset(); into_ref!(peri); let instance = Self { _peripheral: peri }; instance diff --git a/examples/stm32f7/src/bin/cryp.rs b/examples/stm32f7/src/bin/cryp.rs index be41955c5..04927841a 100644 --- a/examples/stm32f7/src/bin/cryp.rs +++ b/examples/stm32f7/src/bin/cryp.rs @@ -1,10 +1,9 @@ #![no_std] #![no_main] -use aes_gcm::{ - aead::{heapless::Vec, AeadInPlace, KeyInit}, - Aes128Gcm, -}; +use aes_gcm::aead::heapless::Vec; +use aes_gcm::aead::{AeadInPlace, KeyInit}; +use aes_gcm::Aes128Gcm; use defmt::info; use embassy_executor::Spawner; use embassy_stm32::cryp::*; @@ -55,9 +54,12 @@ async fn main(_spawner: Spawner) -> ! 
{ let mut payload_vec: Vec<u8, 32> = Vec::from_slice(&payload).unwrap(); let cipher = Aes128Gcm::new(&key.into()); let _ = cipher.encrypt_in_place(&iv.into(), aad.into(), &mut payload_vec); - + assert_eq!(ciphertext, payload_vec[0..ciphertext.len()]); - assert_eq!(encrypt_tag, payload_vec[ciphertext.len()..ciphertext.len() + encrypt_tag.len()]); + assert_eq!( + encrypt_tag, + payload_vec[ciphertext.len()..ciphertext.len() + encrypt_tag.len()] + ); // Decrypt in software using AES-GCM 128-bit let _ = cipher.decrypt_in_place(&iv.into(), aad.into(), &mut payload_vec); From 236fc6f650af41980af05ef03a3901b2dfcfc381 Mon Sep 17 00:00:00 2001 From: Caleb Garrett <47389035+caleb-garrett@users.noreply.github.com> Date: Sat, 24 Feb 2024 16:31:43 -0500 Subject: [PATCH 17/23] Add CRYP test. --- embassy-stm32/src/cryp/mod.rs | 1 - tests/stm32/Cargo.toml | 11 +++++- tests/stm32/src/bin/cryp.rs | 71 +++++++++++++++++++++++++++++++++++ 3 files changed, 80 insertions(+), 3 deletions(-) create mode 100644 tests/stm32/src/bin/cryp.rs diff --git a/embassy-stm32/src/cryp/mod.rs b/embassy-stm32/src/cryp/mod.rs index fef5def6a..bb64fa423 100644 --- a/embassy-stm32/src/cryp/mod.rs +++ b/embassy-stm32/src/cryp/mod.rs @@ -5,7 +5,6 @@ use core::marker::PhantomData; use embassy_hal_internal::{into_ref, PeripheralRef}; -use crate::rcc::sealed::RccPeripheral; use crate::{interrupt, pac, peripherals, Peripheral}; const DES_BLOCK_SIZE: usize = 8; // 64 bits diff --git a/tests/stm32/Cargo.toml b/tests/stm32/Cargo.toml index 828a28e2c..37519ba11 100644 --- a/tests/stm32/Cargo.toml +++ b/tests/stm32/Cargo.toml @@ -16,8 +16,8 @@ stm32f767zi = ["embassy-stm32/stm32f767zi", "chrono", "not-gpdma", "eth", "rng"] stm32g071rb = ["embassy-stm32/stm32g071rb", "cm0", "not-gpdma", "dac"] stm32g491re = ["embassy-stm32/stm32g491re", "chrono", "stop", "not-gpdma", "rng", "fdcan"] stm32h563zi = ["embassy-stm32/stm32h563zi", "chrono", "eth", "rng", "hash"] -stm32h753zi = ["embassy-stm32/stm32h753zi", "chrono", "not-gpdma", "eth", "rng", "fdcan", "hash"] -stm32h755zi = ["embassy-stm32/stm32h755zi-cm7", "chrono", "not-gpdma", "eth", "dac", "rng", "fdcan", "hash"] +stm32h753zi = ["embassy-stm32/stm32h753zi", "chrono", "not-gpdma", "eth", "rng", "fdcan", "hash", "cryp"] +stm32h755zi = ["embassy-stm32/stm32h755zi-cm7", "chrono", "not-gpdma", "eth", "dac", "rng", "fdcan", "hash", "cryp"] stm32h7a3zi = ["embassy-stm32/stm32h7a3zi", "not-gpdma", "rng", "fdcan"] stm32l073rz = ["embassy-stm32/stm32l073rz", "cm0", "not-gpdma", "rng"] stm32l152re = ["embassy-stm32/stm32l152re", "chrono", "not-gpdma"] @@ -33,6 +33,7 @@ stm32wl55jc = ["embassy-stm32/stm32wl55jc-cm4", "not-gpdma", "rng", "chrono"] stm32f091rc = ["embassy-stm32/stm32f091rc", "cm0", "not-gpdma", "chrono"] stm32h503rb = ["embassy-stm32/stm32h503rb", "rng"] +cryp = [] hash = [] eth = ["embassy-executor/task-arena-size-16384"] rng = [] @@ -80,6 +81,7 @@ portable-atomic = { version = "1.5", features = [] } chrono = { version = "^0.4", default-features = false, optional = true} sha2 = { version = "0.10.8", default-features = false } hmac = "0.12.1" +aes-gcm = {version = "0.10.3", default-features = false, features = ["aes", "heapless"] } # BEGIN TESTS # Generated by gen_test.py. DO NOT EDIT. 
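The aes-gcm dev-dependency added above gives the new test a pure-software reference to check the hardware results against. Condensed, the cross-check pattern used in the test below looks like this (the buffer capacity here is a placeholder; the test sizes it from the payload constants):

    // Software reference: RustCrypto aes-gcm over a heapless buffer.
    let cipher = Aes128Gcm::new(&key.into());
    let mut buf: Vec<u8, 64> = Vec::from_slice(payload).unwrap();
    // encrypt_in_place appends the 16-byte tag after the ciphertext.
    let _ = cipher.encrypt_in_place(&iv.into(), aad, &mut buf);
    // The CRYP output must match: ciphertext bytes first, then the tag.
    defmt::assert!(ciphertext == buf[..ciphertext.len()]);
    defmt::assert!(encrypt_tag == buf[ciphertext.len()..ciphertext.len() + encrypt_tag.len()]);

Note that the [[bin]] entry registered in the next hunk initially lists "hash" in required-features; patch 20 re-runs gen_test.py and corrects it to "cryp".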
@@ -88,6 +90,11 @@ name = "can" path = "src/bin/can.rs" required-features = [ "can",] +[[bin]] +name = "cryp" +path = "src/bin/cryp.rs" +required-features = [ "hash",] + [[bin]] name = "dac" path = "src/bin/dac.rs" diff --git a/tests/stm32/src/bin/cryp.rs b/tests/stm32/src/bin/cryp.rs new file mode 100644 index 000000000..59c85f258 --- /dev/null +++ b/tests/stm32/src/bin/cryp.rs @@ -0,0 +1,71 @@ +// required-features: cryp +#![no_std] +#![no_main] + +#[path = "../common.rs"] +mod common; + +use aes_gcm::aead::heapless::Vec; +use aes_gcm::aead::{AeadInPlace, KeyInit}; +use aes_gcm::Aes128Gcm; +use common::*; +use embassy_executor::Spawner; +use embassy_stm32::cryp::*; +use {defmt_rtt as _, panic_probe as _}; + +#[embassy_executor::main] +async fn main(_spawner: Spawner) { + let p: embassy_stm32::Peripherals = embassy_stm32::init(config()); + + const PAYLOAD1: &[u8] = b"payload data 1 ;zdfhzdfhS;GKJASBDG;ASKDJBAL,zdfhzdfhzdfhzdfhvljhb,jhbjhb,sdhsdghsdhsfhsghzdfhzdfhzdfhzdfdhsdthsthsdhsgaadfhhgkdgfuoyguoft6783567"; + const PAYLOAD2: &[u8] = b"payload data 2 ;SKEzdfhzdfhzbhgvljhb,jhbjhb,sdhsdghsdhsfhsghshsfhshstsdthadfhsdfjhsfgjsfgjxfgjzdhgDFghSDGHjtfjtjszftjzsdtjhstdsdhsdhsdhsdhsdthsthsdhsgfh"; + const AAD1: &[u8] = b"additional data 1 stdargadrhaethaethjatjatjaetjartjstrjsfkk;'jopofyuisrteytweTASTUIKFUKIXTRDTEREharhaeryhaterjartjarthaethjrtjarthaetrhartjatejatrjsrtjartjyt1"; + const AAD2: &[u8] = b"additional data 2 stdhthsthsthsrthsrthsrtjdykjdukdyuldadfhsdghsdghsdghsadghjk'hioethjrtjarthaetrhartjatecfgjhzdfhgzdfhzdfghzdfhzdfhzfhjatrjsrtjartjytjfytjfyg"; + + let hw_cryp = Cryp::new(p.CRYP); + let key: [u8; 16] = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]; + let mut ciphertext: [u8; PAYLOAD1.len() + PAYLOAD2.len()] = [0; PAYLOAD1.len() + PAYLOAD2.len()]; + let mut plaintext: [u8; PAYLOAD1.len() + PAYLOAD2.len()] = [0; PAYLOAD1.len() + PAYLOAD2.len()]; + let iv: [u8; 12] = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]; + + // Encrypt in hardware using AES-GCM 128-bit + let aes_gcm = AesGcm::new(&key, &iv); + let mut gcm_encrypt = hw_cryp.start(&aes_gcm, Direction::Encrypt); + hw_cryp.aad_blocking(&mut gcm_encrypt, AAD1, false); + hw_cryp.aad_blocking(&mut gcm_encrypt, AAD2, true); + hw_cryp.payload_blocking(&mut gcm_encrypt, PAYLOAD1, &mut ciphertext[..PAYLOAD1.len()], false); + hw_cryp.payload_blocking(&mut gcm_encrypt, PAYLOAD2, &mut ciphertext[PAYLOAD1.len()..], true); + let encrypt_tag = hw_cryp.finish_blocking(gcm_encrypt); + + // Decrypt in hardware using AES-GCM 128-bit + let mut gcm_decrypt = hw_cryp.start(&aes_gcm, Direction::Decrypt); + hw_cryp.aad_blocking(&mut gcm_decrypt, AAD1, false); + hw_cryp.aad_blocking(&mut gcm_decrypt, AAD2, true); + hw_cryp.payload_blocking(&mut gcm_decrypt, &ciphertext, &mut plaintext, true); + let decrypt_tag = hw_cryp.finish_blocking(gcm_decrypt); + + info!("AES-GCM Ciphertext: {:?}", ciphertext); + info!("AES-GCM Plaintext: {:?}", plaintext); + defmt::assert!(PAYLOAD1 == &plaintext[..PAYLOAD1.len()]); + defmt::assert!(PAYLOAD2 == &plaintext[PAYLOAD1.len()..]); + defmt::assert!(encrypt_tag == decrypt_tag); + + // Encrypt in software using AES-GCM 128-bit + let mut payload_vec: Vec<u8, { PAYLOAD1.len() + PAYLOAD2.len() + 16 }> = Vec::from_slice(&PAYLOAD1).unwrap(); + payload_vec.extend_from_slice(&PAYLOAD2).unwrap(); + let cipher = Aes128Gcm::new(&key.into()); + let mut aad: Vec<u8, { AAD1.len() + AAD2.len() }> = Vec::from_slice(&AAD1).unwrap(); + aad.extend_from_slice(&AAD2).unwrap(); + let _ = cipher.encrypt_in_place(&iv.into(), &aad, &mut 
payload_vec); + + defmt::assert!(ciphertext == payload_vec[0..ciphertext.len()]); + defmt::assert!( + encrypt_tag == payload_vec[ciphertext.len()..ciphertext.len() + encrypt_tag.len()] + ); + + // Decrypt in software using AES-GCM 128-bit + let _ = cipher.decrypt_in_place(&iv.into(), &aad, &mut payload_vec); + + info!("Test OK"); + cortex_m::asm::bkpt(); +} From d9c0da8102226cebc36c92b874b8f0bf966ac959 Mon Sep 17 00:00:00 2001 From: Caleb Garrett <47389035+caleb-garrett@users.noreply.github.com> Date: Sun, 25 Feb 2024 20:58:35 -0500 Subject: [PATCH 18/23] Update metapac to address CI build issue. --- embassy-stm32/Cargo.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/embassy-stm32/Cargo.toml b/embassy-stm32/Cargo.toml index 4c856141b..e0bee6a92 100644 --- a/embassy-stm32/Cargo.toml +++ b/embassy-stm32/Cargo.toml @@ -70,7 +70,7 @@ rand_core = "0.6.3" sdio-host = "0.5.0" critical-section = "1.1" #stm32-metapac = { version = "15" } -stm32-metapac = { git = "https://github.com/embassy-rs/stm32-data-generated", tag = "stm32-data-ca48d946840840c5b311c96ff17cf4f8a865f9fb" } +stm32-metapac = { git = "https://github.com/embassy-rs/stm32-data-generated", tag = "stm32-data-88f71cbcd2f048c40bad162c7e7864cc3897eba4" } vcell = "0.1.3" bxcan = "0.7.0" nb = "1.0.0" @@ -94,7 +94,7 @@ critical-section = { version = "1.1", features = ["std"] } proc-macro2 = "1.0.36" quote = "1.0.15" #stm32-metapac = { version = "15", default-features = false, features = ["metadata"]} -stm32-metapac = { git = "https://github.com/embassy-rs/stm32-data-generated", tag = "stm32-data-ca48d946840840c5b311c96ff17cf4f8a865f9fb", default-features = false, features = ["metadata"]} +stm32-metapac = { git = "https://github.com/embassy-rs/stm32-data-generated", tag = "stm32-data-88f71cbcd2f048c40bad162c7e7864cc3897eba4", default-features = false, features = ["metadata"]} [features] From 766372e06a413352dc07b864dd76e85e03782790 Mon Sep 17 00:00:00 2001 From: Caleb Garrett <47389035+caleb-garrett@users.noreply.github.com> Date: Sun, 25 Feb 2024 21:16:43 -0500 Subject: [PATCH 19/23] rustfmt --- tests/stm32/src/bin/cryp.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/tests/stm32/src/bin/cryp.rs b/tests/stm32/src/bin/cryp.rs index 59c85f258..f105abf26 100644 --- a/tests/stm32/src/bin/cryp.rs +++ b/tests/stm32/src/bin/cryp.rs @@ -59,9 +59,7 @@ async fn main(_spawner: Spawner) { let _ = cipher.encrypt_in_place(&iv.into(), &aad, &mut payload_vec); defmt::assert!(ciphertext == payload_vec[0..ciphertext.len()]); - defmt::assert!( - encrypt_tag == payload_vec[ciphertext.len()..ciphertext.len() + encrypt_tag.len()] - ); + defmt::assert!(encrypt_tag == payload_vec[ciphertext.len()..ciphertext.len() + encrypt_tag.len()]); // Decrypt in software using AES-GCM 128-bit let _ = cipher.decrypt_in_place(&iv.into(), &aad, &mut payload_vec); From 54f502e5e6a355e0f132f33f3eecd2a0abe298bc Mon Sep 17 00:00:00 2001 From: Caleb Garrett <47389035+caleb-garrett@users.noreply.github.com> Date: Sun, 25 Feb 2024 21:31:25 -0500 Subject: [PATCH 20/23] Run gen_test.py --- tests/stm32/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/stm32/Cargo.toml b/tests/stm32/Cargo.toml index 37519ba11..bfe003a11 100644 --- a/tests/stm32/Cargo.toml +++ b/tests/stm32/Cargo.toml @@ -93,7 +93,7 @@ required-features = [ "can",] [[bin]] name = "cryp" path = "src/bin/cryp.rs" -required-features = [ "hash",] +required-features = [ "cryp",] [[bin]] name = "dac" From c9cca3c007eb4b85c559f74655c6cb018d9e28f1 Mon 
Sep 17 00:00:00 2001 From: Caleb Garrett <47389035+caleb-garrett@users.noreply.github.com> Date: Thu, 29 Feb 2024 19:09:44 -0500 Subject: [PATCH 21/23] Fix H7 CRYP operation. --- embassy-stm32/Cargo.toml | 4 +- embassy-stm32/src/cryp/mod.rs | 159 +++++++++++++++++++++------------- tests/stm32/Cargo.toml | 4 +- 3 files changed, 103 insertions(+), 64 deletions(-) diff --git a/embassy-stm32/Cargo.toml b/embassy-stm32/Cargo.toml index 08ccd35ae..460184920 100644 --- a/embassy-stm32/Cargo.toml +++ b/embassy-stm32/Cargo.toml @@ -70,7 +70,7 @@ rand_core = "0.6.3" sdio-host = "0.5.0" critical-section = "1.1" #stm32-metapac = { version = "15" } -stm32-metapac = { git = "https://github.com/embassy-rs/stm32-data-generated", tag = "stm32-data-4a0bcec33362449fb733c066936d25cbabab396a" } +stm32-metapac = { git = "https://github.com/embassy-rs/stm32-data-generated", tag = "stm32-data-d7462d805ef05892531a83cd9ad60c9cba568d54" } vcell = "0.1.3" bxcan = "0.7.0" nb = "1.0.0" @@ -94,7 +94,7 @@ critical-section = { version = "1.1", features = ["std"] } proc-macro2 = "1.0.36" quote = "1.0.15" #stm32-metapac = { version = "15", default-features = false, features = ["metadata"]} -stm32-metapac = { git = "https://github.com/embassy-rs/stm32-data-generated", tag = "stm32-data-4a0bcec33362449fb733c066936d25cbabab396a", default-features = false, features = ["metadata"]} +stm32-metapac = { git = "https://github.com/embassy-rs/stm32-data-generated", tag = "stm32-data-d7462d805ef05892531a83cd9ad60c9cba568d54", default-features = false, features = ["metadata"]} [features] diff --git a/embassy-stm32/src/cryp/mod.rs b/embassy-stm32/src/cryp/mod.rs index bb64fa423..8f259520a 100644 --- a/embassy-stm32/src/cryp/mod.rs +++ b/embassy-stm32/src/cryp/mod.rs @@ -1,5 +1,5 @@ //! Crypto Accelerator (CRYP) -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] use core::cmp::min; use core::marker::PhantomData; @@ -35,7 +35,7 @@ pub trait Cipher<'c> { fn init_phase(&self, _p: &pac::cryp::Cryp) {} /// Called prior to processing the last data block for cipher-specific operations. 
- fn pre_final_block(&self, _p: &pac::cryp::Cryp, _dir: Direction) -> [u32; 4] { + fn pre_final_block(&self, _p: &pac::cryp::Cryp, _dir: Direction, _padding_len: usize) -> [u32; 4] { return [0; 4]; } @@ -98,7 +98,7 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for TdesEcb<'c, KEY_SIZE> { { p.cr().modify(|w| w.set_algomode(0)); } - #[cfg(cryp_v2)] + #[cfg(any(cryp_v2, cryp_v3))] { p.cr().modify(|w| w.set_algomode0(0)); p.cr().modify(|w| w.set_algomode3(false)); @@ -140,7 +140,7 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for TdesCbc<'c, KEY_SIZE> { { p.cr().modify(|w| w.set_algomode(1)); } - #[cfg(cryp_v2)] + #[cfg(any(cryp_v2, cryp_v3))] { p.cr().modify(|w| w.set_algomode0(1)); p.cr().modify(|w| w.set_algomode3(false)); @@ -182,7 +182,7 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for DesEcb<'c, KEY_SIZE> { { p.cr().modify(|w| w.set_algomode(2)); } - #[cfg(cryp_v2)] + #[cfg(any(cryp_v2, cryp_v3))] { p.cr().modify(|w| w.set_algomode0(2)); p.cr().modify(|w| w.set_algomode3(false)); @@ -223,7 +223,7 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for DesCbc<'c, KEY_SIZE> { { p.cr().modify(|w| w.set_algomode(3)); } - #[cfg(cryp_v2)] + #[cfg(any(cryp_v2, cryp_v3))] { p.cr().modify(|w| w.set_algomode0(3)); p.cr().modify(|w| w.set_algomode3(false)); @@ -264,7 +264,7 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesEcb<'c, KEY_SIZE> { { p.cr().modify(|w| w.set_algomode(7)); } - #[cfg(cryp_v2)] + #[cfg(any(cryp_v2, cryp_v3))] { p.cr().modify(|w| w.set_algomode0(7)); p.cr().modify(|w| w.set_algomode3(false)); @@ -278,7 +278,7 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesEcb<'c, KEY_SIZE> { { p.cr().modify(|w| w.set_algomode(2)); } - #[cfg(cryp_v2)] + #[cfg(any(cryp_v2, cryp_v3))] { p.cr().modify(|w| w.set_algomode0(2)); p.cr().modify(|w| w.set_algomode3(false)); @@ -321,7 +321,7 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesCbc<'c, KEY_SIZE> { { p.cr().modify(|w| w.set_algomode(7)); } - #[cfg(cryp_v2)] + #[cfg(any(cryp_v2, cryp_v3))] { p.cr().modify(|w| w.set_algomode0(7)); p.cr().modify(|w| w.set_algomode3(false)); @@ -335,7 +335,7 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesCbc<'c, KEY_SIZE> { { p.cr().modify(|w| w.set_algomode(5)); } - #[cfg(cryp_v2)] + #[cfg(any(cryp_v2, cryp_v3))] { p.cr().modify(|w| w.set_algomode0(5)); p.cr().modify(|w| w.set_algomode3(false)); @@ -377,7 +377,7 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesCtr<'c, KEY_SIZE> { { p.cr().modify(|w| w.set_algomode(6)); } - #[cfg(cryp_v2)] + #[cfg(any(cryp_v2, cryp_v3))] { p.cr().modify(|w| w.set_algomode0(6)); p.cr().modify(|w| w.set_algomode3(false)); @@ -390,14 +390,14 @@ impl<'c> CipherSized for AesCtr<'c, { 192 / 8 }> {} impl<'c> CipherSized for AesCtr<'c, { 256 / 8 }> {} impl<'c, const KEY_SIZE: usize> IVSized for AesCtr<'c, KEY_SIZE> {} -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] /// AES-GCM Cipher Mode pub struct AesGcm<'c, const KEY_SIZE: usize> { iv: [u8; 16], key: &'c [u8; KEY_SIZE], } -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] impl<'c, const KEY_SIZE: usize> AesGcm<'c, KEY_SIZE> { /// Constructs a new AES-GCM cipher for a cryptographic operation.
pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; 12]) -> Self { @@ -408,7 +408,7 @@ impl<'c, const KEY_SIZE: usize> AesGcm<'c, KEY_SIZE> { } } -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesGcm<'c, KEY_SIZE> { const BLOCK_SIZE: usize = AES_BLOCK_SIZE; @@ -431,7 +431,8 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesGcm<'c, KEY_SIZE> { while p.cr().read().crypen() {} } - fn pre_final_block(&self, p: &pac::cryp::Cryp, dir: Direction) -> [u32; 4] { + #[cfg(cryp_v2)] + fn pre_final_block(&self, p: &pac::cryp::Cryp, dir: Direction, _padding_len: usize) -> [u32; 4] { //Handle special GCM partial block process. if dir == Direction::Encrypt { p.cr().modify(|w| w.set_crypen(false)); @@ -444,6 +445,14 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesGcm<'c, KEY_SIZE> { [0; 4] } + #[cfg(cryp_v3)] + fn pre_final_block(&self, p: &pac::cryp::Cryp, _dir: Direction, padding_len: usize) -> [u32; 4] { + //Handle special GCM partial block process. + p.cr().modify(|w| w.set_npblb(padding_len as u8)); + [0; 4] + } + + #[cfg(cryp_v2)] fn post_final_block( &self, p: &pac::cryp::Cryp, @@ -477,25 +486,25 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesGcm<'c, KEY_SIZE> { } } -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] impl<'c> CipherSized for AesGcm<'c, { 128 / 8 }> {} -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] impl<'c> CipherSized for AesGcm<'c, { 192 / 8 }> {} -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] impl<'c> CipherSized for AesGcm<'c, { 256 / 8 }> {} -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] impl<'c, const KEY_SIZE: usize> CipherAuthenticated<16> for AesGcm<'c, KEY_SIZE> {} -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] impl<'c, const KEY_SIZE: usize> IVSized for AesGcm<'c, KEY_SIZE> {} -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] /// AES-GMAC Cipher Mode pub struct AesGmac<'c, const KEY_SIZE: usize> { iv: [u8; 16], key: &'c [u8; KEY_SIZE], } -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] impl<'c, const KEY_SIZE: usize> AesGmac<'c, KEY_SIZE> { /// Constructs a new AES-GMAC cipher for a cryptographic operation. pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; 12]) -> Self { @@ -506,7 +515,7 @@ impl<'c, const KEY_SIZE: usize> AesGmac<'c, KEY_SIZE> { } } -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesGmac<'c, KEY_SIZE> { const BLOCK_SIZE: usize = AES_BLOCK_SIZE; @@ -529,7 +538,8 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesGmac<'c, KEY_SIZE> { while p.cr().read().crypen() {} } - fn pre_final_block(&self, p: &pac::cryp::Cryp, dir: Direction) -> [u32; 4] { + #[cfg(cryp_v2)] + fn pre_final_block(&self, p: &pac::cryp::Cryp, dir: Direction, _padding_len: usize) -> [u32; 4] { //Handle special GCM partial block process. if dir == Direction::Encrypt { p.cr().modify(|w| w.set_crypen(false)); @@ -542,6 +552,14 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesGmac<'c, KEY_SIZE> { [0; 4] } + #[cfg(cryp_v3)] + fn pre_final_block(&self, p: &pac::cryp::Cryp, _dir: Direction, padding_len: usize) -> [u32; 4] { + //Handle special GCM partial block process. 
+ p.cr().modify(|w| w.set_npblb(padding_len as u8)); + [0; 4] + } + + #[cfg(cryp_v2)] fn post_final_block( &self, p: &pac::cryp::Cryp, @@ -575,18 +593,18 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesGmac<'c, KEY_SIZE> { } } -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] impl<'c> CipherSized for AesGmac<'c, { 128 / 8 }> {} -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] impl<'c> CipherSized for AesGmac<'c, { 192 / 8 }> {} -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] impl<'c> CipherSized for AesGmac<'c, { 256 / 8 }> {} -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] impl<'c, const KEY_SIZE: usize> CipherAuthenticated<16> for AesGmac<'c, KEY_SIZE> {} -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] impl<'c, const KEY_SIZE: usize> IVSized for AesGmac<'c, KEY_SIZE> {} -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] /// AES-CCM Cipher Mode pub struct AesCcm<'c, const KEY_SIZE: usize, const TAG_SIZE: usize, const IV_SIZE: usize> { key: &'c [u8; KEY_SIZE], @@ -596,7 +614,7 @@ pub struct AesCcm<'c, const KEY_SIZE: usize, const TAG_SIZE: usize, const IV_SIZ ctr: [u8; 16], } -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize, const IV_SIZE: usize> AesCcm<'c, KEY_SIZE, TAG_SIZE, IV_SIZE> { /// Constructs a new AES-CCM cipher for a cryptographic operation. pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; IV_SIZE], aad_len: usize, payload_len: usize) -> Self { @@ -660,7 +678,7 @@ impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize, const IV_SIZE: usize> Aes } } -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize, const IV_SIZE: usize> Cipher<'c> for AesCcm<'c, KEY_SIZE, TAG_SIZE, IV_SIZE> { @@ -699,7 +717,8 @@ impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize, const IV_SIZE: usize> Cip return &self.aad_header[0..self.aad_header_len]; } - fn pre_final_block(&self, p: &pac::cryp::Cryp, dir: Direction) -> [u32; 4] { + #[cfg(cryp_v2)] + fn pre_final_block(&self, p: &pac::cryp::Cryp, dir: Direction, _padding_len: usize) -> [u32; 4] { //Handle special CCM partial block process. let mut temp1 = [0; 4]; if dir == Direction::Decrypt { @@ -717,6 +736,14 @@ impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize, const IV_SIZE: usize> Cip return temp1; } + #[cfg(cryp_v3)] + fn pre_final_block(&self, p: &pac::cryp::Cryp, _dir: Direction, padding_len: usize) -> [u32; 4] { + //Handle special CCM partial block process.
+ p.cr().modify(|w| w.set_npblb(padding_len as u8)); + [0; 4] + } + + #[cfg(cryp_v2)] fn post_final_block( &self, p: &pac::cryp::Cryp, @@ -753,39 +780,39 @@ impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize, const IV_SIZE: usize> Cip } } -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] impl<'c, const TAG_SIZE: usize, const IV_SIZE: usize> CipherSized for AesCcm<'c, { 128 / 8 }, TAG_SIZE, IV_SIZE> {} -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] impl<'c, const TAG_SIZE: usize, const IV_SIZE: usize> CipherSized for AesCcm<'c, { 192 / 8 }, TAG_SIZE, IV_SIZE> {} -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] impl<'c, const TAG_SIZE: usize, const IV_SIZE: usize> CipherSized for AesCcm<'c, { 256 / 8 }, TAG_SIZE, IV_SIZE> {} -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<4> for AesCcm<'c, KEY_SIZE, 4, IV_SIZE> {} -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<6> for AesCcm<'c, KEY_SIZE, 6, IV_SIZE> {} -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<8> for AesCcm<'c, KEY_SIZE, 8, IV_SIZE> {} -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<10> for AesCcm<'c, KEY_SIZE, 10, IV_SIZE> {} -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<12> for AesCcm<'c, KEY_SIZE, 12, IV_SIZE> {} -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<14> for AesCcm<'c, KEY_SIZE, 14, IV_SIZE> {} -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<16> for AesCcm<'c, KEY_SIZE, 16, IV_SIZE> {} -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 7> {} -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 8> {} -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 9> {} -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 10> {} -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 11> {} -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 12> {} -#[cfg(cryp_v2)] +#[cfg(any(cryp_v2, cryp_v3))] impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 13> {} #[allow(dead_code)] @@ -909,7 +936,7 @@ impl<'d, T: Instance> Cryp<'d, T> { ctx } - #[cfg(cryp_v2)] + #[cfg(any(cryp_v2, cryp_v3))] /// Controls the header phase of cipher processing. /// This function is only valid for GCM, CCM, and GMAC modes. /// It only needs to be called if using one of these modes and there is associated data. 
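The functionally significant change for cryp_v3 parts is in the final-block handling. Where cryp_v2 works around a partial last block by temporarily reconfiguring the algorithm mode (the #[cfg(cryp_v2)] pre_final_block/post_final_block paths above), cryp_v3 declares how many bytes of the zero-padded final block are padding via NPBLB and lets the core mask them out. Condensed from the hunks in this patch (C::BLOCK_SIZE is the cipher block size):

    // cryp_v3: number of padding bytes in the (zero-padded) final block...
    let padding_len = C::BLOCK_SIZE - last_block_remainder;
    // ...is written to NPBLB; no mode-switch workaround is needed.
    p.cr().modify(|w| w.set_npblb(padding_len as u8));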
@@ -1066,7 +1093,7 @@ impl<'d, T: Instance> Cryp<'d, T> { if !ctx.aad_complete && ctx.header_len > 0 { panic!("Additional associated data must be processed first!"); } else if !ctx.aad_complete { - #[cfg(cryp_v2)] + #[cfg(any(cryp_v2, cryp_v3))] { ctx.aad_complete = true; T::regs().cr().modify(|w| w.set_crypen(false)); @@ -1121,7 +1148,8 @@ impl<'d, T: Instance> Cryp<'d, T> { // Handle the final block, which is incomplete. if last_block_remainder > 0 { - let temp1 = ctx.cipher.pre_final_block(&T::regs(), ctx.dir); + let padding_len = C::BLOCK_SIZE - last_block_remainder; + let temp1 = ctx.cipher.pre_final_block(&T::regs(), ctx.dir, padding_len); let mut intermediate_data: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE]; let mut last_block: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE]; @@ -1162,7 +1190,7 @@ impl<'d, T: Instance> Cryp<'d, T> { self.store_context(ctx); } - #[cfg(cryp_v2)] + #[cfg(any(cryp_v2, cryp_v3))] /// This function only needs to be called for GCM, CCM, and GMAC modes to /// generate an authentication tag. pub fn finish_blocking< @@ -1184,10 +1212,21 @@ impl<'d, T: Instance> Cryp<'d, T> { let payloadlen1: u32 = ((ctx.payload_len * 8) >> 32) as u32; let payloadlen2: u32 = (ctx.payload_len * 8) as u32; - T::regs().din().write_value(headerlen1.swap_bytes()); - T::regs().din().write_value(headerlen2.swap_bytes()); - T::regs().din().write_value(payloadlen1.swap_bytes()); - T::regs().din().write_value(payloadlen2.swap_bytes()); + #[cfg(cryp_v2)] + { + T::regs().din().write_value(headerlen1.swap_bytes()); + T::regs().din().write_value(headerlen2.swap_bytes()); + T::regs().din().write_value(payloadlen1.swap_bytes()); + T::regs().din().write_value(payloadlen2.swap_bytes()); + } + + #[cfg(cryp_v3)] + { + T::regs().din().write_value(headerlen1); + T::regs().din().write_value(headerlen2); + T::regs().din().write_value(payloadlen1); + T::regs().din().write_value(payloadlen2); + } while !T::regs().sr().read().ofne() {} @@ -1257,7 +1296,7 @@ impl<'d, T: Instance> Cryp<'d, T> { ctx.iv[2] = T::regs().init(1).ivlr().read(); ctx.iv[3] = T::regs().init(1).ivrr().read(); - #[cfg(cryp_v2)] + #[cfg(any(cryp_v2, cryp_v3))] for i in 0..8 { ctx.csgcmccm[i] = T::regs().csgcmccmr(i).read(); ctx.csgcm[i] = T::regs().csgcmr(i).read(); @@ -1272,7 +1311,7 @@ impl<'d, T: Instance> Cryp<'d, T> { T::regs().init(1).ivlr().write_value(ctx.iv[2]); T::regs().init(1).ivrr().write_value(ctx.iv[3]); - #[cfg(cryp_v2)] + #[cfg(any(cryp_v2, cryp_v3))] for i in 0..8 { T::regs().csgcmccmr(i).write_value(ctx.csgcmccm[i]); T::regs().csgcmr(i).write_value(ctx.csgcm[i]); diff --git a/tests/stm32/Cargo.toml b/tests/stm32/Cargo.toml index bfe003a11..3d7db2025 100644 --- a/tests/stm32/Cargo.toml +++ b/tests/stm32/Cargo.toml @@ -10,7 +10,7 @@ stm32c031c6 = ["embassy-stm32/stm32c031c6", "cm0", "not-gpdma"] stm32f103c8 = ["embassy-stm32/stm32f103c8", "not-gpdma"] stm32f207zg = ["embassy-stm32/stm32f207zg", "chrono", "not-gpdma", "eth", "rng"] stm32f303ze = ["embassy-stm32/stm32f303ze", "chrono", "not-gpdma"] -stm32f429zi = ["embassy-stm32/stm32f429zi", "chrono", "eth", "stop", "can", "not-gpdma", "dac", "rng"] +stm32f429zi = ["embassy-stm32/stm32f429zi", "chrono", "eth", "stop", "can", "not-gpdma", "dac", "rng", "cryp"] stm32f446re = ["embassy-stm32/stm32f446re", "chrono", "stop", "can", "not-gpdma", "dac", "sdmmc"] stm32f767zi = ["embassy-stm32/stm32f767zi", "chrono", "not-gpdma", "eth", "rng"] stm32g071rb = ["embassy-stm32/stm32g071rb", "cm0", "not-gpdma", "dac"] @@ -18,7 +18,7 @@ stm32g491re = ["embassy-stm32/stm32g491re", 
"chrono", "stop", "not-gpdma", "rng" stm32h563zi = ["embassy-stm32/stm32h563zi", "chrono", "eth", "rng", "hash"] stm32h753zi = ["embassy-stm32/stm32h753zi", "chrono", "not-gpdma", "eth", "rng", "fdcan", "hash", "cryp"] stm32h755zi = ["embassy-stm32/stm32h755zi-cm7", "chrono", "not-gpdma", "eth", "dac", "rng", "fdcan", "hash", "cryp"] -stm32h7a3zi = ["embassy-stm32/stm32h7a3zi", "not-gpdma", "rng", "fdcan"] +stm32h7a3zi = ["embassy-stm32/stm32h7a3zi", "not-gpdma", "rng", "fdcan", "cryp"] stm32l073rz = ["embassy-stm32/stm32l073rz", "cm0", "not-gpdma", "rng"] stm32l152re = ["embassy-stm32/stm32l152re", "chrono", "not-gpdma"] stm32l496zg = ["embassy-stm32/stm32l496zg", "not-gpdma", "rng"] From 27fac380cfe05439adbe4f0256765f906e403733 Mon Sep 17 00:00:00 2001 From: Caleb Garrett <47389035+caleb-garrett@users.noreply.github.com> Date: Thu, 29 Feb 2024 19:15:32 -0500 Subject: [PATCH 22/23] Remove CRYP from H7A3. --- tests/stm32/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/stm32/Cargo.toml b/tests/stm32/Cargo.toml index 3d7db2025..c5f605565 100644 --- a/tests/stm32/Cargo.toml +++ b/tests/stm32/Cargo.toml @@ -18,7 +18,7 @@ stm32g491re = ["embassy-stm32/stm32g491re", "chrono", "stop", "not-gpdma", "rng" stm32h563zi = ["embassy-stm32/stm32h563zi", "chrono", "eth", "rng", "hash"] stm32h753zi = ["embassy-stm32/stm32h753zi", "chrono", "not-gpdma", "eth", "rng", "fdcan", "hash", "cryp"] stm32h755zi = ["embassy-stm32/stm32h755zi-cm7", "chrono", "not-gpdma", "eth", "dac", "rng", "fdcan", "hash", "cryp"] -stm32h7a3zi = ["embassy-stm32/stm32h7a3zi", "not-gpdma", "rng", "fdcan", "cryp"] +stm32h7a3zi = ["embassy-stm32/stm32h7a3zi", "not-gpdma", "rng", "fdcan"] stm32l073rz = ["embassy-stm32/stm32l073rz", "cm0", "not-gpdma", "rng"] stm32l152re = ["embassy-stm32/stm32l152re", "chrono", "not-gpdma"] stm32l496zg = ["embassy-stm32/stm32l496zg", "not-gpdma", "rng"] From 97e125872e707e96bf81cd8e601f92f0f9f688a1 Mon Sep 17 00:00:00 2001 From: Caleb Garrett <47389035+caleb-garrett@users.noreply.github.com> Date: Thu, 29 Feb 2024 19:18:25 -0500 Subject: [PATCH 23/23] Remove CRYP from F429. --- tests/stm32/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/stm32/Cargo.toml b/tests/stm32/Cargo.toml index c5f605565..bfe003a11 100644 --- a/tests/stm32/Cargo.toml +++ b/tests/stm32/Cargo.toml @@ -10,7 +10,7 @@ stm32c031c6 = ["embassy-stm32/stm32c031c6", "cm0", "not-gpdma"] stm32f103c8 = ["embassy-stm32/stm32f103c8", "not-gpdma"] stm32f207zg = ["embassy-stm32/stm32f207zg", "chrono", "not-gpdma", "eth", "rng"] stm32f303ze = ["embassy-stm32/stm32f303ze", "chrono", "not-gpdma"] -stm32f429zi = ["embassy-stm32/stm32f429zi", "chrono", "eth", "stop", "can", "not-gpdma", "dac", "rng", "cryp"] +stm32f429zi = ["embassy-stm32/stm32f429zi", "chrono", "eth", "stop", "can", "not-gpdma", "dac", "rng"] stm32f446re = ["embassy-stm32/stm32f446re", "chrono", "stop", "can", "not-gpdma", "dac", "sdmmc"] stm32f767zi = ["embassy-stm32/stm32f767zi", "chrono", "not-gpdma", "eth", "rng"] stm32g071rb = ["embassy-stm32/stm32g071rb", "cm0", "not-gpdma", "dac"]