[ENG-361] Crypto crate docs and tests (#572)

* add hashing tests

* add encryption/decryption tests

* remove `reason`

* add file header tests (preview media deserialization is broken)

* fix keyslot reading bug

* add sd-crypto testing to ci

* add tests/constants for all hashing algorithms and param levels

* add blake3-kdf tests

* use `const` arrays for storing expected output

* test for `5MiB` `encrypt_streams` and `decrypt_streams`

* add invalid/mismatched nonce tests

* update `primitives` docs

* remove erroneous `,`

* grammar tweaks

* add errors to `#[should_panic]`

* cleanup `stream` tests

* cleanup hashing tests a little

* function docs
This commit is contained in:
jake
2023-02-16 11:42:30 +00:00
committed by GitHub
parent b856f15b22
commit c7dbc784cd
12 changed files with 1036 additions and 105 deletions

View File

@@ -188,6 +188,9 @@ jobs:
- name: Check core
run: cargo check -p sd-core --release
- name: Cargo test sd-crypto
run: cargo test -p sd-crypto --release --lib --all-features
- name: Bundle Desktop
run: pnpm desktop tauri build

View File

@@ -5,16 +5,16 @@ use tokio::{
io::{self, AsyncReadExt},
};
const BLOCK_SIZE: usize = 1048576;
const BLOCK_LEN: usize = 1048576;
pub async fn file_checksum(path: impl AsRef<Path>) -> Result<String, io::Error> {
let mut reader = File::open(path).await?;
let mut context = Hasher::new();
let mut buffer = vec![0; BLOCK_SIZE].into_boxed_slice();
let mut buffer = vec![0; BLOCK_LEN].into_boxed_slice();
loop {
let read_count = reader.read(&mut buffer).await?;
context.update(&buffer[..read_count]);
if read_count != BLOCK_SIZE {
if read_count != BLOCK_LEN {
break;
}
}

View File

@@ -65,10 +65,10 @@ tokio = { workspace = true, features = [
"macros",
] } # features needed for examples
[[bench]]
name = "aes-256-gcm"
path = "benches/aes-256-gcm.rs"
harness = false
# [[bench]]
# name = "aes-256-gcm"
# path = "benches/aes-256-gcm.rs"
# harness = false
# [[bench]]
# name = "xchacha20-poly1305"

View File

@@ -6,7 +6,7 @@ use std::io::Cursor;
use crate::{
primitives::{
types::{Key, Nonce},
AEAD_TAG_SIZE, BLOCK_SIZE,
AEAD_TAG_LEN, BLOCK_LEN,
},
Error, Protected, Result,
};
@@ -104,7 +104,7 @@ impl StreamEncryption {
/// This function should be used for encrypting large amounts of data.
///
/// The streaming implementation reads blocks of data in `BLOCK_SIZE`, encrypts, and writes to the writer.
/// The streaming implementation reads blocks of data in `BLOCK_LEN`, encrypts, and writes to the writer.
///
/// It requires a reader, a writer, and any AAD to go with it.
///
@@ -119,20 +119,20 @@ impl StreamEncryption {
R: AsyncReadExt + Unpin + Send,
W: AsyncWriteExt + Unpin + Send,
{
let mut read_buffer = vec![0u8; BLOCK_SIZE].into_boxed_slice();
let mut read_buffer = vec![0u8; BLOCK_LEN].into_boxed_slice();
loop {
let mut read_count = 0;
loop {
let i = reader.read(&mut read_buffer[read_count..]).await?;
read_count += i;
if i == 0 || read_count == BLOCK_SIZE {
if i == 0 || read_count == BLOCK_LEN {
// if we're EOF or the buffer is filled
break;
}
}
if read_count == BLOCK_SIZE {
if read_count == BLOCK_LEN {
let payload = Payload {
aad,
msg: &read_buffer,
@@ -231,7 +231,7 @@ impl StreamDecryption {
/// This function should be used for decrypting large amounts of data.
///
/// The streaming implementation reads blocks of data in `BLOCK_SIZE`, decrypts, and writes to the writer.
/// The streaming implementation reads blocks of data in `BLOCK_LEN`, decrypts, and writes to the writer.
///
/// It requires a reader, a writer, and any AAD that was used.
///
@@ -246,20 +246,20 @@ impl StreamDecryption {
R: AsyncReadExt + Unpin + Send,
W: AsyncWriteExt + Unpin + Send,
{
let mut read_buffer = vec![0u8; BLOCK_SIZE + AEAD_TAG_SIZE].into_boxed_slice();
let mut read_buffer = vec![0u8; BLOCK_LEN + AEAD_TAG_LEN].into_boxed_slice();
loop {
let mut read_count = 0;
loop {
let i = reader.read(&mut read_buffer[read_count..]).await?;
read_count += i;
if i == 0 || read_count == (BLOCK_SIZE + AEAD_TAG_SIZE) {
if i == 0 || read_count == (BLOCK_LEN + AEAD_TAG_LEN) {
// if we're EOF or the buffer is filled
break;
}
}
if read_count == (BLOCK_SIZE + AEAD_TAG_SIZE) {
if read_count == (BLOCK_LEN + AEAD_TAG_LEN) {
let payload = Payload {
aad,
msg: &read_buffer,
@@ -304,3 +304,348 @@ impl StreamDecryption {
.map_or_else(Err, |_| Ok(Protected::new(writer.into_inner())))
}
}
// Unit tests for the streaming AEAD encrypt/decrypt APIs (`StreamEncryption` /
// `StreamDecryption`): known-answer tests for single-shot `encrypt_bytes` /
// `decrypt_bytes`, multi-block round trips, and failure cases for missing AAD
// and mismatched nonce types.
#[cfg(test)]
mod tests {
use rand::{RngCore, SeedableRng};
use rand_chacha::ChaCha20Rng;
use super::*;
// Fixed 32-byte key (0x23 repeated) so the expected ciphertext vectors below are stable.
const KEY: Key = Key::new([
0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23,
0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23,
0x23, 0x23,
]);
// Fixed nonces (0xE9 repeated); each variant carries the length its algorithm expects.
const AES_NONCE: Nonce = Nonce::Aes256Gcm([0xE9, 0xE9, 0xE9, 0xE9, 0xE9, 0xE9, 0xE9, 0xE9]);
const XCHACHA_NONCE: Nonce = Nonce::XChaCha20Poly1305([
0xE9, 0xE9, 0xE9, 0xE9, 0xE9, 0xE9, 0xE9, 0xE9, 0xE9, 0xE9, 0xE9, 0xE9, 0xE9, 0xE9, 0xE9,
0xE9, 0xE9, 0xE9, 0xE9, 0xE9,
]);
// 32 bytes of 0x5A used as the message in the known-answer tests.
const PLAINTEXT: [u8; 32] = [
0x5A, 0x5A, 0x5A, 0x5A, 0x5A, 0x5A, 0x5A, 0x5A, 0x5A, 0x5A, 0x5A, 0x5A, 0x5A, 0x5A, 0x5A,
0x5A, 0x5A, 0x5A, 0x5A, 0x5A, 0x5A, 0x5A, 0x5A, 0x5A, 0x5A, 0x5A, 0x5A, 0x5A, 0x5A, 0x5A,
0x5A, 0x5A,
];
// 16 bytes of 0x92 used as the associated data in the AAD tests.
const AAD: [u8; 16] = [
0x92, 0x92, 0x92, 0x92, 0x92, 0x92, 0x92, 0x92, 0x92, 0x92, 0x92, 0x92, 0x92, 0x92, 0x92,
0x92,
];
// for the `const` arrays below, [0] is without AAD, [1] is with AAD
// Expected 48-byte outputs (32-byte plaintext + 16-byte auth tag) for each algorithm.
const AES_BYTES_EXPECTED: [[u8; 48]; 2] = [
[
38, 96, 235, 51, 131, 187, 162, 152, 183, 13, 174, 87, 108, 113, 198, 88, 106, 121,
208, 37, 20, 10, 2, 107, 69, 147, 171, 141, 46, 255, 181, 123, 24, 150, 104, 25, 70,
198, 169, 232, 124, 99, 151, 226, 84, 113, 184, 134,
],
[
38, 96, 235, 51, 131, 187, 162, 152, 183, 13, 174, 87, 108, 113, 198, 88, 106, 121,
208, 37, 20, 10, 2, 107, 69, 147, 171, 141, 46, 255, 181, 123, 172, 121, 35, 145, 71,
115, 203, 224, 20, 183, 1, 99, 223, 230, 255, 76,
],
];
const XCHACHA_BYTES_EXPECTED: [[u8; 48]; 2] = [
[
35, 174, 252, 59, 215, 65, 5, 237, 198, 2, 51, 72, 239, 88, 36, 177, 136, 252, 64, 157,
141, 53, 138, 98, 185, 2, 75, 173, 253, 99, 133, 207, 145, 54, 100, 51, 44, 230, 60, 5,
157, 70, 110, 145, 166, 41, 215, 95,
],
[
35, 174, 252, 59, 215, 65, 5, 237, 198, 2, 51, 72, 239, 88, 36, 177, 136, 252, 64, 157,
141, 53, 138, 98, 185, 2, 75, 173, 253, 99, 133, 207, 110, 4, 255, 118, 55, 88, 24,
170, 101, 74, 104, 122, 105, 216, 225, 243,
],
];
// AES-256-GCM: encrypting the fixed plaintext must reproduce the known-answer vector.
#[tokio::test]
async fn aes_encrypt_bytes() {
let ciphertext =
StreamEncryption::encrypt_bytes(KEY, AES_NONCE, Algorithm::Aes256Gcm, &PLAINTEXT, &[])
.await
.unwrap();
assert_eq!(AES_BYTES_EXPECTED[0].to_vec(), ciphertext)
}
// Same as above, but with AAD — only the auth-tag portion of the vector differs.
#[tokio::test]
async fn aes_encrypt_bytes_with_aad() {
let ciphertext =
StreamEncryption::encrypt_bytes(KEY, AES_NONCE, Algorithm::Aes256Gcm, &PLAINTEXT, &AAD)
.await
.unwrap();
assert_eq!(AES_BYTES_EXPECTED[1].to_vec(), ciphertext)
}
// AES-256-GCM: the known ciphertext must decrypt back to the fixed plaintext.
#[tokio::test]
async fn aes_decrypt_bytes() {
let plaintext = StreamDecryption::decrypt_bytes(
KEY,
AES_NONCE,
Algorithm::Aes256Gcm,
&AES_BYTES_EXPECTED[0],
&[],
)
.await
.unwrap();
assert_eq!(PLAINTEXT.to_vec(), plaintext.expose().to_vec())
}
#[tokio::test]
async fn aes_decrypt_bytes_with_aad() {
let plaintext = StreamDecryption::decrypt_bytes(
KEY,
AES_NONCE,
Algorithm::Aes256Gcm,
&AES_BYTES_EXPECTED[1],
&AAD,
)
.await
.unwrap();
assert_eq!(PLAINTEXT.to_vec(), plaintext.expose().to_vec())
}
// Decrypting AAD-bound ciphertext without supplying that AAD must fail with a
// `Decrypt` error (authentication failure), hence the `should_panic` on `unwrap`.
#[tokio::test]
#[should_panic(expected = "Decrypt")]
async fn aes_decrypt_bytes_missing_aad() {
StreamDecryption::decrypt_bytes(
KEY,
AES_NONCE,
Algorithm::Aes256Gcm,
&AES_BYTES_EXPECTED[1],
&[],
)
.await
.unwrap();
}
// Round-trip `BLOCK_LEN * 5` bytes of seeded-RNG data through the streaming
// encrypt/decrypt APIs and check the output equals the input.
#[tokio::test]
async fn aes_encrypt_and_decrypt_5_blocks() {
let mut buf = vec![0u8; BLOCK_LEN * 5];
ChaCha20Rng::from_entropy().fill_bytes(&mut buf);
let mut reader = Cursor::new(buf.clone());
let mut writer = Cursor::new(Vec::new());
let encryptor = StreamEncryption::new(KEY, AES_NONCE, Algorithm::Aes256Gcm).unwrap();
encryptor
.encrypt_streams(&mut reader, &mut writer, &[])
.await
.unwrap();
// feed the ciphertext back through the decryptor
let mut reader = Cursor::new(writer.into_inner());
let mut writer = Cursor::new(Vec::new());
let decryptor = StreamDecryption::new(KEY, AES_NONCE, Algorithm::Aes256Gcm).unwrap();
decryptor
.decrypt_streams(&mut reader, &mut writer, &[])
.await
.unwrap();
let output = writer.into_inner();
assert_eq!(buf, output);
}
// Same multi-block round trip, with AAD supplied on both sides.
#[tokio::test]
async fn aes_encrypt_and_decrypt_5_blocks_with_aad() {
let mut buf = vec![0u8; BLOCK_LEN * 5];
ChaCha20Rng::from_entropy().fill_bytes(&mut buf);
let mut reader = Cursor::new(buf.clone());
let mut writer = Cursor::new(Vec::new());
let encryptor = StreamEncryption::new(KEY, AES_NONCE, Algorithm::Aes256Gcm).unwrap();
encryptor
.encrypt_streams(&mut reader, &mut writer, &AAD)
.await
.unwrap();
let mut reader = Cursor::new(writer.into_inner());
let mut writer = Cursor::new(Vec::new());
let decryptor = StreamDecryption::new(KEY, AES_NONCE, Algorithm::Aes256Gcm).unwrap();
decryptor
.decrypt_streams(&mut reader, &mut writer, &AAD)
.await
.unwrap();
let output = writer.into_inner();
assert_eq!(buf, output);
}
// XChaCha20-Poly1305 variants of the known-answer tests above.
#[tokio::test]
async fn xchacha_encrypt_bytes() {
let ciphertext = StreamEncryption::encrypt_bytes(
KEY,
XCHACHA_NONCE,
Algorithm::XChaCha20Poly1305,
&PLAINTEXT,
&[],
)
.await
.unwrap();
assert_eq!(XCHACHA_BYTES_EXPECTED[0].to_vec(), ciphertext)
}
#[tokio::test]
async fn xchacha_encrypt_bytes_with_aad() {
let ciphertext = StreamEncryption::encrypt_bytes(
KEY,
XCHACHA_NONCE,
Algorithm::XChaCha20Poly1305,
&PLAINTEXT,
&AAD,
)
.await
.unwrap();
assert_eq!(XCHACHA_BYTES_EXPECTED[1].to_vec(), ciphertext)
}
#[tokio::test]
async fn xchacha_decrypt_bytes() {
let plaintext = StreamDecryption::decrypt_bytes(
KEY,
XCHACHA_NONCE,
Algorithm::XChaCha20Poly1305,
&XCHACHA_BYTES_EXPECTED[0],
&[],
)
.await
.unwrap();
assert_eq!(PLAINTEXT.to_vec(), plaintext.expose().to_vec())
}
#[tokio::test]
async fn xchacha_decrypt_bytes_with_aad() {
let plaintext = StreamDecryption::decrypt_bytes(
KEY,
XCHACHA_NONCE,
Algorithm::XChaCha20Poly1305,
&XCHACHA_BYTES_EXPECTED[1],
&AAD,
)
.await
.unwrap();
assert_eq!(PLAINTEXT.to_vec(), plaintext.expose().to_vec())
}
// Missing AAD must surface as a `Decrypt` error for XChaCha20-Poly1305 as well.
#[tokio::test]
#[should_panic(expected = "Decrypt")]
async fn xchacha_decrypt_bytes_missing_aad() {
StreamDecryption::decrypt_bytes(
KEY,
XCHACHA_NONCE,
Algorithm::XChaCha20Poly1305,
&XCHACHA_BYTES_EXPECTED[1],
&[],
)
.await
.unwrap();
}
// XChaCha20-Poly1305 multi-block round trips, without and with AAD.
#[tokio::test]
async fn xchacha_encrypt_and_decrypt_5_blocks() {
let mut buf = vec![0u8; BLOCK_LEN * 5];
ChaCha20Rng::from_entropy().fill_bytes(&mut buf);
let mut reader = Cursor::new(buf.clone());
let mut writer = Cursor::new(Vec::new());
let encryptor =
StreamEncryption::new(KEY, XCHACHA_NONCE, Algorithm::XChaCha20Poly1305).unwrap();
encryptor
.encrypt_streams(&mut reader, &mut writer, &[])
.await
.unwrap();
let mut reader = Cursor::new(writer.into_inner());
let mut writer = Cursor::new(Vec::new());
let decryptor =
StreamDecryption::new(KEY, XCHACHA_NONCE, Algorithm::XChaCha20Poly1305).unwrap();
decryptor
.decrypt_streams(&mut reader, &mut writer, &[])
.await
.unwrap();
let output = writer.into_inner();
assert_eq!(buf, output);
}
#[tokio::test]
async fn xchacha_encrypt_and_decrypt_5_blocks_with_aad() {
let mut buf = vec![0u8; BLOCK_LEN * 5];
ChaCha20Rng::from_entropy().fill_bytes(&mut buf);
let mut reader = Cursor::new(buf.clone());
let mut writer = Cursor::new(Vec::new());
let encryptor =
StreamEncryption::new(KEY, XCHACHA_NONCE, Algorithm::XChaCha20Poly1305).unwrap();
encryptor
.encrypt_streams(&mut reader, &mut writer, &AAD)
.await
.unwrap();
let mut reader = Cursor::new(writer.into_inner());
let mut writer = Cursor::new(Vec::new());
let decryptor =
StreamDecryption::new(KEY, XCHACHA_NONCE, Algorithm::XChaCha20Poly1305).unwrap();
decryptor
.decrypt_streams(&mut reader, &mut writer, &AAD)
.await
.unwrap();
let output = writer.into_inner();
assert_eq!(buf, output);
}
// Supplying an AES-length nonce to the XChaCha algorithm must be rejected with
// `NonceLengthMismatch` (checked on both the encrypt and decrypt paths).
#[tokio::test]
#[should_panic(expected = "NonceLengthMismatch")]
async fn encrypt_with_invalid_nonce() {
StreamEncryption::encrypt_bytes(
KEY,
AES_NONCE,
Algorithm::XChaCha20Poly1305,
&PLAINTEXT,
&[],
)
.await
.unwrap();
}
#[tokio::test]
#[should_panic(expected = "NonceLengthMismatch")]
async fn decrypt_with_invalid_nonce() {
StreamDecryption::decrypt_bytes(
KEY,
AES_NONCE,
Algorithm::XChaCha20Poly1305,
&XCHACHA_BYTES_EXPECTED[0],
&[],
)
.await
.unwrap();
}
}

View File

@@ -1,4 +1,4 @@
use crate::{primitives::BLOCK_SIZE, Result};
use crate::{primitives::BLOCK_LEN, Result};
use rand::{RngCore, SeedableRng};
use tokio::io::{AsyncReadExt, AsyncSeekExt, AsyncWriteExt};
@@ -7,7 +7,7 @@ use tokio::io::{AsyncReadExt, AsyncSeekExt, AsyncWriteExt};
///
/// It requires the file size, a stream and the number of passes (to overwrite the entire stream with random data)
///
/// It works against `BLOCK_SIZE`.
/// It works against `BLOCK_LEN`.
///
/// Note, it will not be ideal on flash-based storage devices.
/// The drive will be worn down, and due to wear-levelling built into the drive's firmware no tool (short of an ATA secure erase command)
@@ -18,10 +18,10 @@ pub async fn erase<RW>(stream: &mut RW, size: usize, passes: usize) -> Result<()
where
RW: AsyncReadExt + AsyncWriteExt + AsyncSeekExt + Unpin + Send,
{
let block_count = size / BLOCK_SIZE;
let additional = size % BLOCK_SIZE;
let block_count = size / BLOCK_LEN;
let additional = size % BLOCK_LEN;
let mut buf = vec![0u8; BLOCK_SIZE].into_boxed_slice();
let mut buf = vec![0u8; BLOCK_LEN].into_boxed_slice();
let mut end_buf = vec![0u8; additional].into_boxed_slice();
for _ in 0..passes {

View File

@@ -29,7 +29,7 @@
//! // Write the header to the file
//! header.write(&mut writer).unwrap();
//! ```
use std::io::SeekFrom;
use std::io::{Cursor, SeekFrom};
use tokio::io::{AsyncReadExt, AsyncSeekExt, AsyncWriteExt};
@@ -79,6 +79,10 @@ impl FileHeader {
algorithm: Algorithm,
keyslots: Vec<Keyslot>,
) -> Result<Self> {
if keyslots.len() > 2 {
return Err(Error::TooManyKeyslots);
}
let f = Self {
version,
algorithm,
@@ -290,25 +294,26 @@ impl FileHeader {
// read and discard the padding
reader.read_exact(&mut vec![0u8; 25 - nonce.len()]).await?;
let mut keyslot_bytes = [0u8; (KEYSLOT_SIZE * 2)]; // length of 2x keyslots
let mut keyslot_bytes = vec![0u8; KEYSLOT_SIZE * 2]; // length of 2x keyslots
let mut keyslots: Vec<Keyslot> = Vec::new();
reader.read_exact(&mut keyslot_bytes).await?;
let mut keyslot_reader = Cursor::new(keyslot_bytes);
for _ in 0..2 {
Keyslot::from_reader(&mut keyslot_bytes.as_ref())
Keyslot::from_reader(&mut keyslot_reader)
.map(|k| keyslots.push(k))
.ok();
}
let metadata = if let Ok(metadata) = Metadata::from_reader(reader).await {
Ok::<Option<Metadata>, Error>(Some(metadata))
} else {
reader
.seek(SeekFrom::Start(
Self::size(version) as u64 + (KEYSLOT_SIZE * 2) as u64,
))
.await?;
Ok::<Option<Metadata>, Error>(Some(metadata))
} else {
Ok(None)
}?;
@@ -343,3 +348,324 @@ impl FileHeader {
Ok((header, aad))
}
}
// Unit tests for `FileHeader` serialization/deserialization: round trips with
// zero/one/two keyslots, optional preview media and metadata, the keyslot-count
// limit, and AAD consistency between `generate_aad`, `to_bytes`, and
// `from_reader`.
#[cfg(test)]
mod tests {
use std::io::Cursor;
use crate::{
keys::hashing::{HashingAlgorithm, Params},
primitives::{types::Salt, LATEST_FILE_HEADER, LATEST_KEYSLOT, LATEST_PREVIEW_MEDIA},
};
use super::*;
// All tests use XChaCha20-Poly1305 with Argon2id (standard params).
const ALGORITHM: Algorithm = Algorithm::XChaCha20Poly1305;
const HASHING_ALGORITHM: HashingAlgorithm = HashingAlgorithm::Argon2id(Params::Standard);
// Dummy preview-media payload.
const PVM_BYTES: [u8; 4] = [0x01, 0x02, 0x03, 0x04];
// Write a header with one keyslot, read it back, and check the reader stops at
// byte 260 (i.e. the serialized size of a bare header is exactly 260 bytes).
#[tokio::test]
async fn serialize_and_deserialize_header() {
let mk = Key::generate();
let content_salt = Salt::generate();
let hashed_pw = Key::generate(); // not hashed, but that'd be expensive
let mut writer: Cursor<Vec<u8>> = Cursor::new(vec![]);
let header = FileHeader::new(
LATEST_FILE_HEADER,
ALGORITHM,
vec![Keyslot::new(
LATEST_KEYSLOT,
ALGORITHM,
HASHING_ALGORITHM,
content_salt,
hashed_pw,
mk,
)
.await
.unwrap()],
)
.unwrap();
header.write(&mut writer).await.unwrap();
writer.rewind().await.unwrap();
FileHeader::from_reader(&mut writer).await.unwrap();
assert!(writer.position() == 260)
}
// A header with preview media attached must round-trip with the preview media
// present, no metadata, and the single keyslot intact.
#[tokio::test]
async fn serialize_and_deserialize_header_with_preview_media() {
let mk = Key::generate();
let mut writer: Cursor<Vec<u8>> = Cursor::new(vec![]);
let mut header = FileHeader::new(
LATEST_FILE_HEADER,
ALGORITHM,
vec![Keyslot::new(
LATEST_KEYSLOT,
ALGORITHM,
HASHING_ALGORITHM,
Salt::generate(),
Key::generate(),
mk.clone(),
)
.await
.unwrap()],
)
.unwrap();
header
.add_preview_media(LATEST_PREVIEW_MEDIA, ALGORITHM, mk, &PVM_BYTES)
.await
.unwrap();
header.write(&mut writer).await.unwrap();
writer.rewind().await.unwrap();
let (header, _) = FileHeader::from_reader(&mut writer).await.unwrap();
assert!(header.preview_media.is_some());
assert!(header.metadata.is_none());
assert!(header.keyslots.len() == 1);
}
// A header with (serde-serialized) metadata attached must round-trip with the
// metadata present and no preview media.
#[cfg(feature = "serde")]
#[tokio::test]
async fn serialize_and_deserialize_header_with_metadata() {
use crate::primitives::LATEST_METADATA;
// Local stand-in for whatever metadata a caller would attach.
#[derive(serde::Serialize)]
struct Metadata {
pub name: String,
pub favorite: bool,
}
let mk = Key::generate();
let md = Metadata {
name: "file.txt".to_string(),
favorite: true,
};
let mut writer: Cursor<Vec<u8>> = Cursor::new(vec![]);
let mut header = FileHeader::new(
LATEST_FILE_HEADER,
ALGORITHM,
vec![Keyslot::new(
LATEST_KEYSLOT,
ALGORITHM,
HASHING_ALGORITHM,
Salt::generate(),
Key::generate(),
mk.clone(),
)
.await
.unwrap()],
)
.unwrap();
header
.add_metadata(LATEST_METADATA, ALGORITHM, mk, &md)
.await
.unwrap();
header.write(&mut writer).await.unwrap();
writer.rewind().await.unwrap();
let (header, _) = FileHeader::from_reader(&mut writer).await.unwrap();
assert!(header.metadata.is_some());
assert!(header.preview_media.is_none());
assert!(header.keyslots.len() == 1);
}
// Two keyslots (the documented maximum) must survive a round trip.
#[tokio::test]
async fn serialize_and_deserialize_header_with_two_keyslots() {
let mut writer: Cursor<Vec<u8>> = Cursor::new(vec![]);
let mk = Key::generate();
let header = FileHeader::new(
LATEST_FILE_HEADER,
ALGORITHM,
vec![
Keyslot::new(
LATEST_KEYSLOT,
ALGORITHM,
HASHING_ALGORITHM,
Salt::generate(),
Key::generate(),
mk.clone(),
)
.await
.unwrap(),
Keyslot::new(
LATEST_KEYSLOT,
ALGORITHM,
HASHING_ALGORITHM,
Salt::generate(),
Key::generate(),
mk,
)
.await
.unwrap(),
],
)
.unwrap();
header.write(&mut writer).await.unwrap();
writer.rewind().await.unwrap();
let (header, _) = FileHeader::from_reader(&mut writer).await.unwrap();
assert!(header.keyslots.len() == 2);
assert!(header.metadata.is_none());
assert!(header.preview_media.is_none());
}
// `FileHeader::new` must reject more than two keyslots with `TooManyKeyslots`
// (hence the `should_panic` on `unwrap`).
#[tokio::test]
#[should_panic(expected = "TooManyKeyslots")]
async fn serialize_and_deserialize_header_with_too_many_keyslots() {
let mk = Key::generate();
FileHeader::new(
LATEST_FILE_HEADER,
ALGORITHM,
vec![
Keyslot::new(
LATEST_KEYSLOT,
ALGORITHM,
HASHING_ALGORITHM,
Salt::generate(),
Key::generate(),
mk.clone(),
)
.await
.unwrap(),
Keyslot::new(
LATEST_KEYSLOT,
ALGORITHM,
HASHING_ALGORITHM,
Salt::generate(),
Key::generate(),
mk.clone(),
)
.await
.unwrap(),
Keyslot::new(
LATEST_KEYSLOT,
ALGORITHM,
HASHING_ALGORITHM,
Salt::generate(),
Key::generate(),
mk,
)
.await
.unwrap(),
],
)
.unwrap();
}
// Everything at once: two keyslots, metadata, and preview media must all
// survive a round trip together.
#[cfg(feature = "serde")]
#[tokio::test]
async fn serialize_and_deserialize_header_with_all() {
use crate::primitives::LATEST_METADATA;
#[derive(serde::Serialize)]
struct Metadata {
pub name: String,
pub favorite: bool,
}
let mut writer: Cursor<Vec<u8>> = Cursor::new(vec![]);
let mk = Key::generate();
let md = Metadata {
name: "file.txt".to_string(),
favorite: true,
};
let mut header = FileHeader::new(
LATEST_FILE_HEADER,
ALGORITHM,
vec![
Keyslot::new(
LATEST_KEYSLOT,
ALGORITHM,
HASHING_ALGORITHM,
Salt::generate(),
Key::generate(),
mk.clone(),
)
.await
.unwrap(),
Keyslot::new(
LATEST_KEYSLOT,
ALGORITHM,
HASHING_ALGORITHM,
Salt::generate(),
Key::generate(),
mk.clone(),
)
.await
.unwrap(),
],
)
.unwrap();
header
.add_metadata(LATEST_METADATA, ALGORITHM, mk.clone(), &md)
.await
.unwrap();
header
.add_preview_media(LATEST_PREVIEW_MEDIA, ALGORITHM, mk, &PVM_BYTES)
.await
.unwrap();
header.write(&mut writer).await.unwrap();
writer.rewind().await.unwrap();
let (header, _) = FileHeader::from_reader(&mut writer).await.unwrap();
assert!(header.metadata.is_some());
assert!(header.preview_media.is_some());
assert!(header.keyslots.len() == 2);
}
// The AAD returned by `from_reader` must equal both the regenerated AAD and
// the first 36 bytes of the serialized header.
#[tokio::test]
async fn aad_validity() {
let mut writer: Cursor<Vec<u8>> = Cursor::new(vec![]);
let header = FileHeader::new(
LATEST_FILE_HEADER,
ALGORITHM,
vec![Keyslot::new(
LATEST_KEYSLOT,
ALGORITHM,
HASHING_ALGORITHM,
Salt::generate(),
Key::generate(),
Key::generate(),
)
.await
.unwrap()],
)
.unwrap();
header.write(&mut writer).await.unwrap();
writer.rewind().await.unwrap();
let (header, aad) = FileHeader::from_reader(&mut writer).await.unwrap();
assert_eq!(header.generate_aad(), aad);
assert_eq!(&header.to_bytes().unwrap()[..36], aad);
}
}

View File

@@ -23,7 +23,7 @@ use balloon_hash::Balloon;
/// These parameters define the password-hashing level.
///
/// The harder the parameter, the longer the password will take to hash.
/// The greater the parameter, the longer the password will take to hash.
#[derive(Clone, Copy, PartialEq, Eq)]
#[cfg_attr(
feature = "serde",
@@ -77,10 +77,6 @@ impl Params {
pub fn argon2id(&self) -> argon2::Params {
match self {
// We can use `.unwrap()` here as the values are hardcoded, and this shouldn't error
// The values are NOT final, as we need to find a good average.
// It's very hardware dependant but we should aim for at least 64MB of RAM usage on standard
// Provided they all take one (ish) second or longer, and less than 3/4 seconds (for paranoid), they will be fine
// It's not so much the parameters themselves that matter, it's the duration (and ensuring that they use enough RAM to hinder ASIC brute-force attacks)
Self::Standard => argon2::Params::new(131_072, 8, 4, None).unwrap(),
Self::Hardened => argon2::Params::new(262_144, 8, 4, None).unwrap(),
Self::Paranoid => argon2::Params::new(524_288, 8, 4, None).unwrap(),
@@ -89,15 +85,11 @@ impl Params {
/// This function is used to generate parameters for password hashing.
///
/// This should not be called directly. Call it via the `HashingAlgorithm` struct (e.g. `HashingAlgorithm::Argon2id(Params::Standard).hash()`)
/// This should not be called directly. Call it via the `HashingAlgorithm` struct (e.g. `HashingAlgorithm::BalloonBlake3(Params::Standard).hash()`)
#[must_use]
pub fn balloon_blake3(&self) -> balloon_hash::Params {
match self {
// We can use `.unwrap()` here as the values are hardcoded, and this shouldn't error
// The values are NOT final, as we need to find a good average.
// It's very hardware dependant but we should aim for at least 64MB of RAM usage on standard
// Provided they all take one (ish) second or longer, and less than 3/4 seconds (for paranoid), they will be fine
// It's not so much the parameters themselves that matter, it's the duration (and ensuring that they use enough RAM to hinder ASIC brute-force attacks)
Self::Standard => balloon_hash::Params::new(131_072, 2, 1).unwrap(),
Self::Hardened => balloon_hash::Params::new(262_144, 2, 1).unwrap(),
Self::Paranoid => balloon_hash::Params::new(524_288, 2, 1).unwrap(),
@@ -157,3 +149,217 @@ impl PasswordHasher {
.map_or(Err(Error::PasswordHash), |_| Ok(Key::new(key)))
}
}
// Known-answer tests for the password-hashing algorithms (Argon2id and
// Balloon-BLAKE3) at every parameter level, with and without a secret key,
// plus one BLAKE3 key-derivation vector.
#[cfg(test)]
mod tests {
use super::*;
// Fixed derivation context string for the `derive_b3` test.
const TEST_CONTEXT: &str = "spacedrive 2023-02-09 17:44:14 test key derivation";
// One constant per algorithm/parameter-level combination under test.
const ARGON2ID_STANDARD: HashingAlgorithm = HashingAlgorithm::Argon2id(Params::Standard);
const ARGON2ID_HARDENED: HashingAlgorithm = HashingAlgorithm::Argon2id(Params::Hardened);
const ARGON2ID_PARANOID: HashingAlgorithm = HashingAlgorithm::Argon2id(Params::Paranoid);
const B3BALLOON_STANDARD: HashingAlgorithm = HashingAlgorithm::BalloonBlake3(Params::Standard);
const B3BALLOON_HARDENED: HashingAlgorithm = HashingAlgorithm::BalloonBlake3(Params::Hardened);
const B3BALLOON_PARANOID: HashingAlgorithm = HashingAlgorithm::BalloonBlake3(Params::Paranoid);
// The ASCII bytes of "password".
const PASSWORD: [u8; 8] = [0x70, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64];
// Fixed 32-byte key (0x23 repeated) used as input to `Key::derive`.
const KEY: Key = Key::new([
0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23,
0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23, 0x23,
0x23, 0x23,
]);
// Fixed 16-byte salt (0xFF repeated) so the hash vectors below are stable.
const SALT: Salt = Salt([
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF,
]);
// Fixed 18-byte secret key (0x55 repeated) for the `_with_secret` variants.
const SECRET_KEY: SecretKey = SecretKey::new([
0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55,
0x55, 0x55, 0x55,
]);
// for the `const` arrays below, [0] is standard params, [1] is hardened and [2] is paranoid
const HASH_ARGON2ID_EXPECTED: [[u8; 32]; 3] = [
[
194, 153, 245, 125, 12, 102, 65, 30, 254, 191, 9, 125, 4, 113, 99, 209, 162, 43, 140,
93, 217, 220, 222, 46, 105, 48, 123, 220, 180, 103, 20, 11,
],
[
173, 45, 167, 171, 125, 13, 245, 47, 231, 62, 175, 215, 21, 253, 84, 188, 249, 68, 229,
98, 16, 55, 110, 202, 105, 109, 102, 71, 216, 125, 170, 66,
],
[
27, 158, 230, 75, 99, 236, 40, 137, 60, 237, 145, 119, 159, 207, 56, 50, 210, 5, 157,
227, 162, 162, 148, 142, 230, 237, 138, 133, 112, 182, 156, 198,
],
];
const HASH_ARGON2ID_WITH_SECRET_EXPECTED: [[u8; 32]; 3] = [
[
132, 102, 123, 67, 87, 219, 88, 76, 81, 191, 128, 41, 246, 201, 103, 155, 200, 114, 54,
116, 240, 66, 155, 78, 73, 44, 87, 174, 231, 196, 206, 236,
],
[
246, 200, 29, 33, 86, 21, 66, 177, 154, 2, 134, 181, 254, 148, 104, 205, 235, 108, 121,
127, 184, 230, 109, 240, 128, 101, 137, 179, 212, 89, 37, 41,
],
[
3, 60, 179, 196, 172, 30, 0, 201, 15, 9, 213, 59, 37, 219, 173, 134, 132, 166, 32, 60,
33, 216, 3, 249, 185, 120, 110, 14, 155, 242, 134, 215,
],
];
const HASH_B3BALLOON_EXPECTED: [[u8; 32]; 3] = [
[
105, 36, 165, 219, 22, 136, 156, 19, 32, 143, 237, 150, 236, 194, 70, 113, 73, 137,
243, 106, 80, 31, 43, 73, 207, 210, 29, 251, 88, 6, 132, 77,
],
[
179, 71, 60, 122, 54, 72, 132, 209, 146, 96, 15, 115, 41, 95, 5, 75, 214, 135, 6, 122,
82, 42, 158, 9, 117, 19, 19, 40, 48, 233, 207, 237,
],
[
233, 60, 62, 184, 29, 152, 111, 46, 239, 126, 98, 90, 211, 255, 151, 0, 10, 189, 61,
84, 229, 11, 245, 228, 47, 114, 87, 74, 227, 67, 24, 141,
],
];
const HASH_B3BALLOON_WITH_SECRET_EXPECTED: [[u8; 32]; 3] = [
[
188, 0, 43, 39, 137, 199, 91, 142, 97, 31, 98, 6, 130, 75, 251, 71, 150, 109, 29, 62,
237, 171, 210, 22, 139, 108, 94, 190, 91, 74, 134, 47,
],
[
19, 247, 102, 192, 129, 184, 29, 147, 68, 215, 234, 146, 153, 221, 65, 134, 68, 120,
207, 209, 184, 246, 127, 131, 9, 245, 91, 250, 220, 61, 76, 248,
],
[
165, 240, 162, 25, 172, 3, 232, 2, 43, 230, 226, 128, 174, 28, 211, 61, 139, 136, 221,
197, 16, 83, 221, 18, 212, 190, 138, 79, 239, 148, 89, 215,
],
];
// Expected output of `Key::derive(KEY, SALT, TEST_CONTEXT)`.
const DERIVE_B3_EXPECTED: [u8; 32] = [
27, 34, 251, 101, 201, 89, 78, 90, 20, 175, 62, 206, 200, 153, 166, 103, 118, 179, 194, 44,
216, 26, 48, 120, 137, 157, 60, 234, 234, 53, 46, 60,
];
// Each test below hashes PASSWORD with SALT (and optionally SECRET_KEY) and
// compares against the corresponding precomputed vector above.
#[test]
fn hash_argon2id_standard() {
let output = ARGON2ID_STANDARD
.hash(Protected::new(PASSWORD.to_vec()), SALT, None)
.unwrap();
assert_eq!(&HASH_ARGON2ID_EXPECTED[0], output.expose())
}
#[test]
fn hash_argon2id_standard_with_secret() {
let output = ARGON2ID_STANDARD
.hash(Protected::new(PASSWORD.to_vec()), SALT, Some(SECRET_KEY))
.unwrap();
assert_eq!(&HASH_ARGON2ID_WITH_SECRET_EXPECTED[0], output.expose())
}
#[test]
fn hash_argon2id_hardened() {
let output = ARGON2ID_HARDENED
.hash(Protected::new(PASSWORD.to_vec()), SALT, None)
.unwrap();
assert_eq!(&HASH_ARGON2ID_EXPECTED[1], output.expose())
}
#[test]
fn hash_argon2id_hardened_with_secret() {
let output = ARGON2ID_HARDENED
.hash(Protected::new(PASSWORD.to_vec()), SALT, Some(SECRET_KEY))
.unwrap();
assert_eq!(&HASH_ARGON2ID_WITH_SECRET_EXPECTED[1], output.expose())
}
#[test]
fn hash_argon2id_paranoid() {
let output = ARGON2ID_PARANOID
.hash(Protected::new(PASSWORD.to_vec()), SALT, None)
.unwrap();
assert_eq!(&HASH_ARGON2ID_EXPECTED[2], output.expose())
}
#[test]
fn hash_argon2id_paranoid_with_secret() {
let output = ARGON2ID_PARANOID
.hash(Protected::new(PASSWORD.to_vec()), SALT, Some(SECRET_KEY))
.unwrap();
assert_eq!(&HASH_ARGON2ID_WITH_SECRET_EXPECTED[2], output.expose())
}
#[test]
fn hash_b3balloon_standard() {
let output = B3BALLOON_STANDARD
.hash(Protected::new(PASSWORD.to_vec()), SALT, None)
.unwrap();
assert_eq!(&HASH_B3BALLOON_EXPECTED[0], output.expose())
}
#[test]
fn hash_b3balloon_standard_with_secret() {
let output = B3BALLOON_STANDARD
.hash(Protected::new(PASSWORD.to_vec()), SALT, Some(SECRET_KEY))
.unwrap();
assert_eq!(&HASH_B3BALLOON_WITH_SECRET_EXPECTED[0], output.expose())
}
#[test]
fn hash_b3balloon_hardened() {
let output = B3BALLOON_HARDENED
.hash(Protected::new(PASSWORD.to_vec()), SALT, None)
.unwrap();
assert_eq!(&HASH_B3BALLOON_EXPECTED[1], output.expose())
}
#[test]
fn hash_b3balloon_hardened_with_secret() {
let output = B3BALLOON_HARDENED
.hash(Protected::new(PASSWORD.to_vec()), SALT, Some(SECRET_KEY))
.unwrap();
assert_eq!(&HASH_B3BALLOON_WITH_SECRET_EXPECTED[1], output.expose())
}
#[test]
fn hash_b3balloon_paranoid() {
let output = B3BALLOON_PARANOID
.hash(Protected::new(PASSWORD.to_vec()), SALT, None)
.unwrap();
assert_eq!(&HASH_B3BALLOON_EXPECTED[2], output.expose())
}
#[test]
fn hash_b3balloon_paranoid_with_secret() {
let output = B3BALLOON_PARANOID
.hash(Protected::new(PASSWORD.to_vec()), SALT, Some(SECRET_KEY))
.unwrap();
assert_eq!(&HASH_B3BALLOON_WITH_SECRET_EXPECTED[2], output.expose())
}
// BLAKE3 KDF: deriving from KEY/SALT/TEST_CONTEXT must match the fixed vector.
#[test]
fn derive_b3() {
let output = Key::derive(KEY, SALT, TEST_CONTEXT);
assert_eq!(&DERIVE_B3_EXPECTED, output.expose())
}
}

View File

@@ -59,7 +59,9 @@ use super::{
keyring::{Identifier, KeyringInterface},
};
/// This is a stored key, and can be freely written to Prisma/another database.
/// This is a stored key, and can be freely written to the database.
///
/// It contains no sensitive information that is not encrypted.
#[derive(Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "rspc", derive(rspc::Type))]
@@ -79,6 +81,7 @@ pub struct StoredKey {
pub automount: bool,
}
/// This denotes the type of key. `Root` keys can be used to unlock the key manager, and `User` keys are ordinary keys.
#[derive(Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "rspc", derive(rspc::Type))]
@@ -87,6 +90,7 @@ pub enum StoredKeyType {
Root,
}
/// This denotes the `StoredKey` version.
#[derive(Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "rspc", derive(rspc::Type))]
@@ -105,7 +109,7 @@ pub struct MountedKey {
/// This is the key manager itself.
///
/// It contains the keystore, the keymount, the master password and the default key.
/// It contains the keystore, the keymount, the root key and a few other pieces of information.
///
/// Use the associated functions to interact with it.
pub struct KeyManager {
@@ -117,10 +121,8 @@ pub struct KeyManager {
mounting_queue: DashSet<Uuid>,
keyring: Option<Arc<Mutex<KeyringInterface>>>,
}
/// The `KeyManager` functions should be used for all key-related management.
impl KeyManager {
/// Initialize the Key Manager with `StoredKeys` retrieved from Prisma
/// Initialize the Key Manager with `StoredKeys` retrieved from the database.
pub async fn new(stored_keys: Vec<StoredKey>) -> Result<Self> {
let keyring = KeyringInterface::new()
.map(|k| Arc::new(Mutex::new(k)))
@@ -141,7 +143,9 @@ impl KeyManager {
Ok(keymanager)
}
// A returned error here should be treated as `false`
/// This should be used for checking if the secret key contains an item.
///
/// A returned error here should be treated as `false`
pub async fn keyring_contains(&self, library_uuid: Uuid, usage: String) -> Result<()> {
self.get_keyring()?.lock().await.retrieve(Identifier {
application: APP_IDENTIFIER,
@@ -152,7 +156,7 @@ impl KeyManager {
Ok(())
}
// This verifies that the key manager is unlocked before continuing the calling function.
/// This verifies that the key manager is unlocked before continuing the calling function.
pub async fn ensure_unlocked(&self) -> Result<()> {
self.is_unlocked()
.await
@@ -160,20 +164,21 @@ impl KeyManager {
.ok_or(Error::NotUnlocked)
}
// This verifies that the target key is not already queued before continuing the operation.
/// This verifies that the target key is not already queued before continuing the operation.
pub fn ensure_not_queued(&self, uuid: Uuid) -> Result<()> {
(!self.is_queued(uuid))
.then_some(())
.ok_or(Error::KeyAlreadyMounted)
}
// This verifies that the target key is not already mounted before continuing the operation.
/// This verifies that the target key is not already mounted before continuing the operation.
pub fn ensure_not_mounted(&self, uuid: Uuid) -> Result<()> {
(!self.keymount.contains_key(&uuid))
.then_some(())
.ok_or(Error::KeyAlreadyMounted)
}
/// This is used to retrieve an item from OS keyrings
pub async fn keyring_retrieve(
&self,
library_uuid: Uuid,
@@ -211,6 +216,7 @@ impl KeyManager {
Ok(())
}
/// This is used to insert an item into OS keyrings
async fn keyring_insert(
&self,
library_uuid: Uuid,
@@ -237,7 +243,7 @@ impl KeyManager {
/// This should be used to generate everything for the user during onboarding.
///
/// This will create a master password (a 7-word diceware passphrase), and a secret key (16 bytes, hex encoded)
/// This will create a secret key and attempt to store it in OS keyrings.
///
/// It will also generate a verification key, which should be written to the database.
#[allow(clippy::needless_pass_by_value)]
@@ -322,7 +328,7 @@ impl KeyManager {
///
/// It's suitable for when you created the key manager without populating it.
///
/// This also detects the nil-UUID master passphrase verification key
/// This also detects any `Root` type keys, that are used for unlocking the key manager.
pub async fn populate_keystore(&self, stored_keys: Vec<StoredKey>) -> Result<()> {
for key in stored_keys {
if self.keystore.contains_key(&key.uuid) {
@@ -339,7 +345,7 @@ impl KeyManager {
Ok(())
}
/// This function removes a key from the keystore, the keymount and it's unset as the default.
/// This function removes a key from the keystore and keymount, and unsets it as the default (if set).
pub async fn remove_key(&self, uuid: Uuid) -> Result<()> {
self.ensure_unlocked().await?;
@@ -364,6 +370,7 @@ impl KeyManager {
Ok(())
}
/// This is used for changing a master password. It will re-generate a new secret key.
#[allow(clippy::needless_pass_by_value)]
pub async fn change_master_password(
&self,
@@ -447,7 +454,7 @@ impl KeyManager {
/// This re-encrypts master keys so they can be imported from a key backup into the current key manager.
///
/// It returns a `Vec<StoredKey>` so they can be written to Prisma
/// It returns a `Vec<StoredKey>` so they can be written to the database.
#[allow(clippy::needless_pass_by_value)]
pub async fn import_keystore_backup(
&self,
@@ -565,9 +572,7 @@ impl KeyManager {
/// This is used for unlocking the key manager, and requires both the master password and the secret key.
///
/// The master password and secret key are hashed together.
///
/// Only provide the secret key if it should not/can not be sourced from an OS keychain (e.g. web, OS keychains not enabled/available, etc).
/// Only provide the secret key if it should not/can not be sourced from an OS keyring (e.g. web, OS keyrings not enabled/available, etc).
///
/// This minimizes the risk of an attacker obtaining the master password, as both of these are required to unlock the vault (and both should be stored separately).
///
@@ -681,11 +686,7 @@ impl KeyManager {
/// This function does not return a value by design.
///
/// Once a key is mounted, access it with `KeyManager::access()`
///
/// This is to ensure that only functions which require access to the mounted key receive it.
///
/// We could add a log to this, so that the user can view mounts
pub async fn mount(&self, uuid: Uuid) -> Result<()> {
self.ensure_unlocked().await?;
self.ensure_not_mounted(uuid)?;
@@ -757,8 +758,6 @@ impl KeyManager {
}
/// This function is used for getting the key value itself, from a given UUID.
///
/// The master password/salt needs to be present, so we are able to decrypt the key itself from the stored key.
pub async fn get_key(&self, uuid: Uuid) -> Result<Password> {
self.ensure_unlocked().await?;
@@ -799,11 +798,11 @@ impl KeyManager {
///
/// It does not mount the key, it just registers it.
///
/// Once added, you will need to use `KeyManager::access_keystore()` to retrieve it and add it to Prisma.
/// Once added, you will need to use `KeyManager::access_keystore()` to retrieve it and add it to the database.
///
/// You may use the returned ID to identify this key.
/// You may use the returned UUID to identify this key.
///
/// You may optionally provide a content salt, if not one will be generated (used primarily for password-based decryption)
/// You may optionally provide a content salt; if not, one will be generated
#[allow(clippy::needless_pass_by_value)]
pub async fn add_to_keystore(
&self,
@@ -906,14 +905,14 @@ impl KeyManager {
}
}
/// This allows you to get the default key's ID
/// This allows you to get the default key's UUID
pub async fn get_default(&self) -> Result<Uuid> {
self.ensure_unlocked().await?;
self.default.lock().await.ok_or(Error::NoDefaultKeySet)
}
/// This should ONLY be used internally.
/// This should ONLY be used internally, for accessing the root key.
async fn get_root_key(&self) -> Result<Key> {
self.root_key.lock().await.clone().ok_or(Error::NotUnlocked)
}
@@ -926,6 +925,7 @@ impl KeyManager {
.ok_or(Error::NoVerificationKey)
}
/// This is used for checking if a key is memory only.
pub async fn is_memory_only(&self, uuid: Uuid) -> Result<bool> {
self.ensure_unlocked().await?;
@@ -934,6 +934,9 @@ impl KeyManager {
.map_or(Err(Error::KeyNotFound), |v| Ok(v.memory_only))
}
/// This is for changing the automount status of a key in the keystore.
///
/// The database needs to be updated externally
pub async fn change_automount_status(&self, uuid: Uuid, status: bool) -> Result<()> {
self.ensure_unlocked().await?;
@@ -954,8 +957,6 @@ impl KeyManager {
/// This function is for getting an entire collection of hashed keys.
///
/// These are ideal for passing over to decryption functions, as each decryption attempt is negligible, performance wise.
///
/// This means we don't need to keep super specific track of which key goes to which file, and we can just throw all of them at it.
#[must_use]
pub fn enumerate_hashed_keys(&self) -> Vec<Key> {
self.keymount
@@ -987,7 +988,7 @@ impl KeyManager {
Ok(updated_key)
}
/// This function is for removing a previously-added master password
/// This function is for locking the key manager
pub async fn clear_root_key(&self) -> Result<()> {
*self.root_key.lock().await = None;
@@ -1023,24 +1024,29 @@ impl KeyManager {
self.keystore.iter().map(|key| key.clone()).collect()
}
/// This function returns the UUIDs of all mounted keys in the key manager
pub fn get_mounted_uuids(&self) -> Vec<Uuid> {
self.keymount.iter().map(|key| key.uuid).collect()
}
/// This function gets the entire internal key manager queue
pub fn get_queue(&self) -> Vec<Uuid> {
self.mounting_queue.iter().map(|u| *u).collect()
}
/// This function checks to see if a key is queued
pub fn is_queued(&self, uuid: Uuid) -> bool {
self.mounting_queue.contains(&uuid)
}
/// This function checks to see if the key manager is unlocking
pub async fn is_unlocking(&self) -> Result<bool> {
Ok(self
.mounting_queue
.contains(&self.get_verification_key().await?.uuid))
}
/// This function removes a key from the mounting queue (if present)
pub fn remove_from_queue(&self, uuid: Uuid) -> Result<()> {
self.mounting_queue
.remove(&uuid)

View File

@@ -6,6 +6,7 @@ pub mod linux;
#[cfg(any(target_os = "macos", target_os = "ios"))]
pub mod apple;
/// This identifier is platform-agnostic and is used for identifying keys within OS keyrings
#[derive(Clone, Copy)]
pub struct Identifier<'a> {
pub application: &'a str,
@@ -45,6 +46,7 @@ pub trait Keyring {
fn delete(&self, identifier: Identifier) -> Result<()>;
}
/// This should be used to interact with all OS keyrings.
pub struct KeyringInterface {
keyring: Box<dyn Keyring + Send>,
}

View File

@@ -1,3 +1,5 @@
//! This is Spacedrive's `crypto` crate. It handles cryptographic operations
//! such as key hashing, encryption/decryption, key management and much more.
#![forbid(unsafe_code)]
#![warn(clippy::pedantic)]
#![warn(clippy::correctness)]

View File

@@ -1,7 +1,7 @@
//! This module contains constant values and functions that are used around the crate.
//! This module contains constant values, functions and types that are used around the crate.
//!
//! This includes things such as cryptographically-secure random salt/master key/nonce generation,
//! lengths for master keys and even the streaming block size.
//! lengths for master keys and even the STREAM block size.
use zeroize::Zeroize;
use crate::{
@@ -15,47 +15,62 @@ use crate::{
pub mod types;
/// This is the default salt size, and the recommended size for argon2id.
/// This is the salt size.
pub const SALT_LEN: usize = 16;
/// The length of the secret key, in bytes.
pub const SECRET_KEY_LEN: usize = 18;
/// The size used for streaming encryption/decryption. This size seems to offer the best performance compared to alternatives.
/// The block size used for STREAM encryption/decryption. This size seems to offer the best performance compared to alternatives.
///
/// The file size gain is 16 bytes per 1048576 bytes (due to the AEAD tag). Plus the size of the header.
pub const BLOCK_SIZE: usize = 1_048_576;
/// The file size gain is 16 bytes per 1048576 bytes (due to the AEAD tag), plus the size of the header.
pub const BLOCK_LEN: usize = 1_048_576;
pub const AEAD_TAG_SIZE: usize = 16;
/// This is the default AEAD tag size for all encryption algorithms used within the crate.
pub const AEAD_TAG_LEN: usize = 16;
/// The length of the encrypted master key
/// The length of encrypted master keys (`KEY_LEN` + `AEAD_TAG_LEN`)
pub const ENCRYPTED_KEY_LEN: usize = 48;
/// The length of the (unencrypted) master key
/// The length of plain master/hashed keys
pub const KEY_LEN: usize = 32;
pub const PASSPHRASE_LEN: usize = 7;
/// Used for OS keyrings to identify our items.
pub const APP_IDENTIFIER: &str = "Spacedrive";
/// Used for OS keyrings to identify our items.
pub const SECRET_KEY_IDENTIFIER: &str = "Secret key";
/// Defines the latest `FileHeaderVersion`
pub const LATEST_FILE_HEADER: FileHeaderVersion = FileHeaderVersion::V1;
/// Defines the latest `KeyslotVersion`
pub const LATEST_KEYSLOT: KeyslotVersion = KeyslotVersion::V1;
/// Defines the latest `MetadataVersion`
pub const LATEST_METADATA: MetadataVersion = MetadataVersion::V1;
/// Defines the latest `PreviewMediaVersion`
pub const LATEST_PREVIEW_MEDIA: PreviewMediaVersion = PreviewMediaVersion::V1;
/// Defines the latest `StoredKeyVersion`
pub const LATEST_STORED_KEY: StoredKeyVersion = StoredKeyVersion::V1;
pub const ROOT_KEY_CONTEXT: &str = "spacedrive 2022-12-14 12:53:54 root key derivation"; // used for deriving keys from the root key
pub const MASTER_PASSWORD_CONTEXT: &str =
"spacedrive 2022-12-14 15:35:41 master password hash derivation"; // used for deriving keys from the master password hash
pub const FILE_KEY_CONTEXT: &str = "spacedrive 2022-12-14 12:54:12 file key derivation"; // used for deriving keys from user key/content salt hashes (for file encryption)
/// Defines the context string for BLAKE3-KDF in regards to root key derivation
pub const ROOT_KEY_CONTEXT: &str = "spacedrive 2022-12-14 12:53:54 root key derivation";
/// This is used for converting a `Vec<u8>` to an array of bytes
/// Defines the context string for BLAKE3-KDF in regards to master password hash derivation
pub const MASTER_PASSWORD_CONTEXT: &str =
"spacedrive 2022-12-14 15:35:41 master password hash derivation";
/// Defines the context string for BLAKE3-KDF in regards to file key derivation (for file encryption)
pub const FILE_KEY_CONTEXT: &str = "spacedrive 2022-12-14 12:54:12 file key derivation";
/// This is used for converting a `&[u8]` to an array of bytes.
///
/// It's main usage is for converting an encrypted master key from a `Vec<u8>` to `EncryptedKey`
/// It clones the input data internally via `to_vec()`.
///
/// As the master key is encrypted at this point, it does not need to be `Protected<>`
///
/// This function still `zeroize`s any data it can
/// This function calls `zeroize` on any data it can
pub fn to_array<const I: usize>(bytes: &[u8]) -> Result<[u8; I]> {
bytes.to_vec().try_into().map_err(|mut b: Vec<u8>| {
b.zeroize();

View File

@@ -1,9 +1,19 @@
//! This module defines all of the possible types used throughout this crate,
//! in an effort to add additional type safety.
use rand::{RngCore, SeedableRng};
use std::ops::Deref;
use zeroize::Zeroize;
use crate::{crypto::stream::Algorithm, keys::hashing::HashingAlgorithm, Error, Protected};
use super::{to_array, ENCRYPTED_KEY_LEN, KEY_LEN, SALT_LEN, SECRET_KEY_LEN};
#[cfg(feature = "serde")]
use serde_big_array::BigArray;
/// This should be used for providing a nonce to encrypt/decrypt functions.
///
/// You may also generate a nonce for a given algorithm with `Nonce::generate()`
#[derive(Clone, Copy, Eq, PartialEq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "rspc", derive(rspc::Type))]
@@ -67,6 +77,11 @@ impl Deref for Nonce {
}
}
/// This should be used for providing a key to functions.
///
/// It can either be a random key, or a hashed key.
///
/// You may also generate a secure random key with `Key::generate()`
#[derive(Clone)]
pub struct Key(pub Protected<[u8; KEY_LEN]>);
@@ -117,6 +132,9 @@ impl Deref for Key {
}
}
/// This should be used for providing a secret key to functions.
///
/// You may also generate a secret key with `SecretKey::generate()`
#[derive(Clone)]
pub struct SecretKey(pub Protected<[u8; SECRET_KEY_LEN]>);
@@ -147,6 +165,24 @@ impl Deref for SecretKey {
}
}
/// This should be used for passing a secret key string around.
///
/// It is `SECRET_KEY_LEN` bytes, encoded in hex and delimited with `-` every 6 characters.
#[derive(Clone)]
pub struct SecretKeyString(pub Protected<String>);
impl SecretKeyString {
#[must_use]
pub const fn new(v: String) -> Self {
Self(Protected::new(v))
}
#[must_use]
pub const fn expose(&self) -> &String {
self.0.expose()
}
}
impl From<SecretKey> for SecretKeyString {
fn from(v: SecretKey) -> Self {
let hex_string: String = hex::encode_upper(v.0.expose())
@@ -184,6 +220,9 @@ impl From<SecretKeyString> for SecretKey {
}
}
/// This should be used for passing a password around.
///
/// It can be a string of any length.
#[derive(Clone)]
pub struct Password(pub Protected<String>);
@@ -199,25 +238,9 @@ impl Password {
}
}
#[derive(Clone)]
pub struct SecretKeyString(pub Protected<String>);
impl SecretKeyString {
#[must_use]
pub const fn new(v: String) -> Self {
Self(Protected::new(v))
}
#[must_use]
pub const fn expose(&self) -> &String {
self.0.expose()
}
}
#[cfg(feature = "serde")]
use serde_big_array::BigArray;
use super::{to_array, ENCRYPTED_KEY_LEN, KEY_LEN, SALT_LEN, SECRET_KEY_LEN};
/// This should be used for passing an encrypted key around.
///
/// This is always `ENCRYPTED_KEY_LEN` (which is `KEY_LEN` + `AEAD_TAG_LEN`)
#[derive(Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "rspc", derive(rspc::Type))]
@@ -242,6 +265,9 @@ impl TryFrom<Vec<u8>> for EncryptedKey {
}
}
/// This should be used for passing a salt around.
///
/// You may also generate a salt with `Salt::generate()`
#[derive(Clone, PartialEq, Eq, Copy)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "rspc", derive(rspc::Type))]