Merge branch 'main' into rav/signing_keys_upload_response

This commit is contained in:
Richard van der Hoff
2023-05-02 10:54:52 +01:00
committed by GitHub
37 changed files with 844 additions and 578 deletions

View File

@@ -45,6 +45,8 @@ jobs:
- name: Load cache
uses: Swatinem/rust-cache@v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
- name: Get xtask
uses: actions/cache/restore@v3
@@ -115,6 +117,8 @@ jobs:
- name: Load cache
uses: Swatinem/rust-cache@v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
- name: Install Node.js
uses: actions/setup-node@v3
@@ -171,6 +175,8 @@ jobs:
- name: Load cache
uses: Swatinem/rust-cache@v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
- name: Install Node.js
uses: actions/setup-node@v3
@@ -217,6 +223,8 @@ jobs:
- name: Load cache
uses: Swatinem/rust-cache@v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
- name: Get xtask
uses: actions/cache/restore@v3

View File

@@ -89,6 +89,8 @@ jobs:
- name: Load cache
uses: Swatinem/rust-cache@v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
- name: Install nextest
uses: taiki-e/install-action@nextest
@@ -119,6 +121,8 @@ jobs:
- name: Load cache
uses: Swatinem/rust-cache@v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
- name: Install nextest
uses: taiki-e/install-action@nextest
@@ -172,6 +176,8 @@ jobs:
- name: Load cache
uses: Swatinem/rust-cache@v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
- name: Install nextest
uses: taiki-e/install-action@nextest
@@ -294,6 +300,8 @@ jobs:
- name: Load cache
uses: Swatinem/rust-cache@v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
- name: Install nextest
uses: taiki-e/install-action@nextest
@@ -361,6 +369,8 @@ jobs:
- name: Load cache
uses: Swatinem/rust-cache@v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
- name: Get xtask
uses: actions/cache/restore@v3
@@ -388,6 +398,8 @@ jobs:
- name: Load cache
uses: Swatinem/rust-cache@v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
- name: Install nextest
uses: taiki-e/install-action@nextest
@@ -436,7 +448,7 @@ jobs:
# run sliding sync and point it at the postgres container and synapse container.
# the postgres container needs to be above this to make sure it has started prior to this service.
slidingsync:
image: "ghcr.io/matrix-org/sliding-sync:v0.99.0"
image: "ghcr.io/matrix-org/sliding-sync:v0.99.2"
env:
SYNCV3_SERVER: "http://synapse:8008"
SYNCV3_SECRET: "SUPER_CI_SECRET"
@@ -464,6 +476,8 @@ jobs:
- name: Load cache
uses: Swatinem/rust-cache@v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
- name: Install nextest
uses: taiki-e/install-action@nextest

View File

@@ -30,6 +30,8 @@ jobs:
- name: Load cache
uses: Swatinem/rust-cache@v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
- name: Install tarpaulin
uses: taiki-e/install-action@v2

View File

@@ -29,6 +29,8 @@ jobs:
- name: Load cache
uses: Swatinem/rust-cache@v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
# Keep in sync with xtask docs
- name: Build rust documentation

View File

@@ -83,8 +83,6 @@ jobs:
targets: ${{ matrix.target }}
- name: Install Node.js
uses: actions/setup-node@v3
- name: Load cache
uses: Swatinem/rust-cache@v2
- if: ${{ matrix.apt_install }}
run: |
sudo apt-get update

View File

@@ -33,9 +33,6 @@ jobs:
with:
targets: wasm32-unknown-unknown
- name: Load cache
uses: Swatinem/rust-cache@v2
- name: Install Node.js
uses: actions/setup-node@v3
with:

36
Cargo.lock generated
View File

@@ -4336,7 +4336,7 @@ dependencies = [
[[package]]
name = "ruma"
version = "0.8.2"
source = "git+https://github.com/ruma/ruma?rev=0143bd9b9f5dcfcaa835afb76f342c12f014f945#0143bd9b9f5dcfcaa835afb76f342c12f014f945"
source = "git+https://github.com/ruma/ruma?rev=54a4223caa1c1052464ecdba0f1e08f126e07bcd#54a4223caa1c1052464ecdba0f1e08f126e07bcd"
dependencies = [
"assign",
"js_int",
@@ -4351,7 +4351,7 @@ dependencies = [
[[package]]
name = "ruma-appservice-api"
version = "0.8.1"
source = "git+https://github.com/ruma/ruma?rev=0143bd9b9f5dcfcaa835afb76f342c12f014f945#0143bd9b9f5dcfcaa835afb76f342c12f014f945"
source = "git+https://github.com/ruma/ruma?rev=54a4223caa1c1052464ecdba0f1e08f126e07bcd#54a4223caa1c1052464ecdba0f1e08f126e07bcd"
dependencies = [
"js_int",
"ruma-common",
@@ -4362,7 +4362,7 @@ dependencies = [
[[package]]
name = "ruma-client-api"
version = "0.16.2"
source = "git+https://github.com/ruma/ruma?rev=0143bd9b9f5dcfcaa835afb76f342c12f014f945#0143bd9b9f5dcfcaa835afb76f342c12f014f945"
source = "git+https://github.com/ruma/ruma?rev=54a4223caa1c1052464ecdba0f1e08f126e07bcd#54a4223caa1c1052464ecdba0f1e08f126e07bcd"
dependencies = [
"assign",
"bytes",
@@ -4379,7 +4379,7 @@ dependencies = [
[[package]]
name = "ruma-common"
version = "0.11.3"
source = "git+https://github.com/ruma/ruma?rev=0143bd9b9f5dcfcaa835afb76f342c12f014f945#0143bd9b9f5dcfcaa835afb76f342c12f014f945"
source = "git+https://github.com/ruma/ruma?rev=54a4223caa1c1052464ecdba0f1e08f126e07bcd#54a4223caa1c1052464ecdba0f1e08f126e07bcd"
dependencies = [
"base64 0.21.0",
"bytes",
@@ -4412,7 +4412,7 @@ dependencies = [
[[package]]
name = "ruma-federation-api"
version = "0.7.1"
source = "git+https://github.com/ruma/ruma?rev=0143bd9b9f5dcfcaa835afb76f342c12f014f945#0143bd9b9f5dcfcaa835afb76f342c12f014f945"
source = "git+https://github.com/ruma/ruma?rev=54a4223caa1c1052464ecdba0f1e08f126e07bcd#54a4223caa1c1052464ecdba0f1e08f126e07bcd"
dependencies = [
"js_int",
"ruma-common",
@@ -4423,7 +4423,7 @@ dependencies = [
[[package]]
name = "ruma-identifiers-validation"
version = "0.9.1"
source = "git+https://github.com/ruma/ruma?rev=0143bd9b9f5dcfcaa835afb76f342c12f014f945#0143bd9b9f5dcfcaa835afb76f342c12f014f945"
source = "git+https://github.com/ruma/ruma?rev=54a4223caa1c1052464ecdba0f1e08f126e07bcd#54a4223caa1c1052464ecdba0f1e08f126e07bcd"
dependencies = [
"js_int",
"thiserror",
@@ -4432,7 +4432,7 @@ dependencies = [
[[package]]
name = "ruma-macros"
version = "0.11.3"
source = "git+https://github.com/ruma/ruma?rev=0143bd9b9f5dcfcaa835afb76f342c12f014f945#0143bd9b9f5dcfcaa835afb76f342c12f014f945"
source = "git+https://github.com/ruma/ruma?rev=54a4223caa1c1052464ecdba0f1e08f126e07bcd#54a4223caa1c1052464ecdba0f1e08f126e07bcd"
dependencies = [
"once_cell",
"proc-macro-crate",
@@ -4440,14 +4440,14 @@ dependencies = [
"quote",
"ruma-identifiers-validation",
"serde",
"syn 1.0.109",
"syn 2.0.15",
"toml 0.7.3",
]
[[package]]
name = "ruma-push-gateway-api"
version = "0.7.1"
source = "git+https://github.com/ruma/ruma?rev=0143bd9b9f5dcfcaa835afb76f342c12f014f945#0143bd9b9f5dcfcaa835afb76f342c12f014f945"
source = "git+https://github.com/ruma/ruma?rev=54a4223caa1c1052464ecdba0f1e08f126e07bcd#54a4223caa1c1052464ecdba0f1e08f126e07bcd"
dependencies = [
"js_int",
"ruma-common",
@@ -5529,7 +5529,7 @@ checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
[[package]]
name = "uniffi"
version = "0.23.0"
source = "git+https://github.com/mozilla/uniffi-rs?rev=aa91307b6ac27aae6d5c7ad971b762df952d2745#aa91307b6ac27aae6d5c7ad971b762df952d2745"
source = "git+https://github.com/mozilla/uniffi-rs?rev=9e01d2281bb4a603fc9ed6409a02ad1854cdc8fb#9e01d2281bb4a603fc9ed6409a02ad1854cdc8fb"
dependencies = [
"anyhow",
"camino",
@@ -5550,7 +5550,7 @@ dependencies = [
[[package]]
name = "uniffi_bindgen"
version = "0.23.0"
source = "git+https://github.com/mozilla/uniffi-rs?rev=aa91307b6ac27aae6d5c7ad971b762df952d2745#aa91307b6ac27aae6d5c7ad971b762df952d2745"
source = "git+https://github.com/mozilla/uniffi-rs?rev=9e01d2281bb4a603fc9ed6409a02ad1854cdc8fb#9e01d2281bb4a603fc9ed6409a02ad1854cdc8fb"
dependencies = [
"anyhow",
"askama",
@@ -5572,7 +5572,7 @@ dependencies = [
[[package]]
name = "uniffi_build"
version = "0.23.0"
source = "git+https://github.com/mozilla/uniffi-rs?rev=aa91307b6ac27aae6d5c7ad971b762df952d2745#aa91307b6ac27aae6d5c7ad971b762df952d2745"
source = "git+https://github.com/mozilla/uniffi-rs?rev=9e01d2281bb4a603fc9ed6409a02ad1854cdc8fb#9e01d2281bb4a603fc9ed6409a02ad1854cdc8fb"
dependencies = [
"anyhow",
"camino",
@@ -5582,7 +5582,7 @@ dependencies = [
[[package]]
name = "uniffi_checksum_derive"
version = "0.23.0"
source = "git+https://github.com/mozilla/uniffi-rs?rev=aa91307b6ac27aae6d5c7ad971b762df952d2745#aa91307b6ac27aae6d5c7ad971b762df952d2745"
source = "git+https://github.com/mozilla/uniffi-rs?rev=9e01d2281bb4a603fc9ed6409a02ad1854cdc8fb#9e01d2281bb4a603fc9ed6409a02ad1854cdc8fb"
dependencies = [
"quote",
"syn 1.0.109",
@@ -5591,7 +5591,7 @@ dependencies = [
[[package]]
name = "uniffi_core"
version = "0.23.0"
source = "git+https://github.com/mozilla/uniffi-rs?rev=aa91307b6ac27aae6d5c7ad971b762df952d2745#aa91307b6ac27aae6d5c7ad971b762df952d2745"
source = "git+https://github.com/mozilla/uniffi-rs?rev=9e01d2281bb4a603fc9ed6409a02ad1854cdc8fb#9e01d2281bb4a603fc9ed6409a02ad1854cdc8fb"
dependencies = [
"anyhow",
"bytes",
@@ -5606,7 +5606,7 @@ dependencies = [
[[package]]
name = "uniffi_macros"
version = "0.23.0"
source = "git+https://github.com/mozilla/uniffi-rs?rev=aa91307b6ac27aae6d5c7ad971b762df952d2745#aa91307b6ac27aae6d5c7ad971b762df952d2745"
source = "git+https://github.com/mozilla/uniffi-rs?rev=9e01d2281bb4a603fc9ed6409a02ad1854cdc8fb#9e01d2281bb4a603fc9ed6409a02ad1854cdc8fb"
dependencies = [
"bincode",
"camino",
@@ -5624,7 +5624,7 @@ dependencies = [
[[package]]
name = "uniffi_meta"
version = "0.23.0"
source = "git+https://github.com/mozilla/uniffi-rs?rev=aa91307b6ac27aae6d5c7ad971b762df952d2745#aa91307b6ac27aae6d5c7ad971b762df952d2745"
source = "git+https://github.com/mozilla/uniffi-rs?rev=9e01d2281bb4a603fc9ed6409a02ad1854cdc8fb#9e01d2281bb4a603fc9ed6409a02ad1854cdc8fb"
dependencies = [
"anyhow",
"bytes",
@@ -5637,7 +5637,7 @@ dependencies = [
[[package]]
name = "uniffi_testing"
version = "0.23.0"
source = "git+https://github.com/mozilla/uniffi-rs?rev=aa91307b6ac27aae6d5c7ad971b762df952d2745#aa91307b6ac27aae6d5c7ad971b762df952d2745"
source = "git+https://github.com/mozilla/uniffi-rs?rev=9e01d2281bb4a603fc9ed6409a02ad1854cdc8fb#9e01d2281bb4a603fc9ed6409a02ad1854cdc8fb"
dependencies = [
"anyhow",
"camino",
@@ -5936,7 +5936,7 @@ dependencies = [
[[package]]
name = "weedle2"
version = "4.0.0"
source = "git+https://github.com/mozilla/uniffi-rs?rev=aa91307b6ac27aae6d5c7ad971b762df952d2745#aa91307b6ac27aae6d5c7ad971b762df952d2745"
source = "git+https://github.com/mozilla/uniffi-rs?rev=9e01d2281bb4a603fc9ed6409a02ad1854cdc8fb#9e01d2281bb4a603fc9ed6409a02ad1854cdc8fb"
dependencies = [
"nom",
]

View File

@@ -32,8 +32,8 @@ eyeball = "0.6.0"
eyeball-im = "0.2.0"
futures-util = { version = "0.3.26", default-features = false, features = ["alloc"] }
http = "0.2.6"
ruma = { git = "https://github.com/ruma/ruma", rev = "0143bd9b9f5dcfcaa835afb76f342c12f014f945", features = ["client-api-c", "compat-user-id"] }
ruma-common = { git = "https://github.com/ruma/ruma", rev = "0143bd9b9f5dcfcaa835afb76f342c12f014f945" }
ruma = { git = "https://github.com/ruma/ruma", rev = "54a4223caa1c1052464ecdba0f1e08f126e07bcd", features = ["client-api-c", "compat-user-id"] }
ruma-common = { git = "https://github.com/ruma/ruma", rev = "54a4223caa1c1052464ecdba0f1e08f126e07bcd" }
once_cell = "1.16.0"
serde = "1.0.151"
serde_html_form = "0.2.0"
@@ -42,8 +42,8 @@ thiserror = "1.0.38"
tokio = { version = "1.24", default-features = false, features = ["sync"] }
tracing = { version = "0.1.36", default-features = false, features = ["std"] }
tracing-core = "0.1.30"
uniffi = { git = "https://github.com/mozilla/uniffi-rs", rev = "aa91307b6ac27aae6d5c7ad971b762df952d2745" }
uniffi_bindgen = { git = "https://github.com/mozilla/uniffi-rs", rev = "aa91307b6ac27aae6d5c7ad971b762df952d2745" }
uniffi = { git = "https://github.com/mozilla/uniffi-rs", rev = "9e01d2281bb4a603fc9ed6409a02ad1854cdc8fb" }
uniffi_bindgen = { git = "https://github.com/mozilla/uniffi-rs", rev = "9e01d2281bb4a603fc9ed6409a02ad1854cdc8fb" }
vodozemac = { git = "https://github.com/matrix-org/vodozemac", rev = "fb609ca1e4df5a7a818490ae86ac694119e41e71" }
zeroize = "1.3.0"

View File

@@ -1,4 +1,4 @@
use std::{collections::HashMap, iter, ops::DerefMut};
use std::{collections::HashMap, iter, ops::DerefMut, sync::Arc};
use hmac::Hmac;
use matrix_sdk_crypto::{
@@ -12,6 +12,7 @@ use thiserror::Error;
use zeroize::Zeroize;
/// The private part of the backup key, the one used for recovery.
#[derive(uniffi::Object)]
pub struct BackupRecoveryKey {
pub(crate) inner: RecoveryKey,
pub(crate) passphrase_info: Option<PassphraseInfo>,
@@ -62,46 +63,40 @@ pub struct MegolmV1BackupKey {
pub backup_algorithm: String,
}
#[uniffi::export]
impl BackupRecoveryKey {
/// Convert the recovery key to a base 58 encoded string.
pub fn to_base58(&self) -> String {
self.inner.to_base58()
}
/// Convert the recovery key to a base 64 encoded string.
pub fn to_base64(&self) -> String {
self.inner.to_base64()
}
}
impl BackupRecoveryKey {
const KEY_SIZE: usize = 32;
const SALT_SIZE: usize = 32;
const PBKDF_ROUNDS: i32 = 500_000;
}
#[uniffi::export]
impl BackupRecoveryKey {
/// Create a new random [`BackupRecoveryKey`].
#[allow(clippy::new_without_default)]
pub fn new() -> Self {
Self {
#[uniffi::constructor]
pub fn new() -> Arc<Self> {
Arc::new(Self {
inner: RecoveryKey::new()
.expect("Can't gather enough randomness to create a recovery key"),
passphrase_info: None,
}
})
}
/// Try to create a [`BackupRecoveryKey`] from a base 64 encoded string.
pub fn from_base64(key: String) -> Result<Self, DecodeError> {
Ok(Self { inner: RecoveryKey::from_base64(&key)?, passphrase_info: None })
#[uniffi::constructor]
pub fn from_base64(key: String) -> Result<Arc<Self>, DecodeError> {
Ok(Arc::new(Self { inner: RecoveryKey::from_base64(&key)?, passphrase_info: None }))
}
/// Try to create a [`BackupRecoveryKey`] from a base 58 encoded string.
pub fn from_base58(key: String) -> Result<Self, DecodeError> {
Ok(Self { inner: RecoveryKey::from_base58(&key)?, passphrase_info: None })
#[uniffi::constructor]
pub fn from_base58(key: String) -> Result<Arc<Self>, DecodeError> {
Ok(Arc::new(Self { inner: RecoveryKey::from_base58(&key)?, passphrase_info: None }))
}
/// Create a new [`BackupRecoveryKey`] from the given passphrase.
pub fn new_from_passphrase(passphrase: String) -> Self {
#[uniffi::constructor]
pub fn new_from_passphrase(passphrase: String) -> Arc<Self> {
let mut rng = thread_rng();
let salt: String = iter::repeat(())
.map(|()| rng.sample(Alphanumeric))
@@ -113,7 +108,8 @@ impl BackupRecoveryKey {
}
/// Restore a [`BackupRecoveryKey`] from the given passphrase.
pub fn from_passphrase(passphrase: String, salt: String, rounds: i32) -> Self {
#[uniffi::constructor]
pub fn from_passphrase(passphrase: String, salt: String, rounds: i32) -> Arc<Self> {
let mut key = Box::new([0u8; Self::KEY_SIZE]);
let rounds = rounds as u32;
@@ -123,18 +119,25 @@ impl BackupRecoveryKey {
key.zeroize();
Self {
Arc::new(Self {
inner: recovery_key,
passphrase_info: Some(PassphraseInfo {
private_key_salt: salt,
private_key_iterations: rounds as i32,
}),
}
})
}
/// Convert the recovery key to a base 58 encoded string.
pub fn to_base58(&self) -> String {
self.inner.to_base58()
}
/// Convert the recovery key to a base 64 encoded string.
pub fn to_base64(&self) -> String {
self.inner.to_base64()
}
}
#[uniffi::export]
impl BackupRecoveryKey {
/// Get the public part of the backup key.
pub fn megolm_v1_public_key(&self) -> MegolmV1BackupKey {
let public_key = self.inner.megolm_v1_public_key();

View File

@@ -42,7 +42,8 @@ pub enum SignatureError {
UnknownUserIdentity(String),
}
#[derive(Debug, thiserror::Error)]
#[derive(Debug, thiserror::Error, uniffi::Error)]
#[uniffi(flat_error)]
pub enum CryptoStoreError {
#[error("Failed to open the store")]
OpenStore(#[from] OpenStoreError),

View File

@@ -972,7 +972,12 @@ mod test {
migrate(migration_data, path.clone(), None, Box::new(|_, _| {}))?;
let machine = OlmMachine::new("@ganfra146:matrix.org", "DEWRCMENGS", &path, None)?;
let machine = OlmMachine::new(
"@ganfra146:matrix.org".to_owned(),
"DEWRCMENGS".to_owned(),
path,
None,
)?;
assert_eq!(
machine.identity_keys()["ed25519"],

View File

@@ -59,6 +59,7 @@ use crate::{
};
/// A high level state machine that handles E2EE for Matrix.
#[derive(uniffi::Object)]
pub struct OlmMachine {
pub(crate) inner: ManuallyDrop<InnerMachine>,
pub(crate) runtime: Runtime,
@@ -128,6 +129,7 @@ impl From<RustSignatureCheckResult> for SignatureVerification {
}
}
#[uniffi::export]
impl OlmMachine {
/// Create a new `OlmMachine`
///
@@ -142,14 +144,15 @@ impl OlmMachine {
/// * `passphrase` - The passphrase that should be used to encrypt the data
/// at rest in the Sled store. **Warning**, if no passphrase is given, the
/// store and all its data will remain unencrypted.
#[uniffi::constructor]
pub fn new(
user_id: &str,
device_id: &str,
path: &str,
user_id: String,
device_id: String,
path: String,
mut passphrase: Option<String>,
) -> Result<Self, CryptoStoreError> {
let user_id = parse_user_id(user_id)?;
let device_id = device_id.into();
) -> Result<Arc<Self>, CryptoStoreError> {
let user_id = parse_user_id(&user_id)?;
let device_id = device_id.as_str().into();
let runtime = Runtime::new().expect("Couldn't create a tokio runtime");
let store = runtime
@@ -160,41 +163,9 @@ impl OlmMachine {
let inner =
runtime.block_on(InnerMachine::with_store(&user_id, device_id, Arc::new(store)))?;
Ok(OlmMachine { inner: ManuallyDrop::new(inner), runtime })
Ok(Arc::new(OlmMachine { inner: ManuallyDrop::new(inner), runtime }))
}
fn import_room_keys_helper(
&self,
keys: Vec<ExportedRoomKey>,
from_backup: bool,
progress_listener: Box<dyn ProgressListener>,
) -> Result<KeysImportResult, KeyImportError> {
let listener = |progress: usize, total: usize| {
progress_listener.on_progress(progress as i32, total as i32)
};
let result =
self.runtime.block_on(self.inner.import_room_keys(keys, from_backup, listener))?;
Ok(KeysImportResult {
imported: result.imported_count as i64,
total: result.total_count as i64,
keys: result
.keys
.into_iter()
.map(|(r, m)| {
(
r.to_string(),
m.into_iter().map(|(s, k)| (s, k.into_iter().collect())).collect(),
)
})
.collect(),
})
}
}
#[uniffi::export]
impl OlmMachine {
/// Get the user ID of the owner of this `OlmMachine`.
pub fn user_id(&self) -> String {
self.inner.user_id().to_string()
@@ -1402,3 +1373,34 @@ impl OlmMachine {
.into())
}
}
impl OlmMachine {
fn import_room_keys_helper(
&self,
keys: Vec<ExportedRoomKey>,
from_backup: bool,
progress_listener: Box<dyn ProgressListener>,
) -> Result<KeysImportResult, KeyImportError> {
let listener = |progress: usize, total: usize| {
progress_listener.on_progress(progress as i32, total as i32)
};
let result =
self.runtime.block_on(self.inner.import_room_keys(keys, from_backup, listener))?;
Ok(KeysImportResult {
imported: result.imported_count as i64,
total: result.total_count as i64,
keys: result
.keys
.into_iter()
.map(|(r, m)| {
(
r.to_string(),
m.into_iter().map(|(s, k)| (s, k.into_iter().collect())).collect(),
)
})
.collect(),
})
}
}

View File

@@ -8,16 +8,6 @@ callback interface ProgressListener {
void on_progress(i32 progress, i32 total);
};
[Error]
enum CryptoStoreError {
"OpenStore",
"CryptoStore",
"OlmError",
"Serialization",
"InvalidUserId",
"Identifier",
};
dictionary CancelInfo {
string cancel_code;
string reason;
@@ -71,31 +61,9 @@ enum LocalTrust {
"Unset",
};
interface OlmMachine {
[Throws=CryptoStoreError]
constructor(
[ByRef] string user_id,
[ByRef] string device_id,
[ByRef] string path,
string? passphrase
);
};
enum SignatureState {
"Missing",
"Invalid",
"ValidButNotTrusted",
"ValidAndTrusted",
};
interface BackupRecoveryKey {
constructor();
[Name=from_passphrase]
constructor(string passphrase, string salt, i32 rounds);
[Name=new_from_passphrase]
constructor(string passphrase);
[Name=from_base64, Throws=DecodeError]
constructor(string key);
[Name=from_base58, Throws=DecodeError]
constructor(string key);
};

View File

@@ -1,7 +1,10 @@
# v0.1.0-alpha.8
- Extend `OlmDevice.markRequestAsSent` to accept respinses to
- Extend `OlmDevice.markRequestAsSent` to accept responses to
`SigningKeysUploadRequest`s.
- `importCrossSigningKeys`: change the parameters to be individual keys
rather than a `CrossSigningKeyExport` object.
- Make `unused_fallback_keys` optional in `Machine.receive_sync_changes`
# v0.1.0-alpha.7

View File

@@ -19,7 +19,11 @@ cd $(dirname "$0")/..
RUSTFLAGS='-C opt-level=z' WASM_BINDGEN_WEAKREF=1 wasm-pack build --target nodejs --scope matrix-org --out-dir pkg "${WASM_PACK_ARGS[@]}"
# Convert the Wasm into a JS file that exports the base64'ed Wasm.
echo "module.exports = \`$(base64 pkg/matrix_sdk_crypto_js_bg.wasm)\`;" > pkg/matrix_sdk_crypto_js_bg.wasm.js
{
printf 'module.exports = `'
base64 < pkg/matrix_sdk_crypto_js_bg.wasm
printf '`;'
} > pkg/matrix_sdk_crypto_js_bg.wasm.js
# In the JavaScript:
# 1. Strip out the lines that load the WASM, and our new epilogue.

View File

@@ -222,7 +222,7 @@ impl OlmMachine {
to_device_events: &str,
changed_devices: &sync_events::DeviceLists,
one_time_key_counts: &Map,
unused_fallback_keys: &Set,
unused_fallback_keys: Option<Set>,
) -> Result<Promise, JsError> {
let to_device_events = serde_json::from_str(to_device_events)?;
let changed_devices = changed_devices.inner.clone();
@@ -239,13 +239,18 @@ impl OlmMachine {
Some((key, value))
})
.collect();
let unused_fallback_keys: Option<Vec<DeviceKeyAlgorithm>> = Some(
unused_fallback_keys
.values()
.into_iter()
.filter_map(|js_value| Some(DeviceKeyAlgorithm::from(js_value.ok()?.as_string()?)))
.collect(),
);
// Convert the unused_fallback_keys JS Set to a `Vec<DeviceKeyAlgorithm>`
let unused_fallback_keys: Option<Vec<DeviceKeyAlgorithm>> =
unused_fallback_keys.map(|fallback_keys| {
fallback_keys
.values()
.into_iter()
.filter_map(|js_value| {
Some(DeviceKeyAlgorithm::from(js_value.ok()?.as_string()?))
})
.collect()
});
let me = self.inner.clone();
@@ -392,11 +397,11 @@ impl OlmMachine {
/// Export all the private cross signing keys we have.
///
/// The export will contain the seed for the ed25519 keys as a
/// unpadded base64 encoded string.
/// The export will contain the seeds for the ed25519 keys as
/// unpadded base64 encoded strings.
///
/// This method returns None if we dont have any private cross
/// signing keys.
/// Returns `null` if we don't have any private cross signing keys;
/// otherwise returns a `CrossSigningKeyExport`.
#[wasm_bindgen(js_name = "exportCrossSigningKeys")]
pub fn export_cross_signing_keys(&self) -> Promise {
let me = self.inner.clone();
@@ -408,12 +413,22 @@ impl OlmMachine {
/// Import our private cross signing keys.
///
/// The export needs to contain the seed for the ed25519 keys as
/// an unpadded base64 encoded string.
/// The keys should be provided as unpadded-base64-encoded strings.
///
/// Returns a `CrossSigningStatus`.
#[wasm_bindgen(js_name = "importCrossSigningKeys")]
pub fn import_cross_signing_keys(&self, export: store::CrossSigningKeyExport) -> Promise {
pub fn import_cross_signing_keys(
&self,
master_key: Option<String>,
self_signing_key: Option<String>,
user_signing_key: Option<String>,
) -> Promise {
let me = self.inner.clone();
let export = export.inner;
let export = matrix_sdk_crypto::store::CrossSigningKeyExport {
master_key,
self_signing_key,
user_signing_key,
};
future_to_promise(async move {
Ok(me.import_cross_signing_keys(export).await.map(olm::CrossSigningStatus::from)?)

View File

@@ -220,6 +220,19 @@ describe(OlmMachine.name, () => {
expect(receiveSyncChanges).toEqual([]);
});
test("can receive sync changes with unusedFallbackKeys as undefined", async () => {
const m = await machine();
const toDeviceEvents = JSON.stringify([]);
const changedDevices = new DeviceLists();
const oneTimeKeyCounts = new Map();
const receiveSyncChanges = JSON.parse(
await m.receiveSyncChanges(toDeviceEvents, changedDevices, oneTimeKeyCounts, undefined),
);
expect(receiveSyncChanges).toEqual([]);
});
test("can get the outgoing requests that need to be send out", async () => {
const m = await machine();
const toDeviceEvents = JSON.stringify([]);

View File

@@ -1,14 +1,33 @@
namespace matrix_sdk_ffi {};
[Error]
interface ClientError {
Generic(string msg);
};
callback interface ClientDelegate {
void did_receive_auth_error(boolean is_soft_logout);
};
callback interface NotificationDelegate {
void did_receive_notification(NotificationItem notification);
};
interface AuthenticationService {};
interface Span {};
interface NotificationService {};
dictionary NotificationItem {
TimelineEvent event;
string room_id;
string? sender_display_name;
string? sender_avatar_url;
string room_display_name;
string? room_avatar_url;
boolean is_noisy;
boolean is_direct;
boolean is_encrypted;
boolean is_read;
u64 timestamp;
};
interface TimelineEvent {};
dictionary UpdateSummary {
sequence<string> lists;
sequence<string> rooms;
@@ -78,14 +97,6 @@ callback interface SlidingSyncListRoomItemsObserver {
void did_receive_update();
};
interface SlidingSyncListBuilder {
constructor();
};
interface ClientBuilder {
constructor();
};
dictionary CreateRoomParameters {
string? name;
string? topic = null;
@@ -146,14 +157,6 @@ interface MediaSource {
string url();
};
interface AuthenticationService {
constructor(string base_path, string? passphrase, string? custom_sliding_sync_proxy);
};
interface NotificationService {
constructor(string base_path, string user_id);
};
interface SessionVerificationEmoji {};
callback interface SessionVerificationControllerDelegate {
@@ -164,17 +167,3 @@ callback interface SessionVerificationControllerDelegate {
void did_cancel();
void did_finish();
};
interface Span {
constructor(string file, u32 line, u32 column, LogLevel level, string target, string name);
[Name=current]
constructor();
};
enum LogLevel {
"Error",
"Warn",
"Info",
"Debug",
"Trace",
};

View File

@@ -78,44 +78,24 @@ impl HomeserverLoginDetails {
}
}
#[uniffi::export]
impl AuthenticationService {
/// Creates a new service to authenticate a user with.
#[uniffi::constructor]
pub fn new(
base_path: String,
passphrase: Option<String>,
custom_sliding_sync_proxy: Option<String>,
) -> Self {
AuthenticationService {
) -> Arc<Self> {
Arc::new(AuthenticationService {
base_path,
passphrase,
client: RwLock::new(None),
homeserver_details: RwLock::new(None),
custom_sliding_sync_proxy: RwLock::new(custom_sliding_sync_proxy),
}
})
}
/// Get the homeserver login details from a client.
async fn details_from_client(
&self,
client: &Arc<Client>,
) -> Result<HomeserverLoginDetails, AuthenticationError> {
let login_details = join3(
client.async_homeserver(),
client.authentication_issuer(),
client.supports_password_login(),
)
.await;
let url = login_details.0;
let authentication_issuer = login_details.1;
let supports_password_login = login_details.2?;
Ok(HomeserverLoginDetails { url, authentication_issuer, supports_password_login })
}
}
#[uniffi::export]
impl AuthenticationService {
pub fn homeserver_details(&self) -> Option<Arc<HomeserverLoginDetails>> {
self.homeserver_details.read().unwrap().clone()
}
@@ -126,7 +106,7 @@ impl AuthenticationService {
&self,
server_name_or_homeserver_url: String,
) -> Result<(), AuthenticationError> {
let mut builder = Arc::new(ClientBuilder::new()).base_path(self.base_path.clone());
let mut builder = ClientBuilder::new().base_path(self.base_path.clone());
// Attempt discovery as a server name first.
let result = matrix_sdk::sanitize_server_name(&server_name_or_homeserver_url);
@@ -152,7 +132,7 @@ impl AuthenticationService {
return Err(e);
}
// When discovery fails, fallback to the homeserver URL if supplied.
let mut builder = Arc::new(ClientBuilder::new()).base_path(self.base_path.clone());
let mut builder = ClientBuilder::new().base_path(self.base_path.clone());
builder = builder.homeserver_url(server_name_or_homeserver_url);
builder.build_inner()
})?;
@@ -205,7 +185,7 @@ impl AuthenticationService {
sliding_sync_proxy = None;
}
let client = Arc::new(ClientBuilder::new())
let client = ClientBuilder::new()
.base_path(self.base_path.clone())
.passphrase(self.passphrase.clone())
.homeserver_url(homeserver_url)
@@ -258,7 +238,7 @@ impl AuthenticationService {
user_id: whoami.user_id.clone(),
device_id,
};
let client = Arc::new(ClientBuilder::new())
let client = ClientBuilder::new()
.base_path(self.base_path.clone())
.passphrase(self.passphrase.clone())
.homeserver_url(homeserver_url)
@@ -270,3 +250,24 @@ impl AuthenticationService {
Ok(client)
}
}
impl AuthenticationService {
/// Get the homeserver login details from a client.
async fn details_from_client(
&self,
client: &Arc<Client>,
) -> Result<HomeserverLoginDetails, AuthenticationError> {
let login_details = join3(
client.async_homeserver(),
client.authentication_issuer(),
client.supports_password_login(),
)
.await;
let url = login_details.0;
let authentication_issuer = login_details.1;
let supports_password_login = login_details.2?;
Ok(HomeserverLoginDetails { url, authentication_issuer, supports_password_login })
}
}

View File

@@ -32,7 +32,7 @@ use tokio::sync::broadcast::error::RecvError;
use tracing::{debug, error, warn};
use super::{room::Room, session_verification::SessionVerificationController, RUNTIME};
use crate::{client, ClientError};
use crate::{client, ClientError, NotificationItem};
#[derive(Clone, uniffi::Record)]
pub struct PusherIdentifiers {
@@ -105,10 +105,15 @@ pub trait ClientDelegate: Sync + Send {
fn did_receive_auth_error(&self, is_soft_logout: bool);
}
pub trait NotificationDelegate: Sync + Send {
fn did_receive_notification(&self, notification: NotificationItem);
}
#[derive(Clone, uniffi::Object)]
pub struct Client {
pub(crate) client: MatrixClient,
delegate: Arc<RwLock<Option<Box<dyn ClientDelegate>>>>,
notification_delegate: Arc<RwLock<Option<Box<dyn NotificationDelegate>>>>,
session_verification_controller:
Arc<tokio::sync::RwLock<Option<SessionVerificationController>>>,
/// The sliding sync proxy that the client is configured to use by default.
@@ -139,6 +144,7 @@ impl Client {
let client = Client {
client,
delegate: Arc::new(RwLock::new(None)),
notification_delegate: Arc::new(RwLock::new(None)),
session_verification_controller,
sliding_sync_proxy: Arc::new(RwLock::new(None)),
sliding_sync_reset_broadcast_tx: Default::default(),
@@ -547,6 +553,34 @@ impl Client {
Ok(user_profile)
})
}
/// Sets a notification delegate and a handler.
///
/// Sliding sync requires m.room.member (with value $ME) and
/// m.room.power_levels to be registered in order to intercept the events.
/// This function blocks execution and should be dispatched concurrently.
pub fn set_notification_delegate(
&self,
notification_delegate: Option<Box<dyn NotificationDelegate>>,
) {
*self.notification_delegate.write().unwrap() = notification_delegate;
let notification_delegate = Arc::clone(&self.notification_delegate);
let handler = move |notification, room: SdkRoom, _| {
let notification_delegate = Arc::clone(&notification_delegate);
async move {
if let Ok(notification_item) = NotificationItem::new(notification, room).await {
if let Some(notification_delegate) =
notification_delegate.read().unwrap().as_ref()
{
notification_delegate.did_receive_notification(notification_item);
}
}
}
};
RUNTIME.block_on(async move {
self.client.register_notification_handler(handler).await;
})
}
}
#[derive(uniffi::Record)]

View File

@@ -15,7 +15,7 @@ use zeroize::Zeroizing;
use super::{client::Client, RUNTIME};
use crate::{error::ClientError, helpers::unwrap_or_clone_arc};
#[derive(Clone)]
#[derive(Clone, uniffi::Object)]
pub struct ClientBuilder {
base_path: Option<String>,
username: Option<String>,
@@ -28,24 +28,13 @@ pub struct ClientBuilder {
inner: MatrixClientBuilder,
}
impl ClientBuilder {
pub fn new() -> Self {
Self {
base_path: None,
username: None,
server_name: None,
homeserver_url: None,
server_versions: None,
passphrase: Zeroizing::new(None),
user_agent: None,
sliding_sync_proxy: None,
inner: MatrixClient::builder(),
}
}
}
#[uniffi::export]
impl ClientBuilder {
#[uniffi::constructor]
pub fn new() -> Arc<Self> {
Arc::new(Self::default())
}
pub fn base_path(self: Arc<Self>, path: String) -> Arc<Self> {
let mut builder = unwrap_or_clone_arc(self);
builder.base_path = Some(path);
@@ -168,6 +157,16 @@ impl ClientBuilder {
impl Default for ClientBuilder {
fn default() -> Self {
Self::new()
Self {
base_path: None,
username: None,
server_name: None,
homeserver_url: None,
server_versions: None,
passphrase: Zeroizing::new(None),
user_agent: None,
sliding_sync_proxy: None,
inner: MatrixClient::builder(),
}
}
}

View File

@@ -1,6 +1,6 @@
use matrix_sdk::{self, encryption::CryptoStoreError, HttpError, IdParseError, StoreError};
#[derive(thiserror::Error, Debug)]
#[derive(Debug, thiserror::Error, uniffi::Error)]
pub enum ClientError {
#[error("client error: {msg}")]
Generic { msg: String },

View File

@@ -0,0 +1,194 @@
use anyhow::{bail, Context};
use ruma::events::{
AnySyncMessageLikeEvent, AnySyncStateEvent, AnySyncTimelineEvent,
MessageLikeEventContent as RumaMessageLikeEventContent, RedactContent,
RedactedStateEventContent, StaticStateEventContent, SyncMessageLikeEvent, SyncStateEvent,
};
use crate::{ClientError, MembershipState, MessageType};
pub struct TimelineEvent(pub(crate) AnySyncTimelineEvent);
#[uniffi::export]
impl TimelineEvent {
pub fn event_id(&self) -> String {
self.0.event_id().to_string()
}
pub fn sender_id(&self) -> String {
self.0.sender().to_string()
}
pub fn event_type(&self) -> Result<TimelineEventType, ClientError> {
let event_type = match &self.0 {
AnySyncTimelineEvent::MessageLike(event) => {
TimelineEventType::MessageLike { content: event.clone().try_into()? }
}
AnySyncTimelineEvent::State(event) => {
TimelineEventType::State { content: event.clone().try_into()? }
}
};
Ok(event_type)
}
}
#[derive(uniffi::Enum)]
pub enum TimelineEventType {
MessageLike { content: MessageLikeEventContent },
State { content: StateEventContent },
}
#[derive(uniffi::Enum)]
pub enum StateEventContent {
PolicyRuleRoom,
PolicyRuleServer,
PolicyRuleUser,
RoomAliases,
RoomAvatar,
RoomCanonicalAlias,
RoomCreate,
RoomEncryption,
RoomGuestAccess,
RoomHistoryVisibility,
RoomJoinRules,
RoomMemberContent { user_id: String, membership_state: MembershipState },
RoomName,
RoomPinnedEvents,
RoomPowerLevels,
RoomServerAcl,
RoomThirdPartyInvite,
RoomTombstone,
RoomTopic,
SpaceChild,
SpaceParent,
}
impl TryFrom<AnySyncStateEvent> for StateEventContent {
type Error = anyhow::Error;
fn try_from(value: AnySyncStateEvent) -> anyhow::Result<Self> {
let event = match value {
AnySyncStateEvent::PolicyRuleRoom(_) => StateEventContent::PolicyRuleRoom,
AnySyncStateEvent::PolicyRuleServer(_) => StateEventContent::PolicyRuleServer,
AnySyncStateEvent::PolicyRuleUser(_) => StateEventContent::PolicyRuleUser,
AnySyncStateEvent::RoomAliases(_) => StateEventContent::RoomAliases,
AnySyncStateEvent::RoomAvatar(_) => StateEventContent::RoomAvatar,
AnySyncStateEvent::RoomCanonicalAlias(_) => StateEventContent::RoomCanonicalAlias,
AnySyncStateEvent::RoomCreate(_) => StateEventContent::RoomCreate,
AnySyncStateEvent::RoomEncryption(_) => StateEventContent::RoomEncryption,
AnySyncStateEvent::RoomGuestAccess(_) => StateEventContent::RoomGuestAccess,
AnySyncStateEvent::RoomHistoryVisibility(_) => StateEventContent::RoomHistoryVisibility,
AnySyncStateEvent::RoomJoinRules(_) => StateEventContent::RoomJoinRules,
AnySyncStateEvent::RoomMember(content) => {
let state_key = content.state_key().to_string();
let original_content = get_state_event_original_content(content)?;
StateEventContent::RoomMemberContent {
user_id: state_key,
membership_state: original_content.membership.into(),
}
}
AnySyncStateEvent::RoomName(_) => StateEventContent::RoomName,
AnySyncStateEvent::RoomPinnedEvents(_) => StateEventContent::RoomPinnedEvents,
AnySyncStateEvent::RoomPowerLevels(_) => StateEventContent::RoomPowerLevels,
AnySyncStateEvent::RoomServerAcl(_) => StateEventContent::RoomServerAcl,
AnySyncStateEvent::RoomThirdPartyInvite(_) => StateEventContent::RoomThirdPartyInvite,
AnySyncStateEvent::RoomTombstone(_) => StateEventContent::RoomTombstone,
AnySyncStateEvent::RoomTopic(_) => StateEventContent::RoomTopic,
AnySyncStateEvent::SpaceChild(_) => StateEventContent::SpaceChild,
AnySyncStateEvent::SpaceParent(_) => StateEventContent::SpaceParent,
_ => bail!("Unsupported state event"),
};
Ok(event)
}
}
#[derive(uniffi::Enum)]
pub enum MessageLikeEventContent {
CallAnswer,
CallInvite,
CallHangup,
CallCandidates,
KeyVerificationReady,
KeyVerificationStart,
KeyVerificationCancel,
KeyVerificationAccept,
KeyVerificationKey,
KeyVerificationMac,
KeyVerificationDone,
ReactionContent { related_event_id: String },
RoomEncrypted,
RoomMessage { message_type: MessageType },
RoomRedaction,
Sticker,
}
impl TryFrom<AnySyncMessageLikeEvent> for MessageLikeEventContent {
type Error = anyhow::Error;
fn try_from(value: AnySyncMessageLikeEvent) -> anyhow::Result<Self> {
let content = match value {
AnySyncMessageLikeEvent::CallAnswer(_) => MessageLikeEventContent::CallAnswer,
AnySyncMessageLikeEvent::CallInvite(_) => MessageLikeEventContent::CallInvite,
AnySyncMessageLikeEvent::CallHangup(_) => MessageLikeEventContent::CallHangup,
AnySyncMessageLikeEvent::CallCandidates(_) => MessageLikeEventContent::CallCandidates,
AnySyncMessageLikeEvent::KeyVerificationReady(_) => {
MessageLikeEventContent::KeyVerificationReady
}
AnySyncMessageLikeEvent::KeyVerificationStart(_) => {
MessageLikeEventContent::KeyVerificationStart
}
AnySyncMessageLikeEvent::KeyVerificationCancel(_) => {
MessageLikeEventContent::KeyVerificationCancel
}
AnySyncMessageLikeEvent::KeyVerificationAccept(_) => {
MessageLikeEventContent::KeyVerificationAccept
}
AnySyncMessageLikeEvent::KeyVerificationKey(_) => {
MessageLikeEventContent::KeyVerificationKey
}
AnySyncMessageLikeEvent::KeyVerificationMac(_) => {
MessageLikeEventContent::KeyVerificationMac
}
AnySyncMessageLikeEvent::KeyVerificationDone(_) => {
MessageLikeEventContent::KeyVerificationDone
}
AnySyncMessageLikeEvent::Reaction(content) => {
let original_content = get_message_like_event_original_content(content)?;
MessageLikeEventContent::ReactionContent {
related_event_id: original_content.relates_to.event_id.to_string(),
}
}
AnySyncMessageLikeEvent::RoomEncrypted(_) => MessageLikeEventContent::RoomEncrypted,
AnySyncMessageLikeEvent::RoomMessage(content) => {
let original_content = get_message_like_event_original_content(content)?;
MessageLikeEventContent::RoomMessage {
message_type: original_content.msgtype.try_into()?,
}
}
AnySyncMessageLikeEvent::RoomRedaction(_) => MessageLikeEventContent::RoomRedaction,
AnySyncMessageLikeEvent::Sticker(_) => MessageLikeEventContent::Sticker,
_ => bail!("Unsupported Event Type"),
};
Ok(content)
}
}
fn get_state_event_original_content<C>(event: SyncStateEvent<C>) -> anyhow::Result<C>
where
C: StaticStateEventContent + RedactContent + Clone,
<C as RedactContent>::Redacted: RedactedStateEventContent<StateKey = C::StateKey>,
{
let original_content =
event.as_original().context("Failed to get original content")?.content.clone();
Ok(original_content)
}
fn get_message_like_event_original_content<C>(event: SyncMessageLikeEvent<C>) -> anyhow::Result<C>
where
C: RumaMessageLikeEventContent + RedactContent + Clone,
<C as ruma::events::RedactContent>::Redacted: ruma::events::RedactedMessageLikeEventContent,
{
let original_content =
event.as_original().context("Failed to get original content")?.content.clone();
Ok(original_content)
}

View File

@@ -26,6 +26,7 @@ pub mod authentication_service;
pub mod client;
pub mod client_builder;
mod error;
pub mod event;
mod helpers;
pub mod notification_service;
pub mod room;
@@ -39,7 +40,7 @@ use once_cell::sync::Lazy;
use tokio::runtime::Runtime;
// Re-exports for more convenient use inside other submodules
use self::{client::Client, client_builder::ClientBuilder, error::ClientError};
use self::{client::Client, error::ClientError};
pub static RUNTIME: Lazy<Runtime> =
Lazy::new(|| Runtime::new().expect("Can't start Tokio runtime"));
@@ -51,8 +52,8 @@ pub use matrix_sdk::{
pub use platform::*;
pub use self::{
authentication_service::*, client::*, notification_service::*, room::*, room_member::*,
session_verification::*, sliding_sync::*, timeline::*, tracing::*,
authentication_service::*, client::*, event::*, notification_service::*, room::*,
room_member::*, session_verification::*, sliding_sync::*, timeline::*, tracing::*,
};
uniffi::include_scaffolding!("api");

View File

@@ -1,6 +1,59 @@
use std::sync::Arc;
use crate::{error::ClientError, TimelineItem};
use matrix_sdk::room::Room;
use ruma::api::client::push::get_notifications::v3::Notification;
use crate::{error::ClientError, event::TimelineEvent};
pub struct NotificationItem {
pub event: Arc<TimelineEvent>,
pub room_id: String,
pub sender_display_name: Option<String>,
pub sender_avatar_url: Option<String>,
pub room_display_name: String,
pub room_avatar_url: Option<String>,
pub is_noisy: bool,
pub is_direct: bool,
pub is_encrypted: bool,
pub is_read: bool,
pub timestamp: u64,
}
impl NotificationItem {
pub(crate) async fn new(notification: Notification, room: Room) -> anyhow::Result<Self> {
let deserialized_event = notification.event.deserialize()?;
let sender = room.get_member(deserialized_event.sender()).await?;
let mut sender_display_name = None;
let mut sender_avatar_url = None;
if let Some(sender) = sender {
sender_display_name = sender.display_name().map(|s| s.to_owned());
sender_avatar_url = sender.avatar_url().map(|s| s.to_string());
}
let is_noisy =
notification.actions.iter().any(|a| a.sound().is_some() && a.should_notify());
let item = Self {
event: Arc::new(TimelineEvent(deserialized_event)),
room_id: room.room_id().to_string(),
sender_display_name,
sender_avatar_url,
room_display_name: room.display_name().await?.to_string(),
room_avatar_url: room.avatar_url().map(|s| s.to_string()),
is_noisy,
is_direct: room.is_direct().await?,
is_encrypted: room.is_encrypted().await?,
is_read: notification.read,
timestamp: notification.ts.0.into(),
};
Ok(item)
}
}
#[allow(dead_code)]
pub struct NotificationService {
@@ -8,35 +61,18 @@ pub struct NotificationService {
user_id: String,
}
/// Notification item struct.
#[derive(uniffi::Record)]
pub struct NotificationItem {
/// Actual timeline item for the event sent.
pub item: Arc<TimelineItem>,
/// Title of the notification. Usually would be event sender's display name.
pub title: String,
/// Subtitle of the notification. Usually would be the room name for
/// non-direct rooms, and none for direct rooms.
pub subtitle: Option<String>,
/// Flag indicating the notification should play a sound.
pub is_noisy: bool,
/// Avatar url of the room the event sent to (if any).
pub avatar_url: Option<String>,
}
#[uniffi::export]
impl NotificationService {
/// Creates a new notification service.
///
/// Will be used to fetch an event after receiving a notification.
/// Please note that this will be called on a new process than the
/// application context.
pub fn new(base_path: String, user_id: String) -> Self {
Self { base_path, user_id }
#[uniffi::constructor]
pub fn new(base_path: String, user_id: String) -> Arc<Self> {
Arc::new(Self { base_path, user_id })
}
}
#[uniffi::export]
impl NotificationService {
/// Get notification item for a given `room_id `and `event_id`.
///
/// Returns `None` if this notification should not be displayed to the user.

View File

@@ -430,7 +430,7 @@ pub trait SlidingSyncListStateObserver: Sync + Send {
fn did_receive_update(&self, new_state: SlidingSyncState);
}
#[derive(Clone)]
#[derive(Clone, uniffi::Object)]
pub struct SlidingSyncListBuilder {
inner: matrix_sdk::SlidingSyncListBuilder,
}
@@ -471,14 +471,13 @@ impl From<SlidingSyncRequestListFilters> for SyncRequestListFilters {
}
}
impl SlidingSyncListBuilder {
pub fn new() -> Self {
Self { inner: matrix_sdk::SlidingSyncList::builder() }
}
}
#[uniffi::export]
impl SlidingSyncListBuilder {
#[uniffi::constructor]
pub fn new() -> Arc<Self> {
Arc::new(Self { inner: matrix_sdk::SlidingSyncList::builder() })
}
pub fn sync_mode(self: Arc<Self>, mode: SlidingSyncMode) -> Arc<Self> {
let mut builder = unwrap_or_clone_arc(self);
builder.inner = builder.inner.sync_mode(mode);

View File

@@ -1,5 +1,6 @@
use std::sync::Arc;
use anyhow::bail;
use extension_trait::extension_trait;
use eyeball_im::VectorDiff;
use matrix_sdk::room::timeline::{Profile, TimelineDetails};
@@ -449,56 +450,7 @@ pub struct Message(matrix_sdk::room::timeline::Message);
#[uniffi::export]
impl Message {
pub fn msgtype(&self) -> Option<MessageType> {
use matrix_sdk::ruma::events::room::message::MessageType as MTy;
match self.0.msgtype() {
MTy::Emote(c) => Some(MessageType::Emote {
content: EmoteMessageContent {
body: c.body.clone(),
formatted: c.formatted.as_ref().map(Into::into),
},
}),
MTy::Image(c) => Some(MessageType::Image {
content: ImageMessageContent {
body: c.body.clone(),
source: Arc::new(c.source.clone()),
info: c.info.as_deref().map(Into::into),
},
}),
MTy::Audio(c) => Some(MessageType::Audio {
content: AudioMessageContent {
body: c.body.clone(),
source: Arc::new(c.source.clone()),
info: c.info.as_deref().map(Into::into),
},
}),
MTy::Video(c) => Some(MessageType::Video {
content: VideoMessageContent {
body: c.body.clone(),
source: Arc::new(c.source.clone()),
info: c.info.as_deref().map(Into::into),
},
}),
MTy::File(c) => Some(MessageType::File {
content: FileMessageContent {
body: c.body.clone(),
source: Arc::new(c.source.clone()),
info: c.info.as_deref().map(Into::into),
},
}),
MTy::Notice(c) => Some(MessageType::Notice {
content: NoticeMessageContent {
body: c.body.clone(),
formatted: c.formatted.as_ref().map(Into::into),
},
}),
MTy::Text(c) => Some(MessageType::Text {
content: TextMessageContent {
body: c.body.clone(),
formatted: c.formatted.as_ref().map(Into::into),
},
}),
_ => None,
}
self.0.msgtype().clone().try_into().ok()
}
pub fn body(&self) -> String {
@@ -525,6 +477,66 @@ pub enum MessageType {
Text { content: TextMessageContent },
}
impl TryFrom<matrix_sdk::ruma::events::room::message::MessageType> for MessageType {
type Error = anyhow::Error;
fn try_from(
value: matrix_sdk::ruma::events::room::message::MessageType,
) -> anyhow::Result<Self> {
use matrix_sdk::ruma::events::room::message::MessageType as MTy;
let message_type = match value {
MTy::Emote(c) => MessageType::Emote {
content: EmoteMessageContent {
body: c.body.clone(),
formatted: c.formatted.as_ref().map(Into::into),
},
},
MTy::Image(c) => MessageType::Image {
content: ImageMessageContent {
body: c.body.clone(),
source: Arc::new(c.source.clone()),
info: c.info.as_deref().map(Into::into),
},
},
MTy::Audio(c) => MessageType::Audio {
content: AudioMessageContent {
body: c.body.clone(),
source: Arc::new(c.source.clone()),
info: c.info.as_deref().map(Into::into),
},
},
MTy::Video(c) => MessageType::Video {
content: VideoMessageContent {
body: c.body.clone(),
source: Arc::new(c.source.clone()),
info: c.info.as_deref().map(Into::into),
},
},
MTy::File(c) => MessageType::File {
content: FileMessageContent {
body: c.body.clone(),
source: Arc::new(c.source.clone()),
info: c.info.as_deref().map(Into::into),
},
},
MTy::Notice(c) => MessageType::Notice {
content: NoticeMessageContent {
body: c.body.clone(),
formatted: c.formatted.as_ref().map(Into::into),
},
},
MTy::Text(c) => MessageType::Text {
content: TextMessageContent {
body: c.body.clone(),
formatted: c.formatted.as_ref().map(Into::into),
},
},
_ => bail!("Unsupported type"),
};
Ok(message_type)
}
}
#[derive(Clone, uniffi::Record)]
pub struct EmoteMessageContent {
pub body: String,

View File

@@ -1,4 +1,7 @@
use std::{collections::BTreeMap, sync::Mutex};
use std::{
collections::BTreeMap,
sync::{Arc, Mutex},
};
use once_cell::sync::OnceCell;
use tracing::{callsite::DefaultCallsite, field::FieldSet, Callsite};
@@ -94,6 +97,7 @@ fn span_or_event_enabled(callsite: &'static DefaultCallsite) -> bool {
pub struct Span(tracing::Span);
#[uniffi::export]
impl Span {
/// Create a span originating at the given callsite (file, line and column).
///
@@ -119,6 +123,7 @@ impl Span {
/// case it should also be exited on all of them individually; that is,
/// unless you *want* the span to be attached to all further events created
/// on that thread.
#[uniffi::constructor]
pub fn new(
file: String,
line: u32,
@@ -126,7 +131,7 @@ impl Span {
level: LogLevel,
target: String,
name: String,
) -> Self {
) -> Arc<Self> {
static CALLSITES: Mutex<BTreeMap<Location, &'static DefaultCallsite>> =
Mutex::new(BTreeMap::new());
let loc = Location::new(file, line, column);
@@ -143,16 +148,14 @@ impl Span {
tracing::Span::none()
};
Span(span)
Arc::new(Self(span))
}
pub fn current() -> Self {
Self(tracing::Span::current())
#[uniffi::constructor]
pub fn current() -> Arc<Self> {
Arc::new(Self(tracing::Span::current()))
}
}
#[uniffi::export]
impl Span {
fn enter(&self) {
self.0.with_subscriber(|(id, dispatch)| dispatch.enter(id));
}
@@ -166,6 +169,7 @@ impl Span {
}
}
#[derive(uniffi::Enum)]
pub enum LogLevel {
Error,
Warn,

View File

@@ -54,7 +54,7 @@ use ruma::{
MilliSecondsSinceUnixEpoch, OwnedUserId, RoomId, UInt, UserId,
};
use tokio::sync::RwLock;
use tracing::{debug, info, trace, warn};
use tracing::{debug, info, instrument, trace, warn};
use crate::{
deserialized_responses::{AmbiguityChanges, MembersResponse, SyncTimelineEvent},
@@ -297,6 +297,7 @@ impl BaseClient {
}
#[allow(clippy::too_many_arguments)]
#[instrument(skip_all, fields(room_id = ?room_info.room_id))]
pub(crate) async fn handle_timeline(
&self,
room: &Room,
@@ -438,6 +439,7 @@ impl BaseClient {
Ok(timeline)
}
#[instrument(skip_all, fields(room_id = ?room_info.room_id))]
pub(crate) fn handle_invited_state(
&self,
events: &[Raw<AnyStrippedStateEvent>],
@@ -467,6 +469,7 @@ impl BaseClient {
changes.stripped_state.insert(room_info.room_id().to_owned(), state_events);
}
#[instrument(skip_all, fields(room_id = ?room_info.room_id))]
pub(crate) async fn handle_state(
&self,
events: &[Raw<AnySyncStateEvent>],
@@ -478,13 +481,11 @@ impl BaseClient {
let mut user_ids = BTreeSet::new();
let mut profiles = BTreeMap::new();
let room_id = room_info.room_id.clone();
for raw_event in events {
let event = match raw_event.deserialize() {
Ok(e) => e,
Ok(ev) => ev,
Err(e) => {
warn!(?room_id, "Couldn't deserialize state event: {e:?}");
warn!("Couldn't deserialize state event: {e}");
continue;
}
};
@@ -492,7 +493,7 @@ impl BaseClient {
room_info.handle_state_event(&event);
if let AnySyncStateEvent::RoomMember(member) = &event {
ambiguity_cache.handle_event(changes, &room_id, member).await?;
ambiguity_cache.handle_event(changes, &room_info.room_id, member).await?;
match member.membership() {
MembershipState::Join | MembershipState::Invite => {
@@ -516,12 +517,13 @@ impl BaseClient {
.insert(event.state_key().to_owned(), raw_event.clone());
}
changes.profiles.insert((*room_id).to_owned(), profiles);
changes.state.insert((*room_id).to_owned(), state_events);
changes.profiles.insert((*room_info.room_id).to_owned(), profiles);
changes.state.insert((*room_info.room_id).to_owned(), state_events);
Ok(user_ids)
}
#[instrument(skip_all, fields(?room_id))]
pub(crate) async fn handle_room_account_data(
&self,
room_id: &RoomId,
@@ -535,6 +537,7 @@ impl BaseClient {
}
}
#[instrument(skip_all)]
pub(crate) async fn handle_account_data(
&self,
events: &[Raw<AnyGlobalAccountDataEvent>],
@@ -580,6 +583,7 @@ impl BaseClient {
}
#[cfg(feature = "e2e-encryption")]
#[instrument(skip_all)]
pub(crate) async fn preprocess_to_device_events(
&self,
to_device_events: Vec<Raw<ruma::events::AnyToDeviceEvent>>,
@@ -656,6 +660,7 @@ impl BaseClient {
/// # Arguments
///
/// * `response` - The response that we received after a successful sync.
#[instrument(skip_all)]
pub async fn receive_sync_response(
&self,
response: api::sync::sync_events::v3::Response,
@@ -720,13 +725,21 @@ impl BaseClient {
)
.await?;
if let Some(event) =
new_info.ephemeral.events.iter().find_map(|e| match e.deserialize() {
Ok(AnySyncEphemeralRoomEvent::Receipt(event)) => Some(event.content),
_ => None,
})
{
changes.add_receipts(&room_id, event);
for raw in &new_info.ephemeral.events {
match raw.deserialize() {
Ok(AnySyncEphemeralRoomEvent::Receipt(event)) => {
changes.add_receipts(&room_id, event.content);
}
Ok(_) => {}
Err(e) => {
let event_id: Option<String> = raw.get_field("event_id").ok().flatten();
#[rustfmt::skip]
info!(
?room_id, event_id,
"Failed to deserialize ephemeral room event: {e}"
);
}
}
}
if new_info.timeline.limited {
@@ -907,6 +920,7 @@ impl BaseClient {
/// * `room_id` - The room id this response belongs to.
///
/// * `response` - The raw response that was received from the server.
#[instrument(skip_all, fields(?room_id))]
pub async fn receive_members(
&self,
room_id: &RoomId,

View File

@@ -1,13 +1,9 @@
use std::collections::BTreeMap;
#[cfg(feature = "e2e-encryption")]
use std::ops::Deref;
use ruma::{
api::client::sync::sync_events::{
v3::{self, Ephemeral},
v4, DeviceLists,
},
DeviceKeyAlgorithm, UInt,
use ruma::api::client::sync::sync_events::{
v3::{self, Ephemeral},
v4,
};
use tracing::{debug, info, instrument};
@@ -56,61 +52,35 @@ impl BaseClient {
let to_device_events = to_device.as_ref().map(|v4| v4.events.clone()).unwrap_or_default();
// Destructure the single `None` of the E2EE extension into separate objects
// since that's what the `OlmMachine` API expects. Passing in the default
// empty maps and vecs for this is completely fine, since the `OlmMachine`
// assumes empty maps/vecs mean no change in the one-time key counts.
// We declare default values that can be referenced hereinbelow. When we try to
// extract values from `e2ee`, that would be unfortunate to clone the
// value just to pass them (to remove them `e2ee`) as a reference later.
let device_one_time_keys_count = BTreeMap::<DeviceKeyAlgorithm, UInt>::default();
let device_unused_fallback_key_types = None;
let (device_lists, device_one_time_keys_count, device_unused_fallback_key_types) = e2ee
.as_ref()
.map(|e2ee| {
(
e2ee.device_lists.clone(),
&e2ee.device_one_time_keys_count,
&e2ee.device_unused_fallback_key_types,
)
})
.unwrap_or_else(|| {
(
DeviceLists::default(),
&device_one_time_keys_count,
&device_unused_fallback_key_types,
)
});
info!(
to_device_events = to_device_events.len(),
device_one_time_keys_count = device_one_time_keys_count.len(),
device_one_time_keys_count = e2ee.device_one_time_keys_count.len(),
device_unused_fallback_key_types =
device_unused_fallback_key_types.as_ref().map(|v| v.len())
e2ee.device_unused_fallback_key_types.as_ref().map(|v| v.len())
);
// Process the to-device events and other related e2ee data. This returns a list
// of all the to-device events that were passed in but encrypted ones
// were replaced with their decrypted version.
// Passing in the default empty maps and vecs for this is completely fine, since
// the `OlmMachine` assumes empty maps/vecs mean no change in the one-time key
// counts.
#[cfg(feature = "e2e-encryption")]
let to_device_events = {
self.preprocess_to_device_events(
let to_device_events = self
.preprocess_to_device_events(
to_device_events,
&device_lists,
device_one_time_keys_count,
device_unused_fallback_key_types.as_deref(),
&e2ee.device_lists,
&e2ee.device_one_time_keys_count,
e2ee.device_unused_fallback_key_types.as_deref(),
)
.await?
};
.await?;
let store = self.store.clone();
let mut changes = StateChanges::default();
let mut ambiguity_cache = AmbiguityCache::new(store.inner.clone());
if let Some(global_data) = account_data.as_ref() {
self.handle_account_data(&global_data.global, &mut changes).await;
if !account_data.is_empty() {
self.handle_account_data(&account_data.global, &mut changes).await;
}
let push_rules = self.get_push_rules(&changes).await?;
@@ -118,8 +88,7 @@ impl BaseClient {
let mut new_rooms = Rooms::default();
for (room_id, room_data) in rooms {
if !room_data.invite_state.is_empty() {
let invite_states = &room_data.invite_state;
if let Some(invite_state) = &room_data.invite_state {
let room = store.get_or_create_stripped_room(room_id).await;
let mut room_info = room.clone_info();
room_info.mark_state_partially_synced();
@@ -131,11 +100,11 @@ impl BaseClient {
changes.add_room(room_info);
}
self.handle_invited_state(invite_states.as_slice(), &mut room_info, &mut changes);
self.handle_invited_state(invite_state.as_slice(), &mut room_info, &mut changes);
new_rooms.invite.insert(
room_id.clone(),
v3::InvitedRoom::from(v3::InviteState::from(invite_states.clone())),
v3::InvitedRoom::from(v3::InviteState::from(invite_state.clone())),
);
} else {
let room = store.get_or_create_room(room_id, RoomState::Joined).await;
@@ -160,13 +129,9 @@ impl BaseClient {
Default::default()
};
let room_account_data = if let Some(inner_account_data) = &account_data {
if let Some(events) = inner_account_data.rooms.get(room_id) {
self.handle_room_account_data(room_id, events, &mut changes).await;
Some(events.to_vec())
} else {
None
}
let room_account_data = if let Some(events) = account_data.rooms.get(room_id) {
self.handle_room_account_data(room_id, events, &mut changes).await;
Some(events.to_vec())
} else {
None
};
@@ -225,10 +190,18 @@ impl BaseClient {
}
// Process receipts now we have rooms
if let Some(receipts) = &receipts {
for (room_id, receipt_edu) in &receipts.rooms {
if let Ok(receipt_edu) = receipt_edu.deserialize() {
changes.add_receipts(room_id, receipt_edu.content);
for (room_id, raw) in &receipts.rooms {
match raw.deserialize() {
Ok(event) => {
changes.add_receipts(room_id, event.content);
}
Err(e) => {
let event_id: Option<String> = raw.get_field("event_id").ok().flatten();
#[rustfmt::skip]
info!(
?room_id, event_id,
"Failed to deserialize ephemeral room event: {e}"
);
}
}
}
@@ -237,8 +210,8 @@ impl BaseClient {
// because we want to have the push rules in place before we process
// rooms and their events, but we want to create the rooms before we
// process the `m.direct` account data event.
if let Some(global_data) = account_data.as_ref() {
self.handle_account_data(&global_data.global, &mut changes).await;
if !account_data.is_empty() {
self.handle_account_data(&account_data.global, &mut changes).await;
}
// FIXME not yet supported by sliding sync.
@@ -260,7 +233,7 @@ impl BaseClient {
debug!("applied changes");
let device_one_time_keys_count =
device_one_time_keys_count.iter().map(|(k, v)| (k.clone(), (*v).into())).collect();
e2ee.device_one_time_keys_count.iter().map(|(k, v)| (k.clone(), (*v).into())).collect();
Ok(SyncResponse {
rooms: new_rooms,
@@ -268,9 +241,9 @@ impl BaseClient {
notifications: changes.notifications,
// FIXME not yet supported by sliding sync.
presence: Default::default(),
account_data: account_data.as_ref().map(|a| a.global.clone()).unwrap_or_default(),
account_data: account_data.global.clone(),
to_device_events,
device_lists,
device_lists: e2ee.device_lists.clone(),
device_one_time_keys_count,
})
}

View File

@@ -26,7 +26,7 @@ use ruma::{
room::{
encrypted::RoomEncryptedEventContent,
member::{Change, RoomMemberEventContent},
message::{self, MessageType, RoomMessageEventContent, SyncRoomMessageEvent},
message::{self, MessageType, Relation, RoomMessageEventContent, SyncRoomMessageEvent},
redaction::{
OriginalSyncRoomRedactionEvent, RoomRedactionEventContent, SyncRoomRedactionEvent,
},
@@ -946,7 +946,16 @@ impl NewEventTimelineItem {
) -> Self {
let edited = relations.has_replacement();
let edit = relations.replace.and_then(|r| match *r {
AnySyncMessageLikeEvent::RoomMessage(SyncRoomMessageEvent::Original(ev)) => Some(ev),
AnySyncMessageLikeEvent::RoomMessage(SyncRoomMessageEvent::Original(ev)) => match ev
.content
.relates_to
{
Some(Relation::Replacement(re)) => Some(re),
_ => {
error!("got m.room.message event with an edit without a valid m.relate relation");
None
}
},
AnySyncMessageLikeEvent::RoomMessage(SyncRoomMessageEvent::Redacted(_)) => None,
_ => {
error!("got m.room.message event with an edit of a different event type");
@@ -955,7 +964,7 @@ impl NewEventTimelineItem {
});
let content = TimelineItemContent::Message(Message {
msgtype: edit.map_or(c.msgtype, |e| e.content.msgtype),
msgtype: edit.map_or(c.msgtype, |e| e.new_content),
in_reply_to: c.relates_to.and_then(InReplyToDetails::from_relation),
edited,
});

View File

@@ -10,7 +10,6 @@ use ruma::{
self, AccountDataConfig, E2EEConfig, ExtensionsConfig, ReceiptsConfig, ToDeviceConfig,
TypingConfig,
},
assign,
events::TimelineEventType,
OwnedRoomId,
};
@@ -84,18 +83,17 @@ impl SlidingSyncBuilder {
/// does not matter.
pub fn with_common_extensions(mut self) -> Self {
{
let mut cfg = self.extensions.get_or_insert_with(Default::default);
if cfg.to_device.is_none() {
cfg.to_device = Some(assign!(ToDeviceConfig::default(), { enabled: Some(true) }));
let cfg = self.extensions.get_or_insert_with(Default::default);
if cfg.to_device.enabled.is_none() {
cfg.to_device.enabled = Some(true);
}
if cfg.e2ee.is_none() {
cfg.e2ee = Some(assign!(E2EEConfig::default(), { enabled: Some(true) }));
if cfg.e2ee.enabled.is_none() {
cfg.e2ee.enabled = Some(true);
}
if cfg.account_data.is_none() {
cfg.account_data =
Some(assign!(AccountDataConfig::default(), { enabled: Some(true) }));
if cfg.account_data.enabled.is_none() {
cfg.account_data.enabled = Some(true);
}
}
self
@@ -108,26 +106,25 @@ impl SlidingSyncBuilder {
/// does not matter.
pub fn with_all_extensions(mut self) -> Self {
{
let mut cfg = self.extensions.get_or_insert_with(Default::default);
if cfg.to_device.is_none() {
cfg.to_device = Some(assign!(ToDeviceConfig::default(), { enabled: Some(true) }));
let cfg = self.extensions.get_or_insert_with(Default::default);
if cfg.to_device.enabled.is_none() {
cfg.to_device.enabled = Some(true);
}
if cfg.e2ee.is_none() {
cfg.e2ee = Some(assign!(E2EEConfig::default(), { enabled: Some(true) }));
if cfg.e2ee.enabled.is_none() {
cfg.e2ee.enabled = Some(true);
}
if cfg.account_data.is_none() {
cfg.account_data =
Some(assign!(AccountDataConfig::default(), { enabled: Some(true) }));
if cfg.account_data.enabled.is_none() {
cfg.account_data.enabled = Some(true);
}
if cfg.receipts.is_none() {
cfg.receipts = Some(assign!(ReceiptsConfig::default(), { enabled: Some(true) }));
if cfg.receipts.enabled.is_none() {
cfg.receipts.enabled = Some(true);
}
if cfg.typing.is_none() {
cfg.typing = Some(assign!(TypingConfig::default(), { enabled: Some(true) }));
if cfg.typing.enabled.is_none() {
cfg.typing.enabled = Some(true);
}
}
self
@@ -135,61 +132,62 @@ impl SlidingSyncBuilder {
/// Set the E2EE extension configuration.
pub fn with_e2ee_extension(mut self, e2ee: E2EEConfig) -> Self {
self.extensions.get_or_insert_with(Default::default).e2ee = Some(e2ee);
self.extensions.get_or_insert_with(Default::default).e2ee = e2ee;
self
}
/// Unset the E2EE extension configuration.
pub fn without_e2ee_extension(mut self) -> Self {
self.extensions.get_or_insert_with(Default::default).e2ee = None;
self.extensions.get_or_insert_with(Default::default).e2ee = E2EEConfig::default();
self
}
/// Set the ToDevice extension configuration.
pub fn with_to_device_extension(mut self, to_device: ToDeviceConfig) -> Self {
self.extensions.get_or_insert_with(Default::default).to_device = Some(to_device);
self.extensions.get_or_insert_with(Default::default).to_device = to_device;
self
}
/// Unset the ToDevice extension configuration.
pub fn without_to_device_extension(mut self) -> Self {
self.extensions.get_or_insert_with(Default::default).to_device = None;
self.extensions.get_or_insert_with(Default::default).to_device = ToDeviceConfig::default();
self
}
/// Set the account data extension configuration.
pub fn with_account_data_extension(mut self, account_data: AccountDataConfig) -> Self {
self.extensions.get_or_insert_with(Default::default).account_data = Some(account_data);
self.extensions.get_or_insert_with(Default::default).account_data = account_data;
self
}
/// Unset the account data extension configuration.
pub fn without_account_data_extension(mut self) -> Self {
self.extensions.get_or_insert_with(Default::default).account_data = None;
self.extensions.get_or_insert_with(Default::default).account_data =
AccountDataConfig::default();
self
}
/// Set the Typing extension configuration.
pub fn with_typing_extension(mut self, typing: TypingConfig) -> Self {
self.extensions.get_or_insert_with(Default::default).typing = Some(typing);
self.extensions.get_or_insert_with(Default::default).typing = typing;
self
}
/// Unset the Typing extension configuration.
pub fn without_typing_extension(mut self) -> Self {
self.extensions.get_or_insert_with(Default::default).typing = None;
self.extensions.get_or_insert_with(Default::default).typing = TypingConfig::default();
self
}
/// Set the Receipt extension configuration.
pub fn with_receipt_extension(mut self, receipt: ReceiptsConfig) -> Self {
self.extensions.get_or_insert_with(Default::default).receipts = Some(receipt);
self.extensions.get_or_insert_with(Default::default).receipts = receipt;
self
}
/// Unset the Receipt extension configuration.
pub fn without_receipt_extension(mut self) -> Self {
self.extensions.get_or_insert_with(Default::default).receipts = None;
self.extensions.get_or_insert_with(Default::default).receipts = ReceiptsConfig::default();
self
}

View File

@@ -174,9 +174,8 @@ pub(super) async fn restore_sliding_sync_state(
// Let's update the `SlidingSync`.
if let Some(since) = to_device_since {
if let Some(to_device_ext) =
extensions.get_or_insert_with(Default::default).to_device.as_mut()
{
let to_device_ext = &mut extensions.get_or_insert_with(Default::default).to_device;
if to_device_ext.enabled == Some(true) {
to_device_ext.since = Some(since);
}
}

View File

@@ -615,7 +615,7 @@ fn apply_sync_operations(
// > knew about entries in this range.
v4::SlidingOp::Sync => {
// Extract `start` and `end` from the operation's range.
let (start, mut end) = operation
let (start, end) = operation
.range
.ok_or_else(|| {
Error::BadResponse(
@@ -629,23 +629,6 @@ fn apply_sync_operations(
)
})?;
// The `end` bound of the range might not be correct… At the time of writing,
// there is a bug in the Sliding Sync Proxy that can return
// ranges greater than the `room_list` size.
//
// For example, if the client asks for a range `0..=9`, and there is only one
// room, the server will reply with one `room_id` (which is correct) but with
// the range `0..=9` instead of `0..=0`.
//
// So, a safe workaround is to take the minimum between `end` and the
// `room_list`'s length.
//
// The “safety” is ensured by the fact we also compare the size of the new range
// with the size of the `operation.room_ids` length later on.
//
// See https://github.com/matrix-org/sliding-sync/issues/52.
end = min(end, room_list.len());
// Range is invalid.
if start > end {
return Err(Error::BadResponse(format!(
@@ -654,7 +637,16 @@ fn apply_sync_operations(
)));
}
let mut room_entry_range = start..end;
// Range is too big.
if end > room_list.len() {
return Err(Error::BadResponse(format!(
"`range` is out of the `rooms_list`'s bounds ({} > {})",
end,
room_list.len(),
)));
}
let room_entry_range = start..end;
// `room_ids` is absent.
if operation.room_ids.is_empty() {
@@ -667,18 +659,6 @@ fn apply_sync_operations(
// Mismatch between the `range` and `room_ids`.
if room_entry_range.len() != room_ids.len() {
// Because of https://github.com/matrix-org/sliding-sync/issues/52, we
// can't trust the `range` returned by the server. That's a
// problem. Let's try to work around
// that.
//
// Let's pretend the `start` bound of the range is… correct.
room_entry_range = start..room_ids.len();
// Once the bug is fixed on the Sliding Sync Proxy side, we
// can remove this code, and uncomment
// the code below.
/*
return Err(Error::BadResponse(
format!(
"There is a mismatch between the number of items in `range` and `room_ids` ({} != {})",
@@ -686,7 +666,6 @@ fn apply_sync_operations(
room_ids.len(),
)
));
*/
}
// Update parts `room_list`.
@@ -783,7 +762,7 @@ fn apply_sync_operations(
// > arrive from the server.
v4::SlidingOp::Invalidate => {
// Extract `start` and `end` from the operation's range.
let (start, mut end) = operation
let (start, end) = operation
.range
.ok_or_else(|| {
Error::BadResponse(
@@ -797,23 +776,6 @@ fn apply_sync_operations(
)
})?;
// The `end` bound of the range might not be correct… At the time of writing,
// there is a bug in the Sliding Sync Proxy that can return
// ranges greater than the `room_list` size.
//
// For example, if the client asks for a range `0..=9`, and there is only one
// room, the server will reply with one `room_id` (which is correct) but with
// the range `0..=9` instead of `0..=0`.
//
// So, a safe workaround is to take the minimum between `end` and the
// `room_list`'s length.
//
// The “safety” is ensured by the fact we also compare the size of the new range
// with the size of the `operation.room_ids` length later on.
//
// See https://github.com/matrix-org/sliding-sync/issues/52.
end = min(end, room_list.len());
// Range is invalid.
if start > end {
return Err(Error::BadResponse(format!(
@@ -822,6 +784,15 @@ fn apply_sync_operations(
)));
}
// Range is too big.
if end > room_list.len() {
return Err(Error::BadResponse(format!(
"`range` is out of the `room_list`' bounds ({} > {})",
end,
room_list.len(),
)));
}
let room_entry_range = start..end;
// Invalidate parts of `room_list`.
@@ -1781,6 +1752,7 @@ mod tests {
macro_rules! assert_sync_operations {
(
$assert_description:literal :
room_list = [ $( $room_list_entries:tt )* ],
sync_operations = [
$(
@@ -1817,8 +1789,13 @@ mod tests {
let result = apply_sync_operations(operations, &mut room_list, &mut rooms_that_have_received_an_update);
assert!(result.$result());
assert_eq!(*room_list, entries![ $( $expected_room_list_entries )* ]);
assert!(result.$result(), "{}; assert the `Result`", $assert_description);
assert_eq!(
*room_list,
entries![ $( $expected_room_list_entries )* ],
"{}; asserting the `room_list`",
$assert_description,
);
$(
#[allow(unused_mut)]
@@ -1830,15 +1807,20 @@ mod tests {
)*
}
assert_eq!(rooms_that_have_received_an_update, expected_rooms_that_have_received_an_update);
assert_eq!(
rooms_that_have_received_an_update,
expected_rooms_that_have_received_an_update,
"{}; asserting the rooms that have received an update",
$assert_description,
);
)?
};
}
#[test]
fn test_sync_operations_sync() {
// All room list is updated.
assert_sync_operations! {
"All room list is updated":
room_list = [E, E, E, F("!r3:x.y")],
sync_operations = [
{
@@ -1854,8 +1836,8 @@ mod tests {
rooms = ["!r3:x.y"],
};
// Partial update.
assert_sync_operations! {
"Partial update":
room_list = [E, E, E],
sync_operations = [
{
@@ -1868,7 +1850,9 @@ mod tests {
result = is_ok,
room_list = [F("!r0:x.y"), F("!r1:x.y"), E],
};
assert_sync_operations! {
"Partial update":
room_list = [E, E, E],
sync_operations = [
{
@@ -1882,26 +1866,23 @@ mod tests {
room_list = [E, F("!r1:x.y"), F("!r2:x.y")],
};
// The range returned by the server is too large compared to the `room_ids` but
// we can fix it on-the-fly.
//
// See https://github.com/matrix-org/sliding-sync/issues/52.
assert_sync_operations! {
"The range returned by the server is too large compared to the `room_ids`":
room_list = [E],
sync_operations = [
{
"op": SlidingOp::Sync,
"range": [0, 9], // <- it should be [0, 0]
"range": [0, 2], // <- it should be [0, 0]
"room_ids": ["!r0:x.y"],
}
]
=>
result = is_ok, // <- because we have fixed it
room_list = [F("!r0:x.y")],
result = is_err,
room_list = [E],
};
// Missing `range`.
assert_sync_operations! {
"Missing `range`":
room_list = [E, E, E],
sync_operations = [
{
@@ -1914,8 +1895,8 @@ mod tests {
room_list = [E, E, E],
};
// Invalid `range`.
assert_sync_operations! {
"Invalid `range`":
room_list = [E, E, E],
sync_operations = [
{
@@ -1929,8 +1910,8 @@ mod tests {
room_list = [E, E, E],
};
// Missing `room_ids`.
assert_sync_operations! {
"Missing `room_ids`":
room_list = [E, E, E],
sync_operations = [
{
@@ -1943,9 +1924,9 @@ mod tests {
room_list = [E, E, E],
};
// Out of bounds operation.
assert_sync_operations! {
room_list = [E, E, E],
"Out of bounds operation":
room_list = [E, F("!r1:x.y"), E],
sync_operations = [
{
"op": SlidingOp::Sync,
@@ -1954,19 +1935,12 @@ mod tests {
}
]
=>
// As soon https://github.com/matrix-org/sliding-sync/issues/52
// is fixed, let's uncomment the real test.
result = is_ok,
room_list = [E, E, E],
/*
result = is_err,
room_list = [E, E, E],
*/
room_list = [E, F("!r1:x.y"), E],
};
// The server replies with a particular range, but some room IDs are
// missing.
assert_sync_operations! {
"The server replies with a particular range, but some room IDs are missing":
room_list = [E, E, E],
sync_operations = [
{
@@ -1976,19 +1950,12 @@ mod tests {
}
]
=>
// As soon https://github.com/matrix-org/sliding-sync/issues/52
// is fixed, let's uncomment the real test.
result = is_ok,
room_list = [F("!r0:x.y"), E, E],
/*
result = is_err,
room_list = [E, E, E],
*/
};
// The server replies with a particular range, but there is too much
// room IDs.
assert_sync_operations! {
"The server replies with a particular range, but there is too much room IDs":
room_list = [E, E, E],
sync_operations = [
{
@@ -1998,21 +1965,15 @@ mod tests {
}
]
=>
// As soon https://github.com/matrix-org/sliding-sync/issues/52
// is fixed, let's uncomment the real test.
result = is_ok,
room_list = [F("!r0:x.y"), F("!r1:x.y"), F("!extra:x.y")],
/*
result = is_err,
room_list = [E, E, E],
*/
};
}
#[test]
fn test_sync_operations_delete() {
// Delete a room entry in the middle.
assert_sync_operations! {
"Delete a room entry in the middle":
room_list = [F("!r0:x.y"), F("!r1:x.y"), F("!r2:x.y")],
sync_operations = [
{
@@ -2027,8 +1988,8 @@ mod tests {
rooms = ["!r0:x.y"],
};
// Delete a room entry at the beginning.
assert_sync_operations! {
"Delete a room entry at the beginning":
room_list = [F("!r0:x.y"), F("!r1:x.y"), F("!r2:x.y")],
sync_operations = [
{
@@ -2041,8 +2002,8 @@ mod tests {
room_list = [F("!r1:x.y"), F("!r2:x.y")],
};
// Delete a room entry at the end.
assert_sync_operations! {
"Delete a room entry at the end":
room_list = [F("!r0:x.y"), F("!r1:x.y"), F("!r2:x.y")],
sync_operations = [
{
@@ -2055,8 +2016,8 @@ mod tests {
room_list = [F("!r0:x.y"), F("!r1:x.y")],
};
// Delete an out of bounds room entry.
assert_sync_operations! {
"Delete an out of bounds room entry":
room_list = [F("!r0:x.y"), F("!r1:x.y"), F("!r2:x.y")],
sync_operations = [
{
@@ -2072,8 +2033,8 @@ mod tests {
#[test]
fn test_sync_operations_insert() {
// Insert a room entry in the middle.
assert_sync_operations! {
"Insert a room entry in the middle":
room_list = [E, E, E],
sync_operations = [
{
@@ -2089,8 +2050,8 @@ mod tests {
rooms = ["!r0:x.y"],
};
// Insert a room entry at the beginning.
assert_sync_operations! {
"Insert a room entry at the beginning":
room_list = [E, E, E],
sync_operations = [
{
@@ -2104,8 +2065,8 @@ mod tests {
room_list = [F("!r0:x.y"), E, E, E],
};
// Insert a room entry at the end
assert_sync_operations! {
"Insert a room entry at the end":
room_list = [E, E, E],
sync_operations = [
{
@@ -2119,8 +2080,8 @@ mod tests {
room_list = [E, E, E, F("!r3:x.y")],
};
// Insert an out of bounds room entry.
assert_sync_operations! {
"Insert an out of bounds room entry":
room_list = [E, F("!r1:x.y"), E],
sync_operations = [
{
@@ -2137,8 +2098,8 @@ mod tests {
#[test]
fn test_sync_operations_invalidate() {
// Invalidating an empty room.
assert_sync_operations! {
"Invalidating an empty room":
room_list = [E, F("!r1:x.y")],
sync_operations = [
{
@@ -2153,8 +2114,8 @@ mod tests {
rooms = ["!r1:x.y"],
};
// Invalidating a filled room.
assert_sync_operations! {
"Invalidating a filled room":
room_list = [F("!r0:x.y"), F("!r1:x.y")],
sync_operations = [
{
@@ -2169,8 +2130,8 @@ mod tests {
rooms = ["!r1:x.y"],
};
// Invalidating an invalidated room.
assert_sync_operations! {
"Invalidating an invalidated room":
room_list = [I("!r0:x.y"), F("!r1:x.y")],
sync_operations = [
{
@@ -2185,8 +2146,8 @@ mod tests {
rooms = ["!r1:x.y"],
};
// Partial update.
assert_sync_operations! {
"Partial update from the beginning":
room_list = [F("!r0:x.y"), F("!r1:x.y"), F("!r2:x.y")],
sync_operations = [
{
@@ -2198,7 +2159,9 @@ mod tests {
result = is_ok,
room_list = [I("!r0:x.y"), I("!r1:x.y"), F("!r2:x.y")],
};
assert_sync_operations! {
"Partial update from the end":
room_list = [F("!r0:x.y"), F("!r1:x.y"), F("!r2:x.y")],
sync_operations = [
{
@@ -2211,8 +2174,8 @@ mod tests {
room_list = [F("!r0:x.y"), I("!r1:x.y"), I("!r2:x.y")],
};
// Full update.
assert_sync_operations! {
"Full update":
room_list = [F("!r0:x.y"), F("!r1:x.y"), F("!r2:x.y")],
sync_operations = [
{
@@ -2225,11 +2188,8 @@ mod tests {
room_list = [I("!r0:x.y"), I("!r1:x.y"), I("!r2:x.y")],
};
// The range returned by the server is too large compared to the `room_lists`
// but we can fix it on-the-fly.
//
// See https://github.com/matrix-org/sliding-sync/issues/52.
assert_sync_operations! {
"The range returned by the server is too large compared to the `room_lists`":
room_list = [F("!r0:x.y")],
sync_operations = [
{
@@ -2238,16 +2198,16 @@ mod tests {
}
]
=>
result = is_ok, // <- because we have fixed it
room_list = [I("!r0:x.y")],
result = is_err,
room_list = [F("!r0:x.y")],
};
// Missing `range`.
assert_sync_operations! {
"Missing `range`":
room_list = [F("!r0:x.y"), F("!r1:x.y"), F("!r2:x.y")],
sync_operations = [
{
"op": SlidingOp::Invalidate,
"op": SlidingOp::Delete,
}
]
=>
@@ -2255,12 +2215,12 @@ mod tests {
room_list = [F("!r0:x.y"), F("!r1:x.y"), F("!r2:x.y")],
};
// Invalid `range`.
assert_sync_operations! {
"Invalid `range`":
room_list = [F("!r0:x.y"), F("!r1:x.y"), F("!r2:x.y")],
sync_operations = [
{
"op": SlidingOp::Invalidate,
"op": SlidingOp::Delete,
"range": [12, 0],
}
]
@@ -2268,5 +2228,19 @@ mod tests {
result = is_err,
room_list = [F("!r0:x.y"), F("!r1:x.y"), F("!r2:x.y")],
};
assert_sync_operations! {
"Out of bounds operation":
room_list = [F("!r0:x.y"), F("!r1:x.y"), F("!r2:x.y")],
sync_operations = [
{
"op": SlidingOp::Delete,
"range": [2, 3],
}
]
=>
result = is_err,
room_list = [F("!r0:x.y"), F("!r1:x.y"), F("!r2:x.y")],
};
}
}

View File

@@ -45,9 +45,7 @@ pub use room::*;
use ruma::{
api::client::{
error::ErrorKind,
sync::sync_events::v4::{
self, AccountDataConfig, E2EEConfig, ExtensionsConfig, ToDeviceConfig,
},
sync::sync_events::v4::{self, ExtensionsConfig},
},
assign,
events::TimelineEventType,
@@ -154,18 +152,18 @@ impl SlidingSync {
/// Add the common extensions if not already configured.
pub fn add_common_extensions(&self) {
let mut lock = self.inner.extensions.lock().unwrap();
let mut cfg = lock.get_or_insert_with(Default::default);
let cfg = lock.get_or_insert_with(Default::default);
if cfg.to_device.is_none() {
cfg.to_device = Some(assign!(ToDeviceConfig::default(), { enabled: Some(true) }));
if cfg.to_device.enabled.is_none() {
cfg.to_device.enabled = Some(true);
}
if cfg.e2ee.is_none() {
cfg.e2ee = Some(assign!(E2EEConfig::default(), { enabled: Some(true) }));
if cfg.e2ee.enabled.is_none() {
cfg.e2ee.enabled = Some(true);
}
if cfg.account_data.is_none() {
cfg.account_data = Some(assign!(AccountDataConfig::default(), { enabled: Some(true) }));
if cfg.account_data.enabled.is_none() {
cfg.account_data.enabled = Some(false);
}
}
@@ -189,7 +187,6 @@ impl SlidingSync {
.unwrap()
.get_or_insert_with(Default::default)
.to_device
.get_or_insert_with(Default::default)
.since = Some(since);
}
@@ -257,11 +254,8 @@ impl SlidingSync {
//
// The token is also loaded from storage in the `SlidingSyncBuilder::build()`
// method.
let mut to_device = extensions.to_device.unwrap_or_default();
to_device.enabled = Some(true);
extensions.to_device = Some(to_device);
extensions.e2ee = Some(assign!(E2EEConfig::default(), { enabled: Some(true) }));
extensions.to_device.enabled = Some(true);
extensions.e2ee.enabled = Some(true);
extensions
} else {
@@ -273,10 +267,10 @@ impl SlidingSync {
.lock()
.unwrap()
.as_ref()
.and_then(|e| e.to_device.as_ref()?.since.to_owned());
.and_then(|e| e.to_device.since.clone());
let mut extensions: ExtensionsConfig = Default::default();
extensions.to_device = Some(assign!(ToDeviceConfig::default(), { since }));
extensions.to_device.since = since;
extensions
}
@@ -642,7 +636,7 @@ impl From<&SlidingSync> for FrozenSlidingSync {
.lock()
.unwrap()
.as_ref()
.and_then(|ext| ext.to_device.as_ref()?.since.clone()),
.and_then(|ext| ext.to_device.since.clone()),
}
}
}
@@ -660,6 +654,7 @@ pub struct UpdateSummary {
#[cfg(test)]
mod test {
use assert_matches::assert_matches;
use ruma::api::client::sync::sync_events::v4::{E2EEConfig, ToDeviceConfig};
use wiremock::MockServer;
use super::*;
@@ -677,9 +672,9 @@ mod test {
// e2ee anyways.
assert_matches!(
extensions.to_device,
Some(ToDeviceConfig { enabled: Some(true), since: None, .. })
ToDeviceConfig { enabled: Some(true), since: None, .. }
);
assert_matches!(extensions.e2ee, Some(E2EEConfig { enabled: Some(true), .. }));
assert_matches!(extensions.e2ee, E2EEConfig { enabled: Some(true), .. });
let some_since = "some_since".to_owned();
sync.update_to_device_since(some_since.to_owned());
@@ -690,16 +685,16 @@ mod test {
// stickyness.
assert_matches!(
extensions.to_device,
Some(ToDeviceConfig { enabled: None, since: Some(since), .. }) if since == some_since
ToDeviceConfig { enabled: None, since: Some(since), .. } if since == some_since
);
assert_matches!(extensions.e2ee, None);
assert_matches!(extensions.e2ee, E2EEConfig { enabled: None, .. });
let extensions = sync.prepare_extension_config(None);
// Even if there isn't a `pos`, if we have a to-device `since` token, we put it
// into the request.
assert_matches!(
extensions.to_device,
Some(ToDeviceConfig { enabled: Some(true), since: Some(since), .. }) if since == some_since
ToDeviceConfig { enabled: Some(true), since: Some(since), .. } if since == some_since
);
Ok(())

View File

@@ -170,7 +170,7 @@ impl SlidingSyncRoom {
self.inner.is_dm = is_dm;
}
if !invite_state.is_empty() {
if invite_state.is_some() {
self.inner.invite_state = invite_state;
}

View File

@@ -28,7 +28,7 @@ services:
- ./data/db:/var/lib/postgresql/data
sliding-sync-proxy:
image: ghcr.io/matrix-org/sliding-sync:v0.99.1
image: ghcr.io/matrix-org/sliding-sync:v0.99.2
depends_on:
postgres:
condition: service_healthy