Merge branch 'main' into ganfra/kotlin_bindings

This commit is contained in:
ganfra
2022-10-27 14:37:53 +02:00
138 changed files with 5609 additions and 2378 deletions

View File

@@ -149,11 +149,6 @@ jobs:
profile: minimal
override: true
- name: Install targets
run: |
rustup target add aarch64-apple-ios-sim --toolchain nightly
rustup target add x86_64-apple-ios --toolchain nightly
- name: Load cache
uses: Swatinem/rust-cache@v1
@@ -161,17 +156,12 @@ jobs:
uses: actions-rs/cargo@v1
with:
command: install
# keep in sync with uniffi dependency in Cargo.toml's
args: uniffi_bindgen --git https://github.com/mozilla/uniffi-rs --rev 0eee77f67b716c4896494606e5931d249871b23a
# keep in sync with uniffi dependency in root Cargo.toml
args: uniffi_bindgen --git https://github.com/mozilla/uniffi-rs --rev fdb769b567865d9c5c7c682a18d0c1301a039c85
- name: Generate .xcframework
working-directory: bindings/apple
run: sh ./debug_build_xcframework.sh x86_64 ci
- name: Build library & bindings
run: cargo xtask swift build-library
- name: Run XCTests
working-directory: bindings/apple
run: |
xcodebuild test \
-scheme MatrixRustSDK \
-sdk iphonesimulator \
-destination 'platform=iOS Simulator,name=iPhone 13'
run: swift test

13
.github/workflows/cancel_others.yml vendored Normal file
View File

@@ -0,0 +1,13 @@
on:
pull_request:
branches: [main]
jobs:
cancel-others:
runs-on: ubuntu-latest
steps:
- name: Cancel workflows for older commits
uses: styfle/cancel-workflow-action@0.11.0
with:
workflow_id: all
all_but_latest: true

View File

@@ -16,15 +16,6 @@ env:
CARGO_TERM_COLOR: always
jobs:
cancel-others:
if: github.event_name == 'pull_request'
runs-on: ubuntu-latest
steps:
- name: Cancel Previous Runs
uses: styfle/cancel-workflow-action@0.9.1
with:
access_token: ${{ github.token }}
xtask:
runs-on: ubuntu-latest
steps:

864
Cargo.lock generated
View File

File diff suppressed because it is too large Load Diff

View File

@@ -15,6 +15,16 @@ members = [
default-members = ["benchmarks", "crates/*"]
resolver = "2"
[workspace.dependencies]
ruma = { version = "0.7.4", features = ["client-api-c"] }
tracing = { version = "0.1.36", default-features = false, features = ["std"] }
uniffi = { git = "https://github.com/mozilla/uniffi-rs", rev = "fdb769b567865d9c5c7c682a18d0c1301a039c85" }
uniffi_macros = { git = "https://github.com/mozilla/uniffi-rs", rev = "fdb769b567865d9c5c7c682a18d0c1301a039c85" }
uniffi_bindgen = { git = "https://github.com/mozilla/uniffi-rs", rev = "fdb769b567865d9c5c7c682a18d0c1301a039c85" }
uniffi_build = { git = "https://github.com/mozilla/uniffi-rs", rev = "fdb769b567865d9c5c7c682a18d0c1301a039c85", features = ["builtin-bindgen"] }
vodozemac = "0.3.0"
zeroize = "1.3.0"
[profile.release]
lto = true

View File

@@ -26,7 +26,7 @@ The rust-sdk consists of multiple crates that can be picked at your convenience:
## Minimum Supported Rust Version (MSRV)
These crates are built with the Rust language version 2021 and require a minimum compiler version of `1.62`
These crates are built with the Rust language version 2021 and require a minimum compiler version of `1.64`
## Status

View File

@@ -12,7 +12,7 @@ criterion = { version = "0.3.5", features = ["async", "async_tokio", "html_repor
matrix-sdk-crypto = { path = "../crates/matrix-sdk-crypto", version = "0.6.0"}
matrix-sdk-sled = { path = "../crates/matrix-sdk-sled", version = "0.2.0", default-features = false, features = ["crypto-store"] }
matrix-sdk-test = { path = "../testing/matrix-sdk-test", version = "0.6.0"}
ruma = "0.7.0"
ruma = { workspace = true }
serde_json = "1.0.79"
tempfile = "3.3.0"
tokio = { version = "1.17.0", default-features = false, features = ["rt-multi-thread"] }

View File

@@ -5,18 +5,18 @@ maintained by the owners of the Matrix Rust SDK project.
* [`apple`] or `matrix-rust-components-swift`, Swift bindings of the
[`matrix-sdk`] crate via [`matrix-sdk-ffi`],
* [`matrix-sdk-crypto-ffi`], bindings of the [`matrix-sdk-crypto`]
* [`matrix-sdk-crypto-ffi`], UniFFI (Kotlin, Swift, Python, Ruby) bindings of the [`matrix-sdk-crypto`]
crate,
* [`matrix-sdk-crypto-js`], JavaScript bindings of the
[`matrix-sdk-crypto`] crate,
* [`matrix-sdk-crypto-nodejs`], Node.js bindings of the
[`matrix-sdk-crypto`] crate,
* [`matrix-sdk-ffi`], bindings of the [`matrix-sdk`] crate,
* [`matrix-sdk-ffi`], UniFFI bindings of the [`matrix-sdk`] crate.
[`apple`]: ./apple
[`matrix-sdk-crypto-ffi`]: ./matrix-sdk-crypto-ffi
[`matrix-sdk-crypto-js`]: ../crates/matrix-sdk-crypto
[`matrix-sdk-crypto-nodejs`]: ../crates/matrix-sdk-crypto
[`matrix-sdk-crypto-js`]: ./matrix-sdk-crypto-js
[`matrix-sdk-crypto-nodejs`]: ./matrix-sdk-crypto-nodejs
[`matrix-sdk-crypto`]: ../crates/matrix-sdk-crypto
[`matrix-sdk-ffi`]: ./matrix-sdk-ffi
[`matrix-sdk`]: ../crates/matrix-sdk

View File

@@ -5,17 +5,25 @@ import PackageDescription
let package = Package(
name: "MatrixRustSDK",
platforms: [.iOS(.v15)],
products: [
.library(name: "MatrixRustSDK",
targets: ["MatrixRustSDK"]),
],
targets: [
.binaryTarget(name: "MatrixSDKFFI", path: "generated/MatrixSDKFFI.xcframework"),
.target(name: "MatrixRustSDK",
dependencies: [.target(name: "MatrixSDKFFI")],
path: "generated/swift"),
path: "generated/swift",
swiftSettings: [
.unsafeFlags(["-I", "./generated/matrix_sdk_ffi"])
]),
.testTarget(name: "MatrixRustSDKTests",
dependencies: ["MatrixRustSDK"]),
dependencies: ["MatrixRustSDK"],
swiftSettings: [
.unsafeFlags(["-I", "./generated/matrix_sdk_ffi"])
],
linkerSettings: [
.linkedLibrary("matrix_sdk_ffi", .when(platforms: [.macOS])),
.linkedLibrary("matrix_sdk_ffiFFI", .when(platforms: [.linux])),
.unsafeFlags(["-L./generated/matrix_sdk_ffi"])
])
]
)

View File

@@ -38,15 +38,11 @@ The `build_crypto_xcframework.sh` script will go through all the steps required
4. `xcodebuild` an `xcframework` from the fat static libs and the original iOS one, and add the header and module map to it under `generated/MatrixSDKCryptoFFI.xcframework`
5. cleanup and delete the generated files except the .xcframework and the swift sources (that aren't part of the framework)
## Running the Xcode project
## Building & testing the Swift package
The Xcode project is meant to provide a simple example on how to integrate everything together but also a place to run unit and integration tests from.
`Package.swift` is meant to provide a simple example on how to integrate everything together but also a place to run unit and integration tests from.
It's pre-configured to link to the generated .xcframework and .swift files so successfully running the script first is necessary for it to compile.
It makes the compiled code available to swift by importing the C header through its bridging header.
Once all the generated components are available running it should be as easy as choosing a platform and clicking run.
It's pre-configured to link to the generated static lib and .swift files so successfully running `cargo xtask swift build-library` first is necessary for it to compile. Afterwards you can execute the tests with `swift test`. Note that for the moment this only works on macOS but we're planning to add Linux support in the future.
## Distribution

View File

@@ -51,7 +51,12 @@ lipo -create \
-output "${GENERATED_DIR}/simulator/libmatrix_sdk_crypto_ffi.a"
# Generate uniffi files
uniffi-bindgen generate "${SRC_ROOT}/bindings/${TARGET_CRATE}/src/olm.udl" --language swift --config "${SRC_ROOT}/bindings/${TARGET_CRATE}/uniffi.toml" --out-dir ${GENERATED_DIR}
uniffi-bindgen generate \
--language swift \
--lib-file "${TARGET_DIR}/aarch64-apple-ios-sim/${REL_TYPE_DIR}/libmatrix_sdk_crypto_ffi.a" \
--config "${SRC_ROOT}/bindings/${TARGET_CRATE}/uniffi.toml" \
--out-dir ${GENERATED_DIR} \
"${SRC_ROOT}/bindings/${TARGET_CRATE}/src/olm.udl"
# Move headers to the right place
HEADERS_DIR=${GENERATED_DIR}/headers

View File

@@ -52,7 +52,8 @@ uniffi-bindgen generate "${SRC_ROOT}/bindings/matrix-sdk-ffi/src/api.udl" \
--language kotlin \
--out-dir ${GENERATED_DIR} \
--lib-file "${BASE_TARGET_DIR}/aarch64-apple-darwin/${RELEASE_TYPE_DIR}/libmatrix_sdk_ffi.a" \
--version \
# Create android library
cd "${KOTLIN_ROOT}"
./gradlew :sdk:sdk-android:assemble

View File

@@ -19,17 +19,18 @@ hmac = "0.12.1"
http = "0.2.6"
pbkdf2 = "0.11.0"
rand = "0.8.5"
ruma = { version = "0.7.0", features = ["client-api-c"] }
ruma = { workspace = true }
serde = "1.0.136"
serde_json = "1.0.79"
sha2 = "0.10.2"
thiserror = "1.0.30"
tracing = "0.1.34"
tracing = { workspace = true }
tracing-subscriber = { version = "0.3.11", features = ["env-filter"] }
# keep in sync with uniffi dependency in matrix-sdk-ffi, and uniffi_bindgen in ffi CI job
uniffi = { git = "https://github.com/mozilla/uniffi-rs", rev = "0eee77f67b716c4896494606e5931d249871b23a" }
uniffi_macros = { git = "https://github.com/mozilla/uniffi-rs", rev = "0eee77f67b716c4896494606e5931d249871b23a" }
zeroize = { version = "1.3.0", features = ["zeroize_derive"] }
uniffi = { workspace = true }
uniffi_macros = { workspace = true }
vodozemac = { workspace = true }
zeroize = { workspace = true, features = ["zeroize_derive"] }
[dependencies.js_int]
version = "0.2.2"
@@ -55,11 +56,8 @@ version = "1.17.0"
default_features = false
features = ["rt-multi-thread"]
[dependencies.vodozemac]
version = "0.3.0"
[build-dependencies]
uniffi_build = { git = "https://github.com/mozilla/uniffi-rs", rev = "0eee77f67b716c4896494606e5931d249871b23a", features = ["builtin-bindgen"] }
uniffi_build = { workspace = true, features = ["builtin-bindgen"] }
[dev-dependencies]
tempfile = "3.3.0"

View File

@@ -16,7 +16,7 @@ mod uniffi_api;
mod users;
mod verification;
use std::{borrow::Borrow, collections::HashMap, str::FromStr, sync::Arc};
use std::{borrow::Borrow, collections::HashMap, str::FromStr, sync::Arc, time::Duration};
pub use backup_recovery_key::{
BackupRecoveryKey, DecodeError, MegolmV1BackupKey, PassphraseInfo, PkDecryptionError,
@@ -28,12 +28,18 @@ pub use error::{
use js_int::UInt;
pub use logger::{set_logger, Logger};
pub use machine::{KeyRequestPair, OlmMachine};
use matrix_sdk_crypto::types::{EventEncryptionAlgorithm, SigningKey};
use matrix_sdk_crypto::{
types::{EventEncryptionAlgorithm as RustEventEncryptionAlgorithm, SigningKey},
EncryptionSettings as RustEncryptionSettings, LocalTrust,
};
pub use responses::{
BootstrapCrossSigningResult, DeviceLists, KeysImportResult, OutgoingVerificationRequest,
Request, RequestType, SignatureUploadRequest, UploadSigningKeysRequest,
};
use ruma::{DeviceId, DeviceKeyAlgorithm, OwnedUserId, RoomId, SecondsSinceUnixEpoch, UserId};
use ruma::{
events::room::history_visibility::HistoryVisibility as RustHistoryVisibility, DeviceId,
DeviceKeyAlgorithm, OwnedUserId, RoomId, SecondsSinceUnixEpoch, UserId,
};
use serde::{Deserialize, Serialize};
pub use users::UserIdentity;
pub use verification::{
@@ -272,7 +278,7 @@ pub fn migrate(
imported: session.imported,
backed_up: session.backed_up,
history_visibility: None,
algorithm: EventEncryptionAlgorithm::MegolmV1AesSha2,
algorithm: RustEventEncryptionAlgorithm::MegolmV1AesSha2,
};
let session = matrix_sdk_crypto::olm::InboundGroupSession::from_pickle(pickle)?;
@@ -347,6 +353,99 @@ impl<T: Fn(i32, i32)> ProgressListener for T {
}
}
/// An encryption algorithm to be used to encrypt messages sent to a room.
///
/// FFI-local mirror of `matrix_sdk_crypto::types::EventEncryptionAlgorithm`
/// (imported here as `RustEventEncryptionAlgorithm`); converted via `From`.
pub enum EventEncryptionAlgorithm {
    /// Olm version 1 using Curve25519, AES-256, and SHA-256.
    OlmV1Curve25519AesSha2,
    /// Megolm version 1 using AES-256 and SHA-256.
    MegolmV1AesSha2,
}
impl From<EventEncryptionAlgorithm> for RustEventEncryptionAlgorithm {
fn from(a: EventEncryptionAlgorithm) -> Self {
match a {
EventEncryptionAlgorithm::OlmV1Curve25519AesSha2 => {
RustEventEncryptionAlgorithm::OlmV1Curve25519AesSha2
}
EventEncryptionAlgorithm::MegolmV1AesSha2 => {
RustEventEncryptionAlgorithm::MegolmV1AesSha2
}
}
}
}
/// Who can see a room's history.
///
/// FFI-local mirror of `ruma::events::room::history_visibility::HistoryVisibility`
/// (imported here as `RustHistoryVisibility`).
pub enum HistoryVisibility {
    /// Previous events are accessible to newly joined members from the point
    /// they were invited onwards.
    ///
    /// Events stop being accessible when the member's state changes to
    /// something other than *invite* or *join*.
    Invited,
    /// Previous events are accessible to newly joined members from the point
    /// they joined the room onwards.
    ///
    /// Events stop being accessible when the member's state changes to
    /// something other than *join*.
    Joined,
    /// Previous events are always accessible to newly joined members.
    ///
    /// All events in the room are accessible, even those sent when the member
    /// was not a part of the room.
    Shared,
    /// All events while this is the `HistoryVisibility` value may be shared by
    /// any participating homeserver with anyone, regardless of whether they
    /// have ever joined the room.
    WorldReadable,
}
impl From<HistoryVisibility> for RustHistoryVisibility {
    /// Convert the FFI-level history visibility into the ruma one.
    fn from(h: HistoryVisibility) -> Self {
        match h {
            HistoryVisibility::Invited => RustHistoryVisibility::Invited,
            HistoryVisibility::Joined => RustHistoryVisibility::Joined,
            HistoryVisibility::Shared => RustHistoryVisibility::Shared,
            // Bug fix: this arm previously mapped to `RustHistoryVisibility::Shared`,
            // which mislabeled world-readable rooms and would defeat the
            // visibility-change-triggered key rotation for them.
            HistoryVisibility::WorldReadable => RustHistoryVisibility::WorldReadable,
        }
    }
}
/// Settings that should be used when a room key is shared.
///
/// These settings control which algorithm the room key should use, how long a
/// room key should be used and some other important information that determines
/// the lifetime of a room key.
///
/// FFI-local mirror of `matrix_sdk_crypto::EncryptionSettings`
/// (imported here as `RustEncryptionSettings`); converted via `From`.
pub struct EncryptionSettings {
    /// The encryption algorithm that should be used in the room.
    pub algorithm: EventEncryptionAlgorithm,
    /// How long can the room key be used before it should be rotated. Time in
    /// seconds.
    pub rotation_period: u64,
    /// How many messages should be sent before the room key should be rotated.
    pub rotation_period_msgs: u64,
    /// The current history visibility of the room. The visibility will be
    /// tracked by the room key and the key will be rotated if the visibility
    /// changes.
    pub history_visibility: HistoryVisibility,
    /// Should untrusted devices receive the room key, or should they be
    /// excluded from the conversation.
    pub only_allow_trusted_devices: bool,
}
impl From<EncryptionSettings> for RustEncryptionSettings {
fn from(v: EncryptionSettings) -> Self {
RustEncryptionSettings {
algorithm: v.algorithm.into(),
rotation_period: Duration::from_secs(v.rotation_period),
rotation_period_msgs: v.rotation_period_msgs,
history_visibility: v.history_visibility.into(),
only_allow_trusted_devices: v.only_allow_trusted_devices,
}
}
}
/// An event that was successfully decrypted.
pub struct DecryptedEvent {
/// The decrypted version of the event.

View File

@@ -51,6 +51,7 @@ pub fn set_logger(logger: Box<dyn Logger>) {
let _ = tracing_subscriber::fmt()
.with_writer(logger)
.with_env_filter(filter)
.with_ansi(false)
.without_time()
.try_init();
}

View File

@@ -11,9 +11,8 @@ use js_int::UInt;
use matrix_sdk_common::deserialized_responses::AlgorithmInfo;
use matrix_sdk_crypto::{
backups::MegolmV1BackupKey as RustBackupKey, decrypt_room_key_export, encrypt_room_key_export,
matrix_sdk_qrcode::QrVerificationData, olm::ExportedRoomKey, store::RecoveryKey,
EncryptionSettings, LocalTrust, OlmMachine as InnerMachine, UserIdentities,
Verification as RustVerification,
matrix_sdk_qrcode::QrVerificationData, olm::ExportedRoomKey, store::RecoveryKey, LocalTrust,
OlmMachine as InnerMachine, UserIdentities, Verification as RustVerification,
};
use ruma::{
api::{
@@ -46,9 +45,9 @@ use crate::{
responses::{response_from_string, OutgoingVerificationRequest, OwnedResponse},
BackupKeys, BackupRecoveryKey, BootstrapCrossSigningResult, ConfirmVerificationResult,
CrossSigningKeyExport, CrossSigningStatus, DecodeError, DecryptedEvent, Device, DeviceLists,
KeyImportError, KeysImportResult, MegolmV1BackupKey, ProgressListener, QrCode, Request,
RequestType, RequestVerificationResult, RoomKeyCounts, ScanResult, SignatureUploadRequest,
StartSasResult, UserIdentity, Verification, VerificationRequest,
EncryptionSettings, KeyImportError, KeysImportResult, MegolmV1BackupKey, ProgressListener,
QrCode, Request, RequestType, RequestVerificationResult, RoomKeyCounts, ScanResult,
SignatureUploadRequest, StartSasResult, UserIdentity, Verification, VerificationRequest,
};
/// A high level state machine that handles E2EE for Matrix.
@@ -283,11 +282,13 @@ impl OlmMachine {
}
}
/// Mark the device of the given user with the given device ID as trusted.
pub fn mark_device_as_trusted(
/// Set local trust state for the device of the given user without creating
/// or uploading any signatures if verified
pub fn set_local_trust(
&self,
user_id: &str,
device_id: &str,
trust_state: LocalTrust,
) -> Result<(), CryptoStoreError> {
let user_id = parse_user_id(user_id)?;
@@ -295,7 +296,7 @@ impl OlmMachine {
self.runtime.block_on(self.inner.get_device(&user_id, device_id.into(), None))?;
if let Some(device) = device {
self.runtime.block_on(device.set_local_trust(LocalTrust::Verified))?;
self.runtime.block_on(device.set_local_trust(trust_state))?;
}
Ok(())
@@ -418,7 +419,7 @@ impl OlmMachine {
key_counts: HashMap<String, i32>,
unused_fallback_keys: Option<Vec<String>>,
) -> Result<String, CryptoStoreError> {
let events: ToDevice = serde_json::from_str(events)?;
let to_device: ToDevice = serde_json::from_str(events)?;
let device_changes: RumaDeviceLists = device_changes.into();
let key_counts: BTreeMap<DeviceKeyAlgorithm, UInt> = key_counts
.into_iter()
@@ -436,7 +437,7 @@ impl OlmMachine {
unused_fallback_keys.map(|u| u.into_iter().map(DeviceKeyAlgorithm::from).collect());
let events = self.runtime.block_on(self.inner.receive_sync_changes(
events,
to_device.events,
&device_changes,
&key_counts,
unused_fallback_keys.as_deref(),
@@ -515,10 +516,13 @@ impl OlmMachine {
///
/// * `users` - The list of users which are considered to be members of the
/// room and should receive the room key.
///
/// * `settings` - The settings that should be used for the room key.
pub fn share_room_key(
&self,
room_id: &str,
users: Vec<String>,
settings: EncryptionSettings,
) -> Result<Vec<Request>, CryptoStoreError> {
let users: Vec<OwnedUserId> =
users.into_iter().filter_map(|u| UserId::parse(u).ok()).collect();
@@ -527,7 +531,7 @@ impl OlmMachine {
let requests = self.runtime.block_on(self.inner.share_room_key(
&room_id,
users.iter().map(Deref::deref),
EncryptionSettings::default(),
settings,
))?;
Ok(requests.into_iter().map(|r| r.as_ref().into()).collect())

View File

@@ -247,6 +247,33 @@ enum RequestType {
"RoomMessage",
};
// Local trust state of a device; mirrors the Rust `LocalTrust` enum
// exposed through `set_local_trust`.
enum LocalTrust {
    "Verified",
    "BlackListed",
    "Ignored",
    "Unset",
};

// Encryption algorithm used for room messages; mirrors the Rust
// `EventEncryptionAlgorithm` enum.
enum EventEncryptionAlgorithm {
    "OlmV1Curve25519AesSha2",
    "MegolmV1AesSha2",
};

// Who can see a room's history; mirrors the Rust `HistoryVisibility` enum.
enum HistoryVisibility {
    "Invited",
    "Joined",
    "Shared",
    "WorldReadable",
};

// Settings applied when sharing a room key; mirrors the Rust
// `EncryptionSettings` dictionary passed to `share_room_key`.
dictionary EncryptionSettings {
    // The encryption algorithm to use in the room.
    EventEncryptionAlgorithm algorithm;
    // How long the room key may be used before rotation, in seconds.
    u64 rotation_period;
    // How many messages may be sent before the room key is rotated.
    u64 rotation_period_msgs;
    // Current history visibility of the room; the key is rotated when it changes.
    HistoryVisibility history_visibility;
    // Whether untrusted devices should receive the room key.
    boolean only_allow_trusted_devices;
};
interface OlmMachine {
[Throws=CryptoStoreError]
constructor(
@@ -282,7 +309,7 @@ interface OlmMachine {
[Throws=CryptoStoreError]
Device? get_device([ByRef] string user_id, [ByRef] string device_id, u32 timeout);
[Throws=CryptoStoreError]
void mark_device_as_trusted([ByRef] string user_id, [ByRef] string device_id);
void set_local_trust([ByRef] string user_id, [ByRef] string device_id, LocalTrust trust_state);
[Throws=SignatureError]
SignatureUploadRequest verify_device([ByRef] string user_id, [ByRef] string device_id);
[Throws=CryptoStoreError]
@@ -294,7 +321,11 @@ interface OlmMachine {
[Throws=CryptoStoreError]
Request? get_missing_sessions(sequence<string> users);
[Throws=CryptoStoreError]
sequence<Request> share_room_key([ByRef] string room_id, sequence<string> users);
sequence<Request> share_room_key(
[ByRef] string room_id,
sequence<string> users,
EncryptionSettings settings
);
[Throws=CryptoStoreError]
void receive_unencrypted_verification_event([ByRef] string event, [ByRef] string room_id);

View File

@@ -19,7 +19,7 @@ use ruma::{
},
},
message::send_message_event::v3::Response as RoomMessageResponse,
sync::sync_events::v3::DeviceLists as RumaDeviceLists,
sync::sync_events::DeviceLists as RumaDeviceLists,
to_device::send_event_to_device::v3::Response as ToDeviceResponse,
},
assign,

View File

@@ -24,15 +24,15 @@ crate-type = ["cdylib"]
[features]
default = ["tracing", "qrcode"]
qrcode = ["matrix-sdk-crypto/qrcode", "dep:matrix-sdk-qrcode"]
tracing = []
tracing = ["dep:tracing"]
[dependencies]
matrix-sdk-common = { version = "0.6.0", path = "../../crates/matrix-sdk-common", features = ["js"] }
matrix-sdk-crypto = { version = "0.6.0", path = "../../crates/matrix-sdk-crypto", features = ["js"] }
matrix-sdk-indexeddb = { version = "0.2.0", path = "../../crates/matrix-sdk-indexeddb", features = ["experimental-nodejs"] }
matrix-sdk-qrcode = { version = "0.4.0", path = "../../crates/matrix-sdk-qrcode", optional = true }
ruma = { version = "0.7.0", features = ["client-api-c", "js", "rand", "unstable-msc2676", "unstable-msc2677"] }
vodozemac = { version = "0.3.0", features = ["js"] }
ruma = { workspace = true, features = ["js", "rand", "unstable-msc2676", "unstable-msc2677"] }
vodozemac = { workspace = true, features = ["js"] }
wasm-bindgen = "0.2.80"
wasm-bindgen-futures = "0.4.30"
js-sys = "0.3.49"
@@ -40,6 +40,6 @@ console_error_panic_hook = "0.1.7"
serde_json = "1.0.79"
http = "0.2.6"
anyhow = "1.0.58"
tracing = { version = "0.1.35", default-features = false, features = ["attributes"] }
tracing = { workspace = true, optional = true }
tracing-subscriber = { version = "0.3.14", default-features = false, features = ["registry", "std"] }
zeroize = "1.3.0"
zeroize = { workspace = true }

View File

@@ -30,20 +30,25 @@ pub struct OlmMachine {
impl OlmMachine {
/// Create a new memory based `OlmMachine`.
///
/// The created machine will keep the encryption keys only in
/// memory and once the objects is dropped, the keys will be lost.
/// The created machine will keep the encryption keys either in a IndexedDB
/// based store, or in a memory store and once the objects is dropped,
/// the keys will be lost.
///
/// `user_id` represents the unique ID of the user that owns this
/// machine. `device_id` represents the unique ID of the device
/// # Arguments
///
/// * `user_id` - represents the unique ID of the user that owns this
/// machine.
///
/// * `device_id` - represents the unique ID of the device
/// that owns this machine.
///
/// `store_name` and `store_passphrase` are both optional, but
/// must be both set to have an effect. If they are both set, the
/// state of the machine will persist in a database named
/// `store_name` where its content is encrypted by the passphrase
/// given by `store_passphrase`. If they are not both set, the
/// created machine will keep the encryption keys only in memory,
/// and once the object is dropped, the keys will be lost.
/// * `store_name` - The name that should be used to open the IndexedDB
/// based database. If this isn't provided, a memory-only store will be
/// used. *Note* the memory-only store will lose your E2EE keys when the
/// `OlmMachine` gets dropped.
///
/// * `store_passphrase` - The passphrase that should be used to encrypt the
/// IndexedDB based store.
#[wasm_bindgen(constructor)]
#[allow(clippy::new_ret_no_self)]
pub fn new(
@@ -66,6 +71,7 @@ impl OlmMachine {
#[cfg(target_arch = "wasm32")]
(Some(store_name), Some(mut store_passphrase)) => {
use std::sync::Arc;
use zeroize::Zeroize;
let store = Some(
@@ -82,9 +88,22 @@ impl OlmMachine {
store
}
(Some(_), None) => return Err(anyhow::Error::msg("The `store_name` has been set, and so, it expects a `store_passphrase`, which is not set; please provide one")),
#[cfg(target_arch = "wasm32")]
(Some(store_name), None) => {
use std::sync::Arc;
Some(
matrix_sdk_indexeddb::IndexeddbCryptoStore::open_with_name(&store_name)
.await
.map(Arc::new)?,
)
}
(None, Some(_)) => return Err(anyhow::Error::msg("The `store_passphrase` has been set, but it has an effect only if `store_name` is set, which is not; please provide one")),
(None, Some(_)) => {
return Err(anyhow::Error::msg(
"The `store_passphrase` has been set, but it has an effect only if \
`store_name` is set, which is not; please provide one",
))
}
_ => None,
};

View File

@@ -156,7 +156,7 @@ impl DecryptedRoomEvent {
/// trusted.
#[wasm_bindgen(getter, js_name = "senderDevice")]
pub fn sender_device(&self) -> Option<identifiers::DeviceId> {
Some(identifiers::DeviceId::from(self.encryption_info.as_ref()?.sender_device.clone()))
Some(self.encryption_info.as_ref()?.sender_device.as_ref()?.clone().into())
}
/// The Curve25519 key of the device that created the megolm

View File

@@ -9,7 +9,7 @@ use crate::{identifiers, js::downcast};
#[wasm_bindgen]
#[derive(Debug)]
pub struct DeviceLists {
pub(crate) inner: ruma::api::client::sync::sync_events::v3::DeviceLists,
pub(crate) inner: ruma::api::client::sync::sync_events::DeviceLists,
}
#[wasm_bindgen]
@@ -19,7 +19,7 @@ impl DeviceLists {
/// `changed` and `left` must be an array of `UserId`.
#[wasm_bindgen(constructor)]
pub fn new(changed: Option<Array>, left: Option<Array>) -> Result<DeviceLists, JsError> {
let mut inner = ruma::api::client::sync::sync_events::v3::DeviceLists::default();
let mut inner = ruma::api::client::sync::sync_events::DeviceLists::default();
inner.changed = changed
.unwrap_or_default()

View File

@@ -176,13 +176,11 @@ describe('Key Verification', () => {
outgoingVerificationRequest = JSON.parse(outgoingVerificationRequest.body);
expect(outgoingVerificationRequest.event_type).toStrictEqual('m.key.verification.request');
const toDeviceEvents = {
events: [{
sender: userId1.toString(),
type: outgoingVerificationRequest.event_type,
content: outgoingVerificationRequest.messages[userId2.toString()][deviceId2.toString()],
}]
};
const toDeviceEvents = [{
sender: userId1.toString(),
type: outgoingVerificationRequest.event_type,
content: outgoingVerificationRequest.messages[userId2.toString()][deviceId2.toString()],
}];
// Let's send the verification request to `m2`.
await m2.receiveSyncChanges(JSON.stringify(toDeviceEvents), new DeviceLists(), new Map(), new Set());
@@ -230,13 +228,11 @@ describe('Key Verification', () => {
outgoingVerificationRequest = JSON.parse(outgoingVerificationRequest.body);
expect(outgoingVerificationRequest.event_type).toStrictEqual('m.key.verification.ready');
const toDeviceEvents = {
events: [{
sender: userId2.toString(),
type: outgoingVerificationRequest.event_type,
content: outgoingVerificationRequest.messages[userId1.toString()][deviceId1.toString()],
}],
};
const toDeviceEvents = [{
sender: userId2.toString(),
type: outgoingVerificationRequest.event_type,
content: outgoingVerificationRequest.messages[userId1.toString()][deviceId1.toString()],
}];
// Let's send the verification ready to `m1`.
await m1.receiveSyncChanges(JSON.stringify(toDeviceEvents), new DeviceLists(), new Map(), new Set());
@@ -287,13 +283,11 @@ describe('Key Verification', () => {
outgoingVerificationRequest = JSON.parse(outgoingVerificationRequest.body);
expect(outgoingVerificationRequest.event_type).toStrictEqual('m.key.verification.start');
const toDeviceEvents = {
events: [{
sender: userId2.toString(),
type: outgoingVerificationRequest.event_type,
content: outgoingVerificationRequest.messages[userId1.toString()][deviceId1.toString()],
}],
};
const toDeviceEvents = [{
sender: userId2.toString(),
type: outgoingVerificationRequest.event_type,
content: outgoingVerificationRequest.messages[userId1.toString()][deviceId1.toString()],
}];
// Let's send the SAS start to `m1`.
await m1.receiveSyncChanges(JSON.stringify(toDeviceEvents), new DeviceLists(), new Map(), new Set());
@@ -335,13 +329,11 @@ describe('Key Verification', () => {
outgoingVerificationRequest = JSON.parse(outgoingVerificationRequest.body);
expect(outgoingVerificationRequest.event_type).toStrictEqual('m.key.verification.accept');
const toDeviceEvents = {
events: [{
sender: userId1.toString(),
type: outgoingVerificationRequest.event_type,
content: outgoingVerificationRequest.messages[userId2.toString()][deviceId2.toString()],
}],
};
const toDeviceEvents = [{
sender: userId1.toString(),
type: outgoingVerificationRequest.event_type,
content: outgoingVerificationRequest.messages[userId2.toString()][deviceId2.toString()],
}];
// Let's send the SAS accept to `m2`.
await m2.receiveSyncChanges(JSON.stringify(toDeviceEvents), new DeviceLists(), new Map(), new Set());
@@ -364,13 +356,11 @@ describe('Key Verification', () => {
toDeviceRequest = JSON.parse(toDeviceRequest.body);
expect(toDeviceRequest.event_type).toStrictEqual('m.key.verification.key');
const toDeviceEvents = {
events: [{
sender: userId2.toString(),
type: toDeviceRequest.event_type,
content: toDeviceRequest.messages[userId1.toString()][deviceId1.toString()],
}],
};
const toDeviceEvents = [{
sender: userId2.toString(),
type: toDeviceRequest.event_type,
content: toDeviceRequest.messages[userId1.toString()][deviceId1.toString()],
}];
// Let's send te SAS key to `m1`.
await m1.receiveSyncChanges(JSON.stringify(toDeviceEvents), new DeviceLists(), new Map(), new Set());
@@ -390,13 +380,11 @@ describe('Key Verification', () => {
toDeviceRequest = JSON.parse(toDeviceRequest.body);
expect(toDeviceRequest.event_type).toStrictEqual('m.key.verification.key');
const toDeviceEvents = {
events: [{
sender: userId1.toString(),
type: toDeviceRequest.event_type,
content: toDeviceRequest.messages[userId2.toString()][deviceId2.toString()],
}],
};
const toDeviceEvents = [{
sender: userId1.toString(),
type: toDeviceRequest.event_type,
content: toDeviceRequest.messages[userId2.toString()][deviceId2.toString()],
}];
// Let's send te SAS key to `m2`.
await m2.receiveSyncChanges(JSON.stringify(toDeviceEvents), new DeviceLists(), new Map(), new Set());
@@ -463,13 +451,11 @@ describe('Key Verification', () => {
outgoingVerificationRequest = JSON.parse(outgoingVerificationRequest.body);
expect(outgoingVerificationRequest.event_type).toStrictEqual('m.key.verification.mac');
const toDeviceEvents = {
events: [{
sender: userId1.toString(),
type: outgoingVerificationRequest.event_type,
content: outgoingVerificationRequest.messages[userId2.toString()][deviceId2.toString()],
}],
};
const toDeviceEvents = [{
sender: userId1.toString(),
type: outgoingVerificationRequest.event_type,
content: outgoingVerificationRequest.messages[userId2.toString()][deviceId2.toString()],
}];
// Let's send te SAS confirmation to `m2`.
await m2.receiveSyncChanges(JSON.stringify(toDeviceEvents), new DeviceLists(), new Map(), new Set());
@@ -491,13 +477,11 @@ describe('Key Verification', () => {
outgoingVerificationRequest = JSON.parse(outgoingVerificationRequest.body);
expect(outgoingVerificationRequest.event_type).toStrictEqual('m.key.verification.mac');
const toDeviceEvents = {
events: [{
sender: userId2.toString(),
type: outgoingVerificationRequest.event_type,
content: outgoingVerificationRequest.messages[userId1.toString()][deviceId1.toString()],
}],
};
const toDeviceEvents = [{
sender: userId2.toString(),
type: outgoingVerificationRequest.event_type,
content: outgoingVerificationRequest.messages[userId1.toString()][deviceId1.toString()],
}];
// Let's send te SAS confirmation to `m1`.
await m1.receiveSyncChanges(JSON.stringify(toDeviceEvents), new DeviceLists(), new Map(), new Set());
@@ -512,13 +496,11 @@ describe('Key Verification', () => {
outgoingVerificationRequest = JSON.parse(outgoingVerificationRequest.body);
expect(outgoingVerificationRequest.event_type).toStrictEqual('m.key.verification.done');
const toDeviceEvents = {
events: [{
sender: userId2.toString(),
type: outgoingVerificationRequest.event_type,
content: outgoingVerificationRequest.messages[userId1.toString()][deviceId1.toString()],
}],
};
const toDeviceEvents = [{
sender: userId2.toString(),
type: outgoingVerificationRequest.event_type,
content: outgoingVerificationRequest.messages[userId1.toString()][deviceId1.toString()],
}];
// Let's send te SAS done to `m1`.
await m1.receiveSyncChanges(JSON.stringify(toDeviceEvents), new DeviceLists(), new Map(), new Set());
@@ -538,13 +520,11 @@ describe('Key Verification', () => {
toDeviceRequest = JSON.parse(toDeviceRequest.body);
expect(toDeviceRequest.event_type).toStrictEqual('m.key.verification.done');
const toDeviceEvents = {
events: [{
sender: userId1.toString(),
type: toDeviceRequest.event_type,
content: toDeviceRequest.messages[userId2.toString()][deviceId2.toString()],
}],
};
const toDeviceEvents = [{
sender: userId1.toString(),
type: toDeviceRequest.event_type,
content: toDeviceRequest.messages[userId2.toString()][deviceId2.toString()],
}];
// Let's send te SAS key to `m2`.
await m2.receiveSyncChanges(JSON.stringify(toDeviceEvents), new DeviceLists(), new Map(), new Set());
@@ -628,13 +608,11 @@ describe('Key Verification', () => {
outgoingVerificationRequest = JSON.parse(outgoingVerificationRequest.body);
expect(outgoingVerificationRequest.event_type).toStrictEqual('m.key.verification.request');
const toDeviceEvents = {
events: [{
sender: userId1.toString(),
type: outgoingVerificationRequest.event_type,
content: outgoingVerificationRequest.messages[userId2.toString()][deviceId2.toString()],
}]
};
const toDeviceEvents = [{
sender: userId1.toString(),
type: outgoingVerificationRequest.event_type,
content: outgoingVerificationRequest.messages[userId2.toString()][deviceId2.toString()],
}];
// Let's send the verification request to `m2`.
await m2.receiveSyncChanges(JSON.stringify(toDeviceEvents), new DeviceLists(), new Map(), new Set());
@@ -685,13 +663,11 @@ describe('Key Verification', () => {
outgoingVerificationRequest = JSON.parse(outgoingVerificationRequest.body);
expect(outgoingVerificationRequest.event_type).toStrictEqual('m.key.verification.ready');
const toDeviceEvents = {
events: [{
sender: userId2.toString(),
type: outgoingVerificationRequest.event_type,
content: outgoingVerificationRequest.messages[userId1.toString()][deviceId1.toString()],
}],
};
const toDeviceEvents = [{
sender: userId2.toString(),
type: outgoingVerificationRequest.event_type,
content: outgoingVerificationRequest.messages[userId1.toString()][deviceId1.toString()],
}];
// Let's send the verification ready to `m1`.
await m1.receiveSyncChanges(JSON.stringify(toDeviceEvents), new DeviceLists(), new Map(), new Set());
@@ -848,13 +824,11 @@ describe('Key Verification', () => {
outgoingVerificationRequest = JSON.parse(outgoingVerificationRequest.body);
expect(outgoingVerificationRequest.event_type).toStrictEqual('m.key.verification.start');
const toDeviceEvents = {
events: [{
sender: userId1.toString(),
type: outgoingVerificationRequest.event_type,
content: outgoingVerificationRequest.messages[userId2.toString()][deviceId2.toString()],
}]
};
const toDeviceEvents = [{
sender: userId1.toString(),
type: outgoingVerificationRequest.event_type,
content: outgoingVerificationRequest.messages[userId2.toString()][deviceId2.toString()],
}];
// Let's send the verification request to `m2`.
await m2.receiveSyncChanges(JSON.stringify(toDeviceEvents), new DeviceLists(), new Map(), new Set());
@@ -872,13 +846,11 @@ describe('Key Verification', () => {
outgoingVerificationRequest = JSON.parse(outgoingVerificationRequest.body);
expect(outgoingVerificationRequest.event_type).toStrictEqual('m.key.verification.done');
const toDeviceEvents = {
events: [{
sender: userId2.toString(),
type: outgoingVerificationRequest.event_type,
content: outgoingVerificationRequest.messages[userId1.toString()][deviceId1.toString()],
}]
};
const toDeviceEvents = [{
sender: userId2.toString(),
type: outgoingVerificationRequest.event_type,
content: outgoingVerificationRequest.messages[userId1.toString()][deviceId1.toString()],
}];
// Let's send the verification request to `m2`.
await m2.receiveSyncChanges(JSON.stringify(toDeviceEvents), new DeviceLists(), new Map(), new Set());

View File

@@ -11,14 +11,14 @@ function* zip(...arrays) {
// Add a machine to another machine, i.e. be sure a machine knows
// another exists.
async function addMachineToMachine(machineToAdd, machine) {
const toDeviceEvents = JSON.stringify({});
const toDeviceEvents = JSON.stringify([]);
const changedDevices = new DeviceLists();
const oneTimeKeyCounts = new Map();
const unusedFallbackKeys = new Set();
const receiveSyncChanges = JSON.parse(await machineToAdd.receiveSyncChanges(toDeviceEvents, changedDevices, oneTimeKeyCounts, unusedFallbackKeys));
expect(receiveSyncChanges).toEqual({});
expect(receiveSyncChanges).toEqual([]);
const outgoingRequests = await machineToAdd.outgoingRequests();

View File

@@ -128,26 +128,26 @@ describe(OlmMachine.name, () => {
test('can receive sync changes', async () => {
const m = await machine();
const toDeviceEvents = JSON.stringify({});
const toDeviceEvents = JSON.stringify([]);
const changedDevices = new DeviceLists();
const oneTimeKeyCounts = new Map();
const unusedFallbackKeys = new Set();
const receiveSyncChanges = JSON.parse(await m.receiveSyncChanges(toDeviceEvents, changedDevices, oneTimeKeyCounts, unusedFallbackKeys));
expect(receiveSyncChanges).toEqual({});
expect(receiveSyncChanges).toEqual([]);
});
test('can get the outgoing requests that need to be send out', async () => {
const m = await machine();
const toDeviceEvents = JSON.stringify({});
const toDeviceEvents = JSON.stringify([]);
const changedDevices = new DeviceLists();
const oneTimeKeyCounts = new Map();
const unusedFallbackKeys = new Set();
const receiveSyncChanges = JSON.parse(await m.receiveSyncChanges(toDeviceEvents, changedDevices, oneTimeKeyCounts, unusedFallbackKeys));
expect(receiveSyncChanges).toEqual({});
expect(receiveSyncChanges).toEqual([]);
const outgoingRequests = await m.outgoingRequests();
@@ -182,7 +182,7 @@ describe(OlmMachine.name, () => {
beforeAll(async () => {
m = await machine(new UserId('@alice:example.org'), new DeviceId('DEVICEID'));
const toDeviceEvents = JSON.stringify({});
const toDeviceEvents = JSON.stringify([]);
const changedDevices = new DeviceLists();
const oneTimeKeyCounts = new Map();
const unusedFallbackKeys = new Set();

View File

@@ -26,17 +26,14 @@ tracing = ["dep:tracing-subscriber"]
matrix-sdk-crypto = { version = "0.6.0", path = "../../crates/matrix-sdk-crypto", features = ["js"] }
matrix-sdk-common = { version = "0.6.0", path = "../../crates/matrix-sdk-common", features = ["js"] }
matrix-sdk-sled = { version = "0.2.0", path = "../../crates/matrix-sdk-sled", default-features = false, features = ["crypto-store"] }
ruma = { version = "0.7.0", features = ["client-api-c", "rand", "unstable-msc2676", "unstable-msc2677"] }
ruma = { workspace = true, features = ["rand", "unstable-msc2676", "unstable-msc2677"] }
napi = { version = "2.9.1", default-features = false, features = ["napi6", "tokio_rt"] }
napi-derive = "2.9.1"
serde_json = "1.0.79"
http = "0.2.6"
zeroize = "1.3.0"
tracing-subscriber = { version = "0.3", default-features = false, features = ["tracing-log", "time", "smallvec", "fmt", "env-filter"], optional = true }
[dependencies.vodozemac]
version = "0.3.0"
features = ["js"]
vodozemac = { workspace = true, features = ["js"]}
zeroize = { workspace = true }
[build-dependencies]
napi-build = "2.0.0"

View File

@@ -36,7 +36,7 @@ impl OlmMachine {
// the factory function. We also manually implement the
// constructor to raise an error when called.
/// Create a new memory-based `OlmMachine` asynchronously.
/// Create a new `OlmMachine` asynchronously.
///
/// The persistence of the encryption keys and all the inner
/// objects are controlled by the `store_path` argument.

View File

@@ -152,7 +152,7 @@ impl DecryptedRoomEvent {
/// trusted.
#[napi(getter)]
pub fn sender_device(&self) -> Option<identifiers::DeviceId> {
Some(identifiers::DeviceId::from(self.encryption_info.as_ref()?.sender_device.clone()))
Some(self.encryption_info.as_ref()?.sender_device.as_ref()?.clone().into())
}
/// The Curve25519 key of the device that created the megolm

View File

@@ -7,7 +7,7 @@ use crate::identifiers;
/// Information on E2E device updates.
#[napi]
pub struct DeviceLists {
pub(crate) inner: ruma::api::client::sync::sync_events::v3::DeviceLists,
pub(crate) inner: ruma::api::client::sync::sync_events::DeviceLists,
}
#[napi]
@@ -18,7 +18,7 @@ impl DeviceLists {
changed: Option<Vec<&identifiers::UserId>>,
left: Option<Vec<&identifiers::UserId>>,
) -> Self {
let mut inner = ruma::api::client::sync::sync_events::v3::DeviceLists::default();
let mut inner = ruma::api::client::sync::sync_events::DeviceLists::default();
inner.changed = changed.into_iter().flatten().map(|user| user.inner.clone()).collect();
inner.left = left.into_iter().flatten().map(|user| user.inner.clone()).collect();

View File

@@ -51,26 +51,26 @@ describe(OlmMachine.name, () => {
test('can receive sync changes', async () => {
const m = await machine();
const toDeviceEvents = JSON.stringify({});
const toDeviceEvents = JSON.stringify([]);
const changedDevices = new DeviceLists();
const oneTimeKeyCounts = {};
const unusedFallbackKeys = [];
const receiveSyncChanges = JSON.parse(await m.receiveSyncChanges(toDeviceEvents, changedDevices, oneTimeKeyCounts, unusedFallbackKeys));
expect(receiveSyncChanges).toEqual({});
expect(receiveSyncChanges).toEqual([]);
});
test('can get the outgoing requests that need to be send out', async () => {
const m = await machine();
const toDeviceEvents = JSON.stringify({});
const toDeviceEvents = JSON.stringify([]);
const changedDevices = new DeviceLists();
const oneTimeKeyCounts = {};
const unusedFallbackKeys = [];
const receiveSyncChanges = JSON.parse(await m.receiveSyncChanges(toDeviceEvents, changedDevices, oneTimeKeyCounts, unusedFallbackKeys));
expect(receiveSyncChanges).toEqual({});
expect(receiveSyncChanges).toEqual([]);
const outgoingRequests = await m.outgoingRequests();
@@ -105,12 +105,12 @@ describe(OlmMachine.name, () => {
beforeAll(async () => {
m = await machine(new UserId('@alice:example.org'), new DeviceId('DEVICEID'));
const toDeviceEvents = JSON.stringify({});
const toDeviceEvents = JSON.stringify([]);
const changedDevices = new DeviceLists();
const oneTimeKeyCounts = {};
const unusedFallbackKeys = [];
const receiveSyncChanges = await m.receiveSyncChanges(toDeviceEvents, changedDevices, oneTimeKeyCounts, unusedFallbackKeys);
await m.receiveSyncChanges(toDeviceEvents, changedDevices, oneTimeKeyCounts, unusedFallbackKeys);
outgoingRequests = await m.outgoingRequests();
expect(outgoingRequests).toHaveLength(2);

View File

@@ -12,19 +12,23 @@ repository = "https://github.com/matrix-org/matrix-rust-sdk"
[lib]
crate-type = ["cdylib", "staticlib"]
[features]
default = ["experimental-room-preview"] # the whole crate is still very experimental, so this is fine
experimental-room-preview = ["matrix-sdk/experimental-room-preview"]
[build-dependencies]
uniffi_build = { git = "https://github.com/mozilla/uniffi-rs", rev = "0eee77f67b716c4896494606e5931d249871b23a", features = ["builtin-bindgen"] }
uniffi_build = { workspace = true, features = ["builtin-bindgen"] }
[dependencies]
anyhow = "1.0.51"
extension-trait = "1.0.1"
futures-core = "0.3.17"
futures-signals = { version = "0.3.28" }
futures-signals = { version = "0.3.30", default-features = false }
futures-util = { version = "0.3.17", default-features = false }
# FIXME: we currently can't feature flag anything in the api.udl, therefore we must enforce sliding-sync being exposed here..
# see https://github.com/matrix-org/matrix-rust-sdk/issues/1014
#matrix-sdk = { path = "../../crates/matrix-sdk", features = ["anyhow", "markdown", "sliding-sync", "socks"], version = "0.6.0" }
matrix-sdk = { path = "../../crates/matrix-sdk", default-features = false, features = ["anyhow", "e2e-encryption", "sled", "markdown", "sliding-sync", "socks", "rustls-tls"], version = "0.6.0" }
#matrix-sdk = { path = "../../crates/matrix-sdk", features = ["anyhow", "experimental-timeline", "markdown", "sliding-sync", "socks"], version = "0.6.0" }
matrix-sdk = { path = "../../crates/matrix-sdk", default-features = false, features = ["anyhow", "sled", "e2e-encryption", "experimental-timeline", "markdown", "sliding-sync", "socks", "rustls-tls"], version = "0.6.0" }
once_cell = "1.10.0"
sanitize-filename-reader-friendly = "2.2.1"
serde = { version = "1", features = ["derive"] }
@@ -32,8 +36,7 @@ serde_json = { version = "1" }
thiserror = "1.0.30"
tokio = { version = "1", features = ["rt-multi-thread", "macros"] }
tokio-stream = "0.1.8"
tracing = "0.1.32"
tracing = { workspace = true }
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
# keep in sync with uniffi dependency in matrix-sdk-crypto-ffi, and uniffi_bindgen in ffi CI job
uniffi = { git = "https://github.com/mozilla/uniffi-rs", rev = "0eee77f67b716c4896494606e5931d249871b23a" }
uniffi_macros = { git = "https://github.com/mozilla/uniffi-rs", rev = "0eee77f67b716c4896494606e5931d249871b23a" }
uniffi = { workspace = true }
uniffi_macros = { workspace = true }

View File

@@ -175,6 +175,9 @@ interface SlidingSyncBuilder {
[Self=ByArc]
SlidingSyncBuilder add_view(SlidingSyncView view);
[Self=ByArc]
SlidingSyncBuilder with_common_extensions();
[Throws=ClientError, Self=ByArc]
SlidingSync build();
};
@@ -225,10 +228,6 @@ interface Client {
void logout();
};
callback interface RoomDelegate {
void did_receive_message(AnyMessage message);
};
enum Membership {
"Invited",
"Joined",
@@ -247,6 +246,14 @@ interface Room {
[Throws=ClientError]
string? member_display_name(string user_id);
void add_timeline_listener(TimelineListener listener);
// Loads older messages into the timeline.
//
// Raises an exception if there are no timeline listeners.
[Throws=ClientError]
PaginationOutcome paginate_backwards(u16 limit);
[Throws=ClientError]
void send(RoomMessageEventContent msg, string? txn_id);
@@ -257,50 +264,140 @@ interface Room {
void redact(string event_id, string? reason, string? txn_id);
};
interface BackwardsStream {
sequence<AnyMessage> paginate_backwards(u64 count);
callback interface TimelineListener {
void on_update(TimelineDiff update);
};
interface TimelineDiff {
TimelineChange change();
[Self=ByArc]
sequence<TimelineItem>? replace();
[Self=ByArc]
InsertAtData? insert_at();
[Self=ByArc]
UpdateAtData? update_at();
u32? remove_at();
MoveData? move();
[Self=ByArc]
TimelineItem? push();
};
enum TimelineChange {
"Replace",
"InsertAt",
"UpdateAt",
"RemoveAt",
"Move",
"Push",
"Pop",
"Clear",
};
dictionary InsertAtData {
u32 index;
TimelineItem item;
};
dictionary UpdateAtData {
u32 index;
TimelineItem item;
};
dictionary MoveData {
u32 old_index;
u32 new_index;
};
interface TimelineItem {};
interface EventTimelineItem {
TimelineKey key();
sequence<Reaction> reactions();
};
[Enum]
interface TimelineKey {
TransactionId(string txn_id);
EventId(string event_id);
};
// Other methods defined via proc-macro
interface Message {
MessageType? msgtype();
};
[Enum]
interface MessageType {
Emote(EmoteMessageContent content);
Image(ImageMessageContent content);
Notice(NoticeMessageContent content);
Text(TextMessageContent content);
};
dictionary EmoteMessageContent {
string body;
FormattedBody? formatted;
};
dictionary ImageMessageContent {
string body;
MediaSource source;
ImageInfo? info;
};
dictionary ImageInfo {
u64? height;
u64? width;
string? mimetype;
u64? size;
ThumbnailInfo? thumbnail_info;
MediaSource? thumbnail_source;
string? blurhash;
};
dictionary ThumbnailInfo {
u64? height;
u64? width;
string? mimetype;
u64? size;
};
dictionary NoticeMessageContent {
string body;
FormattedBody? formatted;
};
dictionary TextMessageContent {
string body;
FormattedBody? formatted;
};
dictionary FormattedBody {
MessageFormat format;
string body;
};
enum MessageFormat {
"Html",
"Unknown",
};
dictionary Reaction {
string key;
u64 count;
// senders to come
};
interface VirtualTimelineItem {};
dictionary PaginationOutcome {
// Whether there's more messages to be paginated.
boolean more_messages;
};
interface RoomMessageEventContent {};
interface AnyMessage {
TextMessage? text_message();
ImageMessage? image_message();
NoticeMessage? notice_message();
EmoteMessage? emote_message();
};
interface BaseMessage {
string id();
string body();
string sender();
u64 origin_server_ts();
string? transaction_id();
};
interface TextMessage {
BaseMessage base_message();
string? html_body();
};
interface ImageMessage {
BaseMessage base_message();
MediaSource source();
u64? width();
u64? height();
string? blurhash();
};
interface NoticeMessage {
BaseMessage base_message();
string? html_body();
};
interface EmoteMessage {
BaseMessage base_message();
string? html_body();
};
interface MediaSource {
string url();
};

View File

@@ -1,42 +0,0 @@
use core::pin::Pin;
use std::sync::Arc;
use futures_core::Stream;
use matrix_sdk::{deserialized_responses::SyncTimelineEvent, locks::Mutex, Result};
use tokio_stream::StreamExt;
use tracing::error;
use super::{
messages::{sync_event_to_message, AnyMessage},
RUNTIME,
};
type MsgStream = Pin<Box<dyn Stream<Item = Result<SyncTimelineEvent>> + Send>>;
pub struct BackwardsStream {
stream: Arc<Mutex<MsgStream>>,
}
impl BackwardsStream {
pub fn new(stream: MsgStream) -> Self {
BackwardsStream { stream: Arc::new(Mutex::new(Box::pin(stream))) }
}
pub fn paginate_backwards(&self, count: u64) -> Vec<Arc<AnyMessage>> {
let stream = self.stream.clone();
RUNTIME.block_on(async move {
let mut stream = stream.lock().await;
(&mut *stream)
.take(count as usize)
.filter_map(|r| match r {
Ok(ev) => sync_event_to_message(ev),
Err(e) => {
error!("Pagniation error: {e}");
None
}
})
.collect()
.await
})
}
}

View File

@@ -1,26 +1,23 @@
use std::sync::{Arc, RwLock};
use anyhow::anyhow;
use anyhow::{anyhow, Context};
use matrix_sdk::{
config::SyncSettings,
media::{MediaFormat, MediaRequest, MediaThumbnailSize},
ruma::{
api::{
client::{
account::whoami,
error::ErrorKind,
filter::{FilterDefinition, LazyLoadOptions, RoomEventFilter, RoomFilter},
media::get_content_thumbnail::v3::Method,
session::get_login_types,
sync::sync_events::v3::Filter,
},
error::{FromHttpResponseError, ServerError},
api::client::{
account::whoami,
error::ErrorKind,
filter::{FilterDefinition, LazyLoadOptions, RoomEventFilter, RoomFilter},
media::get_content_thumbnail::v3::Method,
session::get_login_types,
sync::sync_events::v3::Filter,
},
events::room::MediaSource,
serde::Raw,
TransactionId, UInt,
},
Client as MatrixClient, Error, HttpError, LoopCtrl, RumaApiError, Session,
Client as MatrixClient, Error, LoopCtrl, RumaApiError, Session,
};
use super::{
@@ -132,7 +129,7 @@ impl Client {
pub fn restore_token(&self) -> anyhow::Result<String> {
RUNTIME.block_on(async move {
let session = self.client.session().expect("Missing session");
let session = self.client.session().context("Missing session")?;
let homeurl = self.client.homeserver().await.into();
Ok(serde_json::to_string(&RestoreToken {
session,
@@ -144,14 +141,14 @@ impl Client {
}
pub fn user_id(&self) -> anyhow::Result<String> {
let user_id = self.client.user_id().expect("No User ID found");
let user_id = self.client.user_id().context("No User ID found")?;
Ok(user_id.to_string())
}
pub fn display_name(&self) -> anyhow::Result<String> {
let l = self.client.clone();
RUNTIME.block_on(async move {
let display_name = l.account().get_display_name().await?.expect("No User ID found");
let display_name = l.account().get_display_name().await?.context("No User ID found")?;
Ok(display_name)
})
}
@@ -159,13 +156,13 @@ impl Client {
pub fn avatar_url(&self) -> anyhow::Result<String> {
let l = self.client.clone();
RUNTIME.block_on(async move {
let avatar_url = l.account().get_avatar_url().await?.expect("No User ID found");
let avatar_url = l.account().get_avatar_url().await?.context("No User ID found")?;
Ok(avatar_url.to_string())
})
}
pub fn device_id(&self) -> anyhow::Result<String> {
let device_id = self.client.device_id().expect("No Device ID found");
let device_id = self.client.device_id().context("No Device ID found")?;
Ok(device_id.to_string())
}
@@ -238,13 +235,13 @@ impl Client {
return Ok(Arc::new(session_verification_controller.clone()));
}
let user_id = self.client.user_id().expect("Failed retrieving current user_id");
let user_id = self.client.user_id().context("Failed retrieving current user_id")?;
let user_identity = self
.client
.encryption()
.get_user_identity(user_id)
.await?
.expect("Failed retrieving user identity");
.context("Failed retrieving user identity")?;
let session_verification_controller = SessionVerificationController::new(user_identity);
@@ -268,10 +265,7 @@ impl Client {
/// Process a sync error and return loop control accordingly
fn process_sync_error(&self, sync_error: Error) -> LoopCtrl {
let mut control = LoopCtrl::Continue;
if let Error::Http(HttpError::Api(FromHttpResponseError::Server(ServerError::Known(
RumaApiError::ClientApi(error),
)))) = sync_error
{
if let Some(RumaApiError::ClientApi(error)) = sync_error.as_ruma_api_error() {
if let ErrorKind::UnknownToken { soft_logout } = error.kind {
self.state.write().unwrap().is_soft_logout = soft_logout;
if let Some(delegate) = &*self.delegate.read().unwrap() {
@@ -363,7 +357,7 @@ impl Client {
&*session_verification_controller.read().await
{
session_verification_controller
.process_to_device_messages(sync_response.to_device)
.process_to_device_messages(sync_response.to_device_events)
.await;
}

View File

@@ -74,7 +74,7 @@ impl ClientBuilder {
let data_path = PathBuf::from(base_path).join(sanitize(username));
fs::create_dir_all(&data_path)?;
inner_builder = RUNTIME.block_on(inner_builder.sled_store(data_path, None))?;
inner_builder = inner_builder.sled_store(data_path, None);
}
// Determine server either from URL, server name or user ID.

View File

@@ -2,15 +2,32 @@
#![allow(unused_qualifications)]
macro_rules! unwrap_or_clone_arc_into_variant {
(
$arc:ident $(, .$field:tt)?, $pat:pat => $body:expr
) => {
#[allow(unused_variables)]
match &(*$arc)$(.$field)? {
$pat => {
#[warn(unused_variables)]
match crate::helpers::unwrap_or_clone_arc($arc)$(.$field)? {
$pat => Some($body),
_ => unreachable!(),
}
},
_ => None,
}
};
}
pub mod authentication_service;
pub mod backward_stream;
pub mod client;
pub mod client_builder;
mod helpers;
pub mod messages;
pub mod room;
pub mod session_verification;
pub mod sliding_sync;
pub mod timeline;
mod uniffi_api;
use std::io;
@@ -27,11 +44,14 @@ pub use uniffi_api::*;
pub static RUNTIME: Lazy<Runtime> =
Lazy::new(|| Runtime::new().expect("Can't start Tokio runtime"));
pub use matrix_sdk::ruma::{api::client::account::register, UserId};
pub use matrix_sdk::{
room::timeline::PaginationOutcome,
ruma::{api::client::account::register, UserId},
};
pub use self::{
authentication_service::*, backward_stream::*, client::*, messages::*, room::*,
session_verification::*, sliding_sync::*,
authentication_service::*, client::*, room::*, session_verification::*, sliding_sync::*,
timeline::*,
};
#[derive(Default, Debug)]
@@ -79,12 +99,14 @@ mod uniffi_types {
authentication_service::{AuthenticationService, HomeserverLoginDetails},
client::Client,
client_builder::ClientBuilder,
messages::AnyMessage,
room::Room,
session_verification::SessionVerificationEmoji,
sliding_sync::{
SlidingSync, SlidingSyncBuilder, SlidingSyncRoom, SlidingSyncView, StoppableSpawn,
UnreadNotificationsCount,
},
timeline::{
EventTimelineItem, Message, TimelineItem, TimelineItemContent, VirtualTimelineItem,
},
};
}

View File

@@ -1,254 +0,0 @@
use std::sync::Arc;
use extension_trait::extension_trait;
pub use matrix_sdk::ruma::events::room::{message::RoomMessageEventContent, MediaSource};
use matrix_sdk::{
deserialized_responses::SyncTimelineEvent,
ruma::events::{
room::{
message::{ImageMessageEventContent, MessageFormat, MessageType},
ImageInfo,
},
AnySyncMessageLikeEvent, AnySyncTimelineEvent, SyncMessageLikeEvent,
},
};
#[derive(Clone)]
pub struct BaseMessage {
id: String,
body: String,
sender: String,
origin_server_ts: u64,
transaction_id: Option<String>,
}
impl BaseMessage {
pub fn id(&self) -> String {
self.id.clone()
}
pub fn body(&self) -> String {
self.body.clone()
}
pub fn sender(&self) -> String {
self.sender.clone()
}
pub fn origin_server_ts(&self) -> u64 {
self.origin_server_ts
}
pub fn transaction_id(&self) -> Option<String> {
self.transaction_id.clone()
}
}
pub struct TextMessage {
base_message: Arc<BaseMessage>,
html_body: Option<String>,
}
impl TextMessage {
pub fn base_message(&self) -> Arc<BaseMessage> {
self.base_message.clone()
}
pub fn html_body(&self) -> Option<String> {
self.html_body.clone()
}
}
pub struct ImageMessage {
base_message: Arc<BaseMessage>,
source: Arc<MediaSource>,
info: Option<Box<ImageInfo>>,
}
impl ImageMessage {
pub fn base_message(&self) -> Arc<BaseMessage> {
self.base_message.clone()
}
pub fn source(&self) -> Arc<MediaSource> {
self.source.clone()
}
pub fn width(&self) -> Option<u64> {
self.info.clone()?.width?.try_into().ok()
}
pub fn height(&self) -> Option<u64> {
self.info.clone()?.height?.try_into().ok()
}
pub fn blurhash(&self) -> Option<String> {
self.info.clone()?.blurhash
}
}
pub struct NoticeMessage {
base_message: Arc<BaseMessage>,
html_body: Option<String>,
}
impl NoticeMessage {
pub fn base_message(&self) -> Arc<BaseMessage> {
self.base_message.clone()
}
pub fn html_body(&self) -> Option<String> {
self.html_body.clone()
}
}
pub struct EmoteMessage {
base_message: Arc<BaseMessage>,
html_body: Option<String>,
}
impl EmoteMessage {
pub fn base_message(&self) -> Arc<BaseMessage> {
self.base_message.clone()
}
pub fn html_body(&self) -> Option<String> {
self.html_body.clone()
}
}
pub struct AnyMessage {
text: Option<Arc<TextMessage>>,
image: Option<Arc<ImageMessage>>,
notice: Option<Arc<NoticeMessage>>,
emote: Option<Arc<EmoteMessage>>,
}
impl AnyMessage {
pub fn text_message(&self) -> Option<Arc<TextMessage>> {
self.text.clone()
}
pub fn image_message(&self) -> Option<Arc<ImageMessage>> {
self.image.clone()
}
pub fn notice_message(&self) -> Option<Arc<NoticeMessage>> {
self.notice.clone()
}
pub fn emote_message(&self) -> Option<Arc<EmoteMessage>> {
self.emote.clone()
}
}
pub fn sync_event_to_message(sync_event: SyncTimelineEvent) -> Option<Arc<AnyMessage>> {
match sync_event.event.deserialize() {
Ok(AnySyncTimelineEvent::MessageLike(AnySyncMessageLikeEvent::RoomMessage(
SyncMessageLikeEvent::Original(m),
))) => {
let base_message = Arc::new(BaseMessage {
id: m.event_id.to_string(),
body: m.content.body().to_owned(),
sender: m.sender.to_string(),
origin_server_ts: m.origin_server_ts.as_secs().into(),
transaction_id: m.unsigned.transaction_id.map(|txn_id| txn_id.to_string()),
});
match m.content.msgtype {
MessageType::Image(ImageMessageEventContent { source, info, .. }) => {
let any_message = AnyMessage {
text: None,
image: Some(Arc::new(ImageMessage {
base_message,
source: Arc::new(source),
info,
})),
notice: None,
emote: None,
};
Some(Arc::new(any_message))
}
MessageType::Text(content) => {
let mut html_body: Option<String> = None;
if let Some(formatted_body) = content.formatted {
if formatted_body.format == MessageFormat::Html {
html_body = Some(formatted_body.body);
}
}
let any_message = AnyMessage {
text: Some(Arc::new(TextMessage { base_message, html_body })),
image: None,
notice: None,
emote: None,
};
Some(Arc::new(any_message))
}
MessageType::Notice(content) => {
let mut html_body: Option<String> = None;
if let Some(formatted_body) = content.formatted {
if formatted_body.format == MessageFormat::Html {
html_body = Some(formatted_body.body);
}
}
let any_message = AnyMessage {
text: None,
image: None,
notice: Some(Arc::new(NoticeMessage { base_message, html_body })),
emote: None,
};
Some(Arc::new(any_message))
}
MessageType::Emote(content) => {
let mut html_body: Option<String> = None;
if let Some(formatted_body) = content.formatted {
if formatted_body.format == MessageFormat::Html {
html_body = Some(formatted_body.body);
}
}
let any_message = AnyMessage {
text: None,
image: None,
notice: None,
emote: Some(Arc::new(EmoteMessage { base_message, html_body })),
};
Some(Arc::new(any_message))
}
_ => {
let any_message = AnyMessage {
text: Some(Arc::new(TextMessage { base_message, html_body: None })),
image: None,
notice: None,
emote: None,
};
Some(Arc::new(any_message))
}
}
}
_ => None,
}
}
#[uniffi::export]
pub fn media_source_from_url(url: String) -> Arc<MediaSource> {
Arc::new(MediaSource::Plain(url.into()))
}
#[uniffi::export]
pub fn message_event_content_from_markdown(md: String) -> Arc<RoomMessageEventContent> {
Arc::new(RoomMessageEventContent::text_markdown(md))
}
#[extension_trait]
pub impl MediaSourceExt for MediaSource {
fn url(&self) -> String {
match self {
MediaSource::Plain(url) => url.to_string(),
MediaSource::Encrypted(file) => file.url.to_string(),
}
}
}

View File

@@ -1,19 +1,24 @@
use std::{convert::TryFrom, sync::Arc};
use std::{
convert::TryFrom,
sync::{Arc, RwLock},
};
use anyhow::{bail, Context, Result};
use futures_signals::signal_vec::SignalVecExt;
use matrix_sdk::{
room::Room as MatrixRoom,
room::{
timeline::{PaginationOutcome, Timeline},
Room as SdkRoom,
},
ruma::{
events::room::message::{RoomMessageEvent, RoomMessageEventContent},
EventId, UserId,
},
};
use tracing::error;
use super::{messages::AnyMessage, RUNTIME};
pub trait RoomDelegate: Sync + Send {
fn did_receive_message(&self, messages: Arc<AnyMessage>);
}
use super::RUNTIME;
use crate::{TimelineDiff, TimelineListener};
pub enum Membership {
Invited,
@@ -22,7 +27,8 @@ pub enum Membership {
}
pub struct Room {
room: MatrixRoom,
room: SdkRoom,
timeline: RwLock<Option<Arc<Timeline>>>,
}
#[uniffi::export]
@@ -62,11 +68,19 @@ impl Room {
pub fn is_tombstoned(&self) -> bool {
self.room.is_tombstoned()
}
/// Removes the timeline.
///
/// Timeline items cached in memory as well as timeline listeners are
/// dropped.
pub fn remove_timeline(&self) {
*self.timeline.write().unwrap() = None;
}
}
impl Room {
pub fn new(room: MatrixRoom) -> Self {
Room { room }
pub fn new(room: SdkRoom) -> Self {
Room { room, timeline: RwLock::default() }
}
pub fn display_name(&self) -> Result<String> {
@@ -78,8 +92,8 @@ impl Room {
let room = self.room.clone();
let user_id = user_id;
RUNTIME.block_on(async move {
let user_id = <&UserId>::try_from(&*user_id).expect("Invalid user id.");
let member = room.get_member(user_id).await?.expect("No user found");
let user_id = <&UserId>::try_from(&*user_id).context("Invalid user id.")?;
let member = room.get_member(user_id).await?.context("No user found")?;
let avatar_url_string = member.avatar_url().map(|m| m.to_string());
Ok(avatar_url_string)
})
@@ -89,8 +103,8 @@ impl Room {
let room = self.room.clone();
let user_id = user_id;
RUNTIME.block_on(async move {
let user_id = <&UserId>::try_from(&*user_id).expect("Invalid user id.");
let member = room.get_member(user_id).await?.expect("No user found");
let user_id = <&UserId>::try_from(&*user_id).context("Invalid user id.")?;
let member = room.get_member(user_id).await?.context("No user found")?;
let avatar_url_string = member.display_name().map(|m| m.to_owned());
Ok(avatar_url_string)
})
@@ -98,20 +112,50 @@ impl Room {
pub fn membership(&self) -> Membership {
match &self.room {
MatrixRoom::Invited(_) => Membership::Invited,
MatrixRoom::Joined(_) => Membership::Joined,
MatrixRoom::Left(_) => Membership::Left,
SdkRoom::Invited(_) => Membership::Invited,
SdkRoom::Joined(_) => Membership::Joined,
SdkRoom::Left(_) => Membership::Left,
}
}
pub fn add_timeline_listener(&self, listener: Box<dyn TimelineListener>) {
let timeline_signal = self
.timeline
.write()
.unwrap()
.get_or_insert_with(|| Arc::new(self.room.timeline()))
.signal();
let listener: Arc<dyn TimelineListener> = listener.into();
RUNTIME.spawn(timeline_signal.for_each(move |diff| {
let listener = listener.clone();
let fut = RUNTIME
.spawn_blocking(move || listener.on_update(Arc::new(TimelineDiff::new(diff))));
async move {
if let Err(e) = fut.await {
error!("Timeline listener error: {e}");
}
}
}));
}
pub fn paginate_backwards(&self, limit: u16) -> Result<PaginationOutcome> {
if let Some(timeline) = &*self.timeline.read().unwrap() {
RUNTIME.block_on(async move { Ok(timeline.paginate_backwards(limit.into()).await?) })
} else {
bail!("No timeline listeners registered, can't paginate");
}
}
pub fn send(&self, msg: Arc<RoomMessageEventContent>, txn_id: Option<String>) -> Result<()> {
let room = match &self.room {
MatrixRoom::Joined(j) => j.clone(),
_ => bail!("Can't send to a room that isn't in joined state"),
let timeline = match &*self.timeline.read().unwrap() {
Some(t) => Arc::clone(t),
None => bail!("Timeline not set up, can't send message"),
};
RUNTIME.block_on(async move {
room.send((*msg).to_owned(), txn_id.as_deref().map(Into::into)).await?;
timeline.send((*msg).to_owned().into(), txn_id.as_deref().map(Into::into)).await?;
Ok(())
})
}
@@ -123,10 +167,15 @@ impl Room {
txn_id: Option<String>,
) -> Result<()> {
let room = match &self.room {
MatrixRoom::Joined(j) => j.clone(),
SdkRoom::Joined(j) => j.clone(),
_ => bail!("Can't send to a room that isn't in joined state"),
};
let timeline = match &*self.timeline.read().unwrap() {
Some(t) => Arc::clone(t),
None => bail!("Timeline not set up, can't send message"),
};
let event_id: &EventId =
in_reply_to_event_id.as_str().try_into().context("Failed to create EventId.")?;
@@ -144,7 +193,7 @@ impl Room {
let reply_content =
RoomMessageEventContent::text_markdown(msg).make_reply_to(original_message);
room.send(reply_content, txn_id.as_deref().map(Into::into)).await?;
timeline.send(reply_content.into(), txn_id.as_deref().map(Into::into)).await?;
Ok(())
})
@@ -167,7 +216,7 @@ impl Room {
txn_id: Option<String>,
) -> Result<()> {
let room = match &self.room {
MatrixRoom::Joined(j) => j.clone(),
SdkRoom::Joined(j) => j.clone(),
_ => bail!("Can't redact in a room that isn't in joined state"),
};
@@ -180,8 +229,9 @@ impl Room {
}
impl std::ops::Deref for Room {
type Target = MatrixRoom;
fn deref(&self) -> &MatrixRoom {
type Target = SdkRoom;
fn deref(&self) -> &SdkRoom {
&self.room
}
}

View File

@@ -6,8 +6,8 @@ use matrix_sdk::{
verification::{SasVerification, VerificationRequest},
},
ruma::{
api::client::sync::sync_events::v3::ToDevice,
events::{key::verification::VerificationMethod, AnyToDeviceEvent},
serde::Raw,
},
};
@@ -106,10 +106,10 @@ impl SessionVerificationController {
})
}
pub async fn process_to_device_messages(&self, to_device: ToDevice) {
pub async fn process_to_device_messages(&self, to_device_events: Vec<Raw<AnyToDeviceEvent>>) {
let sas_verification = self.sas_verification.clone();
for event in to_device.events.into_iter().filter_map(|e| e.deserialize().ok()) {
for event in to_device_events.into_iter().filter_map(|e| e.deserialize().ok()) {
match event {
AnyToDeviceEvent::KeyVerificationReady(event) => {
if !self.is_transaction_id_valid(event.content.transaction_id.to_string()) {

View File

@@ -5,6 +5,10 @@ use futures_signals::{
signal_vec::{SignalVecExt, VecDiff},
};
use futures_util::{pin_mut, StreamExt};
#[cfg(feature = "experimental-room-preview")]
use matrix_sdk::ruma::events::{
room::message::SyncRoomMessageEvent, AnySyncMessageLikeEvent, AnySyncTimelineEvent,
};
use matrix_sdk::ruma::{
api::client::sync::sync_events::{
v4::RoomSubscription as RumaRoomSubscription,
@@ -19,7 +23,9 @@ pub use matrix_sdk::{
use tokio::task::JoinHandle;
use super::{Client, Room, RUNTIME};
use crate::{helpers::unwrap_or_clone_arc, messages::AnyMessage};
use crate::helpers::unwrap_or_clone_arc;
#[cfg(feature = "experimental-room-preview")]
use crate::EventTimelineItem;
pub struct StoppableSpawn {
handle: Arc<RwLock<Option<JoinHandle<()>>>>,
@@ -115,22 +121,31 @@ impl SlidingSyncRoom {
Arc::new(self.inner.unread_notifications.clone().into())
}
pub fn full_room(&self) -> Option<Arc<Room>> {
self.client.get_room(self.inner.room_id()).map(|room| Arc::new(Room::new(room)))
}
}
#[cfg(feature = "experimental-room-preview")]
#[uniffi::export]
impl SlidingSyncRoom {
#[allow(clippy::significant_drop_in_scrutinee)]
pub fn latest_room_message(&self) -> Option<Arc<AnyMessage>> {
pub fn latest_room_message(&self) -> Option<Arc<EventTimelineItem>> {
let messages = self.inner.timeline();
// room is having the latest events at the end,
let lock = messages.lock_ref();
for m in lock.iter().rev() {
if let Some(e) = crate::messages::sync_event_to_message(m.clone().into()) {
return Some(e);
for ev in lock.iter().rev() {
if let Ok(AnySyncTimelineEvent::MessageLike(AnySyncMessageLikeEvent::RoomMessage(
SyncRoomMessageEvent::Original(o),
))) = ev.event.deserialize()
{
let inner =
matrix_sdk::room::timeline::EventTimelineItem::_new(o, ev.event.clone());
return Some(Arc::new(EventTimelineItem(inner)));
}
}
None
}
pub fn full_room(&self) -> Option<Arc<Room>> {
self.client.get_room(self.inner.room_id()).map(|room| Arc::new(Room::new(room)))
}
}
pub struct UpdateSummary {
@@ -570,6 +585,12 @@ impl SlidingSyncBuilder {
Arc::new(builder)
}
/// Enables the SDK's common extension set on the underlying sliding-sync
/// builder and returns the updated builder (builder-style chaining over
/// `Arc`).
pub fn with_common_extensions(self: Arc<Self>) -> Arc<Self> {
    // Take ownership of the builder (cloning only if the Arc is shared).
    let mut builder = unwrap_or_clone_arc(self);
    builder.inner = builder.inner.with_common_extensions();
    Arc::new(builder)
}
pub fn build(self: Arc<Self>) -> anyhow::Result<Arc<SlidingSync>> {
let builder = unwrap_or_clone_arc(self);
Ok(Arc::new(SlidingSync::new(builder.inner.build()?, builder.client)))

View File

@@ -0,0 +1,405 @@
use std::sync::Arc;
use extension_trait::extension_trait;
use futures_signals::signal_vec::VecDiff;
pub use matrix_sdk::ruma::events::room::{message::RoomMessageEventContent, MediaSource};
/// Builds an unencrypted [`MediaSource`] from the given URL string
/// (expected to be an `mxc://` URI — not validated here).
#[uniffi::export]
pub fn media_source_from_url(url: String) -> Arc<MediaSource> {
    let source = MediaSource::Plain(url.into());
    Arc::new(source)
}
/// Builds a room-message content from a Markdown string, rendering it to
/// a formatted body via the SDK's Markdown support.
#[uniffi::export]
pub fn message_event_content_from_markdown(md: String) -> Arc<RoomMessageEventContent> {
    let content = RoomMessageEventContent::text_markdown(md);
    Arc::new(content)
}
/// Callback interface notified of every change to a room's timeline.
///
/// Must be `Sync + Send` because updates are delivered from background
/// tasks; intended to be implemented by bindings consumers.
pub trait TimelineListener: Sync + Send {
    /// Called once per timeline change with the diff describing it.
    fn on_update(&self, diff: Arc<TimelineDiff>);
}
/// FFI-friendly wrapper around a [`VecDiff`] of timeline items.
///
/// `repr(transparent)` keeps the layout identical to the wrapped
/// `VecDiff`, mirroring the transparent wrapping used for the items.
#[repr(transparent)]
#[derive(Clone)]
pub struct TimelineDiff(VecDiff<Arc<TimelineItem>>);
impl TimelineDiff {
    /// Converts an SDK diff into the wrapper type, re-wrapping each
    /// contained item as a bindings-level [`TimelineItem`].
    pub(crate) fn new(inner: VecDiff<Arc<matrix_sdk::room::timeline::TimelineItem>>) -> Self {
        TimelineDiff(match inner {
            // Note: It's _probably_ valid to only transmute here too but not
            // as clear, and less important because this only happens
            // once when constructing the timeline.
            VecDiff::Replace { values } => VecDiff::Replace {
                values: values.into_iter().map(TimelineItem::from_arc).collect(),
            },
            VecDiff::InsertAt { index, value } => {
                VecDiff::InsertAt { index, value: TimelineItem::from_arc(value) }
            }
            VecDiff::UpdateAt { index, value } => {
                VecDiff::UpdateAt { index, value: TimelineItem::from_arc(value) }
            }
            VecDiff::RemoveAt { index } => VecDiff::RemoveAt { index },
            VecDiff::Move { old_index, new_index } => VecDiff::Move { old_index, new_index },
            VecDiff::Push { value } => VecDiff::Push { value: TimelineItem::from_arc(value) },
            VecDiff::Pop {} => VecDiff::Pop {},
            VecDiff::Clear {} => VecDiff::Clear {},
        })
    }

    /// Which kind of change this diff represents.
    pub fn change(&self) -> TimelineChange {
        match &self.0 {
            VecDiff::Replace { .. } => TimelineChange::Replace,
            VecDiff::InsertAt { .. } => TimelineChange::InsertAt,
            VecDiff::UpdateAt { .. } => TimelineChange::UpdateAt,
            VecDiff::RemoveAt { .. } => TimelineChange::RemoveAt,
            VecDiff::Move { .. } => TimelineChange::Move,
            VecDiff::Push { .. } => TimelineChange::Push,
            VecDiff::Pop {} => TimelineChange::Pop,
            VecDiff::Clear {} => TimelineChange::Clear,
        }
    }

    /// The full new list of items, if this is a `Replace` diff.
    pub fn replace(self: Arc<Self>) -> Option<Vec<Arc<TimelineItem>>> {
        unwrap_or_clone_arc_into_variant!(self, .0, VecDiff::Replace { values } => values)
    }

    /// The insertion index and new item, if this is an `InsertAt` diff.
    pub fn insert_at(self: Arc<Self>) -> Option<InsertAtData> {
        unwrap_or_clone_arc_into_variant!(self, .0, VecDiff::InsertAt { index, value } => {
            InsertAtData { index: index.try_into().unwrap(), item: value }
        })
    }

    /// The updated index and new item, if this is an `UpdateAt` diff.
    pub fn update_at(self: Arc<Self>) -> Option<UpdateAtData> {
        unwrap_or_clone_arc_into_variant!(self, .0, VecDiff::UpdateAt { index, value } => {
            UpdateAtData { index: index.try_into().unwrap(), item: value }
        })
    }

    /// The removed index, if this is a `RemoveAt` diff.
    pub fn remove_at(&self) -> Option<u32> {
        match &self.0 {
            VecDiff::RemoveAt { index } => Some((*index).try_into().unwrap()),
            _ => None,
        }
    }

    /// The old and new index, if this is a `Move` diff.
    pub fn r#move(&self) -> Option<MoveData> {
        match &self.0 {
            VecDiff::Move { old_index, new_index } => Some(MoveData {
                old_index: (*old_index).try_into().unwrap(),
                new_index: (*new_index).try_into().unwrap(),
            }),
            _ => None,
        }
    }

    /// The appended item, if this is a `Push` diff.
    pub fn push(self: Arc<Self>) -> Option<Arc<TimelineItem>> {
        unwrap_or_clone_arc_into_variant!(self, .0, VecDiff::Push { value } => value)
    }
}
/// Payload of an `InsertAt` diff: the insertion index and the new item.
pub struct InsertAtData {
    pub index: u32,
    pub item: Arc<TimelineItem>,
}

/// Payload of an `UpdateAt` diff: the changed index and its new item.
pub struct UpdateAtData {
    pub index: u32,
    pub item: Arc<TimelineItem>,
}

/// Payload of a `Move` diff: where the item was and where it went.
pub struct MoveData {
    pub old_index: u32,
    pub new_index: u32,
}
/// The kind of change carried by a [`TimelineDiff`]; mirrors the
/// variants of [`VecDiff`] one-to-one.
#[derive(Clone, Copy)]
pub enum TimelineChange {
    Replace,
    InsertAt,
    UpdateAt,
    RemoveAt,
    Move,
    Push,
    Pop,
    Clear,
}
/// FFI wrapper around an SDK timeline item.
///
/// `repr(transparent)` is load-bearing: [`TimelineItem::from_arc`]
/// relies on this type having exactly the layout of the wrapped type.
#[repr(transparent)]
#[derive(Clone)]
pub struct TimelineItem(matrix_sdk::room::timeline::TimelineItem);

impl TimelineItem {
    /// Reinterprets an `Arc` of the SDK item as an `Arc` of this wrapper
    /// without cloning the inner value.
    fn from_arc(arc: Arc<matrix_sdk::room::timeline::TimelineItem>) -> Arc<Self> {
        // SAFETY: This is valid because Self is a repr(transparent) wrapper
        // around the other Timeline type.
        unsafe { Arc::from_raw(Arc::into_raw(arc) as _) }
    }
}
#[uniffi::export]
impl TimelineItem {
    /// Returns the event form of this item, or `None` if it is virtual.
    pub fn as_event(self: Arc<Self>) -> Option<Arc<EventTimelineItem>> {
        use matrix_sdk::room::timeline::TimelineItem as Item;
        unwrap_or_clone_arc_into_variant!(self, .0, Item::Event(evt) => {
            Arc::new(EventTimelineItem(evt))
        })
    }

    /// Returns the virtual form of this item, or `None` if it is an event.
    pub fn as_virtual(self: Arc<Self>) -> Option<Arc<VirtualTimelineItem>> {
        use matrix_sdk::room::timeline::TimelineItem as Item;
        unwrap_or_clone_arc_into_variant!(self, .0, Item::Virtual(vt) => {
            Arc::new(VirtualTimelineItem(vt))
        })
    }
}
/// FFI wrapper around an SDK event timeline item.
pub struct EventTimelineItem(pub(crate) matrix_sdk::room::timeline::EventTimelineItem);

impl EventTimelineItem {
    /// The item's key: a transaction ID or an event ID (see [`TimelineKey`]).
    pub fn key(&self) -> TimelineKey {
        self.0.key().into()
    }

    /// All reactions on this event as (reaction key, count) pairs.
    pub fn reactions(&self) -> Vec<Reaction> {
        self.0
            .reactions()
            .iter()
            .map(|(k, v)| Reaction { key: k.to_owned(), count: v.count.into() })
            .collect()
    }
}
#[uniffi::export]
impl EventTimelineItem {
    /// The event's ID as a string, if the SDK knows one for this item.
    pub fn event_id(&self) -> Option<String> {
        let event_id = self.0.event_id()?;
        Some(event_id.to_string())
    }

    /// The sender's user ID as a string.
    pub fn sender(&self) -> String {
        self.0.sender().to_string()
    }

    /// Whether the SDK marks this event as sent by our own user.
    pub fn is_own(&self) -> bool {
        self.0.is_own()
    }

    /// The event's content, cloned into an FFI-friendly wrapper.
    pub fn content(&self) -> Arc<TimelineItemContent> {
        let content = self.0.content().clone();
        Arc::new(TimelineItemContent(content))
    }

    /// The event's origin server timestamp, if known.
    pub fn origin_server_ts(&self) -> Option<u64> {
        match self.0.origin_server_ts() {
            Some(ts) => Some(ts.0.into()),
            None => None,
        }
    }

    /// The raw JSON source of the event, if available.
    pub fn raw(&self) -> Option<String> {
        let raw = self.0.raw()?;
        Some(raw.json().get().to_owned())
    }
}
/// FFI wrapper around the content of an event timeline item.
#[derive(Clone, uniffi::Object)]
pub struct TimelineItemContent(matrix_sdk::room::timeline::TimelineItemContent);

#[uniffi::export]
impl TimelineItemContent {
    /// Returns the message form of this content, or `None` if it is not
    /// a (non-redacted) message.
    pub fn as_message(self: Arc<Self>) -> Option<Arc<Message>> {
        use matrix_sdk::room::timeline::TimelineItemContent as C;
        unwrap_or_clone_arc_into_variant!(self, .0, C::Message(msg) => Arc::new(Message(msg)))
    }

    /// Whether this content is a redacted message.
    pub fn is_redacted_message(&self) -> bool {
        use matrix_sdk::room::timeline::TimelineItemContent as C;
        matches!(self.0, C::RedactedMessage)
    }
}
/// FFI wrapper around an SDK timeline message.
#[derive(Clone)]
pub struct Message(matrix_sdk::room::timeline::Message);
impl Message {
pub fn msgtype(&self) -> Option<MessageType> {
use matrix_sdk::ruma::events::room::message::MessageType as MTy;
match self.0.msgtype() {
MTy::Emote(c) => Some(MessageType::Emote {
content: EmoteMessageContent {
body: c.body.clone(),
formatted: c.formatted.as_ref().map(Into::into),
},
}),
MTy::Image(c) => Some(MessageType::Image {
content: ImageMessageContent {
body: c.body.clone(),
source: Arc::new(c.source.clone()),
info: c.info.as_deref().map(Into::into),
},
}),
MTy::Notice(c) => Some(MessageType::Notice {
content: NoticeMessageContent {
body: c.body.clone(),
formatted: c.formatted.as_ref().map(Into::into),
},
}),
MTy::Text(c) => Some(MessageType::Text {
content: TextMessageContent {
body: c.body.clone(),
formatted: c.formatted.as_ref().map(Into::into),
},
}),
_ => None,
}
}
}
#[uniffi::export]
impl Message {
    /// The plain-text body of the message.
    pub fn body(&self) -> String {
        let msgtype = self.0.msgtype();
        msgtype.body().to_owned()
    }

    /// The ID of the event this message replies to, if any.
    // This event ID string will be replaced by something more useful later.
    pub fn in_reply_to(&self) -> Option<String> {
        self.0.in_reply_to().map(|id| id.to_string())
    }

    /// Whether this message has been edited.
    pub fn is_edited(&self) -> bool {
        self.0.is_edited()
    }
}
/// The subset of message types exposed over FFI, each carrying its
/// converted content (see [`Message::msgtype`]).
#[derive(Clone)]
pub enum MessageType {
    Emote { content: EmoteMessageContent },
    Image { content: ImageMessageContent },
    Notice { content: NoticeMessageContent },
    Text { content: TextMessageContent },
}
/// Content of an emote message: plain body plus optional formatted body.
#[derive(Clone)]
pub struct EmoteMessageContent {
    pub body: String,
    pub formatted: Option<FormattedBody>,
}

/// Content of an image message: caption/filename, media source and
/// optional metadata.
#[derive(Clone)]
pub struct ImageMessageContent {
    pub body: String,
    pub source: Arc<MediaSource>,
    pub info: Option<ImageInfo>,
}

/// Image metadata: dimensions, MIME type, byte size, thumbnail and
/// blurhash — every field optional.
#[derive(Clone)]
pub struct ImageInfo {
    pub height: Option<u64>,
    pub width: Option<u64>,
    pub mimetype: Option<String>,
    pub size: Option<u64>,
    pub thumbnail_info: Option<ThumbnailInfo>,
    pub thumbnail_source: Option<Arc<MediaSource>>,
    pub blurhash: Option<String>,
}

/// Thumbnail metadata: dimensions, MIME type and byte size.
#[derive(Clone)]
pub struct ThumbnailInfo {
    pub height: Option<u64>,
    pub width: Option<u64>,
    pub mimetype: Option<String>,
    pub size: Option<u64>,
}

/// Content of a notice message: plain body plus optional formatted body.
#[derive(Clone)]
pub struct NoticeMessageContent {
    pub body: String,
    pub formatted: Option<FormattedBody>,
}

/// Content of a text message: plain body plus optional formatted body.
#[derive(Clone)]
pub struct TextMessageContent {
    pub body: String,
    pub formatted: Option<FormattedBody>,
}

/// A formatted message body together with its format.
#[derive(Clone)]
pub struct FormattedBody {
    pub format: MessageFormat,
    pub body: String,
}
impl From<&matrix_sdk::ruma::events::room::message::FormattedBody> for FormattedBody {
    /// Converts a ruma formatted body into the FFI type; only HTML is
    /// distinguished, any other format maps to `Unknown`.
    fn from(f: &matrix_sdk::ruma::events::room::message::FormattedBody) -> Self {
        use matrix_sdk::ruma::events::room::message::MessageFormat as RumaFormat;

        let format = if matches!(&f.format, RumaFormat::Html) {
            MessageFormat::Html
        } else {
            MessageFormat::Unknown
        };
        Self { format, body: f.body.clone() }
    }
}
/// Message body formats exposed over FFI; formats other than HTML are
/// collapsed into `Unknown`.
#[derive(Clone, Copy)]
pub enum MessageFormat {
    Html,
    Unknown,
}
impl From<&matrix_sdk::ruma::events::room::ImageInfo> for ImageInfo {
    /// Converts ruma image metadata into the FFI type, widening the
    /// integer fields and cloning the optional thumbnail data.
    fn from(info: &matrix_sdk::ruma::events::room::ImageInfo) -> Self {
        let thumbnail_info = match &info.thumbnail_info {
            Some(thumb) => Some(ThumbnailInfo {
                height: thumb.height.map(Into::into),
                width: thumb.width.map(Into::into),
                mimetype: thumb.mimetype.clone(),
                size: thumb.size.map(Into::into),
            }),
            None => None,
        };

        Self {
            height: info.height.map(Into::into),
            width: info.width.map(Into::into),
            mimetype: info.mimetype.clone(),
            size: info.size.map(Into::into),
            thumbnail_info,
            thumbnail_source: info.thumbnail_source.clone().map(Arc::new),
            blurhash: info.blurhash.clone(),
        }
    }
}
/// Aggregated reaction on an event: the reaction key and how many times
/// it was applied.
#[derive(Clone)]
pub struct Reaction {
    pub key: String,
    pub count: u64,
    // TODO: Also expose senders
}

/// A single reaction instance: its timeline key and the sender's user ID.
#[derive(Clone)]
pub struct ReactionDetails {
    pub id: TimelineKey,
    pub sender: String,
}

/// Identifier for a timeline item: either a local transaction ID or a
/// server-assigned event ID.
#[derive(Clone)]
pub enum TimelineKey {
    TransactionId { txn_id: String },
    EventId { event_id: String },
}
impl From<&matrix_sdk::room::timeline::TimelineKey> for TimelineKey {
fn from(timeline_key: &matrix_sdk::room::timeline::TimelineKey) -> Self {
use matrix_sdk::room::timeline::TimelineKey::*;
match timeline_key {
TransactionId(txn_id) => TimelineKey::TransactionId { txn_id: txn_id.to_string() },
EventId(event_id) => TimelineKey::EventId { event_id: event_id.to_string() },
}
}
}
/// FFI wrapper around a virtual (non-event) timeline item.
#[derive(Clone)]
pub struct VirtualTimelineItem(matrix_sdk::room::timeline::VirtualTimelineItem);
#[extension_trait]
pub impl MediaSourceExt for MediaSource {
    /// Returns the source's URI as a string, regardless of whether the
    /// media is plain or encrypted.
    fn url(&self) -> String {
        let uri = match self {
            MediaSource::Plain(uri) => uri,
            MediaSource::Encrypted(file) => &file.url,
        };
        uri.to_string()
    }
}

View File

@@ -30,20 +30,22 @@ sso-login = ["matrix-sdk/sso-login"]
docs = []
[dependencies]
axum = { version = "0.5.16", default-features = false, features = ["json"] }
dashmap = "5.2.0"
http = "0.2.6"
http-body = "0.4.5"
hyper = { version = "0.14.20", features = ["http1", "http2", "server"] }
matrix-sdk = { version = "0.6.0", path = "../matrix-sdk", default-features = false, features = ["appservice"] }
percent-encoding = "2.1.0"
regex = "1.5.5"
ruma = { version = "0.7.0", features = ["client-api-c", "appservice-api-s"] }
ruma = { workspace = true, features = ["appservice-api-s"] }
serde = "1.0.136"
serde_json = "1.0.79"
serde_yaml = "0.9.4"
tokio = { version = "1.17.0", default-features = false, features = ["rt-multi-thread"] }
thiserror = "1.0.30"
tracing = "0.1.34"
tower = { version = "0.4.13", default-features = false }
tracing = { workspace = true }
url = "2.2.2"
warp = { version = "0.3.2", default-features = false }
[dev-dependencies]
matrix-sdk-test = { version = "0.6.0", path = "../../testing/matrix-sdk-test", features = ["appservice"] }

View File

@@ -77,8 +77,8 @@ pub enum Error {
#[error("utf8 error: {0}")]
Utf8(#[from] std::str::Utf8Error),
#[error("warp rejection: {0}")]
WarpRejection(String),
#[error("hyper error: {0}")]
Hyper(#[from] hyper::Error),
}
impl Error {
@@ -101,14 +101,6 @@ impl Error {
}
}
impl warp::reject::Reject for Error {}
impl From<warp::Rejection> for Error {
fn from(rejection: warp::Rejection) -> Self {
Self::WarpRejection(format!("{:?}", rejection))
}
}
impl From<matrix_sdk::HttpError> for Error {
fn from(e: matrix_sdk::HttpError) -> Self {
matrix_sdk::Error::from(e).into()

View File

@@ -79,8 +79,9 @@
//! [matrix-org/matrix-rust-sdk#228]: https://github.com/matrix-org/matrix-rust-sdk/issues/228
//! [examples directory]: https://github.com/matrix-org/matrix-rust-sdk/tree/main/crates/matrix-sdk-appservice/examples
use std::sync::Arc;
use std::{fmt::Debug, sync::Arc};
use axum::body::HttpBody;
use dashmap::DashMap;
pub use error::Error;
use event_handler::AppserviceFn;
@@ -114,6 +115,7 @@ mod webserver;
pub use registration::AppServiceRegistration;
use registration::NamespaceCache;
pub use virtual_user::VirtualUserBuilder;
pub use webserver::AppServiceRouter;
pub type Result<T, E = Error> = std::result::Result<T, E>;
@@ -403,30 +405,21 @@ impl AppService {
}
/// Check if given `user_id` is in any of the [`AppServiceRegistration`]'s
/// `users` namespaces
/// `users` namespaces.
pub fn user_id_is_in_namespace(&self, user_id: impl AsRef<str>) -> bool {
for regex in &self.namespaces.users {
if regex.is_match(user_id.as_ref()) {
return true;
}
}
false
let user_id = user_id.as_ref();
self.namespaces.users.iter().any(|regex| regex.is_match(user_id))
}
/// Returns a [`warp::Filter`] to be used as [`warp::serve()`] route.
///
/// Note that if you handle any of the [application-service-specific
/// routes], including the legacy routes, you will break the appservice
/// functionality.
///
/// Hint: [`warp::Filter`]s can be converted to an `hyper::Service` using
/// [`warp::service`], which allows using it with tower-compatible
/// frameworks such as axum.
///
/// [application-service-specific routes]: https://spec.matrix.org/unstable/application-service-api/#legacy-routes
pub fn warp_filter(&self) -> warp::filters::BoxedFilter<(impl warp::Reply,)> {
webserver::warp_filter(self.clone())
/// Returns a [`Service`][tower::Service] that processes appservice
/// requests.
pub fn service<B>(&self) -> AppServiceRouter<B>
where
B: HttpBody + Send + 'static,
B::Data: Send,
B::Error: Into<axum::BoxError>,
{
webserver::router(self.clone())
}
/// Receive an incoming [transaction], pushing the contained events to
@@ -572,6 +565,8 @@ mod tests {
sync::{Arc, Mutex},
};
use http::{Method, Request};
use hyper::Body;
use matrix_sdk::{
config::RequestConfig,
ruma::{api::appservice::Registration, events::room::member::OriginalSyncRoomMemberEvent},
@@ -585,7 +580,7 @@ mod tests {
serde::Raw,
};
use serde_json::json;
use warp::{Filter, Reply};
use tower::{Service, ServiceExt};
use wiremock::{
matchers::{body_json, header, method, path},
Mock, MockServer, ResponseTemplate,
@@ -656,21 +651,22 @@ mod tests {
let mut transaction_builder = TransactionBuilder::new();
transaction_builder.add_timeline_event(TimelineTestEvent::Member);
let transaction = transaction_builder.build_json_transaction();
let transaction = transaction_builder.build_transaction();
let appservice = appservice(None, None).await?;
let status = warp::test::request()
.method("PUT")
.path(uri)
.json(&transaction)
.filter(&appservice.warp_filter())
let response = appservice(None, None)
.await?
.service()
.oneshot(
Request::builder()
.method(Method::PUT)
.uri(uri)
.body(Body::from(transaction))
.unwrap(),
)
.await
.unwrap()
.into_response()
.status();
.unwrap();
assert_eq!(status, 200);
assert_eq!(response.status(), 200);
Ok(())
}
@@ -681,7 +677,7 @@ mod tests {
let mut transaction_builder = TransactionBuilder::new();
transaction_builder.add_timeline_event(TimelineTestEvent::Member);
let transaction = transaction_builder.build_json_transaction();
let transaction = transaction_builder.build_transaction();
let appservice = appservice(None, None).await?;
@@ -695,17 +691,20 @@ mod tests {
}
});
let status = warp::test::request()
.method("PUT")
.path(uri)
.json(&transaction)
.filter(&appservice.warp_filter())
.await
.unwrap()
.into_response()
.status();
let mut service = appservice.service();
assert_eq!(status, 200);
let response = service
.call(
Request::builder()
.method(Method::PUT)
.uri(uri)
.body(Body::from(transaction.clone()))
.unwrap(),
)
.await
.unwrap();
assert_eq!(response.status(), 200);
{
let on_room_member_called = *on_state_member.lock().unwrap();
assert!(on_room_member_called);
@@ -717,19 +716,20 @@ mod tests {
*on_room_member_called = false;
}
let status = warp::test::request()
.method("PUT")
.path(uri)
.json(&transaction)
.filter(&appservice.warp_filter())
let response = service
.call(
Request::builder()
.method(Method::PUT)
.uri(uri)
.body(Body::from(transaction))
.unwrap(),
)
.await
.unwrap()
.into_response()
.status();
.unwrap();
// According to https://spec.matrix.org/v1.2/application-service-api/#pushing-events
// This should noop and return 200.
assert_eq!(status, 200);
assert_eq!(response.status(), 200);
{
let on_room_member_called = *on_state_member.lock().unwrap();
// This time we should not have called the event handler.
@@ -746,16 +746,13 @@ mod tests {
let uri = "/_matrix/app/v1/users/%40_botty_1%3Adev.famedly.local?access_token=hs_token";
let status = warp::test::request()
.method("GET")
.path(uri)
.filter(&appservice.warp_filter())
let response = appservice
.service()
.oneshot(Request::builder().uri(uri).body(Body::empty()).unwrap())
.await
.unwrap()
.into_response()
.status();
.unwrap();
assert_eq!(status, 200);
assert_eq!(response.status(), 200);
Ok(())
}
@@ -767,16 +764,13 @@ mod tests {
let uri = "/_matrix/app/v1/rooms/%23magicforest%3Aexample.com?access_token=hs_token";
let status = warp::test::request()
.method("GET")
.path(uri)
.filter(&appservice.warp_filter())
let response = appservice
.service()
.oneshot(Request::builder().uri(uri).body(Body::empty()).unwrap())
.await
.unwrap()
.into_response()
.status();
.unwrap();
assert_eq!(status, 200);
assert_eq!(response.status(), 200);
Ok(())
}
@@ -786,23 +780,24 @@ mod tests {
let uri = "/_matrix/app/v1/transactions/1?access_token=invalid_token";
let mut transaction_builder = TransactionBuilder::new();
let transaction = transaction_builder
.add_timeline_event(TimelineTestEvent::Member)
.build_json_transaction();
let transaction =
transaction_builder.add_timeline_event(TimelineTestEvent::Member).build_transaction();
let appservice = appservice(None, None).await?;
let status = warp::test::request()
.method("PUT")
.path(uri)
.json(&transaction)
.filter(&appservice.warp_filter())
let response = appservice
.service()
.oneshot(
Request::builder()
.method(Method::PUT)
.uri(uri)
.body(Body::from(transaction))
.unwrap(),
)
.await
.unwrap()
.into_response()
.status();
.unwrap();
assert_eq!(status, 401);
assert_eq!(response.status(), 401);
Ok(())
}
@@ -813,23 +808,23 @@ mod tests {
let mut transaction_builder = TransactionBuilder::new();
transaction_builder.add_timeline_event(TimelineTestEvent::Member);
let transaction = transaction_builder.build_json_transaction();
let transaction = transaction_builder.build_transaction();
let appservice = appservice(None, None).await?;
{
let status = warp::test::request()
.method("PUT")
.path(uri)
.json(&transaction)
.filter(&appservice.warp_filter())
.await
.unwrap()
.into_response()
.status();
let response = appservice
.service()
.oneshot(
Request::builder()
.method(Method::PUT)
.uri(uri)
.body(Body::from(transaction))
.unwrap(),
)
.await
.unwrap();
assert_eq!(status, 401);
}
assert_eq!(response.status(), 401);
Ok(())
}
@@ -852,13 +847,17 @@ mod tests {
let mut transaction_builder = TransactionBuilder::new();
transaction_builder.add_timeline_event(TimelineTestEvent::Member);
let transaction = transaction_builder.build_json_transaction();
let transaction = transaction_builder.build_transaction();
warp::test::request()
.method("PUT")
.path(uri)
.json(&transaction)
.filter(&appservice.warp_filter())
appservice
.service()
.oneshot(
Request::builder()
.method(Method::PUT)
.uri(uri)
.body(Body::from(transaction))
.unwrap(),
)
.await
.unwrap();
@@ -868,30 +867,6 @@ mod tests {
Ok(())
}
#[async_test]
async fn test_unrelated_path() -> Result<()> {
let appservice = appservice(None, None).await?;
let status = {
let consumer_filter = warp::any()
.and(appservice.warp_filter())
.or(warp::get().and(warp::path("unrelated").map(warp::reply)));
let response = warp::test::request()
.method("GET")
.path("/unrelated")
.filter(&consumer_filter)
.await?
.into_response();
response.status()
};
assert_eq!(status, 200);
Ok(())
}
#[async_test]
async fn test_appservice_on_sub_path() -> Result<()> {
let room_id = room_id!("!SVkFJHzfwvuaIEawgC:localhost");
@@ -900,29 +875,33 @@ mod tests {
let mut transaction_builder = TransactionBuilder::new();
transaction_builder.add_timeline_event(TimelineTestEvent::Member);
let transaction_1 = transaction_builder.build_json_transaction();
let transaction_1 = transaction_builder.build_transaction();
let mut transaction_builder = TransactionBuilder::new();
transaction_builder.add_timeline_event(TimelineTestEvent::MemberNameChange);
let transaction_2 = transaction_builder.build_json_transaction();
let transaction_2 = transaction_builder.build_transaction();
let appservice = appservice(None, None).await?;
let mut service = axum::Router::new().nest("/sub_path", appservice.service());
{
warp::test::request()
.method("PUT")
.path(uri_1)
.json(&transaction_1)
.filter(&warp::path("sub_path").and(appservice.warp_filter()))
.await?;
warp::test::request()
.method("PUT")
.path(uri_2)
.json(&transaction_2)
.filter(&warp::path("sub_path").and(appservice.warp_filter()))
.await?;
};
service
.call(
Request::builder()
.method(Method::PUT)
.uri(uri_1)
.body(Body::from(transaction_1))?,
)
.await
.unwrap();
service
.call(
Request::builder()
.method(Method::PUT)
.uri(uri_2)
.body(Body::from(transaction_2))?,
)
.await
.unwrap();
let members = appservice
.virtual_user(None)
@@ -1037,7 +1016,9 @@ mod tests {
}
mod registration {
use super::*;
use ruma::api::appservice::Registration;
use crate::{tests::registration_string, AppServiceRegistration, Result};
#[test]
fn test_registration() -> Result<()> {

View File

@@ -12,22 +12,28 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use std::net::ToSocketAddrs;
use matrix_sdk::{
bytes::Bytes,
ruma::{
self,
api::{
appservice::query::{
query_room_alias::v1 as query_room, query_user_id::v1 as query_user,
},
IncomingRequest,
},
},
use std::{
convert::Infallible,
future::Future,
net::ToSocketAddrs,
pin::Pin,
task::{self, Poll},
};
use serde::Serialize;
use warp::{filters::BoxedFilter, path::FullPath, Filter, Rejection, Reply};
use axum::{
async_trait,
body::{Bytes, HttpBody},
extract::{FromRequest, Path, RequestParts},
middleware::{self, Next},
response::{ErrorResponse, IntoResponse, Response},
routing::{future::RouteFuture, get, put},
BoxError, Extension, Json, Router,
};
use http::StatusCode;
use hyper::Body;
use matrix_sdk::ruma::{self, api::IncomingRequest};
use serde::{Deserialize, Serialize};
use tower::{make, Service, ServiceBuilder};
use crate::{AppService, Error, Result};
@@ -36,195 +42,179 @@ pub async fn run_server(
host: impl Into<String>,
port: impl Into<u16>,
) -> Result<()> {
let routes = warp_filter(appservice);
let router: AppServiceRouter = router(appservice);
let mut addr = format!("{}:{}", host.into(), port.into()).to_socket_addrs()?;
let mut addr = (host.into(), port.into()).to_socket_addrs()?;
if let Some(addr) = addr.next() {
warp::serve(routes).run(addr).await;
hyper::Server::bind(&addr).serve(make::Shared::new(router)).await?;
Ok(())
} else {
Err(Error::HostPortToSocketAddrs)
}
}
pub fn warp_filter(appservice: AppService) -> BoxedFilter<(impl Reply,)> {
// TODO: try to use a struct instead of needlessly cloning appservice multiple
// times on every request
warp::any()
.and(filters::transactions(appservice.clone()))
.or(filters::users(appservice.clone()))
.or(filters::rooms(appservice))
.recover(handle_rejection)
.boxed()
pub fn router<B>(appservice: AppService) -> AppServiceRouter<B>
where
B: HttpBody + Send + 'static,
B::Data: Send,
B::Error: Into<BoxError>,
{
AppServiceRouter(
Router::new()
.route("/_matrix/app/v1/users/:user_id", get(handlers::user))
.route("/_matrix/app/v1/rooms/:room_id", get(handlers::room))
.route("/_matrix/app/v1/transactions/:txn_id", put(handlers::transaction))
.route("/users/:user_id", get(handlers::user))
.route("/rooms/:room_id", get(handlers::room))
.route("/transactions/:txn_id", put(handlers::transaction))
// FIXME: Use Route::with_state instead of an Extension layer in axum 0.6
.layer(
ServiceBuilder::new()
.layer(Extension(appservice))
.layer(middleware::from_fn(validate_access_token)),
),
)
}
mod filters {
use super::*;
#[derive(Debug)]
pub struct AppServiceRouter<B = Body>(Router<B>);
pub fn users(appservice: AppService) -> BoxedFilter<(impl Reply,)> {
warp::get()
.and(
warp::path!("_matrix" / "app" / "v1" / "users" / String)
// legacy route
.or(warp::path!("users" / String))
.unify(),
)
.and(warp::path::end())
.and(common(appservice))
.and_then(handlers::user)
.boxed()
impl<B> Clone for AppServiceRouter<B> {
fn clone(&self) -> Self {
Self(self.0.clone())
}
}
impl<B> Service<http::Request<B>> for AppServiceRouter<B>
where
B: HttpBody + Send + 'static,
B::Data: Send,
B::Error: Into<BoxError>,
{
// axum's Response type is part of the signature because axum::Router::nest
// requires the inner service to have that exact response (body) type in
// 0.5.x; this will be fixed in axum 0.6.0.
type Response = Response;
type Error = Infallible;
type Future = AppServiceRouteFuture<B>;
fn poll_ready(&mut self, cx: &mut task::Context<'_>) -> Poll<Result<(), Self::Error>> {
self.0.poll_ready(cx)
}
pub fn rooms(appservice: AppService) -> BoxedFilter<(impl Reply,)> {
warp::get()
.and(
warp::path!("_matrix" / "app" / "v1" / "rooms" / String)
// legacy route
.or(warp::path!("rooms" / String))
.unify(),
)
.and(warp::path::end())
.and(common(appservice))
.and_then(handlers::room)
.boxed()
fn call(&mut self, req: http::Request<B>) -> Self::Future {
AppServiceRouteFuture(self.0.call(req))
}
}
pub fn transactions(appservice: AppService) -> BoxedFilter<(impl Reply,)> {
warp::put()
.and(
warp::path!("_matrix" / "app" / "v1" / "transactions" / String)
// legacy route
.or(warp::path!("transactions" / String))
.unify(),
)
.and(warp::path::end())
.and(common(appservice))
.and_then(handlers::transaction)
.boxed()
pub struct AppServiceRouteFuture<B>(RouteFuture<B, Infallible>);
impl<B> Future for AppServiceRouteFuture<B>
where
B: HttpBody,
{
type Output = Result<Response, Infallible>;
fn poll(mut self: Pin<&mut Self>, cx: &mut task::Context<'_>) -> Poll<Self::Output> {
Pin::new(&mut self.0).poll(cx)
}
}
fn common(appservice: AppService) -> BoxedFilter<(AppService, http::Request<Bytes>)> {
warp::any()
.and(valid_access_token(appservice.registration().hs_token.clone()))
.map(move || appservice.clone())
.and(
http_request().and_then(|request| async move {
Ok::<http::Request<Bytes>, Rejection>(request)
}),
)
.boxed()
}
pub struct MatrixRequest<T>(T);
pub fn valid_access_token(token: String) -> BoxedFilter<()> {
warp::any()
.map(move || token.clone())
.and(warp::query::raw())
.and_then(|token: String, query: String| async move {
let query: Vec<(String, String)> =
ruma::serde::urlencoded::from_str(&query).map_err(Error::from)?;
#[async_trait]
impl<T, B> FromRequest<B> for MatrixRequest<T>
where
T: IncomingRequest,
B: HttpBody + Send,
B::Data: Send,
B::Error: Into<BoxError>,
{
type Rejection = Response;
if query.into_iter().any(|(key, value)| key == "access_token" && value == token) {
Ok::<(), Rejection>(())
} else {
Err(warp::reject::custom(Unauthorized))
}
})
.untuple_one()
.boxed()
}
async fn from_request(req: &mut RequestParts<B>) -> Result<Self, Self::Rejection> {
let path_params =
req.extract::<Path<Vec<String>>>().await.map_err(IntoResponse::into_response)?;
let parts = req.extract::<http::request::Parts>().await.map_err(|e| match e {})?;
let body = req.extract::<Bytes>().await.map_err(IntoResponse::into_response)?;
pub fn http_request() -> impl Filter<Extract = (http::Request<Bytes>,), Error = Rejection> + Copy
{
// TODO: extract `hyper::Request` instead
// blocked by https://github.com/seanmonstar/warp/issues/139
warp::any()
.and(warp::method())
.and(warp::filters::path::full())
.and(warp::filters::query::raw())
.and(warp::header::headers_cloned())
.and(warp::body::bytes())
.and_then(|method, path: FullPath, query, headers, bytes| async move {
let uri = http::uri::Builder::new()
.path_and_query(format!("{}?{query}", path.as_str()))
.build()
.map_err(Error::from)?;
let http_request = http::Request::from_parts(parts, body);
let mut request = http::Request::builder()
.method(method)
.uri(uri)
.body(bytes)
.map_err(Error::from)?;
let request = T::try_from_http_request(http_request, &path_params).map_err(|_e| {
// TODO: JSON error response
StatusCode::BAD_REQUEST.into_response()
})?;
*request.headers_mut() = headers;
Ok::<http::Request<Bytes>, Rejection>(request)
})
Ok(Self(request))
}
}
mod handlers {
use percent_encoding::percent_decode_str;
use axum::{response::IntoResponse, Extension, Json};
use http::StatusCode;
use ruma::api::appservice::{
event::push_events,
query::{query_room_alias, query_user_id},
};
use serde::Serialize;
use super::*;
use super::{ErrorMessage, MatrixRequest};
use crate::AppService;
#[derive(Serialize)]
struct EmptyObject {}
pub async fn user(
user_id: String,
appservice: AppService,
request: http::Request<Bytes>,
) -> Result<impl Reply, Rejection> {
Extension(appservice): Extension<AppService>,
MatrixRequest(request): MatrixRequest<query_user_id::v1::IncomingRequest>,
) -> impl IntoResponse {
if let Some(user_exists) = appservice.event_handler.users.lock().await.as_mut() {
let user_id = percent_decode_str(&user_id).decode_utf8().map_err(Error::from)?;
let request = query_user::IncomingRequest::try_from_http_request(request, &[user_id])
.map_err(Error::from)?;
return if user_exists(appservice.clone(), request).await {
Ok(warp::reply::json(&EmptyObject {}))
if user_exists(appservice.clone(), request).await {
Ok(Json(EmptyObject {}))
} else {
Err(warp::reject::not_found())
};
Err(StatusCode::NOT_FOUND)
}
} else {
Ok(Json(EmptyObject {}))
}
Ok(warp::reply::json(&EmptyObject {}))
}
pub async fn room(
room_id: String,
appservice: AppService,
request: http::Request<Bytes>,
) -> Result<impl Reply, Rejection> {
Extension(appservice): Extension<AppService>,
MatrixRequest(request): MatrixRequest<query_room_alias::v1::IncomingRequest>,
) -> impl IntoResponse {
if let Some(room_exists) = appservice.event_handler.rooms.lock().await.as_mut() {
let room_id = percent_decode_str(&room_id).decode_utf8().map_err(Error::from)?;
let request = query_room::IncomingRequest::try_from_http_request(request, &[room_id])
.map_err(Error::from)?;
return if room_exists(appservice.clone(), request).await {
Ok(warp::reply::json(&EmptyObject {}))
if room_exists(appservice.clone(), request).await {
Ok(Json(&EmptyObject {}))
} else {
Err(warp::reject::not_found())
};
Err(StatusCode::NOT_FOUND)
}
} else {
Ok(Json(&EmptyObject {}))
}
Ok(warp::reply::json(&EmptyObject {}))
}
pub async fn transaction(
txn_id: String,
appservice: AppService,
request: http::Request<Bytes>,
) -> Result<impl Reply, Rejection> {
let incoming_transaction: ruma::api::appservice::event::push_events::v1::IncomingRequest =
ruma::api::IncomingRequest::try_from_http_request(request, &[txn_id])
.map_err(Error::from)?;
appservice.receive_transaction(incoming_transaction).await?;
Ok(warp::reply::json(&EmptyObject {}))
appservice: Extension<AppService>,
MatrixRequest(request): MatrixRequest<push_events::v1::IncomingRequest>,
) -> impl IntoResponse {
match appservice.receive_transaction(request).await {
Ok(_) => Ok(Json(&EmptyObject {})),
Err(e) => {
let status_code = StatusCode::INTERNAL_SERVER_ERROR;
Err((
status_code,
Json(ErrorMessage { code: status_code.as_u16(), message: e.to_string() }),
))
}
}
}
}
#[derive(Debug)]
struct Unauthorized;
impl warp::reject::Reject for Unauthorized {}
#[derive(Deserialize)]
struct QueryParameters {
access_token: String,
}
#[derive(Serialize)]
struct ErrorMessage {
@@ -232,15 +222,23 @@ struct ErrorMessage {
message: String,
}
pub async fn handle_rejection(err: Rejection) -> Result<impl Reply, Rejection> {
if err.find::<Unauthorized>().is_some() || err.find::<warp::reject::InvalidQuery>().is_some() {
let code = http::StatusCode::UNAUTHORIZED;
let message = "UNAUTHORIZED";
async fn validate_access_token<B>(
req: http::Request<B>,
next: Next<B>,
) -> Result<Response, ErrorResponse> {
let appservice =
req.extensions().get::<AppService>().ok_or(StatusCode::INTERNAL_SERVER_ERROR)?;
let json =
warp::reply::json(&ErrorMessage { code: code.as_u16(), message: message.into() });
Ok(warp::reply::with_status(json, code))
} else {
Err(err)
let query_string = req.uri().query().unwrap_or("");
match ruma::serde::urlencoded::from_str::<QueryParameters>(query_string) {
Ok(query) if query.access_token == appservice.registration.hs_token => {
Ok(next.run(req).await)
}
_ => {
let status_code = StatusCode::UNAUTHORIZED;
let message =
ErrorMessage { code: status_code.as_u16(), message: "UNAUTHORIZED".into() };
Err((status_code, Json(message)).into())
}
}
}

View File

@@ -18,7 +18,7 @@ rustdoc-args = ["--cfg", "docsrs"]
[features]
default = []
e2e-encryption = ["dep:matrix-sdk-crypto"]
js = ["matrix-sdk-common/js", "matrix-sdk-crypto?/js", "ruma/js"]
js = ["matrix-sdk-common/js", "matrix-sdk-crypto?/js", "ruma/js", "matrix-sdk-store-encryption/js"]
qrcode = ["matrix-sdk-crypto?/qrcode"]
sliding-sync = ["ruma/unstable-msc3575"]
@@ -37,21 +37,22 @@ http = { version = "0.2.6", optional = true }
lru = "0.8.0"
matrix-sdk-common = { version = "0.6.0", path = "../matrix-sdk-common" }
matrix-sdk-crypto = { version = "0.6.0", path = "../matrix-sdk-crypto", optional = true }
matrix-sdk-store-encryption = { version = "0.2.0", path = "../matrix-sdk-store-encryption" }
once_cell = "1.10.0"
ruma = { version = "0.7.0", features = ["client-api-c", "canonical-json"] }
ruma = { workspace = true, features = ["canonical-json"] }
serde = { version = "1.0.136", features = ["rc"] }
serde_json = "1.0.79"
thiserror = "1.0.30"
tracing = "0.1.34"
zeroize = { version = "1.3.0", features = ["zeroize_derive"] }
tracing = { workspace = true }
zeroize = { workspace = true, features = ["zeroize_derive"] }
[dev-dependencies]
futures = { version = "0.3.21", default-features = false, features = ["executor"] }
tracing = { version = "0.1.26", features = ["log"] }
http = "0.2.6"
assign = "1.1.1"
env_logger = "0.9.0"
ctor = "0.1.23"
futures = { version = "0.3.21", default-features = false, features = ["executor"] }
http = "0.2.6"
matrix-sdk-test = { version = "0.6.0", path = "../../testing/matrix-sdk-test" }
tracing-subscriber = { version = "0.3.11", features = ["env-filter"] }
[target.'cfg(not(target_arch = "wasm32"))'.dev-dependencies]
tokio = { version = "1.17.0", default-features = false, features = ["rt-multi-thread", "macros"] }

View File

@@ -94,9 +94,8 @@ impl fmt::Debug for BaseClient {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("Client")
.field("session_meta", &self.store.session_meta())
.field("session_tokens", &self.store.session_tokens)
.field("sync_token", &self.store.sync_token)
.finish()
.finish_non_exhaustive()
}
}
@@ -253,10 +252,12 @@ impl BaseClient {
}
#[allow(clippy::too_many_arguments)]
async fn handle_timeline(
pub(crate) async fn handle_timeline(
&self,
room: &Room,
ruma_timeline: api::sync::sync_events::v3::Timeline,
limited: bool,
events: Vec<Raw<AnySyncTimelineEvent>>,
prev_batch: Option<String>,
push_rules: &Ruleset,
user_ids: &mut BTreeSet<OwnedUserId>,
room_info: &mut RoomInfo,
@@ -265,10 +266,10 @@ impl BaseClient {
) -> Result<Timeline> {
let room_id = room.room_id();
let user_id = room.own_user_id();
let mut timeline = Timeline::new(ruma_timeline.limited, ruma_timeline.prev_batch.clone());
let mut timeline = Timeline::new(limited, prev_batch);
let mut push_context = self.get_push_room_context(room, room_info, changes).await?;
for event in ruma_timeline.events {
for event in events {
#[allow(unused_mut)]
let mut event: SyncTimelineEvent = event.into();
@@ -497,7 +498,7 @@ impl BaseClient {
Ok(user_ids)
}
async fn handle_room_account_data(
pub(crate) async fn handle_room_account_data(
&self,
room_id: &RoomId,
events: &[Raw<AnyRoomAccountDataEvent>],
@@ -510,7 +511,7 @@ impl BaseClient {
}
}
async fn handle_account_data(
pub(crate) async fn handle_account_data(
&self,
events: &[Raw<AnyGlobalAccountDataEvent>],
changes: &mut StateChanges,
@@ -553,6 +554,31 @@ impl BaseClient {
changes.account_data = account_data;
}
#[cfg(feature = "e2e-encryption")]
pub(crate) async fn preprocess_to_device_events(
&self,
to_device_events: Vec<Raw<ruma::events::AnyToDeviceEvent>>,
changed_devices: &api::sync::sync_events::DeviceLists,
one_time_keys_counts: &BTreeMap<ruma::DeviceKeyAlgorithm, UInt>,
unused_fallback_keys: Option<&[ruma::DeviceKeyAlgorithm]>,
) -> Result<Vec<Raw<ruma::events::AnyToDeviceEvent>>> {
if let Some(o) = self.olm_machine() {
// Let the crypto machine handle the sync response, this
// decrypts to-device events, but leaves room events alone.
// This makes sure that we have the decryption keys for the room
// events at hand.
Ok(o.receive_sync_changes(
to_device_events,
changed_devices,
one_time_keys_counts,
unused_fallback_keys,
)
.await?)
} else {
Ok(to_device_events)
}
}
/// Receive a response from a sync call.
///
/// # Arguments
@@ -583,25 +609,17 @@ impl BaseClient {
}
let now = Instant::now();
let to_device_events = to_device.events;
#[cfg(feature = "e2e-encryption")]
let to_device = {
if let Some(o) = self.olm_machine() {
// Let the crypto machine handle the sync response, this
// decrypts to-device events, but leaves room events alone.
// This makes sure that we have the decryption keys for the room
// events at hand.
o.receive_sync_changes(
to_device,
&device_lists,
&device_one_time_keys_count,
device_unused_fallback_key_types.as_deref(),
)
.await?
} else {
to_device
}
};
let to_device_events = self
.preprocess_to_device_events(
to_device_events,
&device_lists,
&device_one_time_keys_count,
device_unused_fallback_key_types.as_deref(),
)
.await?;
let mut changes = StateChanges::new(next_batch.clone());
let mut ambiguity_cache = AmbiguityCache::new(self.store.inner.clone());
@@ -645,7 +663,9 @@ impl BaseClient {
let timeline = self
.handle_timeline(
&room,
new_info.timeline,
new_info.timeline.limited,
new_info.timeline.events,
new_info.timeline.prev_batch,
&push_rules,
&mut user_ids,
&mut room_info,
@@ -684,7 +704,7 @@ impl BaseClient {
JoinedRoom::new(
timeline,
new_info.state,
new_info.account_data,
new_info.account_data.events,
new_info.ephemeral,
notification_count,
),
@@ -710,7 +730,9 @@ impl BaseClient {
let timeline = self
.handle_timeline(
&room,
new_info.timeline,
new_info.timeline.limited,
new_info.timeline.events,
new_info.timeline.prev_batch,
&push_rules,
&mut user_ids,
&mut room_info,
@@ -772,8 +794,8 @@ impl BaseClient {
next_batch,
rooms: new_rooms,
presence,
account_data,
to_device,
account_data: account_data.events,
to_device_events,
device_lists,
device_one_time_keys_count: device_one_time_keys_count
.into_iter()

View File

@@ -45,3 +45,13 @@ pub use store::{StateChanges, StateStore, StoreError};
pub use utils::{
MinimalRoomMemberEvent, MinimalStateEvent, OriginalMinimalStateEvent, RedactedMinimalStateEvent,
};
#[cfg(all(test, not(target_arch = "wasm32")))]
#[ctor::ctor]
fn init_logging() {
use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
tracing_subscriber::registry()
.with(tracing_subscriber::EnvFilter::from_default_env())
.with(tracing_subscriber::fmt::layer().with_test_writer())
.init();
}

View File

@@ -60,7 +60,7 @@ pub struct MediaThumbnailSize {
impl UniqueKey for MediaThumbnailSize {
fn unique_key(&self) -> String {
format!("{}{}{}x{}", self.method, UNIQUE_SEPARATOR, self.width, self.height)
format!("{}{UNIQUE_SEPARATOR}{}x{}", self.method, self.width, self.height)
}
}
@@ -85,7 +85,7 @@ pub struct MediaRequest {
impl UniqueKey for MediaRequest {
fn unique_key(&self) -> String {
format!("{}{}{}", self.source.unique_key(), UNIQUE_SEPARATOR, self.format.unique_key())
format!("{}{UNIQUE_SEPARATOR}{}", self.source.unique_key(), self.format.unique_key())
}
}
/// Trait for media event content.

View File

@@ -748,7 +748,6 @@ mod test {
#[async_test]
async fn test_display_name_default() {
let _ = env_logger::try_init();
let (_, room) = make_room(RoomType::Joined);
assert_eq!(room.display_name().await.unwrap(), DisplayName::Empty);
@@ -786,7 +785,6 @@ mod test {
#[async_test]
async fn test_display_name_dm_invited() {
let _ = env_logger::try_init();
let (store, room) = make_room(RoomType::Invited);
let room_id = room_id!("!test:localhost");
let matthew = user_id!("@matthew:example.org");
@@ -809,7 +807,6 @@ mod test {
#[async_test]
async fn test_display_name_dm_invited_no_heroes() {
let _ = env_logger::try_init();
let (store, room) = make_room(RoomType::Invited);
let room_id = room_id!("!test:localhost");
let matthew = user_id!("@matthew:example.org");
@@ -828,7 +825,6 @@ mod test {
#[async_test]
async fn test_display_name_dm_joined() {
let _ = env_logger::try_init();
let (store, room) = make_room(RoomType::Joined);
let room_id = room_id!("!test:localhost");
let matthew = user_id!("@matthew:example.org");
@@ -860,7 +856,6 @@ mod test {
#[async_test]
async fn test_display_name_dm_joined_no_heroes() {
let _ = env_logger::try_init();
let (store, room) = make_room(RoomType::Joined);
let room_id = room_id!("!test:localhost");
let matthew = user_id!("@matthew:example.org");
@@ -887,7 +882,6 @@ mod test {
#[async_test]
async fn test_display_name_dm_alone() {
let _ = env_logger::try_init();
let (store, room) = make_room(RoomType::Joined);
let room_id = room_id!("!test:localhost");
let matthew = user_id!("@matthew:example.org");

View File

@@ -15,6 +15,8 @@
//! User sessions.
use std::fmt;
use ruma::{api::client::session::refresh_token, OwnedDeviceId, OwnedUserId};
use serde::{Deserialize, Serialize};
@@ -36,7 +38,7 @@ use serde::{Deserialize, Serialize};
///
/// assert_eq!(session.device_id.as_str(), "MYDEVICEID");
/// ```
#[derive(Clone, Debug, Eq, Hash, PartialEq, Serialize, Deserialize)]
#[derive(Clone, Eq, Hash, PartialEq, Serialize, Deserialize)]
pub struct Session {
/// The access token used for this session.
pub access_token: String,
@@ -66,6 +68,15 @@ impl Session {
}
}
impl fmt::Debug for Session {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("Session")
.field("user_id", &self.user_id)
.field("device_id", &self.device_id)
.finish_non_exhaustive()
}
}
impl From<ruma::api::client::session::login::v3::Response> for Session {
fn from(response: ruma::api::client::session::login::v3::Response) -> Self {
Self {
@@ -88,7 +99,8 @@ pub struct SessionMeta {
/// The mutable parts of the session: the access token and optional refresh
/// token.
#[derive(Clone, Debug)]
#[derive(Clone)]
#[allow(missing_debug_implementations)]
pub struct SessionTokens {
/// The access token used for this session.
pub access_token: String,

View File

@@ -30,60 +30,68 @@ impl BaseClient {
// next_batch,
rooms,
lists,
extensions,
// FIXME: missing compared to v3::Response
//presence,
//account_data,
//to_device,
//device_lists,
//device_one_time_keys_count,
//device_unused_fallback_key_types,
..
} = response;
// FIXME not yet supported by sliding sync. see
// https://github.com/matrix-org/matrix-rust-sdk/issues/1014
// #[cfg(feature = "encryption")]
// let to_device = {
// if let Some(o) = self.olm_machine().await {
// // Let the crypto machine handle the sync response, this
// // decrypts to-device events, but leaves room events alone.
// // This makes sure that we have the decryption keys for the room
// // events at hand.
// o.receive_sync_changes(
// to_device,
// &device_lists,
// &device_one_time_keys_count,
// device_unused_fallback_key_types.as_deref(),
// )
// .await?
// } else {
// to_device
// }
// };
if rooms.is_empty() {
// nothing for us to handle here
if rooms.is_empty() && extensions.is_empty() {
// we received a room reshuffling event only, there won't be anything for us to
// process. stop early
return Ok(SyncResponse::default());
};
let v4::Extensions { to_device, e2ee, account_data, .. } = extensions;
let to_device_events = to_device.map(|v4| v4.events).unwrap_or_default();
#[cfg(feature = "e2e-encryption")]
let to_device_events = {
if let Some(e2ee) = &e2ee {
self.preprocess_to_device_events(
to_device_events,
&e2ee.device_lists,
&e2ee.device_one_time_keys_count,
e2ee.device_unused_fallback_key_types.as_deref(),
)
.await?
} else {
to_device_events
}
};
let (device_lists, device_one_time_keys_count) = e2ee
.map(|e2ee| {
(
e2ee.device_lists,
e2ee.device_one_time_keys_count
.into_iter()
.map(|(k, v)| (k, v.into()))
.collect(),
)
})
.unwrap_or_default();
let store = self.store.clone();
let mut changes = StateChanges::default();
let mut ambiguity_cache = AmbiguityCache::new(store.inner.clone());
// FIXME not yet supported by sliding sync.
// self.handle_account_data(&account_data.events, &mut changes).await;
if let Some(global_data) = account_data.as_ref().map(|a| &a.global) {
self.handle_account_data(global_data, &mut changes).await;
}
let _push_rules = self.get_push_rules(&changes).await?;
let push_rules = self.get_push_rules(&changes).await?;
let mut new_rooms = Rooms::default();
for (room_id, room_data) in &rooms {
for (room_id, room_data) in rooms.into_iter() {
if !room_data.invite_state.is_empty() {
let invite_states = &room_data.invite_state;
let room = store.get_or_create_stripped_room(room_id).await;
let room = store.get_or_create_stripped_room(&room_id).await;
let mut room_info = room.clone_info();
if let Some(r) = store.get_room(room_id) {
if let Some(r) = store.get_room(&room_id) {
let mut room_info = r.clone_info();
room_info.mark_as_invited(); // FIXME: this might not be accurate
changes.add_room(room_info);
@@ -96,7 +104,7 @@ impl BaseClient {
v3::InvitedRoom::from(v3::InviteState::from(invite_states.clone())),
);
} else {
let room = store.get_or_create_room(room_id, RoomType::Joined).await;
let room = store.get_or_create_room(&room_id, RoomType::Joined).await;
let mut room_info = room.clone_info();
room_info.mark_as_joined(); // FIXME: this might not be accurate
@@ -105,18 +113,16 @@ impl BaseClient {
room_info.set_prev_batch(room_data.prev_batch.as_deref());
let user_ids = if room_data.required_state.is_empty() {
None
} else {
Some(
self.handle_state(
&room_data.required_state,
&mut room_info,
&mut changes,
&mut ambiguity_cache,
)
.await?,
let mut user_ids = if !room_data.required_state.is_empty() {
self.handle_state(
&room_data.required_state,
&mut room_info,
&mut changes,
&mut ambiguity_cache,
)
.await?
} else {
Default::default()
};
// FIXME not yet supported by sliding sync. see
@@ -130,36 +136,34 @@ impl BaseClient {
// changes.add_receipts(&room_id, event);
// }
// FIXME not yet supported by sliding sync.
// self.handle_room_account_data(&room_id, &room_data.account_data.events, &mut
// changes) .await;
let room_account_data = if let Some(inner_account_data) = &account_data {
if let Some(events) = inner_account_data.rooms.get(&room_id) {
self.handle_room_account_data(&room_id, events, &mut changes).await;
Some(events.to_vec())
} else {
None
}
} else {
None
};
// FIXME not yet supported by sliding sync.
// if room_data.timeline.limited {
// room_info.mark_members_missing();
// }
if room_data.limited {
room_info.mark_members_missing();
}
// let timeline = self
// .handle_timeline(
// &room,
// room_data.timeline,
// &push_rules,
// &mut room_info,
// &mut changes,
// &mut ambiguity_cache,
// &mut user_ids,
// )
// .await?;
// let timeline_slice = TimelineSlice::new(
// timeline.events.clone(),
// next_batch.clone(),
// timeline.prev_batch.clone(),
// timeline.limited,
// true,
// );
// changes.add_timeline(&room_id, timeline_slice);
let timeline = self
.handle_timeline(
&room,
room_data.limited,
room_data.timeline,
room_data.prev_batch,
&push_rules,
&mut user_ids,
&mut room_info,
&mut changes,
&mut ambiguity_cache,
)
.await?;
#[cfg(feature = "e2e-encryption")]
if room_info.is_encrypted() {
@@ -168,15 +172,15 @@ impl BaseClient {
// The room turned on encryption in this sync, we need
// to also get all the existing users and mark them for
// tracking.
let joined = store.get_joined_user_ids(room_id).await?;
let invited = store.get_invited_user_ids(room_id).await?;
let joined = store.get_joined_user_ids(&room_id).await?;
let invited = store.get_invited_user_ids(&room_id).await?;
let user_ids: Vec<&UserId> =
joined.iter().chain(&invited).map(Deref::deref).collect();
o.update_tracked_users(user_ids).await
}
if let Some(user_ids) = user_ids {
if !user_ids.is_empty() {
o.update_tracked_users(user_ids.iter().map(Deref::deref)).await;
}
}
@@ -187,9 +191,9 @@ impl BaseClient {
new_rooms.join.insert(
room_id.clone(),
JoinedRoom::new(
Default::default(), //timeline,
timeline,
v3::State::with_events(room_data.required_state.clone()),
Default::default(), // room_info.account_data,
room_account_data.unwrap_or_default(),
Default::default(), // room_info.ephemeral,
notification_count,
),
@@ -199,9 +203,13 @@ impl BaseClient {
}
}
// FIXME not yet supported by sliding sync. see
// https://github.com/matrix-org/matrix-rust-sdk/issues/1014
// self.handle_account_data(&account_data.events, &mut changes).await;
// TODO remove this, we're processing account data events here again
// because we want to have the push rules in place before we process
// rooms and their events, but we want to create the rooms before we
// process the `m.direct` account data event.
if let Some(global_data) = account_data.as_ref().map(|a| &a.global) {
self.handle_account_data(global_data, &mut changes).await;
}
// FIXME not yet supported by sliding sync.
// changes.presence = presence
@@ -228,10 +236,10 @@ impl BaseClient {
notifications: changes.notifications,
// FIXME not yet supported by sliding sync.
presence: Default::default(),
account_data: Default::default(),
to_device: Default::default(),
device_lists: Default::default(),
device_one_time_keys_count: Default::default(),
account_data: account_data.map(|a| a.global).unwrap_or_default(),
to_device_events,
device_lists,
device_one_time_keys_count,
})
}
}

View File

@@ -23,9 +23,11 @@
use std::{
borrow::Borrow,
collections::{BTreeMap, BTreeSet},
fmt,
ops::Deref,
pin::Pin,
result::Result as StdResult,
str::Utf8Error,
sync::Arc,
};
@@ -41,6 +43,7 @@ use dashmap::DashMap;
use matrix_sdk_common::{locks::RwLock, AsyncTraitDeps};
#[cfg(feature = "e2e-encryption")]
use matrix_sdk_crypto::store::{CryptoStore, IntoCryptoStore};
pub use matrix_sdk_store_encryption::Error as StoreEncryptionError;
use ruma::{
api::client::push::get_notifications::v3::Notification,
events::{
@@ -98,10 +101,11 @@ pub enum StoreError {
UnencryptedStore,
/// The store failed to encrypt or decrypt some data.
#[error("Error encrypting or decrypting data from the store: {0}")]
Encryption(String),
Encryption(#[from] StoreEncryptionError),
/// The store failed to encode or decode some data.
#[error("Error encoding or decoding data from the store: {0}")]
Codec(String),
Codec(#[from] Utf8Error),
/// The database format has changed in a backwards incompatible way.
#[error(
@@ -492,7 +496,7 @@ where
///
/// This adds additional higher level store functionality on top of a
/// `StateStore` implementation.
#[derive(Debug, Clone)]
#[derive(Clone)]
pub(crate) struct Store {
pub(super) inner: Arc<dyn StateStore>,
session_meta: Arc<OnceCell<SessionMeta>>,
@@ -510,10 +514,8 @@ impl Store {
Self::new(inner)
}
}
impl Store {
/// Create a new store, wrappning the given `StateStore`
/// Create a new store, wrapping the given `StateStore`
pub fn new(inner: Arc<dyn StateStore>) -> Self {
Self {
inner,
@@ -634,6 +636,18 @@ impl Store {
}
}
impl fmt::Debug for Store {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("Store")
.field("inner", &self.inner)
.field("session_meta", &self.session_meta)
.field("sync_token", &self.sync_token)
.field("rooms", &self.rooms)
.field("stripped_rooms", &self.stripped_rooms)
.finish_non_exhaustive()
}
}
impl Deref for Store {
type Target = dyn StateStore;

View File

@@ -21,7 +21,7 @@ js = ["instant/wasm-bindgen", "instant/inaccurate", "wasm-bindgen-futures"]
[dependencies]
futures-core = "0.3.21"
instant = "0.1.12"
ruma = { version = "0.7.0", features = ["client-api-c"] }
ruma = { workspace = true }
serde = "1.0.136"
[target.'cfg(target_arch = "wasm32")'.dependencies]

View File

@@ -4,11 +4,8 @@ use ruma::{
api::client::{
push::get_notifications::v3::Notification,
sync::sync_events::{
v3::{
DeviceLists, Ephemeral, GlobalAccountData, InvitedRoom, Presence, RoomAccountData,
State, ToDevice,
},
UnreadNotificationsCount as RumaUnreadNotificationsCount,
v3::{Ephemeral, InvitedRoom, Presence, RoomAccountData, State},
DeviceLists, UnreadNotificationsCount as RumaUnreadNotificationsCount,
},
},
events::{
@@ -16,7 +13,8 @@ use ruma::{
MembershipState, RoomMemberEvent, RoomMemberEventContent, StrippedRoomMemberEvent,
SyncRoomMemberEvent,
},
AnySyncTimelineEvent, AnyTimelineEvent,
AnyGlobalAccountDataEvent, AnyRoomAccountDataEvent, AnySyncTimelineEvent, AnyTimelineEvent,
AnyToDeviceEvent,
},
serde::Raw,
DeviceKeyAlgorithm, EventId, MilliSecondsSinceUnixEpoch, OwnedDeviceId, OwnedEventId,
@@ -79,7 +77,7 @@ pub struct EncryptionInfo {
pub sender: OwnedUserId,
/// The device ID of the device that sent us the event, note this is
/// untrusted data unless `verification_state` is as well trusted.
pub sender_device: OwnedDeviceId,
pub sender_device: Option<OwnedDeviceId>,
/// Information about the algorithm that was used to encrypt the event.
pub algorithm_info: AlgorithmInfo,
/// The verification state of the device that sent us the event, note this
@@ -133,9 +131,9 @@ pub struct SyncResponse {
/// Updates to the presence status of other users.
pub presence: Presence,
/// The global private data created by this user.
pub account_data: GlobalAccountData,
pub account_data: Vec<Raw<AnyGlobalAccountDataEvent>>,
/// Messages sent directly between devices.
pub to_device: ToDevice,
pub to_device_events: Vec<Raw<AnyToDeviceEvent>>,
/// Information on E2E device updates.
///
/// Only present on an incremental sync.
@@ -187,7 +185,7 @@ pub struct JoinedRoom {
/// true).
pub state: State,
/// The private data that this user has attached to this room.
pub account_data: RoomAccountData,
pub account_data: Vec<Raw<AnyRoomAccountDataEvent>>,
/// The ephemeral events in the room that aren't recorded in the timeline or
/// state of the room. e.g. typing.
pub ephemeral: Ephemeral,
@@ -197,7 +195,7 @@ impl JoinedRoom {
pub fn new(
timeline: Timeline,
state: State,
account_data: RoomAccountData,
account_data: Vec<Raw<AnyRoomAccountDataEvent>>,
ephemeral: Ephemeral,
unread_notifications: UnreadNotificationsCount,
) -> Self {

View File

@@ -34,7 +34,9 @@ byteorder = "1.4.3"
ctr = "0.9.1"
dashmap = "5.2.0"
event-listener = "2.5.2"
futures-core = "0.3.24"
futures-util = { version = "0.3.21", default-features = false, features = ["alloc"] }
futures-signals = { version = "0.3.31", default-features = false }
hmac = "0.12.1"
http = { version = "0.2.6", optional = true } # feature = testing only
matrix-sdk-qrcode = { version = "0.4.0", path = "../matrix-sdk-qrcode", optional = true }
@@ -42,19 +44,21 @@ matrix-sdk-common = { version = "0.6.0", path = "../matrix-sdk-common" }
olm-rs = { version = "2.2.0", features = ["serde"], optional = true }
pbkdf2 = { version = "0.11.0", default-features = false }
rand = "0.8.5"
ruma = { version = "0.7.0", features = ["client-api-c", "rand", "canonical-json", "unstable-msc2676", "unstable-msc2677"] }
ruma = { workspace = true, features = ["rand", "canonical-json", "unstable-msc2676", "unstable-msc2677"] }
serde = { version = "1.0.136", features = ["derive", "rc"] }
serde_json = "1.0.79"
sha2 = "0.10.2"
thiserror = "1.0.30"
tracing = "0.1.34"
vodozemac = "0.3.0"
zeroize = { version = "1.3.0", features = ["zeroize_derive"] }
tracing = { workspace = true, features = ["attributes"] }
vodozemac = { workspace = true }
zeroize = { workspace = true, features = ["zeroize_derive"] }
cfg-if = "1.0"
[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
tokio = { version = "1.18", default-features = false, features = ["time"] }
[dev-dependencies]
anyhow = "1.0.65"
futures = { version = "0.3.21", default-features = false, features = ["executor"] }
http = "0.2.6"
indoc = "1.0.4"

View File

@@ -30,14 +30,13 @@ async fn main() -> Result<(), OlmError> {
let alice = user_id!("@alice:example.org");
let machine = OlmMachine::new(&alice, device_id!("DEVICEID")).await;
let to_device_events = ToDevice::default();
let changed_devices = DeviceLists::default();
let one_time_key_counts = BTreeMap::default();
let unused_fallback_keys = Some(Vec::new());
// Push changes that the server sent to us in a sync response.
let decrypted_to_device = machine.receive_sync_changes(
to_device_events,
vec![],
&changed_devices,
&one_time_key_counts,
unused_fallback_keys.as_deref(),

View File

@@ -41,18 +41,13 @@ use crate::{
requests::{OutgoingRequest, ToDeviceRequest},
session_manager::GroupSessionCache,
store::{Changes, CryptoStoreError, SecretImportError, Store},
types::{
events::{
forwarded_room_key::{ForwardedMegolmV1AesSha2Content, ForwardedRoomKeyContent},
olm_v1::{DecryptedForwardedRoomKeyEvent, DecryptedSecretSendEvent},
room::encrypted::EncryptedEvent,
room_key_request::{
Action, MegolmV1AesSha2Content, RequestedKeyInfo, RoomKeyRequestEvent,
},
secret_send::SecretSendContent,
EventType,
},
EventEncryptionAlgorithm,
types::events::{
forwarded_room_key::ForwardedRoomKeyContent,
olm_v1::{DecryptedForwardedRoomKeyEvent, DecryptedSecretSendEvent},
room::encrypted::EncryptedEvent,
room_key_request::{Action, RequestedKeyInfo, RoomKeyRequestEvent},
secret_send::SecretSendContent,
EventType,
},
Device, MegolmError,
};
@@ -310,27 +305,13 @@ impl GossipMachine {
})
}
async fn handle_megolm_v1_request(
/// Answer a room key request after we found the matching
/// `InboundGroupSession`.
async fn answer_room_key_request(
&self,
event: &RoomKeyRequestEvent,
key_info: &MegolmV1AesSha2Content,
session: InboundGroupSession,
) -> OlmResult<Option<Session>> {
let session =
self.store.get_inbound_group_session(&key_info.room_id, &key_info.session_id).await?;
let session = if let Some(s) = session {
s
} else {
debug!(
user_id = event.sender.as_str(),
device_id = event.content.requesting_device_id.as_str(),
session_id = key_info.session_id.as_str(),
room_id = key_info.room_id.as_str(),
"Received a room key request for an unknown inbound group session",
);
return Ok(None);
};
let device =
self.store.get_device(&event.sender, &event.content.requesting_device_id).await?;
@@ -342,7 +323,7 @@ impl GossipMachine {
user_id = device.user_id().as_str(),
device_id = device.device_id().as_str(),
"Received a key request from a device that changed \
their curve25519 sender key"
their Curve25519 sender key"
);
} else {
debug!(
@@ -357,21 +338,21 @@ impl GossipMachine {
}
Ok(message_index) => {
info!(
user_id = device.user_id().as_str(),
device_id = device.device_id().as_str(),
session_id = key_info.session_id.as_str(),
room_id = key_info.room_id.as_str(),
user_id = %device.user_id(),
device_id = %device.device_id(),
session_id = session.session_id(),
room_id = %session.room_id,
?message_index,
"Serving a room key request",
);
match self.share_session(&session, &device, message_index).await {
match self.forward_room_key(&session, &device, message_index).await {
Ok(s) => Ok(Some(s)),
Err(OlmError::MissingSession) => {
info!(
user_id = device.user_id().as_str(),
device_id = device.device_id().as_str(),
session_id = key_info.session_id.as_str(),
user_id = %device.user_id(),
device_id = %device.device_id(),
session_id = session.session_id(),
"Key request is missing an Olm session, \
putting the request in the wait queue",
);
@@ -381,9 +362,9 @@ impl GossipMachine {
}
Err(OlmError::SessionExport(e)) => {
warn!(
user_id = device.user_id().as_str(),
device_id = device.device_id().as_str(),
session_id = key_info.session_id.as_str(),
user_id = %device.user_id(),
device_id = %device.device_id(),
session_id = session.session_id(),
"Can't serve a room key request, the session \
can't be exported into a forwarded room key: \
{:?}",
@@ -397,8 +378,8 @@ impl GossipMachine {
}
} else {
warn!(
user_id = event.sender.as_str(),
device_id = event.content.requesting_device_id.as_str(),
user_id = %event.sender,
device_id = %event.content.requesting_device_id,
"Received a key request from an unknown device",
);
self.store.update_tracked_user(&event.sender, true).await?;
@@ -407,18 +388,40 @@ impl GossipMachine {
}
}
async fn handle_supported_key_request(
&self,
event: &RoomKeyRequestEvent,
room_id: &RoomId,
session_id: &str,
) -> OlmResult<Option<Session>> {
let session = self.store.get_inbound_group_session(room_id, session_id).await?;
if let Some(s) = session {
self.answer_room_key_request(event, s).await
} else {
debug!(
user_id = %event.sender,
device_id = %event.content.requesting_device_id,
session_id,
%room_id,
"Received a room key request for an unknown inbound group session",
);
Ok(None)
}
}
/// Handle a single incoming key request.
async fn handle_key_request(&self, event: &RoomKeyRequestEvent) -> OlmResult<Option<Session>> {
match &event.content.action {
Action::Request(info) => match info {
RequestedKeyInfo::MegolmV1AesSha2(i) => {
self.handle_megolm_v1_request(event, i).await
self.handle_supported_key_request(event, &i.room_id, &i.session_id).await
}
// V2 room key requests don't have a sender_key field, we
// currently can't fetch an inbound group session without a
// sender key, so ignore the request.
#[cfg(feature = "experimental-algorithms")]
RequestedKeyInfo::MegolmV2AesSha2(_) => Ok(None),
RequestedKeyInfo::MegolmV2AesSha2(i) => {
self.handle_supported_key_request(event, &i.room_id, &i.session_id).await
}
RequestedKeyInfo::Unknown(i) => {
debug!(
sender = %event.sender,
@@ -458,7 +461,7 @@ impl GossipMachine {
Ok(used_session)
}
async fn share_session(
async fn forward_room_key(
&self,
session: &InboundGroupSession,
device: &Device,
@@ -689,18 +692,6 @@ impl GossipMachine {
Ok(())
}
/// Get an outgoing key info that matches the forwarded room key content.
async fn get_key_info(
&self,
event: &DecryptedForwardedRoomKeyEvent,
) -> Result<Option<GossipRequest>, CryptoStoreError> {
if let Some(info) = event.room_key_info().map(|i| i.into()) {
self.store.get_secret_request_by_info(&info).await
} else {
Ok(None)
}
}
/// Delete the given outgoing key info.
async fn delete_key_info(&self, info: &GossipRequest) -> Result<(), CryptoStoreError> {
self.store.delete_outgoing_secret_requests(&info.request_id).await
@@ -868,18 +859,16 @@ impl GossipMachine {
async fn accept_forwarded_room_key(
&self,
info: &GossipRequest,
sender: &UserId,
sender_key: Curve25519PublicKey,
algorithm: EventEncryptionAlgorithm,
content: &ForwardedMegolmV1AesSha2Content,
event: &DecryptedForwardedRoomKeyEvent,
) -> Result<Option<InboundGroupSession>, CryptoStoreError> {
match InboundGroupSession::from_forwarded_key(&algorithm, content) {
match InboundGroupSession::try_from(event) {
Ok(session) => {
if self.store.compare_group_session(&session).await? == SessionOrdering::Better {
self.mark_as_done(info).await?;
info!(
%sender,
sender = %event.sender,
%sender_key,
claimed_sender_key = %session.sender_key(),
room_id = session.room_id().as_str(),
@@ -891,7 +880,7 @@ impl GossipMachine {
Ok(Some(session))
} else {
info!(
%sender,
sender = %event.sender,
%sender_key,
claimed_sender_key = %session.sender_key(),
room_id = %session.room_id,
@@ -905,11 +894,8 @@ impl GossipMachine {
}
Err(e) => {
warn!(
%sender,
sender_key = sender_key.to_base64(),
claimed_sender_key = content.claimed_sender_key.to_base64(),
room_id = content.room_id.as_str(),
%algorithm,
sender = %event.sender,
%sender_key,
"Couldn't create a group session from a received room key"
);
Err(e.into())
@@ -938,36 +924,44 @@ impl GossipMachine {
&self,
sender_key: Curve25519PublicKey,
event: &DecryptedForwardedRoomKeyEvent,
content: &ForwardedMegolmV1AesSha2Content,
) -> Result<Option<InboundGroupSession>, CryptoStoreError> {
let algorithm = event.content.algorithm();
if let Some(info) = event.room_key_info() {
if let Some(request) =
self.store.get_secret_request_by_info(&info.clone().into()).await?
{
if self.should_accept_forward(&request, sender_key).await? {
self.accept_forwarded_room_key(&request, sender_key, event).await
} else {
warn!(
sender = %event.sender,
%sender_key,
room_id = %info.room_id(),
session_id = info.session_id(),
"Received a forwarded room key from an unknown device, or \
from a device that the key request recipient doesn't own",
);
if let Some(info) = self.get_key_info(event).await? {
if self.should_accept_forward(&info, sender_key).await? {
self.accept_forwarded_room_key(&info, &event.sender, sender_key, algorithm, content)
.await
Ok(None)
}
} else {
warn!(
sender = %event.sender,
%sender_key,
room_id = %content.room_id,
session_id = content.session_id.as_str(),
claimed_sender_key = %content.claimed_sender_key,
"Received a forwarded room key from an unknown device, or \
from a device that the key request recipient doesn't own",
sender_key = %sender_key,
room_id = %info.room_id(),
session_id = info.session_id(),
sender_key = %sender_key,
algorithm = %info.algorithm(),
"Received a forwarded room key that we didn't request",
);
Ok(None)
}
} else {
warn!(
sender = %event.sender,
sender_key = %sender_key,
room_id = %content.room_id,
session_id = content.session_id.as_str(),
claimed_sender_key = %content.claimed_sender_key,
algorithm = %algorithm,
"Received a forwarded room key that we didn't request",
sender = event.sender.as_str(),
sender_key = sender_key.to_base64(),
algorithm = %event.content.algorithm(),
"Received a forwarded room key with an unsupported algorithm",
);
Ok(None)
@@ -980,13 +974,13 @@ impl GossipMachine {
sender_key: Curve25519PublicKey,
event: &DecryptedForwardedRoomKeyEvent,
) -> Result<Option<InboundGroupSession>, CryptoStoreError> {
match &event.content {
ForwardedRoomKeyContent::MegolmV1AesSha2(content) => {
self.receive_supported_keys(sender_key, event, content).await
match event.content {
ForwardedRoomKeyContent::MegolmV1AesSha2(_) => {
self.receive_supported_keys(sender_key, event).await
}
#[cfg(feature = "experimental-algorithms")]
ForwardedRoomKeyContent::MegolmV2AesSha2(content) => {
self.receive_supported_keys(sender_key, event, content).await
ForwardedRoomKeyContent::MegolmV2AesSha2(_) => {
self.receive_supported_keys(sender_key, event).await
}
ForwardedRoomKeyContent::Unknown(_) => {
warn!(
@@ -1029,14 +1023,19 @@ mod tests {
olm::{Account, OutboundGroupSession, PrivateCrossSigningIdentity, ReadOnlyAccount},
session_manager::GroupSessionCache,
store::{Changes, CryptoStore, MemoryStore, Store},
types::events::{
forwarded_room_key::ForwardedRoomKeyContent,
olm_v1::{AnyDecryptedOlmEvent, DecryptedOlmV1Event},
room::encrypted::{EncryptedEvent, EncryptedToDeviceEvent, RoomEncryptedEventContent},
EventType, ToDeviceEvent,
types::{
events::{
forwarded_room_key::ForwardedRoomKeyContent,
olm_v1::{AnyDecryptedOlmEvent, DecryptedOlmV1Event},
room::encrypted::{
EncryptedEvent, EncryptedToDeviceEvent, RoomEncryptedEventContent,
},
EventType, ToDeviceEvent,
},
EventEncryptionAlgorithm,
},
verification::VerificationMachine,
OutgoingRequest, OutgoingRequests,
EncryptionSettings, OutgoingRequest, OutgoingRequests,
};
fn alice_id() -> &'static UserId {
@@ -1122,6 +1121,7 @@ mod tests {
async fn machines_for_key_share(
other_machine_owner: &UserId,
create_sessions: bool,
algorithm: EventEncryptionAlgorithm,
) -> (GossipMachine, Account, OutboundGroupSession, GossipMachine) {
let alice_machine = get_machine().await;
let alice_account = Account {
@@ -1151,8 +1151,13 @@ mod tests {
bob_machine.store.save_sessions(&[bob_session]).await.unwrap();
}
let (group_session, inbound_group_session) =
bob_machine.store.account().create_group_session_pair_with_defaults(room_id()).await;
let settings = EncryptionSettings { algorithm, ..Default::default() };
let (group_session, inbound_group_session) = bob_machine
.store
.account()
.create_group_session_pair(room_id(), settings)
.await
.unwrap();
let content = group_session.encrypt(json!({}), "m.dummy").await;
let event = wrap_encrypted_content(bob_machine.user_id(), content);
@@ -1503,10 +1508,9 @@ mod tests {
assert_matches!(machine.should_share_key(&own_device, &other_inbound).await, Ok(None));
}
#[async_test]
async fn key_share_cycle() {
async fn key_share_cycle(algorithm: EventEncryptionAlgorithm) {
let (alice_machine, alice_account, group_session, bob_machine) =
machines_for_key_share(alice_id(), true).await;
machines_for_key_share(alice_id(), true, algorithm).await;
// Get the request and convert it into a event.
let requests = alice_machine.outgoing_to_device_requests().await.unwrap();
@@ -1565,7 +1569,7 @@ mod tests {
#[async_test]
async fn reject_forward_from_another_user() {
let (alice_machine, alice_account, group_session, bob_machine) =
machines_for_key_share(bob_id(), true).await;
machines_for_key_share(bob_id(), true, EventEncryptionAlgorithm::MegolmV1AesSha2).await;
// Get the request and convert it into a event.
let requests = alice_machine.outgoing_to_device_requests().await.unwrap();
@@ -1611,6 +1615,17 @@ mod tests {
}
}
#[async_test]
async fn key_share_cycle_megolm_v1() {
key_share_cycle(EventEncryptionAlgorithm::MegolmV1AesSha2).await;
}
#[cfg(feature = "experimental-algorithms")]
#[async_test]
async fn key_share_cycle_megolm_v2() {
key_share_cycle(EventEncryptionAlgorithm::MegolmV2AesSha2).await;
}
#[async_test]
async fn secret_share_cycle() {
let alice_machine = get_machine().await;
@@ -1684,7 +1699,8 @@ mod tests {
#[async_test]
async fn key_share_cycle_without_session() {
let (alice_machine, alice_account, group_session, bob_machine) =
machines_for_key_share(alice_id(), false).await;
machines_for_key_share(alice_id(), false, EventEncryptionAlgorithm::MegolmV1AesSha2)
.await;
// Get the request and convert it into a event.
let requests = alice_machine.outgoing_to_device_requests().await.unwrap();

View File

@@ -20,13 +20,14 @@ use dashmap::{DashMap, DashSet};
pub(crate) use machine::GossipMachine;
use ruma::{
events::{
room_key_request::{Action, RequestedKeyInfo, ToDeviceRoomKeyRequestEventContent},
room_key_request::{Action, ToDeviceRoomKeyRequestEventContent},
secret::request::{
RequestAction, SecretName, ToDeviceSecretRequestEvent as SecretRequestEvent,
ToDeviceSecretRequestEventContent as SecretRequestEventContent,
},
AnyToDeviceEventContent,
AnyToDeviceEventContent, ToDeviceEventType,
},
serde::Raw,
to_device::DeviceIdOrAllDevices,
DeviceId, OwnedDeviceId, OwnedTransactionId, OwnedUserId, TransactionId, UserId,
};
@@ -36,7 +37,9 @@ use tracing::error;
use crate::{
requests::{OutgoingRequest, ToDeviceRequest},
types::events::room_key_request::{RoomKeyRequestEvent, SupportedKeyInfo},
types::events::room_key_request::{
RoomKeyRequestContent, RoomKeyRequestEvent, SupportedKeyInfo,
},
Device,
};
@@ -84,16 +87,16 @@ pub enum SecretInfo {
impl SecretInfo {
/// Serialize `SecretInfo` into `String` for usage as database keys and
/// comparison
/// comparison.
pub fn as_key(&self) -> String {
match &self {
SecretInfo::KeyRequest(ref info) => format!(
SecretInfo::KeyRequest(info) => format!(
"keyRequest:{:}:{:}:{:}",
info.room_id().as_str(),
info.session_id(),
&info.algorithm(),
),
SecretInfo::SecretRequest(ref sname) => format!("secretName:{:}", sname),
SecretInfo::SecretRequest(sname) => format!("secretName:{sname}"),
}
}
}
@@ -132,45 +135,42 @@ impl GossipRequest {
}
fn to_request(&self, own_device_id: &DeviceId) -> OutgoingRequest {
let content = match &self.info {
let request = match &self.info {
SecretInfo::KeyRequest(r) => {
let info = match r {
SupportedKeyInfo::MegolmV1AesSha2(c) => RequestedKeyInfo::new(
ruma::EventEncryptionAlgorithm::MegolmV1AesSha2,
c.room_id.to_owned(),
c.sender_key.to_base64(),
c.session_id.to_owned(),
),
#[cfg(feature = "experimental-algorithms")]
#[allow(clippy::todo)]
SupportedKeyInfo::MegolmV2AesSha2(_) => {
todo!("Requesting megolm.v2 room keys is not supported yet")
}
};
AnyToDeviceEventContent::RoomKeyRequest(ToDeviceRoomKeyRequestEventContent::new(
Action::Request,
Some(info),
let content = RoomKeyRequestContent::new_request(
r.clone().into(),
own_device_id.to_owned(),
self.request_id.to_owned(),
);
let content = Raw::new(&content)
.expect("We can always serialize a room key request info")
.cast();
ToDeviceRequest::with_id_raw(
&self.request_recipient,
DeviceIdOrAllDevices::AllDevices,
content,
ToDeviceEventType::RoomKeyRequest,
self.request_id.clone(),
))
)
}
SecretInfo::SecretRequest(s) => {
AnyToDeviceEventContent::SecretRequest(SecretRequestEventContent::new(
RequestAction::Request(s.clone()),
own_device_id.to_owned(),
let content =
AnyToDeviceEventContent::SecretRequest(SecretRequestEventContent::new(
RequestAction::Request(s.clone()),
own_device_id.to_owned(),
self.request_id.clone(),
));
ToDeviceRequest::with_id(
&self.request_recipient,
DeviceIdOrAllDevices::AllDevices,
content,
self.request_id.clone(),
))
)
}
};
let request = ToDeviceRequest::with_id(
&self.request_recipient,
DeviceIdOrAllDevices::AllDevices,
content,
self.request_id.clone(),
);
OutgoingRequest { request_id: request.txn_id.clone(), request: Arc::new(request.into()) }
}

View File

@@ -806,6 +806,23 @@ impl ReadOnlyOwnUserIdentity {
})
}
#[cfg(test)]
pub(crate) async fn from_private(identity: &crate::olm::PrivateCrossSigningIdentity) -> Self {
let master_key = identity.master_key.lock().await.as_ref().unwrap().public_key.clone();
let self_signing_key =
identity.self_signing_key.lock().await.as_ref().unwrap().public_key.clone();
let user_signing_key =
identity.user_signing_key.lock().await.as_ref().unwrap().public_key.clone();
Self {
user_id: identity.user_id().into(),
master_key,
self_signing_key,
user_signing_key,
verified: Arc::new(AtomicBool::new(false)),
}
}
/// Get the user id of this identity.
pub fn user_id(&self) -> &UserId {
&self.user_id

View File

@@ -88,7 +88,8 @@ pub use requests::{
};
pub use store::{CrossSigningKeyExport, CryptoStoreError, SecretImportError, SecretInfo};
pub use verification::{
format_emojis, AcceptSettings, CancelInfo, Emoji, Sas, Verification, VerificationRequest,
format_emojis, AcceptSettings, AcceptedProtocols, CancelInfo, Emoji, EmojiShortAuthString, Sas,
SasState, Verification, VerificationRequest,
};
#[cfg(feature = "qrcode")]
pub use verification::{QrVerification, ScanError};

View File

@@ -31,15 +31,16 @@ use ruma::{
upload_keys,
upload_signatures::v3::Request as UploadSignaturesRequest,
},
sync::sync_events::v3::{DeviceLists, ToDevice},
sync::sync_events::DeviceLists,
},
assign,
events::{
secret::request::SecretName, AnyMessageLikeEvent, AnyTimelineEvent, MessageLikeEventContent,
secret::request::SecretName, AnyMessageLikeEvent, AnyTimelineEvent, AnyToDeviceEvent,
MessageLikeEventContent,
},
serde::Raw,
DeviceId, DeviceKeyAlgorithm, OwnedDeviceKeyId, OwnedTransactionId, OwnedUserId, RoomId,
TransactionId, UInt, UserId,
DeviceId, DeviceKeyAlgorithm, OwnedDeviceId, OwnedDeviceKeyId, OwnedTransactionId, OwnedUserId,
RoomId, TransactionId, UInt, UserId,
};
use serde_json::{value::to_raw_value, Value};
use tracing::{debug, error, info, trace, warn};
@@ -891,11 +892,11 @@ impl OlmMachine {
/// [`decrypt_room_event`]: #method.decrypt_room_event
pub async fn receive_sync_changes(
&self,
to_device_events: ToDevice,
to_device_events: Vec<Raw<AnyToDeviceEvent>>,
changed_devices: &DeviceLists,
one_time_keys_counts: &BTreeMap<DeviceKeyAlgorithm, UInt>,
unused_fallback_keys: Option<&[DeviceKeyAlgorithm]>,
) -> OlmResult<ToDevice> {
) -> OlmResult<Vec<Raw<AnyToDeviceEvent>>> {
// Remove verification objects that have expired or are done.
let mut events = self.verification_machine.garbage_collect();
@@ -912,7 +913,7 @@ impl OlmMachine {
}
}
for mut raw_event in to_device_events.events {
for mut raw_event in to_device_events {
let event: ToDeviceEvents = match raw_event.deserialize_as() {
Ok(e) => e,
Err(e) => {
@@ -1002,10 +1003,7 @@ impl OlmMachine {
self.store.save_changes(changes).await?;
let mut to_device = ToDevice::new();
to_device.events = events;
Ok(to_device)
Ok(events)
}
/// Request a room key from our devices.
@@ -1037,16 +1035,16 @@ impl OlmMachine {
&self,
session: &InboundGroupSession,
sender: &UserId,
device_id: &DeviceId,
) -> StoreResult<VerificationState> {
) -> StoreResult<(VerificationState, Option<OwnedDeviceId>)> {
Ok(
// First find the device corresponding to the Curve25519 identity
// key that sent us the session (recorded upon successful
// decryption of the `m.room_key` to-device message).
if let Some(device) = self
.get_device(sender, device_id, None)
.get_user_devices(sender, None)
.await?
.filter(|d| d.curve25519_key().map(|k| k == session.sender_key()).unwrap_or(false))
.devices()
.find(|d| d.curve25519_key() == Some(session.sender_key()))
{
// If the `Device` is confirmed to be the owner of the
// `InboundGroupSession` we will consider the session (i.e.
@@ -1058,14 +1056,14 @@ impl OlmMachine {
if device.is_owner_of_session(session)
&& (device.is_our_own_device() || device.is_verified())
{
VerificationState::Trusted
(VerificationState::Trusted, Some(device.device_id().to_owned()))
} else {
VerificationState::Untrusted
(VerificationState::Untrusted, Some(device.device_id().to_owned()))
}
} else {
// We didn't find a device, no way to know if we should trust
// the `InboundGroupSession` or not.
VerificationState::UnknownDevice
(VerificationState::UnknownDevice, None)
},
)
}
@@ -1079,12 +1077,10 @@ impl OlmMachine {
&self,
session: &InboundGroupSession,
sender: &UserId,
device_id: &DeviceId,
) -> StoreResult<EncryptionInfo> {
let verification_state = self.get_verification_state(session, sender, device_id).await?;
let (verification_state, device_id) = self.get_verification_state(session, sender).await?;
let sender = sender.to_owned();
let device_id = device_id.to_owned();
Ok(EncryptionInfo {
sender,
@@ -1143,8 +1139,7 @@ impl OlmMachine {
}
}
let encryption_info =
self.get_encryption_info(&session, &event.sender, content.device_id()).await?;
let encryption_info = self.get_encryption_info(&session, &event.sender).await?;
Ok(TimelineEvent { encryption_info: Some(encryption_info), event: decrypted_event })
} else {
@@ -1589,7 +1584,7 @@ pub(crate) mod tests {
api::{
client::{
keys::{claim_keys, get_keys, upload_keys},
sync::sync_events::v3::{DeviceLists, ToDevice},
sync::sync_events::v3::DeviceLists,
},
IncomingResponse,
},
@@ -1984,7 +1979,7 @@ pub(crate) mod tests {
if let AnyToDeviceEvent::Dummy(e) = event {
assert_eq!(&e.sender, alice.user_id());
} else {
panic!("Wrong event type found {:?}", event);
panic!("Wrong event type found {event:?}");
}
}
@@ -2008,21 +2003,18 @@ pub(crate) mod tests {
let alice_session =
alice.group_session_manager.get_outbound_group_session(room_id).unwrap();
let mut to_device = ToDevice::new();
to_device.events.push(event);
let decrypted = bob
.receive_sync_changes(to_device, &Default::default(), &Default::default(), None)
.receive_sync_changes(vec![event], &Default::default(), &Default::default(), None)
.await
.unwrap();
let event = decrypted.events[0].deserialize().unwrap();
let event = decrypted[0].deserialize().unwrap();
if let AnyToDeviceEvent::RoomKey(event) = event {
assert_eq!(&event.sender, alice.user_id());
assert!(event.content.session_key.is_empty());
} else {
panic!("expected RoomKeyEvent found {:?}", event);
panic!("expected RoomKeyEvent found {event:?}");
}
let session =
@@ -2301,7 +2293,7 @@ pub(crate) mod tests {
// Bob verifies that the MAC is valid and also sends a "done" message.
let msgs = bob.verification_machine.outgoing_messages();
eprintln!("{:?}", msgs);
eprintln!("{msgs:?}");
assert!(msgs.len() == 1);
let event = msgs.first().map(|r| outgoing_request_to_event(bob.user_id(), r)).unwrap();
@@ -2322,7 +2314,7 @@ pub(crate) mod tests {
assert!(!alice_sas.is_done());
assert!(!bob_device.is_verified());
// Alices receives the done message
eprintln!("{:?}", event);
eprintln!("{event:?}");
alice.handle_verification_event(&event).await;
assert!(alice_sas.is_done());
@@ -2345,13 +2337,13 @@ pub(crate) mod tests {
other: Default::default(),
};
let event = json_convert(&event).unwrap();
let mut to_device = ToDevice::new();
to_device.events.push(event);
let changed_devices = DeviceLists::new();
let key_counts = Default::default();
let _ =
bob.receive_sync_changes(to_device, &changed_devices, &key_counts, None).await.unwrap();
let _ = bob
.receive_sync_changes(vec![event], &changed_devices, &key_counts, None)
.await
.unwrap();
let group_session = GroupSession::new(SessionConfig::version_1());
let session_key = group_session.session_key();
@@ -2381,10 +2373,8 @@ pub(crate) mod tests {
);
let event: Raw<AnyToDeviceEvent> = json_convert(&event).unwrap();
let mut to_device = ToDevice::new();
to_device.events.push(event.clone());
bob.receive_sync_changes(to_device, &changed_devices, &key_counts, None).await.unwrap();
bob.receive_sync_changes(vec![event], &changed_devices, &key_counts, None).await.unwrap();
let session = bob.store.get_inbound_group_session(room_id, &session_id).await;

View File

@@ -138,7 +138,7 @@ impl OlmMessageHash {
let sha = Sha256::new()
.chain_update(sender_key.as_bytes())
.chain_update([message_type as u8])
.chain_update(&ciphertext)
.chain_update(ciphertext)
.finalize();
Self { sender_key, hash: encode(sha.as_slice()) }

View File

@@ -14,6 +14,7 @@
use std::{
fmt,
ops::Deref,
sync::{
atomic::{AtomicBool, Ordering::SeqCst},
Arc,
@@ -44,7 +45,11 @@ use crate::{
types::{
deserialize_curve_key,
events::{
forwarded_room_key::ForwardedMegolmV1AesSha2Content,
forwarded_room_key::{
ForwardedMegolmV1AesSha2Content, ForwardedMegolmV2AesSha2Content,
ForwardedRoomKeyContent,
},
olm_v1::DecryptedForwardedRoomKeyEvent,
room::encrypted::{EncryptedEvent, RoomEventEncryptionScheme},
},
serialize_curve_key, EventEncryptionAlgorithm, SigningKeys,
@@ -157,42 +162,6 @@ impl InboundGroupSession {
})
}
/// Create a new inbound group session from a forwarded room key content.
///
/// # Arguments
///
/// * `sender_key` - The public curve25519 key of the account that
/// sent us the session
///
/// * `content` - A forwarded room key content that contains the session key
/// to create the `InboundGroupSession`.
pub fn from_forwarded_key(
algorithm: &EventEncryptionAlgorithm,
content: &ForwardedMegolmV1AesSha2Content,
) -> Result<Self, SessionCreationError> {
let config = OutboundGroupSession::session_config(algorithm)?;
let session = InnerSession::import(&content.session_key, config);
let first_known_index = session.first_known_index();
let mut sender_claimed_key = SigningKeys::new();
sender_claimed_key.insert(DeviceKeyAlgorithm::Ed25519, content.claimed_ed25519_key.into());
Ok(InboundGroupSession {
inner: Mutex::new(session).into(),
session_id: content.session_id.as_str().into(),
sender_key: content.claimed_sender_key,
first_known_index,
history_visibility: None.into(),
signing_keys: sender_claimed_key.into(),
room_id: (*content.room_id).into(),
imported: true,
backed_up: AtomicBool::new(false).into(),
algorithm: algorithm.to_owned().into(),
})
}
/// Store the group session as a base64 encoded string.
///
/// # Arguments
@@ -498,6 +467,67 @@ impl TryFrom<&ExportedRoomKey> for InboundGroupSession {
}
}
impl From<&ForwardedMegolmV1AesSha2Content> for InboundGroupSession {
fn from(value: &ForwardedMegolmV1AesSha2Content) -> Self {
let session = InnerSession::import(&value.session_key, SessionConfig::version_1());
let session_id = session.session_id().into();
let first_known_index = session.first_known_index();
InboundGroupSession {
inner: Mutex::new(session).into(),
session_id,
sender_key: value.claimed_sender_key,
history_visibility: None.into(),
first_known_index,
signing_keys: SigningKeys::from([(
DeviceKeyAlgorithm::Ed25519,
value.claimed_ed25519_key.into(),
)])
.into(),
room_id: value.room_id.to_owned().into(),
imported: true,
algorithm: EventEncryptionAlgorithm::MegolmV1AesSha2.into(),
backed_up: AtomicBool::from(false).into(),
}
}
}
impl From<&ForwardedMegolmV2AesSha2Content> for InboundGroupSession {
fn from(value: &ForwardedMegolmV2AesSha2Content) -> Self {
let session = InnerSession::import(&value.session_key, SessionConfig::version_2());
let session_id = session.session_id().into();
let first_known_index = session.first_known_index();
InboundGroupSession {
inner: Mutex::new(session).into(),
session_id,
sender_key: value.claimed_sender_key,
history_visibility: None.into(),
first_known_index,
signing_keys: value.claimed_signing_keys.to_owned().into(),
room_id: value.room_id.to_owned().into(),
imported: true,
algorithm: EventEncryptionAlgorithm::MegolmV1AesSha2.into(),
backed_up: AtomicBool::from(false).into(),
}
}
}
impl TryFrom<&DecryptedForwardedRoomKeyEvent> for InboundGroupSession {
type Error = SessionCreationError;
fn try_from(value: &DecryptedForwardedRoomKeyEvent) -> Result<Self, Self::Error> {
match &value.content {
ForwardedRoomKeyContent::MegolmV1AesSha2(c) => Ok(Self::from(c.deref())),
#[cfg(feature = "experimental-algorithms")]
ForwardedRoomKeyContent::MegolmV2AesSha2(c) => Ok(Self::from(c.deref())),
ForwardedRoomKeyContent::Unknown(c) => {
Err(SessionCreationError::Algorithm(c.algorithm.to_owned()))
}
}
}
}
#[cfg(test)]
mod test {
use matrix_sdk_test::async_test;

View File

@@ -27,6 +27,8 @@ use thiserror::Error;
pub use vodozemac::megolm::{ExportedSessionKey, SessionKey};
use vodozemac::{megolm::SessionKeyDecodeError, Curve25519PublicKey};
#[cfg(feature = "experimental-algorithms")]
use crate::types::events::forwarded_room_key::ForwardedMegolmV2AesSha2Content;
use crate::types::{
deserialize_curve_key, deserialize_curve_key_vec,
events::forwarded_room_key::{ForwardedMegolmV1AesSha2Content, ForwardedRoomKeyContent},
@@ -128,35 +130,50 @@ impl TryFrom<ExportedRoomKey> for ForwardedRoomKeyContent {
/// This will fail if the exported room key doesn't contain an Ed25519
/// claimed sender key.
fn try_from(room_key: ExportedRoomKey) -> Result<ForwardedRoomKeyContent, Self::Error> {
// The forwarded room key content only supports a single claimed sender
// key and it requires it to be a Ed25519 key. This here will be lossy
// conversion since we're dropping all other key types.
//
// This isn't yet a problem since no other key types exist, but still
// something that will need to be addressed sooner or later.
if let Some(SigningKey::Ed25519(claimed_ed25519_key)) =
room_key.sender_claimed_keys.get(&DeviceKeyAlgorithm::Ed25519)
{
if room_key.algorithm == EventEncryptionAlgorithm::MegolmV1AesSha2 {
Ok(ForwardedRoomKeyContent::MegolmV1AesSha2(
ForwardedMegolmV1AesSha2Content {
match room_key.algorithm {
EventEncryptionAlgorithm::MegolmV1AesSha2 => {
// The forwarded room key content only supports a single claimed sender
// key and it requires it to be a Ed25519 key. This here will be lossy
// conversion since we're dropping all other key types.
//
// This was fixed by the megolm v2 content. Hopefully we'll deprecate megolm v1
// before we have multiple signing keys.
if let Some(SigningKey::Ed25519(claimed_ed25519_key)) =
room_key.sender_claimed_keys.get(&DeviceKeyAlgorithm::Ed25519)
{
Ok(ForwardedRoomKeyContent::MegolmV1AesSha2(
ForwardedMegolmV1AesSha2Content {
room_id: room_key.room_id,
session_id: room_key.session_id,
session_key: room_key.session_key,
claimed_sender_key: room_key.sender_key,
claimed_ed25519_key: *claimed_ed25519_key,
forwarding_curve25519_key_chain: room_key
.forwarding_curve25519_key_chain
.clone(),
other: Default::default(),
}
.into(),
))
} else {
Err(SessionExportError::MissingEd25519Key)
}
}
#[cfg(feature = "experimental-algorithms")]
EventEncryptionAlgorithm::MegolmV2AesSha2 => {
Ok(ForwardedRoomKeyContent::MegolmV2AesSha2(
ForwardedMegolmV2AesSha2Content {
room_id: room_key.room_id,
session_id: room_key.session_id,
session_key: room_key.session_key,
claimed_sender_key: room_key.sender_key,
claimed_ed25519_key: *claimed_ed25519_key,
forwarding_curve25519_key_chain: room_key
.forwarding_curve25519_key_chain
.clone(),
claimed_signing_keys: room_key.sender_claimed_keys,
other: Default::default(),
}
.into(),
))
} else {
Err(SessionExportError::MissingEd25519Key)
}
} else {
Err(SessionExportError::Algorithm(room_key.algorithm))
_ => Err(SessionExportError::Algorithm(room_key.algorithm)),
}
}
}
@@ -180,26 +197,32 @@ impl TryFrom<ForwardedRoomKeyContent> for ExportedRoomKey {
fn try_from(forwarded_key: ForwardedRoomKeyContent) -> Result<Self, Self::Error> {
let algorithm = forwarded_key.algorithm();
let handle_key = |content: Box<ForwardedMegolmV1AesSha2Content>| {
let mut sender_claimed_keys = SigningKeys::new();
sender_claimed_keys
.insert(DeviceKeyAlgorithm::Ed25519, content.claimed_ed25519_key.into());
match forwarded_key {
ForwardedRoomKeyContent::MegolmV1AesSha2(content) => {
let mut sender_claimed_keys = SigningKeys::new();
sender_claimed_keys
.insert(DeviceKeyAlgorithm::Ed25519, content.claimed_ed25519_key.into());
Ok(Self {
Ok(Self {
algorithm,
room_id: content.room_id,
session_id: content.session_id,
forwarding_curve25519_key_chain: content.forwarding_curve25519_key_chain,
sender_claimed_keys,
sender_key: content.claimed_sender_key,
session_key: content.session_key,
})
}
#[cfg(feature = "experimental-algorithms")]
ForwardedRoomKeyContent::MegolmV2AesSha2(content) => Ok(Self {
algorithm,
room_id: content.room_id,
session_id: content.session_id,
forwarding_curve25519_key_chain: content.forwarding_curve25519_key_chain,
sender_claimed_keys,
forwarding_curve25519_key_chain: Default::default(),
sender_claimed_keys: content.claimed_signing_keys,
sender_key: content.claimed_sender_key,
session_key: content.session_key,
})
};
match forwarded_key {
ForwardedRoomKeyContent::MegolmV1AesSha2(content) => handle_key(content),
#[cfg(feature = "experimental-algorithms")]
ForwardedRoomKeyContent::MegolmV2AesSha2(content) => handle_key(content),
}),
ForwardedRoomKeyContent::Unknown(c) => Err(SessionExportError::Algorithm(c.algorithm)),
}
}

View File

@@ -325,13 +325,10 @@ impl OutboundGroupSession {
}
.into(),
#[cfg(feature = "experimental-algorithms")]
EventEncryptionAlgorithm::MegolmV2AesSha2 => MegolmV2AesSha2Content {
ciphertext,
session_id: self.session_id().to_owned(),
sender_key: self.account_identity_keys.curve25519,
device_id: (*self.device_id).to_owned(),
EventEncryptionAlgorithm::MegolmV2AesSha2 => {
MegolmV2AesSha2Content { ciphertext, session_id: self.session_id().to_owned() }
.into()
}
.into(),
_ => unreachable!(
"An outbound group session is always using one of the supported algorithms"
),

View File

@@ -116,6 +116,19 @@ impl ToDeviceRequest {
}
}
pub(crate) fn with_id_raw(
recipient: &UserId,
recipient_device: impl Into<DeviceIdOrAllDevices>,
content: Raw<AnyToDeviceEventContent>,
event_type: ToDeviceEventType,
txn_id: OwnedTransactionId,
) -> Self {
let user_messages = iter::once((recipient_device.into(), content)).collect();
let messages = iter::once((recipient.to_owned(), user_messages)).collect();
ToDeviceRequest { event_type, txn_id, messages }
}
pub(crate) fn with_id(
recipient: &UserId,
recipient_device: impl Into<DeviceIdOrAllDevices>,

View File

@@ -325,7 +325,7 @@ impl Store {
}
#[cfg(test)]
/// Testing helper to allo to save only a set of devices
/// Testing helper to allow to save only a set of devices
pub async fn save_devices(&self, devices: &[ReadOnlyDevice]) -> Result<()> {
let changes = Changes {
devices: DeviceChanges { changed: devices.to_vec(), ..Default::default() },

View File

@@ -16,7 +16,7 @@
use std::collections::BTreeMap;
use ruma::OwnedRoomId;
use ruma::{DeviceKeyAlgorithm, OwnedRoomId};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use vodozemac::{megolm::ExportedSessionKey, Curve25519PublicKey, Ed25519PublicKey};
@@ -24,7 +24,7 @@ use vodozemac::{megolm::ExportedSessionKey, Curve25519PublicKey, Ed25519PublicKe
use super::{EventType, ToDeviceEvent};
use crate::types::{
deserialize_curve_key, deserialize_curve_key_vec, deserialize_ed25519_key, serialize_curve_key,
serialize_curve_key_vec, serialize_ed25519_key, EventEncryptionAlgorithm,
serialize_curve_key_vec, serialize_ed25519_key, EventEncryptionAlgorithm, SigningKeys,
};
/// The `m.forwarded_room_key` to-device event.
@@ -53,7 +53,7 @@ pub enum ForwardedRoomKeyContent {
/// The `m.megolm.v2.aes-sha2` variant of the `m.forwarded_room_key`
/// content.
#[cfg(feature = "experimental-algorithms")]
MegolmV2AesSha2(Box<ForwardedMegolmV1AesSha2Content>),
MegolmV2AesSha2(Box<ForwardedMegolmV2AesSha2Content>),
/// An unknown and unsupported variant of the `m.forwarded_room_key`
/// content.
Unknown(UnknownRoomKeyContent),
@@ -79,7 +79,7 @@ impl EventType for ForwardedRoomKeyContent {
const EVENT_TYPE: &'static str = "m.forwarded_room_key";
}
/// The `m.megolm.v1.aes-sha2` variant of the `m.room_key` content.
/// The `m.megolm.v1.aes-sha2` variant of the `m.forwarded_room_key` content.
#[derive(Deserialize, Serialize)]
pub struct ForwardedMegolmV1AesSha2Content {
/// The room where the key is used.
@@ -131,7 +131,42 @@ pub struct ForwardedMegolmV1AesSha2Content {
pub(crate) other: BTreeMap<String, Value>,
}
/// An unknown and unsupported `m.room_key` algorithm.
/// The `m.megolm.v2.aes-sha2` variant of the `m.forwarded_room_key` content.
#[derive(Deserialize, Serialize)]
pub struct ForwardedMegolmV2AesSha2Content {
/// The room where the key is used.
pub room_id: OwnedRoomId,
/// The ID of the session that the key is for.
pub session_id: String,
/// The key to be exchanged. Can be used to create a [`InboundGroupSession`]
/// that can be used to decrypt room events.
///
/// [`InboundGroupSession`]: vodozemac::megolm::InboundGroupSession
pub session_key: ExportedSessionKey,
/// The Curve25519 key of the device which initiated the session originally.
///
/// It is claimed because the receiving device has no way to tell that
/// the original room_key actually came from a device which owns the private
/// part of this key.
#[serde(deserialize_with = "deserialize_curve_key", serialize_with = "serialize_curve_key")]
pub claimed_sender_key: Curve25519PublicKey,
/// The Ed25519 key of the device which initiated the session originally.
///
/// It is claimed because the receiving device has no way to tell that
/// the original room_key actually came from a device which owns the private
/// part of this key.
#[serde(default)]
pub claimed_signing_keys: SigningKeys<DeviceKeyAlgorithm>,
#[serde(flatten)]
pub(crate) other: BTreeMap<String, Value>,
}
/// An unknown and unsupported `m.forwarded_room_key` algorithm.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct UnknownRoomKeyContent {
/// The algorithm of the unknown room key.
@@ -153,6 +188,17 @@ impl std::fmt::Debug for ForwardedMegolmV1AesSha2Content {
}
}
impl std::fmt::Debug for ForwardedMegolmV2AesSha2Content {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("ForwardedMegolmV2AesSha2Content")
.field("room_id", &self.room_id)
.field("session_id", &self.session_id)
.field("claimed_sender_key", &self.claimed_sender_key)
.field("sender_claimed_keys", &self.claimed_signing_keys)
.finish_non_exhaustive()
}
}
#[derive(Deserialize, Serialize)]
struct RoomKeyHelper {
algorithm: EventEncryptionAlgorithm,
@@ -171,7 +217,7 @@ impl TryFrom<RoomKeyHelper> for ForwardedRoomKeyContent {
}
#[cfg(feature = "experimental-algorithms")]
EventEncryptionAlgorithm::MegolmV2AesSha2 => {
let content: ForwardedMegolmV1AesSha2Content = serde_json::from_value(value.other)?;
let content: ForwardedMegolmV2AesSha2Content = serde_json::from_value(value.other)?;
Self::MegolmV2AesSha2(content.into())
}
_ => Self::Unknown(UnknownRoomKeyContent {

View File

@@ -16,7 +16,7 @@
use std::collections::BTreeMap;
use ruma::{DeviceId, OwnedDeviceId, RoomId};
use ruma::{OwnedDeviceId, RoomId};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use vodozemac::{megolm::MegolmMessage, olm::OlmMessage, Curve25519PublicKey};
@@ -250,15 +250,6 @@ impl SupportedEventEncryptionSchemes<'_> {
}
}
/// The ID of the sending device.
pub fn device_id(&self) -> &DeviceId {
match self {
SupportedEventEncryptionSchemes::MegolmV1AesSha2(c) => &c.device_id,
#[cfg(feature = "experimental-algorithms")]
SupportedEventEncryptionSchemes::MegolmV2AesSha2(c) => &c.device_id,
}
}
/// The algorithm that was used to encrypt the event content.
pub fn algorithm(&self) -> EventEncryptionAlgorithm {
match self {
@@ -314,13 +305,6 @@ pub struct MegolmV2AesSha2Content {
/// The ID of the session used to encrypt the message.
pub session_id: String,
/// The Curve25519 key of the sender.
#[serde(deserialize_with = "deserialize_curve_key", serialize_with = "serialize_curve_key")]
pub sender_key: Curve25519PublicKey,
/// The ID of the sending device.
pub device_id: OwnedDeviceId,
}
/// An unknown and unsupported `m.room.encrypted` event content.

View File

@@ -51,6 +51,18 @@ pub struct RoomKeyRequestContent {
pub request_id: OwnedTransactionId,
}
impl RoomKeyRequestContent {
/// Create a new content for a `m.room_key_request` event with the action
/// set to request a room key with the given `RequestedKeyInfo`.
pub fn new_request(
info: RequestedKeyInfo,
requesting_device_id: OwnedDeviceId,
request_id: OwnedTransactionId,
) -> Self {
Self { action: Action::Request(info), requesting_device_id, request_id }
}
}
impl EventType for RoomKeyRequestContent {
const EVENT_TYPE: &'static str = "m.room_key_request";
}

View File

@@ -16,7 +16,7 @@ use std::sync::Arc;
use dashmap::DashMap;
use ruma::{DeviceId, OwnedTransactionId, OwnedUserId, TransactionId, UserId};
use tracing::trace;
use tracing::{trace, warn};
use super::{event_enums::OutgoingContent, Sas, Verification};
#[cfg(feature = "qrcode")]
@@ -56,6 +56,14 @@ impl VerificationCache {
let old_verification = old.value();
if !old_verification.is_cancelled() {
warn!(
user_id = verification.other_user().as_str(),
old_flow_id = old_verification.flow_id(),
new_flow_id = verification.flow_id(),
"Received a new verification whilst another one with \
the same user is ongoing. Cancelling both verifications"
);
if let Some(r) = old_verification.cancel() {
self.add_request(r.into())
}
@@ -78,11 +86,7 @@ impl VerificationCache {
pub fn replace_sas(&self, sas: Sas) {
let verification: Verification = sas.into();
self.verification
.entry(verification.other_user().to_owned())
.or_default()
.insert(verification.flow_id().to_owned(), verification.clone());
self.replace(verification);
}
#[cfg(feature = "qrcode")]
@@ -90,6 +94,12 @@ impl VerificationCache {
self.insert(qr)
}
#[cfg(feature = "qrcode")]
pub fn replace_qr(&self, qr: QrVerification) {
let verification: Verification = qr.into();
self.replace(verification);
}
#[cfg(feature = "qrcode")]
pub fn get_qr(&self, sender: &UserId, flow_id: &str) -> Option<QrVerification> {
self.get(sender, flow_id).and_then(|v| {
@@ -101,6 +111,13 @@ impl VerificationCache {
})
}
pub fn replace(&self, verification: Verification) {
self.verification
.entry(verification.other_user().to_owned())
.or_default()
.insert(verification.flow_id().to_owned(), verification.clone());
}
pub fn get(&self, sender: &UserId, flow_id: &str) -> Option<Verification> {
self.verification.get(sender).and_then(|m| m.get(flow_id).map(|v| v.clone()))
}

View File

@@ -47,7 +47,7 @@ use ruma::{
DeviceId, EventId, OwnedDeviceId, OwnedEventId, OwnedRoomId, OwnedTransactionId, RoomId,
UserId,
};
pub use sas::{AcceptSettings, Sas};
pub use sas::{AcceptSettings, AcceptedProtocols, EmojiShortAuthString, Sas, SasState};
use tracing::{error, info, trace, warn};
use crate::{
@@ -100,7 +100,7 @@ pub fn format_emojis(emojis: [Emoji; 7]) -> String {
// Hack to make terminals behave properly when one of the above is printed.
let emoji = if VARIATION_SELECTOR_EMOJIS.contains(&emoji) {
format!("{} ", emoji)
format!("{emoji} ")
} else {
emoji.to_owned()
};
@@ -109,13 +109,12 @@ pub fn format_emojis(emojis: [Emoji; 7]) -> String {
// monospace characters.
let placeholder = ".".repeat(EMOJI_WIDTH);
format!("{:^12}", placeholder).replace(&placeholder, &emoji)
format!("{placeholder:^12}").replace(&placeholder, &emoji)
};
let emoji_string = emojis.iter().map(|e| center_emoji(e)).collect::<Vec<_>>().join("");
let description =
descriptions.iter().map(|d| format!("{:^12}", d)).collect::<Vec<_>>().join("");
let description = descriptions.iter().map(|d| format!("{d:^12}")).collect::<Vec<_>>().join("");
format!("{emoji_string}\n{description}")
}
@@ -826,7 +825,9 @@ mod test {
use super::VerificationStore;
use crate::{
olm::PrivateCrossSigningIdentity, store::MemoryStore, ReadOnlyAccount, ReadOnlyDevice,
olm::PrivateCrossSigningIdentity,
store::{Changes, CryptoStore, IdentityChanges, MemoryStore},
ReadOnlyAccount, ReadOnlyDevice, ReadOnlyOwnUserIdentity, ReadOnlyUserIdentity,
};
pub fn alice_id() -> &'static UserId {
@@ -848,28 +849,57 @@ mod test {
pub(crate) async fn setup_stores() -> (VerificationStore, VerificationStore) {
let alice = ReadOnlyAccount::new(alice_id(), alice_device_id());
let alice_store = MemoryStore::new();
let alice_identity = Mutex::new(PrivateCrossSigningIdentity::empty(alice_id()));
let (alice_private_identity, _, _) =
PrivateCrossSigningIdentity::with_account(&alice).await;
let alice_private_identity = Mutex::new(alice_private_identity);
let bob = ReadOnlyAccount::new(bob_id(), bob_device_id());
let bob_store = MemoryStore::new();
let bob_identity = Mutex::new(PrivateCrossSigningIdentity::empty(bob_id()));
let (bob_private_identity, _, _) = PrivateCrossSigningIdentity::with_account(&bob).await;
let bob_private_identity = Mutex::new(bob_private_identity);
let alice_public_identity =
ReadOnlyUserIdentity::from_private(&*alice_private_identity.lock().await).await;
let alice_readonly_identity =
ReadOnlyOwnUserIdentity::from_private(&*alice_private_identity.lock().await).await;
let bob_public_identity =
ReadOnlyUserIdentity::from_private(&*bob_private_identity.lock().await).await;
let bob_readonly_identity =
ReadOnlyOwnUserIdentity::from_private(&*bob_private_identity.lock().await).await;
let alice_device = ReadOnlyDevice::from_account(&alice).await;
let bob_device = ReadOnlyDevice::from_account(&bob).await;
let alice_changes = Changes {
identities: IdentityChanges {
new: vec![alice_readonly_identity.into(), bob_public_identity.into()],
changed: vec![],
},
..Default::default()
};
alice_store.save_changes(alice_changes).await.unwrap();
alice_store.save_devices(vec![bob_device]).await;
let bob_changes = Changes {
identities: IdentityChanges {
new: vec![bob_readonly_identity.into(), alice_public_identity.into()],
changed: vec![],
},
..Default::default()
};
bob_store.save_changes(bob_changes).await.unwrap();
bob_store.save_devices(vec![alice_device]).await;
let alice_store = VerificationStore {
account: alice,
inner: Arc::new(alice_store),
private_identity: alice_identity.into(),
private_identity: alice_private_identity.into(),
};
let bob_store = VerificationStore {
account: bob.clone(),
inner: Arc::new(bob_store),
private_identity: bob_identity.into(),
private_identity: bob_private_identity.into(),
};
(alice_store, bob_store)

View File

@@ -36,6 +36,8 @@ use ruma::{
DeviceId, MilliSecondsSinceUnixEpoch, OwnedDeviceId, OwnedUserId, RoomId, TransactionId,
UserId,
};
#[cfg(feature = "qrcode")]
use tracing::debug;
use tracing::{info, trace, warn};
#[cfg(feature = "qrcode")]
@@ -336,7 +338,29 @@ impl VerificationRequest {
if let Some(future) = fut {
let qr_verification = future.await?;
self.verification_cache.insert_qr(qr_verification.clone());
// We may have previously started our own QR verification (e.g. two devices
// displaying QR code at the same time), so we need to replace it with the newly
// scanned code.
if self
.verification_cache
.get_qr(qr_verification.other_user_id(), qr_verification.flow_id().as_str())
.is_some()
{
debug!(
user_id = %self.other_user(),
flow_id = self.flow_id().as_str(),
"Replacing existing QR verification"
);
self.verification_cache.replace_qr(qr_verification.clone());
} else {
debug!(
user_id = %self.other_user(),
flow_id = self.flow_id().as_str(),
"Inserting new QR verification"
);
self.verification_cache.insert_qr(qr_verification.clone());
}
Ok(Some(qr_verification))
} else {
@@ -634,7 +658,24 @@ impl VerificationRequest {
if let Some((sas, content)) =
s.clone().start_sas(self.we_started, self.inner.clone().into()).await?
{
self.verification_cache.insert_sas(sas.clone());
// We may have previously started QR verification and generated a QR code. If we
// now switch to SAS flow, the previous verification has to be replaced
cfg_if::cfg_if! {
if #[cfg(feature = "qrcode")] {
if self.verification_cache.get_qr(sas.other_user_id(), sas.flow_id().as_str()).is_some() {
debug!(
user_id = %self.other_user(),
flow_id = self.flow_id().as_str(),
"We have an ongoing QR verification, replacing with SAS"
);
self.verification_cache.replace(sas.clone().into())
} else {
self.verification_cache.insert_sas(sas.clone());
}
} else {
self.verification_cache.insert_sas(sas.clone());
}
}
let request = match content {
OutgoingContent::ToDevice(content) => ToDeviceRequest::with_id(
@@ -1222,7 +1263,11 @@ mod tests {
use std::convert::{TryFrom, TryInto};
#[cfg(feature = "qrcode")]
use matrix_sdk_qrcode::QrVerificationData;
use matrix_sdk_test::async_test;
#[cfg(feature = "qrcode")]
use ruma::events::key::verification::VerificationMethod;
use ruma::{event_id, room_id};
use super::VerificationRequest;
@@ -1385,4 +1430,125 @@ mod tests {
assert!(alice_sas.started_from_request());
assert!(bob_sas.started_from_request());
}
#[async_test]
#[cfg(feature = "qrcode")]
async fn can_scan_another_qr_after_creating_mine() {
let (alice_store, bob_store) = setup_stores().await;
let flow_id = FlowId::ToDevice("TEST_FLOW_ID".into());
// We setup the initial verification request
let bob_request = VerificationRequest::new(
VerificationCache::new(),
bob_store,
flow_id.clone(),
alice_id(),
vec![],
Some(vec![VerificationMethod::QrCodeScanV1, VerificationMethod::QrCodeShowV1]),
);
let request = bob_request.request_to_device();
let content: OutgoingContent = request.try_into().unwrap();
let content = RequestContent::try_from(&content).unwrap();
let alice_request = VerificationRequest::from_request(
VerificationCache::new(),
alice_store,
bob_id(),
flow_id,
&content,
);
let content: OutgoingContent = alice_request
.accept_with_methods(vec![
VerificationMethod::QrCodeScanV1,
VerificationMethod::QrCodeShowV1,
])
.unwrap()
.try_into()
.unwrap();
let content = ReadyContent::try_from(&content).unwrap();
bob_request.receive_ready(alice_id(), &content);
assert!(bob_request.is_ready());
assert!(alice_request.is_ready());
// Each side can start its own QR verification flow by generating QR code
let alice_verification = alice_request.generate_qr_code().await.unwrap();
let bob_verification = bob_request.generate_qr_code().await.unwrap();
assert!(alice_verification.is_some());
assert!(bob_verification.is_some());
// Now only Alice scans Bob's code
let bob_qr_code = bob_verification.unwrap().to_bytes().unwrap();
let bob_qr_code = QrVerificationData::from_bytes(bob_qr_code).unwrap();
let alice_verification = alice_request.scan_qr_code(bob_qr_code).await.unwrap().unwrap();
// Finally we assert that the verification has been reciprocated rather than
// cancelled due to a duplicate verification flow
assert!(!alice_verification.is_cancelled());
assert!(alice_verification.reciprocated());
}
#[async_test]
#[cfg(feature = "qrcode")]
async fn can_start_sas_after_generating_qr_code() {
let (alice_store, bob_store) = setup_stores().await;
let flow_id = FlowId::ToDevice("TEST_FLOW_ID".into());
// We setup the initial verification request
let bob_request = VerificationRequest::new(
VerificationCache::new(),
bob_store,
flow_id.clone(),
alice_id(),
vec![],
Some(vec![
VerificationMethod::QrCodeScanV1,
VerificationMethod::QrCodeShowV1,
VerificationMethod::SasV1,
]),
);
let request = bob_request.request_to_device();
let content: OutgoingContent = request.try_into().unwrap();
let content = RequestContent::try_from(&content).unwrap();
let alice_request = VerificationRequest::from_request(
VerificationCache::new(),
alice_store,
bob_id(),
flow_id,
&content,
);
let content: OutgoingContent = alice_request
.accept_with_methods(vec![
VerificationMethod::QrCodeScanV1,
VerificationMethod::QrCodeShowV1,
])
.unwrap()
.try_into()
.unwrap();
let content = ReadyContent::try_from(&content).unwrap();
bob_request.receive_ready(alice_id(), &content);
assert!(bob_request.is_ready());
assert!(alice_request.is_ready());
// Each side can start its own QR verification flow by generating QR code
let alice_verification = alice_request.generate_qr_code().await.unwrap();
let bob_verification = bob_request.generate_qr_code().await.unwrap();
assert!(alice_verification.is_some());
assert!(bob_verification.is_some());
// Alice can now start SAS verification flow instead of QR without cancelling
// the request
let (sas, _) = alice_request.start_sas().await.unwrap().unwrap();
assert!(!sas.is_cancelled());
}
}

View File

@@ -62,7 +62,7 @@ pub fn calculate_commitment(public_key: Curve25519PublicKey, content: &StartCont
Base64::new(
Sha256::new()
.chain_update(public_key.to_base64())
.chain_update(&content_string)
.chain_update(content_string)
.finalize()
.as_slice()
.to_owned(),
@@ -228,7 +228,7 @@ pub fn receive_mac_event(
if let Some(key) = ids.other_device.keys().get(&key_id) {
let calculated_mac = Base64::parse(
sas.calculate_mac_invalid_base64(&key.to_base64(), &format!("{}{}", info, key_id)),
sas.calculate_mac_invalid_base64(&key.to_base64(), &format!("{info}{key_id}")),
)
.expect("Can't base64-decode SAS MAC");

View File

@@ -18,17 +18,18 @@ mod sas_state;
use std::sync::{Arc, Mutex};
use futures_core::Stream;
use futures_signals::signal::{Mutable, SignalExt};
use inner_sas::InnerSas;
#[cfg(test)]
use matrix_sdk_common::instant::Instant;
use ruma::{
api::client::keys::upload_signatures::v3::Request as SignatureUploadRequest,
events::{
key::verification::{cancel::CancelCode, ShortAuthenticationString},
key::verification::{cancel::CancelCode, start::SasV1Content, ShortAuthenticationString},
AnyMessageLikeEventContent, AnyToDeviceEventContent,
},
DeviceId, OwnedEventId, OwnedRoomId, OwnedTransactionId, RoomId, TransactionId, UserId,
};
pub use sas_state::AcceptedProtocols;
use tracing::trace;
use super::{
@@ -47,6 +48,7 @@ use crate::{
#[derive(Clone, Debug)]
pub struct Sas {
inner: Arc<Mutex<InnerSas>>,
state: Arc<Mutable<SasState>>,
account: ReadOnlyAccount,
identities_being_verified: IdentitiesBeingVerified,
flow_id: Arc<FlowId>,
@@ -54,6 +56,133 @@ pub struct Sas {
request_handle: Option<RequestHandle>,
}
/// The short auth string for the emoji method of SAS verification.
#[derive(Debug, Clone)]
pub struct EmojiShortAuthString {
/// A list of seven indices that should be used for the SAS verification.
///
/// The indices can be put into the emoji table in the [spec] to figure out
/// the symbols and descriptions.
///
/// If you have a table of [translated descriptions] for the emojis you will
/// want to use this field.
///
/// [spec]: https://spec.matrix.org/unstable/client-server-api/#sas-method-emoji
/// [translated descriptions]: https://github.com/matrix-org/matrix-doc/blob/master/data-definitions/
pub indices: [u8; 7],
/// A list of seven emojis that should be used for the SAS verification.
pub emojis: [Emoji; 7],
}
/// An Enum describing the state the SAS verification is in.
#[derive(Debug, Clone)]
pub enum SasState {
/// The verification has been started, the protocols that should be used
/// have been proposed and can be accepted.
Started {
/// The protocols that were proposed in the `m.key.verification.start`
/// event.
protocols: SasV1Content,
},
/// The verification has been accepted and both sides agreed to a set of
/// protocols that will be used for the verification process.
Accepted {
/// The protocols that were accepted in the `m.key.verification.accept`
/// event.
accepted_protocols: AcceptedProtocols,
},
/// The public keys have been exchanged and the short auth string can be
/// presented to the user.
KeysExchanged {
/// The emojis that represent the short auth string, will be `None` if
/// the emoji SAS method wasn't part of the [`AcceptedProtocols`].
emojis: Option<EmojiShortAuthString>,
/// The list of decimals that represent the short auth string.
decimals: (u16, u16, u16),
},
/// The verification process has been confirmed from our side, we're waiting
/// for the other side to confirm as well.
Confirmed,
/// The verification process has been successfully concluded.
Done {
/// The list of devices that has been verified.
verified_devices: Vec<ReadOnlyDevice>,
/// The list of user identities that has been verified.
verified_identities: Vec<ReadOnlyUserIdentities>,
},
/// The verification process has been cancelled.
Cancelled(CancelInfo),
}
impl PartialEq for SasState {
fn eq(&self, other: &Self) -> bool {
matches!(
(self, other),
(Self::Started { .. }, Self::Started { .. })
| (Self::Accepted { .. }, Self::Accepted { .. })
| (Self::KeysExchanged { .. }, Self::KeysExchanged { .. })
| (Self::Confirmed, Self::Confirmed)
| (Self::Done { .. }, Self::Done { .. })
| (Self::Cancelled(_), Self::Cancelled(_))
)
}
}
impl From<&InnerSas> for SasState {
fn from(value: &InnerSas) -> Self {
match value {
InnerSas::Created(s) => {
Self::Started { protocols: s.state.protocol_definitions.to_owned() }
}
InnerSas::Started(s) => {
Self::Started { protocols: s.state.protocol_definitions.to_owned() }
}
InnerSas::Accepted(s) => {
Self::Accepted { accepted_protocols: s.state.accepted_protocols.to_owned() }
}
InnerSas::WeAccepted(s) => {
Self::Accepted { accepted_protocols: s.state.accepted_protocols.to_owned() }
}
InnerSas::KeyReceived(s) => {
let emojis = if value.supports_emoji() {
let emojis = s.get_emoji();
let indices = s.get_emoji_index();
Some(EmojiShortAuthString { emojis, indices })
} else {
None
};
let decimals = s.get_decimal();
Self::KeysExchanged { emojis, decimals }
}
InnerSas::MacReceived(s) => {
let emojis = if value.supports_emoji() {
let emojis = s.get_emoji();
let indices = s.get_emoji_index();
Some(EmojiShortAuthString { emojis, indices })
} else {
None
};
let decimals = s.get_decimal();
Self::KeysExchanged { emojis, decimals }
}
InnerSas::Confirmed(_) => Self::Confirmed,
InnerSas::WaitingForDone(_) => Self::Confirmed,
InnerSas::Done(s) => Self::Done {
verified_devices: s.verified_devices().to_vec(),
verified_identities: s.verified_identities().to_vec(),
},
InnerSas::Cancelled(c) => Self::Cancelled(c.state.as_ref().clone().into()),
}
}
}
impl Sas {
/// Get our own user id.
pub fn user_id(&self) -> &UserId {
@@ -137,7 +266,7 @@ impl Sas {
#[cfg(test)]
#[allow(dead_code)]
pub(crate) fn set_creation_time(&self, time: Instant) {
pub(crate) fn set_creation_time(&self, time: matrix_sdk_common::instant::Instant) {
self.inner.lock().unwrap().set_creation_time(time)
}
@@ -156,11 +285,13 @@ impl Sas {
request_handle.is_some(),
);
let state = (&inner).into();
let account = identities.store.account.clone();
(
Sas {
inner: Arc::new(Mutex::new(inner)),
state: Mutable::new(state).into(),
account,
identities_being_verified: identities,
flow_id: flow_id.into(),
@@ -241,10 +372,12 @@ impl Sas {
request_handle.is_some(),
)?;
let state = (&inner).into();
let account = identities.store.account.clone();
Ok(Sas {
inner: Arc::new(Mutex::new(inner)),
state: Mutable::new(state).into(),
account,
identities_being_verified: identities,
flow_id: flow_id.into(),
@@ -271,28 +404,41 @@ impl Sas {
&self,
settings: AcceptSettings,
) -> Option<OutgoingVerificationRequest> {
let mut guard = self.inner.lock().unwrap();
let sas: InnerSas = (*guard).clone();
let methods = settings.allowed_methods;
let (request, state) = {
let mut guard = self.inner.lock().unwrap();
let sas: InnerSas = (*guard).clone();
let methods = settings.allowed_methods;
if let Some((sas, content)) = sas.accept(methods) {
*guard = sas;
if let Some((sas, content)) = sas.accept(methods) {
let state: SasState = (&sas).into();
Some(match content {
OwnedAcceptContent::ToDevice(c) => {
let content = AnyToDeviceEventContent::KeyVerificationAccept(c);
self.content_to_request(content).into()
}
OwnedAcceptContent::Room(room_id, content) => RoomMessageRequest {
room_id,
txn_id: TransactionId::new(),
content: AnyMessageLikeEventContent::KeyVerificationAccept(content),
}
.into(),
})
} else {
None
*guard = sas;
(
Some(match content {
OwnedAcceptContent::ToDevice(c) => {
let content = AnyToDeviceEventContent::KeyVerificationAccept(c);
self.content_to_request(content).into()
}
OwnedAcceptContent::Room(room_id, content) => RoomMessageRequest {
room_id,
txn_id: TransactionId::new(),
content: AnyMessageLikeEventContent::KeyVerificationAccept(content),
}
.into(),
}),
Some(state),
)
} else {
(None, None)
}
};
if let Some(new_state) = state {
self.update_state(new_state);
}
request
}
/// Confirm the Sas verification.
@@ -306,13 +452,15 @@ impl Sas {
&self,
) -> Result<(Vec<OutgoingVerificationRequest>, Option<SignatureUploadRequest>), CryptoStoreError>
{
let (contents, done) = {
let (contents, done, state) = {
let mut guard = self.inner.lock().unwrap();
let sas: InnerSas = (*guard).clone();
let (sas, contents) = sas.confirm();
let state: SasState = (&sas).into();
*guard = sas;
(contents, guard.is_done())
(contents, guard.is_done(), state)
};
let mac_requests = contents
@@ -339,10 +487,17 @@ impl Sas {
VerificationResult::Cancel(c) => {
Ok((self.cancel_with_code(c).into_iter().collect(), None))
}
VerificationResult::Ok => Ok((mac_requests, None)),
VerificationResult::SignatureUpload(r) => Ok((mac_requests, Some(r))),
VerificationResult::Ok => {
self.update_state(state);
Ok((mac_requests, None))
}
VerificationResult::SignatureUpload(r) => {
self.update_state(state);
Ok((mac_requests, Some(r)))
}
}
} else {
self.update_state(state);
Ok((mac_requests, None))
}
}
@@ -377,21 +532,32 @@ impl Sas {
///
/// [`cancel()`]: #method.cancel
pub fn cancel_with_code(&self, code: CancelCode) -> Option<OutgoingVerificationRequest> {
let mut guard = self.inner.lock().unwrap();
let (content, state) = {
let mut guard = self.inner.lock().unwrap();
if let Some(request) = &self.request_handle {
request.cancel_with_code(&code);
}
let sas: InnerSas = (*guard).clone();
let (sas, content) = sas.cancel(true, code);
*guard = sas;
content.map(|c| match c {
OutgoingContent::Room(room_id, content) => {
RoomMessageRequest { room_id, txn_id: TransactionId::new(), content }.into()
if let Some(request) = &self.request_handle {
request.cancel_with_code(&code);
}
OutgoingContent::ToDevice(c) => self.content_to_request(c).into(),
})
let sas: InnerSas = (*guard).clone();
let (sas, content) = sas.cancel(true, code);
let state: SasState = (&sas).into();
*guard = sas;
(
content.map(|c| match c {
OutgoingContent::Room(room_id, content) => {
RoomMessageRequest { room_id, txn_id: TransactionId::new(), content }.into()
}
OutgoingContent::ToDevice(c) => self.content_to_request(c).into(),
}),
state,
)
};
self.update_state(state);
content
}
pub(crate) fn cancel_if_timed_out(&self) -> Option<OutgoingVerificationRequest> {
@@ -451,15 +617,134 @@ impl Sas {
self.inner.lock().unwrap().decimals()
}
/// Listen for changes in the SAS verification process.
///
/// The changes are presented as a stream of [`SasState`] values.
///
/// This method can be used to react to changes in the state of the
/// verification process, or rather the method can be used to handle
/// each step of the verification process.
///
/// # Flowchart
///
/// The flow of the verification process is pictured bellow. Please note
/// that the process can be cancelled at each step of the process.
/// Either side can cancel the process.
///
/// ```text
/// ┌───────┐
/// │Started│
/// └───┬───┘
/// │
/// ┌────⌄───┐
/// │Accepted│
/// └────┬───┘
/// │
/// ┌───────⌄──────┐
/// │Keys Exchanged│
/// └───────┬──────┘
/// │
/// ________⌄________
/// ╲ ┌─────────┐
/// Does the short ╲______│Cancelled│
/// ╲ auth string match no └─────────┘
/// ╲_________________
/// │yes
/// │
/// ┌────⌄────┐
/// │Confirmed│
/// └────┬────┘
/// │
/// ┌───⌄───┐
/// │ Done │
/// └───────┘
/// ```
/// # Example
///
/// ```no_run
/// use futures::stream::{Stream, StreamExt};
/// use matrix_sdk_crypto::{Sas, SasState};
///
/// # futures::executor::block_on(async {
/// # let sas: Sas = unimplemented!();
///
/// let mut stream = sas.changes();
///
/// while let Some(state) = stream.next().await {
/// match state {
/// SasState::KeysExchanged { emojis, decimals: _ } => {
/// let emojis =
/// emojis.expect("We only support emoji verification");
/// println!("Do these emojis match {emojis:#?}");
///
/// // Ask the user to confirm or cancel here.
/// }
/// SasState::Done { .. } => {
/// let device = sas.other_device();
///
/// println!(
/// "Successfully verified device {} {} {:?}",
/// device.user_id(),
/// device.device_id(),
/// device.local_trust_state()
/// );
///
/// break;
/// }
/// SasState::Cancelled(cancel_info) => {
/// println!(
/// "The verification has been cancelled, reason: {}",
/// cancel_info.reason()
/// );
/// break;
/// }
/// SasState::Started { .. }
/// | SasState::Accepted { .. }
/// | SasState::Confirmed => (),
/// }
/// }
/// # anyhow::Ok(()) });
/// ```
pub fn changes(&self) -> impl Stream<Item = SasState> {
self.state.signal_cloned().to_stream()
}
/// Get the current state of the verification process.
pub fn state(&self) -> SasState {
self.state.lock_ref().to_owned()
}
fn update_state(&self, new_state: SasState) {
let mut lock = self.state.lock_mut();
// Only update the state if it differs, this is important so clients don't end
// up printing the emoji twice. For example, the internal state might
// change into a MacReceived, because the other side already confirmed,
// but our side still needs to just show the emoji and wait for
// confirmation.
if *lock != new_state {
*lock = new_state;
}
}
pub(crate) fn receive_any_event(
&self,
sender: &UserId,
content: &AnyVerificationContent<'_>,
) -> Option<OutgoingContent> {
let mut guard = self.inner.lock().unwrap();
let sas: InnerSas = (*guard).clone();
let (sas, content) = sas.receive_any_event(sender, content);
*guard = sas;
let (content, state) = {
let mut guard = self.inner.lock().unwrap();
let sas: InnerSas = (*guard).clone();
let (sas, content) = sas.receive_any_event(sender, content);
let state: SasState = (&sas).into();
*guard = sas;
(content, state)
};
self.update_state(state);
content
}
@@ -527,7 +812,7 @@ mod tests {
event_enums::{AcceptContent, KeyContent, MacContent, OutgoingContent, StartContent},
VerificationStore,
},
ReadOnlyAccount, ReadOnlyDevice,
ReadOnlyAccount, ReadOnlyDevice, SasState,
};
fn alice_id() -> &'static UserId {
@@ -573,18 +858,25 @@ mod tests {
let (alice, content) = Sas::start(identities, TransactionId::new(), true, None);
matches!(alice.state(), SasState::Started { .. });
let flow_id = alice.flow_id().to_owned();
let content = StartContent::try_from(&content).unwrap();
let identities = bob_store.get_identities(alice_device).await.unwrap();
let bob = Sas::from_start_event(flow_id, &content, identities, None, false).unwrap();
matches!(bob.state(), SasState::Started { .. });
let request = bob.accept().unwrap();
let content = OutgoingContent::try_from(request).unwrap();
let content = AcceptContent::try_from(&content).unwrap();
let content = alice.receive_any_event(bob.user_id(), &content.into()).unwrap();
matches!(alice.state(), SasState::Accepted { .. });
matches!(bob.state(), SasState::Accepted { .. });
assert!(!alice.can_be_presented());
assert!(!bob.can_be_presented());
@@ -592,22 +884,27 @@ mod tests {
let content = bob.receive_any_event(alice.user_id(), &content.into()).unwrap();
assert!(bob.can_be_presented());
matches!(bob.state(), SasState::KeysExchanged { .. });
let content = KeyContent::try_from(&content).unwrap();
alice.receive_any_event(bob.user_id(), &content.into());
matches!(alice.state(), SasState::KeysExchanged { .. });
assert!(alice.can_be_presented());
assert_eq!(alice.emoji().unwrap(), bob.emoji().unwrap());
assert_eq!(alice.decimals().unwrap(), bob.decimals().unwrap());
let mut requests = alice.confirm().await.unwrap().0;
matches!(alice.state(), SasState::Confirmed);
assert!(requests.len() == 1);
let request = requests.pop().unwrap();
let content = OutgoingContent::try_from(request).unwrap();
let content = MacContent::try_from(&content).unwrap();
bob.receive_any_event(alice.user_id(), &content.into());
matches!(bob.state(), SasState::KeysExchanged { .. });
let mut requests = bob.confirm().await.unwrap().0;
matches!(bob.state(), SasState::Confirmed);
assert!(requests.len() == 1);
let request = requests.pop().unwrap();
let content = OutgoingContent::try_from(request).unwrap();
@@ -616,5 +913,7 @@ mod tests {
assert!(alice.verified_devices().unwrap().contains(alice.other_device()));
assert!(bob.verified_devices().unwrap().contains(bob.other_device()));
matches!(alice.state(), SasState::Done { .. });
matches!(bob.state(), SasState::Done { .. });
}
}

View File

@@ -34,7 +34,7 @@ use ruma::{
ToDeviceKeyVerificationStartEventContent,
},
HashAlgorithm, KeyAgreementProtocol, MessageAuthenticationCode, Relation,
ShortAuthenticationString, VerificationMethod,
ShortAuthenticationString,
},
AnyMessageLikeEventContent, AnyToDeviceEventContent,
},
@@ -92,12 +92,16 @@ const MAX_EVENT_TIMEOUT: Duration = Duration::from_secs(60);
/// Struct containing the protocols that were agreed to be used for the SAS
/// flow.
#[derive(Clone, Debug)]
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct AcceptedProtocols {
pub method: VerificationMethod,
/// The key agreement protocol the device is choosing to use.
pub key_agreement_protocol: KeyAgreementProtocol,
/// The hash method the device is choosing to use.
pub hash: HashAlgorithm,
/// The message authentication code the device is choosing to use
pub message_auth_code: MessageAuthenticationCode,
/// The SAS methods both devices involved in the verification process
/// understand.
pub short_auth_string: Vec<ShortAuthenticationString>,
}
@@ -116,7 +120,6 @@ impl TryFrom<AcceptV1Content> for AcceptedProtocols {
Err(CancelCode::UnknownMethod)
} else {
Ok(Self {
method: VerificationMethod::SasV1,
hash: content.hash,
key_agreement_protocol: content.key_agreement_protocol,
message_auth_code: content.message_authentication_code,
@@ -163,7 +166,6 @@ impl TryFrom<&SasV1Content> for AcceptedProtocols {
}
Ok(Self {
method: VerificationMethod::SasV1,
hash: HashAlgorithm::Sha256,
key_agreement_protocol: KeyAgreementProtocol::Curve25519HkdfSha256,
message_auth_code: MessageAuthenticationCode::HkdfHmacSha256,
@@ -177,7 +179,6 @@ impl TryFrom<&SasV1Content> for AcceptedProtocols {
impl Default for AcceptedProtocols {
fn default() -> Self {
AcceptedProtocols {
method: VerificationMethod::SasV1,
hash: HashAlgorithm::Sha256,
key_agreement_protocol: KeyAgreementProtocol::Curve25519HkdfSha256,
message_auth_code: MessageAuthenticationCode::HkdfHmacSha256,
@@ -239,21 +240,22 @@ impl<S: Clone + std::fmt::Debug> std::fmt::Debug for SasState<S> {
/// The initial SAS state.
#[derive(Clone, Debug)]
pub struct Created {
protocol_definitions: SasV1Content,
pub protocol_definitions: SasV1Content,
}
/// The initial SAS state if the other side started the SAS verification.
#[derive(Clone, Debug)]
pub struct Started {
commitment: Base64,
pub accepted_protocols: Arc<AcceptedProtocols>,
pub protocol_definitions: SasV1Content,
pub accepted_protocols: AcceptedProtocols,
}
/// The SAS state we're going to be in after the other side accepted our
/// verification start event.
#[derive(Clone, Debug)]
pub struct Accepted {
pub accepted_protocols: Arc<AcceptedProtocols>,
pub accepted_protocols: AcceptedProtocols,
start_content: Arc<OwnedStartContent>,
commitment: Base64,
}
@@ -263,7 +265,7 @@ pub struct Accepted {
#[derive(Clone, Debug)]
pub struct WeAccepted {
we_started: bool,
pub accepted_protocols: Arc<AcceptedProtocols>,
pub accepted_protocols: AcceptedProtocols,
commitment: Base64,
}
@@ -275,7 +277,7 @@ pub struct WeAccepted {
pub struct KeyReceived {
sas: Arc<Mutex<EstablishedSas>>,
we_started: bool,
pub accepted_protocols: Arc<AcceptedProtocols>,
pub accepted_protocols: AcceptedProtocols,
}
/// The SAS state we're going to be in after the user has confirmed that the
@@ -284,7 +286,7 @@ pub struct KeyReceived {
#[derive(Clone, Debug)]
pub struct Confirmed {
sas: Arc<Mutex<EstablishedSas>>,
pub accepted_protocols: Arc<AcceptedProtocols>,
pub accepted_protocols: AcceptedProtocols,
}
/// The SAS state we're going to be in after we receive a MAC event from the
@@ -296,7 +298,7 @@ pub struct MacReceived {
we_started: bool,
verified_devices: Arc<[ReadOnlyDevice]>,
verified_master_keys: Arc<[ReadOnlyUserIdentities]>,
pub accepted_protocols: Arc<AcceptedProtocols>,
pub accepted_protocols: AcceptedProtocols,
}
/// The SAS state we're going to be in after we receive a MAC event in a DM. DMs
@@ -485,7 +487,7 @@ impl SasState<Created> {
state: Arc::new(Accepted {
start_content,
commitment: content.commitment.clone(),
accepted_protocols: accepted_protocols.into(),
accepted_protocols,
}),
})
} else {
@@ -565,7 +567,8 @@ impl SasState<Started> {
verification_flow_id: flow_id,
state: Arc::new(Started {
accepted_protocols: accepted_protocols.into(),
protocol_definitions: method_content.to_owned(),
accepted_protocols,
commitment,
}),
})
@@ -578,7 +581,7 @@ impl SasState<Started> {
}
pub fn into_we_accepted(self, methods: Vec<ShortAuthenticationString>) -> SasState<WeAccepted> {
let mut accepted_protocols = self.state.accepted_protocols.as_ref().to_owned();
let mut accepted_protocols = self.state.accepted_protocols.to_owned();
accepted_protocols.short_auth_string = methods;
// Decimal is required per spec.
@@ -596,7 +599,7 @@ impl SasState<Started> {
started_from_request: self.started_from_request,
state: Arc::new(WeAccepted {
we_started: false,
accepted_protocols: accepted_protocols.into(),
accepted_protocols,
commitment: self.state.commitment.clone(),
}),
}
@@ -658,7 +661,7 @@ impl SasState<Started> {
state: Arc::new(Accepted {
start_content,
commitment: content.commitment.clone(),
accepted_protocols: accepted_protocols.into(),
accepted_protocols,
}),
})
} else {

View File

@@ -30,11 +30,11 @@ matrix-sdk-crypto = { version = "0.6.0", path = "../matrix-sdk-crypto", features
matrix-sdk-store-encryption = { version = "0.2.0", path = "../matrix-sdk-store-encryption" }
indexed_db_futures = "0.2.3"
indexed_db_futures_nodejs = { version = "0.2.3", package = "indexed_db_futures", git = "https://github.com/Hywan/rust-indexed-db", branch = "feat-factory-nodejs", optional = true }
ruma = "0.7.0"
ruma = { workspace = true }
serde = "1.0.136"
serde_json = "1.0.79"
thiserror = "1.0.30"
tracing = "0.1.34"
tracing = { workspace = true }
wasm-bindgen = { version = "0.2.80", features = ["serde-serialize"] }
web-sys = { version = "0.3.57", features = ["IdbKeyRange"] }

View File

@@ -140,7 +140,7 @@ impl IndexeddbCryptoStore {
prefix: &str,
store_cipher: Option<Arc<StoreCipher>>,
) -> Result<Self> {
let name = format!("{:0}::matrix-sdk-crypto", prefix);
let name = format!("{prefix:0}::matrix-sdk-crypto");
// Open my_db v1
let mut db_req: OpenDbRequest = IdbDatabase::open_f64(&name, 1.1)?;
@@ -232,7 +232,7 @@ impl IndexeddbCryptoStore {
/// Open a new `IndexeddbCryptoStore` with given name and passphrase
pub async fn open_with_passphrase(prefix: &str, passphrase: &str) -> Result<Self> {
let name = format!("{:0}::matrix-sdk-crypto-meta", prefix);
let name = format!("{prefix:0}::matrix-sdk-crypto-meta");
let mut db_req: OpenDbRequest = IdbDatabase::open_f64(&name, 1.0)?;
db_req.set_on_upgrade_needed(Some(|evt: &IdbVersionChangeEvent| -> Result<(), JsValue> {
@@ -907,7 +907,7 @@ impl IndexeddbCryptoStore {
if let Some(inner) = request {
tx.object_store(KEYS::SECRET_REQUESTS_BY_INFO)?
.delete(&self.encode_key(KEYS::KEY_REQUEST, &inner.info.as_key()))?;
.delete(&self.encode_key(KEYS::KEY_REQUEST, inner.info.as_key()))?;
}
tx.object_store(KEYS::UNSENT_SECRET_REQUESTS)?.delete(&jskey)?;

View File

@@ -61,7 +61,7 @@ pub trait SafeEncode {
/// encode self into a JsValue, internally using `as_encoded_string`
/// to escape the value of self, and append the given counter
fn encode_with_counter(&self, i: usize) -> JsValue {
format!("{}{}{:016x}", self.as_encoded_string(), KEY_SEPARATOR, i).into()
format!("{}{KEY_SEPARATOR}{i:016x}", self.as_encoded_string()).into()
}
/// encode self into a JsValue, internally using `as_secure_string`

View File

@@ -97,17 +97,7 @@ impl From<IndexeddbStateStoreError> for StoreError {
match e {
IndexeddbStateStoreError::Json(e) => StoreError::Json(e),
IndexeddbStateStoreError::StoreError(e) => e,
IndexeddbStateStoreError::Encryption(e) => match e {
EncryptionError::Random(e) => StoreError::Encryption(e.to_string()),
EncryptionError::Serialization(e) => StoreError::Json(e),
EncryptionError::Encryption(e) => StoreError::Encryption(e.to_string()),
EncryptionError::Version(found, expected) => StoreError::Encryption(format!(
"Bad Database Encryption Version: expected {expected}, found {found}",
)),
EncryptionError::Length(found, expected) => StoreError::Encryption(format!(
"The database key an invalid length: expected {expected}, found {found}",
)),
},
IndexeddbStateStoreError::Encryption(e) => StoreError::Encryption(e),
_ => StoreError::backend(e),
}
}
@@ -201,7 +191,7 @@ fn create_stores(db: &IdbDatabase) -> Result<(), JsValue> {
async fn backup(source: &IdbDatabase, meta: &IdbDatabase) -> Result<()> {
let now = JsDate::now();
let backup_name = format!("backup-{}-{}", source.name(), now);
let backup_name = format!("backup-{}-{now}", source.name());
let mut db_req: OpenDbRequest = IdbDatabase::open_f64(&backup_name, source.version())?;
db_req.set_on_upgrade_needed(Some(move |evt: &IdbVersionChangeEvent| -> Result<(), JsValue> {
@@ -267,7 +257,7 @@ impl IndexeddbStateStoreBuilder {
.unwrap_or(MigrationConflictStrategy::BackupAndDrop);
let name = self.name.clone().unwrap_or_else(|| "state".to_owned());
let meta_name = format!("{}::{}", name, KEYS::INTERNAL_STATE);
let meta_name = format!("{name}::{}", KEYS::INTERNAL_STATE);
let mut db_req: OpenDbRequest =
IdbDatabase::open_f64(&meta_name, KEYS::CURRENT_META_DB_VERSION)?;
@@ -1140,9 +1130,7 @@ impl IndexeddbStateStore {
}
async fn get_custom_value(&self, key: &[u8]) -> Result<Option<Vec<u8>>> {
let jskey = &JsValue::from_str(
core::str::from_utf8(key).map_err(|e| StoreError::Codec(format!("{:}", e)))?,
);
let jskey = &JsValue::from_str(core::str::from_utf8(key).map_err(StoreError::Codec)?);
self.get_custom_value_for_js(jskey).await
}
@@ -1157,9 +1145,7 @@ impl IndexeddbStateStore {
}
async fn set_custom_value(&self, key: &[u8], value: Vec<u8>) -> Result<Option<Vec<u8>>> {
let jskey = JsValue::from_str(
core::str::from_utf8(key).map_err(|e| StoreError::Codec(format!("{:}", e)))?,
);
let jskey = JsValue::from_str(core::str::from_utf8(key).map_err(StoreError::Codec)?);
let prev = self.get_custom_value_for_js(&jskey).await?;

View File

@@ -21,9 +21,7 @@ byteorder = "1.4.3"
qrcode = { version = "0.12.0", default-features = false }
ruma-common = "0.10.0"
thiserror = "1.0.30"
[dependencies.vodozemac]
version = "0.3.0"
vodozemac = { workspace = true }
[dev-dependencies]
image = "0.23.0"

View File

@@ -35,13 +35,13 @@ matrix-sdk-base = { version = "0.6.0", path = "../matrix-sdk-base", optional = t
matrix-sdk-common = { version = "0.6.0", path = "../matrix-sdk-common" }
matrix-sdk-crypto = { version = "0.6.0", path = "../matrix-sdk-crypto", optional = true }
matrix-sdk-store-encryption = { version = "0.2.0", path = "../matrix-sdk-store-encryption" }
ruma = "0.7.0"
ruma = { workspace = true }
serde = "1.0.136"
serde_json = "1.0.79"
sled = "0.34.7"
thiserror = "1.0.30"
tokio = { version = "1.17.0", default-features = false, features = ["sync", "fs"] }
tracing = "0.1.34"
tracing = { workspace = true }
[dev-dependencies]
glob = "0.3.0"

View File

@@ -792,7 +792,7 @@ impl CryptoStore for SledCryptoStore {
let key = self.encode_key(INBOUND_GROUP_TABLE_NAME, (room_id, session_id));
let pickle = self
.inbound_group_sessions
.get(&key)
.get(key)
.map_err(CryptoStoreError::backend)?
.map(|p| self.deserialize_value(&p));

View File

@@ -104,25 +104,14 @@ impl From<TransactionError<SledStoreError>> for SledStoreError {
}
}
#[allow(clippy::from_over_into)]
impl Into<StoreError> for SledStoreError {
fn into(self) -> StoreError {
match self {
impl From<SledStoreError> for StoreError {
fn from(err: SledStoreError) -> StoreError {
match err {
SledStoreError::Json(e) => StoreError::Json(e),
SledStoreError::Identifier(e) => StoreError::Identifier(e),
SledStoreError::Encryption(e) => match e {
KeyEncryptionError::Random(e) => StoreError::Encryption(e.to_string()),
KeyEncryptionError::Serialization(e) => StoreError::Json(e),
KeyEncryptionError::Encryption(e) => StoreError::Encryption(e.to_string()),
KeyEncryptionError::Version(found, expected) => StoreError::Encryption(format!(
"Bad Database Encryption Version: expected {expected}, found {found}",
)),
KeyEncryptionError::Length(found, expected) => StoreError::Encryption(format!(
"The database key an invalid length: expected {expected}, found {found}",
)),
},
SledStoreError::Encryption(e) => StoreError::Encryption(e),
SledStoreError::StoreError(e) => e,
_ => StoreError::backend(self),
_ => StoreError::backend(err),
}
}
}
@@ -701,7 +690,7 @@ impl SledStateStore {
let make_room_version = |room_id| {
self.room_info
.get(&self.encode_key(ROOM_INFO, room_id))
.get(self.encode_key(ROOM_INFO, room_id))
.ok()
.flatten()
.map(|r| self.deserialize_value::<RoomInfo>(&r))
@@ -1588,7 +1577,7 @@ mod migration {
if let Err(SledStoreError::MigrationConflict { .. }) = res {
// all good
} else {
panic!("Didn't raise the expected error: {:?}", res);
panic!("Didn't raise the expected error: {res:?}");
}
assert_eq!(std::fs::read_dir(folder.path())?.count(), 1);
Ok(())

View File

@@ -25,7 +25,7 @@ serde = { version = "1.0.136", features = ["derive"] }
serde_json = "1.0.79"
sha2 = "0.10.2"
thiserror = "1.0.30"
zeroize = { version = "1.3.0", features = ["zeroize_derive"] }
zeroize = { workspace = true, features = ["zeroize_derive"] }
[dev-dependencies]
anyhow = "1.0.57"

View File

@@ -37,11 +37,14 @@ markdown = ["ruma/markdown"]
native-tls = ["reqwest/native-tls"]
rustls-tls = ["reqwest/rustls-tls"]
socks = ["reqwest/socks"]
sso-login = ["warp", "dep:rand", "dep:tokio-stream"]
sso-login = ["dep:hyper", "dep:rand", "dep:tokio-stream", "dep:tower"]
appservice = ["ruma/appservice-api-s"]
image-proc = ["dep:image"]
image-rayon = ["image-proc", "image?/jpeg_rayon"]
experimental-room-preview = []
experimental-timeline = []
sliding-sync = [
"matrix-sdk-base/sliding-sync",
"anyhow",
@@ -70,21 +73,24 @@ futures-core = "0.3.21"
futures-signals = { version = "0.3.30", default-features = false }
futures-util = { version = "0.3.21", default-features = false }
http = "0.2.6"
indexmap = "1.9.1"
hyper = { version = "0.14.20", features = ["http1", "http2", "server"], optional = true }
matrix-sdk-base = { version = "0.6.0", path = "../matrix-sdk-base", default_features = false }
matrix-sdk-common = { version = "0.6.0", path = "../matrix-sdk-common" }
matrix-sdk-indexeddb = { version = "0.2.0", path = "../matrix-sdk-indexeddb", default-features = false, optional = true }
matrix-sdk-sled = { version = "0.2.0", path = "../matrix-sdk-sled", default-features = false, optional = true }
mime = "0.3.16"
rand = { version = "0.8.5", optional = true }
reqwest = { version = "0.11.10", default_features = false}
reqwest = { version = "0.11.10", default_features = false }
ruma = { workspace = true, features = ["compat", "rand", "unstable-msc2448", "unstable-msc2965"] }
serde = "1.0.136"
serde_json = "1.0.79"
thiserror = "1.0.30"
tokio-stream = { version = "0.1.8", features = ["net"], optional = true }
tracing = "0.1.34"
tower = { version = "0.4.13", features = ["make"], optional = true }
tracing = { workspace = true, features = ["attributes"] }
url = "2.2.2"
warp = { version = "0.3.2", default-features = false, optional = true }
zeroize = "1.3.0"
zeroize = { workspace = true }
[dependencies.image]
version = "0.24.2"
@@ -106,10 +112,6 @@ features = [
]
optional = true
[dependencies.ruma]
version = "0.7.0"
features = ["client-api-c", "compat", "rand", "unstable-msc2448", "unstable-msc2965"]
[target.'cfg(target_arch = "wasm32")'.dependencies]
async-once-cell = "0.4.2"
wasm-timer = "0.2.5"
@@ -120,6 +122,7 @@ tokio = { version = "1.17.0", default-features = false, features = ["fs", "rt"]
[dev-dependencies]
anyhow = "1.0.57"
assert_matches = "1.5.0"
dirs = "4.0.0"
futures = { version = "0.3.21", default-features = false, features = ["executor"] }
matches = "0.1.9"

View File

@@ -43,8 +43,8 @@ async fn main() -> anyhow::Result<()> {
});
// Syncing is important to synchronize the client state with the server.
// This method will never return.
client.sync(SyncSettings::default()).await;
// This method will never return unless there is an error.
client.sync(SyncSettings::default()).await?;
Ok(())
}

View File

@@ -79,7 +79,8 @@ impl Account {
pub async fn get_display_name(&self) -> Result<Option<String>> {
let user_id = self.client.user_id().ok_or(Error::AuthenticationRequired)?;
let request = get_display_name::v3::Request::new(user_id);
let response = self.client.send(request, None).await?;
let request_config = self.client.request_config().force_auth();
let response = self.client.send(request, Some(request_config)).await?;
Ok(response.displayname)
}
@@ -239,7 +240,8 @@ impl Account {
pub async fn get_profile(&self) -> Result<get_profile::v3::Response> {
let user_id = self.client.user_id().ok_or(Error::AuthenticationRequired)?;
let request = get_profile::v3::Request::new(user_id);
Ok(self.client.send(request, None).await?)
let request_config = self.client.request_config().force_auth();
Ok(self.client.send(request, Some(request_config)).await?)
}
/// Change the password of the account.

View File

@@ -13,7 +13,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use std::sync::Arc;
use std::{fmt, sync::Arc};
#[cfg(target_arch = "wasm32")]
use async_once_cell::OnceCell;
@@ -82,7 +82,7 @@ use crate::{
pub struct ClientBuilder {
homeserver_cfg: Option<HomeserverConfig>,
http_cfg: Option<HttpConfig>,
store_config: StoreConfig,
store_config: BuilderStoreConfig,
request_config: RequestConfig,
respect_login_well_known: bool,
appservice_mode: bool,
@@ -95,7 +95,7 @@ impl ClientBuilder {
Self {
homeserver_cfg: None,
http_cfg: None,
store_config: Default::default(),
store_config: BuilderStoreConfig::Custom(StoreConfig::default()),
request_config: Default::default(),
respect_login_well_known: true,
appservice_mode: false,
@@ -126,30 +126,36 @@ impl ClientBuilder {
/// Set up the store configuration for a sled store.
///
/// This is a shorthand for
/// This is the same as
/// <code>.[store_config](Self::store_config)([matrix_sdk_sled]::[make_store_config](matrix_sdk_sled::make_store_config)(path, passphrase)?)</code>.
/// except it delegates the actual store config creation to when
/// `.build().await` is called.
#[cfg(feature = "sled")]
pub async fn sled_store(
self,
pub fn sled_store(
mut self,
path: impl AsRef<std::path::Path>,
passphrase: Option<&str>,
) -> Result<Self, matrix_sdk_sled::OpenStoreError> {
let config = matrix_sdk_sled::make_store_config(path, passphrase).await?;
Ok(self.store_config(config))
) -> Self {
self.store_config = BuilderStoreConfig::Sled {
path: path.as_ref().to_owned(),
passphrase: passphrase.map(ToOwned::to_owned),
};
self
}
/// Set up the store configuration for a IndexedDB store.
///
/// This is a shorthand for
/// <code>.[store_config](Self::store_config)([matrix_sdk_indexeddb]::[make_store_config](matrix_sdk_indexeddb::make_store_config)(path, passphrase).await?)</code>.
/// This is the same as
/// <code>.[store_config](Self::store_config)([matrix_sdk_indexeddb]::[make_store_config](matrix_sdk_indexeddb::make_store_config)(path, passphrase).await?)</code>,
/// except it delegates the actual store config creation to when
/// `.build().await` is called.
#[cfg(feature = "indexeddb")]
pub async fn indexeddb_store(
self,
name: &str,
passphrase: Option<&str>,
) -> Result<Self, matrix_sdk_indexeddb::OpenStoreError> {
let config = matrix_sdk_indexeddb::make_store_config(name, passphrase).await?;
Ok(self.store_config(config))
pub fn indexeddb_store(mut self, name: &str, passphrase: Option<&str>) -> Self {
self.store_config = BuilderStoreConfig::IndexedDb {
name: name.to_owned(),
passphrase: passphrase.map(ToOwned::to_owned),
};
self
}
/// Set up the store configuration.
@@ -172,7 +178,7 @@ impl ClientBuilder {
/// let client_builder = Client::builder().store_config(store_config);
/// ```
pub fn store_config(mut self, store_config: StoreConfig) -> Self {
self.store_config = store_config;
self.store_config = BuilderStoreConfig::Custom(store_config);
self
}
@@ -336,7 +342,20 @@ impl ClientBuilder {
HttpConfig::Custom(c) => c,
};
let base_client = BaseClient::with_store_config(self.store_config);
#[allow(clippy::infallible_destructuring_match)]
let store_config = match self.store_config {
#[cfg(feature = "sled")]
BuilderStoreConfig::Sled { path, passphrase } => {
matrix_sdk_sled::make_store_config(&path, passphrase.as_deref()).await?
}
#[cfg(feature = "indexeddb")]
BuilderStoreConfig::IndexedDb { name, passphrase } => {
matrix_sdk_indexeddb::make_store_config(&name, passphrase.as_deref()).await?
}
BuilderStoreConfig::Custom(config) => config,
};
let base_client = BaseClient::with_store_config(store_config);
let http_client = HttpClient::new(inner_http_client.clone(), self.request_config);
let mut authentication_issuer: Option<Url> = None;
@@ -439,6 +458,38 @@ impl Default for HttpConfig {
}
}
#[derive(Clone)]
enum BuilderStoreConfig {
#[cfg(feature = "sled")]
Sled {
path: std::path::PathBuf,
passphrase: Option<String>,
},
#[cfg(feature = "indexeddb")]
IndexedDb {
name: String,
passphrase: Option<String>,
},
Custom(StoreConfig),
}
impl fmt::Debug for BuilderStoreConfig {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
#[allow(clippy::infallible_destructuring_match)]
match self {
#[cfg(feature = "sled")]
Self::Sled { path, .. } => {
f.debug_struct("Sled").field("path", path).finish_non_exhaustive()
}
#[cfg(feature = "indexeddb")]
Self::IndexedDb { name, .. } => {
f.debug_struct("IndexedDb").field("name", name).finish_non_exhaustive()
}
Self::Custom(store_config) => f.debug_tuple("Custom").field(store_config).finish(),
}
}
}
/// Errors that can happen in [`ClientBuilder::build`].
#[derive(Debug, Error)]
pub enum ClientBuildError {

View File

@@ -263,17 +263,19 @@ where
#[instrument(target = "matrix_sdk::client", name = "login", skip_all, fields(method = "sso"))]
pub async fn send(self) -> Result<login::v3::Response> {
use std::{
collections::HashMap,
convert::Infallible,
io::{Error as IoError, ErrorKind as IoErrorKind},
ops::Range,
sync::{Arc, Mutex},
};
use http::{Method, StatusCode};
use hyper::{server::conn::AddrIncoming, service::service_fn};
use rand::{thread_rng, Rng};
use serde::Deserialize;
use tokio::{net::TcpListener, sync::oneshot};
use tokio_stream::wrappers::TcpListenerStream;
use tracing::debug;
use url::Url;
use warp::Filter;
/// The range of ports the SSO server will try to bind to randomly.
///
@@ -302,14 +304,30 @@ where
.unwrap_or("The Single Sign-On login process is complete. You can close this page now.")
.to_owned();
let route = warp::get().and(warp::query::<HashMap<String, String>>()).map(
move |p: HashMap<String, String>| {
if let Some(data_tx) = data_tx_mutex.lock().unwrap().take() {
data_tx.send(p.get("loginToken").cloned()).unwrap();
}
http::Response::builder().body(response.clone())
},
);
#[derive(Deserialize)]
struct QueryParameters {
#[serde(rename = "loginToken")]
login_token: Option<String>,
}
let handle_request = move |request: http::Request<_>| {
if request.method() != Method::HEAD && request.method() != Method::GET {
return Err(StatusCode::METHOD_NOT_ALLOWED);
}
if let Some(data_tx) = data_tx_mutex.lock().unwrap().take() {
let query_string = request.uri().query().unwrap_or("");
let query: QueryParameters = ruma::serde::urlencoded::from_str(query_string)
.map_err(|_| {
debug!("Failed to deserialize query parameters");
StatusCode::BAD_REQUEST
})?;
data_tx.send(query.login_token).unwrap();
}
Ok(http::Response::new(response.clone()))
};
let listener = {
if redirect_url.port().expect("The redirect URL doesn't include a port") == 0 {
@@ -338,12 +356,24 @@ where
}
};
let server = warp::serve(route).serve_incoming_with_graceful_shutdown(
TcpListenerStream::new(listener),
async {
let incoming = AddrIncoming::from_listener(listener).unwrap();
let server = hyper::Server::builder(incoming)
.serve(tower::make::Shared::new(service_fn(move |request| {
let handle_request = handle_request.clone();
async move {
match handle_request(request) {
Ok(res) => Ok::<_, Infallible>(res.map(hyper::Body::from)),
Err(status_code) => {
let mut res = http::Response::new(hyper::Body::default());
*res.status_mut() = status_code;
Ok(res)
}
}
}
})))
.with_graceful_shutdown(async {
signal_rx.await.ok();
},
);
});
tokio::spawn(server);

View File

@@ -58,7 +58,7 @@ use ruma::{
sync::sync_events,
uiaa::{AuthData, UserIdentifier},
},
error::{FromHttpResponseError, ServerError},
error::FromHttpResponseError,
MatrixVersion, OutgoingRequest, SendAccessToken,
},
assign, DeviceId, OwnedDeviceId, OwnedRoomId, OwnedServerName, RoomAliasId, RoomId,
@@ -532,7 +532,12 @@ impl Client {
/// context argument types are only available for a subset of event types:
///
/// * [`Room`][room::Room] is only available for room-specific events, i.e.
/// not for events like global account data events or presence events
/// not for events like global account data events or presence events.
///
/// You can provide custom context via
/// [`add_event_handler_context`](Client::add_event_handler_context) and
/// then use [`Ctx<T>`](crate::event_handler::Ctx) to extract the context
/// into the event handler.
///
/// [`EventHandlerContext`]: crate::event_handler::EventHandlerContext
///
@@ -544,6 +549,7 @@ impl Client {
/// # let homeserver = Url::parse("http://localhost:8080").unwrap();
/// use matrix_sdk::{
/// deserialized_responses::EncryptionInfo,
/// event_handler::Ctx,
/// room::Room,
/// ruma::{
/// events::{
@@ -588,6 +594,16 @@ impl Client {
/// });
/// client.remove_event_handler(handle);
///
/// // Registering custom event handler context:
/// #[derive(Debug, Clone)] // The context will be cloned for event handler.
/// struct MyContext {
/// number: usize,
/// }
/// client.add_event_handler_context(MyContext { number: 5 });
/// client.add_event_handler(|ev: SyncRoomMessageEvent, context: Ctx<MyContext>| async move {
/// // Use the context
/// });
///
/// // Custom events work exactly the same way, you just need to declare
/// // the content struct and use the EventContent derive macro on it.
/// #[derive(Clone, Debug, Deserialize, Serialize, EventContent)]
@@ -1338,13 +1354,11 @@ impl Client {
Ok(Some(res))
}
Err(error) => {
*guard = if let HttpError::Api(FromHttpResponseError::Server(
ServerError::Known(RumaApiError::ClientApi(api_error)),
)) = &error
{
Err(RefreshTokenError::ClientApi(api_error.to_owned()))
} else {
Err(RefreshTokenError::UnableToRefreshToken)
*guard = match error.as_ruma_api_error() {
Some(RumaApiError::ClientApi(api_error)) => {
Err(RefreshTokenError::ClientApi(api_error.to_owned()))
}
_ => Err(RefreshTokenError::UnableToRefreshToken),
};
Err(error)
@@ -1688,9 +1702,9 @@ impl Client {
// If this is an `M_UNKNOWN_TOKEN` error and refresh token handling is active,
// try to refresh the token and retry the request.
if self.inner.handle_refresh_tokens {
if let Err(HttpError::Api(FromHttpResponseError::Server(ServerError::Known(
RumaApiError::ClientApi(error),
)))) = &res
// FIXME: Use if-let chain once available
if let Err(Some(RumaApiError::ClientApi(error))) =
res.as_ref().map_err(HttpError::as_ruma_api_error)
{
if matches!(error.kind, ErrorKind::UnknownToken { .. }) {
let refresh_res = self.refresh_access_token().await;
@@ -1733,9 +1747,9 @@ impl Client {
// If this is an `M_UNKNOWN_TOKEN` error and refresh token handling is active,
// try to refresh the token and retry the request.
if self.inner.handle_refresh_tokens {
if let Err(HttpError::Api(FromHttpResponseError::Server(ServerError::Known(
RumaApiError::ClientApi(error),
)))) = &res
// FIXME: Use if-let chain once available
if let Err(Some(RumaApiError::ClientApi(error))) =
res.as_ref().map_err(HttpError::as_ruma_api_error)
{
if matches!(error.kind, ErrorKind::UnknownToken { .. }) {
let refresh_res = self.refresh_access_token().await;

View File

@@ -48,7 +48,6 @@ impl<'a> fmt::Debug for SyncSettings<'a> {
opt_field!(filter);
opt_field!(timeout);
opt_field!(token);
s.field("full_state", &self.full_state).finish()
}

View File

@@ -35,7 +35,10 @@ mod qrcode;
mod requests;
mod sas;
pub use matrix_sdk_base::crypto::{format_emojis, AcceptSettings, CancelInfo, Emoji};
pub use matrix_sdk_base::crypto::{
format_emojis, AcceptSettings, AcceptedProtocols, CancelInfo, Emoji, EmojiShortAuthString,
SasState,
};
#[cfg(feature = "qrcode")]
pub use matrix_sdk_base::crypto::{
matrix_sdk_qrcode::{DecodingError, EncodingError, QrVerificationData},

View File

@@ -12,7 +12,10 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use matrix_sdk_base::crypto::{AcceptSettings, CancelInfo, Emoji, ReadOnlyDevice, Sas as BaseSas};
use futures_core::Stream;
use matrix_sdk_base::crypto::{
AcceptSettings, CancelInfo, Emoji, ReadOnlyDevice, Sas as BaseSas, SasState,
};
use ruma::{events::key::verification::cancel::CancelCode, UserId};
use crate::{error::Result, Client};
@@ -217,4 +220,110 @@ impl SasVerification {
pub fn other_user_id(&self) -> &UserId {
self.inner.other_user_id()
}
    /// Listen for changes in the SAS verification process.
    ///
    /// The changes are presented as a stream of [`SasState`] values.
    ///
    /// This method can be used to react to changes in the state of the
    /// verification process, or rather the method can be used to handle
    /// each step of the verification process.
    ///
    /// # Flowchart
    ///
    /// The flow of the verification process is pictured below. Please note
    /// that the process can be cancelled at each step of the process.
    /// Either side can cancel the process.
    ///
    /// ```text
    ///                ┌───────┐
    ///                │Started│
    ///                └───┬───┘
    ///                    │
    ///               ┌────⌄───┐
    ///               │Accepted│
    ///               └────┬───┘
    ///                    │
    ///            ┌───────⌄──────┐
    ///            │Keys Exchanged│
    ///            └───────┬──────┘
    ///                    │
    ///            ________⌄________
    ///           ╲                 ╲       ┌─────────┐
    ///            ╲    Does the    ╲______│Cancelled│
    ///            ╱  short auth    ╱  no  └─────────┘
    ///           ╱  string match  ╱
    ///           ╲_______________╱
    ///                    │yes
    ///                    │
    ///               ┌────⌄────┐
    ///               │Confirmed│
    ///               └────┬────┘
    ///                    │
    ///                ┌───⌄───┐
    ///                │  Done │
    ///                └───────┘
    /// ```
    /// # Example
    ///
    /// ```no_run
    /// use futures::stream::{Stream, StreamExt};
    /// use matrix_sdk::encryption::verification::{SasState, SasVerification};
    ///
    /// # futures::executor::block_on(async {
    /// # let sas: SasVerification = unimplemented!();
    /// # let user_confirmed = false;
    ///
    /// let mut stream = sas.changes();
    ///
    /// while let Some(state) = stream.next().await {
    ///     match state {
    ///         SasState::KeysExchanged { emojis, decimals: _ } => {
    ///             let emojis =
    ///                 emojis.expect("We only support emoji verification");
    ///             println!("Do these emojis match {emojis:#?}");
    ///
    ///             // Ask the user to confirm or cancel here.
    ///             if user_confirmed {
    ///                 sas.confirm().await?;
    ///             } else {
    ///                 sas.cancel().await?;
    ///             }
    ///         }
    ///         SasState::Done { .. } => {
    ///             let device = sas.other_device();
    ///
    ///             println!(
    ///                 "Successfully verified device {} {} {:?}",
    ///                 device.user_id(),
    ///                 device.device_id(),
    ///                 device.local_trust_state()
    ///             );
    ///
    ///             break;
    ///         }
    ///         SasState::Cancelled(cancel_info) => {
    ///             println!(
    ///                 "The verification has been cancelled, reason: {}",
    ///                 cancel_info.reason()
    ///             );
    ///             break;
    ///         }
    ///         SasState::Started { .. }
    ///         | SasState::Accepted { .. }
    ///         | SasState::Confirmed => (),
    ///     }
    /// }
    /// # anyhow::Ok(()) });
    /// ```
    pub fn changes(&self) -> impl Stream<Item = SasState> {
        self.inner.changes()
    }
    /// Get the current state the verification process is in.
    ///
    /// This returns a snapshot of the [`SasState`] at the time of the call,
    /// e.g. to initialize UI before subscribing to updates.
    ///
    /// To listen to changes to the [`SasState`] use the
    /// [`SasVerification::changes`] method.
    pub fn state(&self) -> SasState {
        self.inner.state()
    }
}

View File

@@ -27,7 +27,7 @@ use matrix_sdk_base::{Error as SdkBaseError, StoreError};
use reqwest::Error as ReqwestError;
use ruma::{
api::{
client::uiaa::{UiaaInfo, UiaaResponse as UiaaError},
client::uiaa::{UiaaInfo, UiaaResponse},
error::{FromHttpResponseError, IntoHttpError, ServerError},
},
events::tag::InvalidUserTagName,
@@ -51,6 +51,15 @@ pub enum RumaApiError {
#[error(transparent)]
ClientApi(ruma::api::client::Error),
/// A user-interactive authentication API error.
///
/// When registering or authenticating, the Matrix server can send a
/// `UiaaResponse` as the error type, this is a User-Interactive
/// Authentication API response. This represents an error with
/// information about how to authenticate the user.
#[error(transparent)]
Uiaa(UiaaResponse),
/// Another API response error.
#[error(transparent)]
Other(ruma::api::error::MatrixError),
@@ -81,15 +90,6 @@ pub enum HttpError {
#[error(transparent)]
IntoHttp(#[from] IntoHttpError),
/// An error occurred while authenticating.
///
/// When registering or authenticating the Matrix server can send a
/// `UiaaResponse` as the error type, this is a User-Interactive
/// Authentication API response. This represents an error with
/// information about how to authenticate the user.
#[error(transparent)]
UiaaError(#[from] FromHttpResponseError<UiaaError>),
/// The server returned a status code that should be retried.
#[error("Server returned an error {0}")]
Server(StatusCode),
@@ -103,6 +103,18 @@ pub enum HttpError {
RefreshToken(#[from] RefreshTokenError),
}
impl HttpError {
    /// Returns the [`RumaApiError`] contained in this error, if any.
    ///
    /// This is the case when `self` is `Api(Server(Known(e)))`; for any
    /// other shape of error, `None` is returned.
    pub fn as_ruma_api_error(&self) -> Option<&RumaApiError> {
        if let Self::Api(FromHttpResponseError::Server(ServerError::Known(e))) = self {
            Some(e)
        } else {
            None
        }
    }
}
/// Internal representation of errors.
#[derive(Error, Debug)]
#[non_exhaustive]
@@ -188,6 +200,18 @@ pub enum Error {
UnknownError(Box<dyn std::error::Error + Send + Sync>),
}
impl Error {
/// If `self` is `Http(Api(Server(Known(e))))`, returns `Some(e)`.
///
/// Otherwise, returns `None`.
pub fn as_ruma_api_error(&self) -> Option<&RumaApiError> {
match self {
Error::Http(e) => e.as_ruma_api_error(),
_ => None,
}
}
}
/// Error for the room key importing functionality.
#[cfg(feature = "e2e-encryption")]
#[derive(Error, Debug)]
@@ -229,13 +253,9 @@ impl HttpError {
    /// This method is a convenience method to get to the info the server
/// returned on the first, failed request.
pub fn uiaa_response(&self) -> Option<&UiaaInfo> {
if let HttpError::UiaaError(FromHttpResponseError::Server(ServerError::Known(
UiaaError::AuthResponse(i),
))) = self
{
Some(i)
} else {
None
match self.as_ruma_api_error() {
Some(RumaApiError::Uiaa(UiaaResponse::AuthResponse(i))) => Some(i),
_ => None,
}
}
}
@@ -253,13 +273,9 @@ impl Error {
    /// This method is a convenience method to get to the info the server
/// returned on the first, failed request.
pub fn uiaa_response(&self) -> Option<&UiaaInfo> {
if let Error::Http(HttpError::UiaaError(FromHttpResponseError::Server(
ServerError::Known(UiaaError::AuthResponse(i)),
))) = self
{
Some(i)
} else {
None
match self.as_ruma_api_error() {
Some(RumaApiError::Uiaa(UiaaResponse::AuthResponse(i))) => Some(i),
_ => None,
}
}
}
@@ -270,6 +286,12 @@ impl From<FromHttpResponseError<ruma::api::client::Error>> for HttpError {
}
}
impl From<FromHttpResponseError<UiaaResponse>> for HttpError {
    /// Wraps a User-Interactive Authentication API response error as
    /// [`RumaApiError::Uiaa`] inside `HttpError::Api`.
    fn from(err: FromHttpResponseError<UiaaResponse>) -> Self {
        let mapped = err.map(|server_error| server_error.map(RumaApiError::Uiaa));
        Self::Api(mapped)
    }
}
impl From<FromHttpResponseError<ruma::api::error::MatrixError>> for HttpError {
fn from(err: FromHttpResponseError<ruma::api::error::MatrixError>) -> Self {
Self::Api(err.map(|e| e.map(RumaApiError::Other)))
@@ -297,7 +319,7 @@ impl From<SdkBaseError> for Error {
_ => Self::UnknownError(anyhow::anyhow!(e).into()),
#[cfg(all(not(feature = "eyre"), not(feature = "anyhow")))]
_ => {
let e: Box<dyn std::error::Error + Sync + Send> = format!("{:?}", e).into();
let e: Box<dyn std::error::Error + Sync + Send> = format!("{e:?}").into();
Self::UnknownError(e)
}
}

View File

@@ -105,7 +105,10 @@ impl HttpClient {
HttpClient { inner, request_config }
}
#[tracing::instrument(skip(self, request), fields(request_type = type_name::<Request>()))]
#[tracing::instrument(
skip(self, request, access_token),
fields(request_type = type_name::<Request>()),
)]
pub async fn send<Request>(
&self,
request: Request,

View File

@@ -37,6 +37,8 @@ use ruma::{
};
use serde::de::DeserializeOwned;
#[cfg(feature = "experimental-timeline")]
use super::timeline::Timeline;
use crate::{
event_handler::{EventHandler, EventHandlerHandle, EventHandlerResult, SyncEvent},
media::{MediaFormat, MediaRequest},
@@ -251,6 +253,16 @@ impl Common {
self.client.add_room_event_handler(self.room_id(), handler)
}
/// Get a [`Timeline`] for this room.
///
/// This offers a higher-level API than event handlers, in treating things
/// like edits and reactions as updates of existing items rather than new
/// independent events.
#[cfg(feature = "experimental-timeline")]
pub fn timeline(&self) -> Timeline {
Timeline::new(self)
}
/// Fetch the event with the given `EventId` in this room.
pub async fn event(&self, event_id: &EventId) -> Result<TimelineEvent> {
let request = get_room_event::v3::Request::new(self.room_id(), event_id);

View File

@@ -9,6 +9,8 @@ mod invited;
mod joined;
mod left;
mod member;
#[cfg(feature = "experimental-timeline")]
pub mod timeline;
pub use self::{
common::{Common, Messages, MessagesOptions},

View File

@@ -0,0 +1,488 @@
// Copyright 2022 The Matrix.org Foundation C.I.C.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::sync::Arc;
use indexmap::map::Entry;
use matrix_sdk_base::deserialized_responses::EncryptionInfo;
use ruma::{
events::{
reaction::ReactionEventContent,
room::{
message::{Relation, Replacement, RoomMessageEventContent},
redaction::{
OriginalSyncRoomRedactionEvent, RoomRedactionEventContent, SyncRoomRedactionEvent,
},
},
AnyMessageLikeEventContent, AnyStateEventContent, AnySyncMessageLikeEvent,
AnySyncTimelineEvent, Relations,
},
serde::Raw,
uint, EventId, MilliSecondsSinceUnixEpoch, OwnedEventId, OwnedTransactionId, OwnedUserId,
UserId,
};
use tracing::{debug, error, info, warn};
use super::{
event_item::{BundledReactions, TimelineDetails},
find_event, EventTimelineItem, Message, TimelineInner, TimelineItem, TimelineItemContent,
TimelineKey,
};
impl TimelineInner {
    /// Handle an event received via sync, appending it to the end of the
    /// timeline.
    pub(super) fn handle_live_event(
        &self,
        raw: Raw<AnySyncTimelineEvent>,
        encryption_info: Option<EncryptionInfo>,
        own_user_id: &UserId,
    ) {
        self.handle_remote_event(raw, encryption_info, own_user_id, TimelineItemPosition::End)
    }
    /// Handle a local echo: an event sent by us that has not been confirmed
    /// by the server yet, identified by its transaction ID.
    pub(super) fn handle_local_event(
        &self,
        txn_id: OwnedTransactionId,
        content: AnyMessageLikeEventContent,
        own_user_id: &UserId,
    ) {
        let meta = TimelineEventMetadata {
            sender: own_user_id.to_owned(),
            // No server timestamp or raw event yet, the event is local-only.
            origin_server_ts: None,
            raw_event: None,
            is_own_event: true,
            relations: None,
            // FIXME: Should we supply something here for encrypted rooms?
            encryption_info: None,
        };
        let flow = Flow::Local { txn_id };
        let kind = TimelineEventKind::Message { content };
        TimelineEventHandler::new(meta, flow, self).handle_event(kind)
    }
    /// Handle an event obtained through back-pagination, prepending it to
    /// the start of the timeline.
    pub(super) fn handle_back_paginated_event(
        &self,
        raw: Raw<AnySyncTimelineEvent>,
        encryption_info: Option<EncryptionInfo>,
        own_user_id: &UserId,
    ) {
        self.handle_remote_event(raw, encryption_info, own_user_id, TimelineItemPosition::Start)
    }
    /// Shared path for remote (non-local-echo) events: deserialize the raw
    /// event, build the metadata and flow, and run the event handler.
    fn handle_remote_event(
        &self,
        raw: Raw<AnySyncTimelineEvent>,
        encryption_info: Option<EncryptionInfo>,
        own_user_id: &UserId,
        position: TimelineItemPosition,
    ) {
        let event = match raw.deserialize() {
            Ok(ev) => ev,
            Err(_e) => {
                // Deserialization failures are currently dropped silently.
                // TODO: Add some sort of error timeline item
                return;
            }
        };
        let sender = event.sender().to_owned();
        let is_own_event = sender == own_user_id;
        let meta = TimelineEventMetadata {
            raw_event: Some(raw),
            sender,
            origin_server_ts: Some(event.origin_server_ts()),
            is_own_event,
            relations: event.relations().cloned(),
            encryption_info,
        };
        let flow = Flow::Remote {
            event_id: event.event_id().to_owned(),
            // The transaction ID is only present for events sent by us,
            // echoed back by the server.
            txn_id: event.transaction_id().map(ToOwned::to_owned),
            position,
        };
        TimelineEventHandler::new(meta, flow, self).handle_event(event.into())
    }
}
/// How an event entered the timeline: as a local echo identified by a
/// transaction ID, or as a remote event identified by an event ID.
enum Flow {
    /// A local echo of an event that was sent but not yet confirmed.
    Local {
        txn_id: OwnedTransactionId,
    },
    /// An event received from the server.
    Remote {
        event_id: OwnedEventId,
        /// Set when the remote event is the server echo of an event we sent
        /// (see `handle_remote_event`).
        txn_id: Option<OwnedTransactionId>,
        /// Whether the event is prepended (back-pagination) or appended
        /// (live sync) to the timeline.
        position: TimelineItemPosition,
    },
}
impl Flow {
fn to_key(&self) -> TimelineKey {
match self {
Self::Remote { event_id, .. } => TimelineKey::EventId(event_id.to_owned()),
Self::Local { txn_id } => TimelineKey::TransactionId(txn_id.to_owned()),
}
}
}
/// Event details that are needed across the different stages of event
/// handling, extracted up front in `TimelineInner`.
struct TimelineEventMetadata {
    // `None` for local echoes, which have no raw server representation yet.
    raw_event: Option<Raw<AnySyncTimelineEvent>>,
    sender: OwnedUserId,
    // `None` for local echoes, which have no server timestamp yet.
    origin_server_ts: Option<MilliSecondsSinceUnixEpoch>,
    // Whether the event was sent by our own user.
    is_own_event: bool,
    // Bundled relations (edits, reactions, …) attached to the event.
    relations: Option<Relations>,
    encryption_info: Option<EncryptionInfo>,
}
impl From<AnySyncTimelineEvent> for TimelineEventKind {
    /// Classify a timeline event into the internal `TimelineEventKind`:
    /// redactions are split out first, then message-like and state events
    /// are sorted into "has content" vs. "was redacted" buckets.
    fn from(event: AnySyncTimelineEvent) -> Self {
        match event {
            // Original (non-redacted) redaction events carry the target
            // event ID in `redacts`.
            AnySyncTimelineEvent::MessageLike(AnySyncMessageLikeEvent::RoomRedaction(
                SyncRoomRedactionEvent::Original(OriginalSyncRoomRedactionEvent {
                    redacts,
                    content,
                    ..
                }),
            )) => Self::Redaction { redacts, content },
            // `original_content()` is `None` when the event itself was
            // redacted.
            AnySyncTimelineEvent::MessageLike(ev) => match ev.original_content() {
                Some(content) => Self::Message { content },
                None => Self::RedactedMessage,
            },
            AnySyncTimelineEvent::State(ev) => match ev.original_content() {
                Some(_content) => Self::State { _content },
                None => Self::RedactedState,
            },
        }
    }
}
/// The category of an incoming event, as relevant for timeline handling.
#[derive(Clone)]
enum TimelineEventKind {
    /// A message-like event with its (original) content.
    Message { content: AnyMessageLikeEventContent },
    /// A message-like event whose content was removed by a redaction.
    RedactedMessage,
    /// A redaction event targeting the event with ID `redacts`.
    Redaction { redacts: OwnedEventId, content: RoomRedactionEventContent },
    // FIXME: Split further for state keys of different type
    State { _content: AnyStateEventContent },
    RedactedState, // AnyRedactedStateEventContent
}
/// Where a new remote timeline item is inserted: at the start
/// (back-pagination) or at the end (live sync).
enum TimelineItemPosition {
    Start,
    End,
}
// Bundles together a few things that are needed throughout the different stages
// of handling an event (figuring out whether it should update an existing
// timeline item, transforming that item or creating a new one, updating the
// reactive Vec).
struct TimelineEventHandler<'a> {
    // Sender, timestamp, relations, … of the event being handled.
    meta: TimelineEventMetadata,
    // Whether the event is a local echo or a remote event.
    flow: Flow,
    timeline: &'a TimelineInner,
    // Set once `add` has pushed a new item, so `handle_event` can tell
    // whether the event produced a timeline item.
    event_added: bool,
}
impl<'a> TimelineEventHandler<'a> {
    fn new(meta: TimelineEventMetadata, flow: Flow, timeline: &'a TimelineInner) -> Self {
        Self { meta, flow, timeline, event_added: false }
    }
    /// Dispatch on the event kind: reactions and room messages are handled
    /// specially, redacted messages become redacted items, redactions update
    /// their target. Other kinds are currently ignored (see TODOs).
    fn handle_event(mut self, event_kind: TimelineEventKind) {
        match event_kind {
            TimelineEventKind::Message { content } => match content {
                AnyMessageLikeEventContent::Reaction(c) => self.handle_reaction(c),
                AnyMessageLikeEventContent::RoomMessage(c) => self.handle_room_message(c),
                // TODO
                _ => {}
            },
            TimelineEventKind::RedactedMessage => {
                self.add(NewEventTimelineItem::redacted_message());
            }
            TimelineEventKind::Redaction { redacts, content } => {
                self.handle_redaction(redacts, content)
            }
            // TODO: State events
            _ => {}
        }
        if !self.event_added {
            // TODO: Add event as raw
        }
    }
    /// Room messages: edits (`Relation::Replacement`) update the edited
    /// item, everything else becomes a new timeline item.
    fn handle_room_message(&mut self, content: RoomMessageEventContent) {
        match content.relates_to {
            Some(Relation::Replacement(re)) => {
                self.handle_room_message_edit(re);
            }
            _ => {
                self.add(NewEventTimelineItem::message(content, self.meta.relations.clone()));
            }
        }
    }
    /// Apply an edit to the item it replaces, if that item exists, was sent
    /// by the same user, and is still an editable (non-redacted) message.
    fn handle_room_message_edit(&mut self, replacement: Replacement) {
        let event_id = &replacement.event_id;
        self.maybe_update_timeline_item(event_id, "edit", |item| {
            // Only the original sender may edit their own event.
            if self.meta.sender != item.sender() {
                info!(
                    %event_id, original_sender = %item.sender(), edit_sender = %self.meta.sender,
                    "Event tries to edit another user's timeline item, discarding"
                );
                return None;
            }
            let msg = match &item.content {
                TimelineItemContent::Message(msg) => msg,
                TimelineItemContent::RedactedMessage => {
                    info!(
                        %event_id,
                        "Event tries to edit a non-editable timeline item, discarding"
                    );
                    return None;
                }
            };
            // Keep the original reply target; replace the message content
            // and mark the item as edited.
            let content = TimelineItemContent::Message(Message {
                msgtype: replacement.new_content.msgtype,
                in_reply_to: msg.in_reply_to.clone(),
                edited: true,
            });
            Some(item.with_content(content))
        });
    }
    // Redacted reaction events are no-ops so don't need to be handled
    fn handle_reaction(&mut self, c: ReactionEventContent) {
        let event_id: &EventId = &c.relates_to.event_id;
        // This lock should never be contended, same as the timeline item lock.
        // If this is ever run in parallel for some reason though, make sure the
        // reaction lock is held for the entire time of the timeline items being
        // locked so these two things can't get out of sync.
        let mut lock = self.timeline.reaction_map.lock().unwrap();
        let did_update = self.maybe_update_timeline_item(event_id, "reaction", |item| {
            // Handling of reactions on redacted events is an open question.
            // For now, ignore reactions on redacted events like Element does.
            if let TimelineItemContent::RedactedMessage = item.content {
                debug!(%event_id, "Ignoring reaction on redacted event");
                None
            } else {
                // Bump the per-key reaction count and record the sender if
                // the sender list is available.
                let mut reactions = item.reactions.clone();
                let reaction_details =
                    reactions.bundled.entry(c.relates_to.key.clone()).or_default();
                reaction_details.count += uint!(1);
                if let TimelineDetails::Ready(senders) = &mut reaction_details.senders {
                    senders.push(self.meta.sender.clone());
                }
                Some(item.with_reactions(reactions))
            }
        });
        if did_update {
            // Remember the reaction so a later redaction of it can undo the
            // count / sender bookkeeping above.
            lock.insert(self.flow.to_key(), (self.meta.sender.clone(), c.relates_to));
        }
    }
    // Redacted redactions are no-ops (unfortunately)
    fn handle_redaction(&mut self, redacts: OwnedEventId, _content: RoomRedactionEventContent) {
        let mut did_update = false;
        // Don't release this lock until after update_timeline_item.
        // See first comment in handle_reaction for why.
        let mut lock = self.timeline.reaction_map.lock().unwrap();
        if let Some((sender, rel)) = lock.remove(&TimelineKey::EventId(redacts.clone())) {
            // The redacted event was a reaction: undo its effect on the
            // reacted-to item's bundled reactions.
            did_update = self.maybe_update_timeline_item(&rel.event_id, "redaction", |item| {
                let mut reactions = item.reactions.clone();
                let mut details_entry = match reactions.bundled.entry(rel.key) {
                    Entry::Occupied(o) => o,
                    Entry::Vacant(_) => return None,
                };
                let details = details_entry.get_mut();
                details.count -= uint!(1);
                // Last reaction of this key removed → drop the whole entry.
                if details.count == uint!(0) {
                    details_entry.remove();
                    return Some(item.with_reactions(reactions));
                }
                let senders = match &mut details.senders {
                    TimelineDetails::Ready(senders) => senders,
                    _ => {
                        // FIXME: We probably want to support this somehow in
                        // the future, but right now it's not possible.
                        warn!(
                            "inconsistent state: shouldn't have a reaction_map entry for a \
                             timeline item with incomplete reactions"
                        );
                        return None;
                    }
                };
                if let Some(idx) = senders.iter().position(|s| *s == sender) {
                    senders.remove(idx);
                } else {
                    error!(
                        "inconsistent state: sender from reaction_map not in reaction sender list \
                         of timeline item"
                    );
                    return None;
                }
                if u64::from(details.count) != senders.len() as u64 {
                    error!("inconsistent state: reaction count differs from number of senders");
                    // Can't make things worse by updating the item, so no early
                    // return here.
                }
                Some(item.with_reactions(reactions))
            });
            if !did_update {
                warn!("reaction_map out of sync with timeline items");
            }
        }
        // Even if the event being redacted is a reaction (found in
        // `reaction_map`), it can still be present in the timeline items
        // directly with the raw event timeline feature (not yet implemented).
        did_update |= self.update_timeline_item(&redacts, "redaction", |item| item.to_redacted());
        if !did_update {
            // We will want to know this when debugging redaction issues.
            debug!(redaction_key = ?self.flow.to_key(), %redacts, "redaction affected no event");
        }
    }
    /// Push a new event timeline item into the reactive item list, placed
    /// according to `self.flow`.
    fn add(&mut self, item: NewEventTimelineItem) {
        self.event_added = true;
        let NewEventTimelineItem { content, reactions } = item;
        let item = EventTimelineItem {
            key: self.flow.to_key(),
            // NOTE(review): for remote events the event ID lives in `key`;
            // this field stays `None` here — confirm it is filled elsewhere.
            event_id: None,
            sender: self.meta.sender.to_owned(),
            content,
            reactions,
            origin_server_ts: self.meta.origin_server_ts,
            is_own: self.meta.is_own_event,
            encryption_info: self.meta.encryption_info.clone(),
            raw: self.meta.raw_event.clone(),
        };
        let item = Arc::new(TimelineItem::Event(item));
        let mut lock = self.timeline.items.lock_mut();
        match &self.flow {
            // Local echoes and live events without a transaction ID are
            // appended at the end.
            Flow::Local { .. }
            | Flow::Remote { position: TimelineItemPosition::End, txn_id: None, .. } => {
                lock.push_cloned(item);
            }
            // Back-paginated events are prepended at the start.
            Flow::Remote { position: TimelineItemPosition::Start, txn_id: None, .. } => {
                lock.insert_cloned(0, item);
            }
            // Server echo of an event we sent: replace the matching local
            // echo in place, or insert by position if it's gone.
            Flow::Remote { txn_id: Some(txn_id), event_id, position } => {
                if let Some((idx, _old_item)) = find_event(&lock, txn_id) {
                    // TODO: Check whether anything is different about the old and new item?
                    lock.set_cloned(idx, item);
                } else {
                    debug!(
                        %txn_id, %event_id,
                        "Received event with transaction ID, but didn't find matching timeline item"
                    );
                    match position {
                        TimelineItemPosition::Start => lock.insert_cloned(0, item),
                        TimelineItemPosition::End => lock.push_cloned(item),
                    }
                }
            }
        }
    }
    /// Returns whether an update happened
    fn maybe_update_timeline_item(
        &self,
        event_id: &EventId,
        action: &str,
        update: impl FnOnce(&EventTimelineItem) -> Option<EventTimelineItem>,
    ) -> bool {
        // No point in trying to update items with relations when back-
        // paginating, the event the relation applies to can't be processed yet.
        if matches!(self.flow, Flow::Remote { position: TimelineItemPosition::Start, .. }) {
            return false;
        }
        let mut lock = self.timeline.items.lock_mut();
        if let Some((idx, item)) = find_event(&lock, event_id) {
            // `update` may decline (return `None`), in which case the item
            // is left unchanged and `false` is returned.
            if let Some(new_item) = update(item) {
                lock.set_cloned(idx, Arc::new(TimelineItem::Event(new_item)));
                return true;
            }
        } else {
            debug!(%event_id, "Timeline item not found, discarding {action}");
        }
        false
    }
    /// Returns whether an update happened
    fn update_timeline_item(
        &self,
        event_id: &EventId,
        action: &str,
        update: impl FnOnce(&EventTimelineItem) -> EventTimelineItem,
    ) -> bool {
        self.maybe_update_timeline_item(event_id, action, move |item| Some(update(item)))
    }
}
/// Content and bundled reactions for a timeline item about to be created;
/// the remaining `EventTimelineItem` fields are filled in by
/// `TimelineEventHandler::add`.
struct NewEventTimelineItem {
    content: TimelineItemContent,
    reactions: BundledReactions,
}
impl NewEventTimelineItem {
    // These constructors are deliberately inherent methods rather than `From`
    // implementations so that they cannot be called from outside this module.
    /// Build the item content for a room message, deriving the edited flag,
    /// reply target and bundled reactions from the message's relations.
    pub(crate) fn message(c: RoomMessageEventContent, relations: Option<Relations>) -> Self {
        let edited = matches!(&relations, Some(r) if r.replace.is_some());
        let in_reply_to = match c.relates_to {
            Some(Relation::Reply { in_reply_to }) => Some(in_reply_to.event_id),
            _ => None,
        };
        let content =
            TimelineItemContent::Message(Message { msgtype: c.msgtype, in_reply_to, edited });
        let reactions = match relations.and_then(|r| r.annotation) {
            Some(annotations) => BundledReactions::from(annotations),
            None => BundledReactions::default(),
        };
        Self { content, reactions }
    }
    /// Build the item content for a message whose content was redacted.
    pub(crate) fn redacted_message() -> Self {
        Self {
            content: TimelineItemContent::RedactedMessage,
            reactions: BundledReactions::default(),
        }
    }
}

Some files were not shown because too many files have changed in this diff Show More