From f91216dd3ce5f842c1c441d0bae5a852e689bccf Mon Sep 17 00:00:00 2001 From: Jonas Zohren Date: Tue, 14 Dec 2021 11:16:02 +0100 Subject: [PATCH 01/65] CI: Optionally use sccache for compilation This moves compiler caching for incremental builds away from GitLab caching the whole target/ folder to caching each code unit in S3. This alleviates the need to zip and unzip and just caches on the fly. This feature is optional and gated behind the SCCACHE_BIN_URL environment variable --- .gitlab-ci.yml | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index a8d43842..664b5ea3 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -26,16 +26,19 @@ variables: cache: paths: - cargohome - - target/ - key: "build_cache--$TARGET--$CI_COMMIT_BRANCH--release" + key: "build_cache--$TARGET--$CI_COMMIT_BRANCH" variables: CARGO_PROFILE_RELEASE_LTO: "true" CARGO_PROFILE_RELEASE_CODEGEN_UNITS: "1" + CARGO_INCREMENTAL: "false" # https://matklad.github.io/2021/09/04/fast-rust-builds.html#ci-workflow + CARGO_HOME: $CI_PROJECT_DIR/cargohome before_script: - 'echo "Building for target $TARGET"' - - 'mkdir -p cargohome && CARGOHOME="cargohome"' + - "mkdir -p $CARGO_HOME" - "rustc --version && cargo --version && rustup show" # Print version info for debugging - "rustup target add $TARGET" + # If provided, bring in caching through sccache, which uses an external S3 endpoint to store compilation results: + - if [ -n "${SCCACHE_BIN_URL}" ]; then curl $SCCACHE_BIN_URL --output /sccache && chmod +x /sccache && export RUSTC_WRAPPER=/sccache; fi script: - time cargo build --target $TARGET --release - 'cp "target/$TARGET/release/conduit" "conduit-$TARGET"' @@ -216,20 +219,20 @@ test:cargo: image: "rust:latest" tags: ["docker"] variables: - CARGO_HOME: "cargohome" + CARGO_HOME: "$CI_PROJECT_DIR/cargohome" + CARGO_INCREMENTAL: "false" # https://matklad.github.io/2021/09/04/fast-rust-builds.html#ci-workflow cache: paths: - - target - cargohome - key: test_cache + key: "test_cache--$CI_COMMIT_BRANCH" interruptible: true before_script: - - mkdir -p $CARGO_HOME && echo "using $CARGO_HOME to cache cargo deps" + - mkdir -p $CARGO_HOME - apt-get update -yqq - apt-get install -yqq --no-install-recommends build-essential libssl-dev pkg-config wget - rustup component add clippy rustfmt - - wget "https://faulty-storage.de/gitlab-report" - - chmod +x ./gitlab-report + # If provided, bring in caching through sccache, which uses an external S3 endpoint to store compilation results: + - if [ -n "${SCCACHE_BIN_URL}" ]; then curl $SCCACHE_BIN_URL --output /sccache && chmod +x /sccache && export RUSTC_WRAPPER=/sccache; fi script: - rustc --version && cargo --version # Print version info for debugging - cargo fmt --all -- --check From adb518fa0df35ba85c2ff1c96a539dda085f8991 Mon Sep 17 00:00:00 2001 From: Jonas Zohren Date: Tue, 14 Dec 2021 11:16:40 +0100 Subject: [PATCH 02/65] CI: Use curl instead of wget The Rust docker image already comes with curl, so there is no need to install wget.
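For readers who want to try the optional sccache gating from PATCH 01 above outside of GitLab CI, a minimal shell sketch follows. It only mirrors the gating logic visible in the diff; SCCACHE_BUCKET and SCCACHE_ENDPOINT are standard sccache settings for an S3-compatible backend and are assumed to be configured separately, and the local paths are purely illustrative.

```sh
#!/bin/sh
# Minimal sketch of the optional sccache wiring, assuming SCCACHE_BIN_URL
# points at a prebuilt sccache binary and that the usual sccache S3 settings
# (SCCACHE_BUCKET, SCCACHE_ENDPOINT, credentials) are already exported.
if [ -n "${SCCACHE_BIN_URL}" ]; then
    curl "${SCCACHE_BIN_URL}" --output ./sccache
    chmod +x ./sccache
    export RUSTC_WRAPPER="$(pwd)/sccache"
fi

# Builds go through the wrapper only when it was set above.
cargo build --release

# sccache can report cache hits and misses after the build.
if [ -n "${RUSTC_WRAPPER}" ]; then
    "${RUSTC_WRAPPER}" --show-stats
fi
```

Disabling incremental compilation (the `CARGO_INCREMENTAL: "false"` setting in the CI config above) pairs well with this, since sccache caches whole crate compilations rather than incremental artifacts.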
--- .gitlab-ci.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 664b5ea3..1dedd8ff 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -229,8 +229,9 @@ test:cargo: before_script: - mkdir -p $CARGO_HOME - apt-get update -yqq - - apt-get install -yqq --no-install-recommends build-essential libssl-dev pkg-config wget + - apt-get install -yqq --no-install-recommends build-essential libssl-dev pkg-config - rustup component add clippy rustfmt + - curl "https://faulty-storage.de/gitlab-report" --output ./gitlab-report && chmod +x ./gitlab-report # If provided, bring in caching through sccache, which uses an external S3 endpoint to store compilation results: - if [ -n "${SCCACHE_BIN_URL}" ]; then curl $SCCACHE_BIN_URL --output /sccache && chmod +x /sccache && export RUSTC_WRAPPER=/sccache; fi script: From 3d25d46dc5b14c506692ea8a82151b6e4f39fafd Mon Sep 17 00:00:00 2001 From: Moritz Bitsch Date: Wed, 20 Oct 2021 06:20:34 +0200 Subject: [PATCH 03/65] Use simple BTreeMap to store uiaa requests some uiaa requests contain plaintext passwords which should never be persisted to disk. Currently there is no cleanup implemented (you have to restart conduit) --- src/database.rs | 3 +-- src/database/uiaa.rs | 16 +++++++++------- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/src/database.rs b/src/database.rs index 84ca68dc..83b0fd5e 100644 --- a/src/database.rs +++ b/src/database.rs @@ -250,8 +250,7 @@ impl Database { }, uiaa: uiaa::Uiaa { userdevicesessionid_uiaainfo: builder.open_tree("userdevicesessionid_uiaainfo")?, - userdevicesessionid_uiaarequest: builder - .open_tree("userdevicesessionid_uiaarequest")?, + userdevicesessionid_uiaarequest: RwLock::new(BTreeMap::new()), }, rooms: rooms::Rooms { edus: rooms::RoomEdus { diff --git a/src/database/uiaa.rs b/src/database/uiaa.rs index 1c0fb566..2ecca93d 100644 --- a/src/database/uiaa.rs +++ b/src/database/uiaa.rs @@ -1,4 +1,6 @@ use std::sync::Arc; +use std::sync::RwLock; +use std::collections::BTreeMap; use crate::{client_server::SESSION_ID_LENGTH, utils, Error, Result}; use ruma::{ @@ -18,7 +20,7 @@ use super::abstraction::Tree; pub struct Uiaa { pub(super) userdevicesessionid_uiaainfo: Arc, // User-interactive authentication - pub(super) userdevicesessionid_uiaarequest: Arc, // UiaaRequest = canonical json value + pub(super) userdevicesessionid_uiaarequest: RwLock, Vec>>, // UiaaRequest = canonical json value } impl Uiaa { @@ -153,10 +155,10 @@ impl Uiaa { userdevicesessionid.push(0xff); userdevicesessionid.extend_from_slice(session.as_bytes()); - self.userdevicesessionid_uiaarequest.insert( - &userdevicesessionid, - &serde_json::to_vec(request).expect("json value to vec always works"), - )?; + self.userdevicesessionid_uiaarequest.write().unwrap().insert( + userdevicesessionid, + serde_json::to_vec(request).expect("json value to vec always works"), + ); Ok(()) } @@ -173,8 +175,8 @@ impl Uiaa { userdevicesessionid.push(0xff); userdevicesessionid.extend_from_slice(session.as_bytes()); - self.userdevicesessionid_uiaarequest - .get(&userdevicesessionid)? 
+ self.userdevicesessionid_uiaarequest.read().unwrap() + .get(&userdevicesessionid) .map(|bytes| { serde_json::from_str::( &utils::string_from_bytes(&bytes) From fe8cfe05569e667b03ee855a2463964a5a029661 Mon Sep 17 00:00:00 2001 From: Moritz Bitsch Date: Tue, 14 Dec 2021 17:55:28 +0100 Subject: [PATCH 04/65] Add database migration to remove stored passwords uiaarequests can contain plaintext passwords, which were stored on disk --- src/database.rs | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/src/database.rs b/src/database.rs index 83b0fd5e..8b29b221 100644 --- a/src/database.rs +++ b/src/database.rs @@ -754,6 +754,15 @@ impl Database { println!("Migration: 9 -> 10 finished"); } + + if db.globals.database_version()? < 11 { + db._db + .open_tree("userdevicesessionid_uiaarequest")? + .clear()?; + db.globals.bump_database_version(11)?; + + println!("Migration: 10 -> 11 finished"); + } } let guard = db.read().await; From 0725b69abb7453df534a764947b6015ffe8293c4 Mon Sep 17 00:00:00 2001 From: Moritz Bitsch Date: Sat, 18 Dec 2021 18:46:38 +0100 Subject: [PATCH 05/65] Clean up userdevicesessionid_uiaarequest BTreeMap There is no need to encode or decode anything as we are not saving to disk --- src/database/uiaa.rs | 52 ++++++++++++++++++++------------------------ 1 file changed, 24 insertions(+), 28 deletions(-) diff --git a/src/database/uiaa.rs b/src/database/uiaa.rs index 2ecca93d..461a3e27 100644 --- a/src/database/uiaa.rs +++ b/src/database/uiaa.rs @@ -1,6 +1,6 @@ +use std::collections::BTreeMap; use std::sync::Arc; use std::sync::RwLock; -use std::collections::BTreeMap; use crate::{client_server::SESSION_ID_LENGTH, utils, Error, Result}; use ruma::{ @@ -20,7 +20,8 @@ use super::abstraction::Tree; pub struct Uiaa { pub(super) userdevicesessionid_uiaainfo: Arc, // User-interactive authentication - pub(super) userdevicesessionid_uiaarequest: RwLock, Vec>>, // UiaaRequest = canonical json value + pub(super) userdevicesessionid_uiaarequest: + RwLock>, } impl Uiaa { @@ -149,16 +150,17 @@ impl Uiaa { session: &str, request: &CanonicalJsonValue, ) -> Result<()> { - let mut userdevicesessionid = user_id.as_bytes().to_vec(); - userdevicesessionid.push(0xff); - userdevicesessionid.extend_from_slice(device_id.as_bytes()); - userdevicesessionid.push(0xff); - userdevicesessionid.extend_from_slice(session.as_bytes()); - - self.userdevicesessionid_uiaarequest.write().unwrap().insert( - userdevicesessionid, - serde_json::to_vec(request).expect("json value to vec always works"), - ); + self.userdevicesessionid_uiaarequest + .write() + .unwrap() + .insert( + ( + user_id.to_owned(), + device_id.to_string(), + session.to_string(), + ), + request.to_owned(), + ); Ok(()) } @@ -169,22 +171,16 @@ impl Uiaa { device_id: &DeviceId, session: &str, ) -> Result> { - let mut userdevicesessionid = user_id.as_bytes().to_vec(); - userdevicesessionid.push(0xff); - userdevicesessionid.extend_from_slice(device_id.as_bytes()); - userdevicesessionid.push(0xff); - userdevicesessionid.extend_from_slice(session.as_bytes()); - - self.userdevicesessionid_uiaarequest.read().unwrap() - .get(&userdevicesessionid) - .map(|bytes| { - serde_json::from_str::( - &utils::string_from_bytes(&bytes) - .map_err(|_| Error::bad_database("Invalid uiaa request bytes in db."))?, - ) - .map_err(|_| Error::bad_database("Invalid uiaa request in db.")) - }) - .transpose() + Ok(self + .userdevicesessionid_uiaarequest + .read() + .unwrap() + .get(&( + user_id.to_owned(), + device_id.to_string(), + session.to_string(), + )) + .map(|j| 
j.to_owned())) } fn update_uiaa_session( From 720a54b3bb74301eaf08f54edd163995bf5ef7fa Mon Sep 17 00:00:00 2001 From: Moritz Bitsch Date: Sat, 18 Dec 2021 19:05:18 +0100 Subject: [PATCH 06/65] Use String to store UserId for uiaa request Fixes compilation error after ruma upgrade --- src/database/uiaa.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/database/uiaa.rs b/src/database/uiaa.rs index 461a3e27..6a5f7a33 100644 --- a/src/database/uiaa.rs +++ b/src/database/uiaa.rs @@ -21,7 +21,7 @@ use super::abstraction::Tree; pub struct Uiaa { pub(super) userdevicesessionid_uiaainfo: Arc, // User-interactive authentication pub(super) userdevicesessionid_uiaarequest: - RwLock>, + RwLock>, } impl Uiaa { @@ -155,7 +155,7 @@ impl Uiaa { .unwrap() .insert( ( - user_id.to_owned(), + user_id.to_string(), device_id.to_string(), session.to_string(), ), @@ -176,7 +176,7 @@ impl Uiaa { .read() .unwrap() .get(&( - user_id.to_owned(), + user_id.to_string(), device_id.to_string(), session.to_string(), )) From 7857da8a0b6322618b12e4b41c6945bcd7dee9ef Mon Sep 17 00:00:00 2001 From: Torsten Flammiger Date: Mon, 20 Dec 2021 15:46:36 +0100 Subject: [PATCH 07/65] Add ability to remove an appservice --- APPSERVICES.md | 8 ++++++++ src/database/admin.rs | 4 ++++ src/database/appservice.rs | 9 +++++++++ src/database/rooms.rs | 9 +++++++++ 4 files changed, 30 insertions(+) diff --git a/APPSERVICES.md b/APPSERVICES.md index 26c34cc4..894bc6f4 100644 --- a/APPSERVICES.md +++ b/APPSERVICES.md @@ -42,6 +42,14 @@ could help. ## Appservice-specific instructions +### Remove an appservice + +To remove an appservice, go to your admin room and execute + +```@conduit:your.server.name: unregister_appservice ``` + +where `` is one of the names listed by `list_appservices`.
+ ### Tested appservices These appservices have been tested and work with Conduit without any extra steps: diff --git a/src/database/admin.rs b/src/database/admin.rs index 1e5c47c9..0702bcdd 100644 --- a/src/database/admin.rs +++ b/src/database/admin.rs @@ -12,6 +12,7 @@ use tracing::warn; pub enum AdminCommand { RegisterAppservice(serde_yaml::Value), + UnregisterAppservice(String), ListAppservices, SendMessage(RoomMessageEventContent), } @@ -96,6 +97,9 @@ impl Admin { AdminCommand::RegisterAppservice(yaml) => { guard.appservice.register_appservice(yaml).unwrap(); // TODO handle error } + AdminCommand::UnregisterAppservice(service_name) => { + guard.appservice.unregister_appservice(&service_name).unwrap(); // TODO: see above + } AdminCommand::ListAppservices => { if let Ok(appservices) = guard.appservice.iter_ids().map(|ids| ids.collect::>()) { let count = appservices.len(); diff --git a/src/database/appservice.rs b/src/database/appservice.rs index 7cc91372..caa48ad0 100644 --- a/src/database/appservice.rs +++ b/src/database/appservice.rs @@ -27,6 +27,15 @@ impl Appservice { Ok(()) } + /** + * Remove an appservice registration + * service_name is the name you send to register the service + */ + pub fn unregister_appservice(&self, service_name: &str) -> Result<()> { + self.id_appserviceregistrations.remove(service_name.as_bytes())?; + Ok(()) + } + pub fn get_registration(&self, id: &str) -> Result> { self.cached_registrations .read() diff --git a/src/database/rooms.rs b/src/database/rooms.rs index fb9ecbf0..612bd51d 100644 --- a/src/database/rooms.rs +++ b/src/database/rooms.rs @@ -1528,6 +1528,15 @@ impl Rooms { )); } } + "unregister_appservice" => { + if args.len() == 1 { + db.admin.send(AdminCommand::UnregisterAppservice(args[0].to_owned())); + } else { + db.admin.send(AdminCommand::SendMessage( + RoomMessageEventContent::text_plain("Missing appservice identifier"), + )); + } + } "list_appservices" => { db.admin.send(AdminCommand::ListAppservices); } From b6c9582cf4e9255e0610a63849bb3c5113be16e2 Mon Sep 17 00:00:00 2001 From: Torsten Flammiger Date: Wed, 22 Dec 2021 13:09:56 +0100 Subject: [PATCH 08/65] Fix doc style comment according to Rust; VSCode added line breaks --- src/database/appservice.rs | 12 +++++++----- src/database/rooms.rs | 8 ++++++-- 2 files changed, 13 insertions(+), 7 deletions(-) diff --git a/src/database/appservice.rs b/src/database/appservice.rs index caa48ad0..910964a4 100644 --- a/src/database/appservice.rs +++ b/src/database/appservice.rs @@ -27,12 +27,14 @@ impl Appservice { Ok(()) } - /** - * Remove an appservice registration - * service_name is the name you send to register the service - */ + /// Remove an appservice registration + /// + /// # Arguments + /// + /// * `service_name` - the name you send to register the service previously pub fn unregister_appservice(&self, service_name: &str) -> Result<()> { - self.id_appserviceregistrations.remove(service_name.as_bytes())?; + self.id_appserviceregistrations + .remove(service_name.as_bytes())?; Ok(()) } diff --git a/src/database/rooms.rs b/src/database/rooms.rs index 612bd51d..775e2f8d 100644 --- a/src/database/rooms.rs +++ b/src/database/rooms.rs @@ -1530,10 +1530,14 @@ impl Rooms { } "unregister_appservice" => { if args.len() == 1 { - db.admin.send(AdminCommand::UnregisterAppservice(args[0].to_owned())); + db.admin.send(AdminCommand::UnregisterAppservice( + args[0].to_owned(), + )); } else { db.admin.send(AdminCommand::SendMessage( - RoomMessageEventContent::text_plain("Missing appservice identifier"), + 
RoomMessageEventContent::text_plain( + "Missing appservice identifier", + ), )); } } From 7f2445be6ca7798ec25458e5447b23e7aeea1f7f Mon Sep 17 00:00:00 2001 From: Torsten Flammiger Date: Wed, 22 Dec 2021 16:48:27 +0100 Subject: [PATCH 09/65] On unregister_appservice(service_name), remove the appservice service_name from cache too --- src/database/appservice.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/database/appservice.rs b/src/database/appservice.rs index 910964a4..847d7479 100644 --- a/src/database/appservice.rs +++ b/src/database/appservice.rs @@ -35,6 +35,10 @@ impl Appservice { pub fn unregister_appservice(&self, service_name: &str) -> Result<()> { self.id_appserviceregistrations .remove(service_name.as_bytes())?; + self.cached_registrations. + write(). + unwrap(). + remove(service_name); Ok(()) } From c4a438460e0537e465f5b93514fd05b66a03ad37 Mon Sep 17 00:00:00 2001 From: Moritz Bitsch Date: Wed, 22 Dec 2021 19:26:23 +0100 Subject: [PATCH 10/65] Use Box to store UserID and DeviceID Userid and DeviceID are of unknown size, use Box to be able to store them into the userdevicesessionid_uiaarequest BTreeMap --- src/database/uiaa.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/database/uiaa.rs b/src/database/uiaa.rs index 6a5f7a33..772dab9e 100644 --- a/src/database/uiaa.rs +++ b/src/database/uiaa.rs @@ -21,7 +21,7 @@ use super::abstraction::Tree; pub struct Uiaa { pub(super) userdevicesessionid_uiaainfo: Arc, // User-interactive authentication pub(super) userdevicesessionid_uiaarequest: - RwLock>, + RwLock, Box, String), CanonicalJsonValue>>, } impl Uiaa { @@ -155,8 +155,8 @@ impl Uiaa { .unwrap() .insert( ( - user_id.to_string(), - device_id.to_string(), + user_id.to_owned(), + device_id.to_owned(), session.to_string(), ), request.to_owned(), @@ -176,8 +176,8 @@ impl Uiaa { .read() .unwrap() .get(&( - user_id.to_string(), - device_id.to_string(), + user_id.to_owned(), + device_id.to_owned(), session.to_string(), )) .map(|j| j.to_owned())) From aba95b20f3b3c252e72ac87312b10df8068f7419 Mon Sep 17 00:00:00 2001 From: Jonas Platte Date: Wed, 22 Dec 2021 19:41:33 +0100 Subject: [PATCH 11/65] Upgrade Ruma --- Cargo.lock | 56 ++++++++------- Cargo.toml | 4 +- src/client_server/keys.rs | 45 ++++++------ src/client_server/sync.rs | 2 + src/database/key_backups.rs | 65 +++++++++++------- src/database/users.rs | 133 ++++++++++++++++++++++-------------- 6 files changed, 184 insertions(+), 121 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index fbf4b3f2..69a026b2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -938,7 +938,7 @@ checksum = "527e8c9ac747e28542699a951517aa9a6945af506cd1f2e1b53a576c17b6cc11" dependencies = [ "bytes", "fnv", - "itoa", + "itoa 0.4.8", ] [[package]] @@ -979,7 +979,7 @@ dependencies = [ "http-body", "httparse", "httpdate", - "itoa", + "itoa 0.4.8", "pin-project-lite", "socket2 0.4.1", "tokio", @@ -1114,6 +1114,12 @@ version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" +[[package]] +name = "itoa" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35" + [[package]] name = "jobserver" version = "0.1.24" @@ -1984,7 +1990,7 @@ dependencies = [ [[package]] name = "ruma" version = "0.4.0" -source = "git+https://github.com/ruma/ruma?rev=16f031fabb7871fcd738b0f25391193ee4ca28a9#16f031fabb7871fcd738b0f25391193ee4ca28a9" 
+source = "git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" dependencies = [ "assign", "js_int", @@ -2005,7 +2011,7 @@ dependencies = [ [[package]] name = "ruma-api" version = "0.18.5" -source = "git+https://github.com/ruma/ruma?rev=16f031fabb7871fcd738b0f25391193ee4ca28a9#16f031fabb7871fcd738b0f25391193ee4ca28a9" +source = "git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" dependencies = [ "bytes", "http", @@ -2021,7 +2027,7 @@ dependencies = [ [[package]] name = "ruma-api-macros" version = "0.18.5" -source = "git+https://github.com/ruma/ruma?rev=16f031fabb7871fcd738b0f25391193ee4ca28a9#16f031fabb7871fcd738b0f25391193ee4ca28a9" +source = "git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" dependencies = [ "proc-macro-crate", "proc-macro2", @@ -2032,7 +2038,7 @@ dependencies = [ [[package]] name = "ruma-appservice-api" version = "0.4.0" -source = "git+https://github.com/ruma/ruma?rev=16f031fabb7871fcd738b0f25391193ee4ca28a9#16f031fabb7871fcd738b0f25391193ee4ca28a9" +source = "git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" dependencies = [ "ruma-api", "ruma-common", @@ -2046,7 +2052,7 @@ dependencies = [ [[package]] name = "ruma-client-api" version = "0.12.3" -source = "git+https://github.com/ruma/ruma?rev=16f031fabb7871fcd738b0f25391193ee4ca28a9#16f031fabb7871fcd738b0f25391193ee4ca28a9" +source = "git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" dependencies = [ "assign", "bytes", @@ -2066,7 +2072,7 @@ dependencies = [ [[package]] name = "ruma-common" version = "0.6.0" -source = "git+https://github.com/ruma/ruma?rev=16f031fabb7871fcd738b0f25391193ee4ca28a9#16f031fabb7871fcd738b0f25391193ee4ca28a9" +source = "git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" dependencies = [ "indexmap", "js_int", @@ -2081,7 +2087,7 @@ dependencies = [ [[package]] name = "ruma-events" version = "0.24.6" -source = "git+https://github.com/ruma/ruma?rev=16f031fabb7871fcd738b0f25391193ee4ca28a9#16f031fabb7871fcd738b0f25391193ee4ca28a9" +source = "git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" dependencies = [ "indoc", "js_int", @@ -2097,7 +2103,7 @@ dependencies = [ [[package]] name = "ruma-events-macros" version = "0.24.6" -source = "git+https://github.com/ruma/ruma?rev=16f031fabb7871fcd738b0f25391193ee4ca28a9#16f031fabb7871fcd738b0f25391193ee4ca28a9" +source = "git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" dependencies = [ "proc-macro-crate", "proc-macro2", @@ -2108,7 +2114,7 @@ dependencies = [ [[package]] name = "ruma-federation-api" version = "0.3.1" -source = "git+https://github.com/ruma/ruma?rev=16f031fabb7871fcd738b0f25391193ee4ca28a9#16f031fabb7871fcd738b0f25391193ee4ca28a9" +source = "git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" dependencies = [ "js_int", "ruma-api", @@ -2123,7 +2129,7 @@ dependencies = [ [[package]] name = "ruma-identifiers" version = "0.20.0" -source = 
"git+https://github.com/ruma/ruma?rev=16f031fabb7871fcd738b0f25391193ee4ca28a9#16f031fabb7871fcd738b0f25391193ee4ca28a9" +source = "git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" dependencies = [ "percent-encoding", "rand 0.8.4", @@ -2137,7 +2143,7 @@ dependencies = [ [[package]] name = "ruma-identifiers-macros" version = "0.20.0" -source = "git+https://github.com/ruma/ruma?rev=16f031fabb7871fcd738b0f25391193ee4ca28a9#16f031fabb7871fcd738b0f25391193ee4ca28a9" +source = "git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" dependencies = [ "quote", "ruma-identifiers-validation", @@ -2147,7 +2153,7 @@ dependencies = [ [[package]] name = "ruma-identifiers-validation" version = "0.5.0" -source = "git+https://github.com/ruma/ruma?rev=16f031fabb7871fcd738b0f25391193ee4ca28a9#16f031fabb7871fcd738b0f25391193ee4ca28a9" +source = "git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" dependencies = [ "thiserror", ] @@ -2155,7 +2161,7 @@ dependencies = [ [[package]] name = "ruma-identity-service-api" version = "0.3.0" -source = "git+https://github.com/ruma/ruma?rev=16f031fabb7871fcd738b0f25391193ee4ca28a9#16f031fabb7871fcd738b0f25391193ee4ca28a9" +source = "git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" dependencies = [ "js_int", "ruma-api", @@ -2168,7 +2174,7 @@ dependencies = [ [[package]] name = "ruma-push-gateway-api" version = "0.3.0" -source = "git+https://github.com/ruma/ruma?rev=16f031fabb7871fcd738b0f25391193ee4ca28a9#16f031fabb7871fcd738b0f25391193ee4ca28a9" +source = "git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" dependencies = [ "js_int", "ruma-api", @@ -2183,11 +2189,11 @@ dependencies = [ [[package]] name = "ruma-serde" version = "0.5.0" -source = "git+https://github.com/ruma/ruma?rev=16f031fabb7871fcd738b0f25391193ee4ca28a9#16f031fabb7871fcd738b0f25391193ee4ca28a9" +source = "git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" dependencies = [ "bytes", "form_urlencoded", - "itoa", + "itoa 0.4.8", "js_int", "ruma-serde-macros", "serde", @@ -2197,7 +2203,7 @@ dependencies = [ [[package]] name = "ruma-serde-macros" version = "0.5.0" -source = "git+https://github.com/ruma/ruma?rev=16f031fabb7871fcd738b0f25391193ee4ca28a9#16f031fabb7871fcd738b0f25391193ee4ca28a9" +source = "git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" dependencies = [ "proc-macro-crate", "proc-macro2", @@ -2208,7 +2214,7 @@ dependencies = [ [[package]] name = "ruma-signatures" version = "0.9.0" -source = "git+https://github.com/ruma/ruma?rev=16f031fabb7871fcd738b0f25391193ee4ca28a9#16f031fabb7871fcd738b0f25391193ee4ca28a9" +source = "git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" dependencies = [ "base64 0.13.0", "ed25519-dalek", @@ -2225,7 +2231,7 @@ dependencies = [ [[package]] name = "ruma-state-res" version = "0.4.1" -source = "git+https://github.com/ruma/ruma?rev=16f031fabb7871fcd738b0f25391193ee4ca28a9#16f031fabb7871fcd738b0f25391193ee4ca28a9" +source = 
"git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" dependencies = [ "itertools 0.10.1", "js_int", @@ -2404,11 +2410,11 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.67" +version = "1.0.73" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7f9e390c27c3c0ce8bc5d725f6e4d30a29d26659494aa4b17535f7522c5c950" +checksum = "bcbd0344bc6533bc7ec56df11d42fb70f1b912351c0825ccb7211b59d8af7cf5" dependencies = [ - "itoa", + "itoa 1.0.1", "ryu", "serde", ] @@ -2420,7 +2426,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "edfa57a7f8d9c1d260a549e7224100f6c43d43f9103e06dd8b4095a9b2b43ce9" dependencies = [ "form_urlencoded", - "itoa", + "itoa 0.4.8", "ryu", "serde", ] diff --git a/Cargo.toml b/Cargo.toml index 02159e31..e64e2751 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -19,7 +19,7 @@ rocket = { version = "0.5.0-rc.1", features = ["tls"] } # Used to handle request # Used for matrix spec type definitions and helpers #ruma = { version = "0.4.0", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] } -ruma = { git = "https://github.com/ruma/ruma", rev = "16f031fabb7871fcd738b0f25391193ee4ca28a9", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] } +ruma = { git = "https://github.com/ruma/ruma", rev = "7cf3abbaf02995b03db74429090ca5af1cd71edc", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] } #ruma = { git = "https://github.com/timokoesters/ruma", rev = "50c1db7e0a3a21fc794b0cce3b64285a4c750c71", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] } #ruma = { path = "../ruma/crates/ruma", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] } @@ -36,7 +36,7 @@ http = "0.2.4" # Used to find data directory for default db path directories = "3.0.2" # Used for ruma wrapper -serde_json = { version = "1.0.67", features = ["raw_value"] } +serde_json = { version = "1.0.70", features = ["raw_value"] } # Used for appservice registration files serde_yaml = "0.8.20" # Used for pdu definition diff --git a/src/client_server/keys.rs b/src/client_server/keys.rs index 08ea6e76..be0675d8 100644 --- a/src/client_server/keys.rs +++ b/src/client_server/keys.rs @@ -15,7 +15,7 @@ use ruma::{ }, federation, }, - encryption::UnsignedDeviceInfo, + serde::Raw, DeviceId, DeviceKeyAlgorithm, UserId, }; use serde_json::json; @@ -42,16 +42,9 @@ pub async fn upload_keys_route( let sender_user = body.sender_user.as_ref().expect("user is authenticated"); let sender_device = body.sender_device.as_ref().expect("user is authenticated"); - if let Some(one_time_keys) = &body.one_time_keys { - for (key_key, key_value) in one_time_keys { - db.users.add_one_time_key( - sender_user, - sender_device, - key_key, - key_value, - &db.globals, - )?; - } + for (key_key, key_value) in &body.one_time_keys { + db.users + .add_one_time_key(sender_user, sender_device, key_key, key_value, &db.globals)?; } if let Some(device_keys) = 
&body.device_keys { @@ -350,10 +343,8 @@ pub(crate) async fn get_keys_helper bool>( Error::bad_database("all_device_keys contained nonexistent device.") })?; - keys.unsigned = UnsignedDeviceInfo { - device_display_name: metadata.display_name, - }; - + add_unsigned_device_display_name(&mut keys, metadata) + .map_err(|_| Error::bad_database("invalid device keys in database"))?; container.insert(device_id, keys); } } @@ -369,10 +360,8 @@ pub(crate) async fn get_keys_helper bool>( ), )?; - keys.unsigned = UnsignedDeviceInfo { - device_display_name: metadata.display_name, - }; - + add_unsigned_device_display_name(&mut keys, metadata) + .map_err(|_| Error::bad_database("invalid device keys in database"))?; container.insert(device_id.to_owned(), keys); } device_keys.insert(user_id.to_owned(), container); @@ -441,6 +430,24 @@ pub(crate) async fn get_keys_helper bool>( }) } +fn add_unsigned_device_display_name( + keys: &mut Raw, + metadata: ruma::api::client::r0::device::Device, +) -> serde_json::Result<()> { + if let Some(display_name) = metadata.display_name { + let mut object = keys.deserialize_as::>()?; + + let unsigned = object.entry("unsigned").or_insert_with(|| json!({})); + if let serde_json::Value::Object(unsigned_object) = unsigned { + unsigned_object.insert("device_display_name".to_owned(), display_name.into()); + } + + *keys = Raw::from_json(serde_json::value::to_raw_value(&object)?); + } + + Ok(()) +} + pub(crate) async fn claim_keys_helper( one_time_keys_input: &BTreeMap, BTreeMap, DeviceKeyAlgorithm>>, db: &Database, diff --git a/src/client_server/sync.rs b/src/client_server/sync.rs index 9ba3b7fb..64588a2c 100644 --- a/src/client_server/sync.rs +++ b/src/client_server/sync.rs @@ -762,6 +762,8 @@ async fn sync_helper( .users .get_to_device_events(&sender_user, &sender_device)?, }, + // Fallback keys are not yet supported + device_unused_fallback_key_types: None, }; // TODO: Retry the endpoint instead of returning (waiting for #118) diff --git a/src/database/key_backups.rs b/src/database/key_backups.rs index 56963c08..b74bc408 100644 --- a/src/database/key_backups.rs +++ b/src/database/key_backups.rs @@ -4,8 +4,10 @@ use ruma::{ error::ErrorKind, r0::backup::{BackupAlgorithm, KeyBackupData, RoomKeyBackup}, }, + serde::Raw, RoomId, UserId, }; +use serde_json::json; use std::{collections::BTreeMap, sync::Arc}; use super::abstraction::Tree; @@ -20,7 +22,7 @@ impl KeyBackups { pub fn create_backup( &self, user_id: &UserId, - backup_metadata: &BackupAlgorithm, + backup_metadata: &Raw, globals: &super::globals::Globals, ) -> Result { let version = globals.next_count()?.to_string(); @@ -59,7 +61,7 @@ impl KeyBackups { &self, user_id: &UserId, version: &str, - backup_metadata: &BackupAlgorithm, + backup_metadata: &Raw, globals: &super::globals::Globals, ) -> Result { let mut key = user_id.as_bytes().to_vec(); @@ -73,12 +75,8 @@ impl KeyBackups { )); } - self.backupid_algorithm.insert( - &key, - serde_json::to_string(backup_metadata) - .expect("BackupAlgorithm::to_string always works") - .as_bytes(), - )?; + self.backupid_algorithm + .insert(&key, backup_metadata.json().get().as_bytes())?; self.backupid_etag .insert(&key, &globals.next_count()?.to_be_bytes())?; Ok(version.to_owned()) @@ -105,7 +103,10 @@ impl KeyBackups { .transpose() } - pub fn get_latest_backup(&self, user_id: &UserId) -> Result> { + pub fn get_latest_backup( + &self, + user_id: &UserId, + ) -> Result)>> { let mut prefix = user_id.as_bytes().to_vec(); prefix.push(0xff); let mut last_possible_key = prefix.clone(); @@ -133,7 
+134,11 @@ impl KeyBackups { .transpose() } - pub fn get_backup(&self, user_id: &UserId, version: &str) -> Result> { + pub fn get_backup( + &self, + user_id: &UserId, + version: &str, + ) -> Result>> { let mut key = user_id.as_bytes().to_vec(); key.push(0xff); key.extend_from_slice(version.as_bytes()); @@ -152,7 +157,7 @@ impl KeyBackups { version: &str, room_id: &RoomId, session_id: &str, - key_data: &KeyBackupData, + key_data: &Raw, globals: &super::globals::Globals, ) -> Result<()> { let mut key = user_id.as_bytes().to_vec(); @@ -174,10 +179,8 @@ impl KeyBackups { key.push(0xff); key.extend_from_slice(session_id.as_bytes()); - self.backupkeyid_backup.insert( - &key, - &serde_json::to_vec(&key_data).expect("KeyBackupData::to_vec always works"), - )?; + self.backupkeyid_backup + .insert(&key, key_data.json().get().as_bytes())?; Ok(()) } @@ -209,13 +212,13 @@ impl KeyBackups { &self, user_id: &UserId, version: &str, - ) -> Result, RoomKeyBackup>> { + ) -> Result, Raw>> { let mut prefix = user_id.as_bytes().to_vec(); prefix.push(0xff); prefix.extend_from_slice(version.as_bytes()); prefix.push(0xff); - let mut rooms = BTreeMap::, RoomKeyBackup>::new(); + let mut rooms = BTreeMap::, Raw>::new(); for result in self .backupkeyid_backup @@ -241,7 +244,7 @@ impl KeyBackups { Error::bad_database("backupkeyid_backup room_id is invalid room id.") })?; - let key_data = serde_json::from_slice(&value).map_err(|_| { + let key_data: serde_json::Value = serde_json::from_slice(&value).map_err(|_| { Error::bad_database("KeyBackupData in backupkeyid_backup is invalid.") })?; @@ -249,13 +252,25 @@ impl KeyBackups { }) { let (room_id, session_id, key_data) = result?; - rooms - .entry(room_id) - .or_insert_with(|| RoomKeyBackup { + let room_key_backup = rooms.entry(room_id).or_insert_with(|| { + Raw::new(&RoomKeyBackup { sessions: BTreeMap::new(), }) - .sessions - .insert(session_id, key_data); + .expect("RoomKeyBackup serialization") + }); + + let mut object = room_key_backup + .deserialize_as::>() + .map_err(|_| Error::bad_database("RoomKeyBackup is not an object"))?; + + let sessions = object.entry("session").or_insert_with(|| json!({})); + if let serde_json::Value::Object(unsigned_object) = sessions { + unsigned_object.insert(session_id, key_data); + } + + *room_key_backup = Raw::from_json( + serde_json::value::to_raw_value(&object).expect("Value => RawValue serialization"), + ); } Ok(rooms) @@ -266,7 +281,7 @@ impl KeyBackups { user_id: &UserId, version: &str, room_id: &RoomId, - ) -> Result> { + ) -> Result>> { let mut prefix = user_id.as_bytes().to_vec(); prefix.push(0xff); prefix.extend_from_slice(version.as_bytes()); @@ -304,7 +319,7 @@ impl KeyBackups { version: &str, room_id: &RoomId, session_id: &str, - ) -> Result> { + ) -> Result>> { let mut key = user_id.as_bytes().to_vec(); key.push(0xff); key.extend_from_slice(version.as_bytes()); diff --git a/src/database/users.rs b/src/database/users.rs index d4bf4890..63a63f00 100644 --- a/src/database/users.rs +++ b/src/database/users.rs @@ -8,7 +8,12 @@ use ruma::{ DeviceId, DeviceKeyAlgorithm, DeviceKeyId, MilliSecondsSinceUnixEpoch, RoomAliasId, UInt, UserId, }; -use std::{collections::BTreeMap, convert::TryInto, mem, sync::Arc}; +use std::{ + collections::BTreeMap, + convert::{TryFrom, TryInto}, + mem, + sync::Arc, +}; use tracing::warn; use super::abstraction::Tree; @@ -359,7 +364,7 @@ impl Users { user_id: &UserId, device_id: &DeviceId, one_time_key_key: &DeviceKeyId, - one_time_key_value: &OneTimeKey, + one_time_key_value: &Raw, globals: 
&super::globals::Globals, ) -> Result<()> { let mut key = user_id.as_bytes().to_vec(); @@ -409,7 +414,7 @@ impl Users { device_id: &DeviceId, key_algorithm: &DeviceKeyAlgorithm, globals: &super::globals::Globals, - ) -> Result, OneTimeKey)>> { + ) -> Result, Raw)>> { let mut prefix = user_id.as_bytes().to_vec(); prefix.push(0xff); prefix.extend_from_slice(device_id.as_bytes()); @@ -480,7 +485,7 @@ impl Users { &self, user_id: &UserId, device_id: &DeviceId, - device_keys: &DeviceKeys, + device_keys: &Raw, rooms: &super::rooms::Rooms, globals: &super::globals::Globals, ) -> Result<()> { @@ -509,9 +514,9 @@ impl Users { pub fn add_cross_signing_keys( &self, user_id: &UserId, - master_key: &CrossSigningKey, - self_signing_key: &Option, - user_signing_key: &Option, + master_key: &Raw, + self_signing_key: &Option>, + user_signing_key: &Option>, rooms: &super::rooms::Rooms, globals: &super::globals::Globals, ) -> Result<()> { @@ -521,7 +526,12 @@ impl Users { prefix.push(0xff); // Master key - let mut master_key_ids = master_key.keys.values(); + let master_key_map = master_key + .deserialize() + .map_err(|_| Error::BadRequest(ErrorKind::InvalidParam, "Invalid master key"))? + .keys; + let mut master_key_ids = master_key_map.values(); + let master_key_id = master_key_ids.next().ok_or(Error::BadRequest( ErrorKind::InvalidParam, "Master key contained no key.", @@ -537,17 +547,21 @@ impl Users { let mut master_key_key = prefix.clone(); master_key_key.extend_from_slice(master_key_id.as_bytes()); - self.keyid_key.insert( - &master_key_key, - &serde_json::to_vec(&master_key).expect("CrossSigningKey::to_vec always works"), - )?; + self.keyid_key + .insert(&master_key_key, master_key.json().get().as_bytes())?; self.userid_masterkeyid .insert(user_id.as_bytes(), &master_key_key)?; // Self-signing key if let Some(self_signing_key) = self_signing_key { - let mut self_signing_key_ids = self_signing_key.keys.values(); + let self_signing_key_map = self_signing_key + .deserialize() + .map_err(|_| { + Error::BadRequest(ErrorKind::InvalidParam, "Invalid self signing key") + })? + .keys; + let mut self_signing_key_ids = self_signing_key_map.values(); let self_signing_key_id = self_signing_key_ids.next().ok_or(Error::BadRequest( ErrorKind::InvalidParam, "Self signing key contained no key.", @@ -565,8 +579,7 @@ impl Users { self.keyid_key.insert( &self_signing_key_key, - &serde_json::to_vec(&self_signing_key) - .expect("CrossSigningKey::to_vec always works"), + self_signing_key.json().get().as_bytes(), )?; self.userid_selfsigningkeyid @@ -575,7 +588,13 @@ impl Users { // User-signing key if let Some(user_signing_key) = user_signing_key { - let mut user_signing_key_ids = user_signing_key.keys.values(); + let user_signing_key_map = user_signing_key + .deserialize() + .map_err(|_| { + Error::BadRequest(ErrorKind::InvalidParam, "Invalid user signing key") + })? 
+ .keys; + let mut user_signing_key_ids = user_signing_key_map.values(); let user_signing_key_id = user_signing_key_ids.next().ok_or(Error::BadRequest( ErrorKind::InvalidParam, "User signing key contained no key.", @@ -593,8 +612,7 @@ impl Users { self.keyid_key.insert( &user_signing_key_key, - &serde_json::to_vec(&user_signing_key) - .expect("CrossSigningKey::to_vec always works"), + user_signing_key.json().get().as_bytes(), )?; self.userid_usersigningkeyid @@ -727,7 +745,7 @@ impl Users { &self, user_id: &UserId, device_id: &DeviceId, - ) -> Result> { + ) -> Result>> { let mut key = user_id.as_bytes().to_vec(); key.push(0xff); key.extend_from_slice(device_id.as_bytes()); @@ -744,25 +762,19 @@ impl Users { &self, user_id: &UserId, allowed_signatures: F, - ) -> Result> { + ) -> Result>> { self.userid_masterkeyid .get(user_id.as_bytes())? .map_or(Ok(None), |key| { self.keyid_key.get(&key)?.map_or(Ok(None), |bytes| { - let mut cross_signing_key = serde_json::from_slice::(&bytes) - .map_err(|_| { - Error::bad_database("CrossSigningKey in db is invalid.") - })?; - - // A user is not allowed to see signatures from users other than himself and - // the target user - cross_signing_key.signatures = cross_signing_key - .signatures - .into_iter() - .filter(|(user, _)| allowed_signatures(user)) - .collect(); - - Ok(Some(cross_signing_key)) + let mut cross_signing_key = serde_json::from_slice::(&bytes) + .map_err(|_| Error::bad_database("CrossSigningKey in db is invalid."))?; + clean_signatures(&mut cross_signing_key, user_id, allowed_signatures)?; + + Ok(Some(Raw::from_json( + serde_json::value::to_raw_value(&cross_signing_key) + .expect("Value to RawValue serialization"), + ))) }) }) } @@ -772,31 +784,25 @@ impl Users { &self, user_id: &UserId, allowed_signatures: F, - ) -> Result> { + ) -> Result>> { self.userid_selfsigningkeyid .get(user_id.as_bytes())? .map_or(Ok(None), |key| { self.keyid_key.get(&key)?.map_or(Ok(None), |bytes| { - let mut cross_signing_key = serde_json::from_slice::(&bytes) - .map_err(|_| { - Error::bad_database("CrossSigningKey in db is invalid.") - })?; - - // A user is not allowed to see signatures from users other than himself and - // the target user - cross_signing_key.signatures = cross_signing_key - .signatures - .into_iter() - .filter(|(user, _)| user == user_id || allowed_signatures(user)) - .collect(); - - Ok(Some(cross_signing_key)) + let mut cross_signing_key = serde_json::from_slice::(&bytes) + .map_err(|_| Error::bad_database("CrossSigningKey in db is invalid."))?; + clean_signatures(&mut cross_signing_key, user_id, allowed_signatures)?; + + Ok(Some(Raw::from_json( + serde_json::value::to_raw_value(&cross_signing_key) + .expect("Value to RawValue serialization"), + ))) }) }) } #[tracing::instrument(skip(self, user_id))] - pub fn get_user_signing_key(&self, user_id: &UserId) -> Result> { + pub fn get_user_signing_key(&self, user_id: &UserId) -> Result>> { self.userid_usersigningkeyid .get(user_id.as_bytes())? 
.map_or(Ok(None), |key| { @@ -991,3 +997,30 @@ impl Users { Ok(()) } } + +/// Ensure that a user only sees signatures from themselves and the target user +fn clean_signatures bool>( + cross_signing_key: &mut serde_json::Value, + user_id: &UserId, + allowed_signatures: F, +) -> Result<(), Error> { + if let Some(signatures) = cross_signing_key + .get_mut("signatures") + .and_then(|v| v.as_object_mut()) + { + // Don't allocate for the full size of the current signatures, but require + // at most one resize if nothing is dropped + let new_capacity = signatures.len() / 2; + for (user, signature) in + mem::replace(signatures, serde_json::Map::with_capacity(new_capacity)) + { + let id = <&UserId>::try_from(user.as_str()) + .map_err(|_| Error::bad_database("Invalid user ID in database."))?; + if id == user_id || allowed_signatures(id) { + signatures.insert(user, signature); + } + } + } + + Ok(()) +} From a889e884e684aa433772d8d61ee965c062a38790 Mon Sep 17 00:00:00 2001 From: Tglman Date: Thu, 23 Dec 2021 22:16:40 +0000 Subject: [PATCH 12/65] refactor:moved key watch wake logic to specific module --- Cargo.toml | 2 +- src/database/abstraction.rs | 3 ++ src/database/abstraction/heed.rs | 48 ++++--------------------- src/database/abstraction/sqlite.rs | 46 ++++-------------------- src/database/abstraction/watchers.rs | 54 ++++++++++++++++++++++++++++ 5 files changed, 70 insertions(+), 83 deletions(-) create mode 100644 src/database/abstraction/watchers.rs diff --git a/Cargo.toml b/Cargo.toml index 02159e31..ceae6ae9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -87,7 +87,7 @@ sha-1 = "0.9.8" default = ["conduit_bin", "backend_sqlite"] backend_sled = ["sled"] backend_sqlite = ["sqlite"] -backend_heed = ["heed", "crossbeam"] +backend_heed = ["heed", "crossbeam", "parking_lot"] sqlite = ["rusqlite", "parking_lot", "crossbeam", "tokio/signal"] conduit_bin = [] # TODO: add rocket to this when it is optional diff --git a/src/database/abstraction.rs b/src/database/abstraction.rs index 11bbc3b1..67b80d1a 100644 --- a/src/database/abstraction.rs +++ b/src/database/abstraction.rs @@ -12,6 +12,9 @@ pub mod sqlite; #[cfg(feature = "heed")] pub mod heed; +#[cfg(any(feature = "sqlite", feature = "heed"))] +pub mod watchers; + pub trait DatabaseEngine: Sized { fn open(config: &Config) -> Result>; fn open_tree(self: &Arc, name: &'static str) -> Result>; diff --git a/src/database/abstraction/heed.rs b/src/database/abstraction/heed.rs index e767e22b..83dafc57 100644 --- a/src/database/abstraction/heed.rs +++ b/src/database/abstraction/heed.rs @@ -1,15 +1,13 @@ -use super::super::Config; +use super::{super::Config, watchers::Watchers}; use crossbeam::channel::{bounded, Sender as ChannelSender}; use threadpool::ThreadPool; use crate::{Error, Result}; use std::{ - collections::HashMap, future::Future, pin::Pin, - sync::{Arc, Mutex, RwLock}, + sync::{Arc, Mutex}, }; -use tokio::sync::oneshot::Sender; use super::{DatabaseEngine, Tree}; @@ -23,7 +21,7 @@ pub struct Engine { pub struct EngineTree { engine: Arc, tree: Arc, - watchers: RwLock, Vec>>>, + watchers: Watchers, } fn convert_error(error: heed::Error) -> Error { @@ -60,7 +58,7 @@ impl DatabaseEngine for Engine { .create_database(Some(name)) .map_err(convert_error)?, ), - watchers: RwLock::new(HashMap::new()), + watchers: Default::default(), })) } @@ -145,29 +143,7 @@ impl Tree for EngineTree { .put(&mut txn, &key, &value) .map_err(convert_error)?; txn.commit().map_err(convert_error)?; - - let watchers = self.watchers.read().unwrap(); - let mut triggered = Vec::new(); - 
- for length in 0..=key.len() { - if watchers.contains_key(&key[..length]) { - triggered.push(&key[..length]); - } - } - - drop(watchers); - - if !triggered.is_empty() { - let mut watchers = self.watchers.write().unwrap(); - for prefix in triggered { - if let Some(txs) = watchers.remove(prefix) { - for tx in txs { - let _ = tx.send(()); - } - } - } - }; - + self.watchers.wake(key); Ok(()) } @@ -223,18 +199,6 @@ impl Tree for EngineTree { #[tracing::instrument(skip(self, prefix))] fn watch_prefix<'a>(&'a self, prefix: &[u8]) -> Pin + Send + 'a>> { - let (tx, rx) = tokio::sync::oneshot::channel(); - - self.watchers - .write() - .unwrap() - .entry(prefix.to_vec()) - .or_default() - .push(tx); - - Box::pin(async move { - // Tx is never destroyed - rx.await.unwrap(); - }) + self.watchers.watch(prefix) } } diff --git a/src/database/abstraction/sqlite.rs b/src/database/abstraction/sqlite.rs index 1d2038c5..1e6a2d89 100644 --- a/src/database/abstraction/sqlite.rs +++ b/src/database/abstraction/sqlite.rs @@ -1,17 +1,15 @@ -use super::{DatabaseEngine, Tree}; +use super::{watchers::Watchers, DatabaseEngine, Tree}; use crate::{database::Config, Result}; -use parking_lot::{Mutex, MutexGuard, RwLock}; +use parking_lot::{Mutex, MutexGuard}; use rusqlite::{Connection, DatabaseName::Main, OptionalExtension}; use std::{ cell::RefCell, - collections::{hash_map, HashMap}, future::Future, path::{Path, PathBuf}, pin::Pin, sync::Arc, }; use thread_local::ThreadLocal; -use tokio::sync::watch; use tracing::debug; thread_local! { @@ -113,7 +111,7 @@ impl DatabaseEngine for Engine { Ok(Arc::new(SqliteTable { engine: Arc::clone(self), name: name.to_owned(), - watchers: RwLock::new(HashMap::new()), + watchers: Watchers::default(), })) } @@ -126,7 +124,7 @@ impl DatabaseEngine for Engine { pub struct SqliteTable { engine: Arc, name: String, - watchers: RwLock, (watch::Sender<()>, watch::Receiver<()>)>>, + watchers: Watchers, } type TupleOfBytes = (Vec, Vec); @@ -200,27 +198,7 @@ impl Tree for SqliteTable { let guard = self.engine.write_lock(); self.insert_with_guard(&guard, key, value)?; drop(guard); - - let watchers = self.watchers.read(); - let mut triggered = Vec::new(); - - for length in 0..=key.len() { - if watchers.contains_key(&key[..length]) { - triggered.push(&key[..length]); - } - } - - drop(watchers); - - if !triggered.is_empty() { - let mut watchers = self.watchers.write(); - for prefix in triggered { - if let Some(tx) = watchers.remove(prefix) { - let _ = tx.0.send(()); - } - } - }; - + self.watchers.wake(key); Ok(()) } @@ -365,19 +343,7 @@ impl Tree for SqliteTable { #[tracing::instrument(skip(self, prefix))] fn watch_prefix<'a>(&'a self, prefix: &[u8]) -> Pin + Send + 'a>> { - let mut rx = match self.watchers.write().entry(prefix.to_vec()) { - hash_map::Entry::Occupied(o) => o.get().1.clone(), - hash_map::Entry::Vacant(v) => { - let (tx, rx) = tokio::sync::watch::channel(()); - v.insert((tx, rx.clone())); - rx - } - }; - - Box::pin(async move { - // Tx is never destroyed - rx.changed().await.unwrap(); - }) + self.watchers.watch(prefix) } #[tracing::instrument(skip(self))] diff --git a/src/database/abstraction/watchers.rs b/src/database/abstraction/watchers.rs new file mode 100644 index 00000000..404f3f06 --- /dev/null +++ b/src/database/abstraction/watchers.rs @@ -0,0 +1,54 @@ +use parking_lot::RwLock; +use std::{ + collections::{hash_map, HashMap}, + future::Future, + pin::Pin, +}; +use tokio::sync::watch; + +#[derive(Default)] +pub(super) struct Watchers { + watchers: RwLock, (watch::Sender<()>, 
watch::Receiver<()>)>>, +} + +impl Watchers { + pub(super) fn watch<'a>( + &'a self, + prefix: &[u8], + ) -> Pin + Send + 'a>> { + let mut rx = match self.watchers.write().entry(prefix.to_vec()) { + hash_map::Entry::Occupied(o) => o.get().1.clone(), + hash_map::Entry::Vacant(v) => { + let (tx, rx) = tokio::sync::watch::channel(()); + v.insert((tx, rx.clone())); + rx + } + }; + + Box::pin(async move { + // Tx is never destroyed + rx.changed().await.unwrap(); + }) + } + pub(super) fn wake(&self, key: &[u8]) { + let watchers = self.watchers.read(); + let mut triggered = Vec::new(); + + for length in 0..=key.len() { + if watchers.contains_key(&key[..length]) { + triggered.push(&key[..length]); + } + } + + drop(watchers); + + if !triggered.is_empty() { + let mut watchers = self.watchers.write(); + for prefix in triggered { + if let Some(tx) = watchers.remove(prefix) { + let _ = tx.0.send(()); + } + } + }; + } +} From b746f17e562ba02d9471d23c42c9bb8c9f4ee070 Mon Sep 17 00:00:00 2001 From: Ticho 34782694 Date: Fri, 7 Jan 2022 13:06:21 +0000 Subject: [PATCH 13/65] Make traefik+nginx config more self-contained The nginx instance which is serving the .well-known endpoints can serve the simple JSON replies directly from memory, instead of having them as external files on disk. --- docker/README.md | 30 ++++++++++++------------------ 1 file changed, 12 insertions(+), 18 deletions(-) diff --git a/docker/README.md b/docker/README.md index 19d9dca6..1f38d66a 100644 --- a/docker/README.md +++ b/docker/README.md @@ -94,26 +94,20 @@ So...step by step: server_name .; listen 80 default_server; - location /.well-known/matrix/ { - root /var/www; - default_type application/json; - add_header Access-Control-Allow-Origin *; + location /.well-known/matrix/server { + return 200 '{"m.server": ".:443"}'; + add_header Content-Type application/json; } - } - ``` - - `./nginx/www/.well-known/matrix/client` (relative to the compose file, you can change this, but then also need to change the volume mapping) - ```json - { - "m.homeserver": { - "base_url": "https://." 
- } - } - ``` - - `./nginx/www/.well-known/matrix/server` (relative to the compose file, you can change this, but then also need to change the volume mapping) - ```json - { - "m.server": ".:443" + location /.well-known/matrix/client { + return 200 '{"m.homeserver": {"base_url": "https://."}}'; + add_header Content-Type application/json; + add_header "Access-Control-Allow-Origin" *; + } + + location / { + return 404; + } } ``` From 349865d3ccb9ee78c9410de28e0d5d8c043ae0c8 Mon Sep 17 00:00:00 2001 From: Jonas Platte Date: Thu, 13 Jan 2022 11:44:23 +0100 Subject: [PATCH 14/65] Upgrade Ruma --- Cargo.lock | 36 ++++++++++++++++++------------------ Cargo.toml | 2 +- src/client_server/message.rs | 4 ++-- 3 files changed, 21 insertions(+), 21 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 69a026b2..07cae94e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1990,7 +1990,7 @@ dependencies = [ [[package]] name = "ruma" version = "0.4.0" -source = "git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" +source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" dependencies = [ "assign", "js_int", @@ -2011,7 +2011,7 @@ dependencies = [ [[package]] name = "ruma-api" version = "0.18.5" -source = "git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" +source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" dependencies = [ "bytes", "http", @@ -2027,7 +2027,7 @@ dependencies = [ [[package]] name = "ruma-api-macros" version = "0.18.5" -source = "git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" +source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" dependencies = [ "proc-macro-crate", "proc-macro2", @@ -2038,7 +2038,7 @@ dependencies = [ [[package]] name = "ruma-appservice-api" version = "0.4.0" -source = "git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" +source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" dependencies = [ "ruma-api", "ruma-common", @@ -2052,7 +2052,7 @@ dependencies = [ [[package]] name = "ruma-client-api" version = "0.12.3" -source = "git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" +source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" dependencies = [ "assign", "bytes", @@ -2072,7 +2072,7 @@ dependencies = [ [[package]] name = "ruma-common" version = "0.6.0" -source = "git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" +source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" dependencies = [ "indexmap", "js_int", @@ -2087,7 +2087,7 @@ dependencies = [ [[package]] name = "ruma-events" version = "0.24.6" -source = "git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" +source = 
"git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" dependencies = [ "indoc", "js_int", @@ -2103,7 +2103,7 @@ dependencies = [ [[package]] name = "ruma-events-macros" version = "0.24.6" -source = "git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" +source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" dependencies = [ "proc-macro-crate", "proc-macro2", @@ -2114,7 +2114,7 @@ dependencies = [ [[package]] name = "ruma-federation-api" version = "0.3.1" -source = "git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" +source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" dependencies = [ "js_int", "ruma-api", @@ -2129,7 +2129,7 @@ dependencies = [ [[package]] name = "ruma-identifiers" version = "0.20.0" -source = "git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" +source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" dependencies = [ "percent-encoding", "rand 0.8.4", @@ -2143,7 +2143,7 @@ dependencies = [ [[package]] name = "ruma-identifiers-macros" version = "0.20.0" -source = "git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" +source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" dependencies = [ "quote", "ruma-identifiers-validation", @@ -2153,7 +2153,7 @@ dependencies = [ [[package]] name = "ruma-identifiers-validation" version = "0.5.0" -source = "git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" +source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" dependencies = [ "thiserror", ] @@ -2161,7 +2161,7 @@ dependencies = [ [[package]] name = "ruma-identity-service-api" version = "0.3.0" -source = "git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" +source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" dependencies = [ "js_int", "ruma-api", @@ -2174,7 +2174,7 @@ dependencies = [ [[package]] name = "ruma-push-gateway-api" version = "0.3.0" -source = "git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" +source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" dependencies = [ "js_int", "ruma-api", @@ -2189,7 +2189,7 @@ dependencies = [ [[package]] name = "ruma-serde" version = "0.5.0" -source = "git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" +source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" dependencies = [ "bytes", "form_urlencoded", @@ -2203,7 +2203,7 @@ dependencies = [ [[package]] name = "ruma-serde-macros" version = "0.5.0" -source = 
"git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" +source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" dependencies = [ "proc-macro-crate", "proc-macro2", @@ -2214,7 +2214,7 @@ dependencies = [ [[package]] name = "ruma-signatures" version = "0.9.0" -source = "git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" +source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" dependencies = [ "base64 0.13.0", "ed25519-dalek", @@ -2231,7 +2231,7 @@ dependencies = [ [[package]] name = "ruma-state-res" version = "0.4.1" -source = "git+https://github.com/ruma/ruma?rev=7cf3abbaf02995b03db74429090ca5af1cd71edc#7cf3abbaf02995b03db74429090ca5af1cd71edc" +source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" dependencies = [ "itertools 0.10.1", "js_int", diff --git a/Cargo.toml b/Cargo.toml index 7b3432c2..5e09dee0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -19,7 +19,7 @@ rocket = { version = "0.5.0-rc.1", features = ["tls"] } # Used to handle request # Used for matrix spec type definitions and helpers #ruma = { version = "0.4.0", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] } -ruma = { git = "https://github.com/ruma/ruma", rev = "7cf3abbaf02995b03db74429090ca5af1cd71edc", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] } +ruma = { git = "https://github.com/ruma/ruma", rev = "f8ba7f795765bf4aeb4db06849f9fdde9c162ac3", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] } #ruma = { git = "https://github.com/timokoesters/ruma", rev = "50c1db7e0a3a21fc794b0cce3b64285a4c750c71", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] } #ruma = { path = "../ruma/crates/ruma", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] } diff --git a/src/client_server/message.rs b/src/client_server/message.rs index 60c756a3..da6ae875 100644 --- a/src/client_server/message.rs +++ b/src/client_server/message.rs @@ -160,7 +160,7 @@ pub async fn get_message_events_route( .collect(); let mut resp = get_message_events::Response::new(); - resp.start = Some(body.from.to_owned()); + resp.start = body.from.to_owned(); resp.end = end_token; resp.chunk = events_after; resp.state = Vec::new(); @@ -190,7 +190,7 @@ pub async fn get_message_events_route( .collect(); let mut resp = get_message_events::Response::new(); - resp.start = Some(body.from.to_owned()); + resp.start = body.from.to_owned(); resp.end = start_token; resp.chunk = events_before; resp.state = Vec::new(); From cf54185a1cfe6b7cbed4c8c472198360aa705663 Mon Sep 17 00:00:00 2001 From: Jonas Platte Date: Thu, 13 Jan 2022 11:48:18 +0100 Subject: [PATCH 15/65] Use struct literals for consistency --- src/client_server/context.rs | 25 
+++++++++++++------------ src/client_server/message.rs | 22 ++++++++++++---------- src/client_server/unversioned.rs | 9 ++++----- 3 files changed, 29 insertions(+), 27 deletions(-) diff --git a/src/client_server/context.rs b/src/client_server/context.rs index 97fc4fd8..9bfec9e1 100644 --- a/src/client_server/context.rs +++ b/src/client_server/context.rs @@ -92,18 +92,19 @@ pub async fn get_context_route( .map(|(_, pdu)| pdu.to_room_event()) .collect(); - let mut resp = get_context::Response::new(); - resp.start = start_token; - resp.end = end_token; - resp.events_before = events_before; - resp.event = Some(base_event); - resp.events_after = events_after; - resp.state = db // TODO: State at event - .rooms - .room_state_full(&body.room_id)? - .values() - .map(|pdu| pdu.to_state_event()) - .collect(); + let resp = get_context::Response { + start: start_token, + end: end_token, + events_before, + event: Some(base_event), + events_after, + state: db // TODO: State at event + .rooms + .room_state_full(&body.room_id)? + .values() + .map(|pdu| pdu.to_state_event()) + .collect(), + }; Ok(resp.into()) } diff --git a/src/client_server/message.rs b/src/client_server/message.rs index da6ae875..cbce019e 100644 --- a/src/client_server/message.rs +++ b/src/client_server/message.rs @@ -159,11 +159,12 @@ pub async fn get_message_events_route( .map(|(_, pdu)| pdu.to_room_event()) .collect(); - let mut resp = get_message_events::Response::new(); - resp.start = body.from.to_owned(); - resp.end = end_token; - resp.chunk = events_after; - resp.state = Vec::new(); + let resp = get_message_events::Response { + start: body.from.to_owned(), + end: end_token, + chunk: events_after, + state: Vec::new(), + }; Ok(resp.into()) } @@ -189,11 +190,12 @@ pub async fn get_message_events_route( .map(|(_, pdu)| pdu.to_room_event()) .collect(); - let mut resp = get_message_events::Response::new(); - resp.start = body.from.to_owned(); - resp.end = start_token; - resp.chunk = events_before; - resp.state = Vec::new(); + let resp = get_message_events::Response { + start: body.from.to_owned(), + end: start_token, + chunk: events_before, + state: Vec::new(), + }; Ok(resp.into()) } diff --git a/src/client_server/unversioned.rs b/src/client_server/unversioned.rs index f2624bbc..f17d8cf3 100644 --- a/src/client_server/unversioned.rs +++ b/src/client_server/unversioned.rs @@ -17,11 +17,10 @@ use rocket::get; #[cfg_attr(feature = "conduit_bin", get("/_matrix/client/versions"))] #[tracing::instrument] pub async fn get_supported_versions_route() -> ConduitResult { - let mut resp = - get_supported_versions::Response::new(vec!["r0.5.0".to_owned(), "r0.6.0".to_owned()]); - - resp.unstable_features - .insert("org.matrix.e2e_cross_signing".to_owned(), true); + let resp = get_supported_versions::Response { + versions: vec!["r0.5.0".to_owned(), "r0.6.0".to_owned()], + unstable_features: [("org.matrix.e2e_cross_signing".to_owned(), true)].into(), + }; Ok(resp.into()) } From 84862352bacd7172602f1b8200a774d668a9f087 Mon Sep 17 00:00:00 2001 From: Jonas Platte Date: Thu, 13 Jan 2022 11:48:40 +0100 Subject: [PATCH 16/65] Replace to_string calls on string literals with to_owned --- src/database/uiaa.rs | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/src/database/uiaa.rs b/src/database/uiaa.rs index 772dab9e..5e11467e 100644 --- a/src/database/uiaa.rs +++ b/src/database/uiaa.rs @@ -154,11 +154,7 @@ impl Uiaa { .write() .unwrap() .insert( - ( - user_id.to_owned(), - device_id.to_owned(), - session.to_string(), - ), + 
(user_id.to_owned(), device_id.to_owned(), session.to_owned()), request.to_owned(), ); @@ -175,11 +171,7 @@ impl Uiaa { .userdevicesessionid_uiaarequest .read() .unwrap() - .get(&( - user_id.to_owned(), - device_id.to_owned(), - session.to_string(), - )) + .get(&(user_id.to_owned(), device_id.to_owned(), session.to_owned())) .map(|j| j.to_owned())) } From bcf4ede0bc356efb4bd8b8909ca3db0ab157f97e Mon Sep 17 00:00:00 2001 From: Jonas Platte Date: Thu, 13 Jan 2022 12:06:20 +0100 Subject: [PATCH 17/65] Restore compatibility with Rust 1.53 --- src/client_server/unversioned.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/client_server/unversioned.rs b/src/client_server/unversioned.rs index f17d8cf3..ea685b4b 100644 --- a/src/client_server/unversioned.rs +++ b/src/client_server/unversioned.rs @@ -1,3 +1,5 @@ +use std::{collections::BTreeMap, iter::FromIterator}; + use crate::ConduitResult; use ruma::api::client::unversioned::get_supported_versions; @@ -19,7 +21,7 @@ use rocket::get; pub async fn get_supported_versions_route() -> ConduitResult { let resp = get_supported_versions::Response { versions: vec!["r0.5.0".to_owned(), "r0.6.0".to_owned()], - unstable_features: [("org.matrix.e2e_cross_signing".to_owned(), true)].into(), + unstable_features: BTreeMap::from_iter([("org.matrix.e2e_cross_signing".to_owned(), true)]), }; Ok(resp.into()) From eecd664c43c652f7fe4afc06154b346fc6a45b58 Mon Sep 17 00:00:00 2001 From: Torsten Flammiger Date: Thu, 13 Jan 2022 12:26:23 +0100 Subject: [PATCH 18/65] Reformat code --- src/database/appservice.rs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/database/appservice.rs b/src/database/appservice.rs index 847d7479..88de1f33 100644 --- a/src/database/appservice.rs +++ b/src/database/appservice.rs @@ -28,17 +28,17 @@ impl Appservice { } /// Remove an appservice registration - /// + /// /// # Arguments - /// + /// /// * `service_name` - the name you send to register the service previously pub fn unregister_appservice(&self, service_name: &str) -> Result<()> { self.id_appserviceregistrations .remove(service_name.as_bytes())?; - self.cached_registrations. - write(). - unwrap(). 
- remove(service_name); + self.cached_registrations + .write() + .unwrap() + .remove(service_name); Ok(()) } From 1d647a1a9a0a3075ee1bdbe2a039d22ee73baa2f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20K=C3=B6sters?= Date: Sat, 16 Oct 2021 15:19:25 +0200 Subject: [PATCH 19/65] improvement: allow rocksdb again --- Cargo.lock | 723 +++++++++++++++++----------- Cargo.toml | 4 +- src/database.rs | 11 +- src/database/abstraction.rs | 5 +- src/database/abstraction/rocksdb.rs | 183 +++++++ src/database/abstraction/sqlite.rs | 14 +- src/error.rs | 6 + src/utils.rs | 11 + 8 files changed, 662 insertions(+), 295 deletions(-) create mode 100644 src/database/abstraction/rocksdb.rs diff --git a/Cargo.lock b/Cargo.lock index 07cae94e..794445f9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10,9 +10,9 @@ checksum = "aae1277d39aeec15cb388266ecc24b11c80469deae6067e17a1a7aa9e5c1f234" [[package]] name = "ahash" -version = "0.7.4" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43bb833f0bf979d8475d38fbf09ed3b8a55e1885fe93ad3f93239fc6a4f17b98" +checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" dependencies = [ "getrandom 0.2.3", "once_cell", @@ -78,9 +78,9 @@ dependencies = [ [[package]] name = "async-trait" -version = "0.1.51" +version = "0.1.52" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44318e776df68115a881de9a8fd1b9e53368d7a4a5ce4cc48517da3393233a5e" +checksum = "061a7acccaa286c011ddc30970520b98fa40e00c9d644633fb26b5fc63a265e3" dependencies = [ "proc-macro2", "quote", @@ -89,9 +89,9 @@ dependencies = [ [[package]] name = "atomic" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3410529e8288c463bedb5930f82833bc0c90e5d2fe639a56582a4d09220b281" +checksum = "b88d82667eca772c4aa12f0f1348b3ae643424c8876448f3f7bd5787032e234c" dependencies = [ "autocfg", ] @@ -146,6 +146,25 @@ dependencies = [ "serde", ] +[[package]] +name = "bindgen" +version = "0.59.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bd2a9a458e8f4304c52c43ebb0cfbd520289f8379a52e329a38afda99bf8eb8" +dependencies = [ + "bitflags", + "cexpr", + "clang-sys", + "lazy_static", + "lazycell", + "peeking_take_while", + "proc-macro2", + "quote", + "regex", + "rustc-hash", + "shlex", +] + [[package]] name = "bitflags" version = "1.3.2" @@ -174,15 +193,15 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.7.0" +version = "3.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c59e7af012c713f529e7a3ee57ce9b31ddd858d4b512923602f74608b009631" +checksum = "a4a45a46ab1f2412e53d3a0ade76ffad2025804294569aae387231a0cd6e0899" [[package]] name = "bytemuck" -version = "1.7.2" +version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72957246c41db82b8ef88a5486143830adeb8227ef9837740bdec67724cf2c5b" +checksum = "439989e6b8c38d1b6570a384ef1e49c8848128f5a97f3914baef02920842712f" [[package]] name = "byteorder" @@ -198,13 +217,22 @@ checksum = "c4872d67bab6358e59559027aa3b9157c53d9358c51423c17554809a8858e0f8" [[package]] name = "cc" -version = "1.0.70" +version = "1.0.72" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d26a6ce4b6a484fa3edb70f7efa6fc430fd2b87285fe8b84304fd0936faa0dc0" +checksum = "22a9137b95ea06864e018375b72adfb7db6e6f68cfc8df5a04d00288050485ee" dependencies = [ "jobserver", ] +[[package]] +name = "cexpr" +version = "0.6.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" +dependencies = [ + "nom", +] + [[package]] name = "cfg-if" version = "0.1.10" @@ -230,6 +258,17 @@ dependencies = [ "winapi", ] +[[package]] +name = "clang-sys" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa66045b9cb23c2e9c1520732030608b02ee07e5cfaa5a521ec15ded7fa24c90" +dependencies = [ + "glob", + "libc", + "libloading", +] + [[package]] name = "color_quant" version = "1.1.0" @@ -259,11 +298,12 @@ dependencies = [ "reqwest", "ring", "rocket", + "rocksdb", "ruma", "rusqlite", "rust-argon2", - "rustls", - "rustls-native-certs", + "rustls 0.19.1", + "rustls-native-certs 0.5.0", "serde", "serde_json", "serde_yaml", @@ -275,22 +315,22 @@ dependencies = [ "tokio", "tracing", "tracing-flame", - "tracing-subscriber", + "tracing-subscriber 0.2.25", "trust-dns-resolver", "webpki 0.22.0", ] [[package]] name = "const-oid" -version = "0.6.0" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44c32f031ea41b4291d695026c023b95d59db2d8a2c7640800ed56bc8f510f22" +checksum = "9d6f2aa4d0537bcc1c74df8755072bd31c1ef1a3a1b85a68e8404a8c353b7b8b" [[package]] name = "const_fn" -version = "0.4.8" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f92cfa0fd5690b3cf8c1ef2cabbd9b7ef22fa53cf5e1f92b05103f6d5d1cf6e7" +checksum = "fbdcdcb6d86f71c5e97409ad45898af11cbc995b4ee8112d59095a28d376c935" [[package]] name = "constant_time_eq" @@ -311,9 +351,9 @@ dependencies = [ [[package]] name = "core-foundation" -version = "0.9.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a89e2ae426ea83155dccf10c0fa6b1463ef6d5fcb44cee0b224a408fa640a62" +checksum = "6888e10551bb93e424d8df1d07f1a8b4fceb0001a3a4b048bfc47554946f47b3" dependencies = [ "core-foundation-sys", "libc", @@ -321,9 +361,9 @@ dependencies = [ [[package]] name = "core-foundation-sys" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea221b5284a47e40033bf9b66f35f984ec0ea2931eb03505246cd27a963f981b" +checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" [[package]] name = "cpufeatures" @@ -336,9 +376,9 @@ dependencies = [ [[package]] name = "crc32fast" -version = "1.2.1" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81156fece84ab6a9f2afdb109ce3ae577e42b1228441eded99bd77f627953b1a" +checksum = "738c290dfaea84fc1ca15ad9c168d083b05a714e1efddd8edaab678dc28d2836" dependencies = [ "cfg-if 1.0.0", ] @@ -353,18 +393,18 @@ dependencies = [ "crossbeam-channel", "crossbeam-deque", "crossbeam-epoch", - "crossbeam-queue 0.3.2", - "crossbeam-utils 0.8.5", + "crossbeam-queue 0.3.3", + "crossbeam-utils 0.8.6", ] [[package]] name = "crossbeam-channel" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06ed27e177f16d65f0f0c22a213e17c696ace5dd64b14258b52f9417ccb52db4" +checksum = "e54ea8bc3fb1ee042f5aace6e3c6e025d3874866da222930f70ce62aceba0bfa" dependencies = [ "cfg-if 1.0.0", - "crossbeam-utils 0.8.5", + "crossbeam-utils 0.8.6", ] [[package]] @@ -375,17 +415,17 @@ checksum = "6455c0ca19f0d2fbf751b908d5c55c1f5cbc65e03c4225427254b46890bdde1e" dependencies = [ "cfg-if 1.0.0", "crossbeam-epoch", - "crossbeam-utils 0.8.5", + "crossbeam-utils 0.8.6", ] [[package]] 
name = "crossbeam-epoch" -version = "0.9.5" +version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ec02e091aa634e2c3ada4a392989e7c3116673ef0ac5b72232439094d73b7fd" +checksum = "97242a70df9b89a65d0b6df3c4bf5b9ce03c5b7309019777fbde37e7537f8762" dependencies = [ "cfg-if 1.0.0", - "crossbeam-utils 0.8.5", + "crossbeam-utils 0.8.6", "lazy_static", "memoffset", "scopeguard", @@ -402,12 +442,12 @@ dependencies = [ [[package]] name = "crossbeam-queue" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b10ddc024425c88c2ad148c1b0fd53f4c6d38db9697c9f1588381212fa657c9" +checksum = "b979d76c9fcb84dffc80a73f7290da0f83e4c95773494674cb44b76d13a7a110" dependencies = [ "cfg-if 1.0.0", - "crossbeam-utils 0.8.5", + "crossbeam-utils 0.8.6", ] [[package]] @@ -422,9 +462,9 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d82cfc11ce7f2c3faef78d8a684447b40d503d9681acebed6cb728d45940c4db" +checksum = "cfcae03edb34f947e64acdb1c33ec169824e20657e9ecb61cef6c8c74dcb8120" dependencies = [ "cfg-if 1.0.0", "lazy_static", @@ -471,9 +511,9 @@ dependencies = [ [[package]] name = "der" -version = "0.4.1" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31e21d2d0f22cde6e88694108429775c0219760a07779bf96503b434a03d7412" +checksum = "79b71cca7d95d7681a4b3b9cdf63c8dbc3730d0584c2c74e31416d64a90493f4" dependencies = [ "const-oid", ] @@ -546,17 +586,11 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "212d0f5754cb6769937f4501cc0e67f4f4483c8d2c3e1e922ee9edbe4ab4c7c0" -[[package]] -name = "dtoa" -version = "0.4.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56899898ce76aaf4a0f24d914c97ea6ed976d42fec6ad33fcbb0a1103e07b2b0" - [[package]] name = "ed25519" -version = "1.2.0" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4620d40f6d2601794401d6dd95a5cf69b6c157852539470eeda433a99b3c0efc" +checksum = "74e1069e39f1454367eb2de793ed062fac4c35c2934b76a81d90dd9abcd28816" dependencies = [ "signature", ] @@ -583,9 +617,9 @@ checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" [[package]] name = "encoding_rs" -version = "0.8.28" +version = "0.8.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80df024fbc5ac80f87dfef0d9f5209a252f2a497f7f42944cff24d8253cac065" +checksum = "7896dc8abb250ffdda33912550faa54c88ec8b998dec0b2c55ab224921ce11df" dependencies = [ "cfg-if 1.0.0", ] @@ -614,6 +648,15 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a" +[[package]] +name = "fastrand" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "779d043b6a0b90cc4c0ed7ee380a6504394cee7efd7db050e3774eee387324b2" +dependencies = [ + "instant", +] + [[package]] name = "figment" version = "0.10.6" @@ -656,9 +699,9 @@ dependencies = [ [[package]] name = "futures" -version = "0.3.17" +version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a12aa0eb539080d55c3f2d45a67c3b58b6b0773c1a3ca2dfec66d58c97fd66ca" +checksum = "28560757fe2bb34e79f907794bb6b22ae8b0e5c669b638a1132f2592b19035b4" dependencies = [ "futures-channel", 
"futures-core", @@ -671,9 +714,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.17" +version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5da6ba8c3bb3c165d3c7319fc1cc8304facf1fb8db99c5de877183c08a273888" +checksum = "ba3dda0b6588335f360afc675d0564c17a77a2bda81ca178a4b6081bd86c7f0b" dependencies = [ "futures-core", "futures-sink", @@ -681,15 +724,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.17" +version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88d1c26957f23603395cd326b0ffe64124b818f4449552f960d815cfba83a53d" +checksum = "d0c8ff0461b82559810cdccfde3215c3f373807f5e5232b71479bff7bb2583d7" [[package]] name = "futures-executor" -version = "0.3.17" +version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45025be030969d763025784f7f355043dc6bc74093e4ecc5000ca4dc50d8745c" +checksum = "29d6d2ff5bb10fb95c85b8ce46538a2e5f5e7fdc755623a7d4529ab8a4ed9d2a" dependencies = [ "futures-core", "futures-task", @@ -698,18 +741,16 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.17" +version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "522de2a0fe3e380f1bc577ba0474108faf3f6b18321dbf60b3b9c39a75073377" +checksum = "b1f9d34af5a1aac6fb380f735fe510746c38067c5bf16c7fd250280503c971b2" [[package]] name = "futures-macro" -version = "0.3.17" +version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18e4a4b95cea4b4ccbcf1c5675ca7c4ee4e9e75eb79944d07defde18068f79bb" +checksum = "6dbd947adfffb0efc70599b3ddcf7b5597bb5fa9e245eb99f62b3a5f7bb8bd3c" dependencies = [ - "autocfg", - "proc-macro-hack", "proc-macro2", "quote", "syn", @@ -717,23 +758,22 @@ dependencies = [ [[package]] name = "futures-sink" -version = "0.3.17" +version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36ea153c13024fe480590b3e3d4cad89a0cfacecc24577b68f86c6ced9c2bc11" +checksum = "e3055baccb68d74ff6480350f8d6eb8fcfa3aa11bdc1a1ae3afdd0514617d508" [[package]] name = "futures-task" -version = "0.3.17" +version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d3d00f4eddb73e498a54394f228cd55853bdf059259e8e7bc6e69d408892e99" +checksum = "6ee7c6485c30167ce4dfb83ac568a849fe53274c831081476ee13e0dce1aad72" [[package]] name = "futures-util" -version = "0.3.17" +version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36568465210a3a6ee45e1f165136d68671471a501e632e9a98d96872222b5481" +checksum = "d9b5cf40b47a271f77a8b1bec03ca09044d99d2372c0de244e66430761127164" dependencies = [ - "autocfg", "futures-channel", "futures-core", "futures-io", @@ -743,8 +783,6 @@ dependencies = [ "memchr", "pin-project-lite", "pin-utils", - "proc-macro-hack", - "proc-macro-nested", "slab", ] @@ -772,9 +810,9 @@ dependencies = [ [[package]] name = "generic-array" -version = "0.14.4" +version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "501466ecc8a30d1d3b7fc9229b122b2ce8ed6e9d9223f1138d4babb253e51817" +checksum = "fd48d33ec7f05fbfa152300fdad764757cbded343c1aa1cff2fbaf4134851803" dependencies = [ "typenum", "version_check", @@ -804,9 +842,9 @@ dependencies = [ [[package]] name = "gif" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5a668f699973d0f573d15749b7002a9ac9e1f9c6b220e7b165601334c173d8de" +checksum = "c3a7187e78088aead22ceedeee99779455b23fc231fe13ec443f99bb71694e5b" dependencies = [ "color_quant", "weezl", @@ -820,9 +858,9 @@ checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574" [[package]] name = "h2" -version = "0.3.4" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7f3675cfef6a30c8031cf9e6493ebdc3bb3272a3fea3923c4210d1830e6a472" +checksum = "0c9de88456263e249e241fcd211d3954e2c9b0ef7ccfc235a444eb367cae3689" dependencies = [ "bytes", "fnv", @@ -932,20 +970,20 @@ dependencies = [ [[package]] name = "http" -version = "0.2.4" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "527e8c9ac747e28542699a951517aa9a6945af506cd1f2e1b53a576c17b6cc11" +checksum = "31f4c6746584866f0feabcc69893c5b51beef3831656a968ed7ae254cdc4fd03" dependencies = [ "bytes", "fnv", - "itoa 0.4.8", + "itoa 1.0.1", ] [[package]] name = "http-body" -version = "0.4.3" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "399c583b2979440c60be0821a6199eca73bc3c8dcd9d070d75ac726e2c6186e5" +checksum = "1ff4f84919677303da5f147645dbea6b1881f368d03ac84e1dc09031ebd7b2c6" dependencies = [ "bytes", "http", @@ -960,15 +998,15 @@ checksum = "acd94fdbe1d4ff688b67b04eee2e17bd50995534a61539e45adfefb45e5e5503" [[package]] name = "httpdate" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6456b8a6c8f33fee7d958fcd1b60d55b11940a79e63ae87013e6d22e26034440" +checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" [[package]] name = "hyper" -version = "0.14.12" +version = "0.14.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13f67199e765030fa08fe0bd581af683f0d5bc04ea09c2b1102012c5fb90e7fd" +checksum = "b7ec3e62bdc98a2f0393a5048e4c30ef659440ea6e0e572965103e72bd836f55" dependencies = [ "bytes", "futures-channel", @@ -981,7 +1019,7 @@ dependencies = [ "httpdate", "itoa 0.4.8", "pin-project-lite", - "socket2 0.4.1", + "socket2 0.4.2", "tokio", "tower-service", "tracing", @@ -990,17 +1028,15 @@ dependencies = [ [[package]] name = "hyper-rustls" -version = "0.22.1" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f9f7a97316d44c0af9b0301e65010573a853a9fc97046d7331d7f6bc0fd5a64" +checksum = "d87c48c02e0dc5e3b849a2041db3029fd066650f8f717c07bf8ed78ccb895cac" dependencies = [ - "futures-util", + "http", "hyper", - "log", - "rustls", + "rustls 0.20.2", "tokio", - "tokio-rustls", - "webpki 0.21.4", + "tokio-rustls 0.23.2", ] [[package]] @@ -1033,9 +1069,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "1.7.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc633605454125dec4b66843673f01c7df2b89479b32e0ed634e43a91cff62a5" +checksum = "282a6247722caba404c065016bbfa522806e51714c34f5dfc3e4a3a46fcb4223" dependencies = [ "autocfg", "hashbrown", @@ -1053,15 +1089,15 @@ dependencies = [ [[package]] name = "inlinable_string" -version = "0.1.14" +version = "0.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3094308123a0e9fd59659ce45e22de9f53fc1d2ac6e1feb9fef988e4f76cad77" +checksum = "c8fae54786f62fb2918dcfae3d568594e50eb9b5c25bf04371af6fe7516452fb" [[package]] name = "instant" -version = "0.1.10" +version = "0.1.12" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "bee0328b1209d157ef001c94dd85b4f8f64139adb0eac2659f4b08382b2f474d" +checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" dependencies = [ "cfg-if 1.0.0", ] @@ -1092,18 +1128,9 @@ checksum = "68f2d64f2edebec4ce84ad108148e67e1064789bee435edc5b60ad398714a3a9" [[package]] name = "itertools" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "284f18f85651fe11e8a991b2adb42cb078325c996ed026d994719efcfca1d54b" -dependencies = [ - "either", -] - -[[package]] -name = "itertools" -version = "0.10.1" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69ddb889f9d0d08a67338271fa9b62996bc788c7796a5c18cf057420aaed5eaf" +checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3" dependencies = [ "either", ] @@ -1137,9 +1164,9 @@ checksum = "229d53d58899083193af11e15917b5640cd40b29ff475a1fe4ef725deb02d0f2" [[package]] name = "js-sys" -version = "0.3.53" +version = "0.3.55" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4bf49d50e2961077d9c99f4b7997d770a1114f087c3c2e0069b36c13fc2979d" +checksum = "7cc9ffccd38c451a86bf13657df244e9c3f37493cce8e5e21e940963777acc84" dependencies = [ "wasm-bindgen", ] @@ -1173,11 +1200,39 @@ version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" +[[package]] +name = "lazycell" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" + [[package]] name = "libc" -version = "0.2.101" +version = "0.2.112" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cb00336871be5ed2c8ed44b60ae9959dc5b9f08539422ed43f09e34ecaeba21" +checksum = "1b03d17f364a3a042d5e5d46b053bbbf82c92c9430c592dd4c064dc6ee997125" + +[[package]] +name = "libloading" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "afe203d669ec979b7128619bae5a63b7b42e9203c1b29146079ee05e2f604b52" +dependencies = [ + "cfg-if 1.0.0", + "winapi", +] + +[[package]] +name = "librocksdb-sys" +version = "6.20.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c309a9d2470844aceb9a4a098cf5286154d20596868b75a6b36357d2bb9ca25d" +dependencies = [ + "bindgen", + "cc", + "glob", + "libc", +] [[package]] name = "libsqlite3-sys" @@ -1227,15 +1282,17 @@ dependencies = [ [[package]] name = "loom" -version = "0.5.1" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2111607c723d7857e0d8299d5ce7a0bf4b844d3e44f8de136b13da513eaf8fc4" +checksum = "edc5c7d328e32cc4954e8e01193d7f0ef5ab257b5090b70a964e099a36034309" dependencies = [ "cfg-if 1.0.0", "generator", "scoped-tls", "serde", "serde_json", + "tracing", + "tracing-subscriber 0.3.5", ] [[package]] @@ -1268,6 +1325,15 @@ dependencies = [ "regex-automata", ] +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata", +] + [[package]] name = "matches" version = "0.1.9" @@ -1282,9 +1348,9 @@ checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a" [[package]] name = "memoffset" -version = "0.6.4" +version = "0.6.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "59accc507f1338036a0477ef61afdae33cde60840f4dfe481319ce3ad116ddf9" +checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce" dependencies = [ "autocfg", ] @@ -1295,6 +1361,12 @@ version = "0.3.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d" +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + [[package]] name = "miniz_oxide" version = "0.3.7" @@ -1306,9 +1378,9 @@ dependencies = [ [[package]] name = "mio" -version = "0.7.13" +version = "0.7.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c2bdb6314ec10835cd3293dd268473a835c02b7b352e788be788b3c6ca6bb16" +checksum = "8067b404fe97c70829f082dec8bcf4f71225d7eaea1d8645349cb76fa06205cc" dependencies = [ "libc", "log", @@ -1328,9 +1400,9 @@ dependencies = [ [[package]] name = "multer" -version = "2.0.1" +version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "408327e2999b839cd1af003fc01b2019a6c10a1361769542203f6fedc5179680" +checksum = "5f8f35e687561d5c1667590911e6698a8cb714a134a7505718a182e7bc9d3836" dependencies = [ "bytes", "encoding_rs", @@ -1338,11 +1410,22 @@ dependencies = [ "http", "httparse", "log", + "memchr", "mime", "spin 0.9.2", "tokio", "tokio-util", - "twoway", + "version_check", +] + +[[package]] +name = "nom" +version = "7.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b1d11e1ef389c76fe5b81bcaf2ea32cf88b62bc494e19f493d0b30e7a930109" +dependencies = [ + "memchr", + "minimal-lexical", "version_check", ] @@ -1409,9 +1492,9 @@ dependencies = [ [[package]] name = "num_cpus" -version = "1.13.0" +version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3" +checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1" dependencies = [ "hermit-abi", "libc", @@ -1419,9 +1502,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.8.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "692fcb63b64b1758029e0a96ee63e049ce8c5948587f2f7208df04625e5f6b56" +checksum = "da32515d9f6e6e489d7bc9d84c71b060db7247dc035bbe44eac88cf87486d8d5" [[package]] name = "opaque-debug" @@ -1431,9 +1514,9 @@ checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" [[package]] name = "openssl-probe" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28988d872ab76095a6e6ac88d99b54fd267702734fd7ffe610ca27f533ddb95a" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "opentelemetry" @@ -1545,6 +1628,12 @@ dependencies = [ "syn", ] +[[package]] +name = "peeking_take_while" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099" + [[package]] name = "pem" version = "0.8.3" @@ -1564,18 +1653,18 @@ checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" [[package]] name = "pin-project" -version = "1.0.8" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"576bc800220cc65dac09e99e97b08b358cfab6e17078de8dc5fee223bd2d0c08" +checksum = "58ad3879ad3baf4e44784bc6a718a8698867bb991f8ce24d1bcbe2cfb4c3a75e" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.0.8" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e8fe8163d14ce7f0cdac2e040116f22eac817edabff0be91e8aff7e9accf389" +checksum = "744b6f092ba29c3650faf274db506afd39944f48420f6c86b17cfe0ee1cb36bb" dependencies = [ "proc-macro2", "quote", @@ -1584,9 +1673,9 @@ dependencies = [ [[package]] name = "pin-project-lite" -version = "0.2.7" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d31d11c69a6b52a174b42bdc0c30e5e11670f90788b2c471c31c1d17d449443" +checksum = "e280fbe77cc62c91527259e9442153f4688736748d24660126286329742b4c6c" [[package]] name = "pin-utils" @@ -1596,9 +1685,9 @@ checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] name = "pkcs8" -version = "0.7.5" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbee84ed13e44dd82689fa18348a49934fa79cc774a344c42fc9b301c71b140a" +checksum = "ee3ef9b64d26bad0536099c816c6734379e45bbd5f14798def6809e5cc350447" dependencies = [ "der", "spki", @@ -1607,9 +1696,9 @@ dependencies = [ [[package]] name = "pkg-config" -version = "0.3.19" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3831453b3449ceb48b6d9c7ad7c96d5ea673e9b470a1dc578c2ce6521230884c" +checksum = "58893f751c9b0412871a09abd62ecd2a00298c6c83befa223ef98c52aef40cbe" [[package]] name = "png" @@ -1625,15 +1714,15 @@ dependencies = [ [[package]] name = "ppv-lite86" -version = "0.2.10" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857" +checksum = "eb9f9e6e233e5c4a35559a617bf40a4ec447db2e84c20b55a6f83167b7e57872" [[package]] name = "proc-macro-crate" -version = "1.0.0" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41fdbd1df62156fbc5945f4762632564d7d038153091c3fcf1067f6aef7cff92" +checksum = "1ebace6889caf889b4d3f76becee12e90353f2b8c7d875534a71e5742f8f6f83" dependencies = [ "thiserror", "toml", @@ -1645,17 +1734,11 @@ version = "0.5.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5" -[[package]] -name = "proc-macro-nested" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc881b2c22681370c6a780e47af9840ef841837bc98118431d4e1868bd0c1086" - [[package]] name = "proc-macro2" -version = "1.0.29" +version = "1.0.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9f5105d4fdaab20335ca9565e106a5d9b82b6219b5ba735731124ac6711d23d" +checksum = "c7342d5883fbccae1cc37a2353b09c87c9b0f3afd73f5fb9bba687a1f733b029" dependencies = [ "unicode-xid", ] @@ -1681,9 +1764,9 @@ checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" [[package]] name = "quote" -version = "1.0.9" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3d0b9745dc2debf507c8422de05d7226cc1f0644216dfdfead988f9b1ab32a7" +checksum = "47aa80447ce4daf1717500037052af176af5d38cc3e571d9ec1c7353fc10c87d" dependencies = [ "proc-macro2", ] @@ -1845,15 +1928,16 @@ dependencies = [ [[package]] 
name = "reqwest" -version = "0.11.4" +version = "0.11.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "246e9f61b9bb77df069a947682be06e31ac43ea37862e244a69f177694ea6d22" +checksum = "87f242f1488a539a79bac6dbe7c8609ae43b7914b7736210f239a37cccb32525" dependencies = [ "base64 0.13.0", "bytes", "encoding_rs", "futures-core", "futures-util", + "h2", "http", "http-body", "hyper", @@ -1865,12 +1949,14 @@ dependencies = [ "mime", "percent-encoding", "pin-project-lite", - "rustls", - "rustls-native-certs", + "rustls 0.20.2", + "rustls-native-certs 0.6.1", + "rustls-pemfile", "serde", + "serde_json", "serde_urlencoded", "tokio", - "tokio-rustls", + "tokio-rustls 0.23.2", "tokio-socks", "url", "wasm-bindgen", @@ -1983,10 +2069,20 @@ dependencies = [ "state", "time 0.2.27", "tokio", - "tokio-rustls", + "tokio-rustls 0.22.0", "uncased", ] +[[package]] +name = "rocksdb" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c749134fda8bfc90d0de643d59bfc841dcb3ac8a1062e12b6754bd60235c48b3" +dependencies = [ + "libc", + "librocksdb-sys", +] + [[package]] name = "ruma" version = "0.4.0" @@ -2233,7 +2329,7 @@ name = "ruma-state-res" version = "0.4.1" source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" dependencies = [ - "itertools 0.10.1", + "itertools", "js_int", "ruma-common", "ruma-events", @@ -2247,9 +2343,9 @@ dependencies = [ [[package]] name = "rusqlite" -version = "0.25.3" +version = "0.25.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57adcf67c8faaf96f3248c2a7b419a0dbc52ebe36ba83dd57fe83827c1ea4eb3" +checksum = "5c4b1eaf239b47034fb450ee9cdedd7d0226571689d8823030c4b6c2cb407152" dependencies = [ "bitflags", "fallible-iterator", @@ -2269,9 +2365,15 @@ dependencies = [ "base64 0.13.0", "blake2b_simd", "constant_time_eq", - "crossbeam-utils 0.8.5", + "crossbeam-utils 0.8.6", ] +[[package]] +name = "rustc-hash" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" + [[package]] name = "rustc_version" version = "0.2.3" @@ -2290,10 +2392,22 @@ dependencies = [ "base64 0.13.0", "log", "ring", - "sct", + "sct 0.6.1", "webpki 0.21.4", ] +[[package]] +name = "rustls" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d37e5e2290f3e040b594b1a9e04377c2c671f1a1cfd9bfdef82106ac1c113f84" +dependencies = [ + "log", + "ring", + "sct 0.7.0", + "webpki 0.22.0", +] + [[package]] name = "rustls-native-certs" version = "0.5.0" @@ -2301,22 +2415,43 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a07b7c1885bd8ed3831c289b7870b13ef46fe0e856d288c30d9cc17d75a2092" dependencies = [ "openssl-probe", - "rustls", + "rustls 0.19.1", "schannel", "security-framework", ] +[[package]] +name = "rustls-native-certs" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ca9ebdfa27d3fc180e42879037b5338ab1c040c06affd00d8338598e7800943" +dependencies = [ + "openssl-probe", + "rustls-pemfile", + "schannel", + "security-framework", +] + +[[package]] +name = "rustls-pemfile" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5eebeaeb360c87bfb72e84abdb3447159c0eaececf1bef2aecd65a8be949d1c9" +dependencies = [ + "base64 0.13.0", +] + [[package]] name = "rustversion" -version = "1.0.5" 
+version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61b3909d758bb75c79f23d4736fac9433868679d3ad2ea7a61e3c25cfda9a088" +checksum = "f2cc38e8fa666e2de3c4aba7edeb5ffc5246c1c2ed0e3d17e560aeeba736b23f" [[package]] name = "ryu" -version = "1.0.5" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e" +checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f" [[package]] name = "schannel" @@ -2350,6 +2485,16 @@ dependencies = [ "untrusted", ] +[[package]] +name = "sct" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" +dependencies = [ + "ring", + "untrusted", +] + [[package]] name = "security-framework" version = "2.4.2" @@ -2390,18 +2535,18 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" [[package]] name = "serde" -version = "1.0.130" +version = "1.0.133" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f12d06de37cf59146fbdecab66aa99f9fe4f78722e3607577a5375d66bd0c913" +checksum = "97565067517b60e2d1ea8b268e59ce036de907ac523ad83a0475da04e818989a" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.130" +version = "1.0.133" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7bc1a1ab1961464eae040d96713baa5a724a8152c1222492465b54322ec508b" +checksum = "ed201699328568d8d08208fdd080e3ff594e6c422e438b6705905da01005d537" dependencies = [ "proc-macro2", "quote", @@ -2410,9 +2555,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.73" +version = "1.0.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcbd0344bc6533bc7ec56df11d42fb70f1b912351c0825ccb7211b59d8af7cf5" +checksum = "ee2bb9cd061c5865d345bb02ca49fcef1391741b672b54a0bf7b679badec3142" dependencies = [ "itoa 1.0.1", "ryu", @@ -2433,12 +2578,12 @@ dependencies = [ [[package]] name = "serde_yaml" -version = "0.8.20" +version = "0.8.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad104641f3c958dab30eb3010e834c2622d1f3f4c530fef1dee20ad9485f3c09" +checksum = "a4a521f2940385c165a24ee286aa8599633d162077a54bdcae2a6fd5a7bfa7a0" dependencies = [ - "dtoa", "indexmap", + "ryu", "serde", "yaml-rust", ] @@ -2464,9 +2609,9 @@ checksum = "2579985fda508104f7587689507983eadd6a6e84dd35d6d115361f530916fa0d" [[package]] name = "sha2" -version = "0.9.6" +version = "0.9.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9204c41a1597a8c5af23c82d1c921cb01ec0a4c59e07a9c7306062829a3903f3" +checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800" dependencies = [ "block-buffer", "cfg-if 1.0.0", @@ -2477,13 +2622,19 @@ dependencies = [ [[package]] name = "sharded-slab" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "740223c51853f3145fe7c90360d2d4232f2b62e3449489c207eccde818979982" +checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" dependencies = [ "lazy_static", ] +[[package]] +name = "shlex" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43b2853a4d09f215c24cc5489c992ce46052d359b5109343cbafbf26bc62f8a3" + [[package]] name = "signal-hook-registry" version = "1.4.0" @@ -2495,9 +2646,9 @@ dependencies 
= [ [[package]] name = "signature" -version = "1.3.1" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c19772be3c4dd2ceaacf03cb41d5885f2a02c4d8804884918e3a258480803335" +checksum = "f054c6c1a6e95179d6f23ed974060dcefb2d9388bb7256900badad682c499de4" [[package]] name = "simple_asn1" @@ -2512,19 +2663,19 @@ dependencies = [ [[package]] name = "slab" -version = "0.4.4" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c307a32c1c5c437f38c7fd45d753050587732ba8628319fbdf12a7e289ccc590" +checksum = "9def91fd1e018fe007022791f865d0ccc9b3a0d5001e01aabb8b40e46000afb5" [[package]] name = "sled" -version = "0.34.6" +version = "0.34.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d0132f3e393bcb7390c60bb45769498cf4550bcb7a21d7f95c02b69f6362cdc" +checksum = "7f96b4737c2ce5987354855aed3797279def4ebf734436c6aa4552cf8e169935" dependencies = [ "crc32fast", "crossbeam-epoch", - "crossbeam-utils 0.8.5", + "crossbeam-utils 0.8.6", "fs2", "fxhash", "libc", @@ -2535,9 +2686,9 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.6.1" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe0f37c9e8f3c5a4a66ad655a93c74daac4ad00c441533bf5c6e7990bb42604e" +checksum = "1ecab6c735a6bb4139c0caafd0cc3635748bbb3acf4550e8138122099251f309" [[package]] name = "socket2" @@ -2552,9 +2703,9 @@ dependencies = [ [[package]] name = "socket2" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "765f090f0e423d2b55843402a07915add955e7d60657db13707a159727326cad" +checksum = "5dc90fe6c7be1a323296982db1836d1ea9e47b6839496dde9a541bc496df3516" dependencies = [ "libc", "winapi", @@ -2574,9 +2725,9 @@ checksum = "511254be0c5bcf062b019a6c89c01a664aa359ded62f78aa72c6fc137c0590e5" [[package]] name = "spki" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "987637c5ae6b3121aba9d513f869bd2bff11c4cc086c22473befd6649c0bd521" +checksum = "5c01a0c15da1b0b0e1494112e7af814a678fec9bd157881b49beac661e9b6f32" dependencies = [ "der", ] @@ -2665,9 +2816,9 @@ checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" [[package]] name = "syn" -version = "1.0.75" +version = "1.0.85" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7f58f7e8eaa0009c5fec437aabf511bd9933e4b2d7407bd05273c01a8906ea7" +checksum = "a684ac3dcd8913827e18cd09a68384ee66c1de24157e3c556c9ab16d85695fb7" dependencies = [ "proc-macro2", "quote", @@ -2685,9 +2836,9 @@ dependencies = [ [[package]] name = "synstructure" -version = "0.12.5" +version = "0.12.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "474aaa926faa1603c40b7885a9eaea29b444d1cb2850cb7c0e37bb1a4182f4fa" +checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" dependencies = [ "proc-macro2", "quote", @@ -2697,13 +2848,13 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.2.0" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dac1c663cfc93810f88aed9b8941d48cabf856a1b111c29a40439018d870eb22" +checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4" dependencies = [ "cfg-if 1.0.0", + "fastrand", "libc", - "rand 0.8.4", "redox_syscall", "remove_dir_all", "winapi", @@ -2711,18 +2862,18 @@ dependencies = [ [[package]] name = "thiserror" -version = 
"1.0.28" +version = "1.0.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "283d5230e63df9608ac7d9691adc1dfb6e701225436eb64d0b9a7f0a5a04f6ec" +checksum = "854babe52e4df1653706b98fcfc05843010039b406875930a70e4d9644e5c417" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.28" +version = "1.0.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa3884228611f5cd3608e2d409bf7dce832e4eb3135e3f11addbd7e41bd68e71" +checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b" dependencies = [ "proc-macro2", "quote", @@ -2810,9 +2961,9 @@ dependencies = [ [[package]] name = "tinyvec" -version = "1.3.1" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "848a1e1181b9f6753b5e96a092749e29b11d19ede67dfbbd6c7dc7e0f49b5338" +checksum = "2c1c1d5a42b6245520c249549ec267180beaffcc0615401ac8e31853d4b6d8d2" dependencies = [ "tinyvec_macros", ] @@ -2825,11 +2976,10 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" [[package]] name = "tokio" -version = "1.11.0" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4efe6fc2395938c8155973d7be49fe8d03a843726e285e100a8a383cc0154ce" +checksum = "fbbf1c778ec206785635ce8ad57fe52b3009ae9e0c9f574a728f3049d3e55838" dependencies = [ - "autocfg", "bytes", "libc", "memchr", @@ -2844,9 +2994,9 @@ dependencies = [ [[package]] name = "tokio-macros" -version = "1.3.0" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54473be61f4ebe4efd09cec9bd5d16fa51d70ea0192213d754d2d500457db110" +checksum = "b557f72f448c511a979e2564e55d74e6c4432fc96ff4f6241bc6bded342643b7" dependencies = [ "proc-macro2", "quote", @@ -2859,11 +3009,22 @@ version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bc6844de72e57df1980054b38be3a9f4702aba4858be64dd700181a8a6d0e1b6" dependencies = [ - "rustls", + "rustls 0.19.1", "tokio", "webpki 0.21.4", ] +[[package]] +name = "tokio-rustls" +version = "0.23.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a27d5f2b839802bd8267fa19b0530f5a08b9c08cd417976be2a65d130fe1c11b" +dependencies = [ + "rustls 0.20.2", + "tokio", + "webpki 0.22.0", +] + [[package]] name = "tokio-socks" version = "0.5.1" @@ -2878,9 +3039,9 @@ dependencies = [ [[package]] name = "tokio-stream" -version = "0.1.7" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b2f3f698253f03119ac0102beaa64f67a67e08074d03a22d18784104543727f" +checksum = "50145484efff8818b5ccd256697f36863f587da82cf8b409c53adf1e840798e3" dependencies = [ "futures-core", "pin-project-lite", @@ -2889,9 +3050,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.6.7" +version = "0.6.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1caa0b0c8d94a049db56b5acf8cba99dc0623aab1b26d5b5f5e2d945846b3592" +checksum = "9e99e1983e5d376cd8eb4b66604d2e99e79f5bd988c3055891dcd8c9e2604cc0" dependencies = [ "bytes", "futures-core", @@ -2918,9 +3079,9 @@ checksum = "360dfd1d6d30e05fda32ace2c8c70e9c0a9da713275777f5a4dbb8a1893930c6" [[package]] name = "tracing" -version = "0.1.26" +version = "0.1.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09adeb8c97449311ccd28a427f96fb563e7fd31aabf994189879d9da2394b89d" +checksum = 
"375a639232caf30edfc78e8d89b2d4c375515393e7af7e16f01cd96917fb2105" dependencies = [ "cfg-if 1.0.0", "pin-project-lite", @@ -2930,9 +3091,9 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.15" +version = "0.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c42e6fa53307c8a17e4ccd4dc81cf5ec38db9209f59b222210375b54ee40d1e2" +checksum = "f4f480b8f81512e825f337ad51e94c1eb5d3bbdf2b363dcd01e2b19a9ffe3f8e" dependencies = [ "proc-macro2", "quote", @@ -2941,9 +3102,9 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.19" +version = "0.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ca517f43f0fb96e0c3072ed5c275fe5eece87e8cb52f4a77b69226d3b1c9df8" +checksum = "1f4ed65637b8390770814083d20756f87bfa2c21bf2f110babdc5438351746e4" dependencies = [ "lazy_static", ] @@ -2956,7 +3117,7 @@ checksum = "bd520fe41c667b437952383f3a1ec14f1fa45d653f719a77eedd6e6a02d8fa54" dependencies = [ "lazy_static", "tracing", - "tracing-subscriber", + "tracing-subscriber 0.2.25", ] [[package]] @@ -2982,14 +3143,14 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.2.20" +version = "0.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9cbe87a2fa7e35900ce5de20220a582a9483a7063811defce79d7cbd59d4cfe" +checksum = "0e0d2eaa99c3c2e41547cfa109e910a68ea03823cccad4a0525dcbc9b01e8c71" dependencies = [ "ansi_term", "chrono", "lazy_static", - "matchers", + "matchers 0.0.1", "regex", "serde", "serde_json", @@ -3002,6 +3163,24 @@ dependencies = [ "tracing-serde", ] +[[package]] +name = "tracing-subscriber" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d81bfa81424cc98cb034b837c985b7a290f592e5b4322f353f94a0ab0f9f594" +dependencies = [ + "ansi_term", + "lazy_static", + "matchers 0.1.0", + "regex", + "sharded-slab", + "smallvec", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", +] + [[package]] name = "trust-dns-proto" version = "0.20.3" @@ -3053,21 +3232,11 @@ version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "59547bce71d9c38b83d9c0e92b6066c4253371f15005def0c30d9657f50c7642" -[[package]] -name = "twoway" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c57ffb460d7c24cd6eda43694110189030a3d1dfe418416d9468fd1c1d290b47" -dependencies = [ - "memchr", - "unchecked-index", -] - [[package]] name = "typenum" -version = "1.14.0" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b63708a265f51345575b27fe43f9500ad611579e764c79edbc2037b1121959ec" +checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987" [[package]] name = "ubyte" @@ -3088,17 +3257,11 @@ dependencies = [ "version_check", ] -[[package]] -name = "unchecked-index" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eeba86d422ce181a719445e51872fa30f1f7413b62becb52e95ec91aa262d85c" - [[package]] name = "unicode-bidi" -version = "0.3.6" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "246f4c42e67e7a4e3c6106ff716a5d067d4132a642840b242e357e468a2a0085" +checksum = "1a01404663e3db436ed2746d9fefef640d868edae3cceb81c3b8d5732fda678f" [[package]] name = "unicode-normalization" @@ -3153,9 +3316,9 @@ checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" [[package]] name = "version_check" 
-version = "0.9.3" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fecdca9a5291cc2b8dcf7dc02453fee791a280f3743cb0905f8822ae463b3fe" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" [[package]] name = "want" @@ -3181,21 +3344,19 @@ checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" [[package]] name = "wasm-bindgen" -version = "0.2.76" +version = "0.2.78" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ce9b1b516211d33767048e5d47fa2a381ed8b76fc48d2ce4aa39877f9f183e0" +checksum = "632f73e236b219150ea279196e54e610f5dbafa5d61786303d4da54f84e47fce" dependencies = [ "cfg-if 1.0.0", - "serde", - "serde_json", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" -version = "0.2.76" +version = "0.2.78" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfe8dc78e2326ba5f845f4b5bf548401604fa20b1dd1d365fb73b6c1d6364041" +checksum = "a317bf8f9fba2476b4b2c85ef4c4af8ff39c3c7f0cdfeed4f82c34a880aa837b" dependencies = [ "bumpalo", "lazy_static", @@ -3208,9 +3369,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-futures" -version = "0.4.26" +version = "0.4.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95fded345a6559c2cfee778d562300c581f7d4ff3edb9b0d230d69800d213972" +checksum = "8e8d7523cb1f2a4c96c1317ca690031b714a51cc14e05f712446691f413f5d39" dependencies = [ "cfg-if 1.0.0", "js-sys", @@ -3220,9 +3381,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.76" +version = "0.2.78" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44468aa53335841d9d6b6c023eaab07c0cd4bddbcfdee3e2bb1e8d2cb8069fef" +checksum = "d56146e7c495528bf6587663bea13a8eb588d39b36b679d83972e1a2dbbdacf9" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -3230,9 +3391,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.76" +version = "0.2.78" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0195807922713af1e67dc66132c7328206ed9766af3858164fb583eedc25fbad" +checksum = "7803e0eea25835f8abdc585cd3021b3deb11543c6fe226dcd30b228857c5c5ab" dependencies = [ "proc-macro2", "quote", @@ -3243,15 +3404,15 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.76" +version = "0.2.78" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acdb075a845574a1fa5f09fd77e43f7747599301ea3417a9fbffdeedfc1f4a29" +checksum = "0237232789cf037d5480773fe568aac745bfe2afbc11a863e97901780a6b47cc" [[package]] name = "web-sys" -version = "0.3.53" +version = "0.3.55" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "224b2f6b67919060055ef1a67807367c2066ed520c3862cc013d26cf893a783c" +checksum = "38eb105f1c59d9eaa6b5cdc92b859d85b926e82cb2e0945cd0c9259faa6fe9fb" dependencies = [ "js-sys", "wasm-bindgen", @@ -3352,18 +3513,18 @@ checksum = "9fc79f4a1e39857fc00c3f662cbf2651c771f00e9c15fe2abc341806bd46bd71" [[package]] name = "zeroize" -version = "1.4.1" +version = "1.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "377db0846015f7ae377174787dd452e1c5f5a9050bc6f954911d01f116daa0cd" +checksum = "d68d9dcec5f9b43a30d38c49f91dfedfaac384cb8f085faca366c26207dd1619" dependencies = [ "zeroize_derive", ] [[package]] name = "zeroize_derive" -version = "1.1.0" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"a2c1e130bebaeab2f23886bf9acbaca14b092408c452543c857f66399cd6dab1" +checksum = "65f1a51723ec88c66d5d1fe80c841f17f63587d6691901d66be9bec6c3b51f73" dependencies = [ "proc-macro2", "quote", @@ -3373,18 +3534,18 @@ dependencies = [ [[package]] name = "zstd" -version = "0.5.4+zstd.1.4.7" +version = "0.9.2+zstd.1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69996ebdb1ba8b1517f61387a883857818a66c8a295f487b1ffd8fd9d2c82910" +checksum = "2390ea1bf6c038c39674f22d95f0564725fc06034a47129179810b2fc58caa54" dependencies = [ "zstd-safe", ] [[package]] name = "zstd-safe" -version = "2.0.6+zstd.1.4.7" +version = "4.1.3+zstd.1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "98aa931fb69ecee256d44589d19754e61851ae4769bf963b385119b1cc37a49e" +checksum = "e99d81b99fb3c2c2c794e3fe56c305c63d5173a16a46b5850b07c935ffc7db79" dependencies = [ "libc", "zstd-sys", @@ -3392,12 +3553,10 @@ dependencies = [ [[package]] name = "zstd-sys" -version = "1.4.18+zstd.1.4.7" +version = "1.6.2+zstd.1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1e6e8778706838f43f771d80d37787cb2fe06dafe89dd3aebaf6721b9eaec81" +checksum = "2daf2f248d9ea44454bfcb2516534e8b8ad2fc91bf818a1885495fc42bc8ac9f" dependencies = [ "cc", - "glob", - "itertools 0.9.0", "libc", ] diff --git a/Cargo.toml b/Cargo.toml index 5e09dee0..5cc6a83c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -78,6 +78,7 @@ crossbeam = { version = "0.8.1", optional = true } num_cpus = "1.13.0" threadpool = "1.8.1" heed = { git = "https://github.com/timokoesters/heed.git", rev = "f6f825da7fb2c758867e05ad973ef800a6fe1d5d", optional = true } +rocksdb = { version = "0.16.0", features = ["multi-threaded-cf"], optional = true } thread_local = "1.1.3" # used for TURN server authentication hmac = "0.11.0" @@ -87,7 +88,8 @@ sha-1 = "0.9.8" default = ["conduit_bin", "backend_sqlite"] backend_sled = ["sled"] backend_sqlite = ["sqlite"] -backend_heed = ["heed", "crossbeam", "parking_lot"] +backend_heed = ["heed", "crossbeam"] +backend_rocksdb = ["rocksdb"] sqlite = ["rusqlite", "parking_lot", "crossbeam", "tokio/signal"] conduit_bin = [] # TODO: add rocket to this when it is optional diff --git a/src/database.rs b/src/database.rs index 8b29b221..4c377f06 100644 --- a/src/database.rs +++ b/src/database.rs @@ -154,6 +154,9 @@ pub type Engine = abstraction::sqlite::Engine; #[cfg(feature = "heed")] pub type Engine = abstraction::heed::Engine; +#[cfg(feature = "rocksdb")] +pub type Engine = abstraction::rocksdb::Engine; + pub struct Database { _db: Arc, pub globals: globals::Globals, @@ -314,10 +317,10 @@ impl Database { .expect("pdu cache capacity fits into usize"), )), auth_chain_cache: Mutex::new(LruCache::new(1_000_000)), - shorteventid_cache: Mutex::new(LruCache::new(1_000_000)), - eventidshort_cache: Mutex::new(LruCache::new(1_000_000)), - shortstatekey_cache: Mutex::new(LruCache::new(1_000_000)), - statekeyshort_cache: Mutex::new(LruCache::new(1_000_000)), + shorteventid_cache: Mutex::new(LruCache::new(100_000_000)), + eventidshort_cache: Mutex::new(LruCache::new(100_000_000)), + shortstatekey_cache: Mutex::new(LruCache::new(100_000_000)), + statekeyshort_cache: Mutex::new(LruCache::new(100_000_000)), our_real_users_cache: RwLock::new(HashMap::new()), appservice_in_room_cache: RwLock::new(HashMap::new()), stateinfo_cache: Mutex::new(LruCache::new(1000)), diff --git a/src/database/abstraction.rs b/src/database/abstraction.rs index 67b80d1a..a347f831 100644 --- 
a/src/database/abstraction.rs +++ b/src/database/abstraction.rs @@ -12,7 +12,10 @@ pub mod sqlite; #[cfg(feature = "heed")] pub mod heed; -#[cfg(any(feature = "sqlite", feature = "heed"))] +#[cfg(feature = "rocksdb")] +pub mod rocksdb; + +#[cfg(any(feature = "sqlite", feature = "rocksdb", feature = "heed"))] pub mod watchers; pub trait DatabaseEngine: Sized { diff --git a/src/database/abstraction/rocksdb.rs b/src/database/abstraction/rocksdb.rs new file mode 100644 index 00000000..3ff6ab86 --- /dev/null +++ b/src/database/abstraction/rocksdb.rs @@ -0,0 +1,183 @@ +use super::super::Config; +use crate::{utils, Result}; + +use std::{future::Future, pin::Pin, sync::Arc}; + +use super::{DatabaseEngine, Tree}; + +use std::{collections::HashMap, sync::RwLock}; + +pub struct Engine { + rocks: rocksdb::DBWithThreadMode, + old_cfs: Vec, +} + +pub struct RocksDbEngineTree<'a> { + db: Arc, + name: &'a str, + watchers: Watchers, +} + +impl DatabaseEngine for Engine { + fn open(config: &Config) -> Result> { + let mut db_opts = rocksdb::Options::default(); + db_opts.create_if_missing(true); + db_opts.set_max_open_files(16); + db_opts.set_compaction_style(rocksdb::DBCompactionStyle::Level); + db_opts.set_compression_type(rocksdb::DBCompressionType::Snappy); + db_opts.set_target_file_size_base(256 << 20); + db_opts.set_write_buffer_size(256 << 20); + + let mut block_based_options = rocksdb::BlockBasedOptions::default(); + block_based_options.set_block_size(512 << 10); + db_opts.set_block_based_table_factory(&block_based_options); + + let cfs = rocksdb::DBWithThreadMode::::list_cf( + &db_opts, + &config.database_path, + ) + .unwrap_or_default(); + + let db = rocksdb::DBWithThreadMode::::open_cf_descriptors( + &db_opts, + &config.database_path, + cfs.iter().map(|name| { + let mut options = rocksdb::Options::default(); + let prefix_extractor = rocksdb::SliceTransform::create_fixed_prefix(1); + options.set_prefix_extractor(prefix_extractor); + options.set_merge_operator_associative("increment", utils::increment_rocksdb); + + rocksdb::ColumnFamilyDescriptor::new(name, options) + }), + )?; + + Ok(Arc::new(Engine { + rocks: db, + old_cfs: cfs, + })) + } + + fn open_tree(self: &Arc, name: &'static str) -> Result> { + if !self.old_cfs.contains(&name.to_owned()) { + // Create if it didn't exist + let mut options = rocksdb::Options::default(); + let prefix_extractor = rocksdb::SliceTransform::create_fixed_prefix(1); + options.set_prefix_extractor(prefix_extractor); + options.set_merge_operator_associative("increment", utils::increment_rocksdb); + + let _ = self.rocks.create_cf(name, &options); + println!("created cf"); + } + + Ok(Arc::new(RocksDbEngineTree { + name, + db: Arc::clone(self), + watchers: Watchers::default(), + })) + } + + fn flush(self: &Arc) -> Result<()> { + // TODO? + Ok(()) + } +} + +impl RocksDbEngineTree<'_> { + fn cf(&self) -> rocksdb::BoundColumnFamily<'_> { + self.db.rocks.cf_handle(self.name).unwrap() + } +} + +impl Tree for RocksDbEngineTree<'_> { + fn get(&self, key: &[u8]) -> Result>> { + Ok(self.db.rocks.get_cf(self.cf(), key)?) + } + + fn insert(&self, key: &[u8], value: &[u8]) -> Result<()> { + self.db.rocks.put_cf(self.cf(), key, value)?; + self.watchers.wake(key); + Ok(()) + } + + fn insert_batch<'a>(&self, iter: &mut dyn Iterator, Vec)>) -> Result<()> { + for (key, value) in iter { + self.db.rocks.put_cf(self.cf(), key, value)?; + } + + Ok(()) + } + + fn remove(&self, key: &[u8]) -> Result<()> { + Ok(self.db.rocks.delete_cf(self.cf(), key)?) 
+ } + + fn iter<'a>(&'a self) -> Box, Vec)> + 'a> { + Box::new( + self.db + .rocks + .iterator_cf(self.cf(), rocksdb::IteratorMode::Start) + .map(|(k, v)| (Vec::from(k), Vec::from(v))), + ) + } + + fn iter_from<'a>( + &'a self, + from: &[u8], + backwards: bool, + ) -> Box, Vec)> + 'a> { + Box::new( + self.db + .rocks + .iterator_cf( + self.cf(), + rocksdb::IteratorMode::From( + from, + if backwards { + rocksdb::Direction::Reverse + } else { + rocksdb::Direction::Forward + }, + ), + ) + .map(|(k, v)| (Vec::from(k), Vec::from(v))), + ) + } + + fn increment(&self, key: &[u8]) -> Result> { + // TODO: make atomic + let old = self.db.rocks.get_cf(self.cf(), &key)?; + let new = utils::increment(old.as_deref()).unwrap(); + self.db.rocks.put_cf(self.cf(), key, &new)?; + Ok(new) + } + + fn increment_batch<'a>(&self, iter: &mut dyn Iterator>) -> Result<()> { + for key in iter { + let old = self.db.rocks.get_cf(self.cf(), &key)?; + let new = utils::increment(old.as_deref()).unwrap(); + self.db.rocks.put_cf(self.cf(), key, new)?; + } + + Ok(()) + } + + fn scan_prefix<'a>( + &'a self, + prefix: Vec, + ) -> Box, Vec)> + 'a> { + Box::new( + self.db + .rocks + .iterator_cf( + self.cf(), + rocksdb::IteratorMode::From(&prefix, rocksdb::Direction::Forward), + ) + .map(|(k, v)| (Vec::from(k), Vec::from(v))) + .take_while(move |(k, _)| k.starts_with(&prefix)), + ) + } + + fn watch_prefix<'a>(&'a self, prefix: &[u8]) -> Pin + Send + 'a>> { + self.watchers.watch(prefix) + } +} diff --git a/src/database/abstraction/sqlite.rs b/src/database/abstraction/sqlite.rs index 1e6a2d89..31875667 100644 --- a/src/database/abstraction/sqlite.rs +++ b/src/database/abstraction/sqlite.rs @@ -132,7 +132,7 @@ type TupleOfBytes = (Vec, Vec); impl SqliteTable { #[tracing::instrument(skip(self, guard, key))] fn get_with_guard(&self, guard: &Connection, key: &[u8]) -> Result>> { - //dbg!(&self.name); + dbg!(&self.name); Ok(guard .prepare(format!("SELECT value FROM {} WHERE key = ?", self.name).as_str())? .query_row([key], |row| row.get(0)) @@ -141,7 +141,7 @@ impl SqliteTable { #[tracing::instrument(skip(self, guard, key, value))] fn insert_with_guard(&self, guard: &Connection, key: &[u8], value: &[u8]) -> Result<()> { - //dbg!(&self.name); + dbg!(&self.name); guard.execute( format!( "INSERT OR REPLACE INTO {} (key, value) VALUES (?, ?)", @@ -168,14 +168,14 @@ impl SqliteTable { let statement_ref = NonAliasingBox(statement); - //let name = self.name.clone(); + let name = self.name.clone(); let iterator = Box::new( statement .query_map([], |row| Ok((row.get_unwrap(0), row.get_unwrap(1)))) .unwrap() .map(move |r| { - //dbg!(&name); + dbg!(&name); r.unwrap() }), ); @@ -263,7 +263,7 @@ impl Tree for SqliteTable { let guard = self.engine.read_lock_iterator(); let from = from.to_vec(); // TODO change interface? 
- //let name = self.name.clone(); + let name = self.name.clone(); if backwards { let statement = Box::leak(Box::new( @@ -282,7 +282,7 @@ impl Tree for SqliteTable { .query_map([from], |row| Ok((row.get_unwrap(0), row.get_unwrap(1)))) .unwrap() .map(move |r| { - //dbg!(&name); + dbg!(&name); r.unwrap() }), ); @@ -307,7 +307,7 @@ impl Tree for SqliteTable { .query_map([from], |row| Ok((row.get_unwrap(0), row.get_unwrap(1)))) .unwrap() .map(move |r| { - //dbg!(&name); + dbg!(&name); r.unwrap() }), ); diff --git a/src/error.rs b/src/error.rs index 7faddc91..4d427da4 100644 --- a/src/error.rs +++ b/src/error.rs @@ -39,6 +39,12 @@ pub enum Error { #[cfg(feature = "heed")] #[error("There was a problem with the connection to the heed database: {error}")] HeedError { error: String }, + #[cfg(feature = "rocksdb")] + #[error("There was a problem with the connection to the rocksdb database: {source}")] + RocksDbError { + #[from] + source: rocksdb::Error, + }, #[error("Could not generate an image.")] ImageError { #[from] diff --git a/src/utils.rs b/src/utils.rs index 26d71a8c..4702d051 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -29,6 +29,17 @@ pub fn increment(old: Option<&[u8]>) -> Option> { Some(number.to_be_bytes().to_vec()) } +#[cfg(feature = "rocksdb")] +pub fn increment_rocksdb( + _new_key: &[u8], + old: Option<&[u8]>, + _operands: &mut rocksdb::MergeOperands, +) -> Option> { + dbg!(_new_key); + dbg!(old); + increment(old) +} + pub fn generate_keypair() -> Vec { let mut value = random_string(8).as_bytes().to_vec(); value.push(0xff); From a30b588ede6135642946afd575a2411c6d0d21e2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20K=C3=B6sters?= Date: Fri, 10 Dec 2021 21:34:45 +0100 Subject: [PATCH 20/65] rocksdb as default --- Cargo.toml | 2 +- src/database.rs | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 5cc6a83c..0a2b4459 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -85,7 +85,7 @@ hmac = "0.11.0" sha-1 = "0.9.8" [features] -default = ["conduit_bin", "backend_sqlite"] +default = ["conduit_bin", "backend_rocksdb"] backend_sled = ["sled"] backend_sqlite = ["sqlite"] backend_heed = ["heed", "crossbeam"] diff --git a/src/database.rs b/src/database.rs index 4c377f06..af6136b3 100644 --- a/src/database.rs +++ b/src/database.rs @@ -317,10 +317,10 @@ impl Database { .expect("pdu cache capacity fits into usize"), )), auth_chain_cache: Mutex::new(LruCache::new(1_000_000)), - shorteventid_cache: Mutex::new(LruCache::new(100_000_000)), - eventidshort_cache: Mutex::new(LruCache::new(100_000_000)), - shortstatekey_cache: Mutex::new(LruCache::new(100_000_000)), - statekeyshort_cache: Mutex::new(LruCache::new(100_000_000)), + shorteventid_cache: Mutex::new(LruCache::new(1_000_000)), + eventidshort_cache: Mutex::new(LruCache::new(1_000_000)), + shortstatekey_cache: Mutex::new(LruCache::new(1_000_000)), + statekeyshort_cache: Mutex::new(LruCache::new(1_000_000)), our_real_users_cache: RwLock::new(HashMap::new()), appservice_in_room_cache: RwLock::new(HashMap::new()), stateinfo_cache: Mutex::new(LruCache::new(1000)), From c9c99746412155fcdce6a6430bd5ef9c567cc3fe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20K=C3=B6sters?= Date: Thu, 16 Dec 2021 14:52:19 +0100 Subject: [PATCH 21/65] fix: stack overflows when fetching auth events --- src/database/abstraction/rocksdb.rs | 18 ++-- src/server_server.rs | 145 +++++++++++++++------------- 2 files changed, 88 insertions(+), 75 deletions(-) diff --git a/src/database/abstraction/rocksdb.rs 
b/src/database/abstraction/rocksdb.rs index 3ff6ab86..825c02e0 100644 --- a/src/database/abstraction/rocksdb.rs +++ b/src/database/abstraction/rocksdb.rs @@ -22,14 +22,20 @@ impl DatabaseEngine for Engine { fn open(config: &Config) -> Result> { let mut db_opts = rocksdb::Options::default(); db_opts.create_if_missing(true); - db_opts.set_max_open_files(16); + db_opts.set_max_open_files(512); db_opts.set_compaction_style(rocksdb::DBCompactionStyle::Level); - db_opts.set_compression_type(rocksdb::DBCompressionType::Snappy); - db_opts.set_target_file_size_base(256 << 20); - db_opts.set_write_buffer_size(256 << 20); + db_opts.set_compression_type(rocksdb::DBCompressionType::Zstd); + db_opts.set_target_file_size_base(2 << 22); + db_opts.set_max_bytes_for_level_base(2 << 24); + db_opts.set_max_bytes_for_level_multiplier(2.0); + db_opts.set_num_levels(8); + db_opts.set_write_buffer_size(2 << 27); + + let rocksdb_cache = rocksdb::Cache::new_lru_cache((config.db_cache_capacity_mb * 1024.0 * 1024.0) as usize).unwrap(); let mut block_based_options = rocksdb::BlockBasedOptions::default(); - block_based_options.set_block_size(512 << 10); + block_based_options.set_block_size(2 << 19); + block_based_options.set_block_cache(&rocksdb_cache); db_opts.set_block_based_table_factory(&block_based_options); let cfs = rocksdb::DBWithThreadMode::::list_cf( @@ -45,7 +51,6 @@ impl DatabaseEngine for Engine { let mut options = rocksdb::Options::default(); let prefix_extractor = rocksdb::SliceTransform::create_fixed_prefix(1); options.set_prefix_extractor(prefix_extractor); - options.set_merge_operator_associative("increment", utils::increment_rocksdb); rocksdb::ColumnFamilyDescriptor::new(name, options) }), @@ -63,7 +68,6 @@ impl DatabaseEngine for Engine { let mut options = rocksdb::Options::default(); let prefix_extractor = rocksdb::SliceTransform::create_fixed_prefix(1); options.set_prefix_extractor(prefix_extractor); - options.set_merge_operator_associative("increment", utils::increment_rocksdb); let _ = self.rocks.create_cf(name, &options); println!("created cf"); diff --git a/src/server_server.rs b/src/server_server.rs index 594152ae..d6bc9b91 100644 --- a/src/server_server.rs +++ b/src/server_server.rs @@ -1392,12 +1392,11 @@ async fn upgrade_outlier_to_timeline_pdu( let mut starting_events = Vec::with_capacity(leaf_state.len()); for (k, id) in leaf_state { - let k = db - .rooms - .get_statekey_from_short(k) - .map_err(|_| "Failed to get_statekey_from_short.".to_owned())?; - - state.insert(k, id.clone()); + if let Ok(k) = db.rooms.get_statekey_from_short(k) { + state.insert(k, id.clone()); + } else { + warn!("Failed to get_statekey_from_short."); + } starting_events.push(id); } @@ -1755,11 +1754,16 @@ async fn upgrade_outlier_to_timeline_pdu( .into_iter() .map(|map| { map.into_iter() - .map(|(k, id)| db.rooms.get_statekey_from_short(k).map(|k| (k, id))) - .collect::>>() + .filter_map(|(k, id)| { + db.rooms + .get_statekey_from_short(k) + .map(|k| (k, id)) + .map_err(|e| warn!("Failed to get_statekey_from_short: {}", e)) + .ok() + }) + .collect::>() }) - .collect::>() - .map_err(|_| "Failed to get_statekey_from_short.".to_owned())?; + .collect(); let state = match state_res::resolve( room_version_id, @@ -1871,73 +1875,78 @@ pub(crate) fn fetch_and_handle_outliers<'a>( // a. Look in the main timeline (pduid_pdu tree) // b. 
Look at outlier pdu tree // (get_pdu_json checks both) - let local_pdu = db.rooms.get_pdu(id); - let pdu = match local_pdu { - Ok(Some(pdu)) => { + if let Ok(Some(local_pdu)) = db.rooms.get_pdu(id) { + trace!("Found {} in db", id); + pdus.push((local_pdu, None)); + } + + // c. Ask origin server over federation + // We also handle its auth chain here so we don't get a stack overflow in + // handle_outlier_pdu. + let mut todo_auth_events = vec![id]; + let mut events_in_reverse_order = Vec::new(); + while let Some(next_id) = todo_auth_events.pop() { + if let Ok(Some(_)) = db.rooms.get_pdu(next_id) { trace!("Found {} in db", id); - (pdu, None) + continue; } - Ok(None) => { - // c. Ask origin server over federation - warn!("Fetching {} over federation.", id); - match db - .sending - .send_federation_request( - &db.globals, - origin, - get_event::v1::Request { event_id: id }, - ) - .await - { - Ok(res) => { - warn!("Got {} over federation", id); - let (calculated_event_id, value) = - match crate::pdu::gen_event_id_canonical_json(&res.pdu) { - Ok(t) => t, - Err(_) => { - back_off((**id).to_owned()); - continue; - } - }; - if calculated_event_id != **id { - warn!("Server didn't return event id we requested: requested: {}, we got {}. Event: {:?}", - id, calculated_event_id, &res.pdu); - } - - // This will also fetch the auth chain - match handle_outlier_pdu( - origin, - create_event, - id, - room_id, - value.clone(), - db, - pub_key_map, - ) - .await - { - Ok((pdu, json)) => (pdu, Some(json)), - Err(e) => { - warn!("Authentication of event {} failed: {:?}", id, e); - back_off((**id).to_owned()); + warn!("Fetching {} over federation.", next_id); + match db + .sending + .send_federation_request( + &db.globals, + origin, + get_event::v1::Request { event_id: next_id }, + ) + .await + { + Ok(res) => { + warn!("Got {} over federation", next_id); + let (calculated_event_id, value) = + match crate::pdu::gen_event_id_canonical_json(&res.pdu) { + Ok(t) => t, + Err(_) => { + back_off((**next_id).to_owned()); continue; } - } - } - Err(_) => { - warn!("Failed to fetch event: {}", id); - back_off((**id).to_owned()); - continue; + }; + + if calculated_event_id != **next_id { + warn!("Server didn't return event id we requested: requested: {}, we got {}. 
Event: {:?}", + next_id, calculated_event_id, &res.pdu); } + + events_in_reverse_order.push((next_id, value)); + } + Err(_) => { + warn!("Failed to fetch event: {}", next_id); + back_off((**next_id).to_owned()); } } - Err(e) => { - warn!("Error loading {}: {}", id, e); - continue; + } + + while let Some((next_id, value)) = events_in_reverse_order.pop() { + match handle_outlier_pdu( + origin, + create_event, + next_id, + room_id, + value.clone(), + db, + pub_key_map, + ) + .await + { + Ok((pdu, json)) => { + pdus.push((pdu, Some(json))); + } + Err(e) => { + warn!("Authentication of event {} failed: {:?}", next_id, e); + back_off((**next_id).to_owned()); + } } - }; - pdus.push(pdu); + } } pdus }) From 4b4afea2abb4289d6fa31e02bd2be2799f51e0ae Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20K=C3=B6sters?= Date: Thu, 16 Dec 2021 15:54:42 +0100 Subject: [PATCH 22/65] fix auth event fetching --- src/server_server.rs | 31 +++++++++++++++++++++++-------- 1 file changed, 23 insertions(+), 8 deletions(-) diff --git a/src/server_server.rs b/src/server_server.rs index d6bc9b91..28c3ea07 100644 --- a/src/server_server.rs +++ b/src/server_server.rs @@ -1878,15 +1878,16 @@ pub(crate) fn fetch_and_handle_outliers<'a>( if let Ok(Some(local_pdu)) = db.rooms.get_pdu(id) { trace!("Found {} in db", id); pdus.push((local_pdu, None)); + continue; } // c. Ask origin server over federation // We also handle its auth chain here so we don't get a stack overflow in // handle_outlier_pdu. - let mut todo_auth_events = vec![id]; + let mut todo_auth_events = vec![Arc::clone(id)]; let mut events_in_reverse_order = Vec::new(); while let Some(next_id) = todo_auth_events.pop() { - if let Ok(Some(_)) = db.rooms.get_pdu(next_id) { + if let Ok(Some(_)) = db.rooms.get_pdu(&next_id) { trace!("Found {} in db", id); continue; } @@ -1897,7 +1898,7 @@ pub(crate) fn fetch_and_handle_outliers<'a>( .send_federation_request( &db.globals, origin, - get_event::v1::Request { event_id: next_id }, + get_event::v1::Request { event_id: &next_id }, ) .await { @@ -1907,21 +1908,35 @@ pub(crate) fn fetch_and_handle_outliers<'a>( match crate::pdu::gen_event_id_canonical_json(&res.pdu) { Ok(t) => t, Err(_) => { - back_off((**next_id).to_owned()); + back_off((*next_id).to_owned()); continue; } }; - if calculated_event_id != **next_id { + if calculated_event_id != *next_id { warn!("Server didn't return event id we requested: requested: {}, we got {}. 
Event: {:?}", next_id, calculated_event_id, &res.pdu); } + + if let Some(auth_events) = value.get("auth_events").and_then(|c| c.as_array()) { + for auth_event in auth_events { + if let Some(Ok(auth_event)) = auth_event.as_str() + .map(|e| serde_json::from_str(e)) { + todo_auth_events.push(auth_event); + } else { + warn!("Auth event id is not valid"); + } + } + } else { + warn!("Auth event list invalid"); + } + events_in_reverse_order.push((next_id, value)); } Err(_) => { warn!("Failed to fetch event: {}", next_id); - back_off((**next_id).to_owned()); + back_off((*next_id).to_owned()); } } } @@ -1930,7 +1945,7 @@ pub(crate) fn fetch_and_handle_outliers<'a>( match handle_outlier_pdu( origin, create_event, - next_id, + &next_id, room_id, value.clone(), db, @@ -1943,7 +1958,7 @@ pub(crate) fn fetch_and_handle_outliers<'a>( } Err(e) => { warn!("Authentication of event {} failed: {:?}", next_id, e); - back_off((**next_id).to_owned()); + back_off((*next_id).to_owned()); } } } From 74951cb239b5ec7ef41ba080729bc93df046fb66 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20K=C3=B6sters?= Date: Thu, 16 Dec 2021 21:42:53 +0100 Subject: [PATCH 23/65] dbg --- src/server_server.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/server_server.rs b/src/server_server.rs index 28c3ea07..b6bea0c5 100644 --- a/src/server_server.rs +++ b/src/server_server.rs @@ -1922,8 +1922,9 @@ pub(crate) fn fetch_and_handle_outliers<'a>( if let Some(auth_events) = value.get("auth_events").and_then(|c| c.as_array()) { for auth_event in auth_events { if let Some(Ok(auth_event)) = auth_event.as_str() - .map(|e| serde_json::from_str(e)) { - todo_auth_events.push(auth_event); + .map(|e| {let ev: std::result::Result, _> = dbg!(serde_json::from_str(dbg!(e))); ev}) { + let a: Arc = auth_event; + todo_auth_events.push(a); } else { warn!("Auth event id is not valid"); } From 83a9095cdc3febd617d9bfd2d8cacf0fe3e89990 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20K=C3=B6sters?= Date: Thu, 16 Dec 2021 22:25:24 +0100 Subject: [PATCH 24/65] fix? 
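Each entry of a PDU's auth_events array is already a plain event id string.
The previous code took the string's contents and ran them through
serde_json::from_str, but a bare "$abc:example.org" is not a valid JSON
document, so parsing always failed and every auth event was rejected with
"Auth event id is not valid". Deserializing the JSON string value itself
works. A minimal sketch of the difference (the `entry` value here is
hypothetical, standing in for one element of auth_events):

    use std::sync::Arc;
    use ruma::EventId;

    // One element of auth_events: the JSON string "$abc:example.org".
    let entry = serde_json::json!("$abc:example.org");

    // Before: parse the contents of the string as its own JSON document.
    // "$abc:example.org" without quotes is not valid JSON, so this is
    // always Some(Err(..)).
    let _before = entry.as_str().map(serde_json::from_str::<Arc<EventId>>);

    // After: deserialize the JSON string value directly into an event id.
    let _after = serde_json::from_value::<Arc<EventId>>(entry.clone());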
--- src/server_server.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/server_server.rs b/src/server_server.rs index b6bea0c5..8c5c09f2 100644 --- a/src/server_server.rs +++ b/src/server_server.rs @@ -1921,8 +1921,7 @@ pub(crate) fn fetch_and_handle_outliers<'a>( if let Some(auth_events) = value.get("auth_events").and_then(|c| c.as_array()) { for auth_event in auth_events { - if let Some(Ok(auth_event)) = auth_event.as_str() - .map(|e| {let ev: std::result::Result, _> = dbg!(serde_json::from_str(dbg!(e))); ev}) { + if let Ok(auth_event) = serde_json::from_value(auth_event.clone().into()) { let a: Arc = auth_event; todo_auth_events.push(a); } else { From ee3d2db8e061bcdac43674aa050bcd3aad79d4a7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20K=C3=B6sters?= Date: Sun, 19 Dec 2021 10:48:28 +0100 Subject: [PATCH 25/65] improvement, maybe not safe --- src/server_server.rs | 25 ++++--------------------- 1 file changed, 4 insertions(+), 21 deletions(-) diff --git a/src/server_server.rs b/src/server_server.rs index 8c5c09f2..57f55867 100644 --- a/src/server_server.rs +++ b/src/server_server.rs @@ -1686,25 +1686,6 @@ async fn upgrade_outlier_to_timeline_pdu( // We do this by adding the current state to the list of fork states extremity_sstatehashes.remove(¤t_sstatehash); fork_states.push(current_state_ids); - dbg!(&extremity_sstatehashes); - - for (sstatehash, leaf_pdu) in extremity_sstatehashes { - let mut leaf_state = db - .rooms - .state_full_ids(sstatehash) - .map_err(|_| "Failed to ask db for room state.".to_owned())?; - - if let Some(state_key) = &leaf_pdu.state_key { - let shortstatekey = db - .rooms - .get_or_create_shortstatekey(&leaf_pdu.kind, state_key, &db.globals) - .map_err(|_| "Failed to create shortstatekey.".to_owned())?; - leaf_state.insert(shortstatekey, Arc::from(&*leaf_pdu.event_id)); - // Now it's the state after the pdu - } - - fork_states.push(leaf_state); - } // We also add state after incoming event to the fork states let mut state_after = state_at_incoming_event.clone(); @@ -1941,7 +1922,7 @@ pub(crate) fn fetch_and_handle_outliers<'a>( } } - while let Some((next_id, value)) = events_in_reverse_order.pop() { + for (next_id, value) in events_in_reverse_order { match handle_outlier_pdu( origin, create_event, @@ -1954,7 +1935,9 @@ pub(crate) fn fetch_and_handle_outliers<'a>( .await { Ok((pdu, json)) => { - pdus.push((pdu, Some(json))); + if next_id == *id { + pdus.push((pdu, Some(json))); + } } Err(e) => { warn!("Authentication of event {} failed: {:?}", next_id, e); From b1d9ec3efccafaf887da1b54e4b3ef2bfa4d84a3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20K=C3=B6sters?= Date: Mon, 20 Dec 2021 10:16:22 +0100 Subject: [PATCH 26/65] fix: atomic increment --- Cargo.toml | 2 +- src/database/abstraction/rocksdb.rs | 24 ++++++++++++++++-------- src/database/abstraction/watchers.rs | 8 ++++---- 3 files changed, 21 insertions(+), 13 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 0a2b4459..6241b6a8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -90,7 +90,7 @@ backend_sled = ["sled"] backend_sqlite = ["sqlite"] backend_heed = ["heed", "crossbeam"] backend_rocksdb = ["rocksdb"] -sqlite = ["rusqlite", "parking_lot", "crossbeam", "tokio/signal"] +sqlite = ["rusqlite", "parking_lot", "tokio/signal"] conduit_bin = [] # TODO: add rocket to this when it is optional [[bin]] diff --git a/src/database/abstraction/rocksdb.rs b/src/database/abstraction/rocksdb.rs index 825c02e0..b2142dfe 100644 --- a/src/database/abstraction/rocksdb.rs +++ 
b/src/database/abstraction/rocksdb.rs @@ -1,11 +1,6 @@ -use super::super::Config; +use super::{super::Config, watchers::Watchers, DatabaseEngine, Tree}; use crate::{utils, Result}; - -use std::{future::Future, pin::Pin, sync::Arc}; - -use super::{DatabaseEngine, Tree}; - -use std::{collections::HashMap, sync::RwLock}; +use std::{future::Future, pin::Pin, sync::Arc, collections::HashMap, sync::RwLock}; pub struct Engine { rocks: rocksdb::DBWithThreadMode, @@ -16,6 +11,7 @@ pub struct RocksDbEngineTree<'a> { db: Arc, name: &'a str, watchers: Watchers, + write_lock: RwLock<()> } impl DatabaseEngine for Engine { @@ -77,6 +73,7 @@ impl DatabaseEngine for Engine { name, db: Arc::clone(self), watchers: Watchers::default(), + write_lock: RwLock::new(()), })) } @@ -98,8 +95,12 @@ impl Tree for RocksDbEngineTree<'_> { } fn insert(&self, key: &[u8], value: &[u8]) -> Result<()> { + let lock = self.write_lock.read().unwrap(); self.db.rocks.put_cf(self.cf(), key, value)?; + drop(lock); + self.watchers.wake(key); + Ok(()) } @@ -148,20 +149,27 @@ impl Tree for RocksDbEngineTree<'_> { } fn increment(&self, key: &[u8]) -> Result> { - // TODO: make atomic + let lock = self.write_lock.write().unwrap(); + let old = self.db.rocks.get_cf(self.cf(), &key)?; let new = utils::increment(old.as_deref()).unwrap(); self.db.rocks.put_cf(self.cf(), key, &new)?; + + drop(lock); Ok(new) } fn increment_batch<'a>(&self, iter: &mut dyn Iterator>) -> Result<()> { + let lock = self.write_lock.write().unwrap(); + for key in iter { let old = self.db.rocks.get_cf(self.cf(), &key)?; let new = utils::increment(old.as_deref()).unwrap(); self.db.rocks.put_cf(self.cf(), key, new)?; } + drop(lock); + Ok(()) } diff --git a/src/database/abstraction/watchers.rs b/src/database/abstraction/watchers.rs index 404f3f06..fec1f27a 100644 --- a/src/database/abstraction/watchers.rs +++ b/src/database/abstraction/watchers.rs @@ -1,6 +1,6 @@ -use parking_lot::RwLock; use std::{ collections::{hash_map, HashMap}, + sync::RwLock, future::Future, pin::Pin, }; @@ -16,7 +16,7 @@ impl Watchers { &'a self, prefix: &[u8], ) -> Pin + Send + 'a>> { - let mut rx = match self.watchers.write().entry(prefix.to_vec()) { + let mut rx = match self.watchers.write().unwrap().entry(prefix.to_vec()) { hash_map::Entry::Occupied(o) => o.get().1.clone(), hash_map::Entry::Vacant(v) => { let (tx, rx) = tokio::sync::watch::channel(()); @@ -31,7 +31,7 @@ impl Watchers { }) } pub(super) fn wake(&self, key: &[u8]) { - let watchers = self.watchers.read(); + let watchers = self.watchers.read().unwrap(); let mut triggered = Vec::new(); for length in 0..=key.len() { @@ -43,7 +43,7 @@ impl Watchers { drop(watchers); if !triggered.is_empty() { - let mut watchers = self.watchers.write(); + let mut watchers = self.watchers.write().unwrap(); for prefix in triggered { if let Some(tx) = watchers.remove(prefix) { let _ = tx.0.send(()); From 54f4d39e3ed92106ec3a902de22d2366cfd8e8be Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20K=C3=B6sters?= Date: Tue, 21 Dec 2021 16:02:12 +0100 Subject: [PATCH 27/65] improvement: don't fetch event multiple times --- src/database/abstraction/rocksdb.rs | 4 +++- src/server_server.rs | 17 +++++++++++++---- 2 files changed, 16 insertions(+), 5 deletions(-) diff --git a/src/database/abstraction/rocksdb.rs b/src/database/abstraction/rocksdb.rs index b2142dfe..397047bd 100644 --- a/src/database/abstraction/rocksdb.rs +++ b/src/database/abstraction/rocksdb.rs @@ -27,7 +27,9 @@ impl DatabaseEngine for Engine { db_opts.set_num_levels(8); 
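    // 2 << 27 bytes = 256 MiB: size of a single memtable before RocksDB flushes it to disk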
db_opts.set_write_buffer_size(2 << 27); - let rocksdb_cache = rocksdb::Cache::new_lru_cache((config.db_cache_capacity_mb * 1024.0 * 1024.0) as usize).unwrap(); + let rocksdb_cache = + rocksdb::Cache::new_lru_cache((config.db_cache_capacity_mb * 1024.0 * 1024.0) as usize) + .unwrap(); let mut block_based_options = rocksdb::BlockBasedOptions::default(); block_based_options.set_block_size(2 << 19); diff --git a/src/server_server.rs b/src/server_server.rs index 57f55867..6e8ebf38 100644 --- a/src/server_server.rs +++ b/src/server_server.rs @@ -1867,7 +1867,12 @@ pub(crate) fn fetch_and_handle_outliers<'a>( // handle_outlier_pdu. let mut todo_auth_events = vec![Arc::clone(id)]; let mut events_in_reverse_order = Vec::new(); + let mut events_all = HashSet::new(); while let Some(next_id) = todo_auth_events.pop() { + if events_all.contains(&next_id) { + continue; + } + if let Ok(Some(_)) = db.rooms.get_pdu(&next_id) { trace!("Found {} in db", id); continue; @@ -1899,10 +1904,13 @@ pub(crate) fn fetch_and_handle_outliers<'a>( next_id, calculated_event_id, &res.pdu); } - - if let Some(auth_events) = value.get("auth_events").and_then(|c| c.as_array()) { + if let Some(auth_events) = + value.get("auth_events").and_then(|c| c.as_array()) + { for auth_event in auth_events { - if let Ok(auth_event) = serde_json::from_value(auth_event.clone().into()) { + if let Ok(auth_event) = + serde_json::from_value(auth_event.clone().into()) + { let a: Arc = auth_event; todo_auth_events.push(a); } else { @@ -1913,7 +1921,8 @@ pub(crate) fn fetch_and_handle_outliers<'a>( warn!("Auth event list invalid"); } - events_in_reverse_order.push((next_id, value)); + events_in_reverse_order.push((next_id.clone(), value)); + events_all.insert(next_id); } Err(_) => { warn!("Failed to fetch event: {}", next_id); From 5bcc1324ed3936444ba189c399e906482cc67d3a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20K=C3=B6sters?= Date: Tue, 21 Dec 2021 22:10:31 +0100 Subject: [PATCH 28/65] fix: auth event fetch order --- src/server_server.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/server_server.rs b/src/server_server.rs index 6e8ebf38..c76afd34 100644 --- a/src/server_server.rs +++ b/src/server_server.rs @@ -1931,7 +1931,7 @@ pub(crate) fn fetch_and_handle_outliers<'a>( } } - for (next_id, value) in events_in_reverse_order { + for (next_id, value) in events_in_reverse_order.iter().rev() { match handle_outlier_pdu( origin, create_event, @@ -1944,13 +1944,13 @@ pub(crate) fn fetch_and_handle_outliers<'a>( .await { Ok((pdu, json)) => { - if next_id == *id { + if next_id == id { pdus.push((pdu, Some(json))); } } Err(e) => { warn!("Authentication of event {} failed: {:?}", next_id, e); - back_off((*next_id).to_owned()); + back_off((**next_id).to_owned()); } } } From 68e910bb77f7bbc93269dd1dfd0f70a26f1e8ef0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20K=C3=B6sters?= Date: Tue, 4 Jan 2022 14:30:13 +0100 Subject: [PATCH 29/65] feat: lazy loading --- src/client_server/context.rs | 62 ++++++++++++++++--- src/client_server/message.rs | 83 +++++++++++++++++++------ src/client_server/sync.rs | 117 +++++++++++++++++++++++++++++++---- src/database.rs | 3 + src/database/rooms.rs | 96 +++++++++++++++++++++++++++- 5 files changed, 320 insertions(+), 41 deletions(-) diff --git a/src/client_server/context.rs b/src/client_server/context.rs index 9bfec9e1..94a44e39 100644 --- a/src/client_server/context.rs +++ b/src/client_server/context.rs @@ -1,5 +1,9 @@ use crate::{database::DatabaseGuard, ConduitResult, Error, Ruma}; -use 
ruma::api::client::{error::ErrorKind, r0::context::get_context}; +use ruma::{ + api::client::{error::ErrorKind, r0::context::get_context}, + events::EventType, +}; +use std::collections::HashSet; use std::convert::TryFrom; #[cfg(feature = "conduit_bin")] @@ -21,6 +25,7 @@ pub async fn get_context_route( body: Ruma>, ) -> ConduitResult { let sender_user = body.sender_user.as_ref().expect("user is authenticated"); + let sender_device = body.sender_device.as_ref().expect("user is authenticated"); if !db.rooms.is_joined(sender_user, &body.room_id)? { return Err(Error::BadRequest( @@ -29,6 +34,8 @@ pub async fn get_context_route( )); } + let mut lazy_loaded = HashSet::new(); + let base_pdu_id = db .rooms .get_pdu_id(&body.event_id)? @@ -45,8 +52,18 @@ pub async fn get_context_route( .ok_or(Error::BadRequest( ErrorKind::NotFound, "Base event not found.", - ))? - .to_room_event(); + ))?; + + if !db.rooms.lazy_load_was_sent_before( + &sender_user, + &sender_device, + &body.room_id, + &base_event.sender, + )? { + lazy_loaded.insert(base_event.sender.clone()); + } + + let base_event = base_event.to_room_event(); let events_before: Vec<_> = db .rooms @@ -60,6 +77,17 @@ pub async fn get_context_route( .filter_map(|r| r.ok()) // Remove buggy events .collect(); + for (_, event) in &events_before { + if !db.rooms.lazy_load_was_sent_before( + &sender_user, + &sender_device, + &body.room_id, + &event.sender, + )? { + lazy_loaded.insert(event.sender.clone()); + } + } + let start_token = events_before .last() .and_then(|(pdu_id, _)| db.rooms.pdu_count(pdu_id).ok()) @@ -82,6 +110,17 @@ pub async fn get_context_route( .filter_map(|r| r.ok()) // Remove buggy events .collect(); + for (_, event) in &events_after { + if !db.rooms.lazy_load_was_sent_before( + &sender_user, + &sender_device, + &body.room_id, + &event.sender, + )? { + lazy_loaded.insert(event.sender.clone()); + } + } + let end_token = events_after .last() .and_then(|(pdu_id, _)| db.rooms.pdu_count(pdu_id).ok()) @@ -92,18 +131,23 @@ pub async fn get_context_route( .map(|(_, pdu)| pdu.to_room_event()) .collect(); + let mut state = Vec::new(); + for ll_id in &lazy_loaded { + if let Some(member_event) = + db.rooms + .room_state_get(&body.room_id, &EventType::RoomMember, ll_id.as_str())? + { + state.push(member_event.to_state_event()); + } + } + let resp = get_context::Response { start: start_token, end: end_token, events_before, event: Some(base_event), events_after, - state: db // TODO: State at event - .rooms - .room_state_full(&body.room_id)? - .values() - .map(|pdu| pdu.to_state_event()) - .collect(), + state, }; Ok(resp.into()) diff --git a/src/client_server/message.rs b/src/client_server/message.rs index cbce019e..48ca4ae8 100644 --- a/src/client_server/message.rs +++ b/src/client_server/message.rs @@ -6,7 +6,11 @@ use ruma::{ }, events::EventType, }; -use std::{collections::BTreeMap, convert::TryInto, sync::Arc}; +use std::{ + collections::{BTreeMap, HashSet}, + convert::TryInto, + sync::Arc, +}; #[cfg(feature = "conduit_bin")] use rocket::{get, put}; @@ -117,6 +121,7 @@ pub async fn get_message_events_route( body: Ruma>, ) -> ConduitResult { let sender_user = body.sender_user.as_ref().expect("user is authenticated"); + let sender_device = body.sender_device.as_ref().expect("user is authenticated"); if !db.rooms.is_joined(sender_user, &body.room_id)? 
{ return Err(Error::BadRequest( @@ -136,6 +141,12 @@ pub async fn get_message_events_route( // Use limit or else 10 let limit = body.limit.try_into().map_or(10_usize, |l: u32| l as usize); + let next_token; + + let mut resp = get_message_events::Response::new(); + + let mut lazy_loaded = HashSet::new(); + match body.dir { get_message_events::Direction::Forward => { let events_after: Vec<_> = db @@ -152,21 +163,27 @@ pub async fn get_message_events_route( .take_while(|&(k, _)| Some(Ok(k)) != to) // Stop at `to` .collect(); - let end_token = events_after.last().map(|(count, _)| count.to_string()); + for (_, event) in &events_after { + if !db.rooms.lazy_load_was_sent_before( + &sender_user, + &sender_device, + &body.room_id, + &event.sender, + )? { + lazy_loaded.insert(event.sender.clone()); + } + } + + next_token = events_after.last().map(|(count, _)| count).copied(); let events_after: Vec<_> = events_after .into_iter() .map(|(_, pdu)| pdu.to_room_event()) .collect(); - let resp = get_message_events::Response { - start: body.from.to_owned(), - end: end_token, - chunk: events_after, - state: Vec::new(), - }; - - Ok(resp.into()) + resp.start = body.from.to_owned(); + resp.end = next_token.map(|count| count.to_string()); + resp.chunk = events_after; } get_message_events::Direction::Backward => { let events_before: Vec<_> = db @@ -183,21 +200,51 @@ pub async fn get_message_events_route( .take_while(|&(k, _)| Some(Ok(k)) != to) // Stop at `to` .collect(); - let start_token = events_before.last().map(|(count, _)| count.to_string()); + for (_, event) in &events_before { + if !db.rooms.lazy_load_was_sent_before( + &sender_user, + &sender_device, + &body.room_id, + &event.sender, + )? { + lazy_loaded.insert(event.sender.clone()); + } + } + + next_token = events_before.last().map(|(count, _)| count).copied(); let events_before: Vec<_> = events_before .into_iter() .map(|(_, pdu)| pdu.to_room_event()) .collect(); - let resp = get_message_events::Response { - start: body.from.to_owned(), - end: start_token, - chunk: events_before, - state: Vec::new(), - }; + resp.start = body.from.to_owned(); + resp.end = next_token.map(|count| count.to_string()); + resp.chunk = events_before; + } + } - Ok(resp.into()) + db.rooms + .lazy_load_confirm_delivery(&sender_user, &sender_device, &body.room_id, from)?; + resp.state = Vec::new(); + for ll_id in &lazy_loaded { + if let Some(member_event) = + db.rooms + .room_state_get(&body.room_id, &EventType::RoomMember, ll_id.as_str())? 
+ { + resp.state.push(member_event.to_state_event()); } } + + if let Some(next_token) = next_token { + db.rooms.lazy_load_mark_sent( + &sender_user, + &sender_device, + &body.room_id, + lazy_loaded.into_iter().collect(), + next_token, + ); + } + + Ok(resp.into()) } diff --git a/src/client_server/sync.rs b/src/client_server/sync.rs index 64588a2c..88bf8614 100644 --- a/src/client_server/sync.rs +++ b/src/client_server/sync.rs @@ -264,6 +264,14 @@ async fn sync_helper( // limited unless there are events in non_timeline_pdus let limited = non_timeline_pdus.next().is_some(); + let mut timeline_users = HashSet::new(); + for (_, event) in &timeline_pdus { + timeline_users.insert(event.sender.as_str().to_owned()); + } + + db.rooms + .lazy_load_confirm_delivery(&sender_user, &sender_device, &room_id, since)?; + // Database queries: let current_shortstatehash = db @@ -344,14 +352,51 @@ async fn sync_helper( state_events, ) = if since_shortstatehash.is_none() { // Probably since = 0, we will do an initial sync + let (joined_member_count, invited_member_count, heroes) = calculate_counts()?; let current_state_ids = db.rooms.state_full_ids(current_shortstatehash)?; - let state_events: Vec<_> = current_state_ids - .iter() - .map(|(_, id)| db.rooms.get_pdu(id)) - .filter_map(|r| r.ok().flatten()) - .collect(); + + let mut state_events = Vec::new(); + let mut lazy_loaded = Vec::new(); + + for (_, id) in current_state_ids { + let pdu = match db.rooms.get_pdu(&id)? { + Some(pdu) => pdu, + None => { + error!("Pdu in state not found: {}", id); + continue; + } + }; + let state_key = pdu + .state_key + .as_ref() + .expect("state events have state keys"); + if pdu.kind != EventType::RoomMember { + state_events.push(pdu); + } else if full_state || timeline_users.contains(state_key) { + // TODO: check filter: is ll enabled? + lazy_loaded.push( + UserId::parse(state_key.as_ref()) + .expect("they are in timeline_users, so they should be correct"), + ); + state_events.push(pdu); + } + } + + // Reset lazy loading because this is an initial sync + db.rooms + .lazy_load_reset(&sender_user, &sender_device, &room_id)?; + + // The state_events above should contain all timeline_users, let's mark them as lazy + // loaded. + db.rooms.lazy_load_mark_sent( + &sender_user, + &sender_device, + &room_id, + lazy_loaded, + next_batch, + ); ( heroes, @@ -387,20 +432,66 @@ async fn sync_helper( let since_state_ids = db.rooms.state_full_ids(since_shortstatehash)?; - let state_events = if joined_since_last_sync { + /* + let state_events = if joined_since_last_sync || full_state { current_state_ids .iter() .map(|(_, id)| db.rooms.get_pdu(id)) .filter_map(|r| r.ok().flatten()) .collect::>() } else { - current_state_ids - .iter() - .filter(|(key, id)| since_state_ids.get(key) != Some(id)) - .map(|(_, id)| db.rooms.get_pdu(id)) - .filter_map(|r| r.ok().flatten()) - .collect() - }; + */ + let mut state_events = Vec::new(); + let mut lazy_loaded = Vec::new(); + + for (key, id) in current_state_ids { + let pdu = match db.rooms.get_pdu(&id)? 
{ + Some(pdu) => pdu, + None => { + error!("Pdu in state not found: {}", id); + continue; + } + }; + + let state_key = pdu + .state_key + .as_ref() + .expect("state events have state keys"); + + if pdu.kind != EventType::RoomMember { + if full_state || since_state_ids.get(&key) != Some(&id) { + state_events.push(pdu); + } + continue; + } + + // Pdu has to be a member event + let state_key_userid = UserId::parse(state_key.as_ref()) + .expect("they are in timeline_users, so they should be correct"); + + if full_state || since_state_ids.get(&key) != Some(&id) { + lazy_loaded.push(state_key_userid); + state_events.push(pdu); + } else if timeline_users.contains(state_key) + && !db.rooms.lazy_load_was_sent_before( + &sender_user, + &sender_device, + &room_id, + &state_key_userid, + )? + { + lazy_loaded.push(state_key_userid); + state_events.push(pdu); + } + } + + db.rooms.lazy_load_mark_sent( + &sender_user, + &sender_device, + &room_id, + lazy_loaded, + next_batch, + ); let encrypted_room = db .rooms diff --git a/src/database.rs b/src/database.rs index af6136b3..9e020198 100644 --- a/src/database.rs +++ b/src/database.rs @@ -288,6 +288,8 @@ impl Database { userroomid_leftstate: builder.open_tree("userroomid_leftstate")?, roomuserid_leftcount: builder.open_tree("roomuserid_leftcount")?, + lazyloadedids: builder.open_tree("lazyloadedids")?, + userroomid_notificationcount: builder.open_tree("userroomid_notificationcount")?, userroomid_highlightcount: builder.open_tree("userroomid_highlightcount")?, @@ -323,6 +325,7 @@ impl Database { statekeyshort_cache: Mutex::new(LruCache::new(1_000_000)), our_real_users_cache: RwLock::new(HashMap::new()), appservice_in_room_cache: RwLock::new(HashMap::new()), + lazy_load_waiting: Mutex::new(HashMap::new()), stateinfo_cache: Mutex::new(LruCache::new(1000)), }, account_data: account_data::AccountData { diff --git a/src/database/rooms.rs b/src/database/rooms.rs index 775e2f8d..b957b55d 100644 --- a/src/database/rooms.rs +++ b/src/database/rooms.rs @@ -28,7 +28,7 @@ use ruma::{ push::{Action, Ruleset, Tweak}, serde::{CanonicalJsonObject, CanonicalJsonValue, Raw}, state_res::{self, RoomVersion, StateMap}, - uint, EventId, RoomAliasId, RoomId, RoomVersionId, ServerName, UserId, + uint, DeviceId, EventId, RoomAliasId, RoomId, RoomVersionId, ServerName, UserId, }; use serde::Deserialize; use serde_json::value::to_raw_value; @@ -79,6 +79,8 @@ pub struct Rooms { pub(super) userroomid_leftstate: Arc, pub(super) roomuserid_leftcount: Arc, + pub(super) lazyloadedids: Arc, // LazyLoadedIds = UserId + DeviceId + RoomId + LazyLoadedUserId + pub(super) userroomid_notificationcount: Arc, // NotifyCount = u64 pub(super) userroomid_highlightcount: Arc, // HightlightCount = u64 @@ -117,6 +119,8 @@ pub struct Rooms { pub(super) shortstatekey_cache: Mutex>, pub(super) our_real_users_cache: RwLock, Arc>>>>, pub(super) appservice_in_room_cache: RwLock, HashMap>>, + pub(super) lazy_load_waiting: + Mutex, Box, Box, u64), Vec>>>, pub(super) stateinfo_cache: Mutex< LruCache< u64, @@ -3466,4 +3470,94 @@ impl Rooms { Ok(()) } + + #[tracing::instrument(skip(self))] + pub fn lazy_load_was_sent_before( + &self, + user_id: &UserId, + device_id: &DeviceId, + room_id: &RoomId, + ll_user: &UserId, + ) -> Result { + let mut key = user_id.as_bytes().to_vec(); + key.push(0xff); + key.extend_from_slice(&device_id.as_bytes()); + key.push(0xff); + key.extend_from_slice(&room_id.as_bytes()); + key.push(0xff); + key.extend_from_slice(&ll_user.as_bytes()); + Ok(self.lazyloadedids.get(&key)?.is_some()) + } + 
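+    /// Marks the given users as lazy-load sent to this device for the sync
+    /// token `count`. The list is only kept in the in-memory
+    /// `lazy_load_waiting` map until `lazy_load_confirm_delivery` sees the
+    /// same token come back from the client and persists it to the
+    /// `lazyloadedids` tree.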
+ #[tracing::instrument(skip(self))] + pub fn lazy_load_mark_sent( + &self, + user_id: &UserId, + device_id: &DeviceId, + room_id: &RoomId, + lazy_load: Vec>, + count: u64, + ) { + self.lazy_load_waiting.lock().unwrap().insert( + ( + user_id.to_owned(), + device_id.to_owned(), + room_id.to_owned(), + count, + ), + lazy_load, + ); + } + + #[tracing::instrument(skip(self))] + pub fn lazy_load_confirm_delivery( + &self, + user_id: &UserId, + device_id: &DeviceId, + room_id: &RoomId, + since: u64, + ) -> Result<()> { + if let Some(user_ids) = self.lazy_load_waiting.lock().unwrap().remove(&( + user_id.to_owned(), + device_id.to_owned(), + room_id.to_owned(), + since, + )) { + let mut prefix = user_id.as_bytes().to_vec(); + prefix.push(0xff); + prefix.extend_from_slice(&device_id.as_bytes()); + prefix.push(0xff); + prefix.extend_from_slice(&room_id.as_bytes()); + prefix.push(0xff); + + for ll_id in user_ids { + let mut key = prefix.clone(); + key.extend_from_slice(&ll_id.as_bytes()); + self.lazyloadedids.insert(&key, &[])?; + } + } + + Ok(()) + } + + #[tracing::instrument(skip(self))] + pub fn lazy_load_reset( + &self, + user_id: &Box, + device_id: &Box, + room_id: &Box, + ) -> Result<()> { + let mut prefix = user_id.as_bytes().to_vec(); + prefix.push(0xff); + prefix.extend_from_slice(&device_id.as_bytes()); + prefix.push(0xff); + prefix.extend_from_slice(&room_id.as_bytes()); + prefix.push(0xff); + + for (key, _) in self.lazyloadedids.scan_prefix(prefix) { + self.lazyloadedids.remove(&key)?; + } + + Ok(()) + } } From 1bd9fd74b31383105526ea27b6df8d461aacc223 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20K=C3=B6sters?= Date: Wed, 5 Jan 2022 18:15:00 +0100 Subject: [PATCH 30/65] feat: partially support sync filters --- src/client_server/filter.rs | 57 ++++++++++++++++--------- src/client_server/message.rs | 5 ++- src/client_server/sync.rs | 83 ++++++++++++++++++++---------------- src/database.rs | 1 + src/database/users.rs | 48 ++++++++++++++++++++- 5 files changed, 133 insertions(+), 61 deletions(-) diff --git a/src/client_server/filter.rs b/src/client_server/filter.rs index dfb53770..f8845f1e 100644 --- a/src/client_server/filter.rs +++ b/src/client_server/filter.rs @@ -1,32 +1,47 @@ -use crate::{utils, ConduitResult}; -use ruma::api::client::r0::filter::{self, create_filter, get_filter}; +use crate::{database::DatabaseGuard, ConduitResult, Error, Ruma}; +use ruma::api::client::{ + error::ErrorKind, + r0::filter::{create_filter, get_filter}, +}; #[cfg(feature = "conduit_bin")] use rocket::{get, post}; /// # `GET /_matrix/client/r0/user/{userId}/filter/{filterId}` /// -/// TODO: Loads a filter that was previously created. -#[cfg_attr(feature = "conduit_bin", get("/_matrix/client/r0/user/<_>/filter/<_>"))] -#[tracing::instrument] -pub async fn get_filter_route() -> ConduitResult { - // TODO - Ok(get_filter::Response::new(filter::IncomingFilterDefinition { - event_fields: None, - event_format: filter::EventFormat::default(), - account_data: filter::IncomingFilter::default(), - room: filter::IncomingRoomFilter::default(), - presence: filter::IncomingFilter::default(), - }) - .into()) +/// Loads a filter that was previously created. 
+/// +/// - A user can only access their own filters +#[cfg_attr( + feature = "conduit_bin", + get("/_matrix/client/r0/user/<_>/filter/<_>", data = "") +)] +#[tracing::instrument(skip(db, body))] +pub async fn get_filter_route( + db: DatabaseGuard, + body: Ruma>, +) -> ConduitResult { + let sender_user = body.sender_user.as_ref().expect("user is authenticated"); + let filter = match db.users.get_filter(sender_user, &body.filter_id)? { + Some(filter) => filter, + None => return Err(Error::BadRequest(ErrorKind::NotFound, "Filter not found.")), + }; + + Ok(get_filter::Response::new(filter).into()) } /// # `PUT /_matrix/client/r0/user/{userId}/filter` /// -/// TODO: Creates a new filter to be used by other endpoints. -#[cfg_attr(feature = "conduit_bin", post("/_matrix/client/r0/user/<_>/filter"))] -#[tracing::instrument] -pub async fn create_filter_route() -> ConduitResult { - // TODO - Ok(create_filter::Response::new(utils::random_string(10)).into()) +/// Creates a new filter to be used by other endpoints. +#[cfg_attr( + feature = "conduit_bin", + post("/_matrix/client/r0/user/<_>/filter", data = "") +)] +#[tracing::instrument(skip(db, body))] +pub async fn create_filter_route( + db: DatabaseGuard, + body: Ruma>, +) -> ConduitResult { + let sender_user = body.sender_user.as_ref().expect("user is authenticated"); + Ok(create_filter::Response::new(db.users.create_filter(sender_user, &body.filter)?).into()) } diff --git a/src/client_server/message.rs b/src/client_server/message.rs index 48ca4ae8..899c45a2 100644 --- a/src/client_server/message.rs +++ b/src/client_server/message.rs @@ -138,6 +138,9 @@ pub async fn get_message_events_route( let to = body.to.as_ref().map(|t| t.parse()); + db.rooms + .lazy_load_confirm_delivery(&sender_user, &sender_device, &body.room_id, from)?; + // Use limit or else 10 let limit = body.limit.try_into().map_or(10_usize, |l: u32| l as usize); @@ -224,8 +227,6 @@ pub async fn get_message_events_route( } } - db.rooms - .lazy_load_confirm_delivery(&sender_user, &sender_device, &body.room_id, from)?; resp.state = Vec::new(); for ll_id in &lazy_loaded { if let Some(member_event) = diff --git a/src/client_server/sync.rs b/src/client_server/sync.rs index 88bf8614..6d8ac28d 100644 --- a/src/client_server/sync.rs +++ b/src/client_server/sync.rs @@ -1,6 +1,10 @@ use crate::{database::DatabaseGuard, ConduitResult, Database, Error, Result, Ruma, RumaResponse}; use ruma::{ - api::client::r0::{sync::sync_events, uiaa::UiaaResponse}, + api::client::r0::{ + filter::{IncomingFilterDefinition, LazyLoadOptions}, + sync::sync_events, + uiaa::UiaaResponse, + }, events::{ room::member::{MembershipState, RoomMemberEventContent}, AnySyncEphemeralRoomEvent, EventType, @@ -77,34 +81,32 @@ pub async fn sync_events_route( Entry::Vacant(v) => { let (tx, rx) = tokio::sync::watch::channel(None); + v.insert((body.since.clone(), rx.clone())); + tokio::spawn(sync_helper_wrapper( Arc::clone(&arc_db), sender_user.clone(), sender_device.clone(), - body.since.clone(), - body.full_state, - body.timeout, + body, tx, )); - v.insert((body.since.clone(), rx)).1.clone() + rx } Entry::Occupied(mut o) => { if o.get().0 != body.since { let (tx, rx) = tokio::sync::watch::channel(None); + o.insert((body.since.clone(), rx.clone())); + tokio::spawn(sync_helper_wrapper( Arc::clone(&arc_db), sender_user.clone(), sender_device.clone(), - body.since.clone(), - body.full_state, - body.timeout, + body, tx, )); - o.insert((body.since.clone(), rx.clone())); - rx } else { o.get().1.clone() @@ -135,18 +137,16 @@ async fn 
sync_helper_wrapper( db: Arc, sender_user: Box, sender_device: Box, - since: Option, - full_state: bool, - timeout: Option, + body: sync_events::IncomingRequest, tx: Sender>>, ) { + let since = body.since.clone(); + let r = sync_helper( Arc::clone(&db), sender_user.clone(), sender_device.clone(), - since.clone(), - full_state, - timeout, + body, ) .await; @@ -179,9 +179,7 @@ async fn sync_helper( db: Arc, sender_user: Box, sender_device: Box, - since: Option, - full_state: bool, - timeout: Option, + body: sync_events::IncomingRequest, // bool = caching allowed ) -> Result<(sync_events::Response, bool), Error> { // TODO: match body.set_presence { @@ -193,8 +191,26 @@ async fn sync_helper( let next_batch = db.globals.current_count()?; let next_batch_string = next_batch.to_string(); + // Load filter + let filter = match body.filter { + None => IncomingFilterDefinition::default(), + Some(sync_events::IncomingFilter::FilterDefinition(filter)) => filter, + Some(sync_events::IncomingFilter::FilterId(filter_id)) => db + .users + .get_filter(&sender_user, &filter_id)? + .unwrap_or_default(), + }; + + let (lazy_load_enabled, lazy_load_send_redundant) = match filter.room.state.lazy_load_options { + LazyLoadOptions::Enabled { + include_redundant_members: redundant, + } => (true, redundant), + _ => (false, false), + }; + let mut joined_rooms = BTreeMap::new(); - let since = since + let since = body + .since .clone() .and_then(|string| string.parse().ok()) .unwrap_or(0); @@ -374,8 +390,10 @@ async fn sync_helper( .expect("state events have state keys"); if pdu.kind != EventType::RoomMember { state_events.push(pdu); - } else if full_state || timeline_users.contains(state_key) { - // TODO: check filter: is ll enabled? + } else if !lazy_load_enabled + || body.full_state + || timeline_users.contains(state_key) + { lazy_loaded.push( UserId::parse(state_key.as_ref()) .expect("they are in timeline_users, so they should be correct"), @@ -432,15 +450,6 @@ async fn sync_helper( let since_state_ids = db.rooms.state_full_ids(since_shortstatehash)?; - /* - let state_events = if joined_since_last_sync || full_state { - current_state_ids - .iter() - .map(|(_, id)| db.rooms.get_pdu(id)) - .filter_map(|r| r.ok().flatten()) - .collect::>() - } else { - */ let mut state_events = Vec::new(); let mut lazy_loaded = Vec::new(); @@ -459,7 +468,7 @@ async fn sync_helper( .expect("state events have state keys"); if pdu.kind != EventType::RoomMember { - if full_state || since_state_ids.get(&key) != Some(&id) { + if body.full_state || since_state_ids.get(&key) != Some(&id) { state_events.push(pdu); } continue; @@ -469,16 +478,16 @@ async fn sync_helper( let state_key_userid = UserId::parse(state_key.as_ref()) .expect("they are in timeline_users, so they should be correct"); - if full_state || since_state_ids.get(&key) != Some(&id) { + if body.full_state || since_state_ids.get(&key) != Some(&id) { lazy_loaded.push(state_key_userid); state_events.push(pdu); } else if timeline_users.contains(state_key) - && !db.rooms.lazy_load_was_sent_before( + && (!db.rooms.lazy_load_was_sent_before( &sender_user, &sender_device, &room_id, &state_key_userid, - )? + )? 
|| lazy_load_send_redundant) { lazy_loaded.push(state_key_userid); state_events.push(pdu); @@ -858,7 +867,7 @@ async fn sync_helper( }; // TODO: Retry the endpoint instead of returning (waiting for #118) - if !full_state + if !body.full_state && response.rooms.is_empty() && response.presence.is_empty() && response.account_data.is_empty() @@ -867,7 +876,7 @@ async fn sync_helper( { // Hang a few seconds so requests are not spammed // Stop hanging if new info arrives - let mut duration = timeout.unwrap_or_default(); + let mut duration = body.timeout.unwrap_or_default(); if duration.as_secs() > 30 { duration = Duration::from_secs(30); } diff --git a/src/database.rs b/src/database.rs index 9e020198..ddf701bb 100644 --- a/src/database.rs +++ b/src/database.rs @@ -249,6 +249,7 @@ impl Database { userid_masterkeyid: builder.open_tree("userid_masterkeyid")?, userid_selfsigningkeyid: builder.open_tree("userid_selfsigningkeyid")?, userid_usersigningkeyid: builder.open_tree("userid_usersigningkeyid")?, + userfilterid_filter: builder.open_tree("userfilterid_filter")?, todeviceid_events: builder.open_tree("todeviceid_events")?, }, uiaa: uiaa::Uiaa { diff --git a/src/database/users.rs b/src/database/users.rs index 63a63f00..c4fcee3d 100644 --- a/src/database/users.rs +++ b/src/database/users.rs @@ -1,6 +1,9 @@ use crate::{utils, Error, Result}; use ruma::{ - api::client::{error::ErrorKind, r0::device::Device}, + api::client::{ + error::ErrorKind, + r0::{device::Device, filter::IncomingFilterDefinition}, + }, encryption::{CrossSigningKey, DeviceKeys, OneTimeKey}, events::{AnyToDeviceEvent, EventType}, identifiers::MxcUri, @@ -36,6 +39,8 @@ pub struct Users { pub(super) userid_selfsigningkeyid: Arc, pub(super) userid_usersigningkeyid: Arc, + pub(super) userfilterid_filter: Arc, // UserFilterId = UserId + FilterId + pub(super) todeviceid_events: Arc, // ToDeviceId = UserId + DeviceId + Count } @@ -996,6 +1001,47 @@ impl Users { // TODO: Unhook 3PID Ok(()) } + + /// Creates a new sync filter. Returns the filter id. 
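+    /// The id is a short random string; the filter definition is stored as
+    /// JSON in the `userfilterid_filter` tree under `UserId + 0xff + FilterId`.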
+ #[tracing::instrument(skip(self))] + pub fn create_filter( + &self, + user_id: &UserId, + filter: &IncomingFilterDefinition, + ) -> Result { + let filter_id = utils::random_string(4); + + let mut key = user_id.as_bytes().to_vec(); + key.push(0xff); + key.extend_from_slice(filter_id.as_bytes()); + + self.userfilterid_filter.insert( + &key, + &serde_json::to_vec(&filter).expect("filter is valid json"), + )?; + + Ok(filter_id) + } + + #[tracing::instrument(skip(self))] + pub fn get_filter( + &self, + user_id: &UserId, + filter_id: &str, + ) -> Result> { + let mut key = user_id.as_bytes().to_vec(); + key.push(0xff); + key.extend_from_slice(filter_id.as_bytes()); + + let raw = self.userfilterid_filter.get(&key)?; + + if let Some(raw) = raw { + serde_json::from_slice(&raw) + .map_err(|_| Error::bad_database("Invalid filter event in db.")) + } else { + Ok(None) + } + } } /// Ensure that a user only sees signatures from themselves and the target user From 93d225fd1ec186d1957c670b0e6f7f161888dd21 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20K=C3=B6sters?= Date: Wed, 5 Jan 2022 20:31:20 +0100 Subject: [PATCH 31/65] improvement: faster way to load required state --- src/client_server/sync.rs | 33 +++++++++++++++++++-------------- 1 file changed, 19 insertions(+), 14 deletions(-) diff --git a/src/client_server/sync.rs b/src/client_server/sync.rs index 6d8ac28d..a41e728e 100644 --- a/src/client_server/sync.rs +++ b/src/client_server/sync.rs @@ -376,24 +376,29 @@ async fn sync_helper( let mut state_events = Vec::new(); let mut lazy_loaded = Vec::new(); - for (_, id) in current_state_ids { - let pdu = match db.rooms.get_pdu(&id)? { - Some(pdu) => pdu, - None => { - error!("Pdu in state not found: {}", id); - continue; - } - }; - let state_key = pdu - .state_key - .as_ref() - .expect("state events have state keys"); - if pdu.kind != EventType::RoomMember { + for (shortstatekey, id) in current_state_ids { + let (event_type, state_key) = db.rooms.get_statekey_from_short(shortstatekey)?; + + if event_type != EventType::RoomMember { + let pdu = match db.rooms.get_pdu(&id)? { + Some(pdu) => pdu, + None => { + error!("Pdu in state not found: {}", id); + continue; + } + }; state_events.push(pdu); } else if !lazy_load_enabled || body.full_state - || timeline_users.contains(state_key) + || timeline_users.contains(&state_key) { + let pdu = match db.rooms.get_pdu(&id)? 
{ + Some(pdu) => pdu, + None => { + error!("Pdu in state not found: {}", id); + continue; + } + }; lazy_loaded.push( UserId::parse(state_key.as_ref()) .expect("they are in timeline_users, so they should be correct"), From f285c89006e48b0644f421ae399f1a8eb47e37f8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20K=C3=B6sters?= Date: Thu, 6 Jan 2022 00:15:34 +0100 Subject: [PATCH 32/65] fix: make incremental sync efficient again --- src/client_server/message.rs | 2 +- src/client_server/sync.rs | 77 ++++++++++++++++++++---------------- src/database/rooms.rs | 4 +- 3 files changed, 47 insertions(+), 36 deletions(-) diff --git a/src/client_server/message.rs b/src/client_server/message.rs index 899c45a2..9705e4c0 100644 --- a/src/client_server/message.rs +++ b/src/client_server/message.rs @@ -242,7 +242,7 @@ pub async fn get_message_events_route( &sender_user, &sender_device, &body.room_id, - lazy_loaded.into_iter().collect(), + lazy_loaded, next_token, ); } diff --git a/src/client_server/sync.rs b/src/client_server/sync.rs index a41e728e..c2014403 100644 --- a/src/client_server/sync.rs +++ b/src/client_server/sync.rs @@ -374,7 +374,7 @@ async fn sync_helper( let current_state_ids = db.rooms.state_full_ids(current_shortstatehash)?; let mut state_events = Vec::new(); - let mut lazy_loaded = Vec::new(); + let mut lazy_loaded = HashSet::new(); for (shortstatekey, id) in current_state_ids { let (event_type, state_key) = db.rooms.get_statekey_from_short(shortstatekey)?; @@ -399,7 +399,7 @@ async fn sync_helper( continue; } }; - lazy_loaded.push( + lazy_loaded.insert( UserId::parse(state_key.as_ref()) .expect("they are in timeline_users, so they should be correct"), ); @@ -456,46 +456,57 @@ async fn sync_helper( let since_state_ids = db.rooms.state_full_ids(since_shortstatehash)?; let mut state_events = Vec::new(); - let mut lazy_loaded = Vec::new(); + let mut lazy_loaded = HashSet::new(); for (key, id) in current_state_ids { - let pdu = match db.rooms.get_pdu(&id)? { - Some(pdu) => pdu, - None => { - error!("Pdu in state not found: {}", id); - continue; - } - }; - - let state_key = pdu - .state_key - .as_ref() - .expect("state events have state keys"); + if body.full_state || since_state_ids.get(&key) != Some(&id) { + let pdu = match db.rooms.get_pdu(&id)? { + Some(pdu) => pdu, + None => { + error!("Pdu in state not found: {}", id); + continue; + } + }; - if pdu.kind != EventType::RoomMember { - if body.full_state || since_state_ids.get(&key) != Some(&id) { - state_events.push(pdu); + if pdu.kind == EventType::RoomMember { + match UserId::parse( + pdu.state_key + .as_ref() + .expect("State event has state key") + .clone(), + ) { + Ok(state_key_userid) => { + lazy_loaded.insert(state_key_userid); + } + Err(e) => error!("Invalid state key for member event: {}", e), + } } - continue; - } - - // Pdu has to be a member event - let state_key_userid = UserId::parse(state_key.as_ref()) - .expect("they are in timeline_users, so they should be correct"); - if body.full_state || since_state_ids.get(&key) != Some(&id) { - lazy_loaded.push(state_key_userid); state_events.push(pdu); - } else if timeline_users.contains(state_key) - && (!db.rooms.lazy_load_was_sent_before( + } + for (_, event) in &timeline_pdus { + if lazy_loaded.contains(&event.sender) { + continue; + } + + if !db.rooms.lazy_load_was_sent_before( &sender_user, &sender_device, &room_id, - &state_key_userid, - )? || lazy_load_send_redundant) - { - lazy_loaded.push(state_key_userid); - state_events.push(pdu); + &event.sender, + )? 
|| lazy_load_send_redundant + { + let pdu = match db.rooms.get_pdu(&id)? { + Some(pdu) => pdu, + None => { + error!("Pdu in state not found: {}", id); + continue; + } + }; + + lazy_loaded.insert(event.sender.clone()); + state_events.push(pdu); + } } } diff --git a/src/database/rooms.rs b/src/database/rooms.rs index b957b55d..600f46df 100644 --- a/src/database/rooms.rs +++ b/src/database/rooms.rs @@ -120,7 +120,7 @@ pub struct Rooms { pub(super) our_real_users_cache: RwLock, Arc>>>>, pub(super) appservice_in_room_cache: RwLock, HashMap>>, pub(super) lazy_load_waiting: - Mutex, Box, Box, u64), Vec>>>, + Mutex, Box, Box, u64), HashSet>>>, pub(super) stateinfo_cache: Mutex< LruCache< u64, @@ -3495,7 +3495,7 @@ impl Rooms { user_id: &UserId, device_id: &DeviceId, room_id: &RoomId, - lazy_load: Vec>, + lazy_load: HashSet>, count: u64, ) { self.lazy_load_waiting.lock().unwrap().insert( From c6d88359d7aae985f9688dddb321d07ef2043708 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20K=C3=B6sters?= Date: Fri, 7 Jan 2022 09:56:09 +0100 Subject: [PATCH 33/65] fix: incremental lazy loading --- src/client_server/sync.rs | 37 ++++++++++++++++++------------------- 1 file changed, 18 insertions(+), 19 deletions(-) diff --git a/src/client_server/sync.rs b/src/client_server/sync.rs index c2014403..a6122893 100644 --- a/src/client_server/sync.rs +++ b/src/client_server/sync.rs @@ -484,28 +484,27 @@ async fn sync_helper( state_events.push(pdu); } - for (_, event) in &timeline_pdus { - if lazy_loaded.contains(&event.sender) { - continue; - } + } - if !db.rooms.lazy_load_was_sent_before( - &sender_user, - &sender_device, - &room_id, - &event.sender, - )? || lazy_load_send_redundant - { - let pdu = match db.rooms.get_pdu(&id)? { - Some(pdu) => pdu, - None => { - error!("Pdu in state not found: {}", id); - continue; - } - }; + for (_, event) in &timeline_pdus { + if lazy_loaded.contains(&event.sender) { + continue; + } + if !db.rooms.lazy_load_was_sent_before( + &sender_user, + &sender_device, + &room_id, + &event.sender, + )? || lazy_load_send_redundant + { + if let Some(member_event) = db.rooms.room_state_get( + &room_id, + &EventType::RoomMember, + event.sender.as_str(), + )? 
{ lazy_loaded.insert(event.sender.clone()); - state_events.push(pdu); + state_events.push(member_event); } } } From 4f39d36e980d8f4e6fcc7ae7c9a292db52d915e7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20K=C3=B6sters?= Date: Sun, 9 Jan 2022 13:42:25 +0100 Subject: [PATCH 34/65] docs: lazy loading --- src/client_server/sync.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/client_server/sync.rs b/src/client_server/sync.rs index a6122893..bd2f48a3 100644 --- a/src/client_server/sync.rs +++ b/src/client_server/sync.rs @@ -40,13 +40,15 @@ use rocket::{get, tokio}; /// Calling this endpoint with a `since` parameter from a previous `next_batch` returns: /// For joined rooms: /// - Some of the most recent events of each timeline that happened after since -/// - If user joined the room after since: All state events and device list updates in that room +/// - If user joined the room after since: All state events (unless lazy loading is activated) and +/// all device list updates in that room /// - If the user was already in the room: A list of all events that are in the state now, but were /// not in the state at `since` /// - If the state we send contains a member event: Joined and invited member counts, heroes /// - Device list updates that happened after `since` /// - If there are events in the timeline we send or the user send updated his read mark: Notification counts /// - EDUs that are active now (read receipts, typing updates, presence) +/// - TODO: Allow multiple sync streams to support Pantalaimon /// /// For invited rooms: /// - If the user was invited after `since`: A subset of the state of the room at the point of the invite From fa6d7f7ccd14426f1fc2d802fff021b06f39bf02 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20K=C3=B6sters?= Date: Sun, 9 Jan 2022 16:44:44 +0100 Subject: [PATCH 35/65] feat: database backend selection at runtime --- Cargo.toml | 2 +- conduit-example.toml | 15 ++- src/database.rs | 142 ++++++++++++++++------------ src/database/abstraction.rs | 13 ++- src/database/abstraction/rocksdb.rs | 9 +- src/database/abstraction/sqlite.rs | 14 ++- src/utils.rs | 11 --- 7 files changed, 117 insertions(+), 89 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 6241b6a8..c898d4d6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -85,7 +85,7 @@ hmac = "0.11.0" sha-1 = "0.9.8" [features] -default = ["conduit_bin", "backend_rocksdb"] +default = ["conduit_bin", "backend_sqlite", "backend_rocksdb"] backend_sled = ["sled"] backend_sqlite = ["sqlite"] backend_heed = ["heed", "crossbeam"] diff --git a/conduit-example.toml b/conduit-example.toml index 4275f528..c0274a4d 100644 --- a/conduit-example.toml +++ b/conduit-example.toml @@ -1,11 +1,15 @@ [global] -# The server_name is the name of this server. It is used as a suffix for user +# The server_name is the pretty name of this server. It is used as a suffix for user # and room ids. Examples: matrix.org, conduit.rs -# The Conduit server needs to be reachable at https://your.server.name/ on port -# 443 (client-server) and 8448 (federation) OR you can create /.well-known -# files to redirect requests. See + +# The Conduit server needs all /_matrix/ requests to be reachable at +# https://your.server.name/ on port 443 (client-server) and 8448 (federation). + +# If that's not possible for you, you can create /.well-known files to redirect +# requests. 
See # https://matrix.org/docs/spec/client_server/latest#get-well-known-matrix-client -# and https://matrix.org/docs/spec/server_server/r0.1.4#get-well-known-matrix-server +# and +# https://matrix.org/docs/spec/server_server/r0.1.4#get-well-known-matrix-server # for more information # YOU NEED TO EDIT THIS @@ -13,6 +17,7 @@ # This is the only directory where Conduit will save its data database_path = "/var/lib/conduit/" +database_backend = "rocksdb" # The port Conduit will be running on. You need to set up a reverse proxy in # your web server (e.g. apache or nginx), so all requests to /_matrix on port diff --git a/src/database.rs b/src/database.rs index ddf701bb..c2b3e2b9 100644 --- a/src/database.rs +++ b/src/database.rs @@ -44,13 +44,15 @@ use self::proxy::ProxyConfig; #[derive(Clone, Debug, Deserialize)] pub struct Config { server_name: Box, + #[serde(default = "default_database_backend")] + database_backend: String, database_path: String, #[serde(default = "default_db_cache_capacity_mb")] db_cache_capacity_mb: f64, #[serde(default = "default_pdu_cache_capacity")] pdu_cache_capacity: u32, - #[serde(default = "default_sqlite_wal_clean_second_interval")] - sqlite_wal_clean_second_interval: u32, + #[serde(default = "default_cleanup_second_interval")] + cleanup_second_interval: u32, #[serde(default = "default_max_request_size")] max_request_size: u32, #[serde(default = "default_max_concurrent_requests")] @@ -117,6 +119,10 @@ fn true_fn() -> bool { true } +fn default_database_backend() -> String { + "sqlite".to_owned() +} + fn default_db_cache_capacity_mb() -> f64 { 200.0 } @@ -125,7 +131,7 @@ fn default_pdu_cache_capacity() -> u32 { 100_000 } -fn default_sqlite_wal_clean_second_interval() -> u32 { +fn default_cleanup_second_interval() -> u32 { 1 * 60 // every minute } @@ -145,20 +151,8 @@ fn default_turn_ttl() -> u64 { 60 * 60 * 24 } -#[cfg(feature = "sled")] -pub type Engine = abstraction::sled::Engine; - -#[cfg(feature = "sqlite")] -pub type Engine = abstraction::sqlite::Engine; - -#[cfg(feature = "heed")] -pub type Engine = abstraction::heed::Engine; - -#[cfg(feature = "rocksdb")] -pub type Engine = abstraction::rocksdb::Engine; - pub struct Database { - _db: Arc, + _db: Arc, pub globals: globals::Globals, pub users: users::Users, pub uiaa: uiaa::Uiaa, @@ -186,27 +180,53 @@ impl Database { Ok(()) } - fn check_sled_or_sqlite_db(config: &Config) -> Result<()> { - #[cfg(feature = "backend_sqlite")] - { - let path = Path::new(&config.database_path); - - let sled_exists = path.join("db").exists(); - let sqlite_exists = path.join("conduit.db").exists(); - if sled_exists { - if sqlite_exists { - // most likely an in-place directory, only warn - warn!("Both sled and sqlite databases are detected in database directory"); - warn!("Currently running from the sqlite database, but consider removing sled database files to free up space") - } else { - error!( - "Sled database detected, conduit now uses sqlite for database operations" - ); - error!("This database must be converted to sqlite, go to https://github.com/ShadowJonathan/conduit_toolbox#conduit_sled_to_sqlite"); - return Err(Error::bad_config( - "sled database detected, migrate to sqlite", - )); - } + fn check_db_setup(config: &Config) -> Result<()> { + let path = Path::new(&config.database_path); + + let sled_exists = path.join("db").exists(); + let sqlite_exists = path.join("conduit.db").exists(); + let rocksdb_exists = path.join("IDENTITY").exists(); + + let mut count = 0; + + if sled_exists { + count += 1; + } + + if sqlite_exists { + count 
+= 1; + } + + if rocksdb_exists { + count += 1; + } + + if count > 1 { + warn!("Multiple databases at database_path detected"); + return Ok(()); + } + + if sled_exists { + if config.database_backend != "sled" { + return Err(Error::bad_config( + "Found sled at database_path, but is not specified in config.", + )); + } + } + + if sqlite_exists { + if config.database_backend != "sqlite" { + return Err(Error::bad_config( + "Found sqlite at database_path, but is not specified in config.", + )); + } + } + + if rocksdb_exists { + if config.database_backend != "rocksdb" { + return Err(Error::bad_config( + "Found rocksdb at database_path, but is not specified in config.", + )); } } @@ -215,14 +235,30 @@ impl Database { /// Load an existing database or create a new one. pub async fn load_or_create(config: &Config) -> Result>> { - Self::check_sled_or_sqlite_db(config)?; + Self::check_db_setup(config)?; if !Path::new(&config.database_path).exists() { std::fs::create_dir_all(&config.database_path) .map_err(|_| Error::BadConfig("Database folder doesn't exists and couldn't be created (e.g. due to missing permissions). Please create the database folder yourself."))?; } - let builder = Engine::open(config)?; + let builder: Arc = match &*config.database_backend { + "sqlite" => { + #[cfg(not(feature = "sqlite"))] + return Err(Error::BadConfig("Database backend not found.")); + #[cfg(feature = "sqlite")] + Arc::new(Arc::::open(config)?) + } + "rocksdb" => { + #[cfg(not(feature = "rocksdb"))] + return Err(Error::BadConfig("Database backend not found.")); + #[cfg(feature = "rocksdb")] + Arc::new(Arc::::open(config)?) + } + _ => { + return Err(Error::BadConfig("Database backend not found.")); + } + }; if config.max_request_size < 1024 { eprintln!("ERROR: Max request size is less than 1KB. Please increase it."); @@ -784,10 +820,7 @@ impl Database { drop(guard); - #[cfg(feature = "sqlite")] - { - Self::start_wal_clean_task(Arc::clone(&db), config).await; - } + Self::start_cleanup_task(Arc::clone(&db), config).await; Ok(db) } @@ -925,15 +958,8 @@ impl Database { res } - #[cfg(feature = "sqlite")] - #[tracing::instrument(skip(self))] - pub fn flush_wal(&self) -> Result<()> { - self._db.flush_wal() - } - - #[cfg(feature = "sqlite")] #[tracing::instrument(skip(db, config))] - pub async fn start_wal_clean_task(db: Arc>, config: &Config) { + pub async fn start_cleanup_task(db: Arc>, config: &Config) { use tokio::time::interval; #[cfg(unix)] @@ -942,7 +968,7 @@ impl Database { use std::time::{Duration, Instant}; - let timer_interval = Duration::from_secs(config.sqlite_wal_clean_second_interval as u64); + let timer_interval = Duration::from_secs(config.cleanup_second_interval as u64); tokio::spawn(async move { let mut i = interval(timer_interval); @@ -953,23 +979,23 @@ impl Database { #[cfg(unix)] tokio::select! 
{ _ = i.tick() => { - info!("wal-trunc: Timer ticked"); + info!("cleanup: Timer ticked"); } _ = s.recv() => { - info!("wal-trunc: Received SIGHUP"); + info!("cleanup: Received SIGHUP"); } }; #[cfg(not(unix))] { i.tick().await; - info!("wal-trunc: Timer ticked") + info!("cleanup: Timer ticked") } let start = Instant::now(); - if let Err(e) = db.read().await.flush_wal() { - error!("wal-trunc: Errored: {}", e); + if let Err(e) = db.read().await._db.cleanup() { + error!("cleanup: Errored: {}", e); } else { - info!("wal-trunc: Flushed in {:?}", start.elapsed()); + info!("cleanup: Finished in {:?}", start.elapsed()); } } }); diff --git a/src/database/abstraction.rs b/src/database/abstraction.rs index a347f831..45627bbc 100644 --- a/src/database/abstraction.rs +++ b/src/database/abstraction.rs @@ -18,10 +18,15 @@ pub mod rocksdb; #[cfg(any(feature = "sqlite", feature = "rocksdb", feature = "heed"))] pub mod watchers; -pub trait DatabaseEngine: Sized { - fn open(config: &Config) -> Result>; - fn open_tree(self: &Arc, name: &'static str) -> Result>; - fn flush(self: &Arc) -> Result<()>; +pub trait DatabaseEngine: Send + Sync { + fn open(config: &Config) -> Result + where + Self: Sized; + fn open_tree(&self, name: &'static str) -> Result>; + fn flush(self: &Self) -> Result<()>; + fn cleanup(self: &Self) -> Result<()> { + Ok(()) + } } pub trait Tree: Send + Sync { diff --git a/src/database/abstraction/rocksdb.rs b/src/database/abstraction/rocksdb.rs index 397047bd..a41ed1fb 100644 --- a/src/database/abstraction/rocksdb.rs +++ b/src/database/abstraction/rocksdb.rs @@ -14,8 +14,8 @@ pub struct RocksDbEngineTree<'a> { write_lock: RwLock<()> } -impl DatabaseEngine for Engine { - fn open(config: &Config) -> Result> { +impl DatabaseEngine for Arc { + fn open(config: &Config) -> Result { let mut db_opts = rocksdb::Options::default(); db_opts.create_if_missing(true); db_opts.set_max_open_files(512); @@ -60,7 +60,7 @@ impl DatabaseEngine for Engine { })) } - fn open_tree(self: &Arc, name: &'static str) -> Result> { + fn open_tree(&self, name: &'static str) -> Result> { if !self.old_cfs.contains(&name.to_owned()) { // Create if it didn't exist let mut options = rocksdb::Options::default(); @@ -68,7 +68,6 @@ impl DatabaseEngine for Engine { options.set_prefix_extractor(prefix_extractor); let _ = self.rocks.create_cf(name, &options); - println!("created cf"); } Ok(Arc::new(RocksDbEngineTree { @@ -79,7 +78,7 @@ impl DatabaseEngine for Engine { })) } - fn flush(self: &Arc) -> Result<()> { + fn flush(&self) -> Result<()> { // TODO? 
Ok(()) } diff --git a/src/database/abstraction/sqlite.rs b/src/database/abstraction/sqlite.rs index 31875667..d4fd0bdd 100644 --- a/src/database/abstraction/sqlite.rs +++ b/src/database/abstraction/sqlite.rs @@ -80,8 +80,8 @@ impl Engine { } } -impl DatabaseEngine for Engine { - fn open(config: &Config) -> Result> { +impl DatabaseEngine for Arc { + fn open(config: &Config) -> Result { let path = Path::new(&config.database_path).join("conduit.db"); // calculates cache-size per permanent connection @@ -92,7 +92,7 @@ impl DatabaseEngine for Engine { / ((num_cpus::get().max(1) * 2) + 1) as f64) as u32; - let writer = Mutex::new(Self::prepare_conn(&path, cache_size_per_thread)?); + let writer = Mutex::new(Engine::prepare_conn(&path, cache_size_per_thread)?); let arc = Arc::new(Engine { writer, @@ -105,7 +105,7 @@ impl DatabaseEngine for Engine { Ok(arc) } - fn open_tree(self: &Arc, name: &str) -> Result> { + fn open_tree(&self, name: &str) -> Result> { self.write_lock().execute(&format!("CREATE TABLE IF NOT EXISTS {} ( \"key\" BLOB PRIMARY KEY, \"value\" BLOB NOT NULL )", name), [])?; Ok(Arc::new(SqliteTable { @@ -115,10 +115,14 @@ impl DatabaseEngine for Engine { })) } - fn flush(self: &Arc) -> Result<()> { + fn flush(&self) -> Result<()> { // we enabled PRAGMA synchronous=normal, so this should not be necessary Ok(()) } + + fn cleanup(&self) -> Result<()> { + self.flush_wal() + } } pub struct SqliteTable { diff --git a/src/utils.rs b/src/utils.rs index 4702d051..26d71a8c 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -29,17 +29,6 @@ pub fn increment(old: Option<&[u8]>) -> Option> { Some(number.to_be_bytes().to_vec()) } -#[cfg(feature = "rocksdb")] -pub fn increment_rocksdb( - _new_key: &[u8], - old: Option<&[u8]>, - _operands: &mut rocksdb::MergeOperands, -) -> Option> { - dbg!(_new_key); - dbg!(old); - increment(old) -} - pub fn generate_keypair() -> Vec { let mut value = random_string(8).as_bytes().to_vec(); value.push(0xff); From 71431f330aadb1ee92cd63a36351af834aa65215 Mon Sep 17 00:00:00 2001 From: Andrej Kacian Date: Sun, 9 Jan 2022 20:07:03 +0100 Subject: [PATCH 36/65] Add memory_usage() to DatabaseEngine trait --- src/database/abstraction.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/database/abstraction.rs b/src/database/abstraction.rs index 45627bbc..17bd971f 100644 --- a/src/database/abstraction.rs +++ b/src/database/abstraction.rs @@ -27,6 +27,9 @@ pub trait DatabaseEngine: Send + Sync { fn cleanup(self: &Self) -> Result<()> { Ok(()) } + fn memory_usage(self: &Self) -> Result { + Ok("Current database engine does not support memory usage reporting.".to_string()) + } } pub trait Tree: Send + Sync { From ff243870f850c07907a6944151fd909c234da662 Mon Sep 17 00:00:00 2001 From: Andrej Kacian Date: Sun, 9 Jan 2022 20:07:50 +0100 Subject: [PATCH 37/65] Add "database_memory_usage" AdminCommand --- src/database/admin.rs | 8 ++++++++ src/database/rooms.rs | 3 +++ 2 files changed, 11 insertions(+) diff --git a/src/database/admin.rs b/src/database/admin.rs index 0702bcdd..c3083309 100644 --- a/src/database/admin.rs +++ b/src/database/admin.rs @@ -14,6 +14,7 @@ pub enum AdminCommand { RegisterAppservice(serde_yaml::Value), UnregisterAppservice(String), ListAppservices, + ShowMemoryUsage, SendMessage(RoomMessageEventContent), } @@ -113,6 +114,13 @@ impl Admin { send_message(RoomMessageEventContent::text_plain("Failed to get appservices."), guard, &state_lock); } } + AdminCommand::ShowMemoryUsage => { + if let Ok(response) = guard._db.memory_usage() { + 
send_message(RoomMessageEventContent::text_plain(response), guard, &state_lock); + } else { + send_message(RoomMessageEventContent::text_plain("Failed to get database memory usage".to_string()), guard, &state_lock); + } + } AdminCommand::SendMessage(message) => { send_message(message, guard, &state_lock); } diff --git a/src/database/rooms.rs b/src/database/rooms.rs index 600f46df..0ba6c9ba 100644 --- a/src/database/rooms.rs +++ b/src/database/rooms.rs @@ -1693,6 +1693,9 @@ impl Rooms { )); } } + "database_memory_usage" => { + db.admin.send(AdminCommand::ShowMemoryUsage); + } _ => { db.admin.send(AdminCommand::SendMessage( RoomMessageEventContent::text_plain(format!( From 68ee1a5408595804625a6dd0ebab5f333e7f0fe6 Mon Sep 17 00:00:00 2001 From: Andrej Kacian Date: Sun, 9 Jan 2022 20:08:15 +0100 Subject: [PATCH 38/65] Add rocksdb implementation of memory_usage() --- src/database/abstraction/rocksdb.rs | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/src/database/abstraction/rocksdb.rs b/src/database/abstraction/rocksdb.rs index a41ed1fb..f0affd32 100644 --- a/src/database/abstraction/rocksdb.rs +++ b/src/database/abstraction/rocksdb.rs @@ -82,6 +82,19 @@ impl DatabaseEngine for Arc { // TODO? Ok(()) } + + fn memory_usage(&self) -> Result { + let stats = rocksdb::perf::get_memory_usage_stats(Some(&[&self.rocks]), None)?; + Ok(format!("Approximate memory usage of all the mem-tables: {:.3} MB\n\ + Approximate memory usage of un-flushed mem-tables: {:.3} MB\n\ + Approximate memory usage of all the table readers: {:.3} MB\n\ + Approximate memory usage by cache: {:.3} MB", + stats.mem_table_total as f64 / 1024.0 / 1024.0, + stats.mem_table_unflushed as f64 / 1024.0 / 1024.0, + stats.mem_table_readers_total as f64 / 1024.0 / 1024.0, + stats.cache_total as f64 / 1024.0 / 1024.0 + )) + } } impl RocksDbEngineTree<'_> { From 077e9ad4380715688a8ad5a2f40afd7331157bd5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20K=C3=B6sters?= Date: Mon, 10 Jan 2022 15:53:28 +0100 Subject: [PATCH 39/65] improvement: memory usage for caches --- Cargo.lock | 4 +-- Cargo.toml | 2 +- src/database/abstraction/rocksdb.rs | 40 ++++++++++++++++------------- src/database/admin.rs | 2 +- 4 files changed, 26 insertions(+), 22 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 794445f9..d297102c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2075,9 +2075,9 @@ dependencies = [ [[package]] name = "rocksdb" -version = "0.16.0" +version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c749134fda8bfc90d0de643d59bfc841dcb3ac8a1062e12b6754bd60235c48b3" +checksum = "7a62eca5cacf2c8261128631bed9f045598d40bfbe4b29f5163f0f802f8f44a7" dependencies = [ "libc", "librocksdb-sys", diff --git a/Cargo.toml b/Cargo.toml index c898d4d6..c87d949c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -78,7 +78,7 @@ crossbeam = { version = "0.8.1", optional = true } num_cpus = "1.13.0" threadpool = "1.8.1" heed = { git = "https://github.com/timokoesters/heed.git", rev = "f6f825da7fb2c758867e05ad973ef800a6fe1d5d", optional = true } -rocksdb = { version = "0.16.0", features = ["multi-threaded-cf"], optional = true } +rocksdb = { version = "0.17.0", features = ["multi-threaded-cf"], optional = true } thread_local = "1.1.3" # used for TURN server authentication hmac = "0.11.0" diff --git a/src/database/abstraction/rocksdb.rs b/src/database/abstraction/rocksdb.rs index f0affd32..a7dd6e16 100644 --- a/src/database/abstraction/rocksdb.rs +++ b/src/database/abstraction/rocksdb.rs @@ -4,6 +4,7 @@ use 
std::{future::Future, pin::Pin, sync::Arc, collections::HashMap, sync::RwLoc pub struct Engine { rocks: rocksdb::DBWithThreadMode, + cache: rocksdb::Cache, old_cfs: Vec, } @@ -56,6 +57,7 @@ impl DatabaseEngine for Arc { Ok(Arc::new(Engine { rocks: db, + cache: rocksdb_cache, old_cfs: cfs, })) } @@ -84,33 +86,35 @@ impl DatabaseEngine for Arc { } fn memory_usage(&self) -> Result { - let stats = rocksdb::perf::get_memory_usage_stats(Some(&[&self.rocks]), None)?; - Ok(format!("Approximate memory usage of all the mem-tables: {:.3} MB\n\ + let stats = + rocksdb::perf::get_memory_usage_stats(Some(&[&self.rocks]), Some(&[&self.cache]))?; + Ok(format!( + "Approximate memory usage of all the mem-tables: {:.3} MB\n\ Approximate memory usage of un-flushed mem-tables: {:.3} MB\n\ Approximate memory usage of all the table readers: {:.3} MB\n\ Approximate memory usage by cache: {:.3} MB", - stats.mem_table_total as f64 / 1024.0 / 1024.0, - stats.mem_table_unflushed as f64 / 1024.0 / 1024.0, - stats.mem_table_readers_total as f64 / 1024.0 / 1024.0, - stats.cache_total as f64 / 1024.0 / 1024.0 + stats.mem_table_total as f64 / 1024.0 / 1024.0, + stats.mem_table_unflushed as f64 / 1024.0 / 1024.0, + stats.mem_table_readers_total as f64 / 1024.0 / 1024.0, + stats.cache_total as f64 / 1024.0 / 1024.0 )) } } impl RocksDbEngineTree<'_> { - fn cf(&self) -> rocksdb::BoundColumnFamily<'_> { + fn cf(&self) -> Arc> { self.db.rocks.cf_handle(self.name).unwrap() } } impl Tree for RocksDbEngineTree<'_> { fn get(&self, key: &[u8]) -> Result>> { - Ok(self.db.rocks.get_cf(self.cf(), key)?) + Ok(self.db.rocks.get_cf(&self.cf(), key)?) } fn insert(&self, key: &[u8], value: &[u8]) -> Result<()> { let lock = self.write_lock.read().unwrap(); - self.db.rocks.put_cf(self.cf(), key, value)?; + self.db.rocks.put_cf(&self.cf(), key, value)?; drop(lock); self.watchers.wake(key); @@ -120,21 +124,21 @@ impl Tree for RocksDbEngineTree<'_> { fn insert_batch<'a>(&self, iter: &mut dyn Iterator, Vec)>) -> Result<()> { for (key, value) in iter { - self.db.rocks.put_cf(self.cf(), key, value)?; + self.db.rocks.put_cf(&self.cf(), key, value)?; } Ok(()) } fn remove(&self, key: &[u8]) -> Result<()> { - Ok(self.db.rocks.delete_cf(self.cf(), key)?) + Ok(self.db.rocks.delete_cf(&self.cf(), key)?) 
} fn iter<'a>(&'a self) -> Box, Vec)> + 'a> { Box::new( self.db .rocks - .iterator_cf(self.cf(), rocksdb::IteratorMode::Start) + .iterator_cf(&self.cf(), rocksdb::IteratorMode::Start) .map(|(k, v)| (Vec::from(k), Vec::from(v))), ) } @@ -148,7 +152,7 @@ impl Tree for RocksDbEngineTree<'_> { self.db .rocks .iterator_cf( - self.cf(), + &self.cf(), rocksdb::IteratorMode::From( from, if backwards { @@ -165,9 +169,9 @@ impl Tree for RocksDbEngineTree<'_> { fn increment(&self, key: &[u8]) -> Result> { let lock = self.write_lock.write().unwrap(); - let old = self.db.rocks.get_cf(self.cf(), &key)?; + let old = self.db.rocks.get_cf(&self.cf(), &key)?; let new = utils::increment(old.as_deref()).unwrap(); - self.db.rocks.put_cf(self.cf(), key, &new)?; + self.db.rocks.put_cf(&self.cf(), key, &new)?; drop(lock); Ok(new) @@ -177,9 +181,9 @@ impl Tree for RocksDbEngineTree<'_> { let lock = self.write_lock.write().unwrap(); for key in iter { - let old = self.db.rocks.get_cf(self.cf(), &key)?; + let old = self.db.rocks.get_cf(&self.cf(), &key)?; let new = utils::increment(old.as_deref()).unwrap(); - self.db.rocks.put_cf(self.cf(), key, new)?; + self.db.rocks.put_cf(&self.cf(), key, new)?; } drop(lock); @@ -195,7 +199,7 @@ impl Tree for RocksDbEngineTree<'_> { self.db .rocks .iterator_cf( - self.cf(), + &self.cf(), rocksdb::IteratorMode::From(&prefix, rocksdb::Direction::Forward), ) .map(|(k, v)| (Vec::from(k), Vec::from(v))) diff --git a/src/database/admin.rs b/src/database/admin.rs index c3083309..7d2301d9 100644 --- a/src/database/admin.rs +++ b/src/database/admin.rs @@ -118,7 +118,7 @@ impl Admin { if let Ok(response) = guard._db.memory_usage() { send_message(RoomMessageEventContent::text_plain(response), guard, &state_lock); } else { - send_message(RoomMessageEventContent::text_plain("Failed to get database memory usage".to_string()), guard, &state_lock); + send_message(RoomMessageEventContent::text_plain("Failed to get database memory usage.".to_string()), guard, &state_lock); } } AdminCommand::SendMessage(message) => { From 0bb7d76dec4b3f54b1cbb37e57ddbe54e1dbd38f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20K=C3=B6sters?= Date: Mon, 10 Jan 2022 20:20:45 +0100 Subject: [PATCH 40/65] improvement: rocksdb configuration --- src/database/abstraction/rocksdb.rs | 33 +++++++++++++++-------------- 1 file changed, 17 insertions(+), 16 deletions(-) diff --git a/src/database/abstraction/rocksdb.rs b/src/database/abstraction/rocksdb.rs index a7dd6e16..3f1793a2 100644 --- a/src/database/abstraction/rocksdb.rs +++ b/src/database/abstraction/rocksdb.rs @@ -17,25 +17,21 @@ pub struct RocksDbEngineTree<'a> { impl DatabaseEngine for Arc { fn open(config: &Config) -> Result { - let mut db_opts = rocksdb::Options::default(); - db_opts.create_if_missing(true); - db_opts.set_max_open_files(512); - db_opts.set_compaction_style(rocksdb::DBCompactionStyle::Level); - db_opts.set_compression_type(rocksdb::DBCompressionType::Zstd); - db_opts.set_target_file_size_base(2 << 22); - db_opts.set_max_bytes_for_level_base(2 << 24); - db_opts.set_max_bytes_for_level_multiplier(2.0); - db_opts.set_num_levels(8); - db_opts.set_write_buffer_size(2 << 27); - let rocksdb_cache = rocksdb::Cache::new_lru_cache((config.db_cache_capacity_mb * 1024.0 * 1024.0) as usize) .unwrap(); let mut block_based_options = rocksdb::BlockBasedOptions::default(); - block_based_options.set_block_size(2 << 19); block_based_options.set_block_cache(&rocksdb_cache); + + let mut db_opts = rocksdb::Options::default(); 
db_opts.set_block_based_table_factory(&block_based_options); + db_opts.create_if_missing(true); + db_opts.increase_parallelism(num_cpus::get() as i32); + db_opts.set_max_open_files(512); + db_opts.set_compression_type(rocksdb::DBCompressionType::Zstd); + db_opts.set_compaction_style(rocksdb::DBCompactionStyle::Level); + db_opts.optimize_level_style_compaction((config.db_cache_capacity_mb * 1024.0 * 1024.0) as usize); let cfs = rocksdb::DBWithThreadMode::::list_cf( &db_opts, @@ -90,13 +86,18 @@ impl DatabaseEngine for Arc { rocksdb::perf::get_memory_usage_stats(Some(&[&self.rocks]), Some(&[&self.cache]))?; Ok(format!( "Approximate memory usage of all the mem-tables: {:.3} MB\n\ - Approximate memory usage of un-flushed mem-tables: {:.3} MB\n\ - Approximate memory usage of all the table readers: {:.3} MB\n\ - Approximate memory usage by cache: {:.3} MB", + Approximate memory usage of un-flushed mem-tables: {:.3} MB\n\ + Approximate memory usage of all the table readers: {:.3} MB\n\ + Approximate memory usage by cache: {:.3} MB\n\ + self.cache.get_usage(): {:.3} MB\n\ + self.cache.get_pinned_usage(): {:.3} MB\n\ + ", stats.mem_table_total as f64 / 1024.0 / 1024.0, stats.mem_table_unflushed as f64 / 1024.0 / 1024.0, stats.mem_table_readers_total as f64 / 1024.0 / 1024.0, - stats.cache_total as f64 / 1024.0 / 1024.0 + stats.cache_total as f64 / 1024.0 / 1024.0, + self.cache.get_usage() as f64 / 1024.0 / 1024.0, + self.cache.get_pinned_usage() as f64 / 1024.0 / 1024.0, )) } } From b96822b6174de4d404bf0b9013a39f8fd2a06f87 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20K=C3=B6sters?= Date: Mon, 10 Jan 2022 21:20:29 +0100 Subject: [PATCH 41/65] fix: use db options for column families too --- src/database/abstraction/rocksdb.rs | 57 ++++++++++++++++------------- 1 file changed, 32 insertions(+), 25 deletions(-) diff --git a/src/database/abstraction/rocksdb.rs b/src/database/abstraction/rocksdb.rs index 3f1793a2..c82e4bc8 100644 --- a/src/database/abstraction/rocksdb.rs +++ b/src/database/abstraction/rocksdb.rs @@ -4,6 +4,7 @@ use std::{future::Future, pin::Pin, sync::Arc, collections::HashMap, sync::RwLoc pub struct Engine { rocks: rocksdb::DBWithThreadMode, + cache_capacity_bytes: usize, cache: rocksdb::Cache, old_cfs: Vec, } @@ -15,23 +16,31 @@ pub struct RocksDbEngineTree<'a> { write_lock: RwLock<()> } +fn db_options(cache_capacity_bytes: usize, rocksdb_cache: &rocksdb::Cache) -> rocksdb::Options { + let mut block_based_options = rocksdb::BlockBasedOptions::default(); + block_based_options.set_block_cache(rocksdb_cache); + + let mut db_opts = rocksdb::Options::default(); + db_opts.set_block_based_table_factory(&block_based_options); + db_opts.create_if_missing(true); + db_opts.increase_parallelism(num_cpus::get() as i32); + db_opts.set_max_open_files(512); + db_opts.set_compression_type(rocksdb::DBCompressionType::Zstd); + db_opts.set_compaction_style(rocksdb::DBCompactionStyle::Level); + db_opts.optimize_level_style_compaction(cache_capacity_bytes); + + let prefix_extractor = rocksdb::SliceTransform::create_fixed_prefix(1); + db_opts.set_prefix_extractor(prefix_extractor); + + db_opts +} + impl DatabaseEngine for Arc { fn open(config: &Config) -> Result { - let rocksdb_cache = - rocksdb::Cache::new_lru_cache((config.db_cache_capacity_mb * 1024.0 * 1024.0) as usize) - .unwrap(); - - let mut block_based_options = rocksdb::BlockBasedOptions::default(); - block_based_options.set_block_cache(&rocksdb_cache); - - let mut db_opts = rocksdb::Options::default(); - 
db_opts.set_block_based_table_factory(&block_based_options); - db_opts.create_if_missing(true); - db_opts.increase_parallelism(num_cpus::get() as i32); - db_opts.set_max_open_files(512); - db_opts.set_compression_type(rocksdb::DBCompressionType::Zstd); - db_opts.set_compaction_style(rocksdb::DBCompactionStyle::Level); - db_opts.optimize_level_style_compaction((config.db_cache_capacity_mb * 1024.0 * 1024.0) as usize); + let cache_capacity_bytes = (config.db_cache_capacity_mb * 1024.0 * 1024.0) as usize; + let rocksdb_cache = rocksdb::Cache::new_lru_cache(cache_capacity_bytes).unwrap(); + + let db_opts = db_options(cache_capacity_bytes, &rocksdb_cache); let cfs = rocksdb::DBWithThreadMode::::list_cf( &db_opts, @@ -43,16 +52,16 @@ impl DatabaseEngine for Arc { &db_opts, &config.database_path, cfs.iter().map(|name| { - let mut options = rocksdb::Options::default(); - let prefix_extractor = rocksdb::SliceTransform::create_fixed_prefix(1); - options.set_prefix_extractor(prefix_extractor); - - rocksdb::ColumnFamilyDescriptor::new(name, options) + rocksdb::ColumnFamilyDescriptor::new( + name, + db_options(cache_capacity_bytes, &rocksdb_cache), + ) }), )?; Ok(Arc::new(Engine { rocks: db, + cache_capacity_bytes, cache: rocksdb_cache, old_cfs: cfs, })) @@ -61,11 +70,9 @@ impl DatabaseEngine for Arc { fn open_tree(&self, name: &'static str) -> Result> { if !self.old_cfs.contains(&name.to_owned()) { // Create if it didn't exist - let mut options = rocksdb::Options::default(); - let prefix_extractor = rocksdb::SliceTransform::create_fixed_prefix(1); - options.set_prefix_extractor(prefix_extractor); - - let _ = self.rocks.create_cf(name, &options); + let _ = self + .rocks + .create_cf(name, &db_options(self.cache_capacity_bytes, &self.cache)); } Ok(Arc::new(RocksDbEngineTree { From 7f27af032b7d0cb79248607decd1bb5f2a818507 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20K=C3=B6sters?= Date: Wed, 12 Jan 2022 10:07:10 +0100 Subject: [PATCH 42/65] improvement: optimize rocksdb for spinning disks --- src/database/abstraction/rocksdb.rs | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/src/database/abstraction/rocksdb.rs b/src/database/abstraction/rocksdb.rs index c82e4bc8..32095566 100644 --- a/src/database/abstraction/rocksdb.rs +++ b/src/database/abstraction/rocksdb.rs @@ -20,8 +20,20 @@ fn db_options(cache_capacity_bytes: usize, rocksdb_cache: &rocksdb::Cache) -> ro let mut block_based_options = rocksdb::BlockBasedOptions::default(); block_based_options.set_block_cache(rocksdb_cache); + // "Difference of spinning disk" + // https://zhangyuchi.gitbooks.io/rocksdbbook/content/RocksDB-Tuning-Guide.html + block_based_options.set_block_size(64 * 1024); + block_based_options.set_cache_index_and_filter_blocks(true); + let mut db_opts = rocksdb::Options::default(); db_opts.set_block_based_table_factory(&block_based_options); + db_opts.set_optimize_filters_for_hits(true); + db_opts.set_skip_stats_update_on_db_open(true); + db_opts.set_level_compaction_dynamic_level_bytes(true); + db_opts.set_target_file_size_base(256 * 1024 * 1024); + db_opts.set_compaction_readahead_size(2 * 1024 * 1024); + db_opts.set_use_direct_reads(true); + db_opts.set_use_direct_io_for_flush_and_compaction(true); db_opts.create_if_missing(true); db_opts.increase_parallelism(num_cpus::get() as i32); db_opts.set_max_open_files(512); From 9e77f7617cfcdc8d1c0e1b3146cbef6566ed0dc8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20K=C3=B6sters?= Date: Wed, 12 Jan 2022 12:27:02 +0100 Subject: [PATCH 43/65] fix: disable direct IO again 
--- src/database/abstraction/rocksdb.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/database/abstraction/rocksdb.rs b/src/database/abstraction/rocksdb.rs index 32095566..b7f6d3b6 100644 --- a/src/database/abstraction/rocksdb.rs +++ b/src/database/abstraction/rocksdb.rs @@ -22,7 +22,7 @@ fn db_options(cache_capacity_bytes: usize, rocksdb_cache: &rocksdb::Cache) -> ro // "Difference of spinning disk" // https://zhangyuchi.gitbooks.io/rocksdbbook/content/RocksDB-Tuning-Guide.html - block_based_options.set_block_size(64 * 1024); + block_based_options.set_block_size(4 * 1024); block_based_options.set_cache_index_and_filter_blocks(true); let mut db_opts = rocksdb::Options::default(); @@ -31,9 +31,9 @@ fn db_options(cache_capacity_bytes: usize, rocksdb_cache: &rocksdb::Cache) -> ro db_opts.set_skip_stats_update_on_db_open(true); db_opts.set_level_compaction_dynamic_level_bytes(true); db_opts.set_target_file_size_base(256 * 1024 * 1024); - db_opts.set_compaction_readahead_size(2 * 1024 * 1024); - db_opts.set_use_direct_reads(true); - db_opts.set_use_direct_io_for_flush_and_compaction(true); + //db_opts.set_compaction_readahead_size(2 * 1024 * 1024); + //db_opts.set_use_direct_reads(true); + //db_opts.set_use_direct_io_for_flush_and_compaction(true); db_opts.create_if_missing(true); db_opts.increase_parallelism(num_cpus::get() as i32); db_opts.set_max_open_files(512); From 447639054e21523aed76e408667c5263ccde85ab Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20K=C3=B6sters?= Date: Thu, 13 Jan 2022 21:03:53 +0100 Subject: [PATCH 44/65] improvement: higher default pdu capacity --- src/client_server/device.rs | 2 +- src/database.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/client_server/device.rs b/src/client_server/device.rs index 03a3004b..f240f2e7 100644 --- a/src/client_server/device.rs +++ b/src/client_server/device.rs @@ -85,7 +85,7 @@ pub async fn update_device_route( Ok(update_device::Response {}.into()) } -/// # `PUT /_matrix/client/r0/devices/{deviceId}` +/// # `DELETE /_matrix/client/r0/devices/{deviceId}` /// /// Deletes the given device. 
/// diff --git a/src/database.rs b/src/database.rs index c2b3e2b9..9a71e737 100644 --- a/src/database.rs +++ b/src/database.rs @@ -128,7 +128,7 @@ fn default_db_cache_capacity_mb() -> f64 { } fn default_pdu_cache_capacity() -> u32 { - 100_000 + 1_000_000 } fn default_cleanup_second_interval() -> u32 { From a336027b0e45b512c55e4c0b68e095d40ebd01ae Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20K=C3=B6sters?= Date: Thu, 13 Jan 2022 21:11:45 +0100 Subject: [PATCH 45/65] fix: better memory usage message --- src/database/abstraction/rocksdb.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/database/abstraction/rocksdb.rs b/src/database/abstraction/rocksdb.rs index b7f6d3b6..d1706d45 100644 --- a/src/database/abstraction/rocksdb.rs +++ b/src/database/abstraction/rocksdb.rs @@ -108,14 +108,12 @@ impl DatabaseEngine for Arc { Approximate memory usage of un-flushed mem-tables: {:.3} MB\n\ Approximate memory usage of all the table readers: {:.3} MB\n\ Approximate memory usage by cache: {:.3} MB\n\ - self.cache.get_usage(): {:.3} MB\n\ - self.cache.get_pinned_usage(): {:.3} MB\n\ + Approximate memory usage by cache pinned: {:.3} MB\n\ ", stats.mem_table_total as f64 / 1024.0 / 1024.0, stats.mem_table_unflushed as f64 / 1024.0 / 1024.0, stats.mem_table_readers_total as f64 / 1024.0 / 1024.0, stats.cache_total as f64 / 1024.0 / 1024.0, - self.cache.get_usage() as f64 / 1024.0 / 1024.0, self.cache.get_pinned_usage() as f64 / 1024.0 / 1024.0, )) } From 6fa01aa9826c2a4f7643289e0b86aee40efc59d2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20K=C3=B6sters?= Date: Thu, 13 Jan 2022 21:46:20 +0100 Subject: [PATCH 46/65] fix: remove dbg --- src/database/abstraction/sqlite.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/database/abstraction/sqlite.rs b/src/database/abstraction/sqlite.rs index d4fd0bdd..f80f50e4 100644 --- a/src/database/abstraction/sqlite.rs +++ b/src/database/abstraction/sqlite.rs @@ -136,7 +136,7 @@ type TupleOfBytes = (Vec, Vec); impl SqliteTable { #[tracing::instrument(skip(self, guard, key))] fn get_with_guard(&self, guard: &Connection, key: &[u8]) -> Result>> { - dbg!(&self.name); + //dbg!(&self.name); Ok(guard .prepare(format!("SELECT value FROM {} WHERE key = ?", self.name).as_str())? 
.query_row([key], |row| row.get(0)) @@ -145,7 +145,7 @@ impl SqliteTable { #[tracing::instrument(skip(self, guard, key, value))] fn insert_with_guard(&self, guard: &Connection, key: &[u8], value: &[u8]) -> Result<()> { - dbg!(&self.name); + //dbg!(&self.name); guard.execute( format!( "INSERT OR REPLACE INTO {} (key, value) VALUES (?, ?)", @@ -179,7 +179,7 @@ impl SqliteTable { .query_map([], |row| Ok((row.get_unwrap(0), row.get_unwrap(1)))) .unwrap() .map(move |r| { - dbg!(&name); + //dbg!(&name); r.unwrap() }), ); @@ -286,7 +286,7 @@ impl Tree for SqliteTable { .query_map([from], |row| Ok((row.get_unwrap(0), row.get_unwrap(1)))) .unwrap() .map(move |r| { - dbg!(&name); + //dbg!(&name); r.unwrap() }), ); @@ -311,7 +311,7 @@ impl Tree for SqliteTable { .query_map([from], |row| Ok((row.get_unwrap(0), row.get_unwrap(1)))) .unwrap() .map(move |r| { - dbg!(&name); + //dbg!(&name); r.unwrap() }), ); From 16f826773bc26dd388f04e3e862bef7d1be9cdeb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20K=C3=B6sters?= Date: Thu, 13 Jan 2022 22:47:30 +0100 Subject: [PATCH 47/65] refactor: fix warnings --- src/database/abstraction/rocksdb.rs | 4 ++-- src/database/abstraction/sqlite.rs | 4 ++-- src/database/abstraction/watchers.rs | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/database/abstraction/rocksdb.rs b/src/database/abstraction/rocksdb.rs index d1706d45..79a3d82a 100644 --- a/src/database/abstraction/rocksdb.rs +++ b/src/database/abstraction/rocksdb.rs @@ -1,6 +1,6 @@ use super::{super::Config, watchers::Watchers, DatabaseEngine, Tree}; use crate::{utils, Result}; -use std::{future::Future, pin::Pin, sync::Arc, collections::HashMap, sync::RwLock}; +use std::{future::Future, pin::Pin, sync::Arc, sync::RwLock}; pub struct Engine { rocks: rocksdb::DBWithThreadMode, @@ -13,7 +13,7 @@ pub struct RocksDbEngineTree<'a> { db: Arc, name: &'a str, watchers: Watchers, - write_lock: RwLock<()> + write_lock: RwLock<()>, } fn db_options(cache_capacity_bytes: usize, rocksdb_cache: &rocksdb::Cache) -> rocksdb::Options { diff --git a/src/database/abstraction/sqlite.rs b/src/database/abstraction/sqlite.rs index f80f50e4..d4aab7dd 100644 --- a/src/database/abstraction/sqlite.rs +++ b/src/database/abstraction/sqlite.rs @@ -172,7 +172,7 @@ impl SqliteTable { let statement_ref = NonAliasingBox(statement); - let name = self.name.clone(); + //let name = self.name.clone(); let iterator = Box::new( statement @@ -267,7 +267,7 @@ impl Tree for SqliteTable { let guard = self.engine.read_lock_iterator(); let from = from.to_vec(); // TODO change interface? 
- let name = self.name.clone(); + //let name = self.name.clone(); if backwards { let statement = Box::leak(Box::new( diff --git a/src/database/abstraction/watchers.rs b/src/database/abstraction/watchers.rs index fec1f27a..55cb60b3 100644 --- a/src/database/abstraction/watchers.rs +++ b/src/database/abstraction/watchers.rs @@ -1,8 +1,8 @@ use std::{ collections::{hash_map, HashMap}, - sync::RwLock, future::Future, pin::Pin, + sync::RwLock, }; use tokio::sync::watch; From f67785caaf6a4be5c7d330df0f7a89781aa21f91 Mon Sep 17 00:00:00 2001 From: Jonas Zohren Date: Thu, 13 Jan 2022 22:24:47 +0000 Subject: [PATCH 48/65] Fix(ci): Disable CARGO_HOME caching --- .gitlab-ci.yml | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 1dedd8ff..f47327b8 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -23,18 +23,12 @@ variables: interruptible: true image: "rust:latest" tags: ["docker"] - cache: - paths: - - cargohome - key: "build_cache--$TARGET--$CI_COMMIT_BRANCH" variables: CARGO_PROFILE_RELEASE_LTO: "true" CARGO_PROFILE_RELEASE_CODEGEN_UNITS: "1" CARGO_INCREMENTAL: "false" # https://matklad.github.io/2021/09/04/fast-rust-builds.html#ci-workflow - CARGO_HOME: $CI_PROJECT_DIR/cargohome before_script: - 'echo "Building for target $TARGET"' - - "mkdir -p $CARGO_HOME" - "rustc --version && cargo --version && rustup show" # Print version info for debugging - "rustup target add $TARGET" # If provided, bring in caching through sccache, which uses an external S3 endpoint to store compilation results: @@ -219,15 +213,10 @@ test:cargo: image: "rust:latest" tags: ["docker"] variables: - CARGO_HOME: "$CI_PROJECT_DIR/cargohome" CARGO_INCREMENTAL: "false" # https://matklad.github.io/2021/09/04/fast-rust-builds.html#ci-workflow - cache: - paths: - - cargohome - key: "test_cache--$CI_COMMIT_BRANCH" interruptible: true before_script: - - mkdir -p $CARGO_HOME + # - mkdir -p $CARGO_HOME - apt-get update -yqq - apt-get install -yqq --no-install-recommends build-essential libssl-dev pkg-config - rustup component add clippy rustfmt From 80e51986c42ea449a3f1d7860c16722431f4fcaf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20K=C3=B6sters?= Date: Fri, 14 Jan 2022 11:08:31 +0100 Subject: [PATCH 49/65] improvement: better default cache capacity --- src/database.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/database.rs b/src/database.rs index 9a71e737..d688ff9f 100644 --- a/src/database.rs +++ b/src/database.rs @@ -124,7 +124,7 @@ fn default_database_backend() -> String { } fn default_db_cache_capacity_mb() -> f64 { - 200.0 + 10.0 } fn default_pdu_cache_capacity() -> u32 { From d434dfb3a56afde239023685ca0a8d191355314b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20K=C3=B6sters?= Date: Fri, 14 Jan 2022 11:40:49 +0100 Subject: [PATCH 50/65] feat: config option for rocksdb max open files --- src/database.rs | 6 ++++++ src/database/abstraction/rocksdb.rs | 29 ++++++++++++++++++++++------- 2 files changed, 28 insertions(+), 7 deletions(-) diff --git a/src/database.rs b/src/database.rs index d688ff9f..fd7a1451 100644 --- a/src/database.rs +++ b/src/database.rs @@ -49,6 +49,8 @@ pub struct Config { database_path: String, #[serde(default = "default_db_cache_capacity_mb")] db_cache_capacity_mb: f64, + #[serde(default = "default_rocksdb_max_open_files")] + rocksdb_max_open_files: i32, #[serde(default = "default_pdu_cache_capacity")] pdu_cache_capacity: u32, #[serde(default = "default_cleanup_second_interval")] @@ -127,6 +129,10 @@ fn 
default_db_cache_capacity_mb() -> f64 { 10.0 } +fn default_rocksdb_max_open_files() -> i32 { + 512 +} + fn default_pdu_cache_capacity() -> u32 { 1_000_000 } diff --git a/src/database/abstraction/rocksdb.rs b/src/database/abstraction/rocksdb.rs index 79a3d82a..adda6787 100644 --- a/src/database/abstraction/rocksdb.rs +++ b/src/database/abstraction/rocksdb.rs @@ -5,6 +5,7 @@ use std::{future::Future, pin::Pin, sync::Arc, sync::RwLock}; pub struct Engine { rocks: rocksdb::DBWithThreadMode, cache_capacity_bytes: usize, + max_open_files: i32, cache: rocksdb::Cache, old_cfs: Vec, } @@ -16,7 +17,11 @@ pub struct RocksDbEngineTree<'a> { write_lock: RwLock<()>, } -fn db_options(cache_capacity_bytes: usize, rocksdb_cache: &rocksdb::Cache) -> rocksdb::Options { +fn db_options( + cache_capacity_bytes: usize, + max_open_files: i32, + rocksdb_cache: &rocksdb::Cache, +) -> rocksdb::Options { let mut block_based_options = rocksdb::BlockBasedOptions::default(); block_based_options.set_block_cache(rocksdb_cache); @@ -36,7 +41,7 @@ fn db_options(cache_capacity_bytes: usize, rocksdb_cache: &rocksdb::Cache) -> ro //db_opts.set_use_direct_io_for_flush_and_compaction(true); db_opts.create_if_missing(true); db_opts.increase_parallelism(num_cpus::get() as i32); - db_opts.set_max_open_files(512); + db_opts.set_max_open_files(max_open_files); db_opts.set_compression_type(rocksdb::DBCompressionType::Zstd); db_opts.set_compaction_style(rocksdb::DBCompactionStyle::Level); db_opts.optimize_level_style_compaction(cache_capacity_bytes); @@ -52,7 +57,11 @@ impl DatabaseEngine for Arc { let cache_capacity_bytes = (config.db_cache_capacity_mb * 1024.0 * 1024.0) as usize; let rocksdb_cache = rocksdb::Cache::new_lru_cache(cache_capacity_bytes).unwrap(); - let db_opts = db_options(cache_capacity_bytes, &rocksdb_cache); + let db_opts = db_options( + cache_capacity_bytes, + config.rocksdb_max_open_files, + &rocksdb_cache, + ); let cfs = rocksdb::DBWithThreadMode::::list_cf( &db_opts, @@ -66,7 +75,11 @@ impl DatabaseEngine for Arc { cfs.iter().map(|name| { rocksdb::ColumnFamilyDescriptor::new( name, - db_options(cache_capacity_bytes, &rocksdb_cache), + db_options( + cache_capacity_bytes, + config.rocksdb_max_open_files, + &rocksdb_cache, + ), ) }), )?; @@ -74,6 +87,7 @@ impl DatabaseEngine for Arc { Ok(Arc::new(Engine { rocks: db, cache_capacity_bytes, + max_open_files: config.rocksdb_max_open_files, cache: rocksdb_cache, old_cfs: cfs, })) @@ -82,9 +96,10 @@ impl DatabaseEngine for Arc { fn open_tree(&self, name: &'static str) -> Result> { if !self.old_cfs.contains(&name.to_owned()) { // Create if it didn't exist - let _ = self - .rocks - .create_cf(name, &db_options(self.cache_capacity_bytes, &self.cache)); + let _ = self.rocks.create_cf( + name, + &db_options(self.cache_capacity_bytes, self.max_open_files, &self.cache), + ); } Ok(Arc::new(RocksDbEngineTree { From 10f1da12bfa17c05ae219913c411fd3c27dc3a29 Mon Sep 17 00:00:00 2001 From: Jonas Zohren Date: Sun, 16 Jan 2022 20:57:23 +0000 Subject: [PATCH 51/65] CI: Fix cargo-test --- .gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index f47327b8..73a1a928 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -218,7 +218,7 @@ test:cargo: before_script: # - mkdir -p $CARGO_HOME - apt-get update -yqq - - apt-get install -yqq --no-install-recommends build-essential libssl-dev pkg-config + - apt-get install -yqq --no-install-recommends build-essential libssl-dev pkg-config libclang-dev - rustup component add clippy rustfmt - 
curl "https://faulty-storage.de/gitlab-report" --output ./gitlab-report && chmod +x ./gitlab-report # If provided, bring in caching through sccache, which uses an external S3 endpoint to store compilation results: From ee8e72f7a809cfbe58697ad69aff437d35e1404f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20K=C3=B6sters?= Date: Mon, 17 Jan 2022 14:35:38 +0100 Subject: [PATCH 52/65] feat: implement server ACLs --- Cargo.lock | 48 +++++++---- Cargo.toml | 2 +- src/client_server/membership.rs | 4 +- src/client_server/message.rs | 4 +- src/client_server/state.rs | 4 +- src/client_server/to_device.rs | 4 +- src/database/abstraction/rocksdb.rs | 2 +- src/database/sending.rs | 8 +- src/database/transaction_ids.rs | 6 +- src/server_server.rs | 126 +++++++++++++++++++++++----- 10 files changed, 150 insertions(+), 58 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d297102c..5be10f14 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2086,7 +2086,7 @@ dependencies = [ [[package]] name = "ruma" version = "0.4.0" -source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" +source = "git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a" dependencies = [ "assign", "js_int", @@ -2107,7 +2107,7 @@ dependencies = [ [[package]] name = "ruma-api" version = "0.18.5" -source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" +source = "git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a" dependencies = [ "bytes", "http", @@ -2123,7 +2123,7 @@ dependencies = [ [[package]] name = "ruma-api-macros" version = "0.18.5" -source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" +source = "git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a" dependencies = [ "proc-macro-crate", "proc-macro2", @@ -2134,7 +2134,7 @@ dependencies = [ [[package]] name = "ruma-appservice-api" version = "0.4.0" -source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" +source = "git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a" dependencies = [ "ruma-api", "ruma-common", @@ -2148,7 +2148,7 @@ dependencies = [ [[package]] name = "ruma-client-api" version = "0.12.3" -source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" +source = "git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a" dependencies = [ "assign", "bytes", @@ -2168,7 +2168,7 @@ dependencies = [ [[package]] name = "ruma-common" version = "0.6.0" -source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" +source = "git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a" dependencies = [ "indexmap", "js_int", @@ -2183,7 +2183,7 @@ dependencies = [ [[package]] name = "ruma-events" version = "0.24.6" -source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" +source = 
"git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a" dependencies = [ "indoc", "js_int", @@ -2194,12 +2194,13 @@ dependencies = [ "serde", "serde_json", "thiserror", + "wildmatch", ] [[package]] name = "ruma-events-macros" version = "0.24.6" -source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" +source = "git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a" dependencies = [ "proc-macro-crate", "proc-macro2", @@ -2210,7 +2211,7 @@ dependencies = [ [[package]] name = "ruma-federation-api" version = "0.3.1" -source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" +source = "git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a" dependencies = [ "js_int", "ruma-api", @@ -2225,7 +2226,7 @@ dependencies = [ [[package]] name = "ruma-identifiers" version = "0.20.0" -source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" +source = "git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a" dependencies = [ "percent-encoding", "rand 0.8.4", @@ -2234,12 +2235,13 @@ dependencies = [ "ruma-serde", "ruma-serde-macros", "serde", + "uuid", ] [[package]] name = "ruma-identifiers-macros" version = "0.20.0" -source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" +source = "git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a" dependencies = [ "quote", "ruma-identifiers-validation", @@ -2249,7 +2251,7 @@ dependencies = [ [[package]] name = "ruma-identifiers-validation" version = "0.5.0" -source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" +source = "git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a" dependencies = [ "thiserror", ] @@ -2257,7 +2259,7 @@ dependencies = [ [[package]] name = "ruma-identity-service-api" version = "0.3.0" -source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" +source = "git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a" dependencies = [ "js_int", "ruma-api", @@ -2270,7 +2272,7 @@ dependencies = [ [[package]] name = "ruma-push-gateway-api" version = "0.3.0" -source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" +source = "git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a" dependencies = [ "js_int", "ruma-api", @@ -2285,8 +2287,9 @@ dependencies = [ [[package]] name = "ruma-serde" version = "0.5.0" -source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" +source = "git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a" dependencies = [ + "base64 0.13.0", "bytes", "form_urlencoded", "itoa 0.4.8", @@ 
-2299,7 +2302,7 @@ dependencies = [ [[package]] name = "ruma-serde-macros" version = "0.5.0" -source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" +source = "git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a" dependencies = [ "proc-macro-crate", "proc-macro2", @@ -2310,7 +2313,7 @@ dependencies = [ [[package]] name = "ruma-signatures" version = "0.9.0" -source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" +source = "git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a" dependencies = [ "base64 0.13.0", "ed25519-dalek", @@ -2327,7 +2330,7 @@ dependencies = [ [[package]] name = "ruma-state-res" version = "0.4.1" -source = "git+https://github.com/ruma/ruma?rev=f8ba7f795765bf4aeb4db06849f9fdde9c162ac3#f8ba7f795765bf4aeb4db06849f9fdde9c162ac3" +source = "git+https://github.com/ruma/ruma?rev=08d60b3d376b63462f769d4b9bd3bbfb560d501a#08d60b3d376b63462f769d4b9bd3bbfb560d501a" dependencies = [ "itertools", "js_int", @@ -3308,6 +3311,15 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "uuid" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7" +dependencies = [ + "getrandom 0.2.3", +] + [[package]] name = "vcpkg" version = "0.2.15" diff --git a/Cargo.toml b/Cargo.toml index c87d949c..29a090c7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -19,7 +19,7 @@ rocket = { version = "0.5.0-rc.1", features = ["tls"] } # Used to handle request # Used for matrix spec type definitions and helpers #ruma = { version = "0.4.0", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] } -ruma = { git = "https://github.com/ruma/ruma", rev = "f8ba7f795765bf4aeb4db06849f9fdde9c162ac3", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] } +ruma = { git = "https://github.com/ruma/ruma", rev = "08d60b3d376b63462f769d4b9bd3bbfb560d501a", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] } #ruma = { git = "https://github.com/timokoesters/ruma", rev = "50c1db7e0a3a21fc794b0cce3b64285a4c750c71", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] } #ruma = { path = "../ruma/crates/ruma", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] } diff --git a/src/client_server/membership.rs b/src/client_server/membership.rs index cede51f0..70352784 100644 --- a/src/client_server/membership.rs +++ b/src/client_server/membership.rs @@ -23,7 +23,7 @@ use ruma::{ }, EventType, }, - serde::{to_canonical_value, CanonicalJsonObject, CanonicalJsonValue}, + serde::{to_canonical_value, Base64, CanonicalJsonObject, CanonicalJsonValue}, state_res::{self, RoomVersion}, uint, EventId, RoomId, RoomVersionId, ServerName, UserId, }; @@ -787,7 +787,7 @@ async fn join_room_by_id_helper( 
fn validate_and_add_event_id( pdu: &RawJsonValue, room_version: &RoomVersionId, - pub_key_map: &RwLock>>, + pub_key_map: &RwLock>>, db: &Database, ) -> Result<(Box, CanonicalJsonObject)> { let mut value: CanonicalJsonObject = serde_json::from_str(pdu.get()).map_err(|e| { diff --git a/src/client_server/message.rs b/src/client_server/message.rs index 9705e4c0..36653fab 100644 --- a/src/client_server/message.rs +++ b/src/client_server/message.rs @@ -74,11 +74,11 @@ pub async fn send_message_event_route( } let mut unsigned = BTreeMap::new(); - unsigned.insert("transaction_id".to_owned(), body.txn_id.clone().into()); + unsigned.insert("transaction_id".to_owned(), body.txn_id.to_string().into()); let event_id = db.rooms.build_and_append_pdu( PduBuilder { - event_type: EventType::from(&body.event_type), + event_type: EventType::from(&*body.event_type), content: serde_json::from_str(body.body.body.json().get()) .map_err(|_| Error::BadRequest(ErrorKind::BadJson, "Invalid JSON body."))?, unsigned: Some(unsigned), diff --git a/src/client_server/state.rs b/src/client_server/state.rs index e42694ae..c07d4825 100644 --- a/src/client_server/state.rs +++ b/src/client_server/state.rs @@ -44,7 +44,7 @@ pub async fn send_state_event_for_key_route( &db, sender_user, &body.room_id, - EventType::from(&body.event_type), + EventType::from(&*body.event_type), &body.body.body, // Yes, I hate it too body.state_key.to_owned(), ) @@ -86,7 +86,7 @@ pub async fn send_state_event_for_empty_key_route( &db, sender_user, &body.room_id, - EventType::from(&body.event_type), + EventType::from(&*body.event_type), &body.body.body, body.state_key.to_owned(), ) diff --git a/src/client_server/to_device.rs b/src/client_server/to_device.rs index 177b1234..6e764deb 100644 --- a/src/client_server/to_device.rs +++ b/src/client_server/to_device.rs @@ -53,8 +53,8 @@ pub async fn send_event_to_device_route( serde_json::to_vec(&federation::transactions::edu::Edu::DirectToDevice( DirectDeviceContent { sender: sender_user.clone(), - ev_type: EventType::from(&body.event_type), - message_id: body.txn_id.clone(), + ev_type: EventType::from(&*body.event_type), + message_id: body.txn_id.to_string(), messages, }, )) diff --git a/src/database/abstraction/rocksdb.rs b/src/database/abstraction/rocksdb.rs index adda6787..15ea9f73 100644 --- a/src/database/abstraction/rocksdb.rs +++ b/src/database/abstraction/rocksdb.rs @@ -44,7 +44,7 @@ fn db_options( db_opts.set_max_open_files(max_open_files); db_opts.set_compression_type(rocksdb::DBCompressionType::Zstd); db_opts.set_compaction_style(rocksdb::DBCompactionStyle::Level); - db_opts.optimize_level_style_compaction(cache_capacity_bytes); + db_opts.optimize_level_style_compaction(10 * 1024 * 1024); let prefix_extractor = rocksdb::SliceTransform::create_fixed_prefix(1); db_opts.set_prefix_extractor(prefix_extractor); diff --git a/src/database/sending.rs b/src/database/sending.rs index 1e180d43..65284a4f 100644 --- a/src/database/sending.rs +++ b/src/database/sending.rs @@ -524,7 +524,7 @@ impl Sending { .unwrap(), // TODO: handle error appservice::event::push_events::v1::Request { events: &pdu_jsons, - txn_id: &base64::encode_config( + txn_id: (&*base64::encode_config( Self::calculate_hash( &events .iter() @@ -534,7 +534,7 @@ impl Sending { .collect::>(), ), base64::URL_SAFE_NO_PAD, - ), + )).into(), }, ) .await @@ -682,7 +682,7 @@ impl Sending { pdus: &pdu_jsons, edus: &edu_jsons, origin_server_ts: MilliSecondsSinceUnixEpoch::now(), - transaction_id: &base64::encode_config( + transaction_id: 
(&*base64::encode_config( Self::calculate_hash( &events .iter() @@ -692,7 +692,7 @@ impl Sending { .collect::>(), ), base64::URL_SAFE_NO_PAD, - ), + )).into(), }, ) .await diff --git a/src/database/transaction_ids.rs b/src/database/transaction_ids.rs index f3467572..d576083a 100644 --- a/src/database/transaction_ids.rs +++ b/src/database/transaction_ids.rs @@ -1,7 +1,7 @@ use std::sync::Arc; use crate::Result; -use ruma::{DeviceId, UserId}; +use ruma::{DeviceId, UserId, identifiers::TransactionId}; use super::abstraction::Tree; @@ -14,7 +14,7 @@ impl TransactionIds { &self, user_id: &UserId, device_id: Option<&DeviceId>, - txn_id: &str, + txn_id: &TransactionId, data: &[u8], ) -> Result<()> { let mut key = user_id.as_bytes().to_vec(); @@ -32,7 +32,7 @@ impl TransactionIds { &self, user_id: &UserId, device_id: Option<&DeviceId>, - txn_id: &str, + txn_id: &TransactionId, ) -> Result>> { let mut key = user_id.as_bytes().to_vec(); key.push(0xff); diff --git a/src/server_server.rs b/src/server_server.rs index c76afd34..5cd43d81 100644 --- a/src/server_server.rs +++ b/src/server_server.rs @@ -42,6 +42,7 @@ use ruma::{ events::{ receipt::{ReceiptEvent, ReceiptEventContent}, room::{ + server_acl::RoomServerAclEventContent, create::RoomCreateEventContent, member::{MembershipState, RoomMemberEventContent}, }, @@ -49,7 +50,7 @@ use ruma::{ }, int, receipt::ReceiptType, - serde::JsonObject, + serde::{Base64, JsonObject}, signatures::{CanonicalJsonObject, CanonicalJsonValue}, state_res::{self, RoomVersion, StateMap}, to_device::DeviceIdOrAllDevices, @@ -551,7 +552,7 @@ pub fn get_server_keys_route(db: DatabaseGuard) -> Json { .try_into() .expect("found invalid server signing keys in DB"), VerifyKey { - key: base64::encode_config(db.globals.keypair().public_key(), base64::STANDARD_NO_PAD), + key: Base64::new(db.globals.keypair().public_key().to_vec()), }, ); let mut response = serde_json::from_slice( @@ -740,6 +741,8 @@ pub async fn send_transaction_message_route( } }; + acl_check(&body.origin, &room_id, &db)?; + let mutex = Arc::clone( db.globals .roomid_mutex_federation @@ -854,7 +857,7 @@ pub async fn send_transaction_message_route( // Check if this is a new transaction id if db .transaction_ids - .existing_txnid(&sender, None, &message_id)? + .existing_txnid(&sender, None, (&*message_id).into())? 
.is_some() { continue; @@ -902,7 +905,7 @@ pub async fn send_transaction_message_route( // Save transaction id with empty data db.transaction_ids - .add_txnid(&sender, None, &message_id, &[])?; + .add_txnid(&sender, None, (&*message_id).into(), &[])?; } Edu::_Custom(_) => {} } @@ -948,7 +951,7 @@ pub(crate) async fn handle_incoming_pdu<'a>( value: BTreeMap, is_timeline_event: bool, db: &'a Database, - pub_key_map: &'a RwLock>>, + pub_key_map: &'a RwLock>>, ) -> Result>, String> { match db.rooms.exists(room_id) { Ok(true) => {} @@ -1123,7 +1126,7 @@ fn handle_outlier_pdu<'a>( room_id: &'a RoomId, value: BTreeMap, db: &'a Database, - pub_key_map: &'a RwLock>>, + pub_key_map: &'a RwLock>>, ) -> AsyncRecursiveType<'a, Result<(Arc, BTreeMap), String>> { Box::pin(async move { // TODO: For RoomVersion6 we must check that Raw<..> is canonical do we anywhere?: https://matrix.org/docs/spec/rooms/v6#canonical-json @@ -1285,7 +1288,7 @@ async fn upgrade_outlier_to_timeline_pdu( origin: &ServerName, db: &Database, room_id: &RoomId, - pub_key_map: &RwLock>>, + pub_key_map: &RwLock>>, ) -> Result>, String> { if let Ok(Some(pduid)) = db.rooms.get_pdu_id(&incoming_pdu.event_id) { return Ok(Some(pduid)); @@ -1827,7 +1830,7 @@ pub(crate) fn fetch_and_handle_outliers<'a>( events: &'a [Arc], create_event: &'a PduEvent, room_id: &'a RoomId, - pub_key_map: &'a RwLock>>, + pub_key_map: &'a RwLock>>, ) -> AsyncRecursiveType<'a, Vec<(Arc, Option>)>> { Box::pin(async move { let back_off = |id| match db.globals.bad_event_ratelimiter.write().unwrap().entry(id) { @@ -1966,9 +1969,9 @@ pub(crate) async fn fetch_signing_keys( db: &Database, origin: &ServerName, signature_ids: Vec, -) -> Result> { +) -> Result> { let contains_all_ids = - |keys: &BTreeMap| signature_ids.iter().all(|id| keys.contains_key(id)); + |keys: &BTreeMap| signature_ids.iter().all(|id| keys.contains_key(id)); let permit = db .globals @@ -2355,8 +2358,11 @@ pub fn get_event_route( let room_id = <&RoomId>::try_from(room_id_str) .map_err(|_| Error::bad_database("Invalid room id field in event in database"))?; - if !db.rooms.server_in_room(sender_servername, room_id)? { - return Err(Error::BadRequest(ErrorKind::NotFound, "Event not found.")); + if !db.rooms.server_in_room(sender_servername, &room_id)? { + return Err(Error::BadRequest( + ErrorKind::Forbidden, + "Server is not in room", + )); } Ok(get_event::v1::Response { @@ -2395,6 +2401,8 @@ pub fn get_missing_events_route( )); } + acl_check(sender_servername, &body.room_id, &db)?; + let mut queued_events = body.latest_events.clone(); let mut events = Vec::new(); @@ -2464,6 +2472,15 @@ pub fn get_event_authorization_route( .as_ref() .expect("server is authenticated"); + if !db.rooms.server_in_room(sender_servername, &body.room_id)? { + return Err(Error::BadRequest( + ErrorKind::Forbidden, + "Server is not in room.", + )); + } + + acl_check(sender_servername, &body.room_id, &db)?; + let event = db .rooms .get_pdu_json(&body.event_id)? @@ -2477,10 +2494,6 @@ pub fn get_event_authorization_route( let room_id = <&RoomId>::try_from(room_id_str) .map_err(|_| Error::bad_database("Invalid room id field in event in database"))?; - if !db.rooms.server_in_room(sender_servername, room_id)? 
{ - return Err(Error::BadRequest(ErrorKind::NotFound, "Event not found.")); - } - let auth_chain_ids = get_auth_chain(room_id, vec![Arc::from(&*body.event_id)], &db)?; Ok(get_event_authorization::v1::Response { @@ -2520,6 +2533,8 @@ pub fn get_room_state_route( )); } + acl_check(sender_servername, &body.room_id, &db)?; + let shortstatehash = db .rooms .pdu_shortstatehash(&body.event_id)? @@ -2583,6 +2598,8 @@ pub fn get_room_state_ids_route( )); } + acl_check(sender_servername, &body.room_id, &db)?; + let shortstatehash = db .rooms .pdu_shortstatehash(&body.event_id)? @@ -2626,10 +2643,17 @@ pub fn create_join_event_template_route( if !db.rooms.exists(&body.room_id)? { return Err(Error::BadRequest( ErrorKind::NotFound, - "Server is not in room.", + "Room is unknown to this server.", )); } + let sender_servername = body + .sender_servername + .as_ref() + .expect("server is authenticated"); + + acl_check(sender_servername, &body.room_id, &db)?; + let prev_events: Vec<_> = db .rooms .get_pdu_leaves(&body.room_id)? @@ -2782,6 +2806,7 @@ pub fn create_join_event_template_route( async fn create_join_event( db: &DatabaseGuard, + sender_servername: &ServerName, room_id: &RoomId, pdu: &RawJsonValue, ) -> Result { @@ -2789,6 +2814,15 @@ async fn create_join_event( return Err(Error::bad_config("Federation is disabled.")); } + if !db.rooms.exists(room_id)? { + return Err(Error::BadRequest( + ErrorKind::NotFound, + "Room is unknown to this server.", + )); + } + + acl_check(sender_servername, room_id, &db)?; + // We need to return the state prior to joining, let's keep a reference to that here let shortstatehash = db .rooms @@ -2888,7 +2922,12 @@ pub async fn create_join_event_v1_route( db: DatabaseGuard, body: Ruma>, ) -> ConduitResult { - let room_state = create_join_event(&db, &body.room_id, &body.pdu).await?; + let sender_servername = body + .sender_servername + .as_ref() + .expect("server is authenticated"); + + let room_state = create_join_event(&db, sender_servername, &body.room_id, &body.pdu).await?; Ok(create_join_event::v1::Response { room_state }.into()) } @@ -2905,7 +2944,12 @@ pub async fn create_join_event_v2_route( db: DatabaseGuard, body: Ruma>, ) -> ConduitResult { - let room_state = create_join_event(&db, &body.room_id, &body.pdu).await?; + let sender_servername = body + .sender_servername + .as_ref() + .expect("server is authenticated"); + + let room_state = create_join_event(&db, sender_servername, &body.room_id, &body.pdu).await?; Ok(create_join_event::v2::Response { room_state }.into()) } @@ -2926,6 +2970,13 @@ pub async fn create_invite_route( return Err(Error::bad_config("Federation is disabled.")); } + let sender_servername = body + .sender_servername + .as_ref() + .expect("server is authenticated"); + + acl_check(sender_servername, &body.room_id, &db)?; + if body.room_version != RoomVersionId::V5 && body.room_version != RoomVersionId::V6 { return Err(Error::BadRequest( ErrorKind::IncompatibleRoomVersion { @@ -3199,7 +3250,7 @@ pub async fn claim_keys_route( #[tracing::instrument(skip(event, pub_key_map, db))] pub(crate) async fn fetch_required_signing_keys( event: &BTreeMap, - pub_key_map: &RwLock>>, + pub_key_map: &RwLock>>, db: &Database, ) -> Result<()> { let signatures = event @@ -3253,7 +3304,7 @@ fn get_server_keys_from_cache( pdu: &RawJsonValue, servers: &mut BTreeMap, BTreeMap, QueryCriteria>>, room_version: &RoomVersionId, - pub_key_map: &mut RwLockWriteGuard<'_, BTreeMap>>, + pub_key_map: &mut RwLockWriteGuard<'_, BTreeMap>>, db: &Database, ) -> Result<()> { let 
value: CanonicalJsonObject = serde_json::from_str(pdu.get()).map_err(|e| { @@ -3306,7 +3357,7 @@ fn get_server_keys_from_cache( let signature_ids = signature_object.keys().cloned().collect::>(); let contains_all_ids = - |keys: &BTreeMap| signature_ids.iter().all(|id| keys.contains_key(id)); + |keys: &BTreeMap| signature_ids.iter().all(|id| keys.contains_key(id)); let origin = <&ServerName>::try_from(signature_server.as_str()).map_err(|_| { Error::BadServerResponse("Invalid servername in signatures of server response pdu.") @@ -3339,7 +3390,7 @@ fn get_server_keys_from_cache( pub(crate) async fn fetch_join_signing_keys( event: &create_join_event::v2::Response, room_version: &RoomVersionId, - pub_key_map: &RwLock>>, + pub_key_map: &RwLock>>, db: &Database, ) -> Result<()> { let mut servers: BTreeMap, BTreeMap, QueryCriteria>> = @@ -3439,6 +3490,35 @@ pub(crate) async fn fetch_join_signing_keys( Ok(()) } +/// Returns Ok if the acl allows the server +fn acl_check( + server_name: &ServerName, + room_id: &RoomId, + db: &Database, +) -> Result<()> { + let acl_event = match db + .rooms + .room_state_get(room_id, &EventType::RoomServerAcl, "")? { + Some(acl) => acl, + None => return Ok(()), + }; + + let acl_event_content: RoomServerAclEventContent = match + serde_json::from_str(acl_event.content.get()) { + Ok(content) => content, + Err(_) => { + warn!("Invalid ACL event"); + return Ok(()); + } + }; + + if acl_event_content.is_allowed(server_name) { + Ok(()) + } else { + Err(Error::BadRequest(ErrorKind::Forbidden, "Server was denied by ACL")) + } +} + #[cfg(test)] mod tests { use super::{add_port_to_hostname, get_ip_with_port, FedDest}; From 8c90e7adfb0d06164d17921e6e686cdaab0d8f1c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20K=C3=B6sters?= Date: Mon, 17 Jan 2022 14:39:37 +0100 Subject: [PATCH 53/65] refactor: fix warnings --- src/database/abstraction/rocksdb.rs | 27 ++++++--------------------- src/database/sending.rs | 6 ++++-- src/database/transaction_ids.rs | 2 +- src/server_server.rs | 26 +++++++++++++------------- 4 files changed, 24 insertions(+), 37 deletions(-) diff --git a/src/database/abstraction/rocksdb.rs b/src/database/abstraction/rocksdb.rs index 15ea9f73..d6157135 100644 --- a/src/database/abstraction/rocksdb.rs +++ b/src/database/abstraction/rocksdb.rs @@ -4,7 +4,6 @@ use std::{future::Future, pin::Pin, sync::Arc, sync::RwLock}; pub struct Engine { rocks: rocksdb::DBWithThreadMode, - cache_capacity_bytes: usize, max_open_files: i32, cache: rocksdb::Cache, old_cfs: Vec, @@ -17,11 +16,7 @@ pub struct RocksDbEngineTree<'a> { write_lock: RwLock<()>, } -fn db_options( - cache_capacity_bytes: usize, - max_open_files: i32, - rocksdb_cache: &rocksdb::Cache, -) -> rocksdb::Options { +fn db_options(max_open_files: i32, rocksdb_cache: &rocksdb::Cache) -> rocksdb::Options { let mut block_based_options = rocksdb::BlockBasedOptions::default(); block_based_options.set_block_cache(rocksdb_cache); @@ -57,11 +52,7 @@ impl DatabaseEngine for Arc { let cache_capacity_bytes = (config.db_cache_capacity_mb * 1024.0 * 1024.0) as usize; let rocksdb_cache = rocksdb::Cache::new_lru_cache(cache_capacity_bytes).unwrap(); - let db_opts = db_options( - cache_capacity_bytes, - config.rocksdb_max_open_files, - &rocksdb_cache, - ); + let db_opts = db_options(config.rocksdb_max_open_files, &rocksdb_cache); let cfs = rocksdb::DBWithThreadMode::::list_cf( &db_opts, @@ -75,18 +66,13 @@ impl DatabaseEngine for Arc { cfs.iter().map(|name| { rocksdb::ColumnFamilyDescriptor::new( name, - db_options( - 
cache_capacity_bytes, - config.rocksdb_max_open_files, - &rocksdb_cache, - ), + db_options(config.rocksdb_max_open_files, &rocksdb_cache), ) }), )?; Ok(Arc::new(Engine { rocks: db, - cache_capacity_bytes, max_open_files: config.rocksdb_max_open_files, cache: rocksdb_cache, old_cfs: cfs, @@ -96,10 +82,9 @@ impl DatabaseEngine for Arc { fn open_tree(&self, name: &'static str) -> Result> { if !self.old_cfs.contains(&name.to_owned()) { // Create if it didn't exist - let _ = self.rocks.create_cf( - name, - &db_options(self.cache_capacity_bytes, self.max_open_files, &self.cache), - ); + let _ = self + .rocks + .create_cf(name, &db_options(self.max_open_files, &self.cache)); } Ok(Arc::new(RocksDbEngineTree { diff --git a/src/database/sending.rs b/src/database/sending.rs index 65284a4f..69f7c444 100644 --- a/src/database/sending.rs +++ b/src/database/sending.rs @@ -534,7 +534,8 @@ impl Sending { .collect::>(), ), base64::URL_SAFE_NO_PAD, - )).into(), + )) + .into(), }, ) .await @@ -692,7 +693,8 @@ impl Sending { .collect::>(), ), base64::URL_SAFE_NO_PAD, - )).into(), + )) + .into(), }, ) .await diff --git a/src/database/transaction_ids.rs b/src/database/transaction_ids.rs index d576083a..12b838ba 100644 --- a/src/database/transaction_ids.rs +++ b/src/database/transaction_ids.rs @@ -1,7 +1,7 @@ use std::sync::Arc; use crate::Result; -use ruma::{DeviceId, UserId, identifiers::TransactionId}; +use ruma::{identifiers::TransactionId, DeviceId, UserId}; use super::abstraction::Tree; diff --git a/src/server_server.rs b/src/server_server.rs index 5cd43d81..54ae0251 100644 --- a/src/server_server.rs +++ b/src/server_server.rs @@ -42,9 +42,9 @@ use ruma::{ events::{ receipt::{ReceiptEvent, ReceiptEventContent}, room::{ - server_acl::RoomServerAclEventContent, create::RoomCreateEventContent, member::{MembershipState, RoomMemberEventContent}, + server_acl::RoomServerAclEventContent, }, AnyEphemeralRoomEvent, EventType, }, @@ -3491,20 +3491,17 @@ pub(crate) async fn fetch_join_signing_keys( } /// Returns Ok if the acl allows the server -fn acl_check( - server_name: &ServerName, - room_id: &RoomId, - db: &Database, -) -> Result<()> { +fn acl_check(server_name: &ServerName, room_id: &RoomId, db: &Database) -> Result<()> { let acl_event = match db .rooms - .room_state_get(room_id, &EventType::RoomServerAcl, "")? { - Some(acl) => acl, - None => return Ok(()), - }; + .room_state_get(room_id, &EventType::RoomServerAcl, "")? 
+ { + Some(acl) => acl, + None => return Ok(()), + }; - let acl_event_content: RoomServerAclEventContent = match - serde_json::from_str(acl_event.content.get()) { + let acl_event_content: RoomServerAclEventContent = + match serde_json::from_str(acl_event.content.get()) { Ok(content) => content, Err(_) => { warn!("Invalid ACL event"); @@ -3515,7 +3512,10 @@ fn acl_check( if acl_event_content.is_allowed(server_name) { Ok(()) } else { - Err(Error::BadRequest(ErrorKind::Forbidden, "Server was denied by ACL")) + Err(Error::BadRequest( + ErrorKind::Forbidden, + "Server was denied by ACL", + )) } } From 03b174335cfc472c3ecaba7068ead74f0e2268be Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20K=C3=B6sters?= Date: Mon, 17 Jan 2022 14:46:53 +0100 Subject: [PATCH 54/65] improvement: lower default pdu cache capacity --- src/database.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/database.rs b/src/database.rs index fd7a1451..1997dc0a 100644 --- a/src/database.rs +++ b/src/database.rs @@ -134,7 +134,7 @@ fn default_rocksdb_max_open_files() -> i32 { } fn default_pdu_cache_capacity() -> u32 { - 1_000_000 + 150_000 } fn default_cleanup_second_interval() -> u32 { From 53de3509087f46b6a45ca20d27e8fa2884269535 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20K=C3=B6sters?= Date: Mon, 17 Jan 2022 23:24:27 +0100 Subject: [PATCH 55/65] fix: less load when lazy loading --- src/client_server/sync.rs | 53 ++++++++++++++++++++------------------- 1 file changed, 27 insertions(+), 26 deletions(-) diff --git a/src/client_server/sync.rs b/src/client_server/sync.rs index bd2f48a3..14aac3a1 100644 --- a/src/client_server/sync.rs +++ b/src/client_server/sync.rs @@ -453,38 +453,39 @@ async fn sync_helper( let joined_since_last_sync = since_sender_member .map_or(true, |member| member.membership != MembershipState::Join); - let current_state_ids = db.rooms.state_full_ids(current_shortstatehash)?; - - let since_state_ids = db.rooms.state_full_ids(since_shortstatehash)?; - let mut state_events = Vec::new(); let mut lazy_loaded = HashSet::new(); - for (key, id) in current_state_ids { - if body.full_state || since_state_ids.get(&key) != Some(&id) { - let pdu = match db.rooms.get_pdu(&id)? { - Some(pdu) => pdu, - None => { - error!("Pdu in state not found: {}", id); - continue; - } - }; - - if pdu.kind == EventType::RoomMember { - match UserId::parse( - pdu.state_key - .as_ref() - .expect("State event has state key") - .clone(), - ) { - Ok(state_key_userid) => { - lazy_loaded.insert(state_key_userid); + if since_shortstatehash != current_shortstatehash { + let current_state_ids = db.rooms.state_full_ids(current_shortstatehash)?; + let since_state_ids = db.rooms.state_full_ids(since_shortstatehash)?; + + for (key, id) in current_state_ids { + if body.full_state || since_state_ids.get(&key) != Some(&id) { + let pdu = match db.rooms.get_pdu(&id)? 
{ + Some(pdu) => pdu, + None => { + error!("Pdu in state not found: {}", id); + continue; + } + }; + + if pdu.kind == EventType::RoomMember { + match UserId::parse( + pdu.state_key + .as_ref() + .expect("State event has state key") + .clone(), + ) { + Ok(state_key_userid) => { + lazy_loaded.insert(state_key_userid); + } + Err(e) => error!("Invalid state key for member event: {}", e), } - Err(e) => error!("Invalid state key for member event: {}", e), } - } - state_events.push(pdu); + state_events.push(pdu); + } } } From 13a48c45776de19912ecd040a6434c75152802f7 Mon Sep 17 00:00:00 2001 From: Jonas Platte Date: Tue, 18 Jan 2022 21:04:44 +0100 Subject: [PATCH 56/65] Clean up mod and use statements in lib.rs and main.rs --- src/lib.rs | 10 ++++++---- src/main.rs | 22 ++++------------------ 2 files changed, 10 insertions(+), 22 deletions(-) diff --git a/src/lib.rs b/src/lib.rs index 82b8f340..745eb394 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -7,21 +7,23 @@ #![allow(clippy::suspicious_else_formatting)] #![deny(clippy::dbg_macro)] -pub mod appservice_server; -pub mod client_server; +use std::ops::Deref; + mod database; mod error; mod pdu; mod ruma_wrapper; -pub mod server_server; mod utils; +pub mod appservice_server; +pub mod client_server; +pub mod server_server; + pub use database::{Config, Database}; pub use error::{Error, Result}; pub use pdu::PduEvent; pub use rocket::Config as RocketConfig; pub use ruma_wrapper::{ConduitResult, Ruma, RumaResponse}; -use std::ops::Deref; pub struct State<'r, T: Send + Sync + 'static>(pub &'r T); diff --git a/src/main.rs b/src/main.rs index 56faa3e7..d9bbc240 100644 --- a/src/main.rs +++ b/src/main.rs @@ -7,27 +7,9 @@ #![allow(clippy::suspicious_else_formatting)] #![deny(clippy::dbg_macro)] -pub mod appservice_server; -pub mod client_server; -pub mod server_server; - -mod database; -mod error; -mod pdu; -mod ruma_wrapper; -mod utils; - use std::sync::Arc; -use database::Config; -pub use database::Database; -pub use error::{Error, Result}; use opentelemetry::trace::{FutureExt, Tracer}; -pub use pdu::PduEvent; -pub use rocket::State; -use ruma::api::client::error::ErrorKind; -pub use ruma_wrapper::{ConduitResult, Ruma, RumaResponse}; - use rocket::{ catch, catchers, figment::{ @@ -36,9 +18,13 @@ use rocket::{ }, routes, Request, }; +use ruma::api::client::error::ErrorKind; use tokio::sync::RwLock; use tracing_subscriber::{prelude::*, EnvFilter}; +pub use conduit::*; // Re-export everything from the library crate +pub use rocket::State; + fn setup_rocket(config: Figment, data: Arc>) -> rocket::Rocket { rocket::custom(config) .manage(data) From c6277c72a1f75d889b47708769adf376cac9d1ea Mon Sep 17 00:00:00 2001 From: Jonas Platte Date: Tue, 18 Jan 2022 21:05:40 +0100 Subject: [PATCH 57/65] Fix warnings in database::abstraction --- src/database/abstraction.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/database/abstraction.rs b/src/database/abstraction.rs index 17bd971f..321b064f 100644 --- a/src/database/abstraction.rs +++ b/src/database/abstraction.rs @@ -23,12 +23,12 @@ pub trait DatabaseEngine: Send + Sync { where Self: Sized; fn open_tree(&self, name: &'static str) -> Result>; - fn flush(self: &Self) -> Result<()>; - fn cleanup(self: &Self) -> Result<()> { + fn flush(&self) -> Result<()>; + fn cleanup(&self) -> Result<()> { Ok(()) } - fn memory_usage(self: &Self) -> Result { - Ok("Current database engine does not support memory usage reporting.".to_string()) + fn memory_usage(&self) -> Result { + Ok("Current database engine 
does not support memory usage reporting.".to_owned()) } } From d4eb3e3295ee1b0947b66d1d45ef10bb4d152839 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Timo=20K=C3=B6sters?= Date: Wed, 19 Jan 2022 07:09:25 +0100 Subject: [PATCH 58/65] fix: rocksdb does not use zstd compression unless we disable everything else --- Cargo.toml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index 29a090c7..32233305 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -78,7 +78,8 @@ crossbeam = { version = "0.8.1", optional = true } num_cpus = "1.13.0" threadpool = "1.8.1" heed = { git = "https://github.com/timokoesters/heed.git", rev = "f6f825da7fb2c758867e05ad973ef800a6fe1d5d", optional = true } -rocksdb = { version = "0.17.0", features = ["multi-threaded-cf"], optional = true } +rocksdb = { version = "0.17.0", default-features = false, features = ["multi-threaded-cf", "zstd"], optional = true } + thread_local = "1.1.3" # used for TURN server authentication hmac = "0.11.0" From a0fc5eba72a7b364cfe91d5b188b136fa555b7e1 Mon Sep 17 00:00:00 2001 From: Jonas Platte Date: Wed, 19 Jan 2022 23:56:55 +0100 Subject: [PATCH 59/65] Remove unnecessary Result --- src/database/uiaa.rs | 7 +++---- src/ruma_wrapper.rs | 13 +++++-------- 2 files changed, 8 insertions(+), 12 deletions(-) diff --git a/src/database/uiaa.rs b/src/database/uiaa.rs index 5e11467e..b0c8d6dd 100644 --- a/src/database/uiaa.rs +++ b/src/database/uiaa.rs @@ -166,13 +166,12 @@ impl Uiaa { user_id: &UserId, device_id: &DeviceId, session: &str, - ) -> Result> { - Ok(self - .userdevicesessionid_uiaarequest + ) -> Option { + self.userdevicesessionid_uiaarequest .read() .unwrap() .get(&(user_id.to_owned(), device_id.to_owned(), session.to_owned())) - .map(|j| j.to_owned())) + .map(|j| j.to_owned()) } fn update_uiaa_session( diff --git a/src/ruma_wrapper.rs b/src/ruma_wrapper.rs index 4b8d5dea..1bd921d9 100644 --- a/src/ruma_wrapper.rs +++ b/src/ruma_wrapper.rs @@ -296,14 +296,11 @@ where .and_then(|auth| auth.get("session")) .and_then(|session| session.as_str()) .and_then(|session| { - db.uiaa - .get_uiaa_request( - &user_id, - &sender_device.clone().unwrap_or_else(|| "".into()), - session, - ) - .ok() - .flatten() + db.uiaa.get_uiaa_request( + &user_id, + &sender_device.clone().unwrap_or_else(|| "".into()), + session, + ) }) { for (key, value) in initial_request { From 756a41f22d24c89682eea826e138f8c3896433fb Mon Sep 17 00:00:00 2001 From: Jonas Platte Date: Thu, 20 Jan 2022 00:10:39 +0100 Subject: [PATCH 60/65] Fix rustc / clippy warnings --- src/client_server/context.rs | 15 +++++++-------- src/client_server/keys.rs | 2 +- src/client_server/message.rs | 14 +++++++------- src/client_server/profile.rs | 4 ++-- src/database.rs | 30 ++++++++++++------------------ src/database/admin.rs | 2 +- src/database/rooms.rs | 27 +++++++++++++-------------- src/server_server.rs | 6 +++--- 8 files changed, 46 insertions(+), 54 deletions(-) diff --git a/src/client_server/context.rs b/src/client_server/context.rs index 94a44e39..e1177661 100644 --- a/src/client_server/context.rs +++ b/src/client_server/context.rs @@ -3,8 +3,7 @@ use ruma::{ api::client::{error::ErrorKind, r0::context::get_context}, events::EventType, }; -use std::collections::HashSet; -use std::convert::TryFrom; +use std::{collections::HashSet, convert::TryFrom}; #[cfg(feature = "conduit_bin")] use rocket::get; @@ -55,8 +54,8 @@ pub async fn get_context_route( ))?; if !db.rooms.lazy_load_was_sent_before( - &sender_user, - &sender_device, + sender_user, + sender_device, 
&body.room_id, &base_event.sender, )? { @@ -79,8 +78,8 @@ pub async fn get_context_route( for (_, event) in &events_before { if !db.rooms.lazy_load_was_sent_before( - &sender_user, - &sender_device, + sender_user, + sender_device, &body.room_id, &event.sender, )? { @@ -112,8 +111,8 @@ pub async fn get_context_route( for (_, event) in &events_after { if !db.rooms.lazy_load_was_sent_before( - &sender_user, - &sender_device, + sender_user, + sender_device, &body.room_id, &event.sender, )? { diff --git a/src/client_server/keys.rs b/src/client_server/keys.rs index be0675d8..e7aec26b 100644 --- a/src/client_server/keys.rs +++ b/src/client_server/keys.rs @@ -272,7 +272,7 @@ pub async fn get_key_changes_route( device_list_updates.extend( db.users .keys_changed( - &sender_user.to_string(), + sender_user.as_str(), body.from .parse() .map_err(|_| Error::BadRequest(ErrorKind::InvalidParam, "Invalid `from`."))?, diff --git a/src/client_server/message.rs b/src/client_server/message.rs index 36653fab..7d904f90 100644 --- a/src/client_server/message.rs +++ b/src/client_server/message.rs @@ -139,7 +139,7 @@ pub async fn get_message_events_route( let to = body.to.as_ref().map(|t| t.parse()); db.rooms - .lazy_load_confirm_delivery(&sender_user, &sender_device, &body.room_id, from)?; + .lazy_load_confirm_delivery(sender_user, sender_device, &body.room_id, from)?; // Use limit or else 10 let limit = body.limit.try_into().map_or(10_usize, |l: u32| l as usize); @@ -168,8 +168,8 @@ pub async fn get_message_events_route( for (_, event) in &events_after { if !db.rooms.lazy_load_was_sent_before( - &sender_user, - &sender_device, + sender_user, + sender_device, &body.room_id, &event.sender, )? { @@ -205,8 +205,8 @@ pub async fn get_message_events_route( for (_, event) in &events_before { if !db.rooms.lazy_load_was_sent_before( - &sender_user, - &sender_device, + sender_user, + sender_device, &body.room_id, &event.sender, )? { @@ -239,8 +239,8 @@ pub async fn get_message_events_route( if let Some(next_token) = next_token { db.rooms.lazy_load_mark_sent( - &sender_user, - &sender_device, + sender_user, + sender_device, &body.room_id, lazy_loaded, next_token, diff --git a/src/client_server/profile.rs b/src/client_server/profile.rs index 29b1ae87..71e61da3 100644 --- a/src/client_server/profile.rs +++ b/src/client_server/profile.rs @@ -52,7 +52,7 @@ pub async fn set_displayname_route( .room_state_get( &room_id, &EventType::RoomMember, - &sender_user.to_string(), + sender_user.as_str(), )? .ok_or_else(|| { Error::bad_database( @@ -195,7 +195,7 @@ pub async fn set_avatar_url_route( .room_state_get( &room_id, &EventType::RoomMember, - &sender_user.to_string(), + sender_user.as_str(), )? 
.ok_or_else(|| { Error::bad_database( diff --git a/src/database.rs b/src/database.rs index 1997dc0a..7a4ddc66 100644 --- a/src/database.rs +++ b/src/database.rs @@ -212,28 +212,22 @@ impl Database { return Ok(()); } - if sled_exists { - if config.database_backend != "sled" { - return Err(Error::bad_config( - "Found sled at database_path, but is not specified in config.", - )); - } + if sled_exists && config.database_backend != "sled" { + return Err(Error::bad_config( + "Found sled at database_path, but is not specified in config.", + )); } - if sqlite_exists { - if config.database_backend != "sqlite" { - return Err(Error::bad_config( - "Found sqlite at database_path, but is not specified in config.", - )); - } + if sqlite_exists && config.database_backend != "sqlite" { + return Err(Error::bad_config( + "Found sqlite at database_path, but is not specified in config.", + )); } - if rocksdb_exists { - if config.database_backend != "rocksdb" { - return Err(Error::bad_config( - "Found rocksdb at database_path, but is not specified in config.", - )); - } + if rocksdb_exists && config.database_backend != "rocksdb" { + return Err(Error::bad_config( + "Found rocksdb at database_path, but is not specified in config.", + )); } Ok(()) diff --git a/src/database/admin.rs b/src/database/admin.rs index 7d2301d9..bf38bd8c 100644 --- a/src/database/admin.rs +++ b/src/database/admin.rs @@ -118,7 +118,7 @@ impl Admin { if let Ok(response) = guard._db.memory_usage() { send_message(RoomMessageEventContent::text_plain(response), guard, &state_lock); } else { - send_message(RoomMessageEventContent::text_plain("Failed to get database memory usage.".to_string()), guard, &state_lock); + send_message(RoomMessageEventContent::text_plain("Failed to get database memory usage.".to_owned()), guard, &state_lock); } } AdminCommand::SendMessage(message) => { diff --git a/src/database/rooms.rs b/src/database/rooms.rs index 0ba6c9ba..c9a3c202 100644 --- a/src/database/rooms.rs +++ b/src/database/rooms.rs @@ -2727,7 +2727,7 @@ impl Rooms { let state_lock = mutex_state.lock().await; let mut event: RoomMemberEventContent = serde_json::from_str( - self.room_state_get(room_id, &EventType::RoomMember, &user_id.to_string())? + self.room_state_get(room_id, &EventType::RoomMember, user_id.as_str())? 
.ok_or(Error::BadRequest( ErrorKind::BadState, "Cannot leave a room you are not a member of.", @@ -3462,8 +3462,7 @@ impl Rooms { &key[0].to_be_bytes(), &chain .iter() - .map(|s| s.to_be_bytes().to_vec()) - .flatten() + .flat_map(|s| s.to_be_bytes().to_vec()) .collect::>(), )?; } @@ -3484,11 +3483,11 @@ impl Rooms { ) -> Result { let mut key = user_id.as_bytes().to_vec(); key.push(0xff); - key.extend_from_slice(&device_id.as_bytes()); + key.extend_from_slice(device_id.as_bytes()); key.push(0xff); - key.extend_from_slice(&room_id.as_bytes()); + key.extend_from_slice(room_id.as_bytes()); key.push(0xff); - key.extend_from_slice(&ll_user.as_bytes()); + key.extend_from_slice(ll_user.as_bytes()); Ok(self.lazyloadedids.get(&key)?.is_some()) } @@ -3528,14 +3527,14 @@ impl Rooms { )) { let mut prefix = user_id.as_bytes().to_vec(); prefix.push(0xff); - prefix.extend_from_slice(&device_id.as_bytes()); + prefix.extend_from_slice(device_id.as_bytes()); prefix.push(0xff); - prefix.extend_from_slice(&room_id.as_bytes()); + prefix.extend_from_slice(room_id.as_bytes()); prefix.push(0xff); for ll_id in user_ids { let mut key = prefix.clone(); - key.extend_from_slice(&ll_id.as_bytes()); + key.extend_from_slice(ll_id.as_bytes()); self.lazyloadedids.insert(&key, &[])?; } } @@ -3546,15 +3545,15 @@ impl Rooms { #[tracing::instrument(skip(self))] pub fn lazy_load_reset( &self, - user_id: &Box, - device_id: &Box, - room_id: &Box, + user_id: &UserId, + device_id: &DeviceId, + room_id: &RoomId, ) -> Result<()> { let mut prefix = user_id.as_bytes().to_vec(); prefix.push(0xff); - prefix.extend_from_slice(&device_id.as_bytes()); + prefix.extend_from_slice(device_id.as_bytes()); prefix.push(0xff); - prefix.extend_from_slice(&room_id.as_bytes()); + prefix.extend_from_slice(room_id.as_bytes()); prefix.push(0xff); for (key, _) in self.lazyloadedids.scan_prefix(prefix) { diff --git a/src/server_server.rs b/src/server_server.rs index 54ae0251..9129951b 100644 --- a/src/server_server.rs +++ b/src/server_server.rs @@ -1938,7 +1938,7 @@ pub(crate) fn fetch_and_handle_outliers<'a>( match handle_outlier_pdu( origin, create_event, - &next_id, + next_id, room_id, value.clone(), db, @@ -2358,7 +2358,7 @@ pub fn get_event_route( let room_id = <&RoomId>::try_from(room_id_str) .map_err(|_| Error::bad_database("Invalid room id field in event in database"))?; - if !db.rooms.server_in_room(sender_servername, &room_id)? { + if !db.rooms.server_in_room(sender_servername, room_id)? 
{ return Err(Error::BadRequest( ErrorKind::Forbidden, "Server is not in room", @@ -2821,7 +2821,7 @@ async fn create_join_event( )); } - acl_check(sender_servername, room_id, &db)?; + acl_check(sender_servername, room_id, db)?; // We need to return the state prior to joining, let's keep a reference to that here let shortstatehash = db From 6e322716caf6f9181bf21444b552bf05d5f5a774 Mon Sep 17 00:00:00 2001 From: Jonas Platte Date: Thu, 20 Jan 2022 12:29:10 +0100 Subject: [PATCH 61/65] Delete rust-toolchain file --- rust-toolchain | 1 - 1 file changed, 1 deletion(-) delete mode 100644 rust-toolchain diff --git a/rust-toolchain b/rust-toolchain deleted file mode 100644 index 74df8b16..00000000 --- a/rust-toolchain +++ /dev/null @@ -1 +0,0 @@ -1.53 From 5afb27a5a9ae887dea042e3ca9f0ecef98feff47 Mon Sep 17 00:00:00 2001 From: Jonas Platte Date: Thu, 20 Jan 2022 12:29:24 +0100 Subject: [PATCH 62/65] Use latest stable for Docker image --- Dockerfile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index 5812fdf9..b629690d 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,5 @@ # syntax=docker/dockerfile:1 -FROM docker.io/rust:1.53-alpine AS builder +FROM docker.io/rust:1.58-alpine AS builder WORKDIR /usr/src/conduit # Install required packages to build Conduit and it's dependencies @@ -38,7 +38,7 @@ FROM docker.io/alpine:3.15.0 AS runner # You still need to map the port when using the docker command or docker-compose. EXPOSE 6167 -# Note from @jfowl: I would like to remove this in the future and just have the Docker version be configured with envs. +# Note from @jfowl: I would like to remove this in the future and just have the Docker version be configured with envs. ENV CONDUIT_CONFIG="/srv/conduit/conduit.toml" # Conduit needs: @@ -78,4 +78,4 @@ WORKDIR /srv/conduit # Run Conduit and print backtraces on panics ENV RUST_BACKTRACE=1 -ENTRYPOINT [ "/srv/conduit/conduit" ] \ No newline at end of file +ENTRYPOINT [ "/srv/conduit/conduit" ] From ff5fec9e74b4ed12c4dae579344a94f1c1f22f29 Mon Sep 17 00:00:00 2001 From: Jonas Platte Date: Thu, 20 Jan 2022 12:29:52 +0100 Subject: [PATCH 63/65] Raise minimum supported Rust version to 1.56 --- Cargo.toml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index 29a090c7..b6a2a2b7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -7,7 +7,8 @@ homepage = "https://conduit.rs" repository = "https://gitlab.com/famedly/conduit" readme = "README.md" version = "0.2.0" -edition = "2018" +rust-version = "1.56" +edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html From 6bb1081b7127a38cdc85614e4250f52b557753c8 Mon Sep 17 00:00:00 2001 From: Jonas Platte Date: Thu, 20 Jan 2022 13:13:14 +0100 Subject: [PATCH 64/65] Use BTreeMap::into_values Stable under new MSRV. --- src/database/users.rs | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/src/database/users.rs b/src/database/users.rs index c4fcee3d..69a277c6 100644 --- a/src/database/users.rs +++ b/src/database/users.rs @@ -531,11 +531,11 @@ impl Users { prefix.push(0xff); // Master key - let master_key_map = master_key + let mut master_key_ids = master_key .deserialize() .map_err(|_| Error::BadRequest(ErrorKind::InvalidParam, "Invalid master key"))? 
- .keys; - let mut master_key_ids = master_key_map.values(); + .keys + .into_values(); let master_key_id = master_key_ids.next().ok_or(Error::BadRequest( ErrorKind::InvalidParam, @@ -560,13 +560,14 @@ impl Users { // Self-signing key if let Some(self_signing_key) = self_signing_key { - let self_signing_key_map = self_signing_key + let mut self_signing_key_ids = self_signing_key .deserialize() .map_err(|_| { Error::BadRequest(ErrorKind::InvalidParam, "Invalid self signing key") })? - .keys; - let mut self_signing_key_ids = self_signing_key_map.values(); + .keys + .into_values(); + let self_signing_key_id = self_signing_key_ids.next().ok_or(Error::BadRequest( ErrorKind::InvalidParam, "Self signing key contained no key.", @@ -593,13 +594,14 @@ impl Users { // User-signing key if let Some(user_signing_key) = user_signing_key { - let user_signing_key_map = user_signing_key + let mut user_signing_key_ids = user_signing_key .deserialize() .map_err(|_| { Error::BadRequest(ErrorKind::InvalidParam, "Invalid user signing key") })? - .keys; - let mut user_signing_key_ids = user_signing_key_map.values(); + .keys + .into_values(); + let user_signing_key_id = user_signing_key_ids.next().ok_or(Error::BadRequest( ErrorKind::InvalidParam, "User signing key contained no key.", From 8d81c1c0722ad2f608adea44d7b4ceb1a8f645ae Mon Sep 17 00:00:00 2001 From: Jonas Platte Date: Thu, 20 Jan 2022 13:23:58 +0100 Subject: [PATCH 65/65] Use MSRV for build CI jobs The test job will use the latest stable so all stable lints are included. --- .gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 73a1a928..cdc1d4cb 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -21,7 +21,7 @@ variables: - if: '$CI_COMMIT_BRANCH == "next"' - if: "$CI_COMMIT_TAG" interruptible: true - image: "rust:latest" + image: "rust:1.56" tags: ["docker"] variables: CARGO_PROFILE_RELEASE_LTO: "true"
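
A minimal, self-contained sketch of the BTreeMap::into_values pattern that PATCH 64/65 switches to (std-only Rust, not Conduit code; the map contents and the key_map/ids bindings below are invented purely for illustration). values() only borrows the map, so the map needs its own binding kept alive, whereas into_values() (stable since Rust 1.54, hence available under the new 1.56 MSRV) consumes the map and yields owned values directly:

    use std::collections::BTreeMap;

    fn main() {
        // Hypothetical signing-key map, shaped like the `keys` field the patch iterates over.
        let keys: BTreeMap<String, String> =
            BTreeMap::from([("ed25519:abc".to_owned(), "<base64 key>".to_owned())]);

        // Old pattern (pre-patch): keep the map bound and borrow its values.
        //   let key_map = keys;
        //   let mut ids = key_map.values();

        // New pattern: consume the map, no intermediate binding needed.
        let mut ids = keys.into_values();
        assert_eq!(ids.next().as_deref(), Some("<base64 key>"));
    }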