feat: add threadpool for iterator threads, bug fixes, tracing_flame support

merge-requests/140/head
Timo Kösters 3 years ago
parent e0072eff63
commit 5e924227b6

Cargo.lock (generated)

@ -137,9 +137,9 @@ checksum = "383d29d513d8764dcdc42ea295d979eb99c3c9f00607b3692cf68a431f7dca72"
[[package]] [[package]]
name = "bindgen" name = "bindgen"
version = "0.57.0" version = "0.59.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd4865004a46a0aafb2a0a5eb19d3c9fc46ee5f063a6cfc605c69ac9ecf5263d" checksum = "0b2ce639ee22f41a6ea0a3061e9bea9f690cf0c6ffc1ada0a3a599778f99ccba"
dependencies = [ dependencies = [
"bitflags", "bitflags",
"cexpr", "cexpr",
@ -160,6 +160,18 @@ version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693" checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693"
[[package]]
name = "bitvec"
version = "0.19.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8942c8d352ae1838c9dda0b0ca2ab657696ef2232a20147cf1b30ae1a9cb4321"
dependencies = [
"funty",
"radium",
"tap",
"wyz",
]
[[package]] [[package]]
name = "blake2b_simd" name = "blake2b_simd"
version = "0.5.11" version = "0.5.11"
@ -215,9 +227,9 @@ dependencies = [
[[package]] [[package]]
name = "cexpr" name = "cexpr"
version = "0.4.0" version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f4aedb84272dbe89af497cf81375129abda4fc0a9e7c5d317498c15cc30c0d27" checksum = "db507a7679252d2276ed0dd8113c6875ec56d3089f9225b2b42c30cc1f8e5c89"
dependencies = [ dependencies = [
"nom", "nom",
] ]
@ -269,7 +281,6 @@ dependencies = [
"http", "http",
"image", "image",
"jsonwebtoken", "jsonwebtoken",
"log",
"lru-cache", "lru-cache",
"num_cpus", "num_cpus",
"opentelemetry", "opentelemetry",
@ -292,8 +303,10 @@ dependencies = [
"serde_yaml", "serde_yaml",
"sled", "sled",
"thiserror", "thiserror",
"threadpool",
"tokio", "tokio",
"tracing", "tracing",
"tracing-flame",
"tracing-opentelemetry", "tracing-opentelemetry",
"tracing-subscriber", "tracing-subscriber",
"trust-dns-resolver", "trust-dns-resolver",
@ -326,9 +339,9 @@ checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e"
[[package]] [[package]]
name = "cookie" name = "cookie"
version = "0.15.0" version = "0.15.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ffdf8865bac3d9a3bde5bde9088ca431b11f5d37c7a578b8086af77248b76627" checksum = "d5f1c7727e460397e56abc4bddc1d49e07a1ad78fc98eb2e1c8f032a58a2f80d"
dependencies = [ dependencies = [
"percent-encoding", "percent-encoding",
"time 0.2.27", "time 0.2.27",
@ -564,9 +577,9 @@ checksum = "56899898ce76aaf4a0f24d914c97ea6ed976d42fec6ad33fcbb0a1103e07b2b0"
[[package]] [[package]]
name = "ed25519" name = "ed25519"
version = "1.1.1" version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8d0860415b12243916284c67a9be413e044ee6668247b99ba26d94b2bc06c8f6" checksum = "4620d40f6d2601794401d6dd95a5cf69b6c157852539470eeda433a99b3c0efc"
dependencies = [ dependencies = [
"signature", "signature",
] ]
@ -677,6 +690,12 @@ dependencies = [
"winapi", "winapi",
] ]
[[package]]
name = "funty"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fed34cd105917e91daa4da6b3728c47b068749d6a62c59811f06ed2ac71d9da7"
[[package]] [[package]]
name = "futures" name = "futures"
version = "0.3.15" version = "0.3.15"
@ -952,9 +971,9 @@ dependencies = [
[[package]] [[package]]
name = "hyper" name = "hyper"
version = "0.14.10" version = "0.14.11"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7728a72c4c7d72665fde02204bcbd93b247721025b222ef78606f14513e0fd03" checksum = "0b61cf2d1aebcf6e6352c97b81dc2244ca29194be1b276f5d8ad5c6330fffb11"
dependencies = [ dependencies = [
"bytes", "bytes",
"futures-channel", "futures-channel",
@ -1177,9 +1196,9 @@ dependencies = [
[[package]] [[package]]
name = "librocksdb-sys" name = "librocksdb-sys"
version = "6.17.3" version = "6.20.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5da125e1c0f22c7cae785982115523a0738728498547f415c9054cb17c7e89f9" checksum = "c309a9d2470844aceb9a4a098cf5286154d20596868b75a6b36357d2bb9ca25d"
dependencies = [ dependencies = [
"bindgen", "bindgen",
"cc", "cc",
@ -1346,10 +1365,12 @@ dependencies = [
[[package]] [[package]]
name = "nom" name = "nom"
version = "5.1.2" version = "6.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ffb4262d26ed83a1c0a33a38fe2bb15797329c85770da05e6b828ddb782627af" checksum = "e7413f999671bd4745a7b624bd370a569fb6bc574b23c83a3c5ed2e453f3d5e2"
dependencies = [ dependencies = [
"bitvec",
"funty",
"memchr", "memchr",
"version_check", "version_check",
] ]
@ -1445,11 +1466,12 @@ checksum = "28988d872ab76095a6e6ac88d99b54fd267702734fd7ffe610ca27f533ddb95a"
[[package]] [[package]]
name = "opentelemetry" name = "opentelemetry"
version = "0.12.0" version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "514d24875c140ed269eecc2d1b56d7b71b573716922a763c317fb1b1b4b58f15" checksum = "ff27b33e30432e7b9854936693ca103d8591b0501f7ae9f633de48cda3bf2a67"
dependencies = [ dependencies = [
"async-trait", "async-trait",
"crossbeam-channel",
"futures", "futures",
"js-sys", "js-sys",
"lazy_static", "lazy_static",
@ -1461,9 +1483,9 @@ dependencies = [
[[package]] [[package]]
name = "opentelemetry-jaeger" name = "opentelemetry-jaeger"
version = "0.11.0" version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a5677b3a361784aff6e2b1b30dbdb5f85f4ec57ff2ced41d9a481ad70a9d0b57" checksum = "09a9fc8192722e7daa0c56e59e2336b797122fb8598383dcb11c8852733b435c"
dependencies = [ dependencies = [
"async-trait", "async-trait",
"lazy_static", "lazy_static",
@ -1569,18 +1591,18 @@ dependencies = [
[[package]] [[package]]
name = "pin-project" name = "pin-project"
version = "1.0.7" version = "1.0.8"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7509cc106041c40a4518d2af7a61530e1eed0e6285296a3d8c5472806ccc4a4" checksum = "576bc800220cc65dac09e99e97b08b358cfab6e17078de8dc5fee223bd2d0c08"
dependencies = [ dependencies = [
"pin-project-internal", "pin-project-internal",
] ]
[[package]] [[package]]
name = "pin-project-internal" name = "pin-project-internal"
version = "1.0.7" version = "1.0.8"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "48c950132583b500556b1efd71d45b319029f2b71518d979fcc208e16b42426f" checksum = "6e8fe8163d14ce7f0cdac2e040116f22eac817edabff0be91e8aff7e9accf389"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@ -1601,9 +1623,9 @@ checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
[[package]] [[package]]
name = "pkcs8" name = "pkcs8"
version = "0.7.0" version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09d156817ae0125e8aa5067710b0db24f0984830614f99875a70aa5e3b74db69" checksum = "87bb2d5c68b7505a3a89eb2f3583a4d56303863005226c2ef99319930a262be4"
dependencies = [ dependencies = [
"der", "der",
"spki", "spki",
@ -1703,6 +1725,12 @@ dependencies = [
"proc-macro2", "proc-macro2",
] ]
[[package]]
name = "radium"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "941ba9d78d8e2f7ce474c015eea4d9c6d25b6a3327f9832ee29a4de27f91bbb8"
[[package]] [[package]]
name = "rand" name = "rand"
version = "0.7.3" version = "0.7.3"
@ -2015,7 +2043,7 @@ dependencies = [
[[package]] [[package]]
name = "ruma" name = "ruma"
version = "0.2.0" version = "0.2.0"
source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
dependencies = [ dependencies = [
"assign", "assign",
"js_int", "js_int",
@ -2036,7 +2064,7 @@ dependencies = [
[[package]] [[package]]
name = "ruma-api" name = "ruma-api"
version = "0.17.1" version = "0.17.1"
source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
dependencies = [ dependencies = [
"bytes", "bytes",
"http", "http",
@ -2052,7 +2080,7 @@ dependencies = [
[[package]] [[package]]
name = "ruma-api-macros" name = "ruma-api-macros"
version = "0.17.1" version = "0.17.1"
source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
dependencies = [ dependencies = [
"proc-macro-crate", "proc-macro-crate",
"proc-macro2", "proc-macro2",
@ -2063,7 +2091,7 @@ dependencies = [
[[package]] [[package]]
name = "ruma-appservice-api" name = "ruma-appservice-api"
version = "0.3.0" version = "0.3.0"
source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
dependencies = [ dependencies = [
"ruma-api", "ruma-api",
"ruma-common", "ruma-common",
@ -2077,7 +2105,7 @@ dependencies = [
[[package]] [[package]]
name = "ruma-client-api" name = "ruma-client-api"
version = "0.11.0" version = "0.11.0"
source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
dependencies = [ dependencies = [
"assign", "assign",
"bytes", "bytes",
@ -2097,7 +2125,7 @@ dependencies = [
[[package]] [[package]]
name = "ruma-common" name = "ruma-common"
version = "0.5.4" version = "0.5.4"
source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
dependencies = [ dependencies = [
"indexmap", "indexmap",
"js_int", "js_int",
@ -2112,7 +2140,7 @@ dependencies = [
[[package]] [[package]]
name = "ruma-events" name = "ruma-events"
version = "0.23.2" version = "0.23.2"
source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
dependencies = [ dependencies = [
"indoc", "indoc",
"js_int", "js_int",
@ -2128,7 +2156,7 @@ dependencies = [
[[package]] [[package]]
name = "ruma-events-macros" name = "ruma-events-macros"
version = "0.23.2" version = "0.23.2"
source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
dependencies = [ dependencies = [
"proc-macro-crate", "proc-macro-crate",
"proc-macro2", "proc-macro2",
@ -2139,7 +2167,7 @@ dependencies = [
[[package]] [[package]]
name = "ruma-federation-api" name = "ruma-federation-api"
version = "0.2.0" version = "0.2.0"
source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
dependencies = [ dependencies = [
"js_int", "js_int",
"ruma-api", "ruma-api",
@ -2154,7 +2182,7 @@ dependencies = [
[[package]] [[package]]
name = "ruma-identifiers" name = "ruma-identifiers"
version = "0.19.4" version = "0.19.4"
source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
dependencies = [ dependencies = [
"paste", "paste",
"rand 0.8.4", "rand 0.8.4",
@ -2168,7 +2196,7 @@ dependencies = [
[[package]] [[package]]
name = "ruma-identifiers-macros" name = "ruma-identifiers-macros"
version = "0.19.4" version = "0.19.4"
source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
dependencies = [ dependencies = [
"quote", "quote",
"ruma-identifiers-validation", "ruma-identifiers-validation",
@ -2178,12 +2206,12 @@ dependencies = [
[[package]] [[package]]
name = "ruma-identifiers-validation" name = "ruma-identifiers-validation"
version = "0.4.0" version = "0.4.0"
source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
[[package]] [[package]]
name = "ruma-identity-service-api" name = "ruma-identity-service-api"
version = "0.2.0" version = "0.2.0"
source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
dependencies = [ dependencies = [
"js_int", "js_int",
"ruma-api", "ruma-api",
@ -2196,7 +2224,7 @@ dependencies = [
[[package]] [[package]]
name = "ruma-push-gateway-api" name = "ruma-push-gateway-api"
version = "0.2.0" version = "0.2.0"
source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
dependencies = [ dependencies = [
"js_int", "js_int",
"ruma-api", "ruma-api",
@ -2211,7 +2239,7 @@ dependencies = [
[[package]] [[package]]
name = "ruma-serde" name = "ruma-serde"
version = "0.4.1" version = "0.4.1"
source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
dependencies = [ dependencies = [
"bytes", "bytes",
"form_urlencoded", "form_urlencoded",
@ -2225,7 +2253,7 @@ dependencies = [
[[package]] [[package]]
name = "ruma-serde-macros" name = "ruma-serde-macros"
version = "0.4.1" version = "0.4.1"
source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
dependencies = [ dependencies = [
"proc-macro-crate", "proc-macro-crate",
"proc-macro2", "proc-macro2",
@ -2236,7 +2264,7 @@ dependencies = [
[[package]] [[package]]
name = "ruma-signatures" name = "ruma-signatures"
version = "0.8.0" version = "0.8.0"
source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
dependencies = [ dependencies = [
"base64 0.13.0", "base64 0.13.0",
"ed25519-dalek", "ed25519-dalek",
@ -2253,7 +2281,7 @@ dependencies = [
[[package]] [[package]]
name = "ruma-state-res" name = "ruma-state-res"
version = "0.2.0" version = "0.2.0"
source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
dependencies = [ dependencies = [
"itertools 0.10.1", "itertools 0.10.1",
"js_int", "js_int",
@ -2529,9 +2557,9 @@ dependencies = [
[[package]] [[package]]
name = "shlex" name = "shlex"
version = "0.1.1" version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fdf1b9db47230893d76faad238fd6097fd6d6a9245cd7a4d90dbd639536bbd2" checksum = "42a568c8f2cd051a4d283bd6eb0343ac214c1b0f1ac19f93e1175b2dee38c73d"
[[package]] [[package]]
name = "signal-hook-registry" name = "signal-hook-registry"
@ -2714,9 +2742,9 @@ checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601"
[[package]] [[package]]
name = "syn" name = "syn"
version = "1.0.73" version = "1.0.74"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f71489ff30030d2ae598524f61326b902466f72a0fb1a8564c001cc63425bcc7" checksum = "1873d832550d4588c3dbc20f01361ab00bfe741048f71e3fecf145a7cc18b29c"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@ -2735,6 +2763,12 @@ dependencies = [
"unicode-xid", "unicode-xid",
] ]
[[package]]
name = "tap"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
[[package]] [[package]]
name = "tempfile" name = "tempfile"
version = "3.2.0" version = "3.2.0"
@ -2859,9 +2893,9 @@ dependencies = [
[[package]] [[package]]
name = "tinyvec" name = "tinyvec"
version = "1.2.0" version = "1.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b5220f05bb7de7f3f53c7c065e1199b3172696fe2db9f9c4d8ad9b4ee74c342" checksum = "848a1e1181b9f6753b5e96a092749e29b11d19ede67dfbbd6c7dc7e0f49b5338"
dependencies = [ dependencies = [
"tinyvec_macros", "tinyvec_macros",
] ]
@ -2874,9 +2908,9 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c"
[[package]] [[package]]
name = "tokio" name = "tokio"
version = "1.8.1" version = "1.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "98c8b05dc14c75ea83d63dd391100353789f5f24b8b3866542a5e85c8be8e985" checksum = "c2602b8af3767c285202012822834005f596c811042315fa7e9f5b12b2a43207"
dependencies = [ dependencies = [
"autocfg", "autocfg",
"bytes", "bytes",
@ -2997,6 +3031,17 @@ dependencies = [
"lazy_static", "lazy_static",
] ]
[[package]]
name = "tracing-flame"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd520fe41c667b437952383f3a1ec14f1fa45d653f719a77eedd6e6a02d8fa54"
dependencies = [
"lazy_static",
"tracing",
"tracing-subscriber",
]
[[package]] [[package]]
name = "tracing-log" name = "tracing-log"
version = "0.1.2" version = "0.1.2"
@ -3010,9 +3055,9 @@ dependencies = [
[[package]] [[package]]
name = "tracing-opentelemetry" name = "tracing-opentelemetry"
version = "0.11.0" version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cccdf13c28f1654fe806838f28c5b9cb23ca4c0eae71450daa489f50e523ceb1" checksum = "c47440f2979c4cd3138922840eec122e3c0ba2148bc290f756bd7fd60fc97fff"
dependencies = [ dependencies = [
"opentelemetry", "opentelemetry",
"tracing", "tracing",
@ -3394,6 +3439,12 @@ dependencies = [
"winapi", "winapi",
] ]
[[package]]
name = "wyz"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "85e60b0d1b5f99db2556934e21937020776a5d31520bf169e851ac44e6420214"
[[package]] [[package]]
name = "yaml-rust" name = "yaml-rust"
version = "0.4.5" version = "0.4.5"
@ -3411,9 +3462,9 @@ checksum = "9fc79f4a1e39857fc00c3f662cbf2651c771f00e9c15fe2abc341806bd46bd71"
[[package]] [[package]]
name = "zeroize" name = "zeroize"
version = "1.3.0" version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4756f7db3f7b5574938c3eb1c117038b8e07f95ee6718c0efad4ac21508f1efd" checksum = "377db0846015f7ae377174787dd452e1c5f5a9050bc6f954911d01f116daa0cd"
dependencies = [ dependencies = [
"zeroize_derive", "zeroize_derive",
] ]

@@ -18,12 +18,12 @@ edition = "2018"
 rocket = { version = "0.5.0-rc.1", features = ["tls"] } # Used to handle requests

 # Used for matrix spec type definitions and helpers
-ruma = { git = "https://github.com/ruma/ruma", rev = "eb19b0e08a901b87d11b3be0890ec788cc760492", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] }
-#ruma = { git = "https://github.com/timokoesters/ruma", rev = "74cf83c4ca937fa5e2709fb71e9d11848e72e487", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] }
+#ruma = { git = "https://github.com/ruma/ruma", rev = "eb19b0e08a901b87d11b3be0890ec788cc760492", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] }
+ruma = { git = "https://github.com/timokoesters/ruma", rev = "a2d93500e1dbc87e7032a3c74f3b2479a7f84e93", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] }
 #ruma = { path = "../ruma/crates/ruma", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] }
 # Used for long polling and federation sender, should be the same as rocket::tokio
-tokio = "1.2.0"
+tokio = "1.8.2"

 # Used for storing data permanently
 sled = { version = "0.34.6", features = ["compression", "no_metrics"], optional = true }
 rocksdb = { version = "0.16.0", features = ["multi-threaded-cf"], optional = true }
@@ -31,30 +31,28 @@ rocksdb = { version = "0.16.0", features = ["multi-threaded-cf"], optional = true
 # Used for the http request / response body type for Ruma endpoints used with reqwest
 bytes = "1.0.1"
-# Used for emitting log entries
-log = "0.4.14"
 # Used for rocket<->ruma conversions
-http = "0.2.3"
+http = "0.2.4"
 # Used to find data directory for default db path
-directories = "3.0.1"
+directories = "3.0.2"
 # Used for ruma wrapper
 serde_json = { version = "1.0.64", features = ["raw_value"] }
 # Used for appservice registration files
 serde_yaml = "0.8.17"
 # Used for pdu definition
-serde = "1.0.123"
+serde = "1.0.126"
 # Used for secure identifiers
-rand = "0.8.3"
+rand = "0.8.4"
 # Used to hash passwords
 rust-argon2 = "0.8.3"
 # Used to send requests
-reqwest = { version = "0.11.3", default-features = false, features = ["rustls-tls-native-roots", "socks"] }
+reqwest = { version = "0.11.4", default-features = false, features = ["rustls-tls-native-roots", "socks"] }
 # Custom TLS verifier
-rustls = { version = "0.19", features = ["dangerous_configuration"] }
+rustls = { version = "0.19.1", features = ["dangerous_configuration"] }
 rustls-native-certs = "0.5.0"
 webpki = "0.21.0"
 # Used for conduit::Error type
-thiserror = "1.0.24"
+thiserror = "1.0.26"
 # Used to generate thumbnails for images
 image = { version = "0.23.14", default-features = false, features = ["jpeg", "png", "gif"] }
 # Used to encode server public key
@@ -62,23 +60,25 @@ base64 = "0.13.0"
 # Used when hashing the state
 ring = "0.16.20"
 # Used when querying the SRV record of other servers
-trust-dns-resolver = "0.20.0"
+trust-dns-resolver = "0.20.3"
 # Used to find matching events for appservices
-regex = "1.4.3"
+regex = "1.5.4"
 # jwt jsonwebtokens
 jsonwebtoken = "7.2.0"
 # Performance measurements
-tracing = "0.1.25"
-opentelemetry = "0.12.0"
-tracing-subscriber = "0.2.16"
-tracing-opentelemetry = "0.11.0"
-opentelemetry-jaeger = "0.11.0"
+tracing = { version = "0.1.26", features = ["release_max_level_warn"] }
+opentelemetry = "0.15.0"
+tracing-subscriber = "0.2.19"
+tracing-opentelemetry = "0.14.0"
+tracing-flame = "0.1.0"
+opentelemetry-jaeger = "0.14.0"
 pretty_env_logger = "0.4.0"
 lru-cache = "0.1.2"
 rusqlite = { version = "0.25.3", optional = true, features = ["bundled"] }
 parking_lot = { version = "0.11.1", optional = true }
 crossbeam = { version = "0.8.1", optional = true }
 num_cpus = "1.13.0"
+threadpool = "1.8.1"

 [features]
 default = ["conduit_bin", "backend_sqlite"]
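Editor's note: the new `tracing-flame` and `threadpool` entries above back the two headline features of this commit. As a reference point, a minimal sketch of how tracing-flame 0.1 is typically wired into tracing-subscriber follows; this is an illustration, not the exact main.rs wiring from this commit, and the flag and output path are placeholders mirroring the `tracing_flame` config option added further down in this diff:

```rust
use tracing_subscriber::{layer::SubscriberExt, Registry};

fn main() {
    // Placeholder for the new `tracing_flame` config option.
    let tracing_flame = true;

    if tracing_flame {
        // FlameLayer::with_file returns the layer plus a guard; the guard has to
        // stay alive until shutdown so the folded stack samples get flushed.
        let (flame_layer, _guard) =
            tracing_flame::FlameLayer::with_file("./tracing.folded").unwrap();
        let subscriber = Registry::default().with(flame_layer);
        tracing::subscriber::set_global_default(subscriber).unwrap();
    } else {
        tracing_subscriber::fmt().init();
    }

    // ... run the server; `_guard` is dropped (and the trace flushed) on exit.
}
```

The resulting folded-stack file can then be rendered into a flamegraph with a tool such as inferno's `inferno-flamegraph`.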

@@ -1,6 +1,5 @@
 use crate::{utils, Error, Result};
 use bytes::BytesMut;
-use log::warn;
 use ruma::api::{IncomingResponse, OutgoingRequest, SendAccessToken};
 use std::{
     convert::{TryFrom, TryInto},
@@ -8,6 +7,7 @@ use std::{
     mem,
     time::Duration,
 };
+use tracing::warn;

 pub async fn send_request<T: OutgoingRequest>(
     globals: &crate::database::globals::Globals,

@@ -6,7 +6,6 @@ use std::{
 use super::{DEVICE_ID_LENGTH, SESSION_ID_LENGTH, TOKEN_LENGTH};
 use crate::{database::DatabaseGuard, pdu::PduBuilder, utils, ConduitResult, Error, Ruma};
-use log::info;
 use ruma::{
     api::client::{
         error::ErrorKind,
@@ -28,6 +27,7 @@ use ruma::{
     identifiers::RoomName,
     push, RoomAliasId, RoomId, RoomVersionId, UserId,
 };
+use tracing::info;

 use register::RegistrationKind;
 #[cfg(feature = "conduit_bin")]

@@ -1,5 +1,4 @@
 use crate::{database::DatabaseGuard, ConduitResult, Database, Error, Result, Ruma};
-use log::info;
 use ruma::{
     api::{
         client::{
@@ -22,6 +21,7 @@ use ruma::{
     serde::Raw,
     ServerName, UInt,
 };
+use tracing::info;

 #[cfg(feature = "conduit_bin")]
 use rocket::{get, post, put};

@@ -4,7 +4,6 @@ use crate::{
     pdu::{PduBuilder, PduEvent},
     server_server, utils, ConduitResult, Database, Error, Result, Ruma,
 };
-use log::{debug, error, warn};
 use member::{MemberEventContent, MembershipState};
 use rocket::futures;
 use ruma::{
@@ -34,6 +33,7 @@ use std::{
     sync::{Arc, RwLock},
     time::{Duration, Instant},
 };
+use tracing::{debug, error, warn};

 #[cfg(feature = "conduit_bin")]
 use rocket::{get, post};

@@ -2,7 +2,6 @@ use crate::{
     client_server::invite_helper, database::DatabaseGuard, pdu::PduBuilder, ConduitResult, Error,
     Ruma,
 };
-use log::info;
 use ruma::{
     api::client::{
         error::ErrorKind,
@@ -16,6 +15,7 @@ use ruma::{
     RoomAliasId, RoomId, RoomVersionId,
 };
 use std::{cmp::max, collections::BTreeMap, convert::TryFrom, sync::Arc};
+use tracing::info;

 #[cfg(feature = "conduit_bin")]
 use rocket::{get, post};

@@ -1,6 +1,5 @@
 use super::{DEVICE_ID_LENGTH, TOKEN_LENGTH};
 use crate::{database::DatabaseGuard, utils, ConduitResult, Error, Ruma};
-use log::info;
 use ruma::{
     api::client::{
         error::ErrorKind,
@@ -9,6 +8,7 @@ use ruma::{
     UserId,
 };
 use serde::Deserialize;
+use tracing::info;

 #[derive(Debug, Deserialize)]
 struct Claims {

@@ -1,5 +1,4 @@
 use crate::{database::DatabaseGuard, ConduitResult, Database, Error, Result, Ruma, RumaResponse};
-use log::{error, warn};
 use ruma::{
     api::client::r0::{sync::sync_events, uiaa::UiaaResponse},
     events::{room::member::MembershipState, AnySyncEphemeralRoomEvent, EventType},
@@ -13,6 +12,7 @@ use std::{
     time::Duration,
 };
 use tokio::sync::watch::Sender;
+use tracing::{error, warn};

 #[cfg(feature = "conduit_bin")]
 use rocket::{get, tokio};

@@ -17,7 +17,6 @@ pub mod users;
 use crate::{utils, Error, Result};
 use abstraction::DatabaseEngine;
 use directories::ProjectDirs;
-use log::error;
 use lru_cache::LruCache;
 use rocket::{
     futures::{channel::mpsc, stream::FuturesUnordered, StreamExt},
@@ -36,6 +35,7 @@ use std::{
     sync::{Arc, Mutex, RwLock},
 };
 use tokio::sync::{OwnedRwLockReadGuard, RwLock as TokioRwLock, Semaphore};
+use tracing::{debug, error, warn};

 use self::proxy::ProxyConfig;
@@ -69,6 +69,8 @@ pub struct Config {
     allow_federation: bool,
     #[serde(default = "false_fn")]
     pub allow_jaeger: bool,
+    #[serde(default = "false_fn")]
+    pub tracing_flame: bool,
     #[serde(default)]
     proxy: ProxyConfig,
     jwt_secret: Option<String>,
@@ -91,12 +93,12 @@ impl Config {
             .keys()
             .filter(|key| DEPRECATED_KEYS.iter().any(|s| s == key))
         {
-            log::warn!("Config parameter {} is deprecated", key);
+            warn!("Config parameter {} is deprecated", key);
             was_deprecated = true;
         }

         if was_deprecated {
-            log::warn!("Read conduit documentation and check your configuration if any new configuration parameters should be adjusted");
+            warn!("Read conduit documentation and check your configuration if any new configuration parameters should be adjusted");
         }
     }
 }
@@ -193,13 +195,13 @@ impl Database {
         if sled_exists {
             if sqlite_exists {
                 // most likely an in-place directory, only warn
-                log::warn!("Both sled and sqlite databases are detected in database directory");
-                log::warn!("Currently running from the sqlite database, but consider removing sled database files to free up space")
+                warn!("Both sled and sqlite databases are detected in database directory");
+                warn!("Currently running from the sqlite database, but consider removing sled database files to free up space")
             } else {
-                log::error!(
+                error!(
                     "Sled database detected, conduit now uses sqlite for database operations"
                 );
-                log::error!("This database must be converted to sqlite, go to https://github.com/ShadowJonathan/conduit_toolbox#conduit_sled_to_sqlite");
+                error!("This database must be converted to sqlite, go to https://github.com/ShadowJonathan/conduit_toolbox#conduit_sled_to_sqlite");
                 return Err(Error::bad_config(
                     "sled database detected, migrate to sqlite",
                 ));
@@ -291,7 +293,7 @@ impl Database {
                 statehash_shortstatehash: builder.open_tree("statehash_shortstatehash")?,
                 eventid_outlierpdu: builder.open_tree("eventid_outlierpdu")?,
-                prevevent_parent: builder.open_tree("prevevent_parent")?,
+                referencedevents: builder.open_tree("referencedevents")?,
                 pdu_cache: Mutex::new(LruCache::new(100_000)),
                 auth_chain_cache: Mutex::new(LruCache::new(100_000)),
             },
@@ -444,10 +446,12 @@ impl Database {
     #[cfg(feature = "conduit_bin")]
     pub async fn start_on_shutdown_tasks(db: Arc<TokioRwLock<Self>>, shutdown: Shutdown) {
+        use tracing::info;
+
         tokio::spawn(async move {
             shutdown.await;

-            log::info!(target: "shutdown-sync", "Received shutdown notification, notifying sync helpers...");
+            info!(target: "shutdown-sync", "Received shutdown notification, notifying sync helpers...");

             db.read().await.globals.rotate.fire();
         });
@@ -543,22 +547,25 @@ impl Database {
             futures.next().await;
         }

+    #[tracing::instrument(skip(self))]
     pub async fn flush(&self) -> Result<()> {
         let start = std::time::Instant::now();

         let res = self._db.flush();

-        log::debug!("flush: took {:?}", start.elapsed());
+        debug!("flush: took {:?}", start.elapsed());

         res
     }

     #[cfg(feature = "sqlite")]
+    #[tracing::instrument(skip(self))]
     pub fn flush_wal(&self) -> Result<()> {
         self._db.flush_wal()
     }

     #[cfg(feature = "sqlite")]
+    #[tracing::instrument(skip(engine, config))]
     pub async fn start_spillover_reap_task(engine: Arc<Engine>, config: &Config) {
         let fraction = config.sqlite_spillover_reap_fraction.clamp(0.01, 1.0);
         let interval_secs = config.sqlite_spillover_reap_interval_secs as u64;
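Editor's note: the `#[tracing::instrument]` attributes added in this hunk (and throughout the rest of the diff) are what make the flame and Jaeger output useful, since each call gets its own span. A standalone illustration of the pattern, not code from this commit: `skip(...)` keeps the listed arguments out of the span's recorded fields, which matters for `self` handles and raw byte keys that have no useful `Debug` output.

```rust
use tracing::{debug, instrument};

// `skip` omits `key` and `value` from the span fields; explicitly recorded
// fields (key_len here) still show up on every event emitted inside the span.
#[instrument(skip(key, value), fields(key_len = key.len()))]
fn store(key: &[u8], value: &[u8]) {
    debug!(value_len = value.len(), "storing entry");
}

fn main() {
    tracing_subscriber::fmt().init(); // assumes tracing-subscriber's default "fmt" feature
    store(b"alice", b"@alice:example.org");
}
```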
@@ -585,11 +592,13 @@ impl Database {
     }

     #[cfg(feature = "sqlite")]
+    #[tracing::instrument(skip(lock, config))]
     pub async fn start_wal_clean_task(lock: &Arc<TokioRwLock<Self>>, config: &Config) {
         use tokio::time::{interval, timeout};

         #[cfg(unix)]
         use tokio::signal::unix::{signal, SignalKind};
+        use tracing::info;

         use std::{
             sync::Weak,
@@ -611,41 +620,41 @@ impl Database {
             #[cfg(unix)]
             tokio::select! {
                 _ = i.tick(), if do_timer => {
-                    log::info!(target: "wal-trunc", "Timer ticked")
+                    info!(target: "wal-trunc", "Timer ticked")
                 }
                 _ = s.recv() => {
-                    log::info!(target: "wal-trunc", "Received SIGHUP")
+                    info!(target: "wal-trunc", "Received SIGHUP")
                 }
             };
             #[cfg(not(unix))]
             if do_timer {
                 i.tick().await;
-                log::info!(target: "wal-trunc", "Timer ticked")
+                info!(target: "wal-trunc", "Timer ticked")
             } else {
                 // timer disabled, and there's no concept of signals on windows, bailing...
                 return;
             }

             if let Some(arc) = Weak::upgrade(&weak) {
-                log::info!(target: "wal-trunc", "Rotating sync helpers...");
+                info!(target: "wal-trunc", "Rotating sync helpers...");
                 // This actually creates a very small race condition between firing this and trying to acquire the subsequent write lock.
                 // Though it is not a huge deal if the write lock doesn't "catch", as it'll harmlessly time out.
                 arc.read().await.globals.rotate.fire();

-                log::info!(target: "wal-trunc", "Locking...");
+                info!(target: "wal-trunc", "Locking...");
                 let guard = {
                     if let Ok(guard) = timeout(lock_timeout, arc.write()).await {
                         guard
                     } else {
-                        log::info!(target: "wal-trunc", "Lock failed in timeout, canceled.");
+                        info!(target: "wal-trunc", "Lock failed in timeout, canceled.");
                         continue;
                     }
                 };
-                log::info!(target: "wal-trunc", "Locked, flushing...");
+                info!(target: "wal-trunc", "Locked, flushing...");
                 let start = Instant::now();
                 if let Err(e) = guard.flush_wal() {
-                    log::error!(target: "wal-trunc", "Errored: {}", e);
+                    error!(target: "wal-trunc", "Errored: {}", e);
                 } else {
-                    log::info!(target: "wal-trunc", "Flushed in {:?}", start.elapsed());
+                    info!(target: "wal-trunc", "Flushed in {:?}", start.elapsed());
                 }
             } else {
                 break;
@@ -1,7 +1,7 @@
 use super::super::Config;
 use crate::{utils, Result};
-use log::warn;
 use std::{future::Future, pin::Pin, sync::Arc};
+use tracing::warn;

 use super::{DatabaseEngine, Tree};

@@ -3,9 +3,8 @@ use crate::{database::Config, Result};
 use crossbeam::channel::{
     bounded, unbounded, Receiver as ChannelReceiver, Sender as ChannelSender, TryRecvError,
 };
-use log::debug;
 use parking_lot::{Mutex, MutexGuard, RwLock};
-use rusqlite::{params, Connection, DatabaseName::Main, OptionalExtension};
+use rusqlite::{params, Connection, DatabaseName::Main, OptionalExtension, Params};
 use std::{
     collections::HashMap,
     future::Future,
@@ -13,10 +12,11 @@ use std::{
     path::{Path, PathBuf},
     pin::Pin,
     sync::Arc,
-    thread,
     time::{Duration, Instant},
 };
+use threadpool::ThreadPool;
 use tokio::sync::oneshot::Sender;
+use tracing::{debug, warn};

 struct Pool {
     writer: Mutex<Connection>,
@@ -86,9 +86,9 @@ impl Deref for RecycledConn {
 impl Drop for RecycledConn {
     fn drop(&mut self) {
         if let Some(conn) = self.0.take() {
-            log::debug!("Recycled connection");
+            debug!("Recycled connection");
             if let Err(e) = self.1.send(conn) {
-                log::warn!("Recycling a connection led to the following error: {:?}", e)
+                warn!("Recycling a connection led to the following error: {:?}", e)
             }
         }
     }
@@ -149,14 +149,14 @@ impl Pool {
             }
         }

-        log::debug!("read_lock: All permanent readers locked, obtaining spillover reader...");
+        debug!("read_lock: All permanent readers locked, obtaining spillover reader...");

         // We didn't get a connection from the permanent pool, so we'll dumpster-dive for recycled connections.
         // Either we have a connection or we don't; if we don't, we make a new one.
         let conn = match self.spills.try_take() {
             Some(conn) => conn,
             None => {
-                log::debug!("read_lock: No recycled connections left, creating new one...");
+                debug!("read_lock: No recycled connections left, creating new one...");
                 Self::prepare_conn(&self.path, None).unwrap()
             }
         };
@@ -169,7 +169,7 @@ impl Pool {
         // If the spillover readers are more than the number of total readers, there might be a problem.
         if now_count > self.readers.len() {
-            log::warn!(
+            warn!(
                 "Database is under high load. Consider increasing sqlite_read_pool_size ({} spillover readers exist)",
                 now_count
             );
@@ -182,6 +182,7 @@ impl Pool {

 pub struct Engine {
     pool: Pool,
+    iter_pool: Mutex<ThreadPool>,
 }

 impl DatabaseEngine for Engine {
@@ -195,7 +196,10 @@ impl DatabaseEngine for Engine {
         pool.write_lock()
             .execute("CREATE TABLE IF NOT EXISTS _noop (\"key\" INT)", params![])?;

-        let arc = Arc::new(Engine { pool });
+        let arc = Arc::new(Engine {
+            pool,
+            iter_pool: Mutex::new(ThreadPool::new(10)),
+        });

         Ok(arc)
     }
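Editor's note: the `Engine` now carries an `iter_pool` created with `ThreadPool::new(10)`, so iterator queries reuse a fixed set of worker threads instead of spawning a fresh thread per call; when all ten workers are busy, the hunks below fall back to `std::thread::spawn`. A standalone sketch of the threadpool 1.8 API this relies on (the values are arbitrary):

```rust
use std::sync::mpsc::channel;
use threadpool::ThreadPool;

fn main() {
    let pool = ThreadPool::new(10); // ten long-lived worker threads

    let (tx, rx) = channel();
    for i in 0..4 {
        let tx = tx.clone();
        // execute() queues the closure; a free worker picks it up.
        pool.execute(move || tx.send(i * i).expect("receiver alive"));
    }
    drop(tx);

    // active_count()/max_count() are what iter_from_thread below uses to decide
    // whether the pool is saturated.
    println!("busy: {}/{}", pool.active_count(), pool.max_count());
    println!("results: {:?}", rx.iter().collect::<Vec<_>>());
}
```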
@@ -259,7 +263,7 @@ impl Engine {
             }
         }

-        log::debug!("Reaped {} connections", reaped);
+        debug!("Reaped {} connections", reaped);
     }
 }
@@ -272,6 +276,7 @@ pub struct SqliteTable {
 type TupleOfBytes = (Vec<u8>, Vec<u8>);

 impl SqliteTable {
+    #[tracing::instrument(skip(self, guard, key))]
     fn get_with_guard(&self, guard: &Connection, key: &[u8]) -> Result<Option<Vec<u8>>> {
         Ok(guard
             .prepare(format!("SELECT value FROM {} WHERE key = ?", self.name).as_str())?
@@ -279,6 +284,7 @@ impl SqliteTable {
             .optional()?)
     }

+    #[tracing::instrument(skip(self, guard, key, value))]
     fn insert_with_guard(&self, guard: &Connection, key: &[u8], value: &[u8]) -> Result<()> {
         guard.execute(
             format!(
@@ -291,41 +297,67 @@ impl SqliteTable {
         Ok(())
     }

-    fn _iter_from_thread<F>(&self, f: F) -> Box<dyn Iterator<Item = TupleOfBytes> + Send>
-    where
-        F: (for<'a> FnOnce(&'a Connection, ChannelSender<TupleOfBytes>)) + Send + 'static,
-    {
+    #[tracing::instrument(skip(self, sql, param))]
+    fn iter_from_thread(
+        &self,
+        sql: String,
+        param: Option<Vec<u8>>,
+    ) -> Box<dyn Iterator<Item = TupleOfBytes> + Send + Sync> {
         let (s, r) = bounded::<TupleOfBytes>(5);

-        let engine = self.engine.clone();
-        thread::spawn(move || {
-            let _ = f(&engine.pool.read_lock(), s);
-        });
+        let engine = Arc::clone(&self.engine);
+
+        let lock = self.engine.iter_pool.lock();
+        if lock.active_count() < lock.max_count() {
+            lock.execute(move || {
+                if let Some(param) = param {
+                    iter_from_thread_work(&engine.pool.read_lock(), &s, &sql, [param]);
+                } else {
+                    iter_from_thread_work(&engine.pool.read_lock(), &s, &sql, []);
+                }
+            });
+        } else {
+            std::thread::spawn(move || {
+                if let Some(param) = param {
+                    iter_from_thread_work(&engine.pool.read_lock(), &s, &sql, [param]);
+                } else {
+                    iter_from_thread_work(&engine.pool.read_lock(), &s, &sql, []);
+                }
+            });
+        }

         Box::new(r.into_iter())
     }
 }

-macro_rules! iter_from_thread {
-    ($self:expr, $sql:expr, $param:expr) => {
-        $self._iter_from_thread(move |guard, s| {
-            let _ = guard
-                .prepare($sql)
-                .unwrap()
-                .query_map($param, |row| Ok((row.get_unwrap(0), row.get_unwrap(1))))
-                .unwrap()
-                .map(|r| r.unwrap())
-                .try_for_each(|bob| s.send(bob));
-        })
-    };
+fn iter_from_thread_work<P>(
+    guard: &HoldingConn<'_>,
+    s: &ChannelSender<(Vec<u8>, Vec<u8>)>,
+    sql: &str,
+    params: P,
+) where
+    P: Params,
+{
+    for bob in guard
+        .prepare(sql)
+        .unwrap()
+        .query_map(params, |row| Ok((row.get_unwrap(0), row.get_unwrap(1))))
+        .unwrap()
+        .map(|r| r.unwrap())
+    {
+        if s.send(bob).is_err() {
+            return;
+        }
+    }
 }

 impl Tree for SqliteTable {
+    #[tracing::instrument(skip(self, key))]
     fn get(&self, key: &[u8]) -> Result<Option<Vec<u8>>> {
         self.get_with_guard(&self.engine.pool.read_lock(), key)
     }

+    #[tracing::instrument(skip(self, key, value))]
     fn insert(&self, key: &[u8], value: &[u8]) -> Result<()> {
         let guard = self.engine.pool.write_lock();
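Editor's note on the hunk above: `iter_from_thread` now hands the SQL query to a pooled thread and returns a channel-backed iterator, so callers drain rows lazily without blocking a reader connection on their own thread, and dropping the iterator early simply closes the channel (the `send(..).is_err()` check in `iter_from_thread_work` then stops the query). A hypothetical consumer, assuming a `&dyn Tree` handle from Conduit's own abstraction layer; this is a sketch, not code from this commit:

```rust
// `Tree` is Conduit's internal key-value trait; `iter_from` (further down in
// this file) builds the SQL and returns the boxed, channel-backed iterator.
fn first_page(table: &dyn Tree, after: &[u8]) -> Vec<(Vec<u8>, Vec<u8>)> {
    table
        .iter_from(after, false) // iterate forwards starting at `after`
        .take(50)                // stop early; the worker notices the closed channel
        .collect()
}
```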
@@ -365,6 +397,7 @@ impl Tree for SqliteTable {
         Ok(())
     }

+    #[tracing::instrument(skip(self, key))]
     fn remove(&self, key: &[u8]) -> Result<()> {
         let guard = self.engine.pool.write_lock();
@@ -385,15 +418,13 @@ impl Tree for SqliteTable {
         Ok(())
     }

+    #[tracing::instrument(skip(self))]
     fn iter<'a>(&'a self) -> Box<dyn Iterator<Item = TupleOfBytes> + Send + 'a> {
         let name = self.name.clone();
-        iter_from_thread!(
-            self,
-            format!("SELECT key, value FROM {}", name).as_str(),
-            params![]
-        )
+        self.iter_from_thread(format!("SELECT key, value FROM {}", name), None)
     }

+    #[tracing::instrument(skip(self, from, backwards))]
     fn iter_from<'a>(
         &'a self,
         from: &[u8],
@@ -402,28 +433,25 @@ impl Tree for SqliteTable {
         let name = self.name.clone();
         let from = from.to_vec(); // TODO change interface?
         if backwards {
-            iter_from_thread!(
-                self,
+            self.iter_from_thread(
                 format!(
                     "SELECT key, value FROM {} WHERE key <= ? ORDER BY key DESC",
                     name
-                )
-                .as_str(),
-                [from]
+                ),
+                Some(from),
             )
         } else {
-            iter_from_thread!(
-                self,
+            self.iter_from_thread(
                 format!(
                     "SELECT key, value FROM {} WHERE key >= ? ORDER BY key ASC",
                     name
-                )
-                .as_str(),
-                [from]
+                ),
+                Some(from),
             )
         }
     }

+    #[tracing::instrument(skip(self, key))]
     fn increment(&self, key: &[u8]) -> Result<Vec<u8>> {
         let guard = self.engine.pool.write_lock();
@@ -446,18 +474,17 @@ impl Tree for SqliteTable {
         Ok(new)
     }

+    #[tracing::instrument(skip(self, prefix))]
     fn scan_prefix<'a>(
         &'a self,
         prefix: Vec<u8>,
     ) -> Box<dyn Iterator<Item = TupleOfBytes> + Send + 'a> {
         // let name = self.name.clone();
-        // iter_from_thread!(
-        //     self,
+        // self.iter_from_thread(
         //     format!(
         //         "SELECT key, value FROM {} WHERE key BETWEEN ?1 AND ?1 || X'FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF' ORDER BY key ASC",
         //         name
         //     )
-        //     .as_str(),
         //     [prefix]
         // )
         Box::new(
@@ -466,6 +493,7 @@ impl Tree for SqliteTable {
         )
     }

+    #[tracing::instrument(skip(self, prefix))]
     fn watch_prefix<'a>(&'a self, prefix: &[u8]) -> Pin<Box<dyn Future<Output = ()> + Send + 'a>> {
         let (tx, rx) = tokio::sync::oneshot::channel();
@@ -481,6 +509,7 @@ impl Tree for SqliteTable {
         })
     }

+    #[tracing::instrument(skip(self))]
     fn clear(&self) -> Result<()> {
         debug!("clear: running");
         self.engine

@@ -16,6 +16,7 @@ pub struct AccountData {
 impl AccountData {
     /// Places one event in the account data of the user and removes the previous entry.
+    #[tracing::instrument(skip(self, room_id, user_id, event_type, data, globals))]
     pub fn update<T: Serialize>(
         &self,
         room_id: Option<&RoomId>,
@@ -60,6 +61,7 @@ impl AccountData {
     }

     /// Searches the account data for a specific kind.
+    #[tracing::instrument(skip(self, room_id, user_id, kind))]
     pub fn get<T: DeserializeOwned>(
         &self,
         room_id: Option<&RoomId>,
@@ -74,7 +76,7 @@ impl AccountData {
     }

     /// Returns all changes to the account data that happened after `since`.
-    #[tracing::instrument(skip(self))]
+    #[tracing::instrument(skip(self, room_id, user_id, since))]
     pub fn changes_since(
         &self,
         room_id: Option<&RoomId>,
@@ -122,6 +124,7 @@ impl AccountData {
         Ok(userdata)
     }

+    #[tracing::instrument(skip(self, room_id, user_id, kind))]
     fn find_event(
         &self,
         room_id: Option<&RoomId>,

@@ -4,13 +4,13 @@ use std::{
 };

 use crate::{pdu::PduBuilder, Database};
-use log::warn;
 use rocket::futures::{channel::mpsc, stream::StreamExt};
 use ruma::{
     events::{room::message, EventType},
     UserId,
 };
 use tokio::sync::{MutexGuard, RwLock, RwLockReadGuard};
+use tracing::warn;

 pub enum AdminCommand {
     RegisterAppservice(serde_yaml::Value),

@@ -1,5 +1,4 @@
 use crate::{database::Config, utils, ConduitResult, Error, Result};
-use log::{error, info};
 use ruma::{
     api::{
         client::r0::sync::sync_events,
@@ -17,6 +16,7 @@ use std::{
     time::{Duration, Instant},
 };
 use tokio::sync::{broadcast, watch::Receiver, Mutex, Semaphore};
+use tracing::{error, info};
 use trust_dns_resolver::TokioAsyncResolver;

 use super::abstraction::Tree;
@@ -56,6 +56,7 @@ struct MatrixServerVerifier {
 }

 impl ServerCertVerifier for MatrixServerVerifier {
+    #[tracing::instrument(skip(self, roots, presented_certs, dns_name, ocsp_response))]
     fn verify_server_cert(
         &self,
         roots: &rustls::RootCertStore,
@@ -220,11 +221,13 @@ impl Globals {
         &self.reqwest_client
     }

+    #[tracing::instrument(skip(self))]
     pub fn next_count(&self) -> Result<u64> {
         utils::u64_from_bytes(&self.globals.increment(COUNTER)?)
             .map_err(|_| Error::bad_database("Count has invalid bytes."))
     }

+    #[tracing::instrument(skip(self))]
     pub fn current_count(&self) -> Result<u64> {
         self.globals.get(COUNTER)?.map_or(Ok(0_u64), |bytes| {
             utils::u64_from_bytes(&bytes)

@@ -1,6 +1,5 @@
 use crate::{Database, Error, PduEvent, Result};
 use bytes::BytesMut;
-use log::{error, info, warn};
 use ruma::{
     api::{
         client::r0::push::{get_pushers, set_pusher, PusherKind},
@@ -15,6 +14,7 @@ use ruma::{
     push::{Action, PushConditionRoomCtx, PushFormat, Ruleset, Tweak},
     uint, UInt, UserId,
 };
+use tracing::{error, info, warn};

 use std::{convert::TryFrom, fmt::Debug, mem, sync::Arc};
@@ -26,6 +26,7 @@ pub struct PushData {
 }

 impl PushData {
+    #[tracing::instrument(skip(self, sender, pusher))]
     pub fn set_pusher(&self, sender: &UserId, pusher: set_pusher::Pusher) -> Result<()> {
         let mut key = sender.as_bytes().to_vec();
         key.push(0xff);
@@ -48,6 +49,7 @@ impl PushData {
         Ok(())
     }

+    #[tracing::instrument(skip(self, senderkey))]
     pub fn get_pusher(&self, senderkey: &[u8]) -> Result<Option<get_pushers::Pusher>> {
         self.senderkey_pusher
             .get(senderkey)?
@@ -58,6 +60,7 @@ impl PushData {
             .transpose()
     }

+    #[tracing::instrument(skip(self, sender))]
     pub fn get_pushers(&self, sender: &UserId) -> Result<Vec<get_pushers::Pusher>> {
         let mut prefix = sender.as_bytes().to_vec();
         prefix.push(0xff);
@@ -71,6 +74,7 @@ impl PushData {
             .collect()
     }

+    #[tracing::instrument(skip(self, sender))]
     pub fn get_pusher_senderkeys<'a>(
         &'a self,
         sender: &UserId,
@@ -82,6 +86,7 @@ impl PushData {
     }
 }

+#[tracing::instrument(skip(globals, destination, request))]
 pub async fn send_request<T: OutgoingRequest>(
     globals: &crate::database::globals::Globals,
     destination: &str,
@@ -155,6 +160,7 @@ where
     }
 }

+#[tracing::instrument(skip(user, unread, pusher, ruleset, pdu, db))]
 pub async fn send_push_notice(
     user: &UserId,
     unread: UInt,
@@ -194,6 +200,7 @@ pub async fn send_push_notice(
     Ok(())
 }

+#[tracing::instrument(skip(user, ruleset, pdu, db))]
 pub fn get_actions<'a>(
     user: &UserId,
     ruleset: &'a Ruleset,
@@ -225,6 +232,7 @@ pub fn get_actions<'a>(
     Ok(ruleset.get_actions(&pdu.to_sync_room_event(), &ctx))
 }

+#[tracing::instrument(skip(unread, pusher, tweaks, event, db))]
 async fn send_notice(
     unread: UInt,
     pusher: &get_pushers::Pusher,

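The pusher trees above build composite keys by joining components with a `0xff` separator (`sender + 0xff + pushkey`) and iterate by prefix. A sketch of building and splitting such a key; the helper names are made up for the example.

```rust
// Build a composite key `sender + 0xff + pushkey`, then split it back.
// `make_key`/`split_key` are illustrative helpers, not Conduit APIs.
fn make_key(sender: &str, pushkey: &str) -> Vec<u8> {
    let mut key = sender.as_bytes().to_vec();
    key.push(0xff);
    key.extend_from_slice(pushkey.as_bytes());
    key
}

fn split_key(key: &[u8]) -> Option<(&[u8], &[u8])> {
    let mut parts = key.splitn(2, |&b| b == 0xff);
    Some((parts.next()?, parts.next()?))
}

fn main() {
    let key = make_key("@alice:example.com", "pusher-1");
    let (sender, pushkey) = split_key(&key).unwrap();
    assert_eq!(sender, "@alice:example.com".as_bytes());
    assert_eq!(pushkey, "pusher-1".as_bytes());
    println!("sender = {}", String::from_utf8_lossy(sender));
}
```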
@ -5,7 +5,6 @@ use member::MembershipState;
use tokio::sync::MutexGuard; use tokio::sync::MutexGuard;
use crate::{pdu::PduBuilder, utils, Database, Error, PduEvent, Result}; use crate::{pdu::PduBuilder, utils, Database, Error, PduEvent, Result};
use log::{debug, error, warn};
use lru_cache::LruCache; use lru_cache::LruCache;
use regex::Regex; use regex::Regex;
use ring::digest; use ring::digest;
@ -27,6 +26,7 @@ use std::{
mem, mem,
sync::{Arc, Mutex}, sync::{Arc, Mutex},
}; };
use tracing::{debug, error, warn};
use super::{abstraction::Tree, admin::AdminCommand, pusher}; use super::{abstraction::Tree, admin::AdminCommand, pusher};
@ -82,7 +82,7 @@ pub struct Rooms {
pub(super) eventid_outlierpdu: Arc<dyn Tree>, pub(super) eventid_outlierpdu: Arc<dyn Tree>,
/// RoomId + EventId -> Parent PDU EventId. /// RoomId + EventId -> Parent PDU EventId.
pub(super) prevevent_parent: Arc<dyn Tree>, pub(super) referencedevents: Arc<dyn Tree>,
pub(super) pdu_cache: Mutex<LruCache<EventId, Arc<PduEvent>>>, pub(super) pdu_cache: Mutex<LruCache<EventId, Arc<PduEvent>>>,
pub(super) auth_chain_cache: Mutex<LruCache<EventId, HashSet<EventId>>>, pub(super) auth_chain_cache: Mutex<LruCache<EventId, HashSet<EventId>>>,
@ -617,6 +617,7 @@ impl Rooms {
} }
/// Returns the leaf pdus of a room. /// Returns the leaf pdus of a room.
#[tracing::instrument(skip(self))]
pub fn get_pdu_leaves(&self, room_id: &RoomId) -> Result<HashSet<EventId>> { pub fn get_pdu_leaves(&self, room_id: &RoomId) -> Result<HashSet<EventId>> {
let mut prefix = room_id.as_bytes().to_vec(); let mut prefix = room_id.as_bytes().to_vec();
prefix.push(0xff); prefix.push(0xff);
@ -636,6 +637,7 @@ impl Rooms {
/// ///
/// The provided `event_ids` become the new leaves; this allows a room to have multiple /// The provided `event_ids` become the new leaves; this allows a room to have multiple
/// `prev_events`. /// `prev_events`.
#[tracing::instrument(skip(self))]
pub fn replace_pdu_leaves(&self, room_id: &RoomId, event_ids: &[EventId]) -> Result<()> { pub fn replace_pdu_leaves(&self, room_id: &RoomId, event_ids: &[EventId]) -> Result<()> {
let mut prefix = room_id.as_bytes().to_vec(); let mut prefix = room_id.as_bytes().to_vec();
prefix.push(0xff); prefix.push(0xff);
@ -653,13 +655,15 @@ impl Rooms {
Ok(()) Ok(())
} }
#[tracing::instrument(skip(self))]
pub fn is_event_referenced(&self, room_id: &RoomId, event_id: &EventId) -> Result<bool> { pub fn is_event_referenced(&self, room_id: &RoomId, event_id: &EventId) -> Result<bool> {
let mut key = room_id.as_bytes().to_vec(); let mut key = room_id.as_bytes().to_vec();
key.extend_from_slice(event_id.as_bytes()); key.extend_from_slice(event_id.as_bytes());
Ok(self.prevevent_parent.get(&key)?.is_some()) Ok(self.referencedevents.get(&key)?.is_some())
} }
/// Returns the pdu from the outlier tree. /// Returns the pdu from the outlier tree.
#[tracing::instrument(skip(self))]
pub fn get_pdu_outlier(&self, event_id: &EventId) -> Result<Option<PduEvent>> { pub fn get_pdu_outlier(&self, event_id: &EventId) -> Result<Option<PduEvent>> {
self.eventid_outlierpdu self.eventid_outlierpdu
.get(event_id.as_bytes())? .get(event_id.as_bytes())?
@ -671,6 +675,7 @@ impl Rooms {
/// Append the PDU as an outlier. /// Append the PDU as an outlier.
/// ///
/// Any event given to this will be processed (state-res) on another thread. /// Any event given to this will be processed (state-res) on another thread.
#[tracing::instrument(skip(self, pdu))]
pub fn add_pdu_outlier(&self, event_id: &EventId, pdu: &CanonicalJsonObject) -> Result<()> { pub fn add_pdu_outlier(&self, event_id: &EventId, pdu: &CanonicalJsonObject) -> Result<()> {
self.eventid_outlierpdu.insert( self.eventid_outlierpdu.insert(
&event_id.as_bytes(), &event_id.as_bytes(),
@ -684,7 +689,7 @@ impl Rooms {
/// ///
/// By this point the incoming event should be fully authenticated; no auth happens /// By this point the incoming event should be fully authenticated; no auth happens
/// in `append_pdu`. /// in `append_pdu`.
#[allow(clippy::too_many_arguments)] #[tracing::instrument(skip(self, pdu, pdu_json, leaves, db))]
pub fn append_pdu( pub fn append_pdu(
&self, &self,
pdu: &PduEvent, pdu: &PduEvent,
@ -721,11 +726,10 @@ impl Rooms {
} }
// We must keep track of all events that have been referenced. // We must keep track of all events that have been referenced.
for leaf in leaves { for prev in &pdu.prev_events {
let mut key = pdu.room_id().as_bytes().to_vec(); let mut key = pdu.room_id().as_bytes().to_vec();
key.extend_from_slice(leaf.as_bytes()); key.extend_from_slice(prev.as_bytes());
self.prevevent_parent self.referencedevents.insert(&key, &[])?;
.insert(&key, pdu.event_id().as_bytes())?;
} }
self.replace_pdu_leaves(&pdu.room_id, leaves)?; self.replace_pdu_leaves(&pdu.room_id, leaves)?;
@ -757,12 +761,11 @@ impl Rooms {
// See if the event matches any known pushers // See if the event matches any known pushers
for user in db for user in db
.users .rooms
.iter() .room_members(&pdu.room_id)
.filter_map(|r| r.ok()) .filter_map(|r| r.ok())
.filter(|user_id| user_id.server_name() == db.globals.server_name()) .filter(|user_id| user_id.server_name() == db.globals.server_name())
.filter(|user_id| !db.users.is_deactivated(user_id).unwrap_or(false)) .filter(|user_id| !db.users.is_deactivated(user_id).unwrap_or(false))
.filter(|user_id| self.is_joined(&user_id, &pdu.room_id).unwrap_or(false))
{ {
// Don't notify the user of their own events // Don't notify the user of their own events
if user == pdu.sender { if user == pdu.sender {
@ -992,6 +995,7 @@ impl Rooms {
Ok(pdu_id) Ok(pdu_id)
} }
#[tracing::instrument(skip(self))]
pub fn reset_notification_counts(&self, user_id: &UserId, room_id: &RoomId) -> Result<()> { pub fn reset_notification_counts(&self, user_id: &UserId, room_id: &RoomId) -> Result<()> {
let mut userroom_id = user_id.as_bytes().to_vec(); let mut userroom_id = user_id.as_bytes().to_vec();
userroom_id.push(0xff); userroom_id.push(0xff);
@ -1005,6 +1009,7 @@ impl Rooms {
Ok(()) Ok(())
} }
#[tracing::instrument(skip(self))]
pub fn notification_count(&self, user_id: &UserId, room_id: &RoomId) -> Result<u64> { pub fn notification_count(&self, user_id: &UserId, room_id: &RoomId) -> Result<u64> {
let mut userroom_id = user_id.as_bytes().to_vec(); let mut userroom_id = user_id.as_bytes().to_vec();
userroom_id.push(0xff); userroom_id.push(0xff);
@ -1019,6 +1024,7 @@ impl Rooms {
.unwrap_or(Ok(0)) .unwrap_or(Ok(0))
} }
#[tracing::instrument(skip(self))]
pub fn highlight_count(&self, user_id: &UserId, room_id: &RoomId) -> Result<u64> { pub fn highlight_count(&self, user_id: &UserId, room_id: &RoomId) -> Result<u64> {
let mut userroom_id = user_id.as_bytes().to_vec(); let mut userroom_id = user_id.as_bytes().to_vec();
userroom_id.push(0xff); userroom_id.push(0xff);
@ -1037,6 +1043,7 @@ impl Rooms {
/// ///
/// This adds all current state events (not including the incoming event) /// This adds all current state events (not including the incoming event)
/// to `stateid_pduid` and adds the incoming event to `eventid_statehash`. /// to `stateid_pduid` and adds the incoming event to `eventid_statehash`.
#[tracing::instrument(skip(self, state, globals))]
pub fn set_event_state( pub fn set_event_state(
&self, &self,
event_id: &EventId, event_id: &EventId,
@ -1121,6 +1128,7 @@ impl Rooms {
/// ///
/// This adds all current state events (not including the incoming event) /// This adds all current state events (not including the incoming event)
/// to `stateid_pduid` and adds the incoming event to `eventid_statehash`. /// to `stateid_pduid` and adds the incoming event to `eventid_statehash`.
#[tracing::instrument(skip(self, new_pdu, globals))]
pub fn append_to_state( pub fn append_to_state(
&self, &self,
new_pdu: &PduEvent, new_pdu: &PduEvent,
@ -1227,6 +1235,7 @@ impl Rooms {
} }
} }
#[tracing::instrument(skip(self, invite_event))]
pub fn calculate_invite_state( pub fn calculate_invite_state(
&self, &self,
invite_event: &PduEvent, invite_event: &PduEvent,
@ -1264,6 +1273,7 @@ impl Rooms {
Ok(state) Ok(state)
} }
#[tracing::instrument(skip(self))]
pub fn set_room_state(&self, room_id: &RoomId, shortstatehash: u64) -> Result<()> { pub fn set_room_state(&self, room_id: &RoomId, shortstatehash: u64) -> Result<()> {
self.roomid_shortstatehash self.roomid_shortstatehash
.insert(room_id.as_bytes(), &shortstatehash.to_be_bytes())?; .insert(room_id.as_bytes(), &shortstatehash.to_be_bytes())?;
@ -1272,6 +1282,7 @@ impl Rooms {
} }
/// Creates a new persisted data unit and adds it to a room. /// Creates a new persisted data unit and adds it to a room.
#[tracing::instrument(skip(self, db, _mutex_lock))]
pub fn build_and_append_pdu( pub fn build_and_append_pdu(
&self, &self,
pdu_builder: PduBuilder, pdu_builder: PduBuilder,
@ -1563,6 +1574,7 @@ impl Rooms {
/// Returns an iterator over all events and their tokens in a room that happened before the /// Returns an iterator over all events and their tokens in a room that happened before the
/// event with id `until` in reverse-chronological order. /// event with id `until` in reverse-chronological order.
#[tracing::instrument(skip(self))]
pub fn pdus_until<'a>( pub fn pdus_until<'a>(
&'a self, &'a self,
user_id: &UserId, user_id: &UserId,
@ -1625,6 +1637,7 @@ impl Rooms {
} }
/// Replace a PDU with the redacted form. /// Replace a PDU with the redacted form.
#[tracing::instrument(skip(self, reason))]
pub fn redact_pdu(&self, event_id: &EventId, reason: &PduEvent) -> Result<()> { pub fn redact_pdu(&self, event_id: &EventId, reason: &PduEvent) -> Result<()> {
if let Some(pdu_id) = self.get_pdu_id(event_id)? { if let Some(pdu_id) = self.get_pdu_id(event_id)? {
let mut pdu = self let mut pdu = self
@ -1642,6 +1655,7 @@ impl Rooms {
} }
/// Update current membership data. /// Update current membership data.
#[tracing::instrument(skip(self, last_state, db))]
pub fn update_membership( pub fn update_membership(
&self, &self,
room_id: &RoomId, room_id: &RoomId,
@ -2026,6 +2040,7 @@ impl Rooms {
} }
/// Makes a user forget a room. /// Makes a user forget a room.
#[tracing::instrument(skip(self))]
pub fn forget(&self, room_id: &RoomId, user_id: &UserId) -> Result<()> { pub fn forget(&self, room_id: &RoomId, user_id: &UserId) -> Result<()> {
let mut userroom_id = user_id.as_bytes().to_vec(); let mut userroom_id = user_id.as_bytes().to_vec();
userroom_id.push(0xff); userroom_id.push(0xff);
@ -2041,6 +2056,7 @@ impl Rooms {
Ok(()) Ok(())
} }
#[tracing::instrument(skip(self, globals))]
pub fn set_alias( pub fn set_alias(
&self, &self,
alias: &RoomAliasId, alias: &RoomAliasId,
@ -2076,6 +2092,7 @@ impl Rooms {
Ok(()) Ok(())
} }
#[tracing::instrument(skip(self))]
pub fn id_from_alias(&self, alias: &RoomAliasId) -> Result<Option<RoomId>> { pub fn id_from_alias(&self, alias: &RoomAliasId) -> Result<Option<RoomId>> {
self.alias_roomid self.alias_roomid
.get(alias.alias().as_bytes())? .get(alias.alias().as_bytes())?
@ -2089,6 +2106,7 @@ impl Rooms {
}) })
} }
#[tracing::instrument(skip(self))]
pub fn room_aliases<'a>( pub fn room_aliases<'a>(
&'a self, &'a self,
room_id: &RoomId, room_id: &RoomId,
@ -2104,6 +2122,7 @@ impl Rooms {
}) })
} }
#[tracing::instrument(skip(self))]
pub fn set_public(&self, room_id: &RoomId, public: bool) -> Result<()> { pub fn set_public(&self, room_id: &RoomId, public: bool) -> Result<()> {
if public { if public {
self.publicroomids.insert(room_id.as_bytes(), &[])?; self.publicroomids.insert(room_id.as_bytes(), &[])?;
@ -2114,10 +2133,12 @@ impl Rooms {
Ok(()) Ok(())
} }
#[tracing::instrument(skip(self))]
pub fn is_public_room(&self, room_id: &RoomId) -> Result<bool> { pub fn is_public_room(&self, room_id: &RoomId) -> Result<bool> {
Ok(self.publicroomids.get(room_id.as_bytes())?.is_some()) Ok(self.publicroomids.get(room_id.as_bytes())?.is_some())
} }
#[tracing::instrument(skip(self))]
pub fn public_rooms(&self) -> impl Iterator<Item = Result<RoomId>> + '_ { pub fn public_rooms(&self) -> impl Iterator<Item = Result<RoomId>> + '_ {
self.publicroomids.iter().map(|(bytes, _)| { self.publicroomids.iter().map(|(bytes, _)| {
RoomId::try_from( RoomId::try_from(
@ -2219,6 +2240,7 @@ impl Rooms {
} }
/// Returns an iterator of all servers participating in this room. /// Returns an iterator of all servers participating in this room.
#[tracing::instrument(skip(self))]
pub fn room_servers<'a>( pub fn room_servers<'a>(
&'a self, &'a self,
room_id: &RoomId, room_id: &RoomId,
@ -2242,6 +2264,7 @@ impl Rooms {
} }
/// Returns an iterator of all rooms a server participates in (as far as we know). /// Returns an iterator of all rooms a server participates in (as far as we know).
#[tracing::instrument(skip(self))]
pub fn server_rooms<'a>( pub fn server_rooms<'a>(
&'a self, &'a self,
server: &ServerName, server: &ServerName,
@ -2287,6 +2310,7 @@ impl Rooms {
} }
/// Returns an iterator over all User IDs who ever joined a room. /// Returns an iterator over all User IDs who ever joined a room.
#[tracing::instrument(skip(self))]
pub fn room_useroncejoined<'a>( pub fn room_useroncejoined<'a>(
&'a self, &'a self,
room_id: &RoomId, room_id: &RoomId,
@ -2494,6 +2518,7 @@ impl Rooms {
}) })
} }
#[tracing::instrument(skip(self))]
pub fn once_joined(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool> { pub fn once_joined(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool> {
let mut userroom_id = user_id.as_bytes().to_vec(); let mut userroom_id = user_id.as_bytes().to_vec();
userroom_id.push(0xff); userroom_id.push(0xff);
@ -2502,6 +2527,7 @@ impl Rooms {
Ok(self.roomuseroncejoinedids.get(&userroom_id)?.is_some()) Ok(self.roomuseroncejoinedids.get(&userroom_id)?.is_some())
} }
#[tracing::instrument(skip(self))]
pub fn is_joined(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool> { pub fn is_joined(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool> {
let mut userroom_id = user_id.as_bytes().to_vec(); let mut userroom_id = user_id.as_bytes().to_vec();
userroom_id.push(0xff); userroom_id.push(0xff);
@ -2510,6 +2536,7 @@ impl Rooms {
Ok(self.userroomid_joined.get(&userroom_id)?.is_some()) Ok(self.userroomid_joined.get(&userroom_id)?.is_some())
} }
#[tracing::instrument(skip(self))]
pub fn is_invited(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool> { pub fn is_invited(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool> {
let mut userroom_id = user_id.as_bytes().to_vec(); let mut userroom_id = user_id.as_bytes().to_vec();
userroom_id.push(0xff); userroom_id.push(0xff);
@ -2518,6 +2545,7 @@ impl Rooms {
Ok(self.userroomid_invitestate.get(&userroom_id)?.is_some()) Ok(self.userroomid_invitestate.get(&userroom_id)?.is_some())
} }
#[tracing::instrument(skip(self))]
pub fn is_left(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool> { pub fn is_left(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool> {
let mut userroom_id = user_id.as_bytes().to_vec(); let mut userroom_id = user_id.as_bytes().to_vec();
userroom_id.push(0xff); userroom_id.push(0xff);
@ -2526,6 +2554,7 @@ impl Rooms {
Ok(self.userroomid_leftstate.get(&userroom_id)?.is_some()) Ok(self.userroomid_leftstate.get(&userroom_id)?.is_some())
} }
#[tracing::instrument(skip(self))]
pub fn auth_chain_cache( pub fn auth_chain_cache(
&self, &self,
) -> std::sync::MutexGuard<'_, LruCache<EventId, HashSet<EventId>>> { ) -> std::sync::MutexGuard<'_, LruCache<EventId, HashSet<EventId>>> {

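The renamed `referencedevents` tree records every event id that some PDU lists in its `prev_events`, and `is_event_referenced` checks that set when leaves are recalculated (see the extremities filtering later in this diff). A sketch of that bookkeeping with plain `HashSet`s standing in for the trees.

```rust
use std::collections::HashSet;

fn main() {
    // The `referencedevents` tree remembers every event id that some PDU
    // has listed in its prev_events.
    let mut referenced: HashSet<String> = HashSet::new();

    // Current leaves of the room, plus a freshly appended event.
    let old_leaves = vec!["$a".to_string(), "$b".to_string()];
    let new_event = "$new".to_string();

    // append_pdu: mark each prev_event (here: the old leaves) as referenced.
    for prev in &old_leaves {
        referenced.insert(prev.clone());
    }

    // Candidate extremities are the old leaves plus the new event; keep
    // only the ones nothing references yet (the is_event_referenced check).
    let mut extremities: HashSet<String> = old_leaves.into_iter().collect();
    extremities.insert(new_event);
    extremities.retain(|id| !referenced.contains(id));

    assert!(extremities.contains("$new") && extremities.len() == 1);
    println!("new leaves: {:?}", extremities);
}
```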
@ -116,6 +116,7 @@ impl RoomEdus {
} }
/// Sets a private read marker at `count`. /// Sets a private read marker at `count`.
#[tracing::instrument(skip(self, globals))]
pub fn private_read_set( pub fn private_read_set(
&self, &self,
room_id: &RoomId, room_id: &RoomId,

@ -10,7 +10,6 @@ use crate::{
appservice_server, database::pusher, server_server, utils, Database, Error, PduEvent, Result, appservice_server, database::pusher, server_server, utils, Database, Error, PduEvent, Result,
}; };
use federation::transactions::send_transaction_message; use federation::transactions::send_transaction_message;
use log::{error, warn};
use ring::digest; use ring::digest;
use rocket::futures::{ use rocket::futures::{
channel::mpsc, channel::mpsc,
@ -34,6 +33,7 @@ use tokio::{
select, select,
sync::{RwLock, Semaphore}, sync::{RwLock, Semaphore},
}; };
use tracing::{error, warn};
use super::abstraction::Tree; use super::abstraction::Tree;
@ -45,6 +45,7 @@ pub enum OutgoingKind {
} }
impl OutgoingKind { impl OutgoingKind {
#[tracing::instrument(skip(self))]
pub fn get_prefix(&self) -> Vec<u8> { pub fn get_prefix(&self) -> Vec<u8> {
let mut prefix = match self { let mut prefix = match self {
OutgoingKind::Appservice(server) => { OutgoingKind::Appservice(server) => {
@ -223,6 +224,7 @@ impl Sending {
}); });
} }
#[tracing::instrument(skip(outgoing_kind, new_events, current_transaction_status, db))]
fn select_events( fn select_events(
outgoing_kind: &OutgoingKind, outgoing_kind: &OutgoingKind,
new_events: Vec<(SendingEventType, Vec<u8>)>, // Events we want to send: event and full key new_events: Vec<(SendingEventType, Vec<u8>)>, // Events we want to send: event and full key
@ -295,6 +297,7 @@ impl Sending {
Ok(Some(events)) Ok(Some(events))
} }
#[tracing::instrument(skip(db, server))]
pub fn select_edus(db: &Database, server: &ServerName) -> Result<(Vec<Vec<u8>>, u64)> { pub fn select_edus(db: &Database, server: &ServerName) -> Result<(Vec<Vec<u8>>, u64)> {
// u64: count of last edu // u64: count of last edu
let since = db let since = db
@ -371,7 +374,7 @@ impl Sending {
Ok((events, max_edu_count)) Ok((events, max_edu_count))
} }
#[tracing::instrument(skip(self))] #[tracing::instrument(skip(self, pdu_id, senderkey))]
pub fn send_push_pdu(&self, pdu_id: &[u8], senderkey: Vec<u8>) -> Result<()> { pub fn send_push_pdu(&self, pdu_id: &[u8], senderkey: Vec<u8>) -> Result<()> {
let mut key = b"$".to_vec(); let mut key = b"$".to_vec();
key.extend_from_slice(&senderkey); key.extend_from_slice(&senderkey);
@ -383,7 +386,7 @@ impl Sending {
Ok(()) Ok(())
} }
#[tracing::instrument(skip(self))] #[tracing::instrument(skip(self, server, pdu_id))]
pub fn send_pdu(&self, server: &ServerName, pdu_id: &[u8]) -> Result<()> { pub fn send_pdu(&self, server: &ServerName, pdu_id: &[u8]) -> Result<()> {
let mut key = server.as_bytes().to_vec(); let mut key = server.as_bytes().to_vec();
key.push(0xff); key.push(0xff);
@ -394,7 +397,7 @@ impl Sending {
Ok(()) Ok(())
} }
#[tracing::instrument(skip(self))] #[tracing::instrument(skip(self, server, serialized))]
pub fn send_reliable_edu(&self, server: &ServerName, serialized: &[u8]) -> Result<()> { pub fn send_reliable_edu(&self, server: &ServerName, serialized: &[u8]) -> Result<()> {
let mut key = server.as_bytes().to_vec(); let mut key = server.as_bytes().to_vec();
key.push(0xff); key.push(0xff);
@ -418,7 +421,7 @@ impl Sending {
Ok(()) Ok(())
} }
#[tracing::instrument] #[tracing::instrument(skip(keys))]
fn calculate_hash(keys: &[&[u8]]) -> Vec<u8> { fn calculate_hash(keys: &[&[u8]]) -> Vec<u8> {
// We only hash the pdu's event ids, not the whole pdu // We only hash the pdu's event ids, not the whole pdu
let bytes = keys.join(&0xff); let bytes = keys.join(&0xff);
@ -426,7 +429,7 @@ impl Sending {
hash.as_ref().to_owned() hash.as_ref().to_owned()
} }
#[tracing::instrument(skip(db))] #[tracing::instrument(skip(db, events, kind))]
async fn handle_events( async fn handle_events(
kind: OutgoingKind, kind: OutgoingKind,
events: Vec<SendingEventType>, events: Vec<SendingEventType>,
@ -658,6 +661,7 @@ impl Sending {
} }
} }
#[tracing::instrument(skip(key))]
fn parse_servercurrentevent(key: &[u8]) -> Result<(OutgoingKind, SendingEventType)> { fn parse_servercurrentevent(key: &[u8]) -> Result<(OutgoingKind, SendingEventType)> {
// Appservices start with a plus // Appservices start with a plus
Ok::<_, Error>(if key.starts_with(b"+") { Ok::<_, Error>(if key.starts_with(b"+") {
@ -723,7 +727,7 @@ impl Sending {
}) })
} }
#[tracing::instrument(skip(self, globals))] #[tracing::instrument(skip(self, globals, destination, request))]
pub async fn send_federation_request<T: OutgoingRequest>( pub async fn send_federation_request<T: OutgoingRequest>(
&self, &self,
globals: &crate::database::globals::Globals, globals: &crate::database::globals::Globals,
@ -740,7 +744,7 @@ impl Sending {
response response
} }
#[tracing::instrument(skip(self, globals))] #[tracing::instrument(skip(self, globals, registration, request))]
pub async fn send_appservice_request<T: OutgoingRequest>( pub async fn send_appservice_request<T: OutgoingRequest>(
&self, &self,
globals: &crate::database::globals::Globals, globals: &crate::database::globals::Globals,

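`parse_servercurrentevent` dispatches on the first byte of a queue key: `+` marks an appservice destination and `$` a push destination (as built in `send_push_pdu`), while anything else is a federation server name. A rough sketch of that tag-byte dispatch; the enum and exact key layout are illustrative only.

```rust
// Queue keys are tagged by their first byte: b'+' for an appservice
// destination, b'$' for a push destination, otherwise a server name.
// `Destination` and `parse_destination` are made up for this example.
#[derive(Debug)]
enum Destination {
    Appservice(String),
    Push(Vec<u8>),
    Federation(String),
}

fn parse_destination(key: &[u8]) -> Option<Destination> {
    Some(if let Some(rest) = key.strip_prefix(b"+") {
        Destination::Appservice(String::from_utf8(rest.to_vec()).ok()?)
    } else if let Some(rest) = key.strip_prefix(b"$") {
        Destination::Push(rest.to_vec())
    } else {
        Destination::Federation(String::from_utf8(key.to_vec()).ok()?)
    })
}

fn main() {
    println!("{:?}", parse_destination(b"+appservice_id"));
    println!("{:?}", parse_destination(b"$senderkey"));
    println!("{:?}", parse_destination(b"matrix.example.com"));
}
```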
@ -8,6 +8,7 @@ use ruma::{
DeviceId, DeviceKeyAlgorithm, DeviceKeyId, MilliSecondsSinceUnixEpoch, UInt, UserId, DeviceId, DeviceKeyAlgorithm, DeviceKeyId, MilliSecondsSinceUnixEpoch, UInt, UserId,
}; };
use std::{collections::BTreeMap, convert::TryFrom, mem, sync::Arc}; use std::{collections::BTreeMap, convert::TryFrom, mem, sync::Arc};
use tracing::warn;
use super::abstraction::Tree; use super::abstraction::Tree;
@ -34,11 +35,13 @@ pub struct Users {
impl Users { impl Users {
/// Check if a user has an account on this homeserver. /// Check if a user has an account on this homeserver.
#[tracing::instrument(skip(self, user_id))]
pub fn exists(&self, user_id: &UserId) -> Result<bool> { pub fn exists(&self, user_id: &UserId) -> Result<bool> {
Ok(self.userid_password.get(user_id.as_bytes())?.is_some()) Ok(self.userid_password.get(user_id.as_bytes())?.is_some())
} }
/// Check if the account is deactivated /// Check if the account is deactivated
#[tracing::instrument(skip(self, user_id))]
pub fn is_deactivated(&self, user_id: &UserId) -> Result<bool> { pub fn is_deactivated(&self, user_id: &UserId) -> Result<bool> {
Ok(self Ok(self
.userid_password .userid_password
@ -51,17 +54,20 @@ impl Users {
} }
/// Create a new user account on this homeserver. /// Create a new user account on this homeserver.
#[tracing::instrument(skip(self, user_id, password))]
pub fn create(&self, user_id: &UserId, password: Option<&str>) -> Result<()> { pub fn create(&self, user_id: &UserId, password: Option<&str>) -> Result<()> {
self.set_password(user_id, password)?; self.set_password(user_id, password)?;
Ok(()) Ok(())
} }
/// Returns the number of users registered on this server. /// Returns the number of users registered on this server.
#[tracing::instrument(skip(self))]
pub fn count(&self) -> Result<usize> { pub fn count(&self) -> Result<usize> {
Ok(self.userid_password.iter().count()) Ok(self.userid_password.iter().count())
} }
/// Find out which user an access token belongs to. /// Find out which user an access token belongs to.
#[tracing::instrument(skip(self, token))]
pub fn find_from_token(&self, token: &str) -> Result<Option<(UserId, String)>> { pub fn find_from_token(&self, token: &str) -> Result<Option<(UserId, String)>> {
self.token_userdeviceid self.token_userdeviceid
.get(token.as_bytes())? .get(token.as_bytes())?
@ -89,6 +95,7 @@ impl Users {
} }
/// Returns an iterator over all users on this homeserver. /// Returns an iterator over all users on this homeserver.
#[tracing::instrument(skip(self))]
pub fn iter(&self) -> impl Iterator<Item = Result<UserId>> + '_ { pub fn iter(&self) -> impl Iterator<Item = Result<UserId>> + '_ {
self.userid_password.iter().map(|(bytes, _)| { self.userid_password.iter().map(|(bytes, _)| {
UserId::try_from(utils::string_from_bytes(&bytes).map_err(|_| { UserId::try_from(utils::string_from_bytes(&bytes).map_err(|_| {
@ -99,6 +106,7 @@ impl Users {
} }
/// Returns the password hash for the given user. /// Returns the password hash for the given user.
#[tracing::instrument(skip(self, user_id))]
pub fn password_hash(&self, user_id: &UserId) -> Result<Option<String>> { pub fn password_hash(&self, user_id: &UserId) -> Result<Option<String>> {
self.userid_password self.userid_password
.get(user_id.as_bytes())? .get(user_id.as_bytes())?
@ -110,6 +118,7 @@ impl Users {
} }
/// Hash and set the user's password to the Argon2 hash /// Hash and set the user's password to the Argon2 hash
#[tracing::instrument(skip(self, user_id, password))]
pub fn set_password(&self, user_id: &UserId, password: Option<&str>) -> Result<()> { pub fn set_password(&self, user_id: &UserId, password: Option<&str>) -> Result<()> {
if let Some(password) = password { if let Some(password) = password {
if let Ok(hash) = utils::calculate_hash(&password) { if let Ok(hash) = utils::calculate_hash(&password) {
@ -129,6 +138,7 @@ impl Users {
} }
/// Returns the displayname of a user on this homeserver. /// Returns the displayname of a user on this homeserver.
#[tracing::instrument(skip(self, user_id))]
pub fn displayname(&self, user_id: &UserId) -> Result<Option<String>> { pub fn displayname(&self, user_id: &UserId) -> Result<Option<String>> {
self.userid_displayname self.userid_displayname
.get(user_id.as_bytes())? .get(user_id.as_bytes())?
@ -140,6 +150,7 @@ impl Users {
} }
/// Sets a new displayname or removes it if displayname is None. You still need to notify all rooms of this change. /// Sets a new displayname or removes it if displayname is None. You still need to notify all rooms of this change.
#[tracing::instrument(skip(self, user_id, displayname))]
pub fn set_displayname(&self, user_id: &UserId, displayname: Option<String>) -> Result<()> { pub fn set_displayname(&self, user_id: &UserId, displayname: Option<String>) -> Result<()> {
if let Some(displayname) = displayname { if let Some(displayname) = displayname {
self.userid_displayname self.userid_displayname
@ -152,6 +163,7 @@ impl Users {
} }
/// Get the avatar_url of a user. /// Get the avatar_url of a user.
#[tracing::instrument(skip(self, user_id))]
pub fn avatar_url(&self, user_id: &UserId) -> Result<Option<MxcUri>> { pub fn avatar_url(&self, user_id: &UserId) -> Result<Option<MxcUri>> {
self.userid_avatarurl self.userid_avatarurl
.get(user_id.as_bytes())? .get(user_id.as_bytes())?
@ -164,6 +176,7 @@ impl Users {
} }
/// Sets a new avatar_url or removes it if avatar_url is None. /// Sets a new avatar_url or removes it if avatar_url is None.
#[tracing::instrument(skip(self, user_id, avatar_url))]
pub fn set_avatar_url(&self, user_id: &UserId, avatar_url: Option<MxcUri>) -> Result<()> { pub fn set_avatar_url(&self, user_id: &UserId, avatar_url: Option<MxcUri>) -> Result<()> {
if let Some(avatar_url) = avatar_url { if let Some(avatar_url) = avatar_url {
self.userid_avatarurl self.userid_avatarurl
@ -176,6 +189,7 @@ impl Users {
} }
/// Get the blurhash of a user. /// Get the blurhash of a user.
#[tracing::instrument(skip(self, user_id))]
pub fn blurhash(&self, user_id: &UserId) -> Result<Option<String>> { pub fn blurhash(&self, user_id: &UserId) -> Result<Option<String>> {
self.userid_blurhash self.userid_blurhash
.get(user_id.as_bytes())? .get(user_id.as_bytes())?
@ -189,6 +203,7 @@ impl Users {
} }
/// Sets a new blurhash or removes it if blurhash is None. /// Sets a new blurhash or removes it if blurhash is None.
#[tracing::instrument(skip(self, user_id, blurhash))]
pub fn set_blurhash(&self, user_id: &UserId, blurhash: Option<String>) -> Result<()> { pub fn set_blurhash(&self, user_id: &UserId, blurhash: Option<String>) -> Result<()> {
if let Some(blurhash) = blurhash { if let Some(blurhash) = blurhash {
self.userid_blurhash self.userid_blurhash
@ -201,6 +216,7 @@ impl Users {
} }
/// Adds a new device to a user. /// Adds a new device to a user.
#[tracing::instrument(skip(self, user_id, device_id, token, initial_device_display_name))]
pub fn create_device( pub fn create_device(
&self, &self,
user_id: &UserId, user_id: &UserId,
@ -235,6 +251,7 @@ impl Users {
} }
/// Removes a device from a user. /// Removes a device from a user.
#[tracing::instrument(skip(self, user_id, device_id))]
pub fn remove_device(&self, user_id: &UserId, device_id: &DeviceId) -> Result<()> { pub fn remove_device(&self, user_id: &UserId, device_id: &DeviceId) -> Result<()> {
let mut userdeviceid = user_id.as_bytes().to_vec(); let mut userdeviceid = user_id.as_bytes().to_vec();
userdeviceid.push(0xff); userdeviceid.push(0xff);
@ -265,6 +282,7 @@ impl Users {
} }
/// Returns an iterator over all device ids of this user. /// Returns an iterator over all device ids of this user.
#[tracing::instrument(skip(self, user_id))]
pub fn all_device_ids<'a>( pub fn all_device_ids<'a>(
&'a self, &'a self,
user_id: &UserId, user_id: &UserId,
@ -287,6 +305,7 @@ impl Users {
} }
/// Replaces the access token of one device. /// Replaces the access token of one device.
#[tracing::instrument(skip(self, user_id, device_id, token))]
pub fn set_token(&self, user_id: &UserId, device_id: &DeviceId, token: &str) -> Result<()> { pub fn set_token(&self, user_id: &UserId, device_id: &DeviceId, token: &str) -> Result<()> {
let mut userdeviceid = user_id.as_bytes().to_vec(); let mut userdeviceid = user_id.as_bytes().to_vec();
userdeviceid.push(0xff); userdeviceid.push(0xff);
@ -310,6 +329,14 @@ impl Users {
Ok(()) Ok(())
} }
#[tracing::instrument(skip(
self,
user_id,
device_id,
one_time_key_key,
one_time_key_value,
globals
))]
pub fn add_one_time_key( pub fn add_one_time_key(
&self, &self,
user_id: &UserId, user_id: &UserId,
@ -346,7 +373,7 @@ impl Users {
Ok(()) Ok(())
} }
#[tracing::instrument(skip(self))] #[tracing::instrument(skip(self, user_id))]
pub fn last_one_time_keys_update(&self, user_id: &UserId) -> Result<u64> { pub fn last_one_time_keys_update(&self, user_id: &UserId) -> Result<u64> {
self.userid_lastonetimekeyupdate self.userid_lastonetimekeyupdate
.get(&user_id.as_bytes())? .get(&user_id.as_bytes())?
@ -358,6 +385,7 @@ impl Users {
.unwrap_or(Ok(0)) .unwrap_or(Ok(0))
} }
#[tracing::instrument(skip(self, user_id, device_id, key_algorithm, globals))]
pub fn take_one_time_key( pub fn take_one_time_key(
&self, &self,
user_id: &UserId, user_id: &UserId,
@ -397,7 +425,7 @@ impl Users {
.transpose() .transpose()
} }
#[tracing::instrument(skip(self))] #[tracing::instrument(skip(self, user_id, device_id))]
pub fn count_one_time_keys( pub fn count_one_time_keys(
&self, &self,
user_id: &UserId, user_id: &UserId,
@ -430,6 +458,7 @@ impl Users {
Ok(counts) Ok(counts)
} }
#[tracing::instrument(skip(self, user_id, device_id, device_keys, rooms, globals))]
pub fn add_device_keys( pub fn add_device_keys(
&self, &self,
user_id: &UserId, user_id: &UserId,
@ -452,6 +481,14 @@ impl Users {
Ok(()) Ok(())
} }
#[tracing::instrument(skip(
self,
master_key,
self_signing_key,
user_signing_key,
rooms,
globals
))]
pub fn add_cross_signing_keys( pub fn add_cross_signing_keys(
&self, &self,
user_id: &UserId, user_id: &UserId,
@ -552,6 +589,7 @@ impl Users {
Ok(()) Ok(())
} }
#[tracing::instrument(skip(self, target_id, key_id, signature, sender_id, rooms, globals))]
pub fn sign_key( pub fn sign_key(
&self, &self,
target_id: &UserId, target_id: &UserId,
@ -595,7 +633,7 @@ impl Users {
Ok(()) Ok(())
} }
#[tracing::instrument(skip(self))] #[tracing::instrument(skip(self, user_or_room_id, from, to))]
pub fn keys_changed<'a>( pub fn keys_changed<'a>(
&'a self, &'a self,
user_or_room_id: &str, user_or_room_id: &str,
@ -608,9 +646,24 @@ impl Users {
let mut start = prefix.clone(); let mut start = prefix.clone();
start.extend_from_slice(&(from + 1).to_be_bytes()); start.extend_from_slice(&(from + 1).to_be_bytes());
let to = to.unwrap_or(u64::MAX);
self.keychangeid_userid self.keychangeid_userid
.iter_from(&start, false) .iter_from(&start, false)
.take_while(move |(k, _)| k.starts_with(&prefix)) .take_while(move |(k, _)| {
k.starts_with(&prefix)
&& if let Some(current) = k.splitn(2, |&b| b == 0xff).nth(1) {
if let Ok(c) = utils::u64_from_bytes(current) {
c <= to
} else {
warn!("BadDatabase: Could not parse keychangeid_userid bytes");
false
}
} else {
warn!("BadDatabase: Could not parse keychangeid_userid");
false
}
})
.map(|(_, bytes)| { .map(|(_, bytes)| {
UserId::try_from(utils::string_from_bytes(&bytes).map_err(|_| { UserId::try_from(utils::string_from_bytes(&bytes).map_err(|_| {
Error::bad_database("User ID in devicekeychangeid_userid is invalid unicode.") Error::bad_database("User ID in devicekeychangeid_userid is invalid unicode.")
@ -619,6 +672,7 @@ impl Users {
}) })
} }
#[tracing::instrument(skip(self, user_id, rooms, globals))]
fn mark_device_key_update( fn mark_device_key_update(
&self, &self,
user_id: &UserId, user_id: &UserId,
@ -650,6 +704,7 @@ impl Users {
Ok(()) Ok(())
} }
#[tracing::instrument(skip(self, user_id, device_id))]
pub fn get_device_keys( pub fn get_device_keys(
&self, &self,
user_id: &UserId, user_id: &UserId,
@ -666,6 +721,7 @@ impl Users {
}) })
} }
#[tracing::instrument(skip(self, user_id, allowed_signatures))]
pub fn get_master_key<F: Fn(&UserId) -> bool>( pub fn get_master_key<F: Fn(&UserId) -> bool>(
&self, &self,
user_id: &UserId, user_id: &UserId,
@ -693,6 +749,7 @@ impl Users {
}) })
} }
#[tracing::instrument(skip(self, user_id, allowed_signatures))]
pub fn get_self_signing_key<F: Fn(&UserId) -> bool>( pub fn get_self_signing_key<F: Fn(&UserId) -> bool>(
&self, &self,
user_id: &UserId, user_id: &UserId,
@ -720,6 +777,7 @@ impl Users {
}) })
} }
#[tracing::instrument(skip(self, user_id))]
pub fn get_user_signing_key(&self, user_id: &UserId) -> Result<Option<CrossSigningKey>> { pub fn get_user_signing_key(&self, user_id: &UserId) -> Result<Option<CrossSigningKey>> {
self.userid_usersigningkeyid self.userid_usersigningkeyid
.get(user_id.as_bytes())? .get(user_id.as_bytes())?
@ -732,6 +790,15 @@ impl Users {
}) })
} }
#[tracing::instrument(skip(
self,
sender,
target_user_id,
target_device_id,
event_type,
content,
globals
))]
pub fn add_to_device_event( pub fn add_to_device_event(
&self, &self,
sender: &UserId, sender: &UserId,
@ -759,7 +826,7 @@ impl Users {
Ok(()) Ok(())
} }
#[tracing::instrument(skip(self))] #[tracing::instrument(skip(self, user_id, device_id))]
pub fn get_to_device_events( pub fn get_to_device_events(
&self, &self,
user_id: &UserId, user_id: &UserId,
@ -782,7 +849,7 @@ impl Users {
Ok(events) Ok(events)
} }
#[tracing::instrument(skip(self))] #[tracing::instrument(skip(self, user_id, device_id, until))]
pub fn remove_to_device_events( pub fn remove_to_device_events(
&self, &self,
user_id: &UserId, user_id: &UserId,
@ -817,6 +884,7 @@ impl Users {
Ok(()) Ok(())
} }
#[tracing::instrument(skip(self, user_id, device_id, device))]
pub fn update_device_metadata( pub fn update_device_metadata(
&self, &self,
user_id: &UserId, user_id: &UserId,
@ -842,6 +910,7 @@ impl Users {
} }
/// Get device metadata. /// Get device metadata.
#[tracing::instrument(skip(self, user_id, device_id))]
pub fn get_device_metadata( pub fn get_device_metadata(
&self, &self,
user_id: &UserId, user_id: &UserId,
@ -860,6 +929,7 @@ impl Users {
}) })
} }
#[tracing::instrument(skip(self, user_id))]
pub fn get_devicelist_version(&self, user_id: &UserId) -> Result<Option<u64>> { pub fn get_devicelist_version(&self, user_id: &UserId) -> Result<Option<u64>> {
self.userid_devicelistversion self.userid_devicelistversion
.get(user_id.as_bytes())? .get(user_id.as_bytes())?
@ -870,6 +940,7 @@ impl Users {
}) })
} }
#[tracing::instrument(skip(self, user_id))]
pub fn all_devices_metadata<'a>( pub fn all_devices_metadata<'a>(
&'a self, &'a self,
user_id: &UserId, user_id: &UserId,
@ -886,6 +957,7 @@ impl Users {
} }
/// Deactivate account /// Deactivate account
#[tracing::instrument(skip(self, user_id))]
pub fn deactivate_account(&self, user_id: &UserId) -> Result<()> { pub fn deactivate_account(&self, user_id: &UserId) -> Result<()> {
// Remove all associated devices // Remove all associated devices
for device_id in self.all_device_ids(user_id) { for device_id in self.all_device_ids(user_id) {

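The new `take_while` in `keys_changed` splits each `keychangeid_userid` key at the first `0xff` and interprets the remainder as a big-endian count so iteration can stop at `to`. A sketch of that parse under the assumed key layout `<id> 0xff <8-byte count>`; `make_key` and `count_of` are illustrative helpers.

```rust
use std::convert::TryInto;

// Key layout assumed here: `<id bytes> 0xff <8-byte big-endian count>`.
fn make_key(id: &str, count: u64) -> Vec<u8> {
    let mut key = id.as_bytes().to_vec();
    key.push(0xff);
    key.extend_from_slice(&count.to_be_bytes());
    key
}

// Extract the count after the 0xff separator, as the take_while does.
fn count_of(key: &[u8]) -> Option<u64> {
    let bytes = key.splitn(2, |&b| b == 0xff).nth(1)?;
    Some(u64::from_be_bytes(bytes.try_into().ok()?))
}

fn main() {
    let keys: Vec<Vec<u8>> = (1..=5).map(|c| make_key("!room:example.com", c)).collect();
    let (from, to) = (1u64, 3u64);

    // Keep only changes in the half-open window (from, to].
    let in_range: Vec<u64> = keys
        .iter()
        .filter_map(|k| count_of(k))
        .filter(|&c| c > from && c <= to)
        .collect();

    assert_eq!(in_range, vec![2, 3]);
    println!("counts in range: {:?}", in_range);
}
```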
@ -1,4 +1,3 @@
use log::warn;
use ruma::{ use ruma::{
api::client::{ api::client::{
error::{Error as RumaError, ErrorKind}, error::{Error as RumaError, ErrorKind},
@ -7,17 +6,18 @@ use ruma::{
ServerName, ServerName,
}; };
use thiserror::Error; use thiserror::Error;
use tracing::warn;
#[cfg(feature = "conduit_bin")] #[cfg(feature = "conduit_bin")]
use { use {
crate::RumaResponse, crate::RumaResponse,
http::StatusCode, http::StatusCode,
log::error,
rocket::{ rocket::{
response::{self, Responder}, response::{self, Responder},
Request, Request,
}, },
ruma::api::client::r0::uiaa::UiaaResponse, ruma::api::client::r0::uiaa::UiaaResponse,
tracing::error,
}; };
pub type Result<T> = std::result::Result<T, Error>; pub type Result<T> = std::result::Result<T, Error>;

@ -17,6 +17,7 @@ use std::sync::Arc;
use database::Config; use database::Config;
pub use database::Database; pub use database::Database;
pub use error::{Error, Result}; pub use error::{Error, Result};
use opentelemetry::trace::Tracer;
pub use pdu::PduEvent; pub use pdu::PduEvent;
pub use rocket::State; pub use rocket::State;
use ruma::api::client::error::ErrorKind; use ruma::api::client::error::ErrorKind;
@ -31,8 +32,7 @@ use rocket::{
routes, Request, routes, Request,
}; };
use tokio::sync::RwLock; use tokio::sync::RwLock;
use tracing::span; use tracing_subscriber::{prelude::*, EnvFilter};
use tracing_subscriber::{prelude::*, Registry};
fn setup_rocket(config: Figment, data: Arc<RwLock<Database>>) -> rocket::Rocket<rocket::Build> { fn setup_rocket(config: Figment, data: Arc<RwLock<Database>>) -> rocket::Rocket<rocket::Build> {
rocket::custom(config) rocket::custom(config)
@ -201,38 +201,57 @@ async fn main() {
.extract::<Config>() .extract::<Config>()
.expect("It looks like your config is invalid. Please take a look at the error"); .expect("It looks like your config is invalid. Please take a look at the error");
let mut _span: Option<span::Span> = None; let start = async {
let mut _enter: Option<span::Entered<'_>> = None; config.warn_deprecated();
let db = Database::load_or_create(&config)
.await
.expect("config is valid");
let rocket = setup_rocket(raw_config, Arc::clone(&db))
.ignite()
.await
.unwrap();
Database::start_on_shutdown_tasks(db, rocket.shutdown()).await;
rocket.launch().await.unwrap();
};
if config.allow_jaeger { if config.allow_jaeger {
let (tracer, _uninstall) = opentelemetry_jaeger::new_pipeline() let tracer = opentelemetry_jaeger::new_pipeline()
.with_service_name("conduit") .with_service_name("conduit")
.install() .install_simple()
.unwrap(); .unwrap();
let telemetry = tracing_opentelemetry::layer().with_tracer(tracer);
Registry::default().with(telemetry).try_init().unwrap();
_span = Some(span!(tracing::Level::INFO, "app_start", work_units = 2)); let span = tracer.start("conduit");
_enter = Some(_span.as_ref().unwrap().enter()); start.await;
drop(span);
} else { } else {
std::env::set_var("RUST_LOG", &config.log); std::env::set_var("RUST_LOG", &config.log);
tracing_subscriber::fmt::init();
}
config.warn_deprecated();
let db = Database::load_or_create(&config) let registry = tracing_subscriber::Registry::default();
.await if config.tracing_flame {
.expect("config is valid"); let (flame_layer, _guard) =
tracing_flame::FlameLayer::with_file("./tracing.folded").unwrap();
let flame_layer = flame_layer.with_empty_samples(false);
let rocket = setup_rocket(raw_config, Arc::clone(&db)) let filter_layer = EnvFilter::new("trace,h2=off");
.ignite()
.await
.unwrap();
Database::start_on_shutdown_tasks(db, rocket.shutdown()).await; let subscriber = registry.with(filter_layer).with(flame_layer);
tracing::subscriber::set_global_default(subscriber).unwrap();
start.await;
} else {
let fmt_layer = tracing_subscriber::fmt::Layer::new();
let filter_layer = EnvFilter::try_from_default_env()
.or_else(|_| EnvFilter::try_new("info"))
.unwrap();
rocket.launch().await.unwrap(); let subscriber = registry.with(filter_layer).with(fmt_layer);
tracing::subscriber::set_global_default(subscriber).unwrap();
start.await;
}
}
} }
#[catch(404)] #[catch(404)]

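Extracted from the doubled-up diff above, this is roughly what the new non-Jaeger, non-flame logging branch in `main()` does: compose a `Registry` with an `EnvFilter` (defaulting to `info`) and a fmt layer, then install it globally. It assumes the same `tracing`/`tracing-subscriber` versions the commit pins; the flame branch instead swaps the fmt layer for `tracing_flame::FlameLayer::with_file("./tracing.folded")`.

```rust
use tracing_subscriber::{layer::SubscriberExt, EnvFilter, Registry};

fn main() {
    // Default to "info" unless RUST_LOG overrides it, as in the diff.
    let filter_layer = EnvFilter::try_from_default_env()
        .or_else(|_| EnvFilter::try_new("info"))
        .unwrap();
    let fmt_layer = tracing_subscriber::fmt::Layer::new();

    // Layer the filter and formatter onto a Registry and install it.
    let subscriber = Registry::default().with(filter_layer).with(fmt_layer);
    tracing::subscriber::set_global_default(subscriber).expect("set global subscriber");

    tracing::info!("tracing initialized");
}
```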
@ -1,5 +1,4 @@
use crate::Error; use crate::Error;
use log::error;
use ruma::{ use ruma::{
events::{ events::{
pdu::EventHash, room::member::MemberEventContent, AnyEphemeralRoomEvent, pdu::EventHash, room::member::MemberEventContent, AnyEphemeralRoomEvent,
@ -13,6 +12,7 @@ use ruma::{
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_json::json; use serde_json::json;
use std::{cmp::Ordering, collections::BTreeMap, convert::TryFrom}; use std::{cmp::Ordering, collections::BTreeMap, convert::TryFrom};
use tracing::error;
#[derive(Clone, Deserialize, Serialize, Debug)] #[derive(Clone, Deserialize, Serialize, Debug)]
pub struct PduEvent { pub struct PduEvent {

@ -10,7 +10,6 @@ use std::ops::Deref;
#[cfg(feature = "conduit_bin")] #[cfg(feature = "conduit_bin")]
use { use {
crate::server_server, crate::server_server,
log::{debug, warn},
rocket::{ rocket::{
data::{self, ByteUnit, Data, FromData}, data::{self, ByteUnit, Data, FromData},
http::Status, http::Status,
@ -23,6 +22,7 @@ use {
std::collections::BTreeMap, std::collections::BTreeMap,
std::convert::TryFrom, std::convert::TryFrom,
std::io::Cursor, std::io::Cursor,
tracing::{debug, warn},
}; };
/// This struct converts rocket requests into ruma structs by converting them into http requests /// This struct converts rocket requests into ruma structs by converting them into http requests
@ -45,6 +45,7 @@ where
{ {
type Error = (); type Error = ();
#[tracing::instrument(skip(request, data))]
async fn from_data( async fn from_data(
request: &'a Request<'_>, request: &'a Request<'_>,
data: Data<'a>, data: Data<'a>,
@ -256,7 +257,10 @@ where
match ruma::signatures::verify_json(&pub_key_map, &request_map) { match ruma::signatures::verify_json(&pub_key_map, &request_map) {
Ok(()) => (None, None, Some(origin), false), Ok(()) => (None, None, Some(origin), false),
Err(e) => { Err(e) => {
warn!("Failed to verify json request from {}: {}", origin, e); warn!(
"Failed to verify json request from {}: {}\n{:?}",
origin, e, request_map
);
if request.uri().to_string().contains('@') { if request.uri().to_string().contains('@') {
warn!("Request uri contained '@' character. Make sure your reverse proxy gives Conduit the raw uri (apache: use nocanon)"); warn!("Request uri contained '@' character. Make sure your reverse proxy gives Conduit the raw uri (apache: use nocanon)");

@ -5,7 +5,6 @@ use crate::{
}; };
use get_profile_information::v1::ProfileField; use get_profile_information::v1::ProfileField;
use http::header::{HeaderValue, AUTHORIZATION, HOST}; use http::header::{HeaderValue, AUTHORIZATION, HOST};
use log::{debug, error, info, trace, warn};
use regex::Regex; use regex::Regex;
use rocket::response::content::Json; use rocket::response::content::Json;
use ruma::{ use ruma::{
@ -63,7 +62,8 @@ use std::{
sync::{Arc, RwLock}, sync::{Arc, RwLock},
time::{Duration, Instant, SystemTime}, time::{Duration, Instant, SystemTime},
}; };
use tokio::sync::Semaphore; use tokio::sync::{MutexGuard, Semaphore};
use tracing::{debug, error, info, trace, warn};
#[cfg(feature = "conduit_bin")] #[cfg(feature = "conduit_bin")]
use rocket::{get, post, put}; use rocket::{get, post, put};
@ -838,6 +838,7 @@ type AsyncRecursiveResult<'a, T, E> = Pin<Box<dyn Future<Output = StdResult<T, E
/// it /// it
/// 14. Use state resolution to find new room state /// 14. Use state resolution to find new room state
// We use some AsyncRecursiveResult hacks here so we can call this async function recursively // We use some AsyncRecursiveResult hacks here so we can call this async function recursively
#[tracing::instrument(skip(value, is_timeline_event, db, pub_key_map))]
pub fn handle_incoming_pdu<'a>( pub fn handle_incoming_pdu<'a>(
origin: &'a ServerName, origin: &'a ServerName,
event_id: &'a EventId, event_id: &'a EventId,
@ -1156,6 +1157,18 @@ pub fn handle_incoming_pdu<'a>(
} }
debug!("Auth check succeeded."); debug!("Auth check succeeded.");
// We start looking at the current room state now, so let's lock the room
let mutex = Arc::clone(
db.globals
.roomid_mutex
.write()
.unwrap()
.entry(room_id.clone())
.or_default(),
);
let mutex_lock = mutex.lock().await;
// Now we calculate the set of extremities this room has after the incoming event has been // Now we calculate the set of extremities this room has after the incoming event has been
// applied. We start with the previous extremities (aka leaves) // applied. We start with the previous extremities (aka leaves)
let mut extremities = db let mut extremities = db
@ -1170,8 +1183,8 @@ pub fn handle_incoming_pdu<'a>(
} }
} }
// Only keep those extremities we don't have in our timeline yet // Only keep those extremities that were not referenced yet
extremities.retain(|id| !matches!(db.rooms.get_non_outlier_pdu_json(id), Ok(Some(_)))); extremities.retain(|id| !matches!(db.rooms.is_event_referenced(&room_id, id), Ok(true)));
let mut extremity_statehashes = Vec::new(); let mut extremity_statehashes = Vec::new();
@ -1301,9 +1314,11 @@ pub fn handle_incoming_pdu<'a>(
return Err("State resolution failed, either an event could not be found or deserialization".into()); return Err("State resolution failed, either an event could not be found or deserialization".into());
} }
}; };
state state
}; };
debug!("starting soft fail auth check");
// 13. Check if the event passes auth based on the "current state" of the room, if not "soft fail" it // 13. Check if the event passes auth based on the "current state" of the room, if not "soft fail" it
let soft_fail = !state_res::event_auth::auth_check( let soft_fail = !state_res::event_auth::auth_check(
&room_version, &room_version,
@ -1322,11 +1337,11 @@ pub fn handle_incoming_pdu<'a>(
pdu_id = Some( pdu_id = Some(
append_incoming_pdu( append_incoming_pdu(
&db, &db,
&room_id,
&incoming_pdu, &incoming_pdu,
val, val,
extremities, extremities,
&state_at_incoming_event, &state_at_incoming_event,
&mutex_lock,
) )
.await .await
.map_err(|_| "Failed to add pdu to db.".to_owned())?, .map_err(|_| "Failed to add pdu to db.".to_owned())?,
@ -1350,6 +1365,7 @@ pub fn handle_incoming_pdu<'a>(
} }
// Event has passed all auth/stateres checks // Event has passed all auth/stateres checks
drop(mutex_lock);
Ok(pdu_id) Ok(pdu_id)
}) })
} }
@ -1626,25 +1642,15 @@ pub(crate) async fn fetch_signing_keys(
/// Append the incoming event setting the state snapshot to the state from the /// Append the incoming event setting the state snapshot to the state from the
/// server that sent the event. /// server that sent the event.
#[tracing::instrument(skip(db))] #[tracing::instrument(skip(db, pdu, pdu_json, new_room_leaves, state, _mutex_lock))]
async fn append_incoming_pdu( async fn append_incoming_pdu(
db: &Database, db: &Database,
room_id: &RoomId,
pdu: &PduEvent, pdu: &PduEvent,
pdu_json: CanonicalJsonObject, pdu_json: CanonicalJsonObject,
new_room_leaves: HashSet<EventId>, new_room_leaves: HashSet<EventId>,
state: &StateMap<Arc<PduEvent>>, state: &StateMap<Arc<PduEvent>>,
_mutex_lock: &MutexGuard<'_, ()>, // Take the mutex guard to make sure callers hold the room mutex
) -> Result<Vec<u8>> { ) -> Result<Vec<u8>> {
let mutex = Arc::clone(
db.globals
.roomid_mutex
.write()
.unwrap()
.entry(room_id.clone())
.or_default(),
);
let mutex_lock = mutex.lock().await;
// We append to state before appending the pdu, so we don't have a moment in time with the // We append to state before appending the pdu, so we don't have a moment in time with the
// pdu without its state. This is okay because append_pdu can't fail. // pdu without its state. This is okay because append_pdu can't fail.
db.rooms db.rooms
@ -1657,8 +1663,6 @@ async fn append_incoming_pdu(
&db, &db,
)?; )?;
drop(mutex_lock);
for appservice in db.appservice.iter_all()?.filter_map(|r| r.ok()) { for appservice in db.appservice.iter_all()?.filter_map(|r| r.ok()) {
if let Some(namespaces) = appservice.1.get("namespaces") { if let Some(namespaces) = appservice.1.get("namespaces") {
let users = namespaces let users = namespaces

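`handle_incoming_pdu` now takes a per-room lock before computing extremities and passes the guard into `append_incoming_pdu`. A sketch of that pattern with a lazily populated map of `tokio::sync::Mutex`es keyed by room id; the map type and function are assumptions for illustration, and it needs `tokio` with the `rt` and `macros` features.

```rust
use std::collections::HashMap;
use std::sync::{Arc, RwLock};
use tokio::sync::Mutex;

// One async mutex per room, created lazily; names are illustrative.
type RoomMutexMap = RwLock<HashMap<String, Arc<Mutex<()>>>>;

async fn with_room_lock(map: &RoomMutexMap, room_id: &str) {
    // Clone the Arc out so the std RwLock guard is dropped before awaiting.
    let mutex = Arc::clone(
        map.write()
            .unwrap()
            .entry(room_id.to_owned())
            .or_default(),
    );
    let _guard = mutex.lock().await;
    // ... compute extremities and append the PDU while the guard is held ...
}

#[tokio::main]
async fn main() {
    let map: RoomMutexMap = RwLock::new(HashMap::new());
    with_room_lock(&map, "!room:example.com").await;
    println!("done");
}
```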
@ -9,6 +9,7 @@ use std::{
time::{SystemTime, UNIX_EPOCH}, time::{SystemTime, UNIX_EPOCH},
}; };
#[tracing::instrument]
pub fn millis_since_unix_epoch() -> u64 { pub fn millis_since_unix_epoch() -> u64 {
SystemTime::now() SystemTime::now()
.duration_since(UNIX_EPOCH) .duration_since(UNIX_EPOCH)
@ -48,16 +49,19 @@ pub fn generate_keypair() -> Vec<u8> {
} }
/// Parses the bytes into a u64. /// Parses the bytes into a u64.
#[tracing::instrument(skip(bytes))]
pub fn u64_from_bytes(bytes: &[u8]) -> Result<u64, std::array::TryFromSliceError> { pub fn u64_from_bytes(bytes: &[u8]) -> Result<u64, std::array::TryFromSliceError> {
let array: [u8; 8] = bytes.try_into()?; let array: [u8; 8] = bytes.try_into()?;
Ok(u64::from_be_bytes(array)) Ok(u64::from_be_bytes(array))
} }
/// Parses the bytes into a string. /// Parses the bytes into a string.
#[tracing::instrument(skip(bytes))]
pub fn string_from_bytes(bytes: &[u8]) -> Result<String, std::string::FromUtf8Error> { pub fn string_from_bytes(bytes: &[u8]) -> Result<String, std::string::FromUtf8Error> {
String::from_utf8(bytes.to_vec()) String::from_utf8(bytes.to_vec())
} }
#[tracing::instrument(skip(length))]
pub fn random_string(length: usize) -> String { pub fn random_string(length: usize) -> String {
thread_rng() thread_rng()
.sample_iter(&rand::distributions::Alphanumeric) .sample_iter(&rand::distributions::Alphanumeric)
@ -67,6 +71,7 @@ pub fn random_string(length: usize) -> String {
} }
/// Calculate a new hash for the given password /// Calculate a new hash for the given password
#[tracing::instrument(skip(password))]
pub fn calculate_hash(password: &str) -> Result<String, argon2::Error> { pub fn calculate_hash(password: &str) -> Result<String, argon2::Error> {
let hashing_config = Config { let hashing_config = Config {
variant: Variant::Argon2id, variant: Variant::Argon2id,
@ -77,6 +82,7 @@ pub fn calculate_hash(password: &str) -> Result<String, argon2::Error> {
argon2::hash_encoded(password.as_bytes(), salt.as_bytes(), &hashing_config) argon2::hash_encoded(password.as_bytes(), salt.as_bytes(), &hashing_config)
} }
#[tracing::instrument(skip(iterators, check_order))]
pub fn common_elements( pub fn common_elements(
mut iterators: impl Iterator<Item = impl Iterator<Item = Vec<u8>>>, mut iterators: impl Iterator<Item = impl Iterator<Item = Vec<u8>>>,
check_order: impl Fn(&[u8], &[u8]) -> Ordering, check_order: impl Fn(&[u8], &[u8]) -> Ordering,
@ -104,6 +110,7 @@ pub fn common_elements(
/// Fallible conversion from any value that implements `Serialize` to a `CanonicalJsonObject`. /// Fallible conversion from any value that implements `Serialize` to a `CanonicalJsonObject`.
/// ///
/// `value` must serialize to a `serde_json::Value::Object`. /// `value` must serialize to a `serde_json::Value::Object`.
#[tracing::instrument(skip(value))]
pub fn to_canonical_object<T: serde::Serialize>( pub fn to_canonical_object<T: serde::Serialize>(
value: T, value: T,
) -> Result<CanonicalJsonObject, CanonicalJsonError> { ) -> Result<CanonicalJsonObject, CanonicalJsonError> {
@ -117,6 +124,7 @@ pub fn to_canonical_object<T: serde::Serialize>(
} }
} }
#[tracing::instrument(skip(deserializer))]
pub fn deserialize_from_str< pub fn deserialize_from_str<
'de, 'de,
D: serde::de::Deserializer<'de>, D: serde::de::Deserializer<'de>,

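Most of this commit is `#[tracing::instrument]` annotations like the ones on these utility functions. A tiny example of the attribute with `skip`, which keeps the named argument out of the span's recorded fields; `calculate_hash` here is a stand-in with a placeholder body, not the real argon2 helper.

```rust
use tracing::instrument;

// `skip(password)` keeps the argument out of the recorded span fields;
// the diff uses skip for byte buffers, secrets and database handles.
#[instrument(skip(password))]
fn calculate_hash(password: &str) -> usize {
    // Placeholder body; the real utility hashes the password with argon2.
    password.len()
}

fn main() {
    // Simple fmt subscriber so the span is actually visible.
    tracing_subscriber::fmt::init();
    let _len = calculate_hash("correct horse battery staple");
}
```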