Fixed more compile-time errors

Nyaaori/refactor-next
Timo Kösters authored 2 years ago; committed by Nyaaori
parent 785ddfc4aa
commit bd8b616ca0
GPG Key ID: E7819C3ED4D1F82E
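
The changes below all follow one pattern: call sites stop going through the old flat `services().rooms` surface and instead address the sub-service that owns the data (`alias`, `timeline`, `state_cache`, `state_accessor`, `lazy_loading`, `short`, `state`, `directory`, `metadata`, ...), while the storage side moves behind per-service `Data` traits implemented on `KeyValueDatabase`. A minimal sketch of the new call shape, using only accessors that appear in this diff (the wrapper function itself is hypothetical):

fn is_alias_joined(sender_user: &ruma::UserId, alias: &ruma::RoomAliasId) -> crate::Result<bool> {
    // Aliases are resolved by the alias sub-service now (was: services().rooms.id_from_alias).
    let room_id = match crate::services().rooms.alias.resolve_local_alias(alias)? {
        Some(id) => id,
        None => return Ok(false),
    };
    // Membership checks moved to the state_cache sub-service (was: services().rooms.is_joined).
    crate::services().rooms.state_cache.is_joined(sender_user, &room_id)
}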

@@ -2,7 +2,7 @@ use std::sync::Arc;
use super::{DEVICE_ID_LENGTH, SESSION_ID_LENGTH, TOKEN_LENGTH};
use crate::{
-utils, Error, Result, Ruma, services,
+utils, Error, Result, Ruma, services, api::client_server,
};
use ruma::{
api::client::{
@@ -381,7 +381,7 @@ pub async fn deactivate_route(
}
// Make the user leave all rooms before deactivation
-services().rooms.leave_all_rooms(&sender_user).await?;
+client_server::leave_all_rooms(&sender_user).await?;
// Remove devices and mark account as deactivated
services().users.deactivate_account(sender_user)?;

@@ -25,12 +25,12 @@ pub async fn create_alias_route(
));
}
-if services().rooms.id_from_alias(&body.room_alias)?.is_some() {
+if services().rooms.alias.resolve_local_alias(&body.room_alias)?.is_some() {
return Err(Error::Conflict("Alias already exists."));
}
-services().rooms
-.set_alias(&body.room_alias, Some(&body.room_id))?;
+services().rooms.alias
+.set_alias(&body.room_alias, &body.room_id)?;
Ok(create_alias::v3::Response::new())
}
@@ -51,7 +51,7 @@ pub async fn delete_alias_route(
));
}
-services().rooms.set_alias(&body.room_alias, None)?;
+services().rooms.alias.remove_alias(&body.room_alias)?;
// TODO: update alt_aliases?
@@ -88,7 +88,7 @@ pub(crate) async fn get_alias_helper(
}
let mut room_id = None;
-match services().rooms.id_from_alias(room_alias)? {
+match services().rooms.alias.resolve_local_alias(room_alias)? {
Some(r) => room_id = Some(r),
None => {
for (_id, registration) in services().appservice.all()? {
@@ -115,7 +115,7 @@ pub(crate) async fn get_alias_helper(
.await
.is_ok()
{
-room_id = Some(services().rooms.id_from_alias(room_alias)?.ok_or_else(|| {
+room_id = Some(services().rooms.alias.resolve_local_alias(room_alias)?.ok_or_else(|| {
Error::bad_config("Appservice lied to us. Room does not exist.")
})?);
break;

@@ -29,16 +29,18 @@ pub async fn get_context_route(
let base_pdu_id = services()
.rooms
+.timeline
.get_pdu_id(&body.event_id)?
.ok_or(Error::BadRequest(
ErrorKind::NotFound,
"Base event id not found.",
))?;
-let base_token = services().rooms.pdu_count(&base_pdu_id)?;
+let base_token = services().rooms.timeline.pdu_count(&base_pdu_id)?;
let base_event = services()
.rooms
+.timeline
.get_pdu_from_id(&base_pdu_id)?
.ok_or(Error::BadRequest(
ErrorKind::NotFound,
@@ -47,14 +49,14 @@ pub async fn get_context_route(
let room_id = base_event.room_id.clone();
-if !services().rooms.is_joined(sender_user, &room_id)? {
+if !services().rooms.state_cache.is_joined(sender_user, &room_id)? {
return Err(Error::BadRequest(
ErrorKind::Forbidden,
"You don't have permission to view this room.",
));
}
-if !services().rooms.lazy_load_was_sent_before(
+if !services().rooms.lazy_loading.lazy_load_was_sent_before(
sender_user,
sender_device,
&room_id,
@@ -68,6 +70,7 @@ pub async fn get_context_route(
let events_before: Vec<_> = services()
.rooms
+.timeline
.pdus_until(sender_user, &room_id, base_token)?
.take(
u32::try_from(body.limit).map_err(|_| {
@@ -79,7 +82,7 @@ pub async fn get_context_route(
.collect();
for (_, event) in &events_before {
-if !services().rooms.lazy_load_was_sent_before(
+if !services().rooms.lazy_loading.lazy_load_was_sent_before(
sender_user,
sender_device,
&room_id,
@@ -92,7 +95,7 @@ pub async fn get_context_route(
let start_token = events_before
.last()
-.and_then(|(pdu_id, _)| services().rooms.pdu_count(pdu_id).ok())
+.and_then(|(pdu_id, _)| services().rooms.timeline.pdu_count(pdu_id).ok())
.map(|count| count.to_string());
let events_before: Vec<_> = events_before
@@ -102,6 +105,7 @@ pub async fn get_context_route(
let events_after: Vec<_> = services()
.rooms
+.timeline
.pdus_after(sender_user, &room_id, base_token)?
.take(
u32::try_from(body.limit).map_err(|_| {
@@ -113,7 +117,7 @@ pub async fn get_context_route(
.collect();
for (_, event) in &events_after {
-if !services().rooms.lazy_load_was_sent_before(
+if !services().rooms.lazy_loading.lazy_load_was_sent_before(
sender_user,
sender_device,
&room_id,
@@ -124,7 +128,7 @@ pub async fn get_context_route(
}
}
-let shortstatehash = match services().rooms.pdu_shortstatehash(
+let shortstatehash = match services().rooms.state_accessor.pdu_shortstatehash(
events_after
.last()
.map_or(&*body.event_id, |(_, e)| &*e.event_id),
@@ -132,15 +136,16 @@ pub async fn get_context_route(
Some(s) => s,
None => services()
.rooms
-.current_shortstatehash(&room_id)?
+.state
+.get_room_shortstatehash(&room_id)?
.expect("All rooms have state"),
};
-let state_ids = services().rooms.state_full_ids(shortstatehash).await?;
+let state_ids = services().rooms.state_accessor.state_full_ids(shortstatehash).await?;
let end_token = events_after
.last()
-.and_then(|(pdu_id, _)| services().rooms.pdu_count(pdu_id).ok())
+.and_then(|(pdu_id, _)| services().rooms.timeline.pdu_count(pdu_id).ok())
.map(|count| count.to_string());
let events_after: Vec<_> = events_after
@@ -151,10 +156,10 @@ pub async fn get_context_route(
let mut state = Vec::new();
for (shortstatekey, id) in state_ids {
-let (event_type, state_key) = services().rooms.get_statekey_from_short(shortstatekey)?;
+let (event_type, state_key) = services().rooms.short.get_statekey_from_short(shortstatekey)?;
if event_type != StateEventType::RoomMember {
-let pdu = match services().rooms.get_pdu(&id)? {
+let pdu = match services().rooms.timeline.get_pdu(&id)? {
Some(pdu) => pdu,
None => {
error!("Pdu in state not found: {}", id);
@@ -163,7 +168,7 @@ pub async fn get_context_route(
};
state.push(pdu.to_state_event());
} else if !lazy_load_enabled || lazy_loaded.contains(&state_key) {
-let pdu = match services().rooms.get_pdu(&id)? {
+let pdu = match services().rooms.timeline.get_pdu(&id)? {
Some(pdu) => pdu,
None => {
error!("Pdu in state not found: {}", id);

@@ -86,10 +86,10 @@ pub async fn set_room_visibility_route(
match &body.visibility {
room::Visibility::Public => {
-services().rooms.set_public(&body.room_id, true)?;
+services().rooms.directory.set_public(&body.room_id)?;
info!("{} made {} public", sender_user, body.room_id);
}
-room::Visibility::Private => services().rooms.set_public(&body.room_id, false)?,
+room::Visibility::Private => services().rooms.directory.set_not_public(&body.room_id)?,
_ => {
return Err(Error::BadRequest(
ErrorKind::InvalidParam,
@@ -108,7 +108,7 @@ pub async fn get_room_visibility_route(
body: Ruma<get_room_visibility::v3::IncomingRequest>,
) -> Result<get_room_visibility::v3::Response> {
Ok(get_room_visibility::v3::Response {
-visibility: if services().rooms.is_public_room(&body.room_id)? {
+visibility: if services().rooms.directory.is_public_room(&body.room_id)? {
room::Visibility::Public
} else {
room::Visibility::Private
@@ -176,6 +176,7 @@ pub(crate) async fn get_public_rooms_filtered_helper(
let mut all_rooms: Vec<_> = services()
.rooms
+.directory
.public_rooms()
.map(|room_id| {
let room_id = room_id?;
@@ -183,6 +184,7 @@ pub(crate) async fn get_public_rooms_filtered_helper(
let chunk = PublicRoomsChunk {
canonical_alias: services()
.rooms
+.state_accessor
.room_state_get(&room_id, &StateEventType::RoomCanonicalAlias, "")?
.map_or(Ok(None), |s| {
serde_json::from_str(s.content.get())
@@ -193,6 +195,7 @@ pub(crate) async fn get_public_rooms_filtered_helper(
})?,
name: services()
.rooms
+.state_accessor
.room_state_get(&room_id, &StateEventType::RoomName, "")?
.map_or(Ok(None), |s| {
serde_json::from_str(s.content.get())
@@ -203,6 +206,7 @@ pub(crate) async fn get_public_rooms_filtered_helper(
})?,
num_joined_members: services()
.rooms
+.state_cache
.room_joined_count(&room_id)?
.unwrap_or_else(|| {
warn!("Room {} has no member count", room_id);
@@ -212,6 +216,7 @@ pub(crate) async fn get_public_rooms_filtered_helper(
.expect("user count should not be that big"),
topic: services()
.rooms
+.state_accessor
.room_state_get(&room_id, &StateEventType::RoomTopic, "")?
.map_or(Ok(None), |s| {
serde_json::from_str(s.content.get())
@@ -222,6 +227,7 @@ pub(crate) async fn get_public_rooms_filtered_helper(
})?,
world_readable: services()
.rooms
+.state_accessor
.room_state_get(&room_id, &StateEventType::RoomHistoryVisibility, "")?
.map_or(Ok(false), |s| {
serde_json::from_str(s.content.get())
@@ -236,6 +242,7 @@ pub(crate) async fn get_public_rooms_filtered_helper(
})?,
guest_can_join: services()
.rooms
+.state_accessor
.room_state_get(&room_id, &StateEventType::RoomGuestAccess, "")?
.map_or(Ok(false), |s| {
serde_json::from_str(s.content.get())
@@ -248,6 +255,7 @@ pub(crate) async fn get_public_rooms_filtered_helper(
})?,
avatar_url: services()
.rooms
+.state_accessor
.room_state_get(&room_id, &StateEventType::RoomAvatar, "")?
.map(|s| {
serde_json::from_str(s.content.get())
@@ -261,6 +269,7 @@ pub(crate) async fn get_public_rooms_filtered_helper(
.flatten(),
join_rule: services()
.rooms
+.state_accessor
.room_state_get(&room_id, &StateEventType::RoomJoinRules, "")?
.map(|s| {
serde_json::from_str(s.content.get())

@@ -230,7 +230,7 @@ pub async fn get_key_changes_route(
.filter_map(|r| r.ok()),
);
-for room_id in services().rooms.rooms_joined(sender_user).filter_map(|r| r.ok()) {
+for room_id in services().rooms.state_cache.rooms_joined(sender_user).filter_map(|r| r.ok()) {
device_list_updates.extend(
services().users
.keys_changed(

@@ -99,7 +99,7 @@ pub async fn get_content_route(
content_disposition,
content_type,
file,
-}) = services().media.get(&mxc).await?
+}) = services().media.get(mxc.clone()).await?
{
Ok(get_content::v3::Response {
file,
@@ -129,7 +129,7 @@ pub async fn get_content_as_filename_route(
content_disposition: _,
content_type,
file,
-}) = services().media.get(&mxc).await?
+}) = services().media.get(mxc.clone()).await?
{
Ok(get_content_as_filename::v3::Response {
file,
@@ -165,7 +165,7 @@ pub async fn get_content_thumbnail_route(
}) = services()
.media
.get_thumbnail(
-&mxc,
+mxc.clone(),
body.width
.try_into()
.map_err(|_| Error::BadRequest(ErrorKind::InvalidParam, "Width is invalid."))?,

@@ -30,7 +30,7 @@ use std::{
};
use tracing::{debug, error, warn};
-use crate::{services, PduEvent, service::pdu::{gen_event_id_canonical_json, PduBuilder}, Error, api::{server_server}, utils, Ruma};
+use crate::{Result, services, PduEvent, service::pdu::{gen_event_id_canonical_json, PduBuilder}, Error, api::{server_server, client_server}, utils, Ruma};
use super::get_alias_helper;
@@ -48,6 +48,7 @@ pub async fn join_room_by_id_route(
let mut servers = Vec::new(); // There is no body.server_name for /roomId/join
servers.extend(
services().rooms
+.state_cache
.invite_state(sender_user, &body.room_id)?
.unwrap_or_default()
.iter()
@@ -88,6 +89,7 @@ pub async fn join_room_by_id_or_alias_route(
let mut servers = body.server_name.clone();
servers.extend(
services().rooms
+.state_cache
.invite_state(sender_user, &room_id)?
.unwrap_or_default()
.iter()
@@ -131,7 +133,7 @@ pub async fn leave_room_route(
) -> Result<leave_room::v3::Response> {
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
-services().rooms.leave_room(sender_user, &body.room_id).await?;
+leave_room(sender_user, &body.room_id).await?;
Ok(leave_room::v3::Response::new())
}
@@ -162,6 +164,7 @@ pub async fn kick_user_route(
let mut event: RoomMemberEventContent = serde_json::from_str(
services().rooms
+.state_accessor
.room_state_get(
&body.room_id,
&StateEventType::RoomMember,
@@ -189,7 +192,7 @@ pub async fn kick_user_route(
);
let state_lock = mutex_state.lock().await;
-services().rooms.build_and_append_pdu(
+services().rooms.timeline.build_and_append_pdu(
PduBuilder {
event_type: RoomEventType::RoomMember,
content: to_raw_value(&event).expect("event is valid, we just created it"),
@@ -219,6 +222,7 @@ pub async fn ban_user_route(
let event = services()
.rooms
+.state_accessor
.room_state_get(
&body.room_id,
&StateEventType::RoomMember,
@@ -255,7 +259,7 @@ pub async fn ban_user_route(
);
let state_lock = mutex_state.lock().await;
-services().rooms.build_and_append_pdu(
+services().rooms.timeline.build_and_append_pdu(
PduBuilder {
event_type: RoomEventType::RoomMember,
content: to_raw_value(&event).expect("event is valid, we just created it"),
@@ -283,6 +287,7 @@ pub async fn unban_user_route(
let mut event: RoomMemberEventContent = serde_json::from_str(
services().rooms
+.state_accessor
.room_state_get(
&body.room_id,
&StateEventType::RoomMember,
@@ -309,7 +314,7 @@ pub async fn unban_user_route(
);
let state_lock = mutex_state.lock().await;
-services().rooms.build_and_append_pdu(
+services().rooms.timeline.build_and_append_pdu(
PduBuilder {
event_type: RoomEventType::RoomMember,
content: to_raw_value(&event).expect("event is valid, we just created it"),
@@ -340,7 +345,7 @@ pub async fn forget_room_route(
) -> Result<forget_room::v3::Response> {
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
-services().rooms.forget(&body.room_id, sender_user)?;
+services().rooms.state_cache.forget(&body.room_id, sender_user)?;
Ok(forget_room::v3::Response::new())
}
@@ -356,6 +361,7 @@ pub async fn joined_rooms_route(
Ok(joined_rooms::v3::Response {
joined_rooms: services()
.rooms
+.state_cache
.rooms_joined(sender_user)
.filter_map(|r| r.ok())
.collect(),
@@ -373,7 +379,7 @@ pub async fn get_member_events_route(
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
// TODO: check history visibility?
-if !services().rooms.is_joined(sender_user, &body.room_id)? {
+if !services().rooms.state_cache.is_joined(sender_user, &body.room_id)? {
return Err(Error::BadRequest(
ErrorKind::Forbidden,
"You don't have permission to view this room.",
@@ -383,6 +389,7 @@ pub async fn get_member_events_route(
Ok(get_member_events::v3::Response {
chunk: services()
.rooms
+.state_accessor
.room_state_full(&body.room_id)
.await?
.iter()
@@ -403,7 +410,7 @@ pub async fn joined_members_route(
) -> Result<joined_members::v3::Response> {
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
-if !services().rooms.is_joined(sender_user, &body.room_id)? {
+if !services().rooms.state_cache.is_joined(sender_user, &body.room_id)? {
return Err(Error::BadRequest(
ErrorKind::Forbidden,
"You aren't a member of the room.",
@@ -411,7 +418,7 @@ pub async fn joined_members_route(
}
let mut joined = BTreeMap::new();
-for user_id in services().rooms.room_members(&body.room_id).filter_map(|r| r.ok()) {
+for user_id in services().rooms.state_cache.room_members(&body.room_id).filter_map(|r| r.ok()) {
let display_name = services().users.displayname(&user_id)?;
let avatar_url = services().users.avatar_url(&user_id)?;
@@ -446,7 +453,7 @@ async fn join_room_by_id_helper(
let state_lock = mutex_state.lock().await;
// Ask a remote server if we don't have this room
-if !services().rooms.exists(room_id)? {
+if !services().rooms.metadata.exists(room_id)? {
let mut make_join_response_and_server = Err(Error::BadServerResponse(
"No server available to assist in joining.",
));
@@ -553,7 +560,7 @@ async fn join_room_by_id_helper(
)
.await?;
-services().rooms.get_or_create_shortroomid(room_id, &services().globals)?;
+services().rooms.short.get_or_create_shortroomid(room_id)?;
let parsed_pdu = PduEvent::from_id_val(event_id, join_event.clone())
.map_err(|_| Error::BadServerResponse("Invalid join event PDU."))?;
@@ -586,7 +593,7 @@ async fn join_room_by_id_helper(
services().rooms.add_pdu_outlier(&event_id, &value)?;
if let Some(state_key) = &pdu.state_key {
-let shortstatekey = services().rooms.get_or_create_shortstatekey(
+let shortstatekey = services().rooms.short.get_or_create_shortstatekey(
&pdu.kind.to_string().into(),
state_key,
)?;
@@ -594,7 +601,7 @@ async fn join_room_by_id_helper(
}
}
-let incoming_shortstatekey = services().rooms.get_or_create_shortstatekey(
+let incoming_shortstatekey = services().rooms.short.get_or_create_shortstatekey(
&parsed_pdu.kind.to_string().into(),
parsed_pdu
.state_key
@@ -606,6 +613,7 @@ async fn join_room_by_id_helper(
let create_shortstatekey = services()
.rooms
+.short
.get_shortstatekey(&StateEventType::RoomCreate, "")?
.expect("Room exists");
@@ -613,7 +621,7 @@ async fn join_room_by_id_helper(
return Err(Error::BadServerResponse("State contained no create event."));
}
-services().rooms.force_state(
+services().rooms.state.force_state(
room_id,
state
.into_iter()
@@ -780,7 +788,7 @@ pub(crate) async fn invite_helper<'a>(
redacts: None,
}, sender_user, room_id, &state_lock);
-let invite_room_state = services().rooms.calculate_invite_state(&pdu)?;
+let invite_room_state = services().rooms.state.calculate_invite_state(&pdu)?;
drop(state_lock);

@@ -87,7 +87,7 @@ pub async fn create_room_route(
Error::BadRequest(ErrorKind::InvalidParam, "Invalid alias.")
})?;
-if services().rooms.id_from_alias(&alias)?.is_some() {
+if services().rooms.alias.resolve_local_alias(&alias)?.is_some() {
Err(Error::BadRequest(
ErrorKind::RoomInUse,
"Room alias already exists.",

@@ -246,7 +246,7 @@ async fn send_state_event_for_key_helper(
if alias.server_name() != services().globals.server_name()
|| services()
.rooms
-.id_from_alias(&alias)?
+.alias.resolve_local_alias(&alias)?
.filter(|room| room == room_id) // Make sure it's the right room
.is_none()
{

@@ -1842,7 +1842,7 @@ pub async fn get_room_information_route(
let room_id = services()
.rooms
-.id_from_alias(&body.room_alias)?
+.alias.resolve_local_alias(&body.room_alias)?
.ok_or(Error::BadRequest(
ErrorKind::NotFound,
"Room alias not found.",

@@ -1,17 +1,14 @@
-use crate::{utils, Error, Result};
-use ruma::{
-api::client::error::ErrorKind,
-events::{AnyEphemeralRoomEvent, RoomAccountDataEventType},
-serde::Raw,
-RoomId, UserId,
-};
-use serde::{de::DeserializeOwned, Serialize};
-use std::{collections::HashMap, sync::Arc};
-impl AccountData {
+use std::collections::HashMap;
+use ruma::{UserId, DeviceId, signatures::CanonicalJsonValue, api::client::{uiaa::UiaaInfo, error::ErrorKind}, events::{RoomAccountDataEventType, AnyEphemeralRoomEvent}, serde::Raw, RoomId};
+use serde::{Serialize, de::DeserializeOwned};
+use crate::{Result, database::KeyValueDatabase, service, Error, utils, services};
+impl service::account_data::Data for KeyValueDatabase {
/// Places one event in the account data of the user and removes the previous entry.
#[tracing::instrument(skip(self, room_id, user_id, event_type, data))]
-pub fn update<T: Serialize>(
+fn update<T: Serialize>(
&self,
room_id: Option<&RoomId>,
user_id: &UserId,
@@ -63,7 +60,7 @@ impl AccountData {
/// Searches the account data for a specific kind.
#[tracing::instrument(skip(self, room_id, user_id, kind))]
-pub fn get<T: DeserializeOwned>(
+fn get<T: DeserializeOwned>(
&self,
room_id: Option<&RoomId>,
user_id: &UserId,
@@ -96,7 +93,7 @@ impl AccountData {
/// Returns all changes to the account data that happened after `since`.
#[tracing::instrument(skip(self, room_id, user_id, since))]
-pub fn changes_since(
+fn changes_since(
&self,
room_id: Option<&RoomId>,
user_id: &UserId,

@@ -1,4 +1,4 @@
-use crate::{database::KeyValueDatabase, service, utils, Error};
+use crate::{database::KeyValueDatabase, service, utils, Error, Result};
impl service::appservice::Data for KeyValueDatabase {
/// Registers an appservice and returns the ID to the caller
@@ -54,7 +54,7 @@ impl service::appservice::Data for KeyValueDatabase {
)
}
-fn iter_ids(&self) -> Result<impl Iterator<Item = Result<String>> + '_> {
+fn iter_ids(&self) -> Result<Box<dyn Iterator<Item = Result<String>>>> {
Ok(self.id_appserviceregistrations.iter().map(|(id, _)| {
utils::string_from_bytes(&id)
.map_err(|_| Error::bad_database("Invalid id bytes in id_appserviceregistrations."))

@@ -1,108 +1,13 @@
+use ruma::signatures::Ed25519KeyPair;
+use crate::{Result, service, database::KeyValueDatabase, Error, utils};
+impl service::globals::Data for KeyValueDatabase {
+fn load_keypair(&self) -> Result<Ed25519KeyPair> {
+let keypair_bytes = self.globals.get(b"keypair")?.map_or_else(
mod data;
pub use data::Data;
use crate::service::*;
use crate::{database::Config, server_server::FedDest, utils, Error, Result};
use ruma::{
api::{
client::sync::sync_events,
federation::discovery::{ServerSigningKeys, VerifyKey},
},
DeviceId, EventId, MilliSecondsSinceUnixEpoch, RoomId, RoomVersionId, ServerName,
ServerSigningKeyId, UserId,
};
use std::{
collections::{BTreeMap, HashMap},
fs,
future::Future,
net::{IpAddr, SocketAddr},
path::PathBuf,
sync::{Arc, Mutex, RwLock},
time::{Duration, Instant},
};
use tokio::sync::{broadcast, watch::Receiver, Mutex as TokioMutex, Semaphore};
use tracing::error;
use trust_dns_resolver::TokioAsyncResolver;
use super::abstraction::Tree;
pub const COUNTER: &[u8] = b"c";
type WellKnownMap = HashMap<Box<ServerName>, (FedDest, String)>;
type TlsNameMap = HashMap<String, (Vec<IpAddr>, u16)>;
type RateLimitState = (Instant, u32); // Time if last failed try, number of failed tries
type SyncHandle = (
Option<String>, // since
Receiver<Option<Result<sync_events::v3::Response>>>, // rx
);
pub struct Service<D: Data> {
db: D,
pub actual_destination_cache: Arc<RwLock<WellKnownMap>>, // actual_destination, host
pub tls_name_override: Arc<RwLock<TlsNameMap>>,
pub config: Config,
keypair: Arc<ruma::signatures::Ed25519KeyPair>,
dns_resolver: TokioAsyncResolver,
jwt_decoding_key: Option<jsonwebtoken::DecodingKey<'static>>,
federation_client: reqwest::Client,
default_client: reqwest::Client,
pub stable_room_versions: Vec<RoomVersionId>,
pub unstable_room_versions: Vec<RoomVersionId>,
pub bad_event_ratelimiter: Arc<RwLock<HashMap<Box<EventId>, RateLimitState>>>,
pub bad_signature_ratelimiter: Arc<RwLock<HashMap<Vec<String>, RateLimitState>>>,
pub servername_ratelimiter: Arc<RwLock<HashMap<Box<ServerName>, Arc<Semaphore>>>>,
pub sync_receivers: RwLock<HashMap<(Box<UserId>, Box<DeviceId>), SyncHandle>>,
pub roomid_mutex_insert: RwLock<HashMap<Box<RoomId>, Arc<Mutex<()>>>>,
pub roomid_mutex_state: RwLock<HashMap<Box<RoomId>, Arc<TokioMutex<()>>>>,
pub roomid_mutex_federation: RwLock<HashMap<Box<RoomId>, Arc<TokioMutex<()>>>>, // this lock will be held longer
pub roomid_federationhandletime: RwLock<HashMap<Box<RoomId>, (Box<EventId>, Instant)>>,
pub stateres_mutex: Arc<Mutex<()>>,
pub rotate: RotationHandler,
}
/// Handles "rotation" of long-polling requests. "Rotation" in this context is similar to "rotation" of log files and the like.
///
/// This is utilized to have sync workers return early and release read locks on the database.
pub struct RotationHandler(broadcast::Sender<()>, broadcast::Receiver<()>);
impl RotationHandler {
pub fn new() -> Self {
let (s, r) = broadcast::channel(1);
Self(s, r)
}
pub fn watch(&self) -> impl Future<Output = ()> {
let mut r = self.0.subscribe();
async move {
let _ = r.recv().await;
}
}
pub fn fire(&self) {
let _ = self.0.send(());
}
}
impl Default for RotationHandler {
fn default() -> Self {
Self::new()
}
}
impl Service<_> {
pub fn load(
globals: Arc<dyn Tree>,
server_signingkeys: Arc<dyn Tree>,
config: Config,
) -> Result<Self> {
let keypair_bytes = globals.get(b"keypair")?.map_or_else(
|| {
let keypair = utils::generate_keypair();
-globals.insert(b"keypair", &keypair)?;
+self.globals.insert(b"keypair", &keypair)?;
Ok::<_, Error>(keypair)
},
|s| Ok(s.to_vec()),
@@ -125,302 +30,11 @@ impl Service<_> {
.map(|key| (version, key))
})
.and_then(|(version, key)| {
-ruma::signatures::Ed25519KeyPair::from_der(key, version)
+Ed25519KeyPair::from_der(key, version)
.map_err(|_| Error::bad_database("Private or public keys are invalid."))
});
let keypair = match keypair {
Ok(k) => k,
Err(e) => {
error!("Keypair invalid. Deleting...");
globals.remove(b"keypair")?;
return Err(e);
}
};
let tls_name_override = Arc::new(RwLock::new(TlsNameMap::new()));
let jwt_decoding_key = config
.jwt_secret
.as_ref()
.map(|secret| jsonwebtoken::DecodingKey::from_secret(secret.as_bytes()).into_static());
let default_client = reqwest_client_builder(&config)?.build()?;
let name_override = Arc::clone(&tls_name_override);
let federation_client = reqwest_client_builder(&config)?
.resolve_fn(move |domain| {
let read_guard = name_override.read().unwrap();
let (override_name, port) = read_guard.get(&domain)?;
let first_name = override_name.get(0)?;
Some(SocketAddr::new(*first_name, *port))
})
.build()?;
// Supported and stable room versions
let stable_room_versions = vec![
RoomVersionId::V6,
RoomVersionId::V7,
RoomVersionId::V8,
RoomVersionId::V9,
];
// Experimental, partially supported room versions
let unstable_room_versions = vec![RoomVersionId::V3, RoomVersionId::V4, RoomVersionId::V5];
let mut s = Self {
globals,
config,
keypair: Arc::new(keypair),
dns_resolver: TokioAsyncResolver::tokio_from_system_conf().map_err(|e| {
error!(
"Failed to set up trust dns resolver with system config: {}",
e
);
Error::bad_config("Failed to set up trust dns resolver with system config.")
})?,
actual_destination_cache: Arc::new(RwLock::new(WellKnownMap::new())),
tls_name_override,
federation_client,
default_client,
server_signingkeys,
jwt_decoding_key,
stable_room_versions,
unstable_room_versions,
bad_event_ratelimiter: Arc::new(RwLock::new(HashMap::new())),
bad_signature_ratelimiter: Arc::new(RwLock::new(HashMap::new())),
servername_ratelimiter: Arc::new(RwLock::new(HashMap::new())),
roomid_mutex_state: RwLock::new(HashMap::new()),
roomid_mutex_insert: RwLock::new(HashMap::new()),
roomid_mutex_federation: RwLock::new(HashMap::new()),
roomid_federationhandletime: RwLock::new(HashMap::new()),
stateres_mutex: Arc::new(Mutex::new(())),
sync_receivers: RwLock::new(HashMap::new()),
rotate: RotationHandler::new(),
};
fs::create_dir_all(s.get_media_folder())?;
if !s
.supported_room_versions()
.contains(&s.config.default_room_version)
{
error!("Room version in config isn't supported, falling back to Version 6");
s.config.default_room_version = RoomVersionId::V6;
};
Ok(s)
}
/// Returns this server's keypair.
pub fn keypair(&self) -> &ruma::signatures::Ed25519KeyPair {
&self.keypair
}
/// Returns a reqwest client which can be used to send requests
pub fn default_client(&self) -> reqwest::Client {
// Client is cheap to clone (Arc wrapper) and avoids lifetime issues
self.default_client.clone()
}
+fn remove_keypair(&self) -> Result<()> {
+self.globals.remove(b"keypair")?
/// Returns a client used for resolving .well-knowns
pub fn federation_client(&self) -> reqwest::Client {
// Client is cheap to clone (Arc wrapper) and avoids lifetime issues
self.federation_client.clone()
}
#[tracing::instrument(skip(self))]
pub fn next_count(&self) -> Result<u64> {
utils::u64_from_bytes(&self.globals.increment(COUNTER)?)
.map_err(|_| Error::bad_database("Count has invalid bytes."))
}
#[tracing::instrument(skip(self))]
pub fn current_count(&self) -> Result<u64> {
self.globals.get(COUNTER)?.map_or(Ok(0_u64), |bytes| {
utils::u64_from_bytes(&bytes)
.map_err(|_| Error::bad_database("Count has invalid bytes."))
})
}
pub fn server_name(&self) -> &ServerName {
self.config.server_name.as_ref()
}
pub fn max_request_size(&self) -> u32 {
self.config.max_request_size
}
pub fn allow_registration(&self) -> bool {
self.config.allow_registration
}
pub fn allow_encryption(&self) -> bool {
self.config.allow_encryption
}
pub fn allow_federation(&self) -> bool {
self.config.allow_federation
}
pub fn allow_room_creation(&self) -> bool {
self.config.allow_room_creation
}
pub fn allow_unstable_room_versions(&self) -> bool {
self.config.allow_unstable_room_versions
}
pub fn default_room_version(&self) -> RoomVersionId {
self.config.default_room_version.clone()
}
pub fn trusted_servers(&self) -> &[Box<ServerName>] {
&self.config.trusted_servers
}
pub fn dns_resolver(&self) -> &TokioAsyncResolver {
&self.dns_resolver
}
pub fn jwt_decoding_key(&self) -> Option<&jsonwebtoken::DecodingKey<'_>> {
self.jwt_decoding_key.as_ref()
}
pub fn turn_password(&self) -> &String {
&self.config.turn_password
}
pub fn turn_ttl(&self) -> u64 {
self.config.turn_ttl
}
pub fn turn_uris(&self) -> &[String] {
&self.config.turn_uris
}
pub fn turn_username(&self) -> &String {
&self.config.turn_username
}
pub fn turn_secret(&self) -> &String {
&self.config.turn_secret
}
pub fn emergency_password(&self) -> &Option<String> {
&self.config.emergency_password
}
pub fn supported_room_versions(&self) -> Vec<RoomVersionId> {
let mut room_versions: Vec<RoomVersionId> = vec![];
room_versions.extend(self.stable_room_versions.clone());
if self.allow_unstable_room_versions() {
room_versions.extend(self.unstable_room_versions.clone());
};
room_versions
}
/// TODO: the key valid until timestamp is only honored in room version > 4
/// Remove the outdated keys and insert the new ones.
///
/// This doesn't actually check that the keys provided are newer than the old set.
pub fn add_signing_key(
&self,
origin: &ServerName,
new_keys: ServerSigningKeys,
) -> Result<BTreeMap<Box<ServerSigningKeyId>, VerifyKey>> {
// Not atomic, but this is not critical
let signingkeys = self.server_signingkeys.get(origin.as_bytes())?;
let mut keys = signingkeys
.and_then(|keys| serde_json::from_slice(&keys).ok())
.unwrap_or_else(|| {
// Just insert "now", it doesn't matter
ServerSigningKeys::new(origin.to_owned(), MilliSecondsSinceUnixEpoch::now())
});
let ServerSigningKeys {
verify_keys,
old_verify_keys,
..
} = new_keys;
keys.verify_keys.extend(verify_keys.into_iter());
keys.old_verify_keys.extend(old_verify_keys.into_iter());
self.server_signingkeys.insert(
origin.as_bytes(),
&serde_json::to_vec(&keys).expect("serversigningkeys can be serialized"),
)?;
let mut tree = keys.verify_keys;
tree.extend(
keys.old_verify_keys
.into_iter()
.map(|old| (old.0, VerifyKey::new(old.1.key))),
);
Ok(tree)
}
/// This returns an empty `Ok(BTreeMap<..>)` when there are no keys found for the server.
pub fn signing_keys_for(
&self,
origin: &ServerName,
) -> Result<BTreeMap<Box<ServerSigningKeyId>, VerifyKey>> {
let signingkeys = self
.server_signingkeys
.get(origin.as_bytes())?
.and_then(|bytes| serde_json::from_slice(&bytes).ok())
.map(|keys: ServerSigningKeys| {
let mut tree = keys.verify_keys;
tree.extend(
keys.old_verify_keys
.into_iter()
.map(|old| (old.0, VerifyKey::new(old.1.key))),
);
tree
})
.unwrap_or_else(BTreeMap::new);
Ok(signingkeys)
}
pub fn database_version(&self) -> Result<u64> {
self.globals.get(b"version")?.map_or(Ok(0), |version| {
utils::u64_from_bytes(&version)
.map_err(|_| Error::bad_database("Database version id is invalid."))
})
}
pub fn bump_database_version(&self, new_version: u64) -> Result<()> {
self.globals
.insert(b"version", &new_version.to_be_bytes())?;
Ok(())
}
pub fn get_media_folder(&self) -> PathBuf {
let mut r = PathBuf::new();
r.push(self.config.database_path.clone());
r.push("media");
r
}
pub fn get_media_file(&self, key: &[u8]) -> PathBuf {
let mut r = PathBuf::new();
r.push(self.config.database_path.clone());
r.push("media");
r.push(base64::encode_config(key, base64::URL_SAFE_NO_PAD));
r
}
}
fn reqwest_client_builder(config: &Config) -> Result<reqwest::ClientBuilder> {
let mut reqwest_client_builder = reqwest::Client::builder()
.connect_timeout(Duration::from_secs(30))
.timeout(Duration::from_secs(60 * 3));
if let Some(proxy) = config.proxy.to_proxy()? {
reqwest_client_builder = reqwest_client_builder.proxy(proxy);
}
Ok(reqwest_client_builder)
}
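
Read together, the two method bodies that replace this file imply a small storage trait for globals. A sketch of what `service::globals::Data` presumably looks like, reconstructed only from the signatures visible above (the real trait may differ, e.g. in supertraits or extra methods):

use ruma::signatures::Ed25519KeyPair;
use crate::Result;

pub trait Data: Send + Sync {
    // Loads the server keypair from the `globals` tree, generating one on first use.
    fn load_keypair(&self) -> Result<Ed25519KeyPair>;
    // Drops a stored keypair that failed to parse so a fresh one can be generated.
    fn remove_keypair(&self) -> Result<()>;
}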

@@ -1,16 +1,11 @@
-use crate::{utils, Error, Result, services};
-use ruma::{
-api::client::{
-backup::{BackupAlgorithm, KeyBackupData, RoomKeyBackup},
-error::ErrorKind,
-},
-serde::Raw,
-RoomId, UserId,
-};
-use std::{collections::BTreeMap, sync::Arc};
-impl KeyBackups {
-pub fn create_backup(
+use std::collections::BTreeMap;
+use ruma::{UserId, serde::Raw, api::client::{backup::{BackupAlgorithm, KeyBackupData, RoomKeyBackup}, error::ErrorKind}, RoomId};
+use crate::{Result, service, database::KeyValueDatabase, services, Error, utils};
+impl service::key_backups::Data for KeyValueDatabase {
+fn create_backup(
&self,
user_id: &UserId,
backup_metadata: &Raw<BackupAlgorithm>,
@@ -30,7 +25,7 @@ impl KeyBackups {
Ok(version)
}
-pub fn delete_backup(&self, user_id: &UserId, version: &str) -> Result<()> {
+fn delete_backup(&self, user_id: &UserId, version: &str) -> Result<()> {
let mut key = user_id.as_bytes().to_vec();
key.push(0xff);
key.extend_from_slice(version.as_bytes());
@@ -47,7 +42,7 @@ impl KeyBackups {
Ok(())
}
-pub fn update_backup(
+fn update_backup(
&self,
user_id: &UserId,
version: &str,
@@ -71,7 +66,7 @@ impl KeyBackups {
Ok(version.to_owned())
}
-pub fn get_latest_backup_version(&self, user_id: &UserId) -> Result<Option<String>> {
+fn get_latest_backup_version(&self, user_id: &UserId) -> Result<Option<String>> {
let mut prefix = user_id.as_bytes().to_vec();
prefix.push(0xff);
let mut last_possible_key = prefix.clone();
@@ -92,7 +87,7 @@ impl KeyBackups {
.transpose()
}
-pub fn get_latest_backup(
+fn get_latest_backup(
&self,
user_id: &UserId,
) -> Result<Option<(String, Raw<BackupAlgorithm>)>> {
@@ -123,7 +118,7 @@ impl KeyBackups {
.transpose()
}
-pub fn get_backup(
+fn get_backup(
&self,
user_id: &UserId,
version: &str,
@@ -140,7 +135,7 @@ impl KeyBackups {
})
}
-pub fn add_key(
+fn add_key(
&self,
user_id: &UserId,
version: &str,
@@ -173,7 +168,7 @@ impl KeyBackups {
Ok(())
}
-pub fn count_keys(&self, user_id: &UserId, version: &str) -> Result<usize> {
+fn count_keys(&self, user_id: &UserId, version: &str) -> Result<usize> {
let mut prefix = user_id.as_bytes().to_vec();
prefix.push(0xff);
prefix.extend_from_slice(version.as_bytes());
@@ -181,7 +176,7 @@ impl KeyBackups {
Ok(self.backupkeyid_backup.scan_prefix(prefix).count())
}
-pub fn get_etag(&self, user_id: &UserId, version: &str) -> Result<String> {
+fn get_etag(&self, user_id: &UserId, version: &str) -> Result<String> {
let mut key = user_id.as_bytes().to_vec();
key.push(0xff);
key.extend_from_slice(version.as_bytes());
@@ -196,7 +191,7 @@ impl KeyBackups {
.to_string())
}
-pub fn get_all(
+fn get_all(
&self,
user_id: &UserId,
version: &str,
@@ -252,7 +247,7 @@ impl KeyBackups {
Ok(rooms)
}
-pub fn get_room(
+fn get_room(
&self,
user_id: &UserId,
version: &str,
@@ -289,7 +284,7 @@ impl KeyBackups {
.collect())
}
-pub fn get_session(
+fn get_session(
&self,
user_id: &UserId,
version: &str,
@@ -314,7 +309,7 @@ impl KeyBackups {
.transpose()
}
-pub fn delete_all_keys(&self, user_id: &UserId, version: &str) -> Result<()> {
+fn delete_all_keys(&self, user_id: &UserId, version: &str) -> Result<()> {
let mut key = user_id.as_bytes().to_vec();
key.push(0xff);
key.extend_from_slice(version.as_bytes());
@@ -327,7 +322,7 @@ impl KeyBackups {
Ok(())
}
-pub fn delete_room_keys(
+fn delete_room_keys(
&self,
user_id: &UserId,
version: &str,
@@ -347,7 +342,7 @@ impl KeyBackups {
Ok(())
}
-pub fn delete_room_key(
+fn delete_room_key(
&self,
user_id: &UserId,
version: &str,

@@ -1,72 +1,7 @@
+use crate::{database::KeyValueDatabase, service, Error, utils, Result};
+impl service::media::Data for KeyValueDatabase {
+fn create_file_metadata(&self, mxc: String, width: u32, height: u32, content_disposition: &Option<&str>, content_type: &Option<&str>) -> Result<Vec<u8>> {
use image::{imageops::FilterType, GenericImageView};
use super::abstraction::Tree;
use crate::{utils, Error, Result};
use std::{mem, sync::Arc};
use tokio::{
fs::File,
io::{AsyncReadExt, AsyncWriteExt},
};
pub struct FileMeta {
pub content_disposition: Option<String>,
pub content_type: Option<String>,
pub file: Vec<u8>,
}
pub struct Media {
pub(super) mediaid_file: Arc<dyn Tree>, // MediaId = MXC + WidthHeight + ContentDisposition + ContentType
}
impl Media {
/// Uploads a file.
pub async fn create(
&self,
mxc: String,
globals: &Globals,
content_disposition: &Option<&str>,
content_type: &Option<&str>,
file: &[u8],
) -> Result<()> {
let mut key = mxc.as_bytes().to_vec();
key.push(0xff);
key.extend_from_slice(&0_u32.to_be_bytes()); // Width = 0 if it's not a thumbnail
key.extend_from_slice(&0_u32.to_be_bytes()); // Height = 0 if it's not a thumbnail
key.push(0xff);
key.extend_from_slice(
content_disposition
.as_ref()
.map(|f| f.as_bytes())
.unwrap_or_default(),
);
key.push(0xff);
key.extend_from_slice(
content_type
.as_ref()
.map(|c| c.as_bytes())
.unwrap_or_default(),
);
let path = globals.get_media_file(&key);
let mut f = File::create(path).await?;
f.write_all(file).await?;
self.mediaid_file.insert(&key, &[])?;
Ok(())
}
/// Uploads or replaces a file thumbnail.
#[allow(clippy::too_many_arguments)]
pub async fn upload_thumbnail(
&self,
mxc: String,
globals: &Globals,
content_disposition: &Option<String>,
content_type: &Option<String>,
width: u32,
height: u32,
file: &[u8],
) -> Result<()> {
let mut key = mxc.as_bytes().to_vec();
key.push(0xff);
key.extend_from_slice(&width.to_be_bytes());
@@ -86,272 +21,46 @@ impl Media {
.unwrap_or_default(),
);
let path = globals.get_media_file(&key);
let mut f = File::create(path).await?;
f.write_all(file).await?;
self.mediaid_file.insert(&key, &[])?;
-Ok(())
+Ok(key)
}
+fn search_file_metadata(&self, mxc: String, width: u32, height: u32) -> Result<(Option<String>, Option<String>, Vec<u8>)> {
/// Downloads a file.
pub async fn get(&self, globals: &Globals, mxc: &str) -> Result<Option<FileMeta>> {
let mut prefix = mxc.as_bytes().to_vec();
prefix.push(0xff);
prefix.extend_from_slice(&0_u32.to_be_bytes()); // Width = 0 if it's not a thumbnail
prefix.extend_from_slice(&0_u32.to_be_bytes()); // Height = 0 if it's not a thumbnail
prefix.push(0xff);
-let first = self.mediaid_file.scan_prefix(prefix).next();
+let (key, _) = self.mediaid_file.scan_prefix(prefix).next().ok_or(Error::NotFound)?;
if let Some((key, _)) = first {
let path = globals.get_media_file(&key);
let mut file = Vec::new();
File::open(path).await?.read_to_end(&mut file).await?;
let mut parts = key.rsplit(|&b| b == 0xff);
let content_type = parts
.next()
.map(|bytes| {
utils::string_from_bytes(bytes).map_err(|_| {
Error::bad_database("Content type in mediaid_file is invalid unicode.")
})
})
.transpose()?;
let content_disposition_bytes = parts
.next()
.ok_or_else(|| Error::bad_database("Media ID in db is invalid."))?;
let content_disposition = if content_disposition_bytes.is_empty() {
None
} else {
Some(
utils::string_from_bytes(content_disposition_bytes).map_err(|_| {
Error::bad_database(
"Content Disposition in mediaid_file is invalid unicode.",
)
})?,
)
};
Ok(Some(FileMeta {
content_disposition,
content_type,
file,
}))
} else {
Ok(None)
}
}
/// Returns width, height of the thumbnail and whether it should be cropped. Returns None when
/// the server should send the original file.
pub fn thumbnail_properties(&self, width: u32, height: u32) -> Option<(u32, u32, bool)> {
match (width, height) {
(0..=32, 0..=32) => Some((32, 32, true)),
(0..=96, 0..=96) => Some((96, 96, true)),
(0..=320, 0..=240) => Some((320, 240, false)),
(0..=640, 0..=480) => Some((640, 480, false)),
(0..=800, 0..=600) => Some((800, 600, false)),
_ => None,
}
}
/// Downloads a file's thumbnail.
///
/// Here's an example on how it works:
///
/// - Client requests an image with width=567, height=567
/// - Server rounds that up to (800, 600), so it doesn't have to save too many thumbnails
/// - Server rounds that up again to (958, 600) to fix the aspect ratio (only for width,height>96)
/// - Server creates the thumbnail and sends it to the user
///
/// For width,height <= 96 the server uses another thumbnailing algorithm which crops the image afterwards.
pub async fn get_thumbnail(
&self,
mxc: &str,
globals: &Globals,
width: u32,
height: u32,
) -> Result<Option<FileMeta>> {
let (width, height, crop) = self
.thumbnail_properties(width, height)
.unwrap_or((0, 0, false)); // 0, 0 because that's the original file
let mut main_prefix = mxc.as_bytes().to_vec();
main_prefix.push(0xff);
let mut thumbnail_prefix = main_prefix.clone();
thumbnail_prefix.extend_from_slice(&width.to_be_bytes());
thumbnail_prefix.extend_from_slice(&height.to_be_bytes());
thumbnail_prefix.push(0xff);
let mut original_prefix = main_prefix;
original_prefix.extend_from_slice(&0_u32.to_be_bytes()); // Width = 0 if it's not a thumbnail
original_prefix.extend_from_slice(&0_u32.to_be_bytes()); // Height = 0 if it's not a thumbnail
original_prefix.push(0xff);
let first_thumbnailprefix = self.mediaid_file.scan_prefix(thumbnail_prefix).next();
let first_originalprefix = self.mediaid_file.scan_prefix(original_prefix).next();
if let Some((key, _)) = first_thumbnailprefix {
// Using saved thumbnail
let path = globals.get_media_file(&key);
let mut file = Vec::new();
File::open(path).await?.read_to_end(&mut file).await?;
let mut parts = key.rsplit(|&b| b == 0xff);
let content_type = parts
.next()
.map(|bytes| {
utils::string_from_bytes(bytes).map_err(|_| {
Error::bad_database("Content type in mediaid_file is invalid unicode.")
})
})
.transpose()?;
let content_disposition_bytes = parts
.next()
.ok_or_else(|| Error::bad_database("Media ID in db is invalid."))?;
let content_disposition = if content_disposition_bytes.is_empty() {
None
} else {
Some(
utils::string_from_bytes(content_disposition_bytes).map_err(|_| {
Error::bad_database("Content Disposition in db is invalid.")
})?,
)
};
Ok(Some(FileMeta {
content_disposition,
content_type,
file: file.to_vec(),
}))
} else if let Some((key, _)) = first_originalprefix {
// Generate a thumbnail
let path = globals.get_media_file(&key);
let mut file = Vec::new();
File::open(path).await?.read_to_end(&mut file).await?;
let mut parts = key.rsplit(|&b| b == 0xff);
let content_type = parts
.next()
.map(|bytes| {
utils::string_from_bytes(bytes).map_err(|_| {
Error::bad_database("Content type in mediaid_file is invalid unicode.")
})
})
.transpose()?;
let content_disposition_bytes = parts
.next()
.ok_or_else(|| Error::bad_database("Media ID in db is invalid."))?;
let content_disposition = if content_disposition_bytes.is_empty() {
None
} else {
Some(
utils::string_from_bytes(content_disposition_bytes).map_err(|_| {
Error::bad_database(
"Content Disposition in mediaid_file is invalid unicode.",
)
})?,
)
};
if let Ok(image) = image::load_from_memory(&file) {
let original_width = image.width();
let original_height = image.height();
if width > original_width || height > original_height {
return Ok(Some(FileMeta {
content_disposition,
content_type,
file: file.to_vec(),
}));
}
let thumbnail = if crop {
image.resize_to_fill(width, height, FilterType::CatmullRom)
} else {
let (exact_width, exact_height) = {
// Copied from image::dynimage::resize_dimensions
let ratio = u64::from(original_width) * u64::from(height);
let nratio = u64::from(width) * u64::from(original_height);
let use_width = nratio <= ratio;
let intermediate = if use_width {
u64::from(original_height) * u64::from(width)
/ u64::from(original_width)
} else {
u64::from(original_width) * u64::from(height)
/ u64::from(original_height)
};
if use_width {
if intermediate <= u64::from(::std::u32::MAX) {
(width, intermediate as u32)
} else {
(
(u64::from(width) * u64::from(::std::u32::MAX) / intermediate)
as u32,
::std::u32::MAX,
)
}
} else if intermediate <= u64::from(::std::u32::MAX) {
(intermediate as u32, height)
} else {
(
::std::u32::MAX,
(u64::from(height) * u64::from(::std::u32::MAX) / intermediate)
as u32,
)
}
};
image.thumbnail_exact(exact_width, exact_height)
};
let mut thumbnail_bytes = Vec::new();
thumbnail.write_to(&mut thumbnail_bytes, image::ImageOutputFormat::Png)?;
// Save thumbnail in database so we don't have to generate it again next time
let mut thumbnail_key = key.to_vec();
let width_index = thumbnail_key
.iter()
.position(|&b| b == 0xff)
.ok_or_else(|| Error::bad_database("Media in db is invalid."))?
+ 1;
let mut widthheight = width.to_be_bytes().to_vec();
widthheight.extend_from_slice(&height.to_be_bytes());
thumbnail_key.splice(
width_index..width_index + 2 * mem::size_of::<u32>(),
widthheight,
);
let path = globals.get_media_file(&thumbnail_key);
let mut f = File::create(path).await?;
f.write_all(&thumbnail_bytes).await?;
self.mediaid_file.insert(&thumbnail_key, &[])?;
+let content_disposition_bytes = parts
+.next()
+.ok_or_else(|| Error::bad_database("Media ID in db is invalid."))?;
+let content_disposition = if content_disposition_bytes.is_empty() {
+None
Ok(Some(FileMeta {
content_disposition,
content_type,
file: thumbnail_bytes.to_vec(),
}))
} else {
// Couldn't parse file to generate thumbnail, send original
Ok(Some(FileMeta {
content_disposition,
content_type,
file: file.to_vec(),
}))
}
} else {
-Ok(None)
-}
+Some(
+utils::string_from_bytes(content_disposition_bytes).map_err(|_| {
+Error::bad_database(
+"Content Disposition in mediaid_file is invalid unicode.",
+)
+})?,
+)
+};
+Ok((content_disposition, content_type, key))
}
}

@@ -1,9 +1,9 @@
-//mod account_data;
+mod account_data;
//mod admin;
mod appservice;
-//mod globals;
+mod globals;
-//mod key_backups;
+mod key_backups;
-//mod media;
+mod media;
//mod pdu;
mod pusher;
mod rooms;

@@ -1,6 +1,6 @@
use ruma::{UserId, api::client::push::{set_pusher, get_pushers}};
-use crate::{service, database::KeyValueDatabase, Error};
+use crate::{service, database::KeyValueDatabase, Error, Result};
impl service::pusher::Data for KeyValueDatabase {
fn set_pusher(&self, sender: &UserId, pusher: set_pusher::v3::Pusher) -> Result<()> {
@@ -51,7 +51,7 @@ impl service::pusher::Data for KeyValueDatabase {
fn get_pusher_senderkeys<'a>(
&'a self,
sender: &UserId,
-) -> impl Iterator<Item = Vec<u8>> + 'a {
+) -> Box<dyn Iterator<Item = Vec<u8>>> {
let mut prefix = sender.as_bytes().to_vec();
prefix.push(0xff);
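
Several signatures in this commit (here `get_pusher_senderkeys`, and likewise `iter_ids`, `public_rooms`, and `local_aliases_for_room`) switch from `impl Iterator` to `Box<dyn Iterator<...>>`. Return-position `impl Trait` was not allowed in trait methods on the Rust toolchains this code targets, so once these functions become trait methods the concrete iterator type has to be erased by boxing. A self-contained illustration of the pattern (a generic example, not Conduit's actual trait):

trait KeyStore {
    // `impl Iterator` would be rejected as a trait-method return type here; a boxed
    // trait object works and hides the concrete iterator type from callers.
    fn keys_with_prefix(&self, prefix: &[u8]) -> Box<dyn Iterator<Item = Vec<u8>> + '_>;
}

struct MemStore {
    keys: Vec<Vec<u8>>,
}

impl KeyStore for MemStore {
    fn keys_with_prefix(&self, prefix: &[u8]) -> Box<dyn Iterator<Item = Vec<u8>> + '_> {
        let prefix = prefix.to_vec();
        Box::new(
            self.keys
                .iter()
                .filter(move |k| k.starts_with(&prefix))
                .cloned(),
        )
    }
}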

@ -1,12 +1,12 @@
use ruma::{RoomId, RoomAliasId, api::client::error::ErrorKind}; use ruma::{RoomId, RoomAliasId, api::client::error::ErrorKind};
use crate::{service, database::KeyValueDatabase, utils, Error, services}; use crate::{service, database::KeyValueDatabase, utils, Error, services, Result};
impl service::rooms::alias::Data for KeyValueDatabase { impl service::rooms::alias::Data for KeyValueDatabase {
fn set_alias( fn set_alias(
&self, &self,
alias: &RoomAliasId, alias: &RoomAliasId,
room_id: Option<&RoomId> room_id: &RoomId
) -> Result<()> { ) -> Result<()> {
self.alias_roomid self.alias_roomid
.insert(alias.alias().as_bytes(), room_id.as_bytes())?; .insert(alias.alias().as_bytes(), room_id.as_bytes())?;
@ -41,7 +41,7 @@ impl service::rooms::alias::Data for KeyValueDatabase {
fn resolve_local_alias( fn resolve_local_alias(
&self, &self,
alias: &RoomAliasId alias: &RoomAliasId
) -> Result<()> { ) -> Result<Option<Box<RoomId>>> {
self.alias_roomid self.alias_roomid
.get(alias.alias().as_bytes())? .get(alias.alias().as_bytes())?
.map(|bytes| { .map(|bytes| {
@ -56,7 +56,7 @@ impl service::rooms::alias::Data for KeyValueDatabase {
fn local_aliases_for_room( fn local_aliases_for_room(
&self, &self,
room_id: &RoomId, room_id: &RoomId,
) -> Result<()> { ) -> Result<Box<dyn Iterator<Item=String>>> {
let mut prefix = room_id.as_bytes().to_vec(); let mut prefix = room_id.as_bytes().to_vec();
prefix.push(0xff); prefix.push(0xff);
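Note: the alias store changed here is a flat byte-keyed map: the alias string is the key of `alias_roomid`, the room id is the value, and `resolve_local_alias` just parses the value back. A rough in-memory equivalent, with names invented for the example:

use std::collections::BTreeMap;

// alias -> room id, stored as raw bytes like the alias_roomid tree.
struct AliasStore {
    alias_roomid: BTreeMap<Vec<u8>, Vec<u8>>,
}

impl AliasStore {
    fn set_alias(&mut self, alias: &str, room_id: &str) {
        self.alias_roomid
            .insert(alias.as_bytes().to_vec(), room_id.as_bytes().to_vec());
    }

    fn remove_alias(&mut self, alias: &str) -> bool {
        self.alias_roomid.remove(alias.as_bytes()).is_some()
    }

    fn resolve_local_alias(&self, alias: &str) -> Option<String> {
        self.alias_roomid
            .get(alias.as_bytes())
            .map(|bytes| String::from_utf8_lossy(bytes).into_owned())
    }
}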

@ -1,5 +1,9 @@
impl service::room::auth_chain::Data for KeyValueDatabase { use std::{collections::HashSet, mem::size_of};
fn get_cached_eventid_authchain<'a>() -> Result<HashSet<u64>> {
use crate::{service, database::KeyValueDatabase, Result, utils};
impl service::rooms::auth_chain::Data for KeyValueDatabase {
fn get_cached_eventid_authchain(&self, shorteventid: u64) -> Result<HashSet<u64>> {
self.shorteventid_authchain self.shorteventid_authchain
.get(&shorteventid.to_be_bytes())? .get(&shorteventid.to_be_bytes())?
.map(|chain| { .map(|chain| {
@ -12,8 +16,8 @@ impl service::room::auth_chain::Data for KeyValueDatabase {
}) })
} }
fn cache_eventid_authchain<'a>(shorteventid: u64, auth_chain: &HashSet<u64>) -> Result<()> { fn cache_eventid_authchain(&self, shorteventid: u64, auth_chain: &HashSet<u64>) -> Result<()> {
shorteventid_authchain.insert( self.shorteventid_authchain.insert(
&shorteventid.to_be_bytes(), &shorteventid.to_be_bytes(),
&auth_chain &auth_chain
.iter() .iter()
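Note: the cache touched here stores an event's whole auth chain as the concatenated big-endian bytes of its short event ids, keyed by the event's own short id. A small sketch of that encoding and decoding, assuming the layout stays the same (these helpers are illustrative, not part of the commit):

use std::collections::HashSet;
use std::convert::TryInto;

// auth chain -> value bytes for shorteventid_authchain
fn encode_chain(chain: &HashSet<u64>) -> Vec<u8> {
    chain.iter().flat_map(|id| id.to_be_bytes()).collect()
}

// value bytes -> auth chain; None signals a corrupt entry
fn decode_chain(bytes: &[u8]) -> Option<HashSet<u64>> {
    if bytes.len() % std::mem::size_of::<u64>() != 0 {
        return None;
    }
    Some(
        bytes
            .chunks_exact(std::mem::size_of::<u64>())
            .map(|chunk| u64::from_be_bytes(chunk.try_into().expect("chunk is 8 bytes")))
            .collect(),
    )
}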

@ -1,6 +1,6 @@
use ruma::RoomId; use ruma::RoomId;
use crate::{service, database::KeyValueDatabase, utils, Error}; use crate::{service, database::KeyValueDatabase, utils, Error, Result};
impl service::rooms::directory::Data for KeyValueDatabase { impl service::rooms::directory::Data for KeyValueDatabase {
fn set_public(&self, room_id: &RoomId) -> Result<()> { fn set_public(&self, room_id: &RoomId) -> Result<()> {
@ -15,7 +15,7 @@ impl service::rooms::directory::Data for KeyValueDatabase {
Ok(self.publicroomids.get(room_id.as_bytes())?.is_some()) Ok(self.publicroomids.get(room_id.as_bytes())?.is_some())
} }
fn public_rooms(&self) -> impl Iterator<Item = Result<Box<RoomId>>> + '_ { fn public_rooms(&self) -> Box<dyn Iterator<Item = Result<Box<RoomId>>>> {
self.publicroomids.iter().map(|(bytes, _)| { self.publicroomids.iter().map(|(bytes, _)| {
RoomId::parse( RoomId::parse(
utils::string_from_bytes(&bytes).map_err(|_| { utils::string_from_bytes(&bytes).map_err(|_| {

@ -1,3 +1,7 @@
mod presence; mod presence;
mod typing; mod typing;
mod read_receipt; mod read_receipt;
use crate::{service, database::KeyValueDatabase};
impl service::rooms::edus::Data for KeyValueDatabase {}

@ -2,7 +2,7 @@ use std::collections::HashMap;
use ruma::{UserId, RoomId, events::presence::PresenceEvent, presence::PresenceState, UInt}; use ruma::{UserId, RoomId, events::presence::PresenceEvent, presence::PresenceState, UInt};
use crate::{service, database::KeyValueDatabase, utils, Error, services}; use crate::{service, database::KeyValueDatabase, utils, Error, services, Result};
impl service::rooms::edus::presence::Data for KeyValueDatabase { impl service::rooms::edus::presence::Data for KeyValueDatabase {
fn update_presence( fn update_presence(
@ -56,8 +56,8 @@ impl service::rooms::edus::presence::Data for KeyValueDatabase {
fn get_presence_event( fn get_presence_event(
&self, &self,
user_id: &UserId,
room_id: &RoomId, room_id: &RoomId,
user_id: &UserId,
count: u64, count: u64,
) -> Result<Option<PresenceEvent>> { ) -> Result<Option<PresenceEvent>> {
let mut presence_id = room_id.as_bytes().to_vec(); let mut presence_id = room_id.as_bytes().to_vec();

@ -2,7 +2,7 @@ use std::mem;
use ruma::{UserId, RoomId, events::receipt::ReceiptEvent, serde::Raw, signatures::CanonicalJsonObject}; use ruma::{UserId, RoomId, events::receipt::ReceiptEvent, serde::Raw, signatures::CanonicalJsonObject};
use crate::{database::KeyValueDatabase, service, utils, Error, services}; use crate::{database::KeyValueDatabase, service, utils, Error, services, Result};
impl service::rooms::edus::read_receipt::Data for KeyValueDatabase { impl service::rooms::edus::read_receipt::Data for KeyValueDatabase {
fn readreceipt_update( fn readreceipt_update(
@ -50,13 +50,13 @@ impl service::rooms::edus::read_receipt::Data for KeyValueDatabase {
&'a self, &'a self,
room_id: &RoomId, room_id: &RoomId,
since: u64, since: u64,
) -> impl Iterator< ) -> Box<dyn Iterator<
Item=Result<( Item=Result<(
Box<UserId>, Box<UserId>,
u64, u64,
Raw<ruma::events::AnySyncEphemeralRoomEvent>, Raw<ruma::events::AnySyncEphemeralRoomEvent>,
)>, )>,
> + 'a { >> {
let mut prefix = room_id.as_bytes().to_vec(); let mut prefix = room_id.as_bytes().to_vec();
prefix.push(0xff); prefix.push(0xff);
let prefix2 = prefix.clone(); let prefix2 = prefix.clone();

@ -2,7 +2,7 @@ use std::collections::HashSet;
use ruma::{UserId, RoomId}; use ruma::{UserId, RoomId};
use crate::{database::KeyValueDatabase, service, utils, Error, services}; use crate::{database::KeyValueDatabase, service, utils, Error, services, Result};
impl service::rooms::edus::typing::Data for KeyValueDatabase { impl service::rooms::edus::typing::Data for KeyValueDatabase {
fn typing_add( fn typing_add(
@ -79,7 +79,7 @@ impl service::rooms::edus::typing::Data for KeyValueDatabase {
fn typings_all( fn typings_all(
&self, &self,
room_id: &RoomId, room_id: &RoomId,
) -> Result<HashSet<UserId>> { ) -> Result<HashSet<Box<UserId>>> {
let mut prefix = room_id.as_bytes().to_vec(); let mut prefix = room_id.as_bytes().to_vec();
prefix.push(0xff); prefix.push(0xff);

@ -1,6 +1,6 @@
use ruma::{UserId, DeviceId, RoomId}; use ruma::{UserId, DeviceId, RoomId};
use crate::{service, database::KeyValueDatabase}; use crate::{service, database::KeyValueDatabase, Result};
impl service::rooms::lazy_loading::Data for KeyValueDatabase { impl service::rooms::lazy_loading::Data for KeyValueDatabase {
fn lazy_load_was_sent_before( fn lazy_load_was_sent_before(

@ -1,6 +1,6 @@
use ruma::RoomId; use ruma::RoomId;
use crate::{service, database::KeyValueDatabase}; use crate::{service, database::KeyValueDatabase, Result};
impl service::rooms::metadata::Data for KeyValueDatabase { impl service::rooms::metadata::Data for KeyValueDatabase {
fn exists(&self, room_id: &RoomId) -> Result<bool> { fn exists(&self, room_id: &RoomId) -> Result<bool> {

@ -1,16 +1,20 @@
mod alias; mod alias;
mod auth_chain;
mod directory; mod directory;
mod edus; mod edus;
//mod event_handler;
mod lazy_load; mod lazy_load;
mod metadata; mod metadata;
mod outlier; mod outlier;
mod pdu_metadata; mod pdu_metadata;
mod search; mod search;
//mod short; mod short;
mod state; mod state;
mod state_accessor; mod state_accessor;
mod state_cache; mod state_cache;
mod state_compressor; mod state_compressor;
mod timeline; mod timeline;
mod user; mod user;
use crate::{database::KeyValueDatabase, service};
impl service::rooms::Data for KeyValueDatabase {}

@ -1,6 +1,6 @@
use ruma::{EventId, signatures::CanonicalJsonObject}; use ruma::{EventId, signatures::CanonicalJsonObject};
use crate::{service, database::KeyValueDatabase, PduEvent, Error}; use crate::{service, database::KeyValueDatabase, PduEvent, Error, Result};
impl service::rooms::outlier::Data for KeyValueDatabase { impl service::rooms::outlier::Data for KeyValueDatabase {
fn get_outlier_pdu_json(&self, event_id: &EventId) -> Result<Option<CanonicalJsonObject>> { fn get_outlier_pdu_json(&self, event_id: &EventId) -> Result<Option<CanonicalJsonObject>> {

@ -2,7 +2,7 @@ use std::sync::Arc;
use ruma::{RoomId, EventId}; use ruma::{RoomId, EventId};
use crate::{service, database::KeyValueDatabase}; use crate::{service, database::KeyValueDatabase, Result};
impl service::rooms::pdu_metadata::Data for KeyValueDatabase { impl service::rooms::pdu_metadata::Data for KeyValueDatabase {
fn mark_as_referenced(&self, room_id: &RoomId, event_ids: &[Arc<EventId>]) -> Result<()> { fn mark_as_referenced(&self, room_id: &RoomId, event_ids: &[Arc<EventId>]) -> Result<()> {

@ -2,10 +2,10 @@ use std::mem::size_of;
use ruma::RoomId; use ruma::RoomId;
use crate::{service, database::KeyValueDatabase, utils}; use crate::{service, database::KeyValueDatabase, utils, Result};
impl service::rooms::search::Data for KeyValueDatabase { impl service::rooms::search::Data for KeyValueDatabase {
fn index_pdu<'a>(&self, room_id: &RoomId, pdu_id: u64, message_body: String) -> Result<()> { fn index_pdu<'a>(&self, shortroomid: u64, pdu_id: u64, message_body: String) -> Result<()> {
let mut batch = message_body let mut batch = message_body
.split_terminator(|c: char| !c.is_alphanumeric()) .split_terminator(|c: char| !c.is_alphanumeric())
.filter(|s| !s.is_empty()) .filter(|s| !s.is_empty())
@ -26,7 +26,7 @@ impl service::rooms::search::Data for KeyValueDatabase {
&'a self, &'a self,
room_id: &RoomId, room_id: &RoomId,
search_string: &str, search_string: &str,
) -> Result<Option<(impl Iterator<Item = Vec<u8>> + 'a, Vec<String>)>> { ) -> Result<Option<(Box<dyn Iterator<Item = Vec<u8>>>, Vec<String>)>> {
let prefix = self let prefix = self
.get_shortroomid(room_id)? .get_shortroomid(room_id)?
.expect("room exists") .expect("room exists")

@ -0,0 +1,4 @@
use crate::{database::KeyValueDatabase, service};
impl service::rooms::short::Data for KeyValueDatabase {
}

@ -3,7 +3,7 @@ use std::sync::Arc;
use std::{sync::MutexGuard, collections::HashSet}; use std::{sync::MutexGuard, collections::HashSet};
use std::fmt::Debug; use std::fmt::Debug;
use crate::{service, database::KeyValueDatabase, utils, Error}; use crate::{service, database::KeyValueDatabase, utils, Error, Result};
impl service::rooms::state::Data for KeyValueDatabase { impl service::rooms::state::Data for KeyValueDatabase {
fn get_room_shortstatehash(&self, room_id: &RoomId) -> Result<Option<u64>> { fn get_room_shortstatehash(&self, room_id: &RoomId) -> Result<Option<u64>> {
@ -24,7 +24,7 @@ impl service::rooms::state::Data for KeyValueDatabase {
Ok(()) Ok(())
} }
fn set_event_state(&self, shorteventid: Vec<u8>, shortstatehash: Vec<u8>) -> Result<()> { fn set_event_state(&self, shorteventid: u64, shortstatehash: u64) -> Result<()> {
self.shorteventid_shortstatehash self.shorteventid_shortstatehash
.insert(&shorteventid.to_be_bytes(), &shortstatehash.to_be_bytes())?; .insert(&shorteventid.to_be_bytes(), &shortstatehash.to_be_bytes())?;
Ok(()) Ok(())

@ -1,6 +1,6 @@
use std::{collections::{BTreeMap, HashMap}, sync::Arc}; use std::{collections::{BTreeMap, HashMap}, sync::Arc};
use crate::{database::KeyValueDatabase, service, PduEvent, Error, utils}; use crate::{database::KeyValueDatabase, service, PduEvent, Error, utils, Result};
use async_trait::async_trait; use async_trait::async_trait;
use ruma::{EventId, events::StateEventType, RoomId}; use ruma::{EventId, events::StateEventType, RoomId};

@ -1,6 +1,6 @@
use ruma::{UserId, RoomId}; use ruma::{UserId, RoomId, events::{AnyStrippedStateEvent, AnySyncStateEvent}, serde::Raw};
use crate::{service, database::KeyValueDatabase}; use crate::{service, database::KeyValueDatabase, services, Result};
impl service::rooms::state_cache::Data for KeyValueDatabase { impl service::rooms::state_cache::Data for KeyValueDatabase {
fn mark_as_once_joined(&self, user_id: &UserId, room_id: &RoomId) -> Result<()> { fn mark_as_once_joined(&self, user_id: &UserId, room_id: &RoomId) -> Result<()> {
@ -9,4 +9,70 @@ impl service::rooms::state_cache::Data for KeyValueDatabase {
userroom_id.extend_from_slice(room_id.as_bytes()); userroom_id.extend_from_slice(room_id.as_bytes());
self.roomuseroncejoinedids.insert(&userroom_id, &[]) self.roomuseroncejoinedids.insert(&userroom_id, &[])
} }
fn mark_as_joined(&self, user_id: &UserId, room_id: &RoomId) -> Result<()> {
let mut roomuser_id = room_id.as_bytes().to_vec();
roomuser_id.push(0xff);
roomuser_id.extend_from_slice(user_id.as_bytes());
let mut userroom_id = user_id.as_bytes().to_vec();
userroom_id.push(0xff);
userroom_id.extend_from_slice(room_id.as_bytes());
self.userroomid_joined.insert(&userroom_id, &[])?;
self.roomuserid_joined.insert(&roomuser_id, &[])?;
self.userroomid_invitestate.remove(&userroom_id)?;
self.roomuserid_invitecount.remove(&roomuser_id)?;
self.userroomid_leftstate.remove(&userroom_id)?;
self.roomuserid_leftcount.remove(&roomuser_id)?;
Ok(())
}
fn mark_as_invited(&self, user_id: &UserId, room_id: &RoomId, last_state: Option<Vec<Raw<AnyStrippedStateEvent>>>) -> Result<()> {
let mut roomuser_id = room_id.as_bytes().to_vec();
roomuser_id.push(0xff);
roomuser_id.extend_from_slice(user_id.as_bytes());
let mut userroom_id = user_id.as_bytes().to_vec();
userroom_id.push(0xff);
userroom_id.extend_from_slice(room_id.as_bytes());
self.userroomid_invitestate.insert(
&userroom_id,
&serde_json::to_vec(&last_state.unwrap_or_default())
.expect("state to bytes always works"),
)?;
self.roomuserid_invitecount
.insert(&roomuser_id, &services().globals.next_count()?.to_be_bytes())?;
self.userroomid_joined.remove(&userroom_id)?;
self.roomuserid_joined.remove(&roomuser_id)?;
self.userroomid_leftstate.remove(&userroom_id)?;
self.roomuserid_leftcount.remove(&roomuser_id)?;
Ok(())
}
fn mark_as_left(&self, user_id: &UserId, room_id: &RoomId) -> Result<()> {
let mut roomuser_id = room_id.as_bytes().to_vec();
roomuser_id.push(0xff);
roomuser_id.extend_from_slice(user_id.as_bytes());
let mut userroom_id = user_id.as_bytes().to_vec();
userroom_id.push(0xff);
userroom_id.extend_from_slice(room_id.as_bytes());
self.userroomid_leftstate.insert(
&userroom_id,
&serde_json::to_vec(&Vec::<Raw<AnySyncStateEvent>>::new()).unwrap(),
)?; // TODO
self.roomuserid_leftcount
.insert(&roomuser_id, &services().globals.next_count()?.to_be_bytes())?;
self.userroomid_joined.remove(&userroom_id)?;
self.roomuserid_joined.remove(&roomuser_id)?;
self.userroomid_invitestate.remove(&userroom_id)?;
self.roomuserid_invitecount.remove(&roomuser_id)?;
Ok(())
}
} }
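Note: every membership transition above builds the same two keys for a (user, room) pair and then inserts/removes them across the joined, invite, and left trees. A helper equivalent to what the new code inlines, purely illustrative:

use ruma::{RoomId, UserId};

// userroom_id = user ++ 0xff ++ room (userroomid_* trees)
// roomuser_id = room ++ 0xff ++ user (roomuserid_* trees)
fn membership_keys(user_id: &UserId, room_id: &RoomId) -> (Vec<u8>, Vec<u8>) {
    let mut userroom_id = user_id.as_bytes().to_vec();
    userroom_id.push(0xff);
    userroom_id.extend_from_slice(room_id.as_bytes());

    let mut roomuser_id = room_id.as_bytes().to_vec();
    roomuser_id.push(0xff);
    roomuser_id.extend_from_slice(user_id.as_bytes());

    (userroom_id, roomuser_id)
}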

@ -1,6 +1,6 @@
use std::{collections::HashSet, mem::size_of}; use std::{collections::HashSet, mem::size_of};
use crate::{service::{self, rooms::state_compressor::data::StateDiff}, database::KeyValueDatabase, Error, utils}; use crate::{service::{self, rooms::state_compressor::data::StateDiff}, database::KeyValueDatabase, Error, utils, Result};
impl service::rooms::state_compressor::Data for KeyValueDatabase { impl service::rooms::state_compressor::Data for KeyValueDatabase {
fn get_statediff(&self, shortstatehash: u64) -> Result<StateDiff> { fn get_statediff(&self, shortstatehash: u64) -> Result<StateDiff> {

@ -3,7 +3,7 @@ use std::{collections::hash_map, mem::size_of, sync::Arc};
use ruma::{UserId, RoomId, api::client::error::ErrorKind, EventId, signatures::CanonicalJsonObject}; use ruma::{UserId, RoomId, api::client::error::ErrorKind, EventId, signatures::CanonicalJsonObject};
use tracing::error; use tracing::error;
use crate::{service, database::KeyValueDatabase, utils, Error, PduEvent}; use crate::{service, database::KeyValueDatabase, utils, Error, PduEvent, Result};
impl service::rooms::timeline::Data for KeyValueDatabase { impl service::rooms::timeline::Data for KeyValueDatabase {
fn last_timeline_count(&self, sender_user: &UserId, room_id: &RoomId) -> Result<u64> { fn last_timeline_count(&self, sender_user: &UserId, room_id: &RoomId) -> Result<u64> {
@ -190,7 +190,7 @@ impl service::rooms::timeline::Data for KeyValueDatabase {
user_id: &UserId, user_id: &UserId,
room_id: &RoomId, room_id: &RoomId,
since: u64, since: u64,
) -> Result<impl Iterator<Item = Result<(Vec<u8>, PduEvent)>> + 'a> { ) -> Result<Box<dyn Iterator<Item = Result<(Vec<u8>, PduEvent)>>>> {
let prefix = self let prefix = self
.get_shortroomid(room_id)? .get_shortroomid(room_id)?
.expect("room exists") .expect("room exists")
@ -224,7 +224,7 @@ impl service::rooms::timeline::Data for KeyValueDatabase {
user_id: &UserId, user_id: &UserId,
room_id: &RoomId, room_id: &RoomId,
until: u64, until: u64,
) -> Result<impl Iterator<Item = Result<(Vec<u8>, PduEvent)>> + 'a> { ) -> Result<Box<dyn Iterator<Item = Result<(Vec<u8>, PduEvent)>>>> {
// Create the first part of the full pdu id // Create the first part of the full pdu id
let prefix = self let prefix = self
.get_shortroomid(room_id)? .get_shortroomid(room_id)?
@ -258,7 +258,7 @@ impl service::rooms::timeline::Data for KeyValueDatabase {
user_id: &UserId, user_id: &UserId,
room_id: &RoomId, room_id: &RoomId,
from: u64, from: u64,
) -> Result<impl Iterator<Item = Result<(Vec<u8>, PduEvent)>> + 'a> { ) -> Result<Box<dyn Iterator<Item = Result<(Vec<u8>, PduEvent)>>>> {
// Create the first part of the full pdu id // Create the first part of the full pdu id
let prefix = self let prefix = self
.get_shortroomid(room_id)? .get_shortroomid(room_id)?

@ -1,6 +1,6 @@
use ruma::{UserId, RoomId}; use ruma::{UserId, RoomId};
use crate::{service, database::KeyValueDatabase, utils, Error}; use crate::{service, database::KeyValueDatabase, utils, Error, Result};
impl service::rooms::user::Data for KeyValueDatabase { impl service::rooms::user::Data for KeyValueDatabase {
fn reset_notification_counts(&self, user_id: &UserId, room_id: &RoomId) -> Result<()> { fn reset_notification_counts(&self, user_id: &UserId, room_id: &RoomId) -> Result<()> {
@ -78,7 +78,7 @@ impl service::rooms::user::Data for KeyValueDatabase {
fn get_shared_rooms<'a>( fn get_shared_rooms<'a>(
&'a self, &'a self,
users: Vec<Box<UserId>>, users: Vec<Box<UserId>>,
) -> Result<impl Iterator<Item = Result<Box<RoomId>>> + 'a> { ) -> Result<Box<dyn Iterator<Item = Result<Box<RoomId>>>>> {
let iterators = users.into_iter().map(move |user_id| { let iterators = users.into_iter().map(move |user_id| {
let mut prefix = user_id.as_bytes().to_vec(); let mut prefix = user_id.as_bytes().to_vec();
prefix.push(0xff); prefix.push(0xff);

@ -1,6 +1,6 @@
use ruma::{UserId, DeviceId, TransactionId}; use ruma::{UserId, DeviceId, TransactionId};
use crate::{service, database::KeyValueDatabase}; use crate::{service, database::KeyValueDatabase, Result};
impl service::transaction_ids::Data for KeyValueDatabase { impl service::transaction_ids::Data for KeyValueDatabase {
fn add_txnid( fn add_txnid(

@ -1,8 +1,6 @@
use std::io::ErrorKind; use ruma::{UserId, DeviceId, signatures::CanonicalJsonValue, api::client::{uiaa::UiaaInfo, error::ErrorKind}};
use ruma::{UserId, DeviceId, signatures::CanonicalJsonValue, api::client::uiaa::UiaaInfo}; use crate::{database::KeyValueDatabase, service, Error, Result};
use crate::{database::KeyValueDatabase, service, Error};
impl service::uiaa::Data for KeyValueDatabase { impl service::uiaa::Data for KeyValueDatabase {
fn set_uiaa_request( fn set_uiaa_request(

@ -3,7 +3,7 @@ use std::{mem::size_of, collections::BTreeMap};
use ruma::{api::client::{filter::IncomingFilterDefinition, error::ErrorKind, device::Device}, UserId, RoomAliasId, MxcUri, DeviceId, MilliSecondsSinceUnixEpoch, DeviceKeyId, encryption::{OneTimeKey, CrossSigningKey, DeviceKeys}, serde::Raw, events::{AnyToDeviceEvent, StateEventType}, DeviceKeyAlgorithm, UInt}; use ruma::{api::client::{filter::IncomingFilterDefinition, error::ErrorKind, device::Device}, UserId, RoomAliasId, MxcUri, DeviceId, MilliSecondsSinceUnixEpoch, DeviceKeyId, encryption::{OneTimeKey, CrossSigningKey, DeviceKeys}, serde::Raw, events::{AnyToDeviceEvent, StateEventType}, DeviceKeyAlgorithm, UInt};
use tracing::warn; use tracing::warn;
use crate::{service::{self, users::clean_signatures}, database::KeyValueDatabase, Error, utils, services}; use crate::{service::{self, users::clean_signatures}, database::KeyValueDatabase, Error, utils, services, Result};
impl service::users::Data for KeyValueDatabase { impl service::users::Data for KeyValueDatabase {
/// Check if a user has an account on this homeserver. /// Check if a user has an account on this homeserver.
@ -56,7 +56,7 @@ impl service::users::Data for KeyValueDatabase {
} }
/// Returns an iterator over all users on this homeserver. /// Returns an iterator over all users on this homeserver.
fn iter(&self) -> impl Iterator<Item = Result<Box<UserId>>> + '_ { fn iter(&self) -> Box<dyn Iterator<Item = Result<Box<UserId>>>> {
self.userid_password.iter().map(|(bytes, _)| { self.userid_password.iter().map(|(bytes, _)| {
UserId::parse(utils::string_from_bytes(&bytes).map_err(|_| { UserId::parse(utils::string_from_bytes(&bytes).map_err(|_| {
Error::bad_database("User ID in userid_password is invalid unicode.") Error::bad_database("User ID in userid_password is invalid unicode.")
@ -270,7 +270,7 @@ impl service::users::Data for KeyValueDatabase {
fn all_device_ids<'a>( fn all_device_ids<'a>(
&'a self, &'a self,
user_id: &UserId, user_id: &UserId,
) -> impl Iterator<Item = Result<Box<DeviceId>>> + 'a { ) -> Box<dyn Iterator<Item = Result<Box<DeviceId>>>> {
let mut prefix = user_id.as_bytes().to_vec(); let mut prefix = user_id.as_bytes().to_vec();
prefix.push(0xff); prefix.push(0xff);
// All devices have metadata // All devices have metadata
@ -608,7 +608,7 @@ impl service::users::Data for KeyValueDatabase {
user_or_room_id: &str, user_or_room_id: &str,
from: u64, from: u64,
to: Option<u64>, to: Option<u64>,
) -> impl Iterator<Item = Result<Box<UserId>>> + 'a { ) -> Box<dyn Iterator<Item = Result<Box<UserId>>>> {
let mut prefix = user_or_room_id.as_bytes().to_vec(); let mut prefix = user_or_room_id.as_bytes().to_vec();
prefix.push(0xff); prefix.push(0xff);
@ -878,7 +878,7 @@ impl service::users::Data for KeyValueDatabase {
fn all_devices_metadata<'a>( fn all_devices_metadata<'a>(
&'a self, &'a self,
user_id: &UserId, user_id: &UserId,
) -> impl Iterator<Item = Result<Device>> + 'a { ) -> Box<dyn Iterator<Item = Result<Device>>> {
let mut key = user_id.as_bytes().to_vec(); let mut key = user_id.as_bytes().to_vec();
key.push(0xff); key.push(0xff);

@ -1,7 +1,7 @@
pub mod abstraction; pub mod abstraction;
pub mod key_value; pub mod key_value;
use crate::{utils, Config, Error, Result, service::{users, globals, uiaa, rooms, account_data, media, key_backups, transaction_ids, sending, admin::{self, create_admin_room}, appservice, pusher}}; use crate::{utils, Config, Error, Result, service::{users, globals, uiaa, rooms, account_data, media, key_backups, transaction_ids, sending, appservice, pusher}};
use abstraction::KeyValueDatabaseEngine; use abstraction::KeyValueDatabaseEngine;
use directories::ProjectDirs; use directories::ProjectDirs;
use futures_util::{stream::FuturesUnordered, StreamExt}; use futures_util::{stream::FuturesUnordered, StreamExt};
@ -253,7 +253,7 @@ impl KeyValueDatabase {
let (admin_sender, admin_receiver) = mpsc::unbounded_channel(); let (admin_sender, admin_receiver) = mpsc::unbounded_channel();
let (sending_sender, sending_receiver) = mpsc::unbounded_channel(); let (sending_sender, sending_receiver) = mpsc::unbounded_channel();
let db = Arc::new(TokioRwLock::from(Self { let db = Self {
_db: builder.clone(), _db: builder.clone(),
userid_password: builder.open_tree("userid_password")?, userid_password: builder.open_tree("userid_password")?,
userid_displayname: builder.open_tree("userid_displayname")?, userid_displayname: builder.open_tree("userid_displayname")?,
@ -345,10 +345,9 @@ impl KeyValueDatabase {
senderkey_pusher: builder.open_tree("senderkey_pusher")?, senderkey_pusher: builder.open_tree("senderkey_pusher")?,
global: builder.open_tree("global")?, global: builder.open_tree("global")?,
server_signingkeys: builder.open_tree("server_signingkeys")?, server_signingkeys: builder.open_tree("server_signingkeys")?,
})); };
// TODO: do this after constructing the db // TODO: do this after constructing the db
let guard = db.read().await;
// Matrix resource ownership is based on the server name; changing it // Matrix resource ownership is based on the server name; changing it
// requires recreating the database from scratch. // requires recreating the database from scratch.

@ -13,7 +13,7 @@ mod service;
pub mod api; pub mod api;
mod utils; mod utils;
use std::cell::Cell; use std::{cell::Cell, sync::RwLock};
pub use config::Config; pub use config::Config;
pub use utils::error::{Error, Result}; pub use utils::error::{Error, Result};
@ -22,13 +22,13 @@ pub use api::ruma_wrapper::{Ruma, RumaResponse};
use crate::database::KeyValueDatabase; use crate::database::KeyValueDatabase;
pub static SERVICES: Cell<Option<ServicesEnum>> = Cell::new(None); pub static SERVICES: RwLock<Option<ServicesEnum>> = RwLock::new(None);
enum ServicesEnum { enum ServicesEnum {
Rocksdb(Services<KeyValueDatabase>) Rocksdb(Services<KeyValueDatabase>)
} }
pub fn services() -> Services { pub fn services() -> Services<KeyValueDatabase> {
SERVICES.get().unwrap() SERVICES.read().unwrap()
} }
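Note: the `Cell` → `RwLock` switch above is the usual way to hold a lazily initialised global in a multi-threaded server: the writer sets it once at startup, readers take a shared lock. A self-contained sketch of the idea (the `Services` type below is a stand-in, not conduit's, and the accessor hands out data through a closure instead of returning the lock guard):

use std::sync::RwLock;

struct Services {
    server_name: String,
}

static SERVICES: RwLock<Option<Services>> = RwLock::new(None);

fn init_services(server_name: &str) {
    *SERVICES.write().unwrap() = Some(Services {
        server_name: server_name.to_owned(),
    });
}

// Borrow the registry for the duration of the closure so no guard escapes.
fn with_services<R>(f: impl FnOnce(&Services) -> R) -> R {
    let guard = SERVICES.read().unwrap();
    f(guard.as_ref().expect("SERVICES initialised before use"))
}

fn main() {
    init_services("example.org");
    assert_eq!(with_services(|s| s.server_name.clone()), "example.org");
}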

@ -1,145 +1,32 @@
use crate::{utils, Error, Result}; use std::collections::HashMap;
use ruma::{
api::client::error::ErrorKind,
events::{AnyEphemeralRoomEvent, RoomAccountDataEventType},
serde::Raw,
RoomId, UserId,
};
use serde::{de::DeserializeOwned, Serialize};
use std::{collections::HashMap, sync::Arc};
impl AccountData { use ruma::{UserId, RoomId, events::{RoomAccountDataEventType, AnyEphemeralRoomEvent}, serde::Raw};
use serde::{Serialize, de::DeserializeOwned};
use crate::Result;
pub trait Data {
/// Places one event in the account data of the user and removes the previous entry. /// Places one event in the account data of the user and removes the previous entry.
#[tracing::instrument(skip(self, room_id, user_id, event_type, data))] fn update<T: Serialize>(
pub fn update<T: Serialize>(
&self, &self,
room_id: Option<&RoomId>, room_id: Option<&RoomId>,
user_id: &UserId, user_id: &UserId,
event_type: RoomAccountDataEventType, event_type: RoomAccountDataEventType,
data: &T, data: &T,
) -> Result<()> { ) -> Result<()>;
let mut prefix = room_id
.map(|r| r.to_string())
.unwrap_or_default()
.as_bytes()
.to_vec();
prefix.push(0xff);
prefix.extend_from_slice(user_id.as_bytes());
prefix.push(0xff);
let mut roomuserdataid = prefix.clone();
roomuserdataid.extend_from_slice(&services().globals.next_count()?.to_be_bytes());
roomuserdataid.push(0xff);
roomuserdataid.extend_from_slice(event_type.to_string().as_bytes());
let mut key = prefix;
key.extend_from_slice(event_type.to_string().as_bytes());
let json = serde_json::to_value(data).expect("all types here can be serialized"); // TODO: maybe add error handling
if json.get("type").is_none() || json.get("content").is_none() {
return Err(Error::BadRequest(
ErrorKind::InvalidParam,
"Account data doesn't have all required fields.",
));
}
self.roomuserdataid_accountdata.insert(
&roomuserdataid,
&serde_json::to_vec(&json).expect("to_vec always works on json values"),
)?;
let prev = self.roomusertype_roomuserdataid.get(&key)?;
self.roomusertype_roomuserdataid
.insert(&key, &roomuserdataid)?;
// Remove old entry
if let Some(prev) = prev {
self.roomuserdataid_accountdata.remove(&prev)?;
}
Ok(())
}
/// Searches the account data for a specific kind. /// Searches the account data for a specific kind.
#[tracing::instrument(skip(self, room_id, user_id, kind))] fn get<T: DeserializeOwned>(
pub fn get<T: DeserializeOwned>(
&self, &self,
room_id: Option<&RoomId>, room_id: Option<&RoomId>,
user_id: &UserId, user_id: &UserId,
kind: RoomAccountDataEventType, kind: RoomAccountDataEventType,
) -> Result<Option<T>> { ) -> Result<Option<T>>;
let mut key = room_id
.map(|r| r.to_string())
.unwrap_or_default()
.as_bytes()
.to_vec();
key.push(0xff);
key.extend_from_slice(user_id.as_bytes());
key.push(0xff);
key.extend_from_slice(kind.to_string().as_bytes());
self.roomusertype_roomuserdataid
.get(&key)?
.and_then(|roomuserdataid| {
self.roomuserdataid_accountdata
.get(&roomuserdataid)
.transpose()
})
.transpose()?
.map(|data| {
serde_json::from_slice(&data)
.map_err(|_| Error::bad_database("could not deserialize"))
})
.transpose()
}
/// Returns all changes to the account data that happened after `since`. /// Returns all changes to the account data that happened after `since`.
#[tracing::instrument(skip(self, room_id, user_id, since))] fn changes_since(
pub fn changes_since(
&self, &self,
room_id: Option<&RoomId>, room_id: Option<&RoomId>,
user_id: &UserId, user_id: &UserId,
since: u64, since: u64,
) -> Result<HashMap<RoomAccountDataEventType, Raw<AnyEphemeralRoomEvent>>> { ) -> Result<HashMap<RoomAccountDataEventType, Raw<AnyEphemeralRoomEvent>>>;
let mut userdata = HashMap::new();
let mut prefix = room_id
.map(|r| r.to_string())
.unwrap_or_default()
.as_bytes()
.to_vec();
prefix.push(0xff);
prefix.extend_from_slice(user_id.as_bytes());
prefix.push(0xff);
// Skip the data that's exactly at since, because we sent that last time
let mut first_possible = prefix.clone();
first_possible.extend_from_slice(&(since + 1).to_be_bytes());
for r in self
.roomuserdataid_accountdata
.iter_from(&first_possible, false)
.take_while(move |(k, _)| k.starts_with(&prefix))
.map(|(k, v)| {
Ok::<_, Error>((
RoomAccountDataEventType::try_from(
utils::string_from_bytes(k.rsplit(|&b| b == 0xff).next().ok_or_else(
|| Error::bad_database("RoomUserData ID in db is invalid."),
)?)
.map_err(|_| Error::bad_database("RoomUserData ID in db is invalid."))?,
)
.map_err(|_| Error::bad_database("RoomUserData ID in db is invalid."))?,
serde_json::from_slice::<Raw<AnyEphemeralRoomEvent>>(&v).map_err(|_| {
Error::bad_database("Database contains invalid account data.")
})?,
))
})
{
let (kind, data) = r?;
userdata.insert(kind, data);
}
Ok(userdata)
}
} }
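Note: this hunk shows the shape of the whole refactor: raw tree access becomes a `Data` trait (this file), while validation and serialisation stay in a generic `Service` that wraps any `D: Data` (the following hunk). A toy version of the split, with the trait and types invented for illustration:

use std::collections::HashMap;

// Storage side: only knows how to put and get raw values.
trait Data {
    fn put(&mut self, key: String, value: String);
    fn get(&self, key: &str) -> Option<String>;
}

// Logic side: validation lives here, independent of the backend.
struct Service<D: Data> {
    db: D,
}

impl<D: Data> Service<D> {
    fn update(&mut self, key: &str, value: &str) -> Result<(), &'static str> {
        if value.is_empty() {
            return Err("account data must not be empty");
        }
        self.db.put(key.to_owned(), value.to_owned());
        Ok(())
    }
}

struct MemoryDb(HashMap<String, String>);

impl Data for MemoryDb {
    fn put(&mut self, key: String, value: String) {
        self.0.insert(key, value);
    }
    fn get(&self, key: &str) -> Option<String> {
        self.0.get(key).cloned()
    }
}

fn main() {
    let mut svc = Service { db: MemoryDb(HashMap::new()) };
    svc.update("m.fully_read", "{\"event_id\":\"$abc\"}").unwrap();
    assert_eq!(svc.db.get("m.fully_read").unwrap(), "{\"event_id\":\"$abc\"}");
}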

@ -1,14 +1,27 @@
use crate::{utils, Error, Result}; mod data;
pub use data::Data;
use ruma::{ use ruma::{
api::client::error::ErrorKind, api::client::{
error::ErrorKind,
},
events::{AnyEphemeralRoomEvent, RoomAccountDataEventType}, events::{AnyEphemeralRoomEvent, RoomAccountDataEventType},
serde::Raw, serde::Raw,
RoomId, UserId, signatures::CanonicalJsonValue,
DeviceId, RoomId, UserId,
}; };
use serde::{de::DeserializeOwned, Serialize}; use serde::{de::DeserializeOwned, Serialize};
use std::{collections::HashMap, sync::Arc}; use std::{collections::HashMap, sync::Arc};
use tracing::error;
use crate::{service::*, services, utils, Error, Result};
pub struct Service<D: Data> {
db: D,
}
impl AccountData { impl<D: Data> Service<D> {
/// Places one event in the account data of the user and removes the previous entry. /// Places one event in the account data of the user and removes the previous entry.
#[tracing::instrument(skip(self, room_id, user_id, event_type, data))] #[tracing::instrument(skip(self, room_id, user_id, event_type, data))]
pub fn update<T: Serialize>( pub fn update<T: Serialize>(

File diff suppressed because it is too large

@ -1,5 +1,6 @@
use crate::Result;
pub trait Data { pub trait Data {
type Iter: Iterator;
/// Registers an appservice and returns the ID to the caller /// Registers an appservice and returns the ID to the caller
fn register_appservice(&self, yaml: serde_yaml::Value) -> Result<String>; fn register_appservice(&self, yaml: serde_yaml::Value) -> Result<String>;
@ -12,7 +13,7 @@ pub trait Data {
fn get_registration(&self, id: &str) -> Result<Option<serde_yaml::Value>>; fn get_registration(&self, id: &str) -> Result<Option<serde_yaml::Value>>;
fn iter_ids(&self) -> Result<Self::Iter<Item = Result<String>>>; fn iter_ids(&self) -> Result<Box<dyn Iterator<Item = Result<String>>>>;
fn all(&self) -> Result<Vec<(String, serde_yaml::Value)>>; fn all(&self) -> Result<Vec<(String, serde_yaml::Value)>>;
} }

@ -1,13 +1,13 @@
mod data; mod data;
pub use data::Data; pub use data::Data;
use crate::service::*; use crate::Result;
pub struct Service<D: Data> { pub struct Service<D: Data> {
db: D, db: D,
} }
impl Service<_> { impl<D: Data> Service<D> {
/// Registers an appservice and returns the ID to the caller /// Registers an appservice and returns the ID to the caller
pub fn register_appservice(&self, yaml: serde_yaml::Value) -> Result<String> { pub fn register_appservice(&self, yaml: serde_yaml::Value) -> Result<String> {
self.db.register_appservice(yaml) self.db.register_appservice(yaml)

@ -0,0 +1,8 @@
use ruma::signatures::Ed25519KeyPair;
use crate::Result;
pub trait Data {
fn load_keypair(&self) -> Result<Ed25519KeyPair>;
fn remove_keypair(&self) -> Result<()>;
}

@ -3,7 +3,7 @@ pub use data::Data;
use crate::service::*; use crate::service::*;
use crate::{database::Config, server_server::FedDest, utils, Error, Result}; use crate::{Config, utils, Error, Result};
use ruma::{ use ruma::{
api::{ api::{
client::sync::sync_events, client::sync::sync_events,
@ -25,8 +25,6 @@ use tokio::sync::{broadcast, watch::Receiver, Mutex as TokioMutex, Semaphore};
use tracing::error; use tracing::error;
use trust_dns_resolver::TokioAsyncResolver; use trust_dns_resolver::TokioAsyncResolver;
use super::abstraction::Tree;
pub const COUNTER: &[u8] = b"c"; pub const COUNTER: &[u8] = b"c";
type WellKnownMap = HashMap<Box<ServerName>, (FedDest, String)>; type WellKnownMap = HashMap<Box<ServerName>, (FedDest, String)>;
@ -93,47 +91,18 @@ impl Default for RotationHandler {
} }
impl Service<_> { impl<D: Data> Service<D> {
pub fn load( pub fn load(
globals: Arc<dyn Tree>, db: D,
server_signingkeys: Arc<dyn Tree>,
config: Config, config: Config,
) -> Result<Self> { ) -> Result<Self> {
let keypair_bytes = globals.get(b"keypair")?.map_or_else( let keypair = db.load_keypair();
|| {
let keypair = utils::generate_keypair();
globals.insert(b"keypair", &keypair)?;
Ok::<_, Error>(keypair)
},
|s| Ok(s.to_vec()),
)?;
let mut parts = keypair_bytes.splitn(2, |&b| b == 0xff);
let keypair = utils::string_from_bytes(
// 1. version
parts
.next()
.expect("splitn always returns at least one element"),
)
.map_err(|_| Error::bad_database("Invalid version bytes in keypair."))
.and_then(|version| {
// 2. key
parts
.next()
.ok_or_else(|| Error::bad_database("Invalid keypair format in database."))
.map(|key| (version, key))
})
.and_then(|(version, key)| {
ruma::signatures::Ed25519KeyPair::from_der(key, version)
.map_err(|_| Error::bad_database("Private or public keys are invalid."))
});
let keypair = match keypair { let keypair = match keypair {
Ok(k) => k, Ok(k) => k,
Err(e) => { Err(e) => {
error!("Keypair invalid. Deleting..."); error!("Keypair invalid. Deleting...");
globals.remove(b"keypair")?; db.remove_keypair();
return Err(e); return Err(e);
} }
}; };
@ -167,7 +136,7 @@ impl Service<_> {
let unstable_room_versions = vec![RoomVersionId::V3, RoomVersionId::V4, RoomVersionId::V5]; let unstable_room_versions = vec![RoomVersionId::V3, RoomVersionId::V4, RoomVersionId::V5];
let mut s = Self { let mut s = Self {
globals, db,
config, config,
keypair: Arc::new(keypair), keypair: Arc::new(keypair),
dns_resolver: TokioAsyncResolver::tokio_from_system_conf().map_err(|e| { dns_resolver: TokioAsyncResolver::tokio_from_system_conf().map_err(|e| {
@ -181,7 +150,6 @@ impl Service<_> {
tls_name_override, tls_name_override,
federation_client, federation_client,
default_client, default_client,
server_signingkeys,
jwt_decoding_key, jwt_decoding_key,
stable_room_versions, stable_room_versions,
unstable_room_versions, unstable_room_versions,
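Note: the inline keypair parsing removed from `load()` above presumably moves behind the new `globals::Data::load_keypair`; assuming the stored format stays `<version> 0xff <DER key>`, the parsing looks roughly like this (helper name invented):

use ruma::signatures::Ed25519KeyPair;

// Stored layout assumed: version bytes, a 0xff separator, then the DER-encoded key.
fn parse_keypair(stored: &[u8]) -> Result<Ed25519KeyPair, String> {
    let mut parts = stored.splitn(2, |&b| b == 0xff);
    let version = std::str::from_utf8(parts.next().expect("splitn yields at least one part"))
        .map_err(|_| "invalid version bytes in keypair".to_owned())?;
    let key = parts
        .next()
        .ok_or_else(|| "invalid keypair format in database".to_owned())?;
    Ed25519KeyPair::from_der(key, version.to_owned())
        .map_err(|_| "private or public keys are invalid".to_owned())
}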

@ -1,371 +1,85 @@
use crate::{utils, Error, Result, services}; use std::collections::BTreeMap;
use ruma::{
api::client::{
backup::{BackupAlgorithm, KeyBackupData, RoomKeyBackup},
error::ErrorKind,
},
serde::Raw,
RoomId, UserId,
};
use std::{collections::BTreeMap, sync::Arc};
impl KeyBackups { use ruma::{api::client::backup::{BackupAlgorithm, RoomKeyBackup, KeyBackupData}, serde::Raw, UserId, RoomId};
pub fn create_backup( use crate::Result;
pub trait Data {
fn create_backup(
&self, &self,
user_id: &UserId, user_id: &UserId,
backup_metadata: &Raw<BackupAlgorithm>, backup_metadata: &Raw<BackupAlgorithm>,
) -> Result<String> { ) -> Result<String>;
let version = services().globals.next_count()?.to_string();
let mut key = user_id.as_bytes().to_vec();
key.push(0xff);
key.extend_from_slice(version.as_bytes());
self.backupid_algorithm.insert(
&key,
&serde_json::to_vec(backup_metadata).expect("BackupAlgorithm::to_vec always works"),
)?;
self.backupid_etag
.insert(&key, &services().globals.next_count()?.to_be_bytes())?;
Ok(version)
}
pub fn delete_backup(&self, user_id: &UserId, version: &str) -> Result<()> {
let mut key = user_id.as_bytes().to_vec();
key.push(0xff);
key.extend_from_slice(version.as_bytes());
self.backupid_algorithm.remove(&key)?;
self.backupid_etag.remove(&key)?;
key.push(0xff); fn delete_backup(&self, user_id: &UserId, version: &str) -> Result<()>;
for (outdated_key, _) in self.backupkeyid_backup.scan_prefix(key) { fn update_backup(
self.backupkeyid_backup.remove(&outdated_key)?;
}
Ok(())
}
pub fn update_backup(
&self, &self,
user_id: &UserId, user_id: &UserId,
version: &str, version: &str,
backup_metadata: &Raw<BackupAlgorithm>, backup_metadata: &Raw<BackupAlgorithm>,
) -> Result<String> { ) -> Result<String>;
let mut key = user_id.as_bytes().to_vec();
key.push(0xff);
key.extend_from_slice(version.as_bytes());
if self.backupid_algorithm.get(&key)?.is_none() {
return Err(Error::BadRequest(
ErrorKind::NotFound,
"Tried to update nonexistent backup.",
));
}
self.backupid_algorithm
.insert(&key, backup_metadata.json().get().as_bytes())?;
self.backupid_etag
.insert(&key, &services().globals.next_count()?.to_be_bytes())?;
Ok(version.to_owned())
}
pub fn get_latest_backup_version(&self, user_id: &UserId) -> Result<Option<String>> {
let mut prefix = user_id.as_bytes().to_vec();
prefix.push(0xff);
let mut last_possible_key = prefix.clone();
last_possible_key.extend_from_slice(&u64::MAX.to_be_bytes());
self.backupid_algorithm fn get_latest_backup_version(&self, user_id: &UserId) -> Result<Option<String>>;
.iter_from(&last_possible_key, true)
.take_while(move |(k, _)| k.starts_with(&prefix))
.next()
.map(|(key, _)| {
utils::string_from_bytes(
key.rsplit(|&b| b == 0xff)
.next()
.expect("rsplit always returns an element"),
)
.map_err(|_| Error::bad_database("backupid_algorithm key is invalid."))
})
.transpose()
}
pub fn get_latest_backup( fn get_latest_backup(
&self, &self,
user_id: &UserId, user_id: &UserId,
) -> Result<Option<(String, Raw<BackupAlgorithm>)>> { ) -> Result<Option<(String, Raw<BackupAlgorithm>)>>;
let mut prefix = user_id.as_bytes().to_vec();
prefix.push(0xff);
let mut last_possible_key = prefix.clone();
last_possible_key.extend_from_slice(&u64::MAX.to_be_bytes());
self.backupid_algorithm fn get_backup(
.iter_from(&last_possible_key, true)
.take_while(move |(k, _)| k.starts_with(&prefix))
.next()
.map(|(key, value)| {
let version = utils::string_from_bytes(
key.rsplit(|&b| b == 0xff)
.next()
.expect("rsplit always returns an element"),
)
.map_err(|_| Error::bad_database("backupid_algorithm key is invalid."))?;
Ok((
version,
serde_json::from_slice(&value).map_err(|_| {
Error::bad_database("Algorithm in backupid_algorithm is invalid.")
})?,
))
})
.transpose()
}
pub fn get_backup(
&self, &self,
user_id: &UserId, user_id: &UserId,
version: &str, version: &str,
) -> Result<Option<Raw<BackupAlgorithm>>> { ) -> Result<Option<Raw<BackupAlgorithm>>>;
let mut key = user_id.as_bytes().to_vec();
key.push(0xff);
key.extend_from_slice(version.as_bytes());
self.backupid_algorithm
.get(&key)?
.map_or(Ok(None), |bytes| {
serde_json::from_slice(&bytes)
.map_err(|_| Error::bad_database("Algorithm in backupid_algorithm is invalid."))
})
}
pub fn add_key( fn add_key(
&self, &self,
user_id: &UserId, user_id: &UserId,
version: &str, version: &str,
room_id: &RoomId, room_id: &RoomId,
session_id: &str, session_id: &str,
key_data: &Raw<KeyBackupData>, key_data: &Raw<KeyBackupData>,
) -> Result<()> { ) -> Result<()>;
let mut key = user_id.as_bytes().to_vec();
key.push(0xff);
key.extend_from_slice(version.as_bytes());
if self.backupid_algorithm.get(&key)?.is_none() {
return Err(Error::BadRequest(
ErrorKind::NotFound,
"Tried to update nonexistent backup.",
));
}
self.backupid_etag
.insert(&key, &services().globals.next_count()?.to_be_bytes())?;
key.push(0xff);
key.extend_from_slice(room_id.as_bytes());
key.push(0xff);
key.extend_from_slice(session_id.as_bytes());
self.backupkeyid_backup
.insert(&key, key_data.json().get().as_bytes())?;
Ok(())
}
pub fn count_keys(&self, user_id: &UserId, version: &str) -> Result<usize> {
let mut prefix = user_id.as_bytes().to_vec();
prefix.push(0xff);
prefix.extend_from_slice(version.as_bytes());
Ok(self.backupkeyid_backup.scan_prefix(prefix).count()) fn count_keys(&self, user_id: &UserId, version: &str) -> Result<usize>;
}
pub fn get_etag(&self, user_id: &UserId, version: &str) -> Result<String> { fn get_etag(&self, user_id: &UserId, version: &str) -> Result<String>;
let mut key = user_id.as_bytes().to_vec();
key.push(0xff);
key.extend_from_slice(version.as_bytes());
Ok(utils::u64_from_bytes( fn get_all(
&self
.backupid_etag
.get(&key)?
.ok_or_else(|| Error::bad_database("Backup has no etag."))?,
)
.map_err(|_| Error::bad_database("etag in backupid_etag invalid."))?
.to_string())
}
pub fn get_all(
&self, &self,
user_id: &UserId, user_id: &UserId,
version: &str, version: &str,
) -> Result<BTreeMap<Box<RoomId>, RoomKeyBackup>> { ) -> Result<BTreeMap<Box<RoomId>, RoomKeyBackup>>;
let mut prefix = user_id.as_bytes().to_vec();
prefix.push(0xff);
prefix.extend_from_slice(version.as_bytes());
prefix.push(0xff);
let mut rooms = BTreeMap::<Box<RoomId>, RoomKeyBackup>::new();
for result in self
.backupkeyid_backup
.scan_prefix(prefix)
.map(|(key, value)| {
let mut parts = key.rsplit(|&b| b == 0xff);
let session_id =
utils::string_from_bytes(parts.next().ok_or_else(|| {
Error::bad_database("backupkeyid_backup key is invalid.")
})?)
.map_err(|_| {
Error::bad_database("backupkeyid_backup session_id is invalid.")
})?;
let room_id = RoomId::parse(
utils::string_from_bytes(parts.next().ok_or_else(|| {
Error::bad_database("backupkeyid_backup key is invalid.")
})?)
.map_err(|_| Error::bad_database("backupkeyid_backup room_id is invalid."))?,
)
.map_err(|_| {
Error::bad_database("backupkeyid_backup room_id is invalid room id.")
})?;
let key_data = serde_json::from_slice(&value).map_err(|_| { fn get_room(
Error::bad_database("KeyBackupData in backupkeyid_backup is invalid.")
})?;
Ok::<_, Error>((room_id, session_id, key_data))
})
{
let (room_id, session_id, key_data) = result?;
rooms
.entry(room_id)
.or_insert_with(|| RoomKeyBackup {
sessions: BTreeMap::new(),
})
.sessions
.insert(session_id, key_data);
}
Ok(rooms)
}
pub fn get_room(
&self, &self,
user_id: &UserId, user_id: &UserId,
version: &str, version: &str,
room_id: &RoomId, room_id: &RoomId,
) -> Result<BTreeMap<String, Raw<KeyBackupData>>> { ) -> Result<BTreeMap<String, Raw<KeyBackupData>>>;
let mut prefix = user_id.as_bytes().to_vec();
prefix.push(0xff);
prefix.extend_from_slice(version.as_bytes());
prefix.push(0xff);
prefix.extend_from_slice(room_id.as_bytes());
prefix.push(0xff);
Ok(self
.backupkeyid_backup
.scan_prefix(prefix)
.map(|(key, value)| {
let mut parts = key.rsplit(|&b| b == 0xff);
let session_id =
utils::string_from_bytes(parts.next().ok_or_else(|| {
Error::bad_database("backupkeyid_backup key is invalid.")
})?)
.map_err(|_| {
Error::bad_database("backupkeyid_backup session_id is invalid.")
})?;
let key_data = serde_json::from_slice(&value).map_err(|_| { fn get_session(
Error::bad_database("KeyBackupData in backupkeyid_backup is invalid.")
})?;
Ok::<_, Error>((session_id, key_data))
})
.filter_map(|r| r.ok())
.collect())
}
pub fn get_session(
&self, &self,
user_id: &UserId, user_id: &UserId,
version: &str, version: &str,
room_id: &RoomId, room_id: &RoomId,
session_id: &str, session_id: &str,
) -> Result<Option<Raw<KeyBackupData>>> { ) -> Result<Option<Raw<KeyBackupData>>>;
let mut key = user_id.as_bytes().to_vec();
key.push(0xff);
key.extend_from_slice(version.as_bytes());
key.push(0xff);
key.extend_from_slice(room_id.as_bytes());
key.push(0xff);
key.extend_from_slice(session_id.as_bytes());
self.backupkeyid_backup
.get(&key)?
.map(|value| {
serde_json::from_slice(&value).map_err(|_| {
Error::bad_database("KeyBackupData in backupkeyid_backup is invalid.")
})
})
.transpose()
}
pub fn delete_all_keys(&self, user_id: &UserId, version: &str) -> Result<()> {
let mut key = user_id.as_bytes().to_vec();
key.push(0xff);
key.extend_from_slice(version.as_bytes());
key.push(0xff);
for (outdated_key, _) in self.backupkeyid_backup.scan_prefix(key) {
self.backupkeyid_backup.remove(&outdated_key)?;
}
Ok(()) fn delete_all_keys(&self, user_id: &UserId, version: &str) -> Result<()>;
}
pub fn delete_room_keys( fn delete_room_keys(
&self, &self,
user_id: &UserId, user_id: &UserId,
version: &str, version: &str,
room_id: &RoomId, room_id: &RoomId,
) -> Result<()> { ) -> Result<()>;
let mut key = user_id.as_bytes().to_vec();
key.push(0xff);
key.extend_from_slice(version.as_bytes());
key.push(0xff);
key.extend_from_slice(room_id.as_bytes());
key.push(0xff);
for (outdated_key, _) in self.backupkeyid_backup.scan_prefix(key) { fn delete_room_key(
self.backupkeyid_backup.remove(&outdated_key)?;
}
Ok(())
}
pub fn delete_room_key(
&self, &self,
user_id: &UserId, user_id: &UserId,
version: &str, version: &str,
room_id: &RoomId, room_id: &RoomId,
session_id: &str, session_id: &str,
) -> Result<()> { ) -> Result<()>;
let mut key = user_id.as_bytes().to_vec();
key.push(0xff);
key.extend_from_slice(version.as_bytes());
key.push(0xff);
key.extend_from_slice(room_id.as_bytes());
key.push(0xff);
key.extend_from_slice(session_id.as_bytes());
for (outdated_key, _) in self.backupkeyid_backup.scan_prefix(key) {
self.backupkeyid_backup.remove(&outdated_key)?;
}
Ok(())
}
} }
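Note: every method in this new `Data` trait works on one nested key layout: user id, backup version, then optionally room id and session id, each part separated by 0xff. The old inline code above repeats that key construction; an equivalent helper (illustrative only):

// key = user ++ 0xff ++ version [ ++ 0xff ++ room [ ++ 0xff ++ session ]]
fn backup_key(
    user_id: &str,
    version: &str,
    room_id: Option<&str>,
    session_id: Option<&str>,
) -> Vec<u8> {
    let mut key = user_id.as_bytes().to_vec();
    key.push(0xff);
    key.extend_from_slice(version.as_bytes());
    if let Some(room_id) = room_id {
        key.push(0xff);
        key.extend_from_slice(room_id.as_bytes());
        if let Some(session_id) = session_id {
            key.push(0xff);
            key.extend_from_slice(session_id.as_bytes());
        }
    }
    key
}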

@ -1,3 +1,6 @@
mod data;
pub use data::Data;
use crate::{utils, Error, Result, services}; use crate::{utils, Error, Result, services};
use ruma::{ use ruma::{
api::client::{ api::client::{
@ -9,7 +12,11 @@ use ruma::{
}; };
use std::{collections::BTreeMap, sync::Arc}; use std::{collections::BTreeMap, sync::Arc};
impl KeyBackups { pub struct Service<D: Data> {
db: D,
}
impl<D: Data> Service<D> {
pub fn create_backup( pub fn create_backup(
&self, &self,
user_id: &UserId, user_id: &UserId,

@ -0,0 +1,8 @@
use crate::Result;
pub trait Data {
fn create_file_metadata(&self, mxc: String, width: u32, height: u32, content_disposition: &Option<&str>, content_type: &Option<&str>) -> Result<Vec<u8>>;
/// Returns content_disposition, content_type and the metadata key.
fn search_file_metadata(&self, mxc: String, width: u32, height: u32) -> Result<(Option<String>, Option<String>, Vec<u8>)>;
}

@ -1,7 +1,8 @@
use image::{imageops::FilterType, GenericImageView}; mod data;
pub use data::Data;
use super::abstraction::Tree; use image::{imageops::FilterType, GenericImageView};
use crate::{utils, Error, Result}; use crate::{utils, Error, Result, services};
use std::{mem, sync::Arc}; use std::{mem, sync::Arc};
use tokio::{ use tokio::{
fs::File, fs::File,
@ -14,44 +15,25 @@ pub struct FileMeta {
pub file: Vec<u8>, pub file: Vec<u8>,
} }
pub struct Media { pub struct Service<D: Data> {
pub(super) mediaid_file: Arc<dyn Tree>, // MediaId = MXC + WidthHeight + ContentDisposition + ContentType db: D,
} }
impl Media { impl<D: Data> Service<D> {
/// Uploads a file. /// Uploads a file.
pub async fn create( pub async fn create(
&self, &self,
mxc: String, mxc: String,
globals: &Globals,
content_disposition: &Option<&str>, content_disposition: &Option<&str>,
content_type: &Option<&str>, content_type: &Option<&str>,
file: &[u8], file: &[u8],
) -> Result<()> { ) -> Result<()> {
let mut key = mxc.as_bytes().to_vec(); // Width, Height = 0 if it's not a thumbnail
key.push(0xff); let key = self.db.create_file_metadata(mxc, 0, 0, content_disposition, content_type);
key.extend_from_slice(&0_u32.to_be_bytes()); // Width = 0 if it's not a thumbnail
key.extend_from_slice(&0_u32.to_be_bytes()); // Height = 0 if it's not a thumbnail
key.push(0xff);
key.extend_from_slice(
content_disposition
.as_ref()
.map(|f| f.as_bytes())
.unwrap_or_default(),
);
key.push(0xff);
key.extend_from_slice(
content_type
.as_ref()
.map(|c| c.as_bytes())
.unwrap_or_default(),
);
let path = globals.get_media_file(&key); let path = services().globals.get_media_file(&key);
let mut f = File::create(path).await?; let mut f = File::create(path).await?;
f.write_all(file).await?; f.write_all(file).await?;
self.mediaid_file.insert(&key, &[])?;
Ok(()) Ok(())
} }
@ -60,80 +42,28 @@ impl Media {
pub async fn upload_thumbnail( pub async fn upload_thumbnail(
&self, &self,
mxc: String, mxc: String,
globals: &Globals,
content_disposition: &Option<String>, content_disposition: &Option<String>,
content_type: &Option<String>, content_type: &Option<String>,
width: u32, width: u32,
height: u32, height: u32,
file: &[u8], file: &[u8],
) -> Result<()> { ) -> Result<()> {
let mut key = mxc.as_bytes().to_vec(); let key = self.db.create_file_metadata(mxc, width, height, content_disposition, content_type);
key.push(0xff);
key.extend_from_slice(&width.to_be_bytes());
key.extend_from_slice(&height.to_be_bytes());
key.push(0xff);
key.extend_from_slice(
content_disposition
.as_ref()
.map(|f| f.as_bytes())
.unwrap_or_default(),
);
key.push(0xff);
key.extend_from_slice(
content_type
.as_ref()
.map(|c| c.as_bytes())
.unwrap_or_default(),
);
let path = globals.get_media_file(&key); let path = services().globals.get_media_file(&key);
let mut f = File::create(path).await?; let mut f = File::create(path).await?;
f.write_all(file).await?; f.write_all(file).await?;
self.mediaid_file.insert(&key, &[])?;
Ok(()) Ok(())
} }
/// Downloads a file. /// Downloads a file.
pub async fn get(&self, globals: &Globals, mxc: &str) -> Result<Option<FileMeta>> { pub async fn get(&self, mxc: String) -> Result<Option<FileMeta>> {
let mut prefix = mxc.as_bytes().to_vec(); if let Ok((content_disposition, content_type, key)) = self.db.search_file_metadata(mxc, 0, 0) {
prefix.push(0xff); let path = services().globals.get_media_file(&key);
prefix.extend_from_slice(&0_u32.to_be_bytes()); // Width = 0 if it's not a thumbnail
prefix.extend_from_slice(&0_u32.to_be_bytes()); // Height = 0 if it's not a thumbnail
prefix.push(0xff);
let first = self.mediaid_file.scan_prefix(prefix).next();
if let Some((key, _)) = first {
let path = globals.get_media_file(&key);
let mut file = Vec::new(); let mut file = Vec::new();
File::open(path).await?.read_to_end(&mut file).await?; File::open(path).await?.read_to_end(&mut file).await?;
let mut parts = key.rsplit(|&b| b == 0xff);
let content_type = parts
.next()
.map(|bytes| {
utils::string_from_bytes(bytes).map_err(|_| {
Error::bad_database("Content type in mediaid_file is invalid unicode.")
})
})
.transpose()?;
let content_disposition_bytes = parts
.next()
.ok_or_else(|| Error::bad_database("Media ID in db is invalid."))?;
let content_disposition = if content_disposition_bytes.is_empty() {
None
} else {
Some(
utils::string_from_bytes(content_disposition_bytes).map_err(|_| {
Error::bad_database(
"Content Disposition in mediaid_file is invalid unicode.",
)
})?,
)
};
Ok(Some(FileMeta { Ok(Some(FileMeta {
content_disposition, content_disposition,
@ -170,8 +100,7 @@ impl Media {
/// For width,height <= 96 the server uses another thumbnailing algorithm which crops the image afterwards. /// For width,height <= 96 the server uses another thumbnailing algorithm which crops the image afterwards.
pub async fn get_thumbnail( pub async fn get_thumbnail(
&self, &self,
mxc: &str, mxc: String,
globals: &Globals,
width: u32, width: u32,
height: u32, height: u32,
) -> Result<Option<FileMeta>> { ) -> Result<Option<FileMeta>> {
@ -179,89 +108,23 @@ impl Media {
.thumbnail_properties(width, height) .thumbnail_properties(width, height)
.unwrap_or((0, 0, false)); // 0, 0 because that's the original file .unwrap_or((0, 0, false)); // 0, 0 because that's the original file
let mut main_prefix = mxc.as_bytes().to_vec(); if let Ok((content_disposition, content_type, key)) = self.db.search_file_metadata(mxc, width, height) {
main_prefix.push(0xff);
let mut thumbnail_prefix = main_prefix.clone();
thumbnail_prefix.extend_from_slice(&width.to_be_bytes());
thumbnail_prefix.extend_from_slice(&height.to_be_bytes());
thumbnail_prefix.push(0xff);
let mut original_prefix = main_prefix;
original_prefix.extend_from_slice(&0_u32.to_be_bytes()); // Width = 0 if it's not a thumbnail
original_prefix.extend_from_slice(&0_u32.to_be_bytes()); // Height = 0 if it's not a thumbnail
original_prefix.push(0xff);
let first_thumbnailprefix = self.mediaid_file.scan_prefix(thumbnail_prefix).next();
let first_originalprefix = self.mediaid_file.scan_prefix(original_prefix).next();
if let Some((key, _)) = first_thumbnailprefix {
// Using saved thumbnail // Using saved thumbnail
let path = globals.get_media_file(&key); let path = services().globals.get_media_file(&key);
let mut file = Vec::new(); let mut file = Vec::new();
File::open(path).await?.read_to_end(&mut file).await?; File::open(path).await?.read_to_end(&mut file).await?;
let mut parts = key.rsplit(|&b| b == 0xff);
let content_type = parts
.next()
.map(|bytes| {
utils::string_from_bytes(bytes).map_err(|_| {
Error::bad_database("Content type in mediaid_file is invalid unicode.")
})
})
.transpose()?;
let content_disposition_bytes = parts
.next()
.ok_or_else(|| Error::bad_database("Media ID in db is invalid."))?;
let content_disposition = if content_disposition_bytes.is_empty() {
None
} else {
Some(
utils::string_from_bytes(content_disposition_bytes).map_err(|_| {
Error::bad_database("Content Disposition in db is invalid.")
})?,
)
};
Ok(Some(FileMeta { Ok(Some(FileMeta {
content_disposition, content_disposition,
content_type, content_type,
file: file.to_vec(), file: file.to_vec(),
})) }))
} else if let Some((key, _)) = first_originalprefix { } else if let Ok((content_disposition, content_type, key)) = self.db.search_file_metadata(mxc, 0, 0) {
// Generate a thumbnail // Generate a thumbnail
let path = globals.get_media_file(&key); let path = services().globals.get_media_file(&key);
let mut file = Vec::new(); let mut file = Vec::new();
File::open(path).await?.read_to_end(&mut file).await?; File::open(path).await?.read_to_end(&mut file).await?;
let mut parts = key.rsplit(|&b| b == 0xff);
let content_type = parts
.next()
.map(|bytes| {
utils::string_from_bytes(bytes).map_err(|_| {
Error::bad_database("Content type in mediaid_file is invalid unicode.")
})
})
.transpose()?;
let content_disposition_bytes = parts
.next()
.ok_or_else(|| Error::bad_database("Media ID in db is invalid."))?;
let content_disposition = if content_disposition_bytes.is_empty() {
None
} else {
Some(
utils::string_from_bytes(content_disposition_bytes).map_err(|_| {
Error::bad_database(
"Content Disposition in mediaid_file is invalid unicode.",
)
})?,
)
};
if let Ok(image) = image::load_from_memory(&file) { if let Ok(image) = image::load_from_memory(&file) {
let original_width = image.width(); let original_width = image.width();
let original_height = image.height(); let original_height = image.height();
@ -317,26 +180,12 @@ impl Media {
thumbnail.write_to(&mut thumbnail_bytes, image::ImageOutputFormat::Png)?; thumbnail.write_to(&mut thumbnail_bytes, image::ImageOutputFormat::Png)?;
// Save thumbnail in database so we don't have to generate it again next time // Save thumbnail in database so we don't have to generate it again next time
let mut thumbnail_key = key.to_vec(); let thumbnail_key = self.db.create_file_metadata(mxc, width, height, content_disposition, content_type)?;
let width_index = thumbnail_key
.iter()
.position(|&b| b == 0xff)
.ok_or_else(|| Error::bad_database("Media in db is invalid."))?
+ 1;
let mut widthheight = width.to_be_bytes().to_vec();
widthheight.extend_from_slice(&height.to_be_bytes());
thumbnail_key.splice( let path = services().globals.get_media_file(&thumbnail_key);
width_index..width_index + 2 * mem::size_of::<u32>(),
widthheight,
);
let path = globals.get_media_file(&thumbnail_key);
let mut f = File::create(path).await?; let mut f = File::create(path).await?;
f.write_all(&thumbnail_bytes).await?; f.write_all(&thumbnail_bytes).await?;
self.mediaid_file.insert(&thumbnail_key, &[])?;
Ok(Some(FileMeta { Ok(Some(FileMeta {
content_disposition, content_disposition,
content_type, content_type,
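Note: the new `create_file_metadata`/`search_file_metadata` pair replaces the inline key building removed above; assuming it keeps the documented MediaId layout (MXC, width and height as big-endian u32s, content disposition, content type, 0xff-separated), the key construction looks roughly like this (helper name invented):

// MediaId = MXC ++ 0xff ++ width_be ++ height_be ++ 0xff ++ content_disposition ++ 0xff ++ content_type
fn media_key(
    mxc: &str,
    width: u32,
    height: u32,
    content_disposition: Option<&str>,
    content_type: Option<&str>,
) -> Vec<u8> {
    let mut key = mxc.as_bytes().to_vec();
    key.push(0xff);
    key.extend_from_slice(&width.to_be_bytes()); // both 0 for the original file
    key.extend_from_slice(&height.to_be_bytes());
    key.push(0xff);
    key.extend_from_slice(content_disposition.unwrap_or_default().as_bytes());
    key.push(0xff);
    key.extend_from_slice(content_type.unwrap_or_default().as_bytes());
    key
}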

@ -1,28 +1,29 @@
pub mod pdu;
pub mod appservice;
pub mod pusher;
pub mod rooms;
pub mod transaction_ids;
pub mod uiaa;
pub mod users;
pub mod account_data; pub mod account_data;
pub mod admin; pub mod admin;
pub mod appservice;
pub mod globals; pub mod globals;
pub mod key_backups; pub mod key_backups;
pub mod media; pub mod media;
pub mod pdu;
pub mod pusher;
pub mod rooms;
pub mod sending; pub mod sending;
pub mod transaction_ids;
pub mod uiaa;
pub mod users;
pub struct Services<D> { pub struct Services<D: appservice::Data + pusher::Data + rooms::Data + transaction_ids::Data + uiaa::Data + users::Data + account_data::Data + globals::Data + key_backups::Data + media::Data>
{
pub appservice: appservice::Service<D>, pub appservice: appservice::Service<D>,
pub pusher: pusher::Service<D>, pub pusher: pusher::Service<D>,
pub rooms: rooms::Service<D>, pub rooms: rooms::Service<D>,
pub transaction_ids: transaction_ids::Service<D>, pub transaction_ids: transaction_ids::Service<D>,
pub uiaa: uiaa::Service<D>, pub uiaa: uiaa::Service<D>,
pub users: users::Service<D>, pub users: users::Service<D>,
//pub account_data: account_data::Service<D>, pub account_data: account_data::Service<D>,
//pub admin: admin::Service<D>, pub admin: admin::Service,
pub globals: globals::Service<D>, pub globals: globals::Service<D>,
//pub key_backups: key_backups::Service<D>, pub key_backups: key_backups::Service<D>,
//pub media: media::Service<D>, pub media: media::Service<D>,
//pub sending: sending::Service<D>, pub sending: sending::Service,
} }
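The new Services struct takes one database type bounded by every per-module Data trait, so a single generic parameter backs all services. A compile-checkable sketch of that shape, with placeholder traits and an assumed Arc-shared backend standing in for the real modules:

// Placeholder traits standing in for the per-module Data traits.
pub trait AppserviceData { fn registration(&self, id: &str) -> Option<String>; }
pub trait UsersData { fn exists(&self, user_id: &str) -> bool; }

pub struct Service<D> { db: std::sync::Arc<D> }

// One generic parameter, bounded by every Data trait the services need.
pub struct Services<D: AppserviceData + UsersData> {
    pub appservice: Service<D>,
    pub users: Service<D>,
}

// A single concrete backend satisfies all of the bounds at once.
pub struct MemoryDb;
impl AppserviceData for MemoryDb { fn registration(&self, _: &str) -> Option<String> { None } }
impl UsersData for MemoryDb { fn exists(&self, _: &str) -> bool { false } }

pub fn build() -> Services<MemoryDb> {
    let db = std::sync::Arc::new(MemoryDb);
    Services { appservice: Service { db: db.clone() }, users: Service { db } }
}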

@ -1,4 +1,4 @@
use crate::{Database, Error, services}; use crate::{Error, services};
use ruma::{ use ruma::{
events::{ events::{
room::member::RoomMemberEventContent, AnyEphemeralRoomEvent, AnyRoomEvent, AnyStateEvent, room::member::RoomMemberEventContent, AnyEphemeralRoomEvent, AnyRoomEvent, AnyStateEvent,
@ -357,7 +357,7 @@ pub(crate) fn gen_event_id_canonical_json(
Ok((event_id, value)) Ok((event_id, value))
} }
/// Build the start of a PDU in order to add it to the `Database`. /// Build the start of a PDU in order to add it to the Database.
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
pub struct PduBuilder { pub struct PduBuilder {
#[serde(rename = "type")] #[serde(rename = "type")]

@ -1,4 +1,5 @@
use ruma::{UserId, api::client::push::{set_pusher, get_pushers}}; use ruma::{UserId, api::client::push::{set_pusher, get_pushers}};
use crate::Result;
pub trait Data { pub trait Data {
fn set_pusher(&self, sender: &UserId, pusher: set_pusher::v3::Pusher) -> Result<()>; fn set_pusher(&self, sender: &UserId, pusher: set_pusher::v3::Pusher) -> Result<()>;
@ -10,5 +11,5 @@ pub trait Data {
fn get_pusher_senderkeys<'a>( fn get_pusher_senderkeys<'a>(
&'a self, &'a self,
sender: &UserId, sender: &UserId,
) -> impl Iterator<Item = Vec<u8>> + 'a; ) -> Box<dyn Iterator<Item = Vec<u8>>>;
} }
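The return type switches from `impl Iterator` to a boxed trait object because trait methods cannot return `impl Trait` on the toolchain this refactor targets. A small sketch of the boxed form; MemoryDb and the `+ 'a` lifetime (which lets the iterator borrow from the backend) are assumptions for illustration:

pub trait Data {
    fn get_pusher_senderkeys<'a>(&'a self, sender: &str) -> Box<dyn Iterator<Item = Vec<u8>> + 'a>;
}

pub struct MemoryDb { senderkeys: Vec<Vec<u8>> }

impl Data for MemoryDb {
    fn get_pusher_senderkeys<'a>(&'a self, _sender: &str) -> Box<dyn Iterator<Item = Vec<u8>> + 'a> {
        // Boxing erases the concrete iterator type, so the trait method can be
        // written without `impl Trait` in return position.
        Box::new(self.senderkeys.iter().cloned())
    }
}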

@ -1,7 +1,7 @@
mod data; mod data;
pub use data::Data; pub use data::Data;
use crate::{services, Error, PduEvent}; use crate::{services, Error, PduEvent, Result};
use bytes::BytesMut; use bytes::BytesMut;
use ruma::{ use ruma::{
api::{ api::{
@ -27,7 +27,7 @@ pub struct Service<D: Data> {
db: D, db: D,
} }
impl Service<_> { impl<D: Data> Service<D> {
pub fn set_pusher(&self, sender: &UserId, pusher: set_pusher::v3::Pusher) -> Result<()> { pub fn set_pusher(&self, sender: &UserId, pusher: set_pusher::v3::Pusher) -> Result<()> {
self.db.set_pusher(sender, pusher) self.db.set_pusher(sender, pusher)
} }
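`impl Service<_> {` is rejected by rustc because the `_` placeholder cannot appear in an impl header, which is one of the compile errors this commit clears. The corrected shape, shown here with a stand-in Data trait:

pub trait Data {
    fn get(&self, key: &str) -> Option<Vec<u8>>;
}

pub struct Service<D: Data> {
    db: D,
}

impl<D: Data> Service<D> {
    // The service simply forwards to whichever database backend instantiates D.
    pub fn get(&self, key: &str) -> Option<Vec<u8>> {
        self.db.get(key)
    }
}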

@ -1,24 +1,29 @@
use ruma::{RoomId, RoomAliasId}; use ruma::{RoomId, RoomAliasId};
use crate::Result;
pub trait Data { pub trait Data {
/// Creates or updates the alias to the given room id. /// Creates or updates the alias to the given room id.
fn set_alias( fn set_alias(
&self,
alias: &RoomAliasId, alias: &RoomAliasId,
room_id: &RoomId room_id: &RoomId
) -> Result<()>; ) -> Result<()>;
/// Forgets about an alias. Returns an error if the alias did not exist. /// Forgets about an alias. Returns an error if the alias did not exist.
fn remove_alias( fn remove_alias(
&self,
alias: &RoomAliasId, alias: &RoomAliasId,
) -> Result<()>; ) -> Result<()>;
/// Looks up the roomid for the given alias. /// Looks up the roomid for the given alias.
fn resolve_local_alias( fn resolve_local_alias(
&self,
alias: &RoomAliasId, alias: &RoomAliasId,
) -> Result<()>; ) -> Result<Option<Box<RoomId>>>;
/// Returns all local aliases that point to the given room /// Returns all local aliases that point to the given room
fn local_aliases_for_room( fn local_aliases_for_room(
alias: &RoomAliasId, &self,
) -> Result<()>; room_id: &RoomId,
) -> Result<Box<dyn Iterator<Item=String>>>;
} }
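`resolve_local_alias` now distinguishes a database error from a missing alias instead of returning `Result<()>`. A caller-side sketch of the three outcomes, with String standing in for ruma's RoomAliasId and RoomId types:

type Result<T> = std::result::Result<T, String>;

pub trait Data {
    fn resolve_local_alias(&self, alias: &str) -> Result<Option<String>>;
}

fn room_for_alias<D: Data>(db: &D, alias: &str) -> Result<String> {
    match db.resolve_local_alias(alias)? {
        // Lookup succeeded and the alias is known locally.
        Some(room_id) => Ok(room_id),
        // Lookup succeeded but no local room has this alias.
        None => Err(format!("{alias} is not a local alias")),
    }
}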

@ -1,12 +1,14 @@
mod data; mod data;
pub use data::Data; pub use data::Data;
use ruma::{RoomAliasId, RoomId}; use ruma::{RoomAliasId, RoomId};
use crate::Result;
pub struct Service<D: Data> { pub struct Service<D: Data> {
db: D, db: D,
} }
impl Service<_> { impl<D: Data> Service<D> {
#[tracing::instrument(skip(self))] #[tracing::instrument(skip(self))]
pub fn set_alias( pub fn set_alias(
&self, &self,
@ -26,7 +28,7 @@ impl Service<_> {
#[tracing::instrument(skip(self))] #[tracing::instrument(skip(self))]
pub fn resolve_local_alias(&self, alias: &RoomAliasId) -> Result<Option<Box<RoomId>>> { pub fn resolve_local_alias(&self, alias: &RoomAliasId) -> Result<Option<Box<RoomId>>> {
self.db.resolve_local_alias(alias: &RoomAliasId) self.db.resolve_local_alias(alias)
} }
#[tracing::instrument(skip(self))] #[tracing::instrument(skip(self))]

@ -1,6 +1,7 @@
use std::collections::HashSet; use std::collections::HashSet;
use crate::Result;
pub trait Data { pub trait Data {
fn get_cached_eventid_authchain<'a>() -> Result<HashSet<u64>>; fn get_cached_eventid_authchain(&self, shorteventid: u64) -> Result<HashSet<u64>>;
fn cache_eventid_authchain<'a>(shorteventid: u64, auth_chain: &HashSet<u64>) -> Result<HashSet<u64>>; fn cache_eventid_authchain(&self, shorteventid: u64, auth_chain: &HashSet<u64>) -> Result<()>;
} }

@ -3,13 +3,13 @@ use std::{sync::Arc, collections::HashSet};
pub use data::Data; pub use data::Data;
use crate::service::*; use crate::Result;
pub struct Service<D: Data> { pub struct Service<D: Data> {
db: D, db: D,
} }
impl Service<_> { impl<D: Data> Service<D> {
#[tracing::instrument(skip(self))] #[tracing::instrument(skip(self))]
pub fn get_cached_eventid_authchain<'a>( pub fn get_cached_eventid_authchain<'a>(
&'a self, &'a self,

@ -1,15 +1,16 @@
use ruma::RoomId; use ruma::RoomId;
use crate::Result;
pub trait Data { pub trait Data {
/// Adds the room to the public room directory /// Adds the room to the public room directory
fn set_public(room_id: &RoomId) -> Result<()>; fn set_public(&self, room_id: &RoomId) -> Result<()>;
/// Removes the room from the public room directory. /// Removes the room from the public room directory.
fn set_not_public(room_id: &RoomId) -> Result<()>; fn set_not_public(&self, room_id: &RoomId) -> Result<()>;
/// Returns true if the room is in the public room directory. /// Returns true if the room is in the public room directory.
fn is_public_room(room_id: &RoomId) -> Result<bool>; fn is_public_room(&self, room_id: &RoomId) -> Result<bool>;
/// Returns the unsorted public room directory /// Returns the unsorted public room directory
fn public_rooms() -> impl Iterator<Item = Result<Box<RoomId>>> + '_; fn public_rooms(&self) -> Box<dyn Iterator<Item = Result<Box<RoomId>>>>;
} }

@ -2,13 +2,13 @@ mod data;
pub use data::Data; pub use data::Data;
use ruma::RoomId; use ruma::RoomId;
use crate::service::*; use crate::Result;
pub struct Service<D: Data> { pub struct Service<D: Data> {
db: D, db: D,
} }
impl Service<_> { impl<D: Data> Service<D> {
#[tracing::instrument(skip(self))] #[tracing::instrument(skip(self))]
pub fn set_public(&self, room_id: &RoomId) -> Result<()> { pub fn set_public(&self, room_id: &RoomId) -> Result<()> {
self.db.set_public(room_id) self.db.set_public(room_id)

@ -2,7 +2,9 @@ pub mod presence;
pub mod read_receipt; pub mod read_receipt;
pub mod typing; pub mod typing;
pub struct Service<D> { pub trait Data: presence::Data + read_receipt::Data + typing::Data {}
pub struct Service<D: Data> {
presence: presence::Service<D>, presence: presence::Service<D>,
read_receipt: read_receipt::Service<D>, read_receipt: read_receipt::Service<D>,
typing: typing::Service<D>, typing: typing::Service<D>,
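`pub trait Data: presence::Data + read_receipt::Data + typing::Data {}` is a pure aggregation trait. One common way to satisfy it is a blanket impl, sketched below with placeholder modules; whether Conduit wires it up exactly this way is an assumption:

mod presence { pub trait Data { fn presence_ping(&self); } }
mod read_receipt { pub trait Data { fn receipt_ack(&self); } }
mod typing { pub trait Data { fn typing_started(&self); } }

// The aggregate trait adds no methods of its own...
pub trait Data: presence::Data + read_receipt::Data + typing::Data {}

// ...and a blanket impl grants it to every backend that implements the parts.
impl<T: presence::Data + read_receipt::Data + typing::Data> Data for T {}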

@ -1,6 +1,7 @@
use std::collections::HashMap; use std::collections::HashMap;
use ruma::{UserId, RoomId, events::presence::PresenceEvent}; use ruma::{UserId, RoomId, events::presence::PresenceEvent};
use crate::Result;
pub trait Data { pub trait Data {
/// Adds a presence event which will be saved until a new event replaces it. /// Adds a presence event which will be saved until a new event replaces it.

@ -4,13 +4,13 @@ use std::collections::HashMap;
pub use data::Data; pub use data::Data;
use ruma::{RoomId, UserId, events::presence::PresenceEvent}; use ruma::{RoomId, UserId, events::presence::PresenceEvent};
use crate::service::*; use crate::Result;
pub struct Service<D: Data> { pub struct Service<D: Data> {
db: D, db: D,
} }
impl Service<_> { impl<D: Data> Service<D> {
/// Adds a presence event which will be saved until a new event replaces it. /// Adds a presence event which will be saved until a new event replaces it.
/// ///
/// Note: This method takes a RoomId because presence updates are always bound to rooms to /// Note: This method takes a RoomId because presence updates are always bound to rooms to

@ -1,4 +1,5 @@
use ruma::{RoomId, events::receipt::ReceiptEvent, UserId, serde::Raw}; use ruma::{RoomId, events::receipt::ReceiptEvent, UserId, serde::Raw};
use crate::Result;
pub trait Data { pub trait Data {
/// Replaces the previous read receipt. /// Replaces the previous read receipt.
@ -14,13 +15,13 @@ pub trait Data {
&self, &self,
room_id: &RoomId, room_id: &RoomId,
since: u64, since: u64,
) -> impl Iterator< ) -> Box<dyn Iterator<
Item = Result<( Item = Result<(
Box<UserId>, Box<UserId>,
u64, u64,
Raw<ruma::events::AnySyncEphemeralRoomEvent>, Raw<ruma::events::AnySyncEphemeralRoomEvent>,
)>, )>,
>; >>;
/// Sets a private read marker at `count`. /// Sets a private read marker at `count`.
fn private_read_set(&self, room_id: &RoomId, user_id: &UserId, count: u64) -> Result<()>; fn private_read_set(&self, room_id: &RoomId, user_id: &UserId, count: u64) -> Result<()>;

@ -1,12 +1,14 @@
mod data; mod data;
pub use data::Data; pub use data::Data;
use ruma::{RoomId, UserId, events::receipt::ReceiptEvent, serde::Raw}; use ruma::{RoomId, UserId, events::receipt::ReceiptEvent, serde::Raw};
use crate::Result;
pub struct Service<D: Data> { pub struct Service<D: Data> {
db: D, db: D,
} }
impl Service<_> { impl<D: Data> Service<D> {
/// Replaces the previous read receipt. /// Replaces the previous read receipt.
pub fn readreceipt_update( pub fn readreceipt_update(
&self, &self,

@ -1,5 +1,5 @@
use std::collections::HashSet; use std::collections::HashSet;
use crate::Result;
use ruma::{UserId, RoomId}; use ruma::{UserId, RoomId};
pub trait Data { pub trait Data {
@ -14,5 +14,5 @@ pub trait Data {
fn last_typing_update(&self, room_id: &RoomId) -> Result<u64>; fn last_typing_update(&self, room_id: &RoomId) -> Result<u64>;
/// Returns all user ids currently typing. /// Returns all user ids currently typing.
fn typings_all(&self, room_id: &RoomId) -> Result<HashSet<UserId>>; fn typings_all(&self, room_id: &RoomId) -> Result<HashSet<Box<UserId>>>;
} }

@ -1,14 +1,14 @@
mod data; mod data;
pub use data::Data; pub use data::Data;
use ruma::{UserId, RoomId}; use ruma::{UserId, RoomId, events::SyncEphemeralRoomEvent};
use crate::service::*; use crate::Result;
pub struct Service<D: Data> { pub struct Service<D: Data> {
db: D, db: D,
} }
impl Service<_> { impl<D: Data> Service<D> {
/// Sets a user as typing until the timeout timestamp is reached or roomtyping_remove is /// Sets a user as typing until the timeout timestamp is reached or roomtyping_remove is
/// called. /// called.
pub fn typing_add(&self, user_id: &UserId, room_id: &RoomId, timeout: u64) -> Result<()> { pub fn typing_add(&self, user_id: &UserId, room_id: &RoomId, timeout: u64) -> Result<()> {

@ -250,7 +250,7 @@ impl Service {
// We go through all the signatures we see on the value and fetch the corresponding signing // We go through all the signatures we see on the value and fetch the corresponding signing
// keys // keys
self.fetch_required_signing_keys(&value, pub_key_map, db) self.fetch_required_signing_keys(&value, pub_key_map)
.await?; .await?;
// 2. Check signatures, otherwise drop // 2. Check signatures, otherwise drop
@ -1153,6 +1153,11 @@ impl Service {
let mut eventid_info = HashMap::new(); let mut eventid_info = HashMap::new();
let mut todo_outlier_stack: Vec<Arc<EventId>> = initial_set; let mut todo_outlier_stack: Vec<Arc<EventId>> = initial_set;
let first_pdu_in_room = services()
.rooms
.first_pdu_in_room(room_id)?
.ok_or_else(|| Error::bad_database("Failed to find first pdu in db."))?;
let mut amount = 0; let mut amount = 0;
while let Some(prev_event_id) = todo_outlier_stack.pop() { while let Some(prev_event_id) = todo_outlier_stack.pop() {

@ -1,4 +1,5 @@
use ruma::{RoomId, DeviceId, UserId}; use ruma::{RoomId, DeviceId, UserId};
use crate::Result;
pub trait Data { pub trait Data {
fn lazy_load_was_sent_before( fn lazy_load_was_sent_before(

@ -4,13 +4,13 @@ use std::collections::HashSet;
pub use data::Data; pub use data::Data;
use ruma::{DeviceId, UserId, RoomId}; use ruma::{DeviceId, UserId, RoomId};
use crate::service::*; use crate::Result;
pub struct Service<D: Data> { pub struct Service<D: Data> {
db: D, db: D,
} }
impl Service<_> { impl<D: Data> Service<D> {
#[tracing::instrument(skip(self))] #[tracing::instrument(skip(self))]
pub fn lazy_load_was_sent_before( pub fn lazy_load_was_sent_before(
&self, &self,

@ -1,4 +1,5 @@
use ruma::RoomId; use ruma::RoomId;
use crate::Result;
pub trait Data { pub trait Data {
fn exists(&self, room_id: &RoomId) -> Result<bool>; fn exists(&self, room_id: &RoomId) -> Result<bool>;

@ -2,13 +2,13 @@ mod data;
pub use data::Data; pub use data::Data;
use ruma::RoomId; use ruma::RoomId;
use crate::service::*; use crate::Result;
pub struct Service<D: Data> { pub struct Service<D: Data> {
db: D, db: D,
} }
impl Service<_> { impl<D: Data> Service<D> {
/// Checks if a room exists. /// Checks if a room exists.
#[tracing::instrument(skip(self))] #[tracing::instrument(skip(self))]
pub fn exists(&self, room_id: &RoomId) -> Result<bool> { pub fn exists(&self, room_id: &RoomId) -> Result<bool> {

@ -16,7 +16,9 @@ pub mod state_compressor;
pub mod timeline; pub mod timeline;
pub mod user; pub mod user;
pub struct Service<D> { pub trait Data: alias::Data + auth_chain::Data + directory::Data + edus::Data + lazy_loading::Data + metadata::Data + outlier::Data + pdu_metadata::Data + search::Data + short::Data + state::Data + state_accessor::Data + state_cache::Data + state_compressor::Data + timeline::Data + user::Data {}
pub struct Service<D: Data> {
pub alias: alias::Service<D>, pub alias: alias::Service<D>,
pub auth_chain: auth_chain::Service<D>, pub auth_chain: auth_chain::Service<D>,
pub directory: directory::Service<D>, pub directory: directory::Service<D>,

@ -1,6 +1,6 @@
use ruma::{EventId, signatures::CanonicalJsonObject}; use ruma::{signatures::CanonicalJsonObject, EventId};
use crate::PduEvent; use crate::{PduEvent, Result};
pub trait Data { pub trait Data {
fn get_outlier_pdu_json(&self, event_id: &EventId) -> Result<Option<CanonicalJsonObject>>; fn get_outlier_pdu_json(&self, event_id: &EventId) -> Result<Option<CanonicalJsonObject>>;

@ -2,13 +2,13 @@ mod data;
pub use data::Data; pub use data::Data;
use ruma::{EventId, signatures::CanonicalJsonObject}; use ruma::{EventId, signatures::CanonicalJsonObject};
use crate::{service::*, PduEvent}; use crate::{Result, PduEvent};
pub struct Service<D: Data> { pub struct Service<D: Data> {
db: D, db: D,
} }
impl Service<_> { impl<D: Data> Service<D> {
/// Returns the pdu from the outlier tree. /// Returns the pdu from the outlier tree.
pub fn get_outlier_pdu_json(&self, event_id: &EventId) -> Result<Option<CanonicalJsonObject>> { pub fn get_outlier_pdu_json(&self, event_id: &EventId) -> Result<Option<CanonicalJsonObject>> {
self.db.get_outlier_pdu_json(event_id) self.db.get_outlier_pdu_json(event_id)

@ -1,6 +1,7 @@
use std::sync::Arc; use std::sync::Arc;
use ruma::{EventId, RoomId}; use ruma::{EventId, RoomId};
use crate::Result;
pub trait Data { pub trait Data {
fn mark_as_referenced(&self, room_id: &RoomId, event_ids: &[Arc<EventId>]) -> Result<()>; fn mark_as_referenced(&self, room_id: &RoomId, event_ids: &[Arc<EventId>]) -> Result<()>;

@ -4,13 +4,13 @@ use std::sync::Arc;
pub use data::Data; pub use data::Data;
use ruma::{RoomId, EventId}; use ruma::{RoomId, EventId};
use crate::service::*; use crate::Result;
pub struct Service<D: Data> { pub struct Service<D: Data> {
db: D, db: D,
} }
impl Service<_> { impl<D: Data> Service<D> {
#[tracing::instrument(skip(self, room_id, event_ids))] #[tracing::instrument(skip(self, room_id, event_ids))]
pub fn mark_as_referenced(&self, room_id: &RoomId, event_ids: &[Arc<EventId>]) -> Result<()> { pub fn mark_as_referenced(&self, room_id: &RoomId, event_ids: &[Arc<EventId>]) -> Result<()> {
self.db.mark_as_referenced(room_id, event_ids) self.db.mark_as_referenced(room_id, event_ids)

@ -1,11 +1,12 @@
use ruma::RoomId; use ruma::RoomId;
use crate::Result;
pub trait Data { pub trait Data {
fn index_pdu<'a>(&self, room_id: &RoomId, pdu_id: u64, message_body: String) -> Result<()>; fn index_pdu<'a>(&self, shortroomid: u64, pdu_id: u64, message_body: String) -> Result<()>;
fn search_pdus<'a>( fn search_pdus<'a>(
&'a self, &'a self,
room_id: &RoomId, room_id: &RoomId,
search_string: &str, search_string: &str,
) -> Result<Option<(impl Iterator<Item = Vec<u8>> + 'a, Vec<String>)>>; ) -> Result<Option<(Box<dyn Iterator<Item = Vec<u8>>>, Vec<String>)>>;
} }

@ -1,12 +1,14 @@
mod data; mod data;
pub use data::Data; pub use data::Data;
use crate::Result;
use ruma::RoomId; use ruma::RoomId;
pub struct Service<D: Data> { pub struct Service<D: Data> {
db: D, db: D,
} }
impl Service<_> { impl<D: Data> Service<D> {
#[tracing::instrument(skip(self))] #[tracing::instrument(skip(self))]
pub fn search_pdus<'a>( pub fn search_pdus<'a>(
&'a self, &'a self,

@ -0,0 +1,2 @@
pub trait Data {
}

@ -2,19 +2,18 @@ mod data;
use std::sync::Arc; use std::sync::Arc;
pub use data::Data; pub use data::Data;
use ruma::{EventId, events::StateEventType}; use ruma::{EventId, events::StateEventType, RoomId};
use crate::{service::*, Error, utils}; use crate::{Result, Error, utils, services};
pub struct Service<D: Data> { pub struct Service<D: Data> {
db: D, db: D,
} }
impl Service<_> { impl<D: Data> Service<D> {
pub fn get_or_create_shorteventid( pub fn get_or_create_shorteventid(
&self, &self,
event_id: &EventId, event_id: &EventId,
globals: &super::globals::Globals,
) -> Result<u64> { ) -> Result<u64> {
if let Some(short) = self.eventidshort_cache.lock().unwrap().get_mut(event_id) { if let Some(short) = self.eventidshort_cache.lock().unwrap().get_mut(event_id) {
return Ok(*short); return Ok(*short);
@ -24,7 +23,7 @@ impl Service<_> {
Some(shorteventid) => utils::u64_from_bytes(&shorteventid) Some(shorteventid) => utils::u64_from_bytes(&shorteventid)
.map_err(|_| Error::bad_database("Invalid shorteventid in db."))?, .map_err(|_| Error::bad_database("Invalid shorteventid in db."))?,
None => { None => {
let shorteventid = globals.next_count()?; let shorteventid = services().globals.next_count()?;
self.eventid_shorteventid self.eventid_shorteventid
.insert(event_id.as_bytes(), &shorteventid.to_be_bytes())?; .insert(event_id.as_bytes(), &shorteventid.to_be_bytes())?;
self.shorteventid_eventid self.shorteventid_eventid
@ -82,7 +81,6 @@ impl Service<_> {
&self, &self,
event_type: &StateEventType, event_type: &StateEventType,
state_key: &str, state_key: &str,
globals: &super::globals::Globals,
) -> Result<u64> { ) -> Result<u64> {
if let Some(short) = self if let Some(short) = self
.statekeyshort_cache .statekeyshort_cache
@ -101,7 +99,7 @@ impl Service<_> {
Some(shortstatekey) => utils::u64_from_bytes(&shortstatekey) Some(shortstatekey) => utils::u64_from_bytes(&shortstatekey)
.map_err(|_| Error::bad_database("Invalid shortstatekey in db."))?, .map_err(|_| Error::bad_database("Invalid shortstatekey in db."))?,
None => { None => {
let shortstatekey = globals.next_count()?; let shortstatekey = services().globals.next_count()?;
self.statekey_shortstatekey self.statekey_shortstatekey
.insert(&statekey, &shortstatekey.to_be_bytes())?; .insert(&statekey, &shortstatekey.to_be_bytes())?;
self.shortstatekey_statekey self.shortstatekey_statekey
@ -190,7 +188,7 @@ impl Service<_> {
/// Returns (shortstatehash, already_existed) /// Returns (shortstatehash, already_existed)
fn get_or_create_shortstatehash( fn get_or_create_shortstatehash(
&self, &self,
state_hash: &StateHashId, state_hash: &[u8],
) -> Result<(u64, bool)> { ) -> Result<(u64, bool)> {
Ok(match self.statehash_shortstatehash.get(state_hash)? { Ok(match self.statehash_shortstatehash.get(state_hash)? {
Some(shortstatehash) => ( Some(shortstatehash) => (
@ -199,7 +197,7 @@ impl Service<_> {
true, true,
), ),
None => { None => {
let shortstatehash = globals.next_count()?; let shortstatehash = services().globals.next_count()?;
self.statehash_shortstatehash self.statehash_shortstatehash
.insert(state_hash, &shortstatehash.to_be_bytes())?; .insert(state_hash, &shortstatehash.to_be_bytes())?;
(shortstatehash, false) (shortstatehash, false)
@ -220,13 +218,12 @@ impl Service<_> {
pub fn get_or_create_shortroomid( pub fn get_or_create_shortroomid(
&self, &self,
room_id: &RoomId, room_id: &RoomId,
globals: &super::globals::Globals,
) -> Result<u64> { ) -> Result<u64> {
Ok(match self.roomid_shortroomid.get(room_id.as_bytes())? { Ok(match self.roomid_shortroomid.get(room_id.as_bytes())? {
Some(short) => utils::u64_from_bytes(&short) Some(short) => utils::u64_from_bytes(&short)
.map_err(|_| Error::bad_database("Invalid shortroomid in db."))?, .map_err(|_| Error::bad_database("Invalid shortroomid in db."))?,
None => { None => {
let short = globals.next_count()?; let short = services().globals.next_count()?;
self.roomid_shortroomid self.roomid_shortroomid
.insert(room_id.as_bytes(), &short.to_be_bytes())?; .insert(room_id.as_bytes(), &short.to_be_bytes())?;
short short
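Across these hunks, parameters such as `globals: &super::globals::Globals` disappear in favour of a process-wide `services()` accessor. A minimal sketch of how such an accessor can be wired up with std's OnceLock; the exact mechanism Conduit uses is an assumption, and the real next_count returns a Result rather than a bare u64:

use std::sync::atomic::{AtomicU64, Ordering};
use std::sync::OnceLock;

pub struct Globals { counter: AtomicU64 }

impl Globals {
    pub fn next_count(&self) -> u64 {
        self.counter.fetch_add(1, Ordering::SeqCst) + 1
    }
}

pub struct Services { pub globals: Globals }

static SERVICES: OnceLock<Services> = OnceLock::new();

pub fn init() {
    let _ = SERVICES.set(Services { globals: Globals { counter: AtomicU64::new(0) } });
}

// Callers reach shared state without threading `globals` through every signature.
pub fn services() -> &'static Services {
    SERVICES.get().expect("services() called before init()")
}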

@ -1,30 +1,28 @@
use std::sync::Arc; use std::sync::Arc;
use std::{sync::MutexGuard, collections::HashSet}; use std::{sync::MutexGuard, collections::HashSet};
use std::fmt::Debug; use std::fmt::Debug;
use crate::Result;
use ruma::{EventId, RoomId}; use ruma::{EventId, RoomId};
pub trait Data { pub trait Data {
/// Returns the last state hash key added to the db for the given room. /// Returns the last state hash key added to the db for the given room.
fn get_room_shortstatehash(room_id: &RoomId); fn get_room_shortstatehash(&self, room_id: &RoomId) -> Result<Option<u64>>;
/// Update the current state of the room. /// Update the current state of the room.
fn set_room_state(room_id: &RoomId, new_shortstatehash: u64, fn set_room_state(&self, room_id: &RoomId, new_shortstatehash: u64,
_mutex_lock: &MutexGuard<'_, StateLock>, // Take mutex guard to make sure users get the room state mutex _mutex_lock: &MutexGuard<'_, ()>, // Take mutex guard to make sure users get the room state mutex
); ) -> Result<()>;
/// Associates a state with an event. /// Associates a state with an event.
fn set_event_state(shorteventid: u64, shortstatehash: u64) -> Result<()>; fn set_event_state(&self, shorteventid: u64, shortstatehash: u64) -> Result<()>;
/// Returns all events we would send as the prev_events of the next event. /// Returns all events we would send as the prev_events of the next event.
fn get_forward_extremities(room_id: &RoomId) -> Result<HashSet<Arc<EventId>>>; fn get_forward_extremities(&self, room_id: &RoomId) -> Result<HashSet<Arc<EventId>>>;
/// Replace the forward extremities of the room. /// Replace the forward extremities of the room.
fn set_forward_extremities( fn set_forward_extremities<'a>(&self,
room_id: &RoomId, room_id: &RoomId,
event_ids: impl IntoIterator<Item = &'_ EventId> + Debug, event_ids: impl IntoIterator<Item = &'a EventId> + Debug,
_mutex_lock: &MutexGuard<'_, StateLock>, // Take mutex guard to make sure users get the room state mutex _mutex_lock: &MutexGuard<'_, ()>, // Take mutex guard to make sure users get the room state mutex
) -> Result<()>; ) -> Result<()>;
} }
pub struct StateLock;
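The Data methods now take `&MutexGuard<'_, ()>` as a proof-of-lock argument: they cannot be called unless the caller already holds the per-room state mutex. A sketch of the idiom, with an in-memory stand-in for the real key-value tree:

use std::collections::HashMap;
use std::sync::{Mutex, MutexGuard, RwLock};

pub struct RoomStateDb {
    // Stand-in for the real key-value tree.
    current_shortstatehash: RwLock<HashMap<String, u64>>,
}

impl RoomStateDb {
    // `_state_lock` is never read: requiring it forces every caller to hold the
    // per-room state mutex, so concurrent writers cannot interleave.
    pub fn set_room_state(&self, room_id: &str, shortstatehash: u64,
                          _state_lock: &MutexGuard<'_, ()>) {
        self.current_shortstatehash.write().unwrap().insert(room_id.to_owned(), shortstatehash);
    }
}

fn update_state(db: &RoomStateDb, room_state_mutex: &Mutex<()>) {
    let guard = room_state_mutex.lock().unwrap();
    db.set_room_state("!room:example.org", 42, &guard);
    // The lock is released when `guard` goes out of scope.
}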

@ -6,13 +6,15 @@ use ruma::{RoomId, events::{room::{member::MembershipState, create::RoomCreateEv
use serde::Deserialize; use serde::Deserialize;
use tracing::warn; use tracing::warn;
use crate::{service::*, SERVICE, PduEvent, Error, utils::calculate_hash}; use crate::{Result, services, PduEvent, Error, utils::calculate_hash};
use super::state_compressor::CompressedStateEvent;
pub struct Service<D: Data> { pub struct Service<D: Data> {
db: D, db: D,
} }
impl Service<_> { impl<D: Data> Service<D> {
/// Set the room to the given statehash and update caches. /// Set the room to the given statehash and update caches.
pub fn force_state( pub fn force_state(
&self, &self,
@ -23,11 +25,11 @@ impl Service<_> {
) -> Result<()> { ) -> Result<()> {
for event_id in statediffnew.into_iter().filter_map(|new| { for event_id in statediffnew.into_iter().filter_map(|new| {
SERVICE.rooms.state_compressor.parse_compressed_state_event(new) services().rooms.state_compressor.parse_compressed_state_event(new)
.ok() .ok()
.map(|(_, id)| id) .map(|(_, id)| id)
}) { }) {
let pdu = match SERVICE.rooms.timeline.get_pdu_json(&event_id)? { let pdu = match services().rooms.timeline.get_pdu_json(&event_id)? {
Some(pdu) => pdu, Some(pdu) => pdu,
None => continue, None => continue,
}; };
@ -63,10 +65,10 @@ impl Service<_> {
Err(_) => continue, Err(_) => continue,
}; };
SERVICE.room.state_cache.update_membership(room_id, &user_id, membership, &pdu.sender, None, false)?; services().room.state_cache.update_membership(room_id, &user_id, membership, &pdu.sender, None, false)?;
} }
SERVICE.room.state_cache.update_joined_count(room_id)?; services().room.state_cache.update_joined_count(room_id)?;
self.db.set_room_state(room_id, shortstatehash); self.db.set_room_state(room_id, shortstatehash);
@ -84,7 +86,7 @@ impl Service<_> {
room_id: &RoomId, room_id: &RoomId,
state_ids_compressed: HashSet<CompressedStateEvent>, state_ids_compressed: HashSet<CompressedStateEvent>,
) -> Result<()> { ) -> Result<()> {
let shorteventid = SERVICE.short.get_or_create_shorteventid(event_id)?; let shorteventid = services().short.get_or_create_shorteventid(event_id)?;
let previous_shortstatehash = self.db.get_room_shortstatehash(room_id)?; let previous_shortstatehash = self.db.get_room_shortstatehash(room_id)?;
@ -96,11 +98,11 @@ impl Service<_> {
); );
let (shortstatehash, already_existed) = let (shortstatehash, already_existed) =
SERVICE.short.get_or_create_shortstatehash(&state_hash)?; services().short.get_or_create_shortstatehash(&state_hash)?;
if !already_existed { if !already_existed {
let states_parents = previous_shortstatehash let states_parents = previous_shortstatehash
.map_or_else(|| Ok(Vec::new()), |p| SERVICE.room.state_compressor.load_shortstatehash_info(p))?; .map_or_else(|| Ok(Vec::new()), |p| services().room.state_compressor.load_shortstatehash_info(p))?;
let (statediffnew, statediffremoved) = let (statediffnew, statediffremoved) =
if let Some(parent_stateinfo) = states_parents.last() { if let Some(parent_stateinfo) = states_parents.last() {
@ -119,7 +121,7 @@ impl Service<_> {
} else { } else {
(state_ids_compressed, HashSet::new()) (state_ids_compressed, HashSet::new())
}; };
SERVICE.room.state_compressor.save_state_from_diff( services().room.state_compressor.save_state_from_diff(
shortstatehash, shortstatehash,
statediffnew, statediffnew,
statediffremoved, statediffremoved,
@ -176,7 +178,7 @@ impl Service<_> {
} }
// TODO: statehash with deterministic inputs // TODO: statehash with deterministic inputs
let shortstatehash = SERVICE.globals.next_count()?; let shortstatehash = services().globals.next_count()?;
let mut statediffnew = HashSet::new(); let mut statediffnew = HashSet::new();
statediffnew.insert(new); statediffnew.insert(new);
@ -273,4 +275,8 @@ impl Service<_> {
.ok_or_else(|| Error::BadDatabase("Invalid room version"))?; .ok_or_else(|| Error::BadDatabase("Invalid room version"))?;
Ok(room_version) Ok(room_version)
} }
pub fn get_room_shortstatehash(&self, room_id: &RoomId) -> Result<Option<u64>> {
self.db.get_room_shortstatehash(room_id)
}
} }

@ -1,9 +1,11 @@
use std::{sync::Arc, collections::HashMap}; use std::{sync::Arc, collections::{HashMap, BTreeMap}};
use async_trait::async_trait;
use ruma::{EventId, events::StateEventType, RoomId}; use ruma::{EventId, events::StateEventType, RoomId};
use crate::PduEvent; use crate::{Result, PduEvent};
#[async_trait]
pub trait Data { pub trait Data {
/// Builds a StateMap by iterating over all keys that start /// Builds a StateMap by iterating over all keys that start
/// with state_hash, this gives the full state for the given state_hash. /// with state_hash, this gives the full state for the given state_hash.

@ -4,13 +4,13 @@ use std::{sync::Arc, collections::{HashMap, BTreeMap}};
pub use data::Data; pub use data::Data;
use ruma::{events::StateEventType, RoomId, EventId}; use ruma::{events::StateEventType, RoomId, EventId};
use crate::{service::*, PduEvent}; use crate::{Result, PduEvent};
pub struct Service<D: Data> { pub struct Service<D: Data> {
db: D, db: D,
} }
impl Service<_> { impl<D: Data> Service<D> {
/// Builds a StateMap by iterating over all keys that start /// Builds a StateMap by iterating over all keys that start
/// with state_hash, this gives the full state for the given state_hash. /// with state_hash, this gives the full state for the given state_hash.
#[tracing::instrument(skip(self))] #[tracing::instrument(skip(self))]

@ -1,5 +1,9 @@
use ruma::{UserId, RoomId}; use ruma::{UserId, RoomId, serde::Raw, events::AnyStrippedStateEvent};
use crate::Result;
pub trait Data { pub trait Data {
fn mark_as_once_joined(user_id: &UserId, room_id: &RoomId) -> Result<()>; fn mark_as_once_joined(&self, user_id: &UserId, room_id: &RoomId) -> Result<()>;
fn mark_as_joined(&self, user_id: &UserId, room_id: &RoomId) -> Result<()>;
fn mark_as_invited(&self, user_id: &UserId, room_id: &RoomId, last_state: Option<Vec<Raw<AnyStrippedStateEvent>>>) -> Result<()>;
fn mark_as_left(&self, user_id: &UserId, room_id: &RoomId) -> Result<()>;
} }
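With these methods, `update_membership` stops manipulating the userroomid_*/roomuserid_* trees inline and delegates to mark_as_joined / mark_as_invited / mark_as_left, so the key bookkeeping lives behind the Data trait. An in-memory sketch of what such an implementation encapsulates (invite handling would be analogous); the types here are illustrative, not the real tree code:

use std::collections::HashSet;

#[derive(Default)]
pub struct MembershipDb {
    joined: HashSet<(String, String)>, // (user_id, room_id)
    invited: HashSet<(String, String)>,
    left: HashSet<(String, String)>,
}

impl MembershipDb {
    pub fn mark_as_joined(&mut self, user_id: &str, room_id: &str) {
        let key = (user_id.to_owned(), room_id.to_owned());
        // Joining clears any pending invite or previous leave in the same step,
        // which is the bookkeeping the service used to do inline.
        self.invited.remove(&key);
        self.left.remove(&key);
        self.joined.insert(key);
    }
    pub fn mark_as_left(&mut self, user_id: &str, room_id: &str) {
        let key = (user_id.to_owned(), room_id.to_owned());
        self.joined.remove(&key);
        self.invited.remove(&key);
        self.left.insert(key);
    }
}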

@ -5,13 +5,13 @@ pub use data::Data;
use regex::Regex; use regex::Regex;
use ruma::{RoomId, UserId, events::{room::{member::MembershipState, create::RoomCreateEventContent}, AnyStrippedStateEvent, StateEventType, tag::TagEvent, RoomAccountDataEventType, GlobalAccountDataEventType, direct::DirectEvent, ignored_user_list::IgnoredUserListEvent, AnySyncStateEvent}, serde::Raw, ServerName}; use ruma::{RoomId, UserId, events::{room::{member::MembershipState, create::RoomCreateEventContent}, AnyStrippedStateEvent, StateEventType, tag::TagEvent, RoomAccountDataEventType, GlobalAccountDataEventType, direct::DirectEvent, ignored_user_list::IgnoredUserListEvent, AnySyncStateEvent}, serde::Raw, ServerName};
use crate::{service::*, SERVICE, utils, Error}; use crate::{Result, services, utils, Error};
pub struct Service<D: Data> { pub struct Service<D: Data> {
db: D, db: D,
} }
impl Service<_> { impl<D: Data> Service<D> {
/// Update current membership data. /// Update current membership data.
#[tracing::instrument(skip(self, last_state))] #[tracing::instrument(skip(self, last_state))]
pub fn update_membership( pub fn update_membership(
@ -24,8 +24,8 @@ impl Service<_> {
update_joined_count: bool, update_joined_count: bool,
) -> Result<()> { ) -> Result<()> {
// Keep track what remote users exist by adding them as "deactivated" users // Keep track what remote users exist by adding them as "deactivated" users
if user_id.server_name() != SERVICE.globals.server_name() { if user_id.server_name() != services().globals.server_name() {
SERVICE.users.create(user_id, None)?; services().users.create(user_id, None)?;
// TODO: displayname, avatar url // TODO: displayname, avatar url
} }
@ -37,10 +37,6 @@ impl Service<_> {
serverroom_id.push(0xff); serverroom_id.push(0xff);
serverroom_id.extend_from_slice(room_id.as_bytes()); serverroom_id.extend_from_slice(room_id.as_bytes());
let mut roomuser_id = room_id.as_bytes().to_vec();
roomuser_id.push(0xff);
roomuser_id.extend_from_slice(user_id.as_bytes());
match &membership { match &membership {
MembershipState::Join => { MembershipState::Join => {
// Check if the user never joined this room // Check if the user never joined this room
@ -80,24 +76,23 @@ impl Service<_> {
// .ok(); // .ok();
// Copy old tags to new room // Copy old tags to new room
if let Some(tag_event) = db.account_data.get::<TagEvent>( if let Some(tag_event) = services().account_data.get::<TagEvent>(
Some(&predecessor.room_id), Some(&predecessor.room_id),
user_id, user_id,
RoomAccountDataEventType::Tag, RoomAccountDataEventType::Tag,
)? { )? {
SERVICE.account_data services().account_data
.update( .update(
Some(room_id), Some(room_id),
user_id, user_id,
RoomAccountDataEventType::Tag, RoomAccountDataEventType::Tag,
&tag_event, &tag_event,
&db.globals,
) )
.ok(); .ok();
}; };
// Copy direct chat flag // Copy direct chat flag
if let Some(mut direct_event) = SERVICE.account_data.get::<DirectEvent>( if let Some(mut direct_event) = services().account_data.get::<DirectEvent>(
None, None,
user_id, user_id,
GlobalAccountDataEventType::Direct.to_string().into(), GlobalAccountDataEventType::Direct.to_string().into(),
@ -112,7 +107,7 @@ impl Service<_> {
} }
if room_ids_updated { if room_ids_updated {
SERVICE.account_data.update( services().account_data.update(
None, None,
user_id, user_id,
GlobalAccountDataEventType::Direct.to_string().into(), GlobalAccountDataEventType::Direct.to_string().into(),
@ -123,16 +118,11 @@ impl Service<_> {
} }
} }
self.userroomid_joined.insert(&userroom_id, &[])?; self.db.mark_as_joined(user_id, room_id)?;
self.roomuserid_joined.insert(&roomuser_id, &[])?;
self.userroomid_invitestate.remove(&userroom_id)?;
self.roomuserid_invitecount.remove(&roomuser_id)?;
self.userroomid_leftstate.remove(&userroom_id)?;
self.roomuserid_leftcount.remove(&roomuser_id)?;
} }
MembershipState::Invite => { MembershipState::Invite => {
// We want to know if the sender is ignored by the receiver // We want to know if the sender is ignored by the receiver
let is_ignored = SERVICE let is_ignored = services()
.account_data .account_data
.get::<IgnoredUserListEvent>( .get::<IgnoredUserListEvent>(
None, // Ignored users are in global account data None, // Ignored users are in global account data
@ -153,41 +143,22 @@ impl Service<_> {
return Ok(()); return Ok(());
} }
self.userroomid_invitestate.insert( self.db.mark_as_invited(user_id, room_id, last_state)?;
&userroom_id,
&serde_json::to_vec(&last_state.unwrap_or_default())
.expect("state to bytes always works"),
)?;
self.roomuserid_invitecount
.insert(&roomuser_id, &db.globals.next_count()?.to_be_bytes())?;
self.userroomid_joined.remove(&userroom_id)?;
self.roomuserid_joined.remove(&roomuser_id)?;
self.userroomid_leftstate.remove(&userroom_id)?;
self.roomuserid_leftcount.remove(&roomuser_id)?;
} }
MembershipState::Leave | MembershipState::Ban => { MembershipState::Leave | MembershipState::Ban => {
self.userroomid_leftstate.insert( self.db.mark_as_left(user_id, room_id)?;
&userroom_id,
&serde_json::to_vec(&Vec::<Raw<AnySyncStateEvent>>::new()).unwrap(),
)?; // TODO
self.roomuserid_leftcount
.insert(&roomuser_id, &db.globals.next_count()?.to_be_bytes())?;
self.userroomid_joined.remove(&userroom_id)?;
self.roomuserid_joined.remove(&roomuser_id)?;
self.userroomid_invitestate.remove(&userroom_id)?;
self.roomuserid_invitecount.remove(&roomuser_id)?;
} }
_ => {} _ => {}
} }
if update_joined_count { if update_joined_count {
self.update_joined_count(room_id, db)?; self.update_joined_count(room_id)?;
} }
Ok(()) Ok(())
} }
#[tracing::instrument(skip(self, room_id, db))] #[tracing::instrument(skip(self, room_id))]
pub fn update_joined_count(&self, room_id: &RoomId) -> Result<()> { pub fn update_joined_count(&self, room_id: &RoomId) -> Result<()> {
let mut joinedcount = 0_u64; let mut joinedcount = 0_u64;
let mut invitedcount = 0_u64; let mut invitedcount = 0_u64;
@ -196,8 +167,8 @@ impl Service<_> {
for joined in self.room_members(room_id).filter_map(|r| r.ok()) { for joined in self.room_members(room_id).filter_map(|r| r.ok()) {
joined_servers.insert(joined.server_name().to_owned()); joined_servers.insert(joined.server_name().to_owned());
if joined.server_name() == db.globals.server_name() if joined.server_name() == services().globals.server_name()
&& !db.users.is_deactivated(&joined).unwrap_or(true) && !services().users.is_deactivated(&joined).unwrap_or(true)
{ {
real_users.insert(joined); real_users.insert(joined);
} }
@ -285,7 +256,7 @@ impl Service<_> {
.get("sender_localpart") .get("sender_localpart")
.and_then(|string| string.as_str()) .and_then(|string| string.as_str())
.and_then(|string| { .and_then(|string| {
UserId::parse_with_server_name(string, SERVICE.globals.server_name()).ok() UserId::parse_with_server_name(string, services().globals.server_name()).ok()
}); });
let in_room = bridge_user_id let in_room = bridge_user_id

@ -1,4 +1,5 @@
use crate::service::rooms::CompressedStateEvent; use super::CompressedStateEvent;
use crate::Result;
pub struct StateDiff { pub struct StateDiff {
parent: Option<u64>, parent: Option<u64>,
@ -7,6 +8,6 @@ pub struct StateDiff {
} }
pub trait Data { pub trait Data {
fn get_statediff(shortstatehash: u64) -> Result<StateDiff>; fn get_statediff(&self, shortstatehash: u64) -> Result<StateDiff>;
fn save_statediff(shortstatehash: u64, diff: StateDiff) -> Result<()>; fn save_statediff(&self, shortstatehash: u64, diff: StateDiff) -> Result<()>;
} }

@ -4,7 +4,7 @@ use std::{mem::size_of, sync::Arc, collections::HashSet};
pub use data::Data; pub use data::Data;
use ruma::{EventId, RoomId}; use ruma::{EventId, RoomId};
use crate::{service::*, utils}; use crate::{Result, utils, services};
use self::data::StateDiff; use self::data::StateDiff;
@ -12,7 +12,9 @@ pub struct Service<D: Data> {
db: D, db: D,
} }
impl Service<_> { pub type CompressedStateEvent = [u8; 2 * size_of::<u64>()];
impl<D: Data> Service<D> {
/// Returns a stack with info on shortstatehash, full state, added diff and removed diff for the selected shortstatehash and each parent layer. /// Returns a stack with info on shortstatehash, full state, added diff and removed diff for the selected shortstatehash and each parent layer.
#[tracing::instrument(skip(self))] #[tracing::instrument(skip(self))]
pub fn load_shortstatehash_info( pub fn load_shortstatehash_info(
@ -62,12 +64,11 @@ impl Service<_> {
&self, &self,
shortstatekey: u64, shortstatekey: u64,
event_id: &EventId, event_id: &EventId,
globals: &super::globals::Globals,
) -> Result<CompressedStateEvent> { ) -> Result<CompressedStateEvent> {
let mut v = shortstatekey.to_be_bytes().to_vec(); let mut v = shortstatekey.to_be_bytes().to_vec();
v.extend_from_slice( v.extend_from_slice(
&self &self
.get_or_create_shorteventid(event_id, globals)? .get_or_create_shorteventid(event_id)?
.to_be_bytes(), .to_be_bytes(),
); );
Ok(v.try_into().expect("we checked the size above")) Ok(v.try_into().expect("we checked the size above"))
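As the new type alias and compress_state_event show, a CompressedStateEvent is the 8-byte big-endian shortstatekey followed by the 8-byte shorteventid. A worked sketch of packing and unpacking the two halves (the real parse_compressed_state_event additionally resolves the shorteventid back to an EventId, as the earlier force_state hunk relies on):

use std::mem::size_of;

pub type CompressedStateEvent = [u8; 2 * size_of::<u64>()];

fn compress(shortstatekey: u64, shorteventid: u64) -> CompressedStateEvent {
    let mut v = shortstatekey.to_be_bytes().to_vec();
    v.extend_from_slice(&shorteventid.to_be_bytes());
    v.try_into().expect("exactly 16 bytes")
}

fn parse(compressed: &CompressedStateEvent) -> (u64, u64) {
    let shortstatekey = u64::from_be_bytes(compressed[..size_of::<u64>()].try_into().unwrap());
    let shorteventid = u64::from_be_bytes(compressed[size_of::<u64>()..].try_into().unwrap());
    (shortstatekey, shorteventid)
}

fn example() {
    let packed = compress(7, 42);
    assert_eq!(parse(&packed), (7, 42));
}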
@ -210,15 +211,16 @@ impl Service<_> {
/// Returns the new shortstatehash /// Returns the new shortstatehash
pub fn save_state( pub fn save_state(
&self,
room_id: &RoomId, room_id: &RoomId,
new_state_ids_compressed: HashSet<CompressedStateEvent>, new_state_ids_compressed: HashSet<CompressedStateEvent>,
) -> Result<(u64, ) -> Result<(u64,
HashSet<CompressedStateEvent>, // added HashSet<CompressedStateEvent>, // added
HashSet<CompressedStateEvent>)> // removed HashSet<CompressedStateEvent>)> // removed
{ {
let previous_shortstatehash = self.d.current_shortstatehash(room_id)?; let previous_shortstatehash = self.db.current_shortstatehash(room_id)?;
let state_hash = self.calculate_hash( let state_hash = utils::calculate_hash(
&new_state_ids_compressed &new_state_ids_compressed
.iter() .iter()
.map(|bytes| &bytes[..]) .map(|bytes| &bytes[..])
@ -226,7 +228,7 @@ impl Service<_> {
); );
let (new_shortstatehash, already_existed) = let (new_shortstatehash, already_existed) =
self.get_or_create_shortstatehash(&state_hash, &db.globals)?; services().rooms.short.get_or_create_shortstatehash(&state_hash)?;
if Some(new_shortstatehash) == previous_shortstatehash { if Some(new_shortstatehash) == previous_shortstatehash {
return Ok(()); return Ok(());

@ -2,7 +2,7 @@ use std::sync::Arc;
use ruma::{signatures::CanonicalJsonObject, EventId, UserId, RoomId}; use ruma::{signatures::CanonicalJsonObject, EventId, UserId, RoomId};
use crate::PduEvent; use crate::{Result, PduEvent};
pub trait Data { pub trait Data {
fn last_timeline_count(&self, sender_user: &UserId, room_id: &RoomId) -> Result<u64>; fn last_timeline_count(&self, sender_user: &UserId, room_id: &RoomId) -> Result<u64>;
@ -48,28 +48,26 @@ pub trait Data {
/// Returns an iterator over all events in a room that happened after the event with id `since` /// Returns an iterator over all events in a room that happened after the event with id `since`
/// in chronological order. /// in chronological order.
#[tracing::instrument(skip(self))]
fn pdus_since<'a>( fn pdus_since<'a>(
&'a self, &'a self,
user_id: &UserId, user_id: &UserId,
room_id: &RoomId, room_id: &RoomId,
since: u64, since: u64,
) -> Result<impl Iterator<Item = Result<(Vec<u8>, PduEvent)>> + 'a>; ) -> Result<Box<dyn Iterator<Item = Result<(Vec<u8>, PduEvent)>>>>;
/// Returns an iterator over all events and their tokens in a room that happened before the /// Returns an iterator over all events and their tokens in a room that happened before the
/// event with id `until` in reverse-chronological order. /// event with id `until` in reverse-chronological order.
#[tracing::instrument(skip(self))]
fn pdus_until<'a>( fn pdus_until<'a>(
&'a self, &'a self,
user_id: &UserId, user_id: &UserId,
room_id: &RoomId, room_id: &RoomId,
until: u64, until: u64,
) -> Result<impl Iterator<Item = Result<(Vec<u8>, PduEvent)>> + 'a>; ) -> Result<Box<dyn Iterator<Item = Result<(Vec<u8>, PduEvent)>>>>;
fn pdus_after<'a>( fn pdus_after<'a>(
&'a self, &'a self,
user_id: &UserId, user_id: &UserId,
room_id: &RoomId, room_id: &RoomId,
from: u64, from: u64,
) -> Result<impl Iterator<Item = Result<(Vec<u8>, PduEvent)>> + 'a>; ) -> Result<Box<dyn Iterator<Item = Result<(Vec<u8>, PduEvent)>>>>;
} }

@ -1,23 +1,29 @@
mod data; mod data;
use std::borrow::Cow;
use std::sync::Arc;
use std::{sync::MutexGuard, iter, collections::HashSet}; use std::{sync::MutexGuard, iter, collections::HashSet};
use std::fmt::Debug; use std::fmt::Debug;
pub use data::Data; pub use data::Data;
use regex::Regex; use regex::Regex;
use ruma::events::room::power_levels::RoomPowerLevelsEventContent;
use ruma::push::Ruleset;
use ruma::signatures::CanonicalJsonValue; use ruma::signatures::CanonicalJsonValue;
use ruma::state_res::RoomVersion;
use ruma::{EventId, signatures::CanonicalJsonObject, push::{Action, Tweak}, events::{push_rules::PushRulesEvent, GlobalAccountDataEventType, RoomEventType, room::{member::MembershipState, create::RoomCreateEventContent}, StateEventType}, UserId, RoomAliasId, RoomId, uint, state_res, api::client::error::ErrorKind, serde::to_canonical_value, ServerName}; use ruma::{EventId, signatures::CanonicalJsonObject, push::{Action, Tweak}, events::{push_rules::PushRulesEvent, GlobalAccountDataEventType, RoomEventType, room::{member::MembershipState, create::RoomCreateEventContent}, StateEventType}, UserId, RoomAliasId, RoomId, uint, state_res, api::client::error::ErrorKind, serde::to_canonical_value, ServerName};
use serde::Deserialize; use serde::Deserialize;
use serde_json::value::to_raw_value; use serde_json::value::to_raw_value;
use tracing::{warn, error}; use tracing::{warn, error};
use crate::SERVICE; use crate::{services, Result, service::pdu::{PduBuilder, EventHash}, Error, PduEvent, utils};
use crate::{service::{*, pdu::{PduBuilder, EventHash}}, Error, PduEvent, utils};
use super::state_compressor::CompressedStateEvent;
pub struct Service<D: Data> { pub struct Service<D: Data> {
db: D, db: D,
} }
impl Service<_> { impl<D: Data> Service<D> {
/* /*
/// Checks if a room exists. /// Checks if a room exists.
#[tracing::instrument(skip(self))] #[tracing::instrument(skip(self))]
@ -44,7 +50,7 @@ impl Service<_> {
#[tracing::instrument(skip(self))] #[tracing::instrument(skip(self))]
pub fn last_timeline_count(&self, sender_user: &UserId, room_id: &RoomId) -> Result<u64> { pub fn last_timeline_count(&self, sender_user: &UserId, room_id: &RoomId) -> Result<u64> {
self.db.last_timeline_count(sender_user: &UserId, room_id: &RoomId) self.db.last_timeline_count(sender_user, room_id)
} }
// TODO Is this the same as the function above? // TODO Is this the same as the function above?
@ -127,7 +133,7 @@ impl Service<_> {
/// Removes a pdu and creates a new one with the same id. /// Removes a pdu and creates a new one with the same id.
#[tracing::instrument(skip(self))] #[tracing::instrument(skip(self))]
fn replace_pdu(&self, pdu_id: &[u8], pdu: &PduEvent) -> Result<()> { fn replace_pdu(&self, pdu_id: &[u8], pdu: &PduEvent) -> Result<()> {
self.db.pdu_count(pdu_id, pdu: &PduEvent) self.db.replace_pdu(pdu_id, pdu)
} }
/// Creates a new persisted data unit and adds it to a room. /// Creates a new persisted data unit and adds it to a room.
@ -177,7 +183,7 @@ impl Service<_> {
self.replace_pdu_leaves(&pdu.room_id, leaves)?; self.replace_pdu_leaves(&pdu.room_id, leaves)?;
let mutex_insert = Arc::clone( let mutex_insert = Arc::clone(
db.globals services().globals
.roomid_mutex_insert .roomid_mutex_insert
.write() .write()
.unwrap() .unwrap()
@ -186,14 +192,14 @@ impl Service<_> {
); );
let insert_lock = mutex_insert.lock().unwrap(); let insert_lock = mutex_insert.lock().unwrap();
let count1 = db.globals.next_count()?; let count1 = services().globals.next_count()?;
// Mark as read first so the sending client doesn't get a notification even if appending // Mark as read first so the sending client doesn't get a notification even if appending
// fails // fails
self.edus self.edus
.private_read_set(&pdu.room_id, &pdu.sender, count1, &db.globals)?; .private_read_set(&pdu.room_id, &pdu.sender, count1)?;
self.reset_notification_counts(&pdu.sender, &pdu.room_id)?; self.reset_notification_counts(&pdu.sender, &pdu.room_id)?;
let count2 = db.globals.next_count()?; let count2 = services().globals.next_count()?;
let mut pdu_id = shortroomid.to_be_bytes().to_vec(); let mut pdu_id = shortroomid.to_be_bytes().to_vec();
pdu_id.extend_from_slice(&count2.to_be_bytes()); pdu_id.extend_from_slice(&count2.to_be_bytes());
@ -218,7 +224,7 @@ impl Service<_> {
drop(insert_lock); drop(insert_lock);
// See if the event matches any known pushers // See if the event matches any known pushers
let power_levels: RoomPowerLevelsEventContent = db let power_levels: RoomPowerLevelsEventContent = services()
.rooms .rooms
.room_state_get(&pdu.room_id, &StateEventType::RoomPowerLevels, "")? .room_state_get(&pdu.room_id, &StateEventType::RoomPowerLevels, "")?
.map(|ev| { .map(|ev| {
@ -233,13 +239,13 @@ impl Service<_> {
let mut notifies = Vec::new(); let mut notifies = Vec::new();
let mut highlights = Vec::new(); let mut highlights = Vec::new();
for user in self.get_our_real_users(&pdu.room_id, db)?.iter() { for user in self.get_our_real_users(&pdu.room_id)?.iter() {
// Don't notify the user of their own events // Don't notify the user of their own events
if user == &pdu.sender { if user == &pdu.sender {
continue; continue;
} }
let rules_for_user = db let rules_for_user = services()
.account_data .account_data
.get( .get(
None, None,
@ -252,7 +258,7 @@ impl Service<_> {
let mut highlight = false; let mut highlight = false;
let mut notify = false; let mut notify = false;
for action in pusher::get_actions( for action in services().pusher.get_actions(
user, user,
&rules_for_user, &rules_for_user,
&power_levels, &power_levels,
@ -282,8 +288,8 @@ impl Service<_> {
highlights.push(userroom_id); highlights.push(userroom_id);
} }
for senderkey in db.pusher.get_pusher_senderkeys(user) { for senderkey in services().pusher.get_pusher_senderkeys(user) {
db.sending.send_push_pdu(&*pdu_id, senderkey)?; services().sending.send_push_pdu(&*pdu_id, senderkey)?;
} }
} }
@ -328,7 +334,6 @@ impl Service<_> {
content.membership, content.membership,
&pdu.sender, &pdu.sender,
invite_state, invite_state,
db,
true, true,
)?; )?;
} }
@ -344,34 +349,34 @@ impl Service<_> {
.map_err(|_| Error::bad_database("Invalid content in pdu."))?; .map_err(|_| Error::bad_database("Invalid content in pdu."))?;
if let Some(body) = content.body { if let Some(body) = content.body {
DB.rooms.search.index_pdu(room_id, pdu_id, body)?; services().rooms.search.index_pdu(shortroomid, pdu_id, body)?;
let admin_room = self.id_from_alias( let admin_room = self.alias.resolve_local_alias(
<&RoomAliasId>::try_from( <&RoomAliasId>::try_from(
format!("#admins:{}", db.globals.server_name()).as_str(), format!("#admins:{}", services().globals.server_name()).as_str(),
) )
.expect("#admins:server_name is a valid room alias"), .expect("#admins:server_name is a valid room alias"),
)?; )?;
let server_user = format!("@conduit:{}", db.globals.server_name()); let server_user = format!("@conduit:{}", services().globals.server_name());
let to_conduit = body.starts_with(&format!("{}: ", server_user)); let to_conduit = body.starts_with(&format!("{}: ", server_user));
// This will evaluate to false if the emergency password is set up so that // This will evaluate to false if the emergency password is set up so that
// the administrator can execute commands as conduit // the administrator can execute commands as conduit
let from_conduit = let from_conduit =
pdu.sender == server_user && db.globals.emergency_password().is_none(); pdu.sender == server_user && services().globals.emergency_password().is_none();
if to_conduit && !from_conduit && admin_room.as_ref() == Some(&pdu.room_id) { if to_conduit && !from_conduit && admin_room.as_ref() == Some(&pdu.room_id) {
db.admin.process_message(body.to_string()); services().admin.process_message(body.to_string());
} }
} }
} }
_ => {} _ => {}
} }
for appservice in db.appservice.all()? { for appservice in services().appservice.all()? {
if self.appservice_in_room(room_id, &appservice, db)? { if self.appservice_in_room(&pdu.room_id, &appservice)? {
db.sending.send_pdu_appservice(&appservice.0, &pdu_id)?; services().sending.send_pdu_appservice(&appservice.0, &pdu_id)?;
continue; continue;
} }
@ -388,11 +393,11 @@ impl Service<_> {
.get("sender_localpart") .get("sender_localpart")
.and_then(|string| string.as_str()) .and_then(|string| string.as_str())
.and_then(|string| { .and_then(|string| {
UserId::parse_with_server_name(string, db.globals.server_name()).ok() UserId::parse_with_server_name(string, services().globals.server_name()).ok()
}) })
{ {
if state_key_uid == &appservice_uid { if state_key_uid == &appservice_uid {
db.sending.send_pdu_appservice(&appservice.0, &pdu_id)?; services().sending.send_pdu_appservice(&appservice.0, &pdu_id)?;
continue; continue;
} }
} }
@ -431,16 +436,16 @@ impl Service<_> {
.map_or(false, |state_key| users.is_match(state_key)) .map_or(false, |state_key| users.is_match(state_key))
}; };
let matching_aliases = |aliases: &Regex| { let matching_aliases = |aliases: &Regex| {
self.room_aliases(room_id) self.room_aliases(&pdu.room_id)
.filter_map(|r| r.ok()) .filter_map(|r| r.ok())
.any(|room_alias| aliases.is_match(room_alias.as_str())) .any(|room_alias| aliases.is_match(room_alias.as_str()))
}; };
if aliases.iter().any(matching_aliases) if aliases.iter().any(matching_aliases)
|| rooms.map_or(false, |rooms| rooms.contains(&room_id.as_str().into())) || rooms.map_or(false, |rooms| rooms.contains(&pdu.room_id.as_str().into()))
|| users.iter().any(matching_users) || users.iter().any(matching_users)
{ {
db.sending.send_pdu_appservice(&appservice.0, &pdu_id)?; services().sending.send_pdu_appservice(&appservice.0, &pdu_id)?;
} }
} }
} }
@ -464,14 +469,14 @@ impl Service<_> {
redacts, redacts,
} = pdu_builder; } = pdu_builder;
let prev_events: Vec<_> = SERVICE let prev_events: Vec<_> = services()
.rooms .rooms
.get_pdu_leaves(room_id)? .get_pdu_leaves(room_id)?
.into_iter() .into_iter()
.take(20) .take(20)
.collect(); .collect();
let create_event = SERVICE let create_event = services()
.rooms .rooms
.room_state_get(room_id, &StateEventType::RoomCreate, "")?; .room_state_get(room_id, &StateEventType::RoomCreate, "")?;
@ -488,7 +493,7 @@ impl Service<_> {
// If there was no create event yet, assume we are creating a room with the default // If there was no create event yet, assume we are creating a room with the default
// version right now // version right now
let room_version_id = create_event_content let room_version_id = create_event_content
.map_or(SERVICE.globals.default_room_version(), |create_event| { .map_or(services().globals.default_room_version(), |create_event| {
create_event.room_version create_event.room_version
}); });
let room_version = let room_version =
@ -500,7 +505,7 @@ impl Service<_> {
// Our depth is the maximum depth of prev_events + 1 // Our depth is the maximum depth of prev_events + 1
let depth = prev_events let depth = prev_events
.iter() .iter()
.filter_map(|event_id| Some(db.rooms.get_pdu(event_id).ok()??.depth)) .filter_map(|event_id| Some(services().rooms.get_pdu(event_id).ok()??.depth))
.max() .max()
.unwrap_or_else(|| uint!(0)) .unwrap_or_else(|| uint!(0))
+ uint!(1); + uint!(1);
@ -525,7 +530,7 @@ impl Service<_> {
let pdu = PduEvent { let pdu = PduEvent {
event_id: ruma::event_id!("$thiswillbefilledinlater").into(), event_id: ruma::event_id!("$thiswillbefilledinlater").into(),
room_id: room_id.to_owned(), room_id: room_id.to_owned(),
sender: sender_user.to_owned(), sender: sender.to_owned(),
origin_server_ts: utils::millis_since_unix_epoch() origin_server_ts: utils::millis_since_unix_epoch()
.try_into() .try_into()
.expect("time is valid"), .expect("time is valid"),
@ -577,13 +582,13 @@ impl Service<_> {
// Add origin because synapse likes that (and it's required in the spec) // Add origin because synapse likes that (and it's required in the spec)
pdu_json.insert( pdu_json.insert(
"origin".to_owned(), "origin".to_owned(),
to_canonical_value(db.globals.server_name()) to_canonical_value(services().globals.server_name())
.expect("server name is a valid CanonicalJsonValue"), .expect("server name is a valid CanonicalJsonValue"),
); );
match ruma::signatures::hash_and_sign_event( match ruma::signatures::hash_and_sign_event(
SERVICE.globals.server_name().as_str(), services().globals.server_name().as_str(),
SERVICE.globals.keypair(), services().globals.keypair(),
&mut pdu_json, &mut pdu_json,
&room_version_id, &room_version_id,
) { ) {
@ -616,22 +621,20 @@ impl Service<_> {
); );
// Generate short event id // Generate short event id
let _shorteventid = self.get_or_create_shorteventid(&pdu.event_id, &db.globals)?; let _shorteventid = self.get_or_create_shorteventid(&pdu.event_id)?;
} }
/// Creates a new persisted data unit and adds it to a room. This function takes a /// Creates a new persisted data unit and adds it to a room. This function takes a
/// roomid_mutex_state, meaning that only this function is able to mutate the room state. /// roomid_mutex_state, meaning that only this function is able to mutate the room state.
#[tracing::instrument(skip(self, _mutex_lock))] #[tracing::instrument(skip(self, state_lock))]
pub fn build_and_append_pdu( pub fn build_and_append_pdu(
&self, &self,
pdu_builder: PduBuilder, pdu_builder: PduBuilder,
sender: &UserId, sender: &UserId,
room_id: &RoomId, room_id: &RoomId,
_mutex_lock: &MutexGuard<'_, ()>, // Take mutex guard to make sure users get the room state mutex state_lock: &MutexGuard<'_, ()>, // Take mutex guard to make sure users get the room state mutex
) -> Result<Arc<EventId>> { ) -> Result<Arc<EventId>> {
let (pdu, pdu_json) = self.create_hash_and_sign_event(pdu_builder, sender, room_id, &state_lock);
let (pdu, pdu_json) = self.create_hash_and_sign_event()?;
// We append to state before appending the pdu, so we don't have a moment in time with the // We append to state before appending the pdu, so we don't have a moment in time with the
// pdu without its state. This is okay because append_pdu can't fail. // pdu without its state. This is okay because append_pdu can't fail.
@ -664,9 +667,9 @@ impl Service<_> {
} }
// Remove our server from the server list since it will be added to it by room_servers() and/or the if statement above // Remove our server from the server list since it will be added to it by room_servers() and/or the if statement above
servers.remove(SERVICE.globals.server_name()); servers.remove(services().globals.server_name());
SERVICE.sending.send_pdu(servers.into_iter(), &pdu_id)?; services().sending.send_pdu(servers.into_iter(), &pdu_id)?;
Ok(pdu.event_id) Ok(pdu.event_id)
} }
@ -684,20 +687,20 @@ impl Service<_> {
) -> Result<Option<Vec<u8>>> { ) -> Result<Option<Vec<u8>>> {
// We append to state before appending the pdu, so we don't have a moment in time with the // We append to state before appending the pdu, so we don't have a moment in time with the
// pdu without its state. This is okay because append_pdu can't fail. // pdu without its state. This is okay because append_pdu can't fail.
SERVICE.rooms.set_event_state( services().rooms.set_event_state(
&pdu.event_id, &pdu.event_id,
&pdu.room_id, &pdu.room_id,
state_ids_compressed, state_ids_compressed,
)?; )?;
if soft_fail { if soft_fail {
SERVICE.rooms services().rooms
.mark_as_referenced(&pdu.room_id, &pdu.prev_events)?; .mark_as_referenced(&pdu.room_id, &pdu.prev_events)?;
SERVICE.rooms.replace_pdu_leaves(&pdu.room_id, new_room_leaves)?; services().rooms.replace_pdu_leaves(&pdu.room_id, new_room_leaves)?;
return Ok(None); return Ok(None);
} }
let pdu_id = SERVICE.rooms.append_pdu(pdu, pdu_json, new_room_leaves)?; let pdu_id = services().rooms.append_pdu(pdu, pdu_json, new_room_leaves)?;
Ok(Some(pdu_id)) Ok(Some(pdu_id))
} }

@ -1,3 +1,6 @@
use ruma::{UserId, RoomId};
use crate::Result;
pub trait Data { pub trait Data {
fn reset_notification_counts(&self, user_id: &UserId, room_id: &RoomId) -> Result<()>; fn reset_notification_counts(&self, user_id: &UserId, room_id: &RoomId) -> Result<()>;
@ -17,5 +20,5 @@ pub trait Data {
fn get_shared_rooms<'a>( fn get_shared_rooms<'a>(
&'a self, &'a self,
users: Vec<Box<UserId>>, users: Vec<Box<UserId>>,
) -> Result<impl Iterator<Item = Result<Box<RoomId>>> + 'a>; ) -> Result<Box<dyn Iterator<Item = Result<Box<RoomId>>>>>;
} }
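The get_shared_rooms signature switches from impl Iterator to a boxed iterator: return-position impl Trait was not usable in trait definitions on stable Rust at the time, so the concrete iterator is erased behind Box<dyn Iterator> instead. A self-contained sketch with stand-in types (the real trait uses ruma's UserId/RoomId and the crate's Result alias):

// Stand-in aliases so the sketch compiles on its own; not the project's real types.
type UserId = String;
type RoomId = String;
type Result<T> = std::result::Result<T, std::io::Error>;

trait Data {
    // A boxed trait object keeps the trait usable even though each backend's concrete
    // iterator type is different.
    fn get_shared_rooms(
        &self,
        users: Vec<UserId>,
    ) -> Result<Box<dyn Iterator<Item = Result<RoomId>>>>;
}

struct MemoryData {
    shared: Vec<RoomId>,
}

impl Data for MemoryData {
    fn get_shared_rooms(
        &self,
        _users: Vec<UserId>,
    ) -> Result<Box<dyn Iterator<Item = Result<RoomId>>>> {
        // Any concrete iterator works once it is boxed; here the rooms are simply cloned.
        let rooms: Vec<Result<RoomId>> = self.shared.iter().cloned().map(Ok).collect();
        Ok(Box::new(rooms.into_iter()))
    }
}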

@ -2,13 +2,13 @@ mod data;
pub use data::Data; pub use data::Data;
use ruma::{RoomId, UserId}; use ruma::{RoomId, UserId};
use crate::service::*; use crate::Result;
pub struct Service<D: Data> { pub struct Service<D: Data> {
db: D, db: D,
} }
impl Service<_> { impl<D: Data> Service<D> {
pub fn reset_notification_counts(&self, user_id: &UserId, room_id: &RoomId) -> Result<()> { pub fn reset_notification_counts(&self, user_id: &UserId, room_id: &RoomId) -> Result<()> {
self.db.reset_notification_counts(user_id, room_id) self.db.reset_notification_counts(user_id, room_id)
} }
@ -27,7 +27,7 @@ impl Service<_> {
token: u64, token: u64,
shortstatehash: u64, shortstatehash: u64,
) -> Result<()> { ) -> Result<()> {
self.db.associate_token_shortstatehash(user_id, room_id) self.db.associate_token_shortstatehash(room_id, token, shortstatehash)
} }
pub fn get_token_shortstatehash(&self, room_id: &RoomId, token: u64) -> Result<Option<u64>> { pub fn get_token_shortstatehash(&self, room_id: &RoomId, token: u64) -> Result<Option<u64>> {
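This hunk shows the shape the refactored modules are moving toward: a Data trait describing the storage operations, and a generic Service<D: Data> that owns a backend and forwards to it. A minimal sketch of the pattern, with simplified stand-in types:

type UserId = str;
type RoomId = str;
type Result<T> = std::result::Result<T, std::io::Error>;

pub trait Data {
    fn reset_notification_counts(&self, user_id: &UserId, room_id: &RoomId) -> Result<()>;
}

pub struct Service<D: Data> {
    db: D,
}

impl<D: Data> Service<D> {
    // The service layer stays storage-agnostic; it only delegates to whichever backend
    // implements Data (a key-value database, an in-memory mock, ...).
    pub fn reset_notification_counts(&self, user_id: &UserId, room_id: &RoomId) -> Result<()> {
        self.db.reset_notification_counts(user_id, room_id)
    }
}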

@ -6,7 +6,7 @@ use std::{
}; };
use crate::{ use crate::{
appservice_server, database::pusher, server_server, utils, Database, Error, PduEvent, Result, utils, Error, PduEvent, Result, services, api::{server_server, appservice_server},
}; };
use federation::transactions::send_transaction_message; use federation::transactions::send_transaction_message;
use futures_util::{stream::FuturesUnordered, StreamExt}; use futures_util::{stream::FuturesUnordered, StreamExt};
@ -34,8 +34,6 @@ use tokio::{
}; };
use tracing::{error, warn}; use tracing::{error, warn};
use super::abstraction::Tree;
#[derive(Clone, Debug, PartialEq, Eq, Hash)] #[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub enum OutgoingKind { pub enum OutgoingKind {
Appservice(String), Appservice(String),
@ -77,11 +75,8 @@ pub enum SendingEventType {
Edu(Vec<u8>), Edu(Vec<u8>),
} }
pub struct Sending { pub struct Service {
/// The state for a given state hash. /// The state for a given state hash.
pub(super) servername_educount: Arc<dyn Tree>, // EduCount: Count of last EDU sync
pub(super) servernameevent_data: Arc<dyn Tree>, // ServernameEvent = (+ / $)SenderKey / ServerName / UserId + PduId / Id (for edus), Data = EDU content
pub(super) servercurrentevent_data: Arc<dyn Tree>, // ServerCurrentEvents = (+ / $)ServerName / UserId + PduId / Id (for edus), Data = EDU content
pub(super) maximum_requests: Arc<Semaphore>, pub(super) maximum_requests: Arc<Semaphore>,
pub sender: mpsc::UnboundedSender<(Vec<u8>, Vec<u8>)>, pub sender: mpsc::UnboundedSender<(Vec<u8>, Vec<u8>)>,
} }
@ -92,10 +87,9 @@ enum TransactionStatus {
Retrying(u32), // number of times failed Retrying(u32), // number of times failed
} }
impl Sending { impl Service {
pub fn start_handler( pub fn start_handler(
&self, &self,
db: Arc<RwLock<Database>>,
mut receiver: mpsc::UnboundedReceiver<(Vec<u8>, Vec<u8>)>, mut receiver: mpsc::UnboundedReceiver<(Vec<u8>, Vec<u8>)>,
) { ) {
tokio::spawn(async move { tokio::spawn(async move {
@ -106,9 +100,7 @@ impl Sending {
// Retry requests we could not finish yet // Retry requests we could not finish yet
let mut initial_transactions = HashMap::<OutgoingKind, Vec<SendingEventType>>::new(); let mut initial_transactions = HashMap::<OutgoingKind, Vec<SendingEventType>>::new();
let guard = db.read().await; for (key, outgoing_kind, event) in services()
for (key, outgoing_kind, event) in guard
.sending .sending
.servercurrentevent_data .servercurrentevent_data
.iter() .iter()
@ -127,22 +119,19 @@ impl Sending {
"Dropping some current events: {:?} {:?} {:?}", "Dropping some current events: {:?} {:?} {:?}",
key, outgoing_kind, event key, outgoing_kind, event
); );
guard.sending.servercurrentevent_data.remove(&key).unwrap(); services().sending.servercurrentevent_data.remove(&key).unwrap();
continue; continue;
} }
entry.push(event); entry.push(event);
} }
drop(guard);
for (outgoing_kind, events) in initial_transactions { for (outgoing_kind, events) in initial_transactions {
current_transaction_status current_transaction_status
.insert(outgoing_kind.get_prefix(), TransactionStatus::Running); .insert(outgoing_kind.get_prefix(), TransactionStatus::Running);
futures.push(Self::handle_events( futures.push(Self::handle_events(
outgoing_kind.clone(), outgoing_kind.clone(),
events, events,
Arc::clone(&db),
)); ));
} }
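Throughout this file the explicit db: Arc<RwLock<Database>> handle and its read().await guards are replaced by calls to a global services() accessor. The accessor's definition is not part of this diff; as a rough, assumption-laden sketch, it could be backed by a write-once static along these lines:

// Hypothetical sketch only: the real Services struct and its initialization live elsewhere
// in the refactor and are not shown here.
use once_cell::sync::OnceCell;

pub struct Services {
    // sending, rooms, users, globals, ... (fields omitted)
}

static SERVICES: OnceCell<Services> = OnceCell::new();

pub fn init(services: Services) {
    // Set exactly once during startup, before any handler runs.
    SERVICES
        .set(services)
        .ok()
        .expect("services must only be initialized once");
}

pub fn services() -> &'static Services {
    SERVICES
        .get()
        .expect("services() called before initialization")
}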
@ -151,17 +140,15 @@ impl Sending {
Some(response) = futures.next() => { Some(response) = futures.next() => {
match response { match response {
Ok(outgoing_kind) => { Ok(outgoing_kind) => {
let guard = db.read().await;
let prefix = outgoing_kind.get_prefix(); let prefix = outgoing_kind.get_prefix();
for (key, _) in guard.sending.servercurrentevent_data for (key, _) in services().sending.servercurrentevent_data
.scan_prefix(prefix.clone()) .scan_prefix(prefix.clone())
{ {
guard.sending.servercurrentevent_data.remove(&key).unwrap(); services().sending.servercurrentevent_data.remove(&key).unwrap();
} }
// Find events that have been added since starting the last request // Find events that have been added since starting the last request
let new_events: Vec<_> = guard.sending.servernameevent_data let new_events: Vec<_> = services().sending.servernameevent_data
.scan_prefix(prefix.clone()) .scan_prefix(prefix.clone())
.filter_map(|(k, v)| { .filter_map(|(k, v)| {
Self::parse_servercurrentevent(&k, v).ok().map(|ev| (ev, k)) Self::parse_servercurrentevent(&k, v).ok().map(|ev| (ev, k))
@ -175,17 +162,14 @@ impl Sending {
// Insert pdus we found // Insert pdus we found
for (e, key) in &new_events { for (e, key) in &new_events {
let value = if let SendingEventType::Edu(value) = &e.1 { &**value } else { &[] }; let value = if let SendingEventType::Edu(value) = &e.1 { &**value } else { &[] };
guard.sending.servercurrentevent_data.insert(key, value).unwrap(); services().sending.servercurrentevent_data.insert(key, value).unwrap();
guard.sending.servernameevent_data.remove(key).unwrap(); services().sending.servernameevent_data.remove(key).unwrap();
} }
drop(guard);
futures.push( futures.push(
Self::handle_events( Self::handle_events(
outgoing_kind.clone(), outgoing_kind.clone(),
new_events.into_iter().map(|(event, _)| event.1).collect(), new_events.into_iter().map(|(event, _)| event.1).collect(),
Arc::clone(&db),
) )
); );
} else { } else {
@ -206,15 +190,12 @@ impl Sending {
}, },
Some((key, value)) = receiver.recv() => { Some((key, value)) = receiver.recv() => {
if let Ok((outgoing_kind, event)) = Self::parse_servercurrentevent(&key, value) { if let Ok((outgoing_kind, event)) = Self::parse_servercurrentevent(&key, value) {
let guard = db.read().await;
if let Ok(Some(events)) = Self::select_events( if let Ok(Some(events)) = Self::select_events(
&outgoing_kind, &outgoing_kind,
vec![(event, key)], vec![(event, key)],
&mut current_transaction_status, &mut current_transaction_status,
&guard
) { ) {
futures.push(Self::handle_events(outgoing_kind, events, Arc::clone(&db))); futures.push(Self::handle_events(outgoing_kind, events));
} }
} }
} }
@ -223,12 +204,11 @@ impl Sending {
}); });
} }
#[tracing::instrument(skip(outgoing_kind, new_events, current_transaction_status, db))] #[tracing::instrument(skip(outgoing_kind, new_events, current_transaction_status))]
fn select_events( fn select_events(
outgoing_kind: &OutgoingKind, outgoing_kind: &OutgoingKind,
new_events: Vec<(SendingEventType, Vec<u8>)>, // Events we want to send: event and full key new_events: Vec<(SendingEventType, Vec<u8>)>, // Events we want to send: event and full key
current_transaction_status: &mut HashMap<Vec<u8>, TransactionStatus>, current_transaction_status: &mut HashMap<Vec<u8>, TransactionStatus>,
db: &Database,
) -> Result<Option<Vec<SendingEventType>>> { ) -> Result<Option<Vec<SendingEventType>>> {
let mut retry = false; let mut retry = false;
let mut allow = true; let mut allow = true;
@ -266,7 +246,7 @@ impl Sending {
if retry { if retry {
// We retry the previous transaction // We retry the previous transaction
for (key, value) in db.sending.servercurrentevent_data.scan_prefix(prefix) { for (key, value) in services().sending.servercurrentevent_data.scan_prefix(prefix) {
if let Ok((_, e)) = Self::parse_servercurrentevent(&key, value) { if let Ok((_, e)) = Self::parse_servercurrentevent(&key, value) {
events.push(e); events.push(e);
} }
@ -278,22 +258,22 @@ impl Sending {
} else { } else {
&[][..] &[][..]
}; };
db.sending services().sending
.servercurrentevent_data .servercurrentevent_data
.insert(&full_key, value)?; .insert(&full_key, value)?;
// If it was a PDU we have to unqueue it // If it was a PDU we have to unqueue it
// TODO: don't try to unqueue EDUs // TODO: don't try to unqueue EDUs
db.sending.servernameevent_data.remove(&full_key)?; services().sending.servernameevent_data.remove(&full_key)?;
events.push(e); events.push(e);
} }
if let OutgoingKind::Normal(server_name) = outgoing_kind { if let OutgoingKind::Normal(server_name) = outgoing_kind {
if let Ok((select_edus, last_count)) = Self::select_edus(db, server_name) { if let Ok((select_edus, last_count)) = Self::select_edus(server_name) {
events.extend(select_edus.into_iter().map(SendingEventType::Edu)); events.extend(select_edus.into_iter().map(SendingEventType::Edu));
db.sending services().sending
.servername_educount .servername_educount
.insert(server_name.as_bytes(), &last_count.to_be_bytes())?; .insert(server_name.as_bytes(), &last_count.to_be_bytes())?;
} }
@ -303,10 +283,10 @@ impl Sending {
Ok(Some(events)) Ok(Some(events))
} }
#[tracing::instrument(skip(db, server))] #[tracing::instrument(skip(server))]
pub fn select_edus(db: &Database, server: &ServerName) -> Result<(Vec<Vec<u8>>, u64)> { pub fn select_edus(server: &ServerName) -> Result<(Vec<Vec<u8>>, u64)> {
// u64: count of last edu // u64: count of last edu
let since = db let since = services()
.sending .sending
.servername_educount .servername_educount
.get(server.as_bytes())? .get(server.as_bytes())?
@ -318,25 +298,25 @@ impl Sending {
let mut max_edu_count = since; let mut max_edu_count = since;
let mut device_list_changes = HashSet::new(); let mut device_list_changes = HashSet::new();
'outer: for room_id in db.rooms.server_rooms(server) { 'outer: for room_id in services().rooms.server_rooms(server) {
let room_id = room_id?; let room_id = room_id?;
// Look for device list updates in this room // Look for device list updates in this room
device_list_changes.extend( device_list_changes.extend(
db.users services().users
.keys_changed(&room_id.to_string(), since, None) .keys_changed(&room_id.to_string(), since, None)
.filter_map(|r| r.ok()) .filter_map(|r| r.ok())
.filter(|user_id| user_id.server_name() == db.globals.server_name()), .filter(|user_id| user_id.server_name() == services().globals.server_name()),
); );
// Look for read receipts in this room // Look for read receipts in this room
for r in db.rooms.edus.readreceipts_since(&room_id, since) { for r in services().rooms.edus.readreceipts_since(&room_id, since) {
let (user_id, count, read_receipt) = r?; let (user_id, count, read_receipt) = r?;
if count > max_edu_count { if count > max_edu_count {
max_edu_count = count; max_edu_count = count;
} }
if user_id.server_name() != db.globals.server_name() { if user_id.server_name() != services().globals.server_name() {
continue; continue;
} }
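select_edus walks every room shared with the target server and collects device-list changes and read receipts newer than the last EDU count stored for that server; the highest count seen is written back as big-endian bytes (per the to_be_bytes call above) so the next pass resumes where this one stopped. A simplified sketch of that high-water-mark bookkeeping, using a HashMap in place of the real servername_educount tree:

use std::collections::HashMap;

// `store` stands in for the servername_educount tree: server name -> last sent EDU count.
fn select_edus(store: &mut HashMap<Vec<u8>, Vec<u8>>, server: &str, event_counts: &[u64]) -> u64 {
    // Count of the last EDU already sent to this server (0 if nothing was ever sent).
    let since = store
        .get(server.as_bytes())
        .map(|bytes| u64::from_be_bytes(bytes.as_slice().try_into().expect("stored count is 8 bytes")))
        .unwrap_or(0);

    let mut max_edu_count = since;
    for &count in event_counts {
        // Only counts newer than `since` move the mark forward.
        if count > max_edu_count {
            max_edu_count = count;
        }
    }

    // Persist the new high-water mark so the next run skips everything up to it.
    store.insert(server.as_bytes().to_vec(), max_edu_count.to_be_bytes().to_vec());
    max_edu_count
}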
@ -496,14 +476,11 @@ impl Sending {
Ok(()) Ok(())
} }
#[tracing::instrument(skip(db, events, kind))] #[tracing::instrument(skip(events, kind))]
async fn handle_events( async fn handle_events(
kind: OutgoingKind, kind: OutgoingKind,
events: Vec<SendingEventType>, events: Vec<SendingEventType>,
db: Arc<RwLock<Database>>,
) -> Result<OutgoingKind, (OutgoingKind, Error)> { ) -> Result<OutgoingKind, (OutgoingKind, Error)> {
let db = db.read().await;
match &kind { match &kind {
OutgoingKind::Appservice(id) => { OutgoingKind::Appservice(id) => {
let mut pdu_jsons = Vec::new(); let mut pdu_jsons = Vec::new();
@ -511,7 +488,7 @@ impl Sending {
for event in &events { for event in &events {
match event { match event {
SendingEventType::Pdu(pdu_id) => { SendingEventType::Pdu(pdu_id) => {
pdu_jsons.push(db.rooms pdu_jsons.push(services().rooms
.get_pdu_from_id(pdu_id) .get_pdu_from_id(pdu_id)
.map_err(|e| (kind.clone(), e))? .map_err(|e| (kind.clone(), e))?
.ok_or_else(|| { .ok_or_else(|| {
@ -530,11 +507,10 @@ impl Sending {
} }
} }
let permit = db.sending.maximum_requests.acquire().await; let permit = services().sending.maximum_requests.acquire().await;
let response = appservice_server::send_request( let response = appservice_server::send_request(
&db.globals, services().appservice
db.appservice
.get_registration(&id) .get_registration(&id)
.map_err(|e| (kind.clone(), e))? .map_err(|e| (kind.clone(), e))?
.ok_or_else(|| { .ok_or_else(|| {
@ -576,7 +552,7 @@ impl Sending {
match event { match event {
SendingEventType::Pdu(pdu_id) => { SendingEventType::Pdu(pdu_id) => {
pdus.push( pdus.push(
db.rooms services().rooms
.get_pdu_from_id(pdu_id) .get_pdu_from_id(pdu_id)
.map_err(|e| (kind.clone(), e))? .map_err(|e| (kind.clone(), e))?
.ok_or_else(|| { .ok_or_else(|| {
@ -624,7 +600,7 @@ impl Sending {
senderkey.push(0xff); senderkey.push(0xff);
senderkey.extend_from_slice(pushkey); senderkey.extend_from_slice(pushkey);
let pusher = match db let pusher = match services()
.pusher .pusher
.get_pusher(&senderkey) .get_pusher(&senderkey)
.map_err(|e| (OutgoingKind::Push(user.clone(), pushkey.clone()), e))? .map_err(|e| (OutgoingKind::Push(user.clone(), pushkey.clone()), e))?
@ -633,7 +609,7 @@ impl Sending {
None => continue, None => continue,
}; };
let rules_for_user = db let rules_for_user = services()
.account_data .account_data
.get( .get(
None, None,
@ -644,22 +620,21 @@ impl Sending {
.map(|ev: PushRulesEvent| ev.content.global) .map(|ev: PushRulesEvent| ev.content.global)
.unwrap_or_else(|| push::Ruleset::server_default(&userid)); .unwrap_or_else(|| push::Ruleset::server_default(&userid));
let unread: UInt = db let unread: UInt = services()
.rooms .rooms
.notification_count(&userid, &pdu.room_id) .notification_count(&userid, &pdu.room_id)
.map_err(|e| (kind.clone(), e))? .map_err(|e| (kind.clone(), e))?
.try_into() .try_into()
.expect("notification count can't go that high"); .expect("notification count can't go that high");
let permit = db.sending.maximum_requests.acquire().await; let permit = services().sending.maximum_requests.acquire().await;
let _response = pusher::send_push_notice( let _response = services().pusher.send_push_notice(
&userid, &userid,
unread, unread,
&pusher, &pusher,
rules_for_user, rules_for_user,
&pdu, &pdu,
&db,
) )
.await .await
.map(|_response| kind.clone()) .map(|_response| kind.clone())
@ -678,7 +653,7 @@ impl Sending {
SendingEventType::Pdu(pdu_id) => { SendingEventType::Pdu(pdu_id) => {
// TODO: check room version and remove event_id if needed // TODO: check room version and remove event_id if needed
let raw = PduEvent::convert_to_outgoing_federation_event( let raw = PduEvent::convert_to_outgoing_federation_event(
db.rooms services().rooms
.get_pdu_json_from_id(pdu_id) .get_pdu_json_from_id(pdu_id)
.map_err(|e| (OutgoingKind::Normal(server.clone()), e))? .map_err(|e| (OutgoingKind::Normal(server.clone()), e))?
.ok_or_else(|| { .ok_or_else(|| {
@ -700,13 +675,12 @@ impl Sending {
} }
} }
let permit = db.sending.maximum_requests.acquire().await; let permit = services().sending.maximum_requests.acquire().await;
let response = server_server::send_request( let response = server_server::send_request(
&db.globals,
&*server, &*server,
send_transaction_message::v1::Request { send_transaction_message::v1::Request {
origin: db.globals.server_name(), origin: services().globals.server_name(),
pdus: &pdu_jsons, pdus: &pdu_jsons,
edus: &edu_jsons, edus: &edu_jsons,
origin_server_ts: MilliSecondsSinceUnixEpoch::now(), origin_server_ts: MilliSecondsSinceUnixEpoch::now(),
@ -809,10 +783,9 @@ impl Sending {
}) })
} }
#[tracing::instrument(skip(self, globals, destination, request))] #[tracing::instrument(skip(self, destination, request))]
pub async fn send_federation_request<T: OutgoingRequest>( pub async fn send_federation_request<T: OutgoingRequest>(
&self, &self,
globals: &crate::database::globals::Globals,
destination: &ServerName, destination: &ServerName,
request: T, request: T,
) -> Result<T::IncomingResponse> ) -> Result<T::IncomingResponse>
@ -820,16 +793,15 @@ impl Sending {
T: Debug, T: Debug,
{ {
let permit = self.maximum_requests.acquire().await; let permit = self.maximum_requests.acquire().await;
let response = server_server::send_request(globals, destination, request).await; let response = server_server::send_request(destination, request).await;
drop(permit); drop(permit);
response response
} }
#[tracing::instrument(skip(self, globals, registration, request))] #[tracing::instrument(skip(self, registration, request))]
pub async fn send_appservice_request<T: OutgoingRequest>( pub async fn send_appservice_request<T: OutgoingRequest>(
&self, &self,
globals: &crate::database::globals::Globals,
registration: serde_yaml::Value, registration: serde_yaml::Value,
request: T, request: T,
) -> Result<T::IncomingResponse> ) -> Result<T::IncomingResponse>
@ -837,7 +809,7 @@ impl Sending {
T: Debug, T: Debug,
{ {
let permit = self.maximum_requests.acquire().await; let permit = self.maximum_requests.acquire().await;
let response = appservice_server::send_request(globals, registration, request).await; let response = appservice_server::send_request(registration, request).await;
drop(permit); drop(permit);
response response
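Both send_federation_request and send_appservice_request take a permit from maximum_requests before doing any network I/O, which caps how many outgoing requests the sender runs concurrently. A self-contained sketch of that permit-around-the-request pattern with tokio's Semaphore (perform_request is a placeholder, not the project's function):

use std::sync::Arc;
use tokio::sync::Semaphore;

struct Sender {
    maximum_requests: Arc<Semaphore>,
}

impl Sender {
    async fn send_limited(&self) -> Result<String, String> {
        // Suspends until one of the N permits is free, bounding concurrent outgoing requests.
        let permit = self.maximum_requests.acquire().await;
        let response = perform_request().await;
        drop(permit); // free the slot as soon as the request is done
        response
    }
}

async fn perform_request() -> Result<String, String> {
    Ok("ok".to_owned())
}

#[tokio::main]
async fn main() {
    let sender = Sender {
        // Allow at most 100 requests in flight at a time (an arbitrary example value).
        maximum_requests: Arc::new(Semaphore::new(100)),
    };
    let _ = sender.send_limited().await;
}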

@ -1,3 +1,6 @@
use ruma::{DeviceId, UserId, TransactionId};
use crate::Result;
pub trait Data { pub trait Data {
fn add_txnid( fn add_txnid(
&self, &self,

@ -1,14 +1,14 @@
mod data; mod data;
pub use data::Data; pub use data::Data;
use ruma::{UserId, DeviceId, TransactionId};
use crate::service::*; use ruma::{UserId, DeviceId, TransactionId};
use crate::Result;
pub struct Service<D: Data> { pub struct Service<D: Data> {
db: D, db: D,
} }
impl Service<_> { impl<D: Data> Service<D> {
pub fn add_txnid( pub fn add_txnid(
&self, &self,
user_id: &UserId, user_id: &UserId,

@ -1,4 +1,5 @@
use ruma::{api::client::uiaa::UiaaInfo, DeviceId, UserId, signatures::CanonicalJsonValue}; use ruma::{api::client::uiaa::UiaaInfo, DeviceId, UserId, signatures::CanonicalJsonValue};
use crate::Result;
pub trait Data { pub trait Data {
fn set_uiaa_request( fn set_uiaa_request(
