add rustfmt.toml, format entire codebase
Signed-off-by: strawberry <strawberry@puppygock.gay>
parent 9fd521f041
commit f419c64aca
144 changed files with 25573 additions and 31053 deletions
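The rustfmt.toml that this commit adds is not itself visible in the hunks below (its diff is likely among the files suppressed as too large). As a rough sketch only: a configuration consistent with the formatting visible in the reformatted code (hard tabs, a wider maximum line width, compressed parameter lists, re-wrapped doc comments, trailing commas on match arms, and std/external/crate import grouping) could look like the following. Every option value here is inferred from the diff, not taken from the committed file:

# hypothetical rustfmt.toml, inferred from the formatting changes below
edition = "2021"                    # assumed edition
max_width = 120                     # long single-line signatures suggest a ~120-column limit
hard_tabs = true                    # reformatted code appears tab-indented
fn_params_layout = "Compressed"     # parameters packed onto as few lines as possible
fn_single_line = true               # short bodies such as `{ self.db.all() }` stay on one line
wrap_comments = true                # doc comments re-wrapped to a narrower width
comment_width = 80                  # assumed comment wrap width
match_block_trailing_comma = true   # match arms now end in `},`
group_imports = "StdExternalCrate"  # std, then external crates, then `crate::` imports
format_strings = true               # long string literals split with `\` continuations

Several of these options are nightly-only in rustfmt, so a configuration like this would likely require running `cargo +nightly fmt`.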
@@ -1,35 +1,28 @@
use std::collections::HashMap;

use ruma::{
    events::{AnyEphemeralRoomEvent, RoomAccountDataEventType},
    serde::Raw,
    RoomId, UserId,
};

use crate::Result;

pub trait Data: Send + Sync {
    /// Places one event in the account data of the user and removes the
    /// previous entry.
    fn update(
        &self, room_id: Option<&RoomId>, user_id: &UserId, event_type: RoomAccountDataEventType,
        data: &serde_json::Value,
    ) -> Result<()>;

    /// Searches the account data for a specific kind.
    fn get(
        &self, room_id: Option<&RoomId>, user_id: &UserId, kind: RoomAccountDataEventType,
    ) -> Result<Option<Box<serde_json::value::RawValue>>>;

    /// Returns all changes to the account data that happened after `since`.
    fn changes_since(
        &self, room_id: Option<&RoomId>, user_id: &UserId, since: u64,
    ) -> Result<HashMap<RoomAccountDataEventType, Raw<AnyEphemeralRoomEvent>>>;
}
@@ -1,53 +1,44 @@
mod data;
use std::collections::HashMap;

pub(crate) use data::Data;
use ruma::{
    events::{AnyEphemeralRoomEvent, RoomAccountDataEventType},
    serde::Raw,
    RoomId, UserId,
};

use crate::Result;

pub struct Service {
    pub db: &'static dyn Data,
}

impl Service {
    /// Places one event in the account data of the user and removes the
    /// previous entry.
    #[tracing::instrument(skip(self, room_id, user_id, event_type, data))]
    pub fn update(
        &self, room_id: Option<&RoomId>, user_id: &UserId, event_type: RoomAccountDataEventType,
        data: &serde_json::Value,
    ) -> Result<()> {
        self.db.update(room_id, user_id, event_type, data)
    }

    /// Searches the account data for a specific kind.
    #[tracing::instrument(skip(self, room_id, user_id, event_type))]
    pub fn get(
        &self, room_id: Option<&RoomId>, user_id: &UserId, event_type: RoomAccountDataEventType,
    ) -> Result<Option<Box<serde_json::value::RawValue>>> {
        self.db.get(room_id, user_id, event_type)
    }

    /// Returns all changes to the account data that happened after `since`.
    #[tracing::instrument(skip(self, room_id, user_id, since))]
    pub fn changes_since(
        &self, room_id: Option<&RoomId>, user_id: &UserId, since: u64,
    ) -> Result<HashMap<RoomAccountDataEventType, Raw<AnyEphemeralRoomEvent>>> {
        self.db.changes_since(room_id, user_id, since)
    }
}
File diff suppressed because it is too large
@@ -3,19 +3,19 @@ use ruma::api::appservice::Registration;
use crate::Result;

pub trait Data: Send + Sync {
    /// Registers an appservice and returns the ID to the caller
    fn register_appservice(&self, yaml: Registration) -> Result<String>;

    /// Remove an appservice registration
    ///
    /// # Arguments
    ///
    /// * `service_name` - the name you send to register the service previously
    fn unregister_appservice(&self, service_name: &str) -> Result<()>;

    fn get_registration(&self, id: &str) -> Result<Option<Registration>>;

    fn iter_ids<'a>(&'a self) -> Result<Box<dyn Iterator<Item = Result<String>> + 'a>>;

    fn all(&self) -> Result<Vec<(String, Registration)>>;
}
@@ -6,33 +6,25 @@ use ruma::api::appservice::Registration;
use crate::Result;

pub struct Service {
    pub db: &'static dyn Data,
}

impl Service {
    /// Registers an appservice and returns the ID to the caller
    pub fn register_appservice(&self, yaml: Registration) -> Result<String> { self.db.register_appservice(yaml) }

    /// Remove an appservice registration
    ///
    /// # Arguments
    ///
    /// * `service_name` - the name you send to register the service previously
    pub fn unregister_appservice(&self, service_name: &str) -> Result<()> {
        self.db.unregister_appservice(service_name)
    }

    pub fn get_registration(&self, id: &str) -> Result<Option<Registration>> { self.db.get_registration(id) }

    pub fn iter_ids(&self) -> Result<impl Iterator<Item = Result<String>> + '_> { self.db.iter_ids() }

    pub fn all(&self) -> Result<Vec<(String, Registration)>> { self.db.all() }
}
@@ -2,36 +2,32 @@ use std::collections::BTreeMap;

use async_trait::async_trait;
use ruma::{
    api::federation::discovery::{ServerSigningKeys, VerifyKey},
    signatures::Ed25519KeyPair,
    DeviceId, OwnedServerSigningKeyId, ServerName, UserId,
};

use crate::Result;

#[async_trait]
pub trait Data: Send + Sync {
    fn next_count(&self) -> Result<u64>;
    fn current_count(&self) -> Result<u64>;
    fn last_check_for_updates_id(&self) -> Result<u64>;
    fn update_check_for_updates_id(&self, id: u64) -> Result<()>;
    async fn watch(&self, user_id: &UserId, device_id: &DeviceId) -> Result<()>;
    fn cleanup(&self) -> Result<()>;
    fn memory_usage(&self) -> String;
    fn clear_caches(&self, amount: u32);
    fn load_keypair(&self) -> Result<Ed25519KeyPair>;
    fn remove_keypair(&self) -> Result<()>;
    fn add_signing_key(
        &self, origin: &ServerName, new_keys: ServerSigningKeys,
    ) -> Result<BTreeMap<OwnedServerSigningKeyId, VerifyKey>>;

    /// This returns an empty `Ok(BTreeMap<..>)` when there are no keys found
    /// for the server.
    fn signing_keys_for(&self, origin: &ServerName) -> Result<BTreeMap<OwnedServerSigningKeyId, VerifyKey>>;
    fn database_version(&self) -> Result<u64>;
    fn bump_database_version(&self, new_version: u64) -> Result<()>;
}
File diff suppressed because it is too large
@@ -1,78 +1,47 @@
use std::collections::BTreeMap;

use ruma::{
    api::client::backup::{BackupAlgorithm, KeyBackupData, RoomKeyBackup},
    serde::Raw,
    OwnedRoomId, RoomId, UserId,
};

use crate::Result;

pub trait Data: Send + Sync {
    fn create_backup(&self, user_id: &UserId, backup_metadata: &Raw<BackupAlgorithm>) -> Result<String>;

    fn delete_backup(&self, user_id: &UserId, version: &str) -> Result<()>;

    fn update_backup(&self, user_id: &UserId, version: &str, backup_metadata: &Raw<BackupAlgorithm>) -> Result<String>;

    fn get_latest_backup_version(&self, user_id: &UserId) -> Result<Option<String>>;

    fn get_latest_backup(&self, user_id: &UserId) -> Result<Option<(String, Raw<BackupAlgorithm>)>>;

    fn get_backup(&self, user_id: &UserId, version: &str) -> Result<Option<Raw<BackupAlgorithm>>>;

    fn add_key(
        &self, user_id: &UserId, version: &str, room_id: &RoomId, session_id: &str, key_data: &Raw<KeyBackupData>,
    ) -> Result<()>;

    fn count_keys(&self, user_id: &UserId, version: &str) -> Result<usize>;

    fn get_etag(&self, user_id: &UserId, version: &str) -> Result<String>;

    fn get_all(&self, user_id: &UserId, version: &str) -> Result<BTreeMap<OwnedRoomId, RoomKeyBackup>>;

    fn get_room(
        &self, user_id: &UserId, version: &str, room_id: &RoomId,
    ) -> Result<BTreeMap<String, Raw<KeyBackupData>>>;

    fn get_session(
        &self, user_id: &UserId, version: &str, room_id: &RoomId, session_id: &str,
    ) -> Result<Option<Raw<KeyBackupData>>>;

    fn delete_all_keys(&self, user_id: &UserId, version: &str) -> Result<()>;

    fn delete_room_keys(&self, user_id: &UserId, version: &str, room_id: &RoomId) -> Result<()>;

    fn delete_room_key(&self, user_id: &UserId, version: &str, room_id: &RoomId, session_id: &str) -> Result<()>;
}
@@ -1,127 +1,81 @@
mod data;
use std::collections::BTreeMap;

pub(crate) use data::Data;
use ruma::{
    api::client::backup::{BackupAlgorithm, KeyBackupData, RoomKeyBackup},
    serde::Raw,
    OwnedRoomId, RoomId, UserId,
};

use crate::Result;

pub struct Service {
    pub db: &'static dyn Data,
}

impl Service {
    pub fn create_backup(&self, user_id: &UserId, backup_metadata: &Raw<BackupAlgorithm>) -> Result<String> {
        self.db.create_backup(user_id, backup_metadata)
    }

    pub fn delete_backup(&self, user_id: &UserId, version: &str) -> Result<()> {
        self.db.delete_backup(user_id, version)
    }

    pub fn update_backup(
        &self, user_id: &UserId, version: &str, backup_metadata: &Raw<BackupAlgorithm>,
    ) -> Result<String> {
        self.db.update_backup(user_id, version, backup_metadata)
    }

    pub fn get_latest_backup_version(&self, user_id: &UserId) -> Result<Option<String>> {
        self.db.get_latest_backup_version(user_id)
    }

    pub fn get_latest_backup(&self, user_id: &UserId) -> Result<Option<(String, Raw<BackupAlgorithm>)>> {
        self.db.get_latest_backup(user_id)
    }

    pub fn get_backup(&self, user_id: &UserId, version: &str) -> Result<Option<Raw<BackupAlgorithm>>> {
        self.db.get_backup(user_id, version)
    }

    pub fn add_key(
        &self, user_id: &UserId, version: &str, room_id: &RoomId, session_id: &str, key_data: &Raw<KeyBackupData>,
    ) -> Result<()> {
        self.db.add_key(user_id, version, room_id, session_id, key_data)
    }

    pub fn count_keys(&self, user_id: &UserId, version: &str) -> Result<usize> { self.db.count_keys(user_id, version) }

    pub fn get_etag(&self, user_id: &UserId, version: &str) -> Result<String> { self.db.get_etag(user_id, version) }

    pub fn get_all(&self, user_id: &UserId, version: &str) -> Result<BTreeMap<OwnedRoomId, RoomKeyBackup>> {
        self.db.get_all(user_id, version)
    }

    pub fn get_room(
        &self, user_id: &UserId, version: &str, room_id: &RoomId,
    ) -> Result<BTreeMap<String, Raw<KeyBackupData>>> {
        self.db.get_room(user_id, version, room_id)
    }

    pub fn get_session(
        &self, user_id: &UserId, version: &str, room_id: &RoomId, session_id: &str,
    ) -> Result<Option<Raw<KeyBackupData>>> {
        self.db.get_session(user_id, version, room_id, session_id)
    }

    pub fn delete_all_keys(&self, user_id: &UserId, version: &str) -> Result<()> {
        self.db.delete_all_keys(user_id, version)
    }

    pub fn delete_room_keys(&self, user_id: &UserId, version: &str, room_id: &RoomId) -> Result<()> {
        self.db.delete_room_keys(user_id, version, room_id)
    }

    pub fn delete_room_key(&self, user_id: &UserId, version: &str, room_id: &RoomId, session_id: &str) -> Result<()> {
        self.db.delete_room_key(user_id, version, room_id, session_id)
    }
}
@@ -1,37 +1,24 @@
use crate::Result;

pub trait Data: Send + Sync {
    fn create_file_metadata(
        &self, mxc: String, width: u32, height: u32, content_disposition: Option<&str>, content_type: Option<&str>,
    ) -> Result<Vec<u8>>;

    fn delete_file_mxc(&self, mxc: String) -> Result<()>;

    /// Returns content_disposition, content_type and the metadata key.
    fn search_file_metadata(
        &self, mxc: String, width: u32, height: u32,
    ) -> Result<(Option<String>, Option<String>, Vec<u8>)>;

    fn search_mxc_metadata_prefix(&self, mxc: String) -> Result<Vec<Vec<u8>>>;

    fn get_all_media_keys(&self) -> Result<Vec<Vec<u8>>>;

    fn remove_url_preview(&self, url: &str) -> Result<()>;

    fn set_url_preview(&self, url: &str, data: &super::UrlPreviewData, timestamp: std::time::Duration) -> Result<()>;

    fn get_url_preview(&self, url: &str) -> Option<super::UrlPreviewData>;
}
@ -1,579 +1,480 @@
|
|||
mod data;
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
io::Cursor,
|
||||
sync::{Arc, RwLock},
|
||||
time::SystemTime,
|
||||
collections::HashMap,
|
||||
io::Cursor,
|
||||
sync::{Arc, RwLock},
|
||||
time::SystemTime,
|
||||
};
|
||||
|
||||
pub(crate) use data::Data;
|
||||
use image::imageops::FilterType;
|
||||
use ruma::OwnedMxcUri;
|
||||
use serde::Serialize;
|
||||
use tokio::{
|
||||
fs::{self, File},
|
||||
io::{AsyncReadExt, AsyncWriteExt, BufReader},
|
||||
sync::Mutex,
|
||||
};
|
||||
use tracing::{debug, error};
|
||||
|
||||
use crate::{services, utils, Error, Result};
|
||||
use image::imageops::FilterType;
|
||||
|
||||
use tokio::{
|
||||
fs::{self, File},
|
||||
io::{AsyncReadExt, AsyncWriteExt, BufReader},
|
||||
sync::Mutex,
|
||||
};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct FileMeta {
|
||||
pub content_disposition: Option<String>,
|
||||
pub content_type: Option<String>,
|
||||
pub file: Vec<u8>,
|
||||
pub content_disposition: Option<String>,
|
||||
pub content_type: Option<String>,
|
||||
pub file: Vec<u8>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Default)]
|
||||
pub struct UrlPreviewData {
|
||||
#[serde(
|
||||
skip_serializing_if = "Option::is_none",
|
||||
rename(serialize = "og:title")
|
||||
)]
|
||||
pub title: Option<String>,
|
||||
#[serde(
|
||||
skip_serializing_if = "Option::is_none",
|
||||
rename(serialize = "og:description")
|
||||
)]
|
||||
pub description: Option<String>,
|
||||
#[serde(
|
||||
skip_serializing_if = "Option::is_none",
|
||||
rename(serialize = "og:image")
|
||||
)]
|
||||
pub image: Option<String>,
|
||||
#[serde(
|
||||
skip_serializing_if = "Option::is_none",
|
||||
rename(serialize = "matrix:image:size")
|
||||
)]
|
||||
pub image_size: Option<usize>,
|
||||
#[serde(
|
||||
skip_serializing_if = "Option::is_none",
|
||||
rename(serialize = "og:image:width")
|
||||
)]
|
||||
pub image_width: Option<u32>,
|
||||
#[serde(
|
||||
skip_serializing_if = "Option::is_none",
|
||||
rename(serialize = "og:image:height")
|
||||
)]
|
||||
pub image_height: Option<u32>,
|
||||
#[serde(skip_serializing_if = "Option::is_none", rename(serialize = "og:title"))]
|
||||
pub title: Option<String>,
|
||||
#[serde(skip_serializing_if = "Option::is_none", rename(serialize = "og:description"))]
|
||||
pub description: Option<String>,
|
||||
#[serde(skip_serializing_if = "Option::is_none", rename(serialize = "og:image"))]
|
||||
pub image: Option<String>,
|
||||
#[serde(skip_serializing_if = "Option::is_none", rename(serialize = "matrix:image:size"))]
|
||||
pub image_size: Option<usize>,
|
||||
#[serde(skip_serializing_if = "Option::is_none", rename(serialize = "og:image:width"))]
|
||||
pub image_width: Option<u32>,
|
||||
#[serde(skip_serializing_if = "Option::is_none", rename(serialize = "og:image:height"))]
|
||||
pub image_height: Option<u32>,
|
||||
}
|
||||
|
||||
pub struct Service {
|
||||
pub db: &'static dyn Data,
|
||||
pub url_preview_mutex: RwLock<HashMap<String, Arc<Mutex<()>>>>,
|
||||
pub db: &'static dyn Data,
|
||||
pub url_preview_mutex: RwLock<HashMap<String, Arc<Mutex<()>>>>,
|
||||
}
|
||||
|
||||
impl Service {
|
||||
/// Uploads a file.
|
||||
pub async fn create(
|
||||
&self,
|
||||
mxc: String,
|
||||
content_disposition: Option<&str>,
|
||||
content_type: Option<&str>,
|
||||
file: &[u8],
|
||||
) -> Result<()> {
|
||||
// Width, Height = 0 if it's not a thumbnail
|
||||
let key = self
|
||||
.db
|
||||
.create_file_metadata(mxc, 0, 0, content_disposition, content_type)?;
|
||||
/// Uploads a file.
|
||||
pub async fn create(
|
||||
&self, mxc: String, content_disposition: Option<&str>, content_type: Option<&str>, file: &[u8],
|
||||
) -> Result<()> {
|
||||
// Width, Height = 0 if it's not a thumbnail
|
||||
let key = self.db.create_file_metadata(mxc, 0, 0, content_disposition, content_type)?;
|
||||
|
||||
let path = if cfg!(feature = "sha256_media") {
|
||||
services().globals.get_media_file_new(&key)
|
||||
} else {
|
||||
#[allow(deprecated)]
|
||||
services().globals.get_media_file(&key)
|
||||
};
|
||||
let path = if cfg!(feature = "sha256_media") {
|
||||
services().globals.get_media_file_new(&key)
|
||||
} else {
|
||||
#[allow(deprecated)]
|
||||
services().globals.get_media_file(&key)
|
||||
};
|
||||
|
||||
let mut f = File::create(path).await?;
|
||||
f.write_all(file).await?;
|
||||
Ok(())
|
||||
}
|
||||
let mut f = File::create(path).await?;
|
||||
f.write_all(file).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Deletes a file in the database and from the media directory via an MXC
|
||||
pub async fn delete(&self, mxc: String) -> Result<()> {
|
||||
if let Ok(keys) = self.db.search_mxc_metadata_prefix(mxc.clone()) {
|
||||
for key in keys {
|
||||
let file_path = if cfg!(feature = "sha256_media") {
|
||||
services().globals.get_media_file_new(&key)
|
||||
} else {
|
||||
#[allow(deprecated)]
|
||||
services().globals.get_media_file(&key)
|
||||
};
|
||||
debug!("Got local file path: {:?}", file_path);
|
||||
/// Deletes a file in the database and from the media directory via an MXC
|
||||
pub async fn delete(&self, mxc: String) -> Result<()> {
|
||||
if let Ok(keys) = self.db.search_mxc_metadata_prefix(mxc.clone()) {
|
||||
for key in keys {
|
||||
let file_path = if cfg!(feature = "sha256_media") {
|
||||
services().globals.get_media_file_new(&key)
|
||||
} else {
|
||||
#[allow(deprecated)]
|
||||
services().globals.get_media_file(&key)
|
||||
};
|
||||
debug!("Got local file path: {:?}", file_path);
|
||||
|
||||
debug!(
|
||||
"Deleting local file {:?} from filesystem, original MXC: {}",
|
||||
file_path, mxc
|
||||
);
|
||||
tokio::fs::remove_file(file_path).await?;
|
||||
debug!("Deleting local file {:?} from filesystem, original MXC: {}", file_path, mxc);
|
||||
tokio::fs::remove_file(file_path).await?;
|
||||
|
||||
debug!("Deleting MXC {mxc} from database");
|
||||
self.db.delete_file_mxc(mxc.clone())?;
|
||||
}
|
||||
debug!("Deleting MXC {mxc} from database");
|
||||
self.db.delete_file_mxc(mxc.clone())?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
} else {
|
||||
error!("Failed to find any media keys for MXC \"{mxc}\" in our database (MXC does not exist)");
|
||||
Err(Error::bad_database("Failed to find any media keys for the provided MXC in our database (MXC does not exist)"))
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
} else {
|
||||
error!("Failed to find any media keys for MXC \"{mxc}\" in our database (MXC does not exist)");
|
||||
Err(Error::bad_database(
|
||||
"Failed to find any media keys for the provided MXC in our database (MXC does not exist)",
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
/// Uploads or replaces a file thumbnail.
|
||||
pub async fn upload_thumbnail(
|
||||
&self,
|
||||
mxc: String,
|
||||
content_disposition: Option<&str>,
|
||||
content_type: Option<&str>,
|
||||
width: u32,
|
||||
height: u32,
|
||||
file: &[u8],
|
||||
) -> Result<()> {
|
||||
let key =
|
||||
self.db
|
||||
.create_file_metadata(mxc, width, height, content_disposition, content_type)?;
|
||||
/// Uploads or replaces a file thumbnail.
|
||||
pub async fn upload_thumbnail(
|
||||
&self, mxc: String, content_disposition: Option<&str>, content_type: Option<&str>, width: u32, height: u32,
|
||||
file: &[u8],
|
||||
) -> Result<()> {
|
||||
let key = self.db.create_file_metadata(mxc, width, height, content_disposition, content_type)?;
|
||||
|
||||
let path = if cfg!(feature = "sha256_media") {
|
||||
services().globals.get_media_file_new(&key)
|
||||
} else {
|
||||
#[allow(deprecated)]
|
||||
services().globals.get_media_file(&key)
|
||||
};
|
||||
let path = if cfg!(feature = "sha256_media") {
|
||||
services().globals.get_media_file_new(&key)
|
||||
} else {
|
||||
#[allow(deprecated)]
|
||||
services().globals.get_media_file(&key)
|
||||
};
|
||||
|
||||
let mut f = File::create(path).await?;
|
||||
f.write_all(file).await?;
|
||||
let mut f = File::create(path).await?;
|
||||
f.write_all(file).await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Downloads a file.
|
||||
pub async fn get(&self, mxc: String) -> Result<Option<FileMeta>> {
|
||||
if let Ok((content_disposition, content_type, key)) =
|
||||
self.db.search_file_metadata(mxc, 0, 0)
|
||||
{
|
||||
let path = if cfg!(feature = "sha256_media") {
|
||||
services().globals.get_media_file_new(&key)
|
||||
} else {
|
||||
#[allow(deprecated)]
|
||||
services().globals.get_media_file(&key)
|
||||
};
|
||||
/// Downloads a file.
|
||||
pub async fn get(&self, mxc: String) -> Result<Option<FileMeta>> {
|
||||
if let Ok((content_disposition, content_type, key)) = self.db.search_file_metadata(mxc, 0, 0) {
|
||||
let path = if cfg!(feature = "sha256_media") {
|
||||
services().globals.get_media_file_new(&key)
|
||||
} else {
|
||||
#[allow(deprecated)]
|
||||
services().globals.get_media_file(&key)
|
||||
};
|
||||
|
||||
let mut file = Vec::new();
|
||||
BufReader::new(File::open(path).await?)
|
||||
.read_to_end(&mut file)
|
||||
.await?;
|
||||
let mut file = Vec::new();
|
||||
BufReader::new(File::open(path).await?).read_to_end(&mut file).await?;
|
||||
|
||||
Ok(Some(FileMeta {
|
||||
content_disposition,
|
||||
content_type,
|
||||
file,
|
||||
}))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
Ok(Some(FileMeta {
|
||||
content_disposition,
|
||||
content_type,
|
||||
file,
|
||||
}))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
/// Deletes all remote only media files in the given at or after time/duration. Returns a u32
|
||||
/// with the amount of media files deleted.
|
||||
pub async fn delete_all_remote_media_at_after_time(&self, time: String) -> Result<u32> {
|
||||
if let Ok(all_keys) = self.db.get_all_media_keys() {
|
||||
let user_duration: SystemTime = match cyborgtime::parse_duration(&time) {
|
||||
Ok(duration) => {
|
||||
debug!("Parsed duration: {:?}", duration);
|
||||
debug!("System time now: {:?}", SystemTime::now());
|
||||
SystemTime::now() - duration
|
||||
}
|
||||
Err(e) => {
|
||||
error!("Failed to parse user-specified time duration: {}", e);
|
||||
return Err(Error::bad_database(
|
||||
"Failed to parse user-specified time duration.",
|
||||
));
|
||||
}
|
||||
};
|
||||
/// Deletes all remote only media files in the given at or after
|
||||
/// time/duration. Returns a u32 with the amount of media files deleted.
|
||||
pub async fn delete_all_remote_media_at_after_time(&self, time: String) -> Result<u32> {
|
||||
if let Ok(all_keys) = self.db.get_all_media_keys() {
|
||||
let user_duration: SystemTime = match cyborgtime::parse_duration(&time) {
|
||||
Ok(duration) => {
|
||||
debug!("Parsed duration: {:?}", duration);
|
||||
debug!("System time now: {:?}", SystemTime::now());
|
||||
SystemTime::now() - duration
|
||||
},
|
||||
Err(e) => {
|
||||
error!("Failed to parse user-specified time duration: {}", e);
|
||||
return Err(Error::bad_database("Failed to parse user-specified time duration."));
|
||||
},
|
||||
};
|
||||
|
||||
let mut remote_mxcs: Vec<String> = vec![];
|
||||
let mut remote_mxcs: Vec<String> = vec![];
|
||||
|
||||
for key in all_keys {
|
||||
debug!("Full MXC key from database: {:?}", key);
|
||||
for key in all_keys {
|
||||
debug!("Full MXC key from database: {:?}", key);
|
||||
|
||||
// we need to get the MXC URL from the first part of the key (the first 0xff / 255 push)
|
||||
// this code does look kinda crazy but blame conduit for using magic keys
|
||||
let mut parts = key.split(|&b| b == 0xff);
|
||||
let mxc = parts
|
||||
.next()
|
||||
.map(|bytes| {
|
||||
utils::string_from_bytes(bytes).map_err(|e| {
|
||||
error!("Failed to parse MXC unicode bytes from our database: {}", e);
|
||||
Error::bad_database(
|
||||
"Failed to parse MXC unicode bytes from our database",
|
||||
)
|
||||
})
|
||||
})
|
||||
.transpose()?;
|
||||
// we need to get the MXC URL from the first part of the key (the first 0xff /
|
||||
// 255 push) this code does look kinda crazy but blame conduit for using magic
|
||||
// keys
|
||||
let mut parts = key.split(|&b| b == 0xFF);
|
||||
let mxc = parts
|
||||
.next()
|
||||
.map(|bytes| {
|
||||
utils::string_from_bytes(bytes).map_err(|e| {
|
||||
error!("Failed to parse MXC unicode bytes from our database: {}", e);
|
||||
Error::bad_database("Failed to parse MXC unicode bytes from our database")
|
||||
})
|
||||
})
|
||||
.transpose()?;
|
||||
|
||||
let mxc_s = match mxc {
|
||||
Some(mxc) => mxc,
|
||||
None => {
|
||||
return Err(Error::bad_database(
|
||||
"Parsed MXC URL unicode bytes from database but still is None",
|
||||
));
|
||||
}
|
||||
};
|
||||
let mxc_s = match mxc {
|
||||
Some(mxc) => mxc,
|
||||
None => {
|
||||
return Err(Error::bad_database(
|
||||
"Parsed MXC URL unicode bytes from database but still is None",
|
||||
));
|
||||
},
|
||||
};
|
||||
|
||||
debug!("Parsed MXC key to URL: {}", mxc_s);
|
||||
debug!("Parsed MXC key to URL: {}", mxc_s);
|
||||
|
||||
let mxc = OwnedMxcUri::from(mxc_s);
|
||||
if mxc.server_name() == Ok(services().globals.server_name()) {
|
||||
debug!("Ignoring local media MXC: {}", mxc);
|
||||
// ignore our own MXC URLs as this would be local media.
|
||||
continue;
|
||||
}
|
||||
let mxc = OwnedMxcUri::from(mxc_s);
|
||||
if mxc.server_name() == Ok(services().globals.server_name()) {
|
||||
debug!("Ignoring local media MXC: {}", mxc);
|
||||
// ignore our own MXC URLs as this would be local media.
|
||||
continue;
|
||||
}
|
||||
|
||||
let path = if cfg!(feature = "sha256_media") {
|
||||
services().globals.get_media_file_new(&key)
|
||||
} else {
|
||||
#[allow(deprecated)]
|
||||
services().globals.get_media_file(&key)
|
||||
};
|
||||
let path = if cfg!(feature = "sha256_media") {
|
||||
services().globals.get_media_file_new(&key)
|
||||
} else {
|
||||
#[allow(deprecated)]
|
||||
services().globals.get_media_file(&key)
|
||||
};
|
||||
|
||||
debug!("MXC path: {:?}", path);
|
||||
debug!("MXC path: {:?}", path);
|
||||
|
||||
let file_metadata = fs::metadata(path.clone()).await?;
|
||||
debug!("File metadata: {:?}", file_metadata);
|
||||
let file_metadata = fs::metadata(path.clone()).await?;
|
||||
debug!("File metadata: {:?}", file_metadata);
|
||||
|
||||
let file_created_at = file_metadata.created()?;
|
||||
debug!("File created at: {:?}", file_created_at);
|
||||
let file_created_at = file_metadata.created()?;
|
||||
debug!("File created at: {:?}", file_created_at);
|
||||
|
||||
if file_created_at >= user_duration {
|
||||
debug!("File is within user duration, pushing to list of file paths and keys to delete.");
|
||||
remote_mxcs.push(mxc.to_string());
|
||||
}
|
||||
}
|
||||
if file_created_at >= user_duration {
|
||||
debug!("File is within user duration, pushing to list of file paths and keys to delete.");
|
||||
remote_mxcs.push(mxc.to_string());
|
||||
}
|
||||
}
|
||||
|
||||
debug!("Finished going through all our media in database for eligible keys to delete, checking if these are empty");
|
||||
debug!(
|
||||
"Finished going through all our media in database for eligible keys to delete, checking if these are \
|
||||
empty"
|
||||
);
|
||||
|
||||
if remote_mxcs.is_empty() {
|
||||
return Err(Error::bad_database(
|
||||
"Did not found any eligible MXCs to delete.",
|
||||
));
|
||||
}
|
||||
if remote_mxcs.is_empty() {
|
||||
return Err(Error::bad_database("Did not found any eligible MXCs to delete."));
|
||||
}
|
||||
|
||||
debug!("Deleting media now in the past \"{:?}\".", user_duration);
|
||||
debug!("Deleting media now in the past \"{:?}\".", user_duration);
|
||||
|
||||
let mut deletion_count = 0;
|
||||
let mut deletion_count = 0;
|
||||
|
||||
for mxc in remote_mxcs {
|
||||
debug!("Deleting MXC {mxc} from database and filesystem");
|
||||
self.delete(mxc).await?;
|
||||
deletion_count += 1;
|
||||
}
|
||||
for mxc in remote_mxcs {
|
||||
debug!("Deleting MXC {mxc} from database and filesystem");
|
||||
self.delete(mxc).await?;
|
||||
deletion_count += 1;
|
||||
}
|
||||
|
||||
Ok(deletion_count)
|
||||
} else {
|
||||
Err(Error::bad_database(
|
||||
"Failed to get all our media keys (filesystem or database issue?).",
|
||||
))
|
||||
}
|
||||
}
|
||||
Ok(deletion_count)
|
||||
} else {
|
||||
Err(Error::bad_database(
|
||||
"Failed to get all our media keys (filesystem or database issue?).",
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns width, height of the thumbnail and whether it should be cropped. Returns None when
|
||||
/// the server should send the original file.
|
||||
pub fn thumbnail_properties(&self, width: u32, height: u32) -> Option<(u32, u32, bool)> {
|
||||
match (width, height) {
|
||||
(0..=32, 0..=32) => Some((32, 32, true)),
|
||||
(0..=96, 0..=96) => Some((96, 96, true)),
|
||||
(0..=320, 0..=240) => Some((320, 240, false)),
|
||||
(0..=640, 0..=480) => Some((640, 480, false)),
|
||||
(0..=800, 0..=600) => Some((800, 600, false)),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
/// Returns width, height of the thumbnail and whether it should be cropped.
|
||||
/// Returns None when the server should send the original file.
|
||||
pub fn thumbnail_properties(&self, width: u32, height: u32) -> Option<(u32, u32, bool)> {
|
||||
match (width, height) {
|
||||
(0..=32, 0..=32) => Some((32, 32, true)),
|
||||
(0..=96, 0..=96) => Some((96, 96, true)),
|
||||
(0..=320, 0..=240) => Some((320, 240, false)),
|
||||
(0..=640, 0..=480) => Some((640, 480, false)),
|
||||
(0..=800, 0..=600) => Some((800, 600, false)),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Downloads a file's thumbnail.
|
||||
///
|
||||
/// Here's an example on how it works:
|
||||
///
|
||||
/// - Client requests an image with width=567, height=567
|
||||
/// - Server rounds that up to (800, 600), so it doesn't have to save too many thumbnails
|
||||
/// - Server rounds that up again to (958, 600) to fix the aspect ratio (only for width,height>96)
|
||||
/// - Server creates the thumbnail and sends it to the user
|
||||
///
|
||||
/// For width,height <= 96 the server uses another thumbnailing algorithm which crops the image afterwards.
|
||||
pub async fn get_thumbnail(
|
||||
&self,
|
||||
mxc: String,
|
||||
width: u32,
|
||||
height: u32,
|
||||
) -> Result<Option<FileMeta>> {
|
||||
let (width, height, crop) = self
|
||||
.thumbnail_properties(width, height)
|
||||
.unwrap_or((0, 0, false)); // 0, 0 because that's the original file
|
||||
/// Downloads a file's thumbnail.
|
||||
///
|
||||
/// Here's an example on how it works:
|
||||
///
|
||||
/// - Client requests an image with width=567, height=567
|
||||
/// - Server rounds that up to (800, 600), so it doesn't have to save too
|
||||
/// many thumbnails
|
||||
/// - Server rounds that up again to (958, 600) to fix the aspect ratio
|
||||
/// (only for width,height>96)
|
||||
/// - Server creates the thumbnail and sends it to the user
|
||||
///
|
||||
/// For width,height <= 96 the server uses another thumbnailing algorithm
|
||||
/// which crops the image afterwards.
|
||||
pub async fn get_thumbnail(&self, mxc: String, width: u32, height: u32) -> Result<Option<FileMeta>> {
|
||||
let (width, height, crop) = self.thumbnail_properties(width, height).unwrap_or((0, 0, false)); // 0, 0 because that's the original file
|
||||
|
||||
if let Ok((content_disposition, content_type, key)) =
|
||||
self.db.search_file_metadata(mxc.clone(), width, height)
|
||||
{
|
||||
// Using saved thumbnail
|
||||
let path = if cfg!(feature = "sha256_media") {
|
||||
services().globals.get_media_file_new(&key)
|
||||
} else {
|
||||
#[allow(deprecated)]
|
||||
services().globals.get_media_file(&key)
|
||||
};
|
||||
if let Ok((content_disposition, content_type, key)) = self.db.search_file_metadata(mxc.clone(), width, height) {
|
||||
// Using saved thumbnail
|
||||
let path = if cfg!(feature = "sha256_media") {
|
||||
services().globals.get_media_file_new(&key)
|
||||
} else {
|
||||
#[allow(deprecated)]
|
||||
services().globals.get_media_file(&key)
|
||||
};
|
||||
|
||||
let mut file = Vec::new();
|
||||
File::open(path).await?.read_to_end(&mut file).await?;
|
||||
let mut file = Vec::new();
|
||||
File::open(path).await?.read_to_end(&mut file).await?;
|
||||
|
||||
Ok(Some(FileMeta {
|
||||
content_disposition,
|
||||
content_type,
|
||||
file: file.clone(),
|
||||
}))
|
||||
} else if let Ok((content_disposition, content_type, key)) =
|
||||
self.db.search_file_metadata(mxc.clone(), 0, 0)
|
||||
{
|
||||
// Generate a thumbnail
|
||||
let path = if cfg!(feature = "sha256_media") {
|
||||
services().globals.get_media_file_new(&key)
|
||||
} else {
|
||||
#[allow(deprecated)]
|
||||
services().globals.get_media_file(&key)
|
||||
};
|
||||
Ok(Some(FileMeta {
|
||||
content_disposition,
|
||||
content_type,
|
||||
file: file.clone(),
|
||||
}))
|
||||
} else if let Ok((content_disposition, content_type, key)) = self.db.search_file_metadata(mxc.clone(), 0, 0) {
|
||||
// Generate a thumbnail
|
||||
let path = if cfg!(feature = "sha256_media") {
|
||||
services().globals.get_media_file_new(&key)
|
||||
} else {
|
||||
#[allow(deprecated)]
|
||||
services().globals.get_media_file(&key)
|
||||
};
|
||||
|
||||
let mut file = Vec::new();
|
||||
File::open(path).await?.read_to_end(&mut file).await?;
|
||||
let mut file = Vec::new();
|
||||
File::open(path).await?.read_to_end(&mut file).await?;
|
||||
|
||||
if let Ok(image) = image::load_from_memory(&file) {
|
||||
let original_width = image.width();
|
||||
let original_height = image.height();
|
||||
if width > original_width || height > original_height {
|
||||
return Ok(Some(FileMeta {
|
||||
content_disposition,
|
||||
content_type,
|
||||
file: file.clone(),
|
||||
}));
|
||||
}
|
||||
if let Ok(image) = image::load_from_memory(&file) {
|
||||
let original_width = image.width();
|
||||
let original_height = image.height();
|
||||
if width > original_width || height > original_height {
|
||||
return Ok(Some(FileMeta {
|
||||
content_disposition,
|
||||
content_type,
|
||||
file: file.clone(),
|
||||
}));
|
||||
}
|
||||
|
||||
let thumbnail = if crop {
|
||||
image.resize_to_fill(width, height, FilterType::CatmullRom)
|
||||
} else {
|
||||
let (exact_width, exact_height) = {
|
||||
// Copied from image::dynimage::resize_dimensions
|
||||
let ratio = u64::from(original_width) * u64::from(height);
|
||||
let nratio = u64::from(width) * u64::from(original_height);
|
||||
let thumbnail = if crop {
|
||||
image.resize_to_fill(width, height, FilterType::CatmullRom)
|
||||
} else {
|
||||
let (exact_width, exact_height) = {
|
||||
// Copied from image::dynimage::resize_dimensions
|
||||
let ratio = u64::from(original_width) * u64::from(height);
|
||||
let nratio = u64::from(width) * u64::from(original_height);
|
||||
|
||||
let use_width = nratio <= ratio;
|
||||
let intermediate = if use_width {
|
||||
u64::from(original_height) * u64::from(width)
|
||||
/ u64::from(original_width)
|
||||
} else {
|
||||
u64::from(original_width) * u64::from(height)
|
||||
/ u64::from(original_height)
|
||||
};
|
||||
if use_width {
|
||||
if intermediate <= u64::from(::std::u32::MAX) {
|
||||
(width, intermediate as u32)
|
||||
} else {
|
||||
(
|
||||
(u64::from(width) * u64::from(::std::u32::MAX) / intermediate)
|
||||
as u32,
|
||||
::std::u32::MAX,
|
||||
)
|
||||
}
|
||||
} else if intermediate <= u64::from(::std::u32::MAX) {
|
||||
(intermediate as u32, height)
|
||||
} else {
|
||||
(
|
||||
::std::u32::MAX,
|
||||
(u64::from(height) * u64::from(::std::u32::MAX) / intermediate)
|
||||
as u32,
|
||||
)
|
||||
}
|
||||
};
|
||||
let use_width = nratio <= ratio;
|
||||
let intermediate = if use_width {
|
||||
u64::from(original_height) * u64::from(width) / u64::from(original_width)
|
||||
} else {
|
||||
u64::from(original_width) * u64::from(height) / u64::from(original_height)
|
||||
};
|
||||
if use_width {
|
||||
if intermediate <= u64::from(::std::u32::MAX) {
|
||||
(width, intermediate as u32)
|
||||
} else {
|
||||
(
|
||||
(u64::from(width) * u64::from(::std::u32::MAX) / intermediate) as u32,
|
||||
::std::u32::MAX,
|
||||
)
|
||||
}
|
||||
} else if intermediate <= u64::from(::std::u32::MAX) {
|
||||
(intermediate as u32, height)
|
||||
} else {
|
||||
(
|
||||
::std::u32::MAX,
|
||||
(u64::from(height) * u64::from(::std::u32::MAX) / intermediate) as u32,
|
||||
)
|
||||
}
|
||||
};
|
||||
|
||||
image.thumbnail_exact(exact_width, exact_height)
|
||||
};
|
||||
image.thumbnail_exact(exact_width, exact_height)
|
||||
};
|
||||
|
||||
let mut thumbnail_bytes = Vec::new();
|
||||
thumbnail.write_to(
|
||||
&mut Cursor::new(&mut thumbnail_bytes),
|
||||
image::ImageOutputFormat::Png,
|
||||
)?;
|
||||
let mut thumbnail_bytes = Vec::new();
|
||||
thumbnail.write_to(&mut Cursor::new(&mut thumbnail_bytes), image::ImageOutputFormat::Png)?;
|
||||
|
||||
// Save thumbnail in database so we don't have to generate it again next time
|
||||
let thumbnail_key = self.db.create_file_metadata(
|
||||
mxc,
|
||||
width,
|
||||
height,
|
||||
content_disposition.as_deref(),
|
||||
content_type.as_deref(),
|
||||
)?;
|
||||
// Save thumbnail in database so we don't have to generate it again next time
|
||||
let thumbnail_key = self.db.create_file_metadata(
|
||||
mxc,
|
||||
width,
|
||||
height,
|
||||
content_disposition.as_deref(),
|
||||
content_type.as_deref(),
|
||||
)?;
|
||||
|
||||
let path = if cfg!(feature = "sha256_media") {
|
||||
services().globals.get_media_file_new(&thumbnail_key)
|
||||
} else {
|
||||
#[allow(deprecated)]
|
||||
services().globals.get_media_file(&thumbnail_key)
|
||||
};
|
||||
let path = if cfg!(feature = "sha256_media") {
|
||||
services().globals.get_media_file_new(&thumbnail_key)
|
||||
} else {
|
||||
#[allow(deprecated)]
|
||||
services().globals.get_media_file(&thumbnail_key)
|
||||
};
|
||||
|
||||
let mut f = File::create(path).await?;
|
||||
f.write_all(&thumbnail_bytes).await?;
|
||||
let mut f = File::create(path).await?;
|
||||
f.write_all(&thumbnail_bytes).await?;
|
||||
|
||||
Ok(Some(FileMeta {
|
||||
content_disposition,
|
||||
content_type,
|
||||
file: thumbnail_bytes.clone(),
|
||||
}))
|
||||
} else {
|
||||
// Couldn't parse file to generate thumbnail, send original
|
||||
Ok(Some(FileMeta {
|
||||
content_disposition,
|
||||
content_type,
|
||||
file: file.clone(),
|
||||
}))
|
||||
}
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
Ok(Some(FileMeta {
|
||||
content_disposition,
|
||||
content_type,
|
||||
file: thumbnail_bytes.clone(),
|
||||
}))
|
||||
} else {
|
||||
// Couldn't parse file to generate thumbnail, send original
|
||||
Ok(Some(FileMeta {
|
||||
content_disposition,
|
||||
content_type,
|
||||
file: file.clone(),
|
||||
}))
|
||||
}
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_url_preview(&self, url: &str) -> Option<UrlPreviewData> {
|
||||
self.db.get_url_preview(url)
|
||||
}
|
||||
pub async fn get_url_preview(&self, url: &str) -> Option<UrlPreviewData> { self.db.get_url_preview(url) }
|
||||
|
||||
pub async fn remove_url_preview(&self, url: &str) -> Result<()> {
|
||||
// TODO: also remove the downloaded image
|
||||
self.db.remove_url_preview(url)
|
||||
}
|
||||
pub async fn remove_url_preview(&self, url: &str) -> Result<()> {
|
||||
// TODO: also remove the downloaded image
|
||||
self.db.remove_url_preview(url)
|
||||
}
|
||||
|
||||
pub async fn set_url_preview(&self, url: &str, data: &UrlPreviewData) -> Result<()> {
|
||||
let now = SystemTime::now()
|
||||
.duration_since(SystemTime::UNIX_EPOCH)
|
||||
.expect("valid system time");
|
||||
self.db.set_url_preview(url, data, now)
|
||||
}
|
||||
pub async fn set_url_preview(&self, url: &str, data: &UrlPreviewData) -> Result<()> {
|
||||
let now = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).expect("valid system time");
|
||||
self.db.set_url_preview(url, data, now)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::path::PathBuf;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use sha2::Digest;
|
||||
use base64::{engine::general_purpose, Engine as _};
|
||||
use sha2::Digest;
|
||||
|
||||
use base64::{engine::general_purpose, Engine as _};
|
||||
use super::*;
|
||||
|
||||
use super::*;
|
||||
struct MockedKVDatabase;
|
||||
|
||||
struct MockedKVDatabase;
|
||||
impl Data for MockedKVDatabase {
|
||||
fn create_file_metadata(
|
||||
&self, mxc: String, width: u32, height: u32, content_disposition: Option<&str>, content_type: Option<&str>,
|
||||
) -> Result<Vec<u8>> {
|
||||
// copied from src/database/key_value/media.rs
|
||||
let mut key = mxc.as_bytes().to_vec();
|
||||
key.push(0xFF);
|
||||
key.extend_from_slice(&width.to_be_bytes());
|
||||
key.extend_from_slice(&height.to_be_bytes());
|
||||
key.push(0xFF);
|
||||
key.extend_from_slice(content_disposition.as_ref().map(|f| f.as_bytes()).unwrap_or_default());
|
||||
key.push(0xFF);
|
||||
key.extend_from_slice(content_type.as_ref().map(|c| c.as_bytes()).unwrap_or_default());
|
||||
|
||||
impl Data for MockedKVDatabase {
|
||||
fn create_file_metadata(
|
||||
&self,
|
||||
mxc: String,
|
||||
width: u32,
|
||||
height: u32,
|
||||
content_disposition: Option<&str>,
|
||||
content_type: Option<&str>,
|
||||
) -> Result<Vec<u8>> {
|
||||
// copied from src/database/key_value/media.rs
|
||||
let mut key = mxc.as_bytes().to_vec();
|
||||
key.push(0xff);
|
||||
key.extend_from_slice(&width.to_be_bytes());
|
||||
key.extend_from_slice(&height.to_be_bytes());
|
||||
key.push(0xff);
|
||||
key.extend_from_slice(
|
||||
content_disposition
|
||||
.as_ref()
|
||||
.map(|f| f.as_bytes())
|
||||
.unwrap_or_default(),
|
||||
);
|
||||
key.push(0xff);
|
||||
key.extend_from_slice(
|
||||
content_type
|
||||
.as_ref()
|
||||
.map(|c| c.as_bytes())
|
||||
.unwrap_or_default(),
|
||||
);
|
||||
Ok(key)
|
||||
}
|
||||
|
||||
Ok(key)
|
||||
}
|
||||
fn delete_file_mxc(&self, _mxc: String) -> Result<()> { todo!() }
|
||||
|
||||
fn delete_file_mxc(&self, _mxc: String) -> Result<()> {
|
||||
todo!()
|
||||
}
|
||||
fn search_mxc_metadata_prefix(&self, _mxc: String) -> Result<Vec<Vec<u8>>> { todo!() }
|
||||
|
||||
fn search_mxc_metadata_prefix(&self, _mxc: String) -> Result<Vec<Vec<u8>>> {
|
||||
todo!()
|
||||
}
|
||||
fn get_all_media_keys(&self) -> Result<Vec<Vec<u8>>> { todo!() }
|
||||
|
||||
fn get_all_media_keys(&self) -> Result<Vec<Vec<u8>>> {
|
||||
todo!()
|
||||
}
|
||||
fn search_file_metadata(
|
||||
&self, _mxc: String, _width: u32, _height: u32,
|
||||
) -> Result<(Option<String>, Option<String>, Vec<u8>)> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn search_file_metadata(
|
||||
&self,
|
||||
_mxc: String,
|
||||
_width: u32,
|
||||
_height: u32,
|
||||
) -> Result<(Option<String>, Option<String>, Vec<u8>)> {
|
||||
todo!()
|
||||
}
|
||||
fn remove_url_preview(&self, _url: &str) -> Result<()> { todo!() }
|
||||
|
||||
fn remove_url_preview(&self, _url: &str) -> Result<()> {
|
||||
todo!()
|
||||
}
|
||||
fn set_url_preview(&self, _url: &str, _data: &UrlPreviewData, _timestamp: std::time::Duration) -> Result<()> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn set_url_preview(
|
||||
&self,
|
||||
_url: &str,
|
||||
_data: &UrlPreviewData,
|
||||
_timestamp: std::time::Duration,
|
||||
) -> Result<()> {
|
||||
todo!()
|
||||
}
|
||||
fn get_url_preview(&self, _url: &str) -> Option<UrlPreviewData> { todo!() }
|
||||
}
|
||||
|
||||
fn get_url_preview(&self, _url: &str) -> Option<UrlPreviewData> {
|
||||
todo!()
|
||||
}
|
||||
}
|
||||
#[tokio::test]
async fn long_file_names_works() {
	static DB: MockedKVDatabase = MockedKVDatabase;
	let media = Service {
		db: &DB,
		url_preview_mutex: RwLock::new(HashMap::new()),
	};

#[tokio::test]
async fn long_file_names_works() {
	static DB: MockedKVDatabase = MockedKVDatabase;
	let media = Service {
		db: &DB,
		url_preview_mutex: RwLock::new(HashMap::new()),
	};

	let mxc = "mxc://example.com/ascERGshawAWawugaAcauga".to_owned();
	let width = 100;
	let height = 100;
	let content_disposition = "attachment; filename=\"this is a very long file name with spaces and special characters like äöüß and even emoji like 🦀.png\"";
	let content_type = "image/png";
	let key = media
		.db
		.create_file_metadata(
			mxc,
			width,
			height,
			Some(content_disposition),
			Some(content_type),
		)
		.unwrap();
	let mut r = PathBuf::new();
	r.push("/tmp");
	r.push("media");
	// r.push(base64::encode_config(key, base64::URL_SAFE_NO_PAD));
	// use the sha256 hash of the key as the file name instead of the key itself
	// this is because the base64 encoded key can be longer than 255 characters.
	r.push(general_purpose::URL_SAFE_NO_PAD.encode(sha2::Sha256::digest(key)));
	// Check that the file path is not longer than 255 characters
	// (255 is the maximum length of a file path on most file systems)
	assert!(
		r.to_str().unwrap().len() <= 255,
		"File path is too long: {}",
		r.to_str().unwrap().len()
	);
}

	let mxc = "mxc://example.com/ascERGshawAWawugaAcauga".to_owned();
	let width = 100;
	let height = 100;
	let content_disposition = "attachment; filename=\"this is a very long file name with spaces and special \
	                           characters like äöüß and even emoji like 🦀.png\"";
	let content_type = "image/png";
	let key =
		media.db.create_file_metadata(mxc, width, height, Some(content_disposition), Some(content_type)).unwrap();
	let mut r = PathBuf::new();
	r.push("/tmp");
	r.push("media");
	// r.push(base64::encode_config(key, base64::URL_SAFE_NO_PAD));
	// use the sha256 hash of the key as the file name instead of the key itself
	// this is because the base64 encoded key can be longer than 255 characters.
	r.push(general_purpose::URL_SAFE_NO_PAD.encode(sha2::Sha256::digest(key)));
	// Check that the file path is not longer than 255 characters
	// (255 is the maximum length of a file path on most file systems)
	assert!(
		r.to_str().unwrap().len() <= 255,
		"File path is too long: {}",
		r.to_str().unwrap().len()
	);
}
}
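The test above stores the SHA-256 digest of the key rather than the key itself, so the on-disk file name stays well under common 255-character limits no matter how long the embedded filename is. A minimal sketch of that derivation; `media_path_for_key` and the media directory are illustrative assumptions, not the server's actual configuration:

use std::path::PathBuf;

use base64::{engine::general_purpose, Engine as _};
use sha2::{Digest, Sha256};

// Hypothetical helper mirroring what the test checks: a 32-byte SHA-256
// digest encodes to 43 URL-safe base64 characters, regardless of how long
// the original key was.
fn media_path_for_key(media_dir: &str, key: &[u8]) -> PathBuf {
	let mut path = PathBuf::from(media_dir);
	path.push(general_purpose::URL_SAFE_NO_PAD.encode(Sha256::digest(key)));
	path
}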

@ -1,6 +1,6 @@
use std::{
	collections::{BTreeMap, HashMap},
	sync::{Arc, Mutex, RwLock},
	collections::{BTreeMap, HashMap},
	sync::{Arc, Mutex, RwLock},
};

use lru_cache::LruCache;

@ -22,210 +22,186 @@ pub(crate) mod uiaa;
|
|||
pub(crate) mod users;
|
||||
|
||||
pub struct Services<'a> {
|
||||
pub appservice: appservice::Service,
|
||||
pub pusher: pusher::Service,
|
||||
pub rooms: rooms::Service,
|
||||
pub transaction_ids: transaction_ids::Service,
|
||||
pub uiaa: uiaa::Service,
|
||||
pub users: users::Service,
|
||||
pub account_data: account_data::Service,
|
||||
pub admin: Arc<admin::Service>,
|
||||
pub globals: globals::Service<'a>,
|
||||
pub key_backups: key_backups::Service,
|
||||
pub media: media::Service,
|
||||
pub sending: Arc<sending::Service>,
|
||||
pub appservice: appservice::Service,
|
||||
pub pusher: pusher::Service,
|
||||
pub rooms: rooms::Service,
|
||||
pub transaction_ids: transaction_ids::Service,
|
||||
pub uiaa: uiaa::Service,
|
||||
pub users: users::Service,
|
||||
pub account_data: account_data::Service,
|
||||
pub admin: Arc<admin::Service>,
|
||||
pub globals: globals::Service<'a>,
|
||||
pub key_backups: key_backups::Service,
|
||||
pub media: media::Service,
|
||||
pub sending: Arc<sending::Service>,
|
||||
}
|
||||
|
||||
impl Services<'_> {
|
||||
pub fn build<
|
||||
D: appservice::Data
|
||||
+ pusher::Data
|
||||
+ rooms::Data
|
||||
+ transaction_ids::Data
|
||||
+ uiaa::Data
|
||||
+ users::Data
|
||||
+ account_data::Data
|
||||
+ globals::Data
|
||||
+ key_backups::Data
|
||||
+ media::Data
|
||||
+ sending::Data
|
||||
+ 'static,
|
||||
>(
|
||||
db: &'static D,
|
||||
config: Config,
|
||||
) -> Result<Self> {
|
||||
Ok(Self {
|
||||
appservice: appservice::Service { db },
|
||||
pusher: pusher::Service { db },
|
||||
rooms: rooms::Service {
|
||||
alias: rooms::alias::Service { db },
|
||||
auth_chain: rooms::auth_chain::Service { db },
|
||||
directory: rooms::directory::Service { db },
|
||||
edus: rooms::edus::Service {
|
||||
presence: rooms::edus::presence::Service { db },
|
||||
read_receipt: rooms::edus::read_receipt::Service { db },
|
||||
typing: rooms::edus::typing::Service { db },
|
||||
},
|
||||
event_handler: rooms::event_handler::Service,
|
||||
lazy_loading: rooms::lazy_loading::Service {
|
||||
db,
|
||||
lazy_load_waiting: Mutex::new(HashMap::new()),
|
||||
},
|
||||
metadata: rooms::metadata::Service { db },
|
||||
outlier: rooms::outlier::Service { db },
|
||||
pdu_metadata: rooms::pdu_metadata::Service { db },
|
||||
search: rooms::search::Service { db },
|
||||
short: rooms::short::Service { db },
|
||||
state: rooms::state::Service { db },
|
||||
state_accessor: rooms::state_accessor::Service {
|
||||
db,
|
||||
server_visibility_cache: Mutex::new(LruCache::new(
|
||||
(100.0 * config.conduit_cache_capacity_modifier) as usize,
|
||||
)),
|
||||
user_visibility_cache: Mutex::new(LruCache::new(
|
||||
(100.0 * config.conduit_cache_capacity_modifier) as usize,
|
||||
)),
|
||||
},
|
||||
state_cache: rooms::state_cache::Service { db },
|
||||
state_compressor: rooms::state_compressor::Service {
|
||||
db,
|
||||
stateinfo_cache: Mutex::new(LruCache::new(
|
||||
(100.0 * config.conduit_cache_capacity_modifier) as usize,
|
||||
)),
|
||||
},
|
||||
timeline: rooms::timeline::Service {
|
||||
db,
|
||||
lasttimelinecount_cache: Mutex::new(HashMap::new()),
|
||||
},
|
||||
threads: rooms::threads::Service { db },
|
||||
spaces: rooms::spaces::Service {
|
||||
roomid_spacechunk_cache: Mutex::new(LruCache::new(
|
||||
(100.0 * config.conduit_cache_capacity_modifier) as usize,
|
||||
)),
|
||||
},
|
||||
user: rooms::user::Service { db },
|
||||
},
|
||||
transaction_ids: transaction_ids::Service { db },
|
||||
uiaa: uiaa::Service { db },
|
||||
users: users::Service {
|
||||
db,
|
||||
connections: Mutex::new(BTreeMap::new()),
|
||||
},
|
||||
account_data: account_data::Service { db },
|
||||
admin: admin::Service::build(),
|
||||
key_backups: key_backups::Service { db },
|
||||
media: media::Service {
|
||||
db,
|
||||
url_preview_mutex: RwLock::new(HashMap::new()),
|
||||
},
|
||||
sending: sending::Service::build(db, &config),
|
||||
pub fn build<
|
||||
D: appservice::Data
|
||||
+ pusher::Data
|
||||
+ rooms::Data
|
||||
+ transaction_ids::Data
|
||||
+ uiaa::Data
|
||||
+ users::Data
|
||||
+ account_data::Data
|
||||
+ globals::Data
|
||||
+ key_backups::Data
|
||||
+ media::Data
|
||||
+ sending::Data
|
||||
+ 'static,
|
||||
>(
|
||||
db: &'static D, config: Config,
|
||||
) -> Result<Self> {
|
||||
Ok(Self {
|
||||
appservice: appservice::Service {
|
||||
db,
|
||||
},
|
||||
pusher: pusher::Service {
|
||||
db,
|
||||
},
|
||||
rooms: rooms::Service {
|
||||
alias: rooms::alias::Service {
|
||||
db,
|
||||
},
|
||||
auth_chain: rooms::auth_chain::Service {
|
||||
db,
|
||||
},
|
||||
directory: rooms::directory::Service {
|
||||
db,
|
||||
},
|
||||
edus: rooms::edus::Service {
|
||||
presence: rooms::edus::presence::Service {
|
||||
db,
|
||||
},
|
||||
read_receipt: rooms::edus::read_receipt::Service {
|
||||
db,
|
||||
},
|
||||
typing: rooms::edus::typing::Service {
|
||||
db,
|
||||
},
|
||||
},
|
||||
event_handler: rooms::event_handler::Service,
|
||||
lazy_loading: rooms::lazy_loading::Service {
|
||||
db,
|
||||
lazy_load_waiting: Mutex::new(HashMap::new()),
|
||||
},
|
||||
metadata: rooms::metadata::Service {
|
||||
db,
|
||||
},
|
||||
outlier: rooms::outlier::Service {
|
||||
db,
|
||||
},
|
||||
pdu_metadata: rooms::pdu_metadata::Service {
|
||||
db,
|
||||
},
|
||||
search: rooms::search::Service {
|
||||
db,
|
||||
},
|
||||
short: rooms::short::Service {
|
||||
db,
|
||||
},
|
||||
state: rooms::state::Service {
|
||||
db,
|
||||
},
|
||||
state_accessor: rooms::state_accessor::Service {
|
||||
db,
|
||||
server_visibility_cache: Mutex::new(LruCache::new(
|
||||
(100.0 * config.conduit_cache_capacity_modifier) as usize,
|
||||
)),
|
||||
user_visibility_cache: Mutex::new(LruCache::new(
|
||||
(100.0 * config.conduit_cache_capacity_modifier) as usize,
|
||||
)),
|
||||
},
|
||||
state_cache: rooms::state_cache::Service {
|
||||
db,
|
||||
},
|
||||
state_compressor: rooms::state_compressor::Service {
|
||||
db,
|
||||
stateinfo_cache: Mutex::new(LruCache::new(
|
||||
(100.0 * config.conduit_cache_capacity_modifier) as usize,
|
||||
)),
|
||||
},
|
||||
timeline: rooms::timeline::Service {
|
||||
db,
|
||||
lasttimelinecount_cache: Mutex::new(HashMap::new()),
|
||||
},
|
||||
threads: rooms::threads::Service {
|
||||
db,
|
||||
},
|
||||
spaces: rooms::spaces::Service {
|
||||
roomid_spacechunk_cache: Mutex::new(LruCache::new(
|
||||
(100.0 * config.conduit_cache_capacity_modifier) as usize,
|
||||
)),
|
||||
},
|
||||
user: rooms::user::Service {
|
||||
db,
|
||||
},
|
||||
},
|
||||
transaction_ids: transaction_ids::Service {
|
||||
db,
|
||||
},
|
||||
uiaa: uiaa::Service {
|
||||
db,
|
||||
},
|
||||
users: users::Service {
|
||||
db,
|
||||
connections: Mutex::new(BTreeMap::new()),
|
||||
},
|
||||
account_data: account_data::Service {
|
||||
db,
|
||||
},
|
||||
admin: admin::Service::build(),
|
||||
key_backups: key_backups::Service {
|
||||
db,
|
||||
},
|
||||
media: media::Service {
|
||||
db,
|
||||
url_preview_mutex: RwLock::new(HashMap::new()),
|
||||
},
|
||||
sending: sending::Service::build(db, &config),
|
||||
|
||||
globals: globals::Service::load(db, config)?,
|
||||
})
|
||||
}
|
||||
fn memory_usage(&self) -> String {
|
||||
let lazy_load_waiting = self
|
||||
.rooms
|
||||
.lazy_loading
|
||||
.lazy_load_waiting
|
||||
.lock()
|
||||
.unwrap()
|
||||
.len();
|
||||
let server_visibility_cache = self
|
||||
.rooms
|
||||
.state_accessor
|
||||
.server_visibility_cache
|
||||
.lock()
|
||||
.unwrap()
|
||||
.len();
|
||||
let user_visibility_cache = self
|
||||
.rooms
|
||||
.state_accessor
|
||||
.user_visibility_cache
|
||||
.lock()
|
||||
.unwrap()
|
||||
.len();
|
||||
let stateinfo_cache = self
|
||||
.rooms
|
||||
.state_compressor
|
||||
.stateinfo_cache
|
||||
.lock()
|
||||
.unwrap()
|
||||
.len();
|
||||
let lasttimelinecount_cache = self
|
||||
.rooms
|
||||
.timeline
|
||||
.lasttimelinecount_cache
|
||||
.lock()
|
||||
.unwrap()
|
||||
.len();
|
||||
let roomid_spacechunk_cache = self
|
||||
.rooms
|
||||
.spaces
|
||||
.roomid_spacechunk_cache
|
||||
.lock()
|
||||
.unwrap()
|
||||
.len();
|
||||
globals: globals::Service::load(db, config)?,
|
||||
})
|
||||
}
|
||||
|
||||
format!(
|
||||
"\
|
||||
fn memory_usage(&self) -> String {
|
||||
let lazy_load_waiting = self.rooms.lazy_loading.lazy_load_waiting.lock().unwrap().len();
|
||||
let server_visibility_cache = self.rooms.state_accessor.server_visibility_cache.lock().unwrap().len();
|
||||
let user_visibility_cache = self.rooms.state_accessor.user_visibility_cache.lock().unwrap().len();
|
||||
let stateinfo_cache = self.rooms.state_compressor.stateinfo_cache.lock().unwrap().len();
|
||||
let lasttimelinecount_cache = self.rooms.timeline.lasttimelinecount_cache.lock().unwrap().len();
|
||||
let roomid_spacechunk_cache = self.rooms.spaces.roomid_spacechunk_cache.lock().unwrap().len();
|
||||
|
||||
format!(
|
||||
"\
|
||||
lazy_load_waiting: {lazy_load_waiting}
|
||||
server_visibility_cache: {server_visibility_cache}
|
||||
user_visibility_cache: {user_visibility_cache}
|
||||
stateinfo_cache: {stateinfo_cache}
|
||||
lasttimelinecount_cache: {lasttimelinecount_cache}
|
||||
roomid_spacechunk_cache: {roomid_spacechunk_cache}\
|
||||
"
|
||||
)
|
||||
}
|
||||
fn clear_caches(&self, amount: u32) {
|
||||
if amount > 0 {
|
||||
self.rooms
|
||||
.lazy_loading
|
||||
.lazy_load_waiting
|
||||
.lock()
|
||||
.unwrap()
|
||||
.clear();
|
||||
}
|
||||
if amount > 1 {
|
||||
self.rooms
|
||||
.state_accessor
|
||||
.server_visibility_cache
|
||||
.lock()
|
||||
.unwrap()
|
||||
.clear();
|
||||
}
|
||||
if amount > 2 {
|
||||
self.rooms
|
||||
.state_accessor
|
||||
.user_visibility_cache
|
||||
.lock()
|
||||
.unwrap()
|
||||
.clear();
|
||||
}
|
||||
if amount > 3 {
|
||||
self.rooms
|
||||
.state_compressor
|
||||
.stateinfo_cache
|
||||
.lock()
|
||||
.unwrap()
|
||||
.clear();
|
||||
}
|
||||
if amount > 4 {
|
||||
self.rooms
|
||||
.timeline
|
||||
.lasttimelinecount_cache
|
||||
.lock()
|
||||
.unwrap()
|
||||
.clear();
|
||||
}
|
||||
if amount > 5 {
|
||||
self.rooms
|
||||
.spaces
|
||||
.roomid_spacechunk_cache
|
||||
.lock()
|
||||
.unwrap()
|
||||
.clear();
|
||||
}
|
||||
}
|
||||
roomid_spacechunk_cache: {roomid_spacechunk_cache}"
|
||||
)
|
||||
}
|
||||
|
||||
fn clear_caches(&self, amount: u32) {
|
||||
if amount > 0 {
|
||||
self.rooms.lazy_loading.lazy_load_waiting.lock().unwrap().clear();
|
||||
}
|
||||
if amount > 1 {
|
||||
self.rooms.state_accessor.server_visibility_cache.lock().unwrap().clear();
|
||||
}
|
||||
if amount > 2 {
|
||||
self.rooms.state_accessor.user_visibility_cache.lock().unwrap().clear();
|
||||
}
|
||||
if amount > 3 {
|
||||
self.rooms.state_compressor.stateinfo_cache.lock().unwrap().clear();
|
||||
}
|
||||
if amount > 4 {
|
||||
self.rooms.timeline.lasttimelinecount_cache.lock().unwrap().clear();
|
||||
}
|
||||
if amount > 5 {
|
||||
self.rooms.spaces.roomid_spacechunk_cache.lock().unwrap().clear();
|
||||
}
|
||||
}
|
||||
}
|
||||
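Every `LruCache` built in `Services::build` above scales the same base capacity of 100 entries by `conduit_cache_capacity_modifier`. A minimal sketch of that calculation; `cache_capacity` is a hypothetical helper name, not a function in the codebase:

// Hypothetical helper: the capacity used for server_visibility_cache,
// user_visibility_cache, stateinfo_cache and roomid_spacechunk_cache above.
fn cache_capacity(conduit_cache_capacity_modifier: f64) -> usize {
	// e.g. a modifier of 1.0 keeps the default 100 entries, 2.0 doubles it.
	(100.0 * conduit_cache_capacity_modifier) as usize
}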
|
|
|
@ -1,410 +1,372 @@
|
|||
use crate::Error;
|
||||
use std::{cmp::Ordering, collections::BTreeMap, sync::Arc};
|
||||
|
||||
use ruma::{
|
||||
canonical_json::redact_content_in_place,
|
||||
events::{
|
||||
room::member::RoomMemberEventContent, space::child::HierarchySpaceChildEvent,
|
||||
AnyEphemeralRoomEvent, AnyMessageLikeEvent, AnyStateEvent, AnyStrippedStateEvent,
|
||||
AnySyncStateEvent, AnySyncTimelineEvent, AnyTimelineEvent, StateEvent, TimelineEventType,
|
||||
},
|
||||
serde::Raw,
|
||||
state_res, CanonicalJsonObject, CanonicalJsonValue, EventId, MilliSecondsSinceUnixEpoch,
|
||||
OwnedEventId, OwnedRoomId, OwnedUserId, RoomId, RoomVersionId, UInt, UserId,
|
||||
canonical_json::redact_content_in_place,
|
||||
events::{
|
||||
room::member::RoomMemberEventContent, space::child::HierarchySpaceChildEvent, AnyEphemeralRoomEvent,
|
||||
AnyMessageLikeEvent, AnyStateEvent, AnyStrippedStateEvent, AnySyncStateEvent, AnySyncTimelineEvent,
|
||||
AnyTimelineEvent, StateEvent, TimelineEventType,
|
||||
},
|
||||
serde::Raw,
|
||||
state_res, CanonicalJsonObject, CanonicalJsonValue, EventId, MilliSecondsSinceUnixEpoch, OwnedEventId, OwnedRoomId,
|
||||
OwnedUserId, RoomId, RoomVersionId, UInt, UserId,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::{
|
||||
json,
|
||||
value::{to_raw_value, RawValue as RawJsonValue},
|
||||
json,
|
||||
value::{to_raw_value, RawValue as RawJsonValue},
|
||||
};
|
||||
use std::{cmp::Ordering, collections::BTreeMap, sync::Arc};
|
||||
use tracing::warn;
|
||||
|
||||
use crate::Error;
|
||||
|
||||
/// Content hashes of a PDU.
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
pub struct EventHash {
|
||||
/// The SHA-256 hash.
|
||||
pub sha256: String,
|
||||
/// The SHA-256 hash.
|
||||
pub sha256: String,
|
||||
}
|
||||
|
||||
#[derive(Clone, Deserialize, Serialize, Debug)]
|
||||
pub struct PduEvent {
|
||||
pub event_id: Arc<EventId>,
|
||||
pub room_id: OwnedRoomId,
|
||||
pub sender: OwnedUserId,
|
||||
pub origin_server_ts: UInt,
|
||||
#[serde(rename = "type")]
|
||||
pub kind: TimelineEventType,
|
||||
pub content: Box<RawJsonValue>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub state_key: Option<String>,
|
||||
pub prev_events: Vec<Arc<EventId>>,
|
||||
pub depth: UInt,
|
||||
pub auth_events: Vec<Arc<EventId>>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub redacts: Option<Arc<EventId>>,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub unsigned: Option<Box<RawJsonValue>>,
|
||||
pub hashes: EventHash,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub signatures: Option<Box<RawJsonValue>>, // BTreeMap<Box<ServerName>, BTreeMap<ServerSigningKeyId, String>>
|
||||
pub event_id: Arc<EventId>,
|
||||
pub room_id: OwnedRoomId,
|
||||
pub sender: OwnedUserId,
|
||||
pub origin_server_ts: UInt,
|
||||
#[serde(rename = "type")]
|
||||
pub kind: TimelineEventType,
|
||||
pub content: Box<RawJsonValue>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub state_key: Option<String>,
|
||||
pub prev_events: Vec<Arc<EventId>>,
|
||||
pub depth: UInt,
|
||||
pub auth_events: Vec<Arc<EventId>>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub redacts: Option<Arc<EventId>>,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub unsigned: Option<Box<RawJsonValue>>,
|
||||
pub hashes: EventHash,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub signatures: Option<Box<RawJsonValue>>, // BTreeMap<Box<ServerName>, BTreeMap<ServerSigningKeyId, String>>
|
||||
}
|
||||
|
||||
impl PduEvent {
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn redact(
|
||||
&mut self,
|
||||
room_version_id: RoomVersionId,
|
||||
reason: &PduEvent,
|
||||
) -> crate::Result<()> {
|
||||
self.unsigned = None;
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn redact(&mut self, room_version_id: RoomVersionId, reason: &PduEvent) -> crate::Result<()> {
|
||||
self.unsigned = None;
|
||||
|
||||
let mut content = serde_json::from_str(self.content.get())
|
||||
.map_err(|_| Error::bad_database("PDU in db has invalid content."))?;
|
||||
redact_content_in_place(&mut content, &room_version_id, self.kind.to_string())
|
||||
.map_err(|e| Error::RedactionError(self.sender.server_name().to_owned(), e))?;
|
||||
let mut content = serde_json::from_str(self.content.get())
|
||||
.map_err(|_| Error::bad_database("PDU in db has invalid content."))?;
|
||||
redact_content_in_place(&mut content, &room_version_id, self.kind.to_string())
|
||||
.map_err(|e| Error::RedactionError(self.sender.server_name().to_owned(), e))?;
|
||||
|
||||
self.unsigned = Some(to_raw_value(&json!({
|
||||
"redacted_because": serde_json::to_value(reason).expect("to_value(PduEvent) always works")
|
||||
})).expect("to string always works"));
|
||||
self.unsigned = Some(
|
||||
to_raw_value(&json!({
|
||||
"redacted_because": serde_json::to_value(reason).expect("to_value(PduEvent) always works")
|
||||
}))
|
||||
.expect("to string always works"),
|
||||
);
|
||||
|
||||
self.content = to_raw_value(&content).expect("to string always works");
|
||||
self.content = to_raw_value(&content).expect("to string always works");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn remove_transaction_id(&mut self) -> crate::Result<()> {
|
||||
if let Some(unsigned) = &self.unsigned {
|
||||
let mut unsigned: BTreeMap<String, Box<RawJsonValue>> =
|
||||
serde_json::from_str(unsigned.get())
|
||||
.map_err(|_| Error::bad_database("Invalid unsigned in pdu event"))?;
|
||||
unsigned.remove("transaction_id");
|
||||
self.unsigned = Some(to_raw_value(&unsigned).expect("unsigned is valid"));
|
||||
}
|
||||
pub fn remove_transaction_id(&mut self) -> crate::Result<()> {
|
||||
if let Some(unsigned) = &self.unsigned {
|
||||
let mut unsigned: BTreeMap<String, Box<RawJsonValue>> = serde_json::from_str(unsigned.get())
|
||||
.map_err(|_| Error::bad_database("Invalid unsigned in pdu event"))?;
|
||||
unsigned.remove("transaction_id");
|
||||
self.unsigned = Some(to_raw_value(&unsigned).expect("unsigned is valid"));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn add_age(&mut self) -> crate::Result<()> {
|
||||
let mut unsigned: BTreeMap<String, Box<RawJsonValue>> = self
|
||||
.unsigned
|
||||
.as_ref()
|
||||
.map_or_else(|| Ok(BTreeMap::new()), |u| serde_json::from_str(u.get()))
|
||||
.map_err(|_| Error::bad_database("Invalid unsigned in pdu event"))?;
|
||||
pub fn add_age(&mut self) -> crate::Result<()> {
|
||||
let mut unsigned: BTreeMap<String, Box<RawJsonValue>> = self
|
||||
.unsigned
|
||||
.as_ref()
|
||||
.map_or_else(|| Ok(BTreeMap::new()), |u| serde_json::from_str(u.get()))
|
||||
.map_err(|_| Error::bad_database("Invalid unsigned in pdu event"))?;
|
||||
|
||||
unsigned.insert("age".to_owned(), to_raw_value(&1).unwrap());
|
||||
self.unsigned = Some(to_raw_value(&unsigned).expect("unsigned is valid"));
|
||||
unsigned.insert("age".to_owned(), to_raw_value(&1).unwrap());
|
||||
self.unsigned = Some(to_raw_value(&unsigned).expect("unsigned is valid"));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn to_sync_room_event(&self) -> Raw<AnySyncTimelineEvent> {
|
||||
let mut json = json!({
|
||||
"content": self.content,
|
||||
"type": self.kind,
|
||||
"event_id": self.event_id,
|
||||
"sender": self.sender,
|
||||
"origin_server_ts": self.origin_server_ts,
|
||||
});
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn to_sync_room_event(&self) -> Raw<AnySyncTimelineEvent> {
|
||||
let mut json = json!({
|
||||
"content": self.content,
|
||||
"type": self.kind,
|
||||
"event_id": self.event_id,
|
||||
"sender": self.sender,
|
||||
"origin_server_ts": self.origin_server_ts,
|
||||
});
|
||||
|
||||
if let Some(unsigned) = &self.unsigned {
|
||||
json["unsigned"] = json!(unsigned);
|
||||
}
|
||||
if let Some(state_key) = &self.state_key {
|
||||
json["state_key"] = json!(state_key);
|
||||
}
|
||||
if let Some(redacts) = &self.redacts {
|
||||
json["redacts"] = json!(redacts);
|
||||
}
|
||||
if let Some(unsigned) = &self.unsigned {
|
||||
json["unsigned"] = json!(unsigned);
|
||||
}
|
||||
if let Some(state_key) = &self.state_key {
|
||||
json["state_key"] = json!(state_key);
|
||||
}
|
||||
if let Some(redacts) = &self.redacts {
|
||||
json["redacts"] = json!(redacts);
|
||||
}
|
||||
|
||||
serde_json::from_value(json).expect("Raw::from_value always works")
|
||||
}
|
||||
serde_json::from_value(json).expect("Raw::from_value always works")
|
||||
}
|
||||
|
||||
/// This only works for events that are also AnyRoomEvents.
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn to_any_event(&self) -> Raw<AnyEphemeralRoomEvent> {
|
||||
let mut json = json!({
|
||||
"content": self.content,
|
||||
"type": self.kind,
|
||||
"event_id": self.event_id,
|
||||
"sender": self.sender,
|
||||
"origin_server_ts": self.origin_server_ts,
|
||||
"room_id": self.room_id,
|
||||
});
|
||||
/// This only works for events that are also AnyRoomEvents.
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn to_any_event(&self) -> Raw<AnyEphemeralRoomEvent> {
|
||||
let mut json = json!({
|
||||
"content": self.content,
|
||||
"type": self.kind,
|
||||
"event_id": self.event_id,
|
||||
"sender": self.sender,
|
||||
"origin_server_ts": self.origin_server_ts,
|
||||
"room_id": self.room_id,
|
||||
});
|
||||
|
||||
if let Some(unsigned) = &self.unsigned {
|
||||
json["unsigned"] = json!(unsigned);
|
||||
}
|
||||
if let Some(state_key) = &self.state_key {
|
||||
json["state_key"] = json!(state_key);
|
||||
}
|
||||
if let Some(redacts) = &self.redacts {
|
||||
json["redacts"] = json!(redacts);
|
||||
}
|
||||
if let Some(unsigned) = &self.unsigned {
|
||||
json["unsigned"] = json!(unsigned);
|
||||
}
|
||||
if let Some(state_key) = &self.state_key {
|
||||
json["state_key"] = json!(state_key);
|
||||
}
|
||||
if let Some(redacts) = &self.redacts {
|
||||
json["redacts"] = json!(redacts);
|
||||
}
|
||||
|
||||
serde_json::from_value(json).expect("Raw::from_value always works")
|
||||
}
|
||||
serde_json::from_value(json).expect("Raw::from_value always works")
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn to_room_event(&self) -> Raw<AnyTimelineEvent> {
|
||||
let mut json = json!({
|
||||
"content": self.content,
|
||||
"type": self.kind,
|
||||
"event_id": self.event_id,
|
||||
"sender": self.sender,
|
||||
"origin_server_ts": self.origin_server_ts,
|
||||
"room_id": self.room_id,
|
||||
});
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn to_room_event(&self) -> Raw<AnyTimelineEvent> {
|
||||
let mut json = json!({
|
||||
"content": self.content,
|
||||
"type": self.kind,
|
||||
"event_id": self.event_id,
|
||||
"sender": self.sender,
|
||||
"origin_server_ts": self.origin_server_ts,
|
||||
"room_id": self.room_id,
|
||||
});
|
||||
|
||||
if let Some(unsigned) = &self.unsigned {
|
||||
json["unsigned"] = json!(unsigned);
|
||||
}
|
||||
if let Some(state_key) = &self.state_key {
|
||||
json["state_key"] = json!(state_key);
|
||||
}
|
||||
if let Some(redacts) = &self.redacts {
|
||||
json["redacts"] = json!(redacts);
|
||||
}
|
||||
if let Some(unsigned) = &self.unsigned {
|
||||
json["unsigned"] = json!(unsigned);
|
||||
}
|
||||
if let Some(state_key) = &self.state_key {
|
||||
json["state_key"] = json!(state_key);
|
||||
}
|
||||
if let Some(redacts) = &self.redacts {
|
||||
json["redacts"] = json!(redacts);
|
||||
}
|
||||
|
||||
serde_json::from_value(json).expect("Raw::from_value always works")
|
||||
}
|
||||
serde_json::from_value(json).expect("Raw::from_value always works")
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn to_message_like_event(&self) -> Raw<AnyMessageLikeEvent> {
|
||||
let mut json = json!({
|
||||
"content": self.content,
|
||||
"type": self.kind,
|
||||
"event_id": self.event_id,
|
||||
"sender": self.sender,
|
||||
"origin_server_ts": self.origin_server_ts,
|
||||
"room_id": self.room_id,
|
||||
});
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn to_message_like_event(&self) -> Raw<AnyMessageLikeEvent> {
|
||||
let mut json = json!({
|
||||
"content": self.content,
|
||||
"type": self.kind,
|
||||
"event_id": self.event_id,
|
||||
"sender": self.sender,
|
||||
"origin_server_ts": self.origin_server_ts,
|
||||
"room_id": self.room_id,
|
||||
});
|
||||
|
||||
if let Some(unsigned) = &self.unsigned {
|
||||
json["unsigned"] = json!(unsigned);
|
||||
}
|
||||
if let Some(state_key) = &self.state_key {
|
||||
json["state_key"] = json!(state_key);
|
||||
}
|
||||
if let Some(redacts) = &self.redacts {
|
||||
json["redacts"] = json!(redacts);
|
||||
}
|
||||
if let Some(unsigned) = &self.unsigned {
|
||||
json["unsigned"] = json!(unsigned);
|
||||
}
|
||||
if let Some(state_key) = &self.state_key {
|
||||
json["state_key"] = json!(state_key);
|
||||
}
|
||||
if let Some(redacts) = &self.redacts {
|
||||
json["redacts"] = json!(redacts);
|
||||
}
|
||||
|
||||
serde_json::from_value(json).expect("Raw::from_value always works")
|
||||
}
|
||||
serde_json::from_value(json).expect("Raw::from_value always works")
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn to_state_event(&self) -> Raw<AnyStateEvent> {
|
||||
let mut json = json!({
|
||||
"content": self.content,
|
||||
"type": self.kind,
|
||||
"event_id": self.event_id,
|
||||
"sender": self.sender,
|
||||
"origin_server_ts": self.origin_server_ts,
|
||||
"room_id": self.room_id,
|
||||
"state_key": self.state_key,
|
||||
});
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn to_state_event(&self) -> Raw<AnyStateEvent> {
|
||||
let mut json = json!({
|
||||
"content": self.content,
|
||||
"type": self.kind,
|
||||
"event_id": self.event_id,
|
||||
"sender": self.sender,
|
||||
"origin_server_ts": self.origin_server_ts,
|
||||
"room_id": self.room_id,
|
||||
"state_key": self.state_key,
|
||||
});
|
||||
|
||||
if let Some(unsigned) = &self.unsigned {
|
||||
json["unsigned"] = json!(unsigned);
|
||||
}
|
||||
if let Some(unsigned) = &self.unsigned {
|
||||
json["unsigned"] = json!(unsigned);
|
||||
}
|
||||
|
||||
serde_json::from_value(json).expect("Raw::from_value always works")
|
||||
}
|
||||
serde_json::from_value(json).expect("Raw::from_value always works")
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn to_sync_state_event(&self) -> Raw<AnySyncStateEvent> {
|
||||
let mut json = json!({
|
||||
"content": self.content,
|
||||
"type": self.kind,
|
||||
"event_id": self.event_id,
|
||||
"sender": self.sender,
|
||||
"origin_server_ts": self.origin_server_ts,
|
||||
"state_key": self.state_key,
|
||||
});
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn to_sync_state_event(&self) -> Raw<AnySyncStateEvent> {
|
||||
let mut json = json!({
|
||||
"content": self.content,
|
||||
"type": self.kind,
|
||||
"event_id": self.event_id,
|
||||
"sender": self.sender,
|
||||
"origin_server_ts": self.origin_server_ts,
|
||||
"state_key": self.state_key,
|
||||
});
|
||||
|
||||
if let Some(unsigned) = &self.unsigned {
|
||||
json["unsigned"] = json!(unsigned);
|
||||
}
|
||||
if let Some(unsigned) = &self.unsigned {
|
||||
json["unsigned"] = json!(unsigned);
|
||||
}
|
||||
|
||||
serde_json::from_value(json).expect("Raw::from_value always works")
|
||||
}
|
||||
serde_json::from_value(json).expect("Raw::from_value always works")
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn to_stripped_state_event(&self) -> Raw<AnyStrippedStateEvent> {
|
||||
let json = json!({
|
||||
"content": self.content,
|
||||
"type": self.kind,
|
||||
"sender": self.sender,
|
||||
"state_key": self.state_key,
|
||||
});
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn to_stripped_state_event(&self) -> Raw<AnyStrippedStateEvent> {
|
||||
let json = json!({
|
||||
"content": self.content,
|
||||
"type": self.kind,
|
||||
"sender": self.sender,
|
||||
"state_key": self.state_key,
|
||||
});
|
||||
|
||||
serde_json::from_value(json).expect("Raw::from_value always works")
|
||||
}
|
||||
serde_json::from_value(json).expect("Raw::from_value always works")
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn to_stripped_spacechild_state_event(&self) -> Raw<HierarchySpaceChildEvent> {
|
||||
let json = json!({
|
||||
"content": self.content,
|
||||
"type": self.kind,
|
||||
"sender": self.sender,
|
||||
"state_key": self.state_key,
|
||||
"origin_server_ts": self.origin_server_ts,
|
||||
});
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn to_stripped_spacechild_state_event(&self) -> Raw<HierarchySpaceChildEvent> {
|
||||
let json = json!({
|
||||
"content": self.content,
|
||||
"type": self.kind,
|
||||
"sender": self.sender,
|
||||
"state_key": self.state_key,
|
||||
"origin_server_ts": self.origin_server_ts,
|
||||
});
|
||||
|
||||
serde_json::from_value(json).expect("Raw::from_value always works")
|
||||
}
|
||||
serde_json::from_value(json).expect("Raw::from_value always works")
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn to_member_event(&self) -> Raw<StateEvent<RoomMemberEventContent>> {
|
||||
let mut json = json!({
|
||||
"content": self.content,
|
||||
"type": self.kind,
|
||||
"event_id": self.event_id,
|
||||
"sender": self.sender,
|
||||
"origin_server_ts": self.origin_server_ts,
|
||||
"redacts": self.redacts,
|
||||
"room_id": self.room_id,
|
||||
"state_key": self.state_key,
|
||||
});
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn to_member_event(&self) -> Raw<StateEvent<RoomMemberEventContent>> {
|
||||
let mut json = json!({
|
||||
"content": self.content,
|
||||
"type": self.kind,
|
||||
"event_id": self.event_id,
|
||||
"sender": self.sender,
|
||||
"origin_server_ts": self.origin_server_ts,
|
||||
"redacts": self.redacts,
|
||||
"room_id": self.room_id,
|
||||
"state_key": self.state_key,
|
||||
});
|
||||
|
||||
if let Some(unsigned) = &self.unsigned {
|
||||
json["unsigned"] = json!(unsigned);
|
||||
}
|
||||
if let Some(unsigned) = &self.unsigned {
|
||||
json["unsigned"] = json!(unsigned);
|
||||
}
|
||||
|
||||
serde_json::from_value(json).expect("Raw::from_value always works")
|
||||
}
|
||||
serde_json::from_value(json).expect("Raw::from_value always works")
|
||||
}
|
||||
|
||||
/// This does not return a full `Pdu` it is only to satisfy ruma's types.
|
||||
#[tracing::instrument]
|
||||
pub fn convert_to_outgoing_federation_event(
|
||||
mut pdu_json: CanonicalJsonObject,
|
||||
) -> Box<RawJsonValue> {
|
||||
if let Some(unsigned) = pdu_json
|
||||
.get_mut("unsigned")
|
||||
.and_then(|val| val.as_object_mut())
|
||||
{
|
||||
unsigned.remove("transaction_id");
|
||||
}
|
||||
/// This does not return a full `Pdu` it is only to satisfy ruma's types.
|
||||
#[tracing::instrument]
|
||||
pub fn convert_to_outgoing_federation_event(mut pdu_json: CanonicalJsonObject) -> Box<RawJsonValue> {
|
||||
if let Some(unsigned) = pdu_json.get_mut("unsigned").and_then(|val| val.as_object_mut()) {
|
||||
unsigned.remove("transaction_id");
|
||||
}
|
||||
|
||||
pdu_json.remove("event_id");
|
||||
pdu_json.remove("event_id");
|
||||
|
||||
// TODO: another option would be to convert it to a canonical string to validate size
|
||||
// and return a Result<Raw<...>>
|
||||
// serde_json::from_str::<Raw<_>>(
|
||||
// ruma::serde::to_canonical_json_string(pdu_json).expect("CanonicalJson is valid serde_json::Value"),
|
||||
// )
|
||||
// .expect("Raw::from_value always works")
|
||||
// TODO: another option would be to convert it to a canonical string to validate
|
||||
// size and return a Result<Raw<...>>
|
||||
// serde_json::from_str::<Raw<_>>(
|
||||
// ruma::serde::to_canonical_json_string(pdu_json).expect("CanonicalJson is
|
||||
// valid serde_json::Value"), )
|
||||
// .expect("Raw::from_value always works")
|
||||
|
||||
to_raw_value(&pdu_json).expect("CanonicalJson is valid serde_json::Value")
|
||||
}
|
||||
to_raw_value(&pdu_json).expect("CanonicalJson is valid serde_json::Value")
|
||||
}
|
||||
|
||||
pub fn from_id_val(
|
||||
event_id: &EventId,
|
||||
mut json: CanonicalJsonObject,
|
||||
) -> Result<Self, serde_json::Error> {
|
||||
json.insert(
|
||||
"event_id".to_owned(),
|
||||
CanonicalJsonValue::String(event_id.as_str().to_owned()),
|
||||
);
|
||||
pub fn from_id_val(event_id: &EventId, mut json: CanonicalJsonObject) -> Result<Self, serde_json::Error> {
|
||||
json.insert("event_id".to_owned(), CanonicalJsonValue::String(event_id.as_str().to_owned()));
|
||||
|
||||
serde_json::from_value(serde_json::to_value(json).expect("valid JSON"))
|
||||
}
|
||||
serde_json::from_value(serde_json::to_value(json).expect("valid JSON"))
|
||||
}
|
||||
}
|
||||
|
||||
impl state_res::Event for PduEvent {
|
||||
type Id = Arc<EventId>;
|
||||
type Id = Arc<EventId>;
|
||||
|
||||
fn event_id(&self) -> &Self::Id {
|
||||
&self.event_id
|
||||
}
|
||||
fn event_id(&self) -> &Self::Id { &self.event_id }
|
||||
|
||||
fn room_id(&self) -> &RoomId {
|
||||
&self.room_id
|
||||
}
|
||||
fn room_id(&self) -> &RoomId { &self.room_id }
|
||||
|
||||
fn sender(&self) -> &UserId {
|
||||
&self.sender
|
||||
}
|
||||
fn sender(&self) -> &UserId { &self.sender }
|
||||
|
||||
fn event_type(&self) -> &TimelineEventType {
|
||||
&self.kind
|
||||
}
|
||||
fn event_type(&self) -> &TimelineEventType { &self.kind }
|
||||
|
||||
fn content(&self) -> &RawJsonValue {
|
||||
&self.content
|
||||
}
|
||||
fn content(&self) -> &RawJsonValue { &self.content }
|
||||
|
||||
fn origin_server_ts(&self) -> MilliSecondsSinceUnixEpoch {
|
||||
MilliSecondsSinceUnixEpoch(self.origin_server_ts)
|
||||
}
|
||||
fn origin_server_ts(&self) -> MilliSecondsSinceUnixEpoch { MilliSecondsSinceUnixEpoch(self.origin_server_ts) }
|
||||
|
||||
fn state_key(&self) -> Option<&str> {
|
||||
self.state_key.as_deref()
|
||||
}
|
||||
fn state_key(&self) -> Option<&str> { self.state_key.as_deref() }
|
||||
|
||||
fn prev_events(&self) -> Box<dyn DoubleEndedIterator<Item = &Self::Id> + '_> {
|
||||
Box::new(self.prev_events.iter())
|
||||
}
|
||||
fn prev_events(&self) -> Box<dyn DoubleEndedIterator<Item = &Self::Id> + '_> { Box::new(self.prev_events.iter()) }
|
||||
|
||||
fn auth_events(&self) -> Box<dyn DoubleEndedIterator<Item = &Self::Id> + '_> {
|
||||
Box::new(self.auth_events.iter())
|
||||
}
|
||||
fn auth_events(&self) -> Box<dyn DoubleEndedIterator<Item = &Self::Id> + '_> { Box::new(self.auth_events.iter()) }
|
||||
|
||||
fn redacts(&self) -> Option<&Self::Id> {
|
||||
self.redacts.as_ref()
|
||||
}
|
||||
fn redacts(&self) -> Option<&Self::Id> { self.redacts.as_ref() }
|
||||
}
|
||||
|
||||
// These impls allow us to dedup state snapshots when resolving state
// for incoming events (federation/send/{txn}).
impl Eq for PduEvent {}
|
||||
impl PartialEq for PduEvent {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.event_id == other.event_id
|
||||
}
|
||||
fn eq(&self, other: &Self) -> bool { self.event_id == other.event_id }
|
||||
}
|
||||
impl PartialOrd for PduEvent {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) }
|
||||
}
|
||||
impl Ord for PduEvent {
|
||||
fn cmp(&self, other: &Self) -> Ordering {
|
||||
self.event_id.cmp(&other.event_id)
|
||||
}
|
||||
fn cmp(&self, other: &Self) -> Ordering { self.event_id.cmp(&other.event_id) }
|
||||
}
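The comparison impls above intentionally look only at `event_id`, which is what lets ordered collections deduplicate otherwise-identical snapshots during state resolution. An illustrative sketch (not code from the crate):

use std::collections::BTreeSet;

// Because Eq/Ord compare solely on event_id, collecting into a BTreeSet
// drops duplicate PDUs that differ only in unsigned data.
fn dedup_pdus(pdus: Vec<PduEvent>) -> BTreeSet<PduEvent> {
	pdus.into_iter().collect()
}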
|
||||
|
||||
/// Generates a correct eventId for the incoming pdu.
///
/// Returns a tuple of the new `EventId` and the PDU as a `BTreeMap<String, CanonicalJsonValue>`.
/// Returns a tuple of the new `EventId` and the PDU as a `BTreeMap<String,
/// CanonicalJsonValue>`.
pub(crate) fn gen_event_id_canonical_json(
|
||||
pdu: &RawJsonValue,
|
||||
room_version_id: &RoomVersionId,
|
||||
pdu: &RawJsonValue, room_version_id: &RoomVersionId,
|
||||
) -> crate::Result<(OwnedEventId, CanonicalJsonObject)> {
|
||||
let value: CanonicalJsonObject = serde_json::from_str(pdu.get()).map_err(|e| {
|
||||
warn!("Error parsing incoming event {:?}: {:?}", pdu, e);
|
||||
Error::BadServerResponse("Invalid PDU in server response")
|
||||
})?;
|
||||
let value: CanonicalJsonObject = serde_json::from_str(pdu.get()).map_err(|e| {
|
||||
warn!("Error parsing incoming event {:?}: {:?}", pdu, e);
|
||||
Error::BadServerResponse("Invalid PDU in server response")
|
||||
})?;
|
||||
|
||||
let event_id = format!(
|
||||
"${}",
|
||||
// Anything higher than version3 behaves the same
|
||||
ruma::signatures::reference_hash(&value, room_version_id)
|
||||
.expect("ruma can calculate reference hashes")
|
||||
)
|
||||
.try_into()
|
||||
.expect("ruma's reference hashes are valid event ids");
|
||||
let event_id = format!(
|
||||
"${}",
|
||||
// Anything higher than version3 behaves the same
|
||||
ruma::signatures::reference_hash(&value, room_version_id).expect("ruma can calculate reference hashes")
|
||||
)
|
||||
.try_into()
|
||||
.expect("ruma's reference hashes are valid event ids");
|
||||
|
||||
Ok((event_id, value))
|
||||
Ok((event_id, value))
|
||||
}
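A hedged usage sketch of `gen_event_id_canonical_json`: the returned event ID is `$` followed by the room-version-aware reference hash of the canonicalised PDU. `parse_incoming` is a hypothetical caller, not part of the crate:

// Hypothetical caller: derive the event ID for an incoming raw PDU before
// any further validation or storage.
fn parse_incoming(pdu: &RawJsonValue, room_version_id: &RoomVersionId) -> crate::Result<OwnedEventId> {
	let (event_id, canonical) = gen_event_id_canonical_json(pdu, room_version_id)?;
	debug_assert!(event_id.as_str().starts_with('$'));
	// The canonical map still contains the original fields, e.g. "type".
	let _event_type = canonical.get("type");
	Ok(event_id)
}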
|
||||
|
||||
/// Build the start of a PDU in order to add it to the Database.
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct PduBuilder {
|
||||
#[serde(rename = "type")]
|
||||
pub event_type: TimelineEventType,
|
||||
pub content: Box<RawJsonValue>,
|
||||
pub unsigned: Option<BTreeMap<String, serde_json::Value>>,
|
||||
pub state_key: Option<String>,
|
||||
pub redacts: Option<Arc<EventId>>,
|
||||
#[serde(rename = "type")]
|
||||
pub event_type: TimelineEventType,
|
||||
pub content: Box<RawJsonValue>,
|
||||
pub unsigned: Option<BTreeMap<String, serde_json::Value>>,
|
||||
pub state_key: Option<String>,
|
||||
pub redacts: Option<Arc<EventId>>,
|
||||
}
|
||||
|
|
|
@ -1,16 +1,16 @@
use crate::Result;
use ruma::{
	api::client::push::{set_pusher, Pusher},
	UserId,
	api::client::push::{set_pusher, Pusher},
	UserId,
};

use crate::Result;

pub trait Data: Send + Sync {
	fn set_pusher(&self, sender: &UserId, pusher: set_pusher::v3::PusherAction) -> Result<()>;
	fn set_pusher(&self, sender: &UserId, pusher: set_pusher::v3::PusherAction) -> Result<()>;

	fn get_pusher(&self, sender: &UserId, pushkey: &str) -> Result<Option<Pusher>>;
	fn get_pusher(&self, sender: &UserId, pushkey: &str) -> Result<Option<Pusher>>;

	fn get_pushers(&self, sender: &UserId) -> Result<Vec<Pusher>>;
	fn get_pushers(&self, sender: &UserId) -> Result<Vec<Pusher>>;

	fn get_pushkeys<'a>(&'a self, sender: &UserId)
		-> Box<dyn Iterator<Item = Result<String>> + 'a>;
	fn get_pushkeys<'a>(&'a self, sender: &UserId) -> Box<dyn Iterator<Item = Result<String>> + 'a>;
}

@ -1,292 +1,236 @@
|
|||
mod data;
|
||||
pub use data::Data;
|
||||
use ruma::{events::AnySyncTimelineEvent, push::PushConditionPowerLevelsCtx};
|
||||
|
||||
use crate::{services, Error, PduEvent, Result};
|
||||
use bytes::BytesMut;
|
||||
use ruma::{
|
||||
api::{
|
||||
client::push::{set_pusher, Pusher, PusherKind},
|
||||
push_gateway::send_event_notification::{
|
||||
self,
|
||||
v1::{Device, Notification, NotificationCounts, NotificationPriority},
|
||||
},
|
||||
IncomingResponse, MatrixVersion, OutgoingRequest, SendAccessToken,
|
||||
},
|
||||
events::{room::power_levels::RoomPowerLevelsEventContent, StateEventType, TimelineEventType},
|
||||
push::{Action, PushConditionRoomCtx, PushFormat, Ruleset, Tweak},
|
||||
serde::Raw,
|
||||
uint, RoomId, UInt, UserId,
|
||||
};
|
||||
|
||||
use std::{fmt::Debug, mem};
|
||||
|
||||
use bytes::BytesMut;
|
||||
pub use data::Data;
|
||||
use ruma::{
|
||||
api::{
|
||||
client::push::{set_pusher, Pusher, PusherKind},
|
||||
push_gateway::send_event_notification::{
|
||||
self,
|
||||
v1::{Device, Notification, NotificationCounts, NotificationPriority},
|
||||
},
|
||||
IncomingResponse, MatrixVersion, OutgoingRequest, SendAccessToken,
|
||||
},
|
||||
events::{
|
||||
room::power_levels::RoomPowerLevelsEventContent, AnySyncTimelineEvent, StateEventType, TimelineEventType,
|
||||
},
|
||||
push::{Action, PushConditionPowerLevelsCtx, PushConditionRoomCtx, PushFormat, Ruleset, Tweak},
|
||||
serde::Raw,
|
||||
uint, RoomId, UInt, UserId,
|
||||
};
|
||||
use tracing::{info, warn};
|
||||
|
||||
use crate::{services, Error, PduEvent, Result};
|
||||
|
||||
pub struct Service {
|
||||
pub db: &'static dyn Data,
|
||||
pub db: &'static dyn Data,
|
||||
}
|
||||
|
||||
impl Service {
|
||||
pub fn set_pusher(&self, sender: &UserId, pusher: set_pusher::v3::PusherAction) -> Result<()> {
|
||||
self.db.set_pusher(sender, pusher)
|
||||
}
|
||||
pub fn set_pusher(&self, sender: &UserId, pusher: set_pusher::v3::PusherAction) -> Result<()> {
|
||||
self.db.set_pusher(sender, pusher)
|
||||
}
|
||||
|
||||
pub fn get_pusher(&self, sender: &UserId, pushkey: &str) -> Result<Option<Pusher>> {
|
||||
self.db.get_pusher(sender, pushkey)
|
||||
}
|
||||
pub fn get_pusher(&self, sender: &UserId, pushkey: &str) -> Result<Option<Pusher>> {
|
||||
self.db.get_pusher(sender, pushkey)
|
||||
}
|
||||
|
||||
pub fn get_pushers(&self, sender: &UserId) -> Result<Vec<Pusher>> {
|
||||
self.db.get_pushers(sender)
|
||||
}
|
||||
pub fn get_pushers(&self, sender: &UserId) -> Result<Vec<Pusher>> { self.db.get_pushers(sender) }
|
||||
|
||||
pub fn get_pushkeys(&self, sender: &UserId) -> Box<dyn Iterator<Item = Result<String>>> {
|
||||
self.db.get_pushkeys(sender)
|
||||
}
|
||||
pub fn get_pushkeys(&self, sender: &UserId) -> Box<dyn Iterator<Item = Result<String>>> {
|
||||
self.db.get_pushkeys(sender)
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self, destination, request))]
|
||||
pub async fn send_request<T>(
|
||||
&self,
|
||||
destination: &str,
|
||||
request: T,
|
||||
) -> Result<T::IncomingResponse>
|
||||
where
|
||||
T: OutgoingRequest + Debug,
|
||||
{
|
||||
let destination = destination.replace(services().globals.notification_push_path(), "");
|
||||
#[tracing::instrument(skip(self, destination, request))]
|
||||
pub async fn send_request<T>(&self, destination: &str, request: T) -> Result<T::IncomingResponse>
|
||||
where
|
||||
T: OutgoingRequest + Debug,
|
||||
{
|
||||
let destination = destination.replace(services().globals.notification_push_path(), "");
|
||||
|
||||
let http_request = request
|
||||
.try_into_http_request::<BytesMut>(
|
||||
&destination,
|
||||
SendAccessToken::IfRequired(""),
|
||||
&[MatrixVersion::V1_0],
|
||||
)
|
||||
.map_err(|e| {
|
||||
warn!("Failed to find destination {}: {}", destination, e);
|
||||
Error::BadServerResponse("Invalid destination")
|
||||
})?
|
||||
.map(bytes::BytesMut::freeze);
|
||||
let http_request = request
|
||||
.try_into_http_request::<BytesMut>(&destination, SendAccessToken::IfRequired(""), &[MatrixVersion::V1_0])
|
||||
.map_err(|e| {
|
||||
warn!("Failed to find destination {}: {}", destination, e);
|
||||
Error::BadServerResponse("Invalid destination")
|
||||
})?
|
||||
.map(bytes::BytesMut::freeze);
|
||||
|
||||
let reqwest_request = reqwest::Request::try_from(http_request)?;
|
||||
let reqwest_request = reqwest::Request::try_from(http_request)?;
|
||||
|
||||
		// TODO: we could keep this very short and let exponential backoff do its thing...
		//*reqwest_request.timeout_mut() = Some(Duration::from_secs(5));
		// TODO: we could keep this very short and let exponential backoff do its thing...
		//*reqwest_request.timeout_mut() = Some(Duration::from_secs(5));

let url = reqwest_request.url().clone();
|
||||
let response = services()
|
||||
.globals
|
||||
.default_client()
|
||||
.execute(reqwest_request)
|
||||
.await;
|
||||
let url = reqwest_request.url().clone();
|
||||
let response = services().globals.default_client().execute(reqwest_request).await;
|
||||
|
||||
match response {
|
||||
Ok(mut response) => {
|
||||
// reqwest::Response -> http::Response conversion
|
||||
let status = response.status();
|
||||
let mut http_response_builder = http::Response::builder()
|
||||
.status(status)
|
||||
.version(response.version());
|
||||
mem::swap(
|
||||
response.headers_mut(),
|
||||
http_response_builder
|
||||
.headers_mut()
|
||||
.expect("http::response::Builder is usable"),
|
||||
);
|
||||
match response {
|
||||
Ok(mut response) => {
|
||||
// reqwest::Response -> http::Response conversion
|
||||
let status = response.status();
|
||||
let mut http_response_builder = http::Response::builder().status(status).version(response.version());
|
||||
mem::swap(
|
||||
response.headers_mut(),
|
||||
http_response_builder.headers_mut().expect("http::response::Builder is usable"),
|
||||
);
|
||||
|
||||
let body = response.bytes().await.unwrap_or_else(|e| {
|
||||
warn!("server error {}", e);
|
||||
Vec::new().into()
|
||||
}); // TODO: handle timeout
|
||||
let body = response.bytes().await.unwrap_or_else(|e| {
|
||||
warn!("server error {}", e);
|
||||
Vec::new().into()
|
||||
}); // TODO: handle timeout
|
||||
|
||||
if !status.is_success() {
|
||||
info!(
|
||||
"Push gateway returned bad response {} {}\n{}\n{:?}",
|
||||
destination,
|
||||
status,
|
||||
url,
|
||||
crate::utils::string_from_bytes(&body)
|
||||
);
|
||||
}
|
||||
if !status.is_success() {
|
||||
info!(
|
||||
"Push gateway returned bad response {} {}\n{}\n{:?}",
|
||||
destination,
|
||||
status,
|
||||
url,
|
||||
crate::utils::string_from_bytes(&body)
|
||||
);
|
||||
}
|
||||
|
||||
let response = T::IncomingResponse::try_from_http_response(
|
||||
http_response_builder
|
||||
.body(body)
|
||||
.expect("reqwest body is valid http body"),
|
||||
);
|
||||
response.map_err(|_| {
|
||||
info!(
|
||||
"Push gateway returned invalid response bytes {}\n{}",
|
||||
destination, url
|
||||
);
|
||||
Error::BadServerResponse("Push gateway returned bad response.")
|
||||
})
|
||||
}
|
||||
Err(e) => {
|
||||
warn!("Could not send request to pusher {}: {}", destination, e);
|
||||
Err(e.into())
|
||||
}
|
||||
}
|
||||
}
|
||||
let response = T::IncomingResponse::try_from_http_response(
|
||||
http_response_builder.body(body).expect("reqwest body is valid http body"),
|
||||
);
|
||||
response.map_err(|_| {
|
||||
info!("Push gateway returned invalid response bytes {}\n{}", destination, url);
|
||||
Error::BadServerResponse("Push gateway returned bad response.")
|
||||
})
|
||||
},
|
||||
Err(e) => {
|
||||
warn!("Could not send request to pusher {}: {}", destination, e);
|
||||
Err(e.into())
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self, user, unread, pusher, ruleset, pdu))]
|
||||
pub async fn send_push_notice(
|
||||
&self,
|
||||
user: &UserId,
|
||||
unread: UInt,
|
||||
pusher: &Pusher,
|
||||
ruleset: Ruleset,
|
||||
pdu: &PduEvent,
|
||||
) -> Result<()> {
|
||||
let mut notify = None;
|
||||
let mut tweaks = Vec::new();
|
||||
#[tracing::instrument(skip(self, user, unread, pusher, ruleset, pdu))]
|
||||
pub async fn send_push_notice(
|
||||
&self, user: &UserId, unread: UInt, pusher: &Pusher, ruleset: Ruleset, pdu: &PduEvent,
|
||||
) -> Result<()> {
|
||||
let mut notify = None;
|
||||
let mut tweaks = Vec::new();
|
||||
|
||||
let power_levels: RoomPowerLevelsEventContent = services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(&pdu.room_id, &StateEventType::RoomPowerLevels, "")?
|
||||
.map(|ev| {
|
||||
serde_json::from_str(ev.content.get())
|
||||
.map_err(|_| Error::bad_database("invalid m.room.power_levels event"))
|
||||
})
|
||||
.transpose()?
|
||||
.unwrap_or_default();
|
||||
let power_levels: RoomPowerLevelsEventContent = services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(&pdu.room_id, &StateEventType::RoomPowerLevels, "")?
|
||||
.map(|ev| {
|
||||
serde_json::from_str(ev.content.get())
|
||||
.map_err(|_| Error::bad_database("invalid m.room.power_levels event"))
|
||||
})
|
||||
.transpose()?
|
||||
.unwrap_or_default();
|
||||
|
||||
for action in self.get_actions(
|
||||
user,
|
||||
&ruleset,
|
||||
&power_levels,
|
||||
&pdu.to_sync_room_event(),
|
||||
&pdu.room_id,
|
||||
)? {
|
||||
let n = match action {
|
||||
Action::Notify => true,
|
||||
Action::SetTweak(tweak) => {
|
||||
tweaks.push(tweak.clone());
|
||||
continue;
|
||||
}
|
||||
_ => false,
|
||||
};
|
||||
for action in self.get_actions(user, &ruleset, &power_levels, &pdu.to_sync_room_event(), &pdu.room_id)? {
|
||||
let n = match action {
|
||||
Action::Notify => true,
|
||||
Action::SetTweak(tweak) => {
|
||||
tweaks.push(tweak.clone());
|
||||
continue;
|
||||
},
|
||||
_ => false,
|
||||
};
|
||||
|
||||
if notify.is_some() {
|
||||
return Err(Error::bad_database(
|
||||
r#"Malformed pushrule contains more than one of these actions: ["dont_notify", "notify", "coalesce"]"#,
|
||||
));
|
||||
}
|
||||
if notify.is_some() {
|
||||
return Err(Error::bad_database(
|
||||
r#"Malformed pushrule contains more than one of these actions: ["dont_notify", "notify", "coalesce"]"#,
|
||||
));
|
||||
}
|
||||
|
||||
notify = Some(n);
|
||||
}
|
||||
notify = Some(n);
|
||||
}
|
||||
|
||||
if notify == Some(true) {
|
||||
self.send_notice(unread, pusher, tweaks, pdu).await?;
|
||||
}
|
||||
// Else the event triggered no actions
|
||||
if notify == Some(true) {
|
||||
self.send_notice(unread, pusher, tweaks, pdu).await?;
|
||||
}
|
||||
// Else the event triggered no actions
|
||||
|
||||
Ok(())
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self, user, ruleset, pdu))]
|
||||
pub fn get_actions<'a>(
|
||||
&self,
|
||||
user: &UserId,
|
||||
ruleset: &'a Ruleset,
|
||||
power_levels: &RoomPowerLevelsEventContent,
|
||||
pdu: &Raw<AnySyncTimelineEvent>,
|
||||
room_id: &RoomId,
|
||||
) -> Result<&'a [Action]> {
|
||||
let power_levels = PushConditionPowerLevelsCtx {
|
||||
users: power_levels.users.clone(),
|
||||
users_default: power_levels.users_default,
|
||||
notifications: power_levels.notifications.clone(),
|
||||
};
|
||||
#[tracing::instrument(skip(self, user, ruleset, pdu))]
|
||||
pub fn get_actions<'a>(
|
||||
&self, user: &UserId, ruleset: &'a Ruleset, power_levels: &RoomPowerLevelsEventContent,
|
||||
pdu: &Raw<AnySyncTimelineEvent>, room_id: &RoomId,
|
||||
) -> Result<&'a [Action]> {
|
||||
let power_levels = PushConditionPowerLevelsCtx {
|
||||
users: power_levels.users.clone(),
|
||||
users_default: power_levels.users_default,
|
||||
notifications: power_levels.notifications.clone(),
|
||||
};
|
||||
|
||||
let ctx = PushConditionRoomCtx {
|
||||
room_id: room_id.to_owned(),
|
||||
member_count: UInt::from(
|
||||
services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.room_joined_count(room_id)?
|
||||
.unwrap_or(1) as u32,
|
||||
),
|
||||
user_id: user.to_owned(),
|
||||
user_display_name: services()
|
||||
.users
|
||||
.displayname(user)?
|
||||
.unwrap_or_else(|| user.localpart().to_owned()),
|
||||
power_levels: Some(power_levels),
|
||||
};
|
||||
let ctx = PushConditionRoomCtx {
|
||||
room_id: room_id.to_owned(),
|
||||
member_count: UInt::from(services().rooms.state_cache.room_joined_count(room_id)?.unwrap_or(1) as u32),
|
||||
user_id: user.to_owned(),
|
||||
user_display_name: services().users.displayname(user)?.unwrap_or_else(|| user.localpart().to_owned()),
|
||||
power_levels: Some(power_levels),
|
||||
};
|
||||
|
||||
Ok(ruleset.get_actions(pdu, &ctx))
|
||||
}
|
||||
Ok(ruleset.get_actions(pdu, &ctx))
|
||||
}
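A hedged usage sketch of `get_actions`: evaluate a user's push ruleset against a PDU and inspect the resulting actions. `count_highlights` and the defaulted power levels are illustrative assumptions, not code from the crate:

// Hypothetical caller: count how many highlight tweaks the user's push
// rules produce for this event.
fn count_highlights(svc: &Service, user: &UserId, ruleset: &Ruleset, pdu: &PduEvent) -> Result<usize> {
	let power_levels = RoomPowerLevelsEventContent::default();
	let actions = svc.get_actions(user, ruleset, &power_levels, &pdu.to_sync_room_event(), &pdu.room_id)?;
	Ok(actions
		.iter()
		.filter(|action| matches!(action, Action::SetTweak(Tweak::Highlight(true))))
		.count())
}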
|
||||
|
||||
#[tracing::instrument(skip(self, unread, pusher, tweaks, event))]
|
||||
async fn send_notice(
|
||||
&self,
|
||||
unread: UInt,
|
||||
pusher: &Pusher,
|
||||
tweaks: Vec<Tweak>,
|
||||
event: &PduEvent,
|
||||
) -> Result<()> {
|
||||
// TODO: email
|
||||
match &pusher.kind {
|
||||
PusherKind::Http(http) => {
|
||||
// TODO:
|
||||
// Two problems with this
|
||||
// 1. if "event_id_only" is the only format kind it seems we should never add more info
|
||||
// 2. can pusher/devices have conflicting formats
|
||||
let event_id_only = http.format == Some(PushFormat::EventIdOnly);
|
||||
#[tracing::instrument(skip(self, unread, pusher, tweaks, event))]
|
||||
async fn send_notice(&self, unread: UInt, pusher: &Pusher, tweaks: Vec<Tweak>, event: &PduEvent) -> Result<()> {
|
||||
// TODO: email
|
||||
match &pusher.kind {
|
||||
PusherKind::Http(http) => {
|
||||
// TODO:
|
||||
// Two problems with this
|
||||
				// 1. if "event_id_only" is the only format kind it seems we should never add
				// more info
				// 2. can pusher/devices have conflicting formats
				let event_id_only = http.format == Some(PushFormat::EventIdOnly);

				let mut device = Device::new(pusher.ids.app_id.clone(), pusher.ids.pushkey.clone());
				device.data.default_payload = http.default_payload.clone();
				device.data.format = http.format.clone();

				// Tweaks are only added if the format is NOT event_id_only
				if !event_id_only {
					device.tweaks = tweaks.clone();
				}

				let d = vec![device];
				let mut notifi = Notification::new(d);

				notifi.prio = NotificationPriority::Low;
				notifi.event_id = Some((*event.event_id).to_owned());
				notifi.room_id = Some((*event.room_id).to_owned());
				// TODO: missed calls
				notifi.counts = NotificationCounts::new(unread, uint!(0));

				if event.kind == TimelineEventType::RoomEncrypted
					|| tweaks.iter().any(|t| matches!(t, Tweak::Highlight(true) | Tweak::Sound(_)))
				{
					notifi.prio = NotificationPriority::High;
				}

				if event_id_only {
					self.send_request(&http.url, send_event_notification::v1::Request::new(notifi)).await?;
				} else {
					notifi.sender = Some(event.sender.clone());
					notifi.event_type = Some(event.kind.clone());
					notifi.content = serde_json::value::to_raw_value(&event.content).ok();

					if event.kind == TimelineEventType::RoomMember {
						notifi.user_is_target = event.state_key.as_deref() == Some(event.sender.as_str());
					}

					notifi.sender_display_name = services().users.displayname(&event.sender)?;

					notifi.room_name = services().rooms.state_accessor.get_name(&event.room_id)?;

					self.send_request(&http.url, send_event_notification::v1::Request::new(notifi)).await?;
				}

				Ok(())
			},
			// TODO: Handle email
			//PusherKind::Email(_) => Ok(()),
			_ => Ok(()),
		}
	}
}

@@ -1,24 +1,22 @@
use ruma::{OwnedRoomAliasId, OwnedRoomId, RoomAliasId, RoomId};

use crate::Result;

pub trait Data: Send + Sync {
	/// Creates or updates the alias to the given room id.
	fn set_alias(&self, alias: &RoomAliasId, room_id: &RoomId) -> Result<()>;

	/// Forgets about an alias. Returns an error if the alias did not exist.
	fn remove_alias(&self, alias: &RoomAliasId) -> Result<()>;

	/// Looks up the roomid for the given alias.
	fn resolve_local_alias(&self, alias: &RoomAliasId) -> Result<Option<OwnedRoomId>>;

	/// Returns all local aliases that point to the given room
	fn local_aliases_for_room<'a>(
		&'a self, room_id: &RoomId,
	) -> Box<dyn Iterator<Item = Result<OwnedRoomAliasId>> + 'a>;

	/// Returns all local aliases on the server
	fn all_local_aliases<'a>(&'a self) -> Box<dyn Iterator<Item = Result<(OwnedRoomId, String)>> + 'a>;
}

@@ -1,42 +1,35 @@
mod data;

pub use data::Data;
use ruma::{OwnedRoomAliasId, OwnedRoomId, RoomAliasId, RoomId};

use crate::Result;

pub struct Service {
	pub db: &'static dyn Data,
}

impl Service {
	#[tracing::instrument(skip(self))]
	pub fn set_alias(&self, alias: &RoomAliasId, room_id: &RoomId) -> Result<()> { self.db.set_alias(alias, room_id) }

	#[tracing::instrument(skip(self))]
	pub fn remove_alias(&self, alias: &RoomAliasId) -> Result<()> { self.db.remove_alias(alias) }

	#[tracing::instrument(skip(self))]
	pub fn resolve_local_alias(&self, alias: &RoomAliasId) -> Result<Option<OwnedRoomId>> {
		self.db.resolve_local_alias(alias)
	}

	#[tracing::instrument(skip(self))]
	pub fn local_aliases_for_room<'a>(
		&'a self, room_id: &RoomId,
	) -> Box<dyn Iterator<Item = Result<OwnedRoomAliasId>> + 'a> {
		self.db.local_aliases_for_room(room_id)
	}

	#[tracing::instrument(skip(self))]
	pub fn all_local_aliases<'a>(&'a self) -> Box<dyn Iterator<Item = Result<(OwnedRoomId, String)>> + 'a> {
		self.db.all_local_aliases()
	}
}

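Note (illustration, not part of the diff): a minimal sketch of how the alias Service above is called, assuming it sits inside this same module so RoomAliasId, RoomId, Result and Service are already in scope; ensure_alias is a hypothetical helper name.

// Hypothetical helper: create the alias and read it back through the same service.
fn ensure_alias(svc: &Service, alias: &RoomAliasId, room_id: &RoomId) -> Result<()> {
	// Overwrites any previous mapping for this alias.
	svc.set_alias(alias, room_id)?;
	// resolve_local_alias returns Ok(None) for aliases this server does not know.
	debug_assert_eq!(svc.resolve_local_alias(alias)?.as_deref(), Some(room_id));
	Ok(())
}
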
@@ -1,11 +1,8 @@
use std::{collections::HashSet, sync::Arc};

use crate::Result;

pub trait Data: Send + Sync {
	fn get_cached_eventid_authchain(&self, shorteventid: &[u64]) -> Result<Option<Arc<HashSet<u64>>>>;
	fn cache_auth_chain(&self, shorteventid: Vec<u64>, auth_chain: Arc<HashSet<u64>>) -> Result<()>;
}

@@ -1,7 +1,7 @@
mod data;
use std::{
	collections::{BTreeSet, HashSet},
	sync::Arc,
};

pub use data::Data;

@@ -11,151 +11,130 @@ use tracing::{debug, error, warn};
use crate::{services, Error, Result};

pub struct Service {
	pub db: &'static dyn Data,
}

impl Service {
	pub fn get_cached_eventid_authchain(&self, key: &[u64]) -> Result<Option<Arc<HashSet<u64>>>> {
		self.db.get_cached_eventid_authchain(key)
	}

	#[tracing::instrument(skip(self))]
	pub fn cache_auth_chain(&self, key: Vec<u64>, auth_chain: Arc<HashSet<u64>>) -> Result<()> {
		self.db.cache_auth_chain(key, auth_chain)
	}

	#[tracing::instrument(skip(self, starting_events))]
	pub async fn get_auth_chain<'a>(
		&self, room_id: &RoomId, starting_events: Vec<Arc<EventId>>,
	) -> Result<impl Iterator<Item = Arc<EventId>> + 'a> {
		const NUM_BUCKETS: usize = 50;

		let mut buckets = vec![BTreeSet::new(); NUM_BUCKETS];

		let mut i = 0;
		for id in starting_events {
			let short = services().rooms.short.get_or_create_shorteventid(&id)?;
			let bucket_id = (short % NUM_BUCKETS as u64) as usize;
			buckets[bucket_id].insert((short, id.clone()));
			i += 1;
			if i % 100 == 0 {
				tokio::task::yield_now().await;
			}
		}

		let mut full_auth_chain = HashSet::new();

		let mut hits = 0;
		let mut misses = 0;
		for chunk in buckets {
			if chunk.is_empty() {
				continue;
			}

			let chunk_key: Vec<u64> = chunk.iter().map(|(short, _)| short).copied().collect();
			if let Some(cached) = services().rooms.auth_chain.get_cached_eventid_authchain(&chunk_key)? {
				hits += 1;
				full_auth_chain.extend(cached.iter().copied());
				continue;
			}
			misses += 1;

			let mut chunk_cache = HashSet::new();
			let mut hits2 = 0;
			let mut misses2 = 0;
			let mut i = 0;
			for (sevent_id, event_id) in chunk {
				if let Some(cached) = services().rooms.auth_chain.get_cached_eventid_authchain(&[sevent_id])? {
					hits2 += 1;
					chunk_cache.extend(cached.iter().copied());
				} else {
					misses2 += 1;
					let auth_chain = Arc::new(self.get_auth_chain_inner(room_id, &event_id)?);
					services().rooms.auth_chain.cache_auth_chain(vec![sevent_id], Arc::clone(&auth_chain))?;
					debug!(
						event_id = ?event_id,
						chain_length = ?auth_chain.len(),
						"Cache missed event"
					);
					chunk_cache.extend(auth_chain.iter());

					i += 1;
					if i % 100 == 0 {
						tokio::task::yield_now().await;
					}
				};
			}
			debug!(
				chunk_cache_length = ?chunk_cache.len(),
				hits = ?hits2,
				misses = ?misses2,
				"Chunk missed",
			);
			let chunk_cache = Arc::new(chunk_cache);
			services().rooms.auth_chain.cache_auth_chain(chunk_key, Arc::clone(&chunk_cache))?;
			full_auth_chain.extend(chunk_cache.iter());
		}

		debug!(
			chain_length = ?full_auth_chain.len(),
			hits = ?hits,
			misses = ?misses,
			"Auth chain stats",
		);

		Ok(full_auth_chain.into_iter().filter_map(move |sid| services().rooms.short.get_eventid_from_short(sid).ok()))
	}

	#[tracing::instrument(skip(self, event_id))]
	fn get_auth_chain_inner(&self, room_id: &RoomId, event_id: &EventId) -> Result<HashSet<u64>> {
		let mut todo = vec![Arc::from(event_id)];
		let mut found = HashSet::new();

		while let Some(event_id) = todo.pop() {
			match services().rooms.timeline.get_pdu(&event_id) {
				Ok(Some(pdu)) => {
					if pdu.room_id != room_id {
						return Err(Error::BadRequest(ErrorKind::Forbidden, "Evil event in db"));
					}
					for auth_event in &pdu.auth_events {
						let sauthevent = services().rooms.short.get_or_create_shorteventid(auth_event)?;

						if !found.contains(&sauthevent) {
							found.insert(sauthevent);
							todo.push(auth_event.clone());
						}
					}
				},
				Ok(None) => {
					warn!(?event_id, "Could not find pdu mentioned in auth events");
				},
				Err(error) => {
					error!(?event_id, ?error, "Could not load event in auth chain");
				},
			}
		}

		Ok(found)
	}
}

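Note (illustration, not part of the diff): a sketch of a caller for the bucketed get_auth_chain above, assuming it is placed in this same module so services(), Result, Arc, EventId and RoomId are in scope; auth_chain_size is a hypothetical name.

// Counts the auth-chain events reachable from the given starting events.
async fn auth_chain_size(room_id: &RoomId, starting_events: Vec<Arc<EventId>>) -> Result<usize> {
	let chain = services().rooms.auth_chain.get_auth_chain(room_id, starting_events).await?;
	// The iterator resolves short event ids back to Arc<EventId>s as it is consumed.
	Ok(chain.count())
}
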
@@ -1,16 +1,17 @@
use ruma::{OwnedRoomId, RoomId};

use crate::Result;

pub trait Data: Send + Sync {
	/// Adds the room to the public room directory
	fn set_public(&self, room_id: &RoomId) -> Result<()>;

	/// Removes the room from the public room directory.
	fn set_not_public(&self, room_id: &RoomId) -> Result<()>;

	/// Returns true if the room is in the public room directory.
	fn is_public_room(&self, room_id: &RoomId) -> Result<bool>;

	/// Returns the unsorted public room directory
	fn public_rooms<'a>(&'a self) -> Box<dyn Iterator<Item = Result<OwnedRoomId>> + 'a>;
}

@@ -6,27 +6,19 @@ use ruma::{OwnedRoomId, RoomId};
use crate::Result;

pub struct Service {
	pub db: &'static dyn Data,
}

impl Service {
	#[tracing::instrument(skip(self))]
	pub fn set_public(&self, room_id: &RoomId) -> Result<()> { self.db.set_public(room_id) }

	#[tracing::instrument(skip(self))]
	pub fn set_not_public(&self, room_id: &RoomId) -> Result<()> { self.db.set_not_public(room_id) }

	#[tracing::instrument(skip(self))]
	pub fn is_public_room(&self, room_id: &RoomId) -> Result<bool> { self.db.is_public_room(room_id) }

	#[tracing::instrument(skip(self))]
	pub fn public_rooms(&self) -> impl Iterator<Item = Result<OwnedRoomId>> + '_ { self.db.public_rooms() }
}

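Note (illustration, not part of the diff): the two setters above map directly onto a visibility flag; a hypothetical sketch, assuming it sits in this same module.

// Hypothetical helper translating a boolean visibility into the directory calls.
fn set_room_visibility(svc: &Service, room_id: &RoomId, public: bool) -> Result<()> {
	if public {
		svc.set_public(room_id)
	} else {
		svc.set_not_public(room_id)
	}
}
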
@@ -5,7 +5,7 @@ pub mod typing;
pub trait Data: presence::Data + read_receipt::Data + typing::Data + 'static {}

pub struct Service {
	pub presence: presence::Service,
	pub read_receipt: read_receipt::Service,
	pub typing: typing::Service,
}

@@ -1,33 +1,27 @@
use ruma::{events::presence::PresenceEvent, presence::PresenceState, OwnedUserId, RoomId, UInt, UserId};

use crate::Result;

pub trait Data: Send + Sync {
	/// Returns the latest presence event for the given user in the given room.
	fn get_presence(&self, room_id: &RoomId, user_id: &UserId) -> Result<Option<PresenceEvent>>;

	/// Pings the presence of the given user in the given room, setting the
	/// specified state.
	fn ping_presence(&self, user_id: &UserId, new_state: PresenceState) -> Result<()>;

	/// Adds a presence event which will be saved until a new event replaces it.
	fn set_presence(
		&self, room_id: &RoomId, user_id: &UserId, presence_state: PresenceState, currently_active: Option<bool>,
		last_active_ago: Option<UInt>, status_msg: Option<String>,
	) -> Result<()>;

	/// Removes the presence record for the given user from the database.
	fn remove_presence(&self, user_id: &UserId) -> Result<()>;

	/// Returns the most recent presence updates that happened after the event
	/// with id `since`.
	fn presence_since<'a>(
		&'a self, room_id: &RoomId, since: u64,
	) -> Box<dyn Iterator<Item = (OwnedUserId, u64, PresenceEvent)> + 'a>;
}

@@ -5,9 +5,9 @@ use std::time::Duration;
pub use data::Data;
use futures_util::{stream::FuturesUnordered, StreamExt};
use ruma::{
	events::presence::{PresenceEvent, PresenceEventContent},
	presence::PresenceState,
	OwnedUserId, RoomId, UInt, UserId,
};
use serde::{Deserialize, Serialize};
use tokio::{sync::mpsc, time::sleep};

@@ -15,197 +15,164 @@ use tracing::debug;
use crate::{services, utils, Error, Result};

/// Represents data required to be kept in order to implement the presence
/// specification.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Presence {
	pub state: PresenceState,
	pub currently_active: bool,
	pub last_active_ts: u64,
	pub last_count: u64,
	pub status_msg: Option<String>,
}

impl Presence {
	pub fn new(
		state: PresenceState, currently_active: bool, last_active_ts: u64, last_count: u64, status_msg: Option<String>,
	) -> Self {
		Self {
			state,
			currently_active,
			last_active_ts,
			last_count,
			status_msg,
		}
	}

	pub fn from_json_bytes(bytes: &[u8]) -> Result<Self> {
		serde_json::from_slice(bytes).map_err(|_| Error::bad_database("Invalid presence data in database"))
	}

	pub fn to_json_bytes(&self) -> Result<Vec<u8>> {
		serde_json::to_vec(self).map_err(|_| Error::bad_database("Could not serialize Presence to JSON"))
	}

	/// Creates a PresenceEvent from available data.
	pub fn to_presence_event(&self, user_id: &UserId) -> Result<PresenceEvent> {
		let now = utils::millis_since_unix_epoch();
		let last_active_ago = if self.currently_active {
			None
		} else {
			Some(UInt::new_saturating(now.saturating_sub(self.last_active_ts)))
		};

		Ok(PresenceEvent {
			sender: user_id.to_owned(),
			content: PresenceEventContent {
				presence: self.state.clone(),
				status_msg: self.status_msg.clone(),
				currently_active: Some(self.currently_active),
				last_active_ago,
				displayname: services().users.displayname(user_id)?,
				avatar_url: services().users.avatar_url(user_id)?,
			},
		})
	}
}

pub struct Service {
	pub db: &'static dyn Data,
}

impl Service {
	/// Returns the latest presence event for the given user in the given room.
	pub fn get_presence(&self, room_id: &RoomId, user_id: &UserId) -> Result<Option<PresenceEvent>> {
		self.db.get_presence(room_id, user_id)
	}

	/// Pings the presence of the given user in the given room, setting the
	/// specified state.
	pub fn ping_presence(&self, user_id: &UserId, new_state: PresenceState) -> Result<()> {
		self.db.ping_presence(user_id, new_state)
	}

	/// Adds a presence event which will be saved until a new event replaces it.
	pub fn set_presence(
		&self, room_id: &RoomId, user_id: &UserId, presence_state: PresenceState, currently_active: Option<bool>,
		last_active_ago: Option<UInt>, status_msg: Option<String>,
	) -> Result<()> {
		self.db.set_presence(room_id, user_id, presence_state, currently_active, last_active_ago, status_msg)
	}

	/// Removes the presence record for the given user from the database.
	pub fn remove_presence(&self, user_id: &UserId) -> Result<()> { self.db.remove_presence(user_id) }

	/// Returns the most recent presence updates that happened after the event
	/// with id `since`.
	pub fn presence_since(
		&self, room_id: &RoomId, since: u64,
	) -> Box<dyn Iterator<Item = (OwnedUserId, u64, PresenceEvent)>> {
		self.db.presence_since(room_id, since)
	}
}

pub async fn presence_handler(
	mut presence_timer_receiver: mpsc::UnboundedReceiver<(OwnedUserId, Duration)>,
) -> Result<()> {
	let mut presence_timers = FuturesUnordered::new();

	loop {
		debug!("Number of presence timers: {}", presence_timers.len());

		tokio::select! {
			Some((user_id, timeout)) = presence_timer_receiver.recv() => {
				debug!("Adding timer for user '{user_id}': Timeout {timeout:?}");
				presence_timers.push(presence_timer(user_id, timeout));
			}

			Some(user_id) = presence_timers.next() => {
				process_presence_timer(user_id)?;
			}
		}
	}
}

async fn presence_timer(user_id: OwnedUserId, timeout: Duration) -> OwnedUserId {
	sleep(timeout).await;

	user_id
}

fn process_presence_timer(user_id: OwnedUserId) -> Result<()> {
	let idle_timeout = services().globals.config.presence_idle_timeout_s * 1_000;
	let offline_timeout = services().globals.config.presence_offline_timeout_s * 1_000;

	let mut presence_state = PresenceState::Offline;
	let mut last_active_ago = None;
	let mut status_msg = None;

	for room_id in services().rooms.state_cache.rooms_joined(&user_id) {
		let presence_event = services().rooms.edus.presence.get_presence(&room_id?, &user_id)?;

		if let Some(presence_event) = presence_event {
			presence_state = presence_event.content.presence;
			last_active_ago = presence_event.content.last_active_ago;
			status_msg = presence_event.content.status_msg;

			break;
		}
	}

	let new_state = match (&presence_state, last_active_ago.map(u64::from)) {
		(PresenceState::Online, Some(ago)) if ago >= idle_timeout => Some(PresenceState::Unavailable),
		(PresenceState::Unavailable, Some(ago)) if ago >= offline_timeout => Some(PresenceState::Offline),
		_ => None,
	};

	debug!("Processed presence timer for user '{user_id}': Old state = {presence_state}, New state = {new_state:?}");

	if let Some(new_state) = new_state {
		for room_id in services().rooms.state_cache.rooms_joined(&user_id) {
			services().rooms.edus.presence.set_presence(
				&room_id?,
				&user_id,
				new_state.clone(),
				Some(false),
				last_active_ago,
				status_msg.clone(),
			)?;
		}
	}

	Ok(())
}

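Note (illustration, not part of the diff): a sketch of the Presence serialization round trip used by the service above, assuming it is written inside this same module; the field values are made up.

// Build a Presence record, store it as JSON bytes, and read it back.
fn presence_roundtrip_example() -> Result<()> {
	let presence = Presence::new(PresenceState::Online, true, utils::millis_since_unix_epoch(), 1, None);
	let bytes = presence.to_json_bytes()?;
	let restored = Presence::from_json_bytes(&bytes)?;
	debug_assert_eq!(restored.state, PresenceState::Online);
	Ok(())
}
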
@@ -1,32 +1,28 @@
use ruma::{
	events::{receipt::ReceiptEvent, AnySyncEphemeralRoomEvent},
	serde::Raw,
	OwnedUserId, RoomId, UserId,
};

use crate::Result;

type AnySyncEphemeralRoomEventIter<'a> =
	Box<dyn Iterator<Item = Result<(OwnedUserId, u64, Raw<AnySyncEphemeralRoomEvent>)>> + 'a>;

pub trait Data: Send + Sync {
	/// Replaces the previous read receipt.
	fn readreceipt_update(&self, user_id: &UserId, room_id: &RoomId, event: ReceiptEvent) -> Result<()>;

	/// Returns an iterator over the most recent read_receipts in a room that
	/// happened after the event with id `since`.
	fn readreceipts_since(&self, room_id: &RoomId, since: u64) -> AnySyncEphemeralRoomEventIter<'_>;

	/// Sets a private read marker at `count`.
	fn private_read_set(&self, room_id: &RoomId, user_id: &UserId, count: u64) -> Result<()>;

	/// Returns the private read marker.
	fn private_read_get(&self, room_id: &RoomId, user_id: &UserId) -> Result<Option<u64>>;

	/// Returns the count of the last typing update in this room.
	fn last_privateread_update(&self, user_id: &UserId, room_id: &RoomId) -> Result<u64>;
}

@@ -1,55 +1,43 @@
mod data;

pub use data::Data;
use ruma::{events::receipt::ReceiptEvent, serde::Raw, OwnedUserId, RoomId, UserId};

use crate::Result;

pub struct Service {
	pub db: &'static dyn Data,
}

impl Service {
	/// Replaces the previous read receipt.
	pub fn readreceipt_update(&self, user_id: &UserId, room_id: &RoomId, event: ReceiptEvent) -> Result<()> {
		self.db.readreceipt_update(user_id, room_id, event)
	}

	/// Returns an iterator over the most recent read_receipts in a room that
	/// happened after the event with id `since`.
	#[tracing::instrument(skip(self))]
	pub fn readreceipts_since<'a>(
		&'a self, room_id: &RoomId, since: u64,
	) -> impl Iterator<Item = Result<(OwnedUserId, u64, Raw<ruma::events::AnySyncEphemeralRoomEvent>)>> + 'a {
		self.db.readreceipts_since(room_id, since)
	}

	/// Sets a private read marker at `count`.
	#[tracing::instrument(skip(self))]
	pub fn private_read_set(&self, room_id: &RoomId, user_id: &UserId, count: u64) -> Result<()> {
		self.db.private_read_set(room_id, user_id, count)
	}

	/// Returns the private read marker.
	#[tracing::instrument(skip(self))]
	pub fn private_read_get(&self, room_id: &RoomId, user_id: &UserId) -> Result<Option<u64>> {
		self.db.private_read_get(room_id, user_id)
	}

	/// Returns the count of the last typing update in this room.
	pub fn last_privateread_update(&self, user_id: &UserId, room_id: &RoomId) -> Result<u64> {
		self.db.last_privateread_update(user_id, room_id)
	}
}

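Note (illustration, not part of the diff): a hypothetical caller for the private read-marker pair above, assuming it sits in this same module; advance_private_marker and the count parameter are placeholders.

// Only move the private marker forward, never backwards.
fn advance_private_marker(svc: &Service, room_id: &RoomId, user_id: &UserId, count: u64) -> Result<()> {
	if svc.private_read_get(room_id, user_id)?.map_or(true, |old| old < count) {
		svc.private_read_set(room_id, user_id, count)?;
	}
	Ok(())
}
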
@@ -1,21 +1,23 @@
use std::collections::HashSet;

use ruma::{OwnedUserId, RoomId, UserId};

use crate::Result;

pub trait Data: Send + Sync {
	/// Sets a user as typing until the timeout timestamp is reached or
	/// roomtyping_remove is called.
	fn typing_add(&self, user_id: &UserId, room_id: &RoomId, timeout: u64) -> Result<()>;

	/// Removes a user from typing before the timeout is reached.
	fn typing_remove(&self, user_id: &UserId, room_id: &RoomId) -> Result<()>;

	/// Makes sure that typing events with old timestamps get removed.
	fn typings_maintain(&self, room_id: &RoomId) -> Result<()>;

	/// Returns the count of the last typing update in this room.
	fn last_typing_update(&self, room_id: &RoomId) -> Result<u64>;

	/// Returns all user ids currently typing.
	fn typings_all(&self, room_id: &RoomId) -> Result<HashSet<OwnedUserId>>;
}

@@ -6,44 +6,41 @@ use ruma::{events::SyncEphemeralRoomEvent, RoomId, UserId};
use crate::Result;

pub struct Service {
	pub db: &'static dyn Data,
}

impl Service {
	/// Sets a user as typing until the timeout timestamp is reached or
	/// roomtyping_remove is called.
	pub fn typing_add(&self, user_id: &UserId, room_id: &RoomId, timeout: u64) -> Result<()> {
		self.db.typing_add(user_id, room_id, timeout)
	}

	/// Removes a user from typing before the timeout is reached.
	pub fn typing_remove(&self, user_id: &UserId, room_id: &RoomId) -> Result<()> {
		self.db.typing_remove(user_id, room_id)
	}

	/// Makes sure that typing events with old timestamps get removed.
	fn typings_maintain(&self, room_id: &RoomId) -> Result<()> { self.db.typings_maintain(room_id) }

	/// Returns the count of the last typing update in this room.
	pub fn last_typing_update(&self, room_id: &RoomId) -> Result<u64> {
		self.typings_maintain(room_id)?;

		self.db.last_typing_update(room_id)
	}

	/// Returns a new typing EDU.
	pub fn typings_all(
		&self, room_id: &RoomId,
	) -> Result<SyncEphemeralRoomEvent<ruma::events::typing::TypingEventContent>> {
		let user_ids = self.db.typings_all(room_id)?;

		Ok(SyncEphemeralRoomEvent {
			content: ruma::events::typing::TypingEventContent {
				user_ids: user_ids.into_iter().collect(),
			},
		})
	}
}

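Note (illustration, not part of the diff): a sketch of the typing flow above, assuming it is placed in this same module; the timeout argument is a unix-millis timestamp chosen by the caller, and mark_typing is a hypothetical name.

// Record a typing notification and build the m.typing EDU handed out by sync.
fn mark_typing(svc: &Service, user_id: &UserId, room_id: &RoomId, timeout: u64) -> Result<()> {
	svc.typing_add(user_id, room_id, timeout)?;
	let edu = svc.typings_all(room_id)?;
	debug_assert!(edu.content.user_ids.contains(&user_id.to_owned()));
	Ok(())
}
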
File diff suppressed because it is too large

@@ -1,27 +1,16 @@
use ruma::{DeviceId, RoomId, UserId};

use crate::Result;

pub trait Data: Send + Sync {
	fn lazy_load_was_sent_before(
		&self, user_id: &UserId, device_id: &DeviceId, room_id: &RoomId, ll_user: &UserId,
	) -> Result<bool>;

	fn lazy_load_confirm_delivery(
		&self, user_id: &UserId, device_id: &DeviceId, room_id: &RoomId,
		confirmed_user_ids: &mut dyn Iterator<Item = &UserId>,
	) -> Result<()>;

	fn lazy_load_reset(&self, user_id: &UserId, device_id: &DeviceId, room_id: &RoomId) -> Result<()>;
}

@@ -1,92 +1,62 @@
mod data;
use std::{
	collections::{HashMap, HashSet},
	sync::Mutex,
};

pub use data::Data;
use ruma::{DeviceId, OwnedDeviceId, OwnedRoomId, OwnedUserId, RoomId, UserId};

use super::timeline::PduCount;
use crate::Result;

type LazyLoadWaitingMutex = Mutex<HashMap<(OwnedUserId, OwnedDeviceId, OwnedRoomId, PduCount), HashSet<OwnedUserId>>>;

pub struct Service {
	pub db: &'static dyn Data,

	pub lazy_load_waiting: LazyLoadWaitingMutex,
}

impl Service {
	#[tracing::instrument(skip(self))]
	pub fn lazy_load_was_sent_before(
		&self, user_id: &UserId, device_id: &DeviceId, room_id: &RoomId, ll_user: &UserId,
	) -> Result<bool> {
		self.db.lazy_load_was_sent_before(user_id, device_id, room_id, ll_user)
	}

	#[tracing::instrument(skip(self))]
	pub fn lazy_load_mark_sent(
		&self, user_id: &UserId, device_id: &DeviceId, room_id: &RoomId, lazy_load: HashSet<OwnedUserId>,
		count: PduCount,
	) {
		self.lazy_load_waiting
			.lock()
			.unwrap()
			.insert((user_id.to_owned(), device_id.to_owned(), room_id.to_owned(), count), lazy_load);
	}

	#[tracing::instrument(skip(self))]
	pub fn lazy_load_confirm_delivery(
		&self, user_id: &UserId, device_id: &DeviceId, room_id: &RoomId, since: PduCount,
	) -> Result<()> {
		if let Some(user_ids) = self.lazy_load_waiting.lock().unwrap().remove(&(
			user_id.to_owned(),
			device_id.to_owned(),
			room_id.to_owned(),
			since,
		)) {
			self.db.lazy_load_confirm_delivery(user_id, device_id, room_id, &mut user_ids.iter().map(|u| &**u))?;
		} else {
			// Ignore
		}

		Ok(())
	}

	#[tracing::instrument(skip(self))]
	pub fn lazy_load_reset(&self, user_id: &UserId, device_id: &DeviceId, room_id: &RoomId) -> Result<()> {
		self.db.lazy_load_reset(user_id, device_id, room_id)
	}
}

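Note (illustration, not part of the diff): the mark_sent / confirm_delivery handshake above in one hypothetical helper, assuming it sits in this same module and that PduCount is Copy (it is passed by value twice here).

// Remember which members were lazy-loaded into one sync batch, then persist them
// once the next request proves the client saw that batch.
fn remember_sent_members(
	svc: &Service, user_id: &UserId, device_id: &DeviceId, room_id: &RoomId, sent: HashSet<OwnedUserId>,
	batch: PduCount,
) -> Result<()> {
	svc.lazy_load_mark_sent(user_id, device_id, room_id, sent, batch);
	svc.lazy_load_confirm_delivery(user_id, device_id, room_id, batch)
}
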
@@ -1,12 +1,13 @@
use ruma::{OwnedRoomId, RoomId};

use crate::Result;

pub trait Data: Send + Sync {
	fn exists(&self, room_id: &RoomId) -> Result<bool>;
	fn iter_ids<'a>(&'a self) -> Box<dyn Iterator<Item = Result<OwnedRoomId>> + 'a>;
	fn is_disabled(&self, room_id: &RoomId) -> Result<bool>;
	fn disable_room(&self, room_id: &RoomId, disabled: bool) -> Result<()>;
	fn is_banned(&self, room_id: &RoomId) -> Result<bool>;
	fn ban_room(&self, room_id: &RoomId, banned: bool) -> Result<()>;
	fn list_banned_rooms<'a>(&'a self) -> Box<dyn Iterator<Item = Result<OwnedRoomId>> + 'a>;
}

@@ -6,37 +6,27 @@ use ruma::{OwnedRoomId, RoomId};
use crate::Result;

pub struct Service {
	pub db: &'static dyn Data,
}

impl Service {
	/// Checks if a room exists.
	#[tracing::instrument(skip(self))]
	pub fn exists(&self, room_id: &RoomId) -> Result<bool> { self.db.exists(room_id) }

	pub fn iter_ids<'a>(&'a self) -> Box<dyn Iterator<Item = Result<OwnedRoomId>> + 'a> { self.db.iter_ids() }

	pub fn is_disabled(&self, room_id: &RoomId) -> Result<bool> { self.db.is_disabled(room_id) }

	pub fn disable_room(&self, room_id: &RoomId, disabled: bool) -> Result<()> {
		self.db.disable_room(room_id, disabled)
	}

	pub fn is_banned(&self, room_id: &RoomId) -> Result<bool> { self.db.is_banned(room_id) }

	pub fn ban_room(&self, room_id: &RoomId, banned: bool) -> Result<()> { self.db.ban_room(room_id, banned) }

	pub fn list_banned_rooms<'a>(&'a self) -> Box<dyn Iterator<Item = Result<OwnedRoomId>> + 'a> {
		self.db.list_banned_rooms()
	}
}

@@ -19,44 +19,44 @@ pub mod timeline;
pub mod user;

pub trait Data:
	alias::Data
	+ auth_chain::Data
	+ directory::Data
	+ edus::Data
	+ lazy_loading::Data
	+ metadata::Data
	+ outlier::Data
	+ pdu_metadata::Data
	+ search::Data
	+ short::Data
	+ state::Data
	+ state_accessor::Data
	+ state_cache::Data
	+ state_compressor::Data
	+ timeline::Data
	+ threads::Data
	+ user::Data
{
}

pub struct Service {
	pub alias: alias::Service,
	pub auth_chain: auth_chain::Service,
	pub directory: directory::Service,
	pub edus: edus::Service,
	pub event_handler: event_handler::Service,
	pub lazy_loading: lazy_loading::Service,
	pub metadata: metadata::Service,
	pub outlier: outlier::Service,
	pub pdu_metadata: pdu_metadata::Service,
	pub search: search::Service,
	pub short: short::Service,
	pub state: state::Service,
	pub state_accessor: state_accessor::Service,
	pub state_cache: state_cache::Service,
	pub state_compressor: state_compressor::Service,
	pub timeline: timeline::Service,
	pub threads: threads::Service,
	pub spaces: spaces::Service,
	pub user: user::Service,
}

@@ -3,7 +3,7 @@ use ruma::{CanonicalJsonObject, EventId};
use crate::{PduEvent, Result};

pub trait Data: Send + Sync {
	fn get_outlier_pdu_json(&self, event_id: &EventId) -> Result<Option<CanonicalJsonObject>>;
	fn get_outlier_pdu(&self, event_id: &EventId) -> Result<Option<PduEvent>>;
	fn add_pdu_outlier(&self, event_id: &EventId, pdu: &CanonicalJsonObject) -> Result<()>;
}

@@ -6,23 +6,21 @@ use ruma::{CanonicalJsonObject, EventId};
use crate::{PduEvent, Result};

pub struct Service {
	pub db: &'static dyn Data,
}

impl Service {
	/// Returns the pdu from the outlier tree.
	pub fn get_outlier_pdu_json(&self, event_id: &EventId) -> Result<Option<CanonicalJsonObject>> {
		self.db.get_outlier_pdu_json(event_id)
	}

	/// Returns the pdu from the outlier tree.
	pub fn get_pdu_outlier(&self, event_id: &EventId) -> Result<Option<PduEvent>> { self.db.get_outlier_pdu(event_id) }

	/// Append the PDU as an outlier.
	#[tracing::instrument(skip(self, pdu))]
	pub fn add_pdu_outlier(&self, event_id: &EventId, pdu: &CanonicalJsonObject) -> Result<()> {
		self.db.add_pdu_outlier(event_id, pdu)
	}
}

@@ -1,20 +1,17 @@
use std::sync::Arc;

use ruma::{EventId, RoomId, UserId};

use crate::{service::rooms::timeline::PduCount, PduEvent, Result};

pub trait Data: Send + Sync {
	fn add_relation(&self, from: u64, to: u64) -> Result<()>;
	#[allow(clippy::type_complexity)]
	fn relations_until<'a>(
		&'a self, user_id: &'a UserId, room_id: u64, target: u64, until: PduCount,
	) -> Result<Box<dyn Iterator<Item = Result<(PduCount, PduEvent)>> + 'a>>;
	fn mark_as_referenced(&self, room_id: &RoomId, event_ids: &[Arc<EventId>]) -> Result<()>;
	fn is_event_referenced(&self, room_id: &RoomId, event_id: &EventId) -> Result<bool>;
	fn mark_event_soft_failed(&self, event_id: &EventId) -> Result<()>;
	fn is_event_soft_failed(&self, event_id: &EventId) -> Result<bool>;
}

@@ -3,188 +3,166 @@ use std::sync::Arc;
|
|||
|
||||
pub use data::Data;
|
||||
use ruma::{
|
||||
api::client::relations::get_relating_events,
|
||||
events::{relation::RelationType, TimelineEventType},
|
||||
EventId, RoomId, UserId,
|
||||
api::client::relations::get_relating_events,
|
||||
events::{relation::RelationType, TimelineEventType},
|
||||
EventId, RoomId, UserId,
|
||||
};
|
||||
use serde::Deserialize;
|
||||
|
||||
use super::timeline::PduCount;
|
||||
use crate::{services, PduEvent, Result};
|
||||
|
||||
use super::timeline::PduCount;
|
||||
|
||||
pub struct Service {
|
||||
pub db: &'static dyn Data,
|
||||
pub db: &'static dyn Data,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
struct ExtractRelType {
|
||||
rel_type: RelationType,
|
||||
rel_type: RelationType,
|
||||
}
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
struct ExtractRelatesToEventId {
|
||||
#[serde(rename = "m.relates_to")]
|
||||
relates_to: ExtractRelType,
|
||||
#[serde(rename = "m.relates_to")]
|
||||
relates_to: ExtractRelType,
|
||||
}
|
||||
|
||||
impl Service {
|
||||
#[tracing::instrument(skip(self, from, to))]
|
||||
pub fn add_relation(&self, from: PduCount, to: PduCount) -> Result<()> {
|
||||
match (from, to) {
|
||||
(PduCount::Normal(f), PduCount::Normal(t)) => self.db.add_relation(f, t),
|
||||
_ => {
|
||||
// TODO: Relations with backfilled pdus
|
||||
#[tracing::instrument(skip(self, from, to))]
|
||||
pub fn add_relation(&self, from: PduCount, to: PduCount) -> Result<()> {
|
||||
match (from, to) {
|
||||
(PduCount::Normal(f), PduCount::Normal(t)) => self.db.add_relation(f, t),
|
||||
_ => {
|
||||
// TODO: Relations with backfilled pdus
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn paginate_relations_with_filter(
|
||||
&self,
|
||||
sender_user: &UserId,
|
||||
room_id: &RoomId,
|
||||
target: &EventId,
|
||||
filter_event_type: Option<TimelineEventType>,
|
||||
filter_rel_type: Option<RelationType>,
|
||||
from: PduCount,
|
||||
to: Option<PduCount>,
|
||||
limit: usize,
|
||||
) -> Result<get_relating_events::v1::Response> {
|
||||
let next_token;
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn paginate_relations_with_filter(
|
||||
&self, sender_user: &UserId, room_id: &RoomId, target: &EventId, filter_event_type: Option<TimelineEventType>,
|
||||
filter_rel_type: Option<RelationType>, from: PduCount, to: Option<PduCount>, limit: usize,
|
||||
) -> Result<get_relating_events::v1::Response> {
|
||||
let next_token;
|
||||
|
||||
//TODO: Fix ruma: match body.dir {
|
||||
match ruma::api::Direction::Backward {
|
||||
ruma::api::Direction::Forward => {
|
||||
let events_after: Vec<_> = services()
|
||||
.rooms
|
||||
.pdu_metadata
|
||||
.relations_until(sender_user, room_id, target, from)? // TODO: should be relations_after
|
||||
.filter(|r| {
|
||||
r.as_ref().map_or(true, |(_, pdu)| {
|
||||
filter_event_type.as_ref().map_or(true, |t| &pdu.kind == t)
|
||||
&& if let Ok(content) =
|
||||
serde_json::from_str::<ExtractRelatesToEventId>(
|
||||
pdu.content.get(),
|
||||
)
|
||||
{
|
||||
filter_rel_type
|
||||
.as_ref()
|
||||
.map_or(true, |r| &content.relates_to.rel_type == r)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
})
|
||||
})
|
||||
.take(limit)
|
||||
.filter_map(std::result::Result::ok) // Filter out buggy events
|
||||
.filter(|(_, pdu)| {
|
||||
services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.user_can_see_event(sender_user, room_id, &pdu.event_id)
|
||||
.unwrap_or(false)
|
||||
})
|
||||
.take_while(|&(k, _)| Some(k) != to) // Stop at `to`
|
||||
.collect();
|
||||
//TODO: Fix ruma: match body.dir {
|
||||
match ruma::api::Direction::Backward {
|
||||
ruma::api::Direction::Forward => {
|
||||
let events_after: Vec<_> = services()
|
||||
.rooms
|
||||
.pdu_metadata
|
||||
.relations_until(sender_user, room_id, target, from)? // TODO: should be relations_after
|
||||
.filter(|r| {
|
||||
r.as_ref().map_or(true, |(_, pdu)| {
|
||||
filter_event_type.as_ref().map_or(true, |t| &pdu.kind == t)
|
||||
&& if let Ok(content) =
|
||||
serde_json::from_str::<ExtractRelatesToEventId>(pdu.content.get())
|
||||
{
|
||||
filter_rel_type
|
||||
.as_ref()
|
||||
.map_or(true, |r| &content.relates_to.rel_type == r)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
})
|
||||
})
|
||||
.take(limit)
|
||||
.filter_map(std::result::Result::ok) // Filter out buggy events
|
||||
.filter(|(_, pdu)| {
|
||||
services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.user_can_see_event(sender_user, room_id, &pdu.event_id)
|
||||
.unwrap_or(false)
|
||||
})
|
||||
.take_while(|&(k, _)| Some(k) != to) // Stop at `to`
|
||||
.collect();
|
||||
|
||||
next_token = events_after.last().map(|(count, _)| count).copied();
|
||||
next_token = events_after.last().map(|(count, _)| count).copied();
|
||||
|
||||
let events_after: Vec<_> = events_after
|
||||
.into_iter()
|
||||
.rev() // relations are always most recent first
|
||||
.map(|(_, pdu)| pdu.to_message_like_event())
|
||||
.collect();
|
||||
let events_after: Vec<_> = events_after
|
||||
.into_iter()
|
||||
.rev() // relations are always most recent first
|
||||
.map(|(_, pdu)| pdu.to_message_like_event())
|
||||
.collect();
|
||||
|
||||
Ok(get_relating_events::v1::Response {
|
||||
chunk: events_after,
|
||||
next_batch: next_token.map(|t| t.stringify()),
|
||||
prev_batch: Some(from.stringify()),
|
||||
})
|
||||
}
|
||||
ruma::api::Direction::Backward => {
|
||||
let events_before: Vec<_> = services()
|
||||
.rooms
|
||||
.pdu_metadata
|
||||
.relations_until(sender_user, room_id, target, from)?
|
||||
.filter(|r| {
|
||||
r.as_ref().map_or(true, |(_, pdu)| {
|
||||
filter_event_type.as_ref().map_or(true, |t| &pdu.kind == t)
|
||||
&& if let Ok(content) =
|
||||
serde_json::from_str::<ExtractRelatesToEventId>(
|
||||
pdu.content.get(),
|
||||
)
|
||||
{
|
||||
filter_rel_type
|
||||
.as_ref()
|
||||
.map_or(true, |r| &content.relates_to.rel_type == r)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
})
|
||||
})
|
||||
.take(limit)
|
||||
.filter_map(std::result::Result::ok) // Filter out buggy events
|
||||
.filter(|(_, pdu)| {
|
||||
services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.user_can_see_event(sender_user, room_id, &pdu.event_id)
|
||||
.unwrap_or(false)
|
||||
})
|
||||
.take_while(|&(k, _)| Some(k) != to) // Stop at `to`
|
||||
.collect();
|
||||
Ok(get_relating_events::v1::Response {
|
||||
chunk: events_after,
|
||||
next_batch: next_token.map(|t| t.stringify()),
|
||||
prev_batch: Some(from.stringify()),
|
||||
})
|
||||
},
|
||||
ruma::api::Direction::Backward => {
|
||||
let events_before: Vec<_> = services()
|
||||
.rooms
|
||||
.pdu_metadata
|
||||
.relations_until(sender_user, room_id, target, from)?
|
||||
.filter(|r| {
|
||||
r.as_ref().map_or(true, |(_, pdu)| {
|
||||
filter_event_type.as_ref().map_or(true, |t| &pdu.kind == t)
|
||||
&& if let Ok(content) =
|
||||
serde_json::from_str::<ExtractRelatesToEventId>(pdu.content.get())
|
||||
{
|
||||
filter_rel_type
|
||||
.as_ref()
|
||||
.map_or(true, |r| &content.relates_to.rel_type == r)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
})
|
||||
})
|
||||
.take(limit)
|
||||
.filter_map(std::result::Result::ok) // Filter out buggy events
|
||||
.filter(|(_, pdu)| {
|
||||
services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.user_can_see_event(sender_user, room_id, &pdu.event_id)
|
||||
.unwrap_or(false)
|
||||
})
|
||||
.take_while(|&(k, _)| Some(k) != to) // Stop at `to`
|
||||
.collect();
|
||||
|
||||
next_token = events_before.last().map(|(count, _)| count).copied();
|
||||
next_token = events_before.last().map(|(count, _)| count).copied();
|
||||
|
||||
let events_before: Vec<_> = events_before
|
||||
.into_iter()
|
||||
.map(|(_, pdu)| pdu.to_message_like_event())
|
||||
.collect();
|
||||
let events_before: Vec<_> =
|
||||
events_before.into_iter().map(|(_, pdu)| pdu.to_message_like_event()).collect();
|
||||
|
||||
Ok(get_relating_events::v1::Response {
|
||||
chunk: events_before,
|
||||
next_batch: next_token.map(|t| t.stringify()),
|
||||
prev_batch: Some(from.stringify()),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(get_relating_events::v1::Response {
|
||||
chunk: events_before,
|
||||
next_batch: next_token.map(|t| t.stringify()),
|
||||
prev_batch: Some(from.stringify()),
|
||||
})
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pub fn relations_until<'a>(
|
||||
&'a self,
|
||||
user_id: &'a UserId,
|
||||
room_id: &'a RoomId,
|
||||
target: &'a EventId,
|
||||
until: PduCount,
|
||||
) -> Result<impl Iterator<Item = Result<(PduCount, PduEvent)>> + 'a> {
|
||||
let room_id = services().rooms.short.get_or_create_shortroomid(room_id)?;
|
||||
let target = match services().rooms.timeline.get_pdu_count(target)? {
|
||||
Some(PduCount::Normal(c)) => c,
|
||||
// TODO: Support backfilled relations
|
||||
_ => 0, // This will result in an empty iterator
|
||||
};
|
||||
self.db.relations_until(user_id, room_id, target, until)
|
||||
}
|
||||
pub fn relations_until<'a>(
|
||||
&'a self, user_id: &'a UserId, room_id: &'a RoomId, target: &'a EventId, until: PduCount,
|
||||
) -> Result<impl Iterator<Item = Result<(PduCount, PduEvent)>> + 'a> {
|
||||
let room_id = services().rooms.short.get_or_create_shortroomid(room_id)?;
|
||||
let target = match services().rooms.timeline.get_pdu_count(target)? {
|
||||
Some(PduCount::Normal(c)) => c,
|
||||
// TODO: Support backfilled relations
|
||||
_ => 0, // This will result in an empty iterator
|
||||
};
|
||||
self.db.relations_until(user_id, room_id, target, until)
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self, room_id, event_ids))]
|
||||
pub fn mark_as_referenced(&self, room_id: &RoomId, event_ids: &[Arc<EventId>]) -> Result<()> {
|
||||
self.db.mark_as_referenced(room_id, event_ids)
|
||||
}
|
||||
#[tracing::instrument(skip(self, room_id, event_ids))]
|
||||
pub fn mark_as_referenced(&self, room_id: &RoomId, event_ids: &[Arc<EventId>]) -> Result<()> {
|
||||
self.db.mark_as_referenced(room_id, event_ids)
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn is_event_referenced(&self, room_id: &RoomId, event_id: &EventId) -> Result<bool> {
|
||||
self.db.is_event_referenced(room_id, event_id)
|
||||
}
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn is_event_referenced(&self, room_id: &RoomId, event_id: &EventId) -> Result<bool> {
|
||||
self.db.is_event_referenced(room_id, event_id)
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn mark_event_soft_failed(&self, event_id: &EventId) -> Result<()> {
|
||||
self.db.mark_event_soft_failed(event_id)
|
||||
}
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn mark_event_soft_failed(&self, event_id: &EventId) -> Result<()> { self.db.mark_event_soft_failed(event_id) }
|
||||
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn is_event_soft_failed(&self, event_id: &EventId) -> Result<bool> {
|
||||
self.db.is_event_soft_failed(event_id)
|
||||
}
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn is_event_soft_failed(&self, event_id: &EventId) -> Result<bool> { self.db.is_event_soft_failed(event_id) }
|
||||
}
@@ -1,10 +1,11 @@
use crate::Result;
use ruma::RoomId;

use crate::Result;

type SearchPdusResult<'a> = Result<Option<(Box<dyn Iterator<Item = Vec<u8>> + 'a>, Vec<String>)>>;

pub trait Data: Send + Sync {
	fn index_pdu(&self, shortroomid: u64, pdu_id: &[u8], message_body: &str) -> Result<()>;
	fn index_pdu(&self, shortroomid: u64, pdu_id: &[u8], message_body: &str) -> Result<()>;

	fn search_pdus<'a>(&'a self, room_id: &RoomId, search_string: &str) -> SearchPdusResult<'a>;
	fn search_pdus<'a>(&'a self, room_id: &RoomId, search_string: &str) -> SearchPdusResult<'a>;
}

@@ -1,26 +1,24 @@
mod data;

pub use data::Data;

use crate::Result;
use ruma::RoomId;

use crate::Result;

pub struct Service {
	pub db: &'static dyn Data,
	pub db: &'static dyn Data,
}

impl Service {
	#[tracing::instrument(skip(self))]
	pub fn index_pdu(&self, shortroomid: u64, pdu_id: &[u8], message_body: &str) -> Result<()> {
		self.db.index_pdu(shortroomid, pdu_id, message_body)
	}
	#[tracing::instrument(skip(self))]
	pub fn index_pdu(&self, shortroomid: u64, pdu_id: &[u8], message_body: &str) -> Result<()> {
		self.db.index_pdu(shortroomid, pdu_id, message_body)
	}

	#[tracing::instrument(skip(self))]
	pub fn search_pdus<'a>(
		&'a self,
		room_id: &RoomId,
		search_string: &str,
	) -> Result<Option<(impl Iterator<Item = Vec<u8>> + 'a, Vec<String>)>> {
		self.db.search_pdus(room_id, search_string)
	}
	#[tracing::instrument(skip(self))]
	pub fn search_pdus<'a>(
		&'a self, room_id: &RoomId, search_string: &str,
	) -> Result<Option<(impl Iterator<Item = Vec<u8>> + 'a, Vec<String>)>> {
		self.db.search_pdus(room_id, search_string)
	}
}

@@ -1,31 +1,24 @@
use std::sync::Arc;

use crate::Result;
use ruma::{events::StateEventType, EventId, RoomId};

use crate::Result;

pub trait Data: Send + Sync {
	fn get_or_create_shorteventid(&self, event_id: &EventId) -> Result<u64>;
	fn get_or_create_shorteventid(&self, event_id: &EventId) -> Result<u64>;

	fn get_shortstatekey(
		&self,
		event_type: &StateEventType,
		state_key: &str,
	) -> Result<Option<u64>>;
	fn get_shortstatekey(&self, event_type: &StateEventType, state_key: &str) -> Result<Option<u64>>;

	fn get_or_create_shortstatekey(
		&self,
		event_type: &StateEventType,
		state_key: &str,
	) -> Result<u64>;
	fn get_or_create_shortstatekey(&self, event_type: &StateEventType, state_key: &str) -> Result<u64>;

	fn get_eventid_from_short(&self, shorteventid: u64) -> Result<Arc<EventId>>;
	fn get_eventid_from_short(&self, shorteventid: u64) -> Result<Arc<EventId>>;

	fn get_statekey_from_short(&self, shortstatekey: u64) -> Result<(StateEventType, String)>;
	fn get_statekey_from_short(&self, shortstatekey: u64) -> Result<(StateEventType, String)>;

	/// Returns (shortstatehash, already_existed)
	fn get_or_create_shortstatehash(&self, state_hash: &[u8]) -> Result<(u64, bool)>;
	/// Returns (shortstatehash, already_existed)
	fn get_or_create_shortstatehash(&self, state_hash: &[u8]) -> Result<(u64, bool)>;

	fn get_shortroomid(&self, room_id: &RoomId) -> Result<Option<u64>>;
	fn get_shortroomid(&self, room_id: &RoomId) -> Result<Option<u64>>;

	fn get_or_create_shortroomid(&self, room_id: &RoomId) -> Result<u64>;
	fn get_or_create_shortroomid(&self, room_id: &RoomId) -> Result<u64>;
}

@@ -7,48 +7,38 @@ use ruma::{events::StateEventType, EventId, RoomId};
|
|||
use crate::Result;
|
||||
|
||||
pub struct Service {
|
||||
pub db: &'static dyn Data,
|
||||
pub db: &'static dyn Data,
|
||||
}
|
||||
|
||||
impl Service {
|
||||
pub fn get_or_create_shorteventid(&self, event_id: &EventId) -> Result<u64> {
|
||||
self.db.get_or_create_shorteventid(event_id)
|
||||
}
|
||||
pub fn get_or_create_shorteventid(&self, event_id: &EventId) -> Result<u64> {
|
||||
self.db.get_or_create_shorteventid(event_id)
|
||||
}
|
||||
|
||||
pub fn get_shortstatekey(
|
||||
&self,
|
||||
event_type: &StateEventType,
|
||||
state_key: &str,
|
||||
) -> Result<Option<u64>> {
|
||||
self.db.get_shortstatekey(event_type, state_key)
|
||||
}
|
||||
pub fn get_shortstatekey(&self, event_type: &StateEventType, state_key: &str) -> Result<Option<u64>> {
|
||||
self.db.get_shortstatekey(event_type, state_key)
|
||||
}
|
||||
|
||||
pub fn get_or_create_shortstatekey(
|
||||
&self,
|
||||
event_type: &StateEventType,
|
||||
state_key: &str,
|
||||
) -> Result<u64> {
|
||||
self.db.get_or_create_shortstatekey(event_type, state_key)
|
||||
}
|
||||
pub fn get_or_create_shortstatekey(&self, event_type: &StateEventType, state_key: &str) -> Result<u64> {
|
||||
self.db.get_or_create_shortstatekey(event_type, state_key)
|
||||
}
|
||||
|
||||
pub fn get_eventid_from_short(&self, shorteventid: u64) -> Result<Arc<EventId>> {
|
||||
self.db.get_eventid_from_short(shorteventid)
|
||||
}
|
||||
pub fn get_eventid_from_short(&self, shorteventid: u64) -> Result<Arc<EventId>> {
|
||||
self.db.get_eventid_from_short(shorteventid)
|
||||
}
|
||||
|
||||
pub fn get_statekey_from_short(&self, shortstatekey: u64) -> Result<(StateEventType, String)> {
|
||||
self.db.get_statekey_from_short(shortstatekey)
|
||||
}
|
||||
pub fn get_statekey_from_short(&self, shortstatekey: u64) -> Result<(StateEventType, String)> {
|
||||
self.db.get_statekey_from_short(shortstatekey)
|
||||
}
|
||||
|
||||
/// Returns (shortstatehash, already_existed)
|
||||
pub fn get_or_create_shortstatehash(&self, state_hash: &[u8]) -> Result<(u64, bool)> {
|
||||
self.db.get_or_create_shortstatehash(state_hash)
|
||||
}
|
||||
/// Returns (shortstatehash, already_existed)
|
||||
pub fn get_or_create_shortstatehash(&self, state_hash: &[u8]) -> Result<(u64, bool)> {
|
||||
self.db.get_or_create_shortstatehash(state_hash)
|
||||
}
|
||||
|
||||
pub fn get_shortroomid(&self, room_id: &RoomId) -> Result<Option<u64>> {
|
||||
self.db.get_shortroomid(room_id)
|
||||
}
|
||||
pub fn get_shortroomid(&self, room_id: &RoomId) -> Result<Option<u64>> { self.db.get_shortroomid(room_id) }
|
||||
|
||||
pub fn get_or_create_shortroomid(&self, room_id: &RoomId) -> Result<u64> {
|
||||
self.db.get_or_create_shortroomid(room_id)
|
||||
}
|
||||
pub fn get_or_create_shortroomid(&self, room_id: &RoomId) -> Result<u64> {
|
||||
self.db.get_or_create_shortroomid(room_id)
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -2,505 +2,419 @@ use std::sync::{Arc, Mutex};
|
|||
|
||||
use lru_cache::LruCache;
|
||||
use ruma::{
|
||||
api::{
|
||||
client::{
|
||||
error::ErrorKind,
|
||||
space::{get_hierarchy, SpaceHierarchyRoomsChunk},
|
||||
},
|
||||
federation,
|
||||
},
|
||||
events::{
|
||||
room::{
|
||||
avatar::RoomAvatarEventContent,
|
||||
canonical_alias::RoomCanonicalAliasEventContent,
|
||||
create::RoomCreateEventContent,
|
||||
guest_access::{GuestAccess, RoomGuestAccessEventContent},
|
||||
history_visibility::{HistoryVisibility, RoomHistoryVisibilityEventContent},
|
||||
join_rules::{self, AllowRule, JoinRule, RoomJoinRulesEventContent},
|
||||
topic::RoomTopicEventContent,
|
||||
},
|
||||
space::child::SpaceChildEventContent,
|
||||
StateEventType,
|
||||
},
|
||||
space::SpaceRoomJoinRule,
|
||||
OwnedRoomId, RoomId, UserId,
|
||||
api::{
|
||||
client::{
|
||||
error::ErrorKind,
|
||||
space::{get_hierarchy, SpaceHierarchyRoomsChunk},
|
||||
},
|
||||
federation,
|
||||
},
|
||||
events::{
|
||||
room::{
|
||||
avatar::RoomAvatarEventContent,
|
||||
canonical_alias::RoomCanonicalAliasEventContent,
|
||||
create::RoomCreateEventContent,
|
||||
guest_access::{GuestAccess, RoomGuestAccessEventContent},
|
||||
history_visibility::{HistoryVisibility, RoomHistoryVisibilityEventContent},
|
||||
join_rules::{self, AllowRule, JoinRule, RoomJoinRulesEventContent},
|
||||
topic::RoomTopicEventContent,
|
||||
},
|
||||
space::child::SpaceChildEventContent,
|
||||
StateEventType,
|
||||
},
|
||||
space::SpaceRoomJoinRule,
|
||||
OwnedRoomId, RoomId, UserId,
|
||||
};
|
||||
|
||||
use tracing::{debug, error, warn};
|
||||
|
||||
use crate::{services, Error, PduEvent, Result};
|
||||
|
||||
pub enum CachedJoinRule {
|
||||
//Simplified(SpaceRoomJoinRule),
|
||||
Full(JoinRule),
|
||||
//Simplified(SpaceRoomJoinRule),
|
||||
Full(JoinRule),
|
||||
}
|
||||
|
||||
pub struct CachedSpaceChunk {
|
||||
chunk: SpaceHierarchyRoomsChunk,
|
||||
children: Vec<OwnedRoomId>,
|
||||
join_rule: CachedJoinRule,
|
||||
chunk: SpaceHierarchyRoomsChunk,
|
||||
children: Vec<OwnedRoomId>,
|
||||
join_rule: CachedJoinRule,
|
||||
}
|
||||
|
||||
pub struct Service {
|
||||
pub roomid_spacechunk_cache: Mutex<LruCache<OwnedRoomId, Option<CachedSpaceChunk>>>,
|
||||
pub roomid_spacechunk_cache: Mutex<LruCache<OwnedRoomId, Option<CachedSpaceChunk>>>,
|
||||
}
|
||||
|
||||
impl Service {
|
||||
pub async fn get_hierarchy(
|
||||
&self,
|
||||
sender_user: &UserId,
|
||||
room_id: &RoomId,
|
||||
limit: usize,
|
||||
skip: usize,
|
||||
max_depth: usize,
|
||||
suggested_only: bool,
|
||||
) -> Result<get_hierarchy::v1::Response> {
|
||||
let mut left_to_skip = skip;
|
||||
pub async fn get_hierarchy(
|
||||
&self, sender_user: &UserId, room_id: &RoomId, limit: usize, skip: usize, max_depth: usize,
|
||||
suggested_only: bool,
|
||||
) -> Result<get_hierarchy::v1::Response> {
|
||||
let mut left_to_skip = skip;
|
||||
|
||||
let mut rooms_in_path = Vec::new();
|
||||
let mut stack = vec![vec![room_id.to_owned()]];
|
||||
let mut results = Vec::new();
|
||||
let mut rooms_in_path = Vec::new();
|
||||
let mut stack = vec![vec![room_id.to_owned()]];
|
||||
let mut results = Vec::new();
|
||||
|
||||
while let Some(current_room) = {
|
||||
while stack.last().map_or(false, std::vec::Vec::is_empty) {
|
||||
stack.pop();
|
||||
}
|
||||
if !stack.is_empty() {
|
||||
stack.last_mut().and_then(std::vec::Vec::pop)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} {
|
||||
rooms_in_path.push(current_room.clone());
|
||||
if results.len() >= limit {
|
||||
break;
|
||||
}
|
||||
while let Some(current_room) = {
|
||||
while stack.last().map_or(false, std::vec::Vec::is_empty) {
|
||||
stack.pop();
|
||||
}
|
||||
if !stack.is_empty() {
|
||||
stack.last_mut().and_then(std::vec::Vec::pop)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} {
|
||||
rooms_in_path.push(current_room.clone());
|
||||
if results.len() >= limit {
|
||||
break;
|
||||
}
|
||||
|
||||
if let Some(cached) = self
|
||||
.roomid_spacechunk_cache
|
||||
.lock()
|
||||
.unwrap()
|
||||
.get_mut(¤t_room.clone())
|
||||
.as_ref()
|
||||
{
|
||||
if let Some(cached) = cached {
|
||||
let allowed = match &cached.join_rule {
|
||||
//CachedJoinRule::Simplified(s) => {
|
||||
//self.handle_simplified_join_rule(s, sender_user, ¤t_room)?
|
||||
//}
|
||||
CachedJoinRule::Full(f) => {
|
||||
self.handle_join_rule(f, sender_user, ¤t_room)?
|
||||
}
|
||||
};
|
||||
if allowed {
|
||||
if left_to_skip > 0 {
|
||||
left_to_skip -= 1;
|
||||
} else {
|
||||
results.push(cached.chunk.clone());
|
||||
}
|
||||
if rooms_in_path.len() < max_depth {
|
||||
stack.push(cached.children.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if let Some(cached) = self.roomid_spacechunk_cache.lock().unwrap().get_mut(¤t_room.clone()).as_ref() {
|
||||
if let Some(cached) = cached {
|
||||
let allowed = match &cached.join_rule {
|
||||
//CachedJoinRule::Simplified(s) => {
|
||||
//self.handle_simplified_join_rule(s, sender_user, ¤t_room)?
|
||||
//}
|
||||
CachedJoinRule::Full(f) => self.handle_join_rule(f, sender_user, ¤t_room)?,
|
||||
};
|
||||
if allowed {
|
||||
if left_to_skip > 0 {
|
||||
left_to_skip -= 1;
|
||||
} else {
|
||||
results.push(cached.chunk.clone());
|
||||
}
|
||||
if rooms_in_path.len() < max_depth {
|
||||
stack.push(cached.children.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some(current_shortstatehash) = services()
|
||||
.rooms
|
||||
.state
|
||||
.get_room_shortstatehash(¤t_room)?
|
||||
{
|
||||
let state = services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.state_full_ids(current_shortstatehash)
|
||||
.await?;
|
||||
if let Some(current_shortstatehash) = services().rooms.state.get_room_shortstatehash(¤t_room)? {
|
||||
let state = services().rooms.state_accessor.state_full_ids(current_shortstatehash).await?;
|
||||
|
||||
let mut children_ids = Vec::new();
|
||||
let mut children_pdus = Vec::new();
|
||||
for (key, id) in state {
|
||||
let (event_type, state_key) =
|
||||
services().rooms.short.get_statekey_from_short(key)?;
|
||||
if event_type != StateEventType::SpaceChild {
|
||||
continue;
|
||||
}
|
||||
let mut children_ids = Vec::new();
|
||||
let mut children_pdus = Vec::new();
|
||||
for (key, id) in state {
|
||||
let (event_type, state_key) = services().rooms.short.get_statekey_from_short(key)?;
|
||||
if event_type != StateEventType::SpaceChild {
|
||||
continue;
|
||||
}
|
||||
|
||||
let pdu = services()
|
||||
.rooms
|
||||
.timeline
|
||||
.get_pdu(&id)?
|
||||
.ok_or_else(|| Error::bad_database("Event in space state not found"))?;
|
||||
let pdu = services()
|
||||
.rooms
|
||||
.timeline
|
||||
.get_pdu(&id)?
|
||||
.ok_or_else(|| Error::bad_database("Event in space state not found"))?;
|
||||
|
||||
if serde_json::from_str::<SpaceChildEventContent>(pdu.content.get())
|
||||
.ok()
|
||||
.map(|c| c.via)
|
||||
.map_or(true, |v| v.is_empty())
|
||||
{
|
||||
continue;
|
||||
}
|
||||
if serde_json::from_str::<SpaceChildEventContent>(pdu.content.get())
|
||||
.ok()
|
||||
.map(|c| c.via)
|
||||
.map_or(true, |v| v.is_empty())
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Ok(room_id) = OwnedRoomId::try_from(state_key) {
|
||||
children_ids.push(room_id);
|
||||
children_pdus.push(pdu);
|
||||
}
|
||||
}
|
||||
if let Ok(room_id) = OwnedRoomId::try_from(state_key) {
|
||||
children_ids.push(room_id);
|
||||
children_pdus.push(pdu);
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: Sort children
|
||||
children_ids.reverse();
|
||||
// TODO: Sort children
|
||||
children_ids.reverse();
|
||||
|
||||
let chunk = self
|
||||
.get_room_chunk(sender_user, ¤t_room, children_pdus)
|
||||
.await;
|
||||
if let Ok(chunk) = chunk {
|
||||
if left_to_skip > 0 {
|
||||
left_to_skip -= 1;
|
||||
} else {
|
||||
results.push(chunk.clone());
|
||||
}
|
||||
let join_rule = services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(¤t_room, &StateEventType::RoomJoinRules, "")?
|
||||
.map(|s| {
|
||||
serde_json::from_str(s.content.get())
|
||||
.map(|c: RoomJoinRulesEventContent| c.join_rule)
|
||||
.map_err(|e| {
|
||||
error!("Invalid room join rule event in database: {}", e);
|
||||
Error::BadDatabase("Invalid room join rule event in database.")
|
||||
})
|
||||
})
|
||||
.transpose()?
|
||||
.unwrap_or(JoinRule::Invite);
|
||||
let chunk = self.get_room_chunk(sender_user, ¤t_room, children_pdus).await;
|
||||
if let Ok(chunk) = chunk {
|
||||
if left_to_skip > 0 {
|
||||
left_to_skip -= 1;
|
||||
} else {
|
||||
results.push(chunk.clone());
|
||||
}
|
||||
let join_rule = services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(¤t_room, &StateEventType::RoomJoinRules, "")?
|
||||
.map(|s| {
|
||||
serde_json::from_str(s.content.get())
|
||||
.map(|c: RoomJoinRulesEventContent| c.join_rule)
|
||||
.map_err(|e| {
|
||||
error!("Invalid room join rule event in database: {}", e);
|
||||
Error::BadDatabase("Invalid room join rule event in database.")
|
||||
})
|
||||
})
|
||||
.transpose()?
|
||||
.unwrap_or(JoinRule::Invite);
|
||||
|
||||
self.roomid_spacechunk_cache.lock().unwrap().insert(
|
||||
current_room.clone(),
|
||||
Some(CachedSpaceChunk {
|
||||
chunk,
|
||||
children: children_ids.clone(),
|
||||
join_rule: CachedJoinRule::Full(join_rule),
|
||||
}),
|
||||
);
|
||||
}
|
||||
self.roomid_spacechunk_cache.lock().unwrap().insert(
|
||||
current_room.clone(),
|
||||
Some(CachedSpaceChunk {
|
||||
chunk,
|
||||
children: children_ids.clone(),
|
||||
join_rule: CachedJoinRule::Full(join_rule),
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
if rooms_in_path.len() < max_depth {
|
||||
stack.push(children_ids);
|
||||
}
|
||||
} else if let Some(server) = current_room.server_name() {
|
||||
if server == services().globals.server_name() {
|
||||
continue;
|
||||
}
|
||||
if rooms_in_path.len() < max_depth {
|
||||
stack.push(children_ids);
|
||||
}
|
||||
} else if let Some(server) = current_room.server_name() {
|
||||
if server == services().globals.server_name() {
|
||||
continue;
|
||||
}
|
||||
|
||||
if !results.is_empty() {
|
||||
// Early return so the client can see some data already
|
||||
break;
|
||||
}
|
||||
if !results.is_empty() {
|
||||
// Early return so the client can see some data already
|
||||
break;
|
||||
}
|
||||
|
||||
debug!("Asking {server} for /hierarchy");
|
||||
if let Ok(response) = services()
|
||||
.sending
|
||||
.send_federation_request(
|
||||
server,
|
||||
federation::space::get_hierarchy::v1::Request {
|
||||
room_id: current_room.clone(),
|
||||
suggested_only,
|
||||
},
|
||||
)
|
||||
.await
|
||||
{
|
||||
debug!("Got response from {server} for /hierarchy\n{response:?}");
|
||||
let chunk = SpaceHierarchyRoomsChunk {
|
||||
canonical_alias: response.room.canonical_alias,
|
||||
name: response.room.name,
|
||||
num_joined_members: response.room.num_joined_members,
|
||||
room_id: response.room.room_id,
|
||||
topic: response.room.topic,
|
||||
world_readable: response.room.world_readable,
|
||||
guest_can_join: response.room.guest_can_join,
|
||||
avatar_url: response.room.avatar_url,
|
||||
join_rule: response.room.join_rule.clone(),
|
||||
room_type: response.room.room_type,
|
||||
children_state: response.room.children_state,
|
||||
};
|
||||
let children = response
|
||||
.children
|
||||
.iter()
|
||||
.map(|c| c.room_id.clone())
|
||||
.collect::<Vec<_>>();
|
||||
debug!("Asking {server} for /hierarchy");
|
||||
if let Ok(response) = services()
|
||||
.sending
|
||||
.send_federation_request(
|
||||
server,
|
||||
federation::space::get_hierarchy::v1::Request {
|
||||
room_id: current_room.clone(),
|
||||
suggested_only,
|
||||
},
|
||||
)
|
||||
.await
|
||||
{
|
||||
debug!("Got response from {server} for /hierarchy\n{response:?}");
|
||||
let chunk = SpaceHierarchyRoomsChunk {
|
||||
canonical_alias: response.room.canonical_alias,
|
||||
name: response.room.name,
|
||||
num_joined_members: response.room.num_joined_members,
|
||||
room_id: response.room.room_id,
|
||||
topic: response.room.topic,
|
||||
world_readable: response.room.world_readable,
|
||||
guest_can_join: response.room.guest_can_join,
|
||||
avatar_url: response.room.avatar_url,
|
||||
join_rule: response.room.join_rule.clone(),
|
||||
room_type: response.room.room_type,
|
||||
children_state: response.room.children_state,
|
||||
};
|
||||
let children = response.children.iter().map(|c| c.room_id.clone()).collect::<Vec<_>>();
|
||||
|
||||
let join_rule = match response.room.join_rule {
|
||||
SpaceRoomJoinRule::Invite => JoinRule::Invite,
|
||||
SpaceRoomJoinRule::Knock => JoinRule::Knock,
|
||||
SpaceRoomJoinRule::Private => JoinRule::Private,
|
||||
SpaceRoomJoinRule::Restricted => {
|
||||
JoinRule::Restricted(join_rules::Restricted {
|
||||
allow: response
|
||||
.room
|
||||
.allowed_room_ids
|
||||
.into_iter()
|
||||
.map(AllowRule::room_membership)
|
||||
.collect(),
|
||||
})
|
||||
}
|
||||
SpaceRoomJoinRule::KnockRestricted => {
|
||||
JoinRule::KnockRestricted(join_rules::Restricted {
|
||||
allow: response
|
||||
.room
|
||||
.allowed_room_ids
|
||||
.into_iter()
|
||||
.map(AllowRule::room_membership)
|
||||
.collect(),
|
||||
})
|
||||
}
|
||||
SpaceRoomJoinRule::Public => JoinRule::Public,
|
||||
_ => return Err(Error::BadServerResponse("Unknown join rule")),
|
||||
};
|
||||
if self.handle_join_rule(&join_rule, sender_user, ¤t_room)? {
|
||||
if left_to_skip > 0 {
|
||||
left_to_skip -= 1;
|
||||
} else {
|
||||
results.push(chunk.clone());
|
||||
}
|
||||
if rooms_in_path.len() < max_depth {
|
||||
stack.push(children.clone());
|
||||
}
|
||||
}
|
||||
let join_rule = match response.room.join_rule {
|
||||
SpaceRoomJoinRule::Invite => JoinRule::Invite,
|
||||
SpaceRoomJoinRule::Knock => JoinRule::Knock,
|
||||
SpaceRoomJoinRule::Private => JoinRule::Private,
|
||||
SpaceRoomJoinRule::Restricted => JoinRule::Restricted(join_rules::Restricted {
|
||||
allow: response.room.allowed_room_ids.into_iter().map(AllowRule::room_membership).collect(),
|
||||
}),
|
||||
SpaceRoomJoinRule::KnockRestricted => JoinRule::KnockRestricted(join_rules::Restricted {
|
||||
allow: response.room.allowed_room_ids.into_iter().map(AllowRule::room_membership).collect(),
|
||||
}),
|
||||
SpaceRoomJoinRule::Public => JoinRule::Public,
|
||||
_ => return Err(Error::BadServerResponse("Unknown join rule")),
|
||||
};
|
||||
if self.handle_join_rule(&join_rule, sender_user, ¤t_room)? {
|
||||
if left_to_skip > 0 {
|
||||
left_to_skip -= 1;
|
||||
} else {
|
||||
results.push(chunk.clone());
|
||||
}
|
||||
if rooms_in_path.len() < max_depth {
|
||||
stack.push(children.clone());
|
||||
}
|
||||
}
|
||||
|
||||
self.roomid_spacechunk_cache.lock().unwrap().insert(
|
||||
current_room.clone(),
|
||||
Some(CachedSpaceChunk {
|
||||
chunk,
|
||||
children,
|
||||
join_rule: CachedJoinRule::Full(join_rule),
|
||||
}),
|
||||
);
|
||||
self.roomid_spacechunk_cache.lock().unwrap().insert(
|
||||
current_room.clone(),
|
||||
Some(CachedSpaceChunk {
|
||||
chunk,
|
||||
children,
|
||||
join_rule: CachedJoinRule::Full(join_rule),
|
||||
}),
|
||||
);
|
||||
|
||||
/* TODO:
|
||||
for child in response.children {
|
||||
roomid_spacechunk_cache.insert(
|
||||
current_room.clone(),
|
||||
CachedSpaceChunk {
|
||||
chunk: child.chunk,
|
||||
children,
|
||||
join_rule,
|
||||
},
|
||||
);
|
||||
}
|
||||
*/
|
||||
} else {
|
||||
self.roomid_spacechunk_cache
|
||||
.lock()
|
||||
.unwrap()
|
||||
.insert(current_room.clone(), None);
|
||||
}
|
||||
}
|
||||
}
|
||||
/* TODO:
|
||||
for child in response.children {
|
||||
roomid_spacechunk_cache.insert(
|
||||
current_room.clone(),
|
||||
CachedSpaceChunk {
|
||||
chunk: child.chunk,
|
||||
children,
|
||||
join_rule,
|
||||
},
|
||||
);
|
||||
}
|
||||
*/
|
||||
} else {
|
||||
self.roomid_spacechunk_cache.lock().unwrap().insert(current_room.clone(), None);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(get_hierarchy::v1::Response {
|
||||
next_batch: if results.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some((skip + results.len()).to_string())
|
||||
},
|
||||
rooms: results,
|
||||
})
|
||||
}
|
||||
Ok(get_hierarchy::v1::Response {
|
||||
next_batch: if results.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some((skip + results.len()).to_string())
|
||||
},
|
||||
rooms: results,
|
||||
})
|
||||
}
|
||||
|
||||
async fn get_room_chunk(
|
||||
&self,
|
||||
sender_user: &UserId,
|
||||
room_id: &RoomId,
|
||||
children: Vec<Arc<PduEvent>>,
|
||||
) -> Result<SpaceHierarchyRoomsChunk> {
|
||||
Ok(SpaceHierarchyRoomsChunk {
|
||||
canonical_alias: services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(room_id, &StateEventType::RoomCanonicalAlias, "")?
|
||||
.map_or(Ok(None), |s| {
|
||||
serde_json::from_str(s.content.get())
|
||||
.map(|c: RoomCanonicalAliasEventContent| c.alias)
|
||||
.map_err(|_| {
|
||||
Error::bad_database("Invalid canonical alias event in database.")
|
||||
})
|
||||
})?,
|
||||
name: services().rooms.state_accessor.get_name(room_id)?,
|
||||
num_joined_members: services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.room_joined_count(room_id)?
|
||||
.unwrap_or_else(|| {
|
||||
warn!("Room {} has no member count", room_id);
|
||||
0
|
||||
})
|
||||
.try_into()
|
||||
.expect("user count should not be that big"),
|
||||
room_id: room_id.to_owned(),
|
||||
topic: services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(room_id, &StateEventType::RoomTopic, "")?
|
||||
.map_or(Ok(None), |s| {
|
||||
serde_json::from_str(s.content.get())
|
||||
.map(|c: RoomTopicEventContent| Some(c.topic))
|
||||
.map_err(|_| {
|
||||
error!("Invalid room topic event in database for room {}", room_id);
|
||||
Error::bad_database("Invalid room topic event in database.")
|
||||
})
|
||||
})
|
||||
.unwrap_or(None),
|
||||
world_readable: services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(room_id, &StateEventType::RoomHistoryVisibility, "")?
|
||||
.map_or(Ok(false), |s| {
|
||||
serde_json::from_str(s.content.get())
|
||||
.map(|c: RoomHistoryVisibilityEventContent| {
|
||||
c.history_visibility == HistoryVisibility::WorldReadable
|
||||
})
|
||||
.map_err(|_| {
|
||||
Error::bad_database(
|
||||
"Invalid room history visibility event in database.",
|
||||
)
|
||||
})
|
||||
})?,
|
||||
guest_can_join: services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(room_id, &StateEventType::RoomGuestAccess, "")?
|
||||
.map_or(Ok(false), |s| {
|
||||
serde_json::from_str(s.content.get())
|
||||
.map(|c: RoomGuestAccessEventContent| {
|
||||
c.guest_access == GuestAccess::CanJoin
|
||||
})
|
||||
.map_err(|_| {
|
||||
Error::bad_database("Invalid room guest access event in database.")
|
||||
})
|
||||
})?,
|
||||
avatar_url: services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(room_id, &StateEventType::RoomAvatar, "")?
|
||||
.map(|s| {
|
||||
serde_json::from_str(s.content.get())
|
||||
.map(|c: RoomAvatarEventContent| c.url)
|
||||
.map_err(|_| Error::bad_database("Invalid room avatar event in database."))
|
||||
})
|
||||
.transpose()?
|
||||
// url is now an Option<String> so we must flatten
|
||||
.flatten(),
|
||||
join_rule: {
|
||||
let join_rule = services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(room_id, &StateEventType::RoomJoinRules, "")?
|
||||
.map(|s| {
|
||||
serde_json::from_str(s.content.get())
|
||||
.map(|c: RoomJoinRulesEventContent| c.join_rule)
|
||||
.map_err(|e| {
|
||||
error!("Invalid room join rule event in database: {}", e);
|
||||
Error::BadDatabase("Invalid room join rule event in database.")
|
||||
})
|
||||
})
|
||||
.transpose()?
|
||||
.unwrap_or(JoinRule::Invite);
|
||||
async fn get_room_chunk(
|
||||
&self, sender_user: &UserId, room_id: &RoomId, children: Vec<Arc<PduEvent>>,
|
||||
) -> Result<SpaceHierarchyRoomsChunk> {
|
||||
Ok(SpaceHierarchyRoomsChunk {
|
||||
canonical_alias: services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(room_id, &StateEventType::RoomCanonicalAlias, "")?
|
||||
.map_or(Ok(None), |s| {
|
||||
serde_json::from_str(s.content.get())
|
||||
.map(|c: RoomCanonicalAliasEventContent| c.alias)
|
||||
.map_err(|_| Error::bad_database("Invalid canonical alias event in database."))
|
||||
})?,
|
||||
name: services().rooms.state_accessor.get_name(room_id)?,
|
||||
num_joined_members: services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.room_joined_count(room_id)?
|
||||
.unwrap_or_else(|| {
|
||||
warn!("Room {} has no member count", room_id);
|
||||
0
|
||||
})
|
||||
.try_into()
|
||||
.expect("user count should not be that big"),
|
||||
room_id: room_id.to_owned(),
|
||||
topic: services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(room_id, &StateEventType::RoomTopic, "")?
|
||||
.map_or(Ok(None), |s| {
|
||||
serde_json::from_str(s.content.get()).map(|c: RoomTopicEventContent| Some(c.topic)).map_err(|_| {
|
||||
error!("Invalid room topic event in database for room {}", room_id);
|
||||
Error::bad_database("Invalid room topic event in database.")
|
||||
})
|
||||
})
|
||||
.unwrap_or(None),
|
||||
world_readable: services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(room_id, &StateEventType::RoomHistoryVisibility, "")?
|
||||
.map_or(Ok(false), |s| {
|
||||
serde_json::from_str(s.content.get())
|
||||
.map(|c: RoomHistoryVisibilityEventContent| {
|
||||
c.history_visibility == HistoryVisibility::WorldReadable
|
||||
})
|
||||
.map_err(|_| Error::bad_database("Invalid room history visibility event in database."))
|
||||
})?,
|
||||
guest_can_join: services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(room_id, &StateEventType::RoomGuestAccess, "")?
|
||||
.map_or(Ok(false), |s| {
|
||||
serde_json::from_str(s.content.get())
|
||||
.map(|c: RoomGuestAccessEventContent| c.guest_access == GuestAccess::CanJoin)
|
||||
.map_err(|_| Error::bad_database("Invalid room guest access event in database."))
|
||||
})?,
|
||||
avatar_url: services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(room_id, &StateEventType::RoomAvatar, "")?
|
||||
.map(|s| {
|
||||
serde_json::from_str(s.content.get())
|
||||
.map(|c: RoomAvatarEventContent| c.url)
|
||||
.map_err(|_| Error::bad_database("Invalid room avatar event in database."))
|
||||
})
|
||||
.transpose()?
|
||||
// url is now an Option<String> so we must flatten
|
||||
.flatten(),
|
||||
join_rule: {
|
||||
let join_rule = services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(room_id, &StateEventType::RoomJoinRules, "")?
|
||||
.map(|s| {
|
||||
serde_json::from_str(s.content.get()).map(|c: RoomJoinRulesEventContent| c.join_rule).map_err(
|
||||
|e| {
|
||||
error!("Invalid room join rule event in database: {}", e);
|
||||
Error::BadDatabase("Invalid room join rule event in database.")
|
||||
},
|
||||
)
|
||||
})
|
||||
.transpose()?
|
||||
.unwrap_or(JoinRule::Invite);
|
||||
|
||||
if !self.handle_join_rule(&join_rule, sender_user, room_id)? {
|
||||
debug!("User is not allowed to see room {room_id}");
|
||||
// This error will be caught later
|
||||
return Err(Error::BadRequest(
|
||||
ErrorKind::Forbidden,
|
||||
"User is not allowed to see the room",
|
||||
));
|
||||
}
|
||||
if !self.handle_join_rule(&join_rule, sender_user, room_id)? {
|
||||
debug!("User is not allowed to see room {room_id}");
|
||||
// This error will be caught later
|
||||
return Err(Error::BadRequest(ErrorKind::Forbidden, "User is not allowed to see the room"));
|
||||
}
|
||||
|
||||
self.translate_joinrule(&join_rule)?
|
||||
},
|
||||
room_type: services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(room_id, &StateEventType::RoomCreate, "")?
|
||||
.map(|s| {
|
||||
serde_json::from_str::<RoomCreateEventContent>(s.content.get()).map_err(|e| {
|
||||
error!("Invalid room create event in database: {}", e);
|
||||
Error::BadDatabase("Invalid room create event in database.")
|
||||
})
|
||||
})
|
||||
.transpose()?
|
||||
.and_then(|e| e.room_type),
|
||||
children_state: children
|
||||
.into_iter()
|
||||
.map(|pdu| pdu.to_stripped_spacechild_state_event())
|
||||
.collect(),
|
||||
})
|
||||
}
|
||||
self.translate_joinrule(&join_rule)?
|
||||
},
|
||||
room_type: services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(room_id, &StateEventType::RoomCreate, "")?
|
||||
.map(|s| {
|
||||
serde_json::from_str::<RoomCreateEventContent>(s.content.get()).map_err(|e| {
|
||||
error!("Invalid room create event in database: {}", e);
|
||||
Error::BadDatabase("Invalid room create event in database.")
|
||||
})
|
||||
})
|
||||
.transpose()?
|
||||
.and_then(|e| e.room_type),
|
||||
children_state: children.into_iter().map(|pdu| pdu.to_stripped_spacechild_state_event()).collect(),
|
||||
})
|
||||
}
|
||||
|
||||
fn translate_joinrule(&self, join_rule: &JoinRule) -> Result<SpaceRoomJoinRule> {
|
||||
match join_rule {
|
||||
JoinRule::Invite => Ok(SpaceRoomJoinRule::Invite),
|
||||
JoinRule::Knock => Ok(SpaceRoomJoinRule::Knock),
|
||||
JoinRule::Private => Ok(SpaceRoomJoinRule::Private),
|
||||
JoinRule::Restricted(_) => Ok(SpaceRoomJoinRule::Restricted),
|
||||
JoinRule::KnockRestricted(_) => Ok(SpaceRoomJoinRule::KnockRestricted),
|
||||
JoinRule::Public => Ok(SpaceRoomJoinRule::Public),
|
||||
_ => Err(Error::BadServerResponse("Unknown join rule")),
|
||||
}
|
||||
}
|
||||
fn translate_joinrule(&self, join_rule: &JoinRule) -> Result<SpaceRoomJoinRule> {
|
||||
match join_rule {
|
||||
JoinRule::Invite => Ok(SpaceRoomJoinRule::Invite),
|
||||
JoinRule::Knock => Ok(SpaceRoomJoinRule::Knock),
|
||||
JoinRule::Private => Ok(SpaceRoomJoinRule::Private),
|
||||
JoinRule::Restricted(_) => Ok(SpaceRoomJoinRule::Restricted),
|
||||
JoinRule::KnockRestricted(_) => Ok(SpaceRoomJoinRule::KnockRestricted),
|
||||
JoinRule::Public => Ok(SpaceRoomJoinRule::Public),
|
||||
_ => Err(Error::BadServerResponse("Unknown join rule")),
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_simplified_join_rule(
|
||||
&self,
|
||||
join_rule: &SpaceRoomJoinRule,
|
||||
sender_user: &UserId,
|
||||
room_id: &RoomId,
|
||||
) -> Result<bool> {
|
||||
let allowed = match join_rule {
|
||||
SpaceRoomJoinRule::Public => true,
|
||||
SpaceRoomJoinRule::Knock => true,
|
||||
SpaceRoomJoinRule::Invite => services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.is_joined(sender_user, room_id)?,
|
||||
_ => false,
|
||||
};
|
||||
fn handle_simplified_join_rule(
|
||||
&self, join_rule: &SpaceRoomJoinRule, sender_user: &UserId, room_id: &RoomId,
|
||||
) -> Result<bool> {
|
||||
let allowed = match join_rule {
|
||||
SpaceRoomJoinRule::Public => true,
|
||||
SpaceRoomJoinRule::Knock => true,
|
||||
SpaceRoomJoinRule::Invite => services().rooms.state_cache.is_joined(sender_user, room_id)?,
|
||||
_ => false,
|
||||
};
|
||||
|
||||
Ok(allowed)
|
||||
}
|
||||
Ok(allowed)
|
||||
}
|
||||
|
||||
fn handle_join_rule(
|
||||
&self,
|
||||
join_rule: &JoinRule,
|
||||
sender_user: &UserId,
|
||||
room_id: &RoomId,
|
||||
) -> Result<bool> {
|
||||
if self.handle_simplified_join_rule(
|
||||
&self.translate_joinrule(join_rule)?,
|
||||
sender_user,
|
||||
room_id,
|
||||
)? {
|
||||
return Ok(true);
|
||||
}
|
||||
fn handle_join_rule(&self, join_rule: &JoinRule, sender_user: &UserId, room_id: &RoomId) -> Result<bool> {
|
||||
if self.handle_simplified_join_rule(&self.translate_joinrule(join_rule)?, sender_user, room_id)? {
|
||||
return Ok(true);
|
||||
}
|
||||
|
||||
match join_rule {
|
||||
JoinRule::Restricted(r) => {
|
||||
for rule in &r.allow {
|
||||
if let join_rules::AllowRule::RoomMembership(rm) = rule {
|
||||
if let Ok(true) = services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.is_joined(sender_user, &rm.room_id)
|
||||
{
|
||||
return Ok(true);
|
||||
}
|
||||
}
|
||||
}
|
||||
match join_rule {
|
||||
JoinRule::Restricted(r) => {
|
||||
for rule in &r.allow {
|
||||
if let join_rules::AllowRule::RoomMembership(rm) = rule {
|
||||
if let Ok(true) = services().rooms.state_cache.is_joined(sender_user, &rm.room_id) {
|
||||
return Ok(true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(false)
|
||||
}
|
||||
JoinRule::KnockRestricted(_) => {
|
||||
// TODO: Check rules
|
||||
Ok(false)
|
||||
}
|
||||
_ => Ok(false),
|
||||
}
|
||||
}
|
||||
Ok(false)
|
||||
},
|
||||
JoinRule::KnockRestricted(_) => {
|
||||
// TODO: Check rules
|
||||
Ok(false)
|
||||
},
|
||||
_ => Ok(false),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -1,31 +1,33 @@
use crate::Result;
use ruma::{EventId, OwnedEventId, RoomId};
use std::{collections::HashSet, sync::Arc};

use ruma::{EventId, OwnedEventId, RoomId};
use tokio::sync::MutexGuard;

use crate::Result;

pub trait Data: Send + Sync {
	/// Returns the last state hash key added to the db for the given room.
	fn get_room_shortstatehash(&self, room_id: &RoomId) -> Result<Option<u64>>;
	/// Returns the last state hash key added to the db for the given room.
	fn get_room_shortstatehash(&self, room_id: &RoomId) -> Result<Option<u64>>;

	/// Set the state hash to a new version, but does not update state_cache.
	fn set_room_state(
		&self,
		room_id: &RoomId,
		new_shortstatehash: u64,
		_mutex_lock: &MutexGuard<'_, ()>, // Take mutex guard to make sure users get the room state mutex
	) -> Result<()>;
	/// Set the state hash to a new version, but does not update state_cache.
	fn set_room_state(
		&self,
		room_id: &RoomId,
		new_shortstatehash: u64,
		_mutex_lock: &MutexGuard<'_, ()>, // Take mutex guard to make sure users get the room state mutex
	) -> Result<()>;

	/// Associates a state with an event.
	fn set_event_state(&self, shorteventid: u64, shortstatehash: u64) -> Result<()>;
	/// Associates a state with an event.
	fn set_event_state(&self, shorteventid: u64, shortstatehash: u64) -> Result<()>;

	/// Returns all events we would send as the prev_events of the next event.
	fn get_forward_extremities(&self, room_id: &RoomId) -> Result<HashSet<Arc<EventId>>>;
	/// Returns all events we would send as the prev_events of the next event.
	fn get_forward_extremities(&self, room_id: &RoomId) -> Result<HashSet<Arc<EventId>>>;

	/// Replace the forward extremities of the room.
	fn set_forward_extremities(
		&self,
		room_id: &RoomId,
		event_ids: Vec<OwnedEventId>,
		_mutex_lock: &MutexGuard<'_, ()>, // Take mutex guard to make sure users get the room state mutex
	) -> Result<()>;
	/// Replace the forward extremities of the room.
	fn set_forward_extremities(
		&self,
		room_id: &RoomId,
		event_ids: Vec<OwnedEventId>,
		_mutex_lock: &MutexGuard<'_, ()>, // Take mutex guard to make sure users get the room state mutex
	) -> Result<()>;
}

@@ -1,430 +1,329 @@
|
|||
mod data;
|
||||
use std::{
|
||||
collections::{HashMap, HashSet},
|
||||
sync::Arc,
|
||||
collections::{HashMap, HashSet},
|
||||
sync::Arc,
|
||||
};
|
||||
|
||||
pub use data::Data;
|
||||
use ruma::{
|
||||
api::client::error::ErrorKind,
|
||||
events::{
|
||||
room::{create::RoomCreateEventContent, member::RoomMemberEventContent},
|
||||
AnyStrippedStateEvent, StateEventType, TimelineEventType,
|
||||
},
|
||||
serde::Raw,
|
||||
state_res::{self, StateMap},
|
||||
EventId, OwnedEventId, RoomId, RoomVersionId, UserId,
|
||||
api::client::error::ErrorKind,
|
||||
events::{
|
||||
room::{create::RoomCreateEventContent, member::RoomMemberEventContent},
|
||||
AnyStrippedStateEvent, StateEventType, TimelineEventType,
|
||||
},
|
||||
serde::Raw,
|
||||
state_res::{self, StateMap},
|
||||
EventId, OwnedEventId, RoomId, RoomVersionId, UserId,
|
||||
};
|
||||
use tokio::sync::MutexGuard;
|
||||
use tracing::warn;
|
||||
|
||||
use super::state_compressor::CompressedStateEvent;
|
||||
use crate::{services, utils::calculate_hash, Error, PduEvent, Result};
|
||||
|
||||
use super::state_compressor::CompressedStateEvent;
|
||||
|
||||
pub struct Service {
|
||||
pub db: &'static dyn Data,
|
||||
pub db: &'static dyn Data,
|
||||
}
|
||||
|
||||
impl Service {
|
||||
/// Set the room to the given statehash and update caches.
|
||||
pub async fn force_state(
|
||||
&self,
|
||||
room_id: &RoomId,
|
||||
shortstatehash: u64,
|
||||
statediffnew: Arc<HashSet<CompressedStateEvent>>,
|
||||
_statediffremoved: Arc<HashSet<CompressedStateEvent>>,
|
||||
state_lock: &MutexGuard<'_, ()>, // Take mutex guard to make sure users get the room state mutex
|
||||
) -> Result<()> {
|
||||
for event_id in statediffnew.iter().filter_map(|new| {
|
||||
services()
|
||||
.rooms
|
||||
.state_compressor
|
||||
.parse_compressed_state_event(new)
|
||||
.ok()
|
||||
.map(|(_, id)| id)
|
||||
}) {
|
||||
let pdu = match services().rooms.timeline.get_pdu_json(&event_id)? {
|
||||
Some(pdu) => pdu,
|
||||
None => continue,
|
||||
};
|
||||
/// Set the room to the given statehash and update caches.
|
||||
pub async fn force_state(
|
||||
&self,
|
||||
room_id: &RoomId,
|
||||
shortstatehash: u64,
|
||||
statediffnew: Arc<HashSet<CompressedStateEvent>>,
|
||||
_statediffremoved: Arc<HashSet<CompressedStateEvent>>,
|
||||
state_lock: &MutexGuard<'_, ()>, // Take mutex guard to make sure users get the room state mutex
|
||||
) -> Result<()> {
|
||||
for event_id in statediffnew.iter().filter_map(|new| {
|
||||
services().rooms.state_compressor.parse_compressed_state_event(new).ok().map(|(_, id)| id)
|
||||
}) {
|
||||
let pdu = match services().rooms.timeline.get_pdu_json(&event_id)? {
|
||||
Some(pdu) => pdu,
|
||||
None => continue,
|
||||
};
|
||||
|
||||
let pdu: PduEvent = match serde_json::from_str(
|
||||
&serde_json::to_string(&pdu).expect("CanonicalJsonObj can be serialized to JSON"),
|
||||
) {
|
||||
Ok(pdu) => pdu,
|
||||
Err(_) => continue,
|
||||
};
|
||||
let pdu: PduEvent = match serde_json::from_str(
|
||||
&serde_json::to_string(&pdu).expect("CanonicalJsonObj can be serialized to JSON"),
|
||||
) {
|
||||
Ok(pdu) => pdu,
|
||||
Err(_) => continue,
|
||||
};
|
||||
|
||||
match pdu.kind {
|
||||
TimelineEventType::RoomMember => {
|
||||
let membership_event =
|
||||
match serde_json::from_str::<RoomMemberEventContent>(pdu.content.get()) {
|
||||
Ok(e) => e,
|
||||
Err(_) => continue,
|
||||
};
|
||||
match pdu.kind {
|
||||
TimelineEventType::RoomMember => {
|
||||
let membership_event = match serde_json::from_str::<RoomMemberEventContent>(pdu.content.get()) {
|
||||
Ok(e) => e,
|
||||
Err(_) => continue,
|
||||
};
|
||||
|
||||
let state_key = match pdu.state_key {
|
||||
Some(k) => k,
|
||||
None => continue,
|
||||
};
|
||||
let state_key = match pdu.state_key {
|
||||
Some(k) => k,
|
||||
None => continue,
|
||||
};
|
||||
|
||||
let user_id = match UserId::parse(state_key) {
|
||||
Ok(id) => id,
|
||||
Err(_) => continue,
|
||||
};
|
||||
let user_id = match UserId::parse(state_key) {
|
||||
Ok(id) => id,
|
||||
Err(_) => continue,
|
||||
};
|
||||
|
||||
services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.update_membership(
|
||||
room_id,
|
||||
&user_id,
|
||||
membership_event,
|
||||
&pdu.sender,
|
||||
None,
|
||||
false,
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
TimelineEventType::SpaceChild => {
|
||||
services()
|
||||
.rooms
|
||||
.spaces
|
||||
.roomid_spacechunk_cache
|
||||
.lock()
|
||||
.unwrap()
|
||||
.remove(&pdu.room_id);
|
||||
}
|
||||
_ => continue,
|
||||
}
|
||||
}
|
||||
services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.update_membership(room_id, &user_id, membership_event, &pdu.sender, None, false)
|
||||
.await?;
|
||||
},
|
||||
TimelineEventType::SpaceChild => {
|
||||
services().rooms.spaces.roomid_spacechunk_cache.lock().unwrap().remove(&pdu.room_id);
|
||||
},
|
||||
_ => continue,
|
||||
}
|
||||
}
|
||||
|
||||
services().rooms.state_cache.update_joined_count(room_id)?;
|
||||
services().rooms.state_cache.update_joined_count(room_id)?;
|
||||
|
||||
self.db
|
||||
.set_room_state(room_id, shortstatehash, state_lock)?;
|
||||
self.db.set_room_state(room_id, shortstatehash, state_lock)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Generates a new StateHash and associates it with the incoming event.
|
||||
///
|
||||
/// This adds all current state events (not including the incoming event)
|
||||
/// to `stateid_pduid` and adds the incoming event to `eventid_statehash`.
|
||||
#[tracing::instrument(skip(self, state_ids_compressed))]
|
||||
pub fn set_event_state(
|
||||
&self,
|
||||
event_id: &EventId,
|
||||
room_id: &RoomId,
|
||||
state_ids_compressed: Arc<HashSet<CompressedStateEvent>>,
|
||||
) -> Result<u64> {
|
||||
let shorteventid = services()
|
||||
.rooms
|
||||
.short
|
||||
.get_or_create_shorteventid(event_id)?;
|
||||
/// Generates a new StateHash and associates it with the incoming event.
|
||||
///
|
||||
/// This adds all current state events (not including the incoming event)
|
||||
/// to `stateid_pduid` and adds the incoming event to `eventid_statehash`.
|
||||
#[tracing::instrument(skip(self, state_ids_compressed))]
|
||||
pub fn set_event_state(
|
||||
&self, event_id: &EventId, room_id: &RoomId, state_ids_compressed: Arc<HashSet<CompressedStateEvent>>,
|
||||
) -> Result<u64> {
|
||||
let shorteventid = services().rooms.short.get_or_create_shorteventid(event_id)?;
|
||||
|
||||
let previous_shortstatehash = self.db.get_room_shortstatehash(room_id)?;
|
||||
let previous_shortstatehash = self.db.get_room_shortstatehash(room_id)?;
|
||||
|
||||
let state_hash = calculate_hash(
|
||||
&state_ids_compressed
|
||||
.iter()
|
||||
.map(|s| &s[..])
|
||||
.collect::<Vec<_>>(),
|
||||
);
|
||||
let state_hash = calculate_hash(&state_ids_compressed.iter().map(|s| &s[..]).collect::<Vec<_>>());
|
||||
|
||||
let (shortstatehash, already_existed) = services()
|
||||
.rooms
|
||||
.short
|
||||
.get_or_create_shortstatehash(&state_hash)?;
|
||||
let (shortstatehash, already_existed) = services().rooms.short.get_or_create_shortstatehash(&state_hash)?;
|
||||
|
||||
if !already_existed {
|
||||
let states_parents = previous_shortstatehash.map_or_else(
|
||||
|| Ok(Vec::new()),
|
||||
|p| {
|
||||
services()
|
||||
.rooms
|
||||
.state_compressor
|
||||
.load_shortstatehash_info(p)
|
||||
},
|
||||
)?;
|
||||
if !already_existed {
|
||||
let states_parents = previous_shortstatehash.map_or_else(
|
||||
|| Ok(Vec::new()),
|
||||
|p| services().rooms.state_compressor.load_shortstatehash_info(p),
|
||||
)?;
|
||||
|
||||
let (statediffnew, statediffremoved) =
|
||||
if let Some(parent_stateinfo) = states_parents.last() {
|
||||
let statediffnew: HashSet<_> = state_ids_compressed
|
||||
.difference(&parent_stateinfo.1)
|
||||
.copied()
|
||||
.collect();
|
||||
let (statediffnew, statediffremoved) = if let Some(parent_stateinfo) = states_parents.last() {
|
||||
let statediffnew: HashSet<_> = state_ids_compressed.difference(&parent_stateinfo.1).copied().collect();
|
||||
|
||||
let statediffremoved: HashSet<_> = parent_stateinfo
|
||||
.1
|
||||
.difference(&state_ids_compressed)
|
||||
.copied()
|
||||
.collect();
|
||||
let statediffremoved: HashSet<_> =
|
||||
parent_stateinfo.1.difference(&state_ids_compressed).copied().collect();
|
||||
|
||||
(Arc::new(statediffnew), Arc::new(statediffremoved))
|
||||
} else {
|
||||
(state_ids_compressed, Arc::new(HashSet::new()))
|
||||
};
|
||||
services().rooms.state_compressor.save_state_from_diff(
|
||||
shortstatehash,
|
||||
statediffnew,
|
||||
statediffremoved,
|
||||
1_000_000, // high number because no state will be based on this one
|
||||
states_parents,
|
||||
)?;
|
||||
}
|
||||
(Arc::new(statediffnew), Arc::new(statediffremoved))
|
||||
} else {
|
||||
(state_ids_compressed, Arc::new(HashSet::new()))
|
||||
};
|
||||
services().rooms.state_compressor.save_state_from_diff(
|
||||
shortstatehash,
|
||||
statediffnew,
|
||||
statediffremoved,
|
||||
1_000_000, // high number because no state will be based on this one
|
||||
states_parents,
|
||||
)?;
|
||||
}
|
||||
|
||||
self.db.set_event_state(shorteventid, shortstatehash)?;
|
||||
self.db.set_event_state(shorteventid, shortstatehash)?;
|
||||
|
||||
Ok(shortstatehash)
|
||||
}
|
||||
Ok(shortstatehash)
|
||||
}
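The save_state_from_diff call above persists only what changed relative to the parent state layer: entries present in the new state but not the parent, and entries the parent had that are now gone. A minimal, std-only sketch of that diff step; CompressedStateEvent and diff_against_parent are placeholder names for illustration, not the server's real types:

use std::collections::HashSet;
use std::sync::Arc;

// Placeholder for the compressed `shortstatekey ++ shorteventid` byte pair.
type CompressedStateEvent = Vec<u8>;

/// Returns (added, removed) entries of `new_state` relative to `parent_state`.
fn diff_against_parent(
	new_state: &HashSet<CompressedStateEvent>,
	parent_state: &HashSet<CompressedStateEvent>,
) -> (Arc<HashSet<CompressedStateEvent>>, Arc<HashSet<CompressedStateEvent>>) {
	let added = new_state.difference(parent_state).cloned().collect();
	let removed = parent_state.difference(new_state).cloned().collect();
	(Arc::new(added), Arc::new(removed))
}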
|
||||
|
||||
/// Generates a new StateHash and associates it with the incoming event.
|
||||
///
|
||||
/// This adds all current state events (not including the incoming event)
|
||||
/// to `stateid_pduid` and adds the incoming event to `eventid_statehash`.
|
||||
#[tracing::instrument(skip(self, new_pdu))]
|
||||
pub fn append_to_state(&self, new_pdu: &PduEvent) -> Result<u64> {
|
||||
let shorteventid = services()
|
||||
.rooms
|
||||
.short
|
||||
.get_or_create_shorteventid(&new_pdu.event_id)?;
|
||||
/// Generates a new StateHash and associates it with the incoming event.
|
||||
///
|
||||
/// This adds all current state events (not including the incoming event)
|
||||
/// to `stateid_pduid` and adds the incoming event to `eventid_statehash`.
|
||||
#[tracing::instrument(skip(self, new_pdu))]
|
||||
pub fn append_to_state(&self, new_pdu: &PduEvent) -> Result<u64> {
|
||||
let shorteventid = services().rooms.short.get_or_create_shorteventid(&new_pdu.event_id)?;
|
||||
|
||||
let previous_shortstatehash = self.get_room_shortstatehash(&new_pdu.room_id)?;
|
||||
let previous_shortstatehash = self.get_room_shortstatehash(&new_pdu.room_id)?;
|
||||
|
||||
if let Some(p) = previous_shortstatehash {
|
||||
self.db.set_event_state(shorteventid, p)?;
|
||||
}
|
||||
if let Some(p) = previous_shortstatehash {
|
||||
self.db.set_event_state(shorteventid, p)?;
|
||||
}
|
||||
|
||||
if let Some(state_key) = &new_pdu.state_key {
|
||||
let states_parents = previous_shortstatehash.map_or_else(
|
||||
|| Ok(Vec::new()),
|
||||
|p| {
|
||||
services()
|
||||
.rooms
|
||||
.state_compressor
|
||||
.load_shortstatehash_info(p)
|
||||
},
|
||||
)?;
|
||||
if let Some(state_key) = &new_pdu.state_key {
|
||||
let states_parents = previous_shortstatehash.map_or_else(
|
||||
|| Ok(Vec::new()),
|
||||
|p| services().rooms.state_compressor.load_shortstatehash_info(p),
|
||||
)?;
|
||||
|
||||
let shortstatekey = services()
|
||||
.rooms
|
||||
.short
|
||||
.get_or_create_shortstatekey(&new_pdu.kind.to_string().into(), state_key)?;
|
||||
let shortstatekey =
|
||||
services().rooms.short.get_or_create_shortstatekey(&new_pdu.kind.to_string().into(), state_key)?;
|
||||
|
||||
let new = services()
|
||||
.rooms
|
||||
.state_compressor
|
||||
.compress_state_event(shortstatekey, &new_pdu.event_id)?;
|
||||
let new = services().rooms.state_compressor.compress_state_event(shortstatekey, &new_pdu.event_id)?;
|
||||
|
||||
let replaces = states_parents
|
||||
.last()
|
||||
.map(|info| {
|
||||
info.1
|
||||
.iter()
|
||||
.find(|bytes| bytes.starts_with(&shortstatekey.to_be_bytes()))
|
||||
})
|
||||
.unwrap_or_default();
|
||||
let replaces = states_parents
|
||||
.last()
|
||||
.map(|info| info.1.iter().find(|bytes| bytes.starts_with(&shortstatekey.to_be_bytes())))
|
||||
.unwrap_or_default();
|
||||
|
||||
if Some(&new) == replaces {
|
||||
return Ok(previous_shortstatehash.expect("must exist"));
|
||||
}
|
||||
if Some(&new) == replaces {
|
||||
return Ok(previous_shortstatehash.expect("must exist"));
|
||||
}
|
||||
|
||||
// TODO: statehash with deterministic inputs
|
||||
let shortstatehash = services().globals.next_count()?;
|
||||
// TODO: statehash with deterministic inputs
|
||||
let shortstatehash = services().globals.next_count()?;
|
||||
|
||||
let mut statediffnew = HashSet::new();
|
||||
statediffnew.insert(new);
|
||||
let mut statediffnew = HashSet::new();
|
||||
statediffnew.insert(new);
|
||||
|
||||
let mut statediffremoved = HashSet::new();
|
||||
if let Some(replaces) = replaces {
|
||||
statediffremoved.insert(*replaces);
|
||||
}
|
||||
let mut statediffremoved = HashSet::new();
|
||||
if let Some(replaces) = replaces {
|
||||
statediffremoved.insert(*replaces);
|
||||
}
|
||||
|
||||
services().rooms.state_compressor.save_state_from_diff(
|
||||
shortstatehash,
|
||||
Arc::new(statediffnew),
|
||||
Arc::new(statediffremoved),
|
||||
2,
|
||||
states_parents,
|
||||
)?;
|
||||
services().rooms.state_compressor.save_state_from_diff(
|
||||
shortstatehash,
|
||||
Arc::new(statediffnew),
|
||||
Arc::new(statediffremoved),
|
||||
2,
|
||||
states_parents,
|
||||
)?;
|
||||
|
||||
Ok(shortstatehash)
|
||||
} else {
|
||||
Ok(previous_shortstatehash.expect("first event in room must be a state event"))
|
||||
}
|
||||
}
|
||||
Ok(shortstatehash)
|
||||
} else {
|
||||
Ok(previous_shortstatehash.expect("first event in room must be a state event"))
|
||||
}
|
||||
}
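The `replaces` lookup above relies on each compressed entry starting with the big-endian bytes of its shortstatekey, so the entry being superseded can be found by prefix. A simplified sketch of that check, using plain Vec<u8> entries instead of the real compressed type:

use std::collections::HashSet;

/// Find the parent-state entry the new event would replace: the one whose bytes
/// start with the same big-endian shortstatekey prefix.
fn find_replaced(parent_state: &HashSet<Vec<u8>>, shortstatekey: u64) -> Option<&Vec<u8>> {
	parent_state
		.iter()
		.find(|bytes| bytes.starts_with(&shortstatekey.to_be_bytes()))
}

If no such entry exists, nothing is removed from the diff and the event simply adds a new state key, which matches the `statediffremoved` handling above.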
|
||||
|
||||
#[tracing::instrument(skip(self, invite_event))]
|
||||
pub fn calculate_invite_state(
|
||||
&self,
|
||||
invite_event: &PduEvent,
|
||||
) -> Result<Vec<Raw<AnyStrippedStateEvent>>> {
|
||||
let mut state = Vec::new();
|
||||
// Add recommended events
|
||||
if let Some(e) = services().rooms.state_accessor.room_state_get(
|
||||
&invite_event.room_id,
|
||||
&StateEventType::RoomCreate,
|
||||
"",
|
||||
)? {
|
||||
state.push(e.to_stripped_state_event());
|
||||
}
|
||||
if let Some(e) = services().rooms.state_accessor.room_state_get(
|
||||
&invite_event.room_id,
|
||||
&StateEventType::RoomJoinRules,
|
||||
"",
|
||||
)? {
|
||||
state.push(e.to_stripped_state_event());
|
||||
}
|
||||
if let Some(e) = services().rooms.state_accessor.room_state_get(
|
||||
&invite_event.room_id,
|
||||
&StateEventType::RoomCanonicalAlias,
|
||||
"",
|
||||
)? {
|
||||
state.push(e.to_stripped_state_event());
|
||||
}
|
||||
if let Some(e) = services().rooms.state_accessor.room_state_get(
|
||||
&invite_event.room_id,
|
||||
&StateEventType::RoomAvatar,
|
||||
"",
|
||||
)? {
|
||||
state.push(e.to_stripped_state_event());
|
||||
}
|
||||
if let Some(e) = services().rooms.state_accessor.room_state_get(
|
||||
&invite_event.room_id,
|
||||
&StateEventType::RoomName,
|
||||
"",
|
||||
)? {
|
||||
state.push(e.to_stripped_state_event());
|
||||
}
|
||||
if let Some(e) = services().rooms.state_accessor.room_state_get(
|
||||
&invite_event.room_id,
|
||||
&StateEventType::RoomMember,
|
||||
invite_event.sender.as_str(),
|
||||
)? {
|
||||
state.push(e.to_stripped_state_event());
|
||||
}
|
||||
#[tracing::instrument(skip(self, invite_event))]
|
||||
pub fn calculate_invite_state(&self, invite_event: &PduEvent) -> Result<Vec<Raw<AnyStrippedStateEvent>>> {
|
||||
let mut state = Vec::new();
|
||||
// Add recommended events
|
||||
if let Some(e) =
|
||||
services().rooms.state_accessor.room_state_get(&invite_event.room_id, &StateEventType::RoomCreate, "")?
|
||||
{
|
||||
state.push(e.to_stripped_state_event());
|
||||
}
|
||||
if let Some(e) =
|
||||
services().rooms.state_accessor.room_state_get(&invite_event.room_id, &StateEventType::RoomJoinRules, "")?
|
||||
{
|
||||
state.push(e.to_stripped_state_event());
|
||||
}
|
||||
if let Some(e) = services().rooms.state_accessor.room_state_get(
|
||||
&invite_event.room_id,
|
||||
&StateEventType::RoomCanonicalAlias,
|
||||
"",
|
||||
)? {
|
||||
state.push(e.to_stripped_state_event());
|
||||
}
|
||||
if let Some(e) =
|
||||
services().rooms.state_accessor.room_state_get(&invite_event.room_id, &StateEventType::RoomAvatar, "")?
|
||||
{
|
||||
state.push(e.to_stripped_state_event());
|
||||
}
|
||||
if let Some(e) =
|
||||
services().rooms.state_accessor.room_state_get(&invite_event.room_id, &StateEventType::RoomName, "")?
|
||||
{
|
||||
state.push(e.to_stripped_state_event());
|
||||
}
|
||||
if let Some(e) = services().rooms.state_accessor.room_state_get(
|
||||
&invite_event.room_id,
|
||||
&StateEventType::RoomMember,
|
||||
invite_event.sender.as_str(),
|
||||
)? {
|
||||
state.push(e.to_stripped_state_event());
|
||||
}
|
||||
|
||||
state.push(invite_event.to_stripped_state_event());
|
||||
Ok(state)
|
||||
}
|
||||
state.push(invite_event.to_stripped_state_event());
|
||||
Ok(state)
|
||||
}
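The six repeated lookups above could equally be driven off a small list of recommended event types. A sketch under that assumption; the `lookup` closure stands in for `room_state_get` followed by `to_stripped_state_event` and is not part of the actual service:

use ruma::events::StateEventType;

/// Gather the recommended stripped events for an invite, then the sender's member event.
fn recommended_invite_state<T>(
	sender: &str,
	mut lookup: impl FnMut(&StateEventType, &str) -> Option<T>,
) -> Vec<T> {
	let recommended = [
		StateEventType::RoomCreate,
		StateEventType::RoomJoinRules,
		StateEventType::RoomCanonicalAlias,
		StateEventType::RoomAvatar,
		StateEventType::RoomName,
	];
	let mut state: Vec<T> = recommended
		.iter()
		.filter_map(|event_type| lookup(event_type, ""))
		.collect();
	if let Some(member) = lookup(&StateEventType::RoomMember, sender) {
		state.push(member);
	}
	state
}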
	/// Set the state hash to a new version, but does not update state_cache.
	#[tracing::instrument(skip(self))]
	pub fn set_room_state(
		&self,
		room_id: &RoomId,
		shortstatehash: u64,
		mutex_lock: &MutexGuard<'_, ()>, // Take mutex guard to make sure users get the room state mutex
	) -> Result<()> {
		self.db.set_room_state(room_id, shortstatehash, mutex_lock)
	}

	/// Returns the room's version.
	#[tracing::instrument(skip(self))]
	pub fn get_room_version(&self, room_id: &RoomId) -> Result<RoomVersionId> {
		let create_event = services().rooms.state_accessor.room_state_get(
			room_id,
			&StateEventType::RoomCreate,
			"",
		)?;
		let create_event = services().rooms.state_accessor.room_state_get(room_id, &StateEventType::RoomCreate, "")?;

		let create_event_content: RoomCreateEventContent = create_event
			.as_ref()
			.map(|create_event| {
				serde_json::from_str(create_event.content.get()).map_err(|e| {
					warn!("Invalid create event: {}", e);
					Error::bad_database("Invalid create event in db.")
				})
			})
			.transpose()?
			.ok_or_else(|| Error::BadRequest(ErrorKind::InvalidParam, "No create event found"))?;

		Ok(create_event_content.room_version)
	}
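A small sketch of the same extraction with plain serde_json, for illustration only; the service above deserializes into ruma's RoomCreateEventContent instead, and treats a missing or malformed create event as an error:

/// Pull the room version string out of an m.room.create content blob.
/// Per the Matrix spec, an absent `room_version` field means version "1".
fn room_version_from_create(raw_content: &str) -> Option<String> {
	let value: serde_json::Value = serde_json::from_str(raw_content).ok()?;
	match value.get("room_version") {
		Some(v) => v.as_str().map(str::to_owned),
		None => Some("1".to_owned()),
	}
}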
	pub fn get_room_shortstatehash(&self, room_id: &RoomId) -> Result<Option<u64>> {
		self.db.get_room_shortstatehash(room_id)
	}

	pub fn get_forward_extremities(&self, room_id: &RoomId) -> Result<HashSet<Arc<EventId>>> {
		self.db.get_forward_extremities(room_id)
	}

	pub fn set_forward_extremities(
		&self,
		room_id: &RoomId,
		event_ids: Vec<OwnedEventId>,
		state_lock: &MutexGuard<'_, ()>, // Take mutex guard to make sure users get the room state mutex
	) -> Result<()> {
		self.db
			.set_forward_extremities(room_id, event_ids, state_lock)
	}
	pub fn set_forward_extremities(
		&self,
		room_id: &RoomId,
		event_ids: Vec<OwnedEventId>,
		state_lock: &MutexGuard<'_, ()>, // Take mutex guard to make sure users get the room state mutex
	) -> Result<()> {
		self.db.set_forward_extremities(room_id, event_ids, state_lock)
	}
|
||||
|
||||
/// This fetches auth events from the current state.
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn get_auth_events(
|
||||
&self,
|
||||
room_id: &RoomId,
|
||||
kind: &TimelineEventType,
|
||||
sender: &UserId,
|
||||
state_key: Option<&str>,
|
||||
content: &serde_json::value::RawValue,
|
||||
) -> Result<StateMap<Arc<PduEvent>>> {
|
||||
let shortstatehash = if let Some(current_shortstatehash) =
|
||||
services().rooms.state.get_room_shortstatehash(room_id)?
|
||||
{
|
||||
current_shortstatehash
|
||||
} else {
|
||||
return Ok(HashMap::new());
|
||||
};
|
||||
/// This fetches auth events from the current state.
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn get_auth_events(
|
||||
&self, room_id: &RoomId, kind: &TimelineEventType, sender: &UserId, state_key: Option<&str>,
|
||||
content: &serde_json::value::RawValue,
|
||||
) -> Result<StateMap<Arc<PduEvent>>> {
|
||||
let shortstatehash =
|
||||
if let Some(current_shortstatehash) = services().rooms.state.get_room_shortstatehash(room_id)? {
|
||||
current_shortstatehash
|
||||
} else {
|
||||
return Ok(HashMap::new());
|
||||
};
|
||||
|
||||
let auth_events = state_res::auth_types_for_event(kind, sender, state_key, content)
|
||||
.expect("content is a valid JSON object");
|
||||
let auth_events =
|
||||
state_res::auth_types_for_event(kind, sender, state_key, content).expect("content is a valid JSON object");
|
||||
|
||||
let mut sauthevents = auth_events
|
||||
.into_iter()
|
||||
.filter_map(|(event_type, state_key)| {
|
||||
services()
|
||||
.rooms
|
||||
.short
|
||||
.get_shortstatekey(&event_type.to_string().into(), &state_key)
|
||||
.ok()
|
||||
.flatten()
|
||||
.map(|s| (s, (event_type, state_key)))
|
||||
})
|
||||
.collect::<HashMap<_, _>>();
|
||||
let mut sauthevents = auth_events
|
||||
.into_iter()
|
||||
.filter_map(|(event_type, state_key)| {
|
||||
services()
|
||||
.rooms
|
||||
.short
|
||||
.get_shortstatekey(&event_type.to_string().into(), &state_key)
|
||||
.ok()
|
||||
.flatten()
|
||||
.map(|s| (s, (event_type, state_key)))
|
||||
})
|
||||
.collect::<HashMap<_, _>>();
|
||||
|
||||
let full_state = services()
|
||||
.rooms
|
||||
.state_compressor
|
||||
.load_shortstatehash_info(shortstatehash)?
|
||||
.pop()
|
||||
.expect("there is always one layer")
|
||||
.1;
|
||||
let full_state = services()
|
||||
.rooms
|
||||
.state_compressor
|
||||
.load_shortstatehash_info(shortstatehash)?
|
||||
.pop()
|
||||
.expect("there is always one layer")
|
||||
.1;
|
||||
|
||||
Ok(full_state
|
||||
.iter()
|
||||
.filter_map(|compressed| {
|
||||
services()
|
||||
.rooms
|
||||
.state_compressor
|
||||
.parse_compressed_state_event(compressed)
|
||||
.ok()
|
||||
})
|
||||
.filter_map(|(shortstatekey, event_id)| {
|
||||
sauthevents.remove(&shortstatekey).map(|k| (k, event_id))
|
||||
})
|
||||
.filter_map(|(k, event_id)| {
|
||||
services()
|
||||
.rooms
|
||||
.timeline
|
||||
.get_pdu(&event_id)
|
||||
.ok()
|
||||
.flatten()
|
||||
.map(|pdu| (k, pdu))
|
||||
})
|
||||
.collect())
|
||||
}
|
||||
Ok(full_state
|
||||
.iter()
|
||||
.filter_map(|compressed| services().rooms.state_compressor.parse_compressed_state_event(compressed).ok())
|
||||
.filter_map(|(shortstatekey, event_id)| sauthevents.remove(&shortstatekey).map(|k| (k, event_id)))
|
||||
.filter_map(|(k, event_id)| services().rooms.timeline.get_pdu(&event_id).ok().flatten().map(|pdu| (k, pdu)))
|
||||
.collect())
|
||||
}
|
||||
}
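get_auth_events boils down to two steps: ask state_res which (event type, state key) pairs the new event needs for authorization, then pick exactly those entries out of the room's current full state. A type-simplified sketch of the filtering step, with string keys instead of shortstatekeys and no PDU loading:

use std::collections::HashMap;

/// Select the auth-relevant entries from the full state map.
fn select_auth_events(
	needed: &[(String, String)],                    // (event_type, state_key) pairs
	full_state: &HashMap<(String, String), String>, // (event_type, state_key) -> event_id
) -> HashMap<(String, String), String> {
	needed
		.iter()
		.cloned()
		.filter_map(|key| full_state.get(&key).map(|event_id| (key, event_id.clone())))
		.collect()
}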
|
||||
|
|
|
@ -7,53 +7,39 @@ use crate::{PduEvent, Result};

#[async_trait]
pub trait Data: Send + Sync {
	/// Builds a StateMap by iterating over all keys that start
	/// with state_hash, this gives the full state for the given state_hash.
	async fn state_full_ids(&self, shortstatehash: u64) -> Result<HashMap<u64, Arc<EventId>>>;

	async fn state_full(
		&self,
		shortstatehash: u64,
	) -> Result<HashMap<(StateEventType, String), Arc<PduEvent>>>;
	async fn state_full(&self, shortstatehash: u64) -> Result<HashMap<(StateEventType, String), Arc<PduEvent>>>;

	/// Returns a single PDU from `room_id` with key (`event_type`, `state_key`).
	fn state_get_id(
		&self,
		shortstatehash: u64,
		event_type: &StateEventType,
		state_key: &str,
	) -> Result<Option<Arc<EventId>>>;
	/// Returns a single PDU from `room_id` with key (`event_type`,
	/// `state_key`).
	fn state_get_id(
		&self, shortstatehash: u64, event_type: &StateEventType, state_key: &str,
	) -> Result<Option<Arc<EventId>>>;

	/// Returns a single PDU from `room_id` with key (`event_type`, `state_key`).
	fn state_get(
		&self,
		shortstatehash: u64,
		event_type: &StateEventType,
		state_key: &str,
	) -> Result<Option<Arc<PduEvent>>>;
	/// Returns a single PDU from `room_id` with key (`event_type`,
	/// `state_key`).
	fn state_get(
		&self, shortstatehash: u64, event_type: &StateEventType, state_key: &str,
	) -> Result<Option<Arc<PduEvent>>>;

	/// Returns the state hash for this pdu.
	fn pdu_shortstatehash(&self, event_id: &EventId) -> Result<Option<u64>>;

	/// Returns the full room state.
	async fn room_state_full(
		&self,
		room_id: &RoomId,
	) -> Result<HashMap<(StateEventType, String), Arc<PduEvent>>>;
	/// Returns the full room state.
	async fn room_state_full(&self, room_id: &RoomId) -> Result<HashMap<(StateEventType, String), Arc<PduEvent>>>;

	/// Returns a single PDU from `room_id` with key (`event_type`, `state_key`).
	fn room_state_get_id(
		&self,
		room_id: &RoomId,
		event_type: &StateEventType,
		state_key: &str,
	) -> Result<Option<Arc<EventId>>>;
	/// Returns a single PDU from `room_id` with key (`event_type`,
	/// `state_key`).
	fn room_state_get_id(
		&self, room_id: &RoomId, event_type: &StateEventType, state_key: &str,
	) -> Result<Option<Arc<EventId>>>;

	/// Returns a single PDU from `room_id` with key (`event_type`, `state_key`).
	fn room_state_get(
		&self,
		room_id: &RoomId,
		event_type: &StateEventType,
		state_key: &str,
	) -> Result<Option<Arc<PduEvent>>>;
	/// Returns a single PDU from `room_id` with key (`event_type`,
	/// `state_key`).
	fn room_state_get(
		&self, room_id: &RoomId, event_type: &StateEventType, state_key: &str,
	) -> Result<Option<Arc<PduEvent>>>;
}
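The two lookup families in this trait differ only in how the state snapshot is addressed: the `state_get*` methods take an explicit shortstatehash, while the `room_state_get*` methods implicitly use the room's current one. A closure-based sketch of that relationship; both closures are stand-ins for trait methods, not real signatures:

/// room_state_get is state_get applied to the room's current shortstatehash.
fn room_state_get_sketch<T>(
	current_shortstatehash: impl Fn(&str) -> Option<u64>,
	state_get: impl Fn(u64, &str, &str) -> Option<T>,
	room_id: &str,
	event_type: &str,
	state_key: &str,
) -> Option<T> {
	current_shortstatehash(room_id).and_then(|hash| state_get(hash, event_type, state_key))
}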
@ -1,316 +1,259 @@
|
|||
mod data;
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
sync::{Arc, Mutex},
|
||||
collections::HashMap,
|
||||
sync::{Arc, Mutex},
|
||||
};
|
||||
|
||||
pub use data::Data;
|
||||
use lru_cache::LruCache;
|
||||
use ruma::{
|
||||
events::{
|
||||
room::{
|
||||
avatar::RoomAvatarEventContent,
|
||||
history_visibility::{HistoryVisibility, RoomHistoryVisibilityEventContent},
|
||||
member::{MembershipState, RoomMemberEventContent},
|
||||
name::RoomNameEventContent,
|
||||
},
|
||||
StateEventType,
|
||||
},
|
||||
EventId, OwnedServerName, OwnedUserId, RoomId, ServerName, UserId,
|
||||
events::{
|
||||
room::{
|
||||
avatar::RoomAvatarEventContent,
|
||||
history_visibility::{HistoryVisibility, RoomHistoryVisibilityEventContent},
|
||||
member::{MembershipState, RoomMemberEventContent},
|
||||
name::RoomNameEventContent,
|
||||
},
|
||||
StateEventType,
|
||||
},
|
||||
EventId, OwnedServerName, OwnedUserId, RoomId, ServerName, UserId,
|
||||
};
|
||||
use tracing::error;
|
||||
|
||||
use crate::{services, Error, PduEvent, Result};
|
||||
|
||||
pub struct Service {
|
||||
pub db: &'static dyn Data,
|
||||
pub server_visibility_cache: Mutex<LruCache<(OwnedServerName, u64), bool>>,
|
||||
pub user_visibility_cache: Mutex<LruCache<(OwnedUserId, u64), bool>>,
|
||||
pub db: &'static dyn Data,
|
||||
pub server_visibility_cache: Mutex<LruCache<(OwnedServerName, u64), bool>>,
|
||||
pub user_visibility_cache: Mutex<LruCache<(OwnedUserId, u64), bool>>,
|
||||
}
|
||||
|
||||
impl Service {
|
||||
/// Builds a StateMap by iterating over all keys that start
|
||||
/// with state_hash, this gives the full state for the given state_hash.
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub async fn state_full_ids(&self, shortstatehash: u64) -> Result<HashMap<u64, Arc<EventId>>> {
|
||||
self.db.state_full_ids(shortstatehash).await
|
||||
}
|
||||
/// Builds a StateMap by iterating over all keys that start
|
||||
/// with state_hash, this gives the full state for the given state_hash.
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub async fn state_full_ids(&self, shortstatehash: u64) -> Result<HashMap<u64, Arc<EventId>>> {
|
||||
self.db.state_full_ids(shortstatehash).await
|
||||
}
|
||||
|
||||
pub async fn state_full(
|
||||
&self,
|
||||
shortstatehash: u64,
|
||||
) -> Result<HashMap<(StateEventType, String), Arc<PduEvent>>> {
|
||||
self.db.state_full(shortstatehash).await
|
||||
}
|
||||
pub async fn state_full(&self, shortstatehash: u64) -> Result<HashMap<(StateEventType, String), Arc<PduEvent>>> {
|
||||
self.db.state_full(shortstatehash).await
|
||||
}
|
||||
|
||||
/// Returns a single PDU from `room_id` with key (`event_type`, `state_key`).
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn state_get_id(
|
||||
&self,
|
||||
shortstatehash: u64,
|
||||
event_type: &StateEventType,
|
||||
state_key: &str,
|
||||
) -> Result<Option<Arc<EventId>>> {
|
||||
self.db.state_get_id(shortstatehash, event_type, state_key)
|
||||
}
|
||||
/// Returns a single PDU from `room_id` with key (`event_type`,
|
||||
/// `state_key`).
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn state_get_id(
|
||||
&self, shortstatehash: u64, event_type: &StateEventType, state_key: &str,
|
||||
) -> Result<Option<Arc<EventId>>> {
|
||||
self.db.state_get_id(shortstatehash, event_type, state_key)
|
||||
}
|
||||
|
||||
/// Returns a single PDU from `room_id` with key (`event_type`, `state_key`).
|
||||
pub fn state_get(
|
||||
&self,
|
||||
shortstatehash: u64,
|
||||
event_type: &StateEventType,
|
||||
state_key: &str,
|
||||
) -> Result<Option<Arc<PduEvent>>> {
|
||||
self.db.state_get(shortstatehash, event_type, state_key)
|
||||
}
|
||||
/// Returns a single PDU from `room_id` with key (`event_type`,
|
||||
/// `state_key`).
|
||||
pub fn state_get(
|
||||
&self, shortstatehash: u64, event_type: &StateEventType, state_key: &str,
|
||||
) -> Result<Option<Arc<PduEvent>>> {
|
||||
self.db.state_get(shortstatehash, event_type, state_key)
|
||||
}
|
||||
|
||||
/// Get membership for given user in state
|
||||
fn user_membership(&self, shortstatehash: u64, user_id: &UserId) -> Result<MembershipState> {
|
||||
self.state_get(
|
||||
shortstatehash,
|
||||
&StateEventType::RoomMember,
|
||||
user_id.as_str(),
|
||||
)?
|
||||
.map_or(Ok(MembershipState::Leave), |s| {
|
||||
serde_json::from_str(s.content.get())
|
||||
.map(|c: RoomMemberEventContent| c.membership)
|
||||
.map_err(|_| Error::bad_database("Invalid room membership event in database."))
|
||||
})
|
||||
}
|
||||
/// Get membership for given user in state
|
||||
fn user_membership(&self, shortstatehash: u64, user_id: &UserId) -> Result<MembershipState> {
|
||||
self.state_get(shortstatehash, &StateEventType::RoomMember, user_id.as_str())?.map_or(
|
||||
Ok(MembershipState::Leave),
|
||||
|s| {
|
||||
serde_json::from_str(s.content.get())
|
||||
.map(|c: RoomMemberEventContent| c.membership)
|
||||
.map_err(|_| Error::bad_database("Invalid room membership event in database."))
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
/// The user was a joined member at this state (potentially in the past)
|
||||
fn user_was_joined(&self, shortstatehash: u64, user_id: &UserId) -> bool {
|
||||
self.user_membership(shortstatehash, user_id)
|
||||
.is_ok_and(|s| s == MembershipState::Join) // Return sensible default, i.e. false
|
||||
}
|
||||
/// The user was a joined member at this state (potentially in the past)
|
||||
fn user_was_joined(&self, shortstatehash: u64, user_id: &UserId) -> bool {
|
||||
self.user_membership(shortstatehash, user_id).is_ok_and(|s| s == MembershipState::Join)
|
||||
// Return sensible default, i.e.
|
||||
// false
|
||||
}
|
||||
|
||||
/// The user was an invited or joined room member at this state (potentially
|
||||
/// in the past)
|
||||
fn user_was_invited(&self, shortstatehash: u64, user_id: &UserId) -> bool {
|
||||
self.user_membership(shortstatehash, user_id)
|
||||
.is_ok_and(|s| s == MembershipState::Join || s == MembershipState::Invite)
|
||||
// Return sensible default, i.e. false
|
||||
}
|
||||
/// The user was an invited or joined room member at this state (potentially
|
||||
/// in the past)
|
||||
fn user_was_invited(&self, shortstatehash: u64, user_id: &UserId) -> bool {
|
||||
self.user_membership(shortstatehash, user_id)
|
||||
.is_ok_and(|s| s == MembershipState::Join || s == MembershipState::Invite)
|
||||
// Return sensible default, i.e. false
|
||||
}
|
||||
|
||||
/// Whether a server is allowed to see an event through federation, based on
|
||||
/// the room's history_visibility at that event's state.
|
||||
#[tracing::instrument(skip(self, origin, room_id, event_id))]
|
||||
pub fn server_can_see_event(
|
||||
&self,
|
||||
origin: &ServerName,
|
||||
room_id: &RoomId,
|
||||
event_id: &EventId,
|
||||
) -> Result<bool> {
|
||||
let Some(shortstatehash) = self.pdu_shortstatehash(event_id)? else {
|
||||
return Ok(true);
|
||||
};
|
||||
/// Whether a server is allowed to see an event through federation, based on
|
||||
/// the room's history_visibility at that event's state.
|
||||
#[tracing::instrument(skip(self, origin, room_id, event_id))]
|
||||
pub fn server_can_see_event(&self, origin: &ServerName, room_id: &RoomId, event_id: &EventId) -> Result<bool> {
|
||||
let Some(shortstatehash) = self.pdu_shortstatehash(event_id)? else {
|
||||
return Ok(true);
|
||||
};
|
||||
|
||||
if let Some(visibility) = self
|
||||
.server_visibility_cache
|
||||
.lock()
|
||||
.unwrap()
|
||||
.get_mut(&(origin.to_owned(), shortstatehash))
|
||||
{
|
||||
return Ok(*visibility);
|
||||
}
|
||||
if let Some(visibility) =
|
||||
self.server_visibility_cache.lock().unwrap().get_mut(&(origin.to_owned(), shortstatehash))
|
||||
{
|
||||
return Ok(*visibility);
|
||||
}
|
||||
|
||||
let history_visibility = self
|
||||
.state_get(shortstatehash, &StateEventType::RoomHistoryVisibility, "")?
|
||||
.map_or(Ok(HistoryVisibility::Shared), |s| {
|
||||
serde_json::from_str(s.content.get())
|
||||
.map(|c: RoomHistoryVisibilityEventContent| c.history_visibility)
|
||||
.map_err(|_| {
|
||||
Error::bad_database("Invalid history visibility event in database.")
|
||||
})
|
||||
})?;
|
||||
let history_visibility = self.state_get(shortstatehash, &StateEventType::RoomHistoryVisibility, "")?.map_or(
|
||||
Ok(HistoryVisibility::Shared),
|
||||
|s| {
|
||||
serde_json::from_str(s.content.get())
|
||||
.map(|c: RoomHistoryVisibilityEventContent| c.history_visibility)
|
||||
.map_err(|_| Error::bad_database("Invalid history visibility event in database."))
|
||||
},
|
||||
)?;
|
||||
|
||||
let mut current_server_members = services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.room_members(room_id)
|
||||
.filter_map(std::result::Result::ok)
|
||||
.filter(|member| member.server_name() == origin);
|
||||
let mut current_server_members = services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.room_members(room_id)
|
||||
.filter_map(std::result::Result::ok)
|
||||
.filter(|member| member.server_name() == origin);
|
||||
|
||||
let visibility = match history_visibility {
|
||||
HistoryVisibility::WorldReadable | HistoryVisibility::Shared => true,
|
||||
HistoryVisibility::Invited => {
|
||||
// Allow if any member on requesting server was AT LEAST invited, else deny
|
||||
current_server_members.any(|member| self.user_was_invited(shortstatehash, &member))
|
||||
}
|
||||
HistoryVisibility::Joined => {
|
||||
// Allow if any member on requested server was joined, else deny
|
||||
current_server_members.any(|member| self.user_was_joined(shortstatehash, &member))
|
||||
}
|
||||
_ => {
|
||||
error!("Unknown history visibility {history_visibility}");
|
||||
false
|
||||
}
|
||||
};
|
||||
let visibility = match history_visibility {
|
||||
HistoryVisibility::WorldReadable | HistoryVisibility::Shared => true,
|
||||
HistoryVisibility::Invited => {
|
||||
// Allow if any member on requesting server was AT LEAST invited, else deny
|
||||
current_server_members.any(|member| self.user_was_invited(shortstatehash, &member))
|
||||
},
|
||||
HistoryVisibility::Joined => {
|
||||
// Allow if any member on requested server was joined, else deny
|
||||
current_server_members.any(|member| self.user_was_joined(shortstatehash, &member))
|
||||
},
|
||||
_ => {
|
||||
error!("Unknown history visibility {history_visibility}");
|
||||
false
|
||||
},
|
||||
};
|
||||
|
||||
self.server_visibility_cache
|
||||
.lock()
|
||||
.unwrap()
|
||||
.insert((origin.to_owned(), shortstatehash), visibility);
|
||||
self.server_visibility_cache.lock().unwrap().insert((origin.to_owned(), shortstatehash), visibility);
|
||||
|
||||
Ok(visibility)
|
||||
}
|
||||
Ok(visibility)
|
||||
}
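The decision above can be read as a small truth table over the room's history_visibility at the event's state. A condensed sketch with the membership scans abstracted into two booleans; the enum is a stand-in for ruma's HistoryVisibility, which can carry custom values, which is why the service keeps the fallback arm shown above:

#[derive(Clone, Copy)]
enum Visibility {
	WorldReadable,
	Shared,
	Invited,
	Joined,
}

/// true = the requesting server may see the event over federation.
fn server_can_see(vis: Visibility, any_member_invited: bool, any_member_joined: bool) -> bool {
	match vis {
		Visibility::WorldReadable | Visibility::Shared => true,
		Visibility::Invited => any_member_invited,
		Visibility::Joined => any_member_joined,
	}
}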
|
||||
|
||||
/// Whether a user is allowed to see an event, based on
|
||||
/// the room's history_visibility at that event's state.
|
||||
#[tracing::instrument(skip(self, user_id, room_id, event_id))]
|
||||
pub fn user_can_see_event(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
room_id: &RoomId,
|
||||
event_id: &EventId,
|
||||
) -> Result<bool> {
|
||||
let shortstatehash = match self.pdu_shortstatehash(event_id)? {
|
||||
Some(shortstatehash) => shortstatehash,
|
||||
None => return Ok(true),
|
||||
};
|
||||
/// Whether a user is allowed to see an event, based on
|
||||
/// the room's history_visibility at that event's state.
|
||||
#[tracing::instrument(skip(self, user_id, room_id, event_id))]
|
||||
pub fn user_can_see_event(&self, user_id: &UserId, room_id: &RoomId, event_id: &EventId) -> Result<bool> {
|
||||
let shortstatehash = match self.pdu_shortstatehash(event_id)? {
|
||||
Some(shortstatehash) => shortstatehash,
|
||||
None => return Ok(true),
|
||||
};
|
||||
|
||||
if let Some(visibility) = self
|
||||
.user_visibility_cache
|
||||
.lock()
|
||||
.unwrap()
|
||||
.get_mut(&(user_id.to_owned(), shortstatehash))
|
||||
{
|
||||
return Ok(*visibility);
|
||||
}
|
||||
if let Some(visibility) =
|
||||
self.user_visibility_cache.lock().unwrap().get_mut(&(user_id.to_owned(), shortstatehash))
|
||||
{
|
||||
return Ok(*visibility);
|
||||
}
|
||||
|
||||
let currently_member = services().rooms.state_cache.is_joined(user_id, room_id)?;
|
||||
let currently_member = services().rooms.state_cache.is_joined(user_id, room_id)?;
|
||||
|
||||
let history_visibility = self
|
||||
.state_get(shortstatehash, &StateEventType::RoomHistoryVisibility, "")?
|
||||
.map_or(Ok(HistoryVisibility::Shared), |s| {
|
||||
serde_json::from_str(s.content.get())
|
||||
.map(|c: RoomHistoryVisibilityEventContent| c.history_visibility)
|
||||
.map_err(|_| {
|
||||
Error::bad_database("Invalid history visibility event in database.")
|
||||
})
|
||||
})?;
|
||||
let history_visibility = self.state_get(shortstatehash, &StateEventType::RoomHistoryVisibility, "")?.map_or(
|
||||
Ok(HistoryVisibility::Shared),
|
||||
|s| {
|
||||
serde_json::from_str(s.content.get())
|
||||
.map(|c: RoomHistoryVisibilityEventContent| c.history_visibility)
|
||||
.map_err(|_| Error::bad_database("Invalid history visibility event in database."))
|
||||
},
|
||||
)?;
|
||||
|
||||
let visibility = match history_visibility {
|
||||
HistoryVisibility::WorldReadable => true,
|
||||
HistoryVisibility::Shared => currently_member,
|
||||
HistoryVisibility::Invited => {
|
||||
// Allow if any member on requesting server was AT LEAST invited, else deny
|
||||
self.user_was_invited(shortstatehash, user_id)
|
||||
}
|
||||
HistoryVisibility::Joined => {
|
||||
// Allow if any member on requested server was joined, else deny
|
||||
self.user_was_joined(shortstatehash, user_id)
|
||||
}
|
||||
_ => {
|
||||
error!("Unknown history visibility {history_visibility}");
|
||||
false
|
||||
}
|
||||
};
|
||||
let visibility = match history_visibility {
|
||||
HistoryVisibility::WorldReadable => true,
|
||||
HistoryVisibility::Shared => currently_member,
|
||||
HistoryVisibility::Invited => {
|
||||
// Allow if any member on requesting server was AT LEAST invited, else deny
|
||||
self.user_was_invited(shortstatehash, user_id)
|
||||
},
|
||||
HistoryVisibility::Joined => {
|
||||
// Allow if any member on requested server was joined, else deny
|
||||
self.user_was_joined(shortstatehash, user_id)
|
||||
},
|
||||
_ => {
|
||||
error!("Unknown history visibility {history_visibility}");
|
||||
false
|
||||
},
|
||||
};
|
||||
|
||||
self.user_visibility_cache
|
||||
.lock()
|
||||
.unwrap()
|
||||
.insert((user_id.to_owned(), shortstatehash), visibility);
|
||||
self.user_visibility_cache.lock().unwrap().insert((user_id.to_owned(), shortstatehash), visibility);
|
||||
|
||||
Ok(visibility)
|
||||
}
|
||||
Ok(visibility)
|
||||
}
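Both visibility checks share the same memoisation pattern: key the answer on (requester, shortstatehash) so repeated lookups against the same state snapshot skip the membership walk. A std-only sketch with a plain HashMap standing in for the LruCache used by the service:

use std::collections::HashMap;
use std::sync::Mutex;

struct VisibilityCache {
	inner: Mutex<HashMap<(String, u64), bool>>,
}

impl VisibilityCache {
	/// Return the cached answer for (requester, shortstatehash), computing and storing it once.
	fn get_or_compute(&self, key: (String, u64), compute: impl FnOnce() -> bool) -> bool {
		let mut cache = self.inner.lock().unwrap();
		if let Some(v) = cache.get(&key) {
			return *v;
		}
		let v = compute();
		cache.insert(key, v);
		v
	}
}

For brevity this sketch holds the lock while computing; the service above instead takes the lock once to read and once more to insert the result.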
|
||||
|
||||
/// Whether a user is allowed to see an event, based on
|
||||
/// the room's history_visibility at that event's state.
|
||||
#[tracing::instrument(skip(self, user_id, room_id))]
|
||||
pub fn user_can_see_state_events(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool> {
|
||||
let currently_member = services().rooms.state_cache.is_joined(user_id, room_id)?;
|
||||
/// Whether a user is allowed to see an event, based on
|
||||
/// the room's history_visibility at that event's state.
|
||||
#[tracing::instrument(skip(self, user_id, room_id))]
|
||||
pub fn user_can_see_state_events(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool> {
|
||||
let currently_member = services().rooms.state_cache.is_joined(user_id, room_id)?;
|
||||
|
||||
let history_visibility = self
|
||||
.room_state_get(room_id, &StateEventType::RoomHistoryVisibility, "")?
|
||||
.map_or(Ok(HistoryVisibility::Shared), |s| {
|
||||
serde_json::from_str(s.content.get())
|
||||
.map(|c: RoomHistoryVisibilityEventContent| c.history_visibility)
|
||||
.map_err(|_| {
|
||||
Error::bad_database("Invalid history visibility event in database.")
|
||||
})
|
||||
})?;
|
||||
let history_visibility = self.room_state_get(room_id, &StateEventType::RoomHistoryVisibility, "")?.map_or(
|
||||
Ok(HistoryVisibility::Shared),
|
||||
|s| {
|
||||
serde_json::from_str(s.content.get())
|
||||
.map(|c: RoomHistoryVisibilityEventContent| c.history_visibility)
|
||||
.map_err(|_| Error::bad_database("Invalid history visibility event in database."))
|
||||
},
|
||||
)?;
|
||||
|
||||
Ok(currently_member || history_visibility == HistoryVisibility::WorldReadable)
|
||||
}
|
||||
Ok(currently_member || history_visibility == HistoryVisibility::WorldReadable)
|
||||
}
|
||||
|
||||
/// Returns the state hash for this pdu.
|
||||
pub fn pdu_shortstatehash(&self, event_id: &EventId) -> Result<Option<u64>> {
|
||||
self.db.pdu_shortstatehash(event_id)
|
||||
}
|
||||
/// Returns the state hash for this pdu.
|
||||
pub fn pdu_shortstatehash(&self, event_id: &EventId) -> Result<Option<u64>> { self.db.pdu_shortstatehash(event_id) }
|
||||
|
||||
/// Returns the full room state.
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub async fn room_state_full(
|
||||
&self,
|
||||
room_id: &RoomId,
|
||||
) -> Result<HashMap<(StateEventType, String), Arc<PduEvent>>> {
|
||||
self.db.room_state_full(room_id).await
|
||||
}
|
||||
/// Returns the full room state.
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub async fn room_state_full(&self, room_id: &RoomId) -> Result<HashMap<(StateEventType, String), Arc<PduEvent>>> {
|
||||
self.db.room_state_full(room_id).await
|
||||
}
|
||||
|
||||
/// Returns a single PDU from `room_id` with key (`event_type`, `state_key`).
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn room_state_get_id(
|
||||
&self,
|
||||
room_id: &RoomId,
|
||||
event_type: &StateEventType,
|
||||
state_key: &str,
|
||||
) -> Result<Option<Arc<EventId>>> {
|
||||
self.db.room_state_get_id(room_id, event_type, state_key)
|
||||
}
|
||||
/// Returns a single PDU from `room_id` with key (`event_type`,
|
||||
/// `state_key`).
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn room_state_get_id(
|
||||
&self, room_id: &RoomId, event_type: &StateEventType, state_key: &str,
|
||||
) -> Result<Option<Arc<EventId>>> {
|
||||
self.db.room_state_get_id(room_id, event_type, state_key)
|
||||
}
|
||||
|
||||
/// Returns a single PDU from `room_id` with key (`event_type`, `state_key`).
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn room_state_get(
|
||||
&self,
|
||||
room_id: &RoomId,
|
||||
event_type: &StateEventType,
|
||||
state_key: &str,
|
||||
) -> Result<Option<Arc<PduEvent>>> {
|
||||
self.db.room_state_get(room_id, event_type, state_key)
|
||||
}
|
||||
/// Returns a single PDU from `room_id` with key (`event_type`,
|
||||
/// `state_key`).
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn room_state_get(
|
||||
&self, room_id: &RoomId, event_type: &StateEventType, state_key: &str,
|
||||
) -> Result<Option<Arc<PduEvent>>> {
|
||||
self.db.room_state_get(room_id, event_type, state_key)
|
||||
}
|
||||
|
||||
pub fn get_name(&self, room_id: &RoomId) -> Result<Option<String>> {
|
||||
services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(room_id, &StateEventType::RoomName, "")?
|
||||
.map_or(Ok(None), |s| {
|
||||
serde_json::from_str(s.content.get())
|
||||
.map(|c: RoomNameEventContent| Some(c.name))
|
||||
.map_err(|e| {
|
||||
error!(
|
||||
"Invalid room name event in database for room {}. {}",
|
||||
room_id, e
|
||||
);
|
||||
Error::bad_database("Invalid room name event in database.")
|
||||
})
|
||||
})
|
||||
}
|
||||
pub fn get_name(&self, room_id: &RoomId) -> Result<Option<String>> {
|
||||
services().rooms.state_accessor.room_state_get(room_id, &StateEventType::RoomName, "")?.map_or(Ok(None), |s| {
|
||||
serde_json::from_str(s.content.get()).map(|c: RoomNameEventContent| Some(c.name)).map_err(|e| {
|
||||
error!("Invalid room name event in database for room {}. {}", room_id, e);
|
||||
Error::bad_database("Invalid room name event in database.")
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
pub fn get_avatar(&self, room_id: &RoomId) -> Result<ruma::JsOption<RoomAvatarEventContent>> {
|
||||
services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(room_id, &StateEventType::RoomAvatar, "")?
|
||||
.map_or(Ok(ruma::JsOption::Undefined), |s| {
|
||||
serde_json::from_str(s.content.get())
|
||||
.map_err(|_| Error::bad_database("Invalid room avatar event in database."))
|
||||
})
|
||||
}
|
||||
pub fn get_avatar(&self, room_id: &RoomId) -> Result<ruma::JsOption<RoomAvatarEventContent>> {
|
||||
services().rooms.state_accessor.room_state_get(room_id, &StateEventType::RoomAvatar, "")?.map_or(
|
||||
Ok(ruma::JsOption::Undefined),
|
||||
|s| {
|
||||
serde_json::from_str(s.content.get())
|
||||
.map_err(|_| Error::bad_database("Invalid room avatar event in database."))
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
pub fn get_member(
|
||||
&self,
|
||||
room_id: &RoomId,
|
||||
user_id: &UserId,
|
||||
) -> Result<Option<RoomMemberEventContent>> {
|
||||
services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(room_id, &StateEventType::RoomMember, user_id.as_str())?
|
||||
.map_or(Ok(None), |s| {
|
||||
serde_json::from_str(s.content.get())
|
||||
.map_err(|_| Error::bad_database("Invalid room member event in database."))
|
||||
})
|
||||
}
|
||||
pub fn get_member(&self, room_id: &RoomId, user_id: &UserId) -> Result<Option<RoomMemberEventContent>> {
|
||||
services().rooms.state_accessor.room_state_get(room_id, &StateEventType::RoomMember, user_id.as_str())?.map_or(
|
||||
Ok(None),
|
||||
|s| {
|
||||
serde_json::from_str(s.content.get())
|
||||
.map_err(|_| Error::bad_database("Invalid room member event in database."))
|
||||
},
|
||||
)
|
||||
}
|
||||
}
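get_name, get_avatar, and get_member all follow the same pattern: a missing state event maps to "no value", while a present but unparseable one is treated as a database error. A generic sketch of that pattern, assuming serde and serde_json are available as elsewhere in this file; the String error is a simplification of the service's Error type:

/// Parse an optional raw state-event content into T.
fn parse_optional_content<T: serde::de::DeserializeOwned>(
	raw: Option<&str>,
) -> Result<Option<T>, String> {
	match raw {
		None => Ok(None),
		Some(json) => serde_json::from_str(json)
			.map(Some)
			.map_err(|e| format!("invalid state event content in database: {e}")),
	}
}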
|
||||
|
|
|
@ -1,112 +1,79 @@
|
|||
use std::{collections::HashSet, sync::Arc};
|
||||
|
||||
use crate::Result;
|
||||
use ruma::{
|
||||
api::appservice::Registration,
|
||||
events::{AnyStrippedStateEvent, AnySyncStateEvent},
|
||||
serde::Raw,
|
||||
OwnedRoomId, OwnedServerName, OwnedUserId, RoomId, ServerName, UserId,
|
||||
api::appservice::Registration,
|
||||
events::{AnyStrippedStateEvent, AnySyncStateEvent},
|
||||
serde::Raw,
|
||||
OwnedRoomId, OwnedServerName, OwnedUserId, RoomId, ServerName, UserId,
|
||||
};
|
||||
|
||||
type StrippedStateEventIter<'a> =
|
||||
Box<dyn Iterator<Item = Result<(OwnedRoomId, Vec<Raw<AnyStrippedStateEvent>>)>> + 'a>;
|
||||
use crate::Result;
|
||||
|
||||
type AnySyncStateEventIter<'a> =
|
||||
Box<dyn Iterator<Item = Result<(OwnedRoomId, Vec<Raw<AnySyncStateEvent>>)>> + 'a>;
|
||||
type StrippedStateEventIter<'a> = Box<dyn Iterator<Item = Result<(OwnedRoomId, Vec<Raw<AnyStrippedStateEvent>>)>> + 'a>;
|
||||
|
||||
type AnySyncStateEventIter<'a> = Box<dyn Iterator<Item = Result<(OwnedRoomId, Vec<Raw<AnySyncStateEvent>>)>> + 'a>;
|
||||
|
||||
pub trait Data: Send + Sync {
|
||||
fn mark_as_once_joined(&self, user_id: &UserId, room_id: &RoomId) -> Result<()>;
|
||||
fn mark_as_joined(&self, user_id: &UserId, room_id: &RoomId) -> Result<()>;
|
||||
fn mark_as_invited(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
room_id: &RoomId,
|
||||
last_state: Option<Vec<Raw<AnyStrippedStateEvent>>>,
|
||||
) -> Result<()>;
|
||||
fn mark_as_left(&self, user_id: &UserId, room_id: &RoomId) -> Result<()>;
|
||||
fn mark_as_once_joined(&self, user_id: &UserId, room_id: &RoomId) -> Result<()>;
|
||||
fn mark_as_joined(&self, user_id: &UserId, room_id: &RoomId) -> Result<()>;
|
||||
fn mark_as_invited(
|
||||
&self, user_id: &UserId, room_id: &RoomId, last_state: Option<Vec<Raw<AnyStrippedStateEvent>>>,
|
||||
) -> Result<()>;
|
||||
fn mark_as_left(&self, user_id: &UserId, room_id: &RoomId) -> Result<()>;
|
||||
|
||||
fn update_joined_count(&self, room_id: &RoomId) -> Result<()>;
|
||||
fn update_joined_count(&self, room_id: &RoomId) -> Result<()>;
|
||||
|
||||
fn get_our_real_users(&self, room_id: &RoomId) -> Result<Arc<HashSet<OwnedUserId>>>;
|
||||
fn get_our_real_users(&self, room_id: &RoomId) -> Result<Arc<HashSet<OwnedUserId>>>;
|
||||
|
||||
fn appservice_in_room(
|
||||
&self,
|
||||
room_id: &RoomId,
|
||||
appservice: &(String, Registration),
|
||||
) -> Result<bool>;
|
||||
fn appservice_in_room(&self, room_id: &RoomId, appservice: &(String, Registration)) -> Result<bool>;
|
||||
|
||||
/// Makes a user forget a room.
|
||||
fn forget(&self, room_id: &RoomId, user_id: &UserId) -> Result<()>;
|
||||
/// Makes a user forget a room.
|
||||
fn forget(&self, room_id: &RoomId, user_id: &UserId) -> Result<()>;
|
||||
|
||||
/// Returns an iterator of all servers participating in this room.
|
||||
fn room_servers<'a>(
|
||||
&'a self,
|
||||
room_id: &RoomId,
|
||||
) -> Box<dyn Iterator<Item = Result<OwnedServerName>> + 'a>;
|
||||
/// Returns an iterator of all servers participating in this room.
|
||||
fn room_servers<'a>(&'a self, room_id: &RoomId) -> Box<dyn Iterator<Item = Result<OwnedServerName>> + 'a>;
|
||||
|
||||
fn server_in_room(&self, server: &ServerName, room_id: &RoomId) -> Result<bool>;
|
||||
fn server_in_room(&self, server: &ServerName, room_id: &RoomId) -> Result<bool>;
|
||||
|
||||
/// Returns an iterator of all rooms a server participates in (as far as we know).
|
||||
fn server_rooms<'a>(
|
||||
&'a self,
|
||||
server: &ServerName,
|
||||
) -> Box<dyn Iterator<Item = Result<OwnedRoomId>> + 'a>;
|
||||
/// Returns an iterator of all rooms a server participates in (as far as we
|
||||
/// know).
|
||||
fn server_rooms<'a>(&'a self, server: &ServerName) -> Box<dyn Iterator<Item = Result<OwnedRoomId>> + 'a>;
|
||||
|
||||
/// Returns an iterator over all joined members of a room.
|
||||
fn room_members<'a>(
|
||||
&'a self,
|
||||
room_id: &RoomId,
|
||||
) -> Box<dyn Iterator<Item = Result<OwnedUserId>> + 'a>;
|
||||
/// Returns an iterator over all joined members of a room.
|
||||
fn room_members<'a>(&'a self, room_id: &RoomId) -> Box<dyn Iterator<Item = Result<OwnedUserId>> + 'a>;
|
||||
|
||||
fn room_joined_count(&self, room_id: &RoomId) -> Result<Option<u64>>;
|
||||
fn room_joined_count(&self, room_id: &RoomId) -> Result<Option<u64>>;
|
||||
|
||||
fn room_invited_count(&self, room_id: &RoomId) -> Result<Option<u64>>;
|
||||
fn room_invited_count(&self, room_id: &RoomId) -> Result<Option<u64>>;
|
||||
|
||||
/// Returns an iterator over all User IDs who ever joined a room.
|
||||
fn room_useroncejoined<'a>(
|
||||
&'a self,
|
||||
room_id: &RoomId,
|
||||
) -> Box<dyn Iterator<Item = Result<OwnedUserId>> + 'a>;
|
||||
/// Returns an iterator over all User IDs who ever joined a room.
|
||||
fn room_useroncejoined<'a>(&'a self, room_id: &RoomId) -> Box<dyn Iterator<Item = Result<OwnedUserId>> + 'a>;
|
||||
|
||||
/// Returns an iterator over all invited members of a room.
|
||||
fn room_members_invited<'a>(
|
||||
&'a self,
|
||||
room_id: &RoomId,
|
||||
) -> Box<dyn Iterator<Item = Result<OwnedUserId>> + 'a>;
|
||||
/// Returns an iterator over all invited members of a room.
|
||||
fn room_members_invited<'a>(&'a self, room_id: &RoomId) -> Box<dyn Iterator<Item = Result<OwnedUserId>> + 'a>;
|
||||
|
||||
fn get_invite_count(&self, room_id: &RoomId, user_id: &UserId) -> Result<Option<u64>>;
|
||||
fn get_invite_count(&self, room_id: &RoomId, user_id: &UserId) -> Result<Option<u64>>;
|
||||
|
||||
fn get_left_count(&self, room_id: &RoomId, user_id: &UserId) -> Result<Option<u64>>;
|
||||
fn get_left_count(&self, room_id: &RoomId, user_id: &UserId) -> Result<Option<u64>>;
|
||||
|
||||
/// Returns an iterator over all rooms this user joined.
|
||||
fn rooms_joined<'a>(
|
||||
&'a self,
|
||||
user_id: &UserId,
|
||||
) -> Box<dyn Iterator<Item = Result<OwnedRoomId>> + 'a>;
|
||||
/// Returns an iterator over all rooms this user joined.
|
||||
fn rooms_joined<'a>(&'a self, user_id: &UserId) -> Box<dyn Iterator<Item = Result<OwnedRoomId>> + 'a>;
|
||||
|
||||
/// Returns an iterator over all rooms a user was invited to.
|
||||
fn rooms_invited<'a>(&'a self, user_id: &UserId) -> StrippedStateEventIter<'a>;
|
||||
/// Returns an iterator over all rooms a user was invited to.
|
||||
fn rooms_invited<'a>(&'a self, user_id: &UserId) -> StrippedStateEventIter<'a>;
|
||||
|
||||
fn invite_state(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
room_id: &RoomId,
|
||||
) -> Result<Option<Vec<Raw<AnyStrippedStateEvent>>>>;
|
||||
fn invite_state(&self, user_id: &UserId, room_id: &RoomId) -> Result<Option<Vec<Raw<AnyStrippedStateEvent>>>>;
|
||||
|
||||
fn left_state(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
room_id: &RoomId,
|
||||
) -> Result<Option<Vec<Raw<AnyStrippedStateEvent>>>>;
|
||||
fn left_state(&self, user_id: &UserId, room_id: &RoomId) -> Result<Option<Vec<Raw<AnyStrippedStateEvent>>>>;
|
||||
|
||||
/// Returns an iterator over all rooms a user left.
|
||||
fn rooms_left<'a>(&'a self, user_id: &UserId) -> AnySyncStateEventIter<'a>;
|
||||
/// Returns an iterator over all rooms a user left.
|
||||
fn rooms_left<'a>(&'a self, user_id: &UserId) -> AnySyncStateEventIter<'a>;
|
||||
|
||||
fn once_joined(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool>;
|
||||
fn once_joined(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool>;
|
||||
|
||||
fn is_joined(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool>;
|
||||
fn is_joined(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool>;
|
||||
|
||||
fn is_invited(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool>;
|
||||
fn is_invited(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool>;
|
||||
|
||||
fn is_left(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool>;
|
||||
fn is_left(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool>;
|
||||
}
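A toy, in-memory picture of the bookkeeping this trait abstracts over, only to illustrate the joined/invited/left buckets behind is_joined and friends; the real implementation lives in a key-value store and also tracks counts, once-joined history, and stripped state:

use std::collections::{HashMap, HashSet};

#[derive(Default)]
struct MembershipIndex {
	joined: HashMap<String, HashSet<String>>,  // room_id -> user_ids
	invited: HashMap<String, HashSet<String>>, // room_id -> user_ids
	left: HashMap<String, HashSet<String>>,    // room_id -> user_ids
}

impl MembershipIndex {
	fn mark_as_joined(&mut self, user_id: &str, room_id: &str) {
		self.invited.entry(room_id.to_owned()).or_default().remove(user_id);
		self.left.entry(room_id.to_owned()).or_default().remove(user_id);
		self.joined.entry(room_id.to_owned()).or_default().insert(user_id.to_owned());
	}

	fn is_joined(&self, user_id: &str, room_id: &str) -> bool {
		self.joined.get(room_id).is_some_and(|users| users.contains(user_id))
	}
}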
|
||||
|
|
|
@ -1,393 +1,311 @@
|
|||
use std::{collections::HashSet, sync::Arc};
|
||||
|
||||
use ruma::api::federation;
|
||||
pub use data::Data;
|
||||
use ruma::{
|
||||
api::appservice::Registration,
|
||||
events::{
|
||||
direct::DirectEvent,
|
||||
ignored_user_list::IgnoredUserListEvent,
|
||||
room::{
|
||||
create::RoomCreateEventContent,
|
||||
member::{MembershipState, RoomMemberEventContent},
|
||||
},
|
||||
AnyStrippedStateEvent, AnySyncStateEvent, GlobalAccountDataEventType,
|
||||
RoomAccountDataEventType, StateEventType,
|
||||
},
|
||||
serde::Raw,
|
||||
OwnedRoomId, OwnedServerName, OwnedUserId, RoomId, ServerName, UserId,
|
||||
api::{appservice::Registration, federation},
|
||||
events::{
|
||||
direct::DirectEvent,
|
||||
ignored_user_list::IgnoredUserListEvent,
|
||||
room::{
|
||||
create::RoomCreateEventContent,
|
||||
member::{MembershipState, RoomMemberEventContent},
|
||||
},
|
||||
AnyStrippedStateEvent, AnySyncStateEvent, GlobalAccountDataEventType, RoomAccountDataEventType, StateEventType,
|
||||
},
|
||||
serde::Raw,
|
||||
OwnedRoomId, OwnedServerName, OwnedUserId, RoomId, ServerName, UserId,
|
||||
};
|
||||
use tracing::warn;
|
||||
|
||||
pub use data::Data;
|
||||
|
||||
use crate::{services, Error, Result};
|
||||
|
||||
mod data;
|
||||
|
||||
pub struct Service {
|
||||
pub db: &'static dyn Data,
|
||||
pub db: &'static dyn Data,
|
||||
}
|
||||
|
||||
impl Service {
|
||||
/// Update current membership data.
|
||||
#[tracing::instrument(skip(self, last_state))]
|
||||
pub async fn update_membership(
|
||||
&self,
|
||||
room_id: &RoomId,
|
||||
user_id: &UserId,
|
||||
membership_event: RoomMemberEventContent,
|
||||
sender: &UserId,
|
||||
last_state: Option<Vec<Raw<AnyStrippedStateEvent>>>,
|
||||
update_joined_count: bool,
|
||||
) -> Result<()> {
|
||||
let membership = membership_event.membership;
|
||||
/// Update current membership data.
|
||||
#[tracing::instrument(skip(self, last_state))]
|
||||
pub async fn update_membership(
|
||||
&self, room_id: &RoomId, user_id: &UserId, membership_event: RoomMemberEventContent, sender: &UserId,
|
||||
last_state: Option<Vec<Raw<AnyStrippedStateEvent>>>, update_joined_count: bool,
|
||||
) -> Result<()> {
|
||||
let membership = membership_event.membership;
|
||||
|
||||
// Keep track what remote users exist by adding them as "deactivated" users
|
||||
if user_id.server_name() != services().globals.server_name() {
|
||||
if !services().users.exists(user_id)? {
|
||||
services().users.create(user_id, None)?;
|
||||
}
|
||||
// Keep track what remote users exist by adding them as "deactivated" users
|
||||
if user_id.server_name() != services().globals.server_name() {
|
||||
if !services().users.exists(user_id)? {
|
||||
services().users.create(user_id, None)?;
|
||||
}
|
||||
|
||||
// Try to update our local copy of the user if ours does not match
|
||||
if ((services().users.displayname(user_id)? != membership_event.displayname)
|
||||
|| (services().users.avatar_url(user_id)? != membership_event.avatar_url)
|
||||
|| (services().users.blurhash(user_id)? != membership_event.blurhash))
|
||||
&& (membership != MembershipState::Leave)
|
||||
{
|
||||
let response = services()
|
||||
.sending
|
||||
.send_federation_request(
|
||||
user_id.server_name(),
|
||||
federation::query::get_profile_information::v1::Request {
|
||||
user_id: user_id.into(),
|
||||
field: None, // we want the full user's profile to update locally too
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
// Try to update our local copy of the user if ours does not match
|
||||
if ((services().users.displayname(user_id)? != membership_event.displayname)
|
||||
|| (services().users.avatar_url(user_id)? != membership_event.avatar_url)
|
||||
|| (services().users.blurhash(user_id)? != membership_event.blurhash))
|
||||
&& (membership != MembershipState::Leave)
|
||||
{
|
||||
let response = services()
|
||||
.sending
|
||||
.send_federation_request(
|
||||
user_id.server_name(),
|
||||
federation::query::get_profile_information::v1::Request {
|
||||
user_id: user_id.into(),
|
||||
field: None, // we want the full user's profile to update locally too
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
services()
|
||||
.users
|
||||
.set_displayname(user_id, response.displayname.clone())
|
||||
.await?;
|
||||
services()
|
||||
.users
|
||||
.set_avatar_url(user_id, response.avatar_url)
|
||||
.await?;
|
||||
services()
|
||||
.users
|
||||
.set_blurhash(user_id, response.blurhash)
|
||||
.await?;
|
||||
};
|
||||
}
|
||||
services().users.set_displayname(user_id, response.displayname.clone()).await?;
|
||||
services().users.set_avatar_url(user_id, response.avatar_url).await?;
|
||||
services().users.set_blurhash(user_id, response.blurhash).await?;
|
||||
};
|
||||
}
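The federation round-trip above is gated on a simple drift check: only refetch the profile when the membership event disagrees with what is stored locally and the user is not leaving. A sketch of just that condition; the Profile struct is an illustration, not a server type:

struct Profile {
	displayname: Option<String>,
	avatar_url: Option<String>,
	blurhash: Option<String>,
}

/// true = ask the remote server for a fresh copy of the user's profile.
fn needs_profile_refresh(local: &Profile, from_event: &Profile, leaving: bool) -> bool {
	!leaving
		&& (local.displayname != from_event.displayname
			|| local.avatar_url != from_event.avatar_url
			|| local.blurhash != from_event.blurhash)
}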
|
||||
|
||||
match &membership {
|
||||
MembershipState::Join => {
|
||||
// Check if the user never joined this room
|
||||
if !self.once_joined(user_id, room_id)? {
|
||||
// Add the user ID to the join list then
|
||||
self.db.mark_as_once_joined(user_id, room_id)?;
|
||||
match &membership {
|
||||
MembershipState::Join => {
|
||||
// Check if the user never joined this room
|
||||
if !self.once_joined(user_id, room_id)? {
|
||||
// Add the user ID to the join list then
|
||||
self.db.mark_as_once_joined(user_id, room_id)?;
|
||||
|
||||
// Check if the room has a predecessor
|
||||
if let Some(predecessor) = services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(room_id, &StateEventType::RoomCreate, "")?
|
||||
.and_then(|create| serde_json::from_str(create.content.get()).ok())
|
||||
.and_then(|content: RoomCreateEventContent| content.predecessor)
|
||||
{
|
||||
// Copy user settings from predecessor to the current room:
|
||||
// - Push rules
|
||||
//
|
||||
// TODO: finish this once push rules are implemented.
|
||||
//
|
||||
// let mut push_rules_event_content: PushRulesEvent = account_data
|
||||
// .get(
|
||||
// None,
|
||||
// user_id,
|
||||
// EventType::PushRules,
|
||||
// )?;
|
||||
//
|
||||
// NOTE: find where `predecessor.room_id` match
|
||||
// and update to `room_id`.
|
||||
//
|
||||
// account_data
|
||||
// .update(
|
||||
// None,
|
||||
// user_id,
|
||||
// EventType::PushRules,
|
||||
// &push_rules_event_content,
|
||||
// globals,
|
||||
// )
|
||||
// .ok();
|
||||
// Check if the room has a predecessor
|
||||
if let Some(predecessor) = services()
|
||||
.rooms
|
||||
.state_accessor
|
||||
.room_state_get(room_id, &StateEventType::RoomCreate, "")?
|
||||
.and_then(|create| serde_json::from_str(create.content.get()).ok())
|
||||
.and_then(|content: RoomCreateEventContent| content.predecessor)
|
||||
{
|
||||
// Copy user settings from predecessor to the current room:
|
||||
// - Push rules
|
||||
//
|
||||
// TODO: finish this once push rules are implemented.
|
||||
//
|
||||
// let mut push_rules_event_content: PushRulesEvent = account_data
|
||||
// .get(
|
||||
// None,
|
||||
// user_id,
|
||||
// EventType::PushRules,
|
||||
// )?;
|
||||
//
|
||||
// NOTE: find where `predecessor.room_id` matches
|
||||
// and update to `room_id`.
|
||||
//
|
||||
// account_data
|
||||
// .update(
|
||||
// None,
|
||||
// user_id,
|
||||
// EventType::PushRules,
|
||||
// &push_rules_event_content,
|
||||
// globals,
|
||||
// )
|
||||
// .ok();
|
||||
|
||||
// Copy old tags to new room
|
||||
if let Some(tag_event) = services()
|
||||
.account_data
|
||||
.get(
|
||||
Some(&predecessor.room_id),
|
||||
user_id,
|
||||
RoomAccountDataEventType::Tag,
|
||||
)?
|
||||
.map(|event| {
|
||||
serde_json::from_str(event.get()).map_err(|e| {
|
||||
warn!("Invalid account data event in db: {e:?}");
|
||||
Error::BadDatabase("Invalid account data event in db.")
|
||||
})
|
||||
})
|
||||
{
|
||||
services()
|
||||
.account_data
|
||||
.update(
|
||||
Some(room_id),
|
||||
user_id,
|
||||
RoomAccountDataEventType::Tag,
|
||||
&tag_event?,
|
||||
)
|
||||
.ok();
|
||||
};
|
||||
// Copy old tags to new room
|
||||
if let Some(tag_event) = services()
|
||||
.account_data
|
||||
.get(Some(&predecessor.room_id), user_id, RoomAccountDataEventType::Tag)?
|
||||
.map(|event| {
|
||||
serde_json::from_str(event.get()).map_err(|e| {
|
||||
warn!("Invalid account data event in db: {e:?}");
|
||||
Error::BadDatabase("Invalid account data event in db.")
|
||||
})
|
||||
}) {
|
||||
services()
|
||||
.account_data
|
||||
.update(Some(room_id), user_id, RoomAccountDataEventType::Tag, &tag_event?)
|
||||
.ok();
|
||||
};
|
||||
|
||||
// Copy direct chat flag
|
||||
if let Some(direct_event) = services()
|
||||
.account_data
|
||||
.get(
|
||||
None,
|
||||
user_id,
|
||||
GlobalAccountDataEventType::Direct.to_string().into(),
|
||||
)?
|
||||
.map(|event| {
|
||||
serde_json::from_str::<DirectEvent>(event.get()).map_err(|e| {
|
||||
warn!("Invalid account data event in db: {e:?}");
|
||||
Error::BadDatabase("Invalid account data event in db.")
|
||||
})
|
||||
})
|
||||
{
|
||||
let mut direct_event = direct_event?;
|
||||
let mut room_ids_updated = false;
|
||||
// Copy direct chat flag
|
||||
if let Some(direct_event) = services()
|
||||
.account_data
|
||||
.get(None, user_id, GlobalAccountDataEventType::Direct.to_string().into())?
|
||||
.map(|event| {
|
||||
serde_json::from_str::<DirectEvent>(event.get()).map_err(|e| {
|
||||
warn!("Invalid account data event in db: {e:?}");
|
||||
Error::BadDatabase("Invalid account data event in db.")
|
||||
})
|
||||
}) {
|
||||
let mut direct_event = direct_event?;
|
||||
let mut room_ids_updated = false;
|
||||
|
||||
for room_ids in direct_event.content.0.values_mut() {
|
||||
if room_ids.iter().any(|r| r == &predecessor.room_id) {
|
||||
room_ids.push(room_id.to_owned());
|
||||
room_ids_updated = true;
|
||||
}
|
||||
}
|
||||
for room_ids in direct_event.content.0.values_mut() {
|
||||
if room_ids.iter().any(|r| r == &predecessor.room_id) {
|
||||
room_ids.push(room_id.to_owned());
|
||||
room_ids_updated = true;
|
||||
}
|
||||
}
|
||||
|
||||
if room_ids_updated {
|
||||
services().account_data.update(
|
||||
None,
|
||||
user_id,
|
||||
GlobalAccountDataEventType::Direct.to_string().into(),
|
||||
&serde_json::to_value(&direct_event)
|
||||
.expect("to json always works"),
|
||||
)?;
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
if room_ids_updated {
|
||||
services().account_data.update(
|
||||
None,
|
||||
user_id,
|
||||
GlobalAccountDataEventType::Direct.to_string().into(),
|
||||
&serde_json::to_value(&direct_event).expect("to json always works"),
|
||||
)?;
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
self.db.mark_as_joined(user_id, room_id)?;
|
||||
}
|
||||
MembershipState::Invite => {
|
||||
// We want to know if the sender is ignored by the receiver
|
||||
let is_ignored = services()
|
||||
.account_data
|
||||
.get(
|
||||
None, // Ignored users are in global account data
|
||||
user_id, // Receiver
|
||||
GlobalAccountDataEventType::IgnoredUserList
|
||||
.to_string()
|
||||
.into(),
|
||||
)?
|
||||
.map(|event| {
|
||||
serde_json::from_str::<IgnoredUserListEvent>(event.get()).map_err(|e| {
|
||||
warn!("Invalid account data event in db: {e:?}");
|
||||
Error::BadDatabase("Invalid account data event in db.")
|
||||
})
|
||||
})
|
||||
.transpose()?
|
||||
.map_or(false, |ignored| {
|
||||
ignored
|
||||
.content
|
||||
.ignored_users
|
||||
.iter()
|
||||
.any(|(user, _details)| user == sender)
|
||||
});
|
||||
self.db.mark_as_joined(user_id, room_id)?;
|
||||
},
|
||||
MembershipState::Invite => {
|
||||
// We want to know if the sender is ignored by the receiver
|
||||
let is_ignored = services()
|
||||
.account_data
|
||||
.get(
|
||||
None, // Ignored users are in global account data
|
||||
user_id, // Receiver
|
||||
GlobalAccountDataEventType::IgnoredUserList.to_string().into(),
|
||||
)?
|
||||
.map(|event| {
|
||||
serde_json::from_str::<IgnoredUserListEvent>(event.get()).map_err(|e| {
|
||||
warn!("Invalid account data event in db: {e:?}");
|
||||
Error::BadDatabase("Invalid account data event in db.")
|
||||
})
|
||||
})
|
||||
.transpose()?
|
||||
.map_or(false, |ignored| {
|
||||
ignored.content.ignored_users.iter().any(|(user, _details)| user == sender)
|
||||
});
|
||||
|
||||
if is_ignored {
|
||||
return Ok(());
|
||||
}
|
||||
if is_ignored {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
self.db.mark_as_invited(user_id, room_id, last_state)?;
|
||||
}
|
||||
MembershipState::Leave | MembershipState::Ban => {
|
||||
self.db.mark_as_left(user_id, room_id)?;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
self.db.mark_as_invited(user_id, room_id, last_state)?;
|
||||
},
|
||||
MembershipState::Leave | MembershipState::Ban => {
|
||||
self.db.mark_as_left(user_id, room_id)?;
|
||||
},
|
||||
_ => {},
|
||||
}
|
||||
|
||||
if update_joined_count {
|
||||
self.update_joined_count(room_id)?;
|
||||
}
|
||||
if update_joined_count {
|
||||
self.update_joined_count(room_id)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self, room_id))]
|
||||
pub fn update_joined_count(&self, room_id: &RoomId) -> Result<()> {
|
||||
self.db.update_joined_count(room_id)
|
||||
}
|
||||
#[tracing::instrument(skip(self, room_id))]
|
||||
pub fn update_joined_count(&self, room_id: &RoomId) -> Result<()> { self.db.update_joined_count(room_id) }
|
||||
|
||||
#[tracing::instrument(skip(self, room_id))]
|
||||
pub fn get_our_real_users(&self, room_id: &RoomId) -> Result<Arc<HashSet<OwnedUserId>>> {
|
||||
self.db.get_our_real_users(room_id)
|
||||
}
|
||||
#[tracing::instrument(skip(self, room_id))]
|
||||
pub fn get_our_real_users(&self, room_id: &RoomId) -> Result<Arc<HashSet<OwnedUserId>>> {
|
||||
self.db.get_our_real_users(room_id)
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self, room_id, appservice))]
|
||||
pub fn appservice_in_room(
|
||||
&self,
|
||||
room_id: &RoomId,
|
||||
appservice: &(String, Registration),
|
||||
) -> Result<bool> {
|
||||
self.db.appservice_in_room(room_id, appservice)
|
||||
}
|
||||
#[tracing::instrument(skip(self, room_id, appservice))]
|
||||
pub fn appservice_in_room(&self, room_id: &RoomId, appservice: &(String, Registration)) -> Result<bool> {
|
||||
self.db.appservice_in_room(room_id, appservice)
|
||||
}
|
||||
|
||||
/// Makes a user forget a room.
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn forget(&self, room_id: &RoomId, user_id: &UserId) -> Result<()> {
|
||||
self.db.forget(room_id, user_id)
|
||||
}
|
||||
/// Makes a user forget a room.
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn forget(&self, room_id: &RoomId, user_id: &UserId) -> Result<()> { self.db.forget(room_id, user_id) }
|
||||
|
||||
/// Returns an iterator of all servers participating in this room.
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn room_servers<'a>(
|
||||
&'a self,
|
||||
room_id: &RoomId,
|
||||
) -> impl Iterator<Item = Result<OwnedServerName>> + 'a {
|
||||
self.db.room_servers(room_id)
|
||||
}
|
||||
/// Returns an iterator of all servers participating in this room.
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn room_servers<'a>(&'a self, room_id: &RoomId) -> impl Iterator<Item = Result<OwnedServerName>> + 'a {
|
||||
self.db.room_servers(room_id)
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn server_in_room(&self, server: &ServerName, room_id: &RoomId) -> Result<bool> {
|
||||
self.db.server_in_room(server, room_id)
|
||||
}
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn server_in_room(&self, server: &ServerName, room_id: &RoomId) -> Result<bool> {
|
||||
self.db.server_in_room(server, room_id)
|
||||
}
|
||||
|
||||
/// Returns an iterator of all rooms a server participates in (as far as we know).
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn server_rooms<'a>(
|
||||
&'a self,
|
||||
server: &ServerName,
|
||||
) -> impl Iterator<Item = Result<OwnedRoomId>> + 'a {
|
||||
self.db.server_rooms(server)
|
||||
}
|
||||
/// Returns an iterator of all rooms a server participates in (as far as we
|
||||
/// know).
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn server_rooms<'a>(&'a self, server: &ServerName) -> impl Iterator<Item = Result<OwnedRoomId>> + 'a {
|
||||
self.db.server_rooms(server)
|
||||
}
|
||||
|
||||
/// Returns an iterator over all joined members of a room.
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn room_members<'a>(
|
||||
&'a self,
|
||||
room_id: &RoomId,
|
||||
) -> impl Iterator<Item = Result<OwnedUserId>> + 'a {
|
||||
self.db.room_members(room_id)
|
||||
}
|
||||
/// Returns an iterator over all joined members of a room.
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn room_members<'a>(&'a self, room_id: &RoomId) -> impl Iterator<Item = Result<OwnedUserId>> + 'a {
|
||||
self.db.room_members(room_id)
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn room_joined_count(&self, room_id: &RoomId) -> Result<Option<u64>> {
|
||||
self.db.room_joined_count(room_id)
|
||||
}
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn room_joined_count(&self, room_id: &RoomId) -> Result<Option<u64>> { self.db.room_joined_count(room_id) }
|
||||
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn room_invited_count(&self, room_id: &RoomId) -> Result<Option<u64>> {
|
||||
self.db.room_invited_count(room_id)
|
||||
}
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn room_invited_count(&self, room_id: &RoomId) -> Result<Option<u64>> { self.db.room_invited_count(room_id) }
|
||||
|
||||
/// Returns an iterator over all User IDs who ever joined a room.
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn room_useroncejoined<'a>(
|
||||
&'a self,
|
||||
room_id: &RoomId,
|
||||
) -> impl Iterator<Item = Result<OwnedUserId>> + 'a {
|
||||
self.db.room_useroncejoined(room_id)
|
||||
}
|
||||
/// Returns an iterator over all User IDs who ever joined a room.
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn room_useroncejoined<'a>(&'a self, room_id: &RoomId) -> impl Iterator<Item = Result<OwnedUserId>> + 'a {
|
||||
self.db.room_useroncejoined(room_id)
|
||||
}
|
||||
|
||||
/// Returns an iterator over all invited members of a room.
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn room_members_invited<'a>(
|
||||
&'a self,
|
||||
room_id: &RoomId,
|
||||
) -> impl Iterator<Item = Result<OwnedUserId>> + 'a {
|
||||
self.db.room_members_invited(room_id)
|
||||
}
|
||||
/// Returns an iterator over all invited members of a room.
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn room_members_invited<'a>(&'a self, room_id: &RoomId) -> impl Iterator<Item = Result<OwnedUserId>> + 'a {
|
||||
self.db.room_members_invited(room_id)
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn get_invite_count(&self, room_id: &RoomId, user_id: &UserId) -> Result<Option<u64>> {
|
||||
self.db.get_invite_count(room_id, user_id)
|
||||
}
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn get_invite_count(&self, room_id: &RoomId, user_id: &UserId) -> Result<Option<u64>> {
|
||||
self.db.get_invite_count(room_id, user_id)
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn get_left_count(&self, room_id: &RoomId, user_id: &UserId) -> Result<Option<u64>> {
|
||||
self.db.get_left_count(room_id, user_id)
|
||||
}
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn get_left_count(&self, room_id: &RoomId, user_id: &UserId) -> Result<Option<u64>> {
|
||||
self.db.get_left_count(room_id, user_id)
|
||||
}
|
||||
|
||||
/// Returns an iterator over all rooms this user joined.
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn rooms_joined<'a>(
|
||||
&'a self,
|
||||
user_id: &UserId,
|
||||
) -> impl Iterator<Item = Result<OwnedRoomId>> + 'a {
|
||||
self.db.rooms_joined(user_id)
|
||||
}
|
||||
/// Returns an iterator over all rooms this user joined.
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn rooms_joined<'a>(&'a self, user_id: &UserId) -> impl Iterator<Item = Result<OwnedRoomId>> + 'a {
|
||||
self.db.rooms_joined(user_id)
|
||||
}
|
||||
|
||||
/// Returns an iterator over all rooms a user was invited to.
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn rooms_invited<'a>(
|
||||
&'a self,
|
||||
user_id: &UserId,
|
||||
) -> impl Iterator<Item = Result<(OwnedRoomId, Vec<Raw<AnyStrippedStateEvent>>)>> + 'a {
|
||||
self.db.rooms_invited(user_id)
|
||||
}
|
||||
/// Returns an iterator over all rooms a user was invited to.
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn rooms_invited<'a>(
|
||||
&'a self, user_id: &UserId,
|
||||
) -> impl Iterator<Item = Result<(OwnedRoomId, Vec<Raw<AnyStrippedStateEvent>>)>> + 'a {
|
||||
self.db.rooms_invited(user_id)
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn invite_state(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
room_id: &RoomId,
|
||||
) -> Result<Option<Vec<Raw<AnyStrippedStateEvent>>>> {
|
||||
self.db.invite_state(user_id, room_id)
|
||||
}
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn invite_state(&self, user_id: &UserId, room_id: &RoomId) -> Result<Option<Vec<Raw<AnyStrippedStateEvent>>>> {
|
||||
self.db.invite_state(user_id, room_id)
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn left_state(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
room_id: &RoomId,
|
||||
) -> Result<Option<Vec<Raw<AnyStrippedStateEvent>>>> {
|
||||
self.db.left_state(user_id, room_id)
|
||||
}
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn left_state(&self, user_id: &UserId, room_id: &RoomId) -> Result<Option<Vec<Raw<AnyStrippedStateEvent>>>> {
|
||||
self.db.left_state(user_id, room_id)
|
||||
}
|
||||
|
||||
/// Returns an iterator over all rooms a user left.
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn rooms_left<'a>(
|
||||
&'a self,
|
||||
user_id: &UserId,
|
||||
) -> impl Iterator<Item = Result<(OwnedRoomId, Vec<Raw<AnySyncStateEvent>>)>> + 'a {
|
||||
self.db.rooms_left(user_id)
|
||||
}
|
||||
/// Returns an iterator over all rooms a user left.
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn rooms_left<'a>(
|
||||
&'a self, user_id: &UserId,
|
||||
) -> impl Iterator<Item = Result<(OwnedRoomId, Vec<Raw<AnySyncStateEvent>>)>> + 'a {
|
||||
self.db.rooms_left(user_id)
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn once_joined(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool> {
|
||||
self.db.once_joined(user_id, room_id)
|
||||
}
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn once_joined(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool> {
|
||||
self.db.once_joined(user_id, room_id)
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn is_joined(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool> {
|
||||
self.db.is_joined(user_id, room_id)
|
||||
}
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn is_joined(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool> { self.db.is_joined(user_id, room_id) }
|
||||
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn is_invited(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool> {
|
||||
self.db.is_invited(user_id, room_id)
|
||||
}
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn is_invited(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool> {
|
||||
self.db.is_invited(user_id, room_id)
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn is_left(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool> {
|
||||
self.db.is_left(user_id, room_id)
|
||||
}
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn is_left(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool> { self.db.is_left(user_id, room_id) }
|
||||
}
|
||||
|
|
|
@ -4,12 +4,12 @@ use super::CompressedStateEvent;
use crate::Result;

pub struct StateDiff {
    pub parent: Option<u64>,
    pub added: Arc<HashSet<CompressedStateEvent>>,
    pub removed: Arc<HashSet<CompressedStateEvent>>,
    pub parent: Option<u64>,
    pub added: Arc<HashSet<CompressedStateEvent>>,
    pub removed: Arc<HashSet<CompressedStateEvent>>,
}

pub trait Data: Send + Sync {
    fn get_statediff(&self, shortstatehash: u64) -> Result<StateDiff>;
    fn save_statediff(&self, shortstatehash: u64, diff: StateDiff) -> Result<()>;
    fn get_statediff(&self, shortstatehash: u64) -> Result<StateDiff>;
    fn save_statediff(&self, shortstatehash: u64, diff: StateDiff) -> Result<()>;
}
|
||||
|
|
|
@ -1,325 +1,276 @@
|
|||
pub mod data;
|
||||
use std::{
|
||||
collections::HashSet,
|
||||
mem::size_of,
|
||||
sync::{Arc, Mutex},
|
||||
collections::HashSet,
|
||||
mem::size_of,
|
||||
sync::{Arc, Mutex},
|
||||
};
|
||||
|
||||
pub use data::Data;
|
||||
use lru_cache::LruCache;
|
||||
use ruma::{EventId, RoomId};
|
||||
|
||||
use self::data::StateDiff;
|
||||
use crate::{services, utils, Result};
|
||||
|
||||
use self::data::StateDiff;
|
||||
|
||||
type StateInfoLruCache = Mutex<
|
||||
LruCache<
|
||||
u64,
|
||||
Vec<(
|
||||
u64, // sstatehash
|
||||
Arc<HashSet<CompressedStateEvent>>, // full state
|
||||
Arc<HashSet<CompressedStateEvent>>, // added
|
||||
Arc<HashSet<CompressedStateEvent>>, // removed
|
||||
)>,
|
||||
>,
|
||||
LruCache<
|
||||
u64,
|
||||
Vec<(
|
||||
u64, // sstatehash
|
||||
Arc<HashSet<CompressedStateEvent>>, // full state
|
||||
Arc<HashSet<CompressedStateEvent>>, // added
|
||||
Arc<HashSet<CompressedStateEvent>>, // removed
|
||||
)>,
|
||||
>,
|
||||
>;
|
||||
|
||||
type ShortStateInfoResult = Result<
|
||||
Vec<(
|
||||
u64, // sstatehash
|
||||
Arc<HashSet<CompressedStateEvent>>, // full state
|
||||
Arc<HashSet<CompressedStateEvent>>, // added
|
||||
Arc<HashSet<CompressedStateEvent>>, // removed
|
||||
)>,
|
||||
Vec<(
|
||||
u64, // sstatehash
|
||||
Arc<HashSet<CompressedStateEvent>>, // full state
|
||||
Arc<HashSet<CompressedStateEvent>>, // added
|
||||
Arc<HashSet<CompressedStateEvent>>, // removed
|
||||
)>,
|
||||
>;
|
||||
|
||||
type ParentStatesVec = Vec<(
|
||||
u64, // sstatehash
|
||||
Arc<HashSet<CompressedStateEvent>>, // full state
|
||||
Arc<HashSet<CompressedStateEvent>>, // added
|
||||
Arc<HashSet<CompressedStateEvent>>, // removed
|
||||
u64, // sstatehash
|
||||
Arc<HashSet<CompressedStateEvent>>, // full state
|
||||
Arc<HashSet<CompressedStateEvent>>, // added
|
||||
Arc<HashSet<CompressedStateEvent>>, // removed
|
||||
)>;
|
||||
|
||||
type HashSetCompressStateEvent = Result<(
|
||||
u64,
|
||||
Arc<HashSet<CompressedStateEvent>>,
|
||||
Arc<HashSet<CompressedStateEvent>>,
|
||||
)>;
|
||||
type HashSetCompressStateEvent = Result<(u64, Arc<HashSet<CompressedStateEvent>>, Arc<HashSet<CompressedStateEvent>>)>;
|
||||
|
||||
pub struct Service {
|
||||
pub db: &'static dyn Data,
|
||||
pub db: &'static dyn Data,
|
||||
|
||||
pub stateinfo_cache: StateInfoLruCache,
|
||||
pub stateinfo_cache: StateInfoLruCache,
|
||||
}
|
||||
|
||||
pub type CompressedStateEvent = [u8; 2 * size_of::<u64>()];
|
||||
|
||||
impl Service {
|
||||
/// Returns a stack with info on shortstatehash, full state, added diff and removed diff for the selected shortstatehash and each parent layer.
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn load_shortstatehash_info(&self, shortstatehash: u64) -> ShortStateInfoResult {
|
||||
if let Some(r) = self
|
||||
.stateinfo_cache
|
||||
.lock()
|
||||
.unwrap()
|
||||
.get_mut(&shortstatehash)
|
||||
{
|
||||
return Ok(r.clone());
|
||||
}
|
||||
/// Returns a stack with info on shortstatehash, full state, added diff and
|
||||
/// removed diff for the selected shortstatehash and each parent layer.
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn load_shortstatehash_info(&self, shortstatehash: u64) -> ShortStateInfoResult {
|
||||
if let Some(r) = self.stateinfo_cache.lock().unwrap().get_mut(&shortstatehash) {
|
||||
return Ok(r.clone());
|
||||
}
|
||||
|
||||
let StateDiff {
|
||||
parent,
|
||||
added,
|
||||
removed,
|
||||
} = self.db.get_statediff(shortstatehash)?;
|
||||
let StateDiff {
|
||||
parent,
|
||||
added,
|
||||
removed,
|
||||
} = self.db.get_statediff(shortstatehash)?;
|
||||
|
||||
if let Some(parent) = parent {
|
||||
let mut response = self.load_shortstatehash_info(parent)?;
|
||||
let mut state = (*response.last().unwrap().1).clone();
|
||||
state.extend(added.iter().copied());
|
||||
let removed = (*removed).clone();
|
||||
for r in &removed {
|
||||
state.remove(r);
|
||||
}
|
||||
if let Some(parent) = parent {
|
||||
let mut response = self.load_shortstatehash_info(parent)?;
|
||||
let mut state = (*response.last().unwrap().1).clone();
|
||||
state.extend(added.iter().copied());
|
||||
let removed = (*removed).clone();
|
||||
for r in &removed {
|
||||
state.remove(r);
|
||||
}
|
||||
|
||||
response.push((shortstatehash, Arc::new(state), added, Arc::new(removed)));
|
||||
response.push((shortstatehash, Arc::new(state), added, Arc::new(removed)));
|
||||
|
||||
self.stateinfo_cache
|
||||
.lock()
|
||||
.unwrap()
|
||||
.insert(shortstatehash, response.clone());
|
||||
self.stateinfo_cache.lock().unwrap().insert(shortstatehash, response.clone());
|
||||
|
||||
Ok(response)
|
||||
} else {
|
||||
let response = vec![(shortstatehash, added.clone(), added, removed)];
|
||||
self.stateinfo_cache
|
||||
.lock()
|
||||
.unwrap()
|
||||
.insert(shortstatehash, response.clone());
|
||||
Ok(response)
|
||||
}
|
||||
}
|
||||
Ok(response)
|
||||
} else {
|
||||
let response = vec![(shortstatehash, added.clone(), added, removed)];
|
||||
self.stateinfo_cache.lock().unwrap().insert(shortstatehash, response.clone());
|
||||
Ok(response)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn compress_state_event(
|
||||
&self,
|
||||
shortstatekey: u64,
|
||||
event_id: &EventId,
|
||||
) -> Result<CompressedStateEvent> {
|
||||
let mut v = shortstatekey.to_be_bytes().to_vec();
|
||||
v.extend_from_slice(
|
||||
&services()
|
||||
.rooms
|
||||
.short
|
||||
.get_or_create_shorteventid(event_id)?
|
||||
.to_be_bytes(),
|
||||
);
|
||||
Ok(v.try_into().expect("we checked the size above"))
|
||||
}
|
||||
pub fn compress_state_event(&self, shortstatekey: u64, event_id: &EventId) -> Result<CompressedStateEvent> {
|
||||
let mut v = shortstatekey.to_be_bytes().to_vec();
|
||||
v.extend_from_slice(&services().rooms.short.get_or_create_shorteventid(event_id)?.to_be_bytes());
|
||||
Ok(v.try_into().expect("we checked the size above"))
|
||||
}
|
||||
|
||||
/// Returns shortstatekey, event id
|
||||
pub fn parse_compressed_state_event(
|
||||
&self,
|
||||
compressed_event: &CompressedStateEvent,
|
||||
) -> Result<(u64, Arc<EventId>)> {
|
||||
Ok((
|
||||
utils::u64_from_bytes(&compressed_event[0..size_of::<u64>()])
|
||||
.expect("bytes have right length"),
|
||||
services().rooms.short.get_eventid_from_short(
|
||||
utils::u64_from_bytes(&compressed_event[size_of::<u64>()..])
|
||||
.expect("bytes have right length"),
|
||||
)?,
|
||||
))
|
||||
}
|
||||
/// Returns shortstatekey, event id
|
||||
pub fn parse_compressed_state_event(&self, compressed_event: &CompressedStateEvent) -> Result<(u64, Arc<EventId>)> {
|
||||
Ok((
|
||||
utils::u64_from_bytes(&compressed_event[0..size_of::<u64>()]).expect("bytes have right length"),
|
||||
services().rooms.short.get_eventid_from_short(
|
||||
utils::u64_from_bytes(&compressed_event[size_of::<u64>()..]).expect("bytes have right length"),
|
||||
)?,
|
||||
))
|
||||
}
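For reference, here is a minimal, self-contained sketch of the byte layout these two helpers rely on: a CompressedStateEvent is just the big-endian shortstatekey followed by the big-endian shorteventid. The free functions below are illustrative only, they are not part of this diff, and they stand in for the service methods above.

// Sketch only: pack/unpack two u64 "short" IDs into the 16-byte
// CompressedStateEvent layout ([u8; 2 * size_of::<u64>()]).
fn compress(shortstatekey: u64, shorteventid: u64) -> [u8; 16] {
    let mut out = [0u8; 16];
    out[..8].copy_from_slice(&shortstatekey.to_be_bytes());
    out[8..].copy_from_slice(&shorteventid.to_be_bytes());
    out
}

fn parse(compressed: &[u8; 16]) -> (u64, u64) {
    let shortstatekey = u64::from_be_bytes(compressed[..8].try_into().expect("8 bytes"));
    let shorteventid = u64::from_be_bytes(compressed[8..].try_into().expect("8 bytes"));
    (shortstatekey, shorteventid)
}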
|
||||
|
||||
/// Creates a new shortstatehash that often is just a diff to an already existing
|
||||
/// shortstatehash and therefore very efficient.
|
||||
///
|
||||
/// There are multiple layers of diffs. The bottom layer 0 always contains the full state. Layer
|
||||
/// 1 contains diffs to states of layer 0, layer 2 diffs to layer 1 and so on. If layer n > 0
|
||||
/// grows too big, it will be combined with layer n-1 to create a new diff on layer n-1 that's
|
||||
/// based on layer n-2. If that layer is also too big, it will recursively fix above layers too.
|
||||
///
|
||||
/// * `shortstatehash` - Shortstatehash of this state
|
||||
/// * `statediffnew` - Added to base. Each vec is shortstatekey+shorteventid
|
||||
/// * `statediffremoved` - Removed from base. Each vec is shortstatekey+shorteventid
|
||||
/// * `diff_to_sibling` - Approximately how much the diff grows each time for this layer
|
||||
/// * `parent_states` - A stack with info on shortstatehash, full state, added diff and removed diff for each parent layer
|
||||
#[tracing::instrument(skip(
|
||||
self,
|
||||
statediffnew,
|
||||
statediffremoved,
|
||||
diff_to_sibling,
|
||||
parent_states
|
||||
))]
|
||||
pub fn save_state_from_diff(
|
||||
&self,
|
||||
shortstatehash: u64,
|
||||
statediffnew: Arc<HashSet<CompressedStateEvent>>,
|
||||
statediffremoved: Arc<HashSet<CompressedStateEvent>>,
|
||||
diff_to_sibling: usize,
|
||||
mut parent_states: ParentStatesVec,
|
||||
) -> Result<()> {
|
||||
let diffsum = statediffnew.len() + statediffremoved.len();
|
||||
/// Creates a new shortstatehash that often is just a diff to an already
|
||||
/// existing shortstatehash and therefore very efficient.
|
||||
///
|
||||
/// There are multiple layers of diffs. The bottom layer 0 always contains
|
||||
/// the full state. Layer 1 contains diffs to states of layer 0, layer 2
|
||||
/// diffs to layer 1 and so on. If layer n > 0 grows too big, it will be
|
||||
/// combined with layer n-1 to create a new diff on layer n-1 that's
|
||||
/// based on layer n-2. If that layer is also too big, it will recursively
|
||||
/// fix above layers too.
|
||||
///
|
||||
/// * `shortstatehash` - Shortstatehash of this state
|
||||
/// * `statediffnew` - Added to base. Each vec is shortstatekey+shorteventid
|
||||
/// * `statediffremoved` - Removed from base. Each vec is
|
||||
/// shortstatekey+shorteventid
|
||||
/// * `diff_to_sibling` - Approximately how much the diff grows each time
|
||||
/// for this layer
|
||||
/// * `parent_states` - A stack with info on shortstatehash, full state,
|
||||
/// added diff and removed diff for each parent layer
|
||||
#[tracing::instrument(skip(self, statediffnew, statediffremoved, diff_to_sibling, parent_states))]
|
||||
pub fn save_state_from_diff(
|
||||
&self, shortstatehash: u64, statediffnew: Arc<HashSet<CompressedStateEvent>>,
|
||||
statediffremoved: Arc<HashSet<CompressedStateEvent>>, diff_to_sibling: usize,
|
||||
mut parent_states: ParentStatesVec,
|
||||
) -> Result<()> {
|
||||
let diffsum = statediffnew.len() + statediffremoved.len();
|
||||
|
||||
if parent_states.len() > 3 {
|
||||
// Number of layers
|
||||
// Too many layers, we have to go deeper
|
||||
let parent = parent_states.pop().unwrap();
|
||||
if parent_states.len() > 3 {
|
||||
// Number of layers
|
||||
// Too many layers, we have to go deeper
|
||||
let parent = parent_states.pop().unwrap();
|
||||
|
||||
let mut parent_new = (*parent.2).clone();
|
||||
let mut parent_removed = (*parent.3).clone();
|
||||
let mut parent_new = (*parent.2).clone();
|
||||
let mut parent_removed = (*parent.3).clone();
|
||||
|
||||
for removed in statediffremoved.iter() {
|
||||
if !parent_new.remove(removed) {
|
||||
// It was not added in the parent and we removed it
|
||||
parent_removed.insert(*removed);
|
||||
}
|
||||
// Else it was added in the parent and we removed it again. We can forget this change
|
||||
}
|
||||
for removed in statediffremoved.iter() {
|
||||
if !parent_new.remove(removed) {
|
||||
// It was not added in the parent and we removed it
|
||||
parent_removed.insert(*removed);
|
||||
}
|
||||
// Else it was added in the parent and we removed it again. We
|
||||
// can forget this change
|
||||
}
|
||||
|
||||
for new in statediffnew.iter() {
|
||||
if !parent_removed.remove(new) {
|
||||
// It was not touched in the parent and we added it
|
||||
parent_new.insert(*new);
|
||||
}
|
||||
// Else it was removed in the parent and we added it again. We can forget this change
|
||||
}
|
||||
for new in statediffnew.iter() {
|
||||
if !parent_removed.remove(new) {
|
||||
// It was not touched in the parent and we added it
|
||||
parent_new.insert(*new);
|
||||
}
|
||||
// Else it was removed in the parent and we added it again. We
|
||||
// can forget this change
|
||||
}
|
||||
|
||||
self.save_state_from_diff(
|
||||
shortstatehash,
|
||||
Arc::new(parent_new),
|
||||
Arc::new(parent_removed),
|
||||
diffsum,
|
||||
parent_states,
|
||||
)?;
|
||||
self.save_state_from_diff(
|
||||
shortstatehash,
|
||||
Arc::new(parent_new),
|
||||
Arc::new(parent_removed),
|
||||
diffsum,
|
||||
parent_states,
|
||||
)?;
|
||||
|
||||
return Ok(());
|
||||
}
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if parent_states.is_empty() {
|
||||
// There is no parent layer, create a new state
|
||||
self.db.save_statediff(
|
||||
shortstatehash,
|
||||
StateDiff {
|
||||
parent: None,
|
||||
added: statediffnew,
|
||||
removed: statediffremoved,
|
||||
},
|
||||
)?;
|
||||
if parent_states.is_empty() {
|
||||
// There is no parent layer, create a new state
|
||||
self.db.save_statediff(
|
||||
shortstatehash,
|
||||
StateDiff {
|
||||
parent: None,
|
||||
added: statediffnew,
|
||||
removed: statediffremoved,
|
||||
},
|
||||
)?;
|
||||
|
||||
return Ok(());
|
||||
};
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
// Else we have two options.
|
||||
// 1. We add the current diff on top of the parent layer.
|
||||
// 2. We replace a layer above
|
||||
// Else we have two options.
|
||||
// 1. We add the current diff on top of the parent layer.
|
||||
// 2. We replace a layer above
|
||||
|
||||
let parent = parent_states.pop().unwrap();
|
||||
let parent_diff = parent.2.len() + parent.3.len();
|
||||
let parent = parent_states.pop().unwrap();
|
||||
let parent_diff = parent.2.len() + parent.3.len();
|
||||
|
||||
if diffsum * diffsum >= 2 * diff_to_sibling * parent_diff {
|
||||
// Diff too big, we replace above layer(s)
|
||||
let mut parent_new = (*parent.2).clone();
|
||||
let mut parent_removed = (*parent.3).clone();
|
||||
if diffsum * diffsum >= 2 * diff_to_sibling * parent_diff {
|
||||
// Diff too big, we replace above layer(s)
|
||||
let mut parent_new = (*parent.2).clone();
|
||||
let mut parent_removed = (*parent.3).clone();
|
||||
|
||||
for removed in statediffremoved.iter() {
|
||||
if !parent_new.remove(removed) {
|
||||
// It was not added in the parent and we removed it
|
||||
parent_removed.insert(*removed);
|
||||
}
|
||||
// Else it was added in the parent and we removed it again. We can forget this change
|
||||
}
|
||||
for removed in statediffremoved.iter() {
|
||||
if !parent_new.remove(removed) {
|
||||
// It was not added in the parent and we removed it
|
||||
parent_removed.insert(*removed);
|
||||
}
|
||||
// Else it was added in the parent and we removed it again. We
|
||||
// can forget this change
|
||||
}
|
||||
|
||||
for new in statediffnew.iter() {
|
||||
if !parent_removed.remove(new) {
|
||||
// It was not touched in the parent and we added it
|
||||
parent_new.insert(*new);
|
||||
}
|
||||
// Else it was removed in the parent and we added it again. We can forget this change
|
||||
}
|
||||
for new in statediffnew.iter() {
|
||||
if !parent_removed.remove(new) {
|
||||
// It was not touched in the parent and we added it
|
||||
parent_new.insert(*new);
|
||||
}
|
||||
// Else it was removed in the parent and we added it again. We
|
||||
// can forget this change
|
||||
}
|
||||
|
||||
self.save_state_from_diff(
|
||||
shortstatehash,
|
||||
Arc::new(parent_new),
|
||||
Arc::new(parent_removed),
|
||||
diffsum,
|
||||
parent_states,
|
||||
)?;
|
||||
} else {
|
||||
// Diff small enough, we add diff as layer on top of parent
|
||||
self.db.save_statediff(
|
||||
shortstatehash,
|
||||
StateDiff {
|
||||
parent: Some(parent.0),
|
||||
added: statediffnew,
|
||||
removed: statediffremoved,
|
||||
},
|
||||
)?;
|
||||
}
|
||||
self.save_state_from_diff(
|
||||
shortstatehash,
|
||||
Arc::new(parent_new),
|
||||
Arc::new(parent_removed),
|
||||
diffsum,
|
||||
parent_states,
|
||||
)?;
|
||||
} else {
|
||||
// Diff small enough, we add diff as layer on top of parent
|
||||
self.db.save_statediff(
|
||||
shortstatehash,
|
||||
StateDiff {
|
||||
parent: Some(parent.0),
|
||||
added: statediffnew,
|
||||
removed: statediffremoved,
|
||||
},
|
||||
)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
Ok(())
|
||||
}
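As a companion to the doc comment above, this is a small self-contained sketch of the fold that save_state_from_diff applies when a layer grows too large: the child's additions and removals are merged into the parent layer, cancelling changes that undo each other. It uses plain u64 keys instead of CompressedStateEvent and is illustrative rather than part of the diff.

use std::collections::HashSet;

// Sketch only: merge a child diff (added/removed) into its parent layer's
// diff, mirroring the loops in save_state_from_diff. Uses u64 keys for brevity.
fn fold_into_parent(
    added: &HashSet<u64>,
    removed: &HashSet<u64>,
    parent_added: &mut HashSet<u64>,
    parent_removed: &mut HashSet<u64>,
) {
    for r in removed {
        if !parent_added.remove(r) {
            // The parent did not add this entry, so the removal is recorded there.
            parent_removed.insert(*r);
        }
        // Otherwise the parent added it and this layer removed it again:
        // the two changes cancel out and can be forgotten.
    }
    for n in added {
        if !parent_removed.remove(n) {
            // The parent did not remove this entry, so the addition is recorded there.
            parent_added.insert(*n);
        }
        // Otherwise the parent removed it and this layer re-added it:
        // again the changes cancel out.
    }
}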
|
||||
|
||||
/// Returns the new shortstatehash, and the state diff from the previous room state
|
||||
pub fn save_state(
|
||||
&self,
|
||||
room_id: &RoomId,
|
||||
new_state_ids_compressed: Arc<HashSet<CompressedStateEvent>>,
|
||||
) -> HashSetCompressStateEvent {
|
||||
let previous_shortstatehash = services().rooms.state.get_room_shortstatehash(room_id)?;
|
||||
/// Returns the new shortstatehash, and the state diff from the previous
|
||||
/// room state
|
||||
pub fn save_state(
|
||||
&self, room_id: &RoomId, new_state_ids_compressed: Arc<HashSet<CompressedStateEvent>>,
|
||||
) -> HashSetCompressStateEvent {
|
||||
let previous_shortstatehash = services().rooms.state.get_room_shortstatehash(room_id)?;
|
||||
|
||||
let state_hash = utils::calculate_hash(
|
||||
&new_state_ids_compressed
|
||||
.iter()
|
||||
.map(|bytes| &bytes[..])
|
||||
.collect::<Vec<_>>(),
|
||||
);
|
||||
let state_hash =
|
||||
utils::calculate_hash(&new_state_ids_compressed.iter().map(|bytes| &bytes[..]).collect::<Vec<_>>());
|
||||
|
||||
let (new_shortstatehash, already_existed) = services()
|
||||
.rooms
|
||||
.short
|
||||
.get_or_create_shortstatehash(&state_hash)?;
|
||||
let (new_shortstatehash, already_existed) = services().rooms.short.get_or_create_shortstatehash(&state_hash)?;
|
||||
|
||||
if Some(new_shortstatehash) == previous_shortstatehash {
|
||||
return Ok((
|
||||
new_shortstatehash,
|
||||
Arc::new(HashSet::new()),
|
||||
Arc::new(HashSet::new()),
|
||||
));
|
||||
}
|
||||
if Some(new_shortstatehash) == previous_shortstatehash {
|
||||
return Ok((new_shortstatehash, Arc::new(HashSet::new()), Arc::new(HashSet::new())));
|
||||
}
|
||||
|
||||
let states_parents = previous_shortstatehash
|
||||
.map_or_else(|| Ok(Vec::new()), |p| self.load_shortstatehash_info(p))?;
|
||||
let states_parents =
|
||||
previous_shortstatehash.map_or_else(|| Ok(Vec::new()), |p| self.load_shortstatehash_info(p))?;
|
||||
|
||||
let (statediffnew, statediffremoved) = if let Some(parent_stateinfo) = states_parents.last()
|
||||
{
|
||||
let statediffnew: HashSet<_> = new_state_ids_compressed
|
||||
.difference(&parent_stateinfo.1)
|
||||
.copied()
|
||||
.collect();
|
||||
let (statediffnew, statediffremoved) = if let Some(parent_stateinfo) = states_parents.last() {
|
||||
let statediffnew: HashSet<_> = new_state_ids_compressed.difference(&parent_stateinfo.1).copied().collect();
|
||||
|
||||
let statediffremoved: HashSet<_> = parent_stateinfo
|
||||
.1
|
||||
.difference(&new_state_ids_compressed)
|
||||
.copied()
|
||||
.collect();
|
||||
let statediffremoved: HashSet<_> =
|
||||
parent_stateinfo.1.difference(&new_state_ids_compressed).copied().collect();
|
||||
|
||||
(Arc::new(statediffnew), Arc::new(statediffremoved))
|
||||
} else {
|
||||
(new_state_ids_compressed, Arc::new(HashSet::new()))
|
||||
};
|
||||
(Arc::new(statediffnew), Arc::new(statediffremoved))
|
||||
} else {
|
||||
(new_state_ids_compressed, Arc::new(HashSet::new()))
|
||||
};
|
||||
|
||||
if !already_existed {
|
||||
self.save_state_from_diff(
|
||||
new_shortstatehash,
|
||||
statediffnew.clone(),
|
||||
statediffremoved.clone(),
|
||||
2, // every state change is 2 event changes on average
|
||||
states_parents,
|
||||
)?;
|
||||
};
|
||||
if !already_existed {
|
||||
self.save_state_from_diff(
|
||||
new_shortstatehash,
|
||||
statediffnew.clone(),
|
||||
statediffremoved.clone(),
|
||||
2, // every state change is 2 event changes on average
|
||||
states_parents,
|
||||
)?;
|
||||
};
|
||||
|
||||
Ok((new_shortstatehash, statediffnew, statediffremoved))
|
||||
}
|
||||
Ok((new_shortstatehash, statediffnew, statediffremoved))
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,17 +1,14 @@
use crate::{PduEvent, Result};
use ruma::{api::client::threads::get_threads::v1::IncludeThreads, OwnedUserId, RoomId, UserId};

use crate::{PduEvent, Result};

type PduEventIterResult<'a> = Result<Box<dyn Iterator<Item = Result<(u64, PduEvent)>> + 'a>>;

pub trait Data: Send + Sync {
    fn threads_until<'a>(
        &'a self,
        user_id: &'a UserId,
        room_id: &'a RoomId,
        until: u64,
        include: &'a IncludeThreads,
    ) -> PduEventIterResult<'a>;
    fn threads_until<'a>(
        &'a self, user_id: &'a UserId, room_id: &'a RoomId, until: u64, include: &'a IncludeThreads,
    ) -> PduEventIterResult<'a>;

    fn update_participants(&self, root_id: &[u8], participants: &[OwnedUserId]) -> Result<()>;
    fn get_participants(&self, root_id: &[u8]) -> Result<Option<Vec<OwnedUserId>>>;
    fn update_participants(&self, root_id: &[u8], participants: &[OwnedUserId]) -> Result<()>;
    fn get_participants(&self, root_id: &[u8]) -> Result<Option<Vec<OwnedUserId>>>;
}
|
||||
|
|
|
@ -4,115 +4,92 @@ use std::collections::BTreeMap;
|
|||
|
||||
pub use data::Data;
|
||||
use ruma::{
|
||||
api::client::{error::ErrorKind, threads::get_threads::v1::IncludeThreads},
|
||||
events::relation::BundledThread,
|
||||
uint, CanonicalJsonValue, EventId, RoomId, UserId,
|
||||
api::client::{error::ErrorKind, threads::get_threads::v1::IncludeThreads},
|
||||
events::relation::BundledThread,
|
||||
uint, CanonicalJsonValue, EventId, RoomId, UserId,
|
||||
};
|
||||
|
||||
use serde_json::json;
|
||||
|
||||
use crate::{services, Error, PduEvent, Result};
|
||||
|
||||
pub struct Service {
|
||||
pub db: &'static dyn Data,
|
||||
pub db: &'static dyn Data,
|
||||
}
|
||||
|
||||
impl Service {
|
||||
pub fn threads_until<'a>(
|
||||
&'a self,
|
||||
user_id: &'a UserId,
|
||||
room_id: &'a RoomId,
|
||||
until: u64,
|
||||
include: &'a IncludeThreads,
|
||||
) -> Result<impl Iterator<Item = Result<(u64, PduEvent)>> + 'a> {
|
||||
self.db.threads_until(user_id, room_id, until, include)
|
||||
}
|
||||
pub fn threads_until<'a>(
|
||||
&'a self, user_id: &'a UserId, room_id: &'a RoomId, until: u64, include: &'a IncludeThreads,
|
||||
) -> Result<impl Iterator<Item = Result<(u64, PduEvent)>> + 'a> {
|
||||
self.db.threads_until(user_id, room_id, until, include)
|
||||
}
|
||||
|
||||
pub fn add_to_thread(&self, root_event_id: &EventId, pdu: &PduEvent) -> Result<()> {
|
||||
let root_id = &services()
|
||||
.rooms
|
||||
.timeline
|
||||
.get_pdu_id(root_event_id)?
|
||||
.ok_or_else(|| {
|
||||
Error::BadRequest(
|
||||
ErrorKind::InvalidParam,
|
||||
"Invalid event id in thread message",
|
||||
)
|
||||
})?;
|
||||
pub fn add_to_thread(&self, root_event_id: &EventId, pdu: &PduEvent) -> Result<()> {
|
||||
let root_id = &services()
|
||||
.rooms
|
||||
.timeline
|
||||
.get_pdu_id(root_event_id)?
|
||||
.ok_or_else(|| Error::BadRequest(ErrorKind::InvalidParam, "Invalid event id in thread message"))?;
|
||||
|
||||
let root_pdu = services()
|
||||
.rooms
|
||||
.timeline
|
||||
.get_pdu_from_id(root_id)?
|
||||
.ok_or_else(|| {
|
||||
Error::BadRequest(ErrorKind::InvalidParam, "Thread root pdu not found")
|
||||
})?;
|
||||
let root_pdu = services()
|
||||
.rooms
|
||||
.timeline
|
||||
.get_pdu_from_id(root_id)?
|
||||
.ok_or_else(|| Error::BadRequest(ErrorKind::InvalidParam, "Thread root pdu not found"))?;
|
||||
|
||||
let mut root_pdu_json = services()
|
||||
.rooms
|
||||
.timeline
|
||||
.get_pdu_json_from_id(root_id)?
|
||||
.ok_or_else(|| {
|
||||
Error::BadRequest(ErrorKind::InvalidParam, "Thread root pdu not found")
|
||||
})?;
|
||||
let mut root_pdu_json = services()
|
||||
.rooms
|
||||
.timeline
|
||||
.get_pdu_json_from_id(root_id)?
|
||||
.ok_or_else(|| Error::BadRequest(ErrorKind::InvalidParam, "Thread root pdu not found"))?;
|
||||
|
||||
if let CanonicalJsonValue::Object(unsigned) = root_pdu_json
|
||||
.entry("unsigned".to_owned())
|
||||
.or_insert_with(|| CanonicalJsonValue::Object(BTreeMap::default()))
|
||||
{
|
||||
if let Some(mut relations) = unsigned
|
||||
.get("m.relations")
|
||||
.and_then(|r| r.as_object())
|
||||
.and_then(|r| r.get("m.thread"))
|
||||
.and_then(|relations| {
|
||||
serde_json::from_value::<BundledThread>(relations.clone().into()).ok()
|
||||
})
|
||||
{
|
||||
// Thread already existed
|
||||
relations.count += uint!(1);
|
||||
relations.latest_event = pdu.to_message_like_event();
|
||||
if let CanonicalJsonValue::Object(unsigned) = root_pdu_json
|
||||
.entry("unsigned".to_owned())
|
||||
.or_insert_with(|| CanonicalJsonValue::Object(BTreeMap::default()))
|
||||
{
|
||||
if let Some(mut relations) = unsigned
|
||||
.get("m.relations")
|
||||
.and_then(|r| r.as_object())
|
||||
.and_then(|r| r.get("m.thread"))
|
||||
.and_then(|relations| serde_json::from_value::<BundledThread>(relations.clone().into()).ok())
|
||||
{
|
||||
// Thread already existed
|
||||
relations.count += uint!(1);
|
||||
relations.latest_event = pdu.to_message_like_event();
|
||||
|
||||
let content = serde_json::to_value(relations).expect("to_value always works");
|
||||
let content = serde_json::to_value(relations).expect("to_value always works");
|
||||
|
||||
unsigned.insert(
|
||||
"m.relations".to_owned(),
|
||||
json!({ "m.thread": content })
|
||||
.try_into()
|
||||
.expect("thread is valid json"),
|
||||
);
|
||||
} else {
|
||||
// New thread
|
||||
let relations = BundledThread {
|
||||
latest_event: pdu.to_message_like_event(),
|
||||
count: uint!(1),
|
||||
current_user_participated: true,
|
||||
};
|
||||
unsigned.insert(
|
||||
"m.relations".to_owned(),
|
||||
json!({ "m.thread": content }).try_into().expect("thread is valid json"),
|
||||
);
|
||||
} else {
|
||||
// New thread
|
||||
let relations = BundledThread {
|
||||
latest_event: pdu.to_message_like_event(),
|
||||
count: uint!(1),
|
||||
current_user_participated: true,
|
||||
};
|
||||
|
||||
let content = serde_json::to_value(relations).expect("to_value always works");
|
||||
let content = serde_json::to_value(relations).expect("to_value always works");
|
||||
|
||||
unsigned.insert(
|
||||
"m.relations".to_owned(),
|
||||
json!({ "m.thread": content })
|
||||
.try_into()
|
||||
.expect("thread is valid json"),
|
||||
);
|
||||
}
|
||||
unsigned.insert(
|
||||
"m.relations".to_owned(),
|
||||
json!({ "m.thread": content }).try_into().expect("thread is valid json"),
|
||||
);
|
||||
}
|
||||
|
||||
services()
|
||||
.rooms
|
||||
.timeline
|
||||
.replace_pdu(root_id, &root_pdu_json, &root_pdu)?;
|
||||
}
|
||||
services().rooms.timeline.replace_pdu(root_id, &root_pdu_json, &root_pdu)?;
|
||||
}
|
||||
|
||||
let mut users = Vec::new();
|
||||
if let Some(userids) = self.db.get_participants(root_id)? {
|
||||
users.extend_from_slice(&userids);
|
||||
users.push(pdu.sender.clone());
|
||||
} else {
|
||||
users.push(root_pdu.sender);
|
||||
users.push(pdu.sender.clone());
|
||||
}
|
||||
let mut users = Vec::new();
|
||||
if let Some(userids) = self.db.get_participants(root_id)? {
|
||||
users.extend_from_slice(&userids);
|
||||
users.push(pdu.sender.clone());
|
||||
} else {
|
||||
users.push(root_pdu.sender);
|
||||
users.push(pdu.sender.clone());
|
||||
}
|
||||
|
||||
self.db.update_participants(root_id, &users)
|
||||
}
|
||||
self.db.update_participants(root_id, &users)
|
||||
}
|
||||
}
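For context, add_to_thread maintains the bundled thread aggregation under the root event's unsigned field. The sketch below shows the assumed JSON shape with placeholder values; it follows the BundledThread fields used above and is not taken from this diff.

use serde_json::json;

// Sketch only: the assumed shape of the m.thread bundled aggregation kept
// under the thread root's `unsigned` field. All values are placeholders.
fn bundled_thread_sketch() -> serde_json::Value {
    json!({
        "unsigned": {
            "m.relations": {
                "m.thread": {
                    "latest_event": { "event_id": "$latest:example.org" },
                    "count": 2,
                    "current_user_participated": true
                }
            }
        }
    })
}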
|
||||
|
|
|
@ -2,92 +2,67 @@ use std::sync::Arc;
|
|||
|
||||
use ruma::{CanonicalJsonObject, EventId, OwnedUserId, RoomId, UserId};
|
||||
|
||||
use super::PduCount;
|
||||
use crate::{PduEvent, Result};
|
||||
|
||||
use super::PduCount;
|
||||
|
||||
pub trait Data: Send + Sync {
|
||||
fn last_timeline_count(&self, sender_user: &UserId, room_id: &RoomId) -> Result<PduCount>;
|
||||
fn last_timeline_count(&self, sender_user: &UserId, room_id: &RoomId) -> Result<PduCount>;
|
||||
|
||||
/// Returns the `count` of this pdu's id.
|
||||
fn get_pdu_count(&self, event_id: &EventId) -> Result<Option<PduCount>>;
|
||||
/// Returns the `count` of this pdu's id.
|
||||
fn get_pdu_count(&self, event_id: &EventId) -> Result<Option<PduCount>>;
|
||||
|
||||
/// Returns the json of a pdu.
|
||||
fn get_pdu_json(&self, event_id: &EventId) -> Result<Option<CanonicalJsonObject>>;
|
||||
/// Returns the json of a pdu.
|
||||
fn get_pdu_json(&self, event_id: &EventId) -> Result<Option<CanonicalJsonObject>>;
|
||||
|
||||
/// Returns the json of a pdu.
|
||||
fn get_non_outlier_pdu_json(&self, event_id: &EventId) -> Result<Option<CanonicalJsonObject>>;
|
||||
/// Returns the json of a pdu.
|
||||
fn get_non_outlier_pdu_json(&self, event_id: &EventId) -> Result<Option<CanonicalJsonObject>>;
|
||||
|
||||
/// Returns the pdu's id.
|
||||
fn get_pdu_id(&self, event_id: &EventId) -> Result<Option<Vec<u8>>>;
|
||||
/// Returns the pdu's id.
|
||||
fn get_pdu_id(&self, event_id: &EventId) -> Result<Option<Vec<u8>>>;
|
||||
|
||||
/// Returns the pdu.
|
||||
///
|
||||
/// Checks the `eventid_outlierpdu` Tree if not found in the timeline.
|
||||
fn get_non_outlier_pdu(&self, event_id: &EventId) -> Result<Option<PduEvent>>;
|
||||
/// Returns the pdu.
|
||||
///
|
||||
/// Checks the `eventid_outlierpdu` Tree if not found in the timeline.
|
||||
fn get_non_outlier_pdu(&self, event_id: &EventId) -> Result<Option<PduEvent>>;
|
||||
|
||||
/// Returns the pdu.
|
||||
///
|
||||
/// Checks the `eventid_outlierpdu` Tree if not found in the timeline.
|
||||
fn get_pdu(&self, event_id: &EventId) -> Result<Option<Arc<PduEvent>>>;
|
||||
/// Returns the pdu.
|
||||
///
|
||||
/// Checks the `eventid_outlierpdu` Tree if not found in the timeline.
|
||||
fn get_pdu(&self, event_id: &EventId) -> Result<Option<Arc<PduEvent>>>;
|
||||
|
||||
/// Returns the pdu.
|
||||
///
|
||||
/// This does __NOT__ check the outliers `Tree`.
|
||||
fn get_pdu_from_id(&self, pdu_id: &[u8]) -> Result<Option<PduEvent>>;
|
||||
/// Returns the pdu.
|
||||
///
|
||||
/// This does __NOT__ check the outliers `Tree`.
|
||||
fn get_pdu_from_id(&self, pdu_id: &[u8]) -> Result<Option<PduEvent>>;
|
||||
|
||||
/// Returns the pdu as a `BTreeMap<String, CanonicalJsonValue>`.
|
||||
fn get_pdu_json_from_id(&self, pdu_id: &[u8]) -> Result<Option<CanonicalJsonObject>>;
|
||||
/// Returns the pdu as a `BTreeMap<String, CanonicalJsonValue>`.
|
||||
fn get_pdu_json_from_id(&self, pdu_id: &[u8]) -> Result<Option<CanonicalJsonObject>>;
|
||||
|
||||
/// Adds a new pdu to the timeline
|
||||
fn append_pdu(
|
||||
&self,
|
||||
pdu_id: &[u8],
|
||||
pdu: &PduEvent,
|
||||
json: &CanonicalJsonObject,
|
||||
count: u64,
|
||||
) -> Result<()>;
|
||||
/// Adds a new pdu to the timeline
|
||||
fn append_pdu(&self, pdu_id: &[u8], pdu: &PduEvent, json: &CanonicalJsonObject, count: u64) -> Result<()>;
|
||||
|
||||
// Adds a new pdu to the backfilled timeline
|
||||
fn prepend_backfill_pdu(
|
||||
&self,
|
||||
pdu_id: &[u8],
|
||||
event_id: &EventId,
|
||||
json: &CanonicalJsonObject,
|
||||
) -> Result<()>;
|
||||
// Adds a new pdu to the backfilled timeline
|
||||
fn prepend_backfill_pdu(&self, pdu_id: &[u8], event_id: &EventId, json: &CanonicalJsonObject) -> Result<()>;
|
||||
|
||||
/// Removes a pdu and creates a new one with the same id.
|
||||
fn replace_pdu(
|
||||
&self,
|
||||
pdu_id: &[u8],
|
||||
pdu_json: &CanonicalJsonObject,
|
||||
pdu: &PduEvent,
|
||||
) -> Result<()>;
|
||||
/// Removes a pdu and creates a new one with the same id.
|
||||
fn replace_pdu(&self, pdu_id: &[u8], pdu_json: &CanonicalJsonObject, pdu: &PduEvent) -> Result<()>;
|
||||
|
||||
/// Returns an iterator over all events and their tokens in a room that happened before the
|
||||
/// event with id `until` in reverse-chronological order.
|
||||
#[allow(clippy::type_complexity)]
|
||||
fn pdus_until<'a>(
|
||||
&'a self,
|
||||
user_id: &UserId,
|
||||
room_id: &RoomId,
|
||||
until: PduCount,
|
||||
) -> Result<Box<dyn Iterator<Item = Result<(PduCount, PduEvent)>> + 'a>>;
|
||||
/// Returns an iterator over all events and their tokens in a room that
|
||||
/// happened before the event with id `until` in reverse-chronological
|
||||
/// order.
|
||||
#[allow(clippy::type_complexity)]
|
||||
fn pdus_until<'a>(
|
||||
&'a self, user_id: &UserId, room_id: &RoomId, until: PduCount,
|
||||
) -> Result<Box<dyn Iterator<Item = Result<(PduCount, PduEvent)>> + 'a>>;
|
||||
|
||||
/// Returns an iterator over all events in a room that happened after the event with id `from`
|
||||
/// in chronological order.
|
||||
#[allow(clippy::type_complexity)]
|
||||
fn pdus_after<'a>(
|
||||
&'a self,
|
||||
user_id: &UserId,
|
||||
room_id: &RoomId,
|
||||
from: PduCount,
|
||||
) -> Result<Box<dyn Iterator<Item = Result<(PduCount, PduEvent)>> + 'a>>;
|
||||
/// Returns an iterator over all events in a room that happened after the
|
||||
/// event with id `from` in chronological order.
|
||||
#[allow(clippy::type_complexity)]
|
||||
fn pdus_after<'a>(
|
||||
&'a self, user_id: &UserId, room_id: &RoomId, from: PduCount,
|
||||
) -> Result<Box<dyn Iterator<Item = Result<(PduCount, PduEvent)>> + 'a>>;
|
||||
|
||||
fn increment_notification_counts(
|
||||
&self,
|
||||
room_id: &RoomId,
|
||||
notifies: Vec<OwnedUserId>,
|
||||
highlights: Vec<OwnedUserId>,
|
||||
) -> Result<()>;
|
||||
fn increment_notification_counts(
|
||||
&self, room_id: &RoomId, notifies: Vec<OwnedUserId>, highlights: Vec<OwnedUserId>,
|
||||
) -> Result<()>;
|
||||
}
|
||||
|
|
File diff suppressed because it is too large
|
@ -1,27 +1,22 @@
use crate::Result;
use ruma::{OwnedRoomId, OwnedUserId, RoomId, UserId};

use crate::Result;

pub trait Data: Send + Sync {
    fn reset_notification_counts(&self, user_id: &UserId, room_id: &RoomId) -> Result<()>;
    fn reset_notification_counts(&self, user_id: &UserId, room_id: &RoomId) -> Result<()>;

    fn notification_count(&self, user_id: &UserId, room_id: &RoomId) -> Result<u64>;
    fn notification_count(&self, user_id: &UserId, room_id: &RoomId) -> Result<u64>;

    fn highlight_count(&self, user_id: &UserId, room_id: &RoomId) -> Result<u64>;
    fn highlight_count(&self, user_id: &UserId, room_id: &RoomId) -> Result<u64>;

    // Returns the count at which the last reset_notification_counts was called
    fn last_notification_read(&self, user_id: &UserId, room_id: &RoomId) -> Result<u64>;
    // Returns the count at which the last reset_notification_counts was called
    fn last_notification_read(&self, user_id: &UserId, room_id: &RoomId) -> Result<u64>;

    fn associate_token_shortstatehash(
        &self,
        room_id: &RoomId,
        token: u64,
        shortstatehash: u64,
    ) -> Result<()>;
    fn associate_token_shortstatehash(&self, room_id: &RoomId, token: u64, shortstatehash: u64) -> Result<()>;

    fn get_token_shortstatehash(&self, room_id: &RoomId, token: u64) -> Result<Option<u64>>;
    fn get_token_shortstatehash(&self, room_id: &RoomId, token: u64) -> Result<Option<u64>>;

    fn get_shared_rooms<'a>(
        &'a self,
        users: Vec<OwnedUserId>,
    ) -> Result<Box<dyn Iterator<Item = Result<OwnedRoomId>> + 'a>>;
    fn get_shared_rooms<'a>(
        &'a self, users: Vec<OwnedUserId>,
    ) -> Result<Box<dyn Iterator<Item = Result<OwnedRoomId>> + 'a>>;
}
|
||||
|
|
|
@ -6,44 +6,35 @@ use ruma::{OwnedRoomId, OwnedUserId, RoomId, UserId};
|
|||
use crate::Result;
|
||||
|
||||
pub struct Service {
|
||||
pub db: &'static dyn Data,
|
||||
pub db: &'static dyn Data,
|
||||
}
|
||||
|
||||
impl Service {
|
||||
pub fn reset_notification_counts(&self, user_id: &UserId, room_id: &RoomId) -> Result<()> {
|
||||
self.db.reset_notification_counts(user_id, room_id)
|
||||
}
|
||||
pub fn reset_notification_counts(&self, user_id: &UserId, room_id: &RoomId) -> Result<()> {
|
||||
self.db.reset_notification_counts(user_id, room_id)
|
||||
}
|
||||
|
||||
pub fn notification_count(&self, user_id: &UserId, room_id: &RoomId) -> Result<u64> {
|
||||
self.db.notification_count(user_id, room_id)
|
||||
}
|
||||
pub fn notification_count(&self, user_id: &UserId, room_id: &RoomId) -> Result<u64> {
|
||||
self.db.notification_count(user_id, room_id)
|
||||
}
|
||||
|
||||
pub fn highlight_count(&self, user_id: &UserId, room_id: &RoomId) -> Result<u64> {
|
||||
self.db.highlight_count(user_id, room_id)
|
||||
}
|
||||
pub fn highlight_count(&self, user_id: &UserId, room_id: &RoomId) -> Result<u64> {
|
||||
self.db.highlight_count(user_id, room_id)
|
||||
}
|
||||
|
||||
pub fn last_notification_read(&self, user_id: &UserId, room_id: &RoomId) -> Result<u64> {
|
||||
self.db.last_notification_read(user_id, room_id)
|
||||
}
|
||||
pub fn last_notification_read(&self, user_id: &UserId, room_id: &RoomId) -> Result<u64> {
|
||||
self.db.last_notification_read(user_id, room_id)
|
||||
}
|
||||
|
||||
pub fn associate_token_shortstatehash(
|
||||
&self,
|
||||
room_id: &RoomId,
|
||||
token: u64,
|
||||
shortstatehash: u64,
|
||||
) -> Result<()> {
|
||||
self.db
|
||||
.associate_token_shortstatehash(room_id, token, shortstatehash)
|
||||
}
|
||||
pub fn associate_token_shortstatehash(&self, room_id: &RoomId, token: u64, shortstatehash: u64) -> Result<()> {
|
||||
self.db.associate_token_shortstatehash(room_id, token, shortstatehash)
|
||||
}
|
||||
|
||||
pub fn get_token_shortstatehash(&self, room_id: &RoomId, token: u64) -> Result<Option<u64>> {
|
||||
self.db.get_token_shortstatehash(room_id, token)
|
||||
}
|
||||
pub fn get_token_shortstatehash(&self, room_id: &RoomId, token: u64) -> Result<Option<u64>> {
|
||||
self.db.get_token_shortstatehash(room_id, token)
|
||||
}
|
||||
|
||||
pub fn get_shared_rooms(
|
||||
&self,
|
||||
users: Vec<OwnedUserId>,
|
||||
) -> Result<impl Iterator<Item = Result<OwnedRoomId>>> {
|
||||
self.db.get_shared_rooms(users)
|
||||
}
|
||||
pub fn get_shared_rooms(&self, users: Vec<OwnedUserId>) -> Result<impl Iterator<Item = Result<OwnedRoomId>>> {
|
||||
self.db.get_shared_rooms(users)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,28 +1,22 @@
|
|||
use ruma::ServerName;
|
||||
|
||||
use super::{OutgoingKind, SendingEventType};
|
||||
use crate::Result;
|
||||
|
||||
use super::{OutgoingKind, SendingEventType};
|
||||
|
||||
type OutgoingSendingIter<'a> =
|
||||
Box<dyn Iterator<Item = Result<(Vec<u8>, OutgoingKind, SendingEventType)>> + 'a>;
|
||||
type OutgoingSendingIter<'a> = Box<dyn Iterator<Item = Result<(Vec<u8>, OutgoingKind, SendingEventType)>> + 'a>;
|
||||
type SendingEventTypeIter<'a> = Box<dyn Iterator<Item = Result<(Vec<u8>, SendingEventType)>> + 'a>;
|
||||
|
||||
pub trait Data: Send + Sync {
|
||||
fn active_requests(&self) -> OutgoingSendingIter<'_>;
|
||||
fn active_requests_for(&self, outgoing_kind: &OutgoingKind) -> SendingEventTypeIter<'_>;
|
||||
fn delete_active_request(&self, key: Vec<u8>) -> Result<()>;
|
||||
fn delete_all_active_requests_for(&self, outgoing_kind: &OutgoingKind) -> Result<()>;
|
||||
fn delete_all_requests_for(&self, outgoing_kind: &OutgoingKind) -> Result<()>;
|
||||
fn queue_requests(
|
||||
&self,
|
||||
requests: &[(&OutgoingKind, SendingEventType)],
|
||||
) -> Result<Vec<Vec<u8>>>;
|
||||
fn queued_requests<'a>(
|
||||
&'a self,
|
||||
outgoing_kind: &OutgoingKind,
|
||||
) -> Box<dyn Iterator<Item = Result<(SendingEventType, Vec<u8>)>> + 'a>;
|
||||
fn mark_as_active(&self, events: &[(SendingEventType, Vec<u8>)]) -> Result<()>;
|
||||
fn set_latest_educount(&self, server_name: &ServerName, educount: u64) -> Result<()>;
|
||||
fn get_latest_educount(&self, server_name: &ServerName) -> Result<u64>;
|
||||
fn active_requests(&self) -> OutgoingSendingIter<'_>;
|
||||
fn active_requests_for(&self, outgoing_kind: &OutgoingKind) -> SendingEventTypeIter<'_>;
|
||||
fn delete_active_request(&self, key: Vec<u8>) -> Result<()>;
|
||||
fn delete_all_active_requests_for(&self, outgoing_kind: &OutgoingKind) -> Result<()>;
|
||||
fn delete_all_requests_for(&self, outgoing_kind: &OutgoingKind) -> Result<()>;
|
||||
fn queue_requests(&self, requests: &[(&OutgoingKind, SendingEventType)]) -> Result<Vec<Vec<u8>>>;
|
||||
fn queued_requests<'a>(
|
||||
&'a self, outgoing_kind: &OutgoingKind,
|
||||
) -> Box<dyn Iterator<Item = Result<(SendingEventType, Vec<u8>)>> + 'a>;
|
||||
fn mark_as_active(&self, events: &[(SendingEventType, Vec<u8>)]) -> Result<()>;
|
||||
fn set_latest_educount(&self, server_name: &ServerName, educount: u64) -> Result<()>;
|
||||
fn get_latest_educount(&self, server_name: &ServerName) -> Result<u64>;
|
||||
}
|
||||
|
|
File diff suppressed because it is too large
Load diff
|
@ -1,19 +1,13 @@
|
|||
use crate::Result;
|
||||
use ruma::{DeviceId, TransactionId, UserId};
|
||||
|
||||
pub trait Data: Send + Sync {
|
||||
fn add_txnid(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
device_id: Option<&DeviceId>,
|
||||
txn_id: &TransactionId,
|
||||
data: &[u8],
|
||||
) -> Result<()>;
|
||||
use crate::Result;
|
||||
|
||||
fn existing_txnid(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
device_id: Option<&DeviceId>,
|
||||
txn_id: &TransactionId,
|
||||
) -> Result<Option<Vec<u8>>>;
|
||||
pub trait Data: Send + Sync {
|
||||
fn add_txnid(
|
||||
&self, user_id: &UserId, device_id: Option<&DeviceId>, txn_id: &TransactionId, data: &[u8],
|
||||
) -> Result<()>;
|
||||
|
||||
fn existing_txnid(
|
||||
&self, user_id: &UserId, device_id: Option<&DeviceId>, txn_id: &TransactionId,
|
||||
) -> Result<Option<Vec<u8>>>;
|
||||
}
|
||||
|
|
|
@ -1,31 +1,24 @@
|
|||
mod data;
|
||||
|
||||
pub use data::Data;
|
||||
|
||||
use crate::Result;
|
||||
use ruma::{DeviceId, TransactionId, UserId};
|
||||
|
||||
use crate::Result;
|
||||
|
||||
pub struct Service {
|
||||
pub db: &'static dyn Data,
|
||||
pub db: &'static dyn Data,
|
||||
}
|
||||
|
||||
impl Service {
|
||||
pub fn add_txnid(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
device_id: Option<&DeviceId>,
|
||||
txn_id: &TransactionId,
|
||||
data: &[u8],
|
||||
) -> Result<()> {
|
||||
self.db.add_txnid(user_id, device_id, txn_id, data)
|
||||
}
|
||||
pub fn add_txnid(
|
||||
&self, user_id: &UserId, device_id: Option<&DeviceId>, txn_id: &TransactionId, data: &[u8],
|
||||
) -> Result<()> {
|
||||
self.db.add_txnid(user_id, device_id, txn_id, data)
|
||||
}
|
||||
|
||||
pub fn existing_txnid(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
device_id: Option<&DeviceId>,
|
||||
txn_id: &TransactionId,
|
||||
) -> Result<Option<Vec<u8>>> {
|
||||
self.db.existing_txnid(user_id, device_id, txn_id)
|
||||
}
|
||||
pub fn existing_txnid(
|
||||
&self, user_id: &UserId, device_id: Option<&DeviceId>, txn_id: &TransactionId,
|
||||
) -> Result<Option<Vec<u8>>> {
|
||||
self.db.existing_txnid(user_id, device_id, txn_id)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,34 +1,17 @@
|
|||
use crate::Result;
|
||||
use ruma::{api::client::uiaa::UiaaInfo, CanonicalJsonValue, DeviceId, UserId};
|
||||
|
||||
use crate::Result;
|
||||
|
||||
pub trait Data: Send + Sync {
|
||||
fn set_uiaa_request(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
device_id: &DeviceId,
|
||||
session: &str,
|
||||
request: &CanonicalJsonValue,
|
||||
) -> Result<()>;
|
||||
fn set_uiaa_request(
|
||||
&self, user_id: &UserId, device_id: &DeviceId, session: &str, request: &CanonicalJsonValue,
|
||||
) -> Result<()>;
|
||||
|
||||
fn get_uiaa_request(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
device_id: &DeviceId,
|
||||
session: &str,
|
||||
) -> Option<CanonicalJsonValue>;
|
||||
fn get_uiaa_request(&self, user_id: &UserId, device_id: &DeviceId, session: &str) -> Option<CanonicalJsonValue>;
|
||||
|
||||
fn update_uiaa_session(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
device_id: &DeviceId,
|
||||
session: &str,
|
||||
uiaainfo: Option<&UiaaInfo>,
|
||||
) -> Result<()>;
|
||||
fn update_uiaa_session(
|
||||
&self, user_id: &UserId, device_id: &DeviceId, session: &str, uiaainfo: Option<&UiaaInfo>,
|
||||
) -> Result<()>;
|
||||
|
||||
fn get_uiaa_session(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
device_id: &DeviceId,
|
||||
session: &str,
|
||||
) -> Result<UiaaInfo>;
|
||||
fn get_uiaa_session(&self, user_id: &UserId, device_id: &DeviceId, session: &str) -> Result<UiaaInfo>;
|
||||
}
|
||||
|
|
|
@ -2,159 +2,142 @@ mod data;
|
|||
|
||||
use argon2::{PasswordHash, PasswordVerifier};
|
||||
pub use data::Data;
|
||||
|
||||
use ruma::{
|
||||
api::client::{
|
||||
error::ErrorKind,
|
||||
uiaa::{AuthData, AuthType, Password, UiaaInfo, UserIdentifier},
|
||||
},
|
||||
CanonicalJsonValue, DeviceId, UserId,
|
||||
api::client::{
|
||||
error::ErrorKind,
|
||||
uiaa::{AuthData, AuthType, Password, UiaaInfo, UserIdentifier},
|
||||
},
|
||||
CanonicalJsonValue, DeviceId, UserId,
|
||||
};
|
||||
use tracing::error;
|
||||
|
||||
use crate::{api::client_server::SESSION_ID_LENGTH, services, utils, Error, Result};
|
||||
|
||||
pub struct Service {
|
||||
pub db: &'static dyn Data,
|
||||
pub db: &'static dyn Data,
|
||||
}
|
||||
|
||||
impl Service {
|
||||
/// Creates a new Uiaa session. Make sure the session token is unique.
|
||||
pub fn create(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
device_id: &DeviceId,
|
||||
uiaainfo: &UiaaInfo,
|
||||
json_body: &CanonicalJsonValue,
|
||||
) -> Result<()> {
|
||||
self.db.set_uiaa_request(
|
||||
user_id,
|
||||
device_id,
|
||||
uiaainfo.session.as_ref().expect("session should be set"), // TODO: better session error handling (why is it optional in ruma?)
|
||||
json_body,
|
||||
)?;
|
||||
self.db.update_uiaa_session(
|
||||
user_id,
|
||||
device_id,
|
||||
uiaainfo.session.as_ref().expect("session should be set"),
|
||||
Some(uiaainfo),
|
||||
)
|
||||
}
|
||||
/// Creates a new Uiaa session. Make sure the session token is unique.
|
||||
pub fn create(
|
||||
&self, user_id: &UserId, device_id: &DeviceId, uiaainfo: &UiaaInfo, json_body: &CanonicalJsonValue,
|
||||
) -> Result<()> {
|
||||
self.db.set_uiaa_request(
|
||||
user_id,
|
||||
device_id,
|
||||
uiaainfo.session.as_ref().expect("session should be set"), /* TODO: better session error handling (why
|
||||
* is it optional in ruma?) */
|
||||
json_body,
|
||||
)?;
|
||||
self.db.update_uiaa_session(
|
||||
user_id,
|
||||
device_id,
|
||||
uiaainfo.session.as_ref().expect("session should be set"),
|
||||
Some(uiaainfo),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn try_auth(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
device_id: &DeviceId,
|
||||
auth: &AuthData,
|
||||
uiaainfo: &UiaaInfo,
|
||||
) -> Result<(bool, UiaaInfo)> {
|
||||
let mut uiaainfo = auth.session().map_or_else(
|
||||
|| Ok(uiaainfo.clone()),
|
||||
|session| self.db.get_uiaa_session(user_id, device_id, session),
|
||||
)?;
|
||||
pub fn try_auth(
|
||||
&self, user_id: &UserId, device_id: &DeviceId, auth: &AuthData, uiaainfo: &UiaaInfo,
|
||||
) -> Result<(bool, UiaaInfo)> {
|
||||
let mut uiaainfo = auth.session().map_or_else(
|
||||
|| Ok(uiaainfo.clone()),
|
||||
|session| self.db.get_uiaa_session(user_id, device_id, session),
|
||||
)?;
|
||||
|
||||
if uiaainfo.session.is_none() {
|
||||
uiaainfo.session = Some(utils::random_string(SESSION_ID_LENGTH));
|
||||
}
|
||||
if uiaainfo.session.is_none() {
|
||||
uiaainfo.session = Some(utils::random_string(SESSION_ID_LENGTH));
|
||||
}
|
||||
|
||||
match auth {
|
||||
// Find out what the user completed
|
||||
AuthData::Password(Password {
|
||||
identifier,
|
||||
password,
|
||||
..
|
||||
}) => {
|
||||
let UserIdentifier::UserIdOrLocalpart(username) = identifier else {
|
||||
return Err(Error::BadRequest(
|
||||
ErrorKind::Unrecognized,
|
||||
"Identifier type not recognized.",
|
||||
));
|
||||
};
|
||||
match auth {
|
||||
// Find out what the user completed
|
||||
AuthData::Password(Password {
|
||||
identifier,
|
||||
password,
|
||||
..
|
||||
}) => {
|
||||
let UserIdentifier::UserIdOrLocalpart(username) = identifier else {
|
||||
return Err(Error::BadRequest(ErrorKind::Unrecognized, "Identifier type not recognized."));
|
||||
};
|
||||
|
||||
let user_id = UserId::parse_with_server_name(
|
||||
username.clone(),
|
||||
services().globals.server_name(),
|
||||
)
|
||||
.map_err(|_| Error::BadRequest(ErrorKind::InvalidParam, "User ID is invalid."))?;
|
||||
let user_id = UserId::parse_with_server_name(username.clone(), services().globals.server_name())
|
||||
.map_err(|_| Error::BadRequest(ErrorKind::InvalidParam, "User ID is invalid."))?;
|
||||
|
||||
// Check if password is correct
|
||||
if let Some(hash) = services().users.password_hash(&user_id)? {
|
||||
let hash_matches = services()
|
||||
.globals
|
||||
.argon
|
||||
.verify_password(
|
||||
password.as_bytes(),
|
||||
&PasswordHash::new(&hash).expect("valid hash in database"),
|
||||
)
|
||||
.is_ok();
|
||||
// Check if password is correct
|
||||
if let Some(hash) = services().users.password_hash(&user_id)? {
|
||||
let hash_matches = services()
|
||||
.globals
|
||||
.argon
|
||||
.verify_password(
|
||||
password.as_bytes(),
|
||||
&PasswordHash::new(&hash).expect("valid hash in database"),
|
||||
)
|
||||
.is_ok();
|
||||
|
||||
if !hash_matches {
|
||||
uiaainfo.auth_error = Some(ruma::api::client::error::StandardErrorBody {
|
||||
kind: ErrorKind::Forbidden,
|
||||
message: "Invalid username or password.".to_owned(),
|
||||
});
|
||||
return Ok((false, uiaainfo));
|
||||
}
|
||||
}
|
||||
if !hash_matches {
|
||||
uiaainfo.auth_error = Some(ruma::api::client::error::StandardErrorBody {
|
||||
kind: ErrorKind::Forbidden,
|
||||
message: "Invalid username or password.".to_owned(),
|
||||
});
|
||||
return Ok((false, uiaainfo));
|
||||
}
|
||||
}
|
||||
|
||||
// Password was correct! Let's add it to `completed`
|
||||
uiaainfo.completed.push(AuthType::Password);
|
||||
}
|
||||
AuthData::RegistrationToken(t) => {
|
||||
if Some(t.token.trim()) == services().globals.config.registration_token.as_deref() {
|
||||
uiaainfo.completed.push(AuthType::RegistrationToken);
|
||||
} else {
|
||||
uiaainfo.auth_error = Some(ruma::api::client::error::StandardErrorBody {
|
||||
kind: ErrorKind::Forbidden,
|
||||
message: "Invalid registration token.".to_owned(),
|
||||
});
|
||||
return Ok((false, uiaainfo));
|
||||
}
|
||||
}
|
||||
AuthData::Dummy(_) => {
|
||||
uiaainfo.completed.push(AuthType::Dummy);
|
||||
}
|
||||
k => error!("type not supported: {:?}", k),
|
||||
}
|
||||
// Password was correct! Let's add it to `completed`
|
||||
uiaainfo.completed.push(AuthType::Password);
|
||||
},
|
||||
AuthData::RegistrationToken(t) => {
|
||||
if Some(t.token.trim()) == services().globals.config.registration_token.as_deref() {
|
||||
uiaainfo.completed.push(AuthType::RegistrationToken);
|
||||
} else {
|
||||
uiaainfo.auth_error = Some(ruma::api::client::error::StandardErrorBody {
|
||||
kind: ErrorKind::Forbidden,
|
||||
message: "Invalid registration token.".to_owned(),
|
||||
});
|
||||
return Ok((false, uiaainfo));
|
||||
}
|
||||
},
|
||||
AuthData::Dummy(_) => {
|
||||
uiaainfo.completed.push(AuthType::Dummy);
|
||||
},
|
||||
k => error!("type not supported: {:?}", k),
|
||||
}
|
||||
|
||||
// Check if a flow now succeeds
|
||||
let mut completed = false;
|
||||
'flows: for flow in &mut uiaainfo.flows {
|
||||
for stage in &flow.stages {
|
||||
if !uiaainfo.completed.contains(stage) {
|
||||
continue 'flows;
|
||||
}
|
||||
}
|
||||
// We didn't break, so this flow succeeded!
|
||||
completed = true;
|
||||
}
|
||||
// Check if a flow now succeeds
|
||||
let mut completed = false;
|
||||
'flows: for flow in &mut uiaainfo.flows {
|
||||
for stage in &flow.stages {
|
||||
if !uiaainfo.completed.contains(stage) {
|
||||
continue 'flows;
|
||||
}
|
||||
}
|
||||
// We didn't break, so this flow succeeded!
|
||||
completed = true;
|
||||
}
|
||||
|
||||
if !completed {
|
||||
self.db.update_uiaa_session(
|
||||
user_id,
|
||||
device_id,
|
||||
uiaainfo.session.as_ref().expect("session is always set"),
|
||||
Some(&uiaainfo),
|
||||
)?;
|
||||
return Ok((false, uiaainfo));
|
||||
}
|
||||
if !completed {
|
||||
self.db.update_uiaa_session(
|
||||
user_id,
|
||||
device_id,
|
||||
uiaainfo.session.as_ref().expect("session is always set"),
|
||||
Some(&uiaainfo),
|
||||
)?;
|
||||
return Ok((false, uiaainfo));
|
||||
}
|
||||
|
||||
// UIAA was successful! Remove this session and return true
|
||||
self.db.update_uiaa_session(
|
||||
user_id,
|
||||
device_id,
|
||||
uiaainfo.session.as_ref().expect("session is always set"),
|
||||
None,
|
||||
)?;
|
||||
Ok((true, uiaainfo))
|
||||
}
|
||||
// UIAA was successful! Remove this session and return true
|
||||
self.db.update_uiaa_session(
|
||||
user_id,
|
||||
device_id,
|
||||
uiaainfo.session.as_ref().expect("session is always set"),
|
||||
None,
|
||||
)?;
|
||||
Ok((true, uiaainfo))
|
||||
}
|
||||
|
||||
pub fn get_uiaa_request(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
device_id: &DeviceId,
|
||||
session: &str,
|
||||
) -> Option<CanonicalJsonValue> {
|
||||
self.db.get_uiaa_request(user_id, device_id, session)
|
||||
}
|
||||
pub fn get_uiaa_request(
|
||||
&self, user_id: &UserId, device_id: &DeviceId, session: &str,
|
||||
) -> Option<CanonicalJsonValue> {
|
||||
self.db.get_uiaa_request(user_id, device_id, session)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,214 +1,146 @@
|
|||
use crate::Result;
|
||||
use ruma::{
|
||||
api::client::{device::Device, filter::FilterDefinition},
|
||||
encryption::{CrossSigningKey, DeviceKeys, OneTimeKey},
|
||||
events::AnyToDeviceEvent,
|
||||
serde::Raw,
|
||||
DeviceId, DeviceKeyAlgorithm, DeviceKeyId, OwnedDeviceId, OwnedDeviceKeyId, OwnedMxcUri,
|
||||
OwnedUserId, UInt, UserId,
|
||||
};
|
||||
use std::collections::BTreeMap;
|
||||
|
||||
use ruma::{
|
||||
api::client::{device::Device, filter::FilterDefinition},
|
||||
encryption::{CrossSigningKey, DeviceKeys, OneTimeKey},
|
||||
events::AnyToDeviceEvent,
|
||||
serde::Raw,
|
||||
DeviceId, DeviceKeyAlgorithm, DeviceKeyId, OwnedDeviceId, OwnedDeviceKeyId, OwnedMxcUri, OwnedUserId, UInt, UserId,
|
||||
};
|
||||
|
||||
use crate::Result;
|
||||
|
||||
pub trait Data: Send + Sync {
|
||||
/// Check if a user has an account on this homeserver.
|
||||
fn exists(&self, user_id: &UserId) -> Result<bool>;
|
||||
/// Check if a user has an account on this homeserver.
|
||||
fn exists(&self, user_id: &UserId) -> Result<bool>;
|
||||
|
||||
/// Check if account is deactivated
|
||||
fn is_deactivated(&self, user_id: &UserId) -> Result<bool>;
|
||||
/// Check if account is deactivated
|
||||
fn is_deactivated(&self, user_id: &UserId) -> Result<bool>;
|
||||
|
||||
/// Returns the number of users registered on this server.
|
||||
fn count(&self) -> Result<usize>;
|
||||
/// Returns the number of users registered on this server.
|
||||
fn count(&self) -> Result<usize>;
|
||||
|
||||
/// Find out which user an access token belongs to.
|
||||
fn find_from_token(&self, token: &str) -> Result<Option<(OwnedUserId, String)>>;
|
||||
/// Find out which user an access token belongs to.
|
||||
fn find_from_token(&self, token: &str) -> Result<Option<(OwnedUserId, String)>>;
|
||||
|
||||
/// Returns an iterator over all users on this homeserver.
|
||||
fn iter<'a>(&'a self) -> Box<dyn Iterator<Item = Result<OwnedUserId>> + 'a>;
|
||||
/// Returns an iterator over all users on this homeserver.
|
||||
fn iter<'a>(&'a self) -> Box<dyn Iterator<Item = Result<OwnedUserId>> + 'a>;
|
||||
|
||||
/// Returns a list of local users as list of usernames.
|
||||
///
|
||||
/// A user account is considered `local` if the length of it's password is greater then zero.
|
||||
fn list_local_users(&self) -> Result<Vec<String>>;
|
||||
/// Returns a list of local users as list of usernames.
|
||||
///
|
||||
/// A user account is considered `local` if the length of it's password is
|
||||
/// greater then zero.
|
||||
fn list_local_users(&self) -> Result<Vec<String>>;
|
||||
|
||||
/// Returns the password hash for the given user.
|
||||
fn password_hash(&self, user_id: &UserId) -> Result<Option<String>>;
|
||||
/// Returns the password hash for the given user.
|
||||
fn password_hash(&self, user_id: &UserId) -> Result<Option<String>>;
|
||||
|
||||
/// Hash and set the user's password to the Argon2 hash
|
||||
fn set_password(&self, user_id: &UserId, password: Option<&str>) -> Result<()>;
|
||||
/// Hash and set the user's password to the Argon2 hash
|
||||
fn set_password(&self, user_id: &UserId, password: Option<&str>) -> Result<()>;
|
||||
|
||||
/// Returns the displayname of a user on this homeserver.
|
||||
fn displayname(&self, user_id: &UserId) -> Result<Option<String>>;
|
||||
/// Returns the displayname of a user on this homeserver.
|
||||
fn displayname(&self, user_id: &UserId) -> Result<Option<String>>;
|
||||
|
||||
/// Sets a new displayname or removes it if displayname is None. You still need to nofify all rooms of this change.
|
||||
fn set_displayname(&self, user_id: &UserId, displayname: Option<String>) -> Result<()>;
|
||||
/// Sets a new displayname or removes it if displayname is None. You still
|
||||
/// need to nofify all rooms of this change.
|
||||
fn set_displayname(&self, user_id: &UserId, displayname: Option<String>) -> Result<()>;
|
||||
|
||||
/// Get the avatar_url of a user.
|
||||
fn avatar_url(&self, user_id: &UserId) -> Result<Option<OwnedMxcUri>>;
|
||||
/// Get the avatar_url of a user.
|
||||
fn avatar_url(&self, user_id: &UserId) -> Result<Option<OwnedMxcUri>>;
|
||||
|
||||
/// Sets a new avatar_url or removes it if avatar_url is None.
|
||||
fn set_avatar_url(&self, user_id: &UserId, avatar_url: Option<OwnedMxcUri>) -> Result<()>;
|
||||
/// Sets a new avatar_url or removes it if avatar_url is None.
|
||||
fn set_avatar_url(&self, user_id: &UserId, avatar_url: Option<OwnedMxcUri>) -> Result<()>;
|
||||
|
||||
/// Get the blurhash of a user.
|
||||
fn blurhash(&self, user_id: &UserId) -> Result<Option<String>>;
|
||||
/// Get the blurhash of a user.
|
||||
fn blurhash(&self, user_id: &UserId) -> Result<Option<String>>;
|
||||
|
||||
/// Sets a new avatar_url or removes it if avatar_url is None.
|
||||
fn set_blurhash(&self, user_id: &UserId, blurhash: Option<String>) -> Result<()>;
|
||||
/// Sets a new avatar_url or removes it if avatar_url is None.
|
||||
fn set_blurhash(&self, user_id: &UserId, blurhash: Option<String>) -> Result<()>;
|
||||
|
||||
/// Adds a new device to a user.
|
||||
fn create_device(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
device_id: &DeviceId,
|
||||
token: &str,
|
||||
initial_device_display_name: Option<String>,
|
||||
) -> Result<()>;
|
||||
/// Adds a new device to a user.
|
||||
fn create_device(
|
||||
&self, user_id: &UserId, device_id: &DeviceId, token: &str, initial_device_display_name: Option<String>,
|
||||
) -> Result<()>;
|
||||
|
||||
/// Removes a device from a user.
|
||||
fn remove_device(&self, user_id: &UserId, device_id: &DeviceId) -> Result<()>;
|
||||
/// Removes a device from a user.
|
||||
fn remove_device(&self, user_id: &UserId, device_id: &DeviceId) -> Result<()>;
|
||||
|
||||
/// Returns an iterator over all device ids of this user.
|
||||
fn all_device_ids<'a>(
|
||||
&'a self,
|
||||
user_id: &UserId,
|
||||
) -> Box<dyn Iterator<Item = Result<OwnedDeviceId>> + 'a>;
|
||||
/// Returns an iterator over all device ids of this user.
|
||||
fn all_device_ids<'a>(&'a self, user_id: &UserId) -> Box<dyn Iterator<Item = Result<OwnedDeviceId>> + 'a>;
|
||||
|
||||
/// Replaces the access token of one device.
|
||||
fn set_token(&self, user_id: &UserId, device_id: &DeviceId, token: &str) -> Result<()>;
|
||||
/// Replaces the access token of one device.
|
||||
fn set_token(&self, user_id: &UserId, device_id: &DeviceId, token: &str) -> Result<()>;
|
||||
|
||||
fn add_one_time_key(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
device_id: &DeviceId,
|
||||
one_time_key_key: &DeviceKeyId,
|
||||
one_time_key_value: &Raw<OneTimeKey>,
|
||||
) -> Result<()>;
|
||||
fn add_one_time_key(
|
||||
&self, user_id: &UserId, device_id: &DeviceId, one_time_key_key: &DeviceKeyId,
|
||||
one_time_key_value: &Raw<OneTimeKey>,
|
||||
) -> Result<()>;
|
||||
|
||||
fn last_one_time_keys_update(&self, user_id: &UserId) -> Result<u64>;
|
||||
fn last_one_time_keys_update(&self, user_id: &UserId) -> Result<u64>;
|
||||
|
||||
fn take_one_time_key(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
device_id: &DeviceId,
|
||||
key_algorithm: &DeviceKeyAlgorithm,
|
||||
) -> Result<Option<(OwnedDeviceKeyId, Raw<OneTimeKey>)>>;
|
||||
fn take_one_time_key(
|
||||
&self, user_id: &UserId, device_id: &DeviceId, key_algorithm: &DeviceKeyAlgorithm,
|
||||
) -> Result<Option<(OwnedDeviceKeyId, Raw<OneTimeKey>)>>;
|
||||
|
||||
fn count_one_time_keys(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
device_id: &DeviceId,
|
||||
) -> Result<BTreeMap<DeviceKeyAlgorithm, UInt>>;
|
||||
fn count_one_time_keys(&self, user_id: &UserId, device_id: &DeviceId)
|
||||
-> Result<BTreeMap<DeviceKeyAlgorithm, UInt>>;
|
||||
|
||||
fn add_device_keys(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
device_id: &DeviceId,
|
||||
device_keys: &Raw<DeviceKeys>,
|
||||
) -> Result<()>;
|
||||
fn add_device_keys(&self, user_id: &UserId, device_id: &DeviceId, device_keys: &Raw<DeviceKeys>) -> Result<()>;
|
||||
|
||||
fn add_cross_signing_keys(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
master_key: &Raw<CrossSigningKey>,
|
||||
self_signing_key: &Option<Raw<CrossSigningKey>>,
|
||||
user_signing_key: &Option<Raw<CrossSigningKey>>,
|
||||
notify: bool,
|
||||
) -> Result<()>;
|
||||
fn add_cross_signing_keys(
|
||||
&self, user_id: &UserId, master_key: &Raw<CrossSigningKey>, self_signing_key: &Option<Raw<CrossSigningKey>>,
|
||||
user_signing_key: &Option<Raw<CrossSigningKey>>, notify: bool,
|
||||
) -> Result<()>;
|
||||
|
||||
fn sign_key(
|
||||
&self,
|
||||
target_id: &UserId,
|
||||
key_id: &str,
|
||||
signature: (String, String),
|
||||
sender_id: &UserId,
|
||||
) -> Result<()>;
|
||||
fn sign_key(&self, target_id: &UserId, key_id: &str, signature: (String, String), sender_id: &UserId)
|
||||
-> Result<()>;
|
||||
|
||||
fn keys_changed<'a>(
|
||||
&'a self,
|
||||
user_or_room_id: &str,
|
||||
from: u64,
|
||||
to: Option<u64>,
|
||||
) -> Box<dyn Iterator<Item = Result<OwnedUserId>> + 'a>;
|
||||
fn keys_changed<'a>(
|
||||
&'a self, user_or_room_id: &str, from: u64, to: Option<u64>,
|
||||
) -> Box<dyn Iterator<Item = Result<OwnedUserId>> + 'a>;
|
||||
|
||||
fn mark_device_key_update(&self, user_id: &UserId) -> Result<()>;
|
||||
fn mark_device_key_update(&self, user_id: &UserId) -> Result<()>;
|
||||
|
||||
fn get_device_keys(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
device_id: &DeviceId,
|
||||
) -> Result<Option<Raw<DeviceKeys>>>;
|
||||
fn get_device_keys(&self, user_id: &UserId, device_id: &DeviceId) -> Result<Option<Raw<DeviceKeys>>>;
|
||||
|
||||
fn parse_master_key(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
master_key: &Raw<CrossSigningKey>,
|
||||
) -> Result<(Vec<u8>, CrossSigningKey)>;
|
||||
fn parse_master_key(
|
||||
&self, user_id: &UserId, master_key: &Raw<CrossSigningKey>,
|
||||
) -> Result<(Vec<u8>, CrossSigningKey)>;
|
||||
|
||||
fn get_key(
|
||||
&self,
|
||||
key: &[u8],
|
||||
sender_user: Option<&UserId>,
|
||||
user_id: &UserId,
|
||||
allowed_signatures: &dyn Fn(&UserId) -> bool,
|
||||
) -> Result<Option<Raw<CrossSigningKey>>>;
|
||||
fn get_key(
|
||||
&self, key: &[u8], sender_user: Option<&UserId>, user_id: &UserId, allowed_signatures: &dyn Fn(&UserId) -> bool,
|
||||
) -> Result<Option<Raw<CrossSigningKey>>>;
|
||||
|
||||
fn get_master_key(
|
||||
&self,
|
||||
sender_user: Option<&UserId>,
|
||||
user_id: &UserId,
|
||||
allowed_signatures: &dyn Fn(&UserId) -> bool,
|
||||
) -> Result<Option<Raw<CrossSigningKey>>>;
|
||||
fn get_master_key(
|
||||
&self, sender_user: Option<&UserId>, user_id: &UserId, allowed_signatures: &dyn Fn(&UserId) -> bool,
|
||||
) -> Result<Option<Raw<CrossSigningKey>>>;
|
||||
|
||||
fn get_self_signing_key(
|
||||
&self,
|
||||
sender_user: Option<&UserId>,
|
||||
user_id: &UserId,
|
||||
allowed_signatures: &dyn Fn(&UserId) -> bool,
|
||||
) -> Result<Option<Raw<CrossSigningKey>>>;
|
||||
fn get_self_signing_key(
|
||||
&self, sender_user: Option<&UserId>, user_id: &UserId, allowed_signatures: &dyn Fn(&UserId) -> bool,
|
||||
) -> Result<Option<Raw<CrossSigningKey>>>;
|
||||
|
||||
fn get_user_signing_key(&self, user_id: &UserId) -> Result<Option<Raw<CrossSigningKey>>>;
|
||||
fn get_user_signing_key(&self, user_id: &UserId) -> Result<Option<Raw<CrossSigningKey>>>;
|
||||
|
||||
fn add_to_device_event(
|
||||
&self,
|
||||
sender: &UserId,
|
||||
target_user_id: &UserId,
|
||||
target_device_id: &DeviceId,
|
||||
event_type: &str,
|
||||
content: serde_json::Value,
|
||||
) -> Result<()>;
|
||||
fn add_to_device_event(
|
||||
&self, sender: &UserId, target_user_id: &UserId, target_device_id: &DeviceId, event_type: &str,
|
||||
content: serde_json::Value,
|
||||
) -> Result<()>;
|
||||
|
||||
fn get_to_device_events(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
device_id: &DeviceId,
|
||||
) -> Result<Vec<Raw<AnyToDeviceEvent>>>;
|
||||
fn get_to_device_events(&self, user_id: &UserId, device_id: &DeviceId) -> Result<Vec<Raw<AnyToDeviceEvent>>>;
|
||||
|
||||
fn remove_to_device_events(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
device_id: &DeviceId,
|
||||
until: u64,
|
||||
) -> Result<()>;
|
||||
fn remove_to_device_events(&self, user_id: &UserId, device_id: &DeviceId, until: u64) -> Result<()>;
|
||||
|
||||
fn update_device_metadata(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
device_id: &DeviceId,
|
||||
device: &Device,
|
||||
) -> Result<()>;
|
||||
fn update_device_metadata(&self, user_id: &UserId, device_id: &DeviceId, device: &Device) -> Result<()>;
|
||||
|
||||
/// Get device metadata.
|
||||
fn get_device_metadata(&self, user_id: &UserId, device_id: &DeviceId)
|
||||
-> Result<Option<Device>>;
|
||||
/// Get device metadata.
|
||||
fn get_device_metadata(&self, user_id: &UserId, device_id: &DeviceId) -> Result<Option<Device>>;
|
||||
|
||||
fn get_devicelist_version(&self, user_id: &UserId) -> Result<Option<u64>>;
|
||||
fn get_devicelist_version(&self, user_id: &UserId) -> Result<Option<u64>>;
|
||||
|
||||
fn all_devices_metadata<'a>(
|
||||
&'a self,
|
||||
user_id: &UserId,
|
||||
) -> Box<dyn Iterator<Item = Result<Device>> + 'a>;
|
||||
fn all_devices_metadata<'a>(&'a self, user_id: &UserId) -> Box<dyn Iterator<Item = Result<Device>> + 'a>;
|
||||
|
||||
/// Creates a new sync filter. Returns the filter id.
|
||||
fn create_filter(&self, user_id: &UserId, filter: &FilterDefinition) -> Result<String>;
|
||||
/// Creates a new sync filter. Returns the filter id.
|
||||
fn create_filter(&self, user_id: &UserId, filter: &FilterDefinition) -> Result<String>;
|
||||
|
||||
fn get_filter(&self, user_id: &UserId, filter_id: &str) -> Result<Option<FilterDefinition>>;
|
||||
fn get_filter(&self, user_id: &UserId, filter_id: &str) -> Result<Option<FilterDefinition>>;
|
||||
}
|
||||
|
|
|
@ -1,630 +1,428 @@
|
|||
mod data;
|
||||
use std::{
|
||||
collections::{BTreeMap, BTreeSet},
|
||||
mem,
|
||||
sync::{Arc, Mutex},
|
||||
collections::{BTreeMap, BTreeSet},
|
||||
mem,
|
||||
sync::{Arc, Mutex},
|
||||
};
|
||||
|
||||
pub use data::Data;
|
||||
use ruma::{
|
||||
api::client::{
|
||||
device::Device,
|
||||
error::ErrorKind,
|
||||
filter::FilterDefinition,
|
||||
sync::sync_events::{
|
||||
self,
|
||||
v4::{ExtensionsConfig, SyncRequestList},
|
||||
},
|
||||
},
|
||||
encryption::{CrossSigningKey, DeviceKeys, OneTimeKey},
|
||||
events::AnyToDeviceEvent,
|
||||
serde::Raw,
|
||||
DeviceId, DeviceKeyAlgorithm, DeviceKeyId, OwnedDeviceId, OwnedDeviceKeyId, OwnedMxcUri,
|
||||
OwnedRoomId, OwnedUserId, RoomAliasId, UInt, UserId,
|
||||
api::client::{
|
||||
device::Device,
|
||||
error::ErrorKind,
|
||||
filter::FilterDefinition,
|
||||
sync::sync_events::{
|
||||
self,
|
||||
v4::{ExtensionsConfig, SyncRequestList},
|
||||
},
|
||||
},
|
||||
encryption::{CrossSigningKey, DeviceKeys, OneTimeKey},
|
||||
events::AnyToDeviceEvent,
|
||||
serde::Raw,
|
||||
DeviceId, DeviceKeyAlgorithm, DeviceKeyId, OwnedDeviceId, OwnedDeviceKeyId, OwnedMxcUri, OwnedRoomId, OwnedUserId,
|
||||
RoomAliasId, UInt, UserId,
|
||||
};
|
||||
|
||||
use crate::{services, Error, Result};
|
||||
|
||||
pub struct SlidingSyncCache {
|
||||
lists: BTreeMap<String, SyncRequestList>,
|
||||
subscriptions: BTreeMap<OwnedRoomId, sync_events::v4::RoomSubscription>,
|
||||
known_rooms: BTreeMap<String, BTreeMap<OwnedRoomId, u64>>, // For every room, the roomsince number
|
||||
extensions: ExtensionsConfig,
|
||||
lists: BTreeMap<String, SyncRequestList>,
|
||||
subscriptions: BTreeMap<OwnedRoomId, sync_events::v4::RoomSubscription>,
|
||||
known_rooms: BTreeMap<String, BTreeMap<OwnedRoomId, u64>>, // For every room, the roomsince number
|
||||
extensions: ExtensionsConfig,
|
||||
}
|
||||
|
||||
type DbConnections =
|
||||
Mutex<BTreeMap<(OwnedUserId, OwnedDeviceId, String), Arc<Mutex<SlidingSyncCache>>>>;
|
||||
type DbConnections = Mutex<BTreeMap<(OwnedUserId, OwnedDeviceId, String), Arc<Mutex<SlidingSyncCache>>>>;
|
||||
|
||||
pub struct Service {
|
||||
pub db: &'static dyn Data,
|
||||
pub connections: DbConnections,
|
||||
pub db: &'static dyn Data,
|
||||
pub connections: DbConnections,
|
||||
}
|
||||
|
||||
impl Service {
|
||||
/// Check if a user has an account on this homeserver.
|
||||
pub fn exists(&self, user_id: &UserId) -> Result<bool> {
|
||||
self.db.exists(user_id)
|
||||
}
|
||||
/// Check if a user has an account on this homeserver.
|
||||
pub fn exists(&self, user_id: &UserId) -> Result<bool> { self.db.exists(user_id) }
|
||||
|
||||
pub fn forget_sync_request_connection(
|
||||
&self,
|
||||
user_id: OwnedUserId,
|
||||
device_id: OwnedDeviceId,
|
||||
conn_id: String,
|
||||
) {
|
||||
self.connections
|
||||
.lock()
|
||||
.unwrap()
|
||||
.remove(&(user_id, device_id, conn_id));
|
||||
}
|
||||
pub fn forget_sync_request_connection(&self, user_id: OwnedUserId, device_id: OwnedDeviceId, conn_id: String) {
|
||||
self.connections.lock().unwrap().remove(&(user_id, device_id, conn_id));
|
||||
}
|
||||
|
||||
pub fn update_sync_request_with_cache(
|
||||
&self,
|
||||
user_id: OwnedUserId,
|
||||
device_id: OwnedDeviceId,
|
||||
request: &mut sync_events::v4::Request,
|
||||
) -> BTreeMap<String, BTreeMap<OwnedRoomId, u64>> {
|
||||
let Some(conn_id) = request.conn_id.clone() else {
|
||||
return BTreeMap::new();
|
||||
};
|
||||
pub fn update_sync_request_with_cache(
|
||||
&self, user_id: OwnedUserId, device_id: OwnedDeviceId, request: &mut sync_events::v4::Request,
|
||||
) -> BTreeMap<String, BTreeMap<OwnedRoomId, u64>> {
|
||||
let Some(conn_id) = request.conn_id.clone() else {
|
||||
return BTreeMap::new();
|
||||
};
|
||||
|
||||
let mut cache = self.connections.lock().unwrap();
|
||||
let cached = Arc::clone(
|
||||
cache
|
||||
.entry((user_id, device_id, conn_id))
|
||||
.or_insert_with(|| {
|
||||
Arc::new(Mutex::new(SlidingSyncCache {
|
||||
lists: BTreeMap::new(),
|
||||
subscriptions: BTreeMap::new(),
|
||||
known_rooms: BTreeMap::new(),
|
||||
extensions: ExtensionsConfig::default(),
|
||||
}))
|
||||
}),
|
||||
);
|
||||
let cached = &mut cached.lock().unwrap();
|
||||
drop(cache);
|
||||
let mut cache = self.connections.lock().unwrap();
|
||||
let cached = Arc::clone(cache.entry((user_id, device_id, conn_id)).or_insert_with(|| {
|
||||
Arc::new(Mutex::new(SlidingSyncCache {
|
||||
lists: BTreeMap::new(),
|
||||
subscriptions: BTreeMap::new(),
|
||||
known_rooms: BTreeMap::new(),
|
||||
extensions: ExtensionsConfig::default(),
|
||||
}))
|
||||
}));
|
||||
let cached = &mut cached.lock().unwrap();
|
||||
drop(cache);
|
||||
|
||||
for (list_id, list) in &mut request.lists {
|
||||
if let Some(cached_list) = cached.lists.get(list_id) {
|
||||
if list.sort.is_empty() {
|
||||
list.sort = cached_list.sort.clone();
|
||||
};
|
||||
if list.room_details.required_state.is_empty() {
|
||||
list.room_details.required_state =
|
||||
cached_list.room_details.required_state.clone();
|
||||
};
|
||||
list.room_details.timeline_limit = list
|
||||
.room_details
|
||||
.timeline_limit
|
||||
.or(cached_list.room_details.timeline_limit);
|
||||
list.include_old_rooms = list
|
||||
.include_old_rooms
|
||||
.clone()
|
||||
.or_else(|| cached_list.include_old_rooms.clone());
|
||||
match (&mut list.filters, cached_list.filters.clone()) {
|
||||
(Some(list_filters), Some(cached_filters)) => {
|
||||
list_filters.is_dm = list_filters.is_dm.or(cached_filters.is_dm);
|
||||
if list_filters.spaces.is_empty() {
|
||||
list_filters.spaces = cached_filters.spaces;
|
||||
}
|
||||
list_filters.is_encrypted =
|
||||
list_filters.is_encrypted.or(cached_filters.is_encrypted);
|
||||
list_filters.is_invite =
|
||||
list_filters.is_invite.or(cached_filters.is_invite);
|
||||
if list_filters.room_types.is_empty() {
|
||||
list_filters.room_types = cached_filters.room_types;
|
||||
}
|
||||
if list_filters.not_room_types.is_empty() {
|
||||
list_filters.not_room_types = cached_filters.not_room_types;
|
||||
}
|
||||
list_filters.room_name_like = list_filters
|
||||
.room_name_like
|
||||
.clone()
|
||||
.or(cached_filters.room_name_like);
|
||||
if list_filters.tags.is_empty() {
|
||||
list_filters.tags = cached_filters.tags;
|
||||
}
|
||||
if list_filters.not_tags.is_empty() {
|
||||
list_filters.not_tags = cached_filters.not_tags;
|
||||
}
|
||||
}
|
||||
(_, Some(cached_filters)) => list.filters = Some(cached_filters),
|
||||
(Some(list_filters), _) => list.filters = Some(list_filters.clone()),
|
||||
(_, _) => {}
|
||||
}
|
||||
if list.bump_event_types.is_empty() {
|
||||
list.bump_event_types = cached_list.bump_event_types.clone();
|
||||
};
|
||||
}
|
||||
cached.lists.insert(list_id.clone(), list.clone());
|
||||
}
|
||||
for (list_id, list) in &mut request.lists {
|
||||
if let Some(cached_list) = cached.lists.get(list_id) {
|
||||
if list.sort.is_empty() {
|
||||
list.sort = cached_list.sort.clone();
|
||||
};
|
||||
if list.room_details.required_state.is_empty() {
|
||||
list.room_details.required_state = cached_list.room_details.required_state.clone();
|
||||
};
|
||||
list.room_details.timeline_limit =
|
||||
list.room_details.timeline_limit.or(cached_list.room_details.timeline_limit);
|
||||
list.include_old_rooms =
|
||||
list.include_old_rooms.clone().or_else(|| cached_list.include_old_rooms.clone());
|
||||
match (&mut list.filters, cached_list.filters.clone()) {
|
||||
(Some(list_filters), Some(cached_filters)) => {
|
||||
list_filters.is_dm = list_filters.is_dm.or(cached_filters.is_dm);
|
||||
if list_filters.spaces.is_empty() {
|
||||
list_filters.spaces = cached_filters.spaces;
|
||||
}
|
||||
list_filters.is_encrypted = list_filters.is_encrypted.or(cached_filters.is_encrypted);
|
||||
list_filters.is_invite = list_filters.is_invite.or(cached_filters.is_invite);
|
||||
if list_filters.room_types.is_empty() {
|
||||
list_filters.room_types = cached_filters.room_types;
|
||||
}
|
||||
if list_filters.not_room_types.is_empty() {
|
||||
list_filters.not_room_types = cached_filters.not_room_types;
|
||||
}
|
||||
list_filters.room_name_like =
|
||||
list_filters.room_name_like.clone().or(cached_filters.room_name_like);
|
||||
if list_filters.tags.is_empty() {
|
||||
list_filters.tags = cached_filters.tags;
|
||||
}
|
||||
if list_filters.not_tags.is_empty() {
|
||||
list_filters.not_tags = cached_filters.not_tags;
|
||||
}
|
||||
},
|
||||
(_, Some(cached_filters)) => list.filters = Some(cached_filters),
|
||||
(Some(list_filters), _) => list.filters = Some(list_filters.clone()),
|
||||
(..) => {},
|
||||
}
|
||||
if list.bump_event_types.is_empty() {
|
||||
list.bump_event_types = cached_list.bump_event_types.clone();
|
||||
};
|
||||
}
|
||||
cached.lists.insert(list_id.clone(), list.clone());
|
||||
}
|
||||
|
||||
cached
|
||||
.subscriptions
|
||||
.extend(request.room_subscriptions.clone());
|
||||
request
|
||||
.room_subscriptions
|
||||
.extend(cached.subscriptions.clone());
|
||||
cached.subscriptions.extend(request.room_subscriptions.clone());
|
||||
request.room_subscriptions.extend(cached.subscriptions.clone());
|
||||
|
||||
request.extensions.e2ee.enabled = request
|
||||
.extensions
|
||||
.e2ee
|
||||
.enabled
|
||||
.or(cached.extensions.e2ee.enabled);
|
||||
request.extensions.e2ee.enabled = request.extensions.e2ee.enabled.or(cached.extensions.e2ee.enabled);
|
||||
|
||||
request.extensions.to_device.enabled = request
|
||||
.extensions
|
||||
.to_device
|
||||
.enabled
|
||||
.or(cached.extensions.to_device.enabled);
|
||||
request.extensions.to_device.enabled =
|
||||
request.extensions.to_device.enabled.or(cached.extensions.to_device.enabled);
|
||||
|
||||
request.extensions.account_data.enabled = request
|
||||
.extensions
|
||||
.account_data
|
||||
.enabled
|
||||
.or(cached.extensions.account_data.enabled);
|
||||
request.extensions.account_data.lists = request
|
||||
.extensions
|
||||
.account_data
|
||||
.lists
|
||||
.clone()
|
||||
.or_else(|| cached.extensions.account_data.lists.clone());
|
||||
request.extensions.account_data.rooms = request
|
||||
.extensions
|
||||
.account_data
|
||||
.rooms
|
||||
.clone()
|
||||
.or_else(|| cached.extensions.account_data.rooms.clone());
|
||||
request.extensions.account_data.enabled =
|
||||
request.extensions.account_data.enabled.or(cached.extensions.account_data.enabled);
|
||||
request.extensions.account_data.lists =
|
||||
request.extensions.account_data.lists.clone().or_else(|| cached.extensions.account_data.lists.clone());
|
||||
request.extensions.account_data.rooms =
|
||||
request.extensions.account_data.rooms.clone().or_else(|| cached.extensions.account_data.rooms.clone());
|
||||
|
||||
cached.extensions = request.extensions.clone();
|
||||
cached.extensions = request.extensions.clone();
|
||||
|
||||
cached.known_rooms.clone()
|
||||
}
|
||||
cached.known_rooms.clone()
|
||||
}
|
||||
|
||||
pub fn update_sync_subscriptions(
|
||||
&self,
|
||||
user_id: OwnedUserId,
|
||||
device_id: OwnedDeviceId,
|
||||
conn_id: String,
|
||||
subscriptions: BTreeMap<OwnedRoomId, sync_events::v4::RoomSubscription>,
|
||||
) {
|
||||
let mut cache = self.connections.lock().unwrap();
|
||||
let cached = Arc::clone(
|
||||
cache
|
||||
.entry((user_id, device_id, conn_id))
|
||||
.or_insert_with(|| {
|
||||
Arc::new(Mutex::new(SlidingSyncCache {
|
||||
lists: BTreeMap::new(),
|
||||
subscriptions: BTreeMap::new(),
|
||||
known_rooms: BTreeMap::new(),
|
||||
extensions: ExtensionsConfig::default(),
|
||||
}))
|
||||
}),
|
||||
);
|
||||
let cached = &mut cached.lock().unwrap();
|
||||
drop(cache);
|
||||
pub fn update_sync_subscriptions(
|
||||
&self, user_id: OwnedUserId, device_id: OwnedDeviceId, conn_id: String,
|
||||
subscriptions: BTreeMap<OwnedRoomId, sync_events::v4::RoomSubscription>,
|
||||
) {
|
||||
let mut cache = self.connections.lock().unwrap();
|
||||
let cached = Arc::clone(cache.entry((user_id, device_id, conn_id)).or_insert_with(|| {
|
||||
Arc::new(Mutex::new(SlidingSyncCache {
|
||||
lists: BTreeMap::new(),
|
||||
subscriptions: BTreeMap::new(),
|
||||
known_rooms: BTreeMap::new(),
|
||||
extensions: ExtensionsConfig::default(),
|
||||
}))
|
||||
}));
|
||||
let cached = &mut cached.lock().unwrap();
|
||||
drop(cache);
|
||||
|
||||
cached.subscriptions = subscriptions;
|
||||
}
|
||||
cached.subscriptions = subscriptions;
|
||||
}
|
||||
|
||||
pub fn update_sync_known_rooms(
|
||||
&self,
|
||||
user_id: OwnedUserId,
|
||||
device_id: OwnedDeviceId,
|
||||
conn_id: String,
|
||||
list_id: String,
|
||||
new_cached_rooms: BTreeSet<OwnedRoomId>,
|
||||
globalsince: u64,
|
||||
) {
|
||||
let mut cache = self.connections.lock().unwrap();
|
||||
let cached = Arc::clone(
|
||||
cache
|
||||
.entry((user_id, device_id, conn_id))
|
||||
.or_insert_with(|| {
|
||||
Arc::new(Mutex::new(SlidingSyncCache {
|
||||
lists: BTreeMap::new(),
|
||||
subscriptions: BTreeMap::new(),
|
||||
known_rooms: BTreeMap::new(),
|
||||
extensions: ExtensionsConfig::default(),
|
||||
}))
|
||||
}),
|
||||
);
|
||||
let cached = &mut cached.lock().unwrap();
|
||||
drop(cache);
|
||||
pub fn update_sync_known_rooms(
|
||||
&self, user_id: OwnedUserId, device_id: OwnedDeviceId, conn_id: String, list_id: String,
|
||||
new_cached_rooms: BTreeSet<OwnedRoomId>, globalsince: u64,
|
||||
) {
|
||||
let mut cache = self.connections.lock().unwrap();
|
||||
let cached = Arc::clone(cache.entry((user_id, device_id, conn_id)).or_insert_with(|| {
|
||||
Arc::new(Mutex::new(SlidingSyncCache {
|
||||
lists: BTreeMap::new(),
|
||||
subscriptions: BTreeMap::new(),
|
||||
known_rooms: BTreeMap::new(),
|
||||
extensions: ExtensionsConfig::default(),
|
||||
}))
|
||||
}));
|
||||
let cached = &mut cached.lock().unwrap();
|
||||
drop(cache);
|
||||
|
||||
for (roomid, lastsince) in cached
|
||||
.known_rooms
|
||||
.entry(list_id.clone())
|
||||
.or_default()
|
||||
.iter_mut()
|
||||
{
|
||||
if !new_cached_rooms.contains(roomid) {
|
||||
*lastsince = 0;
|
||||
}
|
||||
}
|
||||
let list = cached.known_rooms.entry(list_id).or_default();
|
||||
for roomid in new_cached_rooms {
|
||||
list.insert(roomid, globalsince);
|
||||
}
|
||||
}
|
||||
for (roomid, lastsince) in cached.known_rooms.entry(list_id.clone()).or_default().iter_mut() {
|
||||
if !new_cached_rooms.contains(roomid) {
|
||||
*lastsince = 0;
|
||||
}
|
||||
}
|
||||
let list = cached.known_rooms.entry(list_id).or_default();
|
||||
for roomid in new_cached_rooms {
|
||||
list.insert(roomid, globalsince);
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if account is deactivated
|
||||
pub fn is_deactivated(&self, user_id: &UserId) -> Result<bool> {
|
||||
self.db.is_deactivated(user_id)
|
||||
}
|
||||
/// Check if account is deactivated
|
||||
pub fn is_deactivated(&self, user_id: &UserId) -> Result<bool> { self.db.is_deactivated(user_id) }
|
||||
|
||||
/// Check if a user is an admin
|
||||
pub fn is_admin(&self, user_id: &UserId) -> Result<bool> {
|
||||
let admin_room_alias_id =
|
||||
RoomAliasId::parse(format!("#admins:{}", services().globals.server_name()))
|
||||
.map_err(|_| Error::BadRequest(ErrorKind::InvalidParam, "Invalid alias."))?;
|
||||
let admin_room_id = services()
|
||||
.rooms
|
||||
.alias
|
||||
.resolve_local_alias(&admin_room_alias_id)?
|
||||
.unwrap();
|
||||
/// Check if a user is an admin
|
||||
pub fn is_admin(&self, user_id: &UserId) -> Result<bool> {
|
||||
let admin_room_alias_id = RoomAliasId::parse(format!("#admins:{}", services().globals.server_name()))
|
||||
.map_err(|_| Error::BadRequest(ErrorKind::InvalidParam, "Invalid alias."))?;
|
||||
let admin_room_id = services().rooms.alias.resolve_local_alias(&admin_room_alias_id)?.unwrap();
|
||||
|
||||
services()
|
||||
.rooms
|
||||
.state_cache
|
||||
.is_joined(user_id, &admin_room_id)
|
||||
}
|
||||
services().rooms.state_cache.is_joined(user_id, &admin_room_id)
|
||||
}
|
||||
|
||||
/// Create a new user account on this homeserver.
|
||||
pub fn create(&self, user_id: &UserId, password: Option<&str>) -> Result<()> {
|
||||
self.db.set_password(user_id, password)?;
|
||||
Ok(())
|
||||
}
|
||||
/// Create a new user account on this homeserver.
|
||||
pub fn create(&self, user_id: &UserId, password: Option<&str>) -> Result<()> {
|
||||
self.db.set_password(user_id, password)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Returns the number of users registered on this server.
|
||||
pub fn count(&self) -> Result<usize> {
|
||||
self.db.count()
|
||||
}
|
||||
/// Returns the number of users registered on this server.
|
||||
pub fn count(&self) -> Result<usize> { self.db.count() }
|
||||
|
||||
/// Find out which user an access token belongs to.
|
||||
pub fn find_from_token(&self, token: &str) -> Result<Option<(OwnedUserId, String)>> {
|
||||
self.db.find_from_token(token)
|
||||
}
|
||||
/// Find out which user an access token belongs to.
|
||||
pub fn find_from_token(&self, token: &str) -> Result<Option<(OwnedUserId, String)>> {
|
||||
self.db.find_from_token(token)
|
||||
}
|
||||
|
||||
/// Returns an iterator over all users on this homeserver.
|
||||
pub fn iter(&self) -> impl Iterator<Item = Result<OwnedUserId>> + '_ {
|
||||
self.db.iter()
|
||||
}
|
||||
/// Returns an iterator over all users on this homeserver.
|
||||
pub fn iter(&self) -> impl Iterator<Item = Result<OwnedUserId>> + '_ { self.db.iter() }
|
||||
|
||||
/// Returns a list of local users as list of usernames.
|
||||
///
|
||||
/// A user account is considered `local` if the length of it's password is greater then zero.
|
||||
pub fn list_local_users(&self) -> Result<Vec<String>> {
|
||||
self.db.list_local_users()
|
||||
}
|
||||
/// Returns a list of local users as list of usernames.
|
||||
///
|
||||
/// A user account is considered `local` if the length of it's password is
|
||||
/// greater then zero.
|
||||
pub fn list_local_users(&self) -> Result<Vec<String>> { self.db.list_local_users() }
|
||||
|
||||
/// Returns the password hash for the given user.
|
||||
pub fn password_hash(&self, user_id: &UserId) -> Result<Option<String>> {
|
||||
self.db.password_hash(user_id)
|
||||
}
|
||||
/// Returns the password hash for the given user.
|
||||
pub fn password_hash(&self, user_id: &UserId) -> Result<Option<String>> { self.db.password_hash(user_id) }
|
||||
|
||||
/// Hash and set the user's password to the Argon2 hash
|
||||
pub fn set_password(&self, user_id: &UserId, password: Option<&str>) -> Result<()> {
|
||||
self.db.set_password(user_id, password)
|
||||
}
|
||||
/// Hash and set the user's password to the Argon2 hash
|
||||
pub fn set_password(&self, user_id: &UserId, password: Option<&str>) -> Result<()> {
|
||||
self.db.set_password(user_id, password)
|
||||
}
|
||||
|
||||
/// Returns the displayname of a user on this homeserver.
|
||||
pub fn displayname(&self, user_id: &UserId) -> Result<Option<String>> {
|
||||
self.db.displayname(user_id)
|
||||
}
|
||||
/// Returns the displayname of a user on this homeserver.
|
||||
pub fn displayname(&self, user_id: &UserId) -> Result<Option<String>> { self.db.displayname(user_id) }
|
||||
|
||||
/// Sets a new displayname or removes it if displayname is None. You still need to nofify all rooms of this change.
|
||||
pub async fn set_displayname(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
displayname: Option<String>,
|
||||
) -> Result<()> {
|
||||
self.db.set_displayname(user_id, displayname)
|
||||
}
|
||||
/// Sets a new displayname or removes it if displayname is None. You still
|
||||
/// need to nofify all rooms of this change.
|
||||
pub async fn set_displayname(&self, user_id: &UserId, displayname: Option<String>) -> Result<()> {
|
||||
self.db.set_displayname(user_id, displayname)
|
||||
}
|
||||
|
||||
/// Get the avatar_url of a user.
|
||||
pub fn avatar_url(&self, user_id: &UserId) -> Result<Option<OwnedMxcUri>> {
|
||||
self.db.avatar_url(user_id)
|
||||
}
|
||||
/// Get the avatar_url of a user.
|
||||
pub fn avatar_url(&self, user_id: &UserId) -> Result<Option<OwnedMxcUri>> { self.db.avatar_url(user_id) }
|
||||
|
||||
/// Sets a new avatar_url or removes it if avatar_url is None.
|
||||
pub async fn set_avatar_url(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
avatar_url: Option<OwnedMxcUri>,
|
||||
) -> Result<()> {
|
||||
self.db.set_avatar_url(user_id, avatar_url)
|
||||
}
|
||||
/// Sets a new avatar_url or removes it if avatar_url is None.
|
||||
pub async fn set_avatar_url(&self, user_id: &UserId, avatar_url: Option<OwnedMxcUri>) -> Result<()> {
|
||||
self.db.set_avatar_url(user_id, avatar_url)
|
||||
}
|
||||
|
||||
/// Get the blurhash of a user.
|
||||
pub fn blurhash(&self, user_id: &UserId) -> Result<Option<String>> {
|
||||
self.db.blurhash(user_id)
|
||||
}
|
||||
/// Get the blurhash of a user.
|
||||
pub fn blurhash(&self, user_id: &UserId) -> Result<Option<String>> { self.db.blurhash(user_id) }
|
||||
|
||||
/// Sets a new avatar_url or removes it if avatar_url is None.
|
||||
pub async fn set_blurhash(&self, user_id: &UserId, blurhash: Option<String>) -> Result<()> {
|
||||
self.db.set_blurhash(user_id, blurhash)
|
||||
}
|
||||
/// Sets a new avatar_url or removes it if avatar_url is None.
|
||||
pub async fn set_blurhash(&self, user_id: &UserId, blurhash: Option<String>) -> Result<()> {
|
||||
self.db.set_blurhash(user_id, blurhash)
|
||||
}
|
||||
|
||||
/// Adds a new device to a user.
|
||||
pub fn create_device(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
device_id: &DeviceId,
|
||||
token: &str,
|
||||
initial_device_display_name: Option<String>,
|
||||
) -> Result<()> {
|
||||
self.db
|
||||
.create_device(user_id, device_id, token, initial_device_display_name)
|
||||
}
|
||||
/// Adds a new device to a user.
|
||||
pub fn create_device(
|
||||
&self, user_id: &UserId, device_id: &DeviceId, token: &str, initial_device_display_name: Option<String>,
|
||||
) -> Result<()> {
|
||||
self.db.create_device(user_id, device_id, token, initial_device_display_name)
|
||||
}
|
||||
|
||||
/// Removes a device from a user.
|
||||
pub fn remove_device(&self, user_id: &UserId, device_id: &DeviceId) -> Result<()> {
|
||||
self.db.remove_device(user_id, device_id)
|
||||
}
|
||||
/// Removes a device from a user.
|
||||
pub fn remove_device(&self, user_id: &UserId, device_id: &DeviceId) -> Result<()> {
|
||||
self.db.remove_device(user_id, device_id)
|
||||
}
|
||||
|
||||
/// Returns an iterator over all device ids of this user.
|
||||
pub fn all_device_ids<'a>(
|
||||
&'a self,
|
||||
user_id: &UserId,
|
||||
) -> impl Iterator<Item = Result<OwnedDeviceId>> + 'a {
|
||||
self.db.all_device_ids(user_id)
|
||||
}
|
||||
/// Returns an iterator over all device ids of this user.
|
||||
pub fn all_device_ids<'a>(&'a self, user_id: &UserId) -> impl Iterator<Item = Result<OwnedDeviceId>> + 'a {
|
||||
self.db.all_device_ids(user_id)
|
||||
}
|
||||
|
||||
/// Replaces the access token of one device.
|
||||
pub fn set_token(&self, user_id: &UserId, device_id: &DeviceId, token: &str) -> Result<()> {
|
||||
self.db.set_token(user_id, device_id, token)
|
||||
}
|
||||
/// Replaces the access token of one device.
|
||||
pub fn set_token(&self, user_id: &UserId, device_id: &DeviceId, token: &str) -> Result<()> {
|
||||
self.db.set_token(user_id, device_id, token)
|
||||
}
|
||||
|
||||
pub fn add_one_time_key(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
device_id: &DeviceId,
|
||||
one_time_key_key: &DeviceKeyId,
|
||||
one_time_key_value: &Raw<OneTimeKey>,
|
||||
) -> Result<()> {
|
||||
self.db
|
||||
.add_one_time_key(user_id, device_id, one_time_key_key, one_time_key_value)
|
||||
}
|
||||
pub fn add_one_time_key(
|
||||
&self, user_id: &UserId, device_id: &DeviceId, one_time_key_key: &DeviceKeyId,
|
||||
one_time_key_value: &Raw<OneTimeKey>,
|
||||
) -> Result<()> {
|
||||
self.db.add_one_time_key(user_id, device_id, one_time_key_key, one_time_key_value)
|
||||
}
|
||||
|
||||
pub fn last_one_time_keys_update(&self, user_id: &UserId) -> Result<u64> {
|
||||
self.db.last_one_time_keys_update(user_id)
|
||||
}
|
||||
pub fn last_one_time_keys_update(&self, user_id: &UserId) -> Result<u64> {
|
||||
self.db.last_one_time_keys_update(user_id)
|
||||
}
|
||||
|
||||
pub fn take_one_time_key(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
device_id: &DeviceId,
|
||||
key_algorithm: &DeviceKeyAlgorithm,
|
||||
) -> Result<Option<(OwnedDeviceKeyId, Raw<OneTimeKey>)>> {
|
||||
self.db.take_one_time_key(user_id, device_id, key_algorithm)
|
||||
}
|
||||
pub fn take_one_time_key(
|
||||
&self, user_id: &UserId, device_id: &DeviceId, key_algorithm: &DeviceKeyAlgorithm,
|
||||
) -> Result<Option<(OwnedDeviceKeyId, Raw<OneTimeKey>)>> {
|
||||
self.db.take_one_time_key(user_id, device_id, key_algorithm)
|
||||
}
|
||||
|
||||
pub fn count_one_time_keys(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
device_id: &DeviceId,
|
||||
) -> Result<BTreeMap<DeviceKeyAlgorithm, UInt>> {
|
||||
self.db.count_one_time_keys(user_id, device_id)
|
||||
}
|
||||
pub fn count_one_time_keys(
|
||||
&self, user_id: &UserId, device_id: &DeviceId,
|
||||
) -> Result<BTreeMap<DeviceKeyAlgorithm, UInt>> {
|
||||
self.db.count_one_time_keys(user_id, device_id)
|
||||
}
|
||||
|
||||
pub fn add_device_keys(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
device_id: &DeviceId,
|
||||
device_keys: &Raw<DeviceKeys>,
|
||||
) -> Result<()> {
|
||||
self.db.add_device_keys(user_id, device_id, device_keys)
|
||||
}
|
||||
pub fn add_device_keys(&self, user_id: &UserId, device_id: &DeviceId, device_keys: &Raw<DeviceKeys>) -> Result<()> {
|
||||
self.db.add_device_keys(user_id, device_id, device_keys)
|
||||
}
|
||||
|
||||
pub fn add_cross_signing_keys(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
master_key: &Raw<CrossSigningKey>,
|
||||
self_signing_key: &Option<Raw<CrossSigningKey>>,
|
||||
user_signing_key: &Option<Raw<CrossSigningKey>>,
|
||||
notify: bool,
|
||||
) -> Result<()> {
|
||||
self.db.add_cross_signing_keys(
|
||||
user_id,
|
||||
master_key,
|
||||
self_signing_key,
|
||||
user_signing_key,
|
||||
notify,
|
||||
)
|
||||
}
|
||||
pub fn add_cross_signing_keys(
|
||||
&self, user_id: &UserId, master_key: &Raw<CrossSigningKey>, self_signing_key: &Option<Raw<CrossSigningKey>>,
|
||||
user_signing_key: &Option<Raw<CrossSigningKey>>, notify: bool,
|
||||
) -> Result<()> {
|
||||
self.db.add_cross_signing_keys(user_id, master_key, self_signing_key, user_signing_key, notify)
|
||||
}
|
||||
|
||||
pub fn sign_key(
|
||||
&self,
|
||||
target_id: &UserId,
|
||||
key_id: &str,
|
||||
signature: (String, String),
|
||||
sender_id: &UserId,
|
||||
) -> Result<()> {
|
||||
self.db.sign_key(target_id, key_id, signature, sender_id)
|
||||
}
|
||||
pub fn sign_key(
|
||||
&self, target_id: &UserId, key_id: &str, signature: (String, String), sender_id: &UserId,
|
||||
) -> Result<()> {
|
||||
self.db.sign_key(target_id, key_id, signature, sender_id)
|
||||
}
|
||||
|
||||
pub fn keys_changed<'a>(
|
||||
&'a self,
|
||||
user_or_room_id: &str,
|
||||
from: u64,
|
||||
to: Option<u64>,
|
||||
) -> impl Iterator<Item = Result<OwnedUserId>> + 'a {
|
||||
self.db.keys_changed(user_or_room_id, from, to)
|
||||
}
|
||||
pub fn keys_changed<'a>(
|
||||
&'a self, user_or_room_id: &str, from: u64, to: Option<u64>,
|
||||
) -> impl Iterator<Item = Result<OwnedUserId>> + 'a {
|
||||
self.db.keys_changed(user_or_room_id, from, to)
|
||||
}
|
||||
|
||||
pub fn mark_device_key_update(&self, user_id: &UserId) -> Result<()> {
|
||||
self.db.mark_device_key_update(user_id)
|
||||
}
|
||||
pub fn mark_device_key_update(&self, user_id: &UserId) -> Result<()> { self.db.mark_device_key_update(user_id) }
|
||||
|
||||
pub fn get_device_keys(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
device_id: &DeviceId,
|
||||
) -> Result<Option<Raw<DeviceKeys>>> {
|
||||
self.db.get_device_keys(user_id, device_id)
|
||||
}
|
||||
pub fn get_device_keys(&self, user_id: &UserId, device_id: &DeviceId) -> Result<Option<Raw<DeviceKeys>>> {
|
||||
self.db.get_device_keys(user_id, device_id)
|
||||
}
|
||||
|
||||
pub fn parse_master_key(
|
||||
&self,
|
||||
user_id: &UserId,
|
||||
master_key: &Raw<CrossSigningKey>,
|
||||
) -> Result<(Vec<u8>, CrossSigningKey)> {
|
||||
self.db.parse_master_key(user_id, master_key)
|
||||
}
|
||||
pub fn parse_master_key(
|
||||
&self, user_id: &UserId, master_key: &Raw<CrossSigningKey>,
|
||||
) -> Result<(Vec<u8>, CrossSigningKey)> {
|
||||
self.db.parse_master_key(user_id, master_key)
|
||||
}

    pub fn get_key(
        &self,
        key: &[u8],
        sender_user: Option<&UserId>,
        user_id: &UserId,
        allowed_signatures: &dyn Fn(&UserId) -> bool,
    ) -> Result<Option<Raw<CrossSigningKey>>> {
        self.db
            .get_key(key, sender_user, user_id, allowed_signatures)
    }

    pub fn get_key(
        &self, key: &[u8], sender_user: Option<&UserId>, user_id: &UserId, allowed_signatures: &dyn Fn(&UserId) -> bool,
    ) -> Result<Option<Raw<CrossSigningKey>>> {
        self.db.get_key(key, sender_user, user_id, allowed_signatures)
    }

    pub fn get_master_key(
        &self,
        sender_user: Option<&UserId>,
        user_id: &UserId,
        allowed_signatures: &dyn Fn(&UserId) -> bool,
    ) -> Result<Option<Raw<CrossSigningKey>>> {
        self.db
            .get_master_key(sender_user, user_id, allowed_signatures)
    }

    pub fn get_master_key(
        &self, sender_user: Option<&UserId>, user_id: &UserId, allowed_signatures: &dyn Fn(&UserId) -> bool,
    ) -> Result<Option<Raw<CrossSigningKey>>> {
        self.db.get_master_key(sender_user, user_id, allowed_signatures)
    }

    pub fn get_self_signing_key(
        &self,
        sender_user: Option<&UserId>,
        user_id: &UserId,
        allowed_signatures: &dyn Fn(&UserId) -> bool,
    ) -> Result<Option<Raw<CrossSigningKey>>> {
        self.db
            .get_self_signing_key(sender_user, user_id, allowed_signatures)
    }

    pub fn get_self_signing_key(
        &self, sender_user: Option<&UserId>, user_id: &UserId, allowed_signatures: &dyn Fn(&UserId) -> bool,
    ) -> Result<Option<Raw<CrossSigningKey>>> {
        self.db.get_self_signing_key(sender_user, user_id, allowed_signatures)
    }

    pub fn get_user_signing_key(&self, user_id: &UserId) -> Result<Option<Raw<CrossSigningKey>>> {
        self.db.get_user_signing_key(user_id)
    }
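
    // Illustrative sketch: fetching a user's master key while only exposing the
    // signatures the requester may see. The closure here allows just the requester's
    // own signatures; real callers would apply their own policy (assumption).
    fn visible_master_key(
        users: &Service, sender_user: &UserId, user_id: &UserId,
    ) -> Result<Option<Raw<CrossSigningKey>>> {
        let allowed = |u: &UserId| u == sender_user;
        users.get_master_key(Some(sender_user), user_id, &allowed)
    }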

    pub fn add_to_device_event(
        &self,
        sender: &UserId,
        target_user_id: &UserId,
        target_device_id: &DeviceId,
        event_type: &str,
        content: serde_json::Value,
    ) -> Result<()> {
        self.db.add_to_device_event(
            sender,
            target_user_id,
            target_device_id,
            event_type,
            content,
        )
    }

    pub fn add_to_device_event(
        &self, sender: &UserId, target_user_id: &UserId, target_device_id: &DeviceId, event_type: &str,
        content: serde_json::Value,
    ) -> Result<()> {
        self.db.add_to_device_event(sender, target_user_id, target_device_id, event_type, content)
    }

    pub fn get_to_device_events(
        &self,
        user_id: &UserId,
        device_id: &DeviceId,
    ) -> Result<Vec<Raw<AnyToDeviceEvent>>> {
        self.db.get_to_device_events(user_id, device_id)
    }

    pub fn get_to_device_events(&self, user_id: &UserId, device_id: &DeviceId) -> Result<Vec<Raw<AnyToDeviceEvent>>> {
        self.db.get_to_device_events(user_id, device_id)
    }

    pub fn remove_to_device_events(
        &self,
        user_id: &UserId,
        device_id: &DeviceId,
        until: u64,
    ) -> Result<()> {
        self.db.remove_to_device_events(user_id, device_id, until)
    }

    pub fn remove_to_device_events(&self, user_id: &UserId, device_id: &DeviceId, until: u64) -> Result<()> {
        self.db.remove_to_device_events(user_id, device_id, until)
    }
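
    // Illustrative sketch of the to-device life cycle the three methods above cover:
    // queue an event for a device, drain the queue, then prune everything up to the
    // position the client has acknowledged. The event type and empty content are
    // placeholders, and `until` is assumed to be that acknowledged position.
    fn to_device_roundtrip(
        users: &Service, sender: &UserId, target: &UserId, device: &DeviceId, until: u64,
    ) -> Result<Vec<Raw<AnyToDeviceEvent>>> {
        users.add_to_device_event(sender, target, device, "m.example.ping", serde_json::json!({}))?;
        let events = users.get_to_device_events(target, device)?;
        users.remove_to_device_events(target, device, until)?;
        Ok(events)
    }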

    pub fn update_device_metadata(
        &self,
        user_id: &UserId,
        device_id: &DeviceId,
        device: &Device,
    ) -> Result<()> {
        self.db.update_device_metadata(user_id, device_id, device)
    }

    pub fn update_device_metadata(&self, user_id: &UserId, device_id: &DeviceId, device: &Device) -> Result<()> {
        self.db.update_device_metadata(user_id, device_id, device)
    }

    /// Get device metadata.
    pub fn get_device_metadata(
        &self,
        user_id: &UserId,
        device_id: &DeviceId,
    ) -> Result<Option<Device>> {
        self.db.get_device_metadata(user_id, device_id)
    }

    /// Get device metadata.
    pub fn get_device_metadata(&self, user_id: &UserId, device_id: &DeviceId) -> Result<Option<Device>> {
        self.db.get_device_metadata(user_id, device_id)
    }

    pub fn get_devicelist_version(&self, user_id: &UserId) -> Result<Option<u64>> {
        self.db.get_devicelist_version(user_id)
    }

    pub fn all_devices_metadata<'a>(
        &'a self,
        user_id: &UserId,
    ) -> impl Iterator<Item = Result<Device>> + 'a {
        self.db.all_devices_metadata(user_id)
    }

    pub fn all_devices_metadata<'a>(&'a self, user_id: &UserId) -> impl Iterator<Item = Result<Device>> + 'a {
        self.db.all_devices_metadata(user_id)
    }
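
    // Illustrative sketch: rename a device and then list all device metadata for the
    // user, roughly what a device-management endpoint would do. Assumes ruma's
    // `Device` exposes a `display_name` field.
    fn rename_and_list(
        users: &Service, user_id: &UserId, device_id: &DeviceId, name: &str,
    ) -> Result<Vec<Device>> {
        if let Some(mut device) = users.get_device_metadata(user_id, device_id)? {
            device.display_name = Some(name.to_owned());
            users.update_device_metadata(user_id, device_id, &device)?;
        }
        users.all_devices_metadata(user_id).collect()
    }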

    /// Deactivate account
    pub fn deactivate_account(&self, user_id: &UserId) -> Result<()> {
        // Remove all associated devices
        for device_id in self.all_device_ids(user_id) {
            self.remove_device(user_id, &device_id?)?;
        }

        // Set the password to "" to indicate a deactivated account. Hashes will never result in an
        // empty string, so the user will not be able to log in again. Systems like changing the
        // password without logging in should check if the account is deactivated.
        self.db.set_password(user_id, None)?;

        // TODO: Unhook 3PID
        Ok(())
    }

    /// Deactivate account
    pub fn deactivate_account(&self, user_id: &UserId) -> Result<()> {
        // Remove all associated devices
        for device_id in self.all_device_ids(user_id) {
            self.remove_device(user_id, &device_id?)?;
        }

        // Set the password to "" to indicate a deactivated account. Hashes will never
        // result in an empty string, so the user will not be able to log in again.
        // Systems like changing the password without logging in should check if the
        // account is deactivated.
        self.db.set_password(user_id, None)?;

        // TODO: Unhook 3PID
        Ok(())
    }
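
    // Illustrative sketch of the check the comment above asks for: deactivation
    // clears the stored password hash, so "no usable hash" can be read as "account
    // deactivated" before allowing password changes. `password_hash` stands in for
    // whatever the `Data` trait exposes (hypothetical).
    fn is_deactivated(password_hash: Option<&str>) -> bool {
        // An absent or empty hash can never verify, so the account cannot log in.
        password_hash.map_or(true, str::is_empty)
    }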

    /// Creates a new sync filter. Returns the filter id.
    pub fn create_filter(&self, user_id: &UserId, filter: &FilterDefinition) -> Result<String> {
        self.db.create_filter(user_id, filter)
    }

    pub fn get_filter(
        &self,
        user_id: &UserId,
        filter_id: &str,
    ) -> Result<Option<FilterDefinition>> {
        self.db.get_filter(user_id, filter_id)
    }

    pub fn get_filter(&self, user_id: &UserId, filter_id: &str) -> Result<Option<FilterDefinition>> {
        self.db.get_filter(user_id, filter_id)
    }
}
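
// Illustrative sketch: store a sync filter and read it back by the returned id,
// roughly how the /filter endpoints would use the two methods above (assumed).
fn filter_roundtrip(
    users: &Service, user_id: &UserId, filter: &FilterDefinition,
) -> Result<Option<FilterDefinition>> {
    let filter_id = users.create_filter(user_id, filter)?;
    users.get_filter(user_id, &filter_id)
}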

/// Ensure that a user only sees signatures from themselves and the target user
pub fn clean_signatures<F: Fn(&UserId) -> bool>(
    cross_signing_key: &mut serde_json::Value,
    sender_user: Option<&UserId>,
    user_id: &UserId,
    allowed_signatures: F,
) -> Result<(), Error> {
    if let Some(signatures) = cross_signing_key
        .get_mut("signatures")
        .and_then(|v| v.as_object_mut())
    {
        // Don't allocate for the full size of the current signatures, but require
        // at most one resize if nothing is dropped
        let new_capacity = signatures.len() / 2;
        for (user, signature) in
            mem::replace(signatures, serde_json::Map::with_capacity(new_capacity))
        {
            let sid = <&UserId>::try_from(user.as_str())
                .map_err(|_| Error::bad_database("Invalid user ID in database."))?;
            if sender_user == Some(user_id) || sid == user_id || allowed_signatures(sid) {
                signatures.insert(user, signature);
            }
        }
    }

    Ok(())
}

/// Ensure that a user only sees signatures from themselves and the target user
pub fn clean_signatures<F: Fn(&UserId) -> bool>(
    cross_signing_key: &mut serde_json::Value, sender_user: Option<&UserId>, user_id: &UserId, allowed_signatures: F,
) -> Result<(), Error> {
    if let Some(signatures) = cross_signing_key.get_mut("signatures").and_then(|v| v.as_object_mut()) {
        // Don't allocate for the full size of the current signatures, but require
        // at most one resize if nothing is dropped
        let new_capacity = signatures.len() / 2;
        for (user, signature) in mem::replace(signatures, serde_json::Map::with_capacity(new_capacity)) {
            let sid =
                <&UserId>::try_from(user.as_str()).map_err(|_| Error::bad_database("Invalid user ID in database."))?;
            if sender_user == Some(user_id) || sid == user_id || allowed_signatures(sid) {
                signatures.insert(user, signature);
            }
        }
    }

    Ok(())
}
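
// Illustrative sketch: scrubbing a cross-signing key object before handing it to
// a requester, keeping only signatures from the requester and the key's owner.
// The closure is an example policy, not necessarily the one the server uses.
fn scrub_for_requester(
    sender_user: &UserId, user_id: &UserId, key: &mut serde_json::Value,
) -> Result<(), Error> {
    clean_signatures(key, Some(sender_user), user_id, |signer: &UserId| signer == sender_user)
}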