messing around with arcs
parent face766e0f
commit cff52d7ebb
77 changed files with 598 additions and 434 deletions
@@ -1,7 +1,7 @@
 use ruma::{RoomId, RoomAliasId};
 use crate::Result;

-pub trait Data {
+pub trait Data: Send + Sync {
     /// Creates or updates the alias to the given room id.
     fn set_alias(
         &self,
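Most of the hunks in this commit add a Send + Sync bound to the storage Data traits. Below is a minimal sketch of why the bound matters once a service holds its backend as a shared trait object; the names (Service, Dummy, exists) are illustrative and not from this commit.

use std::sync::Arc;
use std::thread;

// Hypothetical backend trait, mirroring the bound added above.
pub trait Data: Send + Sync {
    fn exists(&self) -> bool;
}

pub struct Service {
    // Arc<dyn Data> is only Send + Sync because `dyn Data` carries those bounds.
    pub db: Arc<dyn Data>,
}

struct Dummy;
impl Data for Dummy {
    fn exists(&self) -> bool {
        true
    }
}

fn main() {
    let service = Arc::new(Service { db: Arc::new(Dummy) });
    // Without Send + Sync on Data, moving the Arc into another thread
    // (or a tokio task) would not compile.
    let handle = thread::spawn(move || service.db.exists());
    assert!(handle.join().unwrap());
}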
@@ -1,7 +1,7 @@
-use std::collections::HashSet;
+use std::{collections::HashSet, sync::Arc};
 use crate::Result;

-pub trait Data {
-    fn get_cached_eventid_authchain(&self, shorteventid: u64) -> Result<Option<HashSet<u64>>>;
-    fn cache_eventid_authchain(&self, shorteventid: u64, auth_chain: &HashSet<u64>) -> Result<()>;
+pub trait Data: Send + Sync {
+    fn get_cached_eventid_authchain(&self, shorteventid: &[u64]) -> Result<Option<Arc<HashSet<u64>>>>;
+    fn cache_auth_chain(&self, shorteventid: Vec<u64>, auth_chain: Arc<HashSet<u64>>) -> Result<()>;
 }
@@ -15,41 +15,11 @@ impl Service {
         &'a self,
         key: &[u64],
     ) -> Result<Option<Arc<HashSet<u64>>>> {
-        // Check RAM cache
-        if let Some(result) = self.auth_chain_cache.lock().unwrap().get_mut(key.to_be_bytes()) {
-            return Ok(Some(Arc::clone(result)));
-        }
-
-        // We only save auth chains for single events in the db
-        if key.len() == 1 {
-            // Check DB cache
-            if let Some(chain) = self.db.get_cached_eventid_authchain(key[0])
-            {
-                let chain = Arc::new(chain);
-
-                // Cache in RAM
-                self.auth_chain_cache
-                    .lock()
-                    .unwrap()
-                    .insert(vec![key[0]], Arc::clone(&chain));
-
-                return Ok(Some(chain));
-            }
-        }
-
-        Ok(None)
+        self.db.get_cached_eventid_authchain(key)
     }

     #[tracing::instrument(skip(self))]
     pub fn cache_auth_chain(&self, key: Vec<u64>, auth_chain: Arc<HashSet<u64>>) -> Result<()> {
-        // Only persist single events in db
-        if key.len() == 1 {
-            self.db.cache_auth_chain(key[0], auth_chain)?;
-        }
-
-        // Cache in RAM
-        self.auth_chain_cache.lock().unwrap().insert(key, auth_chain);
-
-        Ok(())
+        self.db.cache_auth_chain(key, auth_chain)
     }
 }
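The auth chain service now hands out Arc<HashSet<u64>> so callers share one auth chain instead of copying it, and the RAM/DB caching details move behind the db layer. Below is a minimal sketch of the clone-the-Arc-on-hit pattern, with a plain HashMap standing in for the LRU cache the real service keeps; all names here are illustrative, not from this commit.

use std::collections::{HashMap, HashSet};
use std::sync::{Arc, Mutex};

// Hypothetical cache keyed like the service cache: a list of short event ids
// maps to a shared auth chain.
struct AuthChainCache {
    inner: Mutex<HashMap<Vec<u64>, Arc<HashSet<u64>>>>,
}

impl AuthChainCache {
    fn get(&self, key: &[u64]) -> Option<Arc<HashSet<u64>>> {
        // Cloning the Arc only bumps a refcount; the HashSet itself is shared.
        self.inner.lock().unwrap().get(key).map(Arc::clone)
    }

    fn insert(&self, key: Vec<u64>, chain: Arc<HashSet<u64>>) {
        self.inner.lock().unwrap().insert(key, chain);
    }
}

fn main() {
    let cache = AuthChainCache { inner: Mutex::new(HashMap::new()) };
    let chain: Arc<HashSet<u64>> = Arc::new([1, 2, 3].into_iter().collect());
    cache.insert(vec![42], Arc::clone(&chain));
    // Both the cache and the caller now point at the same HashSet.
    assert!(cache.get(&[42]).is_some());
}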
@@ -1,7 +1,7 @@
 use ruma::RoomId;
 use crate::Result;

-pub trait Data {
+pub trait Data: Send + Sync {
     /// Adds the room to the public room directory
     fn set_public(&self, room_id: &RoomId) -> Result<()>;

@@ -3,7 +3,7 @@ use std::collections::HashMap;
 use ruma::{UserId, RoomId, events::presence::PresenceEvent};
 use crate::Result;

-pub trait Data {
+pub trait Data: Send + Sync {
     /// Adds a presence event which will be saved until a new event replaces it.
     ///
     /// Note: This method takes a RoomId because presence updates are always bound to rooms to
@@ -1,7 +1,7 @@
 use ruma::{RoomId, events::receipt::ReceiptEvent, UserId, serde::Raw};
 use crate::Result;

-pub trait Data {
+pub trait Data: Send + Sync {
     /// Replaces the previous read receipt.
     fn readreceipt_update(
         &self,
@@ -2,7 +2,7 @@ use std::collections::HashSet;
 use crate::Result;
 use ruma::{UserId, RoomId};

-pub trait Data {
+pub trait Data: Send + Sync {
     /// Sets a user as typing until the timeout timestamp is reached or roomtyping_remove is
     /// called.
     fn typing_add(&self, user_id: &UserId, room_id: &RoomId, timeout: u64) -> Result<()>;
@@ -117,7 +117,7 @@ impl Service {
             room_id,
             pub_key_map,
             incoming_pdu.prev_events.clone(),
-        ).await;
+        ).await?;

         let mut errors = 0;
         for prev_id in dbg!(sorted_prev_events) {
@@ -240,7 +240,7 @@ impl Service {
         r
     }

-    #[tracing::instrument(skip(create_event, value, pub_key_map))]
+    #[tracing::instrument(skip(self, create_event, value, pub_key_map))]
     fn handle_outlier_pdu<'a>(
         &self,
         origin: &'a ServerName,
@@ -272,7 +272,7 @@ impl Service {
             RoomVersion::new(room_version_id).expect("room version is supported");

         let mut val = match ruma::signatures::verify_event(
-            &*pub_key_map.read().map_err(|_| "RwLock is poisoned.")?,
+            &*pub_key_map.read().expect("RwLock is poisoned."),
             &value,
             room_version_id,
         ) {
@@ -301,7 +301,7 @@ impl Service {
         let incoming_pdu = serde_json::from_value::<PduEvent>(
             serde_json::to_value(&val).expect("CanonicalJsonObj is a valid JsonValue"),
         )
-        .map_err(|_| "Event is not a valid PDU.".to_owned())?;
+        .map_err(|_| Error::bad_database("Event is not a valid PDU."))?;

         // 4. fetch any missing auth events doing all checks listed here starting at 1. These are not timeline events
         // 5. Reject "due to auth events" if can't get all the auth events or some of the auth events are also rejected "due to auth events"
@@ -329,7 +329,7 @@ impl Service {
         // Build map of auth events
         let mut auth_events = HashMap::new();
         for id in &incoming_pdu.auth_events {
-            let auth_event = match services().rooms.get_pdu(id)? {
+            let auth_event = match services().rooms.timeline.get_pdu(id)? {
                 Some(e) => e,
                 None => {
                     warn!("Could not find auth event {}", id);
@@ -373,7 +373,8 @@ impl Service {
             &incoming_pdu,
             None::<PduEvent>, // TODO: third party invite
             |k, s| auth_events.get(&(k.to_string().into(), s.to_owned())),
-        )? {
+        ).map_err(|_e| Error::BadRequest(ErrorKind::InvalidParam, "Auth check failed"))?
+        {
             return Err(Error::BadRequest(
                 ErrorKind::InvalidParam,
                 "Auth check failed",
@@ -385,6 +386,7 @@ impl Service {
         // 7. Persist the event as an outlier.
         services()
             .rooms
+            .outlier
             .add_pdu_outlier(&incoming_pdu.event_id, &val)?;

         info!("Added pdu as outlier.");
@@ -393,7 +395,7 @@ impl Service {
         })
     }

-    #[tracing::instrument(skip(incoming_pdu, val, create_event, pub_key_map))]
+    #[tracing::instrument(skip(self, incoming_pdu, val, create_event, pub_key_map))]
     pub async fn upgrade_outlier_to_timeline_pdu(
         &self,
         incoming_pdu: Arc<PduEvent>,
@@ -412,7 +414,7 @@ impl Service {
             .rooms
             .pdu_metadata.is_event_soft_failed(&incoming_pdu.event_id)?
         {
-            return Err("Event has been soft failed".into());
+            return Err(Error::BadRequest(ErrorKind::InvalidParam, "Event has been soft failed"));
         }

         info!("Upgrading {} to timeline pdu", incoming_pdu.event_id);
@@ -1130,7 +1132,8 @@ impl Service {
         room_id: &RoomId,
         pub_key_map: &RwLock<BTreeMap<String, BTreeMap<String, Base64>>>,
         initial_set: Vec<Arc<EventId>>,
-    ) -> Vec<(Arc<EventId>, HashMap<Arc<EventId>, (Arc<PduEvent>, BTreeMap<String, CanonicalJsonValue>)>)> {
+    ) -> Result<(Vec<Arc<EventId>>, HashMap<Arc<EventId>,
+        (Arc<PduEvent>, BTreeMap<String, CanonicalJsonValue>)>)> {
         let mut graph: HashMap<Arc<EventId>, _> = HashMap::new();
         let mut eventid_info = HashMap::new();
         let mut todo_outlier_stack: Vec<Arc<EventId>> = initial_set;
@@ -1164,6 +1167,7 @@ impl Service {
                 if let Some(json) = json_opt.or_else(|| {
                     services()
                         .rooms
+                        .outlier
                         .get_outlier_pdu_json(&prev_event_id)
                         .ok()
                         .flatten()
@@ -1209,9 +1213,9 @@ impl Service {
                     .map_or_else(|| uint!(0), |info| info.0.origin_server_ts),
                 ),
             ))
-        })?;
+        }).map_err(|_| Error::bad_database("Error sorting prev events"))?;

-        (sorted, eventid_info)
+        Ok((sorted, eventid_info))
     }

     #[tracing::instrument(skip_all)]
@@ -1,7 +1,7 @@
 use ruma::{RoomId, DeviceId, UserId};
 use crate::Result;

-pub trait Data {
+pub trait Data: Send + Sync {
     fn lazy_load_was_sent_before(
         &self,
         user_id: &UserId,
@@ -1,6 +1,6 @@
 use ruma::RoomId;
 use crate::Result;

-pub trait Data {
+pub trait Data: Send + Sync {
     fn exists(&self, room_id: &RoomId) -> Result<bool>;
 }
@@ -2,7 +2,7 @@ use ruma::{signatures::CanonicalJsonObject, EventId};

 use crate::{PduEvent, Result};

-pub trait Data {
+pub trait Data: Send + Sync {
     fn get_outlier_pdu_json(&self, event_id: &EventId) -> Result<Option<CanonicalJsonObject>>;
     fn get_outlier_pdu(&self, event_id: &EventId) -> Result<Option<PduEvent>>;
     fn add_pdu_outlier(&self, event_id: &EventId, pdu: &CanonicalJsonObject) -> Result<()>;
@@ -3,7 +3,7 @@ use std::sync::Arc;
 use ruma::{EventId, RoomId};
 use crate::Result;

-pub trait Data {
+pub trait Data: Send + Sync {
     fn mark_as_referenced(&self, room_id: &RoomId, event_ids: &[Arc<EventId>]) -> Result<()>;
     fn is_event_referenced(&self, room_id: &RoomId, event_id: &EventId) -> Result<bool>;
     fn mark_event_soft_failed(&self, event_id: &EventId) -> Result<()>;
@@ -1,7 +1,7 @@
 use ruma::RoomId;
 use crate::Result;

-pub trait Data {
+pub trait Data: Send + Sync {
     fn index_pdu<'a>(&self, shortroomid: u64, pdu_id: &[u8], message_body: String) -> Result<()>;

     fn search_pdus<'a>(
@@ -1,2 +1,2 @@
-pub trait Data {
+pub trait Data: Send + Sync {
 }
@@ -1,9 +1,10 @@
 use std::sync::Arc;
-use std::{sync::MutexGuard, collections::HashSet};
+use std::collections::HashSet;
 use crate::Result;
 use ruma::{EventId, RoomId};
+use tokio::sync::MutexGuard;

-pub trait Data {
+pub trait Data: Send + Sync {
     /// Returns the last state hash key added to the db for the given room.
     fn get_room_shortstatehash(&self, room_id: &RoomId) -> Result<Option<u64>>;

@@ -21,7 +22,7 @@ pub trait Data {
     /// Replace the forward extremities of the room.
     fn set_forward_extremities<'a>(&self,
         room_id: &RoomId,
-        event_ids: &dyn Iterator<Item = &'a EventId>,
+        event_ids: &mut dyn Iterator<Item = &'a EventId>,
         _mutex_lock: &MutexGuard<'_, ()>, // Take mutex guard to make sure users get the room state mutex
     ) -> Result<()>;
 }
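The guard parameter above switches from std::sync::MutexGuard to tokio::sync::MutexGuard, matching a room state mutex that is now locked with .await (force_state becomes async in the next hunk). Below is a minimal sketch of the pass-the-guard-to-prove-you-hold-the-lock pattern, assuming a tokio runtime; the names are illustrative, not from this commit.

use tokio::sync::{Mutex, MutexGuard};

// The caller must hand over a guard, so this function cannot be reached
// without the room state lock being held.
async fn write_room_state(_proof: &MutexGuard<'_, ()>) {
    // ... persist state here ...
}

#[tokio::main]
async fn main() {
    let room_state_mutex = Mutex::new(());
    // A tokio guard can be held across .await points without making the
    // future !Send, unlike a std guard.
    let guard = room_state_mutex.lock().await;
    write_room_state(&guard).await;
}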
@@ -16,7 +16,7 @@ pub struct Service {

 impl Service {
     /// Set the room to the given statehash and update caches.
-    pub fn force_state(
+    pub async fn force_state(
         &self,
         room_id: &RoomId,
         shortstatehash: u64,
@@ -28,7 +28,7 @@ impl Service {
                 .roomid_mutex_state
                 .write()
                 .unwrap()
-                .entry(body.room_id.to_owned())
+                .entry(room_id.to_owned())
                 .or_default(),
         );
         let state_lock = mutex_state.lock().await;
@@ -74,10 +74,10 @@ impl Service {
                 Err(_) => continue,
             };

-            services().room.state_cache.update_membership(room_id, &user_id, membership, &pdu.sender, None, false)?;
+            services().rooms.state_cache.update_membership(room_id, &user_id, membership, &pdu.sender, None, false)?;
         }

-        services().room.state_cache.update_joined_count(room_id)?;
+        services().rooms.state_cache.update_joined_count(room_id)?;

         self.db.set_room_state(room_id, shortstatehash, &state_lock);

@@ -6,7 +6,7 @@ use ruma::{EventId, events::StateEventType, RoomId};
 use crate::{Result, PduEvent};

 #[async_trait]
-pub trait Data {
+pub trait Data: Send + Sync {
     /// Builds a StateMap by iterating over all keys that start
     /// with state_hash, this gives the full state for the given state_hash.
     async fn state_full_ids(&self, shortstatehash: u64) -> Result<BTreeMap<u64, Arc<EventId>>>;
@@ -1,7 +1,7 @@
 use ruma::{UserId, RoomId, serde::Raw, events::AnyStrippedStateEvent};
 use crate::Result;

-pub trait Data {
+pub trait Data: Send + Sync {
     fn mark_as_once_joined(&self, user_id: &UserId, room_id: &RoomId) -> Result<()>;
     fn mark_as_joined(&self, user_id: &UserId, room_id: &RoomId) -> Result<()>;
     fn mark_as_invited(&self, user_id: &UserId, room_id: &RoomId, last_state: Option<Vec<Raw<AnyStrippedStateEvent>>>) -> Result<()>;
@@ -9,7 +9,7 @@ pub struct StateDiff {
     pub removed: HashSet<CompressedStateEvent>,
 }

-pub trait Data {
+pub trait Data: Send + Sync {
     fn get_statediff(&self, shortstatehash: u64) -> Result<StateDiff>;
     fn save_statediff(&self, shortstatehash: u64, diff: StateDiff) -> Result<()>;
 }
@@ -4,7 +4,7 @@ use ruma::{signatures::CanonicalJsonObject, EventId, UserId, RoomId};

 use crate::{Result, PduEvent};

-pub trait Data {
+pub trait Data: Send + Sync {
     fn last_timeline_count(&self, sender_user: &UserId, room_id: &RoomId) -> Result<u64>;

     /// Returns the `count` of this pdu's id.
@@ -1,7 +1,7 @@
 use ruma::{UserId, RoomId};
 use crate::Result;

-pub trait Data {
+pub trait Data: Send + Sync {
     fn reset_notification_counts(&self, user_id: &UserId, room_id: &RoomId) -> Result<()>;

     fn notification_count(&self, user_id: &UserId, room_id: &RoomId) -> Result<u64>;