remove additional unnecessary Arc

Signed-off-by: Jason Volk <jason@zemos.net>
Jason Volk 2025-04-02 06:28:34 +00:00
parent b7109131e2
commit 6a073b4fa4
6 changed files with 15 additions and 33 deletions
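
All six files apply the same pattern: PduEvent values are returned and stored directly rather than behind Arc<PduEvent>, so the .map_ok(Arc::new) adapters on timeline.get_pdu(..) calls and the sync::Arc imports drop out. A minimal before/after sketch of the pattern, not taken from the commit and assuming get_pdu already yields an owned PduEvent:

    // before: wrap the fetched event in an Arc before storing it
    let pdu: Arc<PduEvent> = services.timeline.get_pdu(id).map_ok(Arc::new).await?;

    // after: keep the owned PduEvent directly; no Arc, no map_ok adapter
    let pdu: PduEvent = services.timeline.get_pdu(id).await?;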


@@ -1,6 +1,5 @@
 use std::{
 	collections::{BTreeMap, HashSet, VecDeque, hash_map},
-	sync::Arc,
 	time::Instant,
 };
 
@@ -8,7 +7,6 @@ use conduwuit::{
 	PduEvent, debug, debug_error, debug_warn, implement, pdu, trace,
 	utils::continue_exponential_backoff_secs, warn,
 };
-use futures::TryFutureExt;
 use ruma::{
 	CanonicalJsonValue, OwnedEventId, RoomId, ServerName, api::federation::event::get_event,
 };
@@ -31,7 +29,7 @@ pub(super) async fn fetch_and_handle_outliers<'a>(
 	events: &'a [OwnedEventId],
 	create_event: &'a PduEvent,
 	room_id: &'a RoomId,
-) -> Vec<(Arc<PduEvent>, Option<BTreeMap<String, CanonicalJsonValue>>)> {
+) -> Vec<(PduEvent, Option<BTreeMap<String, CanonicalJsonValue>>)> {
 	let back_off = |id| match self
 		.services
 		.globals
@@ -53,7 +51,7 @@ pub(super) async fn fetch_and_handle_outliers<'a>(
 			// a. Look in the main timeline (pduid_pdu tree)
 			// b. Look at outlier pdu tree
 			// (get_pdu_json checks both)
-			if let Ok(local_pdu) = self.services.timeline.get_pdu(id).map_ok(Arc::new).await {
+			if let Ok(local_pdu) = self.services.timeline.get_pdu(id).await {
 				trace!("Found {id} in db");
 				events_with_auth_events.push((id, Some(local_pdu), vec![]));
 				continue;


@@ -1,7 +1,4 @@
-use std::{
-	collections::{BTreeMap, HashMap, HashSet, VecDeque},
-	sync::Arc,
-};
+use std::collections::{BTreeMap, HashMap, HashSet, VecDeque};
 
 use conduwuit::{
 	PduEvent, Result, debug_warn, err, implement,
@@ -31,7+28,7 @@ pub(super) async fn fetch_prev(
 	initial_set: Vec<OwnedEventId>,
 ) -> Result<(
 	Vec<OwnedEventId>,
-	HashMap<OwnedEventId, (Arc<PduEvent>, BTreeMap<String, CanonicalJsonValue>)>,
+	HashMap<OwnedEventId, (PduEvent, BTreeMap<String, CanonicalJsonValue>)>,
 )> {
 	let mut graph: HashMap<OwnedEventId, _> = HashMap::with_capacity(initial_set.len());
 	let mut eventid_info = HashMap::new();


@@ -1,12 +1,9 @@
-use std::{
-	collections::{BTreeMap, HashMap, hash_map},
-	sync::Arc,
-};
+use std::collections::{BTreeMap, HashMap, hash_map};
 
 use conduwuit::{
 	Err, Error, PduEvent, Result, debug, debug_info, err, implement, state_res, trace, warn,
 };
-use futures::{TryFutureExt, future::ready};
+use futures::future::ready;
 use ruma::{
 	CanonicalJsonObject, CanonicalJsonValue, EventId, RoomId, ServerName,
 	api::client::error::ErrorKind, events::StateEventType,
@@ -24,7 +21,7 @@ pub(super) async fn handle_outlier_pdu<'a>(
 	room_id: &'a RoomId,
 	mut value: CanonicalJsonObject,
 	auth_events_known: bool,
-) -> Result<(Arc<PduEvent>, BTreeMap<String, CanonicalJsonValue>)> {
+) -> Result<(PduEvent, BTreeMap<String, CanonicalJsonValue>)> {
 	// 1. Remove unsigned field
 	value.remove("unsigned");
 
@@ -95,7 +92,7 @@ pub(super) async fn handle_outlier_pdu<'a>(
 	// Build map of auth events
 	let mut auth_events = HashMap::with_capacity(incoming_pdu.auth_events.len());
 	for id in &incoming_pdu.auth_events {
-		let Ok(auth_event) = self.services.timeline.get_pdu(id).map_ok(Arc::new).await else {
+		let Ok(auth_event) = self.services.timeline.get_pdu(id).await else {
 			warn!("Could not find auth event {id}");
 			continue;
 		};
@@ -123,15 +120,10 @@ pub(super) async fn handle_outlier_pdu<'a>(
 
 	// The original create event must be in the auth events
 	if !matches!(
-		auth_events
-			.get(&(StateEventType::RoomCreate, String::new().into()))
-			.map(AsRef::as_ref),
+		auth_events.get(&(StateEventType::RoomCreate, String::new().into())),
 		Some(_) | None
 	) {
-		return Err(Error::BadRequest(
-			ErrorKind::InvalidParam,
-			"Incoming event refers to wrong create event.",
-		));
+		return Err!(Request(InvalidParam("Incoming event refers to wrong create event.")));
 	}
 
 	let state_fetch = |ty: &StateEventType, sk: &str| {
@@ -161,5 +153,5 @@ pub(super) async fn handle_outlier_pdu<'a>(
 	trace!("Added pdu as outlier.");
-	Ok((Arc::new(incoming_pdu), val))
+	Ok((incoming_pdu, val))
 }
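
A note on the matches! hunk above: Some(_) | None matches any Option, so the old .map(AsRef::as_ref) call existed only to view Option<&Arc<PduEvent>> as Option<&PduEvent>; once auth_events holds plain PduEvent values, get(..) already returns the right reference type and the adapter can simply be dropped. A standalone sketch of that difference with toy types, not the event-handler code:

    use std::{collections::HashMap, sync::Arc};

    fn main() {
        // With Arc values, get() yields Option<&Arc<String>>, so an AsRef
        // adapter is needed to view it as Option<&String>.
        let arcs: HashMap<&str, Arc<String>> = HashMap::new();
        let _adapted: Option<&String> = arcs.get("k").map(AsRef::as_ref);

        // With owned values, get() already yields Option<&String>;
        // the adapter becomes dead weight.
        let plain: HashMap<&str, String> = HashMap::new();
        let _direct: Option<&String> = plain.get("k");
    }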


@@ -1,6 +1,5 @@
 use std::{
 	collections::{BTreeMap, HashMap},
-	sync::Arc,
 	time::Instant,
 };
 
@@ -24,10 +23,7 @@ pub(super) async fn handle_prev_pdu<'a>(
 	origin: &'a ServerName,
 	event_id: &'a EventId,
 	room_id: &'a RoomId,
-	eventid_info: &mut HashMap<
-		OwnedEventId,
-		(Arc<PduEvent>, BTreeMap<String, CanonicalJsonValue>),
-	>,
+	eventid_info: &mut HashMap<OwnedEventId, (PduEvent, BTreeMap<String, CanonicalJsonValue>)>,
 	create_event: &PduEvent,
 	first_ts_in_room: UInt,
 	prev_id: &EventId,


@@ -2,7 +2,6 @@ use std::{
 	borrow::Borrow,
 	collections::{HashMap, HashSet},
 	iter::Iterator,
-	sync::Arc,
 };
 
 use conduwuit::{
@@ -20,7 +19,7 @@ use crate::rooms::short::ShortStateHash;
 #[tracing::instrument(name = "state", level = "debug", skip_all)]
 pub(super) async fn state_at_incoming_degree_one(
 	&self,
-	incoming_pdu: &Arc<PduEvent>,
+	incoming_pdu: &PduEvent,
 ) -> Result<Option<HashMap<u64, OwnedEventId>>> {
 	let prev_event = &incoming_pdu.prev_events[0];
 	let Ok(prev_event_sstatehash) = self
@@ -67,7 +66,7 @@ pub(super) async fn state_at_incoming_degree_one(
 #[tracing::instrument(name = "state", level = "debug", skip_all)]
 pub(super) async fn state_at_incoming_resolved(
 	&self,
-	incoming_pdu: &Arc<PduEvent>,
+	incoming_pdu: &PduEvent,
 	room_id: &RoomId,
 	room_version_id: &RoomVersionId,
 ) -> Result<Option<HashMap<u64, OwnedEventId>>> {


@@ -18,7 +18,7 @@ use crate::rooms::{
 #[implement(super::Service)]
 pub(super) async fn upgrade_outlier_to_timeline_pdu(
 	&self,
-	incoming_pdu: Arc<PduEvent>,
+	incoming_pdu: PduEvent,
 	val: BTreeMap<String, CanonicalJsonValue>,
 	create_event: &PduEvent,
 	origin: &ServerName,