eliminate Arc impl for trait Event

Signed-off-by: Jason Volk <jason@zemos.net>
Authored by Jason Volk on 2025-04-11 01:29:26 +00:00; committed by Jade Ellis
parent 200df676e9
commit 3639b93658
GPG key ID: 8705A2A3EBF77BD2
5 changed files with 42 additions and 82 deletions
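With the `Arc<T>` blanket impl gone, the only blanket impl left is `impl<T: Event> Event for &T`, and the stores in the diffs below hold events by value and clone them instead of bumping a reference count. A minimal, self-contained sketch of that pattern, using a simplified stand-in `Event` trait and a hypothetical `Pdu` type rather than this crate's real `Event`/`PduEvent`:

```rust
use std::collections::HashMap;

// Simplified stand-in for the crate's `Event` trait (illustration only).
trait Event {
    fn event_id(&self) -> &str;
}

// The blanket impl this commit keeps: a borrowed event is still an `Event`.
impl<T: Event> Event for &T {
    fn event_id(&self) -> &str { (*self).event_id() }
}

// Hypothetical event type; deriving `Clone` replaces shared ownership via `Arc`.
#[derive(Clone)]
struct Pdu {
    id: String,
}

impl Event for Pdu {
    fn event_id(&self) -> &str { &self.id }
}

// Stores keep events by value, so a lookup clones the event itself.
fn fetch(map: &HashMap<String, Pdu>, id: &str) -> Option<Pdu> {
    map.get(id).cloned()
}

// Generic code bounded on `Event` accepts `&Pdu` through the blanket impl.
fn id_of<E: Event>(event: E) -> String {
    event.event_id().to_owned()
}

fn main() {
    let mut map = HashMap::new();
    map.insert("$a".to_owned(), Pdu { id: "$a".to_owned() });

    let ev = fetch(&map, "$a").expect("event exists");
    assert_eq!(id_of(&ev), "$a"); // no `Arc`, no extra clone at the call site
}
```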

@@ -2,7 +2,6 @@ use std::{
     borrow::Borrow,
     fmt::{Debug, Display},
     hash::Hash,
-    sync::Arc,
 };

 use ruma::{EventId, MilliSecondsSinceUnixEpoch, RoomId, UserId, events::TimelineEventType};
@@ -72,31 +71,3 @@ impl<T: Event> Event for &T {
     fn redacts(&self) -> Option<&Self::Id> { (*self).redacts() }
 }

-impl<T: Event> Event for Arc<T> {
-    type Id = T::Id;
-
-    fn event_id(&self) -> &Self::Id { (**self).event_id() }
-
-    fn room_id(&self) -> &RoomId { (**self).room_id() }
-
-    fn sender(&self) -> &UserId { (**self).sender() }
-
-    fn origin_server_ts(&self) -> MilliSecondsSinceUnixEpoch { (**self).origin_server_ts() }
-
-    fn event_type(&self) -> &TimelineEventType { (**self).event_type() }
-
-    fn content(&self) -> &RawJsonValue { (**self).content() }
-
-    fn state_key(&self) -> Option<&str> { (**self).state_key() }
-
-    fn prev_events(&self) -> impl DoubleEndedIterator<Item = &Self::Id> + Send + '_ {
-        (**self).prev_events()
-    }
-
-    fn auth_events(&self) -> impl DoubleEndedIterator<Item = &Self::Id> + Send + '_ {
-        (**self).auth_events()
-    }
-
-    fn redacts(&self) -> Option<&Self::Id> { (**self).redacts() }
-}
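The hunk above only deletes the `Arc<T>` impl; the knock-on change in the remaining files is that the test stores now hand back owned events, which is why `TestStore` picks up an `E: Event + Clone` bound. A small sketch of that bound shift, with hypothetical `ArcStore`/`ValueStore` types standing in for the real `TestStore`:

```rust
use std::{collections::HashMap, sync::Arc};

// Before: events behind `Arc`, so a lookup only bumps a reference count
// and no `Clone` bound on `E` is required.
struct ArcStore<E>(HashMap<String, Arc<E>>);

impl<E> ArcStore<E> {
    fn get(&self, id: &str) -> Option<Arc<E>> {
        self.0.get(id).map(Arc::clone)
    }
}

// After: events held by value, so a lookup clones the event itself,
// which is what the `E: Event + Clone` bounds in the later hunks pay for.
struct ValueStore<E>(HashMap<String, E>);

impl<E: Clone> ValueStore<E> {
    fn get(&self, id: &str) -> Option<E> {
        self.0.get(id).cloned()
    }
}

fn main() {
    let arc_store = ArcStore(HashMap::from([("$a".to_owned(), Arc::new(1u8))]));
    let value_store = ValueStore(HashMap::from([("$a".to_owned(), 1u8)]));

    assert_eq!(arc_store.get("$a").as_deref(), Some(&1));
    assert_eq!(value_store.get("$a"), Some(1));
}
```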

@@ -4,10 +4,7 @@ extern crate test;
 use std::{
     borrow::Borrow,
     collections::{HashMap, HashSet},
-    sync::{
-        Arc,
-        atomic::{AtomicU64, Ordering::SeqCst},
-    },
+    sync::atomic::{AtomicU64, Ordering::SeqCst},
 };

 use futures::{future, future::ready};
@@ -64,7 +61,7 @@ fn resolution_shallow_auth_chain(c: &mut test::Bencher) {
     c.iter(|| async {
         let ev_map = store.0.clone();
         let state_sets = [&state_at_bob, &state_at_charlie];
-        let fetch = |id: OwnedEventId| ready(ev_map.get(&id).map(Arc::clone));
+        let fetch = |id: OwnedEventId| ready(ev_map.get(&id).clone());
         let exists = |id: OwnedEventId| ready(ev_map.get(&id).is_some());
         let auth_chain_sets: Vec<HashSet<_>> = state_sets
             .iter()
@@ -148,7 +145,7 @@ fn resolve_deeper_event_set(c: &mut test::Bencher) {
             })
             .collect();

-        let fetch = |id: OwnedEventId| ready(inner.get(&id).map(Arc::clone));
+        let fetch = |id: OwnedEventId| ready(inner.get(&id).clone());
         let exists = |id: OwnedEventId| ready(inner.get(&id).is_some());
         let _ = match state_res::resolve(
             &RoomVersionId::V6,
@@ -171,20 +168,20 @@ fn resolve_deeper_event_set(c: &mut test::Bencher) {
 // IMPLEMENTATION DETAILS AHEAD
 //
 /////////////////////////////////////////////////////////////////////*/

-struct TestStore<E: Event>(HashMap<OwnedEventId, Arc<E>>);
+struct TestStore<E: Event>(HashMap<OwnedEventId, E>);
 #[allow(unused)]
-impl<E: Event> TestStore<E> {
-    fn get_event(&self, room_id: &RoomId, event_id: &EventId) -> Result<Arc<E>> {
+impl<E: Event + Clone> TestStore<E> {
+    fn get_event(&self, room_id: &RoomId, event_id: &EventId) -> Result<E> {
         self.0
             .get(event_id)
-            .map(Arc::clone)
+            .cloned()
             .ok_or_else(|| Error::NotFound(format!("{} not found", event_id)))
     }

     /// Returns the events that correspond to the `event_ids` sorted in the same
     /// order.
-    fn get_events(&self, room_id: &RoomId, event_ids: &[OwnedEventId]) -> Result<Vec<Arc<E>>> {
+    fn get_events(&self, room_id: &RoomId, event_ids: &[OwnedEventId]) -> Result<Vec<E>> {
         let mut events = vec![];
         for id in event_ids {
             events.push(self.get_event(room_id, id)?);
@@ -264,7 +261,7 @@ impl TestStore<PduEvent> {
             &[],
         );
         let cre = create_event.event_id().to_owned();
-        self.0.insert(cre.clone(), Arc::clone(&create_event));
+        self.0.insert(cre.clone(), create_event.clone());

         let alice_mem = to_pdu_event(
             "IMA",
@@ -276,7 +273,7 @@ impl TestStore<PduEvent> {
             &[cre.clone()],
         );
         self.0
-            .insert(alice_mem.event_id().to_owned(), Arc::clone(&alice_mem));
+            .insert(alice_mem.event_id().to_owned(), alice_mem.clone());

         let join_rules = to_pdu_event(
             "IJR",
@@ -383,7 +380,7 @@ fn to_pdu_event<S>(
     content: Box<RawJsonValue>,
     auth_events: &[S],
     prev_events: &[S],
-) -> Arc<PduEvent>
+) -> PduEvent
 where
     S: AsRef<str>,
 {
@@ -407,7 +404,7 @@ where
         .collect::<Vec<_>>();

     let state_key = state_key.map(ToOwned::to_owned);
-    Arc::new(PduEvent {
+    PduEvent {
         event_id: id.try_into().unwrap(),
         rest: Pdu::RoomV3Pdu(RoomV3Pdu {
             room_id: room_id().to_owned(),
@@ -424,12 +421,12 @@ where
             hashes: EventHash::new(String::new()),
             signatures: Signatures::new(),
         }),
-    })
+    }
 }

 // all graphs start with these input events
 #[allow(non_snake_case)]
-fn INITIAL_EVENTS() -> HashMap<OwnedEventId, Arc<PduEvent>> {
+fn INITIAL_EVENTS() -> HashMap<OwnedEventId, PduEvent> {
     vec![
         to_pdu_event::<&EventId>(
             "CREATE",
@@ -511,7 +508,7 @@ fn INITIAL_EVENTS() -> HashMap<OwnedEventId, Arc<PduEvent>> {

 // all graphs start with these input events
 #[allow(non_snake_case)]
-fn BAN_STATE_SET() -> HashMap<OwnedEventId, Arc<PduEvent>> {
+fn BAN_STATE_SET() -> HashMap<OwnedEventId, PduEvent> {
     vec![
         to_pdu_event(
             "PA",

@@ -1112,8 +1112,6 @@ fn verify_third_party_invite(

 #[cfg(test)]
 mod tests {
-    use std::sync::Arc;
-
     use ruma::events::{
         StateEventType, TimelineEventType,
         room::{
@@ -1143,7 +1141,7 @@ mod tests {

         let auth_events = events
             .values()
-            .map(|ev| (ev.event_type().with_state_key(ev.state_key().unwrap()), Arc::clone(ev)))
+            .map(|ev| (ev.event_type().with_state_key(ev.state_key().unwrap()), ev.clone()))
             .collect::<StateMap<_>>();

         let requester = to_pdu_event(
@@ -1188,7 +1186,7 @@ mod tests {

         let auth_events = events
             .values()
-            .map(|ev| (ev.event_type().with_state_key(ev.state_key().unwrap()), Arc::clone(ev)))
+            .map(|ev| (ev.event_type().with_state_key(ev.state_key().unwrap()), ev.clone()))
             .collect::<StateMap<_>>();

         let requester = to_pdu_event(
@@ -1233,7 +1231,7 @@ mod tests {

         let auth_events = events
             .values()
-            .map(|ev| (ev.event_type().with_state_key(ev.state_key().unwrap()), Arc::clone(ev)))
+            .map(|ev| (ev.event_type().with_state_key(ev.state_key().unwrap()), ev.clone()))
             .collect::<StateMap<_>>();

         let requester = to_pdu_event(
@@ -1278,7 +1276,7 @@ mod tests {

         let auth_events = events
             .values()
-            .map(|ev| (ev.event_type().with_state_key(ev.state_key().unwrap()), Arc::clone(ev)))
+            .map(|ev| (ev.event_type().with_state_key(ev.state_key().unwrap()), ev.clone()))
             .collect::<StateMap<_>>();

         let requester = to_pdu_event(
@@ -1340,7 +1338,7 @@ mod tests {

         let auth_events = events
             .values()
-            .map(|ev| (ev.event_type().with_state_key(ev.state_key().unwrap()), Arc::clone(ev)))
+            .map(|ev| (ev.event_type().with_state_key(ev.state_key().unwrap()), ev.clone()))
             .collect::<StateMap<_>>();

         let requester = to_pdu_event(
@@ -1412,7 +1410,7 @@ mod tests {

         let auth_events = events
             .values()
-            .map(|ev| (ev.event_type().with_state_key(ev.state_key().unwrap()), Arc::clone(ev)))
+            .map(|ev| (ev.event_type().with_state_key(ev.state_key().unwrap()), ev.clone()))
             .collect::<StateMap<_>>();

         let requester = to_pdu_event(

@@ -861,10 +861,7 @@ where

 #[cfg(test)]
 mod tests {
-    use std::{
-        collections::{HashMap, HashSet},
-        sync::Arc,
-    };
+    use std::collections::{HashMap, HashSet};

     use maplit::{hashmap, hashset};
     use rand::seq::SliceRandom;
@@ -906,7 +903,7 @@ mod tests {

         let power_events = event_map
             .values()
-            .filter(|&pdu| is_power_event(&**pdu))
+            .filter(|&pdu| is_power_event(&*pdu))
             .map(|pdu| pdu.event_id.clone())
             .collect::<Vec<_>>();

@@ -1489,7 +1486,7 @@ mod tests {
     }

     #[allow(non_snake_case)]
-    fn BAN_STATE_SET() -> HashMap<OwnedEventId, Arc<PduEvent>> {
+    fn BAN_STATE_SET() -> HashMap<OwnedEventId, PduEvent> {
         vec![
             to_pdu_event(
                 "PA",
@@ -1534,7 +1531,7 @@ mod tests {
     }

     #[allow(non_snake_case)]
-    fn JOIN_RULE() -> HashMap<OwnedEventId, Arc<PduEvent>> {
+    fn JOIN_RULE() -> HashMap<OwnedEventId, PduEvent> {
         vec![
             to_pdu_event(
                 "JR",

@@ -1,10 +1,7 @@
 use std::{
     borrow::Borrow,
     collections::{BTreeMap, HashMap, HashSet},
-    sync::{
-        Arc,
-        atomic::{AtomicU64, Ordering::SeqCst},
-    },
+    sync::atomic::{AtomicU64, Ordering::SeqCst},
 };

 use futures::future::ready;
@@ -36,7 +33,7 @@ use crate::{
 static SERVER_TIMESTAMP: AtomicU64 = AtomicU64::new(0);

 pub(crate) async fn do_check(
-    events: &[Arc<PduEvent>],
+    events: &[PduEvent],
     edges: Vec<Vec<OwnedEventId>>,
     expected_state_ids: Vec<OwnedEventId>,
 ) {
@@ -85,7 +82,7 @@ pub(crate) async fn do_check(
     }

     // event_id -> PduEvent
-    let mut event_map: HashMap<OwnedEventId, Arc<PduEvent>> = HashMap::new();
+    let mut event_map: HashMap<OwnedEventId, PduEvent> = HashMap::new();

     // event_id -> StateMap<OwnedEventId>
     let mut state_at_event: HashMap<OwnedEventId, StateMap<OwnedEventId>> = HashMap::new();
@@ -194,7 +191,7 @@ pub(crate) async fn do_check(
         store.0.insert(ev_id.to_owned(), event.clone());

         state_at_event.insert(node, state_after);
-        event_map.insert(event_id.to_owned(), Arc::clone(store.0.get(ev_id).unwrap()));
+        event_map.insert(event_id.to_owned(), store.0.get(ev_id).unwrap().clone());
     }

     let mut expected_state = StateMap::new();
@@ -235,10 +232,10 @@ pub(crate) async fn do_check(
 }

 #[allow(clippy::exhaustive_structs)]
-pub(crate) struct TestStore<E: Event>(pub(crate) HashMap<OwnedEventId, Arc<E>>);
+pub(crate) struct TestStore<E: Event>(pub(crate) HashMap<OwnedEventId, E>);

-impl<E: Event> TestStore<E> {
-    pub(crate) fn get_event(&self, _: &RoomId, event_id: &EventId) -> Result<Arc<E>> {
+impl<E: Event + Clone> TestStore<E> {
+    pub(crate) fn get_event(&self, _: &RoomId, event_id: &EventId) -> Result<E> {
         self.0
             .get(event_id)
             .cloned()
@@ -288,7 +285,7 @@ impl TestStore<PduEvent> {
             &[],
         );
         let cre = create_event.event_id().to_owned();
-        self.0.insert(cre.clone(), Arc::clone(&create_event));
+        self.0.insert(cre.clone(), create_event.clone());

         let alice_mem = to_pdu_event(
             "IMA",
@@ -300,7 +297,7 @@ impl TestStore<PduEvent> {
             &[cre.clone()],
         );
         self.0
-            .insert(alice_mem.event_id().to_owned(), Arc::clone(&alice_mem));
+            .insert(alice_mem.event_id().to_owned(), alice_mem.clone());

         let join_rules = to_pdu_event(
             "IJR",
@@ -399,7 +396,7 @@ pub(crate) fn to_init_pdu_event(
     ev_type: TimelineEventType,
     state_key: Option<&str>,
     content: Box<RawJsonValue>,
-) -> Arc<PduEvent> {
+) -> PduEvent {
     let ts = SERVER_TIMESTAMP.fetch_add(1, SeqCst);
     let id = if id.contains('$') {
         id.to_owned()
@@ -408,7 +405,7 @@ pub(crate) fn to_init_pdu_event(
     };

     let state_key = state_key.map(ToOwned::to_owned);
-    Arc::new(PduEvent {
+    PduEvent {
         event_id: id.try_into().unwrap(),
         rest: Pdu::RoomV3Pdu(RoomV3Pdu {
             room_id: room_id().to_owned(),
@@ -425,7 +422,7 @@ pub(crate) fn to_init_pdu_event(
             hashes: EventHash::new("".to_owned()),
             signatures: ServerSignatures::default(),
         }),
-    })
+    }
 }

 pub(crate) fn to_pdu_event<S>(
@@ -436,7 +433,7 @@ pub(crate) fn to_pdu_event<S>(
     content: Box<RawJsonValue>,
     auth_events: &[S],
     prev_events: &[S],
-) -> Arc<PduEvent>
+) -> PduEvent
 where
     S: AsRef<str>,
 {
@@ -458,7 +455,7 @@ where
         .collect::<Vec<_>>();

     let state_key = state_key.map(ToOwned::to_owned);
-    Arc::new(PduEvent {
+    PduEvent {
         event_id: id.try_into().unwrap(),
         rest: Pdu::RoomV3Pdu(RoomV3Pdu {
             room_id: room_id().to_owned(),
@@ -475,12 +472,12 @@ where
             hashes: EventHash::new("".to_owned()),
             signatures: ServerSignatures::default(),
         }),
-    })
+    }
 }

 // all graphs start with these input events
 #[allow(non_snake_case)]
-pub(crate) fn INITIAL_EVENTS() -> HashMap<OwnedEventId, Arc<PduEvent>> {
+pub(crate) fn INITIAL_EVENTS() -> HashMap<OwnedEventId, PduEvent> {
     vec![
         to_pdu_event::<&EventId>(
             "CREATE",
@@ -562,7 +559,7 @@ pub(crate) fn INITIAL_EVENTS() -> HashMap<OwnedEventId, Arc<PduEvent>> {

 // all graphs start with these input events
 #[allow(non_snake_case)]
-pub(crate) fn INITIAL_EVENTS_CREATE_ROOM() -> HashMap<OwnedEventId, Arc<PduEvent>> {
+pub(crate) fn INITIAL_EVENTS_CREATE_ROOM() -> HashMap<OwnedEventId, PduEvent> {
     vec![to_pdu_event::<&EventId>(
         "CREATE",
         alice(),