refactor for stronger RawPduId type

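A hedged sketch of what the stronger id type could look like, inferred from the diff below where a PduId built from a shortroomid/shorteventid pair converts into a RawPduId used directly as a database key; everything beyond the names visible in the diff is an assumption.

// Sketch only: a structured id and its raw, fixed-width key form.
struct PduId {
    shortroomid: u64,  // ShortRoomId in the real code
    shorteventid: u64, // carried as a PduCount / ShortEventId in the diff
}

// The raw form is the big-endian byte key actually stored in the database.
struct RawPduId([u8; 16]);

impl From<PduId> for RawPduId {
    fn from(id: PduId) -> Self {
        let mut buf = [0_u8; 16];
        buf[..8].copy_from_slice(&id.shortroomid.to_be_bytes());
        buf[8..].copy_from_slice(&id.shorteventid.to_be_bytes());
        Self(buf)
    }
}

// Borrowing as bytes lets calls like raw_keys_from(&pdu_id) take the key directly.
impl AsRef<[u8]> for RawPduId {
    fn as_ref(&self) -> &[u8] { &self.0 }
}
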
implement standard traits for PduCount

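Which traits the commit adds is not visible in these hunks; one plausible reading, with the variants taken from the replaced key-building code further down, is a set of derives plus a hand-written ordering, since backfilled counts run backwards and a derived Ord would sort them the wrong way.

use std::cmp::Ordering;

// Sketch, not the actual implementation: variants as seen in the old code.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum PduCount {
    Normal(u64),
    Backfilled(u64),
}

impl Ord for PduCount {
    fn cmp(&self, other: &Self) -> Ordering {
        use PduCount::{Backfilled, Normal};
        match (self, other) {
            (Normal(a), Normal(b)) => a.cmp(b),
            // Larger backfill counts lie further in the past.
            (Backfilled(a), Backfilled(b)) => b.cmp(a),
            (Normal(_), Backfilled(_)) => Ordering::Greater,
            (Backfilled(_), Normal(_)) => Ordering::Less,
        }
    }
}

impl PartialOrd for PduCount {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) }
}
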
enable serde for arrayvec

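Enabling arrayvec's serde feature is a manifest-level change; its effect is that fixed-capacity ArrayVec values can take part in (de)serialization. A hypothetical illustration, not code from this commit:

use arrayvec::ArrayVec;
use serde::{Deserialize, Serialize};

// With the "serde" feature on the arrayvec dependency, this derives cleanly.
#[derive(Serialize, Deserialize)]
struct Example {
    key: ArrayVec<u8, 16>,
}
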
typedef various shortids

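The diff below imports ShortRoomId and ShortEventId from rooms::short where the old code passed bare u64s, so the aliases presumably reduce to something like the following; the exact set is an assumption.

// Only ShortRoomId and ShortEventId appear in this diff; anything else is a guess.
pub type ShortId = u64;
pub type ShortRoomId = ShortId;
pub type ShortEventId = ShortId;
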
pducount simplifications

split parts of pdu_metadata service to core/pdu and api/relations

remove some yields; improve var names/syntax

tweak types for timeline limit arguments

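For the limit arguments, the replaced pagination code below turns an optional ruma UInt into a usize with a default of 30 and a hard cap of 100; a standalone sketch of that conversion (the helper name is mine):

use ruma::UInt;

// Default to 30 results and never return more than 100, as the removed code did.
fn timeline_limit(limit: Option<UInt>) -> usize {
    limit
        .and_then(|l| usize::try_from(l).ok())
        .unwrap_or(30)
        .min(100)
}
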
Signed-off-by: Jason Volk <jason@zemos.net>
Jason Volk 2024-11-02 06:12:54 +00:00
parent 2e4d9cb37c
commit 9da523c004
41 changed files with 796 additions and 573 deletions

@@ -2,15 +2,21 @@ use std::{mem::size_of, sync::Arc};
use conduit::{
result::LogErr,
utils,
utils::{stream::TryIgnore, ReadyExt},
utils::{stream::TryIgnore, u64_from_u8, ReadyExt},
PduCount, PduEvent,
};
use database::Map;
use futures::{Stream, StreamExt};
use ruma::{api::Direction, EventId, RoomId, UserId};
use crate::{rooms, Dep};
use crate::{
rooms,
rooms::{
short::{ShortEventId, ShortRoomId},
timeline::{PduId, RawPduId},
},
Dep,
};
pub(super) struct Data {
tofrom_relation: Arc<Map>,
@@ -46,35 +52,36 @@ impl Data {
}
pub(super) fn get_relations<'a>(
&'a self, user_id: &'a UserId, shortroomid: u64, target: u64, until: PduCount, dir: Direction,
&'a self, user_id: &'a UserId, shortroomid: ShortRoomId, target: ShortEventId, from: PduCount, dir: Direction,
) -> impl Stream<Item = PdusIterItem> + Send + '_ {
let prefix = target.to_be_bytes().to_vec();
let mut current = prefix.clone();
let count_raw = match until {
PduCount::Normal(x) => x.saturating_sub(1),
PduCount::Backfilled(x) => {
current.extend_from_slice(&0_u64.to_be_bytes());
u64::MAX.saturating_sub(x).saturating_sub(1)
},
};
current.extend_from_slice(&count_raw.to_be_bytes());
let current: RawPduId = PduId {
shortroomid,
shorteventid: from,
}
.into();
match dir {
Direction::Forward => self.tofrom_relation.raw_keys_from(&current).boxed(),
Direction::Backward => self.tofrom_relation.rev_raw_keys_from(&current).boxed(),
}
.ignore_err()
.ready_take_while(move |key| key.starts_with(&prefix))
.map(|to_from| utils::u64_from_u8(&to_from[(size_of::<u64>())..]))
.filter_map(move |from| async move {
let mut pduid = shortroomid.to_be_bytes().to_vec();
pduid.extend_from_slice(&from.to_be_bytes());
let mut pdu = self.services.timeline.get_pdu_from_id(&pduid).await.ok()?;
.ready_take_while(move |key| key.starts_with(&target.to_be_bytes()))
.map(|to_from| u64_from_u8(&to_from[8..16]))
.map(PduCount::from_unsigned)
.filter_map(move |shorteventid| async move {
let pdu_id: RawPduId = PduId {
shortroomid,
shorteventid,
}
.into();
let mut pdu = self.services.timeline.get_pdu_from_id(&pdu_id).await.ok()?;
if pdu.sender != user_id {
pdu.remove_transaction_id().log_err().ok();
}
Some((PduCount::Normal(from), pdu))
Some((shorteventid, pdu))
})
}

@@ -1,18 +1,9 @@
mod data;
use std::sync::Arc;
use conduit::{
at,
utils::{result::FlatOk, stream::ReadyExt, IterStream},
PduCount, Result,
};
use futures::{FutureExt, StreamExt};
use ruma::{
api::{client::relations::get_relating_events, Direction},
events::{relation::RelationType, TimelineEventType},
EventId, RoomId, UInt, UserId,
};
use serde::Deserialize;
use conduit::{PduCount, Result};
use futures::StreamExt;
use ruma::{api::Direction, EventId, RoomId, UserId};
use self::data::{Data, PdusIterItem};
use crate::{rooms, Dep};
@@ -24,26 +15,14 @@ pub struct Service {
struct Services {
short: Dep<rooms::short::Service>,
state_accessor: Dep<rooms::state_accessor::Service>,
timeline: Dep<rooms::timeline::Service>,
}
#[derive(Clone, Debug, Deserialize)]
struct ExtractRelType {
rel_type: RelationType,
}
#[derive(Clone, Debug, Deserialize)]
struct ExtractRelatesToEventId {
#[serde(rename = "m.relates_to")]
relates_to: ExtractRelType,
}
impl crate::Service for Service {
fn build(args: crate::Args<'_>) -> Result<Arc<Self>> {
Ok(Arc::new(Self {
services: Services {
short: args.depend::<rooms::short::Service>("rooms::short"),
state_accessor: args.depend::<rooms::state_accessor::Service>("rooms::state_accessor"),
timeline: args.depend::<rooms::timeline::Service>("rooms::timeline"),
},
db: Data::new(&args),
@@ -64,82 +43,9 @@ impl Service {
}
}
#[allow(clippy::too_many_arguments)]
pub async fn paginate_relations_with_filter(
&self, sender_user: &UserId, room_id: &RoomId, target: &EventId, filter_event_type: Option<TimelineEventType>,
filter_rel_type: Option<RelationType>, from: Option<&str>, to: Option<&str>, limit: Option<UInt>,
recurse: bool, dir: Direction,
) -> Result<get_relating_events::v1::Response> {
let from = from
.map(PduCount::try_from_string)
.transpose()?
.unwrap_or_else(|| match dir {
Direction::Forward => PduCount::min(),
Direction::Backward => PduCount::max(),
});
let to = to.map(PduCount::try_from_string).flat_ok();
// Use limit or else 30, with maximum 100
let limit: usize = limit
.map(TryInto::try_into)
.flat_ok()
.unwrap_or(30)
.min(100);
// Spec (v1.10) recommends depth of at least 3
let depth: u8 = if recurse {
3
} else {
1
};
let events: Vec<PdusIterItem> = self
.get_relations(sender_user, room_id, target, from, limit, depth, dir)
.await
.into_iter()
.filter(|(_, pdu)| {
filter_event_type
.as_ref()
.is_none_or(|kind| *kind == pdu.kind)
})
.filter(|(_, pdu)| {
filter_rel_type.as_ref().is_none_or(|rel_type| {
pdu.get_content()
.map(|c: ExtractRelatesToEventId| c.relates_to.rel_type)
.is_ok_and(|r| r == *rel_type)
})
})
.stream()
.filter_map(|item| self.visibility_filter(sender_user, item))
.ready_take_while(|(count, _)| Some(*count) != to)
.take(limit)
.collect()
.boxed()
.await;
let next_batch = match dir {
Direction::Backward => events.first(),
Direction::Forward => events.last(),
}
.map(at!(0))
.map(|t| t.stringify());
Ok(get_relating_events::v1::Response {
next_batch,
prev_batch: Some(from.stringify()),
recursion_depth: recurse.then_some(depth.into()),
chunk: events
.into_iter()
.map(at!(1))
.map(|pdu| pdu.to_message_like_event())
.collect(),
})
}
#[allow(clippy::too_many_arguments)]
pub async fn get_relations(
&self, user_id: &UserId, room_id: &RoomId, target: &EventId, until: PduCount, limit: usize, max_depth: u8,
&self, user_id: &UserId, room_id: &RoomId, target: &EventId, from: PduCount, limit: usize, max_depth: u8,
dir: Direction,
) -> Vec<PdusIterItem> {
let room_id = self.services.short.get_or_create_shortroomid(room_id).await;
@@ -152,7 +58,7 @@ impl Service {
let mut pdus: Vec<_> = self
.db
.get_relations(user_id, room_id, target, until, dir)
.get_relations(user_id, room_id, target, from, dir)
.collect()
.await;
@@ -167,7 +73,7 @@ impl Service {
let relations: Vec<_> = self
.db
.get_relations(user_id, room_id, target, until, dir)
.get_relations(user_id, room_id, target, from, dir)
.collect()
.await;
@@ -186,16 +92,6 @@ impl Service {
pdus
}
async fn visibility_filter(&self, sender_user: &UserId, item: PdusIterItem) -> Option<PdusIterItem> {
let (_, pdu) = &item;
self.services
.state_accessor
.user_can_see_event(sender_user, &pdu.room_id, &pdu.event_id)
.await
.then_some(item)
}
#[inline]
#[tracing::instrument(skip_all, level = "debug")]
pub fn mark_as_referenced(&self, room_id: &RoomId, event_ids: &[Arc<EventId>]) {