refactor for stronger RawPduId type
implement standard traits for PduCount
enable serde for arrayvec
typedef various shortids
pducount simplifications
split parts of pdu_metadata service to core/pdu and api/relations
remove some yields; improve var names/syntax
tweak types for timeline limit arguments

Signed-off-by: Jason Volk <jason@zemos.net>
parent 2e4d9cb37c
commit 9da523c004

41 changed files with 796 additions and 573 deletions
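Most of the call-site changes below lean on the first item in the commit message: once PduCount implements the standard Display and FromStr traits, pagination tokens round-trip through to_string() and str::parse(), and the bespoke stringify()/try_from_string() helpers can go. A minimal sketch of that idea follows, using a simplified stand-in type; the real PduCount lives in the core crate and is not a plain u64 wrapper, so the definition and names here are illustrative only, not the actual implementation.

// Illustrative stand-in only; the real PduCount is richer than a u64 newtype.
use std::{fmt, num::ParseIntError, str::FromStr};

#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
struct PduCount(u64);

impl PduCount {
    fn min() -> Self { Self(u64::MIN) }
    fn max() -> Self { Self(u64::MAX) }
}

impl fmt::Display for PduCount {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.0) }
}

impl FromStr for PduCount {
    type Err = ParseIntError;

    fn from_str(token: &str) -> Result<Self, Self::Err> { token.parse().map(Self) }
}

fn main() -> Result<(), ParseIntError> {
    // Same shape as the call sites in the hunks below: parse an optional `from`
    // token, falling back to a default when the client did not supply one.
    let from: PduCount = Some("42")
        .map(str::parse)
        .transpose()?
        .unwrap_or_else(PduCount::max);
    assert_eq!(from.to_string(), "42");
    assert!(PduCount::min() < from);
    Ok(())
}

With Display and FromStr in place, call sites only need a type annotation (let from: PduCount = ...) and inference does the rest, which is the pattern the hunks below switch to.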
@@ -168,12 +168,12 @@ pub(crate) async fn get_context_route(
 		start: events_before
 			.last()
-			.map_or_else(|| base_token.stringify(), |(count, _)| count.stringify())
+			.map_or_else(|| base_token.to_string(), |(count, _)| count.to_string())
 			.into(),
 
 		end: events_after
 			.last()
-			.map_or_else(|| base_token.stringify(), |(count, _)| count.stringify())
+			.map_or_else(|| base_token.to_string(), |(count, _)| count.to_string())
 			.into(),
 
 		events_before: events_before
@@ -1376,15 +1376,12 @@ pub(crate) async fn invite_helper(
 	)
 	.map_err(|_| Error::BadRequest(ErrorKind::InvalidParam, "Origin field is invalid."))?;
 
-	let pdu_id: Vec<u8> = services
+	let pdu_id = services
 		.rooms
 		.event_handler
 		.handle_incoming_pdu(&origin, room_id, &event_id, value, true)
 		.await?
-		.ok_or(Error::BadRequest(
-			ErrorKind::InvalidParam,
-			"Could not accept incoming PDU as timeline event.",
-		))?;
+		.ok_or_else(|| err!(Request(InvalidParam("Could not accept incoming PDU as timeline event."))))?;
 
 	services.sending.send_pdu_room(room_id, &pdu_id).await?;
 	return Ok(());
@@ -62,19 +62,17 @@ pub(crate) async fn get_message_events_route(
 	let room_id = &body.room_id;
 	let filter = &body.filter;
 
-	let from_default = match body.dir {
-		Direction::Forward => PduCount::min(),
-		Direction::Backward => PduCount::max(),
-	};
-
-	let from = body
+	let from: PduCount = body
 		.from
 		.as_deref()
-		.map(PduCount::try_from_string)
+		.map(str::parse)
 		.transpose()?
-		.unwrap_or(from_default);
+		.unwrap_or_else(|| match body.dir {
+			Direction::Forward => PduCount::min(),
+			Direction::Backward => PduCount::max(),
+		});
 
-	let to = body.to.as_deref().map(PduCount::try_from_string).flat_ok();
+	let to: Option<PduCount> = body.to.as_deref().map(str::parse).flat_ok();
 
 	let limit: usize = body
 		.limit
@@ -156,8 +154,8 @@ pub(crate) async fn get_message_events_route(
 		.collect();
 
 	Ok(get_message_events::v3::Response {
-		start: from.stringify(),
-		end: next_token.as_ref().map(PduCount::stringify),
+		start: from.to_string(),
+		end: next_token.as_ref().map(PduCount::to_string),
 		chunk,
 		state,
 	})
@@ -1,34 +1,43 @@
 use axum::extract::State;
-use ruma::api::client::relations::{
-	get_relating_events, get_relating_events_with_rel_type, get_relating_events_with_rel_type_and_event_type,
+use conduit::{
+	at,
+	utils::{result::FlatOk, IterStream, ReadyExt},
+	PduCount, Result,
 };
+use futures::{FutureExt, StreamExt};
+use ruma::{
+	api::{
+		client::relations::{
+			get_relating_events, get_relating_events_with_rel_type, get_relating_events_with_rel_type_and_event_type,
+		},
+		Direction,
+	},
+	events::{relation::RelationType, TimelineEventType},
+	EventId, RoomId, UInt, UserId,
+};
+use service::{rooms::timeline::PdusIterItem, Services};
 
-use crate::{Result, Ruma};
+use crate::Ruma;
 
 /// # `GET /_matrix/client/r0/rooms/{roomId}/relations/{eventId}/{relType}/{eventType}`
 pub(crate) async fn get_relating_events_with_rel_type_and_event_type_route(
 	State(services): State<crate::State>, body: Ruma<get_relating_events_with_rel_type_and_event_type::v1::Request>,
 ) -> Result<get_relating_events_with_rel_type_and_event_type::v1::Response> {
-	let sender_user = body.sender_user.as_deref().expect("user is authenticated");
-
-	let res = services
-		.rooms
-		.pdu_metadata
-		.paginate_relations_with_filter(
-			sender_user,
-			&body.room_id,
-			&body.event_id,
-			body.event_type.clone().into(),
-			body.rel_type.clone().into(),
-			body.from.as_deref(),
-			body.to.as_deref(),
-			body.limit,
-			body.recurse,
-			body.dir,
-		)
-		.await?;
-
-	Ok(get_relating_events_with_rel_type_and_event_type::v1::Response {
+	paginate_relations_with_filter(
+		&services,
+		body.sender_user(),
+		&body.room_id,
+		&body.event_id,
+		body.event_type.clone().into(),
+		body.rel_type.clone().into(),
+		body.from.as_deref(),
+		body.to.as_deref(),
+		body.limit,
+		body.recurse,
+		body.dir,
+	)
+	.await
+	.map(|res| get_relating_events_with_rel_type_and_event_type::v1::Response {
 		chunk: res.chunk,
 		next_batch: res.next_batch,
 		prev_batch: res.prev_batch,
@@ -40,26 +49,21 @@ pub(crate) async fn get_relating_events_with_rel_type_and_event_type_route(
 pub(crate) async fn get_relating_events_with_rel_type_route(
 	State(services): State<crate::State>, body: Ruma<get_relating_events_with_rel_type::v1::Request>,
 ) -> Result<get_relating_events_with_rel_type::v1::Response> {
-	let sender_user = body.sender_user.as_deref().expect("user is authenticated");
-
-	let res = services
-		.rooms
-		.pdu_metadata
-		.paginate_relations_with_filter(
-			sender_user,
-			&body.room_id,
-			&body.event_id,
-			None,
-			body.rel_type.clone().into(),
-			body.from.as_deref(),
-			body.to.as_deref(),
-			body.limit,
-			body.recurse,
-			body.dir,
-		)
-		.await?;
-
-	Ok(get_relating_events_with_rel_type::v1::Response {
+	paginate_relations_with_filter(
+		&services,
+		body.sender_user(),
+		&body.room_id,
+		&body.event_id,
+		None,
+		body.rel_type.clone().into(),
+		body.from.as_deref(),
+		body.to.as_deref(),
+		body.limit,
+		body.recurse,
+		body.dir,
+	)
+	.await
+	.map(|res| get_relating_events_with_rel_type::v1::Response {
 		chunk: res.chunk,
 		next_batch: res.next_batch,
 		prev_batch: res.prev_batch,
@@ -71,22 +75,103 @@ pub(crate) async fn get_relating_events_with_rel_type_route(
 pub(crate) async fn get_relating_events_route(
 	State(services): State<crate::State>, body: Ruma<get_relating_events::v1::Request>,
 ) -> Result<get_relating_events::v1::Response> {
-	let sender_user = body.sender_user.as_deref().expect("user is authenticated");
+	paginate_relations_with_filter(
+		&services,
+		body.sender_user(),
+		&body.room_id,
+		&body.event_id,
+		None,
+		None,
+		body.from.as_deref(),
+		body.to.as_deref(),
+		body.limit,
+		body.recurse,
+		body.dir,
+	)
+	.await
+}
+
+#[allow(clippy::too_many_arguments)]
+async fn paginate_relations_with_filter(
+	services: &Services, sender_user: &UserId, room_id: &RoomId, target: &EventId,
+	filter_event_type: Option<TimelineEventType>, filter_rel_type: Option<RelationType>, from: Option<&str>,
+	to: Option<&str>, limit: Option<UInt>, recurse: bool, dir: Direction,
+) -> Result<get_relating_events::v1::Response> {
+	let from: PduCount = from
+		.map(str::parse)
+		.transpose()?
+		.unwrap_or_else(|| match dir {
+			Direction::Forward => PduCount::min(),
+			Direction::Backward => PduCount::max(),
+		});
+
+	let to: Option<PduCount> = to.map(str::parse).flat_ok();
+
+	// Use limit or else 30, with maximum 100
+	let limit: usize = limit
+		.map(TryInto::try_into)
+		.flat_ok()
+		.unwrap_or(30)
+		.min(100);
+
+	// Spec (v1.10) recommends depth of at least 3
+	let depth: u8 = if recurse {
+		3
+	} else {
+		1
+	};
+
+	let events: Vec<PdusIterItem> = services
+		.rooms
+		.pdu_metadata
+		.get_relations(sender_user, room_id, target, from, limit, depth, dir)
+		.await
+		.into_iter()
+		.filter(|(_, pdu)| {
+			filter_event_type
+				.as_ref()
+				.is_none_or(|kind| *kind == pdu.kind)
+		})
+		.filter(|(_, pdu)| {
+			filter_rel_type
+				.as_ref()
+				.is_none_or(|rel_type| pdu.relation_type_equal(rel_type))
+		})
+		.stream()
+		.filter_map(|item| visibility_filter(services, sender_user, item))
+		.ready_take_while(|(count, _)| Some(*count) != to)
+		.take(limit)
+		.collect()
+		.boxed()
+		.await;
+
+	let next_batch = match dir {
+		Direction::Backward => events.first(),
+		Direction::Forward => events.last(),
+	}
+	.map(at!(0))
+	.as_ref()
+	.map(ToString::to_string);
+
+	Ok(get_relating_events::v1::Response {
+		next_batch,
+		prev_batch: Some(from.to_string()),
+		recursion_depth: recurse.then_some(depth.into()),
+		chunk: events
+			.into_iter()
+			.map(at!(1))
+			.map(|pdu| pdu.to_message_like_event())
+			.collect(),
+	})
+}
+
+async fn visibility_filter(services: &Services, sender_user: &UserId, item: PdusIterItem) -> Option<PdusIterItem> {
+	let (_, pdu) = &item;
 
 	services
 		.rooms
-		.pdu_metadata
-		.paginate_relations_with_filter(
-			sender_user,
-			&body.room_id,
-			&body.event_id,
-			None,
-			None,
-			body.from.as_deref(),
-			body.to.as_deref(),
-			body.limit,
-			body.recurse,
-			body.dir,
-		)
+		.state_accessor
+		.user_can_see_event(sender_user, &pdu.room_id, &pdu.event_id)
 		.await
+		.then_some(item)
 }
@@ -1,10 +1,7 @@
 mod v3;
 mod v4;
 
-use conduit::{
-	utils::{math::usize_from_u64_truncated, ReadyExt},
-	PduCount,
-};
+use conduit::{utils::ReadyExt, PduCount};
 use futures::StreamExt;
 use ruma::{RoomId, UserId};
 
@@ -12,7 +9,7 @@ pub(crate) use self::{v3::sync_events_route, v4::sync_events_v4_route};
 use crate::{service::Services, Error, PduEvent, Result};
 
 async fn load_timeline(
-	services: &Services, sender_user: &UserId, room_id: &RoomId, roomsincecount: PduCount, limit: u64,
+	services: &Services, sender_user: &UserId, room_id: &RoomId, roomsincecount: PduCount, limit: usize,
 ) -> Result<(Vec<(PduCount, PduEvent)>, bool), Error> {
 	let last_timeline_count = services
 		.rooms
@@ -29,12 +26,12 @@ async fn load_timeline(
 		.timeline
 		.pdus_until(sender_user, room_id, PduCount::max())
 		.await?
-		.ready_take_while(|(pducount, _)| pducount > &roomsincecount);
+		.ready_take_while(|(pducount, _)| *pducount > roomsincecount);
 
 	// Take the last events for the timeline
 	let timeline_pdus: Vec<_> = non_timeline_pdus
 		.by_ref()
-		.take(usize_from_u64_truncated(limit))
+		.take(limit)
 		.collect::<Vec<_>>()
 		.await
 		.into_iter()
@@ -432,28 +432,26 @@ async fn handle_left_room(
 		left_state_ids.insert(leave_shortstatekey, left_event_id);
 
-		let mut i: u8 = 0;
-		for (key, id) in left_state_ids {
-			if full_state || since_state_ids.get(&key) != Some(&id) {
-				let (event_type, state_key) = services.rooms.short.get_statekey_from_short(key).await?;
+		for (shortstatekey, event_id) in left_state_ids {
+			if full_state || since_state_ids.get(&shortstatekey) != Some(&event_id) {
+				let (event_type, state_key) = services
+					.rooms
+					.short
+					.get_statekey_from_short(shortstatekey)
+					.await?;
 
+				// TODO: Delete "element_hacks" when this is resolved: https://github.com/vector-im/element-web/issues/22565
 				if !lazy_load_enabled
-					|| event_type != StateEventType::RoomMember
-					|| full_state
-					// TODO: Delete the following line when this is resolved: https://github.com/vector-im/element-web/issues/22565
-					|| (cfg!(feature = "element_hacks") && *sender_user == state_key)
+					|| event_type != StateEventType::RoomMember
+					|| full_state
+					|| (cfg!(feature = "element_hacks") && *sender_user == state_key)
 				{
-					let Ok(pdu) = services.rooms.timeline.get_pdu(&id).await else {
-						error!("Pdu in state not found: {}", id);
+					let Ok(pdu) = services.rooms.timeline.get_pdu(&event_id).await else {
+						error!("Pdu in state not found: {event_id}");
 						continue;
 					};
 
 					left_state_events.push(pdu.to_sync_state_event());
-
-					i = i.wrapping_add(1);
-					if i % 100 == 0 {
-						tokio::task::yield_now().await;
-					}
 				}
 			}
 		}
 
@@ -542,7 +540,7 @@ async fn load_joined_room(
 	let insert_lock = services.rooms.timeline.mutex_insert.lock(room_id).await;
 	drop(insert_lock);
 
-	let (timeline_pdus, limited) = load_timeline(services, sender_user, room_id, sincecount, 10).await?;
+	let (timeline_pdus, limited) = load_timeline(services, sender_user, room_id, sincecount, 10_usize).await?;
 
 	let send_notification_counts = !timeline_pdus.is_empty()
 		|| services
@@ -678,8 +676,7 @@ async fn load_joined_room(
 	let mut state_events = Vec::new();
 	let mut lazy_loaded = HashSet::new();
 
-	let mut i: u8 = 0;
-	for (shortstatekey, id) in current_state_ids {
+	for (shortstatekey, event_id) in current_state_ids {
 		let (event_type, state_key) = services
 			.rooms
 			.short
@@ -687,24 +684,22 @@ async fn load_joined_room(
 			.await?;
 
 		if event_type != StateEventType::RoomMember {
-			let Ok(pdu) = services.rooms.timeline.get_pdu(&id).await else {
-				error!("Pdu in state not found: {id}");
+			let Ok(pdu) = services.rooms.timeline.get_pdu(&event_id).await else {
+				error!("Pdu in state not found: {event_id}");
 				continue;
 			};
-			state_events.push(pdu);
 
-			i = i.wrapping_add(1);
-			if i % 100 == 0 {
-				tokio::task::yield_now().await;
-			}
-		} else if !lazy_load_enabled
-			|| full_state
-			|| timeline_users.contains(&state_key)
-			// TODO: Delete the following line when this is resolved: https://github.com/vector-im/element-web/issues/22565
-			|| (cfg!(feature = "element_hacks") && *sender_user == state_key)
+			state_events.push(pdu);
+			continue;
+		}
+
+		// TODO: Delete "element_hacks" when this is resolved: https://github.com/vector-im/element-web/issues/22565
+		if !lazy_load_enabled
+			|| full_state || timeline_users.contains(&state_key)
+			|| (cfg!(feature = "element_hacks") && *sender_user == state_key)
 		{
-			let Ok(pdu) = services.rooms.timeline.get_pdu(&id).await else {
-				error!("Pdu in state not found: {id}");
+			let Ok(pdu) = services.rooms.timeline.get_pdu(&event_id).await else {
+				error!("Pdu in state not found: {event_id}");
 				continue;
 			};
 
@@ -712,12 +707,8 @@ async fn load_joined_room(
 			if let Ok(uid) = UserId::parse(&state_key) {
 				lazy_loaded.insert(uid);
 			}
-			state_events.push(pdu);
 
-			i = i.wrapping_add(1);
-			if i % 100 == 0 {
-				tokio::task::yield_now().await;
-			}
+			state_events.push(pdu);
 		}
 	}
 
@@ -8,7 +8,7 @@ use axum::extract::State;
 use conduit::{
 	debug, error, extract_variant,
 	utils::{
-		math::{ruma_from_usize, usize_from_ruma},
+		math::{ruma_from_usize, usize_from_ruma, usize_from_u64_truncated},
 		BoolExt, IterStream, ReadyExt, TryFutureExtExt,
 	},
 	warn, Error, PduCount, Result,
@@ -350,14 +350,16 @@ pub(crate) async fn sync_events_v4_route(
 
 		new_known_rooms.extend(room_ids.iter().cloned());
 		for room_id in &room_ids {
-			let todo_room = todo_rooms
-				.entry(room_id.clone())
-				.or_insert((BTreeSet::new(), 0, u64::MAX));
+			let todo_room =
+				todo_rooms
+					.entry(room_id.clone())
+					.or_insert((BTreeSet::new(), 0_usize, u64::MAX));
 
-			let limit = list
+			let limit: usize = list
 				.room_details
 				.timeline_limit
-				.map_or(10, u64::from)
+				.map(u64::from)
+				.map_or(10, usize_from_u64_truncated)
 				.min(100);
 
 			todo_room
@@ -406,8 +408,14 @@ pub(crate) async fn sync_events_v4_route(
 			}
 			let todo_room = todo_rooms
 				.entry(room_id.clone())
-				.or_insert((BTreeSet::new(), 0, u64::MAX));
-			let limit = room.timeline_limit.map_or(10, u64::from).min(100);
+				.or_insert((BTreeSet::new(), 0_usize, u64::MAX));
+
+			let limit: usize = room
+				.timeline_limit
+				.map(u64::from)
+				.map_or(10, usize_from_u64_truncated)
+				.min(100);
+
 			todo_room.0.extend(room.required_state.iter().cloned());
 			todo_room.1 = todo_room.1.max(limit);
 			// 0 means unknown because it got out of date
@@ -1,19 +1,14 @@
 use axum::extract::State;
-use conduit::PduEvent;
+use conduit::{PduCount, PduEvent};
 use futures::StreamExt;
-use ruma::{
-	api::client::{error::ErrorKind, threads::get_threads},
-	uint,
-};
+use ruma::{api::client::threads::get_threads, uint};
 
-use crate::{Error, Result, Ruma};
+use crate::{Result, Ruma};
 
 /// # `GET /_matrix/client/r0/rooms/{roomId}/threads`
 pub(crate) async fn get_threads_route(
-	State(services): State<crate::State>, body: Ruma<get_threads::v1::Request>,
+	State(services): State<crate::State>, ref body: Ruma<get_threads::v1::Request>,
 ) -> Result<get_threads::v1::Response> {
-	let sender_user = body.sender_user.as_ref().expect("user is authenticated");
-
 	// Use limit or else 10, with maximum 100
 	let limit = body
 		.limit
@@ -22,38 +17,39 @@ pub(crate) async fn get_threads_route(
 		.unwrap_or(10)
 		.min(100);
 
-	let from = if let Some(from) = &body.from {
-		from.parse()
-			.map_err(|_| Error::BadRequest(ErrorKind::InvalidParam, ""))?
-	} else {
-		u64::MAX
-	};
+	let from: PduCount = body
+		.from
+		.as_deref()
+		.map(str::parse)
+		.transpose()?
+		.unwrap_or_else(PduCount::max);
 
-	let room_id = &body.room_id;
-	let threads: Vec<(u64, PduEvent)> = services
+	let threads: Vec<(PduCount, PduEvent)> = services
 		.rooms
 		.threads
-		.threads_until(sender_user, &body.room_id, from, &body.include)
+		.threads_until(body.sender_user(), &body.room_id, from, &body.include)
 		.await?
 		.take(limit)
 		.filter_map(|(count, pdu)| async move {
 			services
 				.rooms
 				.state_accessor
-				.user_can_see_event(sender_user, room_id, &pdu.event_id)
+				.user_can_see_event(body.sender_user(), &body.room_id, &pdu.event_id)
 				.await
 				.then_some((count, pdu))
 		})
 		.collect()
 		.await;
 
-	let next_batch = threads.last().map(|(count, _)| count.to_string());
-
 	Ok(get_threads::v1::Response {
+		next_batch: threads
+			.last()
+			.map(|(count, _)| count)
+			.map(ToString::to_string),
+
 		chunk: threads
 			.into_iter()
 			.map(|(_, pdu)| pdu.to_room_event())
 			.collect(),
-		next_batch,
 	})
 }
@@ -156,12 +156,12 @@ async fn create_join_event(
 		.lock(room_id)
 		.await;
 
-	let pdu_id: Vec<u8> = services
+	let pdu_id = services
 		.rooms
 		.event_handler
 		.handle_incoming_pdu(&origin, room_id, &event_id, value.clone(), true)
 		.await?
-		.ok_or_else(|| Error::BadRequest(ErrorKind::InvalidParam, "Could not accept as timeline event."))?;
+		.ok_or_else(|| err!(Request(InvalidParam("Could not accept as timeline event."))))?;
 
 	drop(mutex_lock);
 
@@ -1,7 +1,7 @@
 #![allow(deprecated)]
 
 use axum::extract::State;
-use conduit::{utils::ReadyExt, Error, Result};
+use conduit::{err, utils::ReadyExt, Error, Result};
 use ruma::{
 	api::{client::error::ErrorKind, federation::membership::create_leave_event},
 	events::{
@@ -142,12 +142,12 @@ async fn create_leave_event(
 		.lock(room_id)
 		.await;
 
-	let pdu_id: Vec<u8> = services
+	let pdu_id = services
 		.rooms
 		.event_handler
 		.handle_incoming_pdu(origin, room_id, &event_id, value, true)
 		.await?
-		.ok_or_else(|| Error::BadRequest(ErrorKind::InvalidParam, "Could not accept as timeline event."))?;
+		.ok_or_else(|| err!(Request(InvalidParam("Could not accept as timeline event."))))?;
 
 	drop(mutex_lock);
 