refactor various Arc<EventId> to OwnedEventId

Signed-off-by: Jason Volk <jason@zemos.net>
Jason Volk 2024-12-28 00:57:02 +00:00 committed by strawberry
parent 5a335933b8
commit 6458f4b195
29 changed files with 142 additions and 152 deletions
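
The change itself is mechanical: fields, signatures, and collections that previously stored event ids behind an explicit `Arc<EventId>` now use ruma's `OwnedEventId` directly, which drops the `Arc::from` / `.into()` shims at call sites. A minimal sketch of the pattern (illustrative only, not code taken from this commit; it assumes ruma's identifier API, namely `EventId::parse` and the `IdParseError` re-export, as exposed by the pinned ruwuma revision):

```rust
use std::sync::Arc;

use ruma::{EventId, OwnedEventId};

// Before: event ids were wrapped in an explicit Arc, so every boundary with
// ruma types needed an Arc::from(..) or .into() conversion.
#[allow(dead_code)]
struct PduBefore {
    event_id: Arc<EventId>,
    prev_events: Vec<Arc<EventId>>,
}

// After: OwnedEventId is stored directly, so the conversion shims disappear.
struct PduAfter {
    event_id: OwnedEventId,
    prev_events: Vec<OwnedEventId>,
}

fn example(raw: &str) -> Result<PduAfter, ruma::IdParseError> {
    // EventId::parse validates the "$..." form and yields an OwnedEventId.
    let event_id: OwnedEventId = EventId::parse(raw)?;
    Ok(PduAfter { event_id: event_id.clone(), prev_events: vec![event_id] })
}

fn main() {
    let pdu = example("$someeventid:example.org").expect("valid event id");
    assert_eq!(pdu.prev_events.len(), 1);
}
```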

Cargo.lock (generated)

@@ -3175,7 +3175,7 @@ dependencies = [
 [[package]]
 name = "ruma"
 version = "0.10.1"
-source = "git+https://github.com/girlbossceo/ruwuma?rev=d3ed3194ebe96b921d06d1d3e607f0bf7873f792#d3ed3194ebe96b921d06d1d3e607f0bf7873f792"
+source = "git+https://github.com/girlbossceo/ruwuma?rev=54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a#54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a"
 dependencies = [
 "assign",
 "js_int",
@@ -3197,7 +3197,7 @@ dependencies = [
 [[package]]
 name = "ruma-appservice-api"
 version = "0.10.0"
-source = "git+https://github.com/girlbossceo/ruwuma?rev=d3ed3194ebe96b921d06d1d3e607f0bf7873f792#d3ed3194ebe96b921d06d1d3e607f0bf7873f792"
+source = "git+https://github.com/girlbossceo/ruwuma?rev=54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a#54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a"
 dependencies = [
 "js_int",
 "ruma-common",
@@ -3209,7 +3209,7 @@ dependencies = [
 [[package]]
 name = "ruma-client-api"
 version = "0.18.0"
-source = "git+https://github.com/girlbossceo/ruwuma?rev=d3ed3194ebe96b921d06d1d3e607f0bf7873f792#d3ed3194ebe96b921d06d1d3e607f0bf7873f792"
+source = "git+https://github.com/girlbossceo/ruwuma?rev=54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a#54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a"
 dependencies = [
 "as_variant",
 "assign",
@@ -3232,7 +3232,7 @@ dependencies = [
 [[package]]
 name = "ruma-common"
 version = "0.13.0"
-source = "git+https://github.com/girlbossceo/ruwuma?rev=d3ed3194ebe96b921d06d1d3e607f0bf7873f792#d3ed3194ebe96b921d06d1d3e607f0bf7873f792"
+source = "git+https://github.com/girlbossceo/ruwuma?rev=54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a#54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a"
 dependencies = [
 "as_variant",
 "base64 0.22.1",
@@ -3250,6 +3250,7 @@ dependencies = [
 "serde",
 "serde_html_form",
 "serde_json",
+"smallvec",
 "thiserror 2.0.7",
 "time",
 "tracing",
@@ -3262,7 +3263,7 @@ dependencies = [
 [[package]]
 name = "ruma-events"
 version = "0.28.1"
-source = "git+https://github.com/girlbossceo/ruwuma?rev=d3ed3194ebe96b921d06d1d3e607f0bf7873f792#d3ed3194ebe96b921d06d1d3e607f0bf7873f792"
+source = "git+https://github.com/girlbossceo/ruwuma?rev=54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a#54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a"
 dependencies = [
 "as_variant",
 "indexmap 2.7.0",
@@ -3276,6 +3277,7 @@ dependencies = [
 "ruma-macros",
 "serde",
 "serde_json",
+"smallvec",
 "thiserror 2.0.7",
 "tracing",
 "url",
@@ -3286,7 +3288,7 @@ dependencies = [
 [[package]]
 name = "ruma-federation-api"
 version = "0.9.0"
-source = "git+https://github.com/girlbossceo/ruwuma?rev=d3ed3194ebe96b921d06d1d3e607f0bf7873f792#d3ed3194ebe96b921d06d1d3e607f0bf7873f792"
+source = "git+https://github.com/girlbossceo/ruwuma?rev=54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a#54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a"
 dependencies = [
 "bytes",
 "http",
@@ -3304,7 +3306,7 @@ dependencies = [
 [[package]]
 name = "ruma-identifiers-validation"
 version = "0.9.5"
-source = "git+https://github.com/girlbossceo/ruwuma?rev=d3ed3194ebe96b921d06d1d3e607f0bf7873f792#d3ed3194ebe96b921d06d1d3e607f0bf7873f792"
+source = "git+https://github.com/girlbossceo/ruwuma?rev=54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a#54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a"
 dependencies = [
 "js_int",
 "thiserror 2.0.7",
@@ -3313,7 +3315,7 @@ dependencies = [
 [[package]]
 name = "ruma-identity-service-api"
 version = "0.9.0"
-source = "git+https://github.com/girlbossceo/ruwuma?rev=d3ed3194ebe96b921d06d1d3e607f0bf7873f792#d3ed3194ebe96b921d06d1d3e607f0bf7873f792"
+source = "git+https://github.com/girlbossceo/ruwuma?rev=54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a#54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a"
 dependencies = [
 "js_int",
 "ruma-common",
@@ -3323,7 +3325,7 @@ dependencies = [
 [[package]]
 name = "ruma-macros"
 version = "0.13.0"
-source = "git+https://github.com/girlbossceo/ruwuma?rev=d3ed3194ebe96b921d06d1d3e607f0bf7873f792#d3ed3194ebe96b921d06d1d3e607f0bf7873f792"
+source = "git+https://github.com/girlbossceo/ruwuma?rev=54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a#54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a"
 dependencies = [
 "cfg-if",
 "proc-macro-crate",
@@ -3338,7 +3340,7 @@ dependencies = [
 [[package]]
 name = "ruma-push-gateway-api"
 version = "0.9.0"
-source = "git+https://github.com/girlbossceo/ruwuma?rev=d3ed3194ebe96b921d06d1d3e607f0bf7873f792#d3ed3194ebe96b921d06d1d3e607f0bf7873f792"
+source = "git+https://github.com/girlbossceo/ruwuma?rev=54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a#54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a"
 dependencies = [
 "js_int",
 "ruma-common",
@@ -3350,7 +3352,7 @@ dependencies = [
 [[package]]
 name = "ruma-server-util"
 version = "0.3.0"
-source = "git+https://github.com/girlbossceo/ruwuma?rev=d3ed3194ebe96b921d06d1d3e607f0bf7873f792#d3ed3194ebe96b921d06d1d3e607f0bf7873f792"
+source = "git+https://github.com/girlbossceo/ruwuma?rev=54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a#54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a"
 dependencies = [
 "headers",
 "http",
@@ -3363,7 +3365,7 @@ dependencies = [
 [[package]]
 name = "ruma-signatures"
 version = "0.15.0"
-source = "git+https://github.com/girlbossceo/ruwuma?rev=d3ed3194ebe96b921d06d1d3e607f0bf7873f792#d3ed3194ebe96b921d06d1d3e607f0bf7873f792"
+source = "git+https://github.com/girlbossceo/ruwuma?rev=54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a#54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a"
 dependencies = [
 "base64 0.22.1",
 "ed25519-dalek",
@@ -3379,7 +3381,7 @@ dependencies = [
 [[package]]
 name = "ruma-state-res"
 version = "0.11.0"
-source = "git+https://github.com/girlbossceo/ruwuma?rev=d3ed3194ebe96b921d06d1d3e607f0bf7873f792#d3ed3194ebe96b921d06d1d3e607f0bf7873f792"
+source = "git+https://github.com/girlbossceo/ruwuma?rev=54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a#54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a"
 dependencies = [
 "futures-util",
 "js_int",


@@ -35,6 +35,7 @@ features = [
 "const_generics",
 "const_new",
 "serde",
+"union",
 "write",
 ]
@@ -335,7 +336,7 @@ version = "0.1.2"
 [workspace.dependencies.ruma]
 git = "https://github.com/girlbossceo/ruwuma"
 #branch = "conduwuit-changes"
-rev = "d3ed3194ebe96b921d06d1d3e607f0bf7873f792"
+rev = "54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a"
 features = [
 "compat",
 "rand",


@@ -2,7 +2,6 @@ use std::{
 collections::HashMap,
 fmt::Write,
 iter::once,
-sync::Arc,
 time::{Instant, SystemTime},
 };
@@ -13,7 +12,8 @@ use futures::{FutureExt, StreamExt};
 use ruma::{
 api::{client::error::ErrorKind, federation::event::get_room_state},
 events::room::message::RoomMessageEventContent,
-CanonicalJsonObject, EventId, OwnedRoomOrAliasId, RoomId, RoomVersionId, ServerName,
+CanonicalJsonObject, EventId, OwnedEventId, OwnedRoomOrAliasId, RoomId, RoomVersionId,
+ServerName,
 };
 use service::rooms::state_compressor::HashSetCompressStateEvent;
 use tracing_subscriber::EnvFilter;
@@ -598,14 +598,14 @@ pub(super) async fn force_set_room_state_from_server(
 let room_version = self.services.rooms.state.get_room_version(&room_id).await?;
-let mut state: HashMap<u64, Arc<EventId>> = HashMap::new();
+let mut state: HashMap<u64, OwnedEventId> = HashMap::new();
 let remote_state_response = self
 .services
 .sending
 .send_federation_request(&server_name, get_room_state::v1::Request {
 room_id: room_id.clone().into(),
-event_id: first_pdu.event_id.clone().into(),
+event_id: first_pdu.event_id.clone(),
 })
 .await?;
@@ -677,7 +677,7 @@ pub(super) async fn force_set_room_state_from_server(
 .services
 .rooms
 .event_handler
-.resolve_state(room_id.clone().as_ref(), &room_version, state)
+.resolve_state(&room_id, &room_version, state)
 .await?;
 info!("Forcing new room state");


@@ -922,7 +922,7 @@ pub(super) async fn redact_event(
 PduBuilder {
 redacts: Some(event.event_id.clone()),
 ..PduBuilder::timeline(&RoomRedactionEventContent {
-redacts: Some(event.event_id.clone().into()),
+redacts: Some(event.event_id.clone()),
 reason: Some(reason),
 })
 },


@@ -24,7 +24,7 @@ pub(crate) async fn redact_event_route(
 .timeline
 .build_and_append_pdu(
 PduBuilder {
-redacts: Some(body.event_id.clone().into()),
+redacts: Some(body.event_id.clone()),
 ..PduBuilder::timeline(&RoomRedactionEventContent {
 redacts: Some(body.event_id.clone()),
 reason: body.reason.clone(),
@@ -38,5 +38,5 @@ pub(crate) async fn redact_event_route(
 drop(state_lock);
-Ok(redact_event::v3::Response { event_id: event_id.into() })
+Ok(redact_event::v3::Response { event_id })
 }


@@ -92,5 +92,5 @@ pub(crate) async fn send_message_event_route(
 drop(state_lock);
-Ok(send_message_event::v3::Response { event_id: event_id.into() })
+Ok(send_message_event::v3::Response { event_id })
 }


@@ -1,5 +1,3 @@
-use std::sync::Arc;
 use axum::extract::State;
 use conduwuit::{err, pdu::PduBuilder, utils::BoolExt, Err, Error, PduEvent, Result};
 use ruma::{
@@ -16,7 +14,7 @@ use ruma::{
 AnyStateEventContent, StateEventType,
 },
 serde::Raw,
-EventId, RoomId, UserId,
+OwnedEventId, RoomId, UserId,
 };
 use service::Services;
@@ -50,8 +48,7 @@ pub(crate) async fn send_state_event_for_key_route(
 None
 },
 )
-.await?
-.into(),
+.await?,
 })
 }
@@ -177,7 +174,7 @@ async fn send_state_event_for_key_helper(
 json: &Raw<AnyStateEventContent>,
 state_key: String,
 timestamp: Option<ruma::MilliSecondsSinceUnixEpoch>,
-) -> Result<Arc<EventId>> {
+) -> Result<OwnedEventId> {
 allowed_to_send_state_event(services, room_id, event_type, json).await?;
 let state_lock = services.rooms.state.mutex.lock(room_id).await;
 let event_id = services


@@ -441,7 +441,7 @@ async fn handle_left_room(
 // This is just a rejected invite, not a room we know
 // Insert a leave event anyways
 let event = PduEvent {
-event_id: EventId::new(services.globals.server_name()).into(),
+event_id: EventId::new(services.globals.server_name()),
 sender: sender_user.to_owned(),
 origin: None,
 origin_server_ts: utils::millis_since_unix_epoch()


@@ -6,7 +6,7 @@ use conduwuit::{
 debug, debug_warn, err, error, result::LogErr, trace, utils::ReadyExt, warn, Err, Error,
 Result,
 };
-use futures::StreamExt;
+use futures::{FutureExt, StreamExt};
 use ruma::{
 api::{
 client::error::ErrorKind,
@@ -74,8 +74,13 @@ pub(crate) async fn send_transaction_message_route(
 );
 let resolved_map =
-handle_pdus(&services, &client, &body.pdus, body.origin(), &txn_start_time).await?;
-handle_edus(&services, &client, &body.edus, body.origin()).await;
+handle_pdus(&services, &client, &body.pdus, body.origin(), &txn_start_time)
+.boxed()
+.await?;
+handle_edus(&services, &client, &body.edus, body.origin())
+.boxed()
+.await;
 debug!(
 pdus = ?body.pdus.len(),


@@ -2,6 +2,7 @@
 use axum::extract::State;
 use conduwuit::{err, Err, Result};
+use futures::FutureExt;
 use ruma::{
 api::federation::membership::create_leave_event,
 events::{
@@ -154,10 +155,15 @@ async fn create_leave_event(
 .rooms
 .event_handler
 .handle_incoming_pdu(origin, room_id, &event_id, value, true)
+.boxed()
 .await?
 .ok_or_else(|| err!(Request(InvalidParam("Could not accept as timeline event."))))?;
 drop(mutex_lock);
-services.sending.send_pdu_room(room_id, &pdu_id).await
+services
+.sending
+.send_pdu_room(room_id, &pdu_id)
+.boxed()
+.await
 }


@@ -1,8 +1,8 @@
-use std::{collections::BTreeMap, sync::Arc};
+use std::collections::BTreeMap;
 use ruma::{
 events::{EventContent, MessageLikeEventType, StateEventType, TimelineEventType},
-EventId, MilliSecondsSinceUnixEpoch,
+MilliSecondsSinceUnixEpoch, OwnedEventId,
 };
 use serde::Deserialize;
 use serde_json::value::{to_raw_value, RawValue as RawJsonValue};
@@ -19,7 +19,7 @@ pub struct Builder {
 pub state_key: Option<String>,
-pub redacts: Option<Arc<EventId>>,
+pub redacts: Option<OwnedEventId>,
 /// For timestamped messaging, should only be used for appservices.
 /// Will be set to current time if None


@@ -1,13 +1,11 @@
-use std::sync::Arc;
 pub use ruma::state_res::Event;
-use ruma::{events::TimelineEventType, EventId, MilliSecondsSinceUnixEpoch, RoomId, UserId};
+use ruma::{events::TimelineEventType, MilliSecondsSinceUnixEpoch, OwnedEventId, RoomId, UserId};
 use serde_json::value::RawValue as RawJsonValue;
 use super::Pdu;
 impl Event for Pdu {
-type Id = Arc<EventId>;
+type Id = OwnedEventId;
 fn event_id(&self) -> &Self::Id { &self.event_id }


@@ -12,11 +12,11 @@ mod strip;
 mod tests;
 mod unsigned;
-use std::{cmp::Ordering, sync::Arc};
+use std::cmp::Ordering;
 use ruma::{
-events::TimelineEventType, CanonicalJsonObject, CanonicalJsonValue, EventId, OwnedRoomId,
-OwnedUserId, UInt,
+events::TimelineEventType, CanonicalJsonObject, CanonicalJsonValue, EventId, OwnedEventId,
+OwnedRoomId, OwnedUserId, UInt,
 };
 use serde::{Deserialize, Serialize};
 use serde_json::value::RawValue as RawJsonValue;
@@ -35,7 +35,7 @@ use crate::Result;
 /// Persistent Data Unit (Event)
 #[derive(Clone, Deserialize, Serialize, Debug)]
 pub struct Pdu {
-pub event_id: Arc<EventId>,
+pub event_id: OwnedEventId,
 pub room_id: OwnedRoomId,
 pub sender: OwnedUserId,
 #[serde(skip_serializing_if = "Option::is_none")]
@@ -46,11 +46,11 @@ pub struct Pdu {
 pub content: Box<RawJsonValue>,
 #[serde(skip_serializing_if = "Option::is_none")]
 pub state_key: Option<String>,
-pub prev_events: Vec<Arc<EventId>>,
+pub prev_events: Vec<OwnedEventId>,
 pub depth: UInt,
-pub auth_events: Vec<Arc<EventId>>,
+pub auth_events: Vec<OwnedEventId>,
 #[serde(skip_serializing_if = "Option::is_none")]
-pub redacts: Option<Arc<EventId>>,
+pub redacts: Option<OwnedEventId>,
 #[serde(default, skip_serializing_if = "Option::is_none")]
 pub unsigned: Option<Box<RawJsonValue>>,
 pub hashes: EventHash,


@@ -1,9 +1,7 @@
-use std::sync::Arc;
 use ruma::{
 canonical_json::redact_content_in_place,
 events::{room::redaction::RoomRedactionEventContent, TimelineEventType},
-EventId, RoomVersionId,
+OwnedEventId, RoomVersionId,
 };
 use serde::Deserialize;
 use serde_json::{
@@ -73,15 +71,15 @@ pub fn is_redacted(&self) -> bool {
 /// > such events over the Client-Server API.
 #[implement(super::Pdu)]
 #[must_use]
-pub fn copy_redacts(&self) -> (Option<Arc<EventId>>, Box<RawJsonValue>) {
+pub fn copy_redacts(&self) -> (Option<OwnedEventId>, Box<RawJsonValue>) {
 if self.kind == TimelineEventType::RoomRedaction {
 if let Ok(mut content) =
 serde_json::from_str::<RoomRedactionEventContent>(self.content.get())
 {
 if let Some(redacts) = content.redacts {
-return (Some(redacts.into()), self.content.clone());
+return (Some(redacts), self.content.clone());
 } else if let Some(redacts) = self.redacts.clone() {
-content.redacts = Some(redacts.into());
+content.redacts = Some(redacts);
 return (
 self.redacts.clone(),
 to_raw_value(&content).expect("Must be valid, we only added redacts field"),


@@ -12,7 +12,7 @@ use conduwuit::{
 validated, warn, Err, Result,
 };
 use futures::{Stream, StreamExt};
-use ruma::{EventId, RoomId};
+use ruma::{EventId, OwnedEventId, RoomId};
 use self::data::Data;
 use crate::{rooms, rooms::short::ShortEventId, Dep};
@@ -46,7 +46,7 @@ impl Service {
 &'a self,
 room_id: &RoomId,
 starting_events: I,
-) -> Result<impl Stream<Item = Arc<EventId>> + Send + '_>
+) -> Result<impl Stream<Item = OwnedEventId> + Send + '_>
 where
 I: Iterator<Item = &'a EventId> + Clone + Debug + ExactSizeIterator + Send + 'a,
 {
@@ -63,7 +63,7 @@ impl Service {
 &'a self,
 room_id: &RoomId,
 starting_events: I,
-) -> Result<Vec<Arc<EventId>>>
+) -> Result<Vec<OwnedEventId>>
 where
 I: Iterator<Item = &'a EventId> + Clone + Debug + ExactSizeIterator + Send + 'a,
 {
@@ -185,7 +185,7 @@ impl Service {
 room_id: &RoomId,
 event_id: &EventId,
 ) -> Result<HashSet<ShortEventId>> {
-let mut todo = vec![Arc::from(event_id)];
+let mut todo = vec![event_id.to_owned()];
 let mut found = HashSet::new();
 while let Some(event_id) = todo.pop() {


@@ -10,7 +10,7 @@ use conduwuit::{
 };
 use futures::TryFutureExt;
 use ruma::{
-api::federation::event::get_event, CanonicalJsonValue, EventId, RoomId, RoomVersionId,
+api::federation::event::get_event, CanonicalJsonValue, OwnedEventId, RoomId, RoomVersionId,
 ServerName,
 };
@@ -27,7 +27,7 @@ use ruma::{
 pub(super) async fn fetch_and_handle_outliers<'a>(
 &self,
 origin: &'a ServerName,
-events: &'a [Arc<EventId>],
+events: &'a [OwnedEventId],
 create_event: &'a PduEvent,
 room_id: &'a RoomId,
 room_version_id: &'a RoomVersionId,
@@ -62,7 +62,7 @@ pub(super) async fn fetch_and_handle_outliers<'a>(
 // c. Ask origin server over federation
 // We also handle its auth chain here so we don't get a stack overflow in
 // handle_outlier_pdu.
-let mut todo_auth_events = vec![Arc::clone(id)];
+let mut todo_auth_events = vec![id.clone()];
 let mut events_in_reverse_order = Vec::with_capacity(todo_auth_events.len());
 let mut events_all = HashSet::with_capacity(todo_auth_events.len());
 while let Some(next_id) = todo_auth_events.pop() {
@@ -124,14 +124,15 @@ pub(super) async fn fetch_and_handle_outliers<'a>(
 );
 }
-if let Some(auth_events) = value.get("auth_events").and_then(|c| c.as_array())
+if let Some(auth_events) = value
+.get("auth_events")
+.and_then(CanonicalJsonValue::as_array)
 {
 for auth_event in auth_events {
 if let Ok(auth_event) =
-serde_json::from_value(auth_event.clone().into())
+serde_json::from_value::<OwnedEventId>(auth_event.clone().into())
 {
-let a: Arc<EventId> = auth_event;
-todo_auth_events.push(a);
+todo_auth_events.push(auth_event);
 } else {
 warn!("Auth event id is not valid");
 }
@@ -201,7 +202,7 @@ pub(super) async fn fetch_and_handle_outliers<'a>(
 },
 | Err(e) => {
 warn!("Authentication of event {next_id} failed: {e:?}");
-back_off(next_id.into());
+back_off(next_id);
 },
 }
 }


@@ -8,7 +8,7 @@ use futures::{future, FutureExt};
 use ruma::{
 int,
 state_res::{self},
-uint, CanonicalJsonValue, EventId, MilliSecondsSinceUnixEpoch, RoomId, RoomVersionId,
+uint, CanonicalJsonValue, MilliSecondsSinceUnixEpoch, OwnedEventId, RoomId, RoomVersionId,
 ServerName,
 };
@@ -23,14 +23,14 @@ pub(super) async fn fetch_prev(
 create_event: &PduEvent,
 room_id: &RoomId,
 room_version_id: &RoomVersionId,
-initial_set: Vec<Arc<EventId>>,
+initial_set: Vec<OwnedEventId>,
 ) -> Result<(
-Vec<Arc<EventId>>,
-HashMap<Arc<EventId>, (Arc<PduEvent>, BTreeMap<String, CanonicalJsonValue>)>,
+Vec<OwnedEventId>,
+HashMap<OwnedEventId, (Arc<PduEvent>, BTreeMap<String, CanonicalJsonValue>)>,
 )> {
-let mut graph: HashMap<Arc<EventId>, _> = HashMap::with_capacity(initial_set.len());
+let mut graph: HashMap<OwnedEventId, _> = HashMap::with_capacity(initial_set.len());
 let mut eventid_info = HashMap::new();
-let mut todo_outlier_stack: Vec<Arc<EventId>> = initial_set;
+let mut todo_outlier_stack: Vec<OwnedEventId> = initial_set;
 let first_pdu_in_room = self.services.timeline.first_pdu_in_room(room_id).await?;


@@ -1,15 +1,14 @@
-use std::{
-collections::{hash_map, HashMap},
-sync::Arc,
-};
+use std::collections::{hash_map, HashMap};
 use conduwuit::{debug, implement, warn, Err, Error, PduEvent, Result};
 use futures::FutureExt;
 use ruma::{
-api::federation::event::get_room_state_ids, events::StateEventType, EventId, RoomId,
-RoomVersionId, ServerName,
+api::federation::event::get_room_state_ids, events::StateEventType, EventId, OwnedEventId,
+RoomId, RoomVersionId, ServerName,
 };
+use crate::rooms::short::ShortStateKey;
 /// Call /state_ids to find out what the state at this pdu is. We trust the
 /// server's response to some extend (sic), but we still do a lot of checks
 /// on the events
@@ -22,31 +21,25 @@ pub(super) async fn fetch_state(
 room_id: &RoomId,
 room_version_id: &RoomVersionId,
 event_id: &EventId,
-) -> Result<Option<HashMap<u64, Arc<EventId>>>> {
+) -> Result<Option<HashMap<u64, OwnedEventId>>> {
 debug!("Fetching state ids");
 let res = self
 .services
 .sending
 .send_federation_request(origin, get_room_state_ids::v1::Request {
 room_id: room_id.to_owned(),
-event_id: (*event_id).to_owned(),
+event_id: event_id.to_owned(),
 })
 .await
 .inspect_err(|e| warn!("Fetching state for event failed: {e}"))?;
 debug!("Fetching state events");
-let collect = res
-.pdu_ids
-.iter()
-.map(|x| Arc::from(&**x))
-.collect::<Vec<_>>();
 let state_vec = self
-.fetch_and_handle_outliers(origin, &collect, create_event, room_id, room_version_id)
+.fetch_and_handle_outliers(origin, &res.pdu_ids, create_event, room_id, room_version_id)
 .boxed()
 .await;
-let mut state: HashMap<_, Arc<EventId>> = HashMap::with_capacity(state_vec.len());
+let mut state: HashMap<ShortStateKey, OwnedEventId> = HashMap::with_capacity(state_vec.len());
 for (pdu, _) in state_vec {
 let state_key = pdu
 .state_key
@@ -61,10 +54,10 @@ pub(super) async fn fetch_state(
 match state.entry(shortstatekey) {
 | hash_map::Entry::Vacant(v) => {
-v.insert(Arc::from(&*pdu.event_id));
+v.insert(pdu.event_id.clone());
 },
 | hash_map::Entry::Occupied(_) =>
-return Err(Error::bad_database(
+return Err!(Database(
 "State event's type and state_key combination exists multiple times.",
 )),
 }
@@ -77,7 +70,7 @@ pub(super) async fn fetch_state(
 .get_shortstatekey(&StateEventType::RoomCreate, "")
 .await?;
-if state.get(&create_shortstatekey).map(AsRef::as_ref) != Some(&create_event.event_id) {
+if state.get(&create_shortstatekey) != Some(&create_event.event_id) {
 return Err!(Database("Incoming event refers to wrong create event."));
 }


@@ -147,7 +147,7 @@ pub async fn handle_incoming_pdu<'a>(
 .bad_event_ratelimiter
 .write()
 .expect("locked")
-.entry(prev_id.into())
+.entry(prev_id)
 {
 | Entry::Vacant(e) => {
 e.insert((now, 1));


@@ -79,19 +79,13 @@ pub(super) async fn handle_outlier_pdu<'a>(
 // the auth events are also rejected "due to auth events"
 // NOTE: Step 5 is not applied anymore because it failed too often
 debug!("Fetching auth events");
-Box::pin(
-self.fetch_and_handle_outliers(
+Box::pin(self.fetch_and_handle_outliers(
 origin,
-&incoming_pdu
-.auth_events
-.iter()
-.map(|x| Arc::from(&**x))
-.collect::<Vec<Arc<EventId>>>(),
+&incoming_pdu.auth_events,
 create_event,
 room_id,
 &room_version_id,
-),
-)
+))
 .await;
 }


@@ -5,9 +5,9 @@ use std::{
 };
 use conduwuit::{
-debug, implement, utils::math::continue_exponential_backoff_secs, Error, PduEvent, Result,
+debug, implement, utils::math::continue_exponential_backoff_secs, Err, PduEvent, Result,
 };
-use ruma::{api::client::error::ErrorKind, CanonicalJsonValue, EventId, RoomId, ServerName};
+use ruma::{CanonicalJsonValue, EventId, OwnedEventId, RoomId, ServerName};
 #[implement(super::Service)]
 #[allow(clippy::type_complexity)]
@@ -22,7 +22,7 @@ pub(super) async fn handle_prev_pdu<'a>(
 event_id: &'a EventId,
 room_id: &'a RoomId,
 eventid_info: &mut HashMap<
-Arc<EventId>,
+OwnedEventId,
 (Arc<PduEvent>, BTreeMap<String, CanonicalJsonValue>),
 >,
 create_event: &Arc<PduEvent>,
@@ -31,14 +31,10 @@ pub(super) async fn handle_prev_pdu<'a>(
 ) -> Result {
 // Check for disabled again because it might have changed
 if self.services.metadata.is_disabled(room_id).await {
-debug!(
+return Err!(Request(Forbidden(debug_warn!(
 "Federaton of room {room_id} is currently disabled on this server. Request by \
 origin {origin} and event ID {event_id}"
-);
-return Err(Error::BadRequest(
-ErrorKind::forbidden(),
-"Federation of this room is currently disabled on this server.",
-));
+))));
 }
 if let Some((time, tries)) = self


@@ -23,7 +23,7 @@ use conduwuit::{
 };
 use futures::TryFutureExt;
 use ruma::{
-events::room::create::RoomCreateEventContent, state_res::RoomVersion, EventId, OwnedEventId,
+events::room::create::RoomCreateEventContent, state_res::RoomVersion, OwnedEventId,
 OwnedRoomId, RoomId, RoomVersionId,
 };
@@ -97,11 +97,11 @@ impl crate::Service for Service {
 }
 impl Service {
-async fn event_exists(&self, event_id: Arc<EventId>) -> bool {
+async fn event_exists(&self, event_id: OwnedEventId) -> bool {
 self.services.timeline.pdu_exists(&event_id).await
 }
-async fn event_fetch(&self, event_id: Arc<EventId>) -> Option<Arc<PduEvent>> {
+async fn event_fetch(&self, event_id: OwnedEventId) -> Option<Arc<PduEvent>> {
 self.services
 .timeline
 .get_pdu(&event_id)


@@ -12,7 +12,7 @@ use conduwuit::{
 use futures::{FutureExt, StreamExt, TryFutureExt};
 use ruma::{
 state_res::{self, StateMap},
-EventId, RoomId, RoomVersionId,
+OwnedEventId, RoomId, RoomVersionId,
 };
 use crate::rooms::state_compressor::CompressedStateEvent;
@@ -23,7 +23,7 @@ pub async fn resolve_state(
 &self,
 room_id: &RoomId,
 room_version_id: &RoomVersionId,
-incoming_state: HashMap<u64, Arc<EventId>>,
+incoming_state: HashMap<u64, OwnedEventId>,
 ) -> Result<Arc<HashSet<CompressedStateEvent>>> {
 debug!("Loading current room state ids");
 let current_sstatehash = self
@@ -44,7 +44,7 @@ pub async fn resolve_state(
 for state in &fork_states {
 let starting_events = state.values().map(Borrow::borrow);
-let auth_chain: HashSet<Arc<EventId>> = self
+let auth_chain: HashSet<OwnedEventId> = self
 .services
 .auth_chain
 .get_event_ids(room_id, starting_events)
@@ -56,7 +56,7 @@ pub async fn resolve_state(
 }
 debug!("Loading fork states");
-let fork_states: Vec<StateMap<Arc<EventId>>> = fork_states
+let fork_states: Vec<StateMap<OwnedEventId>> = fork_states
 .into_iter()
 .stream()
 .wide_then(|fork_state| {
@@ -113,9 +113,9 @@ pub async fn resolve_state(
 pub async fn state_resolution(
 &self,
 room_version: &RoomVersionId,
-state_sets: &[StateMap<Arc<EventId>>],
-auth_chain_sets: &Vec<HashSet<Arc<EventId>>>,
-) -> Result<StateMap<Arc<EventId>>> {
+state_sets: &[StateMap<OwnedEventId>],
+auth_chain_sets: &Vec<HashSet<OwnedEventId>>,
+) -> Result<StateMap<OwnedEventId>> {
 //TODO: ???
 let _lock = self.services.globals.stateres_mutex.lock();


@@ -11,7 +11,7 @@ use conduwuit::{
 PduEvent, Result,
 };
 use futures::{FutureExt, StreamExt};
-use ruma::{state_res::StateMap, EventId, RoomId, RoomVersionId};
+use ruma::{state_res::StateMap, OwnedEventId, RoomId, RoomVersionId};
 // TODO: if we know the prev_events of the incoming event we can avoid the
 #[implement(super::Service)]
@@ -20,8 +20,8 @@ use ruma::{state_res::StateMap, EventId, RoomId, RoomVersionId};
 pub(super) async fn state_at_incoming_degree_one(
 &self,
 incoming_pdu: &Arc<PduEvent>,
-) -> Result<Option<HashMap<u64, Arc<EventId>>>> {
-let prev_event = &*incoming_pdu.prev_events[0];
+) -> Result<Option<HashMap<u64, OwnedEventId>>> {
+let prev_event = &incoming_pdu.prev_events[0];
 let Ok(prev_event_sstatehash) = self
 .services
 .state_accessor
@@ -56,7 +56,7 @@ pub(super) async fn state_at_incoming_degree_one(
 .get_or_create_shortstatekey(&prev_pdu.kind.to_string().into(), state_key)
 .await;
-state.insert(shortstatekey, Arc::from(prev_event));
+state.insert(shortstatekey, prev_event.clone());
 // Now it's the state after the pdu
 }
@@ -72,7 +72,7 @@ pub(super) async fn state_at_incoming_resolved(
 incoming_pdu: &Arc<PduEvent>,
 room_id: &RoomId,
 room_version_id: &RoomVersionId,
-) -> Result<Option<HashMap<u64, Arc<EventId>>>> {
+) -> Result<Option<HashMap<u64, OwnedEventId>>> {
 debug!("Calculating state at event using state res");
 let mut extremity_sstatehashes = HashMap::with_capacity(incoming_pdu.prev_events.len());
@@ -142,7 +142,7 @@ pub(super) async fn state_at_incoming_resolved(
 starting_events.push(id.borrow());
 }
-let auth_chain: HashSet<Arc<EventId>> = self
+let auth_chain: HashSet<OwnedEventId> = self
 .services
 .auth_chain
 .get_event_ids(room_id, starting_events.into_iter())


@@ -282,7 +282,7 @@ pub(super) async fn upgrade_outlier_to_timeline_pdu(
 }
 trace!("Appending pdu to timeline");
-extremities.insert(incoming_pdu.event_id.clone().into());
+extremities.insert(incoming_pdu.event_id.clone());
 // Now that the event has passed all auth it is added into the timeline.
 // We use the `state_at_event` instead of `state_after` so we accurately


@@ -88,7 +88,11 @@ impl Data {
 })
 }
-pub(super) fn mark_as_referenced(&self, room_id: &RoomId, event_ids: &[Arc<EventId>]) {
+#[inline]
+pub(super) fn mark_as_referenced<'a, I>(&self, room_id: &RoomId, event_ids: I)
+where
+I: Iterator<Item = &'a EventId>,
+{
 for prev in event_ids {
 let key = (room_id, prev);
 self.referencedevents.put_raw(key, []);


@@ -98,9 +98,11 @@ impl Service {
 pdus
 }
+#[inline]
 #[tracing::instrument(skip_all, level = "debug")]
-pub fn mark_as_referenced(&self, room_id: &RoomId, event_ids: &[Arc<EventId>]) {
+pub fn mark_as_referenced<'a, I>(&self, room_id: &RoomId, event_ids: I)
+where
+I: Iterator<Item = &'a EventId>,
+{
 self.db.mark_as_referenced(room_id, event_ids);
 }


@@ -77,7 +77,7 @@ impl Service {
 let pdu = self.services.timeline.get_pdu_from_id(&pdu_id).await?;
-let event_id: OwnedEventId = pdu.event_id.into();
+let event_id: OwnedEventId = pdu.event_id;
 let user_id: OwnedUserId = user_id.to_owned();
 let content: BTreeMap<OwnedEventId, Receipts> = BTreeMap::from_iter([(
 event_id,


@@ -10,7 +10,7 @@ use std::{
 use conduwuit::{
 debug, debug_warn, err, error, implement, info,
-pdu::{EventHash, PduBuilder, PduCount, PduEvent},
+pdu::{gen_event_id, EventHash, PduBuilder, PduCount, PduEvent},
 utils::{self, stream::TryIgnore, IterStream, MutexMap, MutexMapGuard, ReadyExt},
 validated, warn, Err, Error, Result, Server,
 };
@@ -371,7 +371,7 @@ impl Service {
 // We must keep track of all events that have been referenced.
 self.services
 .pdu_metadata
-.mark_as_referenced(&pdu.room_id, &pdu.prev_events);
+.mark_as_referenced(&pdu.room_id, pdu.prev_events.iter().map(AsRef::as_ref));
 self.services
 .state
@@ -681,12 +681,12 @@ impl Service {
 timestamp,
 } = pdu_builder;
-let prev_events: Vec<_> = self
+let prev_events: Vec<OwnedEventId> = self
 .services
 .state
 .get_forward_extremities(room_id)
 .take(20)
-.map(Arc::from)
+.map(Into::into)
 .collect()
 .await;
@@ -834,17 +834,10 @@ impl Service {
 }
 // Generate event id
-pdu.event_id = EventId::parse_arc(format!(
-"${}",
-ruma::signatures::reference_hash(&pdu_json, &room_version_id)
-.expect("ruma can calculate reference hashes")
-))
-.expect("ruma's reference hashes are valid event ids");
+pdu.event_id = gen_event_id(&pdu_json, &room_version_id)?;
-pdu_json.insert(
-"event_id".to_owned(),
-CanonicalJsonValue::String(pdu.event_id.as_str().to_owned()),
-);
+pdu_json
+.insert("event_id".into(), CanonicalJsonValue::String(pdu.event_id.clone().into()));
 // Generate short event id
 let _shorteventid = self
@@ -867,7 +860,7 @@ impl Service {
 room_id: &RoomId,
 state_lock: &RoomMutexGuard, /* Take mutex guard to make sure users get the room state
 * mutex */
-) -> Result<Arc<EventId>> {
+) -> Result<OwnedEventId> {
 let (pdu, pdu_json) = self
 .create_hash_and_sign_event(pdu_builder, sender, room_id, state_lock)
 .await?;
@@ -987,7 +980,7 @@ impl Service {
 if soft_fail {
 self.services
 .pdu_metadata
-.mark_as_referenced(&pdu.room_id, &pdu.prev_events);
+.mark_as_referenced(&pdu.room_id, pdu.prev_events.iter().map(AsRef::as_ref));
 self.services
 .state
@@ -1170,7 +1163,7 @@ impl Service {
 backfill_server,
 federation::backfill::get_backfill::v1::Request {
 room_id: room_id.to_owned(),
-v: vec![first_pdu.1.event_id.as_ref().to_owned()],
+v: vec![first_pdu.1.event_id.clone()],
 limit: uint!(100),
 },
 )