move core::pdu and core::state_res into core::matrix::

Signed-off-by: Jason Volk <jason@zemos.net>
Jason Volk 2025-04-04 03:30:13 +00:00
parent 4e5b87d0cd
commit 532dfd004d
91 changed files with 266 additions and 205 deletions

src/core/matrix/event.rs (new file)

@@ -0,0 +1,102 @@
use std::{
borrow::Borrow,
fmt::{Debug, Display},
hash::Hash,
sync::Arc,
};
use ruma::{EventId, MilliSecondsSinceUnixEpoch, RoomId, UserId, events::TimelineEventType};
use serde_json::value::RawValue as RawJsonValue;
/// Abstraction of a PDU so users can have their own PDU types.
pub trait Event {
type Id: Clone + Debug + Display + Eq + Ord + Hash + Send + Borrow<EventId>;
/// The `EventId` of this event.
fn event_id(&self) -> &Self::Id;
/// The `RoomId` of this event.
fn room_id(&self) -> &RoomId;
/// The `UserId` of this event.
fn sender(&self) -> &UserId;
/// The time of creation on the originating server.
fn origin_server_ts(&self) -> MilliSecondsSinceUnixEpoch;
/// The event type.
fn event_type(&self) -> &TimelineEventType;
/// The event's content.
fn content(&self) -> &RawJsonValue;
/// The state key for this event.
fn state_key(&self) -> Option<&str>;
/// The events before this event.
// Requires GATs to avoid boxing (and TAIT for making it convenient).
fn prev_events(&self) -> impl DoubleEndedIterator<Item = &Self::Id> + Send + '_;
/// All the authenticating events for this event.
// Requires GATs to avoid boxing (and TAIT for making it convenient).
fn auth_events(&self) -> impl DoubleEndedIterator<Item = &Self::Id> + Send + '_;
/// If this event is a redaction event this is the event it redacts.
fn redacts(&self) -> Option<&Self::Id>;
}
impl<T: Event> Event for &T {
type Id = T::Id;
fn event_id(&self) -> &Self::Id { (*self).event_id() }
fn room_id(&self) -> &RoomId { (*self).room_id() }
fn sender(&self) -> &UserId { (*self).sender() }
fn origin_server_ts(&self) -> MilliSecondsSinceUnixEpoch { (*self).origin_server_ts() }
fn event_type(&self) -> &TimelineEventType { (*self).event_type() }
fn content(&self) -> &RawJsonValue { (*self).content() }
fn state_key(&self) -> Option<&str> { (*self).state_key() }
fn prev_events(&self) -> impl DoubleEndedIterator<Item = &Self::Id> + Send + '_ {
(*self).prev_events()
}
fn auth_events(&self) -> impl DoubleEndedIterator<Item = &Self::Id> + Send + '_ {
(*self).auth_events()
}
fn redacts(&self) -> Option<&Self::Id> { (*self).redacts() }
}
impl<T: Event> Event for Arc<T> {
type Id = T::Id;
fn event_id(&self) -> &Self::Id { (**self).event_id() }
fn room_id(&self) -> &RoomId { (**self).room_id() }
fn sender(&self) -> &UserId { (**self).sender() }
fn origin_server_ts(&self) -> MilliSecondsSinceUnixEpoch { (**self).origin_server_ts() }
fn event_type(&self) -> &TimelineEventType { (**self).event_type() }
fn content(&self) -> &RawJsonValue { (**self).content() }
fn state_key(&self) -> Option<&str> { (**self).state_key() }
fn prev_events(&self) -> impl DoubleEndedIterator<Item = &Self::Id> + Send + '_ {
(**self).prev_events()
}
fn auth_events(&self) -> impl DoubleEndedIterator<Item = &Self::Id> + Send + '_ {
(**self).auth_events()
}
fn redacts(&self) -> Option<&Self::Id> { (**self).redacts() }
}
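
A minimal sketch (not part of this diff) of what the two blanket impls buy: code written against the `Event` trait accepts owned, borrowed, and `Arc`-wrapped PDU types alike. The paths and function names are illustrative.

fn summarize<E: Event>(event: &E) -> String {
	// Only trait accessors are used, so any `Event` implementor works here.
	format!("{} by {} in {}", event.event_id(), event.sender(), event.room_id())
}

fn demo(pdu: std::sync::Arc<crate::matrix::PduEvent>) -> (String, String) {
	// `Arc<PduEvent>` satisfies `Event` via the impl above; `&PduEvent` via `impl Event for &T`.
	(summarize(&pdu), summarize(&&*pdu))
}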

src/core/matrix/mod.rs (new file)

@@ -0,0 +1,9 @@
//! Core Matrix Library
pub mod event;
pub mod pdu;
pub mod state_res;
pub use event::Event;
pub use pdu::{PduBuilder, PduCount, PduEvent, PduId, RawPduId, StateKey};
pub use state_res::{EventTypeExt, RoomVersion, StateMap, TypeStateKey};
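
A short sketch (not part of the diff) of what the move means for call sites inside the core crate: items that previously resolved through `crate::pdu` and `crate::state_res` now live under `crate::matrix`, and the re-exports above keep the common names one segment away. The helper below is hypothetical.

// Hypothetical call site inside the core crate, updated for the new layout.
use crate::matrix::{Event, PduEvent, StateMap};

fn first_event_id(state: &StateMap<PduEvent>) -> Option<String> {
	// Previously: `crate::pdu::PduEvent` and `crate::state_res::StateMap`.
	state.values().next().map(|pdu| pdu.event_id().to_string())
}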

src/core/matrix/pdu.rs (new file)

@@ -0,0 +1,127 @@
mod builder;
mod content;
mod count;
mod event_id;
mod filter;
mod id;
mod raw_id;
mod redact;
mod relation;
mod state_key;
mod strip;
#[cfg(test)]
mod tests;
mod unsigned;
use std::cmp::Ordering;
use ruma::{
CanonicalJsonObject, CanonicalJsonValue, EventId, MilliSecondsSinceUnixEpoch, OwnedEventId,
OwnedRoomId, OwnedServerName, OwnedUserId, RoomId, UInt, UserId, events::TimelineEventType,
};
use serde::{Deserialize, Serialize};
use serde_json::value::RawValue as RawJsonValue;
pub use self::{
Count as PduCount, Id as PduId, Pdu as PduEvent, RawId as RawPduId,
builder::{Builder, Builder as PduBuilder},
count::Count,
event_id::*,
id::*,
raw_id::*,
state_key::{ShortStateKey, StateKey},
};
use super::Event;
use crate::Result;
/// Persistent Data Unit (Event)
#[derive(Clone, Deserialize, Serialize, Debug)]
pub struct Pdu {
pub event_id: OwnedEventId,
pub room_id: OwnedRoomId,
pub sender: OwnedUserId,
#[serde(skip_serializing_if = "Option::is_none")]
pub origin: Option<OwnedServerName>,
pub origin_server_ts: UInt,
#[serde(rename = "type")]
pub kind: TimelineEventType,
pub content: Box<RawJsonValue>,
#[serde(skip_serializing_if = "Option::is_none")]
pub state_key: Option<StateKey>,
pub prev_events: Vec<OwnedEventId>,
pub depth: UInt,
pub auth_events: Vec<OwnedEventId>,
#[serde(skip_serializing_if = "Option::is_none")]
pub redacts: Option<OwnedEventId>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub unsigned: Option<Box<RawJsonValue>>,
pub hashes: EventHash,
#[serde(default, skip_serializing_if = "Option::is_none")]
// BTreeMap<Box<ServerName>, BTreeMap<ServerSigningKeyId, String>>
pub signatures: Option<Box<RawJsonValue>>,
}
/// Content hashes of a PDU.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct EventHash {
/// The SHA-256 hash.
pub sha256: String,
}
impl Pdu {
pub fn from_id_val(event_id: &EventId, mut json: CanonicalJsonObject) -> Result<Self> {
let event_id = CanonicalJsonValue::String(event_id.into());
json.insert("event_id".into(), event_id);
serde_json::to_value(json)
.and_then(serde_json::from_value)
.map_err(Into::into)
}
}
impl Event for Pdu {
type Id = OwnedEventId;
fn event_id(&self) -> &Self::Id { &self.event_id }
fn room_id(&self) -> &RoomId { &self.room_id }
fn sender(&self) -> &UserId { &self.sender }
fn event_type(&self) -> &TimelineEventType { &self.kind }
fn content(&self) -> &RawJsonValue { &self.content }
fn origin_server_ts(&self) -> MilliSecondsSinceUnixEpoch {
MilliSecondsSinceUnixEpoch(self.origin_server_ts)
}
fn state_key(&self) -> Option<&str> { self.state_key.as_deref() }
fn prev_events(&self) -> impl DoubleEndedIterator<Item = &Self::Id> + Send + '_ {
self.prev_events.iter()
}
fn auth_events(&self) -> impl DoubleEndedIterator<Item = &Self::Id> + Send + '_ {
self.auth_events.iter()
}
fn redacts(&self) -> Option<&Self::Id> { self.redacts.as_ref() }
}
/// Equality is limited to the `event_id`; a derived implementation would
/// compare every field.
impl Eq for Pdu {}
/// Equality determined by the Pdu's ID, not the memory representations.
impl PartialEq for Pdu {
fn eq(&self, other: &Self) -> bool { self.event_id == other.event_id }
}
/// Ordering determined by the Pdu's ID, not the memory representations.
impl Ord for Pdu {
fn cmp(&self, other: &Self) -> Ordering { self.event_id.cmp(&other.event_id) }
}
/// Ordering determined by the Pdu's ID, not the memory representations.
impl PartialOrd for Pdu {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) }
}

@@ -0,0 +1,72 @@
use std::collections::BTreeMap;
use ruma::{
MilliSecondsSinceUnixEpoch, OwnedEventId,
events::{EventContent, MessageLikeEventType, StateEventType, TimelineEventType},
};
use serde::Deserialize;
use serde_json::value::{RawValue as RawJsonValue, to_raw_value};
use super::StateKey;
/// Build the start of a PDU in order to add it to the Database.
#[derive(Debug, Deserialize)]
pub struct Builder {
#[serde(rename = "type")]
pub event_type: TimelineEventType,
pub content: Box<RawJsonValue>,
pub unsigned: Option<Unsigned>,
pub state_key: Option<StateKey>,
pub redacts: Option<OwnedEventId>,
/// For timestamped messaging; should only be used by appservices.
/// Set to the current time if `None`.
pub timestamp: Option<MilliSecondsSinceUnixEpoch>,
}
type Unsigned = BTreeMap<String, serde_json::Value>;
impl Builder {
pub fn state<S, T>(state_key: S, content: &T) -> Self
where
T: EventContent<EventType = StateEventType>,
S: Into<StateKey>,
{
Self {
event_type: content.event_type().into(),
content: to_raw_value(content)
.expect("Builder failed to serialize state event content to RawValue"),
state_key: Some(state_key.into()),
..Self::default()
}
}
pub fn timeline<T>(content: &T) -> Self
where
T: EventContent<EventType = MessageLikeEventType>,
{
Self {
event_type: content.event_type().into(),
content: to_raw_value(content)
.expect("Builder failed to serialize timeline event content to RawValue"),
..Self::default()
}
}
}
impl Default for Builder {
fn default() -> Self {
Self {
event_type: "m.room.message".into(),
content: Box::<RawJsonValue>::default(),
unsigned: None,
state_key: None,
redacts: None,
timestamp: None,
}
}
}
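
A usage sketch (not part of the diff) of the two constructors above, with content types from ruma; the concrete event contents and values are illustrative.

use ruma::events::room::{message::RoomMessageEventContent, name::RoomNameEventContent};

fn examples() -> (Builder, Builder) {
	// State event: a state key (here the empty string) plus typed state content.
	let name = Builder::state(String::new(), &RoomNameEventContent::new("lobby".to_owned()));
	// Message-like event: content only; every other field keeps its default.
	let message = Builder::timeline(&RoomMessageEventContent::text_plain("hello"));
	(name, message)
}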

@@ -0,0 +1,20 @@
use serde::Deserialize;
use serde_json::value::Value as JsonValue;
use crate::{Result, err, implement};
#[must_use]
#[implement(super::Pdu)]
pub fn get_content_as_value(&self) -> JsonValue {
self.get_content()
.expect("pdu content must be a valid JSON value")
}
#[implement(super::Pdu)]
pub fn get_content<T>(&self) -> Result<T>
where
T: for<'de> Deserialize<'de>,
{
serde_json::from_str(self.content.get())
.map_err(|e| err!(Database("Failed to deserialize pdu content into type: {e}")))
}

@@ -0,0 +1,174 @@
#![allow(clippy::cast_possible_wrap, clippy::cast_sign_loss, clippy::as_conversions)]
use std::{cmp::Ordering, fmt, fmt::Display, str::FromStr};
use ruma::api::Direction;
use crate::{Error, Result, err};
#[derive(Hash, PartialEq, Eq, Clone, Copy, Debug)]
pub enum Count {
Normal(u64),
Backfilled(i64),
}
impl Count {
#[inline]
#[must_use]
pub fn from_unsigned(unsigned: u64) -> Self { Self::from_signed(unsigned as i64) }
#[inline]
#[must_use]
pub fn from_signed(signed: i64) -> Self {
match signed {
| i64::MIN..=0 => Self::Backfilled(signed),
| _ => Self::Normal(signed as u64),
}
}
#[inline]
#[must_use]
pub fn into_unsigned(self) -> u64 {
self.debug_assert_valid();
match self {
| Self::Normal(i) => i,
| Self::Backfilled(i) => i as u64,
}
}
#[inline]
#[must_use]
pub fn into_signed(self) -> i64 {
self.debug_assert_valid();
match self {
| Self::Normal(i) => i as i64,
| Self::Backfilled(i) => i,
}
}
#[inline]
#[must_use]
pub fn into_normal(self) -> Self {
self.debug_assert_valid();
match self {
| Self::Normal(i) => Self::Normal(i),
| Self::Backfilled(_) => Self::Normal(0),
}
}
#[inline]
pub fn checked_inc(self, dir: Direction) -> Result<Self, Error> {
match dir {
| Direction::Forward => self.checked_add(1),
| Direction::Backward => self.checked_sub(1),
}
}
#[inline]
pub fn checked_add(self, add: u64) -> Result<Self, Error> {
Ok(match self {
| Self::Normal(i) => Self::Normal(
i.checked_add(add)
.ok_or_else(|| err!(Arithmetic("Count::Normal overflow")))?,
),
| Self::Backfilled(i) => Self::Backfilled(
i.checked_add(add as i64)
.ok_or_else(|| err!(Arithmetic("Count::Backfilled overflow")))?,
),
})
}
#[inline]
pub fn checked_sub(self, sub: u64) -> Result<Self, Error> {
Ok(match self {
| Self::Normal(i) => Self::Normal(
i.checked_sub(sub)
.ok_or_else(|| err!(Arithmetic("Count::Normal underflow")))?,
),
| Self::Backfilled(i) => Self::Backfilled(
i.checked_sub(sub as i64)
.ok_or_else(|| err!(Arithmetic("Count::Backfilled underflow")))?,
),
})
}
#[inline]
#[must_use]
pub fn saturating_inc(self, dir: Direction) -> Self {
match dir {
| Direction::Forward => self.saturating_add(1),
| Direction::Backward => self.saturating_sub(1),
}
}
#[inline]
#[must_use]
pub fn saturating_add(self, add: u64) -> Self {
match self {
| Self::Normal(i) => Self::Normal(i.saturating_add(add)),
| Self::Backfilled(i) => Self::Backfilled(i.saturating_add(add as i64)),
}
}
#[inline]
#[must_use]
pub fn saturating_sub(self, sub: u64) -> Self {
match self {
| Self::Normal(i) => Self::Normal(i.saturating_sub(sub)),
| Self::Backfilled(i) => Self::Backfilled(i.saturating_sub(sub as i64)),
}
}
#[inline]
#[must_use]
pub const fn min() -> Self { Self::Backfilled(i64::MIN) }
#[inline]
#[must_use]
pub const fn max() -> Self { Self::Normal(i64::MAX as u64) }
#[inline]
pub(crate) fn debug_assert_valid(&self) {
if let Self::Backfilled(i) = self {
debug_assert!(*i <= 0, "Backfilled sequence must be negative");
}
}
}
impl Display for Count {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
self.debug_assert_valid();
match self {
| Self::Normal(i) => write!(f, "{i}"),
| Self::Backfilled(i) => write!(f, "{i}"),
}
}
}
impl From<i64> for Count {
#[inline]
fn from(signed: i64) -> Self { Self::from_signed(signed) }
}
impl From<u64> for Count {
#[inline]
fn from(unsigned: u64) -> Self { Self::from_unsigned(unsigned) }
}
impl FromStr for Count {
type Err = Error;
fn from_str(token: &str) -> Result<Self, Self::Err> { Ok(Self::from_signed(token.parse()?)) }
}
impl PartialOrd for Count {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) }
}
impl Ord for Count {
fn cmp(&self, other: &Self) -> Ordering { self.into_signed().cmp(&other.into_signed()) }
}
impl Default for Count {
fn default() -> Self { Self::Normal(0) }
}
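
A behavioral sketch (not part of the diff) of the two variants: positive sequence numbers are regular timeline counts, zero and below are backfill counts, and ordering always compares the signed projection.

fn count_examples() {
	use ruma::api::Direction;

	assert!(matches!(Count::from_signed(7), Count::Normal(7)));
	assert!(matches!(Count::from_signed(-7), Count::Backfilled(-7)));

	// Moving "forward" from a backfilled position walks toward zero.
	assert_eq!(Count::Backfilled(-2).saturating_inc(Direction::Forward), Count::Backfilled(-1));

	// Backfilled counts sort before normal ones.
	assert!(Count::Backfilled(-1) < Count::Normal(0));
}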

@@ -0,0 +1,31 @@
use ruma::{CanonicalJsonObject, OwnedEventId, RoomVersionId};
use serde_json::value::RawValue as RawJsonValue;
use crate::{Result, err};
/// Generates the correct event ID for the incoming PDU.
///
/// Returns a tuple of the new `EventId` and the PDU as a `BTreeMap<String,
/// CanonicalJsonValue>`.
pub fn gen_event_id_canonical_json(
pdu: &RawJsonValue,
room_version_id: &RoomVersionId,
) -> Result<(OwnedEventId, CanonicalJsonObject)> {
let value: CanonicalJsonObject = serde_json::from_str(pdu.get())
.map_err(|e| err!(BadServerResponse(warn!("Error parsing incoming event: {e:?}"))))?;
let event_id = gen_event_id(&value, room_version_id)?;
Ok((event_id, value))
}
/// Generates the correct event ID for the incoming PDU.
pub fn gen_event_id(
value: &CanonicalJsonObject,
room_version_id: &RoomVersionId,
) -> Result<OwnedEventId> {
let reference_hash = ruma::signatures::reference_hash(value, room_version_id)?;
let event_id: OwnedEventId = format!("${reference_hash}").try_into()?;
Ok(event_id)
}
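
A small sketch (not part of the diff) of the intended call pattern for an incoming federation PDU; the room version and function name are illustrative.

fn example(raw_pdu: &RawJsonValue) -> Result<()> {
	let (event_id, canonical) = gen_event_id_canonical_json(raw_pdu, &RoomVersionId::V11)?;
	// The event ID is "$" followed by the reference hash of the canonicalized
	// event; `canonical` is the parsed object, ready for signature checks.
	assert!(event_id.as_str().starts_with('$'));
	drop(canonical);
	Ok(())
}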

@@ -0,0 +1,90 @@
use ruma::api::client::filter::{RoomEventFilter, UrlFilter};
use serde_json::Value;
use crate::{implement, is_equal_to};
#[implement(super::Pdu)]
#[must_use]
pub fn matches(&self, filter: &RoomEventFilter) -> bool {
if !self.matches_sender(filter) {
return false;
}
if !self.matches_room(filter) {
return false;
}
if !self.matches_type(filter) {
return false;
}
if !self.matches_url(filter) {
return false;
}
true
}
#[implement(super::Pdu)]
fn matches_room(&self, filter: &RoomEventFilter) -> bool {
if filter.not_rooms.contains(&self.room_id) {
return false;
}
if let Some(rooms) = filter.rooms.as_ref() {
if !rooms.contains(&self.room_id) {
return false;
}
}
true
}
#[implement(super::Pdu)]
fn matches_sender(&self, filter: &RoomEventFilter) -> bool {
if filter.not_senders.contains(&self.sender) {
return false;
}
if let Some(senders) = filter.senders.as_ref() {
if !senders.contains(&self.sender) {
return false;
}
}
true
}
#[implement(super::Pdu)]
fn matches_type(&self, filter: &RoomEventFilter) -> bool {
let event_type = &self.kind.to_cow_str();
if filter.not_types.iter().any(is_equal_to!(event_type)) {
return false;
}
if let Some(types) = filter.types.as_ref() {
if !types.iter().any(is_equal_to!(event_type)) {
return false;
}
}
true
}
#[implement(super::Pdu)]
fn matches_url(&self, filter: &RoomEventFilter) -> bool {
let Some(url_filter) = filter.url_filter.as_ref() else {
return true;
};
//TODO: might be better to use Ruma's Raw rather than serde here
let url = serde_json::from_str::<Value>(self.content.get())
.expect("parsing content JSON failed")
.get("url")
.is_some_and(Value::is_string);
match url_filter {
| UrlFilter::EventsWithUrl => url,
| UrlFilter::EventsWithoutUrl => !url,
}
}
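
A usage sketch (not part of the diff): an empty filter matches every event, while `url_filter` narrows the result to events whose content does (or does not) carry a "url" key. The filter values below are illustrative.

fn example(pdu: &super::Pdu) -> bool {
	let everything = RoomEventFilter::default();

	let mut with_url = RoomEventFilter::default();
	with_url.url_filter = Some(UrlFilter::EventsWithUrl);

	pdu.matches(&everything) && pdu.matches(&with_url)
}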

src/core/matrix/pdu/id.rs (new file)

@@ -0,0 +1,22 @@
use super::{Count, RawId};
use crate::utils::u64_from_u8x8;
pub type ShortRoomId = ShortId;
pub type ShortEventId = ShortId;
pub type ShortId = u64;
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub struct Id {
pub shortroomid: ShortRoomId,
pub shorteventid: Count,
}
impl From<RawId> for Id {
#[inline]
fn from(raw: RawId) -> Self {
Self {
shortroomid: u64_from_u8x8(raw.shortroomid()),
shorteventid: Count::from_unsigned(u64_from_u8x8(raw.shorteventid())),
}
}
}

@@ -0,0 +1,113 @@
use arrayvec::ArrayVec;
use super::{Count, Id, ShortEventId, ShortId, ShortRoomId};
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub enum RawId {
Normal(RawIdNormal),
Backfilled(RawIdBackfilled),
}
type RawIdNormal = [u8; RawId::NORMAL_LEN];
type RawIdBackfilled = [u8; RawId::BACKFILLED_LEN];
const INT_LEN: usize = size_of::<ShortId>();
impl RawId {
const BACKFILLED_LEN: usize = size_of::<ShortRoomId>() + INT_LEN + size_of::<ShortEventId>();
const MAX_LEN: usize = Self::BACKFILLED_LEN;
const NORMAL_LEN: usize = size_of::<ShortRoomId>() + size_of::<ShortEventId>();
#[inline]
#[must_use]
pub fn pdu_count(&self) -> Count {
let id: Id = (*self).into();
id.shorteventid
}
#[inline]
#[must_use]
pub fn shortroomid(self) -> [u8; INT_LEN] {
match self {
| Self::Normal(raw) => raw[0..INT_LEN]
.try_into()
.expect("normal raw shortroomid array from slice"),
| Self::Backfilled(raw) => raw[0..INT_LEN]
.try_into()
.expect("backfilled raw shortroomid array from slice"),
}
}
#[inline]
#[must_use]
pub fn shorteventid(self) -> [u8; INT_LEN] {
match self {
| Self::Normal(raw) => raw[INT_LEN..INT_LEN * 2]
.try_into()
.expect("normal raw shorteventid array from slice"),
| Self::Backfilled(raw) => raw[INT_LEN * 2..INT_LEN * 3]
.try_into()
.expect("backfilled raw shorteventid array from slice"),
}
}
#[inline]
#[must_use]
pub fn as_bytes(&self) -> &[u8] {
match self {
| Self::Normal(raw) => raw,
| Self::Backfilled(raw) => raw,
}
}
}
impl AsRef<[u8]> for RawId {
#[inline]
fn as_ref(&self) -> &[u8] { self.as_bytes() }
}
impl From<&[u8]> for RawId {
#[inline]
fn from(id: &[u8]) -> Self {
match id.len() {
| Self::NORMAL_LEN => Self::Normal(
id[0..Self::NORMAL_LEN]
.try_into()
.expect("normal RawId from [u8]"),
),
| Self::BACKFILLED_LEN => Self::Backfilled(
id[0..Self::BACKFILLED_LEN]
.try_into()
.expect("backfilled RawId from [u8]"),
),
| _ => unimplemented!("unrecognized RawId length"),
}
}
}
impl From<Id> for RawId {
#[inline]
fn from(id: Id) -> Self {
const MAX_LEN: usize = RawId::MAX_LEN;
type RawVec = ArrayVec<u8, MAX_LEN>;
let mut vec = RawVec::new();
vec.extend(id.shortroomid.to_be_bytes());
id.shorteventid.debug_assert_valid();
match id.shorteventid {
| Count::Normal(shorteventid) => {
vec.extend(shorteventid.to_be_bytes());
Self::Normal(vec.as_ref().try_into().expect("RawVec into RawId::Normal"))
},
| Count::Backfilled(shorteventid) => {
vec.extend(0_u64.to_be_bytes());
vec.extend(shorteventid.to_be_bytes());
Self::Backfilled(
vec.as_ref()
.try_into()
.expect("RawVec into RawId::Backfilled"),
)
},
}
}
}
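
The byte layout can be summarized with a round-trip sketch (not part of the diff): a normal ID is 16 bytes, a backfilled ID is 24 bytes with an 8-byte zero block between the room and event parts, and `Id::from` recovers the original counts. The values are illustrative.

fn raw_id_layout() {
	let normal: RawId = Id { shortroomid: 5, shorteventid: Count::Normal(9) }.into();
	// 8 bytes big-endian room ID + 8 bytes big-endian event ID.
	assert_eq!(normal.as_bytes().len(), 16);

	let backfilled: RawId = Id { shortroomid: 5, shorteventid: Count::Backfilled(-9) }.into();
	// 8 bytes room ID + 8 zero bytes + 8 bytes signed event ID.
	assert_eq!(backfilled.as_bytes().len(), 24);

	// Conversion back recovers the same counts.
	assert_eq!(Id::from(normal).shorteventid, Count::Normal(9));
	assert_eq!(Id::from(backfilled).shorteventid, Count::Backfilled(-9));
}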

@@ -0,0 +1,117 @@
use ruma::{
OwnedEventId, RoomVersionId,
canonical_json::redact_content_in_place,
events::{TimelineEventType, room::redaction::RoomRedactionEventContent},
};
use serde::Deserialize;
use serde_json::{
json,
value::{RawValue as RawJsonValue, to_raw_value},
};
use crate::{Error, Result, implement};
#[derive(Deserialize)]
struct ExtractRedactedBecause {
redacted_because: Option<serde::de::IgnoredAny>,
}
#[implement(super::Pdu)]
pub fn redact(&mut self, room_version_id: &RoomVersionId, reason: &Self) -> Result {
self.unsigned = None;
let mut content = serde_json::from_str(self.content.get())
.map_err(|_| Error::bad_database("PDU in db has invalid content."))?;
redact_content_in_place(&mut content, room_version_id, self.kind.to_string())
.map_err(|e| Error::Redaction(self.sender.server_name().to_owned(), e))?;
self.unsigned = Some(
to_raw_value(&json!({
"redacted_because": serde_json::to_value(reason).expect("to_value(Pdu) always works")
}))
.expect("to string always works"),
);
self.content = to_raw_value(&content).expect("to string always works");
Ok(())
}
#[implement(super::Pdu)]
#[must_use]
pub fn is_redacted(&self) -> bool {
let Some(unsigned) = &self.unsigned else {
return false;
};
let Ok(unsigned) = ExtractRedactedBecause::deserialize(&**unsigned) else {
return false;
};
unsigned.redacted_because.is_some()
}
/// Copies the `redacts` property of the event to the `content` dict and
/// vice-versa.
///
/// This follows the specification's
/// [recommendation](https://spec.matrix.org/v1.10/rooms/v11/#moving-the-redacts-property-of-mroomredaction-events-to-a-content-property):
///
/// > For backwards-compatibility with older clients, servers should add a
/// > redacts property to the top level of m.room.redaction events when
/// > serving such events over the Client-Server API.
///
/// > For improved compatibility with newer clients, servers should add a
/// > redacts property to the content of m.room.redaction events in older
/// > room versions when serving such events over the Client-Server API.
#[implement(super::Pdu)]
#[must_use]
pub fn copy_redacts(&self) -> (Option<OwnedEventId>, Box<RawJsonValue>) {
if self.kind == TimelineEventType::RoomRedaction {
if let Ok(mut content) =
serde_json::from_str::<RoomRedactionEventContent>(self.content.get())
{
match content.redacts {
| Some(redacts) => {
return (Some(redacts), self.content.clone());
},
| _ => match self.redacts.clone() {
| Some(redacts) => {
content.redacts = Some(redacts);
return (
self.redacts.clone(),
to_raw_value(&content)
.expect("Must be valid, we only added redacts field"),
);
},
| _ => {},
},
}
}
}
(self.redacts.clone(), self.content.clone())
}
#[implement(super::Pdu)]
#[must_use]
pub fn redacts_id(&self, room_version: &RoomVersionId) -> Option<OwnedEventId> {
use RoomVersionId::*;
if self.kind != TimelineEventType::RoomRedaction {
return None;
}
match *room_version {
| V1 | V2 | V3 | V4 | V5 | V6 | V7 | V8 | V9 | V10 => self.redacts.clone(),
| _ =>
self.get_content::<RoomRedactionEventContent>()
.ok()?
.redacts,
}
}
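
A small sketch (not part of the diff) of how the redaction target is located per room version; the versions used are illustrative.

fn redaction_target(pdu: &super::Pdu) -> Option<OwnedEventId> {
	// Rooms v1 through v10: the target is the top-level `redacts` field.
	let old_style = pdu.redacts_id(&RoomVersionId::V10);
	// Any later version (v11+): the target is read from the event content.
	let new_style = pdu.redacts_id(&RoomVersionId::V11);
	old_style.or(new_style)
}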

@@ -0,0 +1,22 @@
use ruma::events::relation::RelationType;
use serde::Deserialize;
use crate::implement;
#[derive(Clone, Debug, Deserialize)]
struct ExtractRelType {
rel_type: RelationType,
}
#[derive(Clone, Debug, Deserialize)]
struct ExtractRelatesToEventId {
#[serde(rename = "m.relates_to")]
relates_to: ExtractRelType,
}
#[implement(super::Pdu)]
#[must_use]
pub fn relation_type_equal(&self, rel_type: &RelationType) -> bool {
self.get_content()
.map(|c: ExtractRelatesToEventId| c.relates_to.rel_type)
.is_ok_and(|r| r == *rel_type)
}

@@ -0,0 +1,8 @@
use smallstr::SmallString;
use super::ShortId;
pub type StateKey = SmallString<[u8; INLINE_SIZE]>;
pub type ShortStateKey = ShortId;
const INLINE_SIZE: usize = 48;

@@ -0,0 +1,288 @@
use ruma::{
events::{
AnyEphemeralRoomEvent, AnyMessageLikeEvent, AnyStateEvent, AnyStrippedStateEvent,
AnySyncStateEvent, AnySyncTimelineEvent, AnyTimelineEvent, StateEvent,
room::member::RoomMemberEventContent, space::child::HierarchySpaceChildEvent,
},
serde::Raw,
};
use serde_json::{json, value::Value as JsonValue};
use crate::implement;
/// This only works for events that are also AnyRoomEvents.
#[must_use]
#[implement(super::Pdu)]
pub fn into_any_event(self) -> Raw<AnyEphemeralRoomEvent> {
serde_json::from_value(self.into_any_event_value()).expect("Raw::from_value always works")
}
/// This only works for events that are also AnyRoomEvents.
#[implement(super::Pdu)]
#[must_use]
#[inline]
pub fn into_any_event_value(self) -> JsonValue {
let (redacts, content) = self.copy_redacts();
let mut json = json!({
"content": content,
"type": self.kind,
"event_id": self.event_id,
"sender": self.sender,
"origin_server_ts": self.origin_server_ts,
"room_id": self.room_id,
});
if let Some(unsigned) = &self.unsigned {
json["unsigned"] = json!(unsigned);
}
if let Some(state_key) = &self.state_key {
json["state_key"] = json!(state_key);
}
if let Some(redacts) = &redacts {
json["redacts"] = json!(redacts);
}
json
}
#[implement(super::Pdu)]
#[must_use]
#[inline]
pub fn into_room_event(self) -> Raw<AnyTimelineEvent> { self.to_room_event() }
#[implement(super::Pdu)]
#[must_use]
pub fn to_room_event(&self) -> Raw<AnyTimelineEvent> {
serde_json::from_value(self.to_room_event_value()).expect("Raw::from_value always works")
}
#[implement(super::Pdu)]
#[must_use]
#[inline]
pub fn to_room_event_value(&self) -> JsonValue {
let (redacts, content) = self.copy_redacts();
let mut json = json!({
"content": content,
"type": self.kind,
"event_id": self.event_id,
"sender": self.sender,
"origin_server_ts": self.origin_server_ts,
"room_id": self.room_id,
});
if let Some(unsigned) = &self.unsigned {
json["unsigned"] = json!(unsigned);
}
if let Some(state_key) = &self.state_key {
json["state_key"] = json!(state_key);
}
if let Some(redacts) = &redacts {
json["redacts"] = json!(redacts);
}
json
}
#[implement(super::Pdu)]
#[must_use]
#[inline]
pub fn into_message_like_event(self) -> Raw<AnyMessageLikeEvent> { self.to_message_like_event() }
#[implement(super::Pdu)]
#[must_use]
pub fn to_message_like_event(&self) -> Raw<AnyMessageLikeEvent> {
serde_json::from_value(self.to_message_like_event_value())
.expect("Raw::from_value always works")
}
#[implement(super::Pdu)]
#[must_use]
#[inline]
pub fn to_message_like_event_value(&self) -> JsonValue {
let (redacts, content) = self.copy_redacts();
let mut json = json!({
"content": content,
"type": self.kind,
"event_id": self.event_id,
"sender": self.sender,
"origin_server_ts": self.origin_server_ts,
"room_id": self.room_id,
});
if let Some(unsigned) = &self.unsigned {
json["unsigned"] = json!(unsigned);
}
if let Some(state_key) = &self.state_key {
json["state_key"] = json!(state_key);
}
if let Some(redacts) = &redacts {
json["redacts"] = json!(redacts);
}
json
}
#[implement(super::Pdu)]
#[must_use]
#[inline]
pub fn into_sync_room_event(self) -> Raw<AnySyncTimelineEvent> { self.to_sync_room_event() }
#[implement(super::Pdu)]
#[must_use]
pub fn to_sync_room_event(&self) -> Raw<AnySyncTimelineEvent> {
serde_json::from_value(self.to_sync_room_event_value()).expect("Raw::from_value always works")
}
#[implement(super::Pdu)]
#[must_use]
#[inline]
pub fn to_sync_room_event_value(&self) -> JsonValue {
let (redacts, content) = self.copy_redacts();
let mut json = json!({
"content": content,
"type": self.kind,
"event_id": self.event_id,
"sender": self.sender,
"origin_server_ts": self.origin_server_ts,
});
if let Some(unsigned) = &self.unsigned {
json["unsigned"] = json!(unsigned);
}
if let Some(state_key) = &self.state_key {
json["state_key"] = json!(state_key);
}
if let Some(redacts) = &redacts {
json["redacts"] = json!(redacts);
}
json
}
#[implement(super::Pdu)]
#[must_use]
pub fn into_state_event(self) -> Raw<AnyStateEvent> {
serde_json::from_value(self.into_state_event_value()).expect("Raw::from_value always works")
}
#[implement(super::Pdu)]
#[must_use]
#[inline]
pub fn into_state_event_value(self) -> JsonValue {
let mut json = json!({
"content": self.content,
"type": self.kind,
"event_id": self.event_id,
"sender": self.sender,
"origin_server_ts": self.origin_server_ts,
"room_id": self.room_id,
"state_key": self.state_key,
});
if let Some(unsigned) = self.unsigned {
json["unsigned"] = json!(unsigned);
}
json
}
#[implement(super::Pdu)]
#[must_use]
pub fn into_sync_state_event(self) -> Raw<AnySyncStateEvent> {
serde_json::from_value(self.into_sync_state_event_value())
.expect("Raw::from_value always works")
}
#[implement(super::Pdu)]
#[must_use]
#[inline]
pub fn into_sync_state_event_value(self) -> JsonValue {
let mut json = json!({
"content": self.content,
"type": self.kind,
"event_id": self.event_id,
"sender": self.sender,
"origin_server_ts": self.origin_server_ts,
"state_key": self.state_key,
});
if let Some(unsigned) = &self.unsigned {
json["unsigned"] = json!(unsigned);
}
json
}
#[implement(super::Pdu)]
#[must_use]
#[inline]
pub fn into_stripped_state_event(self) -> Raw<AnyStrippedStateEvent> {
self.to_stripped_state_event()
}
#[implement(super::Pdu)]
#[must_use]
pub fn to_stripped_state_event(&self) -> Raw<AnyStrippedStateEvent> {
serde_json::from_value(self.to_stripped_state_event_value())
.expect("Raw::from_value always works")
}
#[implement(super::Pdu)]
#[must_use]
#[inline]
pub fn to_stripped_state_event_value(&self) -> JsonValue {
json!({
"content": self.content,
"type": self.kind,
"sender": self.sender,
"state_key": self.state_key,
})
}
#[implement(super::Pdu)]
#[must_use]
pub fn into_stripped_spacechild_state_event(self) -> Raw<HierarchySpaceChildEvent> {
serde_json::from_value(self.into_stripped_spacechild_state_event_value())
.expect("Raw::from_value always works")
}
#[implement(super::Pdu)]
#[must_use]
#[inline]
pub fn into_stripped_spacechild_state_event_value(self) -> JsonValue {
json!({
"content": self.content,
"type": self.kind,
"sender": self.sender,
"state_key": self.state_key,
"origin_server_ts": self.origin_server_ts,
})
}
#[implement(super::Pdu)]
#[must_use]
pub fn into_member_event(self) -> Raw<StateEvent<RoomMemberEventContent>> {
serde_json::from_value(self.into_member_event_value()).expect("Raw::from_value always works")
}
#[implement(super::Pdu)]
#[must_use]
#[inline]
pub fn into_member_event_value(self) -> JsonValue {
let mut json = json!({
"content": self.content,
"type": self.kind,
"event_id": self.event_id,
"sender": self.sender,
"origin_server_ts": self.origin_server_ts,
"redacts": self.redacts,
"room_id": self.room_id,
"state_key": self.state_key,
});
if let Some(unsigned) = self.unsigned {
json["unsigned"] = json!(unsigned);
}
json
}

@@ -0,0 +1,17 @@
use super::Count;
#[test]
fn backfilled_parse() {
let count: Count = "-987654".parse().expect("parse() failed");
let backfilled = matches!(count, Count::Backfilled(_));
assert!(backfilled, "not backfilled variant");
}
#[test]
fn normal_parse() {
let count: Count = "987654".parse().expect("parse() failed");
let backfilled = matches!(count, Count::Backfilled(_));
assert!(!backfilled, "backfilled variant");
}

@@ -0,0 +1,116 @@
use std::collections::BTreeMap;
use ruma::MilliSecondsSinceUnixEpoch;
use serde::Deserialize;
use serde_json::value::{RawValue as RawJsonValue, Value as JsonValue, to_raw_value};
use super::Pdu;
use crate::{Result, err, implement, is_true};
#[implement(Pdu)]
pub fn remove_transaction_id(&mut self) -> Result {
use BTreeMap as Map;
let Some(unsigned) = &self.unsigned else {
return Ok(());
};
let mut unsigned: Map<&str, Box<RawJsonValue>> = serde_json::from_str(unsigned.get())
.map_err(|e| err!(Database("Invalid unsigned in pdu event: {e}")))?;
unsigned.remove("transaction_id");
self.unsigned = to_raw_value(&unsigned)
.map(Some)
.expect("unsigned is valid");
Ok(())
}
#[implement(Pdu)]
pub fn add_age(&mut self) -> Result {
use BTreeMap as Map;
let mut unsigned: Map<&str, Box<RawJsonValue>> = self
.unsigned
.as_deref()
.map(RawJsonValue::get)
.map_or_else(|| Ok(Map::new()), serde_json::from_str)
.map_err(|e| err!(Database("Invalid unsigned in pdu event: {e}")))?;
// deliberately allowing for the possibility of negative age
let now: i128 = MilliSecondsSinceUnixEpoch::now().get().into();
let then: i128 = self.origin_server_ts.into();
let this_age = now.saturating_sub(then);
unsigned.insert("age", to_raw_value(&this_age)?);
self.unsigned = Some(to_raw_value(&unsigned)?);
Ok(())
}
#[implement(Pdu)]
pub fn add_relation(&mut self, name: &str, pdu: Option<&Pdu>) -> Result {
use serde_json::Map;
let mut unsigned: Map<String, JsonValue> = self
.unsigned
.as_deref()
.map(RawJsonValue::get)
.map_or_else(|| Ok(Map::new()), serde_json::from_str)
.map_err(|e| err!(Database("Invalid unsigned in pdu event: {e}")))?;
let pdu = pdu
.map(serde_json::to_value)
.transpose()?
.unwrap_or_else(|| JsonValue::Object(Map::new()));
unsigned
.entry("m.relations")
.or_insert(JsonValue::Object(Map::new()))
.as_object_mut()
.map(|object| object.insert(name.to_owned(), pdu));
self.unsigned = Some(to_raw_value(&unsigned)?);
Ok(())
}
#[implement(Pdu)]
pub fn contains_unsigned_property<F>(&self, property: &str, is_type: F) -> bool
where
F: FnOnce(&JsonValue) -> bool,
{
self.get_unsigned_as_value()
.get(property)
.map(is_type)
.is_some_and(is_true!())
}
#[implement(Pdu)]
pub fn get_unsigned_property<T>(&self, property: &str) -> Result<T>
where
T: for<'de> Deserialize<'de>,
{
self.get_unsigned_as_value()
.get_mut(property)
.map(JsonValue::take)
.map(serde_json::from_value)
.ok_or(err!(Request(NotFound("property not found in unsigned object"))))?
.map_err(|e| err!(Database("Failed to deserialize unsigned.{property} into type: {e}")))
}
#[implement(Pdu)]
#[must_use]
pub fn get_unsigned_as_value(&self) -> JsonValue {
self.get_unsigned::<JsonValue>().unwrap_or_default()
}
#[implement(Pdu)]
pub fn get_unsigned<T>(&self) -> Result<JsonValue> {
self.unsigned
.as_ref()
.map(|raw| raw.get())
.map(serde_json::from_str)
.ok_or(err!(Request(NotFound("\"unsigned\" property not found in pdu"))))?
.map_err(|e| err!(Database("Failed to deserialize \"unsigned\" into value: {e}")))
}
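
A usage sketch (not part of the diff) of the helpers above, as they might be applied before handing an event to a client; the flow and function name are illustrative.

fn prepare_for_client(pdu: &mut Pdu) -> Result<()> {
	// Drop the transaction_id (it is only meaningful to its original sender)
	// and attach the computed age of the event.
	pdu.remove_transaction_id()?;
	pdu.add_age()?;

	// `unsigned` can then be read back as loosely typed JSON.
	let age = pdu.get_unsigned_as_value().get("age").cloned();
	drop(age);
	Ok(())
}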

@@ -0,0 +1,17 @@
//! Permission is hereby granted, free of charge, to any person obtaining a copy
//! of this software and associated documentation files (the "Software"), to
//! deal in the Software without restriction, including without limitation the
//! rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
//! sell copies of the Software, and to permit persons to whom the Software is
//! furnished to do so, subject to the following conditions:
//! The above copyright notice and this permission notice shall be included in
//! all copies or substantial portions of the Software.
//! THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
//! IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
//! FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
//! AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
//! LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
//! FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
//! IN THE SOFTWARE.

@@ -0,0 +1,672 @@
#[cfg(conduwuit_bench)]
extern crate test;
use std::{
borrow::Borrow,
collections::{HashMap, HashSet},
sync::{
Arc,
atomic::{AtomicU64, Ordering::SeqCst},
},
};
use futures::{future, future::ready};
use maplit::{btreemap, hashmap, hashset};
use ruma::{
EventId, MilliSecondsSinceUnixEpoch, OwnedEventId, RoomId, RoomVersionId, Signatures, UserId,
events::{
StateEventType, TimelineEventType,
pdu::{EventHash, Pdu, RoomV3Pdu},
room::{
join_rules::{JoinRule, RoomJoinRulesEventContent},
member::{MembershipState, RoomMemberEventContent},
},
},
int, room_id, uint, user_id,
};
use serde_json::{
json,
value::{RawValue as RawJsonValue, to_raw_value as to_raw_json_value},
};
use self::event::PduEvent;
use crate::state_res::{self as state_res, Error, Event, Result, StateMap};
static SERVER_TIMESTAMP: AtomicU64 = AtomicU64::new(0);
#[cfg(conduwuit_bench)]
#[cfg_attr(conduwuit_bench, bench)]
fn lexico_topo_sort(c: &mut test::Bencher) {
let graph = hashmap! {
event_id("l") => hashset![event_id("o")],
event_id("m") => hashset![event_id("n"), event_id("o")],
event_id("n") => hashset![event_id("o")],
event_id("o") => hashset![], // "o" has zero outgoing edges but 4 incoming edges
event_id("p") => hashset![event_id("o")],
};
c.iter(|| {
let _ = state_res::lexicographical_topological_sort(&graph, &|_| {
future::ok((int!(0), MilliSecondsSinceUnixEpoch(uint!(0))))
});
});
}
#[cfg(conduwuit_bench)]
#[cfg_attr(conduwuit_bench, bench)]
fn resolution_shallow_auth_chain(c: &mut test::Bencher) {
let parallel_fetches = 32;
let mut store = TestStore(hashmap! {});
// build up the DAG
let (state_at_bob, state_at_charlie, _) = store.set_up();
c.iter(|| async {
let ev_map = store.0.clone();
let state_sets = [&state_at_bob, &state_at_charlie];
let fetch = |id: OwnedEventId| ready(ev_map.get(&id).map(Arc::clone));
let exists = |id: OwnedEventId| ready(ev_map.get(&id).is_some());
let auth_chain_sets: Vec<HashSet<_>> = state_sets
.iter()
.map(|map| {
store
.auth_event_ids(room_id(), map.values().cloned().collect())
.unwrap()
})
.collect();
let _ = match state_res::resolve(
&RoomVersionId::V6,
state_sets.into_iter(),
&auth_chain_sets,
&fetch,
&exists,
parallel_fetches,
)
.await
{
| Ok(state) => state,
| Err(e) => panic!("{e}"),
};
});
}
#[cfg(conduwuit_bench)]
#[cfg_attr(conduwuit_bench, bench)]
fn resolve_deeper_event_set(c: &mut test::Bencher) {
let parallel_fetches = 32;
let mut inner = INITIAL_EVENTS();
let ban = BAN_STATE_SET();
inner.extend(ban);
let store = TestStore(inner.clone());
let state_set_a = [
inner.get(&event_id("CREATE")).unwrap(),
inner.get(&event_id("IJR")).unwrap(),
inner.get(&event_id("IMA")).unwrap(),
inner.get(&event_id("IMB")).unwrap(),
inner.get(&event_id("IMC")).unwrap(),
inner.get(&event_id("MB")).unwrap(),
inner.get(&event_id("PA")).unwrap(),
]
.iter()
.map(|ev| {
(
(ev.event_type().clone().into(), ev.state_key().unwrap().into()),
ev.event_id().to_owned(),
)
})
.collect::<StateMap<_>>();
let state_set_b = [
inner.get(&event_id("CREATE")).unwrap(),
inner.get(&event_id("IJR")).unwrap(),
inner.get(&event_id("IMA")).unwrap(),
inner.get(&event_id("IMB")).unwrap(),
inner.get(&event_id("IMC")).unwrap(),
inner.get(&event_id("IME")).unwrap(),
inner.get(&event_id("PA")).unwrap(),
]
.iter()
.map(|ev| {
(
(ev.event_type().clone().into(), ev.state_key().unwrap().into()),
ev.event_id().to_owned(),
)
})
.collect::<StateMap<_>>();
c.iter(|| async {
let state_sets = [&state_set_a, &state_set_b];
let auth_chain_sets: Vec<HashSet<_>> = state_sets
.iter()
.map(|map| {
store
.auth_event_ids(room_id(), map.values().cloned().collect())
.unwrap()
})
.collect();
let fetch = |id: OwnedEventId| ready(inner.get(&id).map(Arc::clone));
let exists = |id: OwnedEventId| ready(inner.get(&id).is_some());
let _ = match state_res::resolve(
&RoomVersionId::V6,
state_sets.into_iter(),
&auth_chain_sets,
&fetch,
&exists,
parallel_fetches,
)
.await
{
| Ok(state) => state,
| Err(_) => panic!("resolution failed during benchmarking"),
};
});
}
//*/////////////////////////////////////////////////////////////////////
//
// IMPLEMENTATION DETAILS AHEAD
//
/////////////////////////////////////////////////////////////////////*/
struct TestStore<E: Event>(HashMap<OwnedEventId, Arc<E>>);
#[allow(unused)]
impl<E: Event> TestStore<E> {
fn get_event(&self, room_id: &RoomId, event_id: &EventId) -> Result<Arc<E>> {
self.0
.get(event_id)
.map(Arc::clone)
.ok_or_else(|| Error::NotFound(format!("{} not found", event_id)))
}
/// Returns the events that correspond to the `event_ids` sorted in the same
/// order.
fn get_events(&self, room_id: &RoomId, event_ids: &[OwnedEventId]) -> Result<Vec<Arc<E>>> {
let mut events = vec![];
for id in event_ids {
events.push(self.get_event(room_id, id)?);
}
Ok(events)
}
/// Returns a Vec of the related auth events to the given `event`.
fn auth_event_ids(&self, room_id: &RoomId, event_ids: Vec<E::Id>) -> Result<HashSet<E::Id>> {
let mut result = HashSet::new();
let mut stack = event_ids;
// DFS for auth event chain
while !stack.is_empty() {
let ev_id = stack.pop().unwrap();
if result.contains(&ev_id) {
continue;
}
result.insert(ev_id.clone());
let event = self.get_event(room_id, ev_id.borrow())?;
stack.extend(event.auth_events().map(ToOwned::to_owned));
}
Ok(result)
}
/// Returns a vector representing the difference in auth chains of the given
/// `events`.
fn auth_chain_diff(
&self,
room_id: &RoomId,
event_ids: Vec<Vec<E::Id>>,
) -> Result<Vec<E::Id>> {
let mut auth_chain_sets = vec![];
for ids in event_ids {
// TODO state store `auth_event_ids` returns self in the event ids list
// when an event returns `auth_event_ids` self is not contained
let chain = self
.auth_event_ids(room_id, ids)?
.into_iter()
.collect::<HashSet<_>>();
auth_chain_sets.push(chain);
}
if let Some(first) = auth_chain_sets.first().cloned() {
let common = auth_chain_sets
.iter()
.skip(1)
.fold(first, |a, b| a.intersection(b).cloned().collect::<HashSet<_>>());
Ok(auth_chain_sets
.into_iter()
.flatten()
.filter(|id| !common.contains(id.borrow()))
.collect())
} else {
Ok(vec![])
}
}
}
impl TestStore<PduEvent> {
#[allow(clippy::type_complexity)]
fn set_up(
&mut self,
) -> (StateMap<OwnedEventId>, StateMap<OwnedEventId>, StateMap<OwnedEventId>) {
let create_event = to_pdu_event::<&EventId>(
"CREATE",
alice(),
TimelineEventType::RoomCreate,
Some(""),
to_raw_json_value(&json!({ "creator": alice() })).unwrap(),
&[],
&[],
);
let cre = create_event.event_id().to_owned();
self.0.insert(cre.clone(), Arc::clone(&create_event));
let alice_mem = to_pdu_event(
"IMA",
alice(),
TimelineEventType::RoomMember,
Some(alice().to_string().as_str()),
member_content_join(),
&[cre.clone()],
&[cre.clone()],
);
self.0
.insert(alice_mem.event_id().to_owned(), Arc::clone(&alice_mem));
let join_rules = to_pdu_event(
"IJR",
alice(),
TimelineEventType::RoomJoinRules,
Some(""),
to_raw_json_value(&RoomJoinRulesEventContent::new(JoinRule::Public)).unwrap(),
&[cre.clone(), alice_mem.event_id().to_owned()],
&[alice_mem.event_id().to_owned()],
);
self.0
.insert(join_rules.event_id().to_owned(), join_rules.clone());
// Bob and Charlie join at the same time, so there is a fork
// this will be represented in the state_sets when we resolve
let bob_mem = to_pdu_event(
"IMB",
bob(),
TimelineEventType::RoomMember,
Some(bob().to_string().as_str()),
member_content_join(),
&[cre.clone(), join_rules.event_id().to_owned()],
&[join_rules.event_id().to_owned()],
);
self.0
.insert(bob_mem.event_id().to_owned(), bob_mem.clone());
let charlie_mem = to_pdu_event(
"IMC",
charlie(),
TimelineEventType::RoomMember,
Some(charlie().to_string().as_str()),
member_content_join(),
&[cre, join_rules.event_id().to_owned()],
&[join_rules.event_id().to_owned()],
);
self.0
.insert(charlie_mem.event_id().to_owned(), charlie_mem.clone());
let state_at_bob = [&create_event, &alice_mem, &join_rules, &bob_mem]
.iter()
.map(|ev| {
(
(ev.event_type().clone().into(), ev.state_key().unwrap().into()),
ev.event_id().to_owned(),
)
})
.collect::<StateMap<_>>();
let state_at_charlie = [&create_event, &alice_mem, &join_rules, &charlie_mem]
.iter()
.map(|ev| {
(
(ev.event_type().clone().into(), ev.state_key().unwrap().into()),
ev.event_id().to_owned(),
)
})
.collect::<StateMap<_>>();
let expected = [&create_event, &alice_mem, &join_rules, &bob_mem, &charlie_mem]
.iter()
.map(|ev| {
(
(ev.event_type().clone().into(), ev.state_key().unwrap().into()),
ev.event_id().to_owned(),
)
})
.collect::<StateMap<_>>();
(state_at_bob, state_at_charlie, expected)
}
}
fn event_id(id: &str) -> OwnedEventId {
if id.contains('$') {
return id.try_into().unwrap();
}
format!("${}:foo", id).try_into().unwrap()
}
fn alice() -> &'static UserId { user_id!("@alice:foo") }
fn bob() -> &'static UserId { user_id!("@bob:foo") }
fn charlie() -> &'static UserId { user_id!("@charlie:foo") }
fn ella() -> &'static UserId { user_id!("@ella:foo") }
fn room_id() -> &'static RoomId { room_id!("!test:foo") }
fn member_content_ban() -> Box<RawJsonValue> {
to_raw_json_value(&RoomMemberEventContent::new(MembershipState::Ban)).unwrap()
}
fn member_content_join() -> Box<RawJsonValue> {
to_raw_json_value(&RoomMemberEventContent::new(MembershipState::Join)).unwrap()
}
fn to_pdu_event<S>(
id: &str,
sender: &UserId,
ev_type: TimelineEventType,
state_key: Option<&str>,
content: Box<RawJsonValue>,
auth_events: &[S],
prev_events: &[S],
) -> Arc<PduEvent>
where
S: AsRef<str>,
{
// We don't care whether the addition happens in order, just that it is atomic
// (each event gets its own value).
let ts = SERVER_TIMESTAMP.fetch_add(1, SeqCst);
let id = if id.contains('$') {
id.to_owned()
} else {
format!("${}:foo", id)
};
let auth_events = auth_events
.iter()
.map(AsRef::as_ref)
.map(event_id)
.collect::<Vec<_>>();
let prev_events = prev_events
.iter()
.map(AsRef::as_ref)
.map(event_id)
.collect::<Vec<_>>();
let state_key = state_key.map(ToOwned::to_owned);
Arc::new(PduEvent {
event_id: id.try_into().unwrap(),
rest: Pdu::RoomV3Pdu(RoomV3Pdu {
room_id: room_id().to_owned(),
sender: sender.to_owned(),
origin_server_ts: MilliSecondsSinceUnixEpoch(ts.try_into().unwrap()),
state_key,
kind: ev_type,
content,
redacts: None,
unsigned: btreemap! {},
auth_events,
prev_events,
depth: uint!(0),
hashes: EventHash::new(String::new()),
signatures: Signatures::new(),
}),
})
}
// all graphs start with these input events
#[allow(non_snake_case)]
fn INITIAL_EVENTS() -> HashMap<OwnedEventId, Arc<PduEvent>> {
vec![
to_pdu_event::<&EventId>(
"CREATE",
alice(),
TimelineEventType::RoomCreate,
Some(""),
to_raw_json_value(&json!({ "creator": alice() })).unwrap(),
&[],
&[],
),
to_pdu_event(
"IMA",
alice(),
TimelineEventType::RoomMember,
Some(alice().as_str()),
member_content_join(),
&["CREATE"],
&["CREATE"],
),
to_pdu_event(
"IPOWER",
alice(),
TimelineEventType::RoomPowerLevels,
Some(""),
to_raw_json_value(&json!({ "users": { alice(): 100 } })).unwrap(),
&["CREATE", "IMA"],
&["IMA"],
),
to_pdu_event(
"IJR",
alice(),
TimelineEventType::RoomJoinRules,
Some(""),
to_raw_json_value(&RoomJoinRulesEventContent::new(JoinRule::Public)).unwrap(),
&["CREATE", "IMA", "IPOWER"],
&["IPOWER"],
),
to_pdu_event(
"IMB",
bob(),
TimelineEventType::RoomMember,
Some(bob().to_string().as_str()),
member_content_join(),
&["CREATE", "IJR", "IPOWER"],
&["IJR"],
),
to_pdu_event(
"IMC",
charlie(),
TimelineEventType::RoomMember,
Some(charlie().to_string().as_str()),
member_content_join(),
&["CREATE", "IJR", "IPOWER"],
&["IMB"],
),
to_pdu_event::<&EventId>(
"START",
charlie(),
TimelineEventType::RoomTopic,
Some(""),
to_raw_json_value(&json!({})).unwrap(),
&[],
&[],
),
to_pdu_event::<&EventId>(
"END",
charlie(),
TimelineEventType::RoomTopic,
Some(""),
to_raw_json_value(&json!({})).unwrap(),
&[],
&[],
),
]
.into_iter()
.map(|ev| (ev.event_id().to_owned(), ev))
.collect()
}
// all graphs start with these input events
#[allow(non_snake_case)]
fn BAN_STATE_SET() -> HashMap<OwnedEventId, Arc<PduEvent>> {
vec![
to_pdu_event(
"PA",
alice(),
TimelineEventType::RoomPowerLevels,
Some(""),
to_raw_json_value(&json!({ "users": { alice(): 100, bob(): 50 } })).unwrap(),
&["CREATE", "IMA", "IPOWER"], // auth_events
&["START"], // prev_events
),
to_pdu_event(
"PB",
alice(),
TimelineEventType::RoomPowerLevels,
Some(""),
to_raw_json_value(&json!({ "users": { alice(): 100, bob(): 50 } })).unwrap(),
&["CREATE", "IMA", "IPOWER"],
&["END"],
),
to_pdu_event(
"MB",
alice(),
TimelineEventType::RoomMember,
Some(ella().as_str()),
member_content_ban(),
&["CREATE", "IMA", "PB"],
&["PA"],
),
to_pdu_event(
"IME",
ella(),
TimelineEventType::RoomMember,
Some(ella().as_str()),
member_content_join(),
&["CREATE", "IJR", "PA"],
&["MB"],
),
]
.into_iter()
.map(|ev| (ev.event_id().to_owned(), ev))
.collect()
}
/// Convenience trait for adding event type plus state key to state maps.
trait EventTypeExt {
fn with_state_key(self, state_key: impl Into<String>) -> (StateEventType, String);
}
impl EventTypeExt for &TimelineEventType {
fn with_state_key(self, state_key: impl Into<String>) -> (StateEventType, String) {
(self.to_string().into(), state_key.into())
}
}
mod event {
use ruma::{
MilliSecondsSinceUnixEpoch, OwnedEventId, RoomId, UserId,
events::{TimelineEventType, pdu::Pdu},
};
use serde::{Deserialize, Serialize};
use serde_json::value::RawValue as RawJsonValue;
use super::Event;
impl Event for PduEvent {
type Id = OwnedEventId;
fn event_id(&self) -> &Self::Id { &self.event_id }
fn room_id(&self) -> &RoomId {
match &self.rest {
| Pdu::RoomV1Pdu(ev) => &ev.room_id,
| Pdu::RoomV3Pdu(ev) => &ev.room_id,
#[cfg(not(feature = "unstable-exhaustive-types"))]
| _ => unreachable!("new PDU version"),
}
}
fn sender(&self) -> &UserId {
match &self.rest {
| Pdu::RoomV1Pdu(ev) => &ev.sender,
| Pdu::RoomV3Pdu(ev) => &ev.sender,
#[cfg(not(feature = "unstable-exhaustive-types"))]
| _ => unreachable!("new PDU version"),
}
}
fn event_type(&self) -> &TimelineEventType {
match &self.rest {
| Pdu::RoomV1Pdu(ev) => &ev.kind,
| Pdu::RoomV3Pdu(ev) => &ev.kind,
#[cfg(not(feature = "unstable-exhaustive-types"))]
| _ => unreachable!("new PDU version"),
}
}
fn content(&self) -> &RawJsonValue {
match &self.rest {
| Pdu::RoomV1Pdu(ev) => &ev.content,
| Pdu::RoomV3Pdu(ev) => &ev.content,
#[cfg(not(feature = "unstable-exhaustive-types"))]
| _ => unreachable!("new PDU version"),
}
}
fn origin_server_ts(&self) -> MilliSecondsSinceUnixEpoch {
match &self.rest {
| Pdu::RoomV1Pdu(ev) => ev.origin_server_ts,
| Pdu::RoomV3Pdu(ev) => ev.origin_server_ts,
#[cfg(not(feature = "unstable-exhaustive-types"))]
| _ => unreachable!("new PDU version"),
}
}
fn state_key(&self) -> Option<&str> {
match &self.rest {
| Pdu::RoomV1Pdu(ev) => ev.state_key.as_deref(),
| Pdu::RoomV3Pdu(ev) => ev.state_key.as_deref(),
#[cfg(not(feature = "unstable-exhaustive-types"))]
| _ => unreachable!("new PDU version"),
}
}
fn prev_events(&self) -> Box<dyn DoubleEndedIterator<Item = &Self::Id> + Send + '_> {
match &self.rest {
| Pdu::RoomV1Pdu(ev) => Box::new(ev.prev_events.iter().map(|(id, _)| id)),
| Pdu::RoomV3Pdu(ev) => Box::new(ev.prev_events.iter()),
#[cfg(not(feature = "unstable-exhaustive-types"))]
| _ => unreachable!("new PDU version"),
}
}
fn auth_events(&self) -> Box<dyn DoubleEndedIterator<Item = &Self::Id> + Send + '_> {
match &self.rest {
| Pdu::RoomV1Pdu(ev) => Box::new(ev.auth_events.iter().map(|(id, _)| id)),
| Pdu::RoomV3Pdu(ev) => Box::new(ev.auth_events.iter()),
#[cfg(not(feature = "unstable-exhaustive-types"))]
| _ => unreachable!("new PDU version"),
}
}
fn redacts(&self) -> Option<&Self::Id> {
match &self.rest {
| Pdu::RoomV1Pdu(ev) => ev.redacts.as_ref(),
| Pdu::RoomV3Pdu(ev) => ev.redacts.as_ref(),
#[cfg(not(feature = "unstable-exhaustive-types"))]
| _ => unreachable!("new PDU version"),
}
}
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub(crate) struct PduEvent {
pub(crate) event_id: OwnedEventId,
#[serde(flatten)]
pub(crate) rest: Pdu,
}
}

@@ -0,0 +1,23 @@
use serde_json::Error as JsonError;
use thiserror::Error;
/// Represents the various errors that arise when resolving state.
#[derive(Error, Debug)]
#[non_exhaustive]
pub enum Error {
/// A deserialization error.
#[error(transparent)]
SerdeJson(#[from] JsonError),
/// The given option or version is unsupported.
#[error("Unsupported room version: {0}")]
Unsupported(String),
/// The given event was not found.
#[error("Not found error: {0}")]
NotFound(String),
/// Invalid fields in the given PDU.
#[error("Invalid PDU: {0}")]
InvalidPdu(String),
}

File diff suppressed because it is too large

File diff suppressed because it is too large

@@ -0,0 +1,104 @@
11/29/2020 BRANCH: timo-spec-comp REV: d2a85669cc6056679ce6ca0fde4658a879ad2b08
lexicographical topological sort
time: [1.7123 us 1.7157 us 1.7199 us]
change: [-1.7584% -1.5433% -1.3205%] (p = 0.00 < 0.05)
Performance has improved.
Found 8 outliers among 100 measurements (8.00%)
2 (2.00%) low mild
5 (5.00%) high mild
1 (1.00%) high severe
resolve state of 5 events one fork
time: [10.981 us 10.998 us 11.020 us]
Found 3 outliers among 100 measurements (3.00%)
3 (3.00%) high mild
resolve state of 10 events 3 conflicting
time: [26.858 us 26.946 us 27.037 us]
11/29/2020 BRANCH: event-trait REV: f0eb1310efd49d722979f57f20bd1ac3592b0479
lexicographical topological sort
time: [1.7686 us 1.7738 us 1.7810 us]
change: [-3.2752% -2.4634% -1.7635%] (p = 0.00 < 0.05)
Performance has improved.
Found 1 outliers among 100 measurements (1.00%)
1 (1.00%) high severe
resolve state of 5 events one fork
time: [10.643 us 10.656 us 10.669 us]
change: [-4.9990% -3.8078% -2.8319%] (p = 0.00 < 0.05)
Performance has improved.
Found 1 outliers among 100 measurements (1.00%)
1 (1.00%) high severe
resolve state of 10 events 3 conflicting
time: [29.149 us 29.252 us 29.375 us]
change: [-0.8433% -0.3270% +0.2656%] (p = 0.25 > 0.05)
No change in performance detected.
Found 1 outliers among 100 measurements (1.00%)
1 (1.00%) high mild
4/26/2020 BRANCH: fix-test-serde REV:
lexicographical topological sort
time: [1.6793 us 1.6823 us 1.6857 us]
Found 9 outliers among 100 measurements (9.00%)
1 (1.00%) low mild
4 (4.00%) high mild
4 (4.00%) high severe
resolve state of 5 events one fork
time: [9.9993 us 10.062 us 10.159 us]
Found 9 outliers among 100 measurements (9.00%)
7 (7.00%) high mild
2 (2.00%) high severe
resolve state of 10 events 3 conflicting
time: [26.004 us 26.092 us 26.195 us]
Found 16 outliers among 100 measurements (16.00%)
11 (11.00%) high mild
5 (5.00%) high severe
6/30/2021 BRANCH: state-closure REV: 174c3e2a72232ad75b3fb14b3551f5f746f4fe84
lexicographical topological sort
time: [1.5496 us 1.5536 us 1.5586 us]
Found 9 outliers among 100 measurements (9.00%)
1 (1.00%) low mild
1 (1.00%) high mild
7 (7.00%) high severe
resolve state of 5 events one fork
time: [10.319 us 10.333 us 10.347 us]
Found 2 outliers among 100 measurements (2.00%)
2 (2.00%) high severe
resolve state of 10 events 3 conflicting
time: [25.770 us 25.805 us 25.839 us]
Found 7 outliers among 100 measurements (7.00%)
5 (5.00%) high mild
2 (2.00%) high severe
7/20/2021 BRANCH stateres-result REV:
This marks the switch to HashSet/Map
lexicographical topological sort
time: [1.8122 us 1.8177 us 1.8233 us]
change: [+15.205% +15.919% +16.502%] (p = 0.00 < 0.05)
Performance has regressed.
Found 7 outliers among 100 measurements (7.00%)
5 (5.00%) high mild
2 (2.00%) high severe
resolve state of 5 events one fork
time: [11.966 us 12.010 us 12.059 us]
change: [+16.089% +16.730% +17.469%] (p = 0.00 < 0.05)
Performance has regressed.
Found 7 outliers among 100 measurements (7.00%)
3 (3.00%) high mild
4 (4.00%) high severe
resolve state of 10 events 3 conflicting
time: [29.092 us 29.201 us 29.311 us]
change: [+12.447% +12.847% +13.280%] (p = 0.00 < 0.05)
Performance has regressed.
Found 9 outliers among 100 measurements (9.00%)
6 (6.00%) high mild
3 (3.00%) high severe

@@ -0,0 +1,256 @@
use std::collections::BTreeMap;
use ruma::{
Int, OwnedUserId, UserId,
events::{TimelineEventType, room::power_levels::RoomPowerLevelsEventContent},
power_levels::{NotificationPowerLevels, default_power_level},
serde::{
deserialize_v1_powerlevel, vec_deserialize_int_powerlevel_values,
vec_deserialize_v1_powerlevel_values,
},
};
use serde::Deserialize;
use serde_json::{Error, from_str as from_json_str};
use super::{Result, RoomVersion};
use crate::error;
#[derive(Deserialize)]
struct IntRoomPowerLevelsEventContent {
#[serde(default = "default_power_level")]
ban: Int,
#[serde(default)]
events: BTreeMap<TimelineEventType, Int>,
#[serde(default)]
events_default: Int,
#[serde(default)]
invite: Int,
#[serde(default = "default_power_level")]
kick: Int,
#[serde(default = "default_power_level")]
redact: Int,
#[serde(default = "default_power_level")]
state_default: Int,
#[serde(default)]
users: BTreeMap<OwnedUserId, Int>,
#[serde(default)]
users_default: Int,
#[serde(default)]
notifications: IntNotificationPowerLevels,
}
impl From<IntRoomPowerLevelsEventContent> for RoomPowerLevelsEventContent {
fn from(int_pl: IntRoomPowerLevelsEventContent) -> Self {
let IntRoomPowerLevelsEventContent {
ban,
events,
events_default,
invite,
kick,
redact,
state_default,
users,
users_default,
notifications,
} = int_pl;
let mut pl = Self::new();
pl.ban = ban;
pl.events = events;
pl.events_default = events_default;
pl.invite = invite;
pl.kick = kick;
pl.redact = redact;
pl.state_default = state_default;
pl.users = users;
pl.users_default = users_default;
pl.notifications = notifications.into();
pl
}
}
#[derive(Deserialize)]
struct IntNotificationPowerLevels {
#[serde(default = "default_power_level")]
room: Int,
}
impl Default for IntNotificationPowerLevels {
fn default() -> Self { Self { room: default_power_level() } }
}
impl From<IntNotificationPowerLevels> for NotificationPowerLevels {
fn from(int_notif: IntNotificationPowerLevels) -> Self {
let mut notif = Self::new();
notif.room = int_notif.room;
notif
}
}
#[inline]
pub(crate) fn deserialize_power_levels(
content: &str,
room_version: &RoomVersion,
) -> Option<RoomPowerLevelsEventContent> {
if room_version.integer_power_levels {
deserialize_integer_power_levels(content)
} else {
deserialize_legacy_power_levels(content)
}
}
fn deserialize_integer_power_levels(content: &str) -> Option<RoomPowerLevelsEventContent> {
match from_json_str::<IntRoomPowerLevelsEventContent>(content) {
| Ok(content) => Some(content.into()),
| Err(_) => {
error!("m.room.power_levels event is not valid with integer values");
None
},
}
}
fn deserialize_legacy_power_levels(content: &str) -> Option<RoomPowerLevelsEventContent> {
match from_json_str(content) {
| Ok(content) => Some(content),
| Err(_) => {
error!(
"m.room.power_levels event is not valid with integer or string integer values"
);
None
},
}
}
#[derive(Deserialize)]
pub(crate) struct PowerLevelsContentFields {
#[serde(default, deserialize_with = "vec_deserialize_v1_powerlevel_values")]
pub(crate) users: Vec<(OwnedUserId, Int)>,
#[serde(default, deserialize_with = "deserialize_v1_powerlevel")]
pub(crate) users_default: Int,
}
impl PowerLevelsContentFields {
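/// Look up a user's explicit power level entry.
///
/// The `users` vec is scanned with a binary search, so this relies on it
/// being sorted by user id; returns `None` if the user has no entry.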
pub(crate) fn get_user_power(&self, user_id: &UserId) -> Option<&Int> {
let comparator = |item: &(OwnedUserId, Int)| {
let item: &UserId = &item.0;
item.cmp(user_id)
};
self.users
.binary_search_by(comparator)
.ok()
.and_then(|idx| self.users.get(idx).map(|item| &item.1))
}
}
#[derive(Deserialize)]
struct IntPowerLevelsContentFields {
#[serde(default, deserialize_with = "vec_deserialize_int_powerlevel_values")]
users: Vec<(OwnedUserId, Int)>,
#[serde(default)]
users_default: Int,
}
impl From<IntPowerLevelsContentFields> for PowerLevelsContentFields {
fn from(pl: IntPowerLevelsContentFields) -> Self {
let IntPowerLevelsContentFields { users, users_default } = pl;
Self { users, users_default }
}
}
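/// Deserialize only the `users` and `users_default` fields of an
/// `m.room.power_levels` event, strictly for integer-enforcing room versions
/// and leniently otherwise.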
#[inline]
pub(crate) fn deserialize_power_levels_content_fields(
content: &str,
room_version: &RoomVersion,
) -> Result<PowerLevelsContentFields, Error> {
if room_version.integer_power_levels {
deserialize_integer_power_levels_content_fields(content)
} else {
deserialize_legacy_power_levels_content_fields(content)
}
}
fn deserialize_integer_power_levels_content_fields(
content: &str,
) -> Result<PowerLevelsContentFields, Error> {
from_json_str::<IntPowerLevelsContentFields>(content).map(Into::into)
}
fn deserialize_legacy_power_levels_content_fields(
content: &str,
) -> Result<PowerLevelsContentFields, Error> {
from_json_str(content)
}
#[derive(Deserialize)]
pub(crate) struct PowerLevelsContentInvite {
#[serde(default, deserialize_with = "deserialize_v1_powerlevel")]
pub(crate) invite: Int,
}
#[derive(Deserialize)]
struct IntPowerLevelsContentInvite {
#[serde(default)]
invite: Int,
}
impl From<IntPowerLevelsContentInvite> for PowerLevelsContentInvite {
fn from(pl: IntPowerLevelsContentInvite) -> Self {
let IntPowerLevelsContentInvite { invite } = pl;
Self { invite }
}
}
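/// Deserialize only the `invite` field of an `m.room.power_levels` event,
/// honouring the room version's integer power level rules.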
pub(crate) fn deserialize_power_levels_content_invite(
content: &str,
room_version: &RoomVersion,
) -> Result<PowerLevelsContentInvite, Error> {
if room_version.integer_power_levels {
from_json_str::<IntPowerLevelsContentInvite>(content).map(Into::into)
} else {
from_json_str(content)
}
}
#[derive(Deserialize)]
pub(crate) struct PowerLevelsContentRedact {
#[serde(default = "default_power_level", deserialize_with = "deserialize_v1_powerlevel")]
pub(crate) redact: Int,
}
#[derive(Deserialize)]
pub(crate) struct IntPowerLevelsContentRedact {
#[serde(default = "default_power_level")]
redact: Int,
}
impl From<IntPowerLevelsContentRedact> for PowerLevelsContentRedact {
fn from(pl: IntPowerLevelsContentRedact) -> Self {
let IntPowerLevelsContentRedact { redact } = pl;
Self { redact }
}
}
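/// Deserialize only the `redact` field of an `m.room.power_levels` event,
/// honouring the room version's integer power level rules.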
pub(crate) fn deserialize_power_levels_content_redact(
content: &str,
room_version: &RoomVersion,
) -> Result<PowerLevelsContentRedact, Error> {
if room_version.integer_power_levels {
from_json_str::<IntPowerLevelsContentRedact>(content).map(Into::into)
} else {
from_json_str(content)
}
}

View file

@@ -0,0 +1,150 @@
use ruma::RoomVersionId;
use super::{Error, Result};
#[derive(Debug)]
#[allow(clippy::exhaustive_enums)]
pub enum RoomDisposition {
/// A room version that has a stable specification.
Stable,
/// A room version that is not yet fully specified.
Unstable,
}
#[derive(Debug)]
#[cfg_attr(not(feature = "unstable-exhaustive-types"), non_exhaustive)]
pub enum EventFormatVersion {
/// $id:server event id format
V1,
/// MSC1659-style $hash event id format: introduced for room v3
V2,
/// MSC1884-style $hash format: introduced for room v4
V3,
}
#[derive(Debug)]
#[cfg_attr(not(feature = "unstable-exhaustive-types"), non_exhaustive)]
pub enum StateResolutionVersion {
/// State resolution for rooms at version 1.
V1,
/// State resolution for rooms at version 2 or later.
V2,
}
#[cfg_attr(not(feature = "unstable-exhaustive-types"), non_exhaustive)]
#[allow(clippy::struct_excessive_bools)]
pub struct RoomVersion {
/// The stability of this room.
pub disposition: RoomDisposition,
/// The format of the EventId.
pub event_format: EventFormatVersion,
/// Which state resolution algorithm is used.
pub state_res: StateResolutionVersion,
// FIXME: not sure what this one means?
pub enforce_key_validity: bool,
/// `m.room.aliases` had special auth rules and redaction rules
/// before room version 6, i.e. before MSC2261/MSC2432.
pub special_case_aliases_auth: bool,
/// Strictly enforce canonical json, do not allow:
/// * Integers outside the range of [-2^53 + 1, 2^53 - 1]
/// * Floats
/// * NaN, Infinity, -Infinity
pub strict_canonicaljson: bool,
/// Verify the `notifications` key while checking `m.room.power_levels`.
///
/// See MSC2209.
pub limit_notifications_power_levels: bool,
/// Extra rules when verifying redaction events.
pub extra_redaction_checks: bool,
/// Allow knocking in event authentication.
///
/// See [room v7 specification](https://spec.matrix.org/latest/rooms/v7/) for more information.
pub allow_knocking: bool,
/// Adds support for the restricted join rule.
///
/// See: [MSC3289](https://github.com/matrix-org/matrix-spec-proposals/pull/3289) for more information.
pub restricted_join_rules: bool,
/// Adds support for the knock_restricted join rule.
///
/// See: [MSC3787](https://github.com/matrix-org/matrix-spec-proposals/pull/3787) for more information.
pub knock_restricted_join_rule: bool,
/// Enforces integer power levels.
///
/// See: [MSC3667](https://github.com/matrix-org/matrix-spec-proposals/pull/3667) for more information.
pub integer_power_levels: bool,
/// Determine the room creator using the `m.room.create` event's `sender`,
/// instead of the event content's `creator` field.
///
/// See: [MSC2175](https://github.com/matrix-org/matrix-spec-proposals/pull/2175) for more information.
pub use_room_create_sender: bool,
}
impl RoomVersion {
pub const V1: Self = Self {
disposition: RoomDisposition::Stable,
event_format: EventFormatVersion::V1,
state_res: StateResolutionVersion::V1,
enforce_key_validity: false,
special_case_aliases_auth: true,
strict_canonicaljson: false,
limit_notifications_power_levels: false,
extra_redaction_checks: true,
allow_knocking: false,
restricted_join_rules: false,
knock_restricted_join_rule: false,
integer_power_levels: false,
use_room_create_sender: false,
};
pub const V10: Self = Self {
knock_restricted_join_rule: true,
integer_power_levels: true,
..Self::V9
};
pub const V11: Self = Self {
use_room_create_sender: true,
..Self::V10
};
pub const V2: Self = Self {
state_res: StateResolutionVersion::V2,
..Self::V1
};
pub const V3: Self = Self {
event_format: EventFormatVersion::V2,
extra_redaction_checks: false,
..Self::V2
};
pub const V4: Self = Self {
event_format: EventFormatVersion::V3,
..Self::V3
};
pub const V5: Self = Self { enforce_key_validity: true, ..Self::V4 };
pub const V6: Self = Self {
special_case_aliases_auth: false,
strict_canonicaljson: true,
limit_notifications_power_levels: true,
..Self::V5
};
pub const V7: Self = Self { allow_knocking: true, ..Self::V6 };
pub const V8: Self = Self { restricted_join_rules: true, ..Self::V7 };
pub const V9: Self = Self::V8;
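/// Map a `RoomVersionId` to its capability set; unknown or unsupported
/// versions yield `Error::Unsupported`.
///
/// A rough usage sketch (illustrative only):
/// ```ignore
/// let version = RoomVersion::new(&RoomVersionId::V10).expect("V10 is supported");
/// assert!(version.integer_power_levels);
/// ```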
pub fn new(version: &RoomVersionId) -> Result<Self> {
Ok(match version {
| RoomVersionId::V1 => Self::V1,
| RoomVersionId::V2 => Self::V2,
| RoomVersionId::V3 => Self::V3,
| RoomVersionId::V4 => Self::V4,
| RoomVersionId::V5 => Self::V5,
| RoomVersionId::V6 => Self::V6,
| RoomVersionId::V7 => Self::V7,
| RoomVersionId::V8 => Self::V8,
| RoomVersionId::V9 => Self::V9,
| RoomVersionId::V10 => Self::V10,
| RoomVersionId::V11 => Self::V11,
| ver => return Err(Error::Unsupported(format!("found version `{ver}`"))),
})
}
}

View file

@@ -0,0 +1,694 @@
use std::{
borrow::Borrow,
collections::{BTreeMap, HashMap, HashSet},
sync::{
Arc,
atomic::{AtomicU64, Ordering::SeqCst},
},
};
use futures::future::ready;
use ruma::{
EventId, MilliSecondsSinceUnixEpoch, OwnedEventId, RoomId, RoomVersionId, ServerSignatures,
UserId, event_id,
events::{
TimelineEventType,
pdu::{EventHash, Pdu, RoomV3Pdu},
room::{
join_rules::{JoinRule, RoomJoinRulesEventContent},
member::{MembershipState, RoomMemberEventContent},
},
},
int, room_id, uint, user_id,
};
use serde_json::{
json,
value::{RawValue as RawJsonValue, to_raw_value as to_raw_json_value},
};
pub(crate) use self::event::PduEvent;
use super::auth_types_for_event;
use crate::{
Result, info,
matrix::{Event, EventTypeExt, StateMap},
};
static SERVER_TIMESTAMP: AtomicU64 = AtomicU64::new(0);
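/// Drive a full state resolution test: build the event graph from the given
/// edges, topologically sort it, resolve the state before each event in
/// order, and finally assert that the state reached at `$END:foo` matches
/// `expected_state_ids`.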
pub(crate) async fn do_check(
events: &[Arc<PduEvent>],
edges: Vec<Vec<OwnedEventId>>,
expected_state_ids: Vec<OwnedEventId>,
) {
// To activate logging use `RUST_LOG=debug cargo t`
let init_events = INITIAL_EVENTS();
let mut store = TestStore(
init_events
.values()
.chain(events)
.map(|ev| (ev.event_id().to_owned(), ev.clone()))
.collect(),
);
// This will be sorted with `lexicographical_topological_sort` for resolution
let mut graph = HashMap::new();
// This is the same as in `resolve`: event_id -> OriginalStateEvent
let mut fake_event_map = HashMap::new();
// Create the DB of events that led up to this point
// TODO: maybe clean up some of these clones; it is just tests, but...
for ev in init_events.values().chain(events) {
graph.insert(ev.event_id().to_owned(), HashSet::new());
fake_event_map.insert(ev.event_id().to_owned(), ev.clone());
}
for pair in INITIAL_EDGES().windows(2) {
if let [a, b] = &pair {
graph
.entry(a.to_owned())
.or_insert_with(HashSet::new)
.insert(b.clone());
}
}
for edge_list in edges {
for pair in edge_list.windows(2) {
if let [a, b] = &pair {
graph
.entry(a.to_owned())
.or_insert_with(HashSet::new)
.insert(b.clone());
}
}
}
// event_id -> PduEvent
let mut event_map: HashMap<OwnedEventId, Arc<PduEvent>> = HashMap::new();
// event_id -> StateMap<OwnedEventId>
let mut state_at_event: HashMap<OwnedEventId, StateMap<OwnedEventId>> = HashMap::new();
// Resolve the current state and add it to the state_at_event map, then
// continue on in "time"
for node in super::lexicographical_topological_sort(&graph, &|_id| async {
Ok((int!(0), MilliSecondsSinceUnixEpoch(uint!(0))))
})
.await
.unwrap()
{
let fake_event = fake_event_map.get(&node).unwrap();
let event_id = fake_event.event_id().to_owned();
let prev_events = graph.get(&node).unwrap();
let state_before: StateMap<OwnedEventId> = if prev_events.is_empty() {
HashMap::new()
} else if prev_events.len() == 1 {
state_at_event
.get(prev_events.iter().next().unwrap())
.unwrap()
.clone()
} else {
let state_sets = prev_events
.iter()
.filter_map(|k| state_at_event.get(k))
.collect::<Vec<_>>();
info!(
"{:#?}",
state_sets
.iter()
.map(|map| map
.iter()
.map(|((ty, key), id)| format!("(({ty}{key:?}), {id})"))
.collect::<Vec<_>>())
.collect::<Vec<_>>()
);
let auth_chain_sets: Vec<_> = state_sets
.iter()
.map(|map| {
store
.auth_event_ids(room_id(), map.values().cloned().collect())
.unwrap()
})
.collect();
let event_map = &event_map;
let fetch = |id: <PduEvent as Event>::Id| ready(event_map.get(&id).cloned());
let exists = |id: <PduEvent as Event>::Id| ready(event_map.get(&id).is_some());
let resolved = super::resolve(
&RoomVersionId::V6,
state_sets,
&auth_chain_sets,
&fetch,
&exists,
1,
)
.await;
match resolved {
| Ok(state) => state,
| Err(e) => panic!("resolution for {node} failed: {e}"),
}
};
let mut state_after = state_before.clone();
let ty = fake_event.event_type();
let key = fake_event.state_key().unwrap();
state_after.insert(ty.with_state_key(key), event_id.to_owned());
let auth_types = auth_types_for_event(
fake_event.event_type(),
fake_event.sender(),
fake_event.state_key(),
fake_event.content(),
)
.unwrap();
let mut auth_events = vec![];
for key in auth_types {
if state_before.contains_key(&key) {
auth_events.push(state_before[&key].clone());
}
}
// TODO: the event is just remade here, adding the auth_events and prev_events;
// `to_pdu_event` was split into `init` and the fn below, which could be better
let e = fake_event;
let ev_id = e.event_id();
let event = to_pdu_event(
e.event_id().as_str(),
e.sender(),
e.event_type().clone(),
e.state_key(),
e.content().to_owned(),
&auth_events,
&prev_events.iter().cloned().collect::<Vec<_>>(),
);
// We have to update our store; an actual user of this lib would
// be giving us state from a DB.
store.0.insert(ev_id.to_owned(), event.clone());
state_at_event.insert(node, state_after);
event_map.insert(event_id.to_owned(), Arc::clone(store.0.get(ev_id).unwrap()));
}
let mut expected_state = StateMap::new();
for node in expected_state_ids {
let ev = event_map.get(&node).unwrap_or_else(|| {
panic!(
"{node} not found in {:?}",
event_map
.keys()
.map(ToString::to_string)
.collect::<Vec<_>>()
)
});
let key = ev.event_type().with_state_key(ev.state_key().unwrap());
expected_state.insert(key, node);
}
let start_state = state_at_event.get(event_id!("$START:foo")).unwrap();
let end_state = state_at_event
.get(event_id!("$END:foo"))
.unwrap()
.iter()
.filter(|(k, v)| {
expected_state.contains_key(k)
|| start_state.get(k) != Some(*v)
// Filter out the dummy message events.
// These act as points in time where there should be a known state to
// test against.
&& **k != ("m.room.message".into(), "dummy".into())
})
.map(|(k, v)| (k.clone(), v.clone()))
.collect::<StateMap<OwnedEventId>>();
assert_eq!(expected_state, end_state);
}
#[allow(clippy::exhaustive_structs)]
pub(crate) struct TestStore<E: Event>(pub(crate) HashMap<OwnedEventId, Arc<E>>);
impl<E: Event> TestStore<E> {
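/// Fetch a single event from the in-memory store, or return a `NotFound`
/// error if it is absent.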
pub(crate) fn get_event(&self, _: &RoomId, event_id: &EventId) -> Result<Arc<E>> {
self.0
.get(event_id)
.cloned()
.ok_or_else(|| super::Error::NotFound(format!("{event_id} not found")))
.map_err(Into::into)
}
/// Returns a Vec of the related auth events to the given `event`.
pub(crate) fn auth_event_ids(
&self,
room_id: &RoomId,
event_ids: Vec<E::Id>,
) -> Result<HashSet<E::Id>> {
let mut result = HashSet::new();
let mut stack = event_ids;
// DFS for auth event chain
while let Some(ev_id) = stack.pop() {
if result.contains(&ev_id) {
continue;
}
result.insert(ev_id.clone());
let event = self.get_event(room_id, ev_id.borrow())?;
stack.extend(event.auth_events().map(ToOwned::to_owned));
}
Ok(result)
}
}
// A StateStore implementation for testing
#[allow(clippy::type_complexity)]
impl TestStore<PduEvent> {
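/// Build a small room in which Bob and Charlie join concurrently after the
/// join rules event, returning (state at Bob's join, state at Charlie's
/// join, the expected resolved state containing both memberships).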
pub(crate) fn set_up(
&mut self,
) -> (StateMap<OwnedEventId>, StateMap<OwnedEventId>, StateMap<OwnedEventId>) {
let create_event = to_pdu_event::<&EventId>(
"CREATE",
alice(),
TimelineEventType::RoomCreate,
Some(""),
to_raw_json_value(&json!({ "creator": alice() })).unwrap(),
&[],
&[],
);
let cre = create_event.event_id().to_owned();
self.0.insert(cre.clone(), Arc::clone(&create_event));
let alice_mem = to_pdu_event(
"IMA",
alice(),
TimelineEventType::RoomMember,
Some(alice().as_str()),
member_content_join(),
&[cre.clone()],
&[cre.clone()],
);
self.0
.insert(alice_mem.event_id().to_owned(), Arc::clone(&alice_mem));
let join_rules = to_pdu_event(
"IJR",
alice(),
TimelineEventType::RoomJoinRules,
Some(""),
to_raw_json_value(&RoomJoinRulesEventContent::new(JoinRule::Public)).unwrap(),
&[cre.clone(), alice_mem.event_id().to_owned()],
&[alice_mem.event_id().to_owned()],
);
self.0
.insert(join_rules.event_id().to_owned(), join_rules.clone());
// Bob and Charlie join at the same time, so there is a fork;
// this will be represented in the state_sets when we resolve
let bob_mem = to_pdu_event(
"IMB",
bob(),
TimelineEventType::RoomMember,
Some(bob().as_str()),
member_content_join(),
&[cre.clone(), join_rules.event_id().to_owned()],
&[join_rules.event_id().to_owned()],
);
self.0
.insert(bob_mem.event_id().to_owned(), bob_mem.clone());
let charlie_mem = to_pdu_event(
"IMC",
charlie(),
TimelineEventType::RoomMember,
Some(charlie().as_str()),
member_content_join(),
&[cre, join_rules.event_id().to_owned()],
&[join_rules.event_id().to_owned()],
);
self.0
.insert(charlie_mem.event_id().to_owned(), charlie_mem.clone());
let state_at_bob = [&create_event, &alice_mem, &join_rules, &bob_mem]
.iter()
.map(|e| {
(e.event_type().with_state_key(e.state_key().unwrap()), e.event_id().to_owned())
})
.collect::<StateMap<_>>();
let state_at_charlie = [&create_event, &alice_mem, &join_rules, &charlie_mem]
.iter()
.map(|e| {
(e.event_type().with_state_key(e.state_key().unwrap()), e.event_id().to_owned())
})
.collect::<StateMap<_>>();
let expected = [&create_event, &alice_mem, &join_rules, &bob_mem, &charlie_mem]
.iter()
.map(|e| {
(e.event_type().with_state_key(e.state_key().unwrap()), e.event_id().to_owned())
})
.collect::<StateMap<_>>();
(state_at_bob, state_at_charlie, expected)
}
}
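/// Expand a short test id like `"CREATE"` into the full `$CREATE:foo` form;
/// ids already containing `$` are returned unchanged.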
pub(crate) fn event_id(id: &str) -> OwnedEventId {
if id.contains('$') {
return id.try_into().unwrap();
}
format!("${id}:foo").try_into().unwrap()
}
pub(crate) fn alice() -> &'static UserId { user_id!("@alice:foo") }
pub(crate) fn bob() -> &'static UserId { user_id!("@bob:foo") }
pub(crate) fn charlie() -> &'static UserId { user_id!("@charlie:foo") }
pub(crate) fn ella() -> &'static UserId { user_id!("@ella:foo") }
pub(crate) fn zara() -> &'static UserId { user_id!("@zara:foo") }
pub(crate) fn room_id() -> &'static RoomId { room_id!("!test:foo") }
pub(crate) fn member_content_ban() -> Box<RawJsonValue> {
to_raw_json_value(&RoomMemberEventContent::new(MembershipState::Ban)).unwrap()
}
pub(crate) fn member_content_join() -> Box<RawJsonValue> {
to_raw_json_value(&RoomMemberEventContent::new(MembershipState::Join)).unwrap()
}
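/// Build a test PDU with no auth or prev events, stamped with a
/// monotonically increasing origin_server_ts.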
pub(crate) fn to_init_pdu_event(
id: &str,
sender: &UserId,
ev_type: TimelineEventType,
state_key: Option<&str>,
content: Box<RawJsonValue>,
) -> Arc<PduEvent> {
let ts = SERVER_TIMESTAMP.fetch_add(1, SeqCst);
let id = if id.contains('$') {
id.to_owned()
} else {
format!("${id}:foo")
};
let state_key = state_key.map(ToOwned::to_owned);
Arc::new(PduEvent {
event_id: id.try_into().unwrap(),
rest: Pdu::RoomV3Pdu(RoomV3Pdu {
room_id: room_id().to_owned(),
sender: sender.to_owned(),
origin_server_ts: MilliSecondsSinceUnixEpoch(ts.try_into().unwrap()),
state_key,
kind: ev_type,
content,
redacts: None,
unsigned: BTreeMap::new(),
auth_events: vec![],
prev_events: vec![],
depth: uint!(0),
hashes: EventHash::new("".to_owned()),
signatures: ServerSignatures::default(),
}),
})
}
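/// Build a test PDU whose auth and prev events are given as short ids and
/// expanded via `event_id`, stamped with a monotonically increasing
/// origin_server_ts.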
pub(crate) fn to_pdu_event<S>(
id: &str,
sender: &UserId,
ev_type: TimelineEventType,
state_key: Option<&str>,
content: Box<RawJsonValue>,
auth_events: &[S],
prev_events: &[S],
) -> Arc<PduEvent>
where
S: AsRef<str>,
{
let ts = SERVER_TIMESTAMP.fetch_add(1, SeqCst);
let id = if id.contains('$') {
id.to_owned()
} else {
format!("${id}:foo")
};
let auth_events = auth_events
.iter()
.map(AsRef::as_ref)
.map(event_id)
.collect::<Vec<_>>();
let prev_events = prev_events
.iter()
.map(AsRef::as_ref)
.map(event_id)
.collect::<Vec<_>>();
let state_key = state_key.map(ToOwned::to_owned);
Arc::new(PduEvent {
event_id: id.try_into().unwrap(),
rest: Pdu::RoomV3Pdu(RoomV3Pdu {
room_id: room_id().to_owned(),
sender: sender.to_owned(),
origin_server_ts: MilliSecondsSinceUnixEpoch(ts.try_into().unwrap()),
state_key,
kind: ev_type,
content,
redacts: None,
unsigned: BTreeMap::new(),
auth_events,
prev_events,
depth: uint!(0),
hashes: EventHash::new("".to_owned()),
signatures: ServerSignatures::default(),
}),
})
}
// all graphs start with these input events
#[allow(non_snake_case)]
pub(crate) fn INITIAL_EVENTS() -> HashMap<OwnedEventId, Arc<PduEvent>> {
vec![
to_pdu_event::<&EventId>(
"CREATE",
alice(),
TimelineEventType::RoomCreate,
Some(""),
to_raw_json_value(&json!({ "creator": alice() })).unwrap(),
&[],
&[],
),
to_pdu_event(
"IMA",
alice(),
TimelineEventType::RoomMember,
Some(alice().as_str()),
member_content_join(),
&["CREATE"],
&["CREATE"],
),
to_pdu_event(
"IPOWER",
alice(),
TimelineEventType::RoomPowerLevels,
Some(""),
to_raw_json_value(&json!({ "users": { alice(): 100 } })).unwrap(),
&["CREATE", "IMA"],
&["IMA"],
),
to_pdu_event(
"IJR",
alice(),
TimelineEventType::RoomJoinRules,
Some(""),
to_raw_json_value(&RoomJoinRulesEventContent::new(JoinRule::Public)).unwrap(),
&["CREATE", "IMA", "IPOWER"],
&["IPOWER"],
),
to_pdu_event(
"IMB",
bob(),
TimelineEventType::RoomMember,
Some(bob().as_str()),
member_content_join(),
&["CREATE", "IJR", "IPOWER"],
&["IJR"],
),
to_pdu_event(
"IMC",
charlie(),
TimelineEventType::RoomMember,
Some(charlie().as_str()),
member_content_join(),
&["CREATE", "IJR", "IPOWER"],
&["IMB"],
),
to_pdu_event::<&EventId>(
"START",
charlie(),
TimelineEventType::RoomMessage,
Some("dummy"),
to_raw_json_value(&json!({})).unwrap(),
&[],
&[],
),
to_pdu_event::<&EventId>(
"END",
charlie(),
TimelineEventType::RoomMessage,
Some("dummy"),
to_raw_json_value(&json!({})).unwrap(),
&[],
&[],
),
]
.into_iter()
.map(|ev| (ev.event_id().to_owned(), ev))
.collect()
}
// the create-room test graphs start with only this input event
#[allow(non_snake_case)]
pub(crate) fn INITIAL_EVENTS_CREATE_ROOM() -> HashMap<OwnedEventId, Arc<PduEvent>> {
vec![to_pdu_event::<&EventId>(
"CREATE",
alice(),
TimelineEventType::RoomCreate,
Some(""),
to_raw_json_value(&json!({ "creator": alice() })).unwrap(),
&[],
&[],
)]
.into_iter()
.map(|ev| (ev.event_id().to_owned(), ev))
.collect()
}
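// the initial edges run newest to oldest: each event points at the one
// before it, ending at CREATE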
#[allow(non_snake_case)]
pub(crate) fn INITIAL_EDGES() -> Vec<OwnedEventId> {
vec!["START", "IMC", "IMB", "IJR", "IPOWER", "IMA", "CREATE"]
.into_iter()
.map(event_id)
.collect::<Vec<_>>()
}
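/// A minimal test `PduEvent` wrapping ruma's `Pdu`, with the `Event` trait
/// implemented over it for use by the tests above.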
pub(crate) mod event {
use ruma::{
MilliSecondsSinceUnixEpoch, OwnedEventId, RoomId, UserId,
events::{TimelineEventType, pdu::Pdu},
};
use serde::{Deserialize, Serialize};
use serde_json::value::RawValue as RawJsonValue;
use crate::Event;
impl Event for PduEvent {
type Id = OwnedEventId;
fn event_id(&self) -> &Self::Id { &self.event_id }
fn room_id(&self) -> &RoomId {
match &self.rest {
| Pdu::RoomV1Pdu(ev) => &ev.room_id,
| Pdu::RoomV3Pdu(ev) => &ev.room_id,
#[allow(unreachable_patterns)]
| _ => unreachable!("new PDU version"),
}
}
fn sender(&self) -> &UserId {
match &self.rest {
| Pdu::RoomV1Pdu(ev) => &ev.sender,
| Pdu::RoomV3Pdu(ev) => &ev.sender,
#[allow(unreachable_patterns)]
| _ => unreachable!("new PDU version"),
}
}
fn event_type(&self) -> &TimelineEventType {
match &self.rest {
| Pdu::RoomV1Pdu(ev) => &ev.kind,
| Pdu::RoomV3Pdu(ev) => &ev.kind,
#[allow(unreachable_patterns)]
| _ => unreachable!("new PDU version"),
}
}
fn content(&self) -> &RawJsonValue {
match &self.rest {
| Pdu::RoomV1Pdu(ev) => &ev.content,
| Pdu::RoomV3Pdu(ev) => &ev.content,
#[allow(unreachable_patterns)]
| _ => unreachable!("new PDU version"),
}
}
fn origin_server_ts(&self) -> MilliSecondsSinceUnixEpoch {
match &self.rest {
| Pdu::RoomV1Pdu(ev) => ev.origin_server_ts,
| Pdu::RoomV3Pdu(ev) => ev.origin_server_ts,
#[allow(unreachable_patterns)]
| _ => unreachable!("new PDU version"),
}
}
fn state_key(&self) -> Option<&str> {
match &self.rest {
| Pdu::RoomV1Pdu(ev) => ev.state_key.as_deref(),
| Pdu::RoomV3Pdu(ev) => ev.state_key.as_deref(),
#[allow(unreachable_patterns)]
| _ => unreachable!("new PDU version"),
}
}
#[allow(refining_impl_trait)]
fn prev_events(&self) -> Box<dyn DoubleEndedIterator<Item = &Self::Id> + Send + '_> {
match &self.rest {
| Pdu::RoomV1Pdu(ev) => Box::new(ev.prev_events.iter().map(|(id, _)| id)),
| Pdu::RoomV3Pdu(ev) => Box::new(ev.prev_events.iter()),
#[allow(unreachable_patterns)]
| _ => unreachable!("new PDU version"),
}
}
#[allow(refining_impl_trait)]
fn auth_events(&self) -> Box<dyn DoubleEndedIterator<Item = &Self::Id> + Send + '_> {
match &self.rest {
| Pdu::RoomV1Pdu(ev) => Box::new(ev.auth_events.iter().map(|(id, _)| id)),
| Pdu::RoomV3Pdu(ev) => Box::new(ev.auth_events.iter()),
#[allow(unreachable_patterns)]
| _ => unreachable!("new PDU version"),
}
}
fn redacts(&self) -> Option<&Self::Id> {
match &self.rest {
| Pdu::RoomV1Pdu(ev) => ev.redacts.as_ref(),
| Pdu::RoomV3Pdu(ev) => ev.redacts.as_ref(),
#[allow(unreachable_patterns)]
| _ => unreachable!("new PDU version"),
}
}
}
#[derive(Clone, Debug, Deserialize, Serialize)]
#[allow(clippy::exhaustive_structs)]
pub(crate) struct PduEvent {
pub(crate) event_id: OwnedEventId,
#[serde(flatten)]
pub(crate) rest: Pdu,
}
}