Database Refactor

combine service/users data with mod unit

split sliding sync related out of service/users

instrument database entry points

remove increment helpers from database interface

de-wrap all database get() calls

de-wrap all database insert() calls

de-wrap all database remove() calls

refactor database interface for async streaming

add query key serializer for database

implement Debug for result handle

add query deserializer for database

add deserialization trait for option handle

start a stream utils suite

de-wrap/asyncify/type-query count_one_time_keys()

de-wrap/asyncify users count

add admin query users command suite

de-wrap/asyncify users exists

de-wrap/partially asyncify user filter related

asyncify/de-wrap users device/keys related

asyncify/de-wrap user auth/misc related

asyncify/de-wrap users blurhash

asyncify/de-wrap account_data get; merge Data into Service

partial asyncify/de-wrap uiaa; merge Data into Service

partially asyncify/de-wrap transaction_ids get; merge Data into Service

partially asyncify/de-wrap key_backups; merge Data into Service

asyncify/de-wrap pusher service getters; merge Data into Service

asyncify/de-wrap rooms alias getters/some iterators

asyncify/de-wrap rooms directory getters/iterator

partially asyncify/de-wrap rooms lazy-loading

partially asyncify/de-wrap rooms metadata

asyncify/de-wrap rooms outlier

asyncify/de-wrap rooms pdu_metadata

de-wrap/partially asyncify rooms read receipt

de-wrap rooms search service

de-wrap/partially asyncify rooms user service

partial de-wrap rooms state_compressor

de-wrap rooms state_cache

de-wrap room state et al

de-wrap rooms timeline service

additional users device/keys related

de-wrap/asyncify sender

asyncify services

refactor database to TryFuture/TryStream

refactor services for TryFuture/TryStream

asyncify api handlers

additional asyncification for admin module

abstract stream related; support reverse streams

additional stream conversions

asyncify state-res related

Signed-off-by: Jason Volk <jason@zemos.net>
This commit is contained in:
Jason Volk 2024-08-08 17:18:30 +00:00 committed by strawberry
parent 6001014078
commit 946ca364e0
203 changed files with 12202 additions and 10709 deletions

View file

@ -67,6 +67,7 @@ ctor.workspace = true
cyborgtime.workspace = true
either.workspace = true
figment.workspace = true
futures.workspace = true
http-body-util.workspace = true
http.workspace = true
image.workspace = true

View file

@ -86,7 +86,7 @@ pub enum Error {
#[error("There was a problem with the '{0}' directive in your configuration: {1}")]
Config(&'static str, Cow<'static, str>),
#[error("{0}")]
Conflict(&'static str), // This is only needed for when a room alias already exists
Conflict(Cow<'static, str>), // This is only needed for when a room alias already exists
#[error(transparent)]
ContentDisposition(#[from] ruma::http_headers::ContentDispositionParseError),
#[error("{0}")]
@ -107,6 +107,8 @@ pub enum Error {
Request(ruma::api::client::error::ErrorKind, Cow<'static, str>, http::StatusCode),
#[error(transparent)]
Ruma(#[from] ruma::api::client::error::Error),
#[error(transparent)]
StateRes(#[from] ruma::state_res::Error),
#[error("uiaa")]
Uiaa(ruma::api::client::uiaa::UiaaInfo),

View file

@ -3,8 +3,6 @@ mod count;
use std::{cmp::Ordering, collections::BTreeMap, sync::Arc};
pub use builder::PduBuilder;
pub use count::PduCount;
use ruma::{
canonical_json::redact_content_in_place,
events::{
@ -23,7 +21,8 @@ use serde_json::{
value::{to_raw_value, RawValue as RawJsonValue},
};
use crate::{err, warn, Error};
pub use self::{builder::PduBuilder, count::PduCount};
use crate::{err, warn, Error, Result};
#[derive(Deserialize)]
struct ExtractRedactedBecause {
@ -65,11 +64,12 @@ pub struct PduEvent {
impl PduEvent {
#[tracing::instrument(skip(self), level = "debug")]
pub fn redact(&mut self, room_version_id: RoomVersionId, reason: &Self) -> crate::Result<()> {
pub fn redact(&mut self, room_version_id: RoomVersionId, reason: &Self) -> Result<()> {
self.unsigned = None;
let mut content = serde_json::from_str(self.content.get())
.map_err(|_| Error::bad_database("PDU in db has invalid content."))?;
redact_content_in_place(&mut content, &room_version_id, self.kind.to_string())
.map_err(|e| Error::Redaction(self.sender.server_name().to_owned(), e))?;
@ -98,31 +98,38 @@ impl PduEvent {
unsigned.redacted_because.is_some()
}
pub fn remove_transaction_id(&mut self) -> crate::Result<()> {
if let Some(unsigned) = &self.unsigned {
let mut unsigned: BTreeMap<String, Box<RawJsonValue>> = serde_json::from_str(unsigned.get())
.map_err(|_| Error::bad_database("Invalid unsigned in pdu event"))?;
unsigned.remove("transaction_id");
self.unsigned = Some(to_raw_value(&unsigned).expect("unsigned is valid"));
}
pub fn remove_transaction_id(&mut self) -> Result<()> {
let Some(unsigned) = &self.unsigned else {
return Ok(());
};
let mut unsigned: BTreeMap<String, Box<RawJsonValue>> =
serde_json::from_str(unsigned.get()).map_err(|e| err!(Database("Invalid unsigned in pdu event: {e}")))?;
unsigned.remove("transaction_id");
self.unsigned = to_raw_value(&unsigned)
.map(Some)
.expect("unsigned is valid");
Ok(())
}
pub fn add_age(&mut self) -> crate::Result<()> {
pub fn add_age(&mut self) -> Result<()> {
let mut unsigned: BTreeMap<String, Box<RawJsonValue>> = self
.unsigned
.as_ref()
.map_or_else(|| Ok(BTreeMap::new()), |u| serde_json::from_str(u.get()))
.map_err(|_| Error::bad_database("Invalid unsigned in pdu event"))?;
.map_err(|e| err!(Database("Invalid unsigned in pdu event: {e}")))?;
// deliberately allowing for the possibility of negative age
let now: i128 = MilliSecondsSinceUnixEpoch::now().get().into();
let then: i128 = self.origin_server_ts.into();
let this_age = now.saturating_sub(then);
unsigned.insert("age".to_owned(), to_raw_value(&this_age).unwrap());
self.unsigned = Some(to_raw_value(&unsigned).expect("unsigned is valid"));
unsigned.insert("age".to_owned(), to_raw_value(&this_age).expect("age is valid"));
self.unsigned = to_raw_value(&unsigned)
.map(Some)
.expect("unsigned is valid");
Ok(())
}
@ -369,9 +376,9 @@ impl state_res::Event for PduEvent {
fn state_key(&self) -> Option<&str> { self.state_key.as_deref() }
fn prev_events(&self) -> Box<dyn DoubleEndedIterator<Item = &Self::Id> + '_> { Box::new(self.prev_events.iter()) }
fn prev_events(&self) -> impl DoubleEndedIterator<Item = &Self::Id> + Send + '_ { self.prev_events.iter() }
fn auth_events(&self) -> Box<dyn DoubleEndedIterator<Item = &Self::Id> + '_> { Box::new(self.auth_events.iter()) }
fn auth_events(&self) -> impl DoubleEndedIterator<Item = &Self::Id> + Send + '_ { self.auth_events.iter() }
fn redacts(&self) -> Option<&Self::Id> { self.redacts.as_ref() }
}
@ -395,7 +402,7 @@ impl Ord for PduEvent {
/// CanonicalJsonValue>`.
pub fn gen_event_id_canonical_json(
pdu: &RawJsonValue, room_version_id: &RoomVersionId,
) -> crate::Result<(OwnedEventId, CanonicalJsonObject)> {
) -> Result<(OwnedEventId, CanonicalJsonObject)> {
let value: CanonicalJsonObject = serde_json::from_str(pdu.get())
.map_err(|e| err!(BadServerResponse(warn!("Error parsing incoming event: {e:?}"))))?;

View file

@ -1,18 +1,14 @@
use std::fmt;
use std::fmt::Debug;
use tracing::Level;
use super::{DebugInspect, Result};
use crate::error;
pub trait LogDebugErr<T, E>
where
E: fmt::Debug,
{
pub trait LogDebugErr<T, E: Debug> {
#[must_use]
fn err_debug_log(self, level: Level) -> Self;
#[inline]
#[must_use]
fn log_debug_err(self) -> Self
where
@ -22,15 +18,9 @@ where
}
}
impl<T, E> LogDebugErr<T, E> for Result<T, E>
where
E: fmt::Debug,
{
impl<T, E: Debug> LogDebugErr<T, E> for Result<T, E> {
#[inline]
fn err_debug_log(self, level: Level) -> Self
where
Self: Sized,
{
fn err_debug_log(self, level: Level) -> Self {
self.debug_inspect_err(|error| error::inspect_debug_log_level(&error, level))
}
}

View file

@ -1,18 +1,14 @@
use std::fmt;
use std::fmt::Display;
use tracing::Level;
use super::Result;
use crate::error;
pub trait LogErr<T, E>
where
E: fmt::Display,
{
pub trait LogErr<T, E: Display> {
#[must_use]
fn err_log(self, level: Level) -> Self;
#[inline]
#[must_use]
fn log_err(self) -> Self
where
@ -22,15 +18,7 @@ where
}
}
impl<T, E> LogErr<T, E> for Result<T, E>
where
E: fmt::Display,
{
impl<T, E: Display> LogErr<T, E> for Result<T, E> {
#[inline]
fn err_log(self, level: Level) -> Self
where
Self: Sized,
{
self.inspect_err(|error| error::inspect_log_level(&error, level))
}
fn err_log(self, level: Level) -> Self { self.inspect_err(|error| error::inspect_log_level(&error, level)) }
}

View file

@ -1,25 +0,0 @@
use std::cmp::Ordering;
#[allow(clippy::impl_trait_in_params)]
/// Intersect iterators of byte-vectors.
///
/// Returns `None` when no iterators are supplied; otherwise yields each item
/// of the first iterator that is found in every remaining iterator. Inputs
/// are assumed to be ordered consistently with `check_order`.
pub fn common_elements(
	mut iterators: impl Iterator<Item = impl Iterator<Item = Vec<u8>>>, check_order: impl Fn(&[u8], &[u8]) -> Ordering,
) -> Option<impl Iterator<Item = Vec<u8>>> {
	// The first iterator drives the output; all others are walked forward
	// lazily to confirm membership of each candidate.
	let first_iterator = iterators.next()?;
	let mut other_iterators: Vec<_> = iterators.map(Iterator::peekable).collect();

	Some(first_iterator.filter(move |target| {
		other_iterators.iter_mut().all(|candidates| loop {
			// Exhausted input: the target cannot be common to all sets.
			let Some(element) = candidates.peek() else { break false };
			match check_order(element, target) {
				// Still behind the target: advance this input and retry.
				Ordering::Less => {
					candidates.next();
				},
				// Target present in this input.
				Ordering::Equal => break true,
				// Walked past where the target would appear: not present.
				Ordering::Greater => break false,
			}
		})
	}))
}

View file

@ -1,4 +1,3 @@
pub mod algorithm;
pub mod bytes;
pub mod content_disposition;
pub mod debug;
@ -9,25 +8,30 @@ pub mod json;
pub mod math;
pub mod mutex_map;
pub mod rand;
pub mod set;
pub mod stream;
pub mod string;
pub mod sys;
mod tests;
pub mod time;
pub use ::conduit_macros::implement;
pub use ::ctor::{ctor, dtor};
pub use algorithm::common_elements;
pub use bytes::{increment, u64_from_bytes, u64_from_u8, u64_from_u8x8};
pub use conduit_macros::implement;
pub use debug::slice_truncated as debug_slice_truncated;
pub use hash::calculate_hash;
pub use html::Escape as HtmlEscape;
pub use json::{deserialize_from_str, to_canonical_object};
pub use math::clamp;
pub use mutex_map::{Guard as MutexMapGuard, MutexMap};
pub use rand::string as random_string;
pub use string::{str_from_bytes, string_from_bytes};
pub use sys::available_parallelism;
pub use time::now_millis as millis_since_unix_epoch;
pub use self::{
bytes::{increment, u64_from_bytes, u64_from_u8, u64_from_u8x8},
debug::slice_truncated as debug_slice_truncated,
hash::calculate_hash,
html::Escape as HtmlEscape,
json::{deserialize_from_str, to_canonical_object},
math::clamp,
mutex_map::{Guard as MutexMapGuard, MutexMap},
rand::string as random_string,
stream::{IterStream, ReadyExt, TryReadyExt},
string::{str_from_bytes, string_from_bytes},
sys::available_parallelism,
time::now_millis as millis_since_unix_epoch,
};
#[inline]
pub fn exchange<T>(state: &mut T, source: T) -> T { std::mem::replace(state, source) }

47
src/core/utils/set.rs Normal file
View file

@ -0,0 +1,47 @@
use std::cmp::{Eq, Ord};
use crate::{is_equal_to, is_less_than};
/// Intersection of sets
///
/// Outputs the set of elements common to all input sets. Inputs do not have to
/// be sorted. If inputs are sorted a more optimized function is available in
/// this suite and should be used.
pub fn intersection<Item, Iter, Iters>(mut input: Iters) -> impl Iterator<Item = Item> + Send
where
	Iters: Iterator<Item = Iter> + Clone + Send,
	Iter: Iterator<Item = Item> + Send,
	Item: Eq + Send,
{
	// The first set drives the output; each candidate survives only when a
	// linear scan of every remaining set finds an equal element.
	input.next().into_iter().flat_map(move |head| {
		let rest = input.clone();
		head.filter(move |candidate| {
			rest.clone()
				.all(|mut other| other.any(|item| item == *candidate))
		})
	})
}
/// Intersection of sets
///
/// Outputs the set of elements common to all input sets. Inputs must be sorted.
pub fn intersection_sorted<Item, Iter, Iters>(mut input: Iters) -> impl Iterator<Item = Item> + Send
where
	Iters: Iterator<Item = Iter> + Clone + Send,
	Iter: Iterator<Item = Item> + Send,
	Item: Eq + Ord + Send,
{
	// Because inputs are sorted, each remaining set only ever moves forward:
	// skip everything below the candidate, then the next element either
	// matches or proves the candidate absent.
	input.next().into_iter().flat_map(move |head| {
		let mut rest: Vec<_> = input.clone().collect();
		head.filter(move |candidate| {
			rest.iter_mut().all(|other| {
				other
					.by_ref()
					.skip_while(|item| item < candidate)
					.peekable()
					.peek()
					.is_some_and(|item| item == candidate)
			})
		})
	})
}

View file

@ -0,0 +1,20 @@
use std::clone::Clone;
use futures::{stream::Map, Stream, StreamExt};
/// Stream counterpart of [`Iterator::cloned`]: turns a stream of references
/// into a stream of owned values by cloning each item.
pub trait Cloned<'a, T, S>
where
S: Stream<Item = &'a T>,
T: Clone + 'a,
{
/// Map a stream of `&T` into a stream of owned `T` by cloning each item.
fn cloned(self) -> Map<S, fn(&T) -> T>;
}
impl<'a, T, S> Cloned<'a, T, S> for S
where
S: Stream<Item = &'a T>,
T: Clone + 'a,
{
#[inline]
// Clone::clone coerces to the plain fn-pointer named in the return type,
// keeping the returned Map nameable without boxing a closure.
fn cloned(self) -> Map<S, fn(&T) -> T> { self.map(Clone::clone) }
}

View file

@ -0,0 +1,17 @@
use futures::{Stream, StreamExt, TryStream};
use crate::Result;
/// Unwraps every `Result` item of a `TryStream`, panicking on the first `Err`.
/// Use only where an error genuinely indicates a bug, not a runtime failure.
pub trait TryExpect<'a, Item> {
/// Convert a stream of `Result<Item>` into a stream of `Item`,
/// panicking with "stream expectation failure" on any `Err`.
fn expect_ok(self) -> impl Stream<Item = Item> + Send + 'a;
}
impl<'a, T, Item> TryExpect<'a, Item> for T
where
T: Stream<Item = Result<Item>> + TryStream + Send + 'a,
{
#[inline]
// `self: T` spells out the receiver type; equivalent to a plain `self` here.
fn expect_ok(self: T) -> impl Stream<Item = Item> + Send + 'a {
self.map(|res| res.expect("stream expectation failure"))
}
}

View file

@ -0,0 +1,21 @@
use futures::{future::ready, Stream, StreamExt, TryStream};
use crate::{Error, Result};
/// Splits a `TryStream` into just its successes or just its errors,
/// silently discarding the other side.
pub trait TryIgnore<'a, Item> {
/// Yield only the `Ok` values; `Err` items are dropped.
fn ignore_err(self) -> impl Stream<Item = Item> + Send + 'a;
/// Yield only the `Err` values; `Ok` items are dropped.
fn ignore_ok(self) -> impl Stream<Item = Error> + Send + 'a;
}
impl<'a, T, Item> TryIgnore<'a, Item> for T
where
T: Stream<Item = Result<Item>> + TryStream + Send + 'a,
Item: Send + 'a,
{
#[inline]
fn ignore_err(self: T) -> impl Stream<Item = Item> + Send + 'a { self.filter_map(|res| ready(res.ok())) }
#[inline]
fn ignore_ok(self: T) -> impl Stream<Item = Error> + Send + 'a { self.filter_map(|res| ready(res.err())) }
}

View file

@ -0,0 +1,27 @@
use futures::{
stream,
stream::{Stream, TryStream},
StreamExt,
};
/// Adapts any `IntoIterator` into a futures `Stream` (or `TryStream`),
/// letting synchronous collections feed stream-based interfaces.
pub trait IterStream<I: IntoIterator + Send> {
/// Convert an Iterator into a Stream
fn stream(self) -> impl Stream<Item = <I as IntoIterator>::Item> + Send;
/// Convert an Iterator into a TryStream
fn try_stream(self) -> impl TryStream<Ok = <I as IntoIterator>::Item, Error = crate::Error> + Send;
}
impl<I> IterStream<I> for I
where
I: IntoIterator + Send,
<I as IntoIterator>::IntoIter: Send,
{
#[inline]
fn stream(self) -> impl Stream<Item = <I as IntoIterator>::Item> + Send { stream::iter(self) }
#[inline]
// Every item is wrapped in Ok; the Error type exists only to satisfy the
// TryStream interface and is never produced here.
fn try_stream(self) -> impl TryStream<Ok = <I as IntoIterator>::Item, Error = crate::Error> + Send {
self.stream().map(Ok)
}
}

View file

@ -0,0 +1,13 @@
//! Stream utility suite: submodule declarations and flat re-exports.
mod cloned;
mod expect;
mod ignore;
mod iter_stream;
mod ready;
mod try_ready;

pub use self::{
	cloned::Cloned, expect::TryExpect, ignore::TryIgnore, iter_stream::IterStream, ready::ReadyExt,
	try_ready::TryReadyExt,
};

View file

@ -0,0 +1,109 @@
//! Synchronous combinator extensions to futures::Stream
use futures::{
future::{ready, Ready},
stream::{Any, Filter, FilterMap, Fold, ForEach, SkipWhile, Stream, StreamExt, TakeWhile},
};
/// Synchronous combinators to augment futures::StreamExt. Most Stream
/// combinators take asynchronous arguments, but often only simple predicates
/// are required to steer a Stream like an Iterator. This suite provides a
/// convenience to reduce boilerplate by de-cluttering non-async predicates.
///
/// This interface is not necessarily complete; feel free to add as-needed.
pub trait ReadyExt<Item, S>
where
S: Stream<Item = Item> + Send + ?Sized,
Self: Stream + Send + Sized,
{
/// Synchronous counterpart of `StreamExt::any`.
fn ready_any<F>(self, f: F) -> Any<Self, Ready<bool>, impl FnMut(S::Item) -> Ready<bool>>
where
F: Fn(S::Item) -> bool;
/// Synchronous counterpart of `StreamExt::filter`.
fn ready_filter<'a, F>(self, f: F) -> Filter<Self, Ready<bool>, impl FnMut(&S::Item) -> Ready<bool> + 'a>
where
F: Fn(&S::Item) -> bool + 'a;
/// Synchronous counterpart of `StreamExt::filter_map`.
fn ready_filter_map<F, U>(self, f: F) -> FilterMap<Self, Ready<Option<U>>, impl FnMut(S::Item) -> Ready<Option<U>>>
where
F: Fn(S::Item) -> Option<U>;
/// Synchronous counterpart of `StreamExt::fold`.
fn ready_fold<T, F>(self, init: T, f: F) -> Fold<Self, Ready<T>, T, impl FnMut(T, S::Item) -> Ready<T>>
where
F: Fn(T, S::Item) -> T;
/// Synchronous counterpart of `StreamExt::for_each`.
fn ready_for_each<F>(self, f: F) -> ForEach<Self, Ready<()>, impl FnMut(S::Item) -> Ready<()>>
where
F: FnMut(S::Item);
/// Synchronous counterpart of `StreamExt::take_while`.
fn ready_take_while<'a, F>(self, f: F) -> TakeWhile<Self, Ready<bool>, impl FnMut(&S::Item) -> Ready<bool> + 'a>
where
F: Fn(&S::Item) -> bool + 'a;
/// Synchronous counterpart of `StreamExt::skip_while`.
fn ready_skip_while<'a, F>(self, f: F) -> SkipWhile<Self, Ready<bool>, impl FnMut(&S::Item) -> Ready<bool> + 'a>
where
F: Fn(&S::Item) -> bool + 'a;
}
// Each adapter below wraps the synchronous closure's result in
// future::ready(), so the underlying async combinator resolves it on the
// first poll with no actual suspension.
impl<Item, S> ReadyExt<Item, S> for S
where
S: Stream<Item = Item> + Send + ?Sized,
Self: Stream + Send + Sized,
{
#[inline]
fn ready_any<F>(self, f: F) -> Any<Self, Ready<bool>, impl FnMut(S::Item) -> Ready<bool>>
where
F: Fn(S::Item) -> bool,
{
self.any(move |t| ready(f(t)))
}
#[inline]
fn ready_filter<'a, F>(self, f: F) -> Filter<Self, Ready<bool>, impl FnMut(&S::Item) -> Ready<bool> + 'a>
where
F: Fn(&S::Item) -> bool + 'a,
{
self.filter(move |t| ready(f(t)))
}
#[inline]
fn ready_filter_map<F, U>(self, f: F) -> FilterMap<Self, Ready<Option<U>>, impl FnMut(S::Item) -> Ready<Option<U>>>
where
F: Fn(S::Item) -> Option<U>,
{
self.filter_map(move |t| ready(f(t)))
}
#[inline]
fn ready_fold<T, F>(self, init: T, f: F) -> Fold<Self, Ready<T>, T, impl FnMut(T, S::Item) -> Ready<T>>
where
F: Fn(T, S::Item) -> T,
{
self.fold(init, move |a, t| ready(f(a, t)))
}
#[inline]
// ready(f(t)) wraps the closure's unit return in Ready<()>; clippy flags
// passing a unit expression as an argument, which is intended here.
#[allow(clippy::unit_arg)]
fn ready_for_each<F>(self, mut f: F) -> ForEach<Self, Ready<()>, impl FnMut(S::Item) -> Ready<()>>
where
F: FnMut(S::Item),
{
self.for_each(move |t| ready(f(t)))
}
#[inline]
fn ready_take_while<'a, F>(self, f: F) -> TakeWhile<Self, Ready<bool>, impl FnMut(&S::Item) -> Ready<bool> + 'a>
where
F: Fn(&S::Item) -> bool + 'a,
{
self.take_while(move |t| ready(f(t)))
}
#[inline]
fn ready_skip_while<'a, F>(self, f: F) -> SkipWhile<Self, Ready<bool>, impl FnMut(&S::Item) -> Ready<bool> + 'a>
where
F: Fn(&S::Item) -> bool + 'a,
{
self.skip_while(move |t| ready(f(t)))
}
}

View file

@ -0,0 +1,35 @@
//! Synchronous combinator extensions to futures::TryStream
use futures::{
future::{ready, Ready},
stream::{AndThen, TryStream, TryStreamExt},
};
use crate::Result;
/// Synchronous combinators to augment futures::TryStreamExt.
///
/// This interface is not necessarily complete; feel free to add as-needed.
pub trait TryReadyExt<T, E, S>
where
S: TryStream<Ok = T, Error = E, Item = Result<T, E>> + Send + ?Sized,
Self: TryStream + Send + Sized,
{
/// Synchronous counterpart of `TryStreamExt::and_then`: applies a plain
/// fallible function to each `Ok` value; `Err` items pass through.
fn ready_and_then<U, F>(self, f: F) -> AndThen<Self, Ready<Result<U, E>>, impl FnMut(S::Ok) -> Ready<Result<U, E>>>
where
F: Fn(S::Ok) -> Result<U, E>;
}
impl<T, E, S> TryReadyExt<T, E, S> for S
where
S: TryStream<Ok = T, Error = E, Item = Result<T, E>> + Send + ?Sized,
Self: TryStream + Send + Sized,
{
#[inline]
// ready() lifts the synchronous result into a future resolved on first
// poll, satisfying and_then's async interface without suspension.
fn ready_and_then<U, F>(self, f: F) -> AndThen<Self, Ready<Result<U, E>>, impl FnMut(S::Ok) -> Ready<Result<U, E>>>
where
F: Fn(S::Ok) -> Result<U, E>,
{
self.and_then(move |t| ready(f(t)))
}
}

View file

@ -107,3 +107,133 @@ async fn mutex_map_contend() {
tokio::try_join!(join_b, join_a).expect("joined");
assert!(map.is_empty(), "Must be empty");
}
#[test]
#[allow(clippy::iter_on_single_items, clippy::many_single_char_names)]
// intersection() must yield nothing when any input is empty or when the
// inputs are disjoint, regardless of argument order.
fn set_intersection_none() {
use utils::set::intersection;
// both inputs empty
let a: [&str; 0] = [];
let b: [&str; 0] = [];
let i = [a.iter(), b.iter()];
let r = intersection(i.into_iter());
assert_eq!(r.count(), 0);
// one empty input, in either position
let a: [&str; 0] = [];
let b = ["abc", "def"];
let i = [a.iter(), b.iter()];
let r = intersection(i.into_iter());
assert_eq!(r.count(), 0);
let i = [b.iter(), a.iter()];
let r = intersection(i.into_iter());
assert_eq!(r.count(), 0);
// a single empty input
let i = [a.iter()];
let r = intersection(i.into_iter());
assert_eq!(r.count(), 0);
// fully disjoint sets
let a = ["foo", "bar", "baz"];
let b = ["def", "hij", "klm", "nop"];
let i = [a.iter(), b.iter()];
let r = intersection(i.into_iter());
assert_eq!(r.count(), 0);
}
#[test]
#[allow(clippy::iter_on_single_items, clippy::many_single_char_names)]
// When every input contains the same elements, intersection() yields them
// all, in the order of the first input.
fn set_intersection_all() {
use utils::set::intersection;
// identical single-element sets
let a = ["foo"];
let b = ["foo"];
let i = [a.iter(), b.iter()];
let r = intersection(i.into_iter());
assert!(r.eq(["foo"].iter()));
// same elements, different order: output follows the first input
let a = ["foo", "bar"];
let b = ["bar", "foo"];
let i = [a.iter(), b.iter()];
let r = intersection(i.into_iter());
assert!(r.eq(["foo", "bar"].iter()));
// a single input intersects to itself
let i = [b.iter()];
let r = intersection(i.into_iter());
assert!(r.eq(["bar", "foo"].iter()));
// three permuted inputs
let a = ["foo", "bar", "baz"];
let b = ["baz", "foo", "bar"];
let c = ["bar", "baz", "foo"];
let i = [a.iter(), b.iter(), c.iter()];
let r = intersection(i.into_iter());
assert!(r.eq(["foo", "bar", "baz"].iter()));
}
#[test]
#[allow(clippy::iter_on_single_items, clippy::many_single_char_names)]
// Partial overlap: only the common elements survive, ordered by the first
// input; argument order must not affect the result set.
fn set_intersection_some() {
use utils::set::intersection;
let a = ["foo"];
let b = ["bar", "foo"];
let i = [a.iter(), b.iter()];
let r = intersection(i.into_iter());
assert!(r.eq(["foo"].iter()));
// swapped order yields the same intersection
let i = [b.iter(), a.iter()];
let r = intersection(i.into_iter());
assert!(r.eq(["foo"].iter()));
// three unsorted inputs sharing exactly "foo" and "abc"
let a = ["abcdef", "foo", "hijkl", "abc"];
let b = ["hij", "bar", "baz", "abc", "foo"];
let c = ["abc", "xyz", "foo", "ghi"];
let i = [a.iter(), b.iter(), c.iter()];
let r = intersection(i.into_iter());
assert!(r.eq(["foo", "abc"].iter()));
}
#[test]
#[allow(clippy::iter_on_single_items, clippy::many_single_char_names)]
// intersection_sorted() requires pre-sorted inputs; partial overlap keeps
// only the common elements.
fn set_intersection_sorted_some() {
use utils::set::intersection_sorted;
let a = ["bar"];
let b = ["bar", "foo"];
let i = [a.iter(), b.iter()];
let r = intersection_sorted(i.into_iter());
assert!(r.eq(["bar"].iter()));
// swapped order yields the same intersection
let i = [b.iter(), a.iter()];
let r = intersection_sorted(i.into_iter());
assert!(r.eq(["bar"].iter()));
// three sorted inputs sharing exactly "ccc" and "eee"
let a = ["aaa", "ccc", "eee", "ggg"];
let b = ["aaa", "bbb", "ccc", "ddd", "eee"];
let c = ["bbb", "ccc", "eee", "fff"];
let i = [a.iter(), b.iter(), c.iter()];
let r = intersection_sorted(i.into_iter());
assert!(r.eq(["ccc", "eee"].iter()));
}
#[test]
#[allow(clippy::iter_on_single_items, clippy::many_single_char_names)]
// When all sorted inputs are identical, intersection_sorted() yields every
// element in order.
fn set_intersection_sorted_all() {
use utils::set::intersection_sorted;
let a = ["foo"];
let b = ["foo"];
let i = [a.iter(), b.iter()];
let r = intersection_sorted(i.into_iter());
assert!(r.eq(["foo"].iter()));
let a = ["bar", "foo"];
let b = ["bar", "foo"];
let i = [a.iter(), b.iter()];
let r = intersection_sorted(i.into_iter());
assert!(r.eq(["bar", "foo"].iter()));
// a single input intersects to itself
let i = [b.iter()];
let r = intersection_sorted(i.into_iter());
assert!(r.eq(["bar", "foo"].iter()));
// three identical sorted inputs
let a = ["bar", "baz", "foo"];
let b = ["bar", "baz", "foo"];
let c = ["bar", "baz", "foo"];
let i = [a.iter(), b.iter(), c.iter()];
let r = intersection_sorted(i.into_iter());
assert!(r.eq(["bar", "baz", "foo"].iter()));
}