From d0b4a619af08030a28f3a445fb8031bafc3cf90a Mon Sep 17 00:00:00 2001
From: Jason Volk <jason@zemos.net>
Date: Sun, 26 Jan 2025 03:30:34 +0000
Subject: [PATCH] furnish batch interface with trait
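
Move the point-query helpers qry(), aqry() and bqry() out of
map/get.rs into the new map/qry.rs, and expose the batch queries
through extension traits: Qry (map/qry_batch.rs) for streams of
serializable keys and Get (map/get_batch.rs) for streams of raw
byte keys. Call sites now chain .qry(&map) or .get(&map) onto the
key stream instead of calling map.qry_batch() / map.get_batch(),
which are reduced to pub(crate). Both traits are re-exported from
the database crate root; the lazy_loading and short services are
converted to the new style.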

Signed-off-by: Jason Volk <jason@zemos.net>
---
 src/database/map.rs                   |  3 ++
 src/database/map/get.rs               | 52 +---------------------
 src/database/map/get_batch.rs         | 45 ++++++++-----------
 src/database/map/qry.rs               | 54 +++++++++++++++++++++++
 src/database/map/qry_batch.rs         | 63 +++++++++++++++++++++++++++
 src/database/mod.rs                   |  2 +-
 src/service/rooms/lazy_loading/mod.rs | 10 +++--
 src/service/rooms/short/mod.rs        | 19 ++++----
 8 files changed, 155 insertions(+), 93 deletions(-)
 create mode 100644 src/database/map/qry.rs
 create mode 100644 src/database/map/qry_batch.rs
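
A minimal call-site sketch of the new Qry trait, assuming the
workspace's conduwuit and database crates are in scope as in the
service code below; the fetch_all helper and its argument names are
hypothetical and not part of this patch:

    // Batch-fetch handles for a stream of serializable keys from one
    // column; formerly written as column.qry_batch(keys).
    use std::{fmt::Debug, sync::Arc};

    use conduwuit::Result;
    use database::{Handle, Map, Qry};
    use futures::Stream;
    use serde::Serialize;

    fn fetch_all<'a, K, S>(
        column: &'a Arc<Map>,
        keys: S,
    ) -> impl Stream<Item = Result<Handle<'a>>> + Send + 'a
    where
        S: Stream<Item = K> + Send + 'a,
        K: Serialize + Debug + 'a,
    {
        // The Qry extension trait lets the key stream drive the
        // (now pub(crate)) batch query on the given column.
        keys.qry(column)
    }

The Get trait follows the same shape for K: AsRef<[u8]> + Send + Sync
key streams, dispatching to the crate-private get_batch().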

diff --git a/src/database/map.rs b/src/database/map.rs
index 97e90659..5176c529 100644
--- a/src/database/map.rs
+++ b/src/database/map.rs
@@ -9,6 +9,8 @@ mod keys_from;
 mod keys_prefix;
 mod open;
 mod options;
+mod qry;
+mod qry_batch;
 mod remove;
 mod rev_keys;
 mod rev_keys_from;
@@ -37,6 +39,7 @@ pub(crate) use self::options::{
 	cache_iter_options_default, cache_read_options_default, iter_options_default,
 	read_options_default, write_options_default,
 };
+pub use self::{get_batch::Get, qry_batch::Qry};
 use crate::{watchers::Watchers, Engine};
 
 pub struct Map {
diff --git a/src/database/map/get.rs b/src/database/map/get.rs
index 73182042..d6c65be2 100644
--- a/src/database/map/get.rs
+++ b/src/database/map/get.rs
@@ -1,65 +1,15 @@
-use std::{convert::AsRef, fmt::Debug, io::Write, sync::Arc};
+use std::{convert::AsRef, fmt::Debug, sync::Arc};
 
-use arrayvec::ArrayVec;
 use conduwuit::{err, implement, utils::result::MapExpect, Err, Result};
 use futures::{future::ready, Future, FutureExt, TryFutureExt};
 use rocksdb::{DBPinnableSlice, ReadOptions};
-use serde::Serialize;
 use tokio::task;
 
 use crate::{
-	keyval::KeyBuf,
-	ser,
 	util::{is_incomplete, map_err, or_else},
 	Handle,
 };
 
-/// Fetch a value from the database into cache, returning a reference-handle
-/// asynchronously. The key is serialized into an allocated buffer to perform
-/// the query.
-#[implement(super::Map)]
-#[inline]
-pub fn qry<K>(self: &Arc<Self>, key: &K) -> impl Future<Output = Result<Handle<'_>>> + Send
-where
-	K: Serialize + ?Sized + Debug,
-{
-	let mut buf = KeyBuf::new();
-	self.bqry(key, &mut buf)
-}
-
-/// Fetch a value from the database into cache, returning a reference-handle
-/// asynchronously. The key is serialized into a fixed-sized buffer to perform
-/// the query. The maximum size is supplied as const generic parameter.
-#[implement(super::Map)]
-#[inline]
-pub fn aqry<const MAX: usize, K>(
-	self: &Arc<Self>,
-	key: &K,
-) -> impl Future<Output = Result<Handle<'_>>> + Send
-where
-	K: Serialize + ?Sized + Debug,
-{
-	let mut buf = ArrayVec::<u8, MAX>::new();
-	self.bqry(key, &mut buf)
-}
-
-/// Fetch a value from the database into cache, returning a reference-handle
-/// asynchronously. The key is serialized into a user-supplied Writer.
-#[implement(super::Map)]
-#[tracing::instrument(skip(self, buf), level = "trace")]
-pub fn bqry<K, B>(
-	self: &Arc<Self>,
-	key: &K,
-	buf: &mut B,
-) -> impl Future<Output = Result<Handle<'_>>> + Send
-where
-	K: Serialize + ?Sized + Debug,
-	B: Write + AsRef<[u8]>,
-{
-	let key = ser::serialize(buf, key).expect("failed to serialize query key");
-	self.get(key)
-}
-
 /// Fetch a value from the database into cache, returning a reference-handle
 /// asynchronously. The key is referenced directly to perform the query.
 #[implement(super::Map)]
diff --git a/src/database/map/get_batch.rs b/src/database/map/get_batch.rs
index ee9269e3..ab9c1dc8 100644
--- a/src/database/map/get_batch.rs
+++ b/src/database/map/get_batch.rs
@@ -1,4 +1,4 @@
-use std::{convert::AsRef, fmt::Debug, sync::Arc};
+use std::{convert::AsRef, sync::Arc};
 
 use conduwuit::{
 	implement,
@@ -10,43 +10,34 @@ use conduwuit::{
 };
 use futures::{Stream, StreamExt, TryStreamExt};
 use rocksdb::{DBPinnableSlice, ReadOptions};
-use serde::Serialize;
 
 use super::get::{cached_handle_from, handle_from};
-use crate::{keyval::KeyBuf, ser, Handle};
+use crate::Handle;
 
-#[implement(super::Map)]
-#[tracing::instrument(skip(self, keys), level = "trace")]
-pub fn qry_batch<'a, S, K>(
-	self: &'a Arc<Self>,
-	keys: S,
-) -> impl Stream<Item = Result<Handle<'_>>> + Send + 'a
+pub trait Get<'a, K, S>
 where
+	Self: Sized,
 	S: Stream<Item = K> + Send + 'a,
-	K: Serialize + Debug + 'a,
+	K: AsRef<[u8]> + Send + Sync + 'a,
 {
-	use crate::pool::Get;
+	fn get(self, map: &'a Arc<super::Map>) -> impl Stream<Item = Result<Handle<'_>>> + Send + 'a;
+}
 
-	keys.ready_chunks(automatic_amplification())
-		.widen_then(automatic_width(), |chunk| {
-			let keys = chunk
-				.iter()
-				.map(ser::serialize_to::<KeyBuf, _>)
-				.map(|result| result.expect("failed to serialize query key"))
-				.map(Into::into)
-				.collect();
-
-			self.db
-				.pool
-				.execute_get(Get { map: self.clone(), key: keys, res: None })
-		})
-		.map_ok(|results| results.into_iter().stream())
-		.try_flatten()
+impl<'a, K, S> Get<'a, K, S> for S
+where
+	Self: Sized,
+	S: Stream<Item = K> + Send + 'a,
+	K: AsRef<[u8]> + Send + Sync + 'a,
+{
+	#[inline]
+	fn get(self, map: &'a Arc<super::Map>) -> impl Stream<Item = Result<Handle<'_>>> + Send + 'a {
+		map.get_batch(self)
+	}
 }
 
 #[implement(super::Map)]
 #[tracing::instrument(skip(self, keys), level = "trace")]
-pub fn get_batch<'a, S, K>(
+pub(crate) fn get_batch<'a, S, K>(
 	self: &'a Arc<Self>,
 	keys: S,
 ) -> impl Stream<Item = Result<Handle<'_>>> + Send + 'a
diff --git a/src/database/map/qry.rs b/src/database/map/qry.rs
new file mode 100644
index 00000000..401eba43
--- /dev/null
+++ b/src/database/map/qry.rs
@@ -0,0 +1,54 @@
+use std::{convert::AsRef, fmt::Debug, io::Write, sync::Arc};
+
+use arrayvec::ArrayVec;
+use conduwuit::{implement, Result};
+use futures::Future;
+use serde::Serialize;
+
+use crate::{keyval::KeyBuf, ser, Handle};
+
+/// Fetch a value from the database into cache, returning a reference-handle
+/// asynchronously. The key is serialized into an allocated buffer to perform
+/// the query.
+#[implement(super::Map)]
+#[inline]
+pub fn qry<K>(self: &Arc<Self>, key: &K) -> impl Future<Output = Result<Handle<'_>>> + Send
+where
+	K: Serialize + ?Sized + Debug,
+{
+	let mut buf = KeyBuf::new();
+	self.bqry(key, &mut buf)
+}
+
+/// Fetch a value from the database into cache, returning a reference-handle
+/// asynchronously. The key is serialized into a fixed-size buffer to perform
+/// the query. The maximum size is supplied as a const generic parameter.
+#[implement(super::Map)]
+#[inline]
+pub fn aqry<const MAX: usize, K>(
+	self: &Arc<Self>,
+	key: &K,
+) -> impl Future<Output = Result<Handle<'_>>> + Send
+where
+	K: Serialize + ?Sized + Debug,
+{
+	let mut buf = ArrayVec::<u8, MAX>::new();
+	self.bqry(key, &mut buf)
+}
+
+/// Fetch a value from the database into cache, returning a reference-handle
+/// asynchronously. The key is serialized into a user-supplied Writer.
+#[implement(super::Map)]
+#[tracing::instrument(skip(self, buf), level = "trace")]
+pub fn bqry<K, B>(
+	self: &Arc<Self>,
+	key: &K,
+	buf: &mut B,
+) -> impl Future<Output = Result<Handle<'_>>> + Send
+where
+	K: Serialize + ?Sized + Debug,
+	B: Write + AsRef<[u8]>,
+{
+	let key = ser::serialize(buf, key).expect("failed to serialize query key");
+	self.get(key)
+}
diff --git a/src/database/map/qry_batch.rs b/src/database/map/qry_batch.rs
new file mode 100644
index 00000000..31817c48
--- /dev/null
+++ b/src/database/map/qry_batch.rs
@@ -0,0 +1,63 @@
+use std::{fmt::Debug, sync::Arc};
+
+use conduwuit::{
+	implement,
+	utils::{
+		stream::{automatic_amplification, automatic_width, WidebandExt},
+		IterStream,
+	},
+	Result,
+};
+use futures::{Stream, StreamExt, TryStreamExt};
+use serde::Serialize;
+
+use crate::{keyval::KeyBuf, ser, Handle};
+
+pub trait Qry<'a, K, S>
+where
+	S: Stream<Item = K> + Send + 'a,
+	K: Serialize + Debug,
+{
+	fn qry(self, map: &'a Arc<super::Map>) -> impl Stream<Item = Result<Handle<'_>>> + Send + 'a;
+}
+
+impl<'a, K, S> Qry<'a, K, S> for S
+where
+	Self: 'a,
+	S: Stream<Item = K> + Send + 'a,
+	K: Serialize + Debug + 'a,
+{
+	#[inline]
+	fn qry(self, map: &'a Arc<super::Map>) -> impl Stream<Item = Result<Handle<'_>>> + Send + 'a {
+		map.qry_batch(self)
+	}
+}
+
+#[implement(super::Map)]
+#[tracing::instrument(skip(self, keys), level = "trace")]
+pub(crate) fn qry_batch<'a, S, K>(
+	self: &'a Arc<Self>,
+	keys: S,
+) -> impl Stream<Item = Result<Handle<'_>>> + Send + 'a
+where
+	S: Stream<Item = K> + Send + 'a,
+	K: Serialize + Debug + 'a,
+{
+	use crate::pool::Get;
+
+	keys.ready_chunks(automatic_amplification())
+		.widen_then(automatic_width(), |chunk| {
+			let keys = chunk
+				.iter()
+				.map(ser::serialize_to::<KeyBuf, _>)
+				.map(|result| result.expect("failed to serialize query key"))
+				.map(Into::into)
+				.collect();
+
+			self.db
+				.pool
+				.execute_get(Get { map: self.clone(), key: keys, res: None })
+		})
+		.map_ok(|results| results.into_iter().stream())
+		.try_flatten()
+}
diff --git a/src/database/mod.rs b/src/database/mod.rs
index 42b7f5e3..4f8e2ad9 100644
--- a/src/database/mod.rs
+++ b/src/database/mod.rs
@@ -30,7 +30,7 @@ pub use self::{
 	deserialized::Deserialized,
 	handle::Handle,
 	keyval::{serialize_key, serialize_val, KeyVal, Slice},
-	map::{compact, Map},
+	map::{compact, Get, Map, Qry},
 	ser::{serialize, serialize_to, serialize_to_vec, Cbor, Interfix, Json, Separator, SEP},
 };
 pub(crate) use self::{
diff --git a/src/service/rooms/lazy_loading/mod.rs b/src/service/rooms/lazy_loading/mod.rs
index 67274ff1..a6e00271 100644
--- a/src/service/rooms/lazy_loading/mod.rs
+++ b/src/service/rooms/lazy_loading/mod.rs
@@ -7,7 +7,7 @@ use conduwuit::{
 	utils::{stream::TryIgnore, IterStream, ReadyExt},
 	Result,
 };
-use database::{Database, Deserialized, Handle, Interfix, Map};
+use database::{Database, Deserialized, Handle, Interfix, Map, Qry};
 use futures::{pin_mut, Stream, StreamExt};
 use ruma::{api::client::filter::LazyLoadOptions, DeviceId, OwnedUserId, RoomId, UserId};
 
@@ -115,9 +115,11 @@ where
 	let make_key =
 		|sender: &'a UserId| -> Key<'a> { (ctx.user_id, ctx.device_id, ctx.room_id, sender) };
 
-	self.db
-		.lazyloadedids
-		.qry_batch(senders.clone().stream().map(make_key))
+	senders
+		.clone()
+		.stream()
+		.map(make_key)
+		.qry(&self.db.lazyloadedids)
 		.map(into_status)
 		.zip(senders.stream())
 		.map(move |(status, sender)| {
diff --git a/src/service/rooms/short/mod.rs b/src/service/rooms/short/mod.rs
index 4a591592..dd586d02 100644
--- a/src/service/rooms/short/mod.rs
+++ b/src/service/rooms/short/mod.rs
@@ -2,7 +2,7 @@ use std::{borrow::Borrow, fmt::Debug, mem::size_of_val, sync::Arc};
 
 pub use conduwuit::pdu::{ShortEventId, ShortId, ShortRoomId};
 use conduwuit::{err, implement, utils, utils::IterStream, Result};
-use database::{Deserialized, Map};
+use database::{Deserialized, Get, Map, Qry};
 use futures::{Stream, StreamExt};
 use ruma::{events::StateEventType, EventId, RoomId};
 use serde::Deserialize;
@@ -67,9 +67,10 @@ pub fn multi_get_or_create_shorteventid<'a, I>(
 where
 	I: Iterator<Item = &'a EventId> + Clone + Debug + Send + 'a,
 {
-	self.db
-		.eventid_shorteventid
-		.get_batch(event_ids.clone().stream())
+	event_ids
+		.clone()
+		.stream()
+		.get(&self.db.eventid_shorteventid)
 		.zip(event_ids.into_iter().stream())
 		.map(|(result, event_id)| match result {
 			| Ok(ref short) => utils::u64_from_u8(short),
@@ -171,9 +172,8 @@ where
 	Id: for<'de> Deserialize<'de> + Sized + ToOwned + 'a,
 	<Id as ToOwned>::Owned: Borrow<EventId>,
 {
-	self.db
-		.shorteventid_eventid
-		.qry_batch(shorteventid)
+	shorteventid
+		.qry(&self.db.shorteventid_eventid)
 		.map(Deserialized::deserialized)
 }
 
@@ -204,9 +204,8 @@ pub fn multi_get_statekey_from_short<'a, S>(
 where
 	S: Stream<Item = ShortStateKey> + Send + 'a,
 {
-	self.db
-		.shortstatekey_statekey
-		.qry_batch(shortstatekey)
+	shortstatekey
+		.qry(&self.db.shortstatekey_statekey)
 		.map(Deserialized::deserialized)
 }