Database Refactor
- combine service/users data w/ mod unit
- split sliding sync related out of service/users
- instrument database entry points
- remove increment crap from database interface
- de-wrap all database get() calls
- de-wrap all database insert() calls
- de-wrap all database remove() calls
- refactor database interface for async streaming
- add query key serializer for database
- implement Debug for result handle
- add query deserializer for database
- add deserialization trait for option handle
- start a stream utils suite
- de-wrap/asyncify/type-query count_one_time_keys()
- de-wrap/asyncify users count
- add admin query users command suite
- de-wrap/asyncify users exists
- de-wrap/partially asyncify user filter related
- asyncify/de-wrap users device/keys related
- asyncify/de-wrap user auth/misc related
- asyncify/de-wrap users blurhash
- asyncify/de-wrap account_data get; merge Data into Service
- partial asyncify/de-wrap uiaa; merge Data into Service
- partially asyncify/de-wrap transaction_ids get; merge Data into Service
- partially asyncify/de-wrap key_backups; merge Data into Service
- asyncify/de-wrap pusher service getters; merge Data into Service
- asyncify/de-wrap rooms alias getters/some iterators
- asyncify/de-wrap rooms directory getters/iterator
- partially asyncify/de-wrap rooms lazy-loading
- partially asyncify/de-wrap rooms metadata
- asyncify/de-wrap rooms outlier
- asyncify/de-wrap rooms pdu_metadata
- de-wrap/partially asyncify rooms read receipt
- de-wrap rooms search service
- de-wrap/partially asyncify rooms user service
- partial de-wrap rooms state_compressor
- de-wrap rooms state_cache
- de-wrap room state et al
- de-wrap rooms timeline service
- additional users device/keys related
- de-wrap/asyncify sender
- asyncify services
- refactor database to TryFuture/TryStream
- refactor services for TryFuture/TryStream
- asyncify api handlers
- additional asyncification for admin module
- abstract stream related; support reverse streams
- additional stream conversions
- asyncify state-res related

Signed-off-by: Jason Volk <jason@zemos.net>
This commit is contained in:
parent
6001014078
commit
946ca364e0
203 changed files with 12202 additions and 10709 deletions
|
@ -37,8 +37,11 @@ zstd_compression = [
|
|||
[dependencies]
|
||||
conduit-core.workspace = true
|
||||
const-str.workspace = true
|
||||
futures.workspace = true
|
||||
log.workspace = true
|
||||
rust-rocksdb.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
tokio.workspace = true
|
||||
tracing.workspace = true
|
||||
|
||||
|
|
|
@ -37,7 +37,7 @@ impl Database {
|
|||
pub fn cork_and_sync(&self) -> Cork { Cork::new(&self.db, true, true) }
|
||||
|
||||
#[inline]
|
||||
pub fn iter_maps(&self) -> impl Iterator<Item = (&MapsKey, &MapsVal)> + '_ { self.map.iter() }
|
||||
pub fn iter_maps(&self) -> impl Iterator<Item = (&MapsKey, &MapsVal)> + Send + '_ { self.map.iter() }
|
||||
}
|
||||
|
||||
impl Index<&str> for Database {
|
||||
|
|
261
src/database/de.rs
Normal file
261
src/database/de.rs
Normal file
|
@ -0,0 +1,261 @@
|
|||
use conduit::{checked, debug::DebugInspect, err, utils::string, Error, Result};
|
||||
use serde::{
|
||||
de,
|
||||
de::{DeserializeSeed, Visitor},
|
||||
Deserialize,
|
||||
};
|
||||
|
||||
pub(crate) fn from_slice<'a, T>(buf: &'a [u8]) -> Result<T>
|
||||
where
|
||||
T: Deserialize<'a>,
|
||||
{
|
||||
let mut deserializer = Deserializer {
|
||||
buf,
|
||||
pos: 0,
|
||||
};
|
||||
|
||||
T::deserialize(&mut deserializer).debug_inspect(|_| {
|
||||
deserializer
|
||||
.finished()
|
||||
.expect("deserialization failed to consume trailing bytes");
|
||||
})
|
||||
}
|
||||
|
||||
pub(crate) struct Deserializer<'de> {
|
||||
buf: &'de [u8],
|
||||
pos: usize,
|
||||
}
|
||||
|
||||
/// Directive to ignore a record. This type can be used to skip deserialization
|
||||
/// until the next separator is found.
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct Ignore;
|
||||
|
||||
impl<'de> Deserializer<'de> {
|
||||
const SEP: u8 = b'\xFF';
|
||||
|
||||
fn finished(&self) -> Result<()> {
|
||||
let pos = self.pos;
|
||||
let len = self.buf.len();
|
||||
let parsed = &self.buf[0..pos];
|
||||
let unparsed = &self.buf[pos..];
|
||||
let remain = checked!(len - pos)?;
|
||||
let trailing_sep = remain == 1 && unparsed[0] == Self::SEP;
|
||||
(remain == 0 || trailing_sep)
|
||||
.then_some(())
|
||||
.ok_or(err!(SerdeDe(
|
||||
"{remain} trailing of {len} bytes not deserialized.\n{parsed:?}\n{unparsed:?}",
|
||||
)))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn record_next(&mut self) -> &'de [u8] {
|
||||
self.buf[self.pos..]
|
||||
.split(|b| *b == Deserializer::SEP)
|
||||
.inspect(|record| self.inc_pos(record.len()))
|
||||
.next()
|
||||
.expect("remainder of buf even if SEP was not found")
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn record_trail(&mut self) -> &'de [u8] {
|
||||
let record = &self.buf[self.pos..];
|
||||
self.inc_pos(record.len());
|
||||
record
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn record_start(&mut self) {
|
||||
let started = self.pos != 0;
|
||||
debug_assert!(
|
||||
!started || self.buf[self.pos] == Self::SEP,
|
||||
"Missing expected record separator at current position"
|
||||
);
|
||||
|
||||
self.inc_pos(started.into());
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn inc_pos(&mut self, n: usize) {
|
||||
self.pos = self.pos.saturating_add(n);
|
||||
debug_assert!(self.pos <= self.buf.len(), "pos out of range");
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'de: 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
|
||||
type Error = Error;
|
||||
|
||||
fn deserialize_map<V>(self, _visitor: V) -> Result<V::Value>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
unimplemented!("deserialize Map not implemented")
|
||||
}
|
||||
|
||||
fn deserialize_seq<V>(self, visitor: V) -> Result<V::Value>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
visitor.visit_seq(self)
|
||||
}
|
||||
|
||||
fn deserialize_tuple<V>(self, _len: usize, visitor: V) -> Result<V::Value>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
visitor.visit_seq(self)
|
||||
}
|
||||
|
||||
fn deserialize_tuple_struct<V>(self, _name: &'static str, _len: usize, visitor: V) -> Result<V::Value>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
visitor.visit_seq(self)
|
||||
}
|
||||
|
||||
fn deserialize_struct<V>(
|
||||
self, _name: &'static str, _fields: &'static [&'static str], _visitor: V,
|
||||
) -> Result<V::Value>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
unimplemented!("deserialize Struct not implemented")
|
||||
}
|
||||
|
||||
fn deserialize_unit_struct<V>(self, name: &'static str, visitor: V) -> Result<V::Value>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
match name {
|
||||
"Ignore" => self.record_next(),
|
||||
_ => unimplemented!("Unrecognized deserialization Directive {name:?}"),
|
||||
};
|
||||
|
||||
visitor.visit_unit()
|
||||
}
|
||||
|
||||
fn deserialize_newtype_struct<V>(self, _name: &'static str, _visitor: V) -> Result<V::Value>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
unimplemented!("deserialize Newtype Struct not implemented")
|
||||
}
|
||||
|
||||
fn deserialize_enum<V>(
|
||||
self, _name: &'static str, _variants: &'static [&'static str], _visitor: V,
|
||||
) -> Result<V::Value>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
unimplemented!("deserialize Enum not implemented")
|
||||
}
|
||||
|
||||
fn deserialize_option<V: Visitor<'de>>(self, _visitor: V) -> Result<V::Value> {
|
||||
unimplemented!("deserialize Option not implemented")
|
||||
}
|
||||
|
||||
fn deserialize_bool<V: Visitor<'de>>(self, _visitor: V) -> Result<V::Value> {
|
||||
unimplemented!("deserialize bool not implemented")
|
||||
}
|
||||
|
||||
fn deserialize_i8<V: Visitor<'de>>(self, _visitor: V) -> Result<V::Value> {
|
||||
unimplemented!("deserialize i8 not implemented")
|
||||
}
|
||||
|
||||
fn deserialize_i16<V: Visitor<'de>>(self, _visitor: V) -> Result<V::Value> {
|
||||
unimplemented!("deserialize i16 not implemented")
|
||||
}
|
||||
|
||||
fn deserialize_i32<V: Visitor<'de>>(self, _visitor: V) -> Result<V::Value> {
|
||||
unimplemented!("deserialize i32 not implemented")
|
||||
}
|
||||
|
||||
fn deserialize_i64<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value> {
|
||||
let bytes: [u8; size_of::<i64>()] = self.buf[self.pos..].try_into()?;
|
||||
self.pos = self.pos.saturating_add(size_of::<i64>());
|
||||
visitor.visit_i64(i64::from_be_bytes(bytes))
|
||||
}
|
||||
|
||||
fn deserialize_u8<V: Visitor<'de>>(self, _visitor: V) -> Result<V::Value> {
|
||||
unimplemented!("deserialize u8 not implemented")
|
||||
}
|
||||
|
||||
fn deserialize_u16<V: Visitor<'de>>(self, _visitor: V) -> Result<V::Value> {
|
||||
unimplemented!("deserialize u16 not implemented")
|
||||
}
|
||||
|
||||
fn deserialize_u32<V: Visitor<'de>>(self, _visitor: V) -> Result<V::Value> {
|
||||
unimplemented!("deserialize u32 not implemented")
|
||||
}
|
||||
|
||||
fn deserialize_u64<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value> {
|
||||
let bytes: [u8; size_of::<u64>()] = self.buf[self.pos..].try_into()?;
|
||||
self.pos = self.pos.saturating_add(size_of::<u64>());
|
||||
visitor.visit_u64(u64::from_be_bytes(bytes))
|
||||
}
|
||||
|
||||
fn deserialize_f32<V: Visitor<'de>>(self, _visitor: V) -> Result<V::Value> {
|
||||
unimplemented!("deserialize f32 not implemented")
|
||||
}
|
||||
|
||||
fn deserialize_f64<V: Visitor<'de>>(self, _visitor: V) -> Result<V::Value> {
|
||||
unimplemented!("deserialize f64 not implemented")
|
||||
}
|
||||
|
||||
fn deserialize_char<V: Visitor<'de>>(self, _visitor: V) -> Result<V::Value> {
|
||||
unimplemented!("deserialize char not implemented")
|
||||
}
|
||||
|
||||
fn deserialize_str<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value> {
|
||||
let input = self.record_next();
|
||||
let out = string::str_from_bytes(input)?;
|
||||
visitor.visit_borrowed_str(out)
|
||||
}
|
||||
|
||||
fn deserialize_string<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value> {
|
||||
let input = self.record_next();
|
||||
let out = string::string_from_bytes(input)?;
|
||||
visitor.visit_string(out)
|
||||
}
|
||||
|
||||
fn deserialize_bytes<V: Visitor<'de>>(self, visitor: V) -> Result<V::Value> {
|
||||
let input = self.record_trail();
|
||||
visitor.visit_borrowed_bytes(input)
|
||||
}
|
||||
|
||||
fn deserialize_byte_buf<V: Visitor<'de>>(self, _visitor: V) -> Result<V::Value> {
|
||||
unimplemented!("deserialize Byte Buf not implemented")
|
||||
}
|
||||
|
||||
fn deserialize_unit<V: Visitor<'de>>(self, _visitor: V) -> Result<V::Value> {
|
||||
unimplemented!("deserialize Unit Struct not implemented")
|
||||
}
|
||||
|
||||
fn deserialize_identifier<V: Visitor<'de>>(self, _visitor: V) -> Result<V::Value> {
|
||||
unimplemented!("deserialize Identifier not implemented")
|
||||
}
|
||||
|
||||
fn deserialize_ignored_any<V: Visitor<'de>>(self, _visitor: V) -> Result<V::Value> {
|
||||
unimplemented!("deserialize Ignored Any not implemented")
|
||||
}
|
||||
|
||||
fn deserialize_any<V: Visitor<'de>>(self, _visitor: V) -> Result<V::Value> {
|
||||
unimplemented!("deserialize any not implemented")
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'de: 'a> de::SeqAccess<'de> for &'a mut Deserializer<'de> {
|
||||
type Error = Error;
|
||||
|
||||
fn next_element_seed<T>(&mut self, seed: T) -> Result<Option<T::Value>>
|
||||
where
|
||||
T: DeserializeSeed<'de>,
|
||||
{
|
||||
if self.pos >= self.buf.len() {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
self.record_start();
|
||||
seed.deserialize(&mut **self).map(Some)
|
||||
}
|
||||
}
|
34
src/database/deserialized.rs
Normal file
34
src/database/deserialized.rs
Normal file
|
@ -0,0 +1,34 @@
|
|||
use std::convert::identity;
|
||||
|
||||
use conduit::Result;
|
||||
use serde::Deserialize;
|
||||
|
||||
pub trait Deserialized {
|
||||
fn map_de<T, U, F>(self, f: F) -> Result<U>
|
||||
where
|
||||
F: FnOnce(T) -> U,
|
||||
T: for<'de> Deserialize<'de>;
|
||||
|
||||
fn map_json<T, U, F>(self, f: F) -> Result<U>
|
||||
where
|
||||
F: FnOnce(T) -> U,
|
||||
T: for<'de> Deserialize<'de>;
|
||||
|
||||
#[inline]
|
||||
fn deserialized<T>(self) -> Result<T>
|
||||
where
|
||||
T: for<'de> Deserialize<'de>,
|
||||
Self: Sized,
|
||||
{
|
||||
self.map_de(identity::<T>)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn deserialized_json<T>(self) -> Result<T>
|
||||
where
|
||||
T: for<'de> Deserialize<'de>,
|
||||
Self: Sized,
|
||||
{
|
||||
self.map_json(identity::<T>)
|
||||
}
|
||||
}
|
|
@ -106,7 +106,7 @@ impl Engine {
|
|||
}))
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self))]
|
||||
#[tracing::instrument(skip(self), level = "trace")]
|
||||
pub(crate) fn open_cf(&self, name: &str) -> Result<Arc<BoundColumnFamily<'_>>> {
|
||||
let mut cfs = self.cfs.lock().expect("locked");
|
||||
if !cfs.contains(name) {
|
||||
|
|
|
@ -1,6 +1,10 @@
|
|||
use std::ops::Deref;
|
||||
use std::{fmt, fmt::Debug, ops::Deref};
|
||||
|
||||
use conduit::Result;
|
||||
use rocksdb::DBPinnableSlice;
|
||||
use serde::{Deserialize, Serialize, Serializer};
|
||||
|
||||
use crate::{keyval::deserialize_val, Deserialized, Slice};
|
||||
|
||||
pub struct Handle<'a> {
|
||||
val: DBPinnableSlice<'a>,
|
||||
|
@ -14,14 +18,91 @@ impl<'a> From<DBPinnableSlice<'a>> for Handle<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
impl Debug for Handle<'_> {
|
||||
fn fmt(&self, out: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let val: &Slice = self;
|
||||
let ptr = val.as_ptr();
|
||||
let len = val.len();
|
||||
write!(out, "Handle {{val: {{ptr: {ptr:?}, len: {len}}}}}")
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for Handle<'_> {
|
||||
#[inline]
|
||||
fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
|
||||
let bytes: &Slice = self;
|
||||
serializer.serialize_bytes(bytes)
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for Handle<'_> {
|
||||
type Target = [u8];
|
||||
type Target = Slice;
|
||||
|
||||
#[inline]
|
||||
fn deref(&self) -> &Self::Target { &self.val }
|
||||
}
|
||||
|
||||
impl AsRef<[u8]> for Handle<'_> {
|
||||
impl AsRef<Slice> for Handle<'_> {
|
||||
#[inline]
|
||||
fn as_ref(&self) -> &[u8] { &self.val }
|
||||
fn as_ref(&self) -> &Slice { &self.val }
|
||||
}
|
||||
|
||||
impl Deserialized for Result<Handle<'_>> {
|
||||
#[inline]
|
||||
fn map_json<T, U, F>(self, f: F) -> Result<U>
|
||||
where
|
||||
F: FnOnce(T) -> U,
|
||||
T: for<'de> Deserialize<'de>,
|
||||
{
|
||||
self?.map_json(f)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn map_de<T, U, F>(self, f: F) -> Result<U>
|
||||
where
|
||||
F: FnOnce(T) -> U,
|
||||
T: for<'de> Deserialize<'de>,
|
||||
{
|
||||
self?.map_de(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Deserialized for Result<&'a Handle<'a>> {
|
||||
#[inline]
|
||||
fn map_json<T, U, F>(self, f: F) -> Result<U>
|
||||
where
|
||||
F: FnOnce(T) -> U,
|
||||
T: for<'de> Deserialize<'de>,
|
||||
{
|
||||
self.and_then(|handle| handle.map_json(f))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn map_de<T, U, F>(self, f: F) -> Result<U>
|
||||
where
|
||||
F: FnOnce(T) -> U,
|
||||
T: for<'de> Deserialize<'de>,
|
||||
{
|
||||
self.and_then(|handle| handle.map_de(f))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Deserialized for &'a Handle<'a> {
|
||||
fn map_json<T, U, F>(self, f: F) -> Result<U>
|
||||
where
|
||||
F: FnOnce(T) -> U,
|
||||
T: for<'de> Deserialize<'de>,
|
||||
{
|
||||
serde_json::from_slice::<T>(self.as_ref())
|
||||
.map_err(Into::into)
|
||||
.map(f)
|
||||
}
|
||||
|
||||
fn map_de<T, U, F>(self, f: F) -> Result<U>
|
||||
where
|
||||
F: FnOnce(T) -> U,
|
||||
T: for<'de> Deserialize<'de>,
|
||||
{
|
||||
deserialize_val(self.as_ref()).map(f)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,110 +0,0 @@
|
|||
use std::{iter::FusedIterator, sync::Arc};
|
||||
|
||||
use conduit::Result;
|
||||
use rocksdb::{ColumnFamily, DBRawIteratorWithThreadMode, Direction, IteratorMode, ReadOptions};
|
||||
|
||||
use crate::{
|
||||
engine::Db,
|
||||
result,
|
||||
slice::{OwnedKeyVal, OwnedKeyValPair},
|
||||
Engine,
|
||||
};
|
||||
|
||||
type Cursor<'cursor> = DBRawIteratorWithThreadMode<'cursor, Db>;
|
||||
|
||||
struct State<'cursor> {
|
||||
cursor: Cursor<'cursor>,
|
||||
direction: Direction,
|
||||
valid: bool,
|
||||
init: bool,
|
||||
}
|
||||
|
||||
impl<'cursor> State<'cursor> {
|
||||
pub(crate) fn new(
|
||||
db: &'cursor Arc<Engine>, cf: &'cursor Arc<ColumnFamily>, opts: ReadOptions, mode: &IteratorMode<'_>,
|
||||
) -> Self {
|
||||
let mut cursor = db.db.raw_iterator_cf_opt(&**cf, opts);
|
||||
let direction = into_direction(mode);
|
||||
let valid = seek_init(&mut cursor, mode);
|
||||
Self {
|
||||
cursor,
|
||||
direction,
|
||||
valid,
|
||||
init: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Iter<'cursor> {
|
||||
state: State<'cursor>,
|
||||
}
|
||||
|
||||
impl<'cursor> Iter<'cursor> {
|
||||
pub(crate) fn new(
|
||||
db: &'cursor Arc<Engine>, cf: &'cursor Arc<ColumnFamily>, opts: ReadOptions, mode: &IteratorMode<'_>,
|
||||
) -> Self {
|
||||
Self {
|
||||
state: State::new(db, cf, opts, mode),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Iterator for Iter<'_> {
|
||||
type Item = OwnedKeyValPair;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if !self.state.init && self.state.valid {
|
||||
seek_next(&mut self.state.cursor, self.state.direction);
|
||||
} else if self.state.init {
|
||||
self.state.init = false;
|
||||
}
|
||||
|
||||
self.state
|
||||
.cursor
|
||||
.item()
|
||||
.map(OwnedKeyVal::from)
|
||||
.map(OwnedKeyVal::to_tuple)
|
||||
.or_else(|| {
|
||||
when_invalid(&mut self.state).expect("iterator invalidated due to error");
|
||||
None
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl FusedIterator for Iter<'_> {}
|
||||
|
||||
fn when_invalid(state: &mut State<'_>) -> Result<()> {
|
||||
state.valid = false;
|
||||
result(state.cursor.status())
|
||||
}
|
||||
|
||||
fn seek_next(cursor: &mut Cursor<'_>, direction: Direction) {
|
||||
match direction {
|
||||
Direction::Forward => cursor.next(),
|
||||
Direction::Reverse => cursor.prev(),
|
||||
}
|
||||
}
|
||||
|
||||
fn seek_init(cursor: &mut Cursor<'_>, mode: &IteratorMode<'_>) -> bool {
|
||||
use Direction::{Forward, Reverse};
|
||||
use IteratorMode::{End, From, Start};
|
||||
|
||||
match mode {
|
||||
Start => cursor.seek_to_first(),
|
||||
End => cursor.seek_to_last(),
|
||||
From(key, Forward) => cursor.seek(key),
|
||||
From(key, Reverse) => cursor.seek_for_prev(key),
|
||||
};
|
||||
|
||||
cursor.valid()
|
||||
}
|
||||
|
||||
fn into_direction(mode: &IteratorMode<'_>) -> Direction {
|
||||
use Direction::{Forward, Reverse};
|
||||
use IteratorMode::{End, From, Start};
|
||||
|
||||
match mode {
|
||||
Start | From(_, Forward) => Forward,
|
||||
End | From(_, Reverse) => Reverse,
|
||||
}
|
||||
}
|
83
src/database/keyval.rs
Normal file
83
src/database/keyval.rs
Normal file
|
@ -0,0 +1,83 @@
|
|||
use conduit::Result;
|
||||
use serde::Deserialize;
|
||||
|
||||
use crate::de;
|
||||
|
||||
pub(crate) type OwnedKeyVal = (Vec<u8>, Vec<u8>);
|
||||
pub(crate) type OwnedKey = Vec<u8>;
|
||||
pub(crate) type OwnedVal = Vec<u8>;
|
||||
|
||||
pub type KeyVal<'a, K = &'a Slice, V = &'a Slice> = (Key<'a, K>, Val<'a, V>);
|
||||
pub type Key<'a, T = &'a Slice> = T;
|
||||
pub type Val<'a, T = &'a Slice> = T;
|
||||
|
||||
pub type Slice = [u8];
|
||||
|
||||
#[inline]
|
||||
pub(crate) fn _expect_deserialize<'a, K, V>(kv: Result<KeyVal<'a>>) -> KeyVal<'a, K, V>
|
||||
where
|
||||
K: Deserialize<'a>,
|
||||
V: Deserialize<'a>,
|
||||
{
|
||||
result_deserialize(kv).expect("failed to deserialize result key/val")
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub(crate) fn _expect_deserialize_key<'a, K>(key: Result<Key<'a>>) -> Key<'a, K>
|
||||
where
|
||||
K: Deserialize<'a>,
|
||||
{
|
||||
result_deserialize_key(key).expect("failed to deserialize result key")
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub(crate) fn result_deserialize<'a, K, V>(kv: Result<KeyVal<'a>>) -> Result<KeyVal<'a, K, V>>
|
||||
where
|
||||
K: Deserialize<'a>,
|
||||
V: Deserialize<'a>,
|
||||
{
|
||||
deserialize(kv?)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub(crate) fn result_deserialize_key<'a, K>(key: Result<Key<'a>>) -> Result<Key<'a, K>>
|
||||
where
|
||||
K: Deserialize<'a>,
|
||||
{
|
||||
deserialize_key(key?)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub(crate) fn deserialize<'a, K, V>(kv: KeyVal<'a>) -> Result<KeyVal<'a, K, V>>
|
||||
where
|
||||
K: Deserialize<'a>,
|
||||
V: Deserialize<'a>,
|
||||
{
|
||||
Ok((deserialize_key::<K>(kv.0)?, deserialize_val::<V>(kv.1)?))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub(crate) fn deserialize_key<'a, K>(key: Key<'a>) -> Result<Key<'a, K>>
|
||||
where
|
||||
K: Deserialize<'a>,
|
||||
{
|
||||
de::from_slice::<K>(key)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub(crate) fn deserialize_val<'a, V>(val: Val<'a>) -> Result<Val<'a, V>>
|
||||
where
|
||||
V: Deserialize<'a>,
|
||||
{
|
||||
de::from_slice::<V>(val)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
#[must_use]
|
||||
pub fn to_owned(kv: KeyVal<'_>) -> OwnedKeyVal { (kv.0.to_owned(), kv.1.to_owned()) }
|
||||
|
||||
#[inline]
|
||||
pub fn key<K, V>(kv: KeyVal<'_, K, V>) -> Key<'_, K> { kv.0 }
|
||||
|
||||
#[inline]
|
||||
pub fn val<K, V>(kv: KeyVal<'_, K, V>) -> Val<'_, V> { kv.1 }
|
|
@ -1,15 +1,39 @@
|
|||
use std::{ffi::CStr, future::Future, mem::size_of, pin::Pin, sync::Arc};
|
||||
mod count;
|
||||
mod keys;
|
||||
mod keys_from;
|
||||
mod keys_prefix;
|
||||
mod rev_keys;
|
||||
mod rev_keys_from;
|
||||
mod rev_keys_prefix;
|
||||
mod rev_stream;
|
||||
mod rev_stream_from;
|
||||
mod rev_stream_prefix;
|
||||
mod stream;
|
||||
mod stream_from;
|
||||
mod stream_prefix;
|
||||
|
||||
use conduit::{utils, Result};
|
||||
use rocksdb::{
|
||||
AsColumnFamilyRef, ColumnFamily, Direction, IteratorMode, ReadOptions, WriteBatchWithTransaction, WriteOptions,
|
||||
use std::{
|
||||
convert::AsRef,
|
||||
ffi::CStr,
|
||||
fmt,
|
||||
fmt::{Debug, Display},
|
||||
future::Future,
|
||||
io::Write,
|
||||
pin::Pin,
|
||||
sync::Arc,
|
||||
};
|
||||
|
||||
use conduit::{err, Result};
|
||||
use futures::future;
|
||||
use rocksdb::{AsColumnFamilyRef, ColumnFamily, ReadOptions, WriteBatchWithTransaction, WriteOptions};
|
||||
use serde::Serialize;
|
||||
|
||||
use crate::{
|
||||
or_else, result,
|
||||
slice::{Byte, Key, KeyVal, OwnedKey, OwnedKeyValPair, OwnedVal, Val},
|
||||
keyval::{OwnedKey, OwnedVal},
|
||||
ser,
|
||||
util::{map_err, or_else},
|
||||
watchers::Watchers,
|
||||
Engine, Handle, Iter,
|
||||
Engine, Handle,
|
||||
};
|
||||
|
||||
pub struct Map {
|
||||
|
@ -21,8 +45,6 @@ pub struct Map {
|
|||
read_options: ReadOptions,
|
||||
}
|
||||
|
||||
type OwnedKeyValPairIter<'a> = Box<dyn Iterator<Item = OwnedKeyValPair> + Send + 'a>;
|
||||
|
||||
impl Map {
|
||||
pub(crate) fn open(db: &Arc<Engine>, name: &str) -> Result<Arc<Self>> {
|
||||
Ok(Arc::new(Self {
|
||||
|
@ -35,14 +57,125 @@ impl Map {
|
|||
}))
|
||||
}
|
||||
|
||||
pub fn get(&self, key: &Key) -> Result<Option<Handle<'_>>> {
|
||||
let read_options = &self.read_options;
|
||||
let res = self.db.db.get_pinned_cf_opt(&self.cf(), key, read_options);
|
||||
|
||||
Ok(result(res)?.map(Handle::from))
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn del<K>(&self, key: &K)
|
||||
where
|
||||
K: Serialize + ?Sized + Debug,
|
||||
{
|
||||
let mut buf = Vec::<u8>::with_capacity(64);
|
||||
self.bdel(key, &mut buf);
|
||||
}
|
||||
|
||||
pub fn multi_get(&self, keys: &[&Key]) -> Result<Vec<Option<OwnedVal>>> {
|
||||
#[tracing::instrument(skip(self, buf), fields(%self), level = "trace")]
|
||||
pub fn bdel<K, B>(&self, key: &K, buf: &mut B)
|
||||
where
|
||||
K: Serialize + ?Sized + Debug,
|
||||
B: Write + AsRef<[u8]>,
|
||||
{
|
||||
let key = ser::serialize(buf, key).expect("failed to serialize deletion key");
|
||||
self.remove(&key);
|
||||
}
|
||||
|
||||
#[tracing::instrument(level = "trace")]
|
||||
pub fn remove<K>(&self, key: &K)
|
||||
where
|
||||
K: AsRef<[u8]> + ?Sized + Debug,
|
||||
{
|
||||
let write_options = &self.write_options;
|
||||
self.db
|
||||
.db
|
||||
.delete_cf_opt(&self.cf(), key, write_options)
|
||||
.or_else(or_else)
|
||||
.expect("database remove error");
|
||||
|
||||
if !self.db.corked() {
|
||||
self.db.flush().expect("database flush error");
|
||||
}
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self, value), fields(%self), level = "trace")]
|
||||
pub fn insert<K, V>(&self, key: &K, value: &V)
|
||||
where
|
||||
K: AsRef<[u8]> + ?Sized + Debug,
|
||||
V: AsRef<[u8]> + ?Sized,
|
||||
{
|
||||
let write_options = &self.write_options;
|
||||
self.db
|
||||
.db
|
||||
.put_cf_opt(&self.cf(), key, value, write_options)
|
||||
.or_else(or_else)
|
||||
.expect("database insert error");
|
||||
|
||||
if !self.db.corked() {
|
||||
self.db.flush().expect("database flush error");
|
||||
}
|
||||
|
||||
self.watchers.wake(key.as_ref());
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn insert_batch<'a, I, K, V>(&'a self, iter: I)
|
||||
where
|
||||
I: Iterator<Item = &'a (K, V)> + Send + Debug,
|
||||
K: AsRef<[u8]> + Sized + Debug + 'a,
|
||||
V: AsRef<[u8]> + Sized + 'a,
|
||||
{
|
||||
let mut batch = WriteBatchWithTransaction::<false>::default();
|
||||
for (key, val) in iter {
|
||||
batch.put_cf(&self.cf(), key.as_ref(), val.as_ref());
|
||||
}
|
||||
|
||||
let write_options = &self.write_options;
|
||||
self.db
|
||||
.db
|
||||
.write_opt(batch, write_options)
|
||||
.or_else(or_else)
|
||||
.expect("database insert batch error");
|
||||
|
||||
if !self.db.corked() {
|
||||
self.db.flush().expect("database flush error");
|
||||
}
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn qry<K>(&self, key: &K) -> impl Future<Output = Result<Handle<'_>>> + Send
|
||||
where
|
||||
K: Serialize + ?Sized + Debug,
|
||||
{
|
||||
let mut buf = Vec::<u8>::with_capacity(64);
|
||||
self.bqry(key, &mut buf)
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self, buf), fields(%self), level = "trace")]
|
||||
pub fn bqry<K, B>(&self, key: &K, buf: &mut B) -> impl Future<Output = Result<Handle<'_>>> + Send
|
||||
where
|
||||
K: Serialize + ?Sized + Debug,
|
||||
B: Write + AsRef<[u8]>,
|
||||
{
|
||||
let key = ser::serialize(buf, key).expect("failed to serialize query key");
|
||||
let val = self.get(key);
|
||||
future::ready(val)
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn get<K>(&self, key: &K) -> Result<Handle<'_>>
|
||||
where
|
||||
K: AsRef<[u8]> + ?Sized + Debug,
|
||||
{
|
||||
self.db
|
||||
.db
|
||||
.get_pinned_cf_opt(&self.cf(), key, &self.read_options)
|
||||
.map_err(map_err)?
|
||||
.map(Handle::from)
|
||||
.ok_or(err!(Request(NotFound("Not found in database"))))
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn multi_get<'a, I, K>(&self, keys: I) -> Vec<Option<OwnedVal>>
|
||||
where
|
||||
I: Iterator<Item = &'a K> + ExactSizeIterator + Send + Debug,
|
||||
K: AsRef<[u8]> + Sized + Debug + 'a,
|
||||
{
|
||||
// Optimization can be `true` if key vector is pre-sorted **by the column
|
||||
// comparator**.
|
||||
const SORTED: bool = false;
|
||||
|
@ -57,140 +190,25 @@ impl Map {
|
|||
match res {
|
||||
Ok(Some(res)) => ret.push(Some((*res).to_vec())),
|
||||
Ok(None) => ret.push(None),
|
||||
Err(e) => return or_else(e),
|
||||
Err(e) => or_else(e).expect("database multiget error"),
|
||||
}
|
||||
}
|
||||
|
||||
Ok(ret)
|
||||
ret
|
||||
}
|
||||
|
||||
pub fn insert(&self, key: &Key, value: &Val) -> Result<()> {
|
||||
let write_options = &self.write_options;
|
||||
self.db
|
||||
.db
|
||||
.put_cf_opt(&self.cf(), key, value, write_options)
|
||||
.or_else(or_else)?;
|
||||
|
||||
if !self.db.corked() {
|
||||
self.db.flush()?;
|
||||
}
|
||||
|
||||
self.watchers.wake(key);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn insert_batch<'a, I>(&'a self, iter: I) -> Result<()>
|
||||
#[inline]
|
||||
pub fn watch_prefix<'a, K>(&'a self, prefix: &K) -> Pin<Box<dyn Future<Output = ()> + Send + 'a>>
|
||||
where
|
||||
I: Iterator<Item = KeyVal<'a>>,
|
||||
K: AsRef<[u8]> + ?Sized + Debug,
|
||||
{
|
||||
let mut batch = WriteBatchWithTransaction::<false>::default();
|
||||
for KeyVal(key, value) in iter {
|
||||
batch.put_cf(&self.cf(), key, value);
|
||||
}
|
||||
|
||||
let write_options = &self.write_options;
|
||||
let res = self.db.db.write_opt(batch, write_options);
|
||||
|
||||
if !self.db.corked() {
|
||||
self.db.flush()?;
|
||||
}
|
||||
|
||||
result(res)
|
||||
}
|
||||
|
||||
pub fn remove(&self, key: &Key) -> Result<()> {
|
||||
let write_options = &self.write_options;
|
||||
let res = self.db.db.delete_cf_opt(&self.cf(), key, write_options);
|
||||
|
||||
if !self.db.corked() {
|
||||
self.db.flush()?;
|
||||
}
|
||||
|
||||
result(res)
|
||||
}
|
||||
|
||||
pub fn remove_batch<'a, I>(&'a self, iter: I) -> Result<()>
|
||||
where
|
||||
I: Iterator<Item = &'a Key>,
|
||||
{
|
||||
let mut batch = WriteBatchWithTransaction::<false>::default();
|
||||
for key in iter {
|
||||
batch.delete_cf(&self.cf(), key);
|
||||
}
|
||||
|
||||
let write_options = &self.write_options;
|
||||
let res = self.db.db.write_opt(batch, write_options);
|
||||
|
||||
if !self.db.corked() {
|
||||
self.db.flush()?;
|
||||
}
|
||||
|
||||
result(res)
|
||||
}
|
||||
|
||||
pub fn iter(&self) -> OwnedKeyValPairIter<'_> {
|
||||
let mode = IteratorMode::Start;
|
||||
let read_options = read_options_default();
|
||||
Box::new(Iter::new(&self.db, &self.cf, read_options, &mode))
|
||||
}
|
||||
|
||||
pub fn iter_from(&self, from: &Key, reverse: bool) -> OwnedKeyValPairIter<'_> {
|
||||
let direction = if reverse {
|
||||
Direction::Reverse
|
||||
} else {
|
||||
Direction::Forward
|
||||
};
|
||||
let mode = IteratorMode::From(from, direction);
|
||||
let read_options = read_options_default();
|
||||
Box::new(Iter::new(&self.db, &self.cf, read_options, &mode))
|
||||
}
|
||||
|
||||
pub fn scan_prefix(&self, prefix: OwnedKey) -> OwnedKeyValPairIter<'_> {
|
||||
let mode = IteratorMode::From(&prefix, Direction::Forward);
|
||||
let read_options = read_options_default();
|
||||
Box::new(Iter::new(&self.db, &self.cf, read_options, &mode).take_while(move |(k, _)| k.starts_with(&prefix)))
|
||||
}
|
||||
|
||||
pub fn increment(&self, key: &Key) -> Result<[Byte; size_of::<u64>()]> {
|
||||
let old = self.get(key)?;
|
||||
let new = utils::increment(old.as_deref());
|
||||
self.insert(key, &new)?;
|
||||
|
||||
if !self.db.corked() {
|
||||
self.db.flush()?;
|
||||
}
|
||||
|
||||
Ok(new)
|
||||
}
|
||||
|
||||
pub fn increment_batch<'a, I>(&'a self, iter: I) -> Result<()>
|
||||
where
|
||||
I: Iterator<Item = &'a Key>,
|
||||
{
|
||||
let mut batch = WriteBatchWithTransaction::<false>::default();
|
||||
for key in iter {
|
||||
let old = self.get(key)?;
|
||||
let new = utils::increment(old.as_deref());
|
||||
batch.put_cf(&self.cf(), key, new);
|
||||
}
|
||||
|
||||
let write_options = &self.write_options;
|
||||
let res = self.db.db.write_opt(batch, write_options);
|
||||
|
||||
if !self.db.corked() {
|
||||
self.db.flush()?;
|
||||
}
|
||||
|
||||
result(res)
|
||||
}
|
||||
|
||||
pub fn watch_prefix<'a>(&'a self, prefix: &Key) -> Pin<Box<dyn Future<Output = ()> + Send + 'a>> {
|
||||
self.watchers.watch(prefix)
|
||||
self.watchers.watch(prefix.as_ref())
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn property_integer(&self, name: &CStr) -> Result<u64> { self.db.property_integer(&self.cf(), name) }
|
||||
|
||||
#[inline]
|
||||
pub fn property(&self, name: &str) -> Result<String> { self.db.property(&self.cf(), name) }
|
||||
|
||||
#[inline]
|
||||
|
@ -199,12 +217,12 @@ impl Map {
|
|||
fn cf(&self) -> impl AsColumnFamilyRef + '_ { &*self.cf }
|
||||
}
|
||||
|
||||
impl<'a> IntoIterator for &'a Map {
|
||||
type IntoIter = Box<dyn Iterator<Item = Self::Item> + Send + 'a>;
|
||||
type Item = OwnedKeyValPair;
|
||||
impl Debug for Map {
|
||||
fn fmt(&self, out: &mut fmt::Formatter<'_>) -> fmt::Result { write!(out, "Map {{name: {0}}}", self.name) }
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn into_iter(self) -> Self::IntoIter { self.iter() }
|
||||
impl Display for Map {
|
||||
fn fmt(&self, out: &mut fmt::Formatter<'_>) -> fmt::Result { write!(out, "{0}", self.name) }
|
||||
}
|
||||
|
||||
fn open(db: &Arc<Engine>, name: &str) -> Result<Arc<ColumnFamily>> {
|
||||
|
|
36
src/database/map/count.rs
Normal file
36
src/database/map/count.rs
Normal file
|
@ -0,0 +1,36 @@
|
|||
use std::{fmt::Debug, future::Future};
|
||||
|
||||
use conduit::implement;
|
||||
use futures::stream::StreamExt;
|
||||
use serde::Serialize;
|
||||
|
||||
use crate::de::Ignore;
|
||||
|
||||
/// Count the total number of entries in the map.
|
||||
#[implement(super::Map)]
|
||||
#[inline]
|
||||
pub fn count(&self) -> impl Future<Output = usize> + Send + '_ { self.keys::<Ignore>().count() }
|
||||
|
||||
/// Count the number of entries in the map starting from a lower-bound.
|
||||
///
|
||||
/// - From is a structured key
|
||||
#[implement(super::Map)]
|
||||
#[inline]
|
||||
pub fn count_from<'a, P>(&'a self, from: &P) -> impl Future<Output = usize> + Send + 'a
|
||||
where
|
||||
P: Serialize + ?Sized + Debug + 'a,
|
||||
{
|
||||
self.keys_from::<Ignore, P>(from).count()
|
||||
}
|
||||
|
||||
/// Count the number of entries in the map matching a prefix.
|
||||
///
|
||||
/// - Prefix is structured key
|
||||
#[implement(super::Map)]
|
||||
#[inline]
|
||||
pub fn count_prefix<'a, P>(&'a self, prefix: &P) -> impl Future<Output = usize> + Send + 'a
|
||||
where
|
||||
P: Serialize + ?Sized + Debug + 'a,
|
||||
{
|
||||
self.keys_prefix::<Ignore, P>(prefix).count()
|
||||
}
|
21
src/database/map/keys.rs
Normal file
21
src/database/map/keys.rs
Normal file
|
@ -0,0 +1,21 @@
|
|||
use conduit::{implement, Result};
|
||||
use futures::{Stream, StreamExt};
|
||||
use serde::Deserialize;
|
||||
|
||||
use crate::{keyval, keyval::Key, stream};
|
||||
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn keys<'a, K>(&'a self) -> impl Stream<Item = Result<Key<'_, K>>> + Send
|
||||
where
|
||||
K: Deserialize<'a> + Send,
|
||||
{
|
||||
self.raw_keys().map(keyval::result_deserialize_key::<K>)
|
||||
}
|
||||
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn raw_keys(&self) -> impl Stream<Item = Result<Key<'_>>> + Send {
|
||||
let opts = super::read_options_default();
|
||||
stream::Keys::new(&self.db, &self.cf, opts, None)
|
||||
}
|
49
src/database/map/keys_from.rs
Normal file
49
src/database/map/keys_from.rs
Normal file
|
@ -0,0 +1,49 @@
|
|||
use std::{convert::AsRef, fmt::Debug};
|
||||
|
||||
use conduit::{implement, Result};
|
||||
use futures::{Stream, StreamExt};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{keyval, keyval::Key, ser, stream};
|
||||
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn keys_from<'a, K, P>(&'a self, from: &P) -> impl Stream<Item = Result<Key<'_, K>>> + Send
|
||||
where
|
||||
P: Serialize + ?Sized + Debug,
|
||||
K: Deserialize<'a> + Send,
|
||||
{
|
||||
self.keys_raw_from(from)
|
||||
.map(keyval::result_deserialize_key::<K>)
|
||||
}
|
||||
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn keys_raw_from<P>(&self, from: &P) -> impl Stream<Item = Result<Key<'_>>> + Send
|
||||
where
|
||||
P: Serialize + ?Sized + Debug,
|
||||
{
|
||||
let key = ser::serialize_to_vec(from).expect("failed to serialize query key");
|
||||
self.raw_keys_from(&key)
|
||||
}
|
||||
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn keys_from_raw<'a, K, P>(&'a self, from: &P) -> impl Stream<Item = Result<Key<'_, K>>> + Send
|
||||
where
|
||||
P: AsRef<[u8]> + ?Sized + Debug + Sync,
|
||||
K: Deserialize<'a> + Send,
|
||||
{
|
||||
self.raw_keys_from(from)
|
||||
.map(keyval::result_deserialize_key::<K>)
|
||||
}
|
||||
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn raw_keys_from<P>(&self, from: &P) -> impl Stream<Item = Result<Key<'_>>> + Send
|
||||
where
|
||||
P: AsRef<[u8]> + ?Sized + Debug,
|
||||
{
|
||||
let opts = super::read_options_default();
|
||||
stream::Keys::new(&self.db, &self.cf, opts, Some(from.as_ref()))
|
||||
}
|
54
src/database/map/keys_prefix.rs
Normal file
54
src/database/map/keys_prefix.rs
Normal file
|
@ -0,0 +1,54 @@
|
|||
use std::{convert::AsRef, fmt::Debug};
|
||||
|
||||
use conduit::{implement, Result};
|
||||
use futures::{
|
||||
future,
|
||||
stream::{Stream, StreamExt},
|
||||
TryStreamExt,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{keyval, keyval::Key, ser};
|
||||
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn keys_prefix<'a, K, P>(&'a self, prefix: &P) -> impl Stream<Item = Result<Key<'_, K>>> + Send
|
||||
where
|
||||
P: Serialize + ?Sized + Debug,
|
||||
K: Deserialize<'a> + Send,
|
||||
{
|
||||
self.keys_raw_prefix(prefix)
|
||||
.map(keyval::result_deserialize_key::<K>)
|
||||
}
|
||||
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn keys_raw_prefix<P>(&self, prefix: &P) -> impl Stream<Item = Result<Key<'_>>> + Send
|
||||
where
|
||||
P: Serialize + ?Sized + Debug,
|
||||
{
|
||||
let key = ser::serialize_to_vec(prefix).expect("failed to serialize query key");
|
||||
self.raw_keys_from(&key)
|
||||
.try_take_while(move |k: &Key<'_>| future::ok(k.starts_with(&key)))
|
||||
}
|
||||
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn keys_prefix_raw<'a, K, P>(&'a self, prefix: &'a P) -> impl Stream<Item = Result<Key<'_, K>>> + Send + 'a
|
||||
where
|
||||
P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
|
||||
K: Deserialize<'a> + Send + 'a,
|
||||
{
|
||||
self.raw_keys_prefix(prefix)
|
||||
.map(keyval::result_deserialize_key::<K>)
|
||||
}
|
||||
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn raw_keys_prefix<'a, P>(&'a self, prefix: &'a P) -> impl Stream<Item = Result<Key<'_>>> + Send + 'a
|
||||
where
|
||||
P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
|
||||
{
|
||||
self.raw_keys_from(prefix)
|
||||
.try_take_while(|k: &Key<'_>| future::ok(k.starts_with(prefix.as_ref())))
|
||||
}
|
21
src/database/map/rev_keys.rs
Normal file
21
src/database/map/rev_keys.rs
Normal file
|
@ -0,0 +1,21 @@
|
|||
use conduit::{implement, Result};
|
||||
use futures::{Stream, StreamExt};
|
||||
use serde::Deserialize;
|
||||
|
||||
use crate::{keyval, keyval::Key, stream};
|
||||
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn rev_keys<'a, K>(&'a self) -> impl Stream<Item = Result<Key<'_, K>>> + Send
|
||||
where
|
||||
K: Deserialize<'a> + Send,
|
||||
{
|
||||
self.rev_raw_keys().map(keyval::result_deserialize_key::<K>)
|
||||
}
|
||||
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn rev_raw_keys(&self) -> impl Stream<Item = Result<Key<'_>>> + Send {
|
||||
let opts = super::read_options_default();
|
||||
stream::KeysRev::new(&self.db, &self.cf, opts, None)
|
||||
}
|
49
src/database/map/rev_keys_from.rs
Normal file
49
src/database/map/rev_keys_from.rs
Normal file
|
@ -0,0 +1,49 @@
|
|||
use std::{convert::AsRef, fmt::Debug};
|
||||
|
||||
use conduit::{implement, Result};
|
||||
use futures::{Stream, StreamExt};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{keyval, keyval::Key, ser, stream};
|
||||
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn rev_keys_from<'a, K, P>(&'a self, from: &P) -> impl Stream<Item = Result<Key<'_, K>>> + Send
|
||||
where
|
||||
P: Serialize + ?Sized + Debug,
|
||||
K: Deserialize<'a> + Send,
|
||||
{
|
||||
self.rev_keys_raw_from(from)
|
||||
.map(keyval::result_deserialize_key::<K>)
|
||||
}
|
||||
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn rev_keys_raw_from<P>(&self, from: &P) -> impl Stream<Item = Result<Key<'_>>> + Send
|
||||
where
|
||||
P: Serialize + ?Sized + Debug,
|
||||
{
|
||||
let key = ser::serialize_to_vec(from).expect("failed to serialize query key");
|
||||
self.rev_raw_keys_from(&key)
|
||||
}
|
||||
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn rev_keys_from_raw<'a, K, P>(&'a self, from: &P) -> impl Stream<Item = Result<Key<'_, K>>> + Send
|
||||
where
|
||||
P: AsRef<[u8]> + ?Sized + Debug + Sync,
|
||||
K: Deserialize<'a> + Send,
|
||||
{
|
||||
self.rev_raw_keys_from(from)
|
||||
.map(keyval::result_deserialize_key::<K>)
|
||||
}
|
||||
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn rev_raw_keys_from<P>(&self, from: &P) -> impl Stream<Item = Result<Key<'_>>> + Send
|
||||
where
|
||||
P: AsRef<[u8]> + ?Sized + Debug,
|
||||
{
|
||||
let opts = super::read_options_default();
|
||||
stream::KeysRev::new(&self.db, &self.cf, opts, Some(from.as_ref()))
|
||||
}
|
54
src/database/map/rev_keys_prefix.rs
Normal file
54
src/database/map/rev_keys_prefix.rs
Normal file
|
@ -0,0 +1,54 @@
|
|||
use std::{convert::AsRef, fmt::Debug};
|
||||
|
||||
use conduit::{implement, Result};
|
||||
use futures::{
|
||||
future,
|
||||
stream::{Stream, StreamExt},
|
||||
TryStreamExt,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{keyval, keyval::Key, ser};
|
||||
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn rev_keys_prefix<'a, K, P>(&'a self, prefix: &P) -> impl Stream<Item = Result<Key<'_, K>>> + Send
|
||||
where
|
||||
P: Serialize + ?Sized + Debug,
|
||||
K: Deserialize<'a> + Send,
|
||||
{
|
||||
self.rev_keys_raw_prefix(prefix)
|
||||
.map(keyval::result_deserialize_key::<K>)
|
||||
}
|
||||
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn rev_keys_raw_prefix<P>(&self, prefix: &P) -> impl Stream<Item = Result<Key<'_>>> + Send
|
||||
where
|
||||
P: Serialize + ?Sized + Debug,
|
||||
{
|
||||
let key = ser::serialize_to_vec(prefix).expect("failed to serialize query key");
|
||||
self.rev_raw_keys_from(&key)
|
||||
.try_take_while(move |k: &Key<'_>| future::ok(k.starts_with(&key)))
|
||||
}
|
||||
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn rev_keys_prefix_raw<'a, K, P>(&'a self, prefix: &'a P) -> impl Stream<Item = Result<Key<'_, K>>> + Send + 'a
|
||||
where
|
||||
P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
|
||||
K: Deserialize<'a> + Send + 'a,
|
||||
{
|
||||
self.rev_raw_keys_prefix(prefix)
|
||||
.map(keyval::result_deserialize_key::<K>)
|
||||
}
|
||||
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn rev_raw_keys_prefix<'a, P>(&'a self, prefix: &'a P) -> impl Stream<Item = Result<Key<'_>>> + Send + 'a
|
||||
where
|
||||
P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
|
||||
{
|
||||
self.rev_raw_keys_from(prefix)
|
||||
.try_take_while(|k: &Key<'_>| future::ok(k.starts_with(prefix.as_ref())))
|
||||
}
|
29
src/database/map/rev_stream.rs
Normal file
29
src/database/map/rev_stream.rs
Normal file
|
@ -0,0 +1,29 @@
|
|||
use conduit::{implement, Result};
|
||||
use futures::stream::{Stream, StreamExt};
|
||||
use serde::Deserialize;
|
||||
|
||||
use crate::{keyval, keyval::KeyVal, stream};
|
||||
|
||||
/// Iterate key-value entries in the map from the end.
|
||||
///
|
||||
/// - Result is deserialized
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn rev_stream<'a, K, V>(&'a self) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send
|
||||
where
|
||||
K: Deserialize<'a> + Send,
|
||||
V: Deserialize<'a> + Send,
|
||||
{
|
||||
self.rev_raw_stream()
|
||||
.map(keyval::result_deserialize::<K, V>)
|
||||
}
|
||||
|
||||
/// Iterate key-value entries in the map from the end.
|
||||
///
|
||||
/// - Result is raw
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn rev_raw_stream(&self) -> impl Stream<Item = Result<KeyVal<'_>>> + Send {
|
||||
let opts = super::read_options_default();
|
||||
stream::ItemsRev::new(&self.db, &self.cf, opts, None)
|
||||
}
|
68
src/database/map/rev_stream_from.rs
Normal file
68
src/database/map/rev_stream_from.rs
Normal file
|
@ -0,0 +1,68 @@
|
|||
use std::{convert::AsRef, fmt::Debug};
|
||||
|
||||
use conduit::{implement, Result};
|
||||
use futures::stream::{Stream, StreamExt};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{keyval, keyval::KeyVal, ser, stream};
|
||||
|
||||
/// Iterate key-value entries in the map starting from upper-bound.
|
||||
///
|
||||
/// - Query is serialized
|
||||
/// - Result is deserialized
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn rev_stream_from<'a, K, V, P>(&'a self, from: &P) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send
|
||||
where
|
||||
P: Serialize + ?Sized + Debug,
|
||||
K: Deserialize<'a> + Send,
|
||||
V: Deserialize<'a> + Send,
|
||||
{
|
||||
let key = ser::serialize_to_vec(from).expect("failed to serialize query key");
|
||||
self.rev_stream_raw_from(&key)
|
||||
.map(keyval::result_deserialize::<K, V>)
|
||||
}
|
||||
|
||||
/// Iterate key-value entries in the map starting from upper-bound.
|
||||
///
|
||||
/// - Query is serialized
|
||||
/// - Result is raw
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn rev_stream_raw_from<P>(&self, from: &P) -> impl Stream<Item = Result<KeyVal<'_>>> + Send
|
||||
where
|
||||
P: Serialize + ?Sized + Debug,
|
||||
{
|
||||
let key = ser::serialize_to_vec(from).expect("failed to serialize query key");
|
||||
self.rev_raw_stream_from(&key)
|
||||
}
|
||||
|
||||
/// Iterate key-value entries in the map starting from upper-bound.
|
||||
///
|
||||
/// - Query is raw
|
||||
/// - Result is deserialized
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn rev_stream_from_raw<'a, K, V, P>(&'a self, from: &P) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send
|
||||
where
|
||||
P: AsRef<[u8]> + ?Sized + Debug + Sync,
|
||||
K: Deserialize<'a> + Send,
|
||||
V: Deserialize<'a> + Send,
|
||||
{
|
||||
self.rev_raw_stream_from(from)
|
||||
.map(keyval::result_deserialize::<K, V>)
|
||||
}
|
||||
|
||||
/// Iterate key-value entries in the map starting from upper-bound.
|
||||
///
|
||||
/// - Query is raw
|
||||
/// - Result is raw
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn rev_raw_stream_from<P>(&self, from: &P) -> impl Stream<Item = Result<KeyVal<'_>>> + Send
|
||||
where
|
||||
P: AsRef<[u8]> + ?Sized + Debug,
|
||||
{
|
||||
let opts = super::read_options_default();
|
||||
stream::ItemsRev::new(&self.db, &self.cf, opts, Some(from.as_ref()))
|
||||
}
|
74
src/database/map/rev_stream_prefix.rs
Normal file
74
src/database/map/rev_stream_prefix.rs
Normal file
|
@ -0,0 +1,74 @@
|
|||
use std::{convert::AsRef, fmt::Debug};
|
||||
|
||||
use conduit::{implement, Result};
|
||||
use futures::{
|
||||
future,
|
||||
stream::{Stream, StreamExt},
|
||||
TryStreamExt,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{keyval, keyval::KeyVal, ser};
|
||||
|
||||
/// Iterate key-value entries in the map where the key matches a prefix.
|
||||
///
|
||||
/// - Query is serialized
|
||||
/// - Result is deserialized
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn rev_stream_prefix<'a, K, V, P>(&'a self, prefix: &P) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send
|
||||
where
|
||||
P: Serialize + ?Sized + Debug,
|
||||
K: Deserialize<'a> + Send,
|
||||
V: Deserialize<'a> + Send,
|
||||
{
|
||||
self.rev_stream_raw_prefix(prefix)
|
||||
.map(keyval::result_deserialize::<K, V>)
|
||||
}
|
||||
|
||||
/// Iterate key-value entries in the map where the key matches a prefix.
|
||||
///
|
||||
/// - Query is serialized
|
||||
/// - Result is raw
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn rev_stream_raw_prefix<P>(&self, prefix: &P) -> impl Stream<Item = Result<KeyVal<'_>>> + Send
|
||||
where
|
||||
P: Serialize + ?Sized + Debug,
|
||||
{
|
||||
let key = ser::serialize_to_vec(prefix).expect("failed to serialize query key");
|
||||
self.rev_raw_stream_from(&key)
|
||||
.try_take_while(move |(k, _): &KeyVal<'_>| future::ok(k.starts_with(&key)))
|
||||
}
|
||||
|
||||
/// Iterate key-value entries in the map where the key matches a prefix.
|
||||
///
|
||||
/// - Query is raw
|
||||
/// - Result is deserialized
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn rev_stream_prefix_raw<'a, K, V, P>(
|
||||
&'a self, prefix: &'a P,
|
||||
) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send + 'a
|
||||
where
|
||||
P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
|
||||
K: Deserialize<'a> + Send + 'a,
|
||||
V: Deserialize<'a> + Send + 'a,
|
||||
{
|
||||
self.rev_raw_stream_prefix(prefix)
|
||||
.map(keyval::result_deserialize::<K, V>)
|
||||
}
|
||||
|
||||
/// Iterate key-value entries in the map where the key matches a prefix.
|
||||
///
|
||||
/// - Query is raw
|
||||
/// - Result is raw
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn rev_raw_stream_prefix<'a, P>(&'a self, prefix: &'a P) -> impl Stream<Item = Result<KeyVal<'_>>> + Send + 'a
|
||||
where
|
||||
P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
|
||||
{
|
||||
self.rev_raw_stream_from(prefix)
|
||||
.try_take_while(|(k, _): &KeyVal<'_>| future::ok(k.starts_with(prefix.as_ref())))
|
||||
}
|
28
src/database/map/stream.rs
Normal file
28
src/database/map/stream.rs
Normal file
|
@ -0,0 +1,28 @@
|
|||
use conduit::{implement, Result};
|
||||
use futures::stream::{Stream, StreamExt};
|
||||
use serde::Deserialize;
|
||||
|
||||
use crate::{keyval, keyval::KeyVal, stream};
|
||||
|
||||
/// Iterate key-value entries in the map from the beginning.
|
||||
///
|
||||
/// - Result is deserialized
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn stream<'a, K, V>(&'a self) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send
|
||||
where
|
||||
K: Deserialize<'a> + Send,
|
||||
V: Deserialize<'a> + Send,
|
||||
{
|
||||
self.raw_stream().map(keyval::result_deserialize::<K, V>)
|
||||
}
|
||||
|
||||
/// Iterate key-value entries in the map from the beginning.
|
||||
///
|
||||
/// - Result is raw
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn raw_stream(&self) -> impl Stream<Item = Result<KeyVal<'_>>> + Send {
|
||||
let opts = super::read_options_default();
|
||||
stream::Items::new(&self.db, &self.cf, opts, None)
|
||||
}
|
68
src/database/map/stream_from.rs
Normal file
68
src/database/map/stream_from.rs
Normal file
|
@ -0,0 +1,68 @@
|
|||
use std::{convert::AsRef, fmt::Debug};
|
||||
|
||||
use conduit::{implement, Result};
|
||||
use futures::stream::{Stream, StreamExt};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{keyval, keyval::KeyVal, ser, stream};
|
||||
|
||||
/// Iterate key-value entries in the map starting from lower-bound.
|
||||
///
|
||||
/// - Query is serialized
|
||||
/// - Result is deserialized
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn stream_from<'a, K, V, P>(&'a self, from: &P) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send
|
||||
where
|
||||
P: Serialize + ?Sized + Debug,
|
||||
K: Deserialize<'a> + Send,
|
||||
V: Deserialize<'a> + Send,
|
||||
{
|
||||
let key = ser::serialize_to_vec(from).expect("failed to serialize query key");
|
||||
self.stream_raw_from(&key)
|
||||
.map(keyval::result_deserialize::<K, V>)
|
||||
}
|
||||
|
||||
/// Iterate key-value entries in the map starting from lower-bound.
|
||||
///
|
||||
/// - Query is serialized
|
||||
/// - Result is raw
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn stream_raw_from<P>(&self, from: &P) -> impl Stream<Item = Result<KeyVal<'_>>> + Send
|
||||
where
|
||||
P: Serialize + ?Sized + Debug,
|
||||
{
|
||||
let key = ser::serialize_to_vec(from).expect("failed to serialize query key");
|
||||
self.raw_stream_from(&key)
|
||||
}
|
||||
|
||||
/// Iterate key-value entries in the map starting from lower-bound.
|
||||
///
|
||||
/// - Query is raw
|
||||
/// - Result is deserialized
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn stream_from_raw<'a, K, V, P>(&'a self, from: &P) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send
|
||||
where
|
||||
P: AsRef<[u8]> + ?Sized + Debug + Sync,
|
||||
K: Deserialize<'a> + Send,
|
||||
V: Deserialize<'a> + Send,
|
||||
{
|
||||
self.raw_stream_from(from)
|
||||
.map(keyval::result_deserialize::<K, V>)
|
||||
}
|
||||
|
||||
/// Iterate key-value entries in the map starting from lower-bound.
|
||||
///
|
||||
/// - Query is raw
|
||||
/// - Result is raw
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn raw_stream_from<P>(&self, from: &P) -> impl Stream<Item = Result<KeyVal<'_>>> + Send
|
||||
where
|
||||
P: AsRef<[u8]> + ?Sized + Debug,
|
||||
{
|
||||
let opts = super::read_options_default();
|
||||
stream::Items::new(&self.db, &self.cf, opts, Some(from.as_ref()))
|
||||
}
|
74
src/database/map/stream_prefix.rs
Normal file
74
src/database/map/stream_prefix.rs
Normal file
|
@ -0,0 +1,74 @@
|
|||
use std::{convert::AsRef, fmt::Debug};
|
||||
|
||||
use conduit::{implement, Result};
|
||||
use futures::{
|
||||
future,
|
||||
stream::{Stream, StreamExt},
|
||||
TryStreamExt,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{keyval, keyval::KeyVal, ser};
|
||||
|
||||
/// Iterate key-value entries in the map where the key matches a prefix.
|
||||
///
|
||||
/// - Query is serialized
|
||||
/// - Result is deserialized
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn stream_prefix<'a, K, V, P>(&'a self, prefix: &P) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send
|
||||
where
|
||||
P: Serialize + ?Sized + Debug,
|
||||
K: Deserialize<'a> + Send,
|
||||
V: Deserialize<'a> + Send,
|
||||
{
|
||||
self.stream_raw_prefix(prefix)
|
||||
.map(keyval::result_deserialize::<K, V>)
|
||||
}
|
||||
|
||||
/// Iterate key-value entries in the map where the key matches a prefix.
|
||||
///
|
||||
/// - Query is serialized
|
||||
/// - Result is raw
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn stream_raw_prefix<P>(&self, prefix: &P) -> impl Stream<Item = Result<KeyVal<'_>>> + Send
|
||||
where
|
||||
P: Serialize + ?Sized + Debug,
|
||||
{
|
||||
let key = ser::serialize_to_vec(prefix).expect("failed to serialize query key");
|
||||
self.raw_stream_from(&key)
|
||||
.try_take_while(move |(k, _): &KeyVal<'_>| future::ok(k.starts_with(&key)))
|
||||
}
|
||||
|
||||
/// Iterate key-value entries in the map where the key matches a prefix.
|
||||
///
|
||||
/// - Query is raw
|
||||
/// - Result is deserialized
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn stream_prefix_raw<'a, K, V, P>(
|
||||
&'a self, prefix: &'a P,
|
||||
) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send + 'a
|
||||
where
|
||||
P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
|
||||
K: Deserialize<'a> + Send + 'a,
|
||||
V: Deserialize<'a> + Send + 'a,
|
||||
{
|
||||
self.raw_stream_prefix(prefix)
|
||||
.map(keyval::result_deserialize::<K, V>)
|
||||
}
|
||||
|
||||
/// Iterate key-value entries in the map where the key matches a prefix.
|
||||
///
|
||||
/// - Query is raw
|
||||
/// - Result is raw
|
||||
#[implement(super::Map)]
|
||||
#[tracing::instrument(skip(self), fields(%self), level = "trace")]
|
||||
pub fn raw_stream_prefix<'a, P>(&'a self, prefix: &'a P) -> impl Stream<Item = Result<KeyVal<'_>>> + Send + 'a
|
||||
where
|
||||
P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
|
||||
{
|
||||
self.raw_stream_from(prefix)
|
||||
.try_take_while(|(k, _): &KeyVal<'_>| future::ok(k.starts_with(prefix.as_ref())))
|
||||
}
|
|
@ -1,25 +1,35 @@
|
|||
mod cork;
|
||||
mod database;
|
||||
mod de;
|
||||
mod deserialized;
|
||||
mod engine;
|
||||
mod handle;
|
||||
mod iter;
|
||||
pub mod keyval;
|
||||
mod map;
|
||||
pub mod maps;
|
||||
mod opts;
|
||||
mod slice;
|
||||
mod ser;
|
||||
mod stream;
|
||||
mod util;
|
||||
mod watchers;
|
||||
|
||||
pub(crate) use self::{
|
||||
engine::Engine,
|
||||
util::{or_else, result},
|
||||
};
|
||||
|
||||
extern crate conduit_core as conduit;
|
||||
extern crate rust_rocksdb as rocksdb;
|
||||
|
||||
pub use database::Database;
|
||||
pub(crate) use engine::Engine;
|
||||
pub use handle::Handle;
|
||||
pub use iter::Iter;
|
||||
pub use map::Map;
|
||||
pub use slice::{Key, KeyVal, OwnedKey, OwnedKeyVal, OwnedVal, Val};
|
||||
pub(crate) use util::{or_else, result};
|
||||
pub use self::{
|
||||
database::Database,
|
||||
de::Ignore,
|
||||
deserialized::Deserialized,
|
||||
handle::Handle,
|
||||
keyval::{KeyVal, Slice},
|
||||
map::Map,
|
||||
ser::{Interfix, Separator},
|
||||
};
|
||||
|
||||
conduit::mod_ctor! {}
|
||||
conduit::mod_dtor! {}
|
||||
|
|
315
src/database/ser.rs
Normal file
315
src/database/ser.rs
Normal file
|
@ -0,0 +1,315 @@
|
|||
use std::io::Write;
|
||||
|
||||
use conduit::{err, result::DebugInspect, utils::exchange, Error, Result};
|
||||
use serde::{ser, Serialize};
|
||||
|
||||
#[inline]
|
||||
pub(crate) fn serialize_to_vec<T>(val: &T) -> Result<Vec<u8>>
|
||||
where
|
||||
T: Serialize + ?Sized,
|
||||
{
|
||||
let mut buf = Vec::with_capacity(64);
|
||||
serialize(&mut buf, val)?;
|
||||
|
||||
Ok(buf)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub(crate) fn serialize<'a, W, T>(out: &'a mut W, val: &'a T) -> Result<&'a [u8]>
|
||||
where
|
||||
W: Write + AsRef<[u8]>,
|
||||
T: Serialize + ?Sized,
|
||||
{
|
||||
let mut serializer = Serializer {
|
||||
out,
|
||||
depth: 0,
|
||||
sep: false,
|
||||
fin: false,
|
||||
};
|
||||
|
||||
val.serialize(&mut serializer)
|
||||
.map_err(|error| err!(SerdeSer("{error}")))
|
||||
.debug_inspect(|()| {
|
||||
debug_assert_eq!(serializer.depth, 0, "Serialization completed at non-zero recursion level");
|
||||
})?;
|
||||
|
||||
Ok((*out).as_ref())
|
||||
}
|
||||
|
||||
pub(crate) struct Serializer<'a, W: Write> {
|
||||
out: &'a mut W,
|
||||
depth: u32,
|
||||
sep: bool,
|
||||
fin: bool,
|
||||
}
|
||||
|
||||
/// Directive to force separator serialization specifically for prefix keying
|
||||
/// use. This is a quirk of the database schema and prefix iterations.
|
||||
#[derive(Debug, Serialize)]
|
||||
pub struct Interfix;
|
||||
|
||||
/// Directive to force separator serialization. Separators are usually
|
||||
/// serialized automatically.
|
||||
#[derive(Debug, Serialize)]
|
||||
pub struct Separator;
|
||||
|
||||
impl<W: Write> Serializer<'_, W> {
|
||||
const SEP: &'static [u8] = b"\xFF";
|
||||
|
||||
fn sequence_start(&mut self) {
|
||||
debug_assert!(!self.is_finalized(), "Sequence start with finalization set");
|
||||
debug_assert!(!self.sep, "Sequence start with separator set");
|
||||
if cfg!(debug_assertions) {
|
||||
self.depth = self.depth.saturating_add(1);
|
||||
}
|
||||
}
|
||||
|
||||
fn sequence_end(&mut self) {
|
||||
self.sep = false;
|
||||
if cfg!(debug_assertions) {
|
||||
self.depth = self.depth.saturating_sub(1);
|
||||
}
|
||||
}
|
||||
|
||||
fn record_start(&mut self) -> Result<()> {
|
||||
debug_assert!(!self.is_finalized(), "Starting a record after serialization finalized");
|
||||
exchange(&mut self.sep, true)
|
||||
.then(|| self.separator())
|
||||
.unwrap_or(Ok(()))
|
||||
}
|
||||
|
||||
fn separator(&mut self) -> Result<()> {
|
||||
debug_assert!(!self.is_finalized(), "Writing a separator after serialization finalized");
|
||||
self.out.write_all(Self::SEP).map_err(Into::into)
|
||||
}
|
||||
|
||||
fn set_finalized(&mut self) {
|
||||
debug_assert!(!self.is_finalized(), "Finalization already set");
|
||||
if cfg!(debug_assertions) {
|
||||
self.fin = true;
|
||||
}
|
||||
}
|
||||
|
||||
fn is_finalized(&self) -> bool { self.fin }
|
||||
}
|
||||
|
||||
impl<W: Write> ser::Serializer for &mut Serializer<'_, W> {
|
||||
type Error = Error;
|
||||
type Ok = ();
|
||||
type SerializeMap = Self;
|
||||
type SerializeSeq = Self;
|
||||
type SerializeStruct = Self;
|
||||
type SerializeStructVariant = Self;
|
||||
type SerializeTuple = Self;
|
||||
type SerializeTupleStruct = Self;
|
||||
type SerializeTupleVariant = Self;
|
||||
|
||||
fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap> {
|
||||
unimplemented!("serialize Map not implemented")
|
||||
}
|
||||
|
||||
fn serialize_seq(self, _len: Option<usize>) -> Result<Self::SerializeSeq> {
|
||||
self.sequence_start();
|
||||
self.record_start()?;
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
fn serialize_tuple(self, _len: usize) -> Result<Self::SerializeTuple> {
|
||||
self.sequence_start();
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
fn serialize_tuple_struct(self, _name: &'static str, _len: usize) -> Result<Self::SerializeTupleStruct> {
|
||||
self.sequence_start();
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
fn serialize_tuple_variant(
|
||||
self, _name: &'static str, _idx: u32, _var: &'static str, _len: usize,
|
||||
) -> Result<Self::SerializeTupleVariant> {
|
||||
self.sequence_start();
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
fn serialize_struct(self, _name: &'static str, _len: usize) -> Result<Self::SerializeStruct> {
|
||||
self.sequence_start();
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
fn serialize_struct_variant(
|
||||
self, _name: &'static str, _idx: u32, _var: &'static str, _len: usize,
|
||||
) -> Result<Self::SerializeStructVariant> {
|
||||
self.sequence_start();
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
fn serialize_newtype_struct<T: Serialize + ?Sized>(self, _name: &'static str, _value: &T) -> Result<Self::Ok> {
|
||||
unimplemented!("serialize New Type Struct not implemented")
|
||||
}
|
||||
|
||||
fn serialize_newtype_variant<T: Serialize + ?Sized>(
|
||||
self, _name: &'static str, _idx: u32, _var: &'static str, _value: &T,
|
||||
) -> Result<Self::Ok> {
|
||||
unimplemented!("serialize New Type Variant not implemented")
|
||||
}
|
||||
|
||||
/// Unit structs act as in-band serialization directives rather than
/// data: "Interfix" marks the key finalized and "Separator" emits a
/// field separator. Any other unit struct is a programming error.
fn serialize_unit_struct(self, name: &'static str) -> Result<Self::Ok> {
	if name == "Interfix" {
		self.set_finalized();
	} else if name == "Separator" {
		self.separator()?;
	} else {
		unimplemented!("Unrecognized serialization directive: {name:?}");
	}

	Ok(())
}
|
||||
|
||||
/// Unit enum variants are not supported; panics if reached.
fn serialize_unit_variant(self, _name: &'static str, _idx: u32, _var: &'static str) -> Result<Self::Ok> {
	unimplemented!("serialize Unit Variant not implemented")
}

/// `Some(T)` serializes transparently as its inner value.
fn serialize_some<T: Serialize + ?Sized>(self, val: &T) -> Result<Self::Ok> { val.serialize(self) }

/// `None` contributes no bytes to the key.
fn serialize_none(self) -> Result<Self::Ok> { Ok(()) }

/// A char is encoded as its UTF-8 bytes (1 to 4 of them).
fn serialize_char(self, v: char) -> Result<Self::Ok> {
	let mut buf: [u8; 4] = [0; 4];
	self.serialize_str(v.encode_utf8(&mut buf))
}

/// Strings are written as raw UTF-8 bytes, unquoted and unescaped.
fn serialize_str(self, v: &str) -> Result<Self::Ok> { self.serialize_bytes(v.as_bytes()) }

/// Bytes are written verbatim to the output; I/O failures map to Error::Io.
fn serialize_bytes(self, v: &[u8]) -> Result<Self::Ok> { self.out.write_all(v).map_err(Error::Io) }
|
||||
|
||||
// Floating point values are not supported as key components.
fn serialize_f64(self, _v: f64) -> Result<Self::Ok> { unimplemented!("serialize f64 not implemented") }

fn serialize_f32(self, _v: f32) -> Result<Self::Ok> { unimplemented!("serialize f32 not implemented") }

/// i64 is written as 8 fixed-width big-endian bytes.
fn serialize_i64(self, v: i64) -> Result<Self::Ok> { self.out.write_all(&v.to_be_bytes()).map_err(Error::Io) }

// Narrower signed integers are not supported; widen to i64 first.
fn serialize_i32(self, _v: i32) -> Result<Self::Ok> { unimplemented!("serialize i32 not implemented") }

fn serialize_i16(self, _v: i16) -> Result<Self::Ok> { unimplemented!("serialize i16 not implemented") }

fn serialize_i8(self, _v: i8) -> Result<Self::Ok> { unimplemented!("serialize i8 not implemented") }

/// u64 is written as 8 fixed-width big-endian bytes.
fn serialize_u64(self, v: u64) -> Result<Self::Ok> { self.out.write_all(&v.to_be_bytes()).map_err(Error::Io) }

// Narrower unsigned integers (other than u8) are not supported.
fn serialize_u32(self, _v: u32) -> Result<Self::Ok> { unimplemented!("serialize u32 not implemented") }

fn serialize_u16(self, _v: u16) -> Result<Self::Ok> { unimplemented!("serialize u16 not implemented") }

/// A u8 is written as a single raw byte.
fn serialize_u8(self, v: u8) -> Result<Self::Ok> { self.out.write_all(&[v]).map_err(Error::Io) }

fn serialize_bool(self, _v: bool) -> Result<Self::Ok> { unimplemented!("serialize bool not implemented") }

fn serialize_unit(self) -> Result<Self::Ok> { unimplemented!("serialize unit not implemented") }
|
||||
}
|
||||
|
||||
/// Map serialization state; entries are unsupported (serialize_map
/// panics before reaching here), only `end` is meaningful.
impl<W: Write> ser::SerializeMap for &mut Serializer<'_, W> {
	type Error = Error;
	type Ok = ();

	/// Map keys are unsupported; panics if reached.
	fn serialize_key<T: Serialize + ?Sized>(&mut self, _key: &T) -> Result<Self::Ok> {
		unimplemented!("serialize Map Key not implemented")
	}

	/// Map values are unsupported; panics if reached.
	fn serialize_value<T: Serialize + ?Sized>(&mut self, _val: &T) -> Result<Self::Ok> {
		unimplemented!("serialize Map Val not implemented")
	}

	/// Close the sequence segment.
	fn end(self) -> Result<Self::Ok> {
		self.sequence_end();
		Ok(())
	}
}
|
||||
|
||||
/// Sequence elements delegate straight to the underlying serializer;
/// no per-element record delimiter is emitted here (serialize_seq
/// already started the record).
impl<W: Write> ser::SerializeSeq for &mut Serializer<'_, W> {
	type Error = Error;
	type Ok = ();

	fn serialize_element<T: Serialize + ?Sized>(&mut self, val: &T) -> Result<Self::Ok> { val.serialize(&mut **self) }

	/// Close the sequence segment opened by serialize_seq.
	fn end(self) -> Result<Self::Ok> {
		self.sequence_end();
		Ok(())
	}
}
|
||||
|
||||
impl<W: Write> ser::SerializeStruct for &mut Serializer<'_, W> {
|
||||
type Error = Error;
|
||||
type Ok = ();
|
||||
|
||||
fn serialize_field<T: Serialize + ?Sized>(&mut self, _key: &'static str, val: &T) -> Result<Self::Ok> {
|
||||
self.record_start()?;
|
||||
val.serialize(&mut **self)
|
||||
}
|
||||
|
||||
fn end(self) -> Result<Self::Ok> {
|
||||
self.sequence_end();
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Struct-variant fields serialize as delimited records; the field
/// name is discarded, only the value contributes bytes.
impl<W: Write> ser::SerializeStructVariant for &mut Serializer<'_, W> {
	type Error = Error;
	type Ok = ();

	fn serialize_field<T: Serialize + ?Sized>(&mut self, _key: &'static str, val: &T) -> Result<Self::Ok> {
		self.record_start()?;
		val.serialize(&mut **self)
	}

	/// Close the sequence segment.
	fn end(self) -> Result<Self::Ok> {
		self.sequence_end();
		Ok(())
	}
}
|
||||
|
||||
/// Tuple elements are each emitted as their own delimited record.
impl<W: Write> ser::SerializeTuple for &mut Serializer<'_, W> {
	type Error = Error;
	type Ok = ();

	fn serialize_element<T: Serialize + ?Sized>(&mut self, val: &T) -> Result<Self::Ok> {
		self.record_start()?;
		val.serialize(&mut **self)
	}

	/// Close the sequence segment opened by serialize_tuple.
	fn end(self) -> Result<Self::Ok> {
		self.sequence_end();
		Ok(())
	}
}
|
||||
|
||||
/// Tuple-struct fields are each emitted as their own delimited record.
impl<W: Write> ser::SerializeTupleStruct for &mut Serializer<'_, W> {
	type Error = Error;
	type Ok = ();

	fn serialize_field<T: Serialize + ?Sized>(&mut self, val: &T) -> Result<Self::Ok> {
		self.record_start()?;
		val.serialize(&mut **self)
	}

	/// Close the sequence segment.
	fn end(self) -> Result<Self::Ok> {
		self.sequence_end();
		Ok(())
	}
}
|
||||
|
||||
/// Tuple-variant fields are each emitted as their own delimited record.
impl<W: Write> ser::SerializeTupleVariant for &mut Serializer<'_, W> {
	type Error = Error;
	type Ok = ();

	fn serialize_field<T: Serialize + ?Sized>(&mut self, val: &T) -> Result<Self::Ok> {
		self.record_start()?;
		val.serialize(&mut **self)
	}

	/// Close the sequence segment.
	fn end(self) -> Result<Self::Ok> {
		self.sequence_end();
		Ok(())
	}
}
|
|
@ -1,57 +0,0 @@
|
|||
/// Owned key-value pair in struct form.
pub struct OwnedKeyVal(pub OwnedKey, pub OwnedVal);
/// Owned key-value pair in tuple form.
pub(crate) type OwnedKeyValPair = (OwnedKey, OwnedVal);
/// Owned value buffer.
pub type OwnedVal = Vec<Byte>;
/// Owned key buffer.
pub type OwnedKey = Vec<Byte>;

/// Borrowed key-value pair in struct form.
pub struct KeyVal<'item>(pub &'item Key, pub &'item Val);
/// Borrowed key-value pair in tuple form.
pub(crate) type KeyValPair<'item> = (&'item Key, &'item Val);
/// Borrowed value slice.
pub type Val = [Byte];
/// Borrowed key slice.
pub type Key = [Byte];

// Database keys and values are raw byte strings.
pub(crate) type Byte = u8;
|
||||
|
||||
impl OwnedKeyVal {
|
||||
#[must_use]
|
||||
pub fn as_slice(&self) -> KeyVal<'_> { KeyVal(&self.0, &self.1) }
|
||||
|
||||
#[must_use]
|
||||
pub fn to_tuple(self) -> OwnedKeyValPair { (self.0, self.1) }
|
||||
}
|
||||
|
||||
/// Move an owned pair tuple into the struct form.
impl From<OwnedKeyValPair> for OwnedKeyVal {
	fn from((key, val): OwnedKeyValPair) -> Self { Self(key, val) }
}

/// Deep-copy a borrowed struct pair into owned buffers.
impl From<&KeyVal<'_>> for OwnedKeyVal {
	#[inline]
	fn from(slice: &KeyVal<'_>) -> Self { slice.to_owned() }
}

/// Deep-copy a borrowed pair tuple into owned buffers.
impl From<KeyValPair<'_>> for OwnedKeyVal {
	fn from((key, val): KeyValPair<'_>) -> Self { Self(Vec::from(key), Vec::from(val)) }
}

/// Move the struct form into an owned pair tuple.
impl From<OwnedKeyVal> for OwnedKeyValPair {
	fn from(val: OwnedKeyVal) -> Self { val.to_tuple() }
}
|
||||
|
||||
impl KeyVal<'_> {
	/// Deep-copy both slices into owned buffers.
	#[inline]
	#[must_use]
	pub fn to_owned(&self) -> OwnedKeyVal { OwnedKeyVal::from(self) }

	/// View as a borrowed `(key, value)` tuple.
	#[must_use]
	pub fn as_tuple(&self) -> KeyValPair<'_> { (self.0, self.1) }
}
|
||||
|
||||
/// Borrow an owned struct pair as slices.
impl<'a> From<&'a OwnedKeyVal> for KeyVal<'a> {
	fn from(owned: &'a OwnedKeyVal) -> Self { owned.as_slice() }
}

/// Borrow an owned pair tuple as slices.
impl<'a> From<&'a OwnedKeyValPair> for KeyVal<'a> {
	fn from((key, val): &'a OwnedKeyValPair) -> Self { KeyVal(key.as_slice(), val.as_slice()) }
}

/// Wrap a borrowed pair tuple in the struct form.
impl<'a> From<KeyValPair<'a>> for KeyVal<'a> {
	fn from((key, val): KeyValPair<'a>) -> Self { KeyVal(key, val) }
}
|
122
src/database/stream.rs
Normal file
122
src/database/stream.rs
Normal file
|
@ -0,0 +1,122 @@
|
|||
mod items;
|
||||
mod items_rev;
|
||||
mod keys;
|
||||
mod keys_rev;
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use conduit::{utils::exchange, Error, Result};
|
||||
use rocksdb::{ColumnFamily, DBRawIteratorWithThreadMode, ReadOptions};
|
||||
|
||||
pub(crate) use self::{items::Items, items_rev::ItemsRev, keys::Keys, keys_rev::KeysRev};
|
||||
use crate::{
|
||||
engine::Db,
|
||||
keyval::{Key, KeyVal, Val},
|
||||
util::map_err,
|
||||
Engine, Slice,
|
||||
};
|
||||
|
||||
/// Shared cursor state driven by the directional stream adaptors.
struct State<'a> {
	// Raw rocksdb iterator over one column family.
	inner: Inner<'a>,
	// Whether an explicit seek to a starting key was performed at init.
	seek: bool,
	// True until the first advance; distinguishes initial positioning
	// from stepping the cursor (see seek_fwd/seek_rev).
	init: bool,
}
|
||||
|
||||
trait Cursor<'a, T> {
|
||||
fn state(&self) -> &State<'a>;
|
||||
|
||||
fn fetch(&self) -> Option<T>;
|
||||
|
||||
fn seek(&mut self);
|
||||
|
||||
fn get(&self) -> Option<Result<T>> {
|
||||
self.fetch()
|
||||
.map(Ok)
|
||||
.or_else(|| self.state().status().map(Err))
|
||||
}
|
||||
|
||||
fn seek_and_get(&mut self) -> Option<Result<T>> {
|
||||
self.seek();
|
||||
self.get()
|
||||
}
|
||||
}
|
||||
|
||||
type Inner<'a> = DBRawIteratorWithThreadMode<'a, Db>;
|
||||
type From<'a> = Option<Key<'a>>;
|
||||
|
||||
impl<'a> State<'a> {
	/// Construct unpositioned cursor state over column family `cf`.
	fn new(db: &'a Arc<Engine>, cf: &'a Arc<ColumnFamily>, opts: ReadOptions) -> Self {
		Self {
			inner: db.db.raw_iterator_cf_opt(&**cf, opts),
			init: true,
			seek: false,
		}
	}

	/// Position at `from` (if given) for forward iteration; otherwise
	/// leave unpositioned so seek_fwd starts at the first entry.
	fn init_fwd(mut self, from: From<'_>) -> Self {
		if let Some(key) = from {
			self.inner.seek(key);
			self.seek = true;
		}

		self
	}

	/// Position at or before `from` (if given) for reverse iteration;
	/// otherwise seek_rev will start at the last entry.
	fn init_rev(mut self, from: From<'_>) -> Self {
		if let Some(key) = from {
			self.inner.seek_for_prev(key);
			self.seek = true;
		}

		self
	}

	/// Advance forward: the first call only positions the iterator
	/// (seek_to_first unless init already seeked); later calls step.
	fn seek_fwd(&mut self) {
		if !exchange(&mut self.init, false) {
			self.inner.next();
		} else if !self.seek {
			self.inner.seek_to_first();
		}
	}

	/// Advance backward: mirror of seek_fwd with prev()/seek_to_last().
	fn seek_rev(&mut self) {
		if !exchange(&mut self.init, false) {
			self.inner.prev();
		} else if !self.seek {
			self.inner.seek_to_last();
		}
	}

	/// Current key, if the iterator points at an item.
	fn fetch_key(&self) -> Option<Key<'_>> { self.inner.key().map(Key::from) }

	/// Current value, if the iterator points at an item.
	fn _fetch_val(&self) -> Option<Val<'_>> { self.inner.value().map(Val::from) }

	/// Current key-value pair, if the iterator points at an item.
	fn fetch(&self) -> Option<KeyVal<'_>> { self.inner.item().map(KeyVal::from) }

	/// Error that invalidated the iterator, if any.
	fn status(&self) -> Option<Error> { self.inner.status().map_err(map_err).err() }

	/// Whether the iterator currently points at an item.
	fn valid(&self) -> bool { self.inner.valid() }
}
|
||||
|
||||
fn keyval_longevity<'a, 'b: 'a>(item: KeyVal<'a>) -> KeyVal<'b> {
|
||||
(slice_longevity::<'a, 'b>(item.0), slice_longevity::<'a, 'b>(item.1))
|
||||
}
|
||||
|
||||
/// Unsafely extend a cursor-borrowed slice's lifetime from `'a` to the
/// longer `'b`; sound only under the invariant documented below.
fn slice_longevity<'a, 'b: 'a>(item: &'a Slice) -> &'b Slice {
	// SAFETY: The lifetime of the data returned by the rocksdb cursor is only valid
	// between each movement of the cursor. It is hereby unsafely extended to match
	// the lifetime of the cursor itself. This is due to the limitation of the
	// Stream trait where the Item is incapable of conveying a lifetime; this is due
	// to GAT's being unstable during its development. This unsafety can be removed
	// as soon as this limitation is addressed by an upcoming version.
	//
	// We have done our best to mitigate the implications of this in conjunction
	// with the deserialization API such that borrows being held across movements of
	// the cursor do not happen accidentally. The compiler will still error when
	// values herein produced try to leave a closure passed to a StreamExt API. But
	// escapes can happen if you explicitly and intentionally attempt it, and there
	// will be no compiler error or warning. This is primarily the case with
	// calling collect() without a preceding map(ToOwned::to_owned). A collection
	// of references here is illegal, but this will not be enforced by the compiler.
	unsafe { std::mem::transmute(item) }
}
|
44
src/database/stream/items.rs
Normal file
44
src/database/stream/items.rs
Normal file
|
@ -0,0 +1,44 @@
|
|||
use std::{pin::Pin, sync::Arc};
|
||||
|
||||
use conduit::Result;
|
||||
use futures::{
|
||||
stream::FusedStream,
|
||||
task::{Context, Poll},
|
||||
Stream,
|
||||
};
|
||||
use rocksdb::{ColumnFamily, ReadOptions};
|
||||
|
||||
use super::{keyval_longevity, Cursor, From, State};
|
||||
use crate::{keyval::KeyVal, Engine};
|
||||
|
||||
pub(crate) struct Items<'a> {
|
||||
state: State<'a>,
|
||||
}
|
||||
|
||||
impl<'a> Items<'a> {
|
||||
pub(crate) fn new(db: &'a Arc<Engine>, cf: &'a Arc<ColumnFamily>, opts: ReadOptions, from: From<'_>) -> Self {
|
||||
Self {
|
||||
state: State::new(db, cf, opts).init_fwd(from),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Cursor<'a, KeyVal<'a>> for Items<'a> {
|
||||
fn state(&self) -> &State<'a> { &self.state }
|
||||
|
||||
fn fetch(&self) -> Option<KeyVal<'a>> { self.state.fetch().map(keyval_longevity) }
|
||||
|
||||
fn seek(&mut self) { self.state.seek_fwd(); }
|
||||
}
|
||||
|
||||
impl<'a> Stream for Items<'a> {
|
||||
type Item = Result<KeyVal<'a>>;
|
||||
|
||||
fn poll_next(mut self: Pin<&mut Self>, _ctx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
|
||||
Poll::Ready(self.seek_and_get())
|
||||
}
|
||||
}
|
||||
|
||||
impl FusedStream for Items<'_> {
|
||||
fn is_terminated(&self) -> bool { !self.state.init && !self.state.valid() }
|
||||
}
|
44
src/database/stream/items_rev.rs
Normal file
44
src/database/stream/items_rev.rs
Normal file
|
@ -0,0 +1,44 @@
|
|||
use std::{pin::Pin, sync::Arc};
|
||||
|
||||
use conduit::Result;
|
||||
use futures::{
|
||||
stream::FusedStream,
|
||||
task::{Context, Poll},
|
||||
Stream,
|
||||
};
|
||||
use rocksdb::{ColumnFamily, ReadOptions};
|
||||
|
||||
use super::{keyval_longevity, Cursor, From, State};
|
||||
use crate::{keyval::KeyVal, Engine};
|
||||
|
||||
/// Reverse key-value stream over one column family, walking backward
/// from `from` (or the last entry).
pub(crate) struct ItemsRev<'a> {
	state: State<'a>,
}

impl<'a> ItemsRev<'a> {
	/// Open a reverse cursor, positioned at-or-before `from` when given.
	pub(crate) fn new(db: &'a Arc<Engine>, cf: &'a Arc<ColumnFamily>, opts: ReadOptions, from: From<'_>) -> Self {
		Self {
			state: State::new(db, cf, opts).init_rev(from),
		}
	}
}

impl<'a> Cursor<'a, KeyVal<'a>> for ItemsRev<'a> {
	fn state(&self) -> &State<'a> { &self.state }

	// Item lifetimes are unsafely extended; see keyval_longevity().
	fn fetch(&self) -> Option<KeyVal<'a>> { self.state.fetch().map(keyval_longevity) }

	fn seek(&mut self) { self.state.seek_rev(); }
}

impl<'a> Stream for ItemsRev<'a> {
	type Item = Result<KeyVal<'a>>;

	// The rocksdb cursor is synchronous, so this never returns Pending.
	fn poll_next(mut self: Pin<&mut Self>, _ctx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
		Poll::Ready(self.seek_and_get())
	}
}

impl FusedStream for ItemsRev<'_> {
	/// Terminated once iteration has begun and the cursor is invalid.
	fn is_terminated(&self) -> bool { !self.state.init && !self.state.valid() }
}
|
44
src/database/stream/keys.rs
Normal file
44
src/database/stream/keys.rs
Normal file
|
@ -0,0 +1,44 @@
|
|||
use std::{pin::Pin, sync::Arc};
|
||||
|
||||
use conduit::Result;
|
||||
use futures::{
|
||||
stream::FusedStream,
|
||||
task::{Context, Poll},
|
||||
Stream,
|
||||
};
|
||||
use rocksdb::{ColumnFamily, ReadOptions};
|
||||
|
||||
use super::{slice_longevity, Cursor, From, State};
|
||||
use crate::{keyval::Key, Engine};
|
||||
|
||||
/// Forward key-only stream over one column family, walking from `from`
/// (or the first entry) toward the end.
pub(crate) struct Keys<'a> {
	state: State<'a>,
}

impl<'a> Keys<'a> {
	/// Open a forward cursor, positioned at `from` when given.
	pub(crate) fn new(db: &'a Arc<Engine>, cf: &'a Arc<ColumnFamily>, opts: ReadOptions, from: From<'_>) -> Self {
		Self {
			state: State::new(db, cf, opts).init_fwd(from),
		}
	}
}

impl<'a> Cursor<'a, Key<'a>> for Keys<'a> {
	fn state(&self) -> &State<'a> { &self.state }

	// Key lifetimes are unsafely extended; see slice_longevity().
	fn fetch(&self) -> Option<Key<'a>> { self.state.fetch_key().map(slice_longevity) }

	fn seek(&mut self) { self.state.seek_fwd(); }
}

impl<'a> Stream for Keys<'a> {
	type Item = Result<Key<'a>>;

	// The rocksdb cursor is synchronous, so this never returns Pending.
	fn poll_next(mut self: Pin<&mut Self>, _ctx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
		Poll::Ready(self.seek_and_get())
	}
}

impl FusedStream for Keys<'_> {
	/// Terminated once iteration has begun and the cursor is invalid.
	fn is_terminated(&self) -> bool { !self.state.init && !self.state.valid() }
}
|
44
src/database/stream/keys_rev.rs
Normal file
44
src/database/stream/keys_rev.rs
Normal file
|
@ -0,0 +1,44 @@
|
|||
use std::{pin::Pin, sync::Arc};
|
||||
|
||||
use conduit::Result;
|
||||
use futures::{
|
||||
stream::FusedStream,
|
||||
task::{Context, Poll},
|
||||
Stream,
|
||||
};
|
||||
use rocksdb::{ColumnFamily, ReadOptions};
|
||||
|
||||
use super::{slice_longevity, Cursor, From, State};
|
||||
use crate::{keyval::Key, Engine};
|
||||
|
||||
/// Reverse key-only stream over one column family, walking backward
/// from `from` (or the last entry).
pub(crate) struct KeysRev<'a> {
	state: State<'a>,
}

impl<'a> KeysRev<'a> {
	/// Open a reverse cursor, positioned at-or-before `from` when given.
	pub(crate) fn new(db: &'a Arc<Engine>, cf: &'a Arc<ColumnFamily>, opts: ReadOptions, from: From<'_>) -> Self {
		Self {
			state: State::new(db, cf, opts).init_rev(from),
		}
	}
}

impl<'a> Cursor<'a, Key<'a>> for KeysRev<'a> {
	fn state(&self) -> &State<'a> { &self.state }

	// Key lifetimes are unsafely extended; see slice_longevity().
	fn fetch(&self) -> Option<Key<'a>> { self.state.fetch_key().map(slice_longevity) }

	fn seek(&mut self) { self.state.seek_rev(); }
}

impl<'a> Stream for KeysRev<'a> {
	type Item = Result<Key<'a>>;

	// The rocksdb cursor is synchronous, so this never returns Pending.
	fn poll_next(mut self: Pin<&mut Self>, _ctx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
		Poll::Ready(self.seek_and_get())
	}
}

impl FusedStream for KeysRev<'_> {
	/// Terminated once iteration has begun and the cursor is invalid.
	fn is_terminated(&self) -> bool { !self.state.init && !self.state.valid() }
}
|
|
@ -1,4 +1,16 @@
|
|||
use conduit::{err, Result};
|
||||
use rocksdb::{Direction, IteratorMode};
|
||||
|
||||
#[inline]
|
||||
pub(crate) fn _into_direction(mode: &IteratorMode<'_>) -> Direction {
|
||||
use Direction::{Forward, Reverse};
|
||||
use IteratorMode::{End, From, Start};
|
||||
|
||||
match mode {
|
||||
Start | From(_, Forward) => Forward,
|
||||
End | From(_, Reverse) => Reverse,
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub(crate) fn result<T>(r: std::result::Result<T, rocksdb::Error>) -> Result<T, conduit::Error> {
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue