apply new rustfmt.toml changes, fix some clippy lints
Signed-off-by: strawberry <strawberry@puppygock.gay>
parent 0317cc8cc5
commit 77e0b76408

296 changed files with 7147 additions and 4300 deletions
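The hunks below are mechanical reformatting: long signatures and string literals are rewrapped at a narrower maximum width, small struct literals and one-expression bodies are collapsed onto a single line, and every match arm gains a leading `|`. A rustfmt.toml along the following lines would produce this output; it is a sketch inferred from the diff, not the actual file from the commit, and the exact values are assumptions:

# Hypothetical reconstruction of the relevant rustfmt.toml settings; the real file may differ.
max_width = 100                      # long lines are rewrapped at 100 columns
use_small_heuristics = "Max"         # short struct literals and bodies collapse onto one line
match_arm_leading_pipes = "Always"   # every match arm is prefixed with `|`
format_strings = true                # long string literals are split with `\` continuations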
@@ -26,11 +26,7 @@ impl Cork {
 	#[inline]
 	pub(super) fn new(db: &Arc<Engine>, flush: bool, sync: bool) -> Self {
 		db.cork();
-		Self {
-			db: db.clone(),
-			flush,
-			sync,
-		}
+		Self { db: db.clone(), flush, sync }
 	}
 }
@@ -17,10 +17,7 @@ impl Database {
 	/// Load an existing database or create a new one.
 	pub async fn open(server: &Arc<Server>) -> Result<Arc<Self>> {
 		let db = Engine::open(server).await?;
-		Ok(Arc::new(Self {
-			db: db.clone(),
-			maps: maps::open(&db)?,
-		}))
+		Ok(Arc::new(Self { db: db.clone(), maps: maps::open(&db)? }))
 	}

 	#[inline]
@@ -31,7 +28,9 @@ impl Database {
 	}

 	#[inline]
-	pub fn iter(&self) -> impl Iterator<Item = (&MapsKey, &MapsVal)> + Send + '_ { self.maps.iter() }
+	pub fn iter(&self) -> impl Iterator<Item = (&MapsKey, &MapsVal)> + Send + '_ {
+		self.maps.iter()
+	}

 	#[inline]
 	pub fn keys(&self) -> impl Iterator<Item = &MapsKey> + Send + '_ { self.maps.keys() }
@@ -13,11 +13,7 @@ pub(crate) fn from_slice<'a, T>(buf: &'a [u8]) -> Result<T>
 where
 	T: Deserialize<'a>,
 {
-	let mut deserializer = Deserializer {
-		buf,
-		pos: 0,
-		seq: false,
-	};
+	let mut deserializer = Deserializer { buf, pos: 0, seq: false };

 	T::deserialize(&mut deserializer).debug_inspect(|_| {
 		deserializer
@@ -169,7 +165,12 @@ impl<'a, 'de: 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
 		visitor.visit_seq(self)
 	}

-	fn deserialize_tuple_struct<V>(self, _name: &'static str, _len: usize, visitor: V) -> Result<V::Value>
+	fn deserialize_tuple_struct<V>(
+		self,
+		_name: &'static str,
+		_len: usize,
+		visitor: V,
+	) -> Result<V::Value>
 	where
 		V: Visitor<'de>,
 	{
@@ -186,7 +187,12 @@ impl<'a, 'de: 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
 		d.deserialize_map(visitor).map_err(Into::into)
 	}

-	fn deserialize_struct<V>(self, name: &'static str, fields: &'static [&'static str], visitor: V) -> Result<V::Value>
+	fn deserialize_struct<V>(
+		self,
+		name: &'static str,
+		fields: &'static [&'static str],
+		visitor: V,
+	) -> Result<V::Value>
 	where
 		V: Visitor<'de>,
 	{
@@ -201,9 +207,9 @@ impl<'a, 'de: 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
 		V: Visitor<'de>,
 	{
 		match name {
-			"Ignore" => self.record_ignore(),
-			"IgnoreAll" => self.record_ignore_all(),
-			_ => unhandled!("Unrecognized deserialization Directive {name:?}"),
+			| "Ignore" => self.record_ignore(),
+			| "IgnoreAll" => self.record_ignore_all(),
+			| _ => unhandled!("Unrecognized deserialization Directive {name:?}"),
 		};

 		visitor.visit_unit()
@@ -214,13 +220,16 @@ impl<'a, 'de: 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
 		V: Visitor<'de>,
 	{
 		match name {
-			"$serde_json::private::RawValue" => visitor.visit_map(self),
-			_ => visitor.visit_newtype_struct(self),
+			| "$serde_json::private::RawValue" => visitor.visit_map(self),
+			| _ => visitor.visit_newtype_struct(self),
 		}
 	}

 	fn deserialize_enum<V>(
-		self, _name: &'static str, _variants: &'static [&'static str], _visitor: V,
+		self,
+		_name: &'static str,
+		_variants: &'static [&'static str],
+		_visitor: V,
 	) -> Result<V::Value>
 	where
 		V: Visitor<'de>,
@@ -260,7 +269,10 @@ impl<'a, 'de: 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
 	}

 	fn deserialize_u8<V: Visitor<'de>>(self, _visitor: V) -> Result<V::Value> {
-		unhandled!("deserialize u8 not implemented; try dereferencing the Handle for [u8] access instead")
+		unhandled!(
+			"deserialize u8 not implemented; try dereferencing the Handle for [u8] access \
+			 instead"
+		)
 	}

 	fn deserialize_u16<V: Visitor<'de>>(self, _visitor: V) -> Result<V::Value> {
@@ -338,8 +350,8 @@ impl<'a, 'de: 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
 		);

 		match self.record_peek_byte() {
-			Some(b'{') => self.deserialize_map(visitor),
-			_ => self.deserialize_str(visitor),
+			| Some(b'{') => self.deserialize_map(visitor),
+			| _ => self.deserialize_str(visitor),
 		}
 	}
 }
@@ -6,12 +6,14 @@ use std::{
 	sync::{atomic::AtomicU32, Arc, Mutex, RwLock},
 };

-use conduwuit::{debug, error, info, utils::time::rfc2822_from_seconds, warn, Err, Result, Server};
+use conduwuit::{
+	debug, error, info, utils::time::rfc2822_from_seconds, warn, Err, Result, Server,
+};
 use rocksdb::{
 	backup::{BackupEngine, BackupEngineOptions},
 	perf::get_memory_usage_stats,
-	AsColumnFamilyRef, BoundColumnFamily, Cache, ColumnFamilyDescriptor, DBCommon, DBWithThreadMode, Env, LogLevel,
-	MultiThreaded, Options,
+	AsColumnFamilyRef, BoundColumnFamily, Cache, ColumnFamilyDescriptor, DBCommon,
+	DBWithThreadMode, Env, LogLevel, MultiThreaded, Options,
 };

 use crate::{
@@ -169,11 +171,13 @@ impl Engine {

 	pub fn memory_usage(&self) -> Result<String> {
 		let mut res = String::new();
-		let stats = get_memory_usage_stats(Some(&[&self.db]), Some(&[&self.row_cache])).or_else(or_else)?;
+		let stats = get_memory_usage_stats(Some(&[&self.db]), Some(&[&self.row_cache]))
+			.or_else(or_else)?;
 		let mibs = |input| f64::from(u32::try_from(input / 1024).unwrap_or(0)) / 1024.0;
 		writeln!(
 			res,
-			"Memory buffers: {:.2} MiB\nPending write: {:.2} MiB\nTable readers: {:.2} MiB\nRow cache: {:.2} MiB",
+			"Memory buffers: {:.2} MiB\nPending write: {:.2} MiB\nTable readers: {:.2} MiB\nRow \
+			 cache: {:.2} MiB",
 			mibs(stats.mem_table_total),
 			mibs(stats.mem_table_unflushed),
 			mibs(stats.mem_table_readers_total),
@@ -202,7 +206,8 @@ impl Engine {
 			return Ok(());
 		}

-		let options = BackupEngineOptions::new(path.expect("valid database backup path")).map_err(map_err)?;
+		let options = BackupEngineOptions::new(path.expect("valid database backup path"))
+			.map_err(map_err)?;
 		let mut engine = BackupEngine::open(&options, &self.env).map_err(map_err)?;
 		if config.database_backups_to_keep > 0 {
 			let flush = !self.is_read_only();
@@ -232,13 +237,14 @@ impl Engine {
 		let config = &self.server.config;
 		let path = config.database_backup_path.as_ref();
 		if path.is_none() || path.is_some_and(|path| path.as_os_str().is_empty()) {
-			return Ok(
-				"Configure database_backup_path to enable backups, or the path specified is not valid".to_owned(),
-			);
+			return Ok("Configure database_backup_path to enable backups, or the path \
+			           specified is not valid"
+				.to_owned());
 		}

 		let mut res = String::new();
-		let options = BackupEngineOptions::new(path.expect("valid database backup path")).or_else(or_else)?;
+		let options = BackupEngineOptions::new(path.expect("valid database backup path"))
+			.or_else(or_else)?;
 		let engine = BackupEngine::open(&options, &self.env).or_else(or_else)?;
 		for info in engine.get_backup_info() {
 			writeln!(
@@ -256,8 +262,8 @@ impl Engine {

 	pub fn file_list(&self) -> Result<String> {
 		match self.db.live_files() {
-			Err(e) => Ok(String::from(e)),
-			Ok(files) => {
+			| Err(e) => Ok(String::from(e)),
+			| Ok(files) => {
 				let mut res = String::new();
 				writeln!(res, "| lev | sst | keys | dels | size | column |")?;
 				writeln!(res, "| ---: | :--- | ---: | ---: | ---: | :--- |")?;
@@ -265,7 +271,12 @@ impl Engine {
 					writeln!(
 						res,
 						"| {} | {:<13} | {:7}+ | {:4}- | {:9} | {} |",
-						file.level, file.name, file.num_entries, file.num_deletions, file.size, file.column_family_name,
+						file.level,
+						file.name,
+						file.num_entries,
+						file.num_deletions,
+						file.size,
+						file.column_family_name,
 					)?;
 				}

@@ -277,7 +288,11 @@ impl Engine {
 	/// Query for database property by null-terminated name which is expected to
 	/// have a result with an integer representation. This is intended for
 	/// low-overhead programmatic use.
-	pub(crate) fn property_integer(&self, cf: &impl AsColumnFamilyRef, name: &CStr) -> Result<u64> {
+	pub(crate) fn property_integer(
+		&self,
+		cf: &impl AsColumnFamilyRef,
+		name: &CStr,
+	) -> Result<u64> {
 		result(self.db.property_int_value_cf(cf, name))
 			.and_then(|val| val.map_or_else(|| Err!("Property {name:?} not found."), Ok))
 	}
@@ -300,8 +315,8 @@ impl Engine {
 pub(crate) fn repair(db_opts: &Options, path: &PathBuf) -> Result<()> {
 	warn!("Starting database repair. This may take a long time...");
 	match Db::repair(db_opts, path) {
-		Ok(()) => info!("Database repair successful."),
-		Err(e) => return Err!("Repair failed: {e:?}"),
+		| Ok(()) => info!("Database repair successful."),
+		| Err(e) => return Err!("Repair failed: {e:?}"),
 	}

 	Ok(())
@@ -320,10 +335,10 @@ pub(crate) fn handle_log(level: LogLevel, msg: &str) {
 	}

 	match level {
-		LogLevel::Header | LogLevel::Debug => debug!("{msg}"),
-		LogLevel::Error | LogLevel::Fatal => error!("{msg}"),
-		LogLevel::Info => debug!("{msg}"),
-		LogLevel::Warn => warn!("{msg}"),
+		| LogLevel::Header | LogLevel::Debug => debug!("{msg}"),
+		| LogLevel::Error | LogLevel::Fatal => error!("{msg}"),
+		| LogLevel::Info => debug!("{msg}"),
+		| LogLevel::Warn => warn!("{msg}"),
 	};
 }
@@ -11,11 +11,7 @@ pub struct Handle<'a> {
 }

 impl<'a> From<DBPinnableSlice<'a>> for Handle<'a> {
-	fn from(val: DBPinnableSlice<'a>) -> Self {
-		Self {
-			val,
-		}
-	}
+	fn from(val: DBPinnableSlice<'a>) -> Self { Self { val } }
 }

 impl Debug for Handle<'_> {
@@ -56,7 +56,10 @@ impl Map {
 	}

 	#[inline]
-	pub fn watch_prefix<'a, K>(&'a self, prefix: &K) -> Pin<Box<dyn Future<Output = ()> + Send + 'a>>
+	pub fn watch_prefix<'a, K>(
+		&'a self,
+		prefix: &K,
+	) -> Pin<Box<dyn Future<Output = ()> + Send + 'a>>
 	where
 		K: AsRef<[u8]> + ?Sized + Debug,
 	{
@@ -64,7 +67,9 @@ impl Map {
 	}

 	#[inline]
-	pub fn property_integer(&self, name: &CStr) -> Result<u64> { self.db.property_integer(&self.cf(), name) }
+	pub fn property_integer(&self, name: &CStr) -> Result<u64> {
+		self.db.property_integer(&self.cf(), name)
+	}

 	#[inline]
 	pub fn property(&self, name: &str) -> Result<String> { self.db.property(&self.cf(), name) }
@@ -76,7 +81,9 @@ impl Map {
 }

 impl Debug for Map {
-	fn fmt(&self, out: &mut fmt::Formatter<'_>) -> fmt::Result { write!(out, "Map {{name: {0}}}", self.name) }
+	fn fmt(&self, out: &mut fmt::Formatter<'_>) -> fmt::Result {
+		write!(out, "Map {{name: {0}}}", self.name)
+	}
 }

 impl Display for Map {
@@ -29,7 +29,10 @@ where
 /// - harder errors will panic
 #[inline]
 #[implement(super::Map)]
-pub fn acontains<const MAX: usize, K>(self: &Arc<Self>, key: &K) -> impl Future<Output = bool> + Send + '_
+pub fn acontains<const MAX: usize, K>(
+	self: &Arc<Self>,
+	key: &K,
+) -> impl Future<Output = bool> + Send + '_
 where
 	K: Serialize + ?Sized + Debug,
 {
@@ -42,7 +45,11 @@ where
 /// - harder errors will panic
 #[implement(super::Map)]
 #[tracing::instrument(skip(self, buf), fields(%self), level = "trace")]
-pub fn bcontains<K, B>(self: &Arc<Self>, key: &K, buf: &mut B) -> impl Future<Output = bool> + Send + '_
+pub fn bcontains<K, B>(
+	self: &Arc<Self>,
+	key: &K,
+	buf: &mut B,
+) -> impl Future<Output = bool> + Send + '_
 where
 	K: Serialize + ?Sized + Debug,
 	B: Write + AsRef<[u8]>,
@@ -26,7 +26,10 @@ where
 /// - From is a raw
 #[implement(super::Map)]
 #[inline]
-pub fn raw_count_from<'a, P>(self: &'a Arc<Self>, from: &'a P) -> impl Future<Output = usize> + Send + 'a
+pub fn raw_count_from<'a, P>(
+	self: &'a Arc<Self>,
+	from: &'a P,
+) -> impl Future<Output = usize> + Send + 'a
 where
 	P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
 {
@@ -38,7 +41,10 @@ where
 /// - Prefix is structured key
 #[implement(super::Map)]
 #[inline]
-pub fn count_prefix<'a, P>(self: &'a Arc<Self>, prefix: &P) -> impl Future<Output = usize> + Send + 'a
+pub fn count_prefix<'a, P>(
+	self: &'a Arc<Self>,
+	prefix: &P,
+) -> impl Future<Output = usize> + Send + 'a
 where
 	P: Serialize + ?Sized + Debug + 'a,
 {
@@ -50,7 +56,10 @@ where
 /// - Prefix is raw
 #[implement(super::Map)]
 #[inline]
-pub fn raw_count_prefix<'a, P>(self: &'a Arc<Self>, prefix: &'a P) -> impl Future<Output = usize> + Send + 'a
+pub fn raw_count_prefix<'a, P>(
+	self: &'a Arc<Self>,
+	prefix: &'a P,
+) -> impl Future<Output = usize> + Send + 'a
 where
 	P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
 {
@@ -31,7 +31,10 @@ where
 /// the query. The maximum size is supplied as const generic parameter.
 #[implement(super::Map)]
 #[inline]
-pub fn aqry<const MAX: usize, K>(self: &Arc<Self>, key: &K) -> impl Future<Output = Result<Handle<'_>>> + Send
+pub fn aqry<const MAX: usize, K>(
+	self: &Arc<Self>,
+	key: &K,
+) -> impl Future<Output = Result<Handle<'_>>> + Send
 where
 	K: Serialize + ?Sized + Debug,
 {
@@ -43,7 +46,11 @@ where
 /// asynchronously. The key is serialized into a user-supplied Writer.
 #[implement(super::Map)]
 #[tracing::instrument(skip(self, buf), level = "trace")]
-pub fn bqry<K, B>(self: &Arc<Self>, key: &K, buf: &mut B) -> impl Future<Output = Result<Handle<'_>>> + Send
+pub fn bqry<K, B>(
+	self: &Arc<Self>,
+	key: &K,
+	buf: &mut B,
+) -> impl Future<Output = Result<Handle<'_>>> + Send
 where
 	K: Serialize + ?Sized + Debug,
 	B: Write + AsRef<[u8]>,
@@ -110,15 +117,15 @@ where

 	match res {
 		// cache hit; not found
-		Ok(None) => Err!(Request(NotFound("Not found in database"))),
+		| Ok(None) => Err!(Request(NotFound("Not found in database"))),

 		// cache hit; value found
-		Ok(Some(res)) => Ok(Some(Handle::from(res))),
+		| Ok(Some(res)) => Ok(Some(Handle::from(res))),

 		// cache miss; unknown
-		Err(e) if is_incomplete(&e) => Ok(None),
+		| Err(e) if is_incomplete(&e) => Ok(None),

 		// some other error occurred
-		Err(e) => or_else(e),
+		| Err(e) => or_else(e),
 	}
 }
@@ -9,7 +9,8 @@ use crate::{util::map_err, Handle};
 #[implement(super::Map)]
 #[tracing::instrument(skip(self, keys), level = "trace")]
 pub fn aqry_batch<'b, 'a: 'b, const MAX: usize, I, K>(
-	self: &'a Arc<Self>, keys: I,
+	self: &'a Arc<Self>,
+	keys: I,
 ) -> impl Stream<Item = Result<Handle<'b>>> + Send + 'a
 where
 	I: Iterator<Item = &'b K> + Send + 'a,
@@ -22,7 +23,10 @@ where

 #[implement(super::Map)]
 #[tracing::instrument(skip(self, keys), level = "trace")]
-pub fn get_batch<'a, I, K>(self: &'a Arc<Self>, keys: I) -> impl Stream<Item = Result<Handle<'_>>> + Send + 'a
+pub fn get_batch<'a, I, K>(
+	self: &'a Arc<Self>,
+	keys: I,
+) -> impl Stream<Item = Result<Handle<'_>>> + Send + 'a
 where
 	I: Iterator<Item = &'a K> + Debug + Send + 'a,
 	K: AsRef<[u8]> + Debug + Send + ?Sized + Sync + 'a,
@@ -34,7 +38,10 @@ where

 #[implement(super::Map)]
 #[tracing::instrument(name = "batch_blocking", level = "trace", skip_all)]
-pub(crate) fn get_batch_blocking<'a, I, K>(&self, keys: I) -> impl Iterator<Item = Result<Handle<'_>>> + Send
+pub(crate) fn get_batch_blocking<'a, I, K>(
+	&self,
+	keys: I,
+) -> impl Iterator<Item = Result<Handle<'_>>> + Send
 where
 	I: Iterator<Item = &'a K> + ExactSizeIterator + Debug + Send,
 	K: AsRef<[u8]> + Debug + Send + ?Sized + Sync + 'a,
@@ -11,7 +11,10 @@ use crate::{
 };

 #[implement(super::Map)]
-pub fn keys_from<'a, K, P>(self: &'a Arc<Self>, from: &P) -> impl Stream<Item = Result<Key<'_, K>>> + Send
+pub fn keys_from<'a, K, P>(
+	self: &'a Arc<Self>,
+	from: &P,
+) -> impl Stream<Item = Result<Key<'_, K>>> + Send
 where
 	P: Serialize + ?Sized + Debug,
 	K: Deserialize<'a> + Send,
@@ -30,7 +33,10 @@ where
 }

 #[implement(super::Map)]
-pub fn keys_raw_from<'a, K, P>(self: &'a Arc<Self>, from: &P) -> impl Stream<Item = Result<Key<'_, K>>> + Send
+pub fn keys_raw_from<'a, K, P>(
+	self: &'a Arc<Self>,
+	from: &P,
+) -> impl Stream<Item = Result<Key<'_, K>>> + Send
 where
 	P: AsRef<[u8]> + ?Sized + Debug + Sync,
 	K: Deserialize<'a> + Send,
@@ -11,7 +11,10 @@ use serde::{Deserialize, Serialize};
 use crate::keyval::{result_deserialize_key, serialize_key, Key};

 #[implement(super::Map)]
-pub fn keys_prefix<'a, K, P>(self: &'a Arc<Self>, prefix: &P) -> impl Stream<Item = Result<Key<'_, K>>> + Send
+pub fn keys_prefix<'a, K, P>(
+	self: &'a Arc<Self>,
+	prefix: &P,
+) -> impl Stream<Item = Result<Key<'_, K>>> + Send
 where
 	P: Serialize + ?Sized + Debug,
 	K: Deserialize<'a> + Send,
@@ -22,7 +25,10 @@ where

 #[implement(super::Map)]
 #[tracing::instrument(skip(self), level = "trace")]
-pub fn keys_prefix_raw<P>(self: &Arc<Self>, prefix: &P) -> impl Stream<Item = Result<Key<'_>>> + Send
+pub fn keys_prefix_raw<P>(
+	self: &Arc<Self>,
+	prefix: &P,
+) -> impl Stream<Item = Result<Key<'_>>> + Send
 where
 	P: Serialize + ?Sized + Debug,
 {
@@ -33,7 +39,8 @@ where

 #[implement(super::Map)]
 pub fn keys_raw_prefix<'a, K, P>(
-	self: &'a Arc<Self>, prefix: &'a P,
+	self: &'a Arc<Self>,
+	prefix: &'a P,
 ) -> impl Stream<Item = Result<Key<'_, K>>> + Send + 'a
 where
 	P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
@@ -44,7 +51,10 @@ where
 }

 #[implement(super::Map)]
-pub fn raw_keys_prefix<'a, P>(self: &'a Arc<Self>, prefix: &'a P) -> impl Stream<Item = Result<Key<'_>>> + Send + 'a
+pub fn raw_keys_prefix<'a, P>(
+	self: &'a Arc<Self>,
+	prefix: &'a P,
+) -> impl Stream<Item = Result<Key<'_>>> + Send + 'a
 where
 	P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
 {
@@ -11,7 +11,10 @@ use crate::{
 };

 #[implement(super::Map)]
-pub fn rev_keys_from<'a, K, P>(self: &'a Arc<Self>, from: &P) -> impl Stream<Item = Result<Key<'_, K>>> + Send
+pub fn rev_keys_from<'a, K, P>(
+	self: &'a Arc<Self>,
+	from: &P,
+) -> impl Stream<Item = Result<Key<'_, K>>> + Send
 where
 	P: Serialize + ?Sized + Debug,
 	K: Deserialize<'a> + Send,
@@ -22,7 +25,10 @@ where

 #[implement(super::Map)]
 #[tracing::instrument(skip(self), level = "trace")]
-pub fn rev_keys_from_raw<P>(self: &Arc<Self>, from: &P) -> impl Stream<Item = Result<Key<'_>>> + Send
+pub fn rev_keys_from_raw<P>(
+	self: &Arc<Self>,
+	from: &P,
+) -> impl Stream<Item = Result<Key<'_>>> + Send
 where
 	P: Serialize + ?Sized + Debug,
 {
@@ -31,7 +37,10 @@ where
 }

 #[implement(super::Map)]
-pub fn rev_keys_raw_from<'a, K, P>(self: &'a Arc<Self>, from: &P) -> impl Stream<Item = Result<Key<'_, K>>> + Send
+pub fn rev_keys_raw_from<'a, K, P>(
+	self: &'a Arc<Self>,
+	from: &P,
+) -> impl Stream<Item = Result<Key<'_, K>>> + Send
 where
 	P: AsRef<[u8]> + ?Sized + Debug + Sync,
 	K: Deserialize<'a> + Send,
@@ -42,7 +51,10 @@ where

 #[implement(super::Map)]
 #[tracing::instrument(skip(self, from), fields(%self), level = "trace")]
-pub fn rev_raw_keys_from<P>(self: &Arc<Self>, from: &P) -> impl Stream<Item = Result<Key<'_>>> + Send
+pub fn rev_raw_keys_from<P>(
+	self: &Arc<Self>,
+	from: &P,
+) -> impl Stream<Item = Result<Key<'_>>> + Send
 where
 	P: AsRef<[u8]> + ?Sized + Debug,
 {
@@ -11,7 +11,10 @@ use serde::{Deserialize, Serialize};
 use crate::keyval::{result_deserialize_key, serialize_key, Key};

 #[implement(super::Map)]
-pub fn rev_keys_prefix<'a, K, P>(self: &'a Arc<Self>, prefix: &P) -> impl Stream<Item = Result<Key<'_, K>>> + Send
+pub fn rev_keys_prefix<'a, K, P>(
+	self: &'a Arc<Self>,
+	prefix: &P,
+) -> impl Stream<Item = Result<Key<'_, K>>> + Send
 where
 	P: Serialize + ?Sized + Debug,
 	K: Deserialize<'a> + Send,
@@ -22,7 +25,10 @@ where

 #[implement(super::Map)]
 #[tracing::instrument(skip(self), level = "trace")]
-pub fn rev_keys_prefix_raw<P>(self: &Arc<Self>, prefix: &P) -> impl Stream<Item = Result<Key<'_>>> + Send
+pub fn rev_keys_prefix_raw<P>(
+	self: &Arc<Self>,
+	prefix: &P,
+) -> impl Stream<Item = Result<Key<'_>>> + Send
 where
 	P: Serialize + ?Sized + Debug,
 {
@@ -33,7 +39,8 @@ where

 #[implement(super::Map)]
 pub fn rev_keys_raw_prefix<'a, K, P>(
-	self: &'a Arc<Self>, prefix: &'a P,
+	self: &'a Arc<Self>,
+	prefix: &'a P,
 ) -> impl Stream<Item = Result<Key<'_, K>>> + Send + 'a
 where
 	P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
@@ -44,7 +51,10 @@ where
 }

 #[implement(super::Map)]
-pub fn rev_raw_keys_prefix<'a, P>(self: &'a Arc<Self>, prefix: &'a P) -> impl Stream<Item = Result<Key<'_>>> + Send + 'a
+pub fn rev_raw_keys_prefix<'a, P>(
+	self: &'a Arc<Self>,
+	prefix: &'a P,
+) -> impl Stream<Item = Result<Key<'_>>> + Send + 'a
 where
 	P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
 {
@@ -19,7 +19,8 @@ use crate::{
 /// - Result is deserialized
 #[implement(super::Map)]
 pub fn rev_stream_from<'a, K, V, P>(
-	self: &'a Arc<Self>, from: &P,
+	self: &'a Arc<Self>,
+	from: &P,
 ) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send
 where
 	P: Serialize + ?Sized + Debug,
@@ -36,7 +37,10 @@ where
 /// - Result is raw
 #[implement(super::Map)]
 #[tracing::instrument(skip(self), level = "trace")]
-pub fn rev_stream_from_raw<P>(self: &Arc<Self>, from: &P) -> impl Stream<Item = Result<KeyVal<'_>>> + Send
+pub fn rev_stream_from_raw<P>(
+	self: &Arc<Self>,
+	from: &P,
+) -> impl Stream<Item = Result<KeyVal<'_>>> + Send
 where
 	P: Serialize + ?Sized + Debug,
 {
@@ -50,7 +54,8 @@ where
 /// - Result is deserialized
 #[implement(super::Map)]
 pub fn rev_stream_raw_from<'a, K, V, P>(
-	self: &'a Arc<Self>, from: &P,
+	self: &'a Arc<Self>,
+	from: &P,
 ) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send
 where
 	P: AsRef<[u8]> + ?Sized + Debug + Sync,
@@ -67,7 +72,10 @@ where
 /// - Result is raw
 #[implement(super::Map)]
 #[tracing::instrument(skip(self, from), fields(%self), level = "trace")]
-pub fn rev_raw_stream_from<P>(self: &Arc<Self>, from: &P) -> impl Stream<Item = Result<KeyVal<'_>>> + Send
+pub fn rev_raw_stream_from<P>(
+	self: &Arc<Self>,
+	from: &P,
+) -> impl Stream<Item = Result<KeyVal<'_>>> + Send
 where
 	P: AsRef<[u8]> + ?Sized + Debug,
 {
@@ -16,7 +16,8 @@ use crate::keyval::{result_deserialize, serialize_key, KeyVal};
 /// - Result is deserialized
 #[implement(super::Map)]
 pub fn rev_stream_prefix<'a, K, V, P>(
-	self: &'a Arc<Self>, prefix: &P,
+	self: &'a Arc<Self>,
+	prefix: &P,
 ) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send
 where
 	P: Serialize + ?Sized + Debug,
@@ -33,7 +34,10 @@ where
 /// - Result is raw
 #[implement(super::Map)]
 #[tracing::instrument(skip(self), level = "trace")]
-pub fn rev_stream_prefix_raw<P>(self: &Arc<Self>, prefix: &P) -> impl Stream<Item = Result<KeyVal<'_>>> + Send
+pub fn rev_stream_prefix_raw<P>(
+	self: &Arc<Self>,
+	prefix: &P,
+) -> impl Stream<Item = Result<KeyVal<'_>>> + Send
 where
 	P: Serialize + ?Sized + Debug,
 {
@@ -48,7 +52,8 @@ where
 /// - Result is deserialized
 #[implement(super::Map)]
 pub fn rev_stream_raw_prefix<'a, K, V, P>(
-	self: &'a Arc<Self>, prefix: &'a P,
+	self: &'a Arc<Self>,
+	prefix: &'a P,
 ) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send + 'a
 where
 	P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
@@ -65,7 +70,8 @@ where
 /// - Result is raw
 #[implement(super::Map)]
 pub fn rev_raw_stream_prefix<'a, P>(
-	self: &'a Arc<Self>, prefix: &'a P,
+	self: &'a Arc<Self>,
+	prefix: &'a P,
 ) -> impl Stream<Item = Result<KeyVal<'_>>> + Send + 'a
 where
 	P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
@@ -18,7 +18,10 @@ use crate::{
 /// - Query is serialized
 /// - Result is deserialized
 #[implement(super::Map)]
-pub fn stream_from<'a, K, V, P>(self: &'a Arc<Self>, from: &P) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send
+pub fn stream_from<'a, K, V, P>(
+	self: &'a Arc<Self>,
+	from: &P,
+) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send
 where
 	P: Serialize + ?Sized + Debug,
 	K: Deserialize<'a> + Send,
@@ -33,7 +36,10 @@ where
 /// - Result is raw
 #[implement(super::Map)]
 #[tracing::instrument(skip(self), level = "trace")]
-pub fn stream_from_raw<P>(self: &Arc<Self>, from: &P) -> impl Stream<Item = Result<KeyVal<'_>>> + Send
+pub fn stream_from_raw<P>(
+	self: &Arc<Self>,
+	from: &P,
+) -> impl Stream<Item = Result<KeyVal<'_>>> + Send
 where
 	P: Serialize + ?Sized + Debug,
 {
@@ -47,7 +53,8 @@ where
 /// - Result is deserialized
 #[implement(super::Map)]
 pub fn stream_raw_from<'a, K, V, P>(
-	self: &'a Arc<Self>, from: &P,
+	self: &'a Arc<Self>,
+	from: &P,
 ) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send
 where
 	P: AsRef<[u8]> + ?Sized + Debug + Sync,
@@ -63,7 +70,10 @@ where
 /// - Result is raw
 #[implement(super::Map)]
 #[tracing::instrument(skip(self, from), fields(%self), level = "trace")]
-pub fn raw_stream_from<P>(self: &Arc<Self>, from: &P) -> impl Stream<Item = Result<KeyVal<'_>>> + Send
+pub fn raw_stream_from<P>(
+	self: &Arc<Self>,
+	from: &P,
+) -> impl Stream<Item = Result<KeyVal<'_>>> + Send
 where
 	P: AsRef<[u8]> + ?Sized + Debug,
 {
@@ -16,7 +16,8 @@ use crate::keyval::{result_deserialize, serialize_key, KeyVal};
 /// - Result is deserialized
 #[implement(super::Map)]
 pub fn stream_prefix<'a, K, V, P>(
-	self: &'a Arc<Self>, prefix: &P,
+	self: &'a Arc<Self>,
+	prefix: &P,
 ) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send
 where
 	P: Serialize + ?Sized + Debug,
@@ -33,7 +34,10 @@ where
 /// - Result is raw
 #[implement(super::Map)]
 #[tracing::instrument(skip(self), level = "trace")]
-pub fn stream_prefix_raw<P>(self: &Arc<Self>, prefix: &P) -> impl Stream<Item = Result<KeyVal<'_>>> + Send
+pub fn stream_prefix_raw<P>(
+	self: &Arc<Self>,
+	prefix: &P,
+) -> impl Stream<Item = Result<KeyVal<'_>>> + Send
 where
 	P: Serialize + ?Sized + Debug,
 {
@@ -48,7 +52,8 @@ where
 /// - Result is deserialized
 #[implement(super::Map)]
 pub fn stream_raw_prefix<'a, K, V, P>(
-	self: &'a Arc<Self>, prefix: &'a P,
+	self: &'a Arc<Self>,
+	prefix: &'a P,
 ) -> impl Stream<Item = Result<KeyVal<'_, K, V>>> + Send + 'a
 where
 	P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
@@ -65,7 +70,8 @@ where
 /// - Result is raw
 #[implement(super::Map)]
 pub fn raw_stream_prefix<'a, P>(
-	self: &'a Arc<Self>, prefix: &'a P,
+	self: &'a Arc<Self>,
+	prefix: &'a P,
 ) -> impl Stream<Item = Result<KeyVal<'_>>> + Send + 'a
 where
 	P: AsRef<[u8]> + ?Sized + Debug + Sync + 'a,
@@ -2,8 +2,9 @@ use std::{cmp, collections::HashMap, convert::TryFrom};

 use conduwuit::{err, utils, Config, Result};
 use rocksdb::{
-	statistics::StatsLevel, BlockBasedOptions, Cache, DBCompactionStyle, DBCompressionType, DBRecoveryMode, Env,
-	LogLevel, LruCacheOptions, Options, UniversalCompactOptions, UniversalCompactionStopStyle,
+	statistics::StatsLevel, BlockBasedOptions, Cache, DBCompactionStyle, DBCompressionType,
+	DBRecoveryMode, Env, LogLevel, LruCacheOptions, Options, UniversalCompactOptions,
+	UniversalCompactionStopStyle,
 };

 /// Create database-wide options suitable for opening the database. This also
@@ -11,7 +12,12 @@ use rocksdb::{
 /// resulting value. Note that we require special per-column options on some
 /// columns, therefor columns should only be opened after passing this result
 /// through cf_options().
-pub(crate) fn db_options(config: &Config, env: &mut Env, row_cache: &Cache, col_cache: &Cache) -> Result<Options> {
+pub(crate) fn db_options(
+	config: &Config,
+	env: &mut Env,
+	row_cache: &Cache,
+	col_cache: &Cache,
+) -> Result<Options> {
 	const DEFAULT_STATS_LEVEL: StatsLevel = if cfg!(debug_assertions) {
 		StatsLevel::ExceptDetailedTimers
 	} else {
@@ -73,13 +79,13 @@ pub(crate) fn db_options(config: &Config, env: &mut Env, row_cache: &Cache, col_
 	opts.set_disable_auto_compactions(!config.rocksdb_compaction);

 	opts.set_statistics_level(match config.rocksdb_stats_level {
-		0 => StatsLevel::DisableAll,
-		1 => DEFAULT_STATS_LEVEL,
-		2 => StatsLevel::ExceptHistogramOrTimers,
-		3 => StatsLevel::ExceptTimers,
-		4 => StatsLevel::ExceptDetailedTimers,
-		5 => StatsLevel::ExceptTimeForMutex,
-		6_u8..=u8::MAX => StatsLevel::All,
+		| 0 => StatsLevel::DisableAll,
+		| 1 => DEFAULT_STATS_LEVEL,
+		| 2 => StatsLevel::ExceptHistogramOrTimers,
+		| 3 => StatsLevel::ExceptTimers,
+		| 4 => StatsLevel::ExceptDetailedTimers,
+		| 5 => StatsLevel::ExceptTimeForMutex,
+		| 6_u8..=u8::MAX => StatsLevel::All,
 	});

 	// Default: https://github.com/facebook/rocksdb/wiki/WAL-Recovery-Modes#ktoleratecorruptedtailrecords
@@ -88,11 +94,11 @@ pub(crate) fn db_options(config: &Config, env: &mut Env, row_cache: &Cache, col_
 	// recovered in this manner as it's likely any lost information will be
 	// restored via federation.
 	opts.set_wal_recovery_mode(match config.rocksdb_recovery_mode {
-		0 => DBRecoveryMode::AbsoluteConsistency,
-		1 => DBRecoveryMode::TolerateCorruptedTailRecords,
-		2 => DBRecoveryMode::PointInTime,
-		3 => DBRecoveryMode::SkipAnyCorruptedRecord,
-		4_u8..=u8::MAX => unimplemented!(),
+		| 0 => DBRecoveryMode::AbsoluteConsistency,
+		| 1 => DBRecoveryMode::TolerateCorruptedTailRecords,
+		| 2 => DBRecoveryMode::PointInTime,
+		| 3 => DBRecoveryMode::SkipAnyCorruptedRecord,
+		| 4_u8..=u8::MAX => unimplemented!(),
 	});

 	// <https://github.com/facebook/rocksdb/wiki/Track-WAL-in-MANIFEST>
@@ -111,11 +117,14 @@ pub(crate) fn db_options(config: &Config, env: &mut Env, row_cache: &Cache, col_
 /// db_options() as the argument to this function and use the return value in
 /// the arguments to open the specific column.
 pub(crate) fn cf_options(
-	cfg: &Config, name: &str, mut opts: Options, cache: &mut HashMap<String, Cache>,
+	cfg: &Config,
+	name: &str,
+	mut opts: Options,
+	cache: &mut HashMap<String, Cache>,
 ) -> Result<Options> {
 	// Columns with non-default compaction options
 	match name {
-		"backupid_algorithm"
+		| "backupid_algorithm"
 		| "backupid_etag"
 		| "backupkeyid_backup"
 		| "roomid_shortroomid"
@@ -125,12 +134,12 @@ pub(crate) fn cf_options(
 		| "shortstatehash_statediff"
 		| "userdevicetxnid_response"
 		| "userfilterid_filter" => set_for_sequential_small_uc(&mut opts, cfg),
-		&_ => {},
+		| &_ => {},
 	}

 	// Columns with non-default table/cache configs
 	match name {
-		"shorteventid_eventid" => set_table_with_new_cache(
+		| "shorteventid_eventid" => set_table_with_new_cache(
 			&mut opts,
 			cfg,
 			cache,
@@ -138,7 +147,7 @@ pub(crate) fn cf_options(
 			cache_size(cfg, cfg.shorteventid_cache_capacity, 64)?,
 		),

-		"eventid_shorteventid" => set_table_with_new_cache(
+		| "eventid_shorteventid" => set_table_with_new_cache(
 			&mut opts,
 			cfg,
 			cache,
@@ -146,7 +155,7 @@ pub(crate) fn cf_options(
 			cache_size(cfg, cfg.eventidshort_cache_capacity, 64)?,
 		),

-		"eventid_pduid" => set_table_with_new_cache(
+		| "eventid_pduid" => set_table_with_new_cache(
 			&mut opts,
 			cfg,
 			cache,
@@ -154,7 +163,7 @@ pub(crate) fn cf_options(
 			cache_size(cfg, cfg.eventid_pdu_cache_capacity, 64)?,
 		),

-		"shorteventid_authchain" => {
+		| "shorteventid_authchain" => {
 			set_table_with_new_cache(
 				&mut opts,
 				cfg,
@@ -164,7 +173,7 @@ pub(crate) fn cf_options(
 			);
 		},

-		"shortstatekey_statekey" => set_table_with_new_cache(
+		| "shortstatekey_statekey" => set_table_with_new_cache(
 			&mut opts,
 			cfg,
 			cache,
@@ -172,7 +181,7 @@ pub(crate) fn cf_options(
 			cache_size(cfg, cfg.shortstatekey_cache_capacity, 1024)?,
 		),

-		"statekey_shortstatekey" => set_table_with_new_cache(
+		| "statekey_shortstatekey" => set_table_with_new_cache(
 			&mut opts,
 			cfg,
 			cache,
@@ -180,22 +189,32 @@ pub(crate) fn cf_options(
 			cache_size(cfg, cfg.statekeyshort_cache_capacity, 1024)?,
 		),

-		"servernameevent_data" => set_table_with_new_cache(
+		| "servernameevent_data" => set_table_with_new_cache(
 			&mut opts,
 			cfg,
 			cache,
 			name,
-			cache_size(cfg, cfg.servernameevent_data_cache_capacity, 128)?, /* Raw average value size = 102, key
+			cache_size(cfg, cfg.servernameevent_data_cache_capacity, 128)?, /* Raw average
+			                                                                 * value size =
+			                                                                 * 102, key
 			                                                                 * size = 34 */
 		),

-		"eventid_outlierpdu" => {
-			set_table_with_new_cache(&mut opts, cfg, cache, name, cache_size(cfg, cfg.pdu_cache_capacity, 1536)?);
+		| "eventid_outlierpdu" => {
+			set_table_with_new_cache(
+				&mut opts,
+				cfg,
+				cache,
+				name,
+				cache_size(cfg, cfg.pdu_cache_capacity, 1536)?,
+			);
 		},

-		"pduid_pdu" => set_table_with_shared_cache(&mut opts, cfg, cache, name, "eventid_outlierpdu"),
+		| "pduid_pdu" => {
+			set_table_with_shared_cache(&mut opts, cfg, cache, name, "eventid_outlierpdu");
+		},

-		&_ => {},
+		| &_ => {},
 	}

 	Ok(opts)
@@ -203,11 +222,11 @@ pub(crate) fn cf_options(

 fn set_logging_defaults(opts: &mut Options, config: &Config) {
 	let rocksdb_log_level = match config.rocksdb_log_level.as_ref() {
-		"debug" => LogLevel::Debug,
-		"info" => LogLevel::Info,
-		"warn" => LogLevel::Warn,
-		"fatal" => LogLevel::Fatal,
-		_ => LogLevel::Error,
+		| "debug" => LogLevel::Debug,
+		| "info" => LogLevel::Info,
+		| "warn" => LogLevel::Warn,
+		| "fatal" => LogLevel::Fatal,
+		| _ => LogLevel::Error,
 	};

 	opts.set_log_level(rocksdb_log_level);
@@ -225,13 +244,13 @@ fn set_logging_defaults(opts: &mut Options, config: &Config) {

 fn set_compression_defaults(opts: &mut Options, config: &Config) {
 	let rocksdb_compression_algo = match config.rocksdb_compression_algo.as_ref() {
-		"snappy" => DBCompressionType::Snappy,
-		"zlib" => DBCompressionType::Zlib,
-		"bz2" => DBCompressionType::Bz2,
-		"lz4" => DBCompressionType::Lz4,
-		"lz4hc" => DBCompressionType::Lz4hc,
-		"none" => DBCompressionType::None,
-		_ => DBCompressionType::Zstd,
+		| "snappy" => DBCompressionType::Snappy,
+		| "zlib" => DBCompressionType::Zlib,
+		| "bz2" => DBCompressionType::Bz2,
+		| "lz4" => DBCompressionType::Lz4,
+		| "lz4hc" => DBCompressionType::Lz4hc,
+		| "none" => DBCompressionType::None,
+		| _ => DBCompressionType::Zstd,
 	};

 	if config.rocksdb_bottommost_compression {
@@ -239,7 +258,13 @@ fn set_compression_defaults(opts: &mut Options, config: &Config) {
 		opts.set_bottommost_zstd_max_train_bytes(0, true);

 		// -14 w_bits is only read by zlib.
-		opts.set_bottommost_compression_options(-14, config.rocksdb_bottommost_compression_level, 0, 0, true);
+		opts.set_bottommost_compression_options(
+			-14,
+			config.rocksdb_bottommost_compression_level,
+			0,
+			0,
+			true,
+		);
 	}

 	// -14 w_bits is only read by zlib.
@@ -338,7 +363,11 @@ fn uc_options(_config: &Config) -> UniversalCompactOptions {
 }

 fn set_table_with_new_cache(
-	opts: &mut Options, config: &Config, caches: &mut HashMap<String, Cache>, name: &str, size: usize,
+	opts: &mut Options,
+	config: &Config,
+	caches: &mut HashMap<String, Cache>,
+	name: &str,
+	size: usize,
 ) {
 	let mut cache_opts = LruCacheOptions::default();
 	cache_opts.set_capacity(size);
@@ -351,7 +380,11 @@ fn set_table_with_new_cache(
 }

 fn set_table_with_shared_cache(
-	opts: &mut Options, config: &Config, cache: &HashMap<String, Cache>, _name: &str, cache_name: &str,
+	opts: &mut Options,
+	config: &Config,
+	cache: &HashMap<String, Cache>,
+	_name: &str,
+	cache_name: &str,
 ) {
 	let mut table = table_options(config);
 	table.set_block_cache(
@@ -230,8 +230,8 @@ fn worker_wait(&self, recv: &Receiver<Cmd>) -> Result<Cmd, RecvError> {
 #[implement(Pool)]
 fn worker_handle(&self, cmd: Cmd) {
 	match cmd {
-		Cmd::Get(cmd) => self.handle_get(cmd),
-		Cmd::Iter(cmd) => self.handle_iter(cmd),
+		| Cmd::Get(cmd) => self.handle_get(cmd),
+		| Cmd::Iter(cmd) => self.handle_iter(cmd),
 	}
 }

@@ -251,8 +251,8 @@ fn handle_iter(&self, mut cmd: Seek) {

 	let from = cmd.key.as_deref().map(Into::into);
 	let result = match cmd.dir {
-		Direction::Forward => cmd.state.init_fwd(from),
-		Direction::Reverse => cmd.state.init_rev(from),
+		| Direction::Forward => cmd.state.init_fwd(from),
+		| Direction::Reverse => cmd.state.init_rev(from),
 	};

 	let chan_result = chan.send(into_send_seek(result));
@@ -274,8 +274,8 @@ fn _handle_seek(&self, mut cmd: Seek) {
 	}

 	match cmd.dir {
-		Direction::Forward => cmd.state.seek_fwd(),
-		Direction::Reverse => cmd.state.seek_rev(),
+		| Direction::Forward => cmd.state.seek_fwd(),
+		| Direction::Reverse => cmd.state.seek_rev(),
 	};

 	let chan_result = chan.send(into_send_seek(cmd.state));
@@ -6,7 +6,9 @@ use serde::{ser, Serialize};
 use crate::util::unhandled;

 #[inline]
-pub fn serialize_to_vec<T: Serialize>(val: T) -> Result<Vec<u8>> { serialize_to::<Vec<u8>, T>(val) }
+pub fn serialize_to_vec<T: Serialize>(val: T) -> Result<Vec<u8>> {
+	serialize_to::<Vec<u8>, T>(val)
+}

 #[inline]
 pub fn serialize_to<B, T>(val: T) -> Result<B>
@@ -26,17 +28,15 @@ where
 	W: Write + AsRef<[u8]> + 'a,
 	T: Serialize,
 {
-	let mut serializer = Serializer {
-		out,
-		depth: 0,
-		sep: false,
-		fin: false,
-	};
+	let mut serializer = Serializer { out, depth: 0, sep: false, fin: false };

 	val.serialize(&mut serializer)
 		.map_err(|error| err!(SerdeSer("{error}")))
 		.debug_inspect(|()| {
-			debug_assert_eq!(serializer.depth, 0, "Serialization completed at non-zero recursion level");
+			debug_assert_eq!(
+				serializer.depth, 0,
+				"Serialization completed at non-zero recursion level"
+			);
 		})?;

 	Ok((*out).as_ref())
@@ -132,29 +132,45 @@ impl<W: Write> ser::Serializer for &mut Serializer<'_, W> {
 		Ok(self)
 	}

-	fn serialize_tuple_struct(self, _name: &'static str, _len: usize) -> Result<Self::SerializeTupleStruct> {
+	fn serialize_tuple_struct(
+		self,
+		_name: &'static str,
+		_len: usize,
+	) -> Result<Self::SerializeTupleStruct> {
 		self.tuple_start();
 		Ok(self)
 	}

 	fn serialize_tuple_variant(
-		self, _name: &'static str, _idx: u32, _var: &'static str, _len: usize,
+		self,
+		_name: &'static str,
+		_idx: u32,
+		_var: &'static str,
+		_len: usize,
 	) -> Result<Self::SerializeTupleVariant> {
 		unhandled!("serialize Tuple Variant not implemented")
 	}

 	fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap> {
-		unhandled!("serialize Map not implemented; did you mean to use database::Json() around your serde_json::Value?")
+		unhandled!(
+			"serialize Map not implemented; did you mean to use database::Json() around your \
+			 serde_json::Value?"
+		)
 	}

 	fn serialize_struct(self, _name: &'static str, _len: usize) -> Result<Self::SerializeStruct> {
 		unhandled!(
-			"serialize Struct not implemented at this time; did you mean to use database::Json() around your struct?"
+			"serialize Struct not implemented at this time; did you mean to use \
+			 database::Json() around your struct?"
 		)
 	}

 	fn serialize_struct_variant(
-		self, _name: &'static str, _idx: u32, _var: &'static str, _len: usize,
+		self,
+		_name: &'static str,
+		_idx: u32,
+		_var: &'static str,
+		_len: usize,
 	) -> Result<Self::SerializeStructVariant> {
 		unhandled!("serialize Struct Variant not implemented")
 	}
@@ -170,36 +186,47 @@ impl<W: Write> ser::Serializer for &mut Serializer<'_, W> {
 		);

 		match name {
-			"Json" => serde_json::to_writer(&mut self.out, value).map_err(Into::into),
-			_ => unhandled!("Unrecognized serialization Newtype {name:?}"),
+			| "Json" => serde_json::to_writer(&mut self.out, value).map_err(Into::into),
+			| _ => unhandled!("Unrecognized serialization Newtype {name:?}"),
 		}
 	}

 	fn serialize_newtype_variant<T: Serialize + ?Sized>(
-		self, _name: &'static str, _idx: u32, _var: &'static str, _value: &T,
+		self,
+		_name: &'static str,
+		_idx: u32,
+		_var: &'static str,
+		_value: &T,
 	) -> Result<Self::Ok> {
 		unhandled!("serialize Newtype Variant not implemented")
 	}

 	fn serialize_unit_struct(self, name: &'static str) -> Result<Self::Ok> {
 		match name {
-			"Interfix" => {
+			| "Interfix" => {
 				self.set_finalized();
 			},
-			"Separator" => {
+			| "Separator" => {
 				self.separator()?;
 			},
-			_ => unhandled!("Unrecognized serialization directive: {name:?}"),
+			| _ => unhandled!("Unrecognized serialization directive: {name:?}"),
 		};

 		Ok(())
 	}

-	fn serialize_unit_variant(self, _name: &'static str, _idx: u32, _var: &'static str) -> Result<Self::Ok> {
+	fn serialize_unit_variant(
+		self,
+		_name: &'static str,
+		_idx: u32,
+		_var: &'static str,
+	) -> Result<Self::Ok> {
 		unhandled!("serialize Unit Variant not implemented")
 	}

-	fn serialize_some<T: Serialize + ?Sized>(self, val: &T) -> Result<Self::Ok> { val.serialize(self) }
+	fn serialize_some<T: Serialize + ?Sized>(self, val: &T) -> Result<Self::Ok> {
+		val.serialize(self)
+	}

 	fn serialize_none(self) -> Result<Self::Ok> { Ok(()) }
@@ -226,27 +253,39 @@ impl<W: Write> ser::Serializer for &mut Serializer<'_, W> {
 		self.write(v)
 	}

-	fn serialize_f64(self, _v: f64) -> Result<Self::Ok> { unhandled!("serialize f64 not implemented") }
+	fn serialize_f64(self, _v: f64) -> Result<Self::Ok> {
+		unhandled!("serialize f64 not implemented")
+	}

-	fn serialize_f32(self, _v: f32) -> Result<Self::Ok> { unhandled!("serialize f32 not implemented") }
+	fn serialize_f32(self, _v: f32) -> Result<Self::Ok> {
+		unhandled!("serialize f32 not implemented")
+	}

 	fn serialize_i64(self, v: i64) -> Result<Self::Ok> { self.write(&v.to_be_bytes()) }

 	fn serialize_i32(self, v: i32) -> Result<Self::Ok> { self.write(&v.to_be_bytes()) }

-	fn serialize_i16(self, _v: i16) -> Result<Self::Ok> { unhandled!("serialize i16 not implemented") }
+	fn serialize_i16(self, _v: i16) -> Result<Self::Ok> {
+		unhandled!("serialize i16 not implemented")
+	}

-	fn serialize_i8(self, _v: i8) -> Result<Self::Ok> { unhandled!("serialize i8 not implemented") }
+	fn serialize_i8(self, _v: i8) -> Result<Self::Ok> {
+		unhandled!("serialize i8 not implemented")
+	}

 	fn serialize_u64(self, v: u64) -> Result<Self::Ok> { self.write(&v.to_be_bytes()) }

 	fn serialize_u32(self, v: u32) -> Result<Self::Ok> { self.write(&v.to_be_bytes()) }

-	fn serialize_u16(self, _v: u16) -> Result<Self::Ok> { unhandled!("serialize u16 not implemented") }
+	fn serialize_u16(self, _v: u16) -> Result<Self::Ok> {
+		unhandled!("serialize u16 not implemented")
+	}

 	fn serialize_u8(self, v: u8) -> Result<Self::Ok> { self.write(&[v]) }

-	fn serialize_bool(self, _v: bool) -> Result<Self::Ok> { unhandled!("serialize bool not implemented") }
+	fn serialize_bool(self, _v: bool) -> Result<Self::Ok> {
+		unhandled!("serialize bool not implemented")
+	}

 	fn serialize_unit(self) -> Result<Self::Ok> { unhandled!("serialize unit not implemented") }
 }
@@ -255,7 +294,9 @@ impl<W: Write> ser::SerializeSeq for &mut Serializer<'_, W> {
 	type Error = Error;
 	type Ok = ();

-	fn serialize_element<T: Serialize + ?Sized>(&mut self, val: &T) -> Result<Self::Ok> { val.serialize(&mut **self) }
+	fn serialize_element<T: Serialize + ?Sized>(&mut self, val: &T) -> Result<Self::Ok> {
+		val.serialize(&mut **self)
+	}

 	fn end(self) -> Result<Self::Ok> { self.sequence_end() }
 }
@@ -315,7 +356,11 @@ impl<W: Write> ser::SerializeStruct for &mut Serializer<'_, W> {
 	type Error = Error;
 	type Ok = ();

-	fn serialize_field<T: Serialize + ?Sized>(&mut self, _key: &'static str, _val: &T) -> Result<Self::Ok> {
+	fn serialize_field<T: Serialize + ?Sized>(
+		&mut self,
+		_key: &'static str,
+		_val: &T,
+	) -> Result<Self::Ok> {
 		unhandled!("serialize Struct Field not implemented")
 	}

@@ -326,9 +371,15 @@ impl<W: Write> ser::SerializeStructVariant for &mut Serializer<'_, W> {
 	type Error = Error;
 	type Ok = ();

-	fn serialize_field<T: Serialize + ?Sized>(&mut self, _key: &'static str, _val: &T) -> Result<Self::Ok> {
+	fn serialize_field<T: Serialize + ?Sized>(
+		&mut self,
+		_key: &'static str,
+		_val: &T,
+	) -> Result<Self::Ok> {
 		unhandled!("serialize Struct Variant Field not implemented")
 	}

-	fn end(self) -> Result<Self::Ok> { unhandled!("serialize Struct Variant End not implemented") }
+	fn end(self) -> Result<Self::Ok> {
+		unhandled!("serialize Struct Variant End not implemented")
+	}
 }
@@ -17,18 +17,12 @@ pub(crate) struct Items<'a> {

 impl<'a> Items<'a> {
 	pub(crate) fn new(db: &'a Arc<Engine>, cf: &'a Arc<ColumnFamily>, opts: ReadOptions) -> Self {
-		Self {
-			state: State::new(db, cf, opts),
-		}
+		Self { state: State::new(db, cf, opts) }
 	}
 }

 impl<'a> convert::From<State<'a>> for Items<'a> {
-	fn from(state: State<'a>) -> Self {
-		Self {
-			state,
-		}
-	}
+	fn from(state: State<'a>) -> Self { Self { state } }
 }

 impl<'a> Cursor<'a, KeyVal<'a>> for Items<'a> {
@@ -40,11 +34,7 @@ impl<'a> Cursor<'a, KeyVal<'a>> for Items<'a> {
 	fn seek(&mut self) { self.state.seek_fwd(); }

 	#[inline]
-	fn init(self, from: From<'a>) -> Self {
-		Self {
-			state: self.state.init_fwd(from),
-		}
-	}
+	fn init(self, from: From<'a>) -> Self { Self { state: self.state.init_fwd(from) } }
 }

 impl<'a> Stream for Items<'a> {
@@ -17,18 +17,12 @@ pub(crate) struct ItemsRev<'a> {

 impl<'a> ItemsRev<'a> {
 	pub(crate) fn new(db: &'a Arc<Engine>, cf: &'a Arc<ColumnFamily>, opts: ReadOptions) -> Self {
-		Self {
-			state: State::new(db, cf, opts),
-		}
+		Self { state: State::new(db, cf, opts) }
 	}
 }

 impl<'a> convert::From<State<'a>> for ItemsRev<'a> {
-	fn from(state: State<'a>) -> Self {
-		Self {
-			state,
-		}
-	}
+	fn from(state: State<'a>) -> Self { Self { state } }
 }

 impl<'a> Cursor<'a, KeyVal<'a>> for ItemsRev<'a> {
@@ -40,11 +34,7 @@ impl<'a> Cursor<'a, KeyVal<'a>> for ItemsRev<'a> {
 	fn seek(&mut self) { self.state.seek_rev(); }

 	#[inline]
-	fn init(self, from: From<'a>) -> Self {
-		Self {
-			state: self.state.init_rev(from),
-		}
-	}
+	fn init(self, from: From<'a>) -> Self { Self { state: self.state.init_rev(from) } }
 }

 impl<'a> Stream for ItemsRev<'a> {
@@ -17,18 +17,12 @@ pub(crate) struct Keys<'a> {

 impl<'a> Keys<'a> {
 	pub(crate) fn new(db: &'a Arc<Engine>, cf: &'a Arc<ColumnFamily>, opts: ReadOptions) -> Self {
-		Self {
-			state: State::new(db, cf, opts),
-		}
+		Self { state: State::new(db, cf, opts) }
 	}
 }

 impl<'a> convert::From<State<'a>> for Keys<'a> {
-	fn from(state: State<'a>) -> Self {
-		Self {
-			state,
-		}
-	}
+	fn from(state: State<'a>) -> Self { Self { state } }
 }

 impl<'a> Cursor<'a, Key<'a>> for Keys<'a> {
@@ -41,11 +35,7 @@ impl<'a> Cursor<'a, Key<'a>> for Keys<'a> {
 	fn seek(&mut self) { self.state.seek_fwd(); }

 	#[inline]
-	fn init(self, from: From<'a>) -> Self {
-		Self {
-			state: self.state.init_fwd(from),
-		}
-	}
+	fn init(self, from: From<'a>) -> Self { Self { state: self.state.init_fwd(from) } }
 }

 impl<'a> Stream for Keys<'a> {
@@ -17,18 +17,12 @@ pub(crate) struct KeysRev<'a> {

 impl<'a> KeysRev<'a> {
 	pub(crate) fn new(db: &'a Arc<Engine>, cf: &'a Arc<ColumnFamily>, opts: ReadOptions) -> Self {
-		Self {
-			state: State::new(db, cf, opts),
-		}
+		Self { state: State::new(db, cf, opts) }
 	}
 }

 impl<'a> convert::From<State<'a>> for KeysRev<'a> {
-	fn from(state: State<'a>) -> Self {
-		Self {
-			state,
-		}
-	}
+	fn from(state: State<'a>) -> Self { Self { state } }
 }

 impl<'a> Cursor<'a, Key<'a>> for KeysRev<'a> {
@@ -41,11 +35,7 @@ impl<'a> Cursor<'a, Key<'a>> for KeysRev<'a> {
 	fn seek(&mut self) { self.state.seek_rev(); }

 	#[inline]
-	fn init(self, from: From<'a>) -> Self {
-		Self {
-			state: self.state.init_rev(from),
-		}
-	}
+	fn init(self, from: From<'a>) -> Self { Self { state: self.state.init_rev(from) } }
 }

 impl<'a> Stream for KeysRev<'a> {
@@ -66,10 +66,7 @@ fn ser_complex() {
 		media_id: "AbCdEfGhIjK",
 	};

-	let dim = Dim {
-		width: 123,
-		height: 456,
-	};
+	let dim = Dim { width: 123, height: 456 };

 	let mut a = Vec::new();
 	a.extend_from_slice(b"mxc://");
@@ -128,9 +125,7 @@ fn ser_json_macro() {
 		foo: String,
 	}

-	let content = Foo {
-		foo: "bar".to_owned(),
-	};
+	let content = Foo { foo: "bar".to_owned() };
 	let content = serde_json::to_value(content).expect("failed to serialize content");
 	let sender: &UserId = "@foo:example.com".try_into().unwrap();
 	let serialized = serialize_to_vec(Json(json!({
@@ -153,7 +148,8 @@ fn ser_json_raw() {
 		..Default::default()
 	};

-	let value = serde_json::value::to_raw_value(&filter).expect("failed to serialize to raw value");
+	let value =
+		serde_json::value::to_raw_value(&filter).expect("failed to serialize to raw value");
 	let a = serialize_to_vec(value.get()).expect("failed to serialize raw value");
 	let s = String::from_utf8_lossy(&a);
 	assert_eq!(&s, r#"{"event_fields":["content.body"]}"#);
@@ -169,7 +165,8 @@ fn ser_json_raw_json() {
 		..Default::default()
 	};

-	let value = serde_json::value::to_raw_value(&filter).expect("failed to serialize to raw value");
+	let value =
+		serde_json::value::to_raw_value(&filter).expect("failed to serialize to raw value");
 	let a = serialize_to_vec(Json(value)).expect("failed to serialize json value");
 	let s = String::from_utf8_lossy(&a);
 	assert_eq!(&s, r#"{"event_fields":["content.body"]}"#);
@@ -241,7 +238,8 @@ fn de_tuple_ignore() {
 	let room_id: &RoomId = "!room:example.com".try_into().unwrap();

 	let raw: &[u8] = b"@user:example.com\xFF@user2:example.net\xFF!room:example.com";
-	let (a, _, c): (&UserId, Ignore, &RoomId) = de::from_slice(raw).expect("failed to deserialize");
+	let (a, _, c): (&UserId, Ignore, &RoomId) =
+		de::from_slice(raw).expect("failed to deserialize");

 	assert_eq!(a, user_id, "deserialized user_id does not match");
 	assert_eq!(c, room_id, "deserialized room_id does not match");
@@ -254,7 +252,8 @@ fn de_json_array() {

 	let b: Raw<Vec<Raw<String>>> = de::from_slice(&s).expect("failed to deserialize");

-	let d: Vec<String> = serde_json::from_str(b.json().get()).expect("failed to deserialize JSON");
+	let d: Vec<String> =
+		serde_json::from_str(b.json().get()).expect("failed to deserialize JSON");

 	for (i, a) in a.iter().enumerate() {
 		assert_eq!(*a, d[i]);
@@ -268,7 +267,8 @@ fn de_json_raw_array() {

 	let b: Raw<Vec<Raw<String>>> = de::from_slice(&s).expect("failed to deserialize");

-	let c: Vec<Raw<String>> = serde_json::from_str(b.json().get()).expect("failed to deserialize JSON");
+	let c: Vec<Raw<String>> =
+		serde_json::from_str(b.json().get()).expect("failed to deserialize JSON");

 	for (i, a) in a.iter().enumerate() {
 		let c = serde_json::to_value(c[i].json()).expect("failed to deserialize JSON to string");
@@ -30,13 +30,15 @@ pub(crate) fn _into_direction(mode: &IteratorMode<'_>) -> Direction {
 	use IteratorMode::{End, From, Start};

 	match mode {
-		Start | From(_, Forward) => Forward,
-		End | From(_, Reverse) => Reverse,
+		| Start | From(_, Forward) => Forward,
+		| End | From(_, Reverse) => Reverse,
 	}
 }

 #[inline]
-pub(crate) fn result<T>(r: std::result::Result<T, rocksdb::Error>) -> Result<T, conduwuit::Error> {
+pub(crate) fn result<T>(
+	r: std::result::Result<T, rocksdb::Error>,
+) -> Result<T, conduwuit::Error> {
 	r.map_or_else(or_else, and_then)
 }
@@ -15,10 +15,13 @@ pub(crate) struct Watchers {
 }

 impl Watchers {
-	pub(crate) fn watch<'a>(&'a self, prefix: &[u8]) -> Pin<Box<dyn Future<Output = ()> + Send + 'a>> {
+	pub(crate) fn watch<'a>(
+		&'a self,
+		prefix: &[u8],
+	) -> Pin<Box<dyn Future<Output = ()> + Send + 'a>> {
 		let mut rx = match self.watchers.write().unwrap().entry(prefix.to_vec()) {
-			hash_map::Entry::Occupied(o) => o.get().1.clone(),
-			hash_map::Entry::Vacant(v) => {
+			| hash_map::Entry::Occupied(o) => o.get().1.clone(),
+			| hash_map::Entry::Vacant(v) => {
 				let (tx, rx) = watch::channel(());
 				v.insert((tx, rx.clone()));
 				rx