apply new rustfmt.toml changes, fix some clippy lints

Signed-off-by: strawberry <strawberry@puppygock.gay>
strawberry 2024-12-15 00:05:47 -05:00
parent 0317cc8cc5
commit 77e0b76408
No known key found for this signature in database
296 changed files with 7147 additions and 4300 deletions
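
For orientation before the hunks: the reflowed string literals below wrap at a narrower column, match arms gain a leading `|`, short functions collapse onto a single line, and trailing struct-literal arguments hug the call parentheses. A rough sketch of rustfmt options that would produce output in this shape follows; the committed rustfmt.toml is not shown in this view, so every option value below is an inference from the diff, not the real file, and some options require nightly rustfmt.

# Hypothetical rustfmt.toml sketch -- values guessed from the reformatted output, not the committed file
max_width = 98                      # string literals and signatures now wrap near ~100 columns
match_arm_leading_pipes = "Always"  # every match arm is prefixed with `|`
fn_single_line = true               # short fn bodies stay on one line (nightly-only option)
overflow_delimited_expr = true      # trailing struct-literal args overflow into the call, e.g. sanitize_with_options(filename, Options { .. }) (nightly-only option)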

View file

@ -29,8 +29,9 @@ pub fn memory_usage() -> Option<String> {
let resident = mibs(stats::resident::read());
let retained = mibs(stats::retained::read());
Some(format!(
"allocated: {allocated:.2} MiB\nactive: {active:.2} MiB\nmapped: {mapped:.2} MiB\nmetadata: {metadata:.2} \
MiB\nresident: {resident:.2} MiB\nretained: {retained:.2} MiB\n"
"allocated: {allocated:.2} MiB\nactive: {active:.2} MiB\nmapped: {mapped:.2} \
MiB\nmetadata: {metadata:.2} MiB\nresident: {resident:.2} MiB\nretained: {retained:.2} \
MiB\n"
))
}

View file

@ -8,10 +8,20 @@ pub use je::{memory_stats, memory_usage};
#[cfg(all(not(target_env = "msvc"), feature = "hardened_malloc", not(feature = "jemalloc")))]
pub mod hardened;
#[cfg(all(not(target_env = "msvc"), feature = "hardened_malloc", not(feature = "jemalloc")))]
#[cfg(all(
not(target_env = "msvc"),
feature = "hardened_malloc",
not(feature = "jemalloc")
))]
pub use hardened::{memory_stats, memory_usage};
#[cfg(any(target_env = "msvc", all(not(feature = "hardened_malloc"), not(feature = "jemalloc"))))]
#[cfg(any(
target_env = "msvc",
all(not(feature = "hardened_malloc"), not(feature = "jemalloc"))
))]
pub mod default;
#[cfg(any(target_env = "msvc", all(not(feature = "hardened_malloc"), not(feature = "jemalloc"))))]
#[cfg(any(
target_env = "msvc",
all(not(feature = "hardened_malloc"), not(feature = "jemalloc"))
))]
pub use default::{memory_stats, memory_usage};

View file

@ -16,18 +16,24 @@ pub fn check(config: &Config) -> Result<()> {
warn_unknown_key(config);
if config.sentry && config.sentry_endpoint.is_none() {
return Err!(Config("sentry_endpoint", "Sentry cannot be enabled without an endpoint set"));
return Err!(Config(
"sentry_endpoint",
"Sentry cannot be enabled without an endpoint set"
));
}
if cfg!(all(feature = "hardened_malloc", feature = "jemalloc")) {
info!("hardened_malloc and jemalloc compile-time features are both enabled, this causes jemalloc to be used.");
info!(
"hardened_malloc and jemalloc compile-time features are both enabled, this causes \
jemalloc to be used."
);
}
if cfg!(not(unix)) && config.unix_socket_path.is_some() {
return Err!(Config(
"unix_socket_path",
"UNIX socket support is only available on *nix platforms. Please remove 'unix_socket_path' from your \
config."
"UNIX socket support is only available on *nix platforms. Please remove \
'unix_socket_path' from your config."
));
}
@ -44,30 +50,36 @@ pub fn check(config: &Config) -> Result<()> {
use std::path::Path;
if addr.ip().is_loopback() {
debug_info!("Found loopback listening address {addr}, running checks if we're in a container.");
debug_info!(
"Found loopback listening address {addr}, running checks if we're in a \
container."
);
if Path::new("/proc/vz").exists() /* Guest */ && !Path::new("/proc/bz").exists()
/* Host */
{
error!(
"You are detected using OpenVZ with a loopback/localhost listening address of {addr}. If you \
are using OpenVZ for containers and you use NAT-based networking to communicate with the \
host and guest, this will NOT work. Please change this to \"0.0.0.0\". If this is expected, \
you can ignore.",
"You are detected using OpenVZ with a loopback/localhost listening \
address of {addr}. If you are using OpenVZ for containers and you use \
NAT-based networking to communicate with the host and guest, this will \
NOT work. Please change this to \"0.0.0.0\". If this is expected, you \
can ignore.",
);
} else if Path::new("/.dockerenv").exists() {
error!(
"You are detected using Docker with a loopback/localhost listening address of {addr}. If you \
are using a reverse proxy on the host and require communication to conduwuit in the Docker \
container via NAT-based networking, this will NOT work. Please change this to \"0.0.0.0\". \
If this is expected, you can ignore.",
"You are detected using Docker with a loopback/localhost listening \
address of {addr}. If you are using a reverse proxy on the host and \
require communication to conduwuit in the Docker container via \
NAT-based networking, this will NOT work. Please change this to \
\"0.0.0.0\". If this is expected, you can ignore.",
);
} else if Path::new("/run/.containerenv").exists() {
error!(
"You are detected using Podman with a loopback/localhost listening address of {addr}. If you \
are using a reverse proxy on the host and require communication to conduwuit in the Podman \
container via NAT-based networking, this will NOT work. Please change this to \"0.0.0.0\". \
If this is expected, you can ignore.",
"You are detected using Podman with a loopback/localhost listening \
address of {addr}. If you are using a reverse proxy on the host and \
require communication to conduwuit in the Podman container via \
NAT-based networking, this will NOT work. Please change this to \
\"0.0.0.0\". If this is expected, you can ignore.",
);
}
}
@ -93,7 +105,8 @@ pub fn check(config: &Config) -> Result<()> {
if config.emergency_password == Some(String::from("F670$2CP@Hw8mG7RY1$%!#Ic7YA")) {
return Err!(Config(
"emergency_password",
"The public example emergency password is being used, this is insecure. Please change this."
"The public example emergency password is being used, this is insecure. Please \
change this."
));
}
@ -124,7 +137,8 @@ pub fn check(config: &Config) -> Result<()> {
if config.max_request_size < 10_000_000 {
return Err!(Config(
"max_request_size",
"Max request size is less than 10MB. Please increase it as this is too low for operable federation."
"Max request size is less than 10MB. Please increase it as this is too low for \
operable federation."
));
}
@ -145,11 +159,12 @@ pub fn check(config: &Config) -> Result<()> {
{
return Err!(Config(
"registration_token",
"!! You have `allow_registration` enabled without a token configured in your config which means you are \
allowing ANYONE to register on your conduwuit instance without any 2nd-step (e.g. registration token). \
If this is not the intended behaviour, please set a registration token. For security and safety reasons, \
conduwuit will shut down. If you are extra sure this is the desired behaviour you want, please set the \
following config option to true:
"!! You have `allow_registration` enabled without a token configured in your config \
which means you are allowing ANYONE to register on your conduwuit instance without \
any 2nd-step (e.g. registration token). If this is not the intended behaviour, \
please set a registration token. For security and safety reasons, conduwuit will \
shut down. If you are extra sure this is the desired behaviour you want, please \
set the following config option to true:
`yes_i_am_very_very_sure_i_want_an_open_registration_server_prone_to_abuse`"
));
}
@ -161,17 +176,18 @@ pub fn check(config: &Config) -> Result<()> {
{
warn!(
"Open registration is enabled via setting \
`yes_i_am_very_very_sure_i_want_an_open_registration_server_prone_to_abuse` and `allow_registration` to \
true without a registration token configured. You are expected to be aware of the risks now. If this is \
not the desired behaviour, please set a registration token."
`yes_i_am_very_very_sure_i_want_an_open_registration_server_prone_to_abuse` and \
`allow_registration` to true without a registration token configured. You are \
expected to be aware of the risks now. If this is not the desired behaviour, \
please set a registration token."
);
}
if config.allow_outgoing_presence && !config.allow_local_presence {
return Err!(Config(
"allow_local_presence",
"Outgoing presence requires allowing local presence. Please enable 'allow_local_presence' or disable \
outgoing presence."
"Outgoing presence requires allowing local presence. Please enable \
'allow_local_presence' or disable outgoing presence."
));
}
@ -180,9 +196,10 @@ pub fn check(config: &Config) -> Result<()> {
.contains(&"*".to_owned())
{
warn!(
"All URLs are allowed for URL previews via setting \"url_preview_domain_contains_allowlist\" to \"*\". \
This opens up significant attack surface to your server. You are expected to be aware of the risks by \
doing this."
"All URLs are allowed for URL previews via setting \
\"url_preview_domain_contains_allowlist\" to \"*\". This opens up significant \
attack surface to your server. You are expected to be aware of the risks by doing \
this."
);
}
if config
@ -190,9 +207,10 @@ pub fn check(config: &Config) -> Result<()> {
.contains(&"*".to_owned())
{
warn!(
"All URLs are allowed for URL previews via setting \"url_preview_domain_explicit_allowlist\" to \"*\". \
This opens up significant attack surface to your server. You are expected to be aware of the risks by \
doing this."
"All URLs are allowed for URL previews via setting \
\"url_preview_domain_explicit_allowlist\" to \"*\". This opens up significant \
attack surface to your server. You are expected to be aware of the risks by doing \
this."
);
}
if config
@ -200,9 +218,9 @@ pub fn check(config: &Config) -> Result<()> {
.contains(&"*".to_owned())
{
warn!(
"All URLs are allowed for URL previews via setting \"url_preview_url_contains_allowlist\" to \"*\". This \
opens up significant attack surface to your server. You are expected to be aware of the risks by doing \
this."
"All URLs are allowed for URL previews via setting \
\"url_preview_url_contains_allowlist\" to \"*\". This opens up significant attack \
surface to your server. You are expected to be aware of the risks by doing this."
);
}
@ -260,7 +278,8 @@ pub(super) fn is_dual_listening(raw_config: &Figment) -> Result<()> {
let contains_unix_socket = raw_config.contains("unix_socket_path");
if contains_address && contains_unix_socket {
return Err!(
"TOML keys \"address\" and \"unix_socket_path\" were both defined. Please specify only one option."
"TOML keys \"address\" and \"unix_socket_path\" were both defined. Please specify \
only one option."
);
}

View file

@ -18,8 +18,8 @@ pub use figment::{value::Value as FigmentValue, Figment};
use itertools::Itertools;
use regex::RegexSet;
use ruma::{
api::client::discovery::discover_support::ContactRole, OwnedRoomOrAliasId, OwnedServerName, OwnedUserId,
RoomVersionId,
api::client::discovery::discover_support::ContactRole, OwnedRoomOrAliasId, OwnedServerName,
OwnedUserId, RoomVersionId,
};
use serde::{de::IgnoredAny, Deserialize};
use url::Url;
@ -181,7 +181,10 @@ pub struct Config {
/// are scaled by your CPU core count.
///
/// default: 1.0
#[serde(default = "default_cache_capacity_modifier", alias = "conduit_cache_capacity_modifier")]
#[serde(
default = "default_cache_capacity_modifier",
alias = "conduit_cache_capacity_modifier"
)]
pub cache_capacity_modifier: f64,
/// default: varies by system
@ -1555,7 +1558,8 @@ pub struct Config {
pub db_pool_queue_size: usize,
#[serde(flatten)]
#[allow(clippy::zero_sized_map_values)] // this is a catchall, the map shouldn't be zero at runtime
#[allow(clippy::zero_sized_map_values)]
// this is a catchall, the map shouldn't be zero at runtime
catchall: BTreeMap<String, IgnoredAny>,
}
@ -1676,15 +1680,15 @@ impl Config {
fn get_bind_hosts(&self) -> Vec<IpAddr> {
match &self.address.addrs {
Left(addr) => vec![*addr],
Right(addrs) => addrs.clone(),
| Left(addr) => vec![*addr],
| Right(addrs) => addrs.clone(),
}
}
fn get_bind_ports(&self) -> Vec<u16> {
match &self.port.ports {
Left(port) => vec![*port],
Right(ports) => ports.clone(),
| Left(port) => vec![*port],
| Right(ports) => ports.clone(),
}
}
@ -1756,9 +1760,13 @@ impl fmt::Display for Config {
line("Allow registration", &self.allow_registration.to_string());
line(
"Registration token",
if self.registration_token.is_none() && self.registration_token_file.is_none() && self.allow_registration {
if self.registration_token.is_none()
&& self.registration_token_file.is_none()
&& self.allow_registration
{
"not set (⚠️ open registration!)"
} else if self.registration_token.is_none() && self.registration_token_file.is_none() {
} else if self.registration_token.is_none() && self.registration_token_file.is_none()
{
"not set"
} else {
"set"
@ -1811,7 +1819,8 @@ impl fmt::Display for Config {
&self.allow_outgoing_read_receipts.to_string(),
);
line(
"Block non-admin room invites (local and remote, admins can still send and receive invites)",
"Block non-admin room invites (local and remote, admins can still send and receive \
invites)",
&self.block_non_admin_invites.to_string(),
);
line("Enable admin escape commands", &self.admin_escape_commands.to_string());
@ -1859,13 +1868,10 @@ impl fmt::Display for Config {
"Lockdown public room directory (only allow admins to publish)",
&self.lockdown_public_room_directory.to_string(),
);
line(
"JWT secret",
match self.jwt_secret {
Some(_) => "set",
None => "not set",
},
);
line("JWT secret", match self.jwt_secret {
| Some(_) => "set",
| None => "not set",
});
line(
"Trusted key servers",
&self
@ -1979,7 +1985,8 @@ impl fmt::Display for Config {
&lst.join(", ")
});
line("Forbidden Remote Room Directory Server Names", {
let mut lst = Vec::with_capacity(self.forbidden_remote_room_directory_server_names.len());
let mut lst =
Vec::with_capacity(self.forbidden_remote_room_directory_server_names.len());
for domain in &self.forbidden_remote_room_directory_server_names {
lst.push(domain.host());
}
@ -2099,11 +2106,7 @@ fn default_address() -> ListeningAddr {
}
}
fn default_port() -> ListeningPort {
ListeningPort {
ports: Left(8008),
}
}
fn default_port() -> ListeningPort { ListeningPort { ports: Left(8008) } }
fn default_unix_socket_perms() -> u32 { 660 }
@ -2115,19 +2118,33 @@ fn default_pdu_cache_capacity() -> u32 { parallelism_scaled_u32(10_000).saturati
fn default_cache_capacity_modifier() -> f64 { 1.0 }
fn default_auth_chain_cache_capacity() -> u32 { parallelism_scaled_u32(10_000).saturating_add(100_000) }
fn default_auth_chain_cache_capacity() -> u32 {
parallelism_scaled_u32(10_000).saturating_add(100_000)
}
fn default_shorteventid_cache_capacity() -> u32 { parallelism_scaled_u32(50_000).saturating_add(100_000) }
fn default_shorteventid_cache_capacity() -> u32 {
parallelism_scaled_u32(50_000).saturating_add(100_000)
}
fn default_eventidshort_cache_capacity() -> u32 { parallelism_scaled_u32(25_000).saturating_add(100_000) }
fn default_eventidshort_cache_capacity() -> u32 {
parallelism_scaled_u32(25_000).saturating_add(100_000)
}
fn default_eventid_pdu_cache_capacity() -> u32 { parallelism_scaled_u32(25_000).saturating_add(100_000) }
fn default_eventid_pdu_cache_capacity() -> u32 {
parallelism_scaled_u32(25_000).saturating_add(100_000)
}
fn default_shortstatekey_cache_capacity() -> u32 { parallelism_scaled_u32(10_000).saturating_add(100_000) }
fn default_shortstatekey_cache_capacity() -> u32 {
parallelism_scaled_u32(10_000).saturating_add(100_000)
}
fn default_statekeyshort_cache_capacity() -> u32 { parallelism_scaled_u32(10_000).saturating_add(100_000) }
fn default_statekeyshort_cache_capacity() -> u32 {
parallelism_scaled_u32(10_000).saturating_add(100_000)
}
fn default_servernameevent_data_cache_capacity() -> u32 { parallelism_scaled_u32(100_000).saturating_add(500_000) }
fn default_servernameevent_data_cache_capacity() -> u32 {
parallelism_scaled_u32(100_000).saturating_add(500_000)
}
fn default_server_visibility_cache_capacity() -> u32 { parallelism_scaled_u32(500) }
@ -2203,7 +2220,9 @@ fn default_jaeger_filter() -> String {
fn default_tracing_flame_output_path() -> String { "./tracing.folded".to_owned() }
fn default_trusted_servers() -> Vec<OwnedServerName> { vec![OwnedServerName::try_from("matrix.org").unwrap()] }
fn default_trusted_servers() -> Vec<OwnedServerName> {
vec![OwnedServerName::try_from("matrix.org").unwrap()]
}
/// do debug logging by default for debug builds
#[must_use]
@ -2332,4 +2351,6 @@ fn default_trusted_server_batch_size() -> usize { 256 }
fn default_db_pool_workers() -> usize { sys::available_parallelism().saturating_mul(4).max(32) }
fn default_db_pool_queue_size() -> usize { sys::available_parallelism().saturating_mul(8).max(256) }
fn default_db_pool_queue_size() -> usize {
sys::available_parallelism().saturating_mul(8).max(256)
}

View file

@ -42,11 +42,9 @@ pub enum ProxyConfig {
impl ProxyConfig {
pub fn to_proxy(&self) -> Result<Option<Proxy>> {
Ok(match self.clone() {
Self::None => None,
Self::Global {
url,
} => Some(Proxy::all(url)?),
Self::ByDomain(proxies) => Some(Proxy::custom(move |url| {
| Self::None => None,
| Self::Global { url } => Some(Proxy::all(url)?),
| Self::ByDomain(proxies) => Some(Proxy::custom(move |url| {
// first matching proxy
proxies.iter().find_map(|proxy| proxy.for_url(url)).cloned()
})),
@ -76,24 +74,26 @@ impl PartialProxyConfig {
for wc_domain in &self.include {
if wc_domain.matches(domain) {
match included_because {
Some(prev) if !wc_domain.more_specific_than(prev) => (),
_ => included_because = Some(wc_domain),
| Some(prev) if !wc_domain.more_specific_than(prev) => (),
| _ => included_because = Some(wc_domain),
}
}
}
for wc_domain in &self.exclude {
if wc_domain.matches(domain) {
match excluded_because {
Some(prev) if !wc_domain.more_specific_than(prev) => (),
_ => excluded_because = Some(wc_domain),
| Some(prev) if !wc_domain.more_specific_than(prev) => (),
| _ => excluded_because = Some(wc_domain),
}
}
}
match (included_because, excluded_because) {
(Some(a), Some(b)) if a.more_specific_than(b) => Some(&self.url), /* included for a more specific reason */
| (Some(a), Some(b)) if a.more_specific_than(b) => Some(&self.url), /* included for
* a more specific
* reason */
// than excluded
(Some(_), None) => Some(&self.url),
_ => None,
| (Some(_), None) => Some(&self.url),
| _ => None,
}
}
}
@ -108,19 +108,19 @@ enum WildCardedDomain {
impl WildCardedDomain {
fn matches(&self, domain: &str) -> bool {
match self {
Self::WildCard => true,
Self::WildCarded(d) => domain.ends_with(d),
Self::Exact(d) => domain == d,
| Self::WildCard => true,
| Self::WildCarded(d) => domain.ends_with(d),
| Self::Exact(d) => domain == d,
}
}
fn more_specific_than(&self, other: &Self) -> bool {
match (self, other) {
(Self::WildCard, Self::WildCard) => false,
(_, Self::WildCard) => true,
(Self::Exact(a), Self::WildCarded(_)) => other.matches(a),
(Self::WildCarded(a), Self::WildCarded(b)) => a != b && a.ends_with(b),
_ => false,
| (Self::WildCard, Self::WildCard) => false,
| (_, Self::WildCard) => true,
| (Self::Exact(a), Self::WildCarded(_)) => other.matches(a),
| (Self::WildCarded(a), Self::WildCarded(b)) => a != b && a.ends_with(b),
| _ => false,
}
}
}

View file

@ -84,7 +84,9 @@ pub fn trap() {
}
#[must_use]
pub fn panic_str(p: &Box<dyn Any + Send>) -> &'static str { p.downcast_ref::<&str>().copied().unwrap_or_default() }
pub fn panic_str(p: &Box<dyn Any + Send>) -> &'static str {
p.downcast_ref::<&str>().copied().unwrap_or_default()
}
#[inline(always)]
#[must_use]
@ -96,4 +98,6 @@ pub fn type_name<T: ?Sized>() -> &'static str { std::any::type_name::<T>() }
#[must_use]
#[inline]
pub const fn logging() -> bool { cfg!(debug_assertions) && cfg!(not(feature = "dev_release_log_level")) }
pub const fn logging() -> bool {
cfg!(debug_assertions) && cfg!(not(feature = "dev_release_log_level"))
}

View file

@ -184,7 +184,12 @@ impl Visit for Visitor<'_> {
}
}
pub fn visit(out: &mut String, level: Level, __callsite: &'static DefaultCallsite, vs: &mut ValueSet<'_>) {
pub fn visit(
out: &mut String,
level: Level,
__callsite: &'static DefaultCallsite,
vs: &mut ValueSet<'_>,
) {
let meta = __callsite.metadata();
let enabled = level_enabled!(level) && {
let interest = __callsite.interest();

View file

@ -68,18 +68,20 @@ where
pub fn inspect_log<E: fmt::Display>(error: &E) { inspect_log_level(error, Level::ERROR); }
#[inline]
pub fn inspect_debug_log<E: fmt::Debug>(error: &E) { inspect_debug_log_level(error, Level::ERROR); }
pub fn inspect_debug_log<E: fmt::Debug>(error: &E) {
inspect_debug_log_level(error, Level::ERROR);
}
#[inline]
pub fn inspect_log_level<E: fmt::Display>(error: &E, level: Level) {
use crate::{debug, error, info, trace, warn};
match level {
Level::ERROR => error!("{error}"),
Level::WARN => warn!("{error}"),
Level::INFO => info!("{error}"),
Level::DEBUG => debug!("{error}"),
Level::TRACE => trace!("{error}"),
| Level::ERROR => error!("{error}"),
| Level::WARN => warn!("{error}"),
| Level::INFO => info!("{error}"),
| Level::DEBUG => debug!("{error}"),
| Level::TRACE => trace!("{error}"),
}
}
@ -88,10 +90,10 @@ pub fn inspect_debug_log_level<E: fmt::Debug>(error: &E, level: Level) {
use crate::{debug, debug_error, debug_info, debug_warn, trace};
match level {
Level::ERROR => debug_error!("{error:?}"),
Level::WARN => debug_warn!("{error:?}"),
Level::INFO => debug_info!("{error:?}"),
Level::DEBUG => debug!("{error:?}"),
Level::TRACE => trace!("{error:?}"),
| Level::ERROR => debug_error!("{error:?}"),
| Level::WARN => debug_warn!("{error:?}"),
| Level::INFO => debug_info!("{error:?}"),
| Level::DEBUG => debug!("{error:?}"),
| Level::TRACE => trace!("{error:?}"),
}
}

View file

@ -128,23 +128,25 @@ pub enum Error {
impl Error {
//#[deprecated]
pub fn bad_database(message: &'static str) -> Self { crate::err!(Database(error!("{message}"))) }
pub fn bad_database(message: &'static str) -> Self {
crate::err!(Database(error!("{message}")))
}
/// Sanitizes public-facing errors that can leak sensitive information.
pub fn sanitized_message(&self) -> String {
match self {
Self::Database(..) => String::from("Database error occurred."),
Self::Io(..) => String::from("I/O error occurred."),
_ => self.message(),
| Self::Database(..) => String::from("Database error occurred."),
| Self::Io(..) => String::from("I/O error occurred."),
| _ => self.message(),
}
}
/// Generate the error message string.
pub fn message(&self) -> String {
match self {
Self::Federation(ref origin, ref error) => format!("Answer from {origin}: {error}"),
Self::Ruma(ref error) => response::ruma_error_message(error),
_ => format!("{self}"),
| Self::Federation(ref origin, ref error) => format!("Answer from {origin}: {error}"),
| Self::Ruma(ref error) => response::ruma_error_message(error),
| _ => format!("{self}"),
}
}
@ -154,9 +156,10 @@ impl Error {
use ruma::api::client::error::ErrorKind::Unknown;
match self {
Self::Federation(_, error) | Self::Ruma(error) => response::ruma_error_kind(error).clone(),
Self::BadRequest(kind, ..) | Self::Request(kind, ..) => kind.clone(),
_ => Unknown,
| Self::Federation(_, error) | Self::Ruma(error) =>
response::ruma_error_kind(error).clone(),
| Self::BadRequest(kind, ..) | Self::Request(kind, ..) => kind.clone(),
| _ => Unknown,
}
}
@ -166,12 +169,12 @@ impl Error {
use http::StatusCode;
match self {
Self::Federation(_, error) | Self::Ruma(error) => error.status_code,
Self::Request(kind, _, code) => response::status_code(kind, *code),
Self::BadRequest(kind, ..) => response::bad_request_code(kind),
Self::Reqwest(error) => error.status().unwrap_or(StatusCode::INTERNAL_SERVER_ERROR),
Self::Conflict(_) => StatusCode::CONFLICT,
_ => StatusCode::INTERNAL_SERVER_ERROR,
| Self::Federation(_, error) | Self::Ruma(error) => error.status_code,
| Self::Request(kind, _, code) => response::status_code(kind, *code),
| Self::BadRequest(kind, ..) => response::bad_request_code(kind),
| Self::Reqwest(error) => error.status().unwrap_or(StatusCode::INTERNAL_SERVER_ERROR),
| Self::Conflict(_) => StatusCode::CONFLICT,
| _ => StatusCode::INTERNAL_SERVER_ERROR,
}
}

View file

@ -20,9 +20,9 @@ impl Error {
#[inline]
pub fn into_panic(self) -> Box<dyn Any + Send + 'static> {
match self {
Self::Panic(_, e) | Self::PanicAny(e) => e,
Self::JoinError(e) => e.into_panic(),
_ => Box::new(self),
| Self::Panic(_, e) | Self::PanicAny(e) => e,
| Self::JoinError(e) => e.into_panic(),
| _ => Box::new(self),
}
}
@ -37,9 +37,9 @@ impl Error {
#[inline]
pub fn is_panic(&self) -> bool {
match &self {
Self::Panic(..) | Self::PanicAny(..) => true,
Self::JoinError(e) => e.is_panic(),
_ => false,
| Self::Panic(..) | Self::PanicAny(..) => true,
| Self::JoinError(e) => e.is_panic(),
| _ => false,
}
}
}

View file

@ -57,49 +57,35 @@ pub(super) fn bad_request_code(kind: &ErrorKind) -> StatusCode {
match kind {
// 429
LimitExceeded {
..
} => StatusCode::TOO_MANY_REQUESTS,
| LimitExceeded { .. } => StatusCode::TOO_MANY_REQUESTS,
// 413
TooLarge => StatusCode::PAYLOAD_TOO_LARGE,
| TooLarge => StatusCode::PAYLOAD_TOO_LARGE,
// 405
Unrecognized => StatusCode::METHOD_NOT_ALLOWED,
| Unrecognized => StatusCode::METHOD_NOT_ALLOWED,
// 404
NotFound => StatusCode::NOT_FOUND,
| NotFound => StatusCode::NOT_FOUND,
// 403
GuestAccessForbidden
| GuestAccessForbidden
| ThreepidAuthFailed
| UserDeactivated
| ThreepidDenied
| WrongRoomKeysVersion {
..
}
| Forbidden {
..
} => StatusCode::FORBIDDEN,
| WrongRoomKeysVersion { .. }
| Forbidden { .. } => StatusCode::FORBIDDEN,
// 401
UnknownToken {
..
}
| MissingToken
| Unauthorized => StatusCode::UNAUTHORIZED,
| UnknownToken { .. } | MissingToken | Unauthorized => StatusCode::UNAUTHORIZED,
// 400
_ => StatusCode::BAD_REQUEST,
| _ => StatusCode::BAD_REQUEST,
}
}
pub(super) fn ruma_error_message(error: &ruma::api::client::error::Error) -> String {
if let ErrorBody::Standard {
message,
..
} = &error.body
{
if let ErrorBody::Standard { message, .. } = &error.body {
return message.to_string();
}

View file

@ -41,17 +41,22 @@ static FEATURES: OnceLock<Vec<String>> = OnceLock::new();
static DEPENDENCIES: OnceLock<DepsSet> = OnceLock::new();
#[must_use]
pub fn dependencies_names() -> Vec<&'static str> { dependencies().keys().map(String::as_str).collect() }
pub fn dependencies_names() -> Vec<&'static str> {
dependencies().keys().map(String::as_str).collect()
}
pub fn dependencies() -> &'static DepsSet {
DEPENDENCIES
.get_or_init(|| init_dependencies().unwrap_or_else(|e| panic!("Failed to initialize dependencies: {e}")))
DEPENDENCIES.get_or_init(|| {
init_dependencies().unwrap_or_else(|e| panic!("Failed to initialize dependencies: {e}"))
})
}
/// List of all possible features for the project. For *enabled* features in
/// this build see the companion function in info::rustc.
pub fn features() -> &'static Vec<String> {
FEATURES.get_or_init(|| init_features().unwrap_or_else(|e| panic!("Failed initialize features: {e}")))
FEATURES.get_or_init(|| {
init_features().unwrap_or_else(|e| panic!("Failed initialize features: {e}"))
})
}
fn init_features() -> Result<Vec<String>> {

View file

@ -34,7 +34,9 @@ impl crate::Server {
}
#[inline]
pub fn available_room_versions(&self) -> impl Iterator<Item = (RoomVersionId, RoomVersionStability)> {
pub fn available_room_versions(
&self,
) -> impl Iterator<Item = (RoomVersionId, RoomVersionStability)> {
available_room_versions()
}

View file

@ -22,11 +22,7 @@ type ScopeNames = ArrayVec<&'static str, 32>;
impl Layer {
#[inline]
pub fn new(state: &Arc<State>) -> Self {
Self {
state: state.clone(),
}
}
pub fn new(state: &Arc<State>) -> Self { Self { state: state.clone() } }
}
impl fmt::Debug for Layer {
@ -56,9 +52,7 @@ where
S: Subscriber + for<'a> LookupSpan<'a>,
{
let names = ScopeNames::new();
let mut visitor = Visitor {
values: Values::new(),
};
let mut visitor = Visitor { values: Values::new() };
event.record(&mut visitor);
let mut closure = capture.closure.lock().expect("exclusive lock");
@ -83,7 +77,7 @@ where
}
}
capture.filter.as_ref().map_or(true, |filter| {
capture.filter.as_ref().is_none_or(|filter| {
filter(Data {
layer,
event,
@ -95,7 +89,9 @@ where
}
impl Visit for Visitor {
fn record_debug(&mut self, f: &Field, v: &dyn fmt::Debug) { self.values.push((f.name(), format!("{v:?}"))); }
fn record_debug(&mut self, f: &Field, v: &dyn fmt::Debug) {
self.values.push((f.name(), format!("{v:?}")));
}
fn record_str(&mut self, f: &Field, v: &str) { self.values.push((f.name(), v.to_owned())); }
}
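One of the clippy fixes is visible just above: `capture.filter.as_ref().map_or(true, |filter| { .. })` becomes `is_none_or(..)`. `Option::is_none_or` was stabilized in Rust 1.82 and expresses "absent, or satisfies the predicate" directly, which newer clippy releases nudge toward. A minimal, self-contained sketch of the same rewrite, with invented names (`passes`, `filter`, `value`) used purely for illustration:

// Hypothetical standalone illustration of the map_or(true, ..) -> is_none_or(..) rewrite above.
fn passes(filter: Option<&dyn Fn(i32) -> bool>, value: i32) -> bool {
    // Before (what the old line expressed): a missing filter means "pass".
    //     filter.map_or(true, |f| f(value))
    // After: is_none_or states the same intent without the `true` default.
    filter.is_none_or(|f| f(value))
}

fn main() {
    assert!(passes(None, 7)); // no filter configured: everything passes
    assert!(passes(Some(&|v: i32| v > 3), 7)); // filter present and satisfied
    assert!(!passes(Some(&|v: i32| v > 9), 7)); // filter present and rejects the value
}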

View file

@ -41,9 +41,7 @@ impl Capture {
#[must_use]
pub fn start(self: &Arc<Self>) -> Guard {
self.state.add(self);
Guard {
capture: self.clone(),
}
Guard { capture: self.clone() }
}
pub fn stop(self: &Arc<Self>) { self.state.del(self); }

View file

@ -13,11 +13,7 @@ impl Default for State {
impl State {
#[must_use]
pub fn new() -> Self {
Self {
active: RwLock::new(Vec::new()),
}
}
pub fn new() -> Self { Self { active: RwLock::new(Vec::new()) } }
pub(super) fn add(&self, capture: &Arc<Capture>) {
self.active

View file

@ -5,11 +5,11 @@ use super::Level;
#[must_use]
pub fn html(level: &Level) -> (&'static str, &'static str) {
match *level {
Level::TRACE => ("#000000", "#A0A0A0"),
Level::DEBUG => ("#000000", "#FFFFFF"),
Level::ERROR => ("#000000", "#FF0000"),
Level::WARN => ("#000000", "#FFFF00"),
Level::INFO => ("#FFFFFF", "#008E00"),
| Level::TRACE => ("#000000", "#A0A0A0"),
| Level::DEBUG => ("#000000", "#FFFFFF"),
| Level::ERROR => ("#000000", "#FF0000"),
| Level::WARN => ("#000000", "#FFFF00"),
| Level::INFO => ("#FFFFFF", "#008E00"),
}
}
@ -18,10 +18,10 @@ pub fn html(level: &Level) -> (&'static str, &'static str) {
#[must_use]
pub fn code_tag(level: &Level) -> &'static str {
match *level {
Level::TRACE => "#888888",
Level::DEBUG => "#C8C8C8",
Level::ERROR => "#FF0000",
Level::WARN => "#FFFF00",
Level::INFO => "#00FF00",
| Level::TRACE => "#888888",
| Level::DEBUG => "#C8C8C8",
| Level::ERROR => "#FF0000",
| Level::WARN => "#FFFF00",
| Level::INFO => "#00FF00",
}
}

View file

@ -11,7 +11,8 @@ where
let level = level.as_str().to_uppercase();
write!(
out,
"<font data-mx-color=\"{color}\"><code>{level:>5}</code></font> <code>{span:^12}</code> <code>{msg}</code><br>"
"<font data-mx-color=\"{color}\"><code>{level:>5}</code></font> <code>{span:^12}</code> \
<code>{msg}</code><br>"
)?;
Ok(())

View file

@ -5,13 +5,13 @@ use crate::Result;
#[inline]
pub fn from_str(str: &str) -> Result<FmtSpan, FmtSpan> {
match str.to_uppercase().as_str() {
"ENTER" => Ok(FmtSpan::ENTER),
"EXIT" => Ok(FmtSpan::EXIT),
"NEW" => Ok(FmtSpan::NEW),
"CLOSE" => Ok(FmtSpan::CLOSE),
"ACTIVE" => Ok(FmtSpan::ACTIVE),
"FULL" => Ok(FmtSpan::FULL),
"NONE" => Ok(FmtSpan::NONE),
_ => Err(FmtSpan::NONE),
| "ENTER" => Ok(FmtSpan::ENTER),
| "EXIT" => Ok(FmtSpan::EXIT),
| "NEW" => Ok(FmtSpan::NEW),
| "CLOSE" => Ok(FmtSpan::CLOSE),
| "ACTIVE" => Ok(FmtSpan::ACTIVE),
| "FULL" => Ok(FmtSpan::FULL),
| "NONE" => Ok(FmtSpan::NONE),
| _ => Err(FmtSpan::NONE),
}
}

View file

@ -25,10 +25,7 @@ impl Suppress {
.reload(&suppress, Some(&[handle]))
.expect("log filter reloaded");
Self {
server: server.clone(),
restore,
}
Self { server: server.clone(), restore }
}
}

View file

@ -68,5 +68,7 @@ impl Metrics {
pub fn task_root(&self) -> Option<&TaskMonitor> { self.task_monitor.as_ref() }
pub fn runtime_metrics(&self) -> Option<&runtime::RuntimeMetrics> { self.runtime_metrics.as_ref() }
pub fn runtime_metrics(&self) -> Option<&runtime::RuntimeMetrics> {
self.runtime_metrics.as_ref()
}
}

View file

@ -35,7 +35,8 @@ impl Builder {
{
Self {
event_type: content.event_type().into(),
content: to_raw_value(content).expect("Builder failed to serialize state event content to RawValue"),
content: to_raw_value(content)
.expect("Builder failed to serialize state event content to RawValue"),
state_key: Some(state_key),
..Self::default()
}
@ -47,7 +48,8 @@ impl Builder {
{
Self {
event_type: content.event_type().into(),
content: to_raw_value(content).expect("Builder failed to serialize timeline event content to RawValue"),
content: to_raw_value(content)
.expect("Builder failed to serialize timeline event content to RawValue"),
..Self::default()
}
}

View file

@ -21,8 +21,8 @@ impl Count {
#[must_use]
pub fn from_signed(signed: i64) -> Self {
match signed {
i64::MIN..=0 => Self::Backfilled(signed),
_ => Self::Normal(signed as u64),
| i64::MIN..=0 => Self::Backfilled(signed),
| _ => Self::Normal(signed as u64),
}
}
@ -31,8 +31,8 @@ impl Count {
pub fn into_unsigned(self) -> u64 {
self.debug_assert_valid();
match self {
Self::Normal(i) => i,
Self::Backfilled(i) => i as u64,
| Self::Normal(i) => i,
| Self::Backfilled(i) => i as u64,
}
}
@ -41,8 +41,8 @@ impl Count {
pub fn into_signed(self) -> i64 {
self.debug_assert_valid();
match self {
Self::Normal(i) => i as i64,
Self::Backfilled(i) => i,
| Self::Normal(i) => i as i64,
| Self::Backfilled(i) => i,
}
}
@ -51,27 +51,27 @@ impl Count {
pub fn into_normal(self) -> Self {
self.debug_assert_valid();
match self {
Self::Normal(i) => Self::Normal(i),
Self::Backfilled(_) => Self::Normal(0),
| Self::Normal(i) => Self::Normal(i),
| Self::Backfilled(_) => Self::Normal(0),
}
}
#[inline]
pub fn checked_inc(self, dir: Direction) -> Result<Self, Error> {
match dir {
Direction::Forward => self.checked_add(1),
Direction::Backward => self.checked_sub(1),
| Direction::Forward => self.checked_add(1),
| Direction::Backward => self.checked_sub(1),
}
}
#[inline]
pub fn checked_add(self, add: u64) -> Result<Self, Error> {
Ok(match self {
Self::Normal(i) => Self::Normal(
| Self::Normal(i) => Self::Normal(
i.checked_add(add)
.ok_or_else(|| err!(Arithmetic("Count::Normal overflow")))?,
),
Self::Backfilled(i) => Self::Backfilled(
| Self::Backfilled(i) => Self::Backfilled(
i.checked_add(add as i64)
.ok_or_else(|| err!(Arithmetic("Count::Backfilled overflow")))?,
),
@ -81,11 +81,11 @@ impl Count {
#[inline]
pub fn checked_sub(self, sub: u64) -> Result<Self, Error> {
Ok(match self {
Self::Normal(i) => Self::Normal(
| Self::Normal(i) => Self::Normal(
i.checked_sub(sub)
.ok_or_else(|| err!(Arithmetic("Count::Normal underflow")))?,
),
Self::Backfilled(i) => Self::Backfilled(
| Self::Backfilled(i) => Self::Backfilled(
i.checked_sub(sub as i64)
.ok_or_else(|| err!(Arithmetic("Count::Backfilled underflow")))?,
),
@ -96,8 +96,8 @@ impl Count {
#[must_use]
pub fn saturating_inc(self, dir: Direction) -> Self {
match dir {
Direction::Forward => self.saturating_add(1),
Direction::Backward => self.saturating_sub(1),
| Direction::Forward => self.saturating_add(1),
| Direction::Backward => self.saturating_sub(1),
}
}
@ -105,8 +105,8 @@ impl Count {
#[must_use]
pub fn saturating_add(self, add: u64) -> Self {
match self {
Self::Normal(i) => Self::Normal(i.saturating_add(add)),
Self::Backfilled(i) => Self::Backfilled(i.saturating_add(add as i64)),
| Self::Normal(i) => Self::Normal(i.saturating_add(add)),
| Self::Backfilled(i) => Self::Backfilled(i.saturating_add(add as i64)),
}
}
@ -114,8 +114,8 @@ impl Count {
#[must_use]
pub fn saturating_sub(self, sub: u64) -> Self {
match self {
Self::Normal(i) => Self::Normal(i.saturating_sub(sub)),
Self::Backfilled(i) => Self::Backfilled(i.saturating_sub(sub as i64)),
| Self::Normal(i) => Self::Normal(i.saturating_sub(sub)),
| Self::Backfilled(i) => Self::Backfilled(i.saturating_sub(sub as i64)),
}
}
@ -139,8 +139,8 @@ impl Display for Count {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
self.debug_assert_valid();
match self {
Self::Normal(i) => write!(f, "{i}"),
Self::Backfilled(i) => write!(f, "{i}"),
| Self::Normal(i) => write!(f, "{i}"),
| Self::Backfilled(i) => write!(f, "{i}"),
}
}
}

View file

@ -19,13 +19,19 @@ impl Event for Pdu {
fn content(&self) -> &RawJsonValue { &self.content }
fn origin_server_ts(&self) -> MilliSecondsSinceUnixEpoch { MilliSecondsSinceUnixEpoch(self.origin_server_ts) }
fn origin_server_ts(&self) -> MilliSecondsSinceUnixEpoch {
MilliSecondsSinceUnixEpoch(self.origin_server_ts)
}
fn state_key(&self) -> Option<&str> { self.state_key.as_deref() }
fn prev_events(&self) -> impl DoubleEndedIterator<Item = &Self::Id> + Send + '_ { self.prev_events.iter() }
fn prev_events(&self) -> impl DoubleEndedIterator<Item = &Self::Id> + Send + '_ {
self.prev_events.iter()
}
fn auth_events(&self) -> impl DoubleEndedIterator<Item = &Self::Id> + Send + '_ { self.auth_events.iter() }
fn auth_events(&self) -> impl DoubleEndedIterator<Item = &Self::Id> + Send + '_ {
self.auth_events.iter()
}
fn redacts(&self) -> Option<&Self::Id> { self.redacts.as_ref() }
}

View file

@ -8,7 +8,8 @@ use crate::{err, Result};
/// Returns a tuple of the new `EventId` and the PDU as a `BTreeMap<String,
/// CanonicalJsonValue>`.
pub fn gen_event_id_canonical_json(
pdu: &RawJsonValue, room_version_id: &RoomVersionId,
pdu: &RawJsonValue,
room_version_id: &RoomVersionId,
) -> Result<(OwnedEventId, CanonicalJsonObject)> {
let value: CanonicalJsonObject = serde_json::from_str(pdu.get())
.map_err(|e| err!(BadServerResponse(warn!("Error parsing incoming event: {e:?}"))))?;
@ -19,7 +20,10 @@ pub fn gen_event_id_canonical_json(
}
/// Generates a correct eventId for the incoming pdu.
pub fn gen_event_id(value: &CanonicalJsonObject, room_version_id: &RoomVersionId) -> Result<OwnedEventId> {
pub fn gen_event_id(
value: &CanonicalJsonObject,
room_version_id: &RoomVersionId,
) -> Result<OwnedEventId> {
let reference_hash = ruma::signatures::reference_hash(value, room_version_id)?;
let event_id: OwnedEventId = format!("${reference_hash}").try_into()?;

View file

@ -84,7 +84,7 @@ fn matches_url(&self, filter: &RoomEventFilter) -> bool {
.is_some_and(Value::is_string);
match url_filter {
UrlFilter::EventsWithUrl => url,
UrlFilter::EventsWithoutUrl => !url,
| UrlFilter::EventsWithUrl => url,
| UrlFilter::EventsWithoutUrl => !url,
}
}

View file

@ -15,7 +15,8 @@ mod unsigned;
use std::{cmp::Ordering, sync::Arc};
use ruma::{
events::TimelineEventType, CanonicalJsonObject, CanonicalJsonValue, EventId, OwnedRoomId, OwnedUserId, UInt,
events::TimelineEventType, CanonicalJsonObject, CanonicalJsonValue, EventId, OwnedRoomId,
OwnedUserId, UInt,
};
use serde::{Deserialize, Serialize};
use serde_json::value::RawValue as RawJsonValue;

View file

@ -29,10 +29,10 @@ impl RawId {
#[must_use]
pub fn shortroomid(self) -> [u8; INT_LEN] {
match self {
Self::Normal(raw) => raw[0..INT_LEN]
| Self::Normal(raw) => raw[0..INT_LEN]
.try_into()
.expect("normal raw shortroomid array from slice"),
Self::Backfilled(raw) => raw[0..INT_LEN]
| Self::Backfilled(raw) => raw[0..INT_LEN]
.try_into()
.expect("backfilled raw shortroomid array from slice"),
}
@ -42,10 +42,10 @@ impl RawId {
#[must_use]
pub fn shorteventid(self) -> [u8; INT_LEN] {
match self {
Self::Normal(raw) => raw[INT_LEN..INT_LEN * 2]
| Self::Normal(raw) => raw[INT_LEN..INT_LEN * 2]
.try_into()
.expect("normal raw shorteventid array from slice"),
Self::Backfilled(raw) => raw[INT_LEN * 2..INT_LEN * 3]
| Self::Backfilled(raw) => raw[INT_LEN * 2..INT_LEN * 3]
.try_into()
.expect("backfilled raw shorteventid array from slice"),
}
@ -55,8 +55,8 @@ impl RawId {
#[must_use]
pub fn as_bytes(&self) -> &[u8] {
match self {
Self::Normal(ref raw) => raw,
Self::Backfilled(ref raw) => raw,
| Self::Normal(ref raw) => raw,
| Self::Backfilled(ref raw) => raw,
}
}
}
@ -70,17 +70,17 @@ impl From<&[u8]> for RawId {
#[inline]
fn from(id: &[u8]) -> Self {
match id.len() {
Self::NORMAL_LEN => Self::Normal(
| Self::NORMAL_LEN => Self::Normal(
id[0..Self::NORMAL_LEN]
.try_into()
.expect("normal RawId from [u8]"),
),
Self::BACKFILLED_LEN => Self::Backfilled(
| Self::BACKFILLED_LEN => Self::Backfilled(
id[0..Self::BACKFILLED_LEN]
.try_into()
.expect("backfilled RawId from [u8]"),
),
_ => unimplemented!("unrecognized RawId length"),
| _ => unimplemented!("unrecognized RawId length"),
}
}
}
@ -95,11 +95,11 @@ impl From<Id> for RawId {
vec.extend(id.shortroomid.to_be_bytes());
id.shorteventid.debug_assert_valid();
match id.shorteventid {
Count::Normal(shorteventid) => {
| Count::Normal(shorteventid) => {
vec.extend(shorteventid.to_be_bytes());
Self::Normal(vec.as_ref().try_into().expect("RawVec into RawId::Normal"))
},
Count::Backfilled(shorteventid) => {
| Count::Backfilled(shorteventid) => {
vec.extend(0_u64.to_be_bytes());
vec.extend(shorteventid.to_be_bytes());
Self::Backfilled(

View file

@ -22,8 +22,8 @@ struct ExtractRedactedBecause {
pub fn redact(&mut self, room_version_id: &RoomVersionId, reason: &Self) -> Result {
self.unsigned = None;
let mut content =
serde_json::from_str(self.content.get()).map_err(|_| Error::bad_database("PDU in db has invalid content."))?;
let mut content = serde_json::from_str(self.content.get())
.map_err(|_| Error::bad_database("PDU in db has invalid content."))?;
redact_content_in_place(&mut content, room_version_id, self.kind.to_string())
.map_err(|e| Error::Redaction(self.sender.server_name().to_owned(), e))?;
@ -75,7 +75,9 @@ pub fn is_redacted(&self) -> bool {
#[must_use]
pub fn copy_redacts(&self) -> (Option<Arc<EventId>>, Box<RawJsonValue>) {
if self.kind == TimelineEventType::RoomRedaction {
if let Ok(mut content) = serde_json::from_str::<RoomRedactionEventContent>(self.content.get()) {
if let Ok(mut content) =
serde_json::from_str::<RoomRedactionEventContent>(self.content.get())
{
if let Some(redacts) = content.redacts {
return (Some(redacts.into()), self.content.clone());
} else if let Some(redacts) = self.redacts.clone() {

View file

@ -1,8 +1,8 @@
use ruma::{
events::{
room::member::RoomMemberEventContent, space::child::HierarchySpaceChildEvent, AnyEphemeralRoomEvent,
AnyMessageLikeEvent, AnyStateEvent, AnyStrippedStateEvent, AnySyncStateEvent, AnySyncTimelineEvent,
AnyTimelineEvent, StateEvent,
room::member::RoomMemberEventContent, space::child::HierarchySpaceChildEvent,
AnyEphemeralRoomEvent, AnyMessageLikeEvent, AnyStateEvent, AnyStrippedStateEvent,
AnySyncStateEvent, AnySyncTimelineEvent, AnyTimelineEvent, StateEvent,
},
serde::Raw,
};

View file

@ -13,8 +13,8 @@ pub fn remove_transaction_id(&mut self) -> Result {
return Ok(());
};
let mut unsigned: BTreeMap<String, Box<RawJsonValue>> =
serde_json::from_str(unsigned.get()).map_err(|e| err!(Database("Invalid unsigned in pdu event: {e}")))?;
let mut unsigned: BTreeMap<String, Box<RawJsonValue>> = serde_json::from_str(unsigned.get())
.map_err(|e| err!(Database("Invalid unsigned in pdu event: {e}")))?;
unsigned.remove("transaction_id");
self.unsigned = to_raw_value(&unsigned)
@ -97,7 +97,9 @@ where
#[implement(Pdu)]
#[must_use]
pub fn get_unsigned_as_value(&self) -> JsonValue { self.get_unsigned::<JsonValue>().unwrap_or_default() }
pub fn get_unsigned_as_value(&self) -> JsonValue {
self.get_unsigned::<JsonValue>().unwrap_or_default()
}
#[implement(Pdu)]
pub fn get_unsigned<T>(&self) -> Result<JsonValue> {

View file

@ -79,8 +79,9 @@ impl Server {
return Err!("Restart already in progress");
}
self.shutdown()
.inspect_err(|_| self.restarting.store(false, Ordering::Release))
self.shutdown().inspect_err(|_| {
self.restarting.store(false, Ordering::Release);
})
}
pub fn shutdown(&self) -> Result<()> {
@ -88,8 +89,9 @@ impl Server {
return Err!("Shutdown already in progress");
}
self.signal("SIGTERM")
.inspect_err(|_| self.stopping.store(false, Ordering::Release))
self.signal("SIGTERM").inspect_err(|_| {
self.stopping.store(false, Ordering::Release);
})
}
pub fn signal(&self, sig: &'static str) -> Result<()> {

View file

@ -66,19 +66,25 @@ impl BoolExt for bool {
}
#[inline]
fn map_ok_or<T, E, F: FnOnce() -> T>(self, err: E, f: F) -> Result<T, E> { self.ok_or(err).map(|()| f()) }
fn map_ok_or<T, E, F: FnOnce() -> T>(self, err: E, f: F) -> Result<T, E> {
self.ok_or(err).map(|()| f())
}
#[inline]
fn map_or<T, F: FnOnce() -> T>(self, err: T, f: F) -> T { self.then(f).unwrap_or(err) }
#[inline]
fn map_or_else<T, F: FnOnce() -> T>(self, err: F, f: F) -> T { self.then(f).unwrap_or_else(err) }
fn map_or_else<T, F: FnOnce() -> T>(self, err: F, f: F) -> T {
self.then(f).unwrap_or_else(err)
}
#[inline]
fn ok_or<E>(self, err: E) -> Result<(), E> { self.into_option().ok_or(err) }
#[inline]
fn ok_or_else<E, F: FnOnce() -> E>(self, err: F) -> Result<(), E> { self.into_option().ok_or_else(err) }
fn ok_or_else<E, F: FnOnce() -> E>(self, err: F) -> Result<(), E> {
self.into_option().ok_or_else(err)
}
#[inline]
fn or<T, F: FnOnce() -> T>(self, f: F) -> Option<T> { (!self).then(f) }

View file

@ -39,7 +39,9 @@ pub fn increment(old: Option<&[u8]>) -> [u8; 8] {
/// Parses 8 big-endian bytes into an u64; panic on invalid argument
#[inline]
#[must_use]
pub fn u64_from_u8(bytes: &[u8]) -> u64 { u64_from_bytes(bytes).expect("must slice at least 8 bytes") }
pub fn u64_from_u8(bytes: &[u8]) -> u64 {
u64_from_bytes(bytes).expect("must slice at least 8 bytes")
}
/// Parses the big-endian bytes into an u64.
#[inline]

View file

@ -71,13 +71,10 @@ pub fn content_disposition_type(content_type: Option<&str>) -> ContentDispositio
/// `sanitize_filename` crate
#[tracing::instrument(level = "debug")]
pub fn sanitise_filename(filename: &str) -> String {
sanitize_filename::sanitize_with_options(
filename,
sanitize_filename::Options {
truncate: false,
..Default::default()
},
)
sanitize_filename::sanitize_with_options(filename, sanitize_filename::Options {
truncate: false,
..Default::default()
})
}
/// creates the final Content-Disposition based on whether the filename exists
@ -89,11 +86,16 @@ pub fn sanitise_filename(filename: &str) -> String {
///
/// else: `Content-Disposition: attachment/inline`
pub fn make_content_disposition(
content_disposition: Option<&ContentDisposition>, content_type: Option<&str>, filename: Option<&str>,
content_disposition: Option<&ContentDisposition>,
content_type: Option<&str>,
filename: Option<&str>,
) -> ContentDisposition {
ContentDisposition::new(content_disposition_type(content_type)).with_filename(
filename
.or_else(|| content_disposition.and_then(|content_disposition| content_disposition.filename.as_deref()))
.or_else(|| {
content_disposition
.and_then(|content_disposition| content_disposition.filename.as_deref())
})
.map(sanitise_filename),
)
}
@ -102,8 +104,8 @@ pub fn make_content_disposition(
mod tests {
#[test]
fn string_sanitisation() {
const SAMPLE: &str =
"🏳this\\r\\n įs \r\\n ä \\r\nstrïng 🥴that\n\r ../../../../../../../may be\r\n malicious🏳";
const SAMPLE: &str = "🏳this\\r\\n įs \r\\n ä \\r\nstrïng 🥴that\n\r \
../../../../../../../may be\r\n malicious🏳";
const SANITISED: &str = "🏳thisrn įs n ä rstrïng 🥴that ..............may be malicious🏳";
let options = sanitize_filename::Options {
@ -125,14 +127,12 @@ mod tests {
fn empty_sanitisation() {
use crate::utils::string::EMPTY;
let result = sanitize_filename::sanitize_with_options(
EMPTY,
sanitize_filename::Options {
let result =
sanitize_filename::sanitize_with_options(EMPTY, sanitize_filename::Options {
windows: true,
truncate: true,
replacement: "",
},
);
});
assert_eq!(EMPTY, result);
}

View file

@ -31,10 +31,8 @@ impl<T: fmt::Debug> fmt::Debug for TruncatedSlice<'_, T> {
/// fn bar(foos: &[&str]);
/// ```
pub fn slice_truncated<T: fmt::Debug>(
slice: &[T], max_len: usize,
slice: &[T],
max_len: usize,
) -> tracing::field::DebugValue<TruncatedSlice<'_, T>> {
tracing::field::debug(TruncatedSlice {
inner: slice,
max_len,
})
tracing::field::debug(TruncatedSlice { inner: slice, max_len })
}

View file

@ -9,9 +9,7 @@ macro_rules! defer {
fn drop(&mut self) { (self.closure)(); }
}
let _defer_ = _Defer_ {
closure: || $body,
};
let _defer_ = _Defer_ { closure: || $body };
};
($body:expr) => {

View file

@ -14,17 +14,23 @@ pub trait TryExtExt<T, E>
where
Self: TryFuture<Ok = T, Error = E> + Send,
{
fn is_err(self) -> MapOkOrElse<Self, impl FnOnce(Self::Ok) -> bool, impl FnOnce(Self::Error) -> bool>
fn is_err(
self,
) -> MapOkOrElse<Self, impl FnOnce(Self::Ok) -> bool, impl FnOnce(Self::Error) -> bool>
where
Self: Sized;
#[allow(clippy::wrong_self_convention)]
fn is_ok(self) -> MapOkOrElse<Self, impl FnOnce(Self::Ok) -> bool, impl FnOnce(Self::Error) -> bool>
fn is_ok(
self,
) -> MapOkOrElse<Self, impl FnOnce(Self::Ok) -> bool, impl FnOnce(Self::Error) -> bool>
where
Self: Sized;
fn map_ok_or<U, F>(
self, default: U, f: F,
self,
default: U,
f: F,
) -> MapOkOrElse<Self, impl FnOnce(Self::Ok) -> U, impl FnOnce(Self::Error) -> U>
where
F: FnOnce(Self::Ok) -> U,
@ -32,11 +38,18 @@ where
fn ok(
self,
) -> MapOkOrElse<Self, impl FnOnce(Self::Ok) -> Option<Self::Ok>, impl FnOnce(Self::Error) -> Option<Self::Ok>>
) -> MapOkOrElse<
Self,
impl FnOnce(Self::Ok) -> Option<Self::Ok>,
impl FnOnce(Self::Error) -> Option<Self::Ok>,
>
where
Self: Sized;
fn unwrap_or(self, default: Self::Ok) -> UnwrapOrElse<Self, impl FnOnce(Self::Error) -> Self::Ok>
fn unwrap_or(
self,
default: Self::Ok,
) -> UnwrapOrElse<Self, impl FnOnce(Self::Error) -> Self::Ok>
where
Self: Sized;
@ -51,7 +64,9 @@ where
Fut: TryFuture<Ok = T, Error = E> + Send,
{
#[inline]
fn is_err(self) -> MapOkOrElse<Self, impl FnOnce(Self::Ok) -> bool, impl FnOnce(Self::Error) -> bool>
fn is_err(
self,
) -> MapOkOrElse<Self, impl FnOnce(Self::Ok) -> bool, impl FnOnce(Self::Error) -> bool>
where
Self: Sized,
{
@ -59,7 +74,9 @@ where
}
#[inline]
fn is_ok(self) -> MapOkOrElse<Self, impl FnOnce(Self::Ok) -> bool, impl FnOnce(Self::Error) -> bool>
fn is_ok(
self,
) -> MapOkOrElse<Self, impl FnOnce(Self::Ok) -> bool, impl FnOnce(Self::Error) -> bool>
where
Self: Sized,
{
@ -68,7 +85,9 @@ where
#[inline]
fn map_ok_or<U, F>(
self, default: U, f: F,
self,
default: U,
f: F,
) -> MapOkOrElse<Self, impl FnOnce(Self::Ok) -> U, impl FnOnce(Self::Error) -> U>
where
F: FnOnce(Self::Ok) -> U,
@ -80,7 +99,11 @@ where
#[inline]
fn ok(
self,
) -> MapOkOrElse<Self, impl FnOnce(Self::Ok) -> Option<Self::Ok>, impl FnOnce(Self::Error) -> Option<Self::Ok>>
) -> MapOkOrElse<
Self,
impl FnOnce(Self::Ok) -> Option<Self::Ok>,
impl FnOnce(Self::Error) -> Option<Self::Ok>,
>
where
Self: Sized,
{
@ -88,7 +111,10 @@ where
}
#[inline]
fn unwrap_or(self, default: Self::Ok) -> UnwrapOrElse<Self, impl FnOnce(Self::Error) -> Self::Ok>
fn unwrap_or(
self,
default: Self::Ok,
) -> UnwrapOrElse<Self, impl FnOnce(Self::Error) -> Self::Ok>
where
Self: Sized,
{

View file

@ -1,8 +1,8 @@
use std::sync::OnceLock;
use argon2::{
password_hash, password_hash::SaltString, Algorithm, Argon2, Params, PasswordHash, PasswordHasher,
PasswordVerifier, Version,
password_hash, password_hash::SaltString, Algorithm, Argon2, Params, PasswordHash,
PasswordHasher, PasswordVerifier, Version,
};
use crate::{err, Error, Result};

View file

@ -16,12 +16,12 @@ impl fmt::Display for Escape<'_> {
let mut last = 0;
for (i, ch) in s.char_indices() {
let s = match ch {
'>' => "&gt;",
'<' => "&lt;",
'&' => "&amp;",
'\'' => "&#39;",
'"' => "&quot;",
_ => continue,
| '>' => "&gt;",
| '<' => "&lt;",
| '&' => "&amp;",
| '\'' => "&#39;",
| '"' => "&quot;",
| _ => continue,
};
fmt.write_str(&pile_o_bits[last..i])?;
fmt.write_str(s)?;

View file

@ -8,16 +8,24 @@ use crate::Result;
/// `CanonicalJsonObject`.
///
/// `value` must serialize to an `serde_json::Value::Object`.
pub fn to_canonical_object<T: serde::Serialize>(value: T) -> Result<CanonicalJsonObject, CanonicalJsonError> {
pub fn to_canonical_object<T: serde::Serialize>(
value: T,
) -> Result<CanonicalJsonObject, CanonicalJsonError> {
use serde::ser::Error;
match serde_json::to_value(value).map_err(CanonicalJsonError::SerDe)? {
serde_json::Value::Object(map) => try_from_json_map(map),
_ => Err(CanonicalJsonError::SerDe(serde_json::Error::custom("Value must be an object"))),
| serde_json::Value::Object(map) => try_from_json_map(map),
| _ =>
Err(CanonicalJsonError::SerDe(serde_json::Error::custom("Value must be an object"))),
}
}
pub fn deserialize_from_str<'de, D: serde::de::Deserializer<'de>, T: FromStr<Err = E>, E: fmt::Display>(
pub fn deserialize_from_str<
'de,
D: serde::de::Deserializer<'de>,
T: FromStr<Err = E>,
E: fmt::Display,
>(
deserializer: D,
) -> Result<T, D::Error> {
struct Visitor<T: FromStr<Err = E>, E>(std::marker::PhantomData<T>);

View file

@ -56,7 +56,12 @@ macro_rules! validated {
/// Returns false if the exponential backoff has expired based on the inputs
#[inline]
#[must_use]
pub fn continue_exponential_backoff_secs(min: u64, max: u64, elapsed: Duration, tries: u32) -> bool {
pub fn continue_exponential_backoff_secs(
min: u64,
max: u64,
elapsed: Duration,
tries: u32,
) -> bool {
let min = Duration::from_secs(min);
let max = Duration::from_secs(max);
continue_exponential_backoff(min, max, elapsed, tries)
@ -65,7 +70,12 @@ pub fn continue_exponential_backoff_secs(min: u64, max: u64, elapsed: Duration,
/// Returns false if the exponential backoff has expired based on the inputs
#[inline]
#[must_use]
pub fn continue_exponential_backoff(min: Duration, max: Duration, elapsed: Duration, tries: u32) -> bool {
pub fn continue_exponential_backoff(
min: Duration,
max: Duration,
elapsed: Duration,
tries: u32,
) -> bool {
let min = min.saturating_mul(tries).saturating_mul(tries);
let min = cmp::min(min, max);
elapsed < min

View file

@ -47,8 +47,8 @@ pub fn exchange<T>(state: &mut T, source: T) -> T { std::mem::replace(state, sou
macro_rules! extract_variant {
($e:expr, $variant:path) => {
match $e {
$variant(value) => Some(value),
_ => None,
| $variant(value) => Some(value),
| _ => None,
}
};
}

View file

@ -70,10 +70,9 @@ where
impl<Key, Val> Drop for Guard<Key, Val> {
fn drop(&mut self) {
if Arc::strong_count(Omg::mutex(&self.val)) <= 2 {
self.map
.lock()
.expect("locked")
.retain(|_, val| !Arc::ptr_eq(val, Omg::mutex(&self.val)) || Arc::strong_count(val) > 2);
self.map.lock().expect("locked").retain(|_, val| {
!Arc::ptr_eq(val, Omg::mutex(&self.val)) || Arc::strong_count(val) > 2
});
}
}
}

View file

@ -10,9 +10,9 @@ mod unwrap_infallible;
mod unwrap_or_err;
pub use self::{
debug_inspect::DebugInspect, filter::Filter, flat_ok::FlatOk, into_is_ok::IntoIsOk, log_debug_err::LogDebugErr,
log_err::LogErr, map_expect::MapExpect, not_found::NotFound, unwrap_infallible::UnwrapInfallible,
unwrap_or_err::UnwrapOrErr,
debug_inspect::DebugInspect, filter::Filter, flat_ok::FlatOk, into_is_ok::IntoIsOk,
log_debug_err::LogDebugErr, log_err::LogErr, map_expect::MapExpect, not_found::NotFound,
unwrap_infallible::UnwrapInfallible, unwrap_or_err::UnwrapOrErr,
};
pub type Result<T = (), E = crate::Error> = std::result::Result<T, E>;

View file

@ -19,7 +19,9 @@ impl<T, E> FlatOk<T> for Option<Result<T, E>> {
fn flat_ok_or<Ep>(self, err: Ep) -> Result<T, Ep> { self.flat_ok().ok_or(err) }
#[inline]
fn flat_ok_or_else<Ep, F: FnOnce() -> Ep>(self, err: F) -> Result<T, Ep> { self.flat_ok().ok_or_else(err) }
fn flat_ok_or_else<Ep, F: FnOnce() -> Ep>(self, err: F) -> Result<T, Ep> {
self.flat_ok().ok_or_else(err)
}
}
impl<T, E> FlatOk<T> for Result<Option<T>, E> {
@ -30,5 +32,7 @@ impl<T, E> FlatOk<T> for Result<Option<T>, E> {
fn flat_ok_or<Ep>(self, err: Ep) -> Result<T, Ep> { self.flat_ok().ok_or(err) }
#[inline]
fn flat_ok_or_else<Ep, F: FnOnce() -> Ep>(self, err: F) -> Result<T, Ep> { self.flat_ok().ok_or_else(err) }
fn flat_ok_or_else<Ep, F: FnOnce() -> Ep>(self, err: F) -> Result<T, Ep> {
self.flat_ok().ok_or_else(err)
}
}

View file

@ -20,5 +20,7 @@ pub trait LogErr<T, E: Display> {
impl<T, E: Display> LogErr<T, E> for Result<T, E> {
#[inline]
fn err_log(self, level: Level) -> Self { self.inspect_err(|error| error::inspect_log_level(&error, level)) }
fn err_log(self, level: Level) -> Self {
self.inspect_err(|error| error::inspect_log_level(&error, level))
}
}

View file

@ -32,7 +32,9 @@ where
/// Intersection of sets
///
/// Outputs the set of elements common to all input sets. Inputs must be sorted.
pub fn intersection_sorted<Item, Iter, Iters>(mut input: Iters) -> impl Iterator<Item = Item> + Send
pub fn intersection_sorted<Item, Iter, Iters>(
mut input: Iters,
) -> impl Iterator<Item = Item> + Send
where
Iters: Iterator<Item = Iter> + Clone + Send,
Iter: Iterator<Item = Item> + Send,

View file

@ -14,9 +14,13 @@ where
Item: 'a,
{
#[inline]
fn expect_ok(self: T) -> impl Stream<Item = Item> + Send + 'a { self.map_expect("stream expectation failure") }
fn expect_ok(self: T) -> impl Stream<Item = Item> + Send + 'a {
self.map_expect("stream expectation failure")
}
//TODO: move to impl MapExpect
#[inline]
fn map_expect(self, msg: &'a str) -> impl Stream<Item = Item> + Send + 'a { self.map(|res| res.expect(msg)) }
fn map_expect(self, msg: &'a str) -> impl Stream<Item = Item> + Send + 'a {
self.map(|res| res.expect(msg))
}
}

View file

@ -23,8 +23,12 @@ where
#[cfg(not(debug_assertions))]
#[inline]
fn ignore_err(self: T) -> impl Stream<Item = Item> + Send + 'a { self.filter_map(|res| ready(res.ok())) }
fn ignore_err(self: T) -> impl Stream<Item = Item> + Send + 'a {
self.filter_map(|res| ready(res.ok()))
}
#[inline]
fn ignore_ok(self: T) -> impl Stream<Item = Error> + Send + 'a { self.filter_map(|res| ready(res.err())) }
fn ignore_ok(self: T) -> impl Stream<Item = Error> + Send + 'a {
self.filter_map(|res| ready(res.err()))
}
}

View file

@ -13,7 +13,11 @@ pub trait IterStream<I: IntoIterator + Send> {
/// Convert an Iterator into a TryStream
fn try_stream(
self,
) -> impl TryStream<Ok = <I as IntoIterator>::Item, Error = Error, Item = Result<<I as IntoIterator>::Item, Error>> + Send;
) -> impl TryStream<
Ok = <I as IntoIterator>::Item,
Error = Error,
Item = Result<<I as IntoIterator>::Item, Error>,
> + Send;
}
impl<I> IterStream<I> for I
@ -27,8 +31,11 @@ where
#[inline]
fn try_stream(
self,
) -> impl TryStream<Ok = <I as IntoIterator>::Item, Error = Error, Item = Result<<I as IntoIterator>::Item, Error>> + Send
{
) -> impl TryStream<
Ok = <I as IntoIterator>::Item,
Error = Error,
Item = Result<<I as IntoIterator>::Item, Error>,
> + Send {
self.stream().map(Ok)
}
}
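
Since this trait only lifts an iterator into a TryStream whose items are all Ok, the same effect can be sketched with plain futures combinators; std::io::Error stands in for the crate's Error type purely for illustration:

use futures::{stream, StreamExt, TryStreamExt};

async fn demo_try_stream() {
    let sum: u32 = stream::iter(1u32..=3)
        .map(Ok::<u32, std::io::Error>) // what try_stream provides for free
        .try_fold(0u32, |acc, n| async move { Ok(acc + n) })
        .await
        .expect("no errors are produced");
    assert_eq!(sum, 6);
}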

View file

@@ -3,7 +3,9 @@
use futures::{
future::{ready, Ready},
stream::{All, Any, Filter, FilterMap, Fold, ForEach, Scan, SkipWhile, Stream, StreamExt, TakeWhile},
stream::{
All, Any, Filter, FilterMap, Fold, ForEach, Scan, SkipWhile, Stream, StreamExt, TakeWhile,
},
};
/// Synchronous combinators to augment futures::StreamExt. Most Stream
@@ -24,19 +26,32 @@ where
where
F: Fn(Item) -> bool;
fn ready_filter<'a, F>(self, f: F) -> Filter<Self, Ready<bool>, impl FnMut(&Item) -> Ready<bool> + 'a>
fn ready_filter<'a, F>(
self,
f: F,
) -> Filter<Self, Ready<bool>, impl FnMut(&Item) -> Ready<bool> + 'a>
where
F: Fn(&Item) -> bool + 'a;
fn ready_filter_map<F, U>(self, f: F) -> FilterMap<Self, Ready<Option<U>>, impl FnMut(Item) -> Ready<Option<U>>>
fn ready_filter_map<F, U>(
self,
f: F,
) -> FilterMap<Self, Ready<Option<U>>, impl FnMut(Item) -> Ready<Option<U>>>
where
F: Fn(Item) -> Option<U>;
fn ready_fold<T, F>(self, init: T, f: F) -> Fold<Self, Ready<T>, T, impl FnMut(T, Item) -> Ready<T>>
fn ready_fold<T, F>(
self,
init: T,
f: F,
) -> Fold<Self, Ready<T>, T, impl FnMut(T, Item) -> Ready<T>>
where
F: Fn(T, Item) -> T;
fn ready_fold_default<T, F>(self, f: F) -> Fold<Self, Ready<T>, T, impl FnMut(T, Item) -> Ready<T>>
fn ready_fold_default<T, F>(
self,
f: F,
) -> Fold<Self, Ready<T>, T, impl FnMut(T, Item) -> Ready<T>>
where
F: Fn(T, Item) -> T,
T: Default;
@@ -45,23 +60,33 @@ where
where
F: FnMut(Item);
fn ready_take_while<'a, F>(self, f: F) -> TakeWhile<Self, Ready<bool>, impl FnMut(&Item) -> Ready<bool> + 'a>
fn ready_take_while<'a, F>(
self,
f: F,
) -> TakeWhile<Self, Ready<bool>, impl FnMut(&Item) -> Ready<bool> + 'a>
where
F: Fn(&Item) -> bool + 'a;
fn ready_scan<B, T, F>(
self, init: T, f: F,
self,
init: T,
f: F,
) -> Scan<Self, T, Ready<Option<B>>, impl FnMut(&mut T, Item) -> Ready<Option<B>>>
where
F: Fn(&mut T, Item) -> Option<B>;
fn ready_scan_each<T, F>(
self, init: T, f: F,
self,
init: T,
f: F,
) -> Scan<Self, T, Ready<Option<Item>>, impl FnMut(&mut T, Item) -> Ready<Option<Item>>>
where
F: Fn(&mut T, &Item);
fn ready_skip_while<'a, F>(self, f: F) -> SkipWhile<Self, Ready<bool>, impl FnMut(&Item) -> Ready<bool> + 'a>
fn ready_skip_while<'a, F>(
self,
f: F,
) -> SkipWhile<Self, Ready<bool>, impl FnMut(&Item) -> Ready<bool> + 'a>
where
F: Fn(&Item) -> bool + 'a;
}
@@ -87,7 +112,10 @@ where
}
#[inline]
fn ready_filter<'a, F>(self, f: F) -> Filter<Self, Ready<bool>, impl FnMut(&Item) -> Ready<bool> + 'a>
fn ready_filter<'a, F>(
self,
f: F,
) -> Filter<Self, Ready<bool>, impl FnMut(&Item) -> Ready<bool> + 'a>
where
F: Fn(&Item) -> bool + 'a,
{
@@ -95,7 +123,10 @@ where
}
#[inline]
fn ready_filter_map<F, U>(self, f: F) -> FilterMap<Self, Ready<Option<U>>, impl FnMut(Item) -> Ready<Option<U>>>
fn ready_filter_map<F, U>(
self,
f: F,
) -> FilterMap<Self, Ready<Option<U>>, impl FnMut(Item) -> Ready<Option<U>>>
where
F: Fn(Item) -> Option<U>,
{
@@ -103,7 +134,11 @@ where
}
#[inline]
fn ready_fold<T, F>(self, init: T, f: F) -> Fold<Self, Ready<T>, T, impl FnMut(T, Item) -> Ready<T>>
fn ready_fold<T, F>(
self,
init: T,
f: F,
) -> Fold<Self, Ready<T>, T, impl FnMut(T, Item) -> Ready<T>>
where
F: Fn(T, Item) -> T,
{
@@ -111,7 +146,10 @@ where
}
#[inline]
fn ready_fold_default<T, F>(self, f: F) -> Fold<Self, Ready<T>, T, impl FnMut(T, Item) -> Ready<T>>
fn ready_fold_default<T, F>(
self,
f: F,
) -> Fold<Self, Ready<T>, T, impl FnMut(T, Item) -> Ready<T>>
where
F: Fn(T, Item) -> T,
T: Default,
@@ -121,7 +159,10 @@ where
#[inline]
#[allow(clippy::unit_arg)]
fn ready_for_each<F>(self, mut f: F) -> ForEach<Self, Ready<()>, impl FnMut(Item) -> Ready<()>>
fn ready_for_each<F>(
self,
mut f: F,
) -> ForEach<Self, Ready<()>, impl FnMut(Item) -> Ready<()>>
where
F: FnMut(Item),
{
@@ -129,7 +170,10 @@ where
}
#[inline]
fn ready_take_while<'a, F>(self, f: F) -> TakeWhile<Self, Ready<bool>, impl FnMut(&Item) -> Ready<bool> + 'a>
fn ready_take_while<'a, F>(
self,
f: F,
) -> TakeWhile<Self, Ready<bool>, impl FnMut(&Item) -> Ready<bool> + 'a>
where
F: Fn(&Item) -> bool + 'a,
{
@@ -138,7 +182,9 @@ where
#[inline]
fn ready_scan<B, T, F>(
self, init: T, f: F,
self,
init: T,
f: F,
) -> Scan<Self, T, Ready<Option<B>>, impl FnMut(&mut T, Item) -> Ready<Option<B>>>
where
F: Fn(&mut T, Item) -> Option<B>,
@@ -148,7 +194,9 @@ where
#[inline]
fn ready_scan_each<T, F>(
self, init: T, f: F,
self,
init: T,
f: F,
) -> Scan<Self, T, Ready<Option<Item>>, impl FnMut(&mut T, Item) -> Ready<Option<Item>>>
where
F: Fn(&mut T, &Item),
@@ -160,7 +208,10 @@ where
}
#[inline]
fn ready_skip_while<'a, F>(self, f: F) -> SkipWhile<Self, Ready<bool>, impl FnMut(&Item) -> Ready<bool> + 'a>
fn ready_skip_while<'a, F>(
self,
f: F,
) -> SkipWhile<Self, Ready<bool>, impl FnMut(&Item) -> Ready<bool> + 'a>
where
F: Fn(&Item) -> bool + 'a,
{
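
The ready_* signatures above are verbose, but each one only spares callers from wrapping a synchronous closure's result in futures::future::ready so it fits the async combinator bounds. A small sketch against plain futures (not this crate's trait) shows the pattern being abstracted:

use futures::{future::ready, stream, StreamExt};

async fn demo_ready_filter() {
    // ready_filter(|n| n % 2 == 0) would hide the explicit ready(..) below.
    let evens: Vec<u32> = stream::iter(1u32..=6)
        .filter(|n| ready(n % 2 == 0))
        .collect()
        .await;
    assert_eq!(evens, [2, 4, 6]);
}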

View file

@@ -24,12 +24,17 @@ where
F: Fn(Item) -> K + Send,
K: Eq + Hash + Send;
fn counts_by_with_cap<const CAP: usize, K, F>(self, f: F) -> impl Future<Output = HashMap<K, usize>> + Send
fn counts_by_with_cap<const CAP: usize, K, F>(
self,
f: F,
) -> impl Future<Output = HashMap<K, usize>> + Send
where
F: Fn(Item) -> K + Send,
K: Eq + Hash + Send;
fn counts_with_cap<const CAP: usize>(self) -> impl Future<Output = HashMap<Item, usize>> + Send
fn counts_with_cap<const CAP: usize>(
self,
) -> impl Future<Output = HashMap<Item, usize>> + Send
where
<Self as Stream>::Item: Eq + Hash;
@@ -63,7 +68,10 @@ where
}
#[inline]
fn counts_by_with_cap<const CAP: usize, K, F>(self, f: F) -> impl Future<Output = HashMap<K, usize>> + Send
fn counts_by_with_cap<const CAP: usize, K, F>(
self,
f: F,
) -> impl Future<Output = HashMap<K, usize>> + Send
where
F: Fn(Item) -> K + Send,
K: Eq + Hash + Send,
@@ -72,7 +80,9 @@ where
}
#[inline]
fn counts_with_cap<const CAP: usize>(self) -> impl Future<Output = HashMap<Item, usize>> + Send
fn counts_with_cap<const CAP: usize>(
self,
) -> impl Future<Output = HashMap<Item, usize>> + Send
where
<Self as Stream>::Item: Eq + Hash,
{

View file

@@ -12,13 +12,20 @@ pub trait TryBroadbandExt<T, E>
where
Self: TryStream<Ok = T, Error = E, Item = Result<T, E>> + Send + Sized,
{
fn broadn_and_then<U, F, Fut, N>(self, n: N, f: F) -> impl TryStream<Ok = U, Error = E, Item = Result<U, E>> + Send
fn broadn_and_then<U, F, Fut, N>(
self,
n: N,
f: F,
) -> impl TryStream<Ok = U, Error = E, Item = Result<U, E>> + Send
where
N: Into<Option<usize>>,
F: Fn(Self::Ok) -> Fut + Send + Sync,
Fut: TryFuture<Ok = U, Error = E, Output = Result<U, E>> + Send;
fn broad_and_then<U, F, Fut>(self, f: F) -> impl TryStream<Ok = U, Error = E, Item = Result<U, E>> + Send
fn broad_and_then<U, F, Fut>(
self,
f: F,
) -> impl TryStream<Ok = U, Error = E, Item = Result<U, E>> + Send
where
F: Fn(Self::Ok) -> Fut + Send + Sync,
Fut: TryFuture<Ok = U, Error = E, Output = Result<U, E>> + Send,
@@ -31,7 +38,11 @@ impl<T, E, S> TryBroadbandExt<T, E> for S
where
S: TryStream<Ok = T, Error = E, Item = Result<T, E>> + Send + Sized,
{
fn broadn_and_then<U, F, Fut, N>(self, n: N, f: F) -> impl TryStream<Ok = U, Error = E, Item = Result<U, E>> + Send
fn broadn_and_then<U, F, Fut, N>(
self,
n: N,
f: F,
) -> impl TryStream<Ok = U, Error = E, Item = Result<U, E>> + Send
where
N: Into<Option<usize>>,
F: Fn(Self::Ok) -> Fut + Send + Sync,

View file

@@ -16,31 +16,43 @@ where
S: TryStream<Ok = T, Error = E, Item = Result<T, E>> + Send + ?Sized,
Self: TryStream + Send + Sized,
{
fn ready_and_then<U, F>(self, f: F) -> AndThen<Self, Ready<Result<U, E>>, impl FnMut(S::Ok) -> Ready<Result<U, E>>>
fn ready_and_then<U, F>(
self,
f: F,
) -> AndThen<Self, Ready<Result<U, E>>, impl FnMut(S::Ok) -> Ready<Result<U, E>>>
where
F: Fn(S::Ok) -> Result<U, E>;
fn ready_try_filter_map<F, U>(
self, f: F,
) -> TryFilterMap<Self, Ready<Result<Option<U>, E>>, impl FnMut(S::Ok) -> Ready<Result<Option<U>, E>>>
self,
f: F,
) -> TryFilterMap<
Self,
Ready<Result<Option<U>, E>>,
impl FnMut(S::Ok) -> Ready<Result<Option<U>, E>>,
>
where
F: Fn(S::Ok) -> Result<Option<U>, E>;
fn ready_try_fold<U, F>(
self, init: U, f: F,
self,
init: U,
f: F,
) -> TryFold<Self, Ready<Result<U, E>>, U, impl FnMut(U, S::Ok) -> Ready<Result<U, E>>>
where
F: Fn(U, S::Ok) -> Result<U, E>;
fn ready_try_fold_default<U, F>(
self, f: F,
self,
f: F,
) -> TryFold<Self, Ready<Result<U, E>>, U, impl FnMut(U, S::Ok) -> Ready<Result<U, E>>>
where
F: Fn(U, S::Ok) -> Result<U, E>,
U: Default;
fn ready_try_for_each<F>(
self, f: F,
self,
f: F,
) -> TryForEach<Self, Ready<Result<(), E>>, impl FnMut(S::Ok) -> Ready<Result<(), E>>>
where
F: FnMut(S::Ok) -> Result<(), E>;
@@ -52,7 +64,10 @@ where
Self: TryStream + Send + Sized,
{
#[inline]
fn ready_and_then<U, F>(self, f: F) -> AndThen<Self, Ready<Result<U, E>>, impl FnMut(S::Ok) -> Ready<Result<U, E>>>
fn ready_and_then<U, F>(
self,
f: F,
) -> AndThen<Self, Ready<Result<U, E>>, impl FnMut(S::Ok) -> Ready<Result<U, E>>>
where
F: Fn(S::Ok) -> Result<U, E>,
{
@@ -60,8 +75,13 @@ where
}
fn ready_try_filter_map<F, U>(
self, f: F,
) -> TryFilterMap<Self, Ready<Result<Option<U>, E>>, impl FnMut(S::Ok) -> Ready<Result<Option<U>, E>>>
self,
f: F,
) -> TryFilterMap<
Self,
Ready<Result<Option<U>, E>>,
impl FnMut(S::Ok) -> Ready<Result<Option<U>, E>>,
>
where
F: Fn(S::Ok) -> Result<Option<U>, E>,
{
@@ -70,7 +90,9 @@ where
#[inline]
fn ready_try_fold<U, F>(
self, init: U, f: F,
self,
init: U,
f: F,
) -> TryFold<Self, Ready<Result<U, E>>, U, impl FnMut(U, S::Ok) -> Ready<Result<U, E>>>
where
F: Fn(U, S::Ok) -> Result<U, E>,
@@ -80,7 +102,8 @@ where
#[inline]
fn ready_try_fold_default<U, F>(
self, f: F,
self,
f: F,
) -> TryFold<Self, Ready<Result<U, E>>, U, impl FnMut(U, S::Ok) -> Ready<Result<U, E>>>
where
F: Fn(U, S::Ok) -> Result<U, E>,
@@ -91,7 +114,8 @@ where
#[inline]
fn ready_try_for_each<F>(
self, mut f: F,
self,
mut f: F,
) -> TryForEach<Self, Ready<Result<(), E>>, impl FnMut(S::Ok) -> Ready<Result<(), E>>>
where
F: FnMut(S::Ok) -> Result<(), E>,

View file

@@ -15,7 +15,9 @@ pub trait Between<'a> {
impl<'a> Between<'a> for &'a str {
#[inline]
fn between_infallible(&self, delim: Delim<'_>) -> &'a str { self.between(delim).unwrap_or(self) }
fn between_infallible(&self, delim: Delim<'_>) -> &'a str {
self.between(delim).unwrap_or(self)
}
#[inline]
fn between(&self, delim: Delim<'_>) -> Option<&'a str> {

View file

@@ -15,8 +15,12 @@ pub trait SplitInfallible<'a> {
impl<'a> SplitInfallible<'a> for &'a str {
#[inline]
fn rsplit_once_infallible(&self, delim: &str) -> Pair<'a> { self.rsplit_once(delim).unwrap_or((self, EMPTY)) }
fn rsplit_once_infallible(&self, delim: &str) -> Pair<'a> {
self.rsplit_once(delim).unwrap_or((self, EMPTY))
}
#[inline]
fn split_once_infallible(&self, delim: &str) -> Pair<'a> { self.split_once(delim).unwrap_or((self, EMPTY)) }
fn split_once_infallible(&self, delim: &str) -> Pair<'a> {
self.split_once(delim).unwrap_or((self, EMPTY))
}
}

View file

@@ -26,7 +26,9 @@ impl<'a> Unquote<'a> for &'a str {
}
#[inline]
fn unquote(&self) -> Option<&'a str> { self.strip_prefix(QUOTE).and_then(|s| s.strip_suffix(QUOTE)) }
fn unquote(&self) -> Option<&'a str> {
self.strip_prefix(QUOTE).and_then(|s| s.strip_suffix(QUOTE))
}
#[inline]
fn is_quoted(&self) -> bool { self.starts_with(QUOTE) && self.ends_with(QUOTE) }

View file

@@ -45,8 +45,8 @@ pub unsafe fn current_exe() -> Result<std::path::PathBuf> {
let exe = std::env::current_exe()?;
match exe.to_str() {
None => Ok(exe),
Some(str) => Ok(str
| None => Ok(exe),
| Some(str) => Ok(str
.strip_suffix(" (deleted)")
.map(PathBuf::from)
.unwrap_or(exe)),
@@ -58,5 +58,6 @@ pub unsafe fn current_exe() -> Result<std::path::PathBuf> {
/// accurate on all platforms; defaults to false.
#[must_use]
pub fn current_exe_deleted() -> bool {
std::env::current_exe().is_ok_and(|exe| exe.to_str().is_some_and(|exe| exe.ends_with(" (deleted)")))
std::env::current_exe()
.is_ok_and(|exe| exe.to_str().is_some_and(|exe| exe.ends_with(" (deleted)")))
}

View file

@@ -1,4 +1,5 @@
#![cfg(test)]
#![allow(clippy::disallowed_methods)]
use crate::utils;

View file

@@ -13,7 +13,9 @@ pub fn now_millis() -> u64 {
}
#[inline]
pub fn parse_timepoint_ago(ago: &str) -> Result<SystemTime> { timepoint_ago(parse_duration(ago)?) }
pub fn parse_timepoint_ago(ago: &str) -> Result<SystemTime> {
timepoint_ago(parse_duration(ago)?)
}
#[inline]
pub fn timepoint_ago(duration: Duration) -> Result<SystemTime> {
@@ -61,13 +63,13 @@ pub fn pretty(d: Duration) -> String {
let gen64 = |w, f, u| fmt(w, (f * 100.0) as u32, u);
let gen128 = |w, f, u| gen64(u64::try_from(w).expect("u128 to u64"), f, u);
match whole_and_frac(d) {
(Days(whole), frac) => gen64(whole, frac, "days"),
(Hours(whole), frac) => gen64(whole, frac, "hours"),
(Mins(whole), frac) => gen64(whole, frac, "minutes"),
(Secs(whole), frac) => gen64(whole, frac, "seconds"),
(Millis(whole), frac) => gen128(whole, frac, "milliseconds"),
(Micros(whole), frac) => gen128(whole, frac, "microseconds"),
(Nanos(whole), frac) => gen128(whole, frac, "nanoseconds"),
| (Days(whole), frac) => gen64(whole, frac, "days"),
| (Hours(whole), frac) => gen64(whole, frac, "hours"),
| (Mins(whole), frac) => gen64(whole, frac, "minutes"),
| (Secs(whole), frac) => gen64(whole, frac, "seconds"),
| (Millis(whole), frac) => gen128(whole, frac, "milliseconds"),
| (Micros(whole), frac) => gen128(whole, frac, "microseconds"),
| (Nanos(whole), frac) => gen128(whole, frac, "nanoseconds"),
}
}
@@ -80,18 +82,15 @@ pub fn whole_and_frac(d: Duration) -> (Unit, f64) {
use Unit::*;
let whole = whole_unit(d);
(
whole,
match whole {
Days(_) => (d.as_secs() % 86_400) as f64 / 86_400.0,
Hours(_) => (d.as_secs() % 3_600) as f64 / 3_600.0,
Mins(_) => (d.as_secs() % 60) as f64 / 60.0,
Secs(_) => f64::from(d.subsec_millis()) / 1000.0,
Millis(_) => f64::from(d.subsec_micros()) / 1000.0,
Micros(_) => f64::from(d.subsec_nanos()) / 1000.0,
Nanos(_) => 0.0,
},
)
(whole, match whole {
| Days(_) => (d.as_secs() % 86_400) as f64 / 86_400.0,
| Hours(_) => (d.as_secs() % 3_600) as f64 / 3_600.0,
| Mins(_) => (d.as_secs() % 60) as f64 / 60.0,
| Secs(_) => f64::from(d.subsec_millis()) / 1000.0,
| Millis(_) => f64::from(d.subsec_micros()) / 1000.0,
| Micros(_) => f64::from(d.subsec_nanos()) / 1000.0,
| Nanos(_) => 0.0,
})
}
/// Return the largest Unit which represents the duration. The value is
@@ -101,18 +100,18 @@ pub fn whole_unit(d: Duration) -> Unit {
use Unit::*;
match d.as_secs() {
86_400.. => Days(d.as_secs() / 86_400),
3_600..=86_399 => Hours(d.as_secs() / 3_600),
60..=3_599 => Mins(d.as_secs() / 60),
| 86_400.. => Days(d.as_secs() / 86_400),
| 3_600..=86_399 => Hours(d.as_secs() / 3_600),
| 60..=3_599 => Mins(d.as_secs() / 60),
_ => match d.as_micros() {
1_000_000.. => Secs(d.as_secs()),
1_000..=999_999 => Millis(d.subsec_millis().into()),
| _ => match d.as_micros() {
| 1_000_000.. => Secs(d.as_secs()),
| 1_000..=999_999 => Millis(d.subsec_millis().into()),
_ => match d.as_nanos() {
1_000.. => Micros(d.subsec_micros().into()),
| _ => match d.as_nanos() {
| 1_000.. => Micros(d.subsec_micros().into()),
_ => Nanos(d.subsec_nanos().into()),
| _ => Nanos(d.subsec_nanos().into()),
},
},
}
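
The truncated doc comment above ("Return the largest Unit which represents the duration") is easiest to see with the thresholds written out. A standalone restatement returning plain labels instead of the crate's Unit enum, whose full definition is not part of this diff:

use std::time::Duration;

fn unit_label(d: Duration) -> &'static str {
    match d.as_secs() {
        | 86_400.. => "days",
        | 3_600..=86_399 => "hours",
        | 60..=3_599 => "minutes",
        | _ => match d.as_micros() {
            | 1_000_000.. => "seconds",
            | 1_000..=999_999 => "milliseconds",
            | _ => match d.as_nanos() {
                | 1_000.. => "microseconds",
                | _ => "nanoseconds",
            },
        },
    }
}

fn main() {
    assert_eq!(unit_label(Duration::from_secs(172_800)), "days");
    assert_eq!(unit_label(Duration::from_millis(5)), "milliseconds");
    assert_eq!(unit_label(Duration::from_nanos(800)), "nanoseconds");
}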