fix edition 2024 lints
Signed-off-by: Jason Volk <jason@zemos.net>
parent cbf207bd1f
commit a67ab75417
36 changed files with 60 additions and 72 deletions
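Most hunks below share one shape: a semicolon trailing a `match` or `if` block used as a statement is dropped, so `};` becomes `}`. Newer clippy releases warn about this when building against edition 2024 (likely via `clippy::unnecessary_semicolon`; the exact lint name is an assumption here, not stated in the diff). A minimal before/after sketch of that pattern, with invented names:

// Hypothetical sketch only; `describe` and `n` are invented for illustration.
fn describe(n: u32) {
	// Before: the match used as a statement carried a trailing semicolon.
	match n {
		| 0 => println!("zero"),
		| _ => println!("nonzero"),
	};

	// After: the semicolon is dropped; the match already evaluates to `()`.
	match n {
		| 0 => println!("zero"),
		| _ => println!("nonzero"),
	}
}
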
@@ -905,6 +905,7 @@ missing_docs_in_private_items = { level = "allow", priority = 1 }
missing_errors_doc = { level = "allow", priority = 1 }
missing_panics_doc = { level = "allow", priority = 1 }
module_name_repetitions = { level = "allow", priority = 1 }
needless_continue = { level = "allow", priority = 1 }
no_effect_underscore_binding = { level = "allow", priority = 1 }
similar_names = { level = "allow", priority = 1 }
single_match_else = { level = "allow", priority = 1 }

@@ -969,6 +970,7 @@ style = { level = "warn", priority = -1 }
# trivial assertions are quite alright
assertions_on_constants = { level = "allow", priority = 1 }
module_inception = { level = "allow", priority = 1 }
obfuscated_if_else = { level = "allow", priority = 1 }

###################
suspicious = { level = "warn", priority = -1 }

@@ -62,7 +62,7 @@ pub(super) async fn process(command: AdminCommand, context: &Command<'_>) -> Res
| Debug(command) => debug::process(command, context).await?,
| Query(command) => query::process(command, context).await?,
| Check(command) => check::process(command, context).await?,
};
}

Ok(())
}

@@ -42,7 +42,7 @@ pub(super) async fn list_rooms(

if rooms.is_empty() {
return Ok(RoomMessageEventContent::text_plain("No more rooms."));
};
}

let output_plain = format!(
"Rooms ({}):\n```\n{}\n```",

@@ -67,7 +67,7 @@ pub(super) async fn reprocess(

if rooms.is_empty() {
return Ok(RoomMessageEventContent::text_plain("No more rooms."));
};
}

let output = format!(
"Rooms (page {page}):\n```\n{}\n```",

@@ -166,7 +166,7 @@ pub(super) async fn create_user(
"Failed to automatically join room {room} for user {user_id}: {e}"
);
},
};
}
}
}
}

@@ -550,7 +550,7 @@ pub(super) async fn force_join_list_of_local_users(
debug_warn!("Failed force joining {user_id} to {room_id} during bulk join: {e}");
failed_joins = failed_joins.saturating_add(1);
},
};
}
}

Ok(RoomMessageEventContent::notice_markdown(format!(

@@ -646,7 +646,7 @@ pub(super) async fn force_join_all_local_users(
debug_warn!("Failed force joining {user_id} to {room_id} during bulk join: {e}");
failed_joins = failed_joins.saturating_add(1);
},
};
}
}

Ok(RoomMessageEventContent::notice_markdown(format!(

@@ -499,7 +499,7 @@ pub(crate) async fn register_route(
| _ => {
info!("Automatically joined room {room} for user {user_id}");
},
};
}
}
}
}

@@ -131,7 +131,7 @@ pub(crate) async fn set_room_visibility_route(

if !services.rooms.metadata.exists(&body.room_id).await {
// Return 404 if the room doesn't exist
return Err(Error::BadRequest(ErrorKind::NotFound, "Room not found"));
return Err!(Request(NotFound("Room not found")));
}

if services

@@ -145,10 +145,7 @@ pub(crate) async fn set_room_visibility_route(
}

if !user_can_publish_room(&services, sender_user, &body.room_id).await? {
return Err(Error::BadRequest(
ErrorKind::forbidden(),
"User is not allowed to publish this room",
));
return Err!(Request(Forbidden("User is not allowed to publish this room")));
}

match &body.visibility {

@@ -386,12 +383,7 @@ async fn user_can_publish_room(
.await
{
| Ok(event) => Ok(event.sender == user_id),
| _ => {
return Err(Error::BadRequest(
ErrorKind::forbidden(),
"User is not allowed to publish this room",
));
},
| _ => Err!(Request(Forbidden("User is not allowed to publish this room"))),
}
},
}

@@ -993,7 +993,7 @@ async fn join_room_by_id_helper_remote(
| _ => {
join_event_stub.remove("event_id");
},
};
}

// In order to create a compatible ref hash (EventID) the `hashes` field needs
// to be present

@@ -1420,7 +1420,7 @@ async fn join_room_by_id_helper_local(
| _ => {
join_event_stub.remove("event_id");
},
};
}

// In order to create a compatible ref hash (EventID) the `hashes` field needs
// to be present

@@ -1947,7 +1947,7 @@ async fn remote_leave_room(
| _ => {
leave_event_stub.remove("event_id");
},
};
}

// In order to create a compatible ref hash (EventID) the `hashes` field needs
// to be present

@@ -43,7 +43,7 @@ pub(crate) async fn report_room_route(
ErrorKind::InvalidParam,
"Reason too long, should be 750 characters or fewer",
));
};
}

delay_response().await;

@@ -164,14 +164,14 @@ async fn is_event_report_valid(
ErrorKind::InvalidParam,
"Invalid score, must be within 0 to -100",
));
};
}

if reason.as_ref().is_some_and(|s| s.len() > 750) {
return Err(Error::BadRequest(
ErrorKind::InvalidParam,
"Reason too long, should be 750 characters or fewer",
));
};
}

if !services
.rooms

@@ -110,7 +110,7 @@ pub(super) async fn auth(
}
},
| _ => {},
};
}
}

match (metadata.authentication, token) {

@@ -135,7 +135,7 @@ async fn create_join_event(

if state_key != sender {
return Err!(Request(BadJson("State key does not match sender user.")));
};
}

if let Some(authorising_user) = content.join_authorized_via_users_server {
use ruma::RoomVersionId::*;

@@ -137,7 +137,7 @@ pub(crate) async fn create_knock_event_v1_route(

if state_key != sender {
return Err!(Request(InvalidParam("state_key does not match sender user of event.")));
};
}

let origin: OwnedServerName = serde_json::from_value(
value

@@ -241,7 +241,7 @@ impl<'a, 'de: 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
| "Ignore" => self.record_ignore(),
| "IgnoreAll" => self.record_ignore_all(),
| _ => unhandled!("Unrecognized deserialization Directive {name:?}"),
};
}

visitor.visit_unit()
}

@@ -18,5 +18,5 @@ pub(crate) fn handle(level: LogLevel, msg: &str) {
| LogLevel::Error | LogLevel::Fatal => error!("{msg}"),
| LogLevel::Info => debug!("{msg}"),
| LogLevel::Warn => warn!("{msg}"),
};
}
}

@@ -52,7 +52,7 @@ pub fn compact_blocking(&self, opts: Options) -> Result {
co.set_target_level(level.try_into()?);
},
| (Some(_), Some(_)) => return Err!("compacting between specific levels not supported"),
};
}

self.db
.db

@@ -50,7 +50,6 @@ where
.iter()
.map(ser::serialize_to::<KeyBuf, _>)
.map(|result| result.expect("failed to serialize query key"))
.map(Into::into)
.collect();

self.db

@@ -40,7 +40,7 @@ pub fn rev_raw_stream(self: &Arc<Self>) -> impl Stream<Item = Result<KeyVal<'_>>
.into_stream()
.flatten()
.boxed();
};
}

let seek = Seek {
map: self.clone(),

@@ -89,7 +89,7 @@ where
.into_stream()
.flatten()
.boxed();
};
}

let seek = Seek {
map: self.clone(),

@@ -39,7 +39,7 @@ pub fn raw_stream(self: &Arc<Self>) -> impl Stream<Item = Result<KeyVal<'_>>> +
.into_stream()
.flatten()
.boxed();
};
}

let seek = Seek {
map: self.clone(),

@@ -86,7 +86,7 @@ where
.into_stream()
.flatten()
.boxed();
};
}

let seek = Seek {
map: self.clone(),

@@ -146,11 +146,9 @@ pub(crate) fn close(&self) {
.map(JoinHandle::join)
.map(|result| result.map_err(Error::from_panic))
.enumerate()
.for_each(|(id, result)| {
match result {
.for_each(|(id, result)| match result {
| Ok(()) => trace!(?id, "worker joined"),
| Err(error) => error!(?id, "worker joined with error: {error}"),
};
});
}

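The close() hunk above folds a closure whose body was only a braced `match` statement into a closure whose body is the `match` expression itself, dropping the inner braces and the trailing semicolon. A sketch of the same rewrite with invented names (`report` and `results` are not from the diff):

// Hypothetical sketch; mirrors the shape of the close() change only.
fn report(results: Vec<Result<(), String>>) {
	results
		.into_iter()
		.enumerate()
		// Before: .for_each(|(id, result)| { match result { ... }; })
		// After: the braces and semicolon around the match are gone.
		.for_each(|(id, result)| match result {
			| Ok(()) => println!("worker {id} joined"),
			| Err(error) => eprintln!("worker {id} failed: {error}"),
		});
}
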
@@ -345,7 +343,7 @@ fn worker_handle(self: &Arc<Self>, cmd: Cmd) {
| Cmd::Get(cmd) if cmd.key.len() == 1 => self.handle_get(cmd),
| Cmd::Get(cmd) => self.handle_batch(cmd),
| Cmd::Iter(cmd) => self.handle_iter(cmd),
};
}
}

#[implement(Pool)]

@@ -362,7 +360,7 @@ fn handle_iter(&self, mut cmd: Seek) {
return;
}

let from = cmd.key.as_deref().map(Into::into);
let from = cmd.key.as_deref();

let result = match cmd.dir {
| Direction::Forward => cmd.state.init_fwd(from),

@@ -394,7 +392,7 @@ fn handle_batch(self: &Arc<Self>, mut cmd: Get) {
return;
}

let keys = cmd.key.iter().map(Into::into);
let keys = cmd.key.iter();

let result: SmallVec<_> = cmd.map.get_batch_blocking(keys).collect();

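The two pool hunks above (and the earlier query-batch hunk) drop a `.map(Into::into)` step where the iterator already yields the type the callee takes, so the conversion is a no-op; recent clippy can flag such identity conversions (plausibly under `useless_conversion`, though the exact trigger here is an assumption). A self-contained sketch with invented names:

// Hypothetical sketch; `process` and `lookup` are invented for illustration.
fn process<'a>(keys: impl Iterator<Item = &'a str>) -> usize {
	keys.count()
}

fn lookup(cmd_keys: &[&str]) -> usize {
	// Before: let keys = cmd_keys.iter().copied().map(Into::into);
	// After: the &str to &str conversion is dropped as a no-op.
	let keys = cmd_keys.iter().copied();
	process(keys)
}
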
@@ -224,7 +224,7 @@ impl<W: Write> ser::Serializer for &mut Serializer<'_, W> {
self.separator()?;
},
| _ => unhandled!("Unrecognized serialization directive: {name:?}"),
};
}

Ok(())
}

@@ -113,13 +113,13 @@ impl<'a> State<'a> {
}

#[inline]
fn fetch_key(&self) -> Option<Key<'_>> { self.inner.key().map(Key::from) }
fn fetch_key(&self) -> Option<Key<'_>> { self.inner.key() }

#[inline]
fn _fetch_val(&self) -> Option<Val<'_>> { self.inner.value().map(Val::from) }
fn _fetch_val(&self) -> Option<Val<'_>> { self.inner.value() }

#[inline]
fn fetch(&self) -> Option<KeyVal<'_>> { self.inner.item().map(KeyVal::from) }
fn fetch(&self) -> Option<KeyVal<'_>> { self.inner.item() }

#[inline]
pub(super) fn status(&self) -> Option<rocksdb::Error> { self.inner.status().err() }

@@ -53,6 +53,6 @@ impl Watchers {
tx.0.send(()).expect("channel should still be open");
}
}
};
}
}
}

@@ -205,7 +205,7 @@ fn get_default(field: &Field) -> Option<String> {
},
| Meta::Path { .. } => return Some("false".to_owned()),
| _ => return None,
};
}
}

None

@@ -172,7 +172,7 @@ impl std::fmt::Display for BlurhashingError {
#[cfg(feature = "blurhashing")]
| Self::ImageError(e) =>
write!(f, "There was an error with the image loading library => {e}")?,
};
}

Ok(())
}

@@ -283,7 +283,7 @@ async fn location_request(&self, location: &str) -> Result<FileMeta> {
.map_err(Into::into)
.map(|content| FileMeta {
content: Some(content),
content_type: content_type.clone().map(Into::into),
content_type: content_type.clone(),
content_disposition: Some(make_content_disposition(
content_disposition.as_ref(),
content_type.as_deref(),

@@ -170,7 +170,7 @@ pub async fn handle_incoming_pdu<'a>(
| Entry::Occupied(mut e) => {
*e.get_mut() = (now, e.get().1.saturating_add(1));
},
};
}
}
}

@@ -125,7 +125,7 @@ pub async fn get_summary_and_children_local(
SummaryAccessibility::Inaccessible
},
)),
};
}

let children_pdus: Vec<_> = self
.get_stripped_space_child_events(current_room)

@@ -218,7 +218,7 @@ impl Service {
)
.await
.ok();
};
}

// Copy direct chat flag
if let Ok(mut direct_event) = self

@@ -250,7 +250,7 @@ impl Service {
)
.await?;
}
};
}
}
}

@@ -303,7 +303,7 @@ impl Service {
});

return Ok(());
};
}

// Else we have two options.
// 1. We add the current diff on top of the parent layer.

@@ -419,7 +419,7 @@ impl Service {
2, // every state change is 2 event changes on average
states_parents,
)?;
};
}

Ok(HashSetCompressStateEvent {
shortstatehash: new_shortstatehash,

@@ -422,7 +422,7 @@ impl Service {
highlight = true;
},
| _ => {},
};
}

// Break early if both conditions are true
if notify && highlight {

@@ -484,7 +484,7 @@ impl Service {
}
}
},
};
}
},
| TimelineEventType::SpaceChild =>
if let Some(_state_key) = &pdu.state_key {

@@ -776,7 +776,7 @@ impl Service {
| _ => {
pdu_json.remove("event_id");
},
};
}

// Add origin because synapse likes that (and it's required in the spec)
pdu_json.insert(

@@ -847,7 +847,7 @@ impl Service {
{
return Err!(Request(Forbidden("User cannot redact this event.")));
}
};
}
},
| _ => {
let content: RoomRedactionEventContent = pdu.get_content()?;

@@ -863,7 +863,7 @@ impl Service {
}
},
}
};
}

if pdu.kind == TimelineEventType::RoomMember {
let content: RoomMemberEventContent = pdu.get_content()?;

@@ -1293,10 +1293,10 @@ async fn check_pdu_for_admin_room(&self, pdu: &PduEvent, sender: &UserId) -> Res
}
},
| _ => {},
};
}
},
| _ => {},
};
}

Ok(())
}

@@ -131,7 +131,7 @@ impl crate::Service for Service {
| Err(error) => {
error!(id = ?error.id(), ?error, "sender worker finished");
},
};
}
}

Ok(())

@@ -138,7 +138,7 @@ impl Service {
match response {
| Ok(dest) => self.handle_response_ok(&dest, futures, statuses).await,
| Err((dest, e)) => Self::handle_response_err(dest, statuses, &e),
};
}
}

fn handle_response_err(dest: Destination, statuses: &mut CurTransactionStatus, e: &Error) {

@@ -319,10 +319,7 @@ impl Service {
if let Destination::Federation(server_name) = dest {
if let Ok((select_edus, last_count)) = self.select_edus(server_name).await {
debug_assert!(select_edus.len() <= EDU_LIMIT, "exceeded edus limit");
let select_edus = select_edus
.into_iter()
.map(Into::into)
.map(SendingEvent::Edu);
let select_edus = select_edus.into_iter().map(SendingEvent::Edu);

events.extend(select_edus);
self.db.set_latest_educount(server_name, last_count);

@@ -43,7 +43,7 @@ where
.keys()
.rev()
.take(self.services.server.config.trusted_server_batch_size)
.last()
.next_back()
.cloned()
{
let request = Request {

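The hunk above replaces `.last()` with `.next_back()` on a reversed, `take`-limited iterator. On a `DoubleEndedIterator`, `.last()` walks every remaining element from the front, while `.next_back()` returns the final element directly; clippy has a lint in this area (likely `double_ended_iterator_last`, though naming it is an assumption). A sketch with invented names and data:

// Hypothetical sketch; `pick_batch_end` and its inputs are invented.
fn pick_batch_end(servers: &[&str], batch_size: usize) -> Option<String> {
	servers
		.iter()
		.rev()
		.take(batch_size)
		// Before: .last() consumes the adaptor element by element.
		// After: .next_back() takes the back element directly.
		.next_back()
		.map(|s| (*s).to_owned())
}
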
@@ -69,7 +69,7 @@ pub async fn read_tokens(&self) -> Result<HashSet<String>> {
},
| Err(e) => error!("Failed to read the registration token file: {e}"),
}
};
}
if let Some(token) = &self.services.config.registration_token {
tokens.insert(token.to_owned());
}