resolve a couple of pedantic clippy lints, remove unnecessary qualifications

Signed-off-by: strawberry <strawberry@puppygock.gay>
strawberry 2024-03-22 21:51:21 -04:00 committed by June
parent 6d7ef80aba
commit 9d0b647911
35 changed files with 127 additions and 148 deletions
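
For context, a minimal sketch (not taken from this commit) of the two patterns being cleaned up below: with `Result` in the prelude, the fully qualified `std::result::Result::ok` path is an unnecessary qualification, and clippy's pedantic `semicolon_if_nothing_returned` lint asks for a trailing semicolon on a statement-position `info!` call. The helper names and the crate-level lint attributes are illustrative assumptions; the actual project may enable these lints elsewhere (e.g. via Cargo.toml or a workspace config). The sketch assumes the `tracing` crate, which the touched files already import.

#![warn(unused_qualifications)] // rustc lint for over-qualified paths (assumed setup, not from the commit)
#![warn(clippy::pedantic)]      // opts in to pedantic lints such as semicolon_if_nothing_returned

use tracing::info;

// Hypothetical helper mirroring the `.filter_map(Result::ok)` cleanup
// applied throughout this commit: keep only the Ok values.
fn ok_device_ids(ids: Vec<Result<String, std::io::Error>>) -> Vec<String> {
    ids.into_iter()
        // before: .filter_map(std::result::Result::ok)
        .filter_map(Result::ok) // `Result` is in the prelude, so the short path suffices
        .collect()
}

// Hypothetical helper mirroring the semicolon fix: the macro call returns (),
// so clippy::semicolon_if_nothing_returned wants it terminated with `;`.
fn log_failure(destination: &str, err: &str) {
    info!("Could not send request to {}: {}", destination, err);
}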

View file

@ -16,8 +16,7 @@ use tracing::{error, info, warn};
use super::{DEVICE_ID_LENGTH, SESSION_ID_LENGTH, TOKEN_LENGTH};
use crate::{
api::client_server::{self, join_room_by_id_helper},
services, utils, Error, Result, Ruma,
api::client_server::{self, join_room_by_id_helper}, service, services, utils, Error, Result, Ruma
};
const RANDOM_USER_ID_LENGTH: usize = 10;
@ -279,7 +278,7 @@ pub async fn register_route(body: Ruma<register::v3::Request>) -> Result<registe
// If this is the first real user, grant them admin privileges except for guest
// users Note: the server user, @conduit:servername, is generated first
if !is_guest {
if let Some(admin_room) = services().admin.get_admin_room()? {
if let Some(admin_room) = service::admin::Service::get_admin_room()? {
if services().rooms.state_cache.room_joined_count(&admin_room)? == Some(1) {
services().admin.make_user_admin(&user_id, displayname).await?;
@ -375,12 +374,7 @@ pub async fn change_password_route(body: Ruma<change_password::v3::Request>) ->
if body.logout_devices {
// Logout all devices except the current one
for id in services()
.users
.all_device_ids(sender_user)
.filter_map(std::result::Result::ok)
.filter(|id| id != sender_device)
{
for id in services().users.all_device_ids(sender_user).filter_map(Result::ok).filter(|id| id != sender_device) {
services().users.remove_device(sender_user, &id)?;
}
}

View file

@ -90,7 +90,7 @@ pub(crate) async fn get_alias_helper(room_alias: OwnedRoomAliasId) -> Result<get
let mut servers = response.servers;
// find active servers in room state cache to suggest
for extra_servers in services().rooms.state_cache.room_servers(&room_id).filter_map(std::result::Result::ok) {
for extra_servers in services().rooms.state_cache.room_servers(&room_id).filter_map(Result::ok) {
servers.push(extra_servers);
}
@ -152,7 +152,7 @@ pub(crate) async fn get_alias_helper(room_alias: OwnedRoomAliasId) -> Result<get
let mut servers: Vec<OwnedServerName> = Vec::new();
// find active servers in room state cache to suggest
for extra_servers in services().rooms.state_cache.room_servers(&room_id).filter_map(std::result::Result::ok) {
for extra_servers in services().rooms.state_cache.room_servers(&room_id).filter_map(Result::ok) {
servers.push(extra_servers);
}

View file

@ -69,7 +69,7 @@ pub async fn get_context_route(body: Ruma<get_context::v3::Request>) -> Result<g
.timeline
.pdus_until(sender_user, &room_id, base_token)?
.take(limit / 2)
.filter_map(std::result::Result::ok) // Remove buggy events
.filter_map(Result::ok) // Remove buggy events
.filter(|(_, pdu)| {
services()
.rooms
@ -101,7 +101,7 @@ pub async fn get_context_route(body: Ruma<get_context::v3::Request>) -> Result<g
.timeline
.pdus_after(sender_user, &room_id, base_token)?
.take(limit / 2)
.filter_map(std::result::Result::ok) // Remove buggy events
.filter_map(Result::ok) // Remove buggy events
.filter(|(_, pdu)| {
services()
.rooms

View file

@ -16,7 +16,7 @@ pub async fn get_devices_route(body: Ruma<get_devices::v3::Request>) -> Result<g
let devices: Vec<device::Device> = services()
.users
.all_devices_metadata(sender_user)
.filter_map(std::result::Result::ok) // Filter out buggy devices
.filter_map(Result::ok) // Filter out buggy devices
.collect();
Ok(get_devices::v3::Response {

View file

@ -191,10 +191,10 @@ pub async fn get_key_changes_route(body: Ruma<get_key_changes::v3::Request>) ->
body.from.parse().map_err(|_| Error::BadRequest(ErrorKind::InvalidParam, "Invalid `from`."))?,
Some(body.to.parse().map_err(|_| Error::BadRequest(ErrorKind::InvalidParam, "Invalid `to`."))?),
)
.filter_map(std::result::Result::ok),
.filter_map(Result::ok),
);
for room_id in services().rooms.state_cache.rooms_joined(sender_user).filter_map(std::result::Result::ok) {
for room_id in services().rooms.state_cache.rooms_joined(sender_user).filter_map(Result::ok) {
device_list_updates.extend(
services()
.users
@ -203,7 +203,7 @@ pub async fn get_key_changes_route(body: Ruma<get_key_changes::v3::Request>) ->
body.from.parse().map_err(|_| Error::BadRequest(ErrorKind::InvalidParam, "Invalid `from`."))?,
Some(body.to.parse().map_err(|_| Error::BadRequest(ErrorKind::InvalidParam, "Invalid `to`."))?),
)
.filter_map(std::result::Result::ok),
.filter_map(Result::ok),
);
}
Ok(get_key_changes::v3::Response {

View file

@ -399,12 +399,7 @@ pub async fn joined_rooms_route(body: Ruma<joined_rooms::v3::Request>) -> Result
let sender_user = body.sender_user.as_ref().expect("user is authenticated");
Ok(joined_rooms::v3::Response {
joined_rooms: services()
.rooms
.state_cache
.rooms_joined(sender_user)
.filter_map(std::result::Result::ok)
.collect(),
joined_rooms: services().rooms.state_cache.rooms_joined(sender_user).filter_map(Result::ok).collect(),
})
}
@ -456,7 +451,7 @@ pub async fn joined_members_route(body: Ruma<joined_members::v3::Request>) -> Re
}
let mut joined = BTreeMap::new();
for user_id in services().rooms.state_cache.room_members(&body.room_id).filter_map(std::result::Result::ok) {
for user_id in services().rooms.state_cache.room_members(&body.room_id).filter_map(Result::ok) {
let display_name = services().users.displayname(&user_id)?;
let avatar_url = services().users.avatar_url(&user_id)?;
@ -847,7 +842,7 @@ pub(crate) async fn join_room_by_id_helper(
.rooms
.state_cache
.room_members(restriction_room_id)
.filter_map(std::result::Result::ok)
.filter_map(Result::ok)
.find(|uid| uid.server_name() == services().globals.server_name())
});
Some(authorized_user)
@ -1208,7 +1203,7 @@ pub(crate) async fn invite_helper(
.rooms
.state_cache
.room_servers(room_id)
.filter_map(std::result::Result::ok)
.filter_map(Result::ok)
.filter(|server| &**server != services().globals.server_name());
services().sending.send_pdu(servers, &pdu_id)?;

View file

@ -177,7 +177,7 @@ pub async fn get_message_events_route(
.timeline
.pdus_after(sender_user, &body.room_id, from)?
.take(limit)
.filter_map(std::result::Result::ok) // Filter out buggy events
.filter_map(Result::ok) // Filter out buggy events
.filter(|(_, pdu)| {
services()
.rooms
@ -219,7 +219,7 @@ pub async fn get_message_events_route(
.timeline
.pdus_until(sender_user, &body.room_id, from)?
.take(limit)
.filter_map(std::result::Result::ok) // Filter out buggy events
.filter_map(Result::ok) // Filter out buggy events
.filter(|(_, pdu)| {
services()
.rooms

View file

@ -32,7 +32,7 @@ pub async fn set_displayname_route(
.rooms
.state_cache
.rooms_joined(sender_user)
.filter_map(std::result::Result::ok)
.filter_map(Result::ok)
.map(|room_id| {
Ok::<_, Error>((
PduBuilder {
@ -60,7 +60,7 @@ pub async fn set_displayname_route(
room_id,
))
})
.filter_map(std::result::Result::ok)
.filter_map(Result::ok)
.collect();
for (pdu_builder, room_id) in all_rooms_joined {
@ -143,7 +143,7 @@ pub async fn set_avatar_url_route(body: Ruma<set_avatar_url::v3::Request>) -> Re
.rooms
.state_cache
.rooms_joined(sender_user)
.filter_map(std::result::Result::ok)
.filter_map(Result::ok)
.map(|room_id| {
Ok::<_, Error>((
PduBuilder {
@ -171,7 +171,7 @@ pub async fn set_avatar_url_route(body: Ruma<set_avatar_url::v3::Request>) -> Re
room_id,
))
})
.filter_map(std::result::Result::ok)
.filter_map(Result::ok)
.collect();
for (pdu_builder, room_id) in all_joined_rooms {

View file

@ -44,7 +44,7 @@ pub async fn report_event_route(body: Ruma<report_content::v3::Request>) -> Resu
.rooms
.state_cache
.room_members(&pdu.room_id)
.filter_map(std::result::Result::ok)
.filter_map(Result::ok)
.any(|user_id| user_id == *sender_user)
{
return Err(Error::BadRequest(

View file

@ -576,12 +576,7 @@ pub async fn get_room_aliases_route(body: Ruma<aliases::v3::Request>) -> Result<
}
Ok(aliases::v3::Response {
aliases: services()
.rooms
.alias
.local_aliases_for_room(&body.room_id)
.filter_map(std::result::Result::ok)
.collect(),
aliases: services().rooms.alias.local_aliases_for_room(&body.room_id).filter_map(Result::ok).collect(),
})
}
@ -801,7 +796,7 @@ pub async fn upgrade_room_route(body: Ruma<upgrade_room::v3::Request>) -> Result
}
// Moves any local aliases to the new room
for alias in services().rooms.alias.local_aliases_for_room(&body.room_id).filter_map(std::result::Result::ok) {
for alias in services().rooms.alias.local_aliases_for_room(&body.room_id).filter_map(Result::ok) {
services().rooms.alias.set_alias(&alias, &replacement_room)?;
}

View file

@ -22,9 +22,10 @@ pub async fn search_events_route(body: Ruma<search_events::v3::Request>) -> Resu
let search_criteria = body.search_categories.room_events.as_ref().unwrap();
let filter = &search_criteria.filter;
let room_ids = filter.rooms.clone().unwrap_or_else(|| {
services().rooms.state_cache.rooms_joined(sender_user).filter_map(std::result::Result::ok).collect()
});
let room_ids = filter
.rooms
.clone()
.unwrap_or_else(|| services().rooms.state_cache.rooms_joined(sender_user).filter_map(Result::ok).collect());
// Use limit or else 10, with maximum 100
let limit = filter.limit.map_or(10, u64::from).min(100) as usize;
@ -92,7 +93,7 @@ pub async fn search_events_route(body: Ruma<search_events::v3::Request>) -> Resu
result: Some(result),
})
})
.filter_map(std::result::Result::ok)
.filter_map(Result::ok)
.skip(skip)
.take(limit)
.collect();

View file

@ -7,7 +7,7 @@ use ruma::{
use crate::{service::rooms::spaces::PagnationToken, services, Error, Result, Ruma};
/// # `GET /_matrix/client/v1/rooms/{room_id}/hierarchy``
/// # `GET /_matrix/client/v1/rooms/{room_id}/hierarchy`
///
/// Paginates over the space tree in a depth-first manner to locate child rooms
/// of a given space.

View file

@ -193,8 +193,7 @@ async fn sync_helper(
let mut device_list_left = HashSet::new();
// Look for device list updates of this account
device_list_updates
.extend(services().users.keys_changed(sender_user.as_ref(), since, None).filter_map(std::result::Result::ok));
device_list_updates.extend(services().users.keys_changed(sender_user.as_ref(), since, None).filter_map(Result::ok));
let all_joined_rooms = services().rooms.state_cache.rooms_joined(&sender_user).collect::<Vec<_>>();
@ -372,7 +371,7 @@ async fn sync_helper(
.rooms
.user
.get_shared_rooms(vec![sender_user.clone(), user_id.clone()])?
.filter_map(std::result::Result::ok)
.filter_map(Result::ok)
.filter_map(|other_room_id| {
Some(
services()
@ -542,7 +541,7 @@ async fn load_joined_room(
.rooms
.timeline
.all_pdus(sender_user, room_id)?
.filter_map(std::result::Result::ok) // Ignore all broken pdus
.filter_map(Result::ok) // Ignore all broken pdus
.filter(|(_, pdu)| pdu.kind == TimelineEventType::RoomMember)
.map(|(_, pdu)| {
let content: RoomMemberEventContent = serde_json::from_str(pdu.content.get())
@ -566,7 +565,7 @@ async fn load_joined_room(
}
})
// Filter out buggy users
.filter_map(std::result::Result::ok)
.filter_map(Result::ok)
// Filter for possible heroes
.flatten()
{
@ -817,8 +816,7 @@ async fn load_joined_room(
};
// Look for device list updates in this room
device_list_updates
.extend(services().users.keys_changed(room_id.as_ref(), since, None).filter_map(std::result::Result::ok));
device_list_updates.extend(services().users.keys_changed(room_id.as_ref(), since, None).filter_map(Result::ok));
let notification_count = if send_notification_counts {
Some(
@ -863,7 +861,7 @@ async fn load_joined_room(
.edus
.read_receipt
.readreceipts_since(room_id, since)
.filter_map(std::result::Result::ok) // Filter out buggy events
.filter_map(Result::ok) // Filter out buggy events
.map(|(_, _, v)| v)
.collect();
@ -956,7 +954,7 @@ fn share_encrypted_room(sender_user: &UserId, user_id: &UserId, ignore_room: &Ro
.rooms
.user
.get_shared_rooms(vec![sender_user.to_owned(), user_id.to_owned()])?
.filter_map(std::result::Result::ok)
.filter_map(Result::ok)
.filter(|room_id| room_id != ignore_room)
.filter_map(|other_room_id| {
Some(
@ -999,7 +997,7 @@ pub async fn sync_events_v4_route(
services().users.update_sync_request_with_cache(sender_user.clone(), sender_device.clone(), &mut body);
let all_joined_rooms =
services().rooms.state_cache.rooms_joined(&sender_user).filter_map(std::result::Result::ok).collect::<Vec<_>>();
services().rooms.state_cache.rooms_joined(&sender_user).filter_map(Result::ok).collect::<Vec<_>>();
if body.extensions.to_device.enabled.unwrap_or(false) {
services().users.remove_to_device_events(&sender_user, &sender_device, globalsince)?;
@ -1011,9 +1009,8 @@ pub async fn sync_events_v4_route(
if body.extensions.e2ee.enabled.unwrap_or(false) {
// Look for device list updates of this account
device_list_changes.extend(
services().users.keys_changed(sender_user.as_ref(), globalsince, None).filter_map(std::result::Result::ok),
);
device_list_changes
.extend(services().users.keys_changed(sender_user.as_ref(), globalsince, None).filter_map(Result::ok));
for room_id in &all_joined_rooms {
let current_shortstatehash = if let Some(s) = services().rooms.state.get_room_shortstatehash(room_id)? {
@ -1129,16 +1126,15 @@ pub async fn sync_events_v4_route(
}
}
// Look for device list updates in this room
device_list_changes.extend(
services().users.keys_changed(room_id.as_ref(), globalsince, None).filter_map(std::result::Result::ok),
);
device_list_changes
.extend(services().users.keys_changed(room_id.as_ref(), globalsince, None).filter_map(Result::ok));
}
for user_id in left_encrypted_users {
let dont_share_encrypted_room = services()
.rooms
.user
.get_shared_rooms(vec![sender_user.clone(), user_id.clone()])?
.filter_map(std::result::Result::ok)
.filter_map(Result::ok)
.filter_map(|other_room_id| {
Some(
services()
@ -1288,7 +1284,7 @@ pub async fn sync_events_v4_route(
let required_state = required_state_request
.iter()
.map(|state| services().rooms.state_accessor.room_state_get(room_id, &state.0, &state.1))
.filter_map(std::result::Result::ok)
.filter_map(Result::ok)
.flatten()
.map(|state| state.to_sync_state_event())
.collect();
@ -1298,7 +1294,7 @@ pub async fn sync_events_v4_route(
.rooms
.state_cache
.room_members(room_id)
.filter_map(std::result::Result::ok)
.filter_map(Result::ok)
.filter(|member| member != &sender_user)
.map(|member| {
Ok::<_, Error>(
@ -1310,7 +1306,7 @@ pub async fn sync_events_v4_route(
}),
)
})
.filter_map(std::result::Result::ok)
.filter_map(Result::ok)
.flatten()
.take(5)
.collect::<Vec<_>>();

View file

@ -20,7 +20,7 @@ pub async fn get_threads_route(body: Ruma<get_threads::v1::Request>) -> Result<g
.threads
.threads_until(sender_user, &body.room_id, from, &body.include)?
.take(limit)
.filter_map(std::result::Result::ok)
.filter_map(Result::ok)
.filter(|(_, pdu)| {
services()
.rooms

View file

@ -45,7 +45,7 @@ pub async fn search_users_route(body: Ruma<search_users::v3::Request>) -> Result
let mut user_visible = false;
let user_is_in_public_rooms =
services().rooms.state_cache.rooms_joined(&user_id).filter_map(std::result::Result::ok).any(|room| {
services().rooms.state_cache.rooms_joined(&user_id).filter_map(Result::ok).any(|room| {
services().rooms.state_accessor.room_state_get(&room, &StateEventType::RoomJoinRules, "").map_or(
false,
|event| {

View file

@ -321,7 +321,7 @@ where
e.url()
),
false => {
info!("Could not send request to {} at {}: {}", destination, actual_destination_str, e)
info!("Could not send request to {} at {}: {}", destination, actual_destination_str, e);
},
},
},
@ -1016,7 +1016,7 @@ pub async fn get_backfill_route(body: Ruma<get_backfill::v1::Request>) -> Result
.take(limit.try_into().unwrap());
let events = all_events
.filter_map(std::result::Result::ok)
.filter_map(Result::ok)
.filter(|(_, e)| {
matches!(
services().rooms.state_accessor.server_can_see_event(sender_servername, &e.room_id, &e.event_id,),
@ -1412,7 +1412,7 @@ async fn create_join_event(
.rooms
.state_cache
.room_servers(room_id)
.filter_map(std::result::Result::ok)
.filter_map(Result::ok)
.filter(|server| &**server != services().globals.server_name());
services().sending.send_pdu(servers, &pdu_id)?;
@ -1614,7 +1614,7 @@ pub async fn get_devices_route(body: Ruma<get_devices::v1::Request>) -> Result<g
devices: services()
.users
.all_devices_metadata(&body.user_id)
.filter_map(std::result::Result::ok)
.filter_map(Result::ok)
.filter_map(|metadata| {
let device_id_string = metadata.device_id.as_str().to_owned();
let device_display_name = match services().globals.allow_device_name_federation() {