resolve some pedantic lints, reduce some allocations
Signed-off-by: strawberry <strawberry@puppygock.gay>
parent 507baf20fa
commit 496a9c7af8
12 changed files with 72 additions and 72 deletions
@@ -1030,7 +1030,7 @@ impl Service {
 
         for room_id in services().rooms.state_cache.rooms_joined(&user_id) {
             let room_id = room_id?;
-            rooms.push(Self::get_room_info(room_id));
+            rooms.push(Self::get_room_info(&room_id));
         }
 
         if rooms.is_empty() {
@@ -1510,7 +1510,7 @@ impl Service {
             .metadata
             .iter_ids()
             .filter_map(std::result::Result::ok)
-            .map(Self::get_room_info)
+            .map(|id: OwnedRoomId| Self::get_room_info(&id))
             .collect::<Vec<_>>();
         rooms.sort_by_key(|r| r.1);
         rooms.reverse();
@@ -1713,7 +1713,7 @@ impl Service {
             .directory
             .public_rooms()
             .filter_map(std::result::Result::ok)
-            .map(Self::get_room_info)
+            .map(|id: OwnedRoomId| Self::get_room_info(&id))
             .collect::<Vec<_>>();
         rooms.sort_by_key(|r| r.1);
         rooms.reverse();
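Both `.map(Self::get_room_info)` call sites become closures because a bare function reference must match the iterator's item type exactly: these iterators yield owned `OwnedRoomId`s, while `get_room_info` now borrows (see the `@@ -1955` hunk below). A reduced sketch of the mismatch, using `&str`/`String` as stand-ins for the Ruma id types and a hypothetical `describe` in place of `Self::get_room_info`:

    // The iterator yields owned Strings, but `describe` borrows, so a bare
    // `.map(describe)` no longer type-checks; a closure bridges the gap,
    // mirroring `.map(|id: OwnedRoomId| Self::get_room_info(&id))` above.
    fn describe(id: &str) -> String {
        format!("info for {id}")
    }

    fn main() {
        let ids = vec!["!a:example.org".to_string(), "!b:example.org".to_string()];
        let infos: Vec<String> = ids.into_iter().map(|id| describe(&id)).collect();
        println!("{infos:?}");
    }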
@@ -1955,11 +1955,11 @@ impl Service {
         Ok(reply_message_content)
     }
 
-    fn get_room_info(id: OwnedRoomId) -> (OwnedRoomId, u64, String) {
+    fn get_room_info(id: &OwnedRoomId) -> (OwnedRoomId, u64, String) {
         (
             id.clone(),
-            services().rooms.state_cache.room_joined_count(&id).ok().flatten().unwrap_or(0),
-            services().rooms.state_accessor.get_name(&id).ok().flatten().unwrap_or_else(|| id.to_string()),
+            services().rooms.state_cache.room_joined_count(id).ok().flatten().unwrap_or(0),
+            services().rooms.state_accessor.get_name(id).ok().flatten().unwrap_or_else(|| id.to_string()),
         )
     }
 
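The `get_room_info` hunk above is the core of the commit: Clippy's pedantic `needless_pass_by_value` lint flags functions that take ownership of arguments they only read, since every caller is forced to move or clone a value into them. A minimal, self-contained sketch of the before/after, with a hypothetical `RoomId` newtype standing in for Ruma's:

    // Hypothetical stand-in for ruma's OwnedRoomId.
    #[derive(Clone)]
    struct RoomId(String);

    // Before: takes ownership, so a caller that still needs `id` must clone it.
    fn label_owned(id: RoomId) -> String {
        format!("room {}", id.0)
    }

    // After: borrows, so callers keep their value and skip the extra clone.
    fn label(id: &RoomId) -> String {
        format!("room {}", id.0)
    }

    fn main() {
        let id = RoomId("!abc:example.org".into());
        let _ = label_owned(id.clone()); // clone only to satisfy the signature
        let a = label(&id); // no clone; `id` stays usable
        println!("{a}");
    }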
@@ -121,7 +121,7 @@ pub async fn presence_handler(
             }
 
             Some(user_id) = presence_timers.next() => {
-                process_presence_timer(user_id)?;
+                process_presence_timer(&user_id)?;
             }
         }
     }
@@ -133,7 +133,7 @@ async fn presence_timer(user_id: OwnedUserId, timeout: Duration) -> OwnedUserId
     user_id
 }
 
-fn process_presence_timer(user_id: OwnedUserId) -> Result<()> {
+fn process_presence_timer(user_id: &OwnedUserId) -> Result<()> {
     let idle_timeout = services().globals.config.presence_idle_timeout_s * 1_000;
     let offline_timeout = services().globals.config.presence_offline_timeout_s * 1_000;
 
@@ -141,8 +141,8 @@ fn process_presence_timer(user_id: OwnedUserId) -> Result<()> {
     let mut last_active_ago = None;
     let mut status_msg = None;
 
-    for room_id in services().rooms.state_cache.rooms_joined(&user_id) {
-        let presence_event = services().rooms.edus.presence.get_presence(&room_id?, &user_id)?;
+    for room_id in services().rooms.state_cache.rooms_joined(user_id) {
+        let presence_event = services().rooms.edus.presence.get_presence(&room_id?, user_id)?;
 
         if let Some(presence_event) = presence_event {
             presence_state = presence_event.content.presence;
@@ -162,10 +162,10 @@ fn process_presence_timer(user_id: OwnedUserId) -> Result<()> {
     debug!("Processed presence timer for user '{user_id}': Old state = {presence_state}, New state = {new_state:?}");
 
     if let Some(new_state) = new_state {
-        for room_id in services().rooms.state_cache.rooms_joined(&user_id) {
+        for room_id in services().rooms.state_cache.rooms_joined(user_id) {
             services().rooms.edus.presence.set_presence(
                 &room_id?,
-                &user_id,
+                user_id,
                 new_state.clone(),
                 Some(false),
                 last_active_ago,
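Inside `process_presence_timer`, the parameter is now already a reference, so the body passes `user_id` straight through: writing `&user_id` on a `&OwnedUserId` would yield a `&&OwnedUserId`, and deref coercion already turns `&OwnedUserId` into the `&UserId` the state-cache and presence methods expect. A toy illustration of that coercion, with a `Deref`-to-`str` newtype playing the role of Ruma's owned/borrowed id pair:

    // Toy stand-in: OwnedUserId derefs to str the way ruma's derefs to UserId.
    struct OwnedUserId(String);

    impl std::ops::Deref for OwnedUserId {
        type Target = str;
        fn deref(&self) -> &str {
            &self.0
        }
    }

    fn rooms_joined(user_id: &str) -> Vec<String> {
        vec![format!("!room-of-{user_id}:example.org")]
    }

    fn process_presence_timer(user_id: &OwnedUserId) {
        // `user_id` is already a reference; `&user_id` would be `&&OwnedUserId`.
        // Deref coercion hands rooms_joined the `&str` it asks for.
        let id: &str = user_id;
        for room_id in rooms_joined(user_id) {
            println!("presence timer for {id} in {room_id}");
        }
    }

    fn main() {
        process_presence_timer(&OwnedUserId("@alice:example.org".to_string()));
    }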
@@ -41,8 +41,8 @@ impl Service {
 
     #[allow(clippy::too_many_arguments)]
     pub fn paginate_relations_with_filter(
-        &self, sender_user: &UserId, room_id: &RoomId, target: &EventId, filter_event_type: Option<TimelineEventType>,
-        filter_rel_type: Option<RelationType>, from: PduCount, to: Option<PduCount>, limit: usize,
+        &self, sender_user: &UserId, room_id: &RoomId, target: &EventId, filter_event_type: &Option<TimelineEventType>,
+        filter_rel_type: &Option<RelationType>, from: PduCount, to: Option<PduCount>, limit: usize,
     ) -> Result<get_relating_events::v1::Response> {
         let next_token;
 
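For `paginate_relations_with_filter`, the same pass-by-value lint is silenced by borrowing the optional filters as `&Option<TimelineEventType>` and `&Option<RelationType>`. An alternative shape often preferred in API design is `Option<&T>`, obtained at the call site with `.as_ref()`; the two express the same thing. A sketch contrasting them, with a hypothetical `EventType` enum standing in for the Ruma types:

    #[derive(PartialEq)]
    enum EventType {
        Message,
        Reaction,
    }

    // Shape used in the diff: borrow the whole Option.
    fn matches_filter(event: &EventType, filter: &Option<EventType>) -> bool {
        filter.as_ref().map_or(true, |f| f == event)
    }

    // Alternative shape: the caller lends the inner value with `.as_ref()`.
    fn matches_filter_alt(event: &EventType, filter: Option<&EventType>) -> bool {
        filter.map_or(true, |f| f == event)
    }

    fn main() {
        let filter = Some(EventType::Message);
        assert!(matches_filter(&EventType::Message, &filter));
        assert!(matches_filter_alt(&EventType::Message, filter.as_ref()));
        assert!(!matches_filter(&EventType::Reaction, &filter));
        println!("both filter shapes agree");
    }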
@@ -85,6 +85,23 @@ impl Ord for PduCount {
     }
 }
 
+// Update Relationships
+#[derive(Deserialize)]
+struct ExtractRelatesTo {
+    #[serde(rename = "m.relates_to")]
+    relates_to: Relation,
+}
+
+#[derive(Clone, Debug, Deserialize)]
+struct ExtractEventId {
+    event_id: OwnedEventId,
+}
+#[derive(Clone, Debug, Deserialize)]
+struct ExtractRelatesToEventId {
+    #[serde(rename = "m.relates_to")]
+    relates_to: ExtractEventId,
+}
+
 pub struct Service {
     pub db: &'static dyn Data,
 
@@ -467,23 +484,6 @@ impl Service {
             _ => {},
         }
 
-        // Update Relationships
-        #[derive(Deserialize)]
-        struct ExtractRelatesTo {
-            #[serde(rename = "m.relates_to")]
-            relates_to: Relation,
-        }
-
-        #[derive(Clone, Debug, Deserialize)]
-        struct ExtractEventId {
-            event_id: OwnedEventId,
-        }
-        #[derive(Clone, Debug, Deserialize)]
-        struct ExtractRelatesToEventId {
-            #[serde(rename = "m.relates_to")]
-            relates_to: ExtractEventId,
-        }
-
         if let Ok(content) = serde_json::from_str::<ExtractRelatesToEventId>(pdu.content.get()) {
             if let Some(related_pducount) = services().rooms.timeline.get_pdu_count(&content.relates_to.event_id)? {
                 services().rooms.pdu_metadata.add_relation(PduCount::Normal(count2), related_pducount)?;
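The last two hunks are a single move: the `Extract*` serde helpers leave the body of a long method and land at module scope. Items declared after statements inside a function trip Clippy's pedantic `items_after_statements` lint, and hoisting them also makes the types reusable by the rest of the module. A condensed sketch of the resulting shape, assuming `serde`/`serde_json` as in the diff and `String` in place of `OwnedEventId`:

    use serde::Deserialize;

    // After the move: helpers at module scope, visible to every method.
    #[derive(Clone, Debug, Deserialize)]
    struct ExtractEventId {
        event_id: String, // OwnedEventId in the real code
    }

    #[derive(Clone, Debug, Deserialize)]
    struct ExtractRelatesToEventId {
        #[serde(rename = "m.relates_to")]
        relates_to: ExtractEventId,
    }

    fn relates_to_event_id(content: &str) -> Option<String> {
        // Before the move, the structs above sat right here, after earlier
        // statements in the method, firing clippy::items_after_statements.
        serde_json::from_str::<ExtractRelatesToEventId>(content)
            .ok()
            .map(|c| c.relates_to.event_id)
    }

    fn main() {
        let content = r#"{"m.relates_to": {"event_id": "$abc"}}"#;
        println!("{:?}", relates_to_event_id(content));
    }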