Reduce number of separate sources of truth for presence disabled-ness
Instead of checking if we should update every time we want to update, call the updater every time and decide internally.
parent 34fe7b7369
commit 8134dd9151

4 changed files with 29 additions and 26 deletions
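The shape of the change, as a minimal sketch (the `PresenceUpdater` type, `presence_enabled` flag, and error strings below are illustrative stand-ins, not conduwuit's actual names): the sender half of the presence timer channel is wrapped in an `Option` that is `None` whenever local presence is disabled, so callers invoke the updater unconditionally and the updater itself decides whether to do anything.

```rust
use std::time::Duration;

use tokio::sync::mpsc;

/// Illustrative stand-in for the presence side of `KeyValueDatabase`.
struct PresenceUpdater {
    /// `None` when local presence is disabled; decided once at startup.
    timer_sender: Option<mpsc::UnboundedSender<(String, Duration)>>,
}

impl PresenceUpdater {
    fn new(presence_enabled: bool) -> (Self, Option<mpsc::UnboundedReceiver<(String, Duration)>>) {
        // Only create the channel (and, in the real code, spawn the presence
        // handler task) when presence is enabled.
        if presence_enabled {
            let (tx, rx) = mpsc::unbounded_channel();
            (Self { timer_sender: Some(tx) }, Some(rx))
        } else {
            (Self { timer_sender: None }, None)
        }
    }

    /// Ping presence for a user; a silent no-op when presence is disabled.
    fn ping_presence(&self, user_id: &str, timeout: Duration) -> Result<(), String> {
        // Single source of truth: no sender means presence is off.
        let Some(ref tx) = self.timer_sender else {
            return Ok(());
        };
        tx.send((user_id.to_owned(), timeout))
            .map_err(|e| format!("Failed to add presence timer: {e}"))
    }
}

fn main() {
    // With presence disabled there is no channel, and pings do nothing.
    let (updater, _receiver) = PresenceUpdater::new(false);
    updater
        .ping_presence("@alice:example.org", Duration::from_secs(30))
        .unwrap();
}
```

With this shape, call sites such as `set_displayname_route` no longer need their own `allow_local_presence()` guard, which is exactly what the hunks below remove.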
@@ -85,12 +85,10 @@ pub async fn set_displayname_route(
         .await;
     }
 
-    if services().globals.allow_local_presence() {
-        // Presence update
-        services()
-            .presence
-            .ping_presence(sender_user, PresenceState::Online)?;
-    }
+    // Presence update
+    services()
+        .presence
+        .ping_presence(sender_user, PresenceState::Online)?;
 
     Ok(set_display_name::v3::Response {})
 }

@@ -224,12 +222,10 @@ pub async fn set_avatar_url_route(body: Ruma<set_avatar_url::v3::Request>) -> Re
         .await;
     }
 
-    if services().globals.allow_local_presence() {
-        // Presence update
-        services()
-            .presence
-            .ping_presence(sender_user, PresenceState::Online)?;
-    }
+    // Presence update
+    services()
+        .presence
+        .ping_presence(sender_user, PresenceState::Online)?;
 
     Ok(set_avatar_url::v3::Response {})
 }

@@ -171,11 +171,9 @@ async fn sync_helper(
     // bool = caching allowed
 ) -> Result<(sync_events::v3::Response, bool), Error> {
     // Presence update
-    if services().globals.allow_local_presence() {
-        services()
-            .presence
-            .ping_presence(&sender_user, body.set_presence)?;
-    }
+    services()
+        .presence
+        .ping_presence(&sender_user, body.set_presence)?;
 
     // Setup watchers, so if there's no response, we can wait for them
     let watcher = services().globals.watch(&sender_user, &sender_device);

@@ -24,6 +24,10 @@ impl service::presence::Data for KeyValueDatabase {
     }
 
     fn ping_presence(&self, user_id: &UserId, new_state: PresenceState) -> Result<()> {
+        let Some(ref tx) = *self.presence_timer_sender else {
+            return Ok(());
+        };
+
         let now = utils::millis_since_unix_epoch();
         let mut state_changed = false;
 

@@ -74,8 +78,7 @@ impl service::presence::Data for KeyValueDatabase {
             _ => services().globals.config.presence_offline_timeout_s,
         };
 
-        self.presence_timer_sender
-            .send((user_id.to_owned(), Duration::from_secs(timeout)))
+        tx.send((user_id.to_owned(), Duration::from_secs(timeout)))
             .map_err(|e| {
                 error!("Failed to add presence timer: {}", e);
                 Error::bad_database("Failed to add presence timer")

@@ -86,6 +89,10 @@ impl service::presence::Data for KeyValueDatabase {
         &self, room_id: &RoomId, user_id: &UserId, presence_state: PresenceState, currently_active: Option<bool>,
         last_active_ago: Option<UInt>, status_msg: Option<String>,
     ) -> Result<()> {
+        let Some(ref tx) = *self.presence_timer_sender else {
+            return Ok(());
+        };
+
         let now = utils::millis_since_unix_epoch();
         let last_active_ts = match last_active_ago {
             Some(last_active_ago) => now.saturating_sub(last_active_ago.into()),

@@ -107,8 +114,7 @@ impl service::presence::Data for KeyValueDatabase {
             _ => services().globals.config.presence_offline_timeout_s,
         };
 
-        self.presence_timer_sender
-            .send((user_id.to_owned(), Duration::from_secs(timeout)))
+        tx.send((user_id.to_owned(), Duration::from_secs(timeout)))
             .map_err(|e| {
                 error!("Failed to add presence timer: {}", e);
                 Error::bad_database("Failed to add presence timer")

@@ -185,7 +185,7 @@ pub struct KeyValueDatabase {
     pub(super) our_real_users_cache: RwLock<HashMap<OwnedRoomId, Arc<HashSet<OwnedUserId>>>>,
     pub(super) appservice_in_room_cache: RwLock<HashMap<OwnedRoomId, HashMap<String, bool>>>,
     pub(super) lasttimelinecount_cache: Mutex<HashMap<OwnedRoomId, PduCount>>,
-    pub(super) presence_timer_sender: Arc<mpsc::UnboundedSender<(OwnedUserId, Duration)>>,
+    pub(super) presence_timer_sender: Arc<Option<mpsc::UnboundedSender<(OwnedUserId, Duration)>>>,
 }
 
 #[derive(Deserialize)]

@@ -275,7 +275,13 @@ impl KeyValueDatabase {
             },
         };
 
+        let presence_sender = if services().globals.allow_local_presence() {
             let (presence_sender, presence_receiver) = mpsc::unbounded_channel();
+            Self::start_presence_handler(presence_receiver).await;
+            Some(presence_sender)
+        } else {
+            None
+        };
 
         let db_raw = Box::new(Self {
             db: builder.clone(),

@@ -1059,9 +1065,6 @@ impl KeyValueDatabase {
         if services().globals.allow_check_for_updates() {
             Self::start_check_for_updates_task().await;
         }
-        if services().globals.allow_local_presence() {
-            Self::start_presence_handler(presence_receiver).await;
-        }
 
         Ok(())
     }