fix every clippy warning possible, remove io_uring as default feature

this project's codebase is so horrendous, i'm shocked that no one has run
clippy at all. it had ~200 total lint warnings, some pointing at real
performance issues and unsoundness, and the rest just plain ugly code.
i have sat down and fixed as many of these as possible, and i am exhausted.
i haven't fixed some of the extremely complex ones, but i brought the
count down from ~200 to ~30.
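
for reference, most of these were mechanical rewrites of a handful of lint
shapes: needless borrows, useless conversions, redundant Ok-wrapping, and
late initialization. a minimal sketch of those patterns on made-up helpers
(none of this is code from this repo, the names are illustrative only):

    // illustrative only: made-up helpers, not Conduit APIs
    use std::collections::HashMap;

    fn byte_len(key: &str) -> usize {
        key.len()
    }

    // clippy::needless_borrow -- `key` is already a reference, re-borrowing it is noise
    fn lookup(key: &str) -> usize {
        // before: byte_len(&key)
        byte_len(key)
    }

    // clippy::useless_conversion -- extend() already accepts any IntoIterator
    fn merge(dst: &mut HashMap<String, u64>, src: HashMap<String, u64>) {
        // before: dst.extend(src.into_iter());
        dst.extend(src);
    }

    // clippy::needless_question_mark -- Ok(x?) is just x
    fn require(map: &HashMap<String, u64>, key: &str) -> Result<u64, String> {
        // before: Ok(map.get(key).copied().ok_or_else(|| "missing".to_owned())?)
        map.get(key).copied().ok_or_else(|| "missing".to_owned())
    }

    // clippy::needless_late_init -- bind in one expression instead of declare-then-assign
    fn latest_version(sha256_media: bool) -> u64 {
        // before: let v: u64; if sha256_media { v = 14; } else { v = 13; } v
        if sha256_media { 14 } else { 13 }
    }

running cargo clippy --workspace locally will show the ~30 warnings that
are still left.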

i have also removed io_uring as a default feature, since it falls into the
same category as linux eBPF: a major kernel attack surface for minimal
performance gains. having it on by default also made it impossible to
cross-compile from macOS to Linux, because io_uring does not exist in
Darwin land. there are far better ways to improve performance at the
codebase level than io_uring.
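
anyone who actually wants io_uring can still opt in at build time with
something like cargo build --release --features io_uring (assuming the
feature keeps that name in Cargo.toml); it just is not enabled for
everyone by default anymore.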

Signed-off-by: strawberry <june@girlboss.ceo>
strawberry 2023-11-27 00:39:50 -05:00
parent 19d1b484e0
commit 54a3f47851
33 changed files with 312 additions and 314 deletions


@@ -256,8 +256,8 @@ lasttimelinecount_cache: {lasttimelinecount_cache}\n"
             ..
         } = new_keys;
-        keys.verify_keys.extend(verify_keys.into_iter());
-        keys.old_verify_keys.extend(old_verify_keys.into_iter());
+        keys.verify_keys.extend(verify_keys);
+        keys.old_verify_keys.extend(old_verify_keys);
         self.server_signingkeys.insert(
             origin.as_bytes(),


@@ -20,7 +20,7 @@ impl service::rooms::state_accessor::Data for KeyValueDatabase {
             let parsed = services()
                 .rooms
                 .state_compressor
-                .parse_compressed_state_event(&compressed)?;
+                .parse_compressed_state_event(compressed)?;
             result.insert(parsed.0, parsed.1);
             i += 1;
@@ -49,7 +49,7 @@ impl service::rooms::state_accessor::Data for KeyValueDatabase {
             let (_, eventid) = services()
                 .rooms
                 .state_compressor
-                .parse_compressed_state_event(&compressed)?;
+                .parse_compressed_state_event(compressed)?;
             if let Some(pdu) = services().rooms.timeline.get_pdu(&eventid)? {
                 result.insert(
                     (
@@ -101,7 +101,7 @@ impl service::rooms::state_accessor::Data for KeyValueDatabase {
                 services()
                     .rooms
                     .state_compressor
-                    .parse_compressed_state_event(&compressed)
+                    .parse_compressed_state_event(compressed)
                     .ok()
                     .map(|(_, id)| id)
             }))


@@ -52,13 +52,13 @@ impl service::rooms::threads::Data for KeyValueDatabase {
             .collect::<Vec<_>>()
             .join(&[0xff][..]);
-        self.threadid_userids.insert(&root_id, &users)?;
+        self.threadid_userids.insert(root_id, &users)?;
         Ok(())
     }
     fn get_participants(&self, root_id: &[u8]) -> Result<Option<Vec<OwnedUserId>>> {
-        if let Some(users) = self.threadid_userids.get(&root_id)? {
+        if let Some(users) = self.threadid_userids.get(root_id)? {
             Ok(Some(
                 users
                     .split(|b| *b == 0xff)


@@ -39,11 +39,10 @@ impl service::rooms::timeline::Data for KeyValueDatabase {
     /// Returns the `count` of this pdu's id.
     fn get_pdu_count(&self, event_id: &EventId) -> Result<Option<PduCount>> {
-        Ok(self
-            .eventid_pduid
+        self.eventid_pduid
             .get(event_id.as_bytes())?
             .map(|pdu_id| pdu_count(&pdu_id))
-            .transpose()?)
+            .transpose()
     }
     /// Returns the json of a pdu.
@@ -80,7 +79,7 @@ impl service::rooms::timeline::Data for KeyValueDatabase {
     /// Returns the pdu's id.
     fn get_pdu_id(&self, event_id: &EventId) -> Result<Option<Vec<u8>>> {
-        Ok(self.eventid_pduid.get(event_id.as_bytes())?)
+        self.eventid_pduid.get(event_id.as_bytes())
     }
     /// Returns the pdu.
@@ -230,7 +229,7 @@ impl service::rooms::timeline::Data for KeyValueDatabase {
         room_id: &RoomId,
         until: PduCount,
     ) -> Result<Box<dyn Iterator<Item = Result<(PduCount, PduEvent)>> + 'a>> {
-        let (prefix, current) = count_to_id(&room_id, until, 1, true)?;
+        let (prefix, current) = count_to_id(room_id, until, 1, true)?;
         let user_id = user_id.to_owned();
@@ -257,7 +256,7 @@ impl service::rooms::timeline::Data for KeyValueDatabase {
         room_id: &RoomId,
         from: PduCount,
     ) -> Result<Box<dyn Iterator<Item = Result<(PduCount, PduEvent)>> + 'a>> {
-        let (prefix, current) = count_to_id(&room_id, from, 1, false)?;
+        let (prefix, current) = count_to_id(room_id, from, 1, false)?;
         let user_id = user_id.to_owned();


@@ -427,14 +427,12 @@ impl KeyValueDatabase {
         }
         // If the database has any data, perform data migrations before starting
-        let latest_database_version: u64;
         // do not increment the db version if the user is not using sha256_media
-        if cfg!(feature = "sha256_media") {
-            latest_database_version = 14;
+        let latest_database_version = if cfg!(feature = "sha256_media") {
+            14
         } else {
-            latest_database_version = 13;
-        }
+            13
+        };
         if services().users.count()? > 0 {
             // MIGRATIONS
@@ -487,6 +485,7 @@ impl KeyValueDatabase {
                     continue;
                 }
+                #[allow(deprecated)]
                 let path = services().globals.get_media_file(&key);
                 let mut file = fs::File::create(path)?;
                 file.write_all(&content)?;