optimize sha256 interface gather/vector inputs
Signed-off-by: Jason Volk <jason@zemos.net>
parent 14e3b242df
commit 887ae84f1e
7 changed files with 85 additions and 50 deletions
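
The gist of the interface change: the old helper took an already-gathered &[&[u8]], joined it into a fresh buffer with 0xFF separators, and returned an allocated Vec<u8>; the new sha256 module streams any iterator of byte-slice-like inputs through one digest context and returns a fixed-size [u8; 32]. A minimal standalone sketch of the equivalence, using the ring crate directly; the sample event-ID bytes and this main() are made up for illustration, while the real helpers live under conduit::utils as shown in the hunks below:

use ring::digest::{digest, Context, SHA256};

fn main() {
	// Made-up inputs standing in for event IDs / compressed state IDs.
	let keys: Vec<&[u8]> = vec![b"$event_a", b"$event_b", b"$event_c"];

	// Old shape: gather into a Vec, join with 0xFF separators, hash the allocation.
	let joined: Vec<u8> = keys.join(&0xFF);
	let old = digest(&SHA256, &joined);

	// New shape: feed each input through one Context, emitting the 0xFF
	// delimiter between items, so no intermediate buffer is built.
	let mut inputs = keys.iter();
	let mut ctx = Context::new(&SHA256);
	if let Some(first) = inputs.next() {
		ctx.update(first);
		for input in inputs {
			ctx.update(b"\xFF");
			ctx.update(input);
		}
	}
	let new = ctx.finish();

	// Same byte stream, so the digests match; only the allocations differ.
	assert_eq!(old.as_ref(), new.as_ref());
}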

@@ -1,7 +1,7 @@
 use axum::extract::State;
 use axum_client_ip::InsecureClientIp;
 use base64::{engine::general_purpose, Engine as _};
-use conduit::{err, utils, warn, Err, Error, PduEvent, Result};
+use conduit::{err, utils, utils::hash::sha256, warn, Err, Error, PduEvent, Result};
 use ruma::{
 	api::{client::error::ErrorKind, federation::membership::create_invite},
 	events::room::member::{MembershipState, RoomMemberEventContent},

@@ -160,7 +160,7 @@ pub(crate) async fn create_invite_route(
 			ruma::api::appservice::event::push_events::v1::Request {
 				events: vec![pdu.to_room_event()],
 				txn_id: general_purpose::URL_SAFE_NO_PAD
-					.encode(utils::calculate_hash(&[pdu.event_id.as_bytes()]))
+					.encode(sha256::hash(pdu.event_id.as_bytes()))
 					.into(),
 				ephemeral: Vec::new(),
 				to_device: Vec::new(),

@@ -1,13 +1,10 @@
 mod argon;
-mod sha256;
+pub mod sha256;
 
 use crate::Result;
 
-pub fn password(password: &str) -> Result<String> { argon::password(password) }
-
-pub fn verify_password(password: &str, password_hash: &str) -> Result<()> {
+pub fn verify_password(password: &str, password_hash: &str) -> Result {
 	argon::verify_password(password, password_hash)
 }
 
-#[must_use]
-pub fn calculate_hash(keys: &[&[u8]]) -> Vec<u8> { sha256::hash(keys) }
+pub fn password(password: &str) -> Result<String> { argon::password(password) }

@@ -1,9 +1,62 @@
-use ring::{digest, digest::SHA256};
-
-#[tracing::instrument(skip_all, level = "debug")]
-pub(super) fn hash(keys: &[&[u8]]) -> Vec<u8> {
-	// We only hash the pdu's event ids, not the whole pdu
-	let bytes = keys.join(&0xFF);
-	let hash = digest::digest(&SHA256, &bytes);
-	hash.as_ref().to_owned()
-}
+use ring::{
+	digest,
+	digest::{Context, SHA256, SHA256_OUTPUT_LEN},
+};
+
+pub type Digest = [u8; SHA256_OUTPUT_LEN];
+
+/// Sha256 hash (input gather joined by 0xFF bytes)
+#[must_use]
+#[tracing::instrument(skip(inputs), level = "trace")]
+pub fn delimited<'a, T, I>(mut inputs: I) -> Digest
+where
+	I: Iterator<Item = T> + 'a,
+	T: AsRef<[u8]> + 'a,
+{
+	let mut ctx = Context::new(&SHA256);
+	if let Some(input) = inputs.next() {
+		ctx.update(input.as_ref());
+		for input in inputs {
+			ctx.update(b"\xFF");
+			ctx.update(input.as_ref());
+		}
+	}
+
+	ctx.finish()
+		.as_ref()
+		.try_into()
+		.expect("failed to return Digest buffer")
+}
+
+/// Sha256 hash (input gather)
+#[must_use]
+#[tracing::instrument(skip(inputs), level = "trace")]
+pub fn concat<'a, T, I>(inputs: I) -> Digest
+where
+	I: Iterator<Item = T> + 'a,
+	T: AsRef<[u8]> + 'a,
+{
+	inputs
+		.fold(Context::new(&SHA256), |mut ctx, input| {
+			ctx.update(input.as_ref());
+			ctx
+		})
+		.finish()
+		.as_ref()
+		.try_into()
+		.expect("failed to return Digest buffer")
+}
+
+/// Sha256 hash
+#[inline]
+#[must_use]
+#[tracing::instrument(skip(input), level = "trace")]
+pub fn hash<T>(input: T) -> Digest
+where
+	T: AsRef<[u8]>,
+{
+	digest::digest(&SHA256, input.as_ref())
+		.as_ref()
+		.try_into()
+		.expect("failed to return Digest buffer")
+}
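
For orientation, a short usage sketch of the three entry points added above. The module path matches the import added in the invite hunk (conduit::utils::hash::sha256); the event-ID byte strings and the wrapper function are made-up sample values, not code from this change:

use conduit::utils::hash::sha256;

fn usage_sketch() {
	// Iterator inputs; each item only needs AsRef<[u8]>.
	let ids: [&[u8]; 2] = [b"$event_a", b"$event_b"];

	// Items joined by a 0xFF delimiter while hashing (re-exported by utils
	// as calculate_hash in the hunk below).
	let joined: sha256::Digest = sha256::delimited(ids.iter());

	// Items concatenated with no delimiter bytes.
	let concatenated: sha256::Digest = sha256::concat(ids.iter());

	// Single input, as used for the appservice txn_id in the invite route.
	let single: sha256::Digest = sha256::hash(b"$event_a");

	// All three return a fixed 32-byte array instead of an allocated Vec<u8>.
	assert_eq!(joined.len(), 32);
	let _ = (concatenated, single);
}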

@@ -28,7 +28,7 @@ pub use self::{
 	bytes::{increment, u64_from_bytes, u64_from_u8, u64_from_u8x8},
 	debug::slice_truncated as debug_slice_truncated,
 	future::TryExtExt as TryFutureExtExt,
-	hash::calculate_hash,
+	hash::sha256::delimited as calculate_hash,
 	html::Escape as HtmlEscape,
 	json::{deserialize_from_str, to_canonical_object},
 	math::clamp,

@@ -157,12 +157,7 @@ impl Service {
 
 		let previous_shortstatehash = self.get_room_shortstatehash(room_id).await;
 
-		let state_hash = calculate_hash(
-			&state_ids_compressed
-				.iter()
-				.map(|s| &s[..])
-				.collect::<Vec<_>>(),
-		);
+		let state_hash = calculate_hash(state_ids_compressed.iter().map(|s| &s[..]));
 
 		let (shortstatehash, already_existed) = self
 			.services

@@ -352,12 +352,7 @@ impl Service {
 			.await
 			.ok();
 
-		let state_hash = utils::calculate_hash(
-			&new_state_ids_compressed
-				.iter()
-				.map(|bytes| &bytes[..])
-				.collect::<Vec<_>>(),
-		);
+		let state_hash = utils::calculate_hash(new_state_ids_compressed.iter().map(|bytes| &bytes[..]));
 
 		let (new_shortstatehash, already_existed) = self
 			.services

@@ -539,16 +539,13 @@ impl Service {
 			}
 		}
 
-		let txn_id = &*general_purpose::URL_SAFE_NO_PAD.encode(calculate_hash(
-			&events
-				.iter()
-				.map(|e| match e {
-					SendingEvent::Edu(b) => &**b,
-					SendingEvent::Pdu(b) => b.as_ref(),
-					SendingEvent::Flush => &[],
-				})
-				.collect::<Vec<_>>(),
-		));
+		let txn_hash = calculate_hash(events.iter().filter_map(|e| match e {
+			SendingEvent::Edu(b) => Some(&**b),
+			SendingEvent::Pdu(b) => Some(b.as_ref()),
+			SendingEvent::Flush => None,
+		}));
+
+		let txn_id = &*general_purpose::URL_SAFE_NO_PAD.encode(txn_hash);
 
 		//debug_assert!(pdu_jsons.len() + edu_jsons.len() > 0, "sending empty
 		// transaction");
@ -664,23 +661,21 @@ impl Service {
|
||||||
|
|
||||||
//debug_assert!(pdu_jsons.len() + edu_jsons.len() > 0, "sending empty
|
//debug_assert!(pdu_jsons.len() + edu_jsons.len() > 0, "sending empty
|
||||||
// transaction");
|
// transaction");
|
||||||
let transaction_id = &*general_purpose::URL_SAFE_NO_PAD.encode(calculate_hash(
|
|
||||||
&events
|
let txn_hash = calculate_hash(events.iter().filter_map(|e| match e {
|
||||||
.iter()
|
SendingEvent::Edu(b) => Some(&**b),
|
||||||
.map(|e| match e {
|
SendingEvent::Pdu(b) => Some(b.as_ref()),
|
||||||
SendingEvent::Edu(b) => &**b,
|
SendingEvent::Flush => None,
|
||||||
SendingEvent::Pdu(b) => b.as_ref(),
|
}));
|
||||||
SendingEvent::Flush => &[],
|
|
||||||
})
|
let txn_id = &*general_purpose::URL_SAFE_NO_PAD.encode(txn_hash);
|
||||||
.collect::<Vec<_>>(),
|
|
||||||
));
|
|
||||||
|
|
||||||
let request = send_transaction_message::v1::Request {
|
let request = send_transaction_message::v1::Request {
|
||||||
origin: self.server.config.server_name.clone(),
|
origin: self.server.config.server_name.clone(),
|
||||||
pdus: pdu_jsons,
|
pdus: pdu_jsons,
|
||||||
edus: edu_jsons,
|
edus: edu_jsons,
|
||||||
origin_server_ts: MilliSecondsSinceUnixEpoch::now(),
|
origin_server_ts: MilliSecondsSinceUnixEpoch::now(),
|
||||||
transaction_id: transaction_id.into(),
|
transaction_id: txn_id.into(),
|
||||||
};
|
};
|
||||||
|
|
||||||
let client = &self.services.client.sender;
|
let client = &self.services.client.sender;
|
||||||
|
@ -692,7 +687,7 @@ impl Service {
|
||||||
.iter()
|
.iter()
|
||||||
.filter(|(_, res)| res.is_err())
|
.filter(|(_, res)| res.is_err())
|
||||||
.for_each(
|
.for_each(
|
||||||
|(pdu_id, res)| warn!(%transaction_id, %server, "error sending PDU {pdu_id} to remote server: {res:?}"),
|
|(pdu_id, res)| warn!(%txn_id, %server, "error sending PDU {pdu_id} to remote server: {res:?}"),
|
||||||
);
|
);
|
||||||
})
|
})
|
||||||
.map(|_| dest.clone())
|
.map(|_| dest.clone())
|
||||||
|
|