feat: URL preview support
from upstream MR https://gitlab.com/famedly/conduit/-/merge_requests/347 with the following changes (so far):

- remove hardcoded list of allowed hosts (strongly disagree with this; even if it is desired, it should not be hardcoded)
- add more allow config options for granularity via URL contains, host contains, and domain is (explicit match) for security
- warn if a user is allowing all URLs to be previewed, for security reasons
- replace an expect with proper error handling
- bump webpage to 2.0
- improve code style a tad

Co-authored-by: rooot <hey@rooot.gay>
Signed-off-by: rooot <hey@rooot.gay>
Signed-off-by: strawberry <strawberry@puppygock.gay>
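For reference, a minimal sketch of how the new options might be set in the server's TOML config (the key names match the new Config fields added in this commit; the example values are illustrative only, not defaults):

    # "host contains" style matching
    url_preview_domain_contains_allowlist = ["wikipedia.org"]
    # "domain is" (explicit) matching
    url_preview_domain_explicit_allowlist = ["matrix.org"]
    # "URL contains" style matching
    url_preview_url_contains_allowlist = ["https://github.com/"]

All three default to empty lists, so no URL is previewable until the admin opts in; putting "*" in any of them allows every URL and logs a startup warning.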
This commit is contained in:
parent 6f26be1c6e
commit c0dd5b1cc2

13 changed files with 821 additions and 41 deletions
@@ -1,14 +1,21 @@
use std::time::Duration;
use std::{io::Cursor, net::IpAddr, sync::Arc, time::Duration};

use crate::{service::media::FileMeta, services, utils, Error, Result, Ruma};
use crate::{
    service::media::{FileMeta, UrlPreviewData},
    services, utils, Error, Result, Ruma,
};
use image::io::Reader as ImgReader;

use reqwest::Url;
use ruma::api::client::{
    error::ErrorKind,
    media::{
        create_content, get_content, get_content_as_filename, get_content_thumbnail,
        get_media_config,
        get_media_config, get_media_preview,
    },
};
use tracing::info;
use tracing::{debug, error, info};
use webpage::HTML;

/// generated MXC ID (`media-id`) length
const MXC_LENGTH: usize = 32;
@@ -24,6 +31,43 @@ pub async fn get_media_config_route(
    })
}

/// # `GET /_matrix/media/v3/preview_url`
///
/// Returns URL preview.
pub async fn get_media_preview_route(
    body: Ruma<get_media_preview::v3::Request>,
) -> Result<get_media_preview::v3::Response> {
    let url = &body.url;
    if !url_preview_allowed(url) {
        return Err(Error::BadRequest(
            ErrorKind::Forbidden,
            "URL is not allowed to be previewed",
        ));
    }

    if let Ok(preview) = get_url_preview(url).await {
        let res = serde_json::value::to_raw_value(&preview).map_err(|e| {
            error!(
                "Failed to convert UrlPreviewData into a serde json value: {}",
                e
            );
            Error::BadRequest(
                ErrorKind::Unknown,
                "Unknown error occurred parsing URL preview",
            )
        })?;

        return Ok(get_media_preview::v3::Response::from_raw_value(res));
    }

    Err(Error::BadRequest(
        ErrorKind::LimitExceeded {
            retry_after_ms: Some(Duration::from_secs(5)),
        },
        "Retry later",
    ))
}

/// # `POST /_matrix/media/v3/upload`
///
/// Permanently save media in the server.
@@ -266,3 +310,264 @@ pub async fn get_content_thumbnail_route(
        Err(Error::BadRequest(ErrorKind::NotFound, "Media not found."))
    }
}

async fn download_image(client: &reqwest::Client, url: &str) -> Result<UrlPreviewData> {
    let image = client.get(url).send().await?.bytes().await?;
    let mxc = format!(
        "mxc://{}/{}",
        services().globals.server_name(),
        utils::random_string(MXC_LENGTH)
    );

    services()
        .media
        .create(mxc.clone(), None, None, &image)
        .await?;

    let (width, height) = match ImgReader::new(Cursor::new(&image)).with_guessed_format() {
        Err(_) => (None, None),
        Ok(reader) => match reader.into_dimensions() {
            Err(_) => (None, None),
            Ok((width, height)) => (Some(width), Some(height)),
        },
    };

    Ok(UrlPreviewData {
        image: Some(mxc),
        image_size: Some(image.len()),
        image_width: width,
        image_height: height,
        ..Default::default()
    })
}

async fn download_html(client: &reqwest::Client, url: &str) -> Result<UrlPreviewData> {
    let max_download_size = 300_000; // TODO: is this bytes? kilobytes? megabytes?

    let mut response = client.get(url).send().await?;

    let mut bytes: Vec<u8> = Vec::new();
    while let Some(chunk) = response.chunk().await? {
        bytes.extend_from_slice(&chunk);
        if bytes.len() > max_download_size {
            break;
        }
    }
    let body = String::from_utf8_lossy(&bytes);
    let html = match HTML::from_string(body.to_string(), Some(url.to_owned())) {
        Ok(html) => html,
        Err(_) => {
            return Err(Error::BadRequest(
                ErrorKind::Unknown,
                "Failed to parse HTML",
            ))
        }
    };

    let mut data = match html.opengraph.images.first() {
        None => UrlPreviewData::default(),
        Some(obj) => download_image(client, &obj.url).await?,
    };

    let props = html.opengraph.properties;

    /* use OpenGraph title/description, but fall back to HTML if not available */
    data.title = props.get("title").cloned().or(html.title);
    data.description = props.get("description").cloned().or(html.description);

    Ok(data)
}

fn url_request_allowed(addr: &IpAddr) -> bool {
    // TODO: make this check ip_range_denylist

    // could be implemented with reqwest when it supports IP filtering:
    // https://github.com/seanmonstar/reqwest/issues/1515

    // These checks have been taken from the Rust core/net/ipaddr.rs crate,
    // IpAddr::V4.is_global() and IpAddr::V6.is_global(), as .is_global is not
    // yet stabilized. TODO: Once this is stable, this match can be simplified.
    match addr {
        IpAddr::V4(ip4) => {
            !(ip4.octets()[0] == 0 // "This network"
                || ip4.is_private()
                || (ip4.octets()[0] == 100 && (ip4.octets()[1] & 0b1100_0000 == 0b0100_0000)) // is_shared()
                || ip4.is_loopback()
                || ip4.is_link_local()
                // addresses reserved for future protocols (`192.0.0.0/24`)
                || (ip4.octets()[0] == 192 && ip4.octets()[1] == 0 && ip4.octets()[2] == 0)
                || ip4.is_documentation()
                || (ip4.octets()[0] == 198 && (ip4.octets()[1] & 0xfe) == 18) // is_benchmarking()
                || (ip4.octets()[0] & 240 == 240 && !ip4.is_broadcast()) // is_reserved()
                || ip4.is_broadcast())
        }
        IpAddr::V6(ip6) => {
            !(ip6.is_unspecified()
                || ip6.is_loopback()
                // IPv4-mapped Address (`::ffff:0:0/96`)
                || matches!(ip6.segments(), [0, 0, 0, 0, 0, 0xffff, _, _])
                // IPv4-IPv6 Translat. (`64:ff9b:1::/48`)
                || matches!(ip6.segments(), [0x64, 0xff9b, 1, _, _, _, _, _])
                // Discard-Only Address Block (`100::/64`)
                || matches!(ip6.segments(), [0x100, 0, 0, 0, _, _, _, _])
                // IETF Protocol Assignments (`2001::/23`)
                || (matches!(ip6.segments(), [0x2001, b, _, _, _, _, _, _] if b < 0x200)
                    && !(
                        // Port Control Protocol Anycast (`2001:1::1`)
                        u128::from_be_bytes(ip6.octets()) == 0x2001_0001_0000_0000_0000_0000_0000_0001
                        // Traversal Using Relays around NAT Anycast (`2001:1::2`)
                        || u128::from_be_bytes(ip6.octets()) == 0x2001_0001_0000_0000_0000_0000_0000_0002
                        // AMT (`2001:3::/32`)
                        || matches!(ip6.segments(), [0x2001, 3, _, _, _, _, _, _])
                        // AS112-v6 (`2001:4:112::/48`)
                        || matches!(ip6.segments(), [0x2001, 4, 0x112, _, _, _, _, _])
                        // ORCHIDv2 (`2001:20::/28`)
                        || matches!(ip6.segments(), [0x2001, b, _, _, _, _, _, _] if (0x20..=0x2F).contains(&b))
                    ))
                || ((ip6.segments()[0] == 0x2001) && (ip6.segments()[1] == 0xdb8)) // is_documentation()
                || ((ip6.segments()[0] & 0xfe00) == 0xfc00) // is_unique_local()
                || ((ip6.segments()[0] & 0xffc0) == 0xfe80)) // is_unicast_link_local
        }
    }
}

async fn request_url_preview(url: &str) -> Result<UrlPreviewData> {
    let client = services().globals.default_client();
    let response = client.head(url).send().await?;

    if !response
        .remote_addr()
        .map_or(false, |a| url_request_allowed(&a.ip()))
    {
        return Err(Error::BadRequest(
            ErrorKind::Forbidden,
            "Requesting from this address is forbidden",
        ));
    }

    let content_type = match response
        .headers()
        .get(reqwest::header::CONTENT_TYPE)
        .and_then(|x| x.to_str().ok())
    {
        Some(ct) => ct,
        None => {
            return Err(Error::BadRequest(
                ErrorKind::Unknown,
                "Unknown Content-Type",
            ))
        }
    };
    let data = match content_type {
        html if html.starts_with("text/html") => download_html(&client, url).await?,
        img if img.starts_with("image/") => download_image(&client, url).await?,
        _ => {
            return Err(Error::BadRequest(
                ErrorKind::Unknown,
                "Unsupported Content-Type",
            ))
        }
    };

    services().media.set_url_preview(url, &data).await?;

    Ok(data)
}

async fn get_url_preview(url: &str) -> Result<UrlPreviewData> {
    if let Some(preview) = services().media.get_url_preview(url).await {
        return Ok(preview);
    }

    // ensure that only one request is made per URL
    let mutex_request = Arc::clone(
        services()
            .media
            .url_preview_mutex
            .write()
            .unwrap()
            .entry(url.to_owned())
            .or_default(),
    );
    let _request_lock = mutex_request.lock().await;

    match services().media.get_url_preview(url).await {
        Some(preview) => Ok(preview),
        None => request_url_preview(url).await,
    }
}

fn url_preview_allowed(url_str: &str) -> bool {
    let url: Url = match Url::parse(url_str) {
        Ok(u) => u,
        Err(_) => return false,
    };

    if ["http", "https"]
        .iter()
        .all(|&scheme| scheme != url.scheme().to_lowercase())
    {
        debug!("Ignoring non-HTTP/HTTPS URL to preview: {}", url);
        return false;
    }

    let host = match url.host_str() {
        None => {
            debug!(
                "Ignoring URL preview for a URL that does not have a host (?): {}",
                url
            );
            return false;
        }
        Some(h) => h.to_owned(),
    };

    let allowlist_domain_contains = services().globals.url_preview_domain_contains_allowlist();
    let allowlist_domain_explicit = services().globals.url_preview_domain_explicit_allowlist();
    let allowlist_url_contains = services().globals.url_preview_url_contains_allowlist();

    if allowlist_domain_contains.contains(&"*".to_owned())
        || allowlist_domain_explicit.contains(&"*".to_owned())
        || allowlist_url_contains.contains(&"*".to_owned())
    {
        debug!(
            "Config key contains * which is allowing all URL previews. Allowing URL {}",
            url
        );
        return true;
    }

    if !host.is_empty() {
        if allowlist_domain_explicit.contains(&host) {
            return true;
        }
        debug!(
            "Host {} is allowed by url_preview_domain_explicit_allowlist (check 1/3)",
            &host
        );

        if allowlist_domain_contains
            .iter()
            .any(|domain_s| domain_s.contains(&host.clone()))
        {
            return true;
        }
        debug!(
            "Host {} is allowed by url_preview_domain_contains_allowlist (check 2/3)",
            &host
        );

        if allowlist_url_contains
            .iter()
            .any(|url_s| url_s.contains(&url.to_string()))
        {
            return true;
        }
        debug!(
            "URL {} is allowed by url_preview_url_contains_allowlist (check 3/3)",
            &host
        );
    }

    false
}

@@ -134,6 +134,15 @@ pub struct Config {
    #[serde(default = "default_ip_range_denylist")]
    pub ip_range_denylist: Vec<String>,

    #[serde(default = "Vec::new")]
    pub url_preview_domain_contains_allowlist: Vec<String>,

    #[serde(default = "Vec::new")]
    pub url_preview_domain_explicit_allowlist: Vec<String>,

    #[serde(default = "Vec::new")]
    pub url_preview_url_contains_allowlist: Vec<String>,

    #[serde(default = "RegexSet::empty")]
    #[serde(with = "serde_regex")]
    pub forbidden_room_names: RegexSet,
@@ -349,6 +358,18 @@ impl fmt::Display for Config {
            ("Forbidden room names", {
                &self.forbidden_room_names.patterns().iter().join(", ")
            }),
            (
                "URL preview domain contains allowlist",
                &self.url_preview_domain_contains_allowlist.join(", "),
            ),
            (
                "URL preview domain explicit allowlist",
                &self.url_preview_domain_explicit_allowlist.join(", "),
            ),
            (
                "URL preview URL contains allowlist",
                &self.url_preview_url_contains_allowlist.join(", "),
            ),
        ];

        let mut msg: String = "Active config values:\n\n".to_owned();

@@ -1,6 +1,10 @@
use ruma::api::client::error::ErrorKind;

use crate::{database::KeyValueDatabase, service, utils, Error, Result};
use crate::{
    database::KeyValueDatabase,
    service::{self, media::UrlPreviewData},
    utils, Error, Result,
};

impl service::media::Data for KeyValueDatabase {
    fn create_file_metadata(
@@ -79,4 +83,112 @@ impl service::media::Data for KeyValueDatabase {
        };
        Ok((content_disposition, content_type, key))
    }

    fn remove_url_preview(&self, url: &str) -> Result<()> {
        self.url_previews.remove(url.as_bytes())
    }

    fn set_url_preview(
        &self,
        url: &str,
        data: &UrlPreviewData,
        timestamp: std::time::Duration,
    ) -> Result<()> {
        let mut value = Vec::<u8>::new();
        value.extend_from_slice(&timestamp.as_secs().to_be_bytes());
        value.push(0xff);
        value.extend_from_slice(
            data.title
                .as_ref()
                .map(|t| t.as_bytes())
                .unwrap_or_default(),
        );
        value.push(0xff);
        value.extend_from_slice(
            data.description
                .as_ref()
                .map(|d| d.as_bytes())
                .unwrap_or_default(),
        );
        value.push(0xff);
        value.extend_from_slice(
            data.image
                .as_ref()
                .map(|i| i.as_bytes())
                .unwrap_or_default(),
        );
        value.push(0xff);
        value.extend_from_slice(&data.image_size.unwrap_or(0).to_be_bytes());
        value.push(0xff);
        value.extend_from_slice(&data.image_width.unwrap_or(0).to_be_bytes());
        value.push(0xff);
        value.extend_from_slice(&data.image_height.unwrap_or(0).to_be_bytes());

        self.url_previews.insert(url.as_bytes(), &value)
    }

    fn get_url_preview(&self, url: &str) -> Option<UrlPreviewData> {
        let values = self.url_previews.get(url.as_bytes()).ok()??;

        let mut values = values.split(|&b| b == 0xff);

        let _ts = match values
            .next()
            .map(|b| u64::from_be_bytes(b.try_into().expect("valid BE array")))
        {
            Some(0) => None,
            x => x,
        };
        let title = match values
            .next()
            .and_then(|b| String::from_utf8(b.to_vec()).ok())
        {
            Some(s) if s.is_empty() => None,
            x => x,
        };
        let description = match values
            .next()
            .and_then(|b| String::from_utf8(b.to_vec()).ok())
        {
            Some(s) if s.is_empty() => None,
            x => x,
        };
        let image = match values
            .next()
            .and_then(|b| String::from_utf8(b.to_vec()).ok())
        {
            Some(s) if s.is_empty() => None,
            x => x,
        };
        let image_size = match values
            .next()
            .map(|b| usize::from_be_bytes(b.try_into().expect("valid BE array")))
        {
            Some(0) => None,
            x => x,
        };
        let image_width = match values
            .next()
            .map(|b| u32::from_be_bytes(b.try_into().expect("valid BE array")))
        {
            Some(0) => None,
            x => x,
        };
        let image_height = match values
            .next()
            .map(|b| u32::from_be_bytes(b.try_into().expect("valid BE array")))
        {
            Some(0) => None,
            x => x,
        };

        Some(UrlPreviewData {
            title,
            description,
            image,
            image_size,
            image_width,
            image_height,
        })
    }
}

@@ -147,6 +147,7 @@ pub struct KeyValueDatabase {

    //pub media: media::Media,
    pub(super) mediaid_file: Arc<dyn KvTree>, // MediaId = MXC + WidthHeight + ContentDisposition + ContentType
    pub(super) url_previews: Arc<dyn KvTree>,
    //pub key_backups: key_backups::KeyBackups,
    pub(super) backupid_algorithm: Arc<dyn KvTree>, // BackupId = UserId + Version(Count)
    pub(super) backupid_etag: Arc<dyn KvTree>, // BackupId = UserId + Version(Count)
@@ -350,6 +351,7 @@ impl KeyValueDatabase {
            roomuserdataid_accountdata: builder.open_tree("roomuserdataid_accountdata")?,
            roomusertype_roomuserdataid: builder.open_tree("roomusertype_roomuserdataid")?,
            mediaid_file: builder.open_tree("mediaid_file")?,
            url_previews: builder.open_tree("url_previews")?,
            backupid_algorithm: builder.open_tree("backupid_algorithm")?,
            backupid_etag: builder.open_tree("backupid_etag")?,
            backupkeyid_backup: builder.open_tree("backupkeyid_backup")?,

src/main.rs
@@ -148,8 +148,11 @@ async fn main() {
        error!(?error, "The database couldn't be loaded or created");
        return;
    };

    let config = &services().globals.config;

    /* ad-hoc config validation/checks */

    // check if user specified valid IP CIDR ranges on startup
    for cidr in services().globals.ip_range_denylist() {
        let _ = ipaddress::IPAddress::parse(cidr)
@@ -179,6 +182,27 @@ async fn main() {
        warn!("! Outgoing federated presence is not spec compliant due to relying on PDUs and EDUs combined.\nOutgoing presence will not be very reliable due to this and any issues with federated outgoing presence are very likely attributed to this issue.\nIncoming presence and local presence are unaffected.");
    }

    if config
        .url_preview_domain_contains_allowlist
        .contains(&"*".to_owned())
    {
        warn!("All URLs are allowed for URL previews via setting \"url_preview_domain_contains_allowlist\" to \"*\". This opens up significant attack surface to your server. You are expected to be aware of the risks by doing this.");
    }
    if config
        .url_preview_domain_explicit_allowlist
        .contains(&"*".to_owned())
    {
        warn!("All URLs are allowed for URL previews via setting \"url_preview_domain_explicit_allowlist\" to \"*\". This opens up significant attack surface to your server. You are expected to be aware of the risks by doing this.");
    }
    if config
        .url_preview_url_contains_allowlist
        .contains(&"*".to_owned())
    {
        warn!("All URLs are allowed for URL previews via setting \"url_preview_url_contains_allowlist\" to \"*\". This opens up significant attack surface to your server. You are expected to be aware of the risks by doing this.");
    }

    /* end ad-hoc config validation/checks */

    info!("Starting server");
    if let Err(e) = run_server().await {
        error!("Critical error running server: {}", e);
@@ -464,6 +488,7 @@ fn routes() -> Router {
        .ruma_route(client_server::turn_server_route)
        .ruma_route(client_server::send_event_to_device_route)
        .ruma_route(client_server::get_media_config_route)
        .ruma_route(client_server::get_media_preview_route)
        .ruma_route(client_server::create_content_route)
        .ruma_route(client_server::get_content_route)
        .ruma_route(client_server::get_content_as_filename_route)

@@ -390,6 +390,18 @@ impl Service<'_> {
        &self.config.emergency_password
    }

    pub fn url_preview_domain_contains_allowlist(&self) -> &Vec<String> {
        &self.config.url_preview_domain_contains_allowlist
    }

    pub fn url_preview_domain_explicit_allowlist(&self) -> &Vec<String> {
        &self.config.url_preview_domain_explicit_allowlist
    }

    pub fn url_preview_url_contains_allowlist(&self) -> &Vec<String> {
        &self.config.url_preview_url_contains_allowlist
    }

    pub fn forbidden_room_names(&self) -> &RegexSet {
        &self.config.forbidden_room_names
    }

@@ -17,4 +17,15 @@ pub trait Data: Send + Sync {
        width: u32,
        height: u32,
    ) -> Result<(Option<String>, Option<String>, Vec<u8>)>;

    fn remove_url_preview(&self, url: &str) -> Result<()>;

    fn set_url_preview(
        &self,
        url: &str,
        data: &super::UrlPreviewData,
        timestamp: std::time::Duration,
    ) -> Result<()>;

    fn get_url_preview(&self, url: &str) -> Option<super::UrlPreviewData>;
}

@@ -1,7 +1,13 @@
mod data;
use std::io::Cursor;
use std::{
    collections::HashMap,
    io::Cursor,
    sync::{Arc, RwLock},
    time::SystemTime,
};

pub(crate) use data::Data;
use serde::Serialize;

use crate::{services, Result};
use image::imageops::FilterType;
@@ -9,6 +15,7 @@ use image::imageops::FilterType;
use tokio::{
    fs::File,
    io::{AsyncReadExt, AsyncWriteExt, BufReader},
    sync::Mutex,
};

pub struct FileMeta {
@@ -17,8 +24,43 @@ pub struct FileMeta {
    pub file: Vec<u8>,
}

#[derive(Serialize, Default)]
pub struct UrlPreviewData {
    #[serde(
        skip_serializing_if = "Option::is_none",
        rename(serialize = "og:title")
    )]
    pub title: Option<String>,
    #[serde(
        skip_serializing_if = "Option::is_none",
        rename(serialize = "og:description")
    )]
    pub description: Option<String>,
    #[serde(
        skip_serializing_if = "Option::is_none",
        rename(serialize = "og:image")
    )]
    pub image: Option<String>,
    #[serde(
        skip_serializing_if = "Option::is_none",
        rename(serialize = "matrix:image:size")
    )]
    pub image_size: Option<usize>,
    #[serde(
        skip_serializing_if = "Option::is_none",
        rename(serialize = "og:image:width")
    )]
    pub image_width: Option<u32>,
    #[serde(
        skip_serializing_if = "Option::is_none",
        rename(serialize = "og:image:height")
    )]
    pub image_height: Option<u32>,
}

pub struct Service {
    pub db: &'static dyn Data,
    pub url_preview_mutex: RwLock<HashMap<String, Arc<Mutex<()>>>>,
}

impl Service {
@@ -260,6 +302,22 @@ impl Service {
            Ok(None)
        }
    }

    pub async fn get_url_preview(&self, url: &str) -> Option<UrlPreviewData> {
        self.db.get_url_preview(url)
    }

    pub async fn remove_url_preview(&self, url: &str) -> Result<()> {
        // TODO: also remove the downloaded image
        self.db.remove_url_preview(url)
    }

    pub async fn set_url_preview(&self, url: &str, data: &UrlPreviewData) -> Result<()> {
        let now = SystemTime::now()
            .duration_since(SystemTime::UNIX_EPOCH)
            .expect("valid system time");
        self.db.set_url_preview(url, data, now)
    }
}

#[cfg(test)]

@@ -1,6 +1,6 @@
use std::{
    collections::{BTreeMap, HashMap},
    sync::{Arc, Mutex},
    sync::{Arc, Mutex, RwLock},
};

use lru_cache::LruCache;
@@ -114,7 +114,10 @@ impl Services<'_> {
            account_data: account_data::Service { db },
            admin: admin::Service::build(),
            key_backups: key_backups::Service { db },
            media: media::Service { db },
            media: media::Service {
                db,
                url_preview_mutex: RwLock::new(HashMap::new()),
            },
            sending: sending::Service::build(db, &config),

            globals: globals::Service::load(db, config)?,