mirror of https://gitlab.com/famedly/conduit.git synced 2025-06-27 16:35:59 +00:00

Fix lint/clippy

Steven Vergenz 2024-11-04 10:52:43 -08:00
parent 03653d6a10
commit 0e34f07d11
7 changed files with 79 additions and 84 deletions

View file

@@ -4,9 +4,10 @@
 use std::time::Duration;

 use crate::{
-    service::media::{FileMeta, UrlPreviewData},
     config::UrlPreviewPermission,
-    services, utils, Error, Result, Ruma};
+    service::media::{FileMeta, UrlPreviewData},
+    services, utils, Error, Result, Ruma,
+};
 use hickory_resolver::error::ResolveErrorKind;
 use http::header::{CONTENT_DISPOSITION, CONTENT_TYPE};
 use ruma::{
@@ -16,9 +17,7 @@ use ruma::{
                 get_content, get_content_as_filename, get_content_thumbnail, get_media_config,
             },
             error::ErrorKind,
-            media::{
-                self, create_content, get_media_preview,
-            },
+            media::{self, create_content, get_media_preview},
         },
         federation::authenticated_media::{self as federation_media, FileOrLocation},
     },
@@ -27,12 +26,10 @@ use ruma::{
     ServerName, UInt,
 };

-use {
-    webpage::HTML,
-    reqwest::Url,
-    std::{io::Cursor, net::IpAddr, sync::Arc},
-    image::io::Reader as ImgReader,
-};
+use image::io::Reader as ImgReader;
+use reqwest::Url;
+use std::{io::Cursor, net::IpAddr, sync::Arc};
+use webpage::HTML;

 const MXC_LENGTH: usize = 32;
@@ -58,17 +55,15 @@ pub async fn get_media_config_auth_route(
     })
 }

-async fn download_image(
-    client: &reqwest::Client,
-    url: &str,
-) -> Result<UrlPreviewData> {
+async fn download_image(client: &reqwest::Client, url: &str) -> Result<UrlPreviewData> {
     let image = client.get(url).send().await?.bytes().await?;
     let mxc = format!(
         "mxc://{}/{}",
         services().globals.server_name(),
         utils::random_string(MXC_LENGTH)
     );
-    services().media
+    services()
+        .media
         .create(mxc.clone(), None, None, &image)
         .await?;
@@ -89,10 +84,7 @@ async fn download_image(
     })
 }

-async fn download_html(
-    client: &reqwest::Client,
-    url: &str,
-) -> Result<UrlPreviewData> {
+async fn download_html(client: &reqwest::Client, url: &str) -> Result<UrlPreviewData> {
     let max_download_size = 300_000;

     let mut response = client.get(url).send().await?;
@@ -122,7 +114,11 @@ async fn download_html(
     let props = html.opengraph.properties;
     /* use OpenGraph title/description, but fall back to HTML if not available */
-    data.title = props.get("title").cloned().or(html.title).unwrap_or(String::from(url));
+    data.title = props
+        .get("title")
+        .cloned()
+        .or(html.title)
+        .unwrap_or(String::from(url));
     data.description = props.get("description").cloned().or(html.description);

     Ok(data)
 }
@@ -169,7 +165,7 @@ fn is_ip_external(addr: &IpAddr) -> bool {
             // AS112-v6 (`2001:4:112::/48`)
             || matches!(ip6.segments(), [0x2001, 4, 0x112, _, _, _, _, _])
             // ORCHIDv2 (`2001:20::/28`)
-            || matches!(ip6.segments(), [0x2001, b, _, _, _, _, _, _] if b >= 0x20 && b <= 0x2F)
+            || matches!(ip6.segments(), [0x2001, b, _, _, _, _, _, _] if (0x20..=0x2f).contains(&b))
         ))
         || ((ip6.segments()[0] == 0x2001) && (ip6.segments()[1] == 0xdb8)) // is_documentation()
         || ((ip6.segments()[0] & 0xfe00) == 0xfc00) // is_unique_local()
@@ -187,28 +183,39 @@ async fn request_url_preview(url: &Url) -> Result<UrlPreviewData> {
     let dns_resolver = services().globals.dns_resolver();
     match dns_resolver.lookup_ip(format!("{host}.")).await {
         Err(_) => {
-            return Err(Error::BadServerResponse("Failed to resolve media preview host"));
-        },
+            return Err(Error::BadServerResponse(
+                "Failed to resolve media preview host",
+            ));
+        }
         Ok(lookup) if lookup.iter().any(|ip| !is_ip_external(&ip)) => {
-            return Err(Error::BadRequest(ErrorKind::Unknown, "Requesting from this address forbidden"));
-        },
-        Ok(_) => { },
+            return Err(Error::BadRequest(
+                ErrorKind::Unknown,
+                "Requesting from this address forbidden",
+            ));
+        }
+        Ok(_) => {}
     }

     // Spamhaus API is over DNS. Query the API domain, no result = no block
     // https://docs.spamhaus.com/datasets/docs/source/70-access-methods/data-query-service/040-dqs-queries.html
     if services().globals.url_previews().use_spamhaus_denylist {
         let resolver = services().globals.dns_resolver();
-        match resolver.lookup_ip(format!("{host}.dbl.spamhaus.org.")).await {
+        match resolver
+            .lookup_ip(format!("{host}.dbl.spamhaus.org."))
+            .await
+        {
             Err(e) => {
-                if let ResolveErrorKind::NoRecordsFound { .. } = e.kind() { }
-                else {
+                if let ResolveErrorKind::NoRecordsFound { .. } = e.kind() {
+                } else {
                     tracing::log::warn!("Failed to check Spamhaus denylist: {}", e);
                 }
-            },
+            }
             Ok(_) => {
-                return Err(Error::BadRequest(ErrorKind::Unknown, "Domain fails reputation check"));
-            },
+                return Err(Error::BadRequest(
+                    ErrorKind::Unknown,
+                    "Domain fails reputation check",
+                ));
+            }
         }
     }
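The Spamhaus check reformatted above is an ordinary DNS blocklist lookup: the candidate host is queried under the dbl.spamhaus.org zone, a NoRecordsFound answer means "not listed", and any answer means the domain is denylisted. A self-contained sketch of that pattern, assuming the hickory-resolver API used in the hunk (the function name and error handling are illustrative, not the server's code):

use hickory_resolver::{error::ResolveErrorKind, TokioAsyncResolver};

async fn domain_is_denylisted(resolver: &TokioAsyncResolver, host: &str) -> Result<bool, String> {
    match resolver
        .lookup_ip(format!("{host}.dbl.spamhaus.org."))
        .await
    {
        // No DBL record: the domain is not listed.
        Err(e) if matches!(e.kind(), ResolveErrorKind::NoRecordsFound { .. }) => Ok(false),
        // Any other resolver failure is surfaced to the caller.
        Err(e) => Err(format!("Spamhaus denylist check failed: {e}")),
        // Any answer at all means the domain is on the denylist.
        Ok(_) => Ok(true),
    }
}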
@@ -239,7 +246,10 @@ async fn request_url_preview(url: &Url) -> Result<UrlPreviewData> {
         }
     };

-    services().media.set_url_preview(url.as_str(), &data).await?;
+    services()
+        .media
+        .set_url_preview(url.as_str(), &data)
+        .await?;

     Ok(data)
 }
@@ -264,7 +274,7 @@ async fn get_url_preview(url: &Url) -> Result<UrlPreviewData> {
     match services().media.get_url_preview(url.as_str()).await {
         Some(preview) => Ok(preview),
-        None => request_url_preview(url).await
+        None => request_url_preview(url).await,
     }
 }
@@ -276,10 +286,10 @@ fn url_preview_allowed(url: &Url) -> bool {
     match preview_config.default {
         UrlPreviewPermission::Forbid => {
             preview_config.exceptions.iter().any(|ex| ex.matches(&host))
-        },
+        }
         UrlPreviewPermission::Allow => {
             !preview_config.exceptions.iter().any(|ex| ex.matches(&host))
-        },
+        }
     }
 }
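The braces-only change above sits in url_preview_allowed, whose rule is: with a default of Forbid, only hosts matching an exception get previews; with a default of Allow, matching hosts are the ones refused. A simplified sketch of that decision, with plain suffix matching standing in for conduit's WildCardedDomain type:

enum UrlPreviewPermission {
    Allow,
    Forbid,
}

fn preview_allowed(default: &UrlPreviewPermission, exceptions: &[&str], host: &str) -> bool {
    // Stand-in for WildCardedDomain::matches: a bare suffix comparison.
    let matches_exception = exceptions.iter().any(|ex| host.ends_with(ex));
    match default {
        // Previews are off by default; exceptions turn them on.
        UrlPreviewPermission::Forbid => matches_exception,
        // Previews are on by default; exceptions turn them off.
        UrlPreviewPermission::Allow => !matches_exception,
    }
}

fn main() {
    assert!(preview_allowed(&UrlPreviewPermission::Forbid, &["example.org"], "media.example.org"));
    assert!(!preview_allowed(&UrlPreviewPermission::Allow, &["example.org"], "media.example.org"));
}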
@@ -291,21 +301,14 @@ pub async fn get_media_preview_route(
 ) -> Result<get_media_preview::v3::Response> {
     let url = match Url::parse(&body.url) {
         Err(_) => {
-            return Err(Error::BadRequest(
-                ErrorKind::Unknown,
-                "Not a valid URL",
-            ));
-        },
-        Ok(u)
-            if u.scheme() != "http"
-                && u.scheme() != "https"
-                || u.host().is_none()
-        => {
+            return Err(Error::BadRequest(ErrorKind::Unknown, "Not a valid URL"));
+        }
+        Ok(u) if u.scheme() != "http" && u.scheme() != "https" || u.host().is_none() => {
             return Err(Error::BadRequest(
                 ErrorKind::Unknown,
                 "Not a valid HTTP URL",
             ));
-        },
+        }
         Ok(url) => url,
     };
@@ -320,13 +323,11 @@ pub async fn get_media_preview_route(
         Ok(preview) => {
             let res = serde_json::value::to_raw_value(&preview).expect("Converting to JSON failed");
             Ok(get_media_preview::v3::Response::from_raw_value(res))
-        },
-        Err(_) => {
-            Err(Error::BadRequest(
-                ErrorKind::NotFound,
-                "Failed to find preview data",
-            ))
-        },
+        }
+        Err(_) => Err(Error::BadRequest(
+            ErrorKind::NotFound,
+            "Failed to find preview data",
+        )),
     }
 }

View file

@@ -4,11 +4,11 @@ use std::{
     net::{IpAddr, Ipv4Addr},
 };

-use wild_carded_domain::WildCardedDomain;
 use ruma::{OwnedServerName, RoomVersionId};
 use serde::{de::IgnoredAny, Deserialize};
 use tracing::warn;
 use url::Url;
+use wild_carded_domain::WildCardedDomain;

 mod proxy;
 mod wild_carded_domain;

View file

@@ -1,8 +1,8 @@
 use reqwest::{Proxy, Url};
 use serde::Deserialize;

-use crate::Result;
 use super::wild_carded_domain::WildCardedDomain;
+use crate::Result;

 /// ## Examples:
 /// - No proxy (default):

View file

@@ -1,6 +1,10 @@
 use ruma::{api::client::error::ErrorKind, http_headers::ContentDisposition};

-use crate::{database::KeyValueDatabase, service::{self, media::UrlPreviewData}, utils, Error, Result};
+use crate::{
+    database::KeyValueDatabase,
+    service::{self, media::UrlPreviewData},
+    utils, Error, Result,
+};

 impl service::media::Data for KeyValueDatabase {
     fn create_file_metadata(
@@ -73,13 +77,16 @@ impl service::media::Data for KeyValueDatabase {
         self.url_previews.remove(url.as_bytes())
     }

-    fn set_url_preview(&self, url: &str, data: &UrlPreviewData, timestamp: std::time::Duration) -> Result<()> {
+    fn set_url_preview(
+        &self,
+        url: &str,
+        data: &UrlPreviewData,
+        timestamp: std::time::Duration,
+    ) -> Result<()> {
         let mut value = Vec::<u8>::new();
         value.extend_from_slice(&timestamp.as_secs().to_be_bytes());
         value.push(0xff);
-        value.extend_from_slice(
-            data.title.as_bytes(),
-        );
+        value.extend_from_slice(data.title.as_bytes());
         value.push(0xff);
         value.extend_from_slice(
             data.description
@@ -88,9 +95,7 @@ impl service::media::Data for KeyValueDatabase {
                 .unwrap_or_default(),
         );
         value.push(0xff);
-        value.extend_from_slice(
-            data.image.as_bytes(),
-        );
+        value.extend_from_slice(data.image.as_bytes());
         value.push(0xff);
         value.extend_from_slice(&data.image_size.unwrap_or(0).to_be_bytes());
         value.push(0xff);
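The set_url_preview reformatting above leaves the stored value layout unchanged: big-endian timestamp seconds first, then each field, with 0xff as the separator between them. A simplified sketch of that encoding under the same assumptions (only a subset of the fields is shown, and the function name is hypothetical):

fn encode_preview_value(timestamp_secs: u64, title: &str, description: &str, image: &str) -> Vec<u8> {
    let mut value = Vec::<u8>::new();
    // Timestamp first, as big-endian bytes.
    value.extend_from_slice(&timestamp_secs.to_be_bytes());
    value.push(0xff);
    // Text fields follow, each terminated by the 0xff separator.
    value.extend_from_slice(title.as_bytes());
    value.push(0xff);
    value.extend_from_slice(description.as_bytes());
    value.push(0xff);
    value.extend_from_slice(image.as_bytes());
    value
}

fn main() {
    let v = encode_preview_value(1_700_000_000, "Title", "Desc", "mxc://server/id");
    // Eight timestamp bytes, then the first separator.
    assert_eq!(v[8], 0xff);
}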

View file

@@ -7,8 +7,7 @@ use ruma::{
 use crate::api::server_server::DestinationResponse;

-use crate::config::UrlPreviewConfig;
-use crate::{services, Config, Error, Result};
+use crate::{config::UrlPreviewConfig, services, Config, Error, Result};
 use futures_util::FutureExt;
 use hickory_resolver::TokioAsyncResolver;
 use hyper_util::client::legacy::connect::dns::{GaiResolver, Name as HyperName};

View file

@@ -20,10 +20,7 @@ pub trait Data: Send + Sync {
         height: u32,
     ) -> Result<(ContentDisposition, Option<String>, Vec<u8>)>;

-    fn remove_url_preview(
-        &self,
-        url: &str
-    ) -> Result<()>;
+    fn remove_url_preview(&self, url: &str) -> Result<()>;

     fn set_url_preview(
         &self,
@@ -32,8 +29,5 @@ pub trait Data: Send + Sync {
         timestamp: std::time::Duration,
     ) -> Result<()>;

-    fn get_url_preview(
-        &self,
-        url: &str
-    ) -> Option<super::UrlPreviewData>;
+    fn get_url_preview(&self, url: &str) -> Option<super::UrlPreviewData>;
 }

View file

@@ -1,7 +1,7 @@
 mod data;
 use std::{
-    io::Cursor,
     collections::HashMap,
+    io::Cursor,
     sync::{Arc, RwLock},
     time::SystemTime,
 };
@@ -12,12 +12,12 @@ use ruma::http_headers::{ContentDisposition, ContentDispositionType};
 use crate::{services, Result};
 use image::imageops::FilterType;
+use serde::Serialize;
 use tokio::{
     fs::File,
     io::{AsyncReadExt, AsyncWriteExt, BufReader},
     sync::Mutex,
 };
-use serde::Serialize;

 pub struct FileMeta {
     pub content_disposition: ContentDisposition,
@@ -27,18 +27,14 @@ pub struct FileMeta {

 #[derive(Serialize, Default)]
 pub struct UrlPreviewData {
-    #[serde(
-        rename(serialize = "og:title")
-    )]
+    #[serde(rename(serialize = "og:title"))]
     pub title: String,
     #[serde(
         skip_serializing_if = "Option::is_none",
         rename(serialize = "og:description")
     )]
     pub description: Option<String>,
-    #[serde(
-        rename(serialize = "og:image")
-    )]
+    #[serde(rename(serialize = "og:image"))]
     pub image: String,
     #[serde(
         skip_serializing_if = "Option::is_none",