Mirror of https://gitlab.com/famedly/conduit.git (synced 2025-06-27 16:35:59 +00:00)
feat(media): save user id of uploader
parent 19d0ea408c
commit 3171b779c6
5 changed files with 36 additions and 4 deletions
@@ -71,6 +71,7 @@ pub async fn create_content_route(
         filename.as_deref(),
         content_type.as_deref(),
         &file,
+        body.sender_user.as_deref(),
     )
     .await?;
 
@@ -148,6 +149,7 @@ pub async fn get_remote_content(
             .and_then(|cd| cd.filename.as_deref()),
         content_response.content_type.as_deref(),
         &content_response.file,
+        None,
     )
     .await?;
 
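For the two call sites above, the only difference is where the uploader comes from: a local upload can borrow the authenticated sender, while media fetched from another homeserver has no local uploader. A minimal sketch of the `Option<OwnedUserId>` to `Option<&UserId>` conversion involved, assuming the `ruma` crate is available; `record_uploader` is an illustrative stand-in, not a function from the codebase:

use ruma::{user_id, OwnedUserId, UserId};

// Illustrative stand-in for the new `user_id: Option<&UserId>` parameter.
fn record_uploader(user_id: Option<&UserId>) {
    println!("uploader: {:?}", user_id.map(|u| u.localpart()));
}

fn main() {
    // create_content_route: body.sender_user is an Option<OwnedUserId>;
    // as_deref() borrows it as Option<&UserId>.
    let sender_user: Option<OwnedUserId> = Some(user_id!("@alice:example.org").to_owned());
    record_uploader(sender_user.as_deref());

    // get_remote_content: remote media has no local uploader, so None is passed.
    record_uploader(None);
}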
@@ -1,4 +1,4 @@
-use ruma::{api::client::error::ErrorKind, ServerName};
+use ruma::{api::client::error::ErrorKind, ServerName, UserId};
 use sha2::{digest::Output, Sha256};
 use tracing::error;
 
@@ -17,6 +17,7 @@ impl service::media::Data for KeyValueDatabase {
         media_id: &str,
         filename: Option<&str>,
         content_type: Option<&str>,
+        user_id: Option<&UserId>,
     ) -> Result<()> {
         let metadata = FilehashMetadata::new(file_size);
 
@@ -39,7 +40,26 @@
         value.push(0xff);
         value.extend_from_slice(content_type.map(|f| f.as_bytes()).unwrap_or_default());
 
-        self.servernamemediaid_metadata.insert(&key, &value)
+        self.servernamemediaid_metadata.insert(&key, &value)?;
+
+        if let Some(user_id) = user_id {
+            let mut key = servername.as_bytes().to_vec();
+            key.push(0xff);
+            key.extend_from_slice(user_id.localpart().as_bytes());
+            key.push(0xff);
+            key.extend_from_slice(media_id.as_bytes());
+
+            self.servername_userlocalpart_mediaid.insert(&key, &[])?;
+
+            let mut key = servername.as_bytes().to_vec();
+            key.push(0xff);
+            key.extend_from_slice(media_id.as_bytes());
+
+            self.servernamemediaid_userlocalpart
+                .insert(&key, user_id.localpart().as_bytes())?;
+        }
+
+        Ok(())
     }
 
     fn search_file_metadata(&self, servername: &ServerName, media_id: &str) -> Result<DbFileMeta> {
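Taken together, the two inserts above build a forward and a reverse index over the same uploader/media association, with 0xff as the field separator. Below is a self-contained sketch of that key layout and the lookups it enables, using BTreeMap<Vec<u8>, Vec<u8>> as a stand-in for the KvTree handles; the helper names (forward_key, reverse_key, uploader, uploads_of) are illustrative and not functions from the codebase:

use std::collections::BTreeMap;

// Stand-in for the two KvTree handles; keys and values are raw bytes.
type Tree = BTreeMap<Vec<u8>, Vec<u8>>;

/// Key of servername_userlocalpart_mediaid: servername 0xff localpart 0xff media_id.
fn forward_key(servername: &str, localpart: &str, media_id: &str) -> Vec<u8> {
    let mut key = servername.as_bytes().to_vec();
    key.push(0xff);
    key.extend_from_slice(localpart.as_bytes());
    key.push(0xff);
    key.extend_from_slice(media_id.as_bytes());
    key
}

/// Key of servernamemediaid_userlocalpart: servername 0xff media_id.
fn reverse_key(servername: &str, media_id: &str) -> Vec<u8> {
    let mut key = servername.as_bytes().to_vec();
    key.push(0xff);
    key.extend_from_slice(media_id.as_bytes());
    key
}

/// Look up who uploaded a media id (reverse index, value is the localpart).
fn uploader(reverse: &Tree, servername: &str, media_id: &str) -> Option<String> {
    reverse
        .get(&reverse_key(servername, media_id))
        .map(|v| String::from_utf8_lossy(v).into_owned())
}

/// List all media ids uploaded by one user (prefix scan over the forward index).
fn uploads_of(forward: &Tree, servername: &str, localpart: &str) -> Vec<String> {
    let mut prefix = servername.as_bytes().to_vec();
    prefix.push(0xff);
    prefix.extend_from_slice(localpart.as_bytes());
    prefix.push(0xff);
    forward
        .range(prefix.clone()..)
        .take_while(|(key, _)| key.starts_with(&prefix))
        .map(|(key, _)| String::from_utf8_lossy(&key[prefix.len()..]).into_owned())
        .collect()
}

fn main() {
    let (mut forward, mut reverse) = (Tree::new(), Tree::new());

    // What create_file_metadata now writes when user_id is Some(..).
    forward.insert(forward_key("example.org", "alice", "abc123"), Vec::new());
    reverse.insert(reverse_key("example.org", "abc123"), b"alice".to_vec());

    assert_eq!(uploader(&reverse, "example.org", "abc123").as_deref(), Some("alice"));
    assert_eq!(uploads_of(&forward, "example.org", "alice"), ["abc123"]);
}

The reverse index is what makes later cleanup possible: given only the servername and media id, the localpart can be recovered and the forward key rebuilt, which is the purpose noted in the servernamemediaid_userlocalpart comment in the struct hunk below.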
@@ -179,6 +179,8 @@ pub struct KeyValueDatabase {
     pub(super) servernamemediaid_metadata: Arc<dyn KvTree>, // Servername + MediaID -> content sha256 + Filename + ContentType + extra 0xff byte if media is allowed on unauthenticated endpoints
     pub(super) filehash_servername_mediaid: Arc<dyn KvTree>, // sha256 of content + Servername + MediaID, used to delete dangling references to filehashes from servernamemediaid
     pub(super) filehash_metadata: Arc<dyn KvTree>, // sha256 of content -> file size + creation time + last access time
+    pub(super) servername_userlocalpart_mediaid: Arc<dyn KvTree>, // Servername + User Localpart + MediaID
+    pub(super) servernamemediaid_userlocalpart: Arc<dyn KvTree>, // Servername + MediaID -> User Localpart, used to remove keys from above when files are deleted by unrelated means
     pub(super) thumbnailid_metadata: Arc<dyn KvTree>, // ThumbnailId = Servername + MediaID + width + height -> Filename + ContentType + extra 0xff byte if media is allowed on unauthenticated endpoints
     pub(super) filehash_thumbnailid: Arc<dyn KvTree>, // sha256 of content + "ThumbnailId", as defined above. Used to delete dangling references to filehashes from thumbnailIds
     //pub key_backups: key_backups::KeyBackups,
 
@@ -387,6 +389,10 @@ impl KeyValueDatabase {
             servernamemediaid_metadata: builder.open_tree("servernamemediaid_metadata")?,
             filehash_servername_mediaid: builder.open_tree("filehash_servername_mediaid")?,
             filehash_metadata: builder.open_tree("filehash_metadata")?,
+            servername_userlocalpart_mediaid: builder
+                .open_tree("servername_userlocalpart_mediaid")?,
+            servernamemediaid_userlocalpart: builder
+                .open_tree("servernamemediaid_userlocalpart")?,
            thumbnailid_metadata: builder.open_tree("thumbnailid_metadata")?,
             filehash_thumbnailid: builder.open_tree("filehash_thumbnailid")?,
             backupid_algorithm: builder.open_tree("backupid_algorithm")?,
@@ -1,4 +1,4 @@
-use ruma::ServerName;
+use ruma::{ServerName, UserId};
 use sha2::{digest::Output, Sha256};
 
 use crate::Result;
 
@@ -6,6 +6,7 @@ use crate::Result;
 use super::DbFileMeta;
 
 pub trait Data: Send + Sync {
+    #[allow(clippy::too_many_arguments)]
     fn create_file_metadata(
         &self,
         sha256_digest: Output<Sha256>,
 
@@ -14,6 +15,7 @@ pub trait Data: Send + Sync {
         media_id: &str,
         filename: Option<&str>,
         content_type: Option<&str>,
+        user_id: Option<&UserId>,
     ) -> Result<()>;
 
     fn search_file_metadata(&self, servername: &ServerName, media_id: &str) -> Result<DbFileMeta>;
@@ -5,7 +5,7 @@ pub use data::Data;
 use ruma::{
     api::client::{error::ErrorKind, media::is_safe_inline_content_type},
     http_headers::{ContentDisposition, ContentDispositionType},
-    ServerName,
+    ServerName, UserId,
 };
 use sha2::{digest::Output, Digest, Sha256};
 
@@ -43,6 +43,7 @@ impl Service {
         filename: Option<&str>,
         content_type: Option<&str>,
         file: &[u8],
+        user_id: Option<&UserId>,
     ) -> Result<()> {
         let (sha256_digest, sha256_hex) = generate_digests(file);
 
@@ -53,6 +54,7 @@
             media_id,
             filename,
             content_type,
+            user_id,
         )?;
 
         create_file(&sha256_hex, file).await