diff --git a/src/background/media_metadata.rs b/src/background/media_metadata.rs
new file mode 100644
index 0000000..205011a
--- /dev/null
+++ b/src/background/media_metadata.rs
@@ -0,0 +1,93 @@
+use crate::db::{Database, FileUpload};
+use crate::filesystem::FileStore;
+use crate::processing::probe_file;
+use anyhow::Result;
+use log::{error, info, warn};
+
+pub struct MediaMetadata {
+    db: Database,
+    fs: FileStore,
+}
+
+impl MediaMetadata {
+    pub fn new(db: Database, fs: FileStore) -> Self {
+        Self { db, fs }
+    }
+
+    pub async fn process(&mut self) -> Result<()> {
+        let to_migrate = self.db.get_missing_media_metadata().await?;
+
+        info!("{} files are missing metadata", to_migrate.len());
+
+        for file in to_migrate {
+            // probe file and update metadata
+            let path = self.fs.get(&file.id);
+            if let Ok(data) = probe_file(&path) {
+                let bv = data.best_video();
+                let duration = if data.duration < 0.0 {
+                    None
+                } else {
+                    Some(data.duration)
+                };
+                let bitrate = if data.bitrate == 0 {
+                    None
+                } else {
+                    Some(data.bitrate as u32)
+                };
+                info!(
+                    "Updating metadata: id={}, dim={}x{}, dur={}, br={}",
+                    hex::encode(&file.id),
+                    bv.map(|v| v.width).unwrap_or(0),
+                    bv.map(|v| v.height).unwrap_or(0),
+                    duration.unwrap_or(0.0),
+                    bitrate.unwrap_or(0)
+                );
+                if let Err(e) = self
+                    .db
+                    .update_metadata(
+                        &file.id,
+                        bv.map(|v| v.width as u32),
+                        bv.map(|v| v.height as u32),
+                        duration,
+                        bitrate,
+                    )
+                    .await
+                {
+                    error!("Failed to update metadata: {}", e);
+                }
+            } else {
+                warn!("Skipping missing file: {}", hex::encode(&file.id));
+            }
+        }
+        Ok(())
+    }
+}
+
+impl Database {
+    pub async fn get_missing_media_metadata(&mut self) -> Result<Vec<FileUpload>> {
+        let results: Vec<FileUpload> = sqlx::query_as("select * from uploads where (width is null or height is null or bitrate is null or duration is null) and (mime_type like 'image/%' or mime_type like 'video/%')")
+            .fetch_all(&self.pool)
+            .await?;
+
+        Ok(results)
+    }
+
+    pub async fn update_metadata(
+        &mut self,
+        id: &Vec<u8>,
+        width: Option<u32>,
+        height: Option<u32>,
+        duration: Option<f32>,
+        bitrate: Option<u32>,
+    ) -> Result<()> {
+        sqlx::query("update uploads set width=?, height=?, duration=?, bitrate=? where id=?")
+            .bind(width)
+            .bind(height)
+            .bind(duration)
+            .bind(bitrate)
+            .bind(id)
+            .execute(&self.pool)
+            .await?;
+        Ok(())
+    }
+}
diff --git a/src/background/mod.rs b/src/background/mod.rs
new file mode 100644
index 0000000..39dbbef
--- /dev/null
+++ b/src/background/mod.rs
@@ -0,0 +1,24 @@
+use crate::db::Database;
+use crate::filesystem::FileStore;
+use anyhow::Result;
+use log::info;
+use tokio::task::JoinHandle;
+
+#[cfg(feature = "media-compression")]
+mod media_metadata;
+
+pub fn start_background_tasks(db: Database, file_store: FileStore) -> Vec<JoinHandle<Result<()>>> {
+    let mut ret = vec![];
+
+    #[cfg(feature = "media-compression")]
+    {
+        ret.push(tokio::spawn(async move {
+            info!("Starting MediaMetadata background task");
+            let mut m = media_metadata::MediaMetadata::new(db.clone(), file_store.clone());
+            m.process().await?;
+            info!("MediaMetadata background task completed");
+            Ok(())
+        }));
+    }
+    ret
+}
diff --git a/src/bin/main.rs b/src/bin/main.rs
index cbe29a7..add9594 100644
--- a/src/bin/main.rs
+++ b/src/bin/main.rs
@@ -12,6 +12,7 @@ use rocket::shield::Shield;
 use route96::analytics::plausible::PlausibleAnalytics;
 #[cfg(feature = "analytics")]
 use route96::analytics::AnalyticsFairing;
+use route96::background::start_background_tasks;
 use route96::cors::CORS;
 use route96::db::Database;
 use route96::filesystem::FileStore;
@@ -63,8 +64,9 @@ async fn main() -> Result<(), Error> {
         .limit("form", upload_limit);
     config.ident = Ident::try_new("route96").unwrap();
 
+    let fs = FileStore::new(settings.clone());
     let mut rocket = rocket::Rocket::custom(config)
-        .manage(FileStore::new(settings.clone()))
+        .manage(fs.clone())
         .manage(settings.clone())
         .manage(db.clone())
         .attach(CORS)
@@ -93,10 +95,19 @@ async fn main() -> Result<(), Error> {
     {
         rocket = rocket.mount("/", routes![routes::get_blob_thumb]);
     }
+
+    let jh = start_background_tasks(db, fs);
+
     if let Err(e) = rocket.launch().await {
         error!("Rocker error {}", e);
+        for j in jh {
+            let _ = j.await?;
+        }
         Err(Error::from(e))
     } else {
+        for j in jh {
+            let _ = j.await?;
+        }
         Ok(())
     }
 }
diff --git a/src/filesystem.rs b/src/filesystem.rs
index 52f7085..bd4fbc0 100644
--- a/src/filesystem.rs
+++ b/src/filesystem.rs
@@ -43,6 +43,7 @@ pub struct NewFileResult {
     pub labels: Vec<FileLabel>,
 }
 
+#[derive(Clone)]
 pub struct FileStore {
     settings: Settings,
 }
diff --git a/src/lib.rs b/src/lib.rs
index b0d1c99..0423c35 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,6 +1,7 @@
 #[cfg(feature = "analytics")]
 pub mod analytics;
 pub mod auth;
+pub mod background;
 pub mod cors;
 pub mod db;
 pub mod filesystem;
diff --git a/src/processing/mod.rs b/src/processing/mod.rs
index 6137d6f..57c747e 100644
--- a/src/processing/mod.rs
+++ b/src/processing/mod.rs
@@ -137,7 +137,7 @@ pub fn can_compress(mime_type: &str) -> bool {
 }
 
 pub fn compress_file(
-    stream: &Path,
+    path: &Path,
     mime_type: &str,
     out_dir: &Path,
 ) -> Result<NewFileProcessorResult> {
@@ -147,12 +147,12 @@ pub fn compress_file(
 
     if mime_type.starts_with("image/") {
         let mut proc = WebpProcessor::new();
-        return proc.compress(stream, mime_type, out_dir);
+        return proc.compress(path, mime_type, out_dir);
     }
     bail!("No media processor")
 }
 
-pub fn probe_file(stream: &Path) -> Result<DemuxerInfo> {
-    let mut demuxer = Demuxer::new(stream.to_str().unwrap())?;
+pub fn probe_file(path: &Path) -> Result<DemuxerInfo> {
+    let mut demuxer = Demuxer::new(path.to_str().unwrap())?;
     unsafe { demuxer.probe_input() }
 }