Compare commits

...

24 Commits

SHA1 Message Date
6998f0ffac
feat: r96util probe media before importing 2025-02-12 14:52:06 +00:00
317b0708e0
feat: r96util parallel 2025-02-12 14:24:13 +00:00
069aa30d52
feat: r96util progress 2025-02-12 13:50:59 +00:00
4dad339c09
feat: r96util import 2025-02-12 12:11:58 +00:00
f5b206dad3
fix: walkdir 2025-02-12 11:23:31 +00:00
b6bd190252
feat: r96util 2025-02-10 20:48:40 +00:00
3b4bb866ab
fix: uploaded timestamp blossom 2025-02-07 14:36:01 +00:00
c885a71295
feat: return thumbnail url in meta 2025-02-07 09:50:46 +00:00
e1fca9a604
feat: filter list by mime 2025-02-06 22:33:26 +00:00
16a14de5d6
fix: dont patch video metadata for image files (always empty) 2025-02-04 13:30:22 +00:00
314d0c68af
feat: accept void-cat uuid 2025-02-04 13:09:02 +00:00
5530f39779
fix: head void_cat_redirect 2025-01-30 22:25:56 +00:00
4f40efa99c
fix: negative duration 2025-01-27 22:57:16 +00:00
ceca1904d7
fix: bump ffmpeg-rs-raw 2025-01-27 22:51:22 +00:00
2172c8557a
fix: bump ffmpeg-rs-raw 2025-01-27 22:27:32 +00:00
f3989ba244
fix: log probe error 2025-01-27 22:11:58 +00:00
9f78c1a54f
feat: backfill media metadata 2025-01-27 21:48:57 +00:00
201a3aaa49
fix: method tag for media upload 2025-01-27 21:22:06 +00:00
3ba5e7bc4c
feat: video duration / bitrate 2025-01-27 21:19:11 +00:00
5fbe40faae
feat: improve file list 2025-01-27 11:15:26 +00:00
0d8686a850
fix: thumbnail gen single frame 2025-01-27 10:14:24 +00:00
71f6f47a00
fix: release db connection 2025-01-25 23:52:42 +00:00
0bd531a21d
feat: ui render image thumbs 2025-01-25 23:28:01 +00:00
6763e53d41
feat: generate thumbnails 2025-01-25 23:22:39 +00:00
24 changed files with 175891 additions and 168 deletions

64
Cargo.lock generated

@ -737,6 +737,19 @@ dependencies = [
"yaml-rust2",
]
[[package]]
name = "console"
version = "0.15.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ea3c6ecd8059b57859df5c69830340ed3c41d30e3da0c1cbed90a96ac853041b"
dependencies = [
"encode_unicode",
"libc",
"once_cell",
"unicode-width",
"windows-sys 0.59.0",
]
[[package]]
name = "const-oid"
version = "0.9.6"
@ -1040,6 +1053,12 @@ dependencies = [
"serde",
]
[[package]]
name = "encode_unicode"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0"
[[package]]
name = "encoding_rs"
version = "0.8.35"
@ -1132,7 +1151,7 @@ checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
[[package]]
name = "ffmpeg-rs-raw"
version = "0.1.0"
source = "git+https://git.v0l.io/Kieran/ffmpeg-rs-raw.git?rev=de2050cec07a095bace38d3ccf9c4c4f9b03b217#de2050cec07a095bace38d3ccf9c4c4f9b03b217"
source = "git+https://git.v0l.io/Kieran/ffmpeg-rs-raw.git?rev=a63b88ef3c8f58c7c0ac57d361d06ff0bb3ed385#a63b88ef3c8f58c7c0ac57d361d06ff0bb3ed385"
dependencies = [
"anyhow",
"ffmpeg-sys-the-third",
@ -2017,6 +2036,19 @@ dependencies = [
"serde",
]
[[package]]
name = "indicatif"
version = "0.17.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "183b3088984b400f4cfac3620d5e076c84da5364016b4f49473de574b2586235"
dependencies = [
"console",
"number_prefix",
"portable-atomic",
"unicode-width",
"web-time",
]
[[package]]
name = "infer"
version = "0.16.0"
@ -2525,6 +2557,12 @@ dependencies = [
"syn",
]
[[package]]
name = "number_prefix"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3"
[[package]]
name = "object"
version = "0.36.5"
@ -2801,6 +2839,12 @@ dependencies = [
"universal-hash",
]
[[package]]
name = "portable-atomic"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "280dc24453071f1b63954171985a0b0d30058d287960968b9b2aca264c8d4ee6"
[[package]]
name = "powerfmt"
version = "0.2.0"
@ -3195,6 +3239,7 @@ dependencies = [
"ffmpeg-rs-raw",
"hex",
"http-range-header",
"indicatif",
"infer",
"libc",
"log",
@ -3213,6 +3258,7 @@ dependencies = [
"tokio",
"tokio-util",
"uuid",
"walkdir",
]
[[package]]
@ -4353,6 +4399,12 @@ version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493"
[[package]]
name = "unicode-width"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd"
[[package]]
name = "unicode-xid"
version = "0.2.6"
@ -4560,6 +4612,16 @@ dependencies = [
"wasm-bindgen",
]
[[package]]
name = "web-time"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb"
dependencies = [
"js-sys",
"wasm-bindgen",
]
[[package]]
name = "whoami"
version = "1.5.2"


@ -15,11 +15,16 @@ required-features = ["bin-void-cat-force-migrate"]
name = "route96"
path = "src/bin/main.rs"
[[bin]]
name = "r96util"
path = "src/bin/r96util.rs"
required-features = ["r96util"]
[lib]
name = "route96"
[features]
default = ["nip96", "blossom", "analytics", "ranges", "react-ui"]
default = ["nip96", "blossom", "analytics", "ranges", "react-ui", "r96util"]
media-compression = ["dep:ffmpeg-rs-raw", "dep:libc"]
labels = ["nip96", "dep:candle-core", "dep:candle-nn", "dep:candle-transformers"]
nip96 = ["media-compression"]
@ -31,6 +36,7 @@ analytics = []
void-cat-redirects = ["dep:sqlx-postgres"]
ranges = ["dep:http-range-header"]
react-ui = []
r96util = ["dep:walkdir", "dep:indicatif"]
[dependencies]
log = "0.4.21"
@ -54,8 +60,9 @@ mime2ext = "0.1.53"
infer = "0.16.0"
tokio-util = { version = "0.7.13", features = ["io", "io-util"] }
libc = { version = "0.2.153", optional = true }
ffmpeg-rs-raw = { git = "https://git.v0l.io/Kieran/ffmpeg-rs-raw.git", rev = "de2050cec07a095bace38d3ccf9c4c4f9b03b217", optional = true }
ffmpeg-rs-raw = { git = "https://git.v0l.io/Kieran/ffmpeg-rs-raw.git", rev = "a63b88ef3c8f58c7c0ac57d361d06ff0bb3ed385", optional = true }
candle-core = { git = "https://git.v0l.io/huggingface/candle.git", tag = "0.8.1", optional = true }
candle-nn = { git = "https://git.v0l.io/huggingface/candle.git", tag = "0.8.1", optional = true }
candle-transformers = { git = "https://git.v0l.io/huggingface/candle.git", tag = "0.8.1", optional = true }
@ -63,3 +70,5 @@ sqlx-postgres = { version = "0.8.2", optional = true, features = ["chrono", "uui
http-range-header = { version = "0.4.2", optional = true }
nostr-cursor = { git = "https://git.v0l.io/Kieran/nostr_backup_proc.git", branch = "main", optional = true }
regex = { version = "1.11.1", optional = true }
walkdir = { version = "2.5.0", optional = true }
indicatif = { version = "0.17.11", optional = true }
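The two new optional dependencies back the r96util feature: walkdir for directory scanning and indicatif for progress reporting. A minimal sketch (assuming indicatif 0.17; not repo code) of the progress style r96util uses further down:

```rust
use indicatif::{ProgressBar, ProgressStyle};

fn main() -> Result<(), indicatif::style::TemplateError> {
    // Same template as r96util's iter_files: spinner, position/length, status message.
    let pb = ProgressBar::new(3)
        .with_style(ProgressStyle::with_template("{spinner} [{pos}/{len}] {msg}")?);
    for f in ["a.webp", "b.mp4", "c.png"] {
        pb.inc(1);
        pb.set_message(format!("Imported: {f}"));
    }
    pb.finish_with_message("Done!");
    Ok(())
}
```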


@ -0,0 +1,4 @@
-- Add migration script here
alter table uploads
add column duration float,
add column bitrate integer unsigned;
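The migration adds two nullable columns, so existing rows read back as NULL until the backfill task fills them in. A minimal sketch (assuming a sqlx MySQL pool; not repo code) of how the new columns decode as Option values:

```rust
use sqlx::MySqlPool;

// Sketch only: duration is FLOAT NULL, bitrate is INTEGER UNSIGNED NULL.
async fn media_stats(pool: &MySqlPool, id: &[u8]) -> sqlx::Result<(Option<f32>, Option<u32>)> {
    sqlx::query_as("select duration, bitrate from uploads where id = ?")
        .bind(id)
        .fetch_one(pool)
        .await
}
```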


@ -1,7 +1,7 @@
use crate::analytics::Analytics;
use crate::settings::Settings;
use anyhow::Error;
use log::{info, warn};
use log::{debug, warn};
use nostr::serde_json;
use reqwest::ClientBuilder;
use rocket::Request;
@ -61,7 +61,7 @@ impl PlausibleAnalytics {
.send()
.await
{
Ok(_v) => info!("Sent {:?}", msg),
Ok(_v) => debug!("Sent {:?}", msg),
Err(e) => warn!("Failed to track: {}", e),
}
}


@ -0,0 +1,98 @@
use crate::db::{Database, FileUpload};
use crate::filesystem::FileStore;
use crate::processing::probe_file;
use anyhow::Result;
use log::{error, info, warn};
pub struct MediaMetadata {
db: Database,
fs: FileStore,
}
impl MediaMetadata {
pub fn new(db: Database, fs: FileStore) -> Self {
Self { db, fs }
}
pub async fn process(&mut self) -> Result<()> {
let to_migrate = self.db.get_missing_media_metadata().await?;
info!("{} files are missing metadata", to_migrate.len());
for file in to_migrate {
// probe file and update metadata
let path = self.fs.get(&file.id);
match probe_file(&path) {
Ok(data) => {
let bv = data.best_video();
let duration = if data.duration < 0.0 {
None
} else {
Some(data.duration)
};
let bitrate = if data.bitrate == 0 {
None
} else {
Some(data.bitrate as u32)
};
info!(
"Updating metadata: id={}, dim={}x{}, dur={}, br={}",
hex::encode(&file.id),
bv.map(|v| v.width).unwrap_or(0),
bv.map(|v| v.height).unwrap_or(0),
duration.unwrap_or(0.0),
bitrate.unwrap_or(0)
);
if let Err(e) = self
.db
.update_metadata(
&file.id,
bv.map(|v| v.width as u32),
bv.map(|v| v.height as u32),
duration,
bitrate,
)
.await
{
error!("Failed to update metadata: {}", e);
}
}
Err(e) => {
warn!("Skipping missing file: {}, {}", hex::encode(&file.id), e);
}
}
}
Ok(())
}
}
impl Database {
pub async fn get_missing_media_metadata(&mut self) -> Result<Vec<FileUpload>> {
let results: Vec<FileUpload> = sqlx::query_as("select * from uploads where \
(mime_type like 'image/%' and (width is null or height is null)) or \
(mime_type like 'video/%' and (width is null or height is null or bitrate is null or duration is null))")
.fetch_all(&self.pool)
.await?;
Ok(results)
}
pub async fn update_metadata(
&mut self,
id: &Vec<u8>,
width: Option<u32>,
height: Option<u32>,
duration: Option<f32>,
bitrate: Option<u32>,
) -> Result<()> {
sqlx::query("update uploads set width=?, height=?, duration=?, bitrate=? where id=?")
.bind(width)
.bind(height)
.bind(duration)
.bind(bitrate)
.bind(id)
.execute(&self.pool)
.await?;
Ok(())
}
}

24
src/background/mod.rs Normal file

@ -0,0 +1,24 @@
use crate::db::Database;
use crate::filesystem::FileStore;
use anyhow::Result;
use log::info;
use tokio::task::JoinHandle;
#[cfg(feature = "media-compression")]
mod media_metadata;
pub fn start_background_tasks(db: Database, file_store: FileStore) -> Vec<JoinHandle<Result<()>>> {
let mut ret = vec![];
#[cfg(feature = "media-compression")]
{
ret.push(tokio::spawn(async move {
info!("Starting MediaMetadata background task");
let mut m = media_metadata::MediaMetadata::new(db.clone(), file_store.clone());
m.process().await?;
info!("MediaMetadata background task completed");
Ok(())
}));
}
ret
}


@ -12,6 +12,7 @@ use rocket::shield::Shield;
use route96::analytics::plausible::PlausibleAnalytics;
#[cfg(feature = "analytics")]
use route96::analytics::AnalyticsFairing;
use route96::background::start_background_tasks;
use route96::cors::CORS;
use route96::db::Database;
use route96::filesystem::FileStore;
@ -63,15 +64,22 @@ async fn main() -> Result<(), Error> {
.limit("form", upload_limit);
config.ident = Ident::try_new("route96").unwrap();
let fs = FileStore::new(settings.clone());
let mut rocket = rocket::Rocket::custom(config)
.manage(FileStore::new(settings.clone()))
.manage(fs.clone())
.manage(settings.clone())
.manage(db.clone())
.attach(CORS)
.attach(Shield::new()) // disable
.mount(
"/",
routes![root, get_blob, head_blob, routes::void_cat_redirect],
routes![
root,
get_blob,
head_blob,
routes::void_cat_redirect,
routes::void_cat_redirect_head
],
)
.mount("/admin", routes::admin_routes());
@ -89,10 +97,23 @@ async fn main() -> Result<(), Error> {
{
rocket = rocket.mount("/", routes::nip96_routes());
}
#[cfg(feature = "media-compression")]
{
rocket = rocket.mount("/", routes![routes::get_blob_thumb]);
}
let jh = start_background_tasks(db, fs);
if let Err(e) = rocket.launch().await {
error!("Rocket error {}", e);
for j in jh {
let _ = j.await?;
}
Err(Error::from(e))
} else {
for j in jh {
let _ = j.await?;
}
Ok(())
}
}

228
src/bin/r96util.rs Normal file

@ -0,0 +1,228 @@
use anyhow::{Context, Error, Result};
use clap::{Parser, Subcommand};
use config::Config;
use indicatif::{ProgressBar, ProgressStyle};
use log::{error, info};
use route96::db::{Database, FileUpload};
use route96::filesystem::{FileStore, FileSystemResult};
use route96::processing::probe_file;
use route96::settings::Settings;
use std::future::Future;
use std::path::{Path, PathBuf};
use std::pin::Pin;
use std::sync::Arc;
use std::time::SystemTime;
use tokio::sync::Semaphore;
#[derive(Parser, Debug)]
#[command(version, about)]
struct Args {
#[arg(long)]
pub config: Option<String>,
#[clap(subcommand)]
pub command: Commands,
}
#[derive(Debug, Subcommand)]
enum Commands {
/// Check file hash matches filename / path
Check {
#[arg(long)]
delete: Option<bool>,
},
/// Import a directory into the filesystem
/// (does NOT import files into the database, use database-import command for that)
Import {
#[arg(long)]
from: PathBuf,
#[arg(long, default_missing_value = "true", num_args = 0..=1)]
probe_media: Option<bool>,
},
/// Import files from filesystem into database
DatabaseImport {
/// Don't actually import data and just print which files WOULD be imported
#[arg(long, default_missing_value = "true", num_args = 0..=1)]
dry_run: Option<bool>,
},
}
#[tokio::main]
async fn main() -> Result<(), Error> {
if std::env::var("RUST_LOG").is_err() {
std::env::set_var("RUST_LOG", "info");
}
pretty_env_logger::init();
let args: Args = Args::parse();
let builder = Config::builder()
.add_source(config::File::with_name(if let Some(ref c) = args.config {
c.as_str()
} else {
"config.yaml"
}))
.add_source(config::Environment::with_prefix("APP"))
.build()?;
let settings: Settings = builder.try_deserialize()?;
match args.command {
Commands::Check { delete } => {
info!("Checking files in: {}", settings.storage_dir);
let fs = FileStore::new(settings.clone());
iter_files(&fs.storage_dir(), 4, |entry, p| {
let p = p.clone();
Box::pin(async move {
let id = if let Some(i) = id_from_path(&entry) {
i
} else {
p.set_message(format!("Skipping invalid file: {}", &entry.display()));
return Ok(());
};
let hash = FileStore::hash_file(&entry).await?;
if hash != id {
if delete.unwrap_or(false) {
p.set_message(format!("Deleting corrupt file: {}", &entry.display()));
tokio::fs::remove_file(&entry).await?;
} else {
p.set_message(format!("File is corrupted: {}", &entry.display()));
}
}
Ok(())
})
})
.await?;
}
Commands::Import { from, probe_media } => {
let fs = FileStore::new(settings.clone());
let db = Database::new(&settings.database).await?;
db.migrate().await?;
info!("Importing from: {}", fs.storage_dir().display());
iter_files(&from, 4, |entry, p| {
let fs = fs.clone();
let p = p.clone();
Box::pin(async move {
let mime = infer::get_from_path(&entry)?
.map(|m| m.mime_type())
.unwrap_or("application/octet-stream");
// test media is not corrupt
if probe_media.unwrap_or(true)
&& (mime.starts_with("image/") || mime.starts_with("video/"))
&& probe_file(&entry).is_err()
{
p.set_message(format!("Skipping invalid media file: {}", &entry.display()));
return Ok(());
}
let file = tokio::fs::File::open(&entry).await?;
let dst = fs.put(file, mime, false).await?;
match dst {
FileSystemResult::AlreadyExists(_) => {
p.set_message(format!("Duplicate file: {}", &entry.display()));
}
FileSystemResult::NewFile(_) => {
p.set_message(format!("Imported: {}", &entry.display()));
}
}
Ok(())
})
})
.await?;
}
Commands::DatabaseImport { dry_run } => {
let fs = FileStore::new(settings.clone());
let db = Database::new(&settings.database).await?;
db.migrate().await?;
info!("Importing to DB from: {}", fs.storage_dir().display());
iter_files(&fs.storage_dir(), 4, |entry, p| {
let db = db.clone();
let p = p.clone();
Box::pin(async move {
let id = if let Some(i) = id_from_path(&entry) {
i
} else {
p.set_message(format!("Skipping invalid file: {}", &entry.display()));
return Ok(());
};
let u = db.get_file(&id).await.context("db get_file")?;
if u.is_none() {
if !dry_run.unwrap_or(false) {
p.set_message(format!("Importing file: {}", &entry.display()));
let mime = infer::get_from_path(&entry)
.context("infer")?
.map(|m| m.mime_type())
.unwrap_or("application/octet-stream")
.to_string();
let meta = entry.metadata().context("file metadata")?;
let entry = FileUpload {
id,
name: None,
size: meta.len(),
mime_type: mime,
created: meta.created().unwrap_or(SystemTime::now()).into(),
width: None,
height: None,
blur_hash: None,
alt: None,
duration: None,
bitrate: None,
};
db.add_file(&entry, None).await.context("db add_file")?;
} else {
p.set_message(format!(
"[DRY-RUN] Importing file: {}",
&entry.display()
));
}
}
Ok(())
})
})
.await?;
}
}
Ok(())
}
fn id_from_path(path: &Path) -> Option<Vec<u8>> {
hex::decode(path.file_name()?.to_str()?).ok()
}
async fn iter_files<F>(p: &Path, threads: usize, mut op: F) -> Result<()>
where
F: FnMut(PathBuf, ProgressBar) -> Pin<Box<dyn Future<Output = Result<()>> + Send>>,
{
let semaphore = Arc::new(Semaphore::new(threads));
info!("Scanning files: {}", p.display());
let entries = walkdir::WalkDir::new(p);
let dir = entries
.into_iter()
.filter_map(Result::ok)
.filter(|e| e.file_type().is_file())
.collect::<Vec<_>>();
let p = ProgressBar::new(dir.len() as u64).with_style(ProgressStyle::with_template(
"{spinner} [{pos}/{len}] {msg}",
)?);
let mut all_tasks = vec![];
for entry in dir {
let _lock = semaphore.clone().acquire_owned().await?;
p.inc(1);
let fut = op(entry.path().to_path_buf(), p.clone());
all_tasks.push(tokio::spawn(async move {
if let Err(e) = fut.await {
error!("Error processing file: {} {}", entry.path().display(), e);
}
drop(_lock);
}));
}
for task in all_tasks {
task.await?;
}
p.finish_with_message("Done!");
Ok(())
}
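The --probe-media and --dry-run switches above use clap's optional-value flag pattern: passing the switch alone yields Some(true), an explicit value is parsed, and omitting it yields None (which the importer treats as the default). A standalone sketch of the same pattern, with a hypothetical binary name; not repo code:

```rust
use clap::Parser;

#[derive(Parser, Debug)]
struct Demo {
    /// `--probe-media` => Some(true), `--probe-media false` => Some(false), omitted => None
    #[arg(long, default_missing_value = "true", num_args = 0..=1)]
    probe_media: Option<bool>,
}

fn main() {
    let args = Demo::parse();
    // Mirrors r96util's `probe_media.unwrap_or(true)`: probe by default.
    println!("probe media: {}", args.probe_media.unwrap_or(true));
}
```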


@ -103,7 +103,7 @@ async fn migrate_file(
let src_path = PathBuf::new()
.join(&args.data_path)
.join(VoidFile::map_to_path(&f.id));
let dst_path = fs.map_path(&id_vec);
let dst_path = fs.get(&id_vec);
if src_path.exists() && !dst_path.exists() {
info!(
"Copying file: {} from {} => {}",
@ -139,7 +139,9 @@ async fn migrate_file(
},
blur_hash: None,
alt: f.description.clone(),
duration: None,
bitrate: None,
};
db.add_file(&fu, uid).await?;
db.add_file(&fu, Some(uid)).await?;
Ok(())
}


@ -25,6 +25,10 @@ pub struct FileUpload {
pub blur_hash: Option<String>,
/// Alt text of the media
pub alt: Option<String>,
/// Duration of media in seconds
pub duration: Option<f32>,
/// Average bitrate in bits/s
pub bitrate: Option<u32>,
#[sqlx(skip)]
#[cfg(feature = "labels")]
@ -43,6 +47,8 @@ impl From<&NewFileResult> for FileUpload {
height: value.height,
blur_hash: value.blur_hash.clone(),
alt: None,
duration: value.duration,
bitrate: value.bitrate,
#[cfg(feature = "labels")]
labels: value.labels.clone(),
}
@ -142,10 +148,10 @@ impl Database {
.try_get(0)
}
pub async fn add_file(&self, file: &FileUpload, user_id: u64) -> Result<(), Error> {
pub async fn add_file(&self, file: &FileUpload, user_id: Option<u64>) -> Result<(), Error> {
let mut tx = self.pool.begin().await?;
let q = sqlx::query("insert ignore into \
uploads(id,name,size,mime_type,blur_hash,width,height,alt,created) values(?,?,?,?,?,?,?,?,?)")
uploads(id,name,size,mime_type,blur_hash,width,height,alt,created,duration,bitrate) values(?,?,?,?,?,?,?,?,?,?,?)")
.bind(&file.id)
.bind(&file.name)
.bind(file.size)
@ -154,13 +160,18 @@ impl Database {
.bind(file.width)
.bind(file.height)
.bind(&file.alt)
.bind(file.created);
.bind(file.created)
.bind(file.duration)
.bind(file.bitrate);
tx.execute(q).await?;
let q2 = sqlx::query("insert ignore into user_uploads(file,user_id) values(?,?)")
.bind(&file.id)
.bind(user_id);
tx.execute(q2).await?;
if let Some(user_id) = user_id {
let q2 = sqlx::query("insert ignore into user_uploads(file,user_id) values(?,?)")
.bind(&file.id)
.bind(user_id);
tx.execute(q2).await?;
}
#[cfg(feature = "labels")]
for lbl in &file.labels {


@ -1,5 +1,6 @@
#[cfg(feature = "labels")]
use crate::db::FileLabel;
use crate::processing::can_compress;
#[cfg(feature = "labels")]
use crate::processing::labeling::label_frame;
#[cfg(feature = "media-compression")]
@ -13,7 +14,7 @@ use ffmpeg_rs_raw::DemuxerInfo;
use rocket::form::validate::Contains;
use serde::Serialize;
use sha2::{Digest, Sha256};
use std::path::PathBuf;
use std::path::{Path, PathBuf};
use tokio::fs::File;
use tokio::io::{AsyncRead, AsyncReadExt};
use uuid::Uuid;
@ -36,10 +37,13 @@ pub struct NewFileResult {
pub width: Option<u32>,
pub height: Option<u32>,
pub blur_hash: Option<String>,
pub duration: Option<f32>,
pub bitrate: Option<u32>,
#[cfg(feature = "labels")]
pub labels: Vec<FileLabel>,
}
#[derive(Clone)]
pub struct FileStore {
settings: Settings,
}
@ -57,7 +61,7 @@ impl FileStore {
/// Store a new file
pub async fn put<'r, S>(
&self,
stream: S,
path: S,
mime_type: &str,
compress: bool,
) -> Result<FileSystemResult>
@ -65,7 +69,7 @@ impl FileStore {
S: AsyncRead + Unpin + 'r,
{
// store file in temp path and hash the file
let (temp_file, size, hash) = self.store_hash_temp_file(stream).await?;
let (temp_file, size, hash) = self.store_hash_temp_file(path).await?;
let dst_path = self.map_path(&hash);
// check if file hash already exists
@ -74,7 +78,7 @@ impl FileStore {
return Ok(FileSystemResult::AlreadyExists(hash));
}
let mut res = if compress {
let mut res = if compress && can_compress(mime_type) {
#[cfg(feature = "media-compression")]
{
let res = match self.compress_file(&temp_file, mime_type).await {
@ -92,7 +96,7 @@ impl FileStore {
anyhow::bail!("Compression not supported!");
}
} else {
let (width, height, mime_type) = {
let (width, height, mime_type, duration, bitrate) = {
#[cfg(feature = "media-compression")]
{
let probe = probe_file(&temp_file).ok();
@ -102,10 +106,20 @@ impl FileStore {
v_stream.map(|v| v.width as u32),
v_stream.map(|v| v.height as u32),
mime,
probe
.as_ref()
.map(|p| if p.duration < 0. { 0.0 } else { p.duration }),
probe.as_ref().map(|p| p.bitrate as u32),
)
}
#[cfg(not(feature = "media-compression"))]
(None, None, Self::infer_mime_type(mime_type, &temp_file))
(
None,
None,
Self::infer_mime_type(mime_type, &temp_file),
None,
None,
)
};
NewFileResult {
path: temp_file,
@ -115,6 +129,8 @@ impl FileStore {
width,
height,
blur_hash: None,
duration,
bitrate,
}
};
@ -194,6 +210,8 @@ impl FileStore {
height: Some(compressed_result.height as u32),
blur_hash: None,
mime_type: compressed_result.mime_type,
duration: Some(compressed_result.duration),
bitrate: Some(compressed_result.bitrate),
#[cfg(feature = "labels")]
labels,
})
@ -214,7 +232,7 @@ impl FileStore {
Ok((out_path, n, hash))
}
async fn hash_file(p: &PathBuf) -> Result<Vec<u8>, Error> {
pub async fn hash_file(p: &Path) -> Result<Vec<u8>, Error> {
let mut file = File::open(p).await?;
let mut hasher = Sha256::new();
let mut buf = [0; 4096];
@ -229,7 +247,7 @@ impl FileStore {
Ok(res.to_vec())
}
pub fn map_path(&self, id: &Vec<u8>) -> PathBuf {
fn map_path(&self, id: &Vec<u8>) -> PathBuf {
let id = hex::encode(id);
self.storage_dir().join(&id[0..2]).join(&id[2..4]).join(id)
}


@ -1,6 +1,7 @@
#[cfg(feature = "analytics")]
pub mod analytics;
pub mod auth;
pub mod background;
pub mod cors;
pub mod db;
pub mod filesystem;


@ -1,8 +1,9 @@
use std::path::{Path, PathBuf};
use anyhow::{bail, Error, Result};
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVPixelFormat::AV_PIX_FMT_YUV420P;
use ffmpeg_rs_raw::{Demuxer, DemuxerInfo, Encoder, StreamType, Transcoder};
use ffmpeg_rs_raw::ffmpeg_sys_the_third::{av_frame_free, av_packet_free, AVFrame};
use ffmpeg_rs_raw::{Decoder, Demuxer, DemuxerInfo, Encoder, Scaler, StreamType, Transcoder};
use std::path::{Path, PathBuf};
use std::ptr;
use uuid::Uuid;
#[cfg(feature = "labels")]
@ -21,7 +22,7 @@ impl WebpProcessor {
Self
}
pub fn process_file(
pub fn compress(
&mut self,
input: &Path,
mime_type: &str,
@ -65,9 +66,65 @@ impl WebpProcessor {
mime_type: "image/webp".to_string(),
width: image_stream.width,
height: image_stream.height,
duration: if probe.duration < 0. {
0.
} else {
probe.duration
},
bitrate: probe.bitrate as u32,
})
}
}
pub fn thumbnail(&mut self, input: &Path, out_path: &Path) -> Result<()> {
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVCodecID::AV_CODEC_ID_WEBP;
unsafe {
let mut input = Demuxer::new(input.to_str().unwrap())?;
let probe = input.probe_input()?;
let image_stream = probe
.streams
.iter()
.find(|c| c.stream_type == StreamType::Video)
.ok_or(Error::msg("No image found, cant compress"))?;
let w = 512u16;
let scale = w as f32 / image_stream.width as f32;
let h = (image_stream.height as f32 * scale) as u16;
let enc = Encoder::new(AV_CODEC_ID_WEBP)?
.with_height(h as i32)
.with_width(w as i32)
.with_pix_fmt(AV_PIX_FMT_YUV420P)
.with_framerate(1.0)?
.open(None)?;
let mut sws = Scaler::new();
let mut decoder = Decoder::new();
decoder.setup_decoder(image_stream, None)?;
while let Ok((mut pkt, _stream)) = input.get_packet() {
let mut frame_save: *mut AVFrame = ptr::null_mut();
for (mut frame, _stream) in decoder.decode_pkt(pkt)? {
if frame_save.is_null() {
frame_save = sws.process_frame(frame, w, h, AV_PIX_FMT_YUV420P)?;
}
av_frame_free(&mut frame);
}
av_packet_free(&mut pkt);
if !frame_save.is_null() {
enc.save_picture(frame_save, out_path.to_str().unwrap())?;
av_frame_free(&mut frame_save);
return Ok(());
}
}
Ok(())
}
}
}
pub struct NewFileProcessorResult {
@ -75,6 +132,8 @@ pub struct NewFileProcessorResult {
pub mime_type: String,
pub width: usize,
pub height: usize,
pub duration: f32,
pub bitrate: u32,
}
pub fn can_compress(mime_type: &str) -> bool {
@ -82,7 +141,7 @@ pub fn can_compress(mime_type: &str) -> bool {
}
pub fn compress_file(
stream: &Path,
path: &Path,
mime_type: &str,
out_dir: &Path,
) -> Result<NewFileProcessorResult, Error> {
@ -92,12 +151,12 @@ pub fn compress_file(
if mime_type.starts_with("image/") {
let mut proc = WebpProcessor::new();
return proc.process_file(stream, mime_type, out_dir);
return proc.compress(path, mime_type, out_dir);
}
bail!("No media processor")
}
pub fn probe_file(stream: &Path) -> Result<DemuxerInfo> {
let mut demuxer = Demuxer::new(stream.to_str().unwrap())?;
pub fn probe_file(path: &Path) -> Result<DemuxerInfo> {
let mut demuxer = Demuxer::new(path.to_str().unwrap())?;
unsafe { demuxer.probe_input() }
}


@ -1,11 +1,11 @@
use crate::auth::nip98::Nip98Auth;
use crate::db::{Database, FileUpload};
use crate::db::{Database, FileUpload, User};
use crate::routes::{Nip94Event, PagedResult};
use crate::settings::Settings;
use rocket::serde::json::Json;
use rocket::serde::Serialize;
use rocket::{routes, Responder, Route, State};
use sqlx::{Error, Row};
use sqlx::{Error, QueryBuilder, Row};
pub fn admin_routes() -> Vec<Route> {
routes![admin_list_files, admin_get_self]
@ -55,6 +55,13 @@ pub struct SelfUser {
pub total_size: u64,
}
#[derive(Serialize)]
pub struct AdminNip94File {
#[serde(flatten)]
pub inner: Nip94Event,
pub uploader: Vec<String>,
}
#[rocket::get("/self")]
async fn admin_get_self(auth: Nip98Auth, db: &State<Database>) -> AdminResponse<SelfUser> {
let pubkey_vec = auth.event.pubkey.to_bytes().to_vec();
@ -76,14 +83,15 @@ async fn admin_get_self(auth: Nip98Auth, db: &State<Database>) -> AdminResponse<
}
}
#[rocket::get("/files?<page>&<count>")]
#[rocket::get("/files?<page>&<count>&<mime_type>")]
async fn admin_list_files(
auth: Nip98Auth,
page: u32,
count: u32,
mime_type: Option<String>,
db: &State<Database>,
settings: &State<Settings>,
) -> AdminResponse<PagedResult<Nip94Event>> {
) -> AdminResponse<PagedResult<AdminNip94File>> {
let pubkey_vec = auth.event.pubkey.to_bytes().to_vec();
let server_count = count.clamp(1, 5_000);
@ -95,14 +103,20 @@ async fn admin_list_files(
if !user.is_admin {
return AdminResponse::error("User is not an admin");
}
match db.list_all_files(page * server_count, server_count).await {
match db
.list_all_files(page * server_count, server_count, mime_type)
.await
{
Ok((files, count)) => AdminResponse::success(PagedResult {
count: files.len() as u32,
page,
total: count as u32,
files: files
.iter()
.map(|f| Nip94Event::from_upload(settings, f))
.into_iter()
.map(|f| AdminNip94File {
inner: Nip94Event::from_upload(settings, &f.0),
uploader: f.1.into_iter().map(|u| hex::encode(&u.pubkey)).collect(),
})
.collect(),
}),
Err(e) => AdminResponse::error(&format!("Could not list files: {}", e)),
@ -114,21 +128,29 @@ impl Database {
&self,
offset: u32,
limit: u32,
) -> Result<(Vec<FileUpload>, i64), Error> {
let results: Vec<FileUpload> = sqlx::query_as(
"select u.* \
from uploads u \
order by u.created desc \
limit ? offset ?",
)
.bind(limit)
.bind(offset)
.fetch_all(&self.pool)
.await?;
mime_type: Option<String>,
) -> Result<(Vec<(FileUpload, Vec<User>)>, i64), Error> {
let mut q = QueryBuilder::new("select u.* from uploads u ");
if let Some(m) = mime_type {
q.push("where u.mime_type = ");
q.push_bind(m);
}
q.push(" order by u.created desc limit ");
q.push_bind(limit);
q.push(" offset ");
q.push_bind(offset);
let results: Vec<FileUpload> = q.build_query_as().fetch_all(&self.pool).await?;
let count: i64 = sqlx::query("select count(u.id) from uploads u")
.fetch_one(&self.pool)
.await?
.try_get(0)?;
Ok((results, count))
let mut res = Vec::with_capacity(results.len());
for upload in results.into_iter() {
let upd = self.get_file_owners(&upload.id).await?;
res.push((upload, upd));
}
Ok((res, count))
}
}
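The rewritten list_all_files builds its SQL with sqlx::QueryBuilder so the optional mime_type filter stays a bound parameter (push_bind) rather than being formatted into the query string. A minimal standalone sketch of that pattern, assuming a MySQL pool; not repo code:

```rust
use sqlx::{MySqlPool, QueryBuilder};

// Sketch only: dynamic WHERE clause with bound parameters.
async fn list_ids(
    pool: &MySqlPool,
    mime: Option<String>,
    limit: i64,
    offset: i64,
) -> sqlx::Result<Vec<(Vec<u8>,)>> {
    let mut q = QueryBuilder::new("select u.id from uploads u ");
    if let Some(m) = mime {
        q.push("where u.mime_type = ");
        q.push_bind(m); // bound, never interpolated into the SQL text
    }
    q.push(" order by u.created desc limit ");
    q.push_bind(limit);
    q.push(" offset ");
    q.push_bind(offset);
    q.build_query_as().fetch_all(pool).await
}
```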


@ -25,7 +25,7 @@ pub struct BlobDescriptor {
pub size: u64,
#[serde(rename = "type", skip_serializing_if = "Option::is_none")]
pub mime_type: Option<String>,
pub created: u64,
pub uploaded: u64,
#[serde(rename = "nip94", skip_serializing_if = "Option::is_none")]
pub nip94: Option<HashMap<String, String>>,
}
@ -45,7 +45,7 @@ impl BlobDescriptor {
sha256: id_hex,
size: value.size,
mime_type: Some(value.mime_type.clone()),
created: value.created.timestamp() as u64,
uploaded: value.created.timestamp() as u64,
nip94: Some(
Nip94Event::from_upload(settings, value)
.tags
@ -415,7 +415,7 @@ where
return BlossomResponse::error(format!("Failed to save file (db): {}", e));
}
};
if let Err(e) = db.add_file(&upload, user_id).await {
if let Err(e) = db.add_file(&upload, Some(user_id)).await {
error!("{}", e.to_string());
BlossomResponse::error(format!("Error saving file (db): {}", e))
} else {

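With the rename of created to uploaded, Blossom blob descriptors now report the upload time (unix seconds) under the uploaded key; clients still reading created will no longer find it. A hedged sketch of the serialized shape, limited to the fields visible in this diff and using illustrative values only; not repo code:

```rust
use serde::Serialize;

// Sketch only: real descriptors also carry other fields (e.g. the optional nip94 map).
#[derive(Serialize)]
struct DescriptorSample {
    sha256: String,
    size: u64,
    #[serde(rename = "type")]
    mime_type: Option<String>,
    uploaded: u64, // previously serialized as "created"
}

fn main() {
    let d = DescriptorSample {
        sha256: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855".into(),
        size: 1024,
        mime_type: Some("image/webp".into()),
        uploaded: 1_738_900_000,
    };
    println!("{}", serde_json::to_string_pretty(&d).unwrap());
}
```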

@ -1,5 +1,7 @@
use crate::db::{Database, FileUpload};
use crate::filesystem::FileStore;
#[cfg(feature = "media-compression")]
use crate::processing::WebpProcessor;
pub use crate::routes::admin::admin_routes;
#[cfg(feature = "blossom")]
pub use crate::routes::blossom::blossom_routes;
@ -16,6 +18,7 @@ use rocket::http::{ContentType, Header, Status};
use rocket::response::Responder;
use rocket::serde::Serialize;
use rocket::{Request, Response, State};
use std::env::temp_dir;
use std::io::SeekFrom;
use std::ops::Range;
use std::pin::{pin, Pin};
@ -56,28 +59,40 @@ struct PagedResult<T> {
impl Nip94Event {
pub fn from_upload(settings: &Settings, upload: &FileUpload) -> Self {
let hex_id = hex::encode(&upload.id);
let ext = if upload.mime_type != "application/octet-stream" {
mime2ext::mime2ext(&upload.mime_type)
} else {
None
};
let mut tags = vec![
vec![
"url".to_string(),
format!(
"{}/{}{}",
&settings.public_url,
&hex_id,
mime2ext::mime2ext(&upload.mime_type)
.map(|m| format!(".{m}"))
.unwrap_or("".to_string())
),
format!("{}/{}.{}", &settings.public_url, &hex_id, ext.unwrap_or("")),
],
vec!["x".to_string(), hex_id],
vec!["x".to_string(), hex_id.clone()],
vec!["m".to_string(), upload.mime_type.clone()],
vec!["size".to_string(), upload.size.to_string()],
];
if upload.mime_type.starts_with("image/") || upload.mime_type.starts_with("video/") {
tags.push(vec![
"thumb".to_string(),
format!("{}/thumb/{}.webp", &settings.public_url, &hex_id),
]);
}
if let Some(bh) = &upload.blur_hash {
tags.push(vec!["blurhash".to_string(), bh.clone()]);
}
if let (Some(w), Some(h)) = (upload.width, upload.height) {
tags.push(vec!["dim".to_string(), format!("{}x{}", w, h)])
}
if let Some(d) = &upload.duration {
tags.push(vec!["duration".to_string(), d.to_string()]);
}
if let Some(b) = &upload.bitrate {
tags.push(vec!["bitrate".to_string(), b.to_string()]);
}
#[cfg(feature = "labels")]
for l in &upload.labels {
let val = if l.label.contains(',') {
@ -170,49 +185,61 @@ impl AsyncRead for RangeBody {
impl<'r> Responder<'r, 'static> for FilePayload {
fn respond_to(self, request: &'r Request<'_>) -> rocket::response::Result<'static> {
let mut response = Response::new();
response.set_header(Header::new("cache-control", "max-age=31536000, immutable"));
// handle ranges
#[cfg(feature = "ranges")]
{
response.set_header(Header::new("accept-ranges", "bytes"));
if let Some(r) = request.headers().get("range").next() {
if let Ok(ranges) = parse_range_header(r) {
if ranges.ranges.len() > 1 {
warn!("Multipart ranges are not supported, fallback to non-range request");
response.set_streamed_body(self.file);
} else {
const MAX_UNBOUNDED_RANGE: u64 = 1024 * 1024;
let single_range = ranges.ranges.first().unwrap();
let range_start = match single_range.start {
StartPosition::Index(i) => i,
StartPosition::FromLast(i) => self.info.size - i,
};
let range_end = match single_range.end {
EndPosition::Index(i) => i,
EndPosition::LastByte => {
(range_start + MAX_UNBOUNDED_RANGE).min(self.info.size)
}
};
let r_len = range_end - range_start;
let r_body = RangeBody::new(self.file, range_start..range_end);
response.set_status(Status::PartialContent);
response.set_header(Header::new("content-length", r_len.to_string()));
response.set_header(Header::new(
"content-range",
format!("bytes {}-{}/{}", range_start, range_end - 1, self.info.size),
));
response.set_streamed_body(Box::pin(r_body));
}
}
const MAX_UNBOUNDED_RANGE: u64 = 1024 * 1024;
// only use range response for files > 1MiB
if self.info.size < MAX_UNBOUNDED_RANGE {
response.set_sized_body(None, self.file);
} else {
response.set_streamed_body(self.file);
response.set_header(Header::new("accept-ranges", "bytes"));
if let Some(r) = request.headers().get("range").next() {
if let Ok(ranges) = parse_range_header(r) {
if ranges.ranges.len() > 1 {
warn!(
"Multipart ranges are not supported, fallback to non-range request"
);
response.set_streamed_body(self.file);
} else {
let single_range = ranges.ranges.first().unwrap();
let range_start = match single_range.start {
StartPosition::Index(i) => i,
StartPosition::FromLast(i) => self.info.size - i,
};
let range_end = match single_range.end {
EndPosition::Index(i) => i,
EndPosition::LastByte => {
(range_start + MAX_UNBOUNDED_RANGE).min(self.info.size)
}
};
let r_len = range_end - range_start;
let r_body = RangeBody::new(self.file, range_start..range_end);
response.set_status(Status::PartialContent);
response.set_header(Header::new("content-length", r_len.to_string()));
response.set_header(Header::new(
"content-range",
format!(
"bytes {}-{}/{}",
range_start,
range_end - 1,
self.info.size
),
));
response.set_streamed_body(Box::pin(r_body));
}
}
} else {
response.set_sized_body(None, self.file);
}
}
}
#[cfg(not(feature = "ranges"))]
{
response.set_streamed_body(self.file);
response.set_header(Header::new("content-length", self.info.size.to_string()));
response.set_sized_body(None, self.file);
}
if let Ok(ct) = ContentType::from_str(&self.info.mime_type) {
@ -352,6 +379,64 @@ pub async fn head_blob(sha256: &str, fs: &State<FileStore>) -> Status {
}
}
/// Generate thumbnail for image / video
#[cfg(feature = "media-compression")]
#[rocket::get("/thumb/<sha256>")]
pub async fn get_blob_thumb(
sha256: &str,
fs: &State<FileStore>,
db: &State<Database>,
) -> Result<FilePayload, Status> {
let sha256 = if sha256.contains(".") {
sha256.split('.').next().unwrap()
} else {
sha256
};
let id = if let Ok(i) = hex::decode(sha256) {
i
} else {
return Err(Status::NotFound);
};
if id.len() != 32 {
return Err(Status::NotFound);
}
let info = if let Ok(Some(info)) = db.get_file(&id).await {
info
} else {
return Err(Status::NotFound);
};
if !(info.mime_type.starts_with("image/") || info.mime_type.starts_with("video/")) {
return Err(Status::NotFound);
}
let file_path = fs.get(&id);
let mut thumb_file = temp_dir().join(format!("thumb_{}", sha256));
thumb_file.set_extension("webp");
if !thumb_file.exists() {
let mut p = WebpProcessor::new();
if p.thumbnail(&file_path, &thumb_file).is_err() {
return Err(Status::InternalServerError);
}
};
if let Ok(f) = File::open(&thumb_file).await {
Ok(FilePayload {
file: f,
info: FileUpload {
size: thumb_file.metadata().unwrap().len(),
mime_type: "image/webp".to_string(),
..info
},
})
} else {
Err(Status::NotFound)
}
}
/// Legacy URL redirect for void.cat uploads
#[rocket::get("/d/<id>")]
pub async fn void_cat_redirect(id: &str, settings: &State<Settings>) -> Option<NamedFile> {
@ -361,10 +446,15 @@ pub async fn void_cat_redirect(id: &str, settings: &State<Settings>) -> Option<N
id
};
if let Some(base) = &settings.void_cat_files {
let uuid =
uuid::Uuid::from_slice_le(nostr::bitcoin::base58::decode(id).unwrap().as_slice())
.unwrap();
let f = base.join(VoidFile::map_to_path(&uuid));
let uuid = if let Ok(b58) = nostr::bitcoin::base58::decode(id) {
uuid::Uuid::from_slice_le(b58.as_slice())
} else {
uuid::Uuid::parse_str(id)
};
if uuid.is_err() {
return None;
}
let f = base.join(VoidFile::map_to_path(&uuid.unwrap()));
debug!("Legacy file map: {} => {}", id, f.display());
if let Ok(f) = NamedFile::open(f).await {
Some(f)
@ -375,3 +465,24 @@ pub async fn void_cat_redirect(id: &str, settings: &State<Settings>) -> Option<N
None
}
}
#[rocket::head("/d/<id>")]
pub async fn void_cat_redirect_head(id: &str) -> VoidCatFile {
let id = if id.contains(".") {
id.split('.').next().unwrap()
} else {
id
};
let uuid =
uuid::Uuid::from_slice_le(nostr::bitcoin::base58::decode(id).unwrap().as_slice()).unwrap();
VoidCatFile {
status: Status::Ok,
uuid: Header::new("X-UUID", uuid.to_string()),
}
}
#[derive(Responder)]
pub struct VoidCatFile {
pub status: Status,
pub uuid: Header<'static>,
}
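The new /thumb/<sha256> route (advertised via the thumb NIP-94 tag above) serves a cached WebP thumbnail for image and video uploads and returns 404 for unknown ids or non-media types. A minimal client sketch, assuming a hypothetical server URL and reqwest; not repo code:

```rust
#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    // Illustrative values: substitute a real public_url and file hash.
    let sha256 = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855";
    let url = format!("https://files.example.com/thumb/{sha256}.webp");
    let resp = reqwest::get(&url).await?;
    println!("{} {:?}", resp.status(), resp.headers().get("content-type"));
    Ok(())
}
```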


@ -232,7 +232,7 @@ async fn upload(
Err(e) => return Nip96Response::error(&format!("Could not save user: {}", e)),
};
if let Err(e) = db.add_file(&upload, user_id).await {
if let Err(e) = db.add_file(&upload, Some(user_id)).await {
error!("{}", e.to_string());
return Nip96Response::error(&format!("Could not save file (db): {}", e));
}


@ -2,19 +2,32 @@ import { hexToBech32 } from "@snort/shared";
import { NostrLink } from "@snort/system";
import { useUserProfile } from "@snort/system-react";
export default function Profile({ link }: { link: NostrLink }) {
export default function Profile({
link,
size,
showName,
}: {
link: NostrLink;
size?: number;
showName?: boolean;
}) {
const profile = useUserProfile(link.id);
const s = size ?? 40;
return (
<div className="flex gap-2 items-center">
<img
src={profile?.picture}
className="rounded-full w-12 h-12 object-fit object-center"
width={s}
height={s}
className="rounded-full object-fit object-center"
/>
<div>
{profile?.display_name ??
profile?.name ??
hexToBech32("npub", link.id).slice(0, 12)}
</div>
{(showName ?? true) && (
<div>
{profile?.display_name ??
profile?.name ??
hexToBech32("npub", link.id).slice(0, 12)}
</div>
)}
</div>
);
}


@ -8,5 +8,5 @@ body {
}
hr {
@apply border-neutral-500
@apply border-neutral-500;
}

File diff suppressed because one or more lines are too long


@ -2,7 +2,11 @@ import { base64 } from "@scure/base";
import { throwIfOffline } from "@snort/shared";
import { EventKind, EventPublisher, NostrEvent } from "@snort/system";
export interface AdminSelf { is_admin: boolean, file_count: number, total_size: number }
export interface AdminSelf {
is_admin: boolean;
file_count: number;
total_size: number;
}
export class Route96 {
constructor(
@ -14,14 +18,13 @@ export class Route96 {
async getSelf() {
const rsp = await this.#req("admin/self", "GET");
const data =
await this.#handleResponse<AdminResponse<AdminSelf>>(rsp);
const data = await this.#handleResponse<AdminResponse<AdminSelf>>(rsp);
return data;
}
async listFiles(page = 0, count = 10) {
async listFiles(page = 0, count = 10, mime: string | undefined) {
const rsp = await this.#req(
`admin/files?page=${page}&count=${count}`,
`admin/files?page=${page}&count=${count}${mime ? `&mime_type=${mime}` : ""}`,
"GET",
);
const data = await this.#handleResponse<AdminResponseFileList>(rsp);


@ -41,7 +41,7 @@ export class Blossom {
);
const tags = [["x", bytesToString("hex", new Uint8Array(hash))]];
const rsp = await this.#req("media", "PUT", "upload", file, tags);
const rsp = await this.#req("media", "PUT", "media", file, tags);
if (rsp.ok) {
return (await rsp.json()) as BlobDescriptor;
} else {
@ -51,9 +51,16 @@ export class Blossom {
}
async mirror(url: string) {
const rsp = await this.#req("mirror", "PUT", "mirror", JSON.stringify({ url }), undefined, {
"content-type": "application/json"
});
const rsp = await this.#req(
"mirror",
"PUT",
"mirror",
JSON.stringify({ url }),
undefined,
{
"content-type": "application/json",
},
);
if (rsp.ok) {
return (await rsp.json()) as BlobDescriptor;
} else {


@ -1,7 +1,8 @@
import { NostrEvent } from "@snort/system";
import { NostrEvent, NostrLink } from "@snort/system";
import { useState } from "react";
import { FormatBytes } from "../const";
import classNames from "classnames";
import Profile from "../components/profile";
interface FileInfo {
id: string;
@ -9,6 +10,7 @@ interface FileInfo {
name?: string;
type?: string;
size?: number;
uploader?: Array<string>;
}
export default function FileList({
@ -30,15 +32,17 @@ export default function FileList({
}
function renderInner(f: FileInfo) {
if (f.type?.startsWith("image/") || !f.type) {
if (
f.type?.startsWith("image/") ||
f.type?.startsWith("video/") ||
!f.type
) {
return (
<img src={f.url} className="w-full h-full object-contain object-center" loading="lazy" />
);
} else if (f.type?.startsWith("video/")) {
return (
<div className="w-full h-full flex items-center justify-center">
Video
</div>
<img
src={f.url.replace(`/${f.id}`, `/thumb/${f.id}`)}
className="w-full h-full object-contain object-center"
loading="lazy"
/>
);
}
}
@ -54,6 +58,7 @@ export default function FileList({
name: f.content,
type: f.tags.find((a) => a[0] === "m")?.at(1),
size: Number(f.tags.find((a) => a[0] === "size")?.at(1)),
uploader: "uploader" in f ? (f.uploader as Array<string>) : undefined,
};
} else {
return {
@ -74,12 +79,14 @@ export default function FileList({
ret.push(
<div
onClick={() => onPage?.(x)}
className={classNames("bg-neutral-700 hover:bg-neutral-600 min-w-8 text-center cursor-pointer font-bold",
className={classNames(
"bg-neutral-700 hover:bg-neutral-600 min-w-8 text-center cursor-pointer font-bold",
{
"rounded-l-md": x === start,
"rounded-r-md": (x + 1) === n,
"rounded-r-md": x + 1 === n,
"bg-neutral-400": page === x,
})}
},
)}
>
{x + 1}
</div>,
@ -102,24 +109,39 @@ export default function FileList({
>
<div className="absolute flex flex-col items-center justify-center w-full h-full text-wrap text-sm break-all text-center opacity-0 hover:opacity-100 hover:bg-black/80">
<div>
{(info.name?.length ?? 0) === 0 ? "Untitled" : info.name}
{(info.name?.length ?? 0) === 0
? "Untitled"
: info.name!.length > 20
? `${info.name?.substring(0, 10)}...${info.name?.substring(info.name.length - 10)}`
: info.name}
</div>
<div>
{info.size && !isNaN(info.size)
? FormatBytes(info.size, 2)
: ""}
</div>
<div>{info.type}</div>
<div className="flex gap-2">
<a href={info.url} className="underline" target="_blank">
Link
</a>
{onDelete && <a href="#" onClick={e => {
e.preventDefault();
onDelete?.(info.id)
}} className="underline">
Delete
</a>}
{onDelete && (
<a
href="#"
onClick={(e) => {
e.preventDefault();
onDelete?.(info.id);
}}
className="underline"
>
Delete
</a>
)}
</div>
{info.uploader &&
info.uploader.map((a) => (
<Profile link={NostrLink.publicKey(a)} size={20} />
))}
</div>
{renderInner(info)}
</div>
@ -134,6 +156,9 @@ export default function FileList({
<table className="table-auto text-sm">
<thead>
<tr>
<th className="border border-neutral-400 bg-neutral-500 py-1 px-2">
Preview
</th>
<th className="border border-neutral-400 bg-neutral-500 py-1 px-2">
Name
</th>
@ -143,6 +168,11 @@ export default function FileList({
<th className="border border-neutral-400 bg-neutral-500 py-1 px-2">
Size
</th>
{files.some((i) => "uploader" in i) && (
<th className="border border-neutral-400 bg-neutral-500 py-1 px-2">
Uploader
</th>
)}
<th className="border border-neutral-400 bg-neutral-500 py-1 px-2">
Actions
</th>
@ -153,6 +183,9 @@ export default function FileList({
const info = getInfo(a);
return (
<tr key={info.id}>
<td className="border border-neutral-500 py-1 px-2 w-8 h-8">
{renderInner(info)}
</td>
<td className="border border-neutral-500 py-1 px-2 break-all">
{(info.name?.length ?? 0) === 0 ? "<Untitled>" : info.name}
</td>
@ -164,17 +197,30 @@ export default function FileList({
? FormatBytes(info.size, 2)
: ""}
</td>
{info.uploader && (
<td className="border border-neutral-500 py-1 px-2">
{info.uploader.map((a) => (
<Profile link={NostrLink.publicKey(a)} size={20} />
))}
</td>
)}
<td className="border border-neutral-500 py-1 px-2">
<div className="flex gap-2">
<a href={info.url} className="underline" target="_blank">
Link
</a>
{onDelete && <a href="#" onClick={e => {
e.preventDefault();
onDelete?.(info.id)
}} className="underline">
Delete
</a>}
{onDelete && (
<a
href="#"
onClick={(e) => {
e.preventDefault();
onDelete?.(info.id);
}}
className="underline"
>
Delete
</a>
)}
</div>
</td>
</tr>


@ -23,6 +23,7 @@ export default function Upload() {
const [adminListedFiles, setAdminListedFiles] = useState<Nip96FileList>();
const [listedPage, setListedPage] = useState(0);
const [adminListedPage, setAdminListedPage] = useState(0);
const [mimeFilter, setMimeFilter] = useState<string>();
const login = useLogin();
const pub = usePublisher();
@ -30,7 +31,8 @@ export default function Upload() {
const legacyFiles = Report as Record<string, Array<string>>;
const myLegacyFiles = login ? (legacyFiles[login.pubkey] ?? []) : [];
const url = import.meta.env.VITE_API_URL || `${location.protocol}//${location.host}`;
const url =
import.meta.env.VITE_API_URL || `${location.protocol}//${location.host}`;
async function doUpload() {
if (!pub) return;
if (!toUpload) return;
@ -38,7 +40,9 @@ export default function Upload() {
setError(undefined);
if (type === "blossom") {
const uploader = new Blossom(url, pub);
const result = noCompress ? await uploader.upload(toUpload) : await uploader.media(toUpload);
const result = noCompress
? await uploader.upload(toUpload)
: await uploader.media(toUpload);
setResults((s) => [...s, result]);
}
if (type === "nip96") {
@ -82,7 +86,7 @@ export default function Upload() {
try {
setError(undefined);
const uploader = new Route96(url, pub);
const result = await uploader.listFiles(n, 50);
const result = await uploader.listFiles(n, 50, mimeFilter);
setAdminListedFiles(result);
} catch (e) {
if (e instanceof Error) {
@ -132,7 +136,7 @@ export default function Upload() {
useEffect(() => {
listAllUploads(adminListedPage);
}, [adminListedPage]);
}, [adminListedPage, mimeFilter]);
useEffect(() => {
if (pub && !self) {
@ -187,29 +191,33 @@ export default function Upload() {
</Button>
<Button
className="flex-1"
onClick={doUpload} disabled={login === undefined}>
onClick={doUpload}
disabled={login === undefined}
>
Upload
</Button>
</div>
<hr />
{!listedFiles && <Button disabled={login === undefined} onClick={() => listUploads(0)}>
List Uploads
</Button>}
{!listedFiles && (
<Button disabled={login === undefined} onClick={() => listUploads(0)}>
List Uploads
</Button>
)}
{self && <div className="flex justify-between font-medium">
<div>Uploads: {self.file_count.toLocaleString()}</div>
<div>Total Size: {FormatBytes(self.total_size)}</div>
</div>}
{self && (
<div className="flex justify-between font-medium">
<div>Uploads: {self.file_count.toLocaleString()}</div>
<div>Total Size: {FormatBytes(self.total_size)}</div>
</div>
)}
{login && myLegacyFiles.length > 0 && (
<div className="flex flex-col gap-4 font-bold">
You have {myLegacyFiles.length.toLocaleString()} files which can be migrated from void.cat
You have {myLegacyFiles.length.toLocaleString()} files which can be
migrated from void.cat
<div className="flex gap-2">
<Button onClick={() => migrateLegacy()}>
Migrate Files
</Button>
<Button onClick={() => setShowLegacy(s => !s)}>
<Button onClick={() => migrateLegacy()}>Migrate Files</Button>
<Button onClick={() => setShowLegacy((s) => !s)}>
{!showLegacy ? "Show Files" : "Hide Files"}
</Button>
</div>
@ -218,7 +226,10 @@ export default function Upload() {
)}
{showLegacy && (
<FileList
files={myLegacyFiles.map(f => ({ id: f, url: `https://void.cat/d/${f}` }))}
files={myLegacyFiles.map((f) => ({
id: f,
url: `https://void.cat/d/${f}`,
}))}
/>
)}
{listedFiles && (
@ -239,6 +250,18 @@ export default function Upload() {
<hr />
<h3>Admin File List:</h3>
<Button onClick={() => listAllUploads(0)}>List All Uploads</Button>
<div>
<select value={mimeFilter} onChange={e => setMimeFilter(e.target.value)}>
<option value={""}>All</option>
<option>image/webp</option>
<option>image/jpeg</option>
<option>image/jpg</option>
<option>image/png</option>
<option>image/gif</option>
<option>video/mp4</option>
<option>video/mov</option>
</select>
</div>
{adminListedFiles && (
<FileList
files={adminListedFiles.files}
@ -248,8 +271,7 @@ export default function Upload() {
onDelete={async (x) => {
await deleteFile(x);
await listAllUploads(adminListedPage);
}
}
}}
/>
)}
</>