feat: infer mime

feat: store in tmp dir next to output dir
This commit is contained in:
kieran 2025-01-15 19:59:09 +00:00
parent 4accc35bdb
commit 5e97f4d12a
No known key found for this signature in database
GPG Key ID: DE71CEB3925BE941
11 changed files with 327 additions and 297 deletions

24
Cargo.lock generated
View File

@@ -575,6 +575,17 @@ dependencies = [
"nom",
]
[[package]]
name = "cfb"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d38f2da7a0a2c4ccf0065be06397cc26a81f4e528be095826eee9d4adbb8c60f"
dependencies = [
"byteorder",
"fnv",
"uuid",
]
[[package]]
name = "cfg-if"
version = "1.0.0"
@@ -1121,7 +1132,7 @@ checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
[[package]]
name = "ffmpeg-rs-raw"
version = "0.1.0"
source = "git+https://git.v0l.io/Kieran/ffmpeg-rs-raw.git?rev=76333375d8c7c825cd9e45c041866f2c655c7bbd#76333375d8c7c825cd9e45c041866f2c655c7bbd"
source = "git+https://git.v0l.io/Kieran/ffmpeg-rs-raw.git?rev=de2050cec07a095bace38d3ccf9c4c4f9b03b217#de2050cec07a095bace38d3ccf9c4c4f9b03b217"
dependencies = [
"anyhow",
"ffmpeg-sys-the-third",
@@ -2006,6 +2017,15 @@ dependencies = [
"serde",
]
[[package]]
name = "infer"
version = "0.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc150e5ce2330295b8616ce0e3f53250e53af31759a9dbedad1621ba29151847"
dependencies = [
"cfb",
]
[[package]]
name = "inlinable_string"
version = "0.1.15"
@@ -3175,12 +3195,14 @@ dependencies = [
"ffmpeg-rs-raw",
"hex",
"http-range-header",
"infer",
"libc",
"log",
"mime2ext",
"nostr",
"nostr-cursor",
"pretty_env_logger",
"rand",
"regex",
"reqwest",
"rocket",

View File

@@ -52,10 +52,11 @@ serde_with = { version = "3.8.1", features = ["hex"] }
reqwest = { version = "0.12.8", features = ["stream"] }
clap = { version = "4.5.18", features = ["derive"] }
mime2ext = "0.1.53"
tokio-util = { version = "0.7.13", features = ["io"] }
infer = "0.16.0"
tokio-util = { version = "0.7.13", features = ["io", "io-util"] }
libc = { version = "0.2.153", optional = true }
ffmpeg-rs-raw = { git = "https://git.v0l.io/Kieran/ffmpeg-rs-raw.git", rev = "76333375d8c7c825cd9e45c041866f2c655c7bbd", optional = true }
ffmpeg-rs-raw = { git = "https://git.v0l.io/Kieran/ffmpeg-rs-raw.git", rev = "de2050cec07a095bace38d3ccf9c4c4f9b03b217", optional = true }
candle-core = { git = "https://git.v0l.io/huggingface/candle.git", tag = "0.8.1", optional = true }
candle-nn = { git = "https://git.v0l.io/huggingface/candle.git", tag = "0.8.1", optional = true }
candle-transformers = { git = "https://git.v0l.io/huggingface/candle.git", tag = "0.8.1", optional = true }
@@ -63,5 +64,6 @@ sqlx-postgres = { version = "0.8.2", optional = true, features = ["chrono", "uui
http-range-header = { version = "0.4.2", optional = true }
nostr-cursor = { git = "https://git.v0l.io/Kieran/nostr_backup_proc.git", branch = "main", optional = true }
regex = { version = "1.11.1", optional = true }
rand = "0.8.5"

View File

@@ -125,10 +125,7 @@ async fn migrate_file(
let md: Option<Vec<&str>> = f.media_dimensions.as_ref().map(|s| s.split("x").collect());
let fu = FileUpload {
id: id_vec,
name: match &f.name {
Some(n) => n.to_string(),
None => "".to_string(),
},
name: f.name.clone(),
size: f.size as u64,
mime_type: f.mime_type.clone(),
created: f.uploaded,
@@ -142,7 +139,6 @@ },
},
blur_hash: None,
alt: f.description.clone(),
..Default::default()
};
db.add_file(&fu, uid).await?;
Ok(())

View File

@@ -1,3 +1,4 @@
use crate::filesystem::NewFileResult;
use chrono::{DateTime, Utc};
use serde::Serialize;
use sqlx::migrate::MigrateError;
@@ -5,15 +6,24 @@ use sqlx::{Error, Executor, FromRow, Row};
#[derive(Clone, FromRow, Default, Serialize)]
pub struct FileUpload {
/// SHA-256 hash of the file
#[serde(with = "hex")]
pub id: Vec<u8>,
pub name: String,
/// Filename
pub name: Option<String>,
/// Size in bytes
pub size: u64,
/// MIME type
pub mime_type: String,
/// When the upload was created
pub created: DateTime<Utc>,
/// Width of the media in pixels
pub width: Option<u32>,
/// Height of the media in pixels
pub height: Option<u32>,
/// Blurhash of the media
pub blur_hash: Option<String>,
/// Alt text of the media
pub alt: Option<String>,
#[sqlx(skip)]
@@ -21,6 +31,23 @@ pub struct FileUpload {
pub labels: Vec<FileLabel>,
}
impl From<&NewFileResult> for FileUpload {
fn from(value: &NewFileResult) -> Self {
Self {
id: value.id.clone(),
name: None,
size: value.size,
mime_type: value.mime_type.clone(),
created: Utc::now(),
width: value.width,
height: value.height,
blur_hash: value.blur_hash.clone(),
alt: None,
#[cfg(feature = "labels")]
labels: value.labels.clone(),
}
}
}
#[derive(Clone, FromRow, Serialize)]
pub struct User {
pub id: u64,

View File

@@ -1,32 +1,43 @@
use std::env::temp_dir;
use std::fs;
use std::io::SeekFrom;
use std::path::{Path, PathBuf};
use std::time::SystemTime;
use anyhow::Error;
use chrono::Utc;
use ffmpeg_rs_raw::DemuxerInfo;
use log::info;
use rocket::form::validate::Contains;
use serde::Serialize;
use sha2::{Digest, Sha256};
use tokio::fs::File;
use tokio::io::{AsyncRead, AsyncReadExt, AsyncSeekExt};
#[cfg(feature = "labels")]
use crate::db::FileLabel;
use crate::db::FileUpload;
#[cfg(feature = "labels")]
use crate::processing::labeling::label_frame;
#[cfg(feature = "media-compression")]
use crate::processing::{compress_file, probe_file, FileProcessorResult};
use crate::processing::{compress_file, probe_file};
use crate::settings::Settings;
use anyhow::Error;
use anyhow::Result;
#[cfg(feature = "media-compression")]
use ffmpeg_rs_raw::DemuxerInfo;
#[cfg(feature = "media-compression")]
use rocket::form::validate::Contains;
use serde::Serialize;
use sha2::{Digest, Sha256};
use std::path::PathBuf;
use tokio::fs::File;
use tokio::io::{AsyncRead, AsyncReadExt};
use uuid::Uuid;
#[derive(Clone, Default, Serialize)]
pub struct FileSystemResult {
#[derive(Clone)]
pub enum FileSystemResult {
/// File hash already exists
AlreadyExists(Vec<u8>),
/// New file created on disk and is stored
NewFile(NewFileResult),
}
#[derive(Clone, Serialize)]
pub struct NewFileResult {
pub path: PathBuf,
pub upload: FileUpload,
#[serde(with = "hex")]
pub id: Vec<u8>,
pub size: u64,
pub mime_type: String,
pub width: Option<u32>,
pub height: Option<u32>,
pub blur_hash: Option<String>,
#[cfg(feature = "labels")]
pub labels: Vec<FileLabel>,
}
pub struct FileStore {
@@ -44,178 +55,168 @@ impl FileStore {
}
/// Store a new file
pub async fn put<S>(
pub async fn put<'r, S>(
&self,
stream: S,
mime_type: &str,
compress: bool,
) -> Result<FileSystemResult, Error>
) -> Result<FileSystemResult>
where
S: AsyncRead + Unpin,
S: AsyncRead + Unpin + 'r,
{
let result = self
.store_compress_file(stream, mime_type, compress)
.await?;
let dst_path = self.map_path(&result.upload.id);
// store file in temp path and hash the file
let (temp_file, size, hash) = self.store_hash_temp_file(stream).await?;
let dst_path = self.map_path(&hash);
// check if file hash already exists
if dst_path.exists() {
fs::remove_file(result.path)?;
return Ok(FileSystemResult {
path: dst_path,
..result
});
}
fs::create_dir_all(dst_path.parent().unwrap())?;
if let Err(e) = fs::copy(&result.path, &dst_path) {
fs::remove_file(&result.path)?;
Err(Error::from(e))
} else {
fs::remove_file(result.path)?;
Ok(FileSystemResult {
path: dst_path,
..result
})
}
tokio::fs::remove_file(temp_file).await?;
return Ok(FileSystemResult::AlreadyExists(hash));
}
/// Try to replace the mime-type when unknown using ffmpeg probe result
fn hack_mime_type(mime_type: &str, p: &DemuxerInfo) -> String {
if mime_type == "application/octet-stream" {
if p.format.contains("mp4") {
"video/mp4".to_string()
} else if p.format.contains("webp") {
"image/webp".to_string()
} else if p.format.contains("jpeg") {
"image/jpeg".to_string()
} else if p.format.contains("png") {
"image/png".to_string()
} else if p.format.contains("gif") {
"image/gif".to_string()
} else {
mime_type.to_string()
}
} else {
mime_type.to_string()
}
}
async fn store_compress_file<S>(
&self,
mut stream: S,
mime_type: &str,
compress: bool,
) -> Result<FileSystemResult, Error>
where
S: AsyncRead + Unpin,
let mut res = if compress {
#[cfg(feature = "media-compression")]
{
let random_id = uuid::Uuid::new_v4();
let tmp_path = FileStore::map_temp(random_id);
let mut file = File::options()
.create(true)
.truncate(false)
.write(true)
.read(true)
.open(tmp_path.clone())
.await?;
tokio::io::copy(&mut stream, &mut file).await?;
let res = match self.compress_file(&temp_file, mime_type).await {
Err(e) => {
tokio::fs::remove_file(&temp_file).await?;
return Err(e);
}
Ok(res) => res,
};
tokio::fs::remove_file(temp_file).await?;
res
}
#[cfg(not(feature = "media-compression"))]
{
anyhow::bail!("Compression not supported!");
}
} else {
let (width, height, mime_type) = {
#[cfg(feature = "media-compression")]
{
let probe = probe_file(&temp_file).ok();
let v_stream = probe.as_ref().and_then(|p| p.best_video());
let mime = Self::hack_mime_type(mime_type, &probe, &temp_file);
(
v_stream.map(|v| v.width as u32),
v_stream.map(|v| v.height as u32),
mime,
)
}
#[cfg(not(feature = "media-compression"))]
(None, None, Self::infer_mime_type(mime_type, &temp_file))
};
NewFileResult {
path: temp_file,
id: hash,
size,
mime_type,
width,
height,
blur_hash: None,
}
};
info!("File saved to temp path: {}", tmp_path.to_str().unwrap());
// copy temp file to final destination
let final_dest = self.map_path(&res.id);
// Compressed file already exists
if final_dest.exists() {
tokio::fs::remove_file(&res.path).await?;
Ok(FileSystemResult::AlreadyExists(res.id))
} else {
tokio::fs::create_dir_all(final_dest.parent().unwrap()).await?;
tokio::fs::rename(&res.path, &final_dest).await?;
res.path = final_dest;
Ok(FileSystemResult::NewFile(res))
}
}
#[cfg(feature = "media-compression")]
if compress {
let start = SystemTime::now();
let proc_result = compress_file(tmp_path.clone(), mime_type)?;
if let FileProcessorResult::NewFile(new_temp) = proc_result {
let old_size = tmp_path.metadata()?.len();
let new_size = new_temp.result.metadata()?.len();
let time_compress = SystemTime::now().duration_since(start)?;
let start = SystemTime::now();
/// Try to replace the mime-type when unknown using ffmpeg probe result
fn hack_mime_type(mime_type: &str, p: &Option<DemuxerInfo>, out_path: &PathBuf) -> String {
if let Some(p) = p {
if p.format.contains("mp4") {
return "video/mp4".to_string();
} else if p.format.contains("webp") {
return "image/webp".to_string();
} else if p.format.contains("jpeg") {
return "image/jpeg".to_string();
} else if p.format.contains("png") {
return "image/png".to_string();
} else if p.format.contains("gif") {
return "image/gif".to_string();
}
}
// infer mime type
Self::infer_mime_type(mime_type, out_path)
}
fn infer_mime_type(mime_type: &str, out_path: &PathBuf) -> String {
// infer mime type
if let Ok(Some(i)) = infer::get_from_path(out_path) {
i.mime_type().to_string()
} else {
mime_type.to_string()
}
}
async fn compress_file(&self, input: &PathBuf, mime_type: &str) -> Result<NewFileResult> {
let compressed_result = compress_file(input, mime_type, &self.temp_dir())?;
#[cfg(feature = "labels")]
let labels = if let Some(mp) = &self.settings.vit_model {
label_frame(&new_temp.result, mp.model.clone(), mp.config.clone())?
label_frame(
&compressed_result.result,
mp.model.clone(),
mp.config.clone(),
)?
.iter()
.map(|l| FileLabel::new(l.0.clone(), "vit224".to_string()))
.collect()
} else {
vec![]
};
let hash = FileStore::hash_file(&compressed_result.result).await?;
let time_labels = SystemTime::now().duration_since(start)?;
// delete old temp
fs::remove_file(tmp_path)?;
file = File::options()
.create(true)
.truncate(false)
.write(true)
.read(true)
.open(new_temp.result.clone())
.await?;
let n = file.metadata().await?.len();
let hash = FileStore::hash_file(&mut file).await?;
info!("Processed media: ratio={:.2}x, old_size={:.3}kb, new_size={:.3}kb, duration_compress={:.2}ms, duration_labels={:.2}ms",
old_size as f32 / new_size as f32,
old_size as f32 / 1024.0,
new_size as f32 / 1024.0,
time_compress.as_micros() as f64 / 1000.0,
time_labels.as_micros() as f64 / 1000.0
);
return Ok(FileSystemResult {
path: new_temp.result,
upload: FileUpload {
let n = File::open(&compressed_result.result)
.await?
.metadata()
.await?
.len();
Ok(NewFileResult {
path: compressed_result.result,
id: hash,
name: "".to_string(),
size: n,
width: Some(new_temp.width as u32),
height: Some(new_temp.height as u32),
width: Some(compressed_result.width as u32),
height: Some(compressed_result.height as u32),
blur_hash: None,
mime_type: new_temp.mime_type,
mime_type: compressed_result.mime_type,
#[cfg(feature = "labels")]
labels,
created: Utc::now(),
..Default::default()
},
});
}
} else if let Ok(p) = probe_file(tmp_path.clone()) {
let n = file.metadata().await?.len();
let hash = FileStore::hash_file(&mut file).await?;
let v_stream = p.best_video();
return Ok(FileSystemResult {
path: tmp_path,
upload: FileUpload {
id: hash,
name: "".to_string(),
size: n,
created: Utc::now(),
mime_type: Self::hack_mime_type(mime_type, &p),
width: v_stream.map(|v| v.width as u32),
height: v_stream.map(|v| v.height as u32),
..Default::default()
},
});
}
let n = file.metadata().await?.len();
let hash = FileStore::hash_file(&mut file).await?;
Ok(FileSystemResult {
path: tmp_path,
upload: FileUpload {
id: hash,
name: "".to_string(),
size: n,
created: Utc::now(),
mime_type: mime_type.to_string(),
..Default::default()
},
})
}
async fn hash_file(file: &mut File) -> Result<Vec<u8>, Error> {
async fn store_hash_temp_file<S>(&self, mut stream: S) -> Result<(PathBuf, u64, Vec<u8>)>
where
S: AsyncRead + Unpin,
{
let uid = Uuid::new_v4();
let out_path = self.temp_dir().join(uid.to_string());
tokio::fs::create_dir_all(&out_path.parent().unwrap()).await?;
let mut file = File::create(&out_path).await?;
let n = tokio::io::copy(&mut stream, &mut file).await?;
let hash = FileStore::hash_file(&out_path).await?;
Ok((out_path, n, hash))
}
async fn hash_file(p: &PathBuf) -> Result<Vec<u8>, Error> {
let mut file = File::open(p).await?;
let mut hasher = Sha256::new();
file.seek(SeekFrom::Start(0)).await?;
let mut buf = [0; 4096];
loop {
let n = file.read(&mut buf).await?;
@@ -228,15 +229,16 @@ impl FileStore {
Ok(res.to_vec())
}
fn map_temp(id: uuid::Uuid) -> PathBuf {
temp_dir().join(id.to_string())
}
pub fn map_path(&self, id: &Vec<u8>) -> PathBuf {
let id = hex::encode(id);
Path::new(&self.settings.storage_dir)
.join(&id[0..2])
.join(&id[2..4])
.join(id)
self.storage_dir().join(&id[0..2]).join(&id[2..4]).join(id)
}
pub fn temp_dir(&self) -> PathBuf {
self.storage_dir().join("tmp")
}
pub fn storage_dir(&self) -> PathBuf {
PathBuf::from(&self.settings.storage_dir)
}
}

View File

@@ -1,13 +1,12 @@
use std::path::PathBuf;
use std::path::{Path, PathBuf};
use crate::processing::probe::FFProbe;
use anyhow::{bail, Error, Result};
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVPixelFormat::AV_PIX_FMT_YUV420P;
use ffmpeg_rs_raw::{DemuxerInfo, Encoder, StreamType, Transcoder};
use ffmpeg_rs_raw::{Demuxer, DemuxerInfo, Encoder, StreamType, Transcoder};
use uuid::Uuid;
#[cfg(feature = "labels")]
pub mod labeling;
mod probe;
pub struct WebpProcessor;
@@ -22,7 +21,12 @@ impl WebpProcessor {
Self
}
pub fn process_file(&mut self, input: PathBuf, mime_type: &str) -> Result<FileProcessorResult> {
pub fn process_file(
&mut self,
input: &Path,
mime_type: &str,
out_dir: &Path,
) -> Result<NewFileProcessorResult> {
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVCodecID::AV_CODEC_ID_WEBP;
if !mime_type.starts_with("image/") {
@@ -30,11 +34,13 @@ }
}
if mime_type == "image/webp" {
return Ok(FileProcessorResult::Skip);
bail!("MIME type is already image/webp");
}
let mut out_path = input.clone();
out_path.set_extension("compressed.webp");
let uid = Uuid::new_v4();
let mut out_path = out_dir.join(uid.to_string());
out_path.set_extension("webp");
unsafe {
let mut trans = Transcoder::new(input.to_str().unwrap(), out_path.to_str().unwrap())?;
@@ -54,21 +60,16 @@
trans.transcode_stream(image_stream, enc)?;
trans.run(None)?;
Ok(FileProcessorResult::NewFile(NewFileProcessorResult {
Ok(NewFileProcessorResult {
result: out_path,
mime_type: "image/webp".to_string(),
width: image_stream.width,
height: image_stream.height,
}))
})
}
}
}
pub enum FileProcessorResult {
NewFile(NewFileProcessorResult),
Skip,
}
pub struct NewFileProcessorResult {
pub result: PathBuf,
pub mime_type: String,
@@ -76,21 +77,27 @@ pub struct NewFileProcessorResult {
pub height: usize,
}
pub fn compress_file(in_file: PathBuf, mime_type: &str) -> Result<FileProcessorResult, Error> {
let proc = if mime_type.starts_with("image/") {
Some(WebpProcessor::new())
} else {
None
};
if let Some(mut proc) = proc {
proc.process_file(in_file, mime_type)
} else {
Ok(FileProcessorResult::Skip)
}
pub fn can_compress(mime_type: &str) -> bool {
mime_type.starts_with("image/")
}
pub fn probe_file(in_file: PathBuf) -> Result<DemuxerInfo> {
let proc = FFProbe::new();
let info = proc.process_file(in_file)?;
Ok(info)
pub fn compress_file(
stream: &Path,
mime_type: &str,
out_dir: &Path,
) -> Result<NewFileProcessorResult, Error> {
if !can_compress(mime_type) {
bail!("MIME type not supported");
}
if mime_type.starts_with("image/") {
let mut proc = WebpProcessor::new();
return proc.process_file(stream, mime_type, out_dir);
}
bail!("No media processor")
}
pub fn probe_file(stream: &Path) -> Result<DemuxerInfo> {
let mut demuxer = Demuxer::new(stream.to_str().unwrap())?;
unsafe { demuxer.probe_input() }
}

View File

@@ -1,19 +0,0 @@
use anyhow::Result;
use ffmpeg_rs_raw::{Demuxer, DemuxerInfo};
use std::path::PathBuf;
/// Image converter to WEBP
pub struct FFProbe {}
impl FFProbe {
pub fn new() -> Self {
Self {}
}
pub fn process_file(self, in_file: PathBuf) -> Result<DemuxerInfo> {
unsafe {
let mut demuxer = Demuxer::new(in_file.to_str().unwrap())?;
demuxer.probe_input()
}
}
}

View File

@@ -1,6 +1,6 @@
use crate::auth::blossom::BlossomAuth;
use crate::db::{Database, FileUpload};
use crate::filesystem::FileStore;
use crate::filesystem::{FileStore, FileSystemResult};
use crate::routes::{delete_file, Nip94Event};
use crate::settings::Settings;
use crate::webhook::Webhook;
@@ -385,7 +385,7 @@ async fn process_upload(
.await
}
async fn process_stream<S>(
async fn process_stream<'p, S>(
stream: S,
mime_type: &str,
name: &Option<&str>,
@@ -397,11 +397,10 @@
webhook: &State<Option<Webhook>>,
) -> BlossomResponse
where
S: AsyncRead + Unpin,
S: AsyncRead + Unpin + 'p,
{
match fs.put(stream, mime_type, compress).await {
Ok(mut blob) => {
blob.upload.name = name.unwrap_or("").to_owned();
let upload = match fs.put(stream, mime_type, compress).await {
Ok(FileSystemResult::NewFile(blob)) => {
if let Some(wh) = webhook.as_ref() {
match wh.store_file(pubkey, blob.clone()).await {
Ok(store) => {
@@ -419,33 +418,33 @@ where
}
}
}
let mut ret: FileUpload = (&blob).into();
// update file data before inserting
ret.name = name.map(|s| s.to_string());
ret
}
Ok(FileSystemResult::AlreadyExists(i)) => match db.get_file(&i).await {
Ok(Some(f)) => f,
_ => return BlossomResponse::error("File not found"),
},
Err(e) => {
error!("{}", e.to_string());
return BlossomResponse::error(format!("Error saving file (disk): {}", e));
}
};
let user_id = match db.upsert_user(pubkey).await {
Ok(u) => u,
Err(e) => {
return BlossomResponse::error(format!("Failed to save file (db): {}", e));
}
};
if let Err(e) = db.add_file(&blob.upload, user_id).await {
if let Err(e) = db.add_file(&upload, user_id).await {
error!("{}", e.to_string());
let _ = fs::remove_file(blob.path);
if let Some(dbe) = e.as_database_error() {
if let Some(c) = dbe.code() {
if c == "23000" {
return BlossomResponse::error("File already exists");
}
}
}
BlossomResponse::error(format!("Error saving file (db): {}", e))
} else {
BlossomResponse::BlobDescriptor(Json(BlobDescriptor::from_upload(
settings,
&blob.upload,
)))
}
}
Err(e) => {
error!("{}", e.to_string());
BlossomResponse::error(format!("Error saving file (disk): {}", e))
}
BlossomResponse::BlobDescriptor(Json(BlobDescriptor::from_upload(settings, &upload)))
}
}

View File

@@ -40,7 +40,7 @@ pub struct FilePayload {
#[serde(crate = "rocket::serde")]
struct Nip94Event {
pub created_at: i64,
pub content: String,
pub content: Option<String>,
pub tags: Vec<Vec<String>>,
}
@@ -218,10 +218,10 @@ impl<'r> Responder<'r, 'static> for FilePayload {
if let Ok(ct) = ContentType::from_str(&self.info.mime_type) {
response.set_header(ct);
}
if !self.info.name.is_empty() {
if let Some(name) = &self.info.name {
response.set_header(Header::new(
"content-disposition",
format!("inline; filename=\"{}\"", self.info.name),
format!("inline; filename=\"{}\"", name),
));
}
Ok(response)

View File

@@ -14,7 +14,7 @@ use rocket::{routes, FromForm, Responder, Route, State};
use crate::auth::nip98::Nip98Auth;
use crate::db::{Database, FileUpload};
use crate::filesystem::FileStore;
use crate::filesystem::{FileStore, FileSystemResult};
use crate::routes::{delete_file, Nip94Event, PagedResult};
use crate::settings::Settings;
use crate::webhook::Webhook;
@@ -208,17 +208,13 @@
return Nip96Response::Forbidden(Json(Nip96UploadResult::error("Not on whitelist")));
}
}
match fs
let pubkey_vec = auth.event.pubkey.to_bytes().to_vec();
let upload = match fs
.put(file, content_type, !form.no_transform.unwrap_or(false))
.await
{
Ok(mut blob) => {
blob.upload.name = match &form.caption {
Some(c) => c.to_string(),
None => "".to_string(),
};
blob.upload.alt = form.alt.as_ref().map(|s| s.to_string());
let pubkey_vec = auth.event.pubkey.to_bytes().to_vec();
Ok(FileSystemResult::NewFile(blob)) => {
if let Some(wh) = webhook.as_ref() {
match wh.store_file(&pubkey_vec, blob.clone()).await {
Ok(store) => {
@@ -236,34 +232,32 @@ }
}
}
}
let mut upload: FileUpload = (&blob).into();
upload.name = form.caption.map(|cap| cap.to_string());
upload.alt = form.alt.as_ref().map(|s| s.to_string());
upload
}
Ok(FileSystemResult::AlreadyExists(i)) => match db.get_file(&i).await {
Ok(Some(f)) => f,
_ => return Nip96Response::error("File not found"),
},
Err(e) => {
error!("{}", e.to_string());
return Nip96Response::error(&format!("Could not save file: {}", e));
}
};
let user_id = match db.upsert_user(&pubkey_vec).await {
Ok(u) => u,
Err(e) => return Nip96Response::error(&format!("Could not save user: {}", e)),
};
let tmp_file = blob.path.clone();
if let Err(e) = db.add_file(&blob.upload, user_id).await {
if let Err(e) = db.add_file(&upload, user_id).await {
error!("{}", e.to_string());
let _ = fs::remove_file(tmp_file);
if let Some(dbe) = e.as_database_error() {
if let Some(c) = dbe.code() {
if c == "23000" {
return Nip96Response::error("File already exists");
}
}
}
return Nip96Response::error(&format!("Could not save file (db): {}", e));
}
Nip96Response::UploadResult(Json(Nip96UploadResult::from_upload(
settings,
&blob.upload,
)))
}
Err(e) => {
error!("{}", e.to_string());
Nip96Response::error(&format!("Could not save file: {}", e))
}
}
Nip96Response::UploadResult(Json(Nip96UploadResult::from_upload(settings, &upload)))
}
#[rocket::delete("/n96/<sha256>")]

View File

@@ -3,7 +3,7 @@ use nostr::serde_json;
use reqwest::{Client, ClientBuilder};
use serde::{Deserialize, Serialize};
use crate::filesystem::FileSystemResult;
use crate::filesystem::NewFileResult;
pub struct Webhook {
url: String,
@@ -26,8 +26,8 @@ }
}
/// Ask webhook api if this file can be accepted
pub async fn store_file(&self, pubkey: &Vec<u8>, fs: FileSystemResult) -> Result<bool, Error> {
let body: WebhookRequest<FileSystemResult> = WebhookRequest {
pub async fn store_file(&self, pubkey: &Vec<u8>, fs: NewFileResult) -> Result<bool, Error> {
let body = WebhookRequest {
action: "store_file".to_string(),
subject: Some(hex::encode(pubkey)),
payload: fs,