feat: BUD-05
This commit is contained in:
parent 9ccbace175
commit a580ceac44
103 Cargo.lock generated
@@ -150,6 +150,12 @@ dependencies = [
"derive_arbitrary",
]

[[package]]
name = "arrayvec"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"

[[package]]
name = "async-stream"
version = "0.3.5"

@@ -228,6 +234,16 @@ dependencies = [
"rustc-demangle",
]

[[package]]
name = "base58ck"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2c8d66485a3a2ea485c1913c4572ce0256067a5377ac8c75c4960e1cda98605f"
dependencies = [
"bitcoin-internals 0.3.0",
"bitcoin_hashes 0.14.0",
]

[[package]]
name = "base64"
version = "0.21.7"

@@ -248,9 +264,9 @@ checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b"

[[package]]
name = "bech32"
version = "0.10.0-beta"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "98f7eed2b2781a6f0b5c903471d48e15f56fb4e1165df8a9a2337fd1a59d45ea"
checksum = "d965446196e3b7decd44aa7ee49e31d630118f90ef12f97900f262eb915c951d"

[[package]]
name = "binascii"

@@ -306,14 +322,17 @@ checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb"

[[package]]
name = "bitcoin"
version = "0.31.2"
version = "0.32.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c85783c2fe40083ea54a33aa2f0ba58831d90fcd190f5bdc47e74e84d2a96ae"
checksum = "0032b0e8ead7074cda7fc4f034409607e3f03a6f71d66ade8a307f79b4d99e73"
dependencies = [
"base58ck",
"bech32",
"bitcoin-internals",
"bitcoin_hashes 0.13.0",
"hex-conservative",
"bitcoin-internals 0.3.0",
"bitcoin-io",
"bitcoin-units",
"bitcoin_hashes 0.14.0",
"hex-conservative 0.2.1",
"hex_lit",
"secp256k1",
"serde",

@@ -324,10 +343,32 @@ name = "bitcoin-internals"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9425c3bf7089c983facbae04de54513cce73b41c7f9ff8c845b54e7bc64ebbfb"

[[package]]
name = "bitcoin-internals"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30bdbe14aa07b06e6cfeffc529a1f099e5fbe249524f8125358604df99a4bed2"
dependencies = [
"serde",
]

[[package]]
name = "bitcoin-io"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "340e09e8399c7bd8912f495af6aa58bea0c9214773417ffaa8f6460f93aaee56"

[[package]]
name = "bitcoin-units"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5285c8bcaa25876d07f37e3d30c303f2609179716e11d688f51e8f1fe70063e2"
dependencies = [
"bitcoin-internals 0.3.0",
"serde",
]

[[package]]
name = "bitcoin_hashes"
version = "0.11.0"

@@ -340,8 +381,18 @@ version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1930a4dabfebb8d7d9992db18ebe3ae2876f0a305fab206fd168df931ede293b"
dependencies = [
"bitcoin-internals",
"hex-conservative",
"bitcoin-internals 0.2.0",
"hex-conservative 0.1.2",
]

[[package]]
name = "bitcoin_hashes"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb18c03d0db0247e147a21a6faafd5a7eb851c743db062de72018b6b7e8e4d16"
dependencies = [
"bitcoin-io",
"hex-conservative 0.2.1",
"serde",
]

@@ -1444,6 +1495,15 @@ version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "212ab92002354b4819390025006c897e8140934349e8635c9b077f47b4dcbd20"

[[package]]
name = "hex-conservative"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5313b072ce3c597065a808dbf612c4c8e8590bdbf8b579508bf7a762c5eae6cd"
dependencies = [
"arrayvec",
]

[[package]]
name = "hex_lit"
version = "0.1.1"

@@ -1972,6 +2032,12 @@ version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e664971378a3987224f7a0e10059782035e89899ae403718ee07de85bec42afe"

[[package]]
name = "negentropy"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "43a88da9dd148bbcdce323dd6ac47d369b4769d4a3b78c6c52389b9269f77932"

[[package]]
name = "nom"
version = "7.1.3"

@@ -1984,12 +2050,13 @@ dependencies = [

[[package]]
name = "nostr"
version = "0.34.1"
version = "0.35.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1c3c32439eef3ea4d9079b2a8f557992d27259c26527e43d4228dd321e43a77"
checksum = "56db234b2e07901e372f34e9463f91590579cd8e6dbd34ed2ccc7e461e4ba639"
dependencies = [
"aes",
"base64 0.21.7",
"base64 0.22.1",
"bech32",
"bip39",
"bitcoin",
"cbc",

@@ -1998,13 +2065,13 @@ dependencies = [
"getrandom",
"instant",
"js-sys",
"negentropy",
"negentropy 0.3.1",
"negentropy 0.4.3",
"once_cell",
"reqwest",
"scrypt",
"serde",
"serde_json",
"tracing",
"unicode-normalization",
"url",
"wasm-bindgen",

@@ -3003,9 +3070,9 @@ dependencies = [

[[package]]
name = "secp256k1"
version = "0.28.2"
version = "0.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d24b59d129cdadea20aea4fb2352fa053712e5d713eee47d700cd4b2bc002f10"
checksum = "9465315bc9d4566e1724f0fffcbcc446268cb522e60f9a27bcded6b19c108113"
dependencies = [
"bitcoin_hashes 0.13.0",
"rand",

@@ -3015,9 +3082,9 @@ dependencies = [

[[package]]
name = "secp256k1-sys"
version = "0.9.2"
version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5d1746aae42c19d583c3c1a8c646bfad910498e2051c551a7f2e3c0c9fbb7eb"
checksum = "d4387882333d3aa8cb20530a17c69a3752e97837832f34f6dccc760e715001d9"
dependencies = [
"cc",
]
@@ -18,14 +18,14 @@ name = "route96"
default = ["nip96", "blossom", "analytics"]
labels = ["nip96", "dep:candle-core", "dep:candle-nn", "dep:candle-transformers"]
nip96 = ["dep:ffmpeg-sys-the-third", "dep:blurhash", "dep:libc"]
blossom = []
blossom = ["dep:ffmpeg-sys-the-third", "dep:libc"]
bin-void-cat-migrate = ["dep:sqlx-postgres", "dep:clap", "dep:clap_derive"]
torrent-v2 = []
analytics = []

[dependencies]
log = "0.4.21"
nostr = "0.34.1"
nostr = "0.35.0"
pretty_env_logger = "0.5.0"
rocket = { version = "0.5.0", features = ["json"] }
tokio = { version = "1.37.0", features = ["rt", "rt-multi-thread", "macros"] }
@@ -8,7 +8,9 @@ Image hosting service
- [Blossom Support](https://github.com/hzrd149/blossom/blob/master/buds/01.md)
- [BUD-01](https://github.com/hzrd149/blossom/blob/master/buds/01.md)
- [BUD-02](https://github.com/hzrd149/blossom/blob/master/buds/02.md)
- [BUD-05](https://github.com/hzrd149/blossom/blob/master/buds/05.md)
- [BUD-06](https://github.com/hzrd149/blossom/blob/master/buds/06.md)
- [BUD-08](https://github.com/hzrd149/blossom/blob/master/buds/08.md)
- Image compression to WebP (FFMPEG, NIP-96 only)
- Blurhash calculation (NIP-96 only)
- AI image labeling ([ViT224](https://huggingface.co/google/vit-base-patch16-224))
@@ -50,7 +50,7 @@ impl PlausibleAnalytics {
)
.send_json(&msg)
{
Ok(v) => info!("Sent {:?}", msg),
Ok(_v) => info!("Sent {:?}", msg),
Err(e) => warn!("Failed to track: {}", e),
}
}

@@ -80,7 +80,7 @@ impl Analytics for PlausibleAnalytics {
xff: match req.headers().get_one("X-Forwarded-For") {
Some(s) => Some(s.to_string()),
None => None,
}
},
})?)
}
}
@@ -41,10 +41,12 @@ impl<'r> FromRequest<'r> for BlossomAuth {
}

// check expiration tag
if let Some(expiration) = event.tags.iter().find_map(|t| if t.kind() == TagKind::Expiration {
if let Some(expiration) = event.tags.iter().find_map(|t| {
if t.kind() == TagKind::Expiration {
t.content()
} else {
None
}
}) {
let u_exp: Timestamp = expiration.parse().unwrap();
if u_exp <= Timestamp::now() {
@@ -1,11 +1,11 @@
use base64::Engine;
use base64::prelude::BASE64_STANDARD;
use base64::Engine;
use log::info;
use nostr::{Event, JsonUtil, Kind, Timestamp};
use rocket::{async_trait, Request};
use rocket::http::Status;
use rocket::http::uri::{Absolute, Uri};
use rocket::http::Status;
use rocket::request::{FromRequest, Outcome};
use rocket::{async_trait, Request};

pub struct Nip98Auth {
pub content_type: Option<String>,

@@ -42,7 +42,7 @@ impl<'r> FromRequest<'r> for Nip98Auth {

// check url tag
if let Some(url) = event.tags.iter().find_map(|t| {
let vec = t.as_vec();
let vec = t.as_slice();
if vec[0] == "u" {
Some(vec[1].clone())
} else {

@@ -62,7 +62,7 @@ impl<'r> FromRequest<'r> for Nip98Auth {

// check method tag
if let Some(method) = event.tags.iter().find_map(|t| {
let vec = t.as_vec();
let vec = t.as_slice();
if vec[0] == "method" {
Some(vec[1].clone())
} else {
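Not part of the commit: a minimal sketch of the tag-reading pattern the two hunks above switch to, assuming the nostr 0.35 `Tag::as_slice()` accessor that replaces the older `as_vec()`. The helper name is made up.

```rust
use nostr::Event;

// Hypothetical helper mirroring the updated find_map above: read the "u" tag value
// from a NIP-98 auth event via Tag::as_slice(), which exposes the tag as &[String].
fn url_tag(event: &Event) -> Option<String> {
    event.tags.iter().find_map(|t| {
        let vec = t.as_slice();
        if vec.first().map(String::as_str) == Some("u") {
            vec.get(1).cloned()
        } else {
            None
        }
    })
}
```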
@@ -7,7 +7,7 @@ use nostr::bitcoin::base58;
use route96::db::{Database, FileUpload};
use route96::filesystem::FileStore;
use route96::settings::Settings;
use sqlx::{FromRow};
use sqlx::FromRow;
use sqlx_postgres::{PgPool, PgPoolOptions};
use std::path::PathBuf;
use tokio::io::{AsyncWriteExt, BufWriter};

@@ -191,7 +191,8 @@ impl VoidCatDb {
async fn connect(conn: &str) -> Result<Self, sqlx::Error> {
let pool = PgPoolOptions::new()
.max_connections(50)
.connect(conn).await?;
.connect(conn)
.await?;
Ok(Self { pool })
}
25 src/db.rs
@@ -118,7 +118,8 @@ impl Database {

#[cfg(feature = "labels")]
for lbl in &file.labels {
let q3 = sqlx::query("insert ignore into upload_labels(file,label,model) values(?,?,?)")
let q3 =
sqlx::query("insert ignore into upload_labels(file,label,model) values(?,?,?)")
.bind(&file.id)
.bind(&lbl.label)
.bind(&lbl.model);

@@ -136,9 +137,11 @@ impl Database {
}

pub async fn get_file_owners(&self, file: &Vec<u8>) -> Result<Vec<User>, Error> {
sqlx::query_as("select users.* from users, user_uploads \
sqlx::query_as(
"select users.* from users, user_uploads \
where users.id = user_uploads.user_id \
and user_uploads.file = ?")
and user_uploads.file = ?",
)
.bind(file)
.fetch_all(&self.pool)
.await

@@ -146,8 +149,10 @@ impl Database {

#[cfg(feature = "labels")]
pub async fn get_file_labels(&self, file: &Vec<u8>) -> Result<Vec<FileLabel>, Error> {
sqlx::query_as("select upload_labels.* from uploads, upload_labels \
where uploads.id = ? and uploads.id = upload_labels.file")
sqlx::query_as(
"select upload_labels.* from uploads, upload_labels \
where uploads.id = ? and uploads.id = upload_labels.file",
)
.bind(file)
.fetch_all(&self.pool)
.await

@@ -170,7 +175,12 @@ impl Database {
Ok(())
}

pub async fn list_files(&self, pubkey: &Vec<u8>, offset: u32, limit: u32) -> Result<(Vec<FileUpload>, i64), Error> {
pub async fn list_files(
&self,
pubkey: &Vec<u8>,
offset: u32,
limit: u32,
) -> Result<(Vec<FileUpload>, i64), Error> {
let results: Vec<FileUpload> = sqlx::query_as(
"select uploads.* from uploads, users, user_uploads \
where users.pubkey = ? \

@@ -188,7 +198,8 @@ impl Database {
"select count(uploads.id) from uploads, users, user_uploads \
where users.pubkey = ? \
and users.id = user_uploads.user_id \
and user_uploads.file = uploads.id")
and user_uploads.file = uploads.id",
)
.bind(pubkey)
.fetch_one(&self.pool)
.await?
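Not from the commit: a small sketch of how the reworked `list_files` signature is expected to be called, assuming `sqlx::Error` as in the surrounding module; the page size is a placeholder.

```rust
use route96::db::Database;

// Hypothetical caller: fetch the first page of a user's uploads plus the total count.
async fn first_page(db: &Database, pubkey: &Vec<u8>) -> Result<(), sqlx::Error> {
    let page: u32 = 0;
    let page_size: u32 = 100; // placeholder page size
    let (files, total) = db.list_files(pubkey, page * page_size, page_size).await?;
    println!("showing {} of {} uploads", files.len(), total);
    Ok(())
}
```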
@@ -15,10 +15,10 @@ use tokio::io::{AsyncRead, AsyncReadExt, AsyncSeekExt};
#[cfg(feature = "labels")]
use crate::db::FileLabel;
use crate::db::FileUpload;
#[cfg(feature = "nip96")]
use crate::processing::{compress_file, FileProcessorResult, probe_file, ProbeStream};
#[cfg(feature = "labels")]
use crate::processing::labeling::label_frame;
#[cfg(feature = "nip96")]
use crate::processing::{compress_file, probe_file, FileProcessorResult, ProbeStream};
use crate::settings::Settings;

#[derive(Clone, Default, Serialize)]

@@ -33,9 +33,7 @@ pub struct FileStore {

impl FileStore {
pub fn new(settings: Settings) -> Self {
Self {
settings,
}
Self { settings }
}

/// Get a file path by id

@@ -44,11 +42,18 @@ impl FileStore {
}

/// Store a new file
pub async fn put<TStream>(&self, stream: TStream, mime_type: &str, compress: bool) -> Result<FileSystemResult, Error>
pub async fn put<TStream>(
&self,
stream: TStream,
mime_type: &str,
compress: bool,
) -> Result<FileSystemResult, Error>
where
TStream: AsyncRead + Unpin,
{
let result = self.store_compress_file(stream, mime_type, compress).await?;
let result = self
.store_compress_file(stream, mime_type, compress)
.await?;
let dst_path = self.map_path(&result.upload.id);
if dst_path.exists() {
fs::remove_file(result.path)?;

@@ -70,7 +75,12 @@ impl FileStore {
}
}

async fn store_compress_file<TStream>(&self, mut stream: TStream, mime_type: &str, compress: bool) -> Result<FileSystemResult, Error>
async fn store_compress_file<TStream>(
&self,
mut stream: TStream,
mime_type: &str,
compress: bool,
) -> Result<FileSystemResult, Error>
where
TStream: AsyncRead + Unpin,
{

@@ -97,7 +107,8 @@ impl FileStore {
let time_compress = SystemTime::now().duration_since(start).unwrap();
let start = SystemTime::now();
let blur_hash = blurhash::encode(
9, 9,
9,
9,
new_temp.width as u32,
new_temp.height as u32,
new_temp.image.as_slice(),

@@ -111,8 +122,10 @@ impl FileStore {
new_temp.image.as_mut_slice(),
new_temp.width,
new_temp.height,
mp.clone())?
.iter().map(|l| FileLabel::new(l.clone(), "vit224".to_string()))
mp.clone(),
)?
.iter()
.map(|l| FileLabel::new(l.clone(), "vit224".to_string()))
.collect()
} else {
vec![]

@@ -161,7 +174,7 @@ impl FileStore {
} else if let FileProcessorResult::Probe(p) = probe_file(tmp_path.clone())? {
let video_stream_size = p.streams.iter().find_map(|s| match s {
ProbeStream::Video { width, height, .. } => Some((width, height)),
_ => None
_ => None,
});
let n = file.metadata().await?.len();
let hash = FileStore::hash_file(&mut file).await?;

@@ -175,11 +188,11 @@ impl FileStore {
mime_type: mime_type.to_string(),
width: match video_stream_size {
Some((w, _h)) => Some(*w),
_ => None
_ => None,
},
height: match video_stream_size {
Some((_w, h)) => Some(*h),
_ => None
_ => None,
},
..Default::default()
},
@@ -1,4 +1,5 @@

#[cfg(feature = "analytics")]
pub mod analytics;
pub mod auth;
pub mod cors;
pub mod db;

@@ -8,5 +9,3 @@ pub mod processing;
pub mod routes;
pub mod settings;
pub mod webhook;
#[cfg(feature = "analytics")]
pub mod analytics;
@@ -1,19 +1,24 @@
use std::{fs, ptr, slice};
use std::mem::transmute;
use std::path::PathBuf;
use std::{fs, ptr, slice};

use anyhow::Error;
use candle_core::{D, Device, DType, IndexOp, Tensor};
use candle_core::{DType, Device, IndexOp, Tensor, D};
use candle_nn::VarBuilder;
use candle_transformers::models::vit;
use ffmpeg_sys_the_third::{av_frame_alloc, av_frame_free};
use ffmpeg_sys_the_third::AVColorRange::AVCOL_RANGE_JPEG;
use ffmpeg_sys_the_third::AVColorSpace::AVCOL_SPC_RGB;
use ffmpeg_sys_the_third::AVPixelFormat::{AV_PIX_FMT_RGB24, AV_PIX_FMT_RGBA};
use ffmpeg_sys_the_third::{av_frame_alloc, av_frame_free};

use crate::processing::resize_image;

pub fn label_frame(frame: &mut [u8], width: usize, height: usize, model: PathBuf) -> Result<Vec<String>, Error> {
pub fn label_frame(
frame: &mut [u8],
width: usize,
height: usize,
model: PathBuf,
) -> Result<Vec<String>, Error> {
unsafe {
let device = Device::Cpu;
let image = load_frame_224(frame, width, height)?.to_device(&device)?;

@@ -26,10 +31,12 @@ pub fn label_frame(frame: &mut [u8], width: usize, height: usize, model: PathBuf
.to_vec1::<f32>()?;
let mut prs = prs.iter().enumerate().collect::<Vec<_>>();
prs.sort_by(|(_, p1), (_, p2)| p2.total_cmp(p1));
let res = prs.iter()
let res = prs
.iter()
//.filter(|&(_c, q)| **q >= 0.50f32)
.take(5)
.map(|&(c, _q)| CLASSES[c].to_string()).collect();
.map(|&(c, _q)| CLASSES[c].to_string())
.collect();
Ok(res)
}
}

@@ -37,7 +44,16 @@ pub fn label_frame(frame: &mut [u8], width: usize, height: usize, model: PathBuf
unsafe fn load_frame_224(data: &mut [u8], width: usize, height: usize) -> Result<Tensor, Error> {
let frame = av_frame_alloc();
(*frame).extended_data = &mut data.as_mut_ptr();
(*frame).data = [*(*frame).extended_data, ptr::null_mut(), ptr::null_mut(), ptr::null_mut(), ptr::null_mut(), ptr::null_mut(), ptr::null_mut(), ptr::null_mut()];
(*frame).data = [
*(*frame).extended_data,
ptr::null_mut(),
ptr::null_mut(),
ptr::null_mut(),
ptr::null_mut(),
ptr::null_mut(),
ptr::null_mut(),
ptr::null_mut(),
];
(*frame).linesize = [(width * 4) as libc::c_int, 0, 0, 0, 0, 0, 0, 0];
(*frame).format = transmute(AV_PIX_FMT_RGBA);
(*frame).width = width as libc::c_int;

@@ -45,14 +61,15 @@ unsafe fn load_frame_224(data: &mut [u8], width: usize, height: usize) -> Result
(*frame).color_range = AVCOL_RANGE_JPEG;
(*frame).colorspace = AVCOL_SPC_RGB;

let mut dst_frame = resize_image(frame,
224,
224,
AV_PIX_FMT_RGB24)?;
let pic_slice = slice::from_raw_parts_mut((*dst_frame).data[0], ((*dst_frame).width * (*dst_frame).height * 3) as usize);
let mut dst_frame = resize_image(frame, 224, 224, AV_PIX_FMT_RGB24)?;
let pic_slice = slice::from_raw_parts_mut(
(*dst_frame).data[0],
((*dst_frame).width * (*dst_frame).height * 3) as usize,
);

fs::write("frame_224.raw", &pic_slice)?;
let data = Tensor::from_vec(pic_slice.to_vec(), (224, 224, 3), &Device::Cpu)?.permute((2, 0, 1))?;
let data =
Tensor::from_vec(pic_slice.to_vec(), (224, 224, 3), &Device::Cpu)?.permute((2, 0, 1))?;
let mean = Tensor::new(&[0.485f32, 0.456, 0.406], &Device::Cpu)?.reshape((3, 1, 1))?;
let std = Tensor::new(&[0.229f32, 0.224, 0.225], &Device::Cpu)?.reshape((3, 1, 1))?;
let res = (data.to_dtype(DType::F32)? / 255.)?

@@ -62,7 +79,6 @@ unsafe fn load_frame_224(data: &mut [u8], width: usize, height: usize) -> Result
Ok(res)
}

pub const CLASSES: [&str; 1000] = [
"tench, Tinca tinca",
"goldfish, Carassius auratus",
@@ -3,15 +3,17 @@ use std::path::PathBuf;
use std::ptr;

use anyhow::Error;
use ffmpeg_sys_the_third::{av_frame_alloc, AVFrame, AVPixelFormat, sws_freeContext, sws_getContext, sws_scale_frame};
use ffmpeg_sys_the_third::{
av_frame_alloc, sws_freeContext, sws_getContext, sws_scale_frame, AVFrame, AVPixelFormat,
};

use crate::processing::probe::FFProbe;
use crate::processing::webp::WebpProcessor;

mod webp;
#[cfg(feature = "labels")]
pub mod labeling;
mod probe;
mod webp;

pub struct ProbeResult {
pub streams: Vec<ProbeStream>,

@@ -29,13 +31,13 @@ pub enum ProbeStream {
},
}

pub(crate) enum FileProcessorResult {
pub enum FileProcessorResult {
NewFile(NewFileProcessorResult),
Probe(ProbeResult),
Skip,
}

pub(crate) struct NewFileProcessorResult {
pub struct NewFileProcessorResult {
pub result: PathBuf,
pub mime_type: String,
pub width: usize,

@@ -63,14 +65,24 @@ pub fn probe_file(in_file: PathBuf) -> Result<FileProcessorResult, Error> {
proc.process_file(in_file)
}

unsafe fn resize_image(frame: *const AVFrame, width: usize, height: usize, pix_fmt: AVPixelFormat) -> Result<*mut AVFrame, Error> {
let sws_ctx = sws_getContext((*frame).width,
unsafe fn resize_image(
frame: *const AVFrame,
width: usize,
height: usize,
pix_fmt: AVPixelFormat,
) -> Result<*mut AVFrame, Error> {
let sws_ctx = sws_getContext(
(*frame).width,
(*frame).height,
transmute((*frame).format),
width as libc::c_int,
height as libc::c_int,
pix_fmt,
0, ptr::null_mut(), ptr::null_mut(), ptr::null_mut());
0,
ptr::null_mut(),
ptr::null_mut(),
ptr::null_mut(),
);
if sws_ctx.is_null() {
return Err(Error::msg("Failed to create sws context"));
}
@@ -3,8 +3,11 @@ use std::path::PathBuf;
use std::ptr;

use anyhow::Error;
use ffmpeg_sys_the_third::{avcodec_get_name, avformat_close_input, avformat_find_stream_info, avformat_free_context, avformat_open_input, AVFormatContext};
use ffmpeg_sys_the_third::AVMediaType::{AVMEDIA_TYPE_AUDIO, AVMEDIA_TYPE_VIDEO};
use ffmpeg_sys_the_third::{
avcodec_get_name, avformat_close_input, avformat_find_stream_info, avformat_free_context,
avformat_open_input, AVFormatContext,
};

use crate::processing::{FileProcessorResult, ProbeResult, ProbeStream};

@@ -19,10 +22,13 @@ impl FFProbe {
pub fn process_file(self, in_file: PathBuf) -> Result<FileProcessorResult, Error> {
unsafe {
let mut dec_fmt: *mut AVFormatContext = ptr::null_mut();
let ret = avformat_open_input(&mut dec_fmt,
format!("{}\0", in_file.into_os_string().into_string().unwrap()).as_ptr() as *const libc::c_char,
let ret = avformat_open_input(
&mut dec_fmt,
format!("{}\0", in_file.into_os_string().into_string().unwrap()).as_ptr()
as *const libc::c_char,
ptr::null_mut(),
ptr::null_mut());
ptr::null_mut(),
);
if ret < 0 {
// input might not be media
return Ok(FileProcessorResult::Skip);

@@ -38,7 +44,9 @@ impl FFProbe {
while ptr_x < (*dec_fmt).nb_streams {
let ptr = *(*dec_fmt).streams.add(ptr_x as usize);
let codec_par = (*ptr).codecpar;
let codec = CStr::from_ptr(avcodec_get_name((*codec_par).codec_id)).to_str()?.to_string();
let codec = CStr::from_ptr(avcodec_get_name((*codec_par).codec_id))
.to_str()?
.to_string();
if (*codec_par).codec_type == AVMEDIA_TYPE_VIDEO {
stream_info.push(ProbeStream::Video {
width: (*codec_par).width as u32,

@@ -58,7 +66,7 @@ impl FFProbe {
avformat_free_context(dec_fmt);

Ok(FileProcessorResult::Probe(ProbeResult {
streams: stream_info
streams: stream_info,
}))
}
}
@@ -1,15 +1,27 @@
use std::{ptr, slice};
use std::collections::HashMap;
use std::mem::transmute;
use std::path::PathBuf;
use std::{ptr, slice};

use anyhow::Error;
use ffmpeg_sys_the_third::{AV_CODEC_FLAG_GLOBAL_HEADER, av_dump_format, av_find_best_stream, av_frame_alloc, av_frame_copy_props, av_frame_free, av_guess_format, av_interleaved_write_frame, av_packet_alloc, av_packet_free, av_packet_rescale_ts, av_packet_unref, AV_PROFILE_H264_HIGH, av_read_frame, av_write_trailer, AVCodec, avcodec_alloc_context3, avcodec_find_encoder, avcodec_free_context, avcodec_open2, avcodec_parameters_from_context, avcodec_parameters_to_context, avcodec_receive_frame, avcodec_receive_packet, avcodec_send_frame, avcodec_send_packet, AVCodecContext, AVCodecID, AVERROR, AVERROR_EOF, AVERROR_STREAM_NOT_FOUND, AVFMT_GLOBALHEADER, avformat_alloc_output_context2, avformat_close_input, avformat_find_stream_info, avformat_free_context, avformat_init_output, avformat_new_stream, avformat_open_input, avformat_write_header, AVFormatContext, AVIO_FLAG_WRITE, avio_open, AVMediaType, AVPacket, sws_freeContext, sws_getContext, sws_scale_frame, SwsContext};
use ffmpeg_sys_the_third::AVMediaType::{AVMEDIA_TYPE_AUDIO, AVMEDIA_TYPE_VIDEO};
use ffmpeg_sys_the_third::AVPixelFormat::{AV_PIX_FMT_RGBA, AV_PIX_FMT_YUV420P};
use ffmpeg_sys_the_third::{
av_dump_format, av_find_best_stream, av_frame_alloc, av_frame_copy_props, av_frame_free,
av_guess_format, av_interleaved_write_frame, av_packet_alloc, av_packet_free,
av_packet_rescale_ts, av_packet_unref, av_read_frame, av_write_trailer, avcodec_alloc_context3,
avcodec_find_encoder, avcodec_free_context, avcodec_open2, avcodec_parameters_from_context,
avcodec_parameters_to_context, avcodec_receive_frame, avcodec_receive_packet,
avcodec_send_frame, avcodec_send_packet, avformat_alloc_output_context2, avformat_close_input,
avformat_find_stream_info, avformat_free_context, avformat_init_output, avformat_new_stream,
avformat_open_input, avformat_write_header, avio_open, sws_freeContext, sws_getContext,
sws_scale_frame, AVCodec, AVCodecContext, AVCodecID, AVFormatContext, AVMediaType, AVPacket,
SwsContext, AVERROR, AVERROR_EOF, AVERROR_STREAM_NOT_FOUND, AVFMT_GLOBALHEADER,
AVIO_FLAG_WRITE, AV_CODEC_FLAG_GLOBAL_HEADER, AV_PROFILE_H264_HIGH,
};
use libc::EAGAIN;

use crate::processing::{FileProcessorResult, NewFileProcessorResult, resize_image};
use crate::processing::{resize_image, FileProcessorResult, NewFileProcessorResult};

/// Image converter to WEBP
pub struct WebpProcessor {

@@ -39,18 +51,26 @@ impl WebpProcessor {
}
}

unsafe fn transcode_pkt(&mut self, pkt: *mut AVPacket, in_fmt: *mut AVFormatContext, out_fmt: *mut AVFormatContext) -> Result<(), Error> {
unsafe fn transcode_pkt(
&mut self,
pkt: *mut AVPacket,
in_fmt: *mut AVFormatContext,
out_fmt: *mut AVFormatContext,
) -> Result<(), Error> {
let idx = (*pkt).stream_index as usize;
let out_idx = match self.stream_map.get(&idx) {
Some(i) => i,
None => return Ok(())
None => return Ok(()),
};
let in_stream = *(*in_fmt).streams.add(idx);
let out_stream = *(*out_fmt).streams.add(*out_idx);
av_packet_rescale_ts(pkt, (*in_stream).time_base, (*out_stream).time_base);

let dec_ctx = self.decoders.get_mut(&idx).expect("Missing decoder config");
let enc_ctx = self.encoders.get_mut(out_idx).expect("Missing encoder config");
let enc_ctx = self
.encoders
.get_mut(out_idx)
.expect("Missing encoder config");

let ret = avcodec_send_packet(*dec_ctx, pkt);
if ret < 0 {

@@ -76,16 +96,21 @@ impl WebpProcessor {
}
frame_out
}
None => frame
None => frame,
};

// take the first frame as "image"
if (*(*out_stream).codecpar).codec_type == AVMEDIA_TYPE_VIDEO && self.image.is_none() {
let mut dst_frame = resize_image(frame_out,
let mut dst_frame = resize_image(
frame_out,
(*frame_out).width as usize,
(*frame_out).height as usize,
AV_PIX_FMT_RGBA)?;
let pic_slice = slice::from_raw_parts_mut((*dst_frame).data[0], ((*dst_frame).width * (*dst_frame).height * 4) as usize);
AV_PIX_FMT_RGBA,
)?;
let pic_slice = slice::from_raw_parts_mut(
(*dst_frame).data[0],
((*dst_frame).width * (*dst_frame).height * 4) as usize,
);
self.image = Some(pic_slice.to_vec());
av_frame_free(&mut dst_frame);
}

@@ -115,7 +140,11 @@ impl WebpProcessor {
Ok(())
}

unsafe fn setup_decoder(&mut self, in_fmt: *mut AVFormatContext, av_type: AVMediaType) -> Result<i32, Error> {
unsafe fn setup_decoder(
&mut self,
in_fmt: *mut AVFormatContext,
av_type: AVMediaType,
) -> Result<i32, Error> {
let mut decoder: *const AVCodec = ptr::null_mut();
let stream_idx = av_find_best_stream(in_fmt, av_type, -1, -1, &mut decoder, 0);
if stream_idx == AVERROR_STREAM_NOT_FOUND {

@@ -141,13 +170,18 @@ impl WebpProcessor {
Ok(stream_idx)
}

unsafe fn setup_encoder(&mut self, in_fmt: *mut AVFormatContext, out_fmt: *mut AVFormatContext, in_idx: i32) -> Result<(), Error> {
unsafe fn setup_encoder(
&mut self,
in_fmt: *mut AVFormatContext,
out_fmt: *mut AVFormatContext,
in_idx: i32,
) -> Result<(), Error> {
let in_stream = *(*in_fmt).streams.add(in_idx as usize);
let stream_type = (*(*in_stream).codecpar).codec_type;
let out_codec = match stream_type {
AVMEDIA_TYPE_VIDEO => avcodec_find_encoder((*(*out_fmt).oformat).video_codec),
AVMEDIA_TYPE_AUDIO => avcodec_find_encoder((*(*out_fmt).oformat).audio_codec),
_ => ptr::null_mut()
_ => ptr::null_mut(),
};
// not mapped ignore
if out_codec.is_null() {

@@ -203,15 +237,21 @@ impl WebpProcessor {

let out_idx = (*stream).index as usize;
// setup scaler if pix_fmt doesnt match
if stream_type == AVMEDIA_TYPE_VIDEO &&
(*(*in_stream).codecpar).format != (*(*stream).codecpar).format {
let sws_ctx = sws_getContext((*(*in_stream).codecpar).width,
if stream_type == AVMEDIA_TYPE_VIDEO
&& (*(*in_stream).codecpar).format != (*(*stream).codecpar).format
{
let sws_ctx = sws_getContext(
(*(*in_stream).codecpar).width,
(*(*in_stream).codecpar).height,
transmute((*(*in_stream).codecpar).format),
(*(*stream).codecpar).width,
(*(*stream).codecpar).height,
transmute((*(*stream).codecpar).format),
0, ptr::null_mut(), ptr::null_mut(), ptr::null_mut());
0,
ptr::null_mut(),
ptr::null_mut(),
ptr::null_mut(),
);
if sws_ctx.is_null() {
return Err(Error::msg("Failed to create sws context"));
}

@@ -262,16 +302,23 @@ impl WebpProcessor {
Ok(())
}

pub fn process_file(mut self, in_file: PathBuf, mime_type: &str) -> Result<FileProcessorResult, Error> {
pub fn process_file(
mut self,
in_file: PathBuf,
mime_type: &str,
) -> Result<FileProcessorResult, Error> {
unsafe {
let mut out_path = in_file.clone();
out_path.set_extension("_compressed");

let mut dec_fmt: *mut AVFormatContext = ptr::null_mut();
let ret = avformat_open_input(&mut dec_fmt,
format!("{}\0", in_file.into_os_string().into_string().unwrap()).as_ptr() as *const libc::c_char,
let ret = avformat_open_input(
&mut dec_fmt,
format!("{}\0", in_file.into_os_string().into_string().unwrap()).as_ptr()
as *const libc::c_char,
ptr::null_mut(),
ptr::null_mut());
ptr::null_mut(),
);
if ret < 0 {
return Err(Error::msg("Failed to create input context"));
}

@@ -284,23 +331,32 @@ impl WebpProcessor {
let in_audio_stream = self.setup_decoder(dec_fmt, AVMEDIA_TYPE_AUDIO)?;

let out_format = if mime_type.starts_with("image/") {
av_guess_format("webp\0".as_ptr() as *const libc::c_char,
av_guess_format(
"webp\0".as_ptr() as *const libc::c_char,
ptr::null_mut(),
ptr::null_mut())
ptr::null_mut(),
)
} else if mime_type.starts_with("video/") {
av_guess_format("matroska\0".as_ptr() as *const libc::c_char,
av_guess_format(
"matroska\0".as_ptr() as *const libc::c_char,
ptr::null_mut(),
ptr::null_mut())
ptr::null_mut(),
)
} else {
return Err(Error::msg("Mime type not supported"));
};

let out_filename = format!("{}\0", out_path.clone().into_os_string().into_string().unwrap());
let out_filename = format!(
"{}\0",
out_path.clone().into_os_string().into_string().unwrap()
);
let mut out_fmt: *mut AVFormatContext = ptr::null_mut();
let ret = avformat_alloc_output_context2(&mut out_fmt,
let ret = avformat_alloc_output_context2(
&mut out_fmt,
out_format,
ptr::null_mut(),
out_filename.as_ptr() as *const libc::c_char);
out_filename.as_ptr() as *const libc::c_char,
);
if ret < 0 {
return Err(Error::msg("Failed to create output context"));
}

@@ -354,8 +410,7 @@ impl WebpProcessor {
avformat_free_context(dec_fmt);
avformat_free_context(out_fmt);

Ok(FileProcessorResult::NewFile(
NewFileProcessorResult {
Ok(FileProcessorResult::NewFile(NewFileProcessorResult {
result: out_path,
mime_type: "image/webp".to_string(),
width: self.width.unwrap_or(0),
@@ -1,11 +1,11 @@
use crate::auth::nip98::Nip98Auth;
use crate::db::{Database, FileUpload, User};
use crate::routes::{Nip94Event, PagedResult};
use crate::settings::Settings;
use rocket::serde::json::Json;
use rocket::serde::Serialize;
use rocket::{routes, Responder, Route, State};
use sqlx::{Error, Row};
use crate::settings::Settings;

pub fn admin_routes() -> Vec<Route> {
routes![admin_list_files, admin_get_self]

@@ -13,8 +13,7 @@ pub fn admin_routes() -> Vec<Route> {

#[derive(Serialize, Default)]
#[serde(crate = "rocket::serde")]
struct AdminResponseBase<T>
{
struct AdminResponseBase<T> {
pub status: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub message: Option<String>,

@@ -23,8 +22,7 @@ struct AdminResponseBase<T>
}

#[derive(Responder)]
enum AdminResponse<T>
{
enum AdminResponse<T> {
#[response(status = 500)]
GenericError(Json<AdminResponseBase<T>>),

@@ -32,8 +30,7 @@ enum AdminResponse<T>
Ok(Json<AdminResponseBase<T>>),
}

impl<T> AdminResponse<T>
{
impl<T> AdminResponse<T> {
pub fn error(msg: &str) -> Self {
Self::GenericError(Json(AdminResponseBase {
status: "error".to_string(),

@@ -52,16 +49,11 @@ impl<T> AdminResponse<T>
}

#[rocket::get("/self")]
async fn admin_get_self(
auth: Nip98Auth,
db: &State<Database>,
) -> AdminResponse<User> {
async fn admin_get_self(auth: Nip98Auth, db: &State<Database>) -> AdminResponse<User> {
let pubkey_vec = auth.event.pubkey.to_bytes().to_vec();
match db.get_user(&pubkey_vec).await {
Ok(user) => AdminResponse::success(user),
Err(_) => {
AdminResponse::error("User not found")
}
Err(_) => AdminResponse::error("User not found"),
}
}

@@ -78,18 +70,13 @@ async fn admin_list_files(

let user = match db.get_user(&pubkey_vec).await {
Ok(user) => user,
Err(_) => {
return AdminResponse::error("User not found")
}
Err(_) => return AdminResponse::error("User not found"),
};

if !user.is_admin {
return AdminResponse::error("User is not an admin");
}
match db
.list_all_files(page * server_count, server_count)
.await
{
match db.list_all_files(page * server_count, server_count).await {
Ok((files, count)) => AdminResponse::success(PagedResult {
count: files.len() as u32,
page,

@@ -104,7 +91,11 @@ async fn admin_list_files(
}

impl Database {
pub async fn list_all_files(&self, offset: u32, limit: u32) -> Result<(Vec<FileUpload>, i64), Error> {
pub async fn list_all_files(
&self,
offset: u32,
limit: u32,
) -> Result<(Vec<FileUpload>, i64), Error> {
let results: Vec<FileUpload> = sqlx::query_as(
"select u.* \
from uploads u \

@@ -115,8 +106,7 @@ impl Database {
.bind(offset)
.fetch_all(&self.pool)
.await?;
let count: i64 = sqlx::query(
"select count(u.id) from uploads u")
let count: i64 = sqlx::query("select count(u.id) from uploads u")
.fetch_one(&self.pool)
.await?
.try_get(0)?;
@@ -1,3 +1,4 @@
use std::collections::HashMap;
use std::fs;

use log::error;

@@ -17,7 +18,6 @@ use crate::routes::{delete_file, Nip94Event};
use crate::settings::Settings;
use crate::webhook::Webhook;

#[derive(Debug, Clone, Serialize)]
#[serde(crate = "rocket::serde")]
pub struct BlobDescriptor {

@@ -28,7 +28,7 @@ pub struct BlobDescriptor {
pub mime_type: Option<String>,
pub created: u64,
#[serde(rename = "nip94", skip_serializing_if = "Option::is_none")]
pub nip94: Option<Nip94Event>,
pub nip94: Option<HashMap<String, String>>,
}

impl BlobDescriptor {

@@ -40,7 +40,13 @@ impl BlobDescriptor {
size: value.size,
mime_type: Some(value.mime_type.clone()),
created: value.created.timestamp() as u64,
nip94: Some(Nip94Event::from_upload(settings, value)),
nip94: Some(
Nip94Event::from_upload(settings, value)
.tags
.iter()
.map(|r| (r[0].clone(), r[1].clone()))
.collect(),
),
}
}
}
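For context (not part of the diff): the `nip94` field now carries the NIP-94 metadata as a flat name-to-value map rather than a full event, which is what the `.tags ... .collect()` conversion above produces, assuming the tags come out as pairs of strings. A standalone sketch of that conversion, with made-up tag values:

```rust
use std::collections::HashMap;

// Collapse NIP-94 tag pairs such as ["m", "image/webp"] into a name -> value map,
// mirroring the map(|r| (r[0].clone(), r[1].clone())) conversion in from_upload.
fn tags_to_map(tags: &[Vec<String>]) -> HashMap<String, String> {
    tags.iter()
        .filter(|t| t.len() >= 2)
        .map(|t| (t[0].clone(), t[1].clone()))
        .collect()
}

fn main() {
    let tags = vec![
        vec!["m".to_string(), "image/webp".to_string()],
        // made-up URL for illustration only
        vec!["url".to_string(), "https://example.com/abc.webp".to_string()],
    ];
    let map = tags_to_map(&tags);
    assert_eq!(map.get("m").map(String::as_str), Some("image/webp"));
}
```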
@@ -51,7 +57,7 @@ struct BlossomError {
}

pub fn blossom_routes() -> Vec<Route> {
routes![delete_blob, upload, list_files, upload_head]
routes![delete_blob, upload, list_files, upload_head, upload_media]
}

impl BlossomError {

@@ -102,8 +108,7 @@ impl<'r> Responder<'r, 'static> for BlossomHead {

fn check_method(event: &nostr::Event, method: &str) -> bool {
if let Some(t) = event.tags.iter().find_map(|t| {
if t.kind() == TagKind::SingleLetter(SingleLetterTag::lowercase(Alphabet::T))
{
if t.kind() == TagKind::SingleLetter(SingleLetterTag::lowercase(Alphabet::T)) {
t.content()
} else {
None

@@ -127,6 +132,72 @@ async fn delete_blob(
}
}

#[rocket::get("/list/<pubkey>")]
async fn list_files(
db: &State<Database>,
settings: &State<Settings>,
pubkey: &str,
) -> BlossomResponse {
let id = if let Ok(i) = hex::decode(pubkey) {
i
} else {
return BlossomResponse::error("invalid pubkey");
};
match db.list_files(&id, 0, 10_000).await {
Ok((files, _count)) => BlossomResponse::BlobDescriptorList(Json(
files
.iter()
.map(|f| BlobDescriptor::from_upload(&settings, f))
.collect(),
)),
Err(e) => BlossomResponse::error(format!("Could not list files: {}", e)),
}
}

#[rocket::head("/upload")]
async fn upload_head(auth: BlossomAuth, settings: &State<Settings>) -> BlossomHead {
if !check_method(&auth.event, "upload") {
return BlossomHead {
msg: Some("Invalid auth method tag"),
};
}

if let Some(z) = auth.x_content_length {
if z > settings.max_upload_bytes {
return BlossomHead {
msg: Some("File too large"),
};
}
} else {
return BlossomHead {
msg: Some("Missing x-content-length header"),
};
}

if let None = auth.x_sha_256 {
return BlossomHead {
msg: Some("Missing x-sha-256 header"),
};
}

if let None = auth.x_content_type {
return BlossomHead {
msg: Some("Missing x-content-type header"),
};
}

// check whitelist
if let Some(wl) = &settings.whitelist {
if !wl.contains(&auth.event.pubkey.to_hex()) {
return BlossomHead {
msg: Some("Not on whitelist"),
};
}
}

BlossomHead { msg: None }
}

#[rocket::put("/upload", data = "<data>")]
async fn upload(
auth: BlossomAuth,

@@ -136,7 +207,32 @@ async fn upload(
webhook: &State<Option<Webhook>>,
data: Data<'_>,
) -> BlossomResponse {
if !check_method(&auth.event, "upload") {
process_upload("upload", false, auth, fs, db, settings, webhook, data).await
}

#[rocket::put("/media", data = "<data>")]
async fn upload_media(
auth: BlossomAuth,
fs: &State<FileStore>,
db: &State<Database>,
settings: &State<Settings>,
webhook: &State<Option<Webhook>>,
data: Data<'_>,
) -> BlossomResponse {
process_upload("media", true, auth, fs, db, settings, webhook, data).await
}

async fn process_upload(
method: &str,
compress: bool,
auth: BlossomAuth,
fs: &State<FileStore>,
db: &State<Database>,
settings: &State<Settings>,
webhook: &State<Option<Webhook>>,
data: Data<'_>,
) -> BlossomResponse {
if !check_method(&auth.event, method) {
return BlossomResponse::error("Invalid request method tag");
}

@@ -173,7 +269,7 @@ async fn upload(
.put(
data.open(ByteUnit::from(settings.max_upload_bytes)),
&mime_type,
false,
compress,
)
.await
{

@@ -218,7 +314,7 @@ async fn upload(
} else {
BlossomResponse::BlobDescriptor(Json(BlobDescriptor::from_upload(
&settings,
&blob.upload
&blob.upload,
)))
}
}

@@ -228,74 +324,3 @@ async fn upload(
}
}
}

#[rocket::get("/list/<pubkey>")]
async fn list_files(
db: &State<Database>,
settings: &State<Settings>,
pubkey: &str,
) -> BlossomResponse {
let id = if let Ok(i) = hex::decode(pubkey) {
i
} else {
return BlossomResponse::error("invalid pubkey");
};
match db.list_files(&id, 0, 10_000).await {
Ok((files, _count)) => BlossomResponse::BlobDescriptorList(Json(
files
.iter()
.map(|f| BlobDescriptor::from_upload(&settings, f))
.collect(),
)),
Err(e) => BlossomResponse::error(format!("Could not list files: {}", e)),
}
}

#[rocket::head("/upload")]
async fn upload_head(
auth: BlossomAuth,
settings: &State<Settings>,
) -> BlossomHead {
if !check_method(&auth.event, "upload") {
return BlossomHead {
msg: Some("Invalid auth method tag")
};
}

if let Some(z) = auth.x_content_length {
if z > settings.max_upload_bytes {
return BlossomHead {
msg: Some("File too large")
};
}
} else {
return BlossomHead {
msg: Some("Missing x-content-length header")
};
}

if let None = auth.x_sha_256 {
return BlossomHead {
msg: Some("Missing x-sha-256 header")
};
}

if let None = auth.x_content_type {
return BlossomHead {
msg: Some("Missing x-content-type header")
};
}

// check whitelist
if let Some(wl) = &settings.whitelist {
if !wl.contains(&auth.event.pubkey.to_hex()) {
return BlossomHead {
msg: Some("Not on whitelist")
};
}
}

BlossomHead {
msg: None
}
}
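To round out the new BUD-05 flow, here is a hedged client-side sketch (not part of this repository) of uploading through the new `/media` route. It assumes the Blossom `Authorization: Nostr <base64 event>` scheme checked by `BlossomAuth`, and that the signed authorization event (with a `t` tag of "media" and an `expiration` tag) has already been prepared and base64-encoded elsewhere.

```rust
use reqwest::Client;

// Hypothetical client: PUT the raw bytes to /media; the server transcodes the upload
// and answers with a BlobDescriptor (sha256, size, url, mime type, nip94 map).
async fn put_media(
    server: &str,         // e.g. "http://localhost:8000" (assumed)
    auth_event_b64: &str, // signed, base64-encoded authorization event (assumed prepared)
    bytes: Vec<u8>,
    mime_type: &str,
) -> Result<String, reqwest::Error> {
    let res = Client::new()
        .put(format!("{}/media", server))
        .header("Authorization", format!("Nostr {}", auth_event_b64))
        .header("Content-Type", mime_type.to_string())
        .body(bytes)
        .send()
        .await?;
    res.text().await
}
```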
@@ -124,7 +124,7 @@ async fn delete_file(

let this_owner = match owners.iter().find(|o| o.pubkey.eq(&pubkey_vec)) {
Some(o) => o,
None => return Err(Error::msg("You dont own this file, you cannot delete it"))
None => return Err(Error::msg("You dont own this file, you cannot delete it")),
};
if let Err(e) = db.delete_file_owner(&id, this_owner.id).await {
return Err(Error::msg(format!("Failed to delete (db): {}", e)));
@@ -126,6 +126,7 @@ struct Nip96Form<'r> {
alt: Option<&'r str>,
caption: Option<&'r str>,
media_type: Option<&'r str>,
#[allow(dead_code)]
content_type: Option<&'r str>,
no_transform: Option<bool>,
}
@@ -16,9 +16,7 @@ struct WebhookRequest<T> {

impl Webhook {
pub fn new(url: String) -> Self {
Self {
url
}
Self { url }
}

/// Ask webhook api if this file can be accepted