feat: void-cat redirects
commit b90995f07f
parent c86631423d
@@ -18,5 +18,5 @@ steps:
 - dockerd &
 - docker login -u kieran -p $TOKEN git.v0l.io
 - docker login -u voidic -p $TOKEN_DOCKER
-- docker buildx build --push -t git.v0l.io/kieran/route96:latest -t voidic/route96:latest --build-arg FEATURES="labels" .
+- docker buildx build --push -t git.v0l.io/kieran/route96:latest -t voidic/route96:latest --build-arg FEATURES="labels,void-cat-redirects" .
 - kill $(cat /var/run/docker.pid)
@@ -23,6 +23,7 @@ blossom = []
 bin-void-cat-migrate = ["dep:sqlx-postgres", "dep:clap", "dep:clap_derive"]
 torrent-v2 = []
 analytics = []
+void-cat-redirects = ["dep:sqlx-postgres"]

 [dependencies]
 log = "0.4.21"
@@ -17,6 +17,8 @@ use route96::filesystem::FileStore;
 use route96::routes;
 use route96::routes::{get_blob, head_blob, root};
 use route96::settings::Settings;
+#[cfg(feature = "void-cat-redirects")]
+use route96::void_db::VoidCatDb;
 use route96::webhook::Webhook;

 #[rocket::main]
@@ -79,6 +81,15 @@ async fn main() -> Result<(), Error> {
     {
         rocket = rocket.mount("/", routes::nip96_routes());
     }
+    #[cfg(feature = "void-cat-redirects")]
+    {
+        if let Some(conn) = settings.void_cat_database {
+            let vdb = VoidCatDb::connect(&conn).await?;
+            rocket = rocket
+                .mount("/", routes![routes::void_cat_redirect])
+                .manage(vdb);
+        }
+    }
     if let Err(e) = rocket.launch().await {
         error!("Rocker error {}", e);
         Err(Error::from(e))
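The block above is compiled only with the void-cat-redirects feature and activates only when void_cat_database is set; the VoidCatDb handle registered via .manage() is what the redirect handler later receives as &State<VoidCatDb>. A minimal, self-contained sketch of that Rocket managed-state pattern follows (the names AppDb and demo are illustrative, not from route96):

use rocket::{get, routes, State};

// A value registered with .manage() is stored once per Rocket instance...
struct AppDb {
    conn_string: String, // stand-in for a real connection pool
}

// ...and injected into any handler that declares &State<AppDb>.
#[get("/demo")]
fn demo(db: &State<AppDb>) -> String {
    format!("connected via {}", db.conn_string)
}

#[rocket::main]
async fn main() -> Result<(), rocket::Error> {
    let _rocket = rocket::build()
        .manage(AppDb { conn_string: "postgres://localhost/void".into() })
        .mount("/", routes![demo])
        .launch()
        .await?;
    Ok(())
}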
@@ -7,6 +7,7 @@ use nostr::bitcoin::base58;
 use route96::db::{Database, FileUpload};
 use route96::filesystem::FileStore;
 use route96::settings::Settings;
+use route96::void_db::{VoidCatDb, VoidFile};
 use sqlx::FromRow;
 use sqlx_postgres::{PgPool, PgPoolOptions};
 use std::path::PathBuf;
@@ -149,62 +150,3 @@ async fn migrate_file(
     db.add_file(&fu, uid).await?;
     Ok(())
 }
-
-#[derive(FromRow)]
-struct VoidFile {
-    #[sqlx(rename = "Id")]
-    pub id: Uuid,
-    #[sqlx(rename = "Name")]
-    pub name: Option<String>,
-    #[sqlx(rename = "Size")]
-    pub size: i64,
-    #[sqlx(rename = "Uploaded")]
-    pub uploaded: DateTime<Utc>,
-    #[sqlx(rename = "Description")]
-    pub description: Option<String>,
-    #[sqlx(rename = "MimeType")]
-    pub mime_type: String,
-    #[sqlx(rename = "Digest")]
-    pub digest: String,
-    #[sqlx(rename = "MediaDimensions")]
-    pub media_dimensions: Option<String>,
-    #[sqlx(rename = "Email")]
-    pub email: String,
-}
-
-impl VoidFile {
-    fn map_to_path(&self) -> PathBuf {
-        let id_str = self.id.as_hyphenated().to_string();
-        PathBuf::new()
-            .join("files-v2/")
-            .join(&id_str[..2])
-            .join(&id_str[2..4])
-            .join(&id_str)
-    }
-}
-
-struct VoidCatDb {
-    pub pool: PgPool,
-}
-
-impl VoidCatDb {
-    async fn connect(conn: &str) -> Result<Self, sqlx::Error> {
-        let pool = PgPoolOptions::new()
-            .max_connections(50)
-            .connect(conn)
-            .await?;
-        Ok(Self { pool })
-    }
-
-    async fn list_files(&self, page: usize) -> Result<Vec<VoidFile>, sqlx::Error> {
-        let page_size = 100;
-        sqlx::query_as(format!("select f.\"Id\", f.\"Name\", CAST(f.\"Size\" as BIGINT) \"Size\", f.\"Uploaded\", f.\"Description\", f.\"MimeType\", f.\"Digest\", f.\"MediaDimensions\", u.\"Email\"
-            from \"Files\" f, \"UserFiles\" uf, \"Users\" u
-            where f.\"Id\" = uf.\"FileId\"
-            and uf.\"UserId\" = u.\"Id\"
-            and u.\"AuthType\" = 4\
-            offset {} limit {}", page * page_size, page_size).as_str())
-            .fetch_all(&self.pool)
-            .await
-    }
-}
@@ -101,7 +101,7 @@ impl FileStore {
         if compress {
             let start = SystemTime::now();
             let proc_result = compress_file(tmp_path.clone(), mime_type)?;
-            if let FileProcessorResult::NewFile(new_temp) = proc_result {
+            if let FileProcessorResult::NewFile(mut new_temp) = proc_result {
                 let old_size = tmp_path.metadata()?.len();
                 let new_size = new_temp.result.metadata()?.len();
                 let time_compress = SystemTime::now().duration_since(start)?;
@@ -8,4 +8,6 @@ pub mod filesystem;
 pub mod processing;
 pub mod routes;
 pub mod settings;
+#[cfg(any(feature = "void-cat-redirects", feature = "bin-void-cat-migrate"))]
+pub mod void_db;
 pub mod webhook;
@@ -10,10 +10,14 @@ pub use crate::routes::blossom::blossom_routes;
 #[cfg(feature = "nip96")]
 pub use crate::routes::nip96::nip96_routes;
 use crate::settings::Settings;
+#[cfg(feature = "void-cat-redirects")]
+use crate::void_db::VoidCatDb;
 use anyhow::Error;
 use nostr::Event;
 use rocket::fs::NamedFile;
 use rocket::http::{ContentType, Header, Status};
+#[cfg(feature = "void-cat-redirects")]
+use rocket::response::Redirect;
 use rocket::response::Responder;
 use rocket::serde::Serialize;
 use rocket::{Request, State};
@@ -207,3 +211,27 @@ pub async fn head_blob(sha256: &str, fs: &State<FileStore>) -> Status {
         Status::NotFound
     }
 }
+
+#[cfg(feature = "void-cat-redirects")]
+#[rocket::get("/d/<id>")]
+pub async fn void_cat_redirect(
+    id: &str,
+    settings: &State<Settings>,
+    vdb: &State<VoidCatDb>,
+) -> Option<Redirect> {
+    let id = if id.contains(".") {
+        id.split('.').next().unwrap()
+    } else {
+        id
+    };
+    let uuid =
+        uuid::Uuid::from_slice_le(nostr::bitcoin::base58::decode(id).unwrap().as_slice()).unwrap();
+    if let Ok(Some(d)) = vdb.get_digest(&uuid).await {
+        Some(Redirect::permanent(format!(
+            "{}/{}",
+            &settings.public_url, &d
+        )))
+    } else {
+        None
+    }
+}
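The new handler serves legacy void.cat links of the form /d/<id> or /d/<id>.<ext>: it strips any extension, base58-decodes the id into the little-endian bytes of a UUID, looks up that file's digest via VoidCatDb::get_digest, and answers with a permanent redirect to <public_url>/<digest>. The decoding step is restated below as a hedged sketch that returns None instead of unwrapping (the committed handler panics on a malformed id; the graceful variant is an assumption, not part of this commit):

use uuid::Uuid;

fn decode_void_cat_id(id: &str) -> Option<Uuid> {
    // "abc123.webp" -> "abc123": drop a trailing file extension if present
    let id = id.split('.').next()?;
    // void.cat ids are the UUID's little-endian bytes, base58-encoded
    let bytes = nostr::bitcoin::base58::decode(id).ok()?;
    Uuid::from_slice_le(&bytes).ok()
}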
@@ -29,4 +29,7 @@ pub struct Settings {

     /// Analytics tracking
     pub plausible_url: Option<String>,
+
+    #[cfg(feature = "void-cat-redirects")]
+    pub void_cat_database: Option<String>,
 }
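Because the new field sits behind #[cfg(feature = "void-cat-redirects")], the void_cat_database key only exists in builds compiled with that feature, and a Postgres connection string is the expected value (inferred from the sqlx-postgres dependency, not stated in the commit). A generic sketch of the feature-gated-field pattern, not route96's actual Settings definition:

// cfg on a field is resolved before the derive runs, so the key simply
// disappears from deserialization when the feature is off.
#[derive(Debug, serde::Deserialize)]
pub struct ExampleSettings {
    pub public_url: String,

    #[cfg(feature = "void-cat-redirects")]
    pub void_cat_database: Option<String>, // e.g. "postgres://user:pass@host/voidcat" (assumed)
}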
src/void_db.rs (new file, 71 lines)
@@ -0,0 +1,71 @@
+use chrono::{DateTime, Utc};
+use sqlx::FromRow;
+use sqlx_postgres::{PgPool, PgPoolOptions};
+use std::path::PathBuf;
+use uuid::Uuid;
+
+#[derive(FromRow)]
+pub struct VoidFile {
+    #[sqlx(rename = "Id")]
+    pub id: Uuid,
+    #[sqlx(rename = "Name")]
+    pub name: Option<String>,
+    #[sqlx(rename = "Size")]
+    pub size: i64,
+    #[sqlx(rename = "Uploaded")]
+    pub uploaded: DateTime<Utc>,
+    #[sqlx(rename = "Description")]
+    pub description: Option<String>,
+    #[sqlx(rename = "MimeType")]
+    pub mime_type: String,
+    #[sqlx(rename = "Digest")]
+    pub digest: String,
+    #[sqlx(rename = "MediaDimensions")]
+    pub media_dimensions: Option<String>,
+    #[sqlx(rename = "Email")]
+    pub email: String,
+}
+
+impl VoidFile {
+    pub fn map_to_path(&self) -> PathBuf {
+        let id_str = self.id.as_hyphenated().to_string();
+        PathBuf::new()
+            .join("files-v2/")
+            .join(&id_str[..2])
+            .join(&id_str[2..4])
+            .join(&id_str)
+    }
+}
+
+pub struct VoidCatDb {
+    pub pool: PgPool,
+}
+
+impl VoidCatDb {
+    pub async fn connect(conn: &str) -> Result<Self, sqlx::Error> {
+        let pool = PgPoolOptions::new()
+            .max_connections(50)
+            .connect(conn)
+            .await?;
+        Ok(Self { pool })
+    }
+
+    pub async fn list_files(&self, page: usize) -> Result<Vec<VoidFile>, sqlx::Error> {
+        let page_size = 100;
+        sqlx::query_as(format!("select f.\"Id\", f.\"Name\", CAST(f.\"Size\" as BIGINT) \"Size\", f.\"Uploaded\", f.\"Description\", f.\"MimeType\", f.\"Digest\", f.\"MediaDimensions\", u.\"Email\"
+            from \"Files\" f, \"UserFiles\" uf, \"Users\" u
+            where f.\"Id\" = uf.\"FileId\"
+            and uf.\"UserId\" = u.\"Id\"
+            and u.\"AuthType\" = 4\
+            offset {} limit {}", page * page_size, page_size).as_str())
+            .fetch_all(&self.pool)
+            .await
+    }
+
+    pub async fn get_digest(&self, file_id: &Uuid) -> Result<Option<String>, sqlx::Error> {
+        sqlx::query_scalar("select f.\"Digest\" from \"Files\" f where f.\"Id\" = $1")
+            .bind(file_id)
+            .fetch_optional(&self.pool)
+            .await
+    }
+}
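A hedged usage sketch for the new module (the connection string and the tokio entry point are placeholders; void_db only compiles when void-cat-redirects or bin-void-cat-migrate is enabled):

use route96::void_db::VoidCatDb;

#[tokio::main]
async fn main() -> Result<(), sqlx::Error> {
    let db = VoidCatDb::connect("postgres://postgres:postgres@localhost/void").await?;

    // list_files pages in blocks of 100; page 0 is the first block
    let first_page = db.list_files(0).await?;
    for f in &first_page {
        println!("{} -> {}", f.id, f.map_to_path().display());
    }

    // resolve a single file's SHA-256 digest, as the redirect route does
    if let Some(f) = first_page.first() {
        println!("digest: {:?}", db.get_digest(&f.id).await?);
    }
    Ok(())
}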