Compare commits


No commits in common. "222198fcb656287171c321083f4fe822d9449180" and "5530f397798e6b0f35bbf03d167e99ce93a09120" have entirely different histories.

21 changed files with 624 additions and 958 deletions

Cargo.lock (generated, 656 changed lines) — diff suppressed because it is too large.

Cargo.toml

@@ -3,6 +3,14 @@ name = "route96"
 version = "0.4.0"
 edition = "2021"
+[[bin]]
+name = "void_cat_migrate"
+required-features = ["bin-void-cat-migrate"]
+[[bin]]
+name = "void_cat_forced_migrate"
+required-features = ["bin-void-cat-force-migrate"]
 [[bin]]
 name = "route96"
 path = "src/bin/main.rs"
@@ -11,18 +19,22 @@ path = "src/bin/main.rs"
 name = "route96"
 [features]
-default = ["nip96", "blossom", "analytics", "react-ui", "payments"]
+default = ["nip96", "blossom", "analytics", "ranges", "react-ui"]
 media-compression = ["dep:ffmpeg-rs-raw", "dep:libc"]
-labels = ["media-compression", "dep:candle-core", "dep:candle-nn", "dep:candle-transformers"]
+labels = ["nip96", "dep:candle-core", "dep:candle-nn", "dep:candle-transformers"]
 nip96 = ["media-compression"]
 blossom = []
+bin-void-cat-migrate = ["dep:sqlx-postgres"]
+bin-void-cat-force-migrate = ["dep:regex", "dep:nostr-cursor"]
+torrent-v2 = []
 analytics = []
+void-cat-redirects = ["dep:sqlx-postgres"]
+ranges = ["dep:http-range-header"]
 react-ui = []
-payments = ["dep:fedimint-tonic-lnd"]
 [dependencies]
 log = "0.4.21"
-nostr = "0.39.0"
+nostr = "0.37.0"
 pretty_env_logger = "0.5.0"
 rocket = { version = "0.5.1", features = ["json"] }
 tokio = { version = "1.37.0", features = ["rt", "rt-multi-thread", "macros"] }
@@ -33,19 +45,21 @@ uuid = { version = "1.8.0", features = ["v4", "serde"] }
 anyhow = "^1.0.82"
 sha2 = "0.10.8"
 sqlx = { version = "0.8.1", features = ["mysql", "runtime-tokio", "chrono", "uuid"] }
-config = { version = "0.15.7", features = ["yaml"] }
+config = { version = "0.14.0", features = ["yaml"] }
 chrono = { version = "0.4.38", features = ["serde"] }
-reqwest = { version = "0.12.8", features = ["stream", "http2"] }
+serde_with = { version = "3.8.1", features = ["hex"] }
+reqwest = { version = "0.12.8", features = ["stream"] }
 clap = { version = "4.5.18", features = ["derive"] }
 mime2ext = "0.1.53"
-infer = "0.19.0"
+infer = "0.16.0"
 tokio-util = { version = "0.7.13", features = ["io", "io-util"] }
-http-range-header = { version = "0.4.2" }
-base58 = "0.2.0"
 libc = { version = "0.2.153", optional = true }
 ffmpeg-rs-raw = { git = "https://git.v0l.io/Kieran/ffmpeg-rs-raw.git", rev = "a63b88ef3c8f58c7c0ac57d361d06ff0bb3ed385", optional = true }
 candle-core = { git = "https://git.v0l.io/huggingface/candle.git", tag = "0.8.1", optional = true }
 candle-nn = { git = "https://git.v0l.io/huggingface/candle.git", tag = "0.8.1", optional = true }
 candle-transformers = { git = "https://git.v0l.io/huggingface/candle.git", tag = "0.8.1", optional = true }
-fedimint-tonic-lnd = { version = "0.2.0", optional = true, default-features = false, features = ["invoicesrpc", "lightningrpc"] }
+sqlx-postgres = { version = "0.8.2", optional = true, features = ["chrono", "uuid"] }
+http-range-header = { version = "0.4.2", optional = true }
+nostr-cursor = { git = "https://git.v0l.io/Kieran/nostr_backup_proc.git", branch = "main", optional = true }
+regex = { version = "1.11.1", optional = true }
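Since both new `[[bin]]` targets declare `required-features`, cargo skips them unless the matching feature is enabled. A plausible way to build the migration tools under that constraint (commands inferred from the feature names above, not taken from the repo's docs):

```bash
# bin-void-cat-migrate pulls in sqlx-postgres for reading the void.cat DB
cargo build --release --features bin-void-cat-migrate

# the forced-migrate tool needs regex and nostr-cursor instead
cargo build --release --features bin-void-cat-force-migrate
```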

README.md

@@ -12,7 +12,6 @@ Image hosting service
 - [BUD-05](https://github.com/hzrd149/blossom/blob/master/buds/05.md)
 - [BUD-06](https://github.com/hzrd149/blossom/blob/master/buds/06.md)
 - [BUD-08](https://github.com/hzrd149/blossom/blob/master/buds/08.md)
-- [BUD-10](https://github.com/hzrd149/blossom/pull/57)
 - Image compression to WebP
 - Blurhash calculation
 - AI image labeling ([ViT224](https://huggingface.co/google/vit-base-patch16-224))
@@ -46,10 +45,4 @@ docker run --rm -it \
 ```
 ### Manual
 See [install.md](docs/debian.md)
-## Blossom only mode (No FFMPEG)
-If you don't want to support NIP-96 or any media compression you can build like so:
-```bash
-cargo build --release --no-default-features --features blossom
-```

config.prod.yaml

@@ -1,5 +1,17 @@
+# Listen address for webserver
 listen: "0.0.0.0:8000"
+# Database connection string (MYSQL)
 database: "mysql://root:root@db:3306/route96"
+# Directory to store uploads
 storage_dir: "/app/data"
+# Maximum support filesize for uploading
 max_upload_bytes: 104857600
+# Public facing url
 public_url: "http://localhost:8000"
+# Whitelisted pubkeys, leave out to disable
+# whitelist: ["63fe6318dc58583cfe16810f86dd09e18bfd76aabc24a0081ce2856f330504ed"]

config.yaml

@@ -13,37 +13,19 @@ max_upload_bytes: 5e+9
 # Public facing url
 public_url: "http://localhost:8000"
-# (Optional) Whitelisted pubkeys, leave out to disable
+# Whitelisted pubkeys, leave out to disable
 # whitelist: ["63fe6318dc58583cfe16810f86dd09e18bfd76aabc24a0081ce2856f330504ed"]
-# (Optional) Path for ViT(224) image model (https://huggingface.co/google/vit-base-patch16-224)
-# vit_model:
-#   model: "falcon_nsfw.safetensors"
-#   config: "falcon_nsfw.json"
-# (Optional) Analytics support
+# Path for ViT(224) image model (https://huggingface.co/google/vit-base-patch16-224)
+vit_model:
+  model: "/home/kieran/Downloads/falcon_nsfw.safetensors"
+  config: "/home/kieran/Downloads/falcon_nsfw.json"
+# Analytics support
 # plausible_url: "https://plausible.com/"
-# (Optional) Legacy file path for void.cat uploads
-# void_cat_files: "/my/void.cat/data"
-# (Optional) Payment system config
-payments:
-  # (Optional) Fiat currency used to track exchange rate along with invoices
-  # If [cost] is using a fiat currency, exchange rates will always be stored
-  # in that currency, so this config is not needed
-  fiat: "USD"
-  # LND node config
-  lnd:
-    endpoint: "https://127.0.0.1:10001"
-    tls: "/home/kieran/.polar/networks/3/volumes/lnd/alice/tls.cert"
-    macaroon: "/home/kieran/.polar/networks/3/volumes/lnd/alice/data/chain/bitcoin/regtest/admin.macaroon"
-  # Cost per unit (BTC/USD/EUR/AUD/CAD/JPY/GBP)
-  cost:
-    currency: "BTC"
-    amount: 0.00000100
-  # Unit metric used to calculate quote (GBSpace, GBEgress)
-  unit: "GBSpace"
-  # Billing interval (day / month / year)
-  interval:
-    month: 1
+# Support legacy void
+# void_cat_database: "postgres://postgres:postgres@localhost:41911/void"
+# Legacy file path for void.cat uploads
+# void_cat_files: "/my/void.cat/data"

SQL migration — payments schema (deleted file)

@@ -1,22 +0,0 @@
--- Add migration script here
-alter table users
-    add column paid_until timestamp,
-    add column paid_size integer unsigned not null;
-create table payments
-(
-    payment_hash binary(32) not null primary key,
-    user_id      integer unsigned not null,
-    created      timestamp default current_timestamp,
-    amount       integer unsigned not null,
-    is_paid      bit(1) not null default 0,
-    days_value   integer unsigned not null,
-    size_value   integer unsigned not null,
-    settle_index integer unsigned,
-    rate         float,
-    constraint fk_payments_user_id
-        foreign key (user_id) references users (id)
-            on delete cascade
-            on update restrict
-);

src/background/media_metadata.rs

@@ -3,7 +3,6 @@ use crate::filesystem::FileStore;
 use crate::processing::probe_file;
 use anyhow::Result;
 use log::{error, info, warn};
-use tokio::sync::broadcast;
 pub struct MediaMetadata {
     db: Database,
@@ -15,16 +14,12 @@ impl MediaMetadata {
         Self { db, fs }
     }
-    pub async fn process(&mut self, mut rx: broadcast::Receiver<()>) -> Result<()> {
+    pub async fn process(&mut self) -> Result<()> {
         let to_migrate = self.db.get_missing_media_metadata().await?;
         info!("{} files are missing metadata", to_migrate.len());
         for file in to_migrate {
-            if rx.try_recv().is_ok() {
-                info!("Shutting down MediaMetadata process");
-                break;
-            }
             // probe file and update metadata
             let path = self.fs.get(&file.id);
             match probe_file(&path) {

src/background/mod.rs

@@ -1,54 +1,24 @@
 use crate::db::Database;
 use crate::filesystem::FileStore;
-use log::{error, info, warn};
-use tokio::sync::broadcast;
+use anyhow::Result;
+use log::info;
 use tokio::task::JoinHandle;
 #[cfg(feature = "media-compression")]
 mod media_metadata;
-#[cfg(feature = "payments")]
-mod payments;
-pub fn start_background_tasks(
-    db: Database,
-    file_store: FileStore,
-    shutdown_rx: broadcast::Receiver<()>,
-    #[cfg(feature = "payments")] client: Option<fedimint_tonic_lnd::Client>,
-) -> Vec<JoinHandle<()>> {
+pub fn start_background_tasks(db: Database, file_store: FileStore) -> Vec<JoinHandle<Result<()>>> {
     let mut ret = vec![];
     #[cfg(feature = "media-compression")]
     {
-        let db = db.clone();
-        let rx = shutdown_rx.resubscribe();
         ret.push(tokio::spawn(async move {
             info!("Starting MediaMetadata background task");
-            let mut m = media_metadata::MediaMetadata::new(db, file_store.clone());
-            if let Err(e) = m.process(rx).await {
-                error!("MediaMetadata failed: {}", e);
-            } else {
-                info!("MediaMetadata background task completed");
-            }
+            let mut m = media_metadata::MediaMetadata::new(db.clone(), file_store.clone());
+            m.process().await?;
+            info!("MediaMetadata background task completed");
+            Ok(())
         }));
     }
-    #[cfg(feature = "payments")]
-    {
-        if let Some(client) = client {
-            let db = db.clone();
-            let rx = shutdown_rx.resubscribe();
-            ret.push(tokio::spawn(async move {
-                info!("Starting PaymentsHandler background task");
-                let mut m = payments::PaymentsHandler::new(client, db);
-                if let Err(e) = m.process(rx).await {
-                    error!("PaymentsHandler failed: {}", e);
-                } else {
-                    info!("PaymentsHandler background task completed");
-                }
-            }));
-        } else {
-            warn!("Not starting PaymentsHandler, configuration missing")
-        }
-    }
     ret
 }
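The side being removed here coordinates shutdown through a `tokio::sync::broadcast` channel: `main` keeps the sender, each task gets its own receiver via `resubscribe()`, and long-running loops poll `try_recv()` between units of work. A minimal self-contained sketch of that pattern (the task body and timings are illustrative, not route96 code):

```rust
use std::time::Duration;
use tokio::sync::broadcast;

#[tokio::main]
async fn main() {
    // capacity 1 is enough: the only message ever sent is the shutdown signal
    let (shutdown_tx, shutdown_rx) = broadcast::channel::<()>(1);

    let mut rx = shutdown_rx.resubscribe();
    let worker = tokio::spawn(async move {
        loop {
            // a real task does one unit of work here, then checks for shutdown
            if rx.try_recv().is_ok() {
                println!("worker: shutdown signal received");
                break;
            }
            tokio::time::sleep(Duration::from_millis(100)).await;
        }
    });

    // ... later, e.g. after the web server exits:
    shutdown_tx.send(()).expect("no receivers left");
    worker.await.expect("worker panicked");
}
```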

src/background/payments.rs (deleted file)

@@ -1,71 +0,0 @@
-use crate::db::Database;
-use anyhow::Result;
-use fedimint_tonic_lnd::lnrpc::invoice::InvoiceState;
-use fedimint_tonic_lnd::lnrpc::InvoiceSubscription;
-use fedimint_tonic_lnd::Client;
-use log::{error, info};
-use rocket::futures::StreamExt;
-use sqlx::Row;
-use tokio::sync::broadcast;
-pub struct PaymentsHandler {
-    client: Client,
-    database: Database,
-}
-impl PaymentsHandler {
-    pub fn new(client: Client, database: Database) -> Self {
-        PaymentsHandler { client, database }
-    }
-    pub async fn process(&mut self, mut rx: broadcast::Receiver<()>) -> Result<()> {
-        let start_idx = self.database.get_last_settle_index().await?;
-        let mut invoices = self
-            .client
-            .lightning()
-            .subscribe_invoices(InvoiceSubscription {
-                add_index: 0,
-                settle_index: start_idx,
-            })
-            .await?;
-        info!("Starting invoice subscription from {}", start_idx);
-        let invoices = invoices.get_mut();
-        loop {
-            tokio::select! {
-                Ok(_) = rx.recv() => {
-                    break;
-                }
-                Some(Ok(msg)) = invoices.next() => {
-                    if msg.state == InvoiceState::Settled as i32 {
-                        if let Ok(Some(mut p)) = self.database.get_payment(&msg.r_hash).await {
-                            p.settle_index = Some(msg.settle_index);
-                            p.is_paid = true;
-                            match self.database.complete_payment(&p).await {
-                                Ok(()) => info!(
-                                    "Successfully completed payment: {}",
-                                    hex::encode(&msg.r_hash)
-                                ),
-                                Err(e) => error!("Failed to complete payment: {}", e),
-                            }
-                        }
-                    }
-                }
-            }
-        }
-        Ok(())
-    }
-}
-impl Database {
-    async fn get_last_settle_index(&self) -> Result<u64> {
-        Ok(
-            sqlx::query("select max(settle_index) from payments where is_paid = true")
-                .fetch_one(&self.pool)
-                .await?
-                .try_get(0)
-                .unwrap_or(0),
-        )
-    }
-}

src/bin/main.rs

@@ -3,8 +3,6 @@ use std::net::{IpAddr, SocketAddr};
 use anyhow::Error;
 use clap::Parser;
 use config::Config;
-#[cfg(feature = "payments")]
-use fedimint_tonic_lnd::lnrpc::GetInfoRequest;
 use log::{error, info};
 use rocket::config::Ident;
 use rocket::data::{ByteUnit, Limits};
@@ -21,7 +19,6 @@ use route96::filesystem::FileStore;
 use route96::routes;
 use route96::routes::{get_blob, head_blob, root};
 use route96::settings::Settings;
-use tokio::sync::broadcast;
 #[derive(Parser, Debug)]
 #[command(version, about)]
@@ -76,13 +73,7 @@
         .attach(Shield::new()) // disable
         .mount(
             "/",
-            routes![
-                root,
-                get_blob,
-                head_blob,
-                routes::void_cat_redirect,
-                routes::void_cat_redirect_head
-            ],
+            routes![root, get_blob, head_blob, routes::void_cat_redirect, routes::void_cat_redirect_head],
         )
         .mount("/admin", routes::admin_routes());
@@ -104,47 +95,19 @@
     {
         rocket = rocket.mount("/", routes![routes::get_blob_thumb]);
     }
-    #[cfg(feature = "payments")]
-    let lnd = {
-        if let Some(lnd) = settings.payments.as_ref().map(|p| &p.lnd) {
-            let lnd = fedimint_tonic_lnd::connect(
-                lnd.endpoint.clone(),
-                lnd.tls.clone(),
-                lnd.macaroon.clone(),
-            )
-            .await?;
-            let info = {
-                let mut lnd = lnd.clone();
-                lnd.lightning().get_info(GetInfoRequest::default()).await?
-            };
-            info!(
-                "LND connected: {} v{}",
-                info.get_ref().alias,
-                info.get_ref().version
-            );
-            rocket = rocket
-                .manage(lnd.clone())
-                .mount("/", routes::payment::routes());
-            Some(lnd)
-        } else {
-            None
-        }
-    };
-    let (shutdown_tx, shutdown_rx) = broadcast::channel(1);
-    let jh = start_background_tasks(db, fs, shutdown_rx, lnd);
+    let jh = start_background_tasks(db, fs);
     if let Err(e) = rocket.launch().await {
         error!("Rocker error {}", e);
+        for j in jh {
+            let _ = j.await?;
+        }
+        Err(Error::from(e))
+    } else {
+        for j in jh {
+            let _ = j.await?;
+        }
+        Ok(())
     }
-    shutdown_tx
-        .send(())
-        .expect("Failed to send shutdown signal");
-    for j in jh {
-        j.await?;
-    }
-    Ok(())
 }

src/bin/void_cat_forced_migrate.rs (new file)

@@ -0,0 +1,70 @@
+use clap::Parser;
+use log::{info, warn};
+use nostr::serde_json;
+use nostr_cursor::cursor::NostrCursor;
+use regex::Regex;
+use rocket::futures::StreamExt;
+use std::collections::{HashMap, HashSet};
+use std::path::PathBuf;
+use tokio::fs::File;
+use tokio::io::AsyncWriteExt;
+use uuid::Uuid;
+#[derive(Parser, Debug)]
+#[command(version, about)]
+struct ProgramArgs {
+    /// Directory pointing to archives to scan
+    #[arg(short, long)]
+    pub archive: PathBuf,
+    /// Output path .csv
+    #[arg(short, long)]
+    pub output: PathBuf,
+}
+#[tokio::main]
+async fn main() -> Result<(), anyhow::Error> {
+    pretty_env_logger::init();
+    let args: ProgramArgs = ProgramArgs::parse();
+    let mut report: HashMap<String, HashSet<String>> = HashMap::new();
+    let mut binding = NostrCursor::new(args.archive);
+    let mut cursor = Box::pin(binding.walk());
+    let matcher = Regex::new(r"void\.cat/d/(\w+)")?;
+    while let Some(Ok(e)) = cursor.next().await {
+        if e.content.contains("void.cat") {
+            let links = matcher.captures_iter(&e.content).collect::<Vec<_>>();
+            for link in links {
+                let g = link.get(1).unwrap().as_str();
+                let base58 = if let Ok(b) = nostr::bitcoin::base58::decode(g) {
+                    b
+                } else {
+                    warn!("Invalid base58 id {}", g);
+                    continue;
+                };
+                let _uuid = if let Ok(u) = Uuid::from_slice_le(base58.as_slice()) {
+                    u
+                } else {
+                    warn!("Invalid uuid {}", g);
+                    continue;
+                };
+                info!("Got link: {} => {}", g, e.pubkey);
+                if let Some(ur) = report.get_mut(&e.pubkey) {
+                    ur.insert(g.to_string());
+                } else {
+                    report.insert(e.pubkey.clone(), HashSet::from([g.to_string()]));
+                }
+            }
+        }
+    }
+    let json = serde_json::to_string(&report)?;
+    File::create(args.output)
+        .await?
+        .write_all(json.as_bytes())
+        .await?;
+    Ok(())
+}

src/bin/void_cat_migrate.rs (new file, 147 lines)

@@ -0,0 +1,147 @@
+use anyhow::Error;
+use clap::Parser;
+use config::Config;
+use log::{info, warn};
+use nostr::bitcoin::base58;
+use route96::db::{Database, FileUpload};
+use route96::filesystem::FileStore;
+use route96::settings::Settings;
+use route96::void_db::VoidCatDb;
+use route96::void_file::VoidFile;
+use std::path::PathBuf;
+use tokio::io::{AsyncWriteExt, BufWriter};
+#[derive(Debug, Clone, clap::ValueEnum)]
+enum ArgOperation {
+    Migrate,
+    ExportNginxRedirects,
+}
+#[derive(Parser, Debug)]
+#[command(version, about)]
+struct Args {
+    /// Database connection string for void.cat DB
+    #[arg(long)]
+    pub database: String,
+    /// Path to filestore on void.cat
+    #[arg(long)]
+    pub data_path: String,
+    #[arg(long)]
+    pub operation: ArgOperation,
+}
+#[tokio::main]
+async fn main() -> Result<(), Error> {
+    pretty_env_logger::init();
+    let builder = Config::builder()
+        .add_source(config::File::with_name("config.yaml"))
+        .add_source(config::Environment::with_prefix("APP"))
+        .build()?;
+    let settings: Settings = builder.try_deserialize()?;
+    let db = Database::new(&settings.database).await?;
+    let fs = FileStore::new(settings.clone());
+    let args: Args = Args::parse();
+    let db_void = VoidCatDb::connect(&args.database).await?;
+    match args.operation {
+        ArgOperation::Migrate => {
+            let mut page = 0;
+            loop {
+                let files = db_void.list_files(page).await?;
+                if files.is_empty() {
+                    break;
+                }
+                for f in files {
+                    if let Err(e) = migrate_file(&f, &db, &fs, &args).await {
+                        warn!("Failed to migrate file: {}, {}", &f.id, e);
+                    }
+                }
+                page += 1;
+            }
+        }
+        ArgOperation::ExportNginxRedirects => {
+            let path: PathBuf = args.data_path.parse()?;
+            let conf_path = &path.join("nginx.conf");
+            info!("Writing redirects to {}", conf_path.to_str().unwrap());
+            let mut fout = BufWriter::new(tokio::fs::File::create(conf_path).await?);
+            let mut page = 0;
+            loop {
+                let files = db_void.list_files(page).await?;
+                if files.is_empty() {
+                    break;
+                }
+                for f in files {
+                    let legacy_id = base58::encode(f.id.to_bytes_le().as_slice());
+                    let redirect = format!("location ^\\/d\\/{}(?:\\.\\w+)?$ {{\n\treturn 301 https://nostr.download/{};\n}}\n", &legacy_id, &f.digest);
+                    fout.write_all(redirect.as_bytes()).await?;
+                }
+                page += 1;
+            }
+        }
+    }
+    Ok(())
+}
+async fn migrate_file(
+    f: &VoidFile,
+    db: &Database,
+    fs: &FileStore,
+    args: &Args,
+) -> Result<(), Error> {
+    let pubkey_vec = hex::decode(&f.email)?;
+    let id_vec = hex::decode(&f.digest)?;
+    // copy file
+    let src_path = PathBuf::new()
+        .join(&args.data_path)
+        .join(VoidFile::map_to_path(&f.id));
+    let dst_path = fs.map_path(&id_vec);
+    if src_path.exists() && !dst_path.exists() {
+        info!(
+            "Copying file: {} from {} => {}",
+            &f.id,
+            src_path.to_str().unwrap(),
+            dst_path.to_str().unwrap()
+        );
+        tokio::fs::create_dir_all(dst_path.parent().unwrap()).await?;
+        tokio::fs::copy(src_path, dst_path).await?;
+    } else if dst_path.exists() {
+        info!("File already exists {}, continuing...", &f.id);
+    } else {
+        anyhow::bail!("Source file not found {}", src_path.to_str().unwrap());
+    }
+    let uid = db.upsert_user(&pubkey_vec).await?;
+    info!("Mapped user {} => {}", &f.email, uid);
+    let md: Option<Vec<&str>> = f.media_dimensions.as_ref().map(|s| s.split("x").collect());
+    let fu = FileUpload {
+        id: id_vec,
+        name: f.name.clone(),
+        size: f.size as u64,
+        mime_type: f.mime_type.clone(),
+        created: f.uploaded,
+        width: match &md {
+            Some(s) => Some(s[0].parse::<u32>()?),
+            None => None,
+        },
+        height: match &md {
+            Some(s) => Some(s[1].parse::<u32>()?),
+            None => None,
+        },
+        blur_hash: None,
+        alt: f.description.clone(),
+        duration: None,
+        bitrate: None,
+    };
+    db.add_file(&fu, uid).await?;
+    Ok(())
+}
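Both migration binaries and the redirect routes further down rely on the same legacy-id scheme: a void.cat file id is a UUID whose little-endian bytes are base58-encoded for the `/d/<id>` URL. A sketch of the round trip, using the standalone `bs58` crate for illustration where the diff uses `nostr::bitcoin::base58` (and previously the `base58` crate):

```rust
use uuid::Uuid;

/// Encode a void.cat file id the way the nginx-redirect exporter does:
/// base58 over the UUID's little-endian byte order.
fn legacy_id(id: &Uuid) -> String {
    bs58::encode(id.to_bytes_le()).into_string()
}

/// Invert the mapping, as void_cat_redirect does for incoming /d/<id> URLs.
fn parse_legacy_id(s: &str) -> Option<Uuid> {
    let bytes = bs58::decode(s).into_vec().ok()?;
    Uuid::from_slice_le(&bytes).ok()
}

fn main() {
    let id = Uuid::new_v4();
    let short = legacy_id(&id);
    assert_eq!(parse_legacy_id(&short), Some(id));
    // each exported redirect then looks roughly like:
    // location ^\/d\/<short>(?:\.\w+)?$ { return 301 https://nostr.download/<sha256>; }
    println!("{} <=> {}", id, short);
}
```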

src/db.rs

@@ -61,10 +61,6 @@ pub struct User {
 pub pubkey: Vec<u8>,
 pub created: DateTime<Utc>,
 pub is_admin: bool,
-    #[cfg(feature = "payments")]
-    pub paid_until: Option<DateTime<Utc>>,
-    #[cfg(feature = "payments")]
-    pub paid_size: u64,
 }
 #[cfg(feature = "labels")]
@@ -94,20 +90,6 @@ pub struct UserStats {
     pub total_size: u64,
 }
-#[cfg(feature = "payments")]
-#[derive(Clone, FromRow, Serialize)]
-pub struct Payment {
-    pub payment_hash: Vec<u8>,
-    pub user_id: u64,
-    pub created: DateTime<Utc>,
-    pub amount: u64,
-    pub is_paid: bool,
-    pub days_value: u64,
-    pub size_value: u64,
-    pub settle_index: Option<u64>,
-    pub rate: Option<f32>,
-}
 #[derive(Clone)]
 pub struct Database {
     pub(crate) pool: sqlx::pool::Pool<sqlx::mysql::MySql>,
@@ -288,56 +270,3 @@ impl Database {
         Ok((results, count))
     }
 }
-#[cfg(feature = "payments")]
-impl Database {
-    pub async fn insert_payment(&self, payment: &Payment) -> Result<(), Error> {
-        sqlx::query("insert into payments(payment_hash,user_id,amount,days_value,size_value,rate) values(?,?,?,?,?,?)")
-            .bind(&payment.payment_hash)
-            .bind(payment.user_id)
-            .bind(payment.amount)
-            .bind(payment.days_value)
-            .bind(payment.size_value)
-            .bind(payment.rate)
-            .execute(&self.pool)
-            .await?;
-        Ok(())
-    }
-    pub async fn get_payment(&self, payment_hash: &Vec<u8>) -> Result<Option<Payment>, Error> {
-        sqlx::query_as("select * from payments where payment_hash = ?")
-            .bind(payment_hash)
-            .fetch_optional(&self.pool)
-            .await
-    }
-    pub async fn get_user_payments(&self, uid: u64) -> Result<Vec<Payment>, Error> {
-        sqlx::query_as("select * from payments where user_id = ?")
-            .bind(uid)
-            .fetch_all(&self.pool)
-            .await
-    }
-    pub async fn complete_payment(&self, payment: &Payment) -> Result<(), Error> {
-        let mut tx = self.pool.begin().await?;
-        sqlx::query("update payments set is_paid = true, settle_index = ? where payment_hash = ?")
-            .bind(payment.settle_index)
-            .bind(&payment.payment_hash)
-            .execute(&mut *tx)
-            .await?;
-        // TODO: check space is not downgraded
-        sqlx::query("update users set paid_until = TIMESTAMPADD(DAY, ?, IFNULL(paid_until, current_timestamp)), paid_size = ? where id = ?")
-            .bind(payment.days_value)
-            .bind(payment.size_value)
-            .bind(payment.user_id)
-            .execute(&mut *tx)
-            .await?;
-        tx.commit().await?;
-        Ok(())
-    }
-}

src/filesystem.rs

@@ -1,5 +1,6 @@
 #[cfg(feature = "labels")]
 use crate::db::FileLabel;
+use crate::processing::can_compress;
 #[cfg(feature = "labels")]
 use crate::processing::labeling::label_frame;
 #[cfg(feature = "media-compression")]
@@ -17,7 +18,6 @@ use std::path::PathBuf;
 use tokio::fs::File;
 use tokio::io::{AsyncRead, AsyncReadExt};
 use uuid::Uuid;
-use crate::can_compress;
 #[derive(Clone)]
 pub enum FileSystemResult {
@@ -180,7 +180,6 @@
     }
 }
-    #[cfg(feature = "media-compression")]
     async fn compress_file(&self, input: &PathBuf, mime_type: &str) -> Result<NewFileResult> {
         let compressed_result = compress_file(input, mime_type, &self.temp_dir())?;
         #[cfg(feature = "labels")]

src/lib.rs

@@ -5,14 +5,10 @@ pub mod background;
 pub mod cors;
 pub mod db;
 pub mod filesystem;
-#[cfg(feature = "payments")]
-pub mod payments;
 #[cfg(feature = "media-compression")]
 pub mod processing;
 pub mod routes;
 pub mod settings;
+#[cfg(any(feature = "void-cat-redirects", feature = "bin-void-cat-migrate"))]
+pub mod void_db;
 pub mod void_file;
-pub fn can_compress(mime_type: &str) -> bool {
-    mime_type.starts_with("image/")
-}

src/payments.rs (deleted file)

@ -1,53 +0,0 @@
use serde::{Deserialize, Serialize};
use std::fmt::{Display, Formatter};
#[cfg(feature = "payments")]
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PaymentAmount {
pub currency: Currency,
pub amount: f32,
}
#[cfg(feature = "payments")]
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum Currency {
BTC,
USD,
EUR,
GBP,
JPY,
CAD,
AUD,
}
#[cfg(feature = "payments")]
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum PaymentUnit {
GBSpace,
GBEgress,
}
impl PaymentUnit {
/// Get the total size from a number of units
pub fn to_size(&self, units: f32) -> u64 {
(1000f32 * 1000f32 * 1000f32 * units) as u64
}
}
impl Display for PaymentUnit {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match self {
PaymentUnit::GBSpace => write!(f, "GB Space"),
PaymentUnit::GBEgress => write!(f, "GB Egress"),
}
}
}
#[cfg(feature = "payments")]
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum PaymentInterval {
Day(u16),
Month(u16),
Year(u16),
}

src/processing/mod.rs

@@ -5,7 +5,6 @@ use ffmpeg_rs_raw::{Decoder, Demuxer, DemuxerInfo, Encoder, Scaler, StreamType,
 use std::path::{Path, PathBuf};
 use std::ptr;
 use uuid::Uuid;
-use crate::can_compress;
 #[cfg(feature = "labels")]
 pub mod labeling;
@@ -137,6 +136,10 @@ pub struct NewFileProcessorResult {
     pub bitrate: u32,
 }
+pub fn can_compress(mime_type: &str) -> bool {
+    mime_type.starts_with("image/")
+}
 pub fn compress_file(
     path: &Path,
     mime_type: &str,

src/routes/admin.rs

@@ -53,10 +53,6 @@ pub struct SelfUser {
     pub is_admin: bool,
     pub file_count: u64,
     pub total_size: u64,
-    #[cfg(feature = "payments")]
-    pub paid_until: u64,
-    #[cfg(feature = "payments")]
-    pub quota: u64,
 }
 #[derive(Serialize)]
@@ -81,12 +77,6 @@ async fn admin_get_self(auth: Nip98Auth, db: &State<Database>) -> AdminResponse<
                 is_admin: user.is_admin,
                 file_count: s.file_count,
                 total_size: s.total_size,
-                paid_until: if let Some(u) = &user.paid_until {
-                    u.timestamp() as u64
-                } else {
-                    0
-                },
-                quota: user.paid_size,
             })
         }
         Err(_) => AdminResponse::error("User not found"),

src/routes/mod.rs

@@ -10,7 +10,6 @@ pub use crate::routes::nip96::nip96_routes;
 use crate::settings::Settings;
 use crate::void_file::VoidFile;
 use anyhow::Error;
-use base58::FromBase58;
 use http_range_header::{parse_range_header, EndPosition, StartPosition};
 use log::{debug, warn};
 use nostr::Event;
@@ -34,8 +33,6 @@ mod blossom;
 mod nip96;
 mod admin;
-#[cfg(feature = "payments")]
-pub mod payment;
 pub struct FilePayload {
     pub file: File,
@@ -186,45 +183,59 @@ impl<'r> Responder<'r, 'static> for FilePayload {
         response.set_header(Header::new("cache-control", "max-age=31536000, immutable"));
         // handle ranges
-        const MAX_UNBOUNDED_RANGE: u64 = 1024 * 1024;
-        // only use range response for files > 1MiB
-        if self.info.size < MAX_UNBOUNDED_RANGE {
-            response.set_sized_body(None, self.file);
-        } else {
-            response.set_header(Header::new("accept-ranges", "bytes"));
-            if let Some(r) = request.headers().get("range").next() {
-                if let Ok(ranges) = parse_range_header(r) {
-                    if ranges.ranges.len() > 1 {
-                        warn!("Multipart ranges are not supported, fallback to non-range request");
-                        response.set_streamed_body(self.file);
-                    } else {
-                        let single_range = ranges.ranges.first().unwrap();
-                        let range_start = match single_range.start {
-                            StartPosition::Index(i) => i,
-                            StartPosition::FromLast(i) => self.info.size - i,
-                        };
-                        let range_end = match single_range.end {
-                            EndPosition::Index(i) => i,
-                            EndPosition::LastByte => {
-                                (range_start + MAX_UNBOUNDED_RANGE).min(self.info.size)
-                            }
-                        };
-                        let r_len = range_end - range_start;
-                        let r_body = RangeBody::new(self.file, range_start..range_end);
-                        response.set_status(Status::PartialContent);
-                        response.set_header(Header::new("content-length", r_len.to_string()));
-                        response.set_header(Header::new(
-                            "content-range",
-                            format!("bytes {}-{}/{}", range_start, range_end - 1, self.info.size),
-                        ));
-                        response.set_streamed_body(Box::pin(r_body));
-                    }
-                }
-            } else {
-                response.set_sized_body(None, self.file);
-            }
-        }
+        #[cfg(feature = "ranges")]
+        {
+            const MAX_UNBOUNDED_RANGE: u64 = 1024 * 1024;
+            // only use range response for files > 1MiB
+            if self.info.size < MAX_UNBOUNDED_RANGE {
+                response.set_sized_body(None, self.file);
+            } else {
+                response.set_header(Header::new("accept-ranges", "bytes"));
+                if let Some(r) = request.headers().get("range").next() {
+                    if let Ok(ranges) = parse_range_header(r) {
+                        if ranges.ranges.len() > 1 {
+                            warn!(
+                                "Multipart ranges are not supported, fallback to non-range request"
+                            );
+                            response.set_streamed_body(self.file);
+                        } else {
+                            let single_range = ranges.ranges.first().unwrap();
+                            let range_start = match single_range.start {
+                                StartPosition::Index(i) => i,
+                                StartPosition::FromLast(i) => self.info.size - i,
+                            };
+                            let range_end = match single_range.end {
+                                EndPosition::Index(i) => i,
+                                EndPosition::LastByte => {
+                                    (range_start + MAX_UNBOUNDED_RANGE).min(self.info.size)
+                                }
+                            };
+                            let r_len = range_end - range_start;
+                            let r_body = RangeBody::new(self.file, range_start..range_end);
+                            response.set_status(Status::PartialContent);
+                            response.set_header(Header::new("content-length", r_len.to_string()));
+                            response.set_header(Header::new(
+                                "content-range",
+                                format!(
+                                    "bytes {}-{}/{}",
+                                    range_start,
+                                    range_end - 1,
+                                    self.info.size
+                                ),
+                            ));
+                            response.set_streamed_body(Box::pin(r_body));
+                        }
+                    }
+                } else {
+                    response.set_sized_body(None, self.file);
+                }
+            }
+        }
+        #[cfg(not(feature = "ranges"))]
+        {
+            response.set_sized_body(None, self.file);
+        }
         if let Ok(ct) = ContentType::from_str(&self.info.mime_type) {
             response.set_header(ct);
@@ -426,7 +437,9 @@ pub async fn void_cat_redirect(id: &str, settings: &State<Settings>) -> Option<N
         id
     };
     if let Some(base) = &settings.void_cat_files {
-        let uuid = uuid::Uuid::from_slice_le(id.from_base58().unwrap().as_slice()).unwrap();
+        let uuid =
+            uuid::Uuid::from_slice_le(nostr::bitcoin::base58::decode(id).unwrap().as_slice())
+                .unwrap();
         let f = base.join(VoidFile::map_to_path(&uuid));
         debug!("Legacy file map: {} => {}", id, f.display());
         if let Ok(f) = NamedFile::open(f).await {
@@ -446,7 +459,8 @@ pub async fn void_cat_redirect_head(id: &str) -> VoidCatFile {
     } else {
         id
     };
-    let uuid = uuid::Uuid::from_slice_le(id.from_base58().unwrap().as_slice()).unwrap();
+    let uuid =
+        uuid::Uuid::from_slice_le(nostr::bitcoin::base58::decode(id).unwrap().as_slice()).unwrap();
     VoidCatFile {
         status: Status::Ok,
         uuid: Header::new("X-UUID", uuid.to_string()),
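Behaviour preserved on both sides of the `ranges` refactor above: an open-ended range request is capped at `MAX_UNBOUNDED_RANGE` (1 MiB) instead of streaming to end-of-file, and `content-range` reports an inclusive last-byte index. A standalone worked example of just that arithmetic (hypothetical sizes, no Rocket types involved):

```rust
const MAX_UNBOUNDED_RANGE: u64 = 1024 * 1024; // 1 MiB, as in the diff

/// Resolve a parsed byte range against the file size, mirroring the
/// StartPosition/EndPosition match in FilePayload: an explicit end is
/// honored as-is, an open end ("bytes=N-") is capped at N + 1 MiB or EOF.
fn clamp_range(start: u64, end: Option<u64>, file_size: u64) -> (u64, u64) {
    let end = end.unwrap_or_else(|| (start + MAX_UNBOUNDED_RANGE).min(file_size));
    (start, end)
}

fn main() {
    let file_size = 10 * 1024 * 1024; // a 10 MiB file
    let (start, end) = clamp_range(0, None, file_size); // "Range: bytes=0-"
    assert_eq!(end - start, MAX_UNBOUNDED_RANGE); // content-length: 1048576
    // content-range uses an inclusive last byte, hence the `end - 1` in the diff
    println!("content-range: bytes {}-{}/{}", start, end - 1, file_size);
}
```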

src/routes/payment.rs (deleted file)

@@ -1,131 +0,0 @@
-use crate::auth::nip98::Nip98Auth;
-use crate::db::{Database, Payment};
-use crate::payments::{Currency, PaymentAmount, PaymentInterval, PaymentUnit};
-use crate::settings::Settings;
-use chrono::{Months, Utc};
-use fedimint_tonic_lnd::lnrpc::Invoice;
-use fedimint_tonic_lnd::Client;
-use log::{error, info};
-use rocket::serde::json::Json;
-use rocket::{routes, Route, State};
-use serde::{Deserialize, Serialize};
-use std::ops::{Add, Deref};
-pub fn routes() -> Vec<Route> {
-    routes![get_payment, req_payment]
-}
-#[derive(Deserialize, Serialize)]
-struct PaymentInfo {
-    /// Billing quota metric
-    pub unit: PaymentUnit,
-    /// Amount of time to bill units (GB/mo, Gb Egress/day etc.)
-    pub interval: PaymentInterval,
-    /// Value amount of payment
-    pub cost: PaymentAmount,
-}
-#[derive(Deserialize, Serialize)]
-struct PaymentRequest {
-    /// Number of units requested to make payment
-    pub units: f32,
-    /// Quantity of orders to make
-    pub quantity: u16,
-}
-#[derive(Deserialize, Serialize)]
-struct PaymentResponse {
-    pub pr: String,
-}
-#[rocket::get("/payment")]
-async fn get_payment(settings: &State<Settings>) -> Option<Json<PaymentInfo>> {
-    settings.payments.as_ref().map(|p| {
-        Json::from(PaymentInfo {
-            unit: p.unit.clone(),
-            interval: p.interval.clone(),
-            cost: p.cost.clone(),
-        })
-    })
-}
-#[rocket::post("/payment", data = "<req>", format = "json")]
-async fn req_payment(
-    auth: Nip98Auth,
-    db: &State<Database>,
-    settings: &State<Settings>,
-    lnd: &State<Client>,
-    req: Json<PaymentRequest>,
-) -> Result<Json<PaymentResponse>, String> {
-    let cfg = if let Some(p) = &settings.payments {
-        p
-    } else {
-        return Err("Payment not enabled, missing configuration option(s)".to_string());
-    };
-    let btc_amount = match cfg.cost.currency {
-        Currency::BTC => cfg.cost.amount,
-        _ => return Err("Currency not supported".to_string()),
-    };
-    let amount = btc_amount * req.units * req.quantity as f32;
-    let pubkey_vec = auth.event.pubkey.to_bytes().to_vec();
-    let uid = db
-        .upsert_user(&pubkey_vec)
-        .await
-        .map_err(|_| "Failed to get user account".to_string())?;
-    let mut lnd = lnd.deref().clone();
-    let c = lnd.lightning();
-    let msat = (amount * 1e11f32) as u64;
-    let memo = format!(
-        "{}x {} {} for {}",
-        req.quantity, req.units, cfg.unit, auth.event.pubkey
-    );
-    info!("Requesting {} msats: {}", msat, memo);
-    let invoice = c
-        .add_invoice(Invoice {
-            value_msat: msat as i64,
-            memo,
-            ..Default::default()
-        })
-        .await
-        .map_err(|e| e.message().to_string())?;
-    let days_value = match cfg.interval {
-        PaymentInterval::Day(d) => d as u64,
-        PaymentInterval::Month(m) => {
-            let now = Utc::now();
-            (now.add(Months::new(m as u32)) - now).num_days() as u64
-        }
-        PaymentInterval::Year(y) => {
-            let now = Utc::now();
-            (now.add(Months::new(12 * y as u32)) - now).num_days() as u64
-        }
-    };
-    let record = Payment {
-        payment_hash: invoice.get_ref().r_hash.clone(),
-        user_id: uid,
-        created: Default::default(),
-        amount: msat,
-        is_paid: false,
-        days_value,
-        size_value: cfg.unit.to_size(req.units),
-        settle_index: None,
-        rate: None,
-    };
-    if let Err(e) = db.insert_payment(&record).await {
-        error!("Failed to insert payment: {}", e);
-        return Err("Failed to insert payment".to_string());
-    }
-    Ok(Json(PaymentResponse {
-        pr: invoice.get_ref().payment_request.clone(),
-    }))
-}
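For the record, the pricing math this removed handler implemented: `cost.amount` is denominated in BTC per unit per interval, the invoice value is `amount * units * quantity`, and the factor 1e11 converts BTC to millisatoshis (1 BTC = 10^8 sat = 10^11 msat). A worked example with the sample numbers from the deleted config.yaml block (illustrative values, not project code):

```rust
fn main() {
    // 0.000001 BTC per GB of space per month, as in the removed sample config
    let cost_btc_per_unit = 0.00000100_f32;
    let units = 5.0_f32; // GB requested
    let quantity = 2u16; // billing intervals (months)

    let amount_btc = cost_btc_per_unit * units * quantity as f32;
    // same conversion the handler used: 1 BTC = 1e11 msat
    let msat = (amount_btc * 1e11f32) as u64;
    // ~1_000_000 msat (1000 sats); note that `as u64` truncates, so f32
    // rounding error can shave a msat off round figures
    println!("invoice value: {} msat for {} BTC", msat, amount_btc);
}
```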

src/settings.rs

@@ -1,5 +1,3 @@
-#[cfg(feature = "payments")]
-use crate::payments::{Currency, PaymentAmount, PaymentInterval, PaymentUnit};
 use serde::{Deserialize, Serialize};
 use std::path::PathBuf;
@@ -32,12 +30,11 @@ pub struct Settings {
     /// Analytics tracking
     pub plausible_url: Option<String>,
+    #[cfg(feature = "void-cat-redirects")]
+    pub void_cat_database: Option<String>,
     /// Path to void.cat uploads (files-v2)
     pub void_cat_files: Option<PathBuf>,
-    #[cfg(feature = "payments")]
-    /// Payment options for paid storage
-    pub payments: Option<PaymentConfig>,
 }
 #[derive(Debug, Clone, Serialize, Deserialize)]
@@ -45,30 +42,3 @@ pub struct VitModelConfig {
     pub model: PathBuf,
     pub config: PathBuf,
 }
-#[cfg(feature = "payments")]
-#[derive(Debug, Clone, Serialize, Deserialize)]
-pub struct PaymentConfig {
-    /// LND connection details
-    pub lnd: LndConfig,
-    /// Pricing per unit
-    pub cost: PaymentAmount,
-    /// What metric to bill payments on
-    pub unit: PaymentUnit,
-    /// Billing interval time per unit
-    pub interval: PaymentInterval,
-    /// Fiat base currency to store exchange rates along with invoice
-    pub fiat: Option<Currency>,
-}
-#[cfg(feature = "payments")]
-#[derive(Debug, Clone, Serialize, Deserialize)]
-pub struct LndConfig {
-    pub endpoint: String,
-    pub tls: PathBuf,
-    pub macaroon: PathBuf,
-}