mirror of https://github.com/v0l/route96.git
synced 2025-06-14 15:46:32 +00:00

Compare commits: reports ... (42 commits)

Commits (SHA1):

d3711ff52c, fc080b5cd0, 0554f1220f, ca2d23508b, 71cb34eaee, e3e2986294,
afb33085b7, f569b94d19, 915624b2d7, 6ba1f2ae9c, f18a14e980, 2c42e19f42,
b9d920ad49, 7eb6b7221c, 8dc2544b15, 57050567c4, 1a5388fc1c, 6ccdb0fdc3,
6998f0ffac, 317b0708e0, 069aa30d52, 4dad339c09, f5b206dad3, b6bd190252,
3b4bb866ab, c885a71295, e1fca9a604, 16a14de5d6, 314d0c68af, 5530f39779,
4f40efa99c, ceca1904d7, 2172c8557a, f3989ba244, 9f78c1a54f, 201a3aaa49,
3ba5e7bc4c, 5fbe40faae, 0d8686a850, 71f6f47a00, 0bd531a21d, 6763e53d41

11 .drone.yml

@@ -5,11 +5,6 @@ metadata:
namespace: git
concurrency:
  limit: 1
trigger:
  branch:
    - main
  event:
    - push
steps:
  - name: build
    image: docker

@@ -21,9 +16,8 @@ steps:
from_secret: docker_hub
commands:
  - dockerd &
  - docker login -u kieran -p $TOKEN git.v0l.io
  - docker login -u voidic -p $TOKEN_DOCKER
  - docker buildx build --push -t git.v0l.io/kieran/route96:latest -t voidic/route96:latest .
  - docker buildx build --push -t voidic/route96:latest .
  - kill $(cat /var/run/docker.pid)
---
kind: pipeline

@@ -47,7 +41,6 @@ steps:
from_secret: docker_hub
commands:
  - dockerd &
  - docker login -u kieran -p $TOKEN git.v0l.io
  - docker login -u voidic -p $TOKEN_DOCKER
  - docker buildx build --push -t git.v0l.io/kieran/route96:$DRONE_TAG -t voidic/route96:$DRONE_TAG .
  - docker buildx build --push voidic/route96:$DRONE_TAG .
  - kill $(cat /var/run/docker.pid)

1533 Cargo.lock (generated): file diff suppressed because it is too large

36 Cargo.toml

@@ -3,14 +3,6 @@ name = "route96"
version = "0.4.0"
edition = "2021"

[[bin]]
name = "void_cat_migrate"
required-features = ["bin-void-cat-migrate"]

[[bin]]
name = "void_cat_forced_migrate"
required-features = ["bin-void-cat-force-migrate"]

[[bin]]
name = "route96"
path = "src/bin/main.rs"

@@ -19,22 +11,18 @@ path = "src/bin/main.rs"
name = "route96"

[features]
default = ["nip96", "blossom", "analytics", "ranges", "react-ui"]
default = ["nip96", "blossom", "analytics", "react-ui", "payments"]
media-compression = ["dep:ffmpeg-rs-raw", "dep:libc"]
labels = ["nip96", "dep:candle-core", "dep:candle-nn", "dep:candle-transformers"]
labels = ["media-compression", "dep:candle-core", "dep:candle-nn", "dep:candle-transformers"]
nip96 = ["media-compression"]
blossom = []
bin-void-cat-migrate = ["dep:sqlx-postgres"]
bin-void-cat-force-migrate = ["dep:regex", "dep:nostr-cursor"]
torrent-v2 = []
analytics = []
void-cat-redirects = ["dep:sqlx-postgres"]
ranges = ["dep:http-range-header"]
react-ui = []
payments = ["dep:fedimint-tonic-lnd"]

[dependencies]
log = "0.4.21"
nostr = "0.37.0"
nostr = "0.39.0"
pretty_env_logger = "0.5.0"
rocket = { version = "0.5.1", features = ["json"] }
tokio = { version = "1.37.0", features = ["rt", "rt-multi-thread", "macros"] }

@@ -45,21 +33,19 @@ uuid = { version = "1.8.0", features = ["v4", "serde"] }
anyhow = "^1.0.82"
sha2 = "0.10.8"
sqlx = { version = "0.8.1", features = ["mysql", "runtime-tokio", "chrono", "uuid"] }
config = { version = "0.14.0", features = ["yaml"] }
config = { version = "0.15.7", features = ["yaml"] }
chrono = { version = "0.4.38", features = ["serde"] }
serde_with = { version = "3.8.1", features = ["hex"] }
reqwest = { version = "0.12.8", features = ["stream"] }
reqwest = { version = "0.12.8", features = ["stream", "http2"] }
clap = { version = "4.5.18", features = ["derive"] }
mime2ext = "0.1.53"
infer = "0.16.0"
infer = "0.19.0"
tokio-util = { version = "0.7.13", features = ["io", "io-util"] }
http-range-header = { version = "0.4.2" }
base58 = "0.2.0"

libc = { version = "0.2.153", optional = true }
ffmpeg-rs-raw = { git = "https://git.v0l.io/Kieran/ffmpeg-rs-raw.git", rev = "de2050cec07a095bace38d3ccf9c4c4f9b03b217", optional = true }
ffmpeg-rs-raw = { git = "https://git.v0l.io/Kieran/ffmpeg-rs-raw.git", rev = "a63b88ef3c8f58c7c0ac57d361d06ff0bb3ed385", optional = true }
candle-core = { git = "https://git.v0l.io/huggingface/candle.git", tag = "0.8.1", optional = true }
candle-nn = { git = "https://git.v0l.io/huggingface/candle.git", tag = "0.8.1", optional = true }
candle-transformers = { git = "https://git.v0l.io/huggingface/candle.git", tag = "0.8.1", optional = true }
sqlx-postgres = { version = "0.8.2", optional = true, features = ["chrono", "uuid"] }
http-range-header = { version = "0.4.2", optional = true }
nostr-cursor = { git = "https://git.v0l.io/Kieran/nostr_backup_proc.git", branch = "main", optional = true }
regex = { version = "1.11.1", optional = true }
fedimint-tonic-lnd = { version = "0.2.0", optional = true, default-features = false, features = ["invoicesrpc", "lightningrpc"] }
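
The [features] table above drives conditional compilation: enabling "payments" by default is what pulls in fedimint-tonic-lnd and the payment routes. As a minimal illustration of the mechanism (an illustrative snippet, not taken from the repo; it assumes a "payments" feature is declared in Cargo.toml):

    // Build with: cargo run --features payments
    #[cfg(feature = "payments")]
    fn describe() -> &'static str {
        "payments enabled"
    }

    #[cfg(not(feature = "payments"))]
    fn describe() -> &'static str {
        "payments disabled"
    }

    fn main() {
        println!("{}", describe());
    }
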
40 config.yaml

@@ -13,19 +13,39 @@ max_upload_bytes: 5e+9
# Public facing url
public_url: "http://localhost:8000"

# Whitelisted pubkeys, leave out to disable
# (Optional) Whitelisted pubkeys, leave out to disable
# whitelist: ["63fe6318dc58583cfe16810f86dd09e18bfd76aabc24a0081ce2856f330504ed"]

# Path for ViT(224) image model (https://huggingface.co/google/vit-base-patch16-224)
vit_model:
  model: "/home/kieran/Downloads/falcon_nsfw.safetensors"
  config: "/home/kieran/Downloads/falcon_nsfw.json"
# (Optional) Path for ViT(224) image model (https://huggingface.co/google/vit-base-patch16-224)
# vit_model:
#   model: "falcon_nsfw.safetensors"
#   config: "falcon_nsfw.json"

# Analytics support
# (Optional) Analytics support
# plausible_url: "https://plausible.com/"

# Support legacy void
# void_cat_database: "postgres://postgres:postgres@localhost:41911/void"
# (Optional) Legacy file path for void.cat uploads
# void_cat_files: "/my/void.cat/data"

# Legacy file path for void.cat uploads
# void_cat_files: "/my/void.cat/data"
# (Optional) Payment system config
payments:
  # (Optional) Free quota in bytes for users without payments (default: 100MB)
  free_quota_bytes: 104857600
  # (Optional) Fiat currency used to track exchange rate along with invoices
  # If [cost] is using a fiat currency, exchange rates will always be stored
  # in that currency, so this config is not needed
  fiat: "USD"
  # LND node config
  lnd:
    endpoint: "https://127.0.0.1:10001"
    tls: "/home/kieran/.polar/networks/3/volumes/lnd/alice/tls.cert"
    macaroon: "/home/kieran/.polar/networks/3/volumes/lnd/alice/data/chain/bitcoin/regtest/admin.macaroon"
  # Cost per unit (BTC/USD/EUR/AUD/CAD/JPY/GBP)
  cost:
    currency: "BTC"
    amount: 0.00000100
    # Unit metric used to calculate quote (GBSpace, GBEgress)
    unit: "GBSpace"
    # Billing interval (day / month / year)
    interval:
      month: 1
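
To make the cost block concrete: with unit "GBSpace", currency BTC and amount 0.00000100 per unit per 1-month interval, a quote would presumably scale linearly with the size purchased and the number of intervals. A rough sketch of that arithmetic (the linear scaling rule is an assumption, not something this diff confirms):

    // Assumed linear pricing: cost = amount_per_unit * units * intervals.
    fn quote(amount_per_unit: f64, units: f64, intervals: f64) -> f64 {
        amount_per_unit * units * intervals
    }

    fn main() {
        // 5 GB of space for 3 months at 0.00000100 BTC per GB-month:
        let btc = quote(0.00000100, 5.0, 3.0);
        println!("quote: {:.8} BTC", btc); // 0.00001500 BTC
    }
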
65 grafana.json

@@ -18,13 +18,13 @@
"editable": true,
"fiscalYearStartMonth": 0,
"graphTooltip": 0,
"id": 15,
"id": 2,
"links": [],
"panels": [
{
"datasource": {
"type": "mysql",
"uid": "cdnhzi5uxm8zkb"
"uid": "behhij20nn4zka"
},
"fieldConfig": {
"defaults": {

@@ -93,18 +93,18 @@
}
]
},
"pluginVersion": "10.4.2",
"pluginVersion": "11.5.2",
"targets": [
{
"dataset": "void_cat",
"datasource": {
"type": "mysql",
"uid": "cdnhzi5uxm8zkb"
"uid": "behhij20nn4zka"
},
"editorMode": "code",
"format": "table",
"rawQuery": true,
"rawSql": "SELECT \n hex(void_cat.users.pubkey) as pubkey, \n count(void_cat.user_uploads.file) as uploads, \n sum(void_cat.uploads.size) as size\nFROM void_cat.users, void_cat.user_uploads, void_cat.uploads\nwhere void_cat.users.id = void_cat.user_uploads.user_id\nand void_cat.user_uploads.file = void_cat.uploads.id\ngroup by void_cat.users.pubkey",
"rawSql": "SELECT \n hex(users.pubkey) as pubkey, \n count(user_uploads.file) as uploads, \n sum(uploads.size) as size\nFROM users, user_uploads, uploads\nwhere users.id = user_uploads.user_id\nand user_uploads.file = uploads.id\ngroup by users.pubkey",
"refId": "A",
"sql": {
"columns": [

@@ -137,7 +137,7 @@
{
"datasource": {
"type": "mysql",
"uid": "cdnhzi5uxm8zkb"
"uid": "behhij20nn4zka"
},
"fieldConfig": {
"defaults": {

@@ -174,6 +174,7 @@
"graphMode": "area",
"justifyMode": "auto",
"orientation": "auto",
"percentChangeColorMode": "standard",
"reduceOptions": {
"calcs": [
"lastNotNull"

@@ -185,18 +186,18 @@
"textMode": "auto",
"wideLayout": true
},
"pluginVersion": "10.4.2",
"pluginVersion": "11.5.2",
"targets": [
{
"dataset": "mysql",
"datasource": {
"type": "mysql",
"uid": "cdnhzi5uxm8zkb"
"uid": "behhij20nn4zka"
},
"editorMode": "code",
"format": "table",
"rawQuery": true,
"rawSql": "select \n sum(uploads.size) as size\nfrom void_cat.uploads",
"rawSql": "select \n sum(uploads.size) as size\nfrom uploads",
"refId": "A",
"sql": {
"columns": [

@@ -223,7 +224,7 @@
{
"datasource": {
"type": "mysql",
"uid": "cdnhzi5uxm8zkb"
"uid": "behhij20nn4zka"
},
"fieldConfig": {
"defaults": {

@@ -260,6 +261,7 @@
"graphMode": "area",
"justifyMode": "auto",
"orientation": "auto",
"percentChangeColorMode": "standard",
"reduceOptions": {
"calcs": [
"lastNotNull"

@@ -271,18 +273,18 @@
"textMode": "auto",
"wideLayout": true
},
"pluginVersion": "10.4.2",
"pluginVersion": "11.5.2",
"targets": [
{
"dataset": "mysql",
"datasource": {
"type": "mysql",
"uid": "cdnhzi5uxm8zkb"
"uid": "behhij20nn4zka"
},
"editorMode": "code",
"format": "table",
"rawQuery": true,
"rawSql": "select \n count(users.pubkey) as users\nfrom void_cat.users",
"rawSql": "select \n count(users.pubkey) as users\nfrom users",
"refId": "A",
"sql": {
"columns": [

@@ -309,7 +311,7 @@
{
"datasource": {
"type": "mysql",
"uid": "cdnhzi5uxm8zkb"
"uid": "behhij20nn4zka"
},
"fieldConfig": {
"defaults": {

@@ -346,6 +348,7 @@
"graphMode": "area",
"justifyMode": "auto",
"orientation": "auto",
"percentChangeColorMode": "standard",
"reduceOptions": {
"calcs": [
"lastNotNull"

@@ -357,18 +360,18 @@
"textMode": "auto",
"wideLayout": true
},
"pluginVersion": "10.4.2",
"pluginVersion": "11.5.2",
"targets": [
{
"dataset": "mysql",
"datasource": {
"type": "mysql",
"uid": "cdnhzi5uxm8zkb"
"uid": "behhij20nn4zka"
},
"editorMode": "code",
"format": "table",
"rawQuery": true,
"rawSql": "select \n count(uploads.id) as files\nfrom void_cat.uploads",
"rawSql": "select \n count(uploads.id) as files\nfrom uploads",
"refId": "A",
"sql": {
"columns": [

@@ -395,7 +398,7 @@
{
"datasource": {
"type": "mysql",
"uid": "cdnhzi5uxm8zkb"
"uid": "behhij20nn4zka"
},
"fieldConfig": {
"defaults": {

@@ -445,18 +448,18 @@
},
"showHeader": true
},
"pluginVersion": "10.4.2",
"pluginVersion": "11.5.2",
"targets": [
{
"dataset": "mysql",
"datasource": {
"type": "mysql",
"uid": "cdnhzi5uxm8zkb"
"uid": "behhij20nn4zka"
},
"editorMode": "code",
"format": "table",
"rawQuery": true,
"rawSql": "select \n hex(uploads.id) as sha256,\n hex(users.pubkey) as uploader,\n uploads.name,\n sys.format_bytes(uploads.size) as size,\n uploads.mime_type,\n uploads.created,\n uploads.width,\n uploads.height\nfrom void_cat.uploads, void_cat.user_uploads, void_cat.users\nwhere uploads.id = user_uploads.file\nand users.id = user_uploads.user_id\norder by uploads.created desc\nlimit 50",
"rawSql": "select \n hex(uploads.id) as sha256,\n hex(users.pubkey) as uploader,\n uploads.name,\n sys.format_bytes(uploads.size) as size,\n uploads.mime_type,\n uploads.created,\n uploads.width,\n uploads.height\nfrom uploads, user_uploads, users\nwhere uploads.id = user_uploads.file\nand users.id = user_uploads.user_id\norder by uploads.created desc\nlimit 50",
"refId": "A",
"sql": {
"columns": [

@@ -483,7 +486,7 @@
{
"datasource": {
"type": "mysql",
"uid": "cdnhzi5uxm8zkb"
"uid": "behhij20nn4zka"
},
"fieldConfig": {
"defaults": {

@@ -533,18 +536,18 @@
},
"showHeader": true
},
"pluginVersion": "10.4.2",
"pluginVersion": "11.5.2",
"targets": [
{
"dataset": "mysql",
"datasource": {
"type": "mysql",
"uid": "cdnhzi5uxm8zkb"
"uid": "behhij20nn4zka"
},
"editorMode": "code",
"format": "table",
"rawQuery": true,
"rawSql": "select \n hex(uploads.id) as sha256,\n hex(users.pubkey) as uploader,\n uploads.name,\n sys.format_bytes(uploads.size) as size,\n uploads.mime_type,\n uploads.created\nfrom void_cat.uploads, void_cat.user_uploads, void_cat.users\nwhere uploads.id = user_uploads.file\nand users.id = user_uploads.user_id\norder by uploads.size desc\nlimit 50",
"rawSql": "select \n hex(uploads.id) as sha256,\n hex(users.pubkey) as uploader,\n uploads.name,\n sys.format_bytes(uploads.size) as size,\n uploads.mime_type,\n uploads.created\nfrom uploads, user_uploads, users\nwhere uploads.id = user_uploads.file\nand users.id = user_uploads.user_id\norder by uploads.size desc\nlimit 50",
"refId": "A",
"sql": {
"columns": [

@@ -571,7 +574,7 @@
{
"datasource": {
"type": "mysql",
"uid": "cdnhzi5uxm8zkb"
"uid": "behhij20nn4zka"
},
"fieldConfig": {
"defaults": {

@@ -621,13 +624,13 @@
},
"showHeader": true
},
"pluginVersion": "10.4.2",
"pluginVersion": "11.5.2",
"targets": [
{
"dataset": "mysql",
"datasource": {
"type": "mysql",
"uid": "cdnhzi5uxm8zkb"
"uid": "behhij20nn4zka"
},
"editorMode": "code",
"format": "table",

@@ -657,7 +660,9 @@
"type": "table"
}
],
"schemaVersion": 39,
"preload": false,
"refresh": "",
"schemaVersion": 40,
"tags": [],
"templating": {
"list": []

@@ -670,6 +675,6 @@
"timezone": "browser",
"title": "route96",
"uid": "ddni0rqotyltse",
"version": 12,
"version": 3,
"weekStart": ""
}

4 migrations/20250127210244_video_metadata.sql (new file)

@@ -0,0 +1,4 @@
-- Add migration script here
alter table uploads
    add column duration float,
    add column bitrate integer unsigned;

22 migrations/20250202135844_payments.sql (new file)

@@ -0,0 +1,22 @@
-- Add migration script here
alter table users
    add column paid_until timestamp,
    add column paid_size integer unsigned not null;

create table payments
(
    payment_hash binary(32) not null primary key,
    user_id      integer unsigned not null,
    created      timestamp default current_timestamp,
    amount       integer unsigned not null,
    is_paid      bit(1) not null default 0,
    days_value   integer unsigned not null,
    size_value   integer unsigned not null,
    settle_index integer unsigned,
    rate         float,

    constraint fk_payments_user_id
        foreign key (user_id) references users (id)
            on delete cascade
            on update restrict
);

28 migrations/20250610135841_reports.sql (new file)

@@ -0,0 +1,28 @@
-- Create reports table for file reporting functionality
create table reports
(
    id          integer unsigned not null auto_increment primary key,
    file_id     binary(32) not null,
    reporter_id integer unsigned not null,
    event_json  text not null,
    created     timestamp default current_timestamp,

    constraint fk_reports_file
        foreign key (file_id) references uploads (id)
            on delete cascade
            on update restrict,

    constraint fk_reports_reporter
        foreign key (reporter_id) references users (id)
            on delete cascade
            on update restrict
);

-- Unique index to prevent duplicate reports from same user for same file
create unique index ix_reports_file_reporter on reports (file_id, reporter_id);

-- Index for efficient lookups by file
create index ix_reports_file_id on reports (file_id);

-- Index for efficient lookups by reporter
create index ix_reports_reporter_id on reports (reporter_id);

5 migrations/20250610140000_add_reviewed_flag.sql (new file)

@@ -0,0 +1,5 @@
-- Add reviewed flag to reports table
alter table reports add column reviewed boolean not null default false;

-- Index for efficient filtering of non-reviewed reports
create index ix_reports_reviewed on reports (reviewed);

@@ -1,7 +1,7 @@
use crate::analytics::Analytics;
use crate::settings::Settings;
use anyhow::Error;
use log::{info, warn};
use log::{debug, warn};
use nostr::serde_json;
use reqwest::ClientBuilder;
use rocket::Request;

@@ -61,7 +61,7 @@ impl PlausibleAnalytics {
    .send()
    .await
{
    Ok(_v) => info!("Sent {:?}", msg),
    Ok(_v) => debug!("Sent {:?}", msg),
    Err(e) => warn!("Failed to track: {}", e),
}
}

@@ -33,10 +33,12 @@ impl<'r> FromRequest<'r> for Nip98Auth {
if event.kind != Kind::HttpAuth {
    return Outcome::Error((Status::new(401), "Wrong event kind"));
}
if event.created_at > Timestamp::now() {
if (event.created_at.as_u64() as i64 - Timestamp::now().as_u64() as i64).abs() >= 60 {
    return Outcome::Error((
        Status::new(401),
        "Created timestamp is in the future",
        "Created timestamp is out of range",
    ));
}
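
This change replaces the one-sided "not in the future" test with a symmetric 60-second drift window, so slightly skewed client clocks are tolerated in both directions. Restated as a standalone predicate (an illustrative sketch, names are not the repo's):

    /// True when `created_at` is within 60 seconds of `now`
    /// (both unix timestamps), mirroring the check above.
    fn within_drift_window(created_at: u64, now: u64) -> bool {
        (created_at as i64 - now as i64).abs() < 60
    }

    fn main() {
        assert!(within_drift_window(1_000_001, 1_000_060)); // 59s of skew: accepted
        assert!(!within_drift_window(1_000_000, 1_000_060)); // 60s of skew: rejected
    }
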
98 src/background/media_metadata.rs (new file)

@@ -0,0 +1,98 @@
use crate::db::{Database, FileUpload};
use crate::filesystem::FileStore;
use crate::processing::probe_file;
use anyhow::Result;
use log::{error, info, warn};

pub struct MediaMetadata {
    db: Database,
    fs: FileStore,
}

impl MediaMetadata {
    pub fn new(db: Database, fs: FileStore) -> Self {
        Self { db, fs }
    }

    pub async fn process(&mut self) -> Result<()> {
        let to_migrate = self.db.get_missing_media_metadata().await?;

        info!("{} files are missing metadata", to_migrate.len());

        for file in to_migrate {
            // probe file and update metadata
            let path = self.fs.get(&file.id);
            match probe_file(&path) {
                Ok(data) => {
                    let bv = data.best_video();
                    let duration = if data.duration < 0.0 {
                        None
                    } else {
                        Some(data.duration)
                    };
                    let bitrate = if data.bitrate == 0 {
                        None
                    } else {
                        Some(data.bitrate as u32)
                    };
                    info!(
                        "Updating metadata: id={}, dim={}x{}, dur={}, br={}",
                        hex::encode(&file.id),
                        bv.map(|v| v.width).unwrap_or(0),
                        bv.map(|v| v.height).unwrap_or(0),
                        duration.unwrap_or(0.0),
                        bitrate.unwrap_or(0)
                    );
                    if let Err(e) = self
                        .db
                        .update_metadata(
                            &file.id,
                            bv.map(|v| v.width as u32),
                            bv.map(|v| v.height as u32),
                            duration,
                            bitrate,
                        )
                        .await
                    {
                        error!("Failed to update metadata: {}", e);
                    }
                }
                Err(e) => {
                    warn!("Skipping missing file: {}, {}", hex::encode(&file.id), e);
                }
            }
        }
        Ok(())
    }
}

impl Database {
    pub async fn get_missing_media_metadata(&mut self) -> Result<Vec<FileUpload>> {
        let results: Vec<FileUpload> = sqlx::query_as("select * from uploads where \
            (mime_type like 'image/%' and (width is null or height is null)) or \
            (mime_type like 'video/%' and (width is null or height is null or bitrate is null or duration is null))")
            .fetch_all(&self.pool)
            .await?;

        Ok(results)
    }

    pub async fn update_metadata(
        &mut self,
        id: &Vec<u8>,
        width: Option<u32>,
        height: Option<u32>,
        duration: Option<f32>,
        bitrate: Option<u32>,
    ) -> Result<()> {
        sqlx::query("update uploads set width=?, height=?, duration=?, bitrate=? where id=?")
            .bind(width)
            .bind(height)
            .bind(duration)
            .bind(bitrate)
            .bind(id)
            .execute(&self.pool)
            .await?;
        Ok(())
    }
}
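
One detail worth noting: the task treats a negative duration or a zero bitrate from the probe as "unknown" and stores NULL rather than a bogus number. That normalization, extracted as a pure function (illustrative only):

    // Sentinel probe values become None (stored as NULL).
    fn normalize_probe(duration: f32, bitrate: i64) -> (Option<f32>, Option<u32>) {
        let duration = if duration < 0.0 { None } else { Some(duration) };
        let bitrate = if bitrate == 0 { None } else { Some(bitrate as u32) };
        (duration, bitrate)
    }

    fn main() {
        assert_eq!(normalize_probe(-1.0, 0), (None, None));
        assert_eq!(normalize_probe(12.5, 800_000), (Some(12.5), Some(800_000)));
    }
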
54 src/background/mod.rs (new file)

@@ -0,0 +1,54 @@
use crate::db::Database;
use crate::filesystem::FileStore;
use log::{error, info, warn};
use tokio::sync::broadcast;
use tokio::task::JoinHandle;

#[cfg(feature = "media-compression")]
mod media_metadata;

#[cfg(feature = "payments")]
mod payments;

pub fn start_background_tasks(
    db: Database,
    file_store: FileStore,
    shutdown_rx: broadcast::Receiver<()>,
    #[cfg(feature = "payments")] client: Option<fedimint_tonic_lnd::Client>,
) -> Vec<JoinHandle<()>> {
    let mut ret = vec![];

    #[cfg(feature = "media-compression")]
    {
        let db = db.clone();
        let rx = shutdown_rx.resubscribe();
        ret.push(tokio::spawn(async move {
            info!("Starting MediaMetadata background task");
            let mut m = media_metadata::MediaMetadata::new(db, file_store.clone());
            if let Err(e) = m.process(rx).await {
                error!("MediaMetadata failed: {}", e);
            } else {
                info!("MediaMetadata background task completed");
            }
        }));
    }
    #[cfg(feature = "payments")]
    {
        if let Some(client) = client {
            let db = db.clone();
            let rx = shutdown_rx.resubscribe();
            ret.push(tokio::spawn(async move {
                info!("Starting PaymentsHandler background task");
                let mut m = payments::PaymentsHandler::new(client, db);
                if let Err(e) = m.process(rx).await {
                    error!("PaymentsHandler failed: {}", e);
                } else {
                    info!("PaymentsHandler background task completed");
                }
            }));
        } else {
            warn!("Not starting PaymentsHandler, configuration missing")
        }
    }
    ret
}
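
The coordination pattern here is a tokio broadcast channel: each background task resubscribes its own receiver, and a single send of () fans out to all of them. A minimal self-contained sketch of that pattern (illustrative, not repo code):

    use tokio::sync::broadcast;

    #[tokio::main]
    async fn main() {
        let (tx, rx) = broadcast::channel::<()>(1);
        // Each task gets its own receiver via resubscribe().
        let mut task_rx = rx.resubscribe();
        let task = tokio::spawn(async move {
            // Runs until a shutdown signal arrives.
            let _ = task_rx.recv().await;
            println!("task shutting down");
        });
        tx.send(()).expect("no receivers");
        task.await.unwrap();
    }
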
71 src/background/payments.rs (new file)

@@ -0,0 +1,71 @@
use crate::db::Database;
use anyhow::Result;
use fedimint_tonic_lnd::lnrpc::invoice::InvoiceState;
use fedimint_tonic_lnd::lnrpc::InvoiceSubscription;
use fedimint_tonic_lnd::Client;
use log::{error, info};
use rocket::futures::StreamExt;
use sqlx::Row;
use tokio::sync::broadcast;

pub struct PaymentsHandler {
    client: Client,
    database: Database,
}

impl PaymentsHandler {
    pub fn new(client: Client, database: Database) -> Self {
        PaymentsHandler { client, database }
    }

    pub async fn process(&mut self, mut rx: broadcast::Receiver<()>) -> Result<()> {
        let start_idx = self.database.get_last_settle_index().await?;
        let mut invoices = self
            .client
            .lightning()
            .subscribe_invoices(InvoiceSubscription {
                add_index: 0,
                settle_index: start_idx,
            })
            .await?;
        info!("Starting invoice subscription from {}", start_idx);

        let invoices = invoices.get_mut();
        loop {
            tokio::select! {
                Ok(_) = rx.recv() => {
                    break;
                }
                Some(Ok(msg)) = invoices.next() => {
                    if msg.state == InvoiceState::Settled as i32 {
                        if let Ok(Some(mut p)) = self.database.get_payment(&msg.r_hash).await {
                            p.settle_index = Some(msg.settle_index);
                            p.is_paid = true;
                            match self.database.complete_payment(&p).await {
                                Ok(()) => info!(
                                    "Successfully completed payment: {}",
                                    hex::encode(&msg.r_hash)
                                ),
                                Err(e) => error!("Failed to complete payment: {}", e),
                            }
                        }
                    }
                }
            }
        }

        Ok(())
    }
}

impl Database {
    async fn get_last_settle_index(&self) -> Result<u64> {
        Ok(
            sqlx::query("select max(settle_index) from payments where is_paid = true")
                .fetch_one(&self.pool)
                .await?
                .try_get(0)
                .unwrap_or(0),
        )
    }
}
@@ -3,6 +3,8 @@ use std::net::{IpAddr, SocketAddr};
use anyhow::Error;
use clap::Parser;
use config::Config;
#[cfg(feature = "payments")]
use fedimint_tonic_lnd::lnrpc::GetInfoRequest;
use log::{error, info};
use rocket::config::Ident;
use rocket::data::{ByteUnit, Limits};

@@ -12,12 +14,14 @@ use rocket::shield::Shield;
use route96::analytics::plausible::PlausibleAnalytics;
#[cfg(feature = "analytics")]
use route96::analytics::AnalyticsFairing;
use route96::background::start_background_tasks;
use route96::cors::CORS;
use route96::db::Database;
use route96::filesystem::FileStore;
use route96::routes;
use route96::routes::{get_blob, head_blob, root};
use route96::settings::Settings;
use tokio::sync::broadcast;

#[derive(Parser, Debug)]
#[command(version, about)]

@@ -63,15 +67,22 @@ async fn main() -> Result<(), Error> {
    .limit("form", upload_limit);
config.ident = Ident::try_new("route96").unwrap();

let fs = FileStore::new(settings.clone());
let mut rocket = rocket::Rocket::custom(config)
    .manage(FileStore::new(settings.clone()))
    .manage(fs.clone())
    .manage(settings.clone())
    .manage(db.clone())
    .attach(CORS)
    .attach(Shield::new()) // disable
    .mount(
        "/",
        routes![root, get_blob, head_blob, routes::void_cat_redirect],
        routes![
            root,
            get_blob,
            head_blob,
            routes::void_cat_redirect,
            routes::void_cat_redirect_head
        ],
    )
    .mount("/admin", routes::admin_routes());

@@ -89,10 +100,51 @@ async fn main() -> Result<(), Error> {
{
    rocket = rocket.mount("/", routes::nip96_routes());
}
#[cfg(feature = "media-compression")]
{
    rocket = rocket.mount("/", routes![routes::get_blob_thumb]);
}
#[cfg(feature = "payments")]
let lnd = {
    if let Some(lnd) = settings.payments.as_ref().map(|p| &p.lnd) {
        let lnd = fedimint_tonic_lnd::connect(
            lnd.endpoint.clone(),
            lnd.tls.clone(),
            lnd.macaroon.clone(),
        )
        .await?;

        let info = {
            let mut lnd = lnd.clone();
            lnd.lightning().get_info(GetInfoRequest::default()).await?
        };

        info!(
            "LND connected: {} v{}",
            info.get_ref().alias,
            info.get_ref().version
        );
        rocket = rocket
            .manage(lnd.clone())
            .mount("/", routes::payment::routes());
        Some(lnd)
    } else {
        None
    }
};

let (shutdown_tx, shutdown_rx) = broadcast::channel(1);
let jh = start_background_tasks(db, fs, shutdown_rx, lnd);

if let Err(e) = rocket.launch().await {
    error!("Rocker error {}", e);
    Err(Error::from(e))
} else {
    Ok(())
}
shutdown_tx
    .send(())
    .expect("Failed to send shutdown signal");

for j in jh {
    j.await?;
}
Ok(())
}

228 src/bin/r96util.rs (new file)

@@ -0,0 +1,228 @@
use anyhow::{Context, Error, Result};
use clap::{Parser, Subcommand};
use config::Config;
use indicatif::{ProgressBar, ProgressStyle};
use log::{error, info};
use route96::db::{Database, FileUpload};
use route96::filesystem::{FileStore, FileSystemResult};
use route96::processing::probe_file;
use route96::settings::Settings;
use std::future::Future;
use std::path::{Path, PathBuf};
use std::pin::Pin;
use std::sync::Arc;
use std::time::SystemTime;
use tokio::sync::Semaphore;

#[derive(Parser, Debug)]
#[command(version, about)]
struct Args {
    #[arg(long)]
    pub config: Option<String>,

    #[clap(subcommand)]
    pub command: Commands,
}

#[derive(Debug, Subcommand)]
enum Commands {
    /// Check file hash matches filename / path
    Check {
        #[arg(long)]
        delete: Option<bool>,
    },

    /// Import a directory into the filesystem
    /// (does NOT import files into the database, use database-import command for that)
    Import {
        #[arg(long)]
        from: PathBuf,
        #[arg(long, default_missing_value = "true", num_args = 0..=1)]
        probe_media: Option<bool>,
    },

    /// Import files from filesystem into database
    DatabaseImport {
        /// Don't actually import data and just print which files WOULD be imported
        #[arg(long, default_missing_value = "true", num_args = 0..=1)]
        dry_run: Option<bool>,
    },
}

#[tokio::main]
async fn main() -> Result<(), Error> {
    if std::env::var("RUST_LOG").is_err() {
        std::env::set_var("RUST_LOG", "info");
    }
    env_logger::init();

    let args: Args = Args::parse();

    let builder = Config::builder()
        .add_source(config::File::with_name(if let Some(ref c) = args.config {
            c.as_str()
        } else {
            "config.yaml"
        }))
        .add_source(config::Environment::with_prefix("APP"))
        .build()?;

    let settings: Settings = builder.try_deserialize()?;

    match args.command {
        Commands::Check { delete } => {
            info!("Checking files in: {}", settings.storage_dir);
            let fs = FileStore::new(settings.clone());
            iter_files(&fs.storage_dir(), 4, |entry, p| {
                let p = p.clone();
                Box::pin(async move {
                    let id = if let Some(i) = id_from_path(&entry) {
                        i
                    } else {
                        p.set_message(format!("Skipping invalid file: {}", &entry.display()));
                        return Ok(());
                    };

                    let hash = FileStore::hash_file(&entry).await?;
                    if hash != id {
                        if delete.unwrap_or(false) {
                            p.set_message(format!("Deleting corrupt file: {}", &entry.display()));
                            tokio::fs::remove_file(&entry).await?;
                        } else {
                            p.set_message(format!("File is corrupted: {}", &entry.display()));
                        }
                    }
                    Ok(())
                })
            })
            .await?;
        }
        Commands::Import { from, probe_media } => {
            let fs = FileStore::new(settings.clone());
            let db = Database::new(&settings.database).await?;
            db.migrate().await?;
            info!("Importing from: {}", fs.storage_dir().display());
            iter_files(&from, 4, |entry, p| {
                let fs = fs.clone();
                let p = p.clone();
                Box::pin(async move {
                    let mime = infer::get_from_path(&entry)?
                        .map(|m| m.mime_type())
                        .unwrap_or("application/octet-stream");

                    // test media is not corrupt
                    if probe_media.unwrap_or(true)
                        && (mime.starts_with("image/") || mime.starts_with("video/"))
                        && probe_file(&entry).is_err()
                    {
                        p.set_message(format!("Skipping media invalid file: {}", &entry.display()));
                        return Ok(());
                    }

                    let file = tokio::fs::File::open(&entry).await?;
                    let dst = fs.put(file, mime, false).await?;
                    match dst {
                        FileSystemResult::AlreadyExists(_) => {
                            p.set_message(format!("Duplicate file: {}", &entry.display()));
                        }
                        FileSystemResult::NewFile(_) => {
                            p.set_message(format!("Imported: {}", &entry.display()));
                        }
                    }
                    Ok(())
                })
            })
            .await?;
        }
        Commands::DatabaseImport { dry_run } => {
            let fs = FileStore::new(settings.clone());
            let db = Database::new(&settings.database).await?;
            db.migrate().await?;
            info!("Importing to DB from: {}", fs.storage_dir().display());
            iter_files(&fs.storage_dir(), 4, |entry, p| {
                let db = db.clone();
                let p = p.clone();
                Box::pin(async move {
                    let id = if let Some(i) = id_from_path(&entry) {
                        i
                    } else {
                        p.set_message(format!("Skipping invalid file: {}", &entry.display()));
                        return Ok(());
                    };
                    let u = db.get_file(&id).await.context("db get_file")?;
                    if u.is_none() {
                        if !dry_run.unwrap_or(false) {
                            p.set_message(format!("Importing file: {}", &entry.display()));
                            let mime = infer::get_from_path(&entry)
                                .context("infer")?
                                .map(|m| m.mime_type())
                                .unwrap_or("application/octet-stream")
                                .to_string();
                            let meta = entry.metadata().context("file metadata")?;
                            let entry = FileUpload {
                                id,
                                name: None,
                                size: meta.len(),
                                mime_type: mime,
                                created: meta.created().unwrap_or(SystemTime::now()).into(),
                                width: None,
                                height: None,
                                blur_hash: None,
                                alt: None,
                                duration: None,
                                bitrate: None,
                            };
                            db.add_file(&entry, None).await.context("db add_file")?;
                        } else {
                            p.set_message(format!(
                                "[DRY-RUN] Importing file: {}",
                                &entry.display()
                            ));
                        }
                    }
                    Ok(())
                })
            })
            .await?;
        }
    }
    Ok(())
}

fn id_from_path(path: &Path) -> Option<Vec<u8>> {
    hex::decode(path.file_name()?.to_str()?).ok()
}

async fn iter_files<F>(p: &Path, threads: usize, mut op: F) -> Result<()>
where
    F: FnMut(PathBuf, ProgressBar) -> Pin<Box<dyn Future<Output = Result<()>> + Send>>,
{
    let semaphore = Arc::new(Semaphore::new(threads));
    info!("Scanning files: {}", p.display());
    let entries = walkdir::WalkDir::new(p);
    let dir = entries
        .into_iter()
        .filter_map(Result::ok)
        .filter(|e| e.file_type().is_file())
        .collect::<Vec<_>>();
    let p = ProgressBar::new(dir.len() as u64).with_style(ProgressStyle::with_template(
        "{spinner} [{pos}/{len}] {msg}",
    )?);
    let mut all_tasks = vec![];
    for entry in dir {
        let _lock = semaphore.clone().acquire_owned().await?;
        p.inc(1);
        let fut = op(entry.path().to_path_buf(), p.clone());
        all_tasks.push(tokio::spawn(async move {
            if let Err(e) = fut.await {
                error!("Error processing file: {} {}", entry.path().display(), e);
            }
            drop(_lock);
        }));
    }
    for task in all_tasks {
        task.await?;
    }
    p.finish_with_message("Done!");
    Ok(())
}
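
The iter_files helper bounds concurrency with an Arc<Semaphore>: the loop blocks on acquire_owned when all permits are taken, and each spawned task holds its permit until done. A condensed, self-contained sketch of the same pattern (illustrative, not the repo's code):

    use std::sync::Arc;
    use tokio::sync::Semaphore;

    #[tokio::main]
    async fn main() -> Result<(), Box<dyn std::error::Error>> {
        let semaphore = Arc::new(Semaphore::new(4)); // at most 4 in flight
        let mut tasks = vec![];
        for i in 0..16 {
            // Blocks here once 4 permits are out, throttling the spawn loop.
            let permit = semaphore.clone().acquire_owned().await?;
            tasks.push(tokio::spawn(async move {
                println!("processing item {}", i);
                drop(permit); // release the slot when the work is done
            }));
        }
        for t in tasks {
            t.await?;
        }
        Ok(())
    }
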
@@ -24,7 +24,7 @@ struct ProgramArgs {

#[tokio::main]
async fn main() -> Result<(), anyhow::Error> {
    pretty_env_logger::init();
    env_logger::init();

    let args: ProgramArgs = ProgramArgs::parse();

@@ -34,7 +34,7 @@ struct Args {

#[tokio::main]
async fn main() -> Result<(), Error> {
    pretty_env_logger::init();
    env_logger::init();

    let builder = Config::builder()
        .add_source(config::File::with_name("config.yaml"))

@@ -103,7 +103,7 @@ async fn migrate_file(
    let src_path = PathBuf::new()
        .join(&args.data_path)
        .join(VoidFile::map_to_path(&f.id));
    let dst_path = fs.map_path(&id_vec);
    let dst_path = fs.get(&id_vec);
    if src_path.exists() && !dst_path.exists() {
        info!(
            "Copying file: {} from {} => {}",

@@ -139,7 +139,9 @@ async fn migrate_file(
    },
    blur_hash: None,
    alt: f.description.clone(),
    duration: None,
    bitrate: None,
};
db.add_file(&fu, uid).await?;
db.add_file(&fu, Some(uid)).await?;
Ok(())
}

171 src/db.rs

@@ -25,6 +25,10 @@ pub struct FileUpload {
    pub blur_hash: Option<String>,
    /// Alt text of the media
    pub alt: Option<String>,
    /// Duration of media in seconds
    pub duration: Option<f32>,
    /// Average bitrate in bits/s
    pub bitrate: Option<u32>,

    #[sqlx(skip)]
    #[cfg(feature = "labels")]

@@ -43,6 +47,8 @@ impl From<&NewFileResult> for FileUpload {
    height: value.height,
    blur_hash: value.blur_hash.clone(),
    alt: None,
    duration: value.duration,
    bitrate: value.bitrate,
    #[cfg(feature = "labels")]
    labels: value.labels.clone(),
}

@@ -55,6 +61,10 @@ pub struct User {
    pub pubkey: Vec<u8>,
    pub created: DateTime<Utc>,
    pub is_admin: bool,
    #[cfg(feature = "payments")]
    pub paid_until: Option<DateTime<Utc>>,
    #[cfg(feature = "payments")]
    pub paid_size: u64,
}

#[cfg(feature = "labels")]

@@ -84,6 +94,31 @@ pub struct UserStats {
    pub total_size: u64,
}

#[cfg(feature = "payments")]
#[derive(Clone, FromRow, Serialize)]
pub struct Payment {
    pub payment_hash: Vec<u8>,
    pub user_id: u64,
    pub created: DateTime<Utc>,
    pub amount: u64,
    pub is_paid: bool,
    pub days_value: u64,
    pub size_value: u64,
    pub settle_index: Option<u64>,
    pub rate: Option<f32>,
}

#[derive(Clone, FromRow, Serialize)]
pub struct Report {
    pub id: u64,
    #[serde(with = "hex")]
    pub file_id: Vec<u8>,
    pub reporter_id: u64,
    pub event_json: String,
    pub created: DateTime<Utc>,
    pub reviewed: bool,
}

#[derive(Clone)]
pub struct Database {
    pub(crate) pool: sqlx::pool::Pool<sqlx::mysql::MySql>,

@@ -145,7 +180,7 @@ impl Database {
pub async fn add_file(&self, file: &FileUpload, user_id: u64) -> Result<(), Error> {
    let mut tx = self.pool.begin().await?;
    let q = sqlx::query("insert ignore into \
        uploads(id,name,size,mime_type,blur_hash,width,height,alt,created) values(?,?,?,?,?,?,?,?,?)")
        uploads(id,name,size,mime_type,blur_hash,width,height,alt,created,duration,bitrate) values(?,?,?,?,?,?,?,?,?,?,?)")
        .bind(&file.id)
        .bind(&file.name)
        .bind(file.size)

@@ -154,7 +189,9 @@ impl Database {
        .bind(file.width)
        .bind(file.height)
        .bind(&file.alt)
        .bind(file.created);
        .bind(file.created)
        .bind(file.duration)
        .bind(file.bitrate);
    tx.execute(q).await?;

    let q2 = sqlx::query("insert ignore into user_uploads(file,user_id) values(?,?)")

@@ -262,3 +299,133 @@ impl Database {
        Ok((results, count))
    }
}

#[cfg(feature = "payments")]
impl Database {
    pub async fn insert_payment(&self, payment: &Payment) -> Result<(), Error> {
        sqlx::query("insert into payments(payment_hash,user_id,amount,days_value,size_value,rate) values(?,?,?,?,?,?)")
            .bind(&payment.payment_hash)
            .bind(payment.user_id)
            .bind(payment.amount)
            .bind(payment.days_value)
            .bind(payment.size_value)
            .bind(payment.rate)
            .execute(&self.pool)
            .await?;
        Ok(())
    }

    pub async fn get_payment(&self, payment_hash: &Vec<u8>) -> Result<Option<Payment>, Error> {
        sqlx::query_as("select * from payments where payment_hash = ?")
            .bind(payment_hash)
            .fetch_optional(&self.pool)
            .await
    }

    pub async fn get_user_payments(&self, uid: u64) -> Result<Vec<Payment>, Error> {
        sqlx::query_as("select * from payments where user_id = ?")
            .bind(uid)
            .fetch_all(&self.pool)
            .await
    }

    pub async fn complete_payment(&self, payment: &Payment) -> Result<(), Error> {
        let mut tx = self.pool.begin().await?;

        sqlx::query("update payments set is_paid = true, settle_index = ? where payment_hash = ?")
            .bind(payment.settle_index)
            .bind(&payment.payment_hash)
            .execute(&mut *tx)
            .await?;

        // TODO: check space is not downgraded

        sqlx::query("update users set paid_until = TIMESTAMPADD(DAY, ?, IFNULL(paid_until, current_timestamp)), paid_size = ? where id = ?")
            .bind(payment.days_value)
            .bind(payment.size_value)
            .bind(payment.user_id)
            .execute(&mut *tx)
            .await?;

        tx.commit().await?;

        Ok(())
    }

    /// Check if user has sufficient quota for an upload
    pub async fn check_user_quota(&self, pubkey: &Vec<u8>, upload_size: u64, free_quota_bytes: u64) -> Result<bool, Error> {
        // Get or create user
        let user_id = self.upsert_user(pubkey).await?;

        // Get user's current storage usage
        let user_stats = self.get_user_stats(user_id).await.unwrap_or(UserStats {
            file_count: 0,
            total_size: 0
        });

        // Get user's paid quota
        let user = self.get_user(pubkey).await?;
        let (paid_size, paid_until) = (user.paid_size, user.paid_until);

        // Calculate total available quota
        let mut available_quota = free_quota_bytes;

        // Add paid quota if still valid
        if let Some(paid_until) = paid_until {
            if paid_until > chrono::Utc::now() {
                available_quota += paid_size;
            }
        }

        // Check if upload would exceed quota
        Ok(user_stats.total_size + upload_size <= available_quota)
    }

    /// Add a new report to the database
    pub async fn add_report(&self, file_id: &[u8], reporter_id: u64, event_json: &str) -> Result<(), Error> {
        sqlx::query("insert into reports (file_id, reporter_id, event_json) values (?, ?, ?)")
            .bind(file_id)
            .bind(reporter_id)
            .bind(event_json)
            .execute(&self.pool)
            .await?;
        Ok(())
    }

    /// List reports with pagination for admin view
    pub async fn list_reports(&self, offset: u32, limit: u32) -> Result<(Vec<Report>, i64), Error> {
        let reports: Vec<Report> = sqlx::query_as(
            "select id, file_id, reporter_id, event_json, created, reviewed from reports where reviewed = false order by created desc limit ? offset ?"
        )
        .bind(limit)
        .bind(offset)
        .fetch_all(&self.pool)
        .await?;

        let count: i64 = sqlx::query("select count(id) from reports where reviewed = false")
            .fetch_one(&self.pool)
            .await?
            .try_get(0)?;

        Ok((reports, count))
    }

    /// Get reports for a specific file
    pub async fn get_file_reports(&self, file_id: &[u8]) -> Result<Vec<Report>, Error> {
        sqlx::query_as(
            "select id, file_id, reporter_id, event_json, created, reviewed from reports where file_id = ? order by created desc"
        )
        .bind(file_id)
        .fetch_all(&self.pool)
        .await
    }

    /// Mark a report as reviewed (used for acknowledging)
    pub async fn mark_report_reviewed(&self, report_id: u64) -> Result<(), Error> {
        sqlx::query("update reports set reviewed = true where id = ?")
            .bind(report_id)
            .execute(&self.pool)
            .await?;
        Ok(())
    }
}
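
The quota rule in check_user_quota reduces to simple arithmetic: the free allowance, plus the paid allowance while it is unexpired, must cover existing usage plus the new upload. Restated as a standalone function (names are illustrative):

    // free + unexpired paid quota must cover current usage + the new upload.
    fn within_quota(used: u64, upload: u64, free: u64, paid: Option<u64>) -> bool {
        // paid is Some(size) only while paid_until is in the future
        let available = free + paid.unwrap_or(0);
        used + upload <= available
    }

    fn main() {
        let free = 104_857_600; // the 100MB free_quota_bytes default
        assert!(within_quota(50_000_000, 10_000_000, free, None));
        assert!(!within_quota(100_000_000, 10_000_000, free, None));
    }
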
@@ -1,5 +1,6 @@
#[cfg(feature = "labels")]
use crate::db::FileLabel;

#[cfg(feature = "labels")]
use crate::processing::labeling::label_frame;
#[cfg(feature = "media-compression")]

@@ -9,11 +10,12 @@ use anyhow::Error;
use anyhow::Result;
#[cfg(feature = "media-compression")]
use ffmpeg_rs_raw::DemuxerInfo;
use ffmpeg_rs_raw::StreamInfo;
#[cfg(feature = "media-compression")]
use rocket::form::validate::Contains;
use serde::Serialize;
use sha2::{Digest, Sha256};
use std::path::PathBuf;
use std::path::{Path, PathBuf};
use tokio::fs::File;
use tokio::io::{AsyncRead, AsyncReadExt};
use uuid::Uuid;

@@ -36,10 +38,13 @@ pub struct NewFileResult {
    pub width: Option<u32>,
    pub height: Option<u32>,
    pub blur_hash: Option<String>,
    pub duration: Option<f32>,
    pub bitrate: Option<u32>,
    #[cfg(feature = "labels")]
    pub labels: Vec<FileLabel>,
}

#[derive(Clone)]
pub struct FileStore {
    settings: Settings,
}

@@ -57,7 +62,7 @@ impl FileStore {
/// Store a new file
pub async fn put<'r, S>(
    &self,
    stream: S,
    path: S,
    mime_type: &str,
    compress: bool,
) -> Result<FileSystemResult>

@@ -65,7 +70,7 @@ impl FileStore {
where
    S: AsyncRead + Unpin + 'r,
{
    // store file in temp path and hash the file
    let (temp_file, size, hash) = self.store_hash_temp_file(stream).await?;
    let (temp_file, size, hash) = self.store_hash_temp_file(path).await?;
    let dst_path = self.map_path(&hash);

    // check if file hash already exists

@@ -74,7 +79,7 @@ impl FileStore {
        return Ok(FileSystemResult::AlreadyExists(hash));
    }

    let mut res = if compress {
    let mut res = if compress && crate::can_compress(mime_type) {
        #[cfg(feature = "media-compression")]
        {
            let res = match self.compress_file(&temp_file, mime_type).await {

@@ -92,20 +97,30 @@ impl FileStore {
            anyhow::bail!("Compression not supported!");
        }
    } else {
        let (width, height, mime_type) = {
        let (width, height, mime_type, duration, bitrate) = {
            #[cfg(feature = "media-compression")]
            {
                let probe = probe_file(&temp_file).ok();
                let v_stream = probe.as_ref().and_then(|p| p.best_video());
                let mime = Self::hack_mime_type(mime_type, &probe, &temp_file);
                let mime = Self::hack_mime_type(mime_type, &probe, &v_stream, &temp_file);
                (
                    v_stream.map(|v| v.width as u32),
                    v_stream.map(|v| v.height as u32),
                    mime,
                    probe
                        .as_ref()
                        .map(|p| if p.duration < 0. { 0.0 } else { p.duration }),
                    probe.as_ref().map(|p| p.bitrate as u32),
                )
            }
            #[cfg(not(feature = "media-compression"))]
            (None, None, Self::infer_mime_type(mime_type, &temp_file))
            (
                None,
                None,
                Self::infer_mime_type(mime_type, &temp_file),
                None,
                None,
            )
        };
        NewFileResult {
            path: temp_file,

@@ -115,6 +130,8 @@ impl FileStore {
            width,
            height,
            blur_hash: None,
            duration,
            bitrate,
        }
    };

@@ -136,18 +153,50 @@ impl FileStore {

    #[cfg(feature = "media-compression")]
    /// Try to replace the mime-type when unknown using ffmpeg probe result
    fn hack_mime_type(mime_type: &str, p: &Option<DemuxerInfo>, out_path: &PathBuf) -> String {
    fn hack_mime_type(
        mime_type: &str,
        p: &Option<DemuxerInfo>,
        stream: &Option<&StreamInfo>,
        out_path: &PathBuf,
    ) -> String {
        if let Some(p) = p {
            if p.format.contains("mp4") {
                return "video/mp4".to_string();
            let mime = if p.format.contains("mp4") {
                Some("video/mp4")
            } else if p.format.contains("webp") {
                return "image/webp".to_string();
                Some("image/webp")
            } else if p.format.contains("jpeg") {
                return "image/jpeg".to_string();
                Some("image/jpeg")
            } else if p.format.contains("png") {
                return "image/png".to_string();
                Some("image/png")
            } else if p.format.contains("gif") {
                return "image/gif".to_string();
                Some("image/gif")
            } else {
                None
            };
            let codec = if let Some(s) = stream {
                match s.codec {
                    27 => Some("avc1".to_owned()),           // AV_CODEC_ID_H264
                    173 => Some("hvc1".to_owned()),          // AV_CODEC_ID_HEVC
                    86016 => Some("mp4a.40.33".to_string()), // AV_CODEC_ID_MP2
                    86017 => Some("mp4a.40.34".to_string()), // AV_CODEC_ID_MP3
                    86018 => Some("mp4a.40.2".to_string()),  // AV_CODEC_ID_AAC
                    86019 => Some("ac-3".to_string()),       // AV_CODEC_ID_AC3
                    86056 => Some("ec-3".to_string()),       // AV_CODEC_ID_EAC3
                    _ => None,
                }
            } else {
                None
            };
            if let Some(m) = mime {
                return format!(
                    "{}{}",
                    m,
                    if let Some(c) = codec {
                        format!("; codecs=\"{}\"", c)
                    } else {
                        "".to_owned()
                    }
                );
            }
        }

@@ -164,7 +213,8 @@ impl FileStore {
        }
    }

    async fn compress_file(&self, input: &PathBuf, mime_type: &str) -> Result<NewFileResult> {
    #[cfg(feature = "media-compression")]
    async fn compress_file(&self, input: &Path, mime_type: &str) -> Result<NewFileResult> {
        let compressed_result = compress_file(input, mime_type, &self.temp_dir())?;
        #[cfg(feature = "labels")]
        let labels = if let Some(mp) = &self.settings.vit_model {

@@ -194,6 +244,8 @@ impl FileStore {
            height: Some(compressed_result.height as u32),
            blur_hash: None,
            mime_type: compressed_result.mime_type,
            duration: Some(compressed_result.duration),
            bitrate: Some(compressed_result.bitrate),
            #[cfg(feature = "labels")]
            labels,
        })

@@ -214,7 +266,7 @@ impl FileStore {
        Ok((out_path, n, hash))
    }

    async fn hash_file(p: &PathBuf) -> Result<Vec<u8>, Error> {
    pub async fn hash_file(p: &Path) -> Result<Vec<u8>, Error> {
        let mut file = File::open(p).await?;
        let mut hasher = Sha256::new();
        let mut buf = [0; 4096];

@@ -229,7 +281,7 @@ impl FileStore {
        Ok(res.to_vec())
    }

    pub fn map_path(&self, id: &Vec<u8>) -> PathBuf {
    fn map_path(&self, id: &Vec<u8>) -> PathBuf {
        let id = hex::encode(id);
        self.storage_dir().join(&id[0..2]).join(&id[2..4]).join(id)
    }
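
With map_path made private, the content-addressed layout becomes an internal detail: callers go through get/hash_file instead. The layout itself fans the hex hash out over two directory levels so no single directory grows unbounded. A sketch of the rule (illustrative; assumes the hex crate, which the repo already uses):

    use std::path::{Path, PathBuf};

    // A file whose SHA-256 is ab12cd... lives at <storage_dir>/ab/12/ab12cd...
    fn map_path(storage_dir: &Path, id: &[u8]) -> PathBuf {
        let id = hex::encode(id); // needs at least 2 bytes of hash
        storage_dir.join(&id[0..2]).join(&id[2..4]).join(id)
    }

    fn main() {
        let id = [0xab, 0x12, 0xcd, 0x34];
        println!("{}", map_path(Path::new("/data"), &id).display());
        // -> /data/ab/12/ab12cd34
    }
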
@@ -1,13 +1,18 @@
#[cfg(feature = "analytics")]
pub mod analytics;
pub mod auth;
pub mod background;
pub mod cors;
pub mod db;
pub mod filesystem;
#[cfg(feature = "payments")]
pub mod payments;
#[cfg(feature = "media-compression")]
pub mod processing;
pub mod routes;
pub mod settings;
#[cfg(any(feature = "void-cat-redirects", feature = "bin-void-cat-migrate"))]
pub mod void_db;
pub mod void_file;

pub fn can_compress(mime_type: &str) -> bool {
    mime_type.starts_with("image/")
}

53 src/payments.rs (new file)

@@ -0,0 +1,53 @@
use serde::{Deserialize, Serialize};
use std::fmt::{Display, Formatter};

#[cfg(feature = "payments")]
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PaymentAmount {
    pub currency: Currency,
    pub amount: f32,
}

#[cfg(feature = "payments")]
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum Currency {
    BTC,
    USD,
    EUR,
    GBP,
    JPY,
    CAD,
    AUD,
}

#[cfg(feature = "payments")]
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum PaymentUnit {
    GBSpace,
    GBEgress,
}

impl PaymentUnit {
    /// Get the total size from a number of units
    pub fn to_size(&self, units: f32) -> u64 {
        (1000f32 * 1000f32 * 1000f32 * units) as u64
    }
}

impl Display for PaymentUnit {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        match self {
            PaymentUnit::GBSpace => write!(f, "GB Space"),
            PaymentUnit::GBEgress => write!(f, "GB Egress"),
        }
    }
}

#[cfg(feature = "payments")]
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum PaymentInterval {
    Day(u16),
    Month(u16),
    Year(u16),
}
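
Note that to_size uses decimal gigabytes (1 GB = 1000^3 bytes), not gibibytes. The same arithmetic as a free function, with a quick check:

    // Decimal GB: 1 unit = 1_000_000_000 bytes.
    fn to_size(units: f32) -> u64 {
        (1000f32 * 1000f32 * 1000f32 * units) as u64
    }

    fn main() {
        assert_eq!(to_size(1.0), 1_000_000_000);
        assert_eq!(to_size(2.5), 2_500_000_000);
    }
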
@ -1,8 +1,9 @@
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use anyhow::{bail, Error, Result};
|
||||
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVPixelFormat::AV_PIX_FMT_YUV420P;
|
||||
use ffmpeg_rs_raw::{Demuxer, DemuxerInfo, Encoder, StreamType, Transcoder};
|
||||
use ffmpeg_rs_raw::ffmpeg_sys_the_third::{av_frame_free, av_packet_free, AVFrame};
|
||||
use ffmpeg_rs_raw::{Decoder, Demuxer, DemuxerInfo, Encoder, Scaler, StreamType, Transcoder};
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::ptr;
|
||||
use uuid::Uuid;
|
||||
|
||||
#[cfg(feature = "labels")]
|
||||
@ -21,7 +22,7 @@ impl WebpProcessor {
|
||||
Self
|
||||
}
|
||||
|
||||
pub fn process_file(
|
||||
pub fn compress(
|
||||
&mut self,
|
||||
input: &Path,
|
||||
mime_type: &str,
|
||||
@ -65,9 +66,65 @@ impl WebpProcessor {
|
||||
mime_type: "image/webp".to_string(),
|
||||
width: image_stream.width,
|
||||
height: image_stream.height,
|
||||
duration: if probe.duration < 0. {
|
||||
0.
|
||||
} else {
|
||||
probe.duration
|
||||
},
|
||||
bitrate: probe.bitrate as u32,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub fn thumbnail(&mut self, input: &Path, out_path: &Path) -> Result<()> {
|
||||
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVCodecID::AV_CODEC_ID_WEBP;
|
||||
|
||||
unsafe {
|
||||
let mut input = Demuxer::new(input.to_str().unwrap())?;
|
||||
|
||||
let probe = input.probe_input()?;
|
||||
|
||||
let image_stream = probe
|
||||
.streams
|
||||
.iter()
|
||||
.find(|c| c.stream_type == StreamType::Video)
|
||||
.ok_or(Error::msg("No image found, cant compress"))?;
|
||||
|
||||
let w = 512u16;
|
||||
let scale = w as f32 / image_stream.width as f32;
|
||||
let h = (image_stream.height as f32 * scale) as u16;
|
||||
|
||||
let enc = Encoder::new(AV_CODEC_ID_WEBP)?
|
||||
.with_height(h as i32)
|
||||
.with_width(w as i32)
|
||||
.with_pix_fmt(AV_PIX_FMT_YUV420P)
|
||||
.with_framerate(1.0)?
|
||||
.open(None)?;
|
||||
|
||||
let mut sws = Scaler::new();
|
||||
let mut decoder = Decoder::new();
|
||||
decoder.setup_decoder(image_stream, None)?;
|
||||
|
||||
while let Ok((mut pkt, _stream)) = input.get_packet() {
|
||||
let mut frame_save: *mut AVFrame = ptr::null_mut();
|
||||
for (mut frame, _stream) in decoder.decode_pkt(pkt)? {
|
||||
if frame_save.is_null() {
|
||||
frame_save = sws.process_frame(frame, w, h, AV_PIX_FMT_YUV420P)?;
|
||||
}
|
||||
av_frame_free(&mut frame);
|
||||
}
|
||||
|
||||
av_packet_free(&mut pkt);
|
||||
if !frame_save.is_null() {
|
||||
enc.save_picture(frame_save, out_path.to_str().unwrap())?;
|
||||
av_frame_free(&mut frame_save);
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct NewFileProcessorResult {
|
||||
@ -75,29 +132,27 @@ pub struct NewFileProcessorResult {
|
||||
pub mime_type: String,
|
||||
pub width: usize,
|
||||
pub height: usize,
|
||||
}
|
||||
|
||||
pub fn can_compress(mime_type: &str) -> bool {
|
||||
mime_type.starts_with("image/")
|
||||
pub duration: f32,
|
||||
pub bitrate: u32,
|
||||
}
|
||||
|
||||
pub fn compress_file(
|
||||
stream: &Path,
|
||||
path: &Path,
|
||||
mime_type: &str,
|
||||
out_dir: &Path,
|
||||
) -> Result<NewFileProcessorResult, Error> {
|
||||
if !can_compress(mime_type) {
|
||||
if !crate::can_compress(mime_type) {
|
||||
bail!("MIME type not supported");
|
||||
}
|
||||
|
||||
if mime_type.starts_with("image/") {
|
||||
let mut proc = WebpProcessor::new();
|
||||
return proc.process_file(stream, mime_type, out_dir);
|
||||
return proc.compress(path, mime_type, out_dir);
|
||||
}
|
||||
bail!("No media processor")
|
||||
}
|
||||
|
||||
pub fn probe_file(stream: &Path) -> Result<DemuxerInfo> {
|
||||
let mut demuxer = Demuxer::new(stream.to_str().unwrap())?;
|
||||
pub fn probe_file(path: &Path) -> Result<DemuxerInfo> {
|
||||
let mut demuxer = Demuxer::new(path.to_str().unwrap())?;
|
||||
unsafe { demuxer.probe_input() }
|
||||
}
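
A minimal sketch of how a caller might drive the two entry points above (hypothetical paths, error handling elided):

// Sketch only: probe first, then compress when the MIME type qualifies.
use std::path::Path;

fn example() -> anyhow::Result<()> {
    let input = Path::new("/tmp/input.jpg"); // hypothetical file
    let info = probe_file(input)?;
    println!("duration={}s bitrate={}", info.duration, info.bitrate);

    let out = compress_file(input, "image/jpeg", Path::new("/tmp/out"))?;
    println!("-> {} ({}x{})", out.mime_type, out.width, out.height);
    Ok(())
}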

@ -1,14 +1,14 @@
use crate::auth::nip98::Nip98Auth;
use crate::db::{Database, FileUpload};
use crate::db::{Database, FileUpload, User, Report};
use crate::routes::{Nip94Event, PagedResult};
use crate::settings::Settings;
use rocket::serde::json::Json;
use rocket::serde::Serialize;
use rocket::{routes, Responder, Route, State};
use sqlx::{Error, Row};
use sqlx::{Error, QueryBuilder, Row};

pub fn admin_routes() -> Vec<Route> {
    routes![admin_list_files, admin_get_self]
    routes![admin_list_files, admin_get_self, admin_list_reports, admin_acknowledge_report]
}

#[derive(Serialize, Default)]
@ -53,10 +53,25 @@ pub struct SelfUser {
    pub is_admin: bool,
    pub file_count: u64,
    pub total_size: u64,
    #[cfg(feature = "payments")]
    pub paid_until: u64,
    #[cfg(feature = "payments")]
    pub quota: u64,
    #[cfg(feature = "payments")]
    pub free_quota: u64,
    #[cfg(feature = "payments")]
    pub total_available_quota: u64,
}

#[derive(Serialize)]
pub struct AdminNip94File {
    #[serde(flatten)]
    pub inner: Nip94Event,
    pub uploader: Vec<String>,
}

#[rocket::get("/self")]
async fn admin_get_self(auth: Nip98Auth, db: &State<Database>) -> AdminResponse<SelfUser> {
async fn admin_get_self(auth: Nip98Auth, db: &State<Database>, settings: &State<Settings>) -> AdminResponse<SelfUser> {
    let pubkey_vec = auth.event.pubkey.to_bytes().to_vec();
    match db.get_user(&pubkey_vec).await {
        Ok(user) => {
@ -66,24 +81,55 @@ async fn admin_get_self(auth: Nip98Auth, db: &State<Database>) -> AdminResponse<
                    return AdminResponse::error(&format!("Failed to load user stats: {}", e))
                }
            };

            #[cfg(feature = "payments")]
            let (free_quota, total_available_quota) = {
                let free_quota = settings.payments.as_ref()
                    .and_then(|p| p.free_quota_bytes)
                    .unwrap_or(104857600);
                let mut total_available = free_quota;

                // Add paid quota if still valid
                if let Some(paid_until) = &user.paid_until {
                    if *paid_until > chrono::Utc::now() {
                        total_available += user.paid_size;
                    }
                }

                (free_quota, total_available)
            };

            AdminResponse::success(SelfUser {
                is_admin: user.is_admin,
                file_count: s.file_count,
                total_size: s.total_size,
                #[cfg(feature = "payments")]
                paid_until: if let Some(u) = &user.paid_until {
                    u.timestamp() as u64
                } else {
                    0
                },
                #[cfg(feature = "payments")]
                quota: user.paid_size,
                #[cfg(feature = "payments")]
                free_quota,
                #[cfg(feature = "payments")]
                total_available_quota,
            })
        }
        Err(_) => AdminResponse::error("User not found"),
    }
}

#[rocket::get("/files?<page>&<count>")]
#[rocket::get("/files?<page>&<count>&<mime_type>")]
async fn admin_list_files(
    auth: Nip98Auth,
    page: u32,
    count: u32,
    mime_type: Option<String>,
    db: &State<Database>,
    settings: &State<Settings>,
) -> AdminResponse<PagedResult<Nip94Event>> {
) -> AdminResponse<PagedResult<AdminNip94File>> {
    let pubkey_vec = auth.event.pubkey.to_bytes().to_vec();
    let server_count = count.clamp(1, 5_000);

@ -95,40 +141,107 @@ async fn admin_list_files(
    if !user.is_admin {
        return AdminResponse::error("User is not an admin");
    }
    match db.list_all_files(page * server_count, server_count).await {
    match db
        .list_all_files(page * server_count, server_count, mime_type)
        .await
    {
        Ok((files, count)) => AdminResponse::success(PagedResult {
            count: files.len() as u32,
            page,
            total: count as u32,
            files: files
                .iter()
                .map(|f| Nip94Event::from_upload(settings, f))
                .into_iter()
                .map(|f| AdminNip94File {
                    inner: Nip94Event::from_upload(settings, &f.0),
                    uploader: f.1.into_iter().map(|u| hex::encode(&u.pubkey)).collect(),
                })
                .collect(),
        }),
        Err(e) => AdminResponse::error(&format!("Could not list files: {}", e)),
    }
}

#[rocket::get("/reports?<page>&<count>")]
async fn admin_list_reports(
    auth: Nip98Auth,
    page: u32,
    count: u32,
    db: &State<Database>,
) -> AdminResponse<PagedResult<Report>> {
    let pubkey_vec = auth.event.pubkey.to_bytes().to_vec();
    let server_count = count.clamp(1, 5_000);

    let user = match db.get_user(&pubkey_vec).await {
        Ok(user) => user,
        Err(_) => return AdminResponse::error("User not found"),
    };

    if !user.is_admin {
        return AdminResponse::error("User is not an admin");
    }

    match db.list_reports(page * server_count, server_count).await {
        Ok((reports, total_count)) => AdminResponse::success(PagedResult {
            count: reports.len() as u32,
            page,
            total: total_count as u32,
            files: reports,
        }),
        Err(e) => AdminResponse::error(&format!("Could not list reports: {}", e)),
    }
}

#[rocket::delete("/reports/<report_id>")]
async fn admin_acknowledge_report(
    auth: Nip98Auth,
    report_id: u64,
    db: &State<Database>,
) -> AdminResponse<()> {
    let pubkey_vec = auth.event.pubkey.to_bytes().to_vec();

    let user = match db.get_user(&pubkey_vec).await {
        Ok(user) => user,
        Err(_) => return AdminResponse::error("User not found"),
    };

    if !user.is_admin {
        return AdminResponse::error("User is not an admin");
    }

    match db.mark_report_reviewed(report_id).await {
        Ok(()) => AdminResponse::success(()),
        Err(e) => AdminResponse::error(&format!("Could not acknowledge report: {}", e)),
    }
}

impl Database {
    pub async fn list_all_files(
        &self,
        offset: u32,
        limit: u32,
    ) -> Result<(Vec<FileUpload>, i64), Error> {
        let results: Vec<FileUpload> = sqlx::query_as(
            "select u.* \
            from uploads u \
            order by u.created desc \
            limit ? offset ?",
        )
        .bind(limit)
        .bind(offset)
        .fetch_all(&self.pool)
        .await?;
        mime_type: Option<String>,
    ) -> Result<(Vec<(FileUpload, Vec<User>)>, i64), Error> {
        let mut q = QueryBuilder::new("select u.* from uploads u ");
        if let Some(m) = mime_type {
            q.push("where u.mime_type = ");
            q.push_bind(m);
        }
        q.push(" order by u.created desc limit ");
        q.push_bind(limit);
        q.push(" offset ");
        q.push_bind(offset);

        let results: Vec<FileUpload> = q.build_query_as().fetch_all(&self.pool).await?;
        let count: i64 = sqlx::query("select count(u.id) from uploads u")
            .fetch_one(&self.pool)
            .await?
            .try_get(0)?;
        Ok((results, count))

        let mut res = Vec::with_capacity(results.len());
        for upload in results.into_iter() {
            let upd = self.get_file_owners(&upload.id).await?;
            res.push((upload, upd));
        }
        Ok((res, count))
    }
}
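
For reference, the two statement shapes QueryBuilder assembles above are sketched below; note the separate count query ignores the MIME filter, so the reported total covers all uploads even when a filter is bound.

// Shapes of the generated SQL (sketch):
//   select u.* from uploads u where u.mime_type = ? order by u.created desc limit ? offset ?
//   select u.* from uploads u order by u.created desc limit ? offset ?
// The total comes from "select count(u.id) from uploads u", unfiltered.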

@ -5,7 +5,7 @@ use crate::routes::{delete_file, Nip94Event};
use crate::settings::Settings;
use log::error;
use nostr::prelude::hex;
use nostr::{Alphabet, SingleLetterTag, TagKind};
use nostr::{Alphabet, SingleLetterTag, TagKind, JsonUtil};
use rocket::data::ByteUnit;
use rocket::futures::StreamExt;
use rocket::http::{Header, Status};
@ -13,7 +13,6 @@ use rocket::response::Responder;
use rocket::serde::json::Json;
use rocket::{routes, Data, Request, Response, Route, State};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use tokio::io::AsyncRead;
use tokio_util::io::StreamReader;

@ -27,7 +26,7 @@ pub struct BlobDescriptor {
    pub mime_type: Option<String>,
    pub created: u64,
    #[serde(rename = "nip94", skip_serializing_if = "Option::is_none")]
    pub nip94: Option<HashMap<String, String>>,
    pub nip94: Option<Vec<Vec<String>>>,
}

impl BlobDescriptor {
@ -46,14 +45,7 @@ impl BlobDescriptor {
            size: value.size,
            mime_type: Some(value.mime_type.clone()),
            created: value.created.timestamp() as u64,
            nip94: Some(
                Nip94Event::from_upload(settings, value)
                    .tags
                    .iter()
                    .map(|r| (r[0].clone(), r[1].clone()))
                    .collect(),
            ),
        }
            nip94: Some(Nip94Event::from_upload(settings, value).tags),
        }
    }

@ -71,13 +63,14 @@ pub fn blossom_routes() -> Vec<Route> {
        upload_head,
        upload_media,
        head_media,
        mirror
        mirror,
        report_file
    ]
}

#[cfg(not(feature = "media-compression"))]
pub fn blossom_routes() -> Vec<Route> {
    routes![delete_blob, upload, list_files, upload_head, mirror]
    routes![delete_blob, upload, list_files, upload_head, mirror, report_file]
}

/// Generic holder response, mostly for errors
@ -362,6 +355,21 @@ async fn process_upload(
        return e;
    }

    // check quota
    #[cfg(feature = "payments")]
    if let Some(upload_size) = size {
        let free_quota = settings.payments.as_ref()
            .and_then(|p| p.free_quota_bytes)
            .unwrap_or(104857600); // Default to 100MB
        let pubkey_vec = auth.event.pubkey.to_bytes().to_vec();

        match db.check_user_quota(&pubkey_vec, upload_size, free_quota).await {
            Ok(false) => return BlossomResponse::error("Upload would exceed quota"),
            Err(_) => return BlossomResponse::error("Failed to check quota"),
            Ok(true) => {} // Quota check passed
        }
    }

    process_stream(
        data.open(ByteUnit::Byte(settings.max_upload_bytes)),
        &auth
@ -422,3 +430,65 @@ where
        BlossomResponse::BlobDescriptor(Json(BlobDescriptor::from_upload(settings, &upload)))
    }
}
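
The gate above leans on db.check_user_quota, which this diff doesn't show; under the stated defaults its rule has to reduce to something like the sketch below (hypothetical helper, invented names). Note 104857600 bytes is 100 MiB, which the inline comments round to "100MB".

// Hypothetical sketch of the rule check_user_quota must implement: an upload
// fits if current usage plus the new blob stays within free + unexpired paid quota.
fn upload_fits(current_usage: u64, upload_size: u64, free_quota: u64,
               paid_size: u64, paid_is_current: bool) -> bool {
    let total_available = free_quota + if paid_is_current { paid_size } else { 0 };
    current_usage + upload_size <= total_available
}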

#[rocket::put("/report", data = "<data>", format = "json")]
async fn report_file(
    auth: BlossomAuth,
    db: &State<Database>,
    settings: &State<Settings>,
    data: Json<nostr::Event>,
) -> BlossomResponse {
    // Check if the request has the correct method tag
    if !check_method(&auth.event, "report") {
        return BlossomResponse::error("Invalid request method tag");
    }

    // Check whitelist
    if let Some(e) = check_whitelist(&auth, settings) {
        return e;
    }

    // Extract file SHA256 from the "x" tag in the report event
    let file_sha256 = if let Some(x_tag) = data.tags.iter().find_map(|t| {
        if t.kind() == TagKind::SingleLetter(SingleLetterTag::lowercase(Alphabet::X)) {
            t.content()
        } else {
            None
        }
    }) {
        match hex::decode(x_tag) {
            Ok(hash) => hash,
            Err(_) => return BlossomResponse::error("Invalid file hash in x tag"),
        }
    } else {
        return BlossomResponse::error("Missing file hash in x tag");
    };

    // Verify the reported file exists
    match db.get_file(&file_sha256).await {
        Ok(Some(_)) => {}, // File exists, continue
        Ok(None) => return BlossomResponse::error("File not found"),
        Err(e) => return BlossomResponse::error(format!("Failed to check file: {}", e)),
    }

    // Get or create the reporter user
    let reporter_id = match db.upsert_user(&auth.event.pubkey.to_bytes().to_vec()).await {
        Ok(user_id) => user_id,
        Err(e) => return BlossomResponse::error(format!("Failed to get user: {}", e)),
    };

    // Store the report (the database will handle duplicate prevention via unique index)
    match db.add_report(&file_sha256, reporter_id, &data.as_json()).await {
        Ok(()) => BlossomResponse::Generic(BlossomGenericResponse {
            status: Status::Ok,
            message: Some("Report submitted successfully".to_string()),
        }),
        Err(e) => {
            if e.to_string().contains("Duplicate entry") {
                BlossomResponse::error("You have already reported this file")
            } else {
                BlossomResponse::error(format!("Failed to submit report: {}", e))
            }
        }
    }
}
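
For illustration (not part of the diff), a payload this endpoint accepts only needs a lowercase "x" tag carrying the reported file's SHA-256; assuming a NIP-56-style kind 1984 report event and serde_json at hand, a client body might look like:

// Hypothetical report body; id/pubkey/sig/created_at elided as for any signed event.
let report = serde_json::json!({
    "kind": 1984,
    "content": "copyright violation", // free-form reason shown in the admin UI
    "tags": [
        ["x", "b1674191a88ec5cdd733e4240a81803105dc412d6c6708d53ab94fc248f4f553"]
    ]
});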

@ -1,5 +1,7 @@
use crate::db::{Database, FileUpload};
use crate::filesystem::FileStore;
#[cfg(feature = "media-compression")]
use crate::processing::WebpProcessor;
pub use crate::routes::admin::admin_routes;
#[cfg(feature = "blossom")]
pub use crate::routes::blossom::blossom_routes;
@ -7,8 +9,10 @@ pub use crate::routes::blossom::blossom_routes;
pub use crate::routes::nip96::nip96_routes;
use crate::settings::Settings;
use crate::void_file::VoidFile;
use anyhow::Error;
use http_range_header::{parse_range_header, EndPosition, StartPosition};
use anyhow::{Error, Result};
use http_range_header::{
    parse_range_header, EndPosition, StartPosition, SyntacticallyCorrectRange,
};
use log::{debug, warn};
use nostr::Event;
use rocket::fs::NamedFile;
@ -16,6 +20,7 @@ use rocket::http::{ContentType, Header, Status};
use rocket::response::Responder;
use rocket::serde::Serialize;
use rocket::{Request, Response, State};
use std::env::temp_dir;
use std::io::SeekFrom;
use std::ops::Range;
use std::pin::{pin, Pin};
@ -56,28 +61,40 @@ struct PagedResult<T> {
impl Nip94Event {
    pub fn from_upload(settings: &Settings, upload: &FileUpload) -> Self {
        let hex_id = hex::encode(&upload.id);
        let ext = if upload.mime_type != "application/octet-stream" {
            mime2ext::mime2ext(&upload.mime_type)
        } else {
            None
        };
        let mut tags = vec![
            vec![
                "url".to_string(),
                format!(
                    "{}/{}{}",
                    &settings.public_url,
                    &hex_id,
                    mime2ext::mime2ext(&upload.mime_type)
                        .map(|m| format!(".{m}"))
                        .unwrap_or("".to_string())
                ),
                format!("{}/{}.{}", &settings.public_url, &hex_id, ext.unwrap_or("")),
            ],
            vec!["x".to_string(), hex_id],
            vec!["x".to_string(), hex_id.clone()],
            vec!["m".to_string(), upload.mime_type.clone()],
            vec!["size".to_string(), upload.size.to_string()],
        ];
        if upload.mime_type.starts_with("image/") || upload.mime_type.starts_with("video/") {
            tags.push(vec![
                "thumb".to_string(),
                format!("{}/thumb/{}.webp", &settings.public_url, &hex_id),
            ]);
        }

        if let Some(bh) = &upload.blur_hash {
            tags.push(vec!["blurhash".to_string(), bh.clone()]);
        }
        if let (Some(w), Some(h)) = (upload.width, upload.height) {
            tags.push(vec!["dim".to_string(), format!("{}x{}", w, h)])
        }
        if let Some(d) = &upload.duration {
            tags.push(vec!["duration".to_string(), d.to_string()]);
        }
        if let Some(b) = &upload.bitrate {
            tags.push(vec!["bitrate".to_string(), b.to_string()]);
        }

        #[cfg(feature = "labels")]
        for l in &upload.labels {
            let val = if l.label.contains(',') {
@ -104,18 +121,47 @@ struct RangeBody {
    range_end: u64,
    current_offset: u64,
    poll_complete: bool,
    file_size: u64,
}

const MAX_UNBOUNDED_RANGE: u64 = 1024 * 1024;
impl RangeBody {
    pub fn new(file: File, range: Range<u64>) -> Self {
    pub fn new(file: File, file_size: u64, range: Range<u64>) -> Self {
        Self {
            file,
            file_size,
            range_start: range.start,
            range_end: range.end,
            current_offset: 0,
            poll_complete: false,
        }
    }

    pub fn get_range(file_size: u64, header: &SyntacticallyCorrectRange) -> Range<u64> {
        let range_start = match header.start {
            StartPosition::Index(i) => i,
            StartPosition::FromLast(i) => file_size.saturating_sub(i),
        };
        let range_end = match header.end {
            EndPosition::Index(i) => i,
            EndPosition::LastByte => (file_size - 1).min(range_start + MAX_UNBOUNDED_RANGE),
        };
        range_start..range_end
    }

    pub fn get_headers(&self) -> Vec<Header<'static>> {
        let r_len = (self.range_end - self.range_start) + 1;
        vec![
            Header::new("content-length", r_len.to_string()),
            Header::new(
                "content-range",
                format!(
                    "bytes {}-{}/{}",
                    self.range_start, self.range_end, self.file_size
                ),
            ),
        ]
    }
}
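
A worked example of the header math above: for Range: bytes=0-1023 against a 16,482,469-byte file, get_range yields 0..1023 and, because range_end is inclusive, get_headers reports a length of 1024.

// Illustrative values only.
let (range_start, range_end, file_size) = (0u64, 1023u64, 16_482_469u64);
assert_eq!((range_end - range_start) + 1, 1024); // content-length
assert_eq!(
    format!("bytes {}-{}/{}", range_start, range_end, file_size),
    "bytes 0-1023/16482469" // content-range
);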

impl AsyncRead for RangeBody {
@ -125,7 +171,7 @@ impl AsyncRead for RangeBody {
        buf: &mut ReadBuf<'_>,
    ) -> Poll<std::io::Result<()>> {
        let range_start = self.range_start + self.current_offset;
        let range_len = self.range_end - range_start;
        let range_len = self.range_end.saturating_sub(range_start) + 1;
        let bytes_to_read = buf.remaining().min(range_len as usize) as u64;

        if bytes_to_read == 0 {
@ -170,10 +216,13 @@ impl AsyncRead for RangeBody {
impl<'r> Responder<'r, 'static> for FilePayload {
    fn respond_to(self, request: &'r Request<'_>) -> rocket::response::Result<'static> {
        let mut response = Response::new();
        response.set_header(Header::new("cache-control", "max-age=31536000, immutable"));

        // handle ranges
        #[cfg(feature = "ranges")]
        {
            // only use range response for files > 1MiB
            if self.info.size < MAX_UNBOUNDED_RANGE {
                response.set_sized_body(None, self.file);
            } else {
                response.set_header(Header::new("accept-ranges", "bytes"));
                if let Some(r) = request.headers().get("range").next() {
                    if let Ok(ranges) = parse_range_header(r) {
@ -181,39 +230,22 @@ impl<'r> Responder<'r, 'static> for FilePayload {
                            warn!("Multipart ranges are not supported, fallback to non-range request");
                            response.set_streamed_body(self.file);
                        } else {
                            const MAX_UNBOUNDED_RANGE: u64 = 1024 * 1024;
                            let single_range = ranges.ranges.first().unwrap();
                            let range_start = match single_range.start {
                                StartPosition::Index(i) => i,
                                StartPosition::FromLast(i) => self.info.size - i,
                            };
                            let range_end = match single_range.end {
                                EndPosition::Index(i) => i,
                                EndPosition::LastByte => {
                                    (range_start + MAX_UNBOUNDED_RANGE).min(self.info.size)
                                }
                            };
                            let r_len = range_end - range_start;
                            let r_body = RangeBody::new(self.file, range_start..range_end);
                            let range = RangeBody::get_range(self.info.size, single_range);
                            let r_body = RangeBody::new(self.file, self.info.size, range.clone());

                            response.set_status(Status::PartialContent);
                            response.set_header(Header::new("content-length", r_len.to_string()));
                            response.set_header(Header::new(
                                "content-range",
                                format!("bytes {}-{}/{}", range_start, range_end - 1, self.info.size),
                            ));
                            let headers = r_body.get_headers();
                            for h in headers {
                                response.set_header(h);
                            }
                            response.set_streamed_body(Box::pin(r_body));
                        }
                    }
                } else {
                    response.set_streamed_body(self.file);
                    response.set_sized_body(None, self.file);
                }
            }
        }
        #[cfg(not(feature = "ranges"))]
        {
            response.set_streamed_body(self.file);
            response.set_header(Header::new("content-length", self.info.size.to_string()));
        }

        if let Ok(ct) = ContentType::from_str(&self.info.mime_type) {
            response.set_header(ct);
@ -352,6 +384,64 @@ pub async fn head_blob(sha256: &str, fs: &State<FileStore>) -> Status {
    }
}

/// Generate thumbnail for image / video
#[cfg(feature = "media-compression")]
#[rocket::get("/thumb/<sha256>")]
pub async fn get_blob_thumb(
    sha256: &str,
    fs: &State<FileStore>,
    db: &State<Database>,
) -> Result<FilePayload, Status> {
    let sha256 = if sha256.contains(".") {
        sha256.split('.').next().unwrap()
    } else {
        sha256
    };
    let id = if let Ok(i) = hex::decode(sha256) {
        i
    } else {
        return Err(Status::NotFound);
    };

    if id.len() != 32 {
        return Err(Status::NotFound);
    }
    let info = if let Ok(Some(info)) = db.get_file(&id).await {
        info
    } else {
        return Err(Status::NotFound);
    };

    if !(info.mime_type.starts_with("image/") || info.mime_type.starts_with("video/")) {
        return Err(Status::NotFound);
    }

    let file_path = fs.get(&id);

    let mut thumb_file = temp_dir().join(format!("thumb_{}", sha256));
    thumb_file.set_extension("webp");

    if !thumb_file.exists() {
        let mut p = WebpProcessor::new();
        if p.thumbnail(&file_path, &thumb_file).is_err() {
            return Err(Status::InternalServerError);
        }
    };

    if let Ok(f) = File::open(&thumb_file).await {
        Ok(FilePayload {
            file: f,
            info: FileUpload {
                size: thumb_file.metadata().unwrap().len(),
                mime_type: "image/webp".to_string(),
                ..info
            },
        })
    } else {
        Err(Status::NotFound)
    }
}

/// Legacy URL redirect for void.cat uploads
#[rocket::get("/d/<id>")]
pub async fn void_cat_redirect(id: &str, settings: &State<Settings>) -> Option<NamedFile> {
@ -361,10 +451,15 @@ pub async fn void_cat_redirect(id: &str, settings: &State<Settings>) -> Option<N
        id
    };
    if let Some(base) = &settings.void_cat_files {
        let uuid =
            uuid::Uuid::from_slice_le(nostr::bitcoin::base58::decode(id).unwrap().as_slice())
                .unwrap();
        let f = base.join(VoidFile::map_to_path(&uuid));
        let uuid = if let Ok(b58) = nostr::bitcoin::base58::decode(id) {
            uuid::Uuid::from_slice_le(b58.as_slice())
        } else {
            uuid::Uuid::parse_str(id)
        };
        if uuid.is_err() {
            return None;
        }
        let f = base.join(VoidFile::map_to_path(&uuid.unwrap()));
        debug!("Legacy file map: {} => {}", id, f.display());
        if let Ok(f) = NamedFile::open(f).await {
            Some(f)
@ -375,3 +470,55 @@ pub async fn void_cat_redirect(id: &str, settings: &State<Settings>) -> Option<N
        None
    }
}

#[rocket::head("/d/<id>")]
pub async fn void_cat_redirect_head(id: &str) -> VoidCatFile {
    let id = if id.contains(".") {
        id.split('.').next().unwrap()
    } else {
        id
    };
    let uuid =
        uuid::Uuid::from_slice_le(nostr::bitcoin::base58::decode(id).unwrap().as_slice()).unwrap();
    VoidCatFile {
        status: Status::Ok,
        uuid: Header::new("X-UUID", uuid.to_string()),
    }
}

#[derive(Responder)]
pub struct VoidCatFile {
    pub status: Status,
    pub uuid: Header<'static>,
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_ranges() -> Result<()> {
        let size = 16482469;

        let req = parse_range_header("bytes=0-1023")?;
        let r = RangeBody::get_range(size, req.ranges.first().unwrap());
        assert_eq!(r.start, 0);
        assert_eq!(r.end, 1023);

        let req = parse_range_header("bytes=16482467-")?;
        let r = RangeBody::get_range(size, req.ranges.first().unwrap());
        assert_eq!(r.start, 16482467);
        assert_eq!(r.end, 16482468);

        let req = parse_range_header("bytes=-10")?;
        let r = RangeBody::get_range(size, req.ranges.first().unwrap());
        assert_eq!(r.start, 16482459);
        assert_eq!(r.end, 16482468);

        let req = parse_range_header("bytes=-16482470")?;
        let r = RangeBody::get_range(size, req.ranges.first().unwrap());
        assert_eq!(r.start, 0);
        assert_eq!(r.end, MAX_UNBOUNDED_RANGE);
        Ok(())
    }
}

@ -207,6 +207,20 @@ async fn upload(
    }

    let pubkey_vec = auth.event.pubkey.to_bytes().to_vec();

    // check quota
    #[cfg(feature = "payments")]
    {
        let free_quota = settings.payments.as_ref()
            .and_then(|p| p.free_quota_bytes)
            .unwrap_or(104857600); // Default to 100MB

        match db.check_user_quota(&pubkey_vec, form.size, free_quota).await {
            Ok(false) => return Nip96Response::error("Upload would exceed quota"),
            Err(_) => return Nip96Response::error("Failed to check quota"),
            Ok(true) => {} // Quota check passed
        }
    }
    let upload = match fs
        .put(file, content_type, !form.no_transform.unwrap_or(false))
        .await

131
src/routes/payment.rs
Normal file

@ -0,0 +1,131 @@
use crate::auth::nip98::Nip98Auth;
use crate::db::{Database, Payment};
use crate::payments::{Currency, PaymentAmount, PaymentInterval, PaymentUnit};
use crate::settings::Settings;
use chrono::{Months, Utc};
use fedimint_tonic_lnd::lnrpc::Invoice;
use fedimint_tonic_lnd::Client;
use log::{error, info};
use rocket::serde::json::Json;
use rocket::{routes, Route, State};
use serde::{Deserialize, Serialize};
use std::ops::{Add, Deref};

pub fn routes() -> Vec<Route> {
    routes![get_payment, req_payment]
}

#[derive(Deserialize, Serialize)]
struct PaymentInfo {
    /// Billing quota metric
    pub unit: PaymentUnit,

    /// Amount of time to bill units (GB/mo, Gb Egress/day etc.)
    pub interval: PaymentInterval,

    /// Value amount of payment
    pub cost: PaymentAmount,
}

#[derive(Deserialize, Serialize)]
struct PaymentRequest {
    /// Number of units requested to make payment
    pub units: f32,

    /// Quantity of orders to make
    pub quantity: u16,
}

#[derive(Deserialize, Serialize)]
struct PaymentResponse {
    pub pr: String,
}

#[rocket::get("/payment")]
async fn get_payment(settings: &State<Settings>) -> Option<Json<PaymentInfo>> {
    settings.payments.as_ref().map(|p| {
        Json::from(PaymentInfo {
            unit: p.unit.clone(),
            interval: p.interval.clone(),
            cost: p.cost.clone(),
        })
    })
}

#[rocket::post("/payment", data = "<req>", format = "json")]
async fn req_payment(
    auth: Nip98Auth,
    db: &State<Database>,
    settings: &State<Settings>,
    lnd: &State<Client>,
    req: Json<PaymentRequest>,
) -> Result<Json<PaymentResponse>, String> {
    let cfg = if let Some(p) = &settings.payments {
        p
    } else {
        return Err("Payment not enabled, missing configuration option(s)".to_string());
    };

    let btc_amount = match cfg.cost.currency {
        Currency::BTC => cfg.cost.amount,
        _ => return Err("Currency not supported".to_string()),
    };

    let amount = btc_amount * req.units * req.quantity as f32;

    let pubkey_vec = auth.event.pubkey.to_bytes().to_vec();
    let uid = db
        .upsert_user(&pubkey_vec)
        .await
        .map_err(|_| "Failed to get user account".to_string())?;

    let mut lnd = lnd.deref().clone();
    let c = lnd.lightning();
    let msat = (amount * 1e11f32) as u64;
    let memo = format!(
        "{}x {} {} for {}",
        req.quantity, req.units, cfg.unit, auth.event.pubkey
    );
    info!("Requesting {} msats: {}", msat, memo);
    let invoice = c
        .add_invoice(Invoice {
            value_msat: msat as i64,
            memo,
            ..Default::default()
        })
        .await
        .map_err(|e| e.message().to_string())?;

    let days_value = match cfg.interval {
        PaymentInterval::Day(d) => d as u64,
        PaymentInterval::Month(m) => {
            let now = Utc::now();
            (now.add(Months::new(m as u32)) - now).num_days() as u64
        }
        PaymentInterval::Year(y) => {
            let now = Utc::now();
            (now.add(Months::new(12 * y as u32)) - now).num_days() as u64
        }
    };

    let record = Payment {
        payment_hash: invoice.get_ref().r_hash.clone(),
        user_id: uid,
        created: Default::default(),
        amount: msat,
        is_paid: false,
        days_value,
        size_value: cfg.unit.to_size(req.units),
        settle_index: None,
        rate: None,
    };

    if let Err(e) = db.insert_payment(&record).await {
        error!("Failed to insert payment: {}", e);
        return Err("Failed to insert payment".to_string());
    }

    Ok(Json(PaymentResponse {
        pr: invoice.get_ref().payment_request.clone(),
    }))
}
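
A worked example of the invoice math above (figures are hypothetical): at cost.amount = 0.00005 BTC per unit, units = 5 and quantity = 1 price at 0.00025 BTC, and since 1 BTC = 1e11 msat the invoice comes out around 25,000,000 msat (25,000 sats); the f32 arithmetic makes the last digits approximate.

// Hypothetical pricing run-through, mirroring req_payment.
let btc_amount = 0.00005f32;              // cfg.cost.amount
let amount = btc_amount * 5.0 * 1.0;      // req.units = 5.0, req.quantity = 1
let msat = (amount * 1e11f32) as u64;     // 1 BTC = 100_000_000 sats = 1e11 msat
println!("{} msat", msat);                // ~25_000_000 msat, i.e. ~25_000 sats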

@ -1,3 +1,5 @@
#[cfg(feature = "payments")]
use crate::payments::{Currency, PaymentAmount, PaymentInterval, PaymentUnit};
use serde::{Deserialize, Serialize};
use std::path::PathBuf;

@ -30,11 +32,12 @@ pub struct Settings {
    /// Analytics tracking
    pub plausible_url: Option<String>,

    #[cfg(feature = "void-cat-redirects")]
    pub void_cat_database: Option<String>,

    /// Path to void.cat uploads (files-v2)
    pub void_cat_files: Option<PathBuf>,

    #[cfg(feature = "payments")]
    /// Payment options for paid storage
    pub payments: Option<PaymentConfig>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
@ -42,3 +45,33 @@ pub struct VitModelConfig {
    pub model: PathBuf,
    pub config: PathBuf,
}

#[cfg(feature = "payments")]
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PaymentConfig {
    /// LND connection details
    pub lnd: LndConfig,

    /// Pricing per unit
    pub cost: PaymentAmount,

    /// What metric to bill payments on
    pub unit: PaymentUnit,

    /// Billing interval time per unit
    pub interval: PaymentInterval,

    /// Fiat base currency to store exchange rates along with invoice
    pub fiat: Option<Currency>,

    /// Free quota in bytes for users without payments (default: 100MB)
    pub free_quota_bytes: Option<u64>,
}

#[cfg(feature = "payments")]
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LndConfig {
    pub endpoint: String,
    pub tls: PathBuf,
    pub macaroon: PathBuf,
}
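
A minimal sketch of a fully-populated payment section (values are hypothetical; how it is deserialized from the config file is outside this diff):

// Illustrative construction of the structs above: bill 1 GB of space per month
// at 0.00005 BTC, with a 100 MiB free tier.
let payments = PaymentConfig {
    lnd: LndConfig {
        endpoint: "https://127.0.0.1:10009".to_string(),
        tls: PathBuf::from("/data/lnd/tls.cert"),
        macaroon: PathBuf::from("/data/lnd/invoice.macaroon"),
    },
    cost: PaymentAmount { currency: Currency::BTC, amount: 0.00005 },
    unit: PaymentUnit::GBSpace,
    interval: PaymentInterval::Month(1),
    fiat: None,
    free_quota_bytes: Some(104_857_600),
};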

@ -2,19 +2,32 @@ import { hexToBech32 } from "@snort/shared";
import { NostrLink } from "@snort/system";
import { useUserProfile } from "@snort/system-react";

export default function Profile({ link }: { link: NostrLink }) {
export default function Profile({
  link,
  size,
  showName,
}: {
  link: NostrLink;
  size?: number;
  showName?: boolean;
}) {
  const profile = useUserProfile(link.id);
  const s = size ?? 40;
  return (
    <div className="flex gap-2 items-center">
    <a className="flex gap-2 items-center" href={`https://snort.social/${link.encode()}`} target="_blank">
      <img
        src={profile?.picture}
        className="rounded-full w-12 h-12 object-fit object-center"
        width={s}
        height={s}
        className="rounded-full object-fit object-center"
      />
      <div>
        {profile?.display_name ??
          profile?.name ??
          hexToBech32("npub", link.id).slice(0, 12)}
      </div>
    </div>
      {(showName ?? true) && (
        <div>
          {profile?.display_name ??
            profile?.name ??
            hexToBech32("npub", link.id).slice(0, 12)}
        </div>
      )}
    </a>
  );
}

@ -8,5 +8,5 @@ body {
}

hr {
  @apply border-neutral-500
}
  @apply border-neutral-500;
}

174964
ui_src/src/report.json
File diff suppressed because one or more lines are too long

@ -2,7 +2,24 @@ import { base64 } from "@scure/base";
import { throwIfOffline } from "@snort/shared";
import { EventKind, EventPublisher, NostrEvent } from "@snort/system";

export interface AdminSelf { is_admin: boolean, file_count: number, total_size: number }
export interface AdminSelf {
  is_admin: boolean;
  file_count: number;
  total_size: number;
  paid_until?: number;
  quota?: number;
  free_quota?: number;
  total_available_quota?: number;
}

export interface Report {
  id: number;
  file_id: string;
  reporter_id: number;
  event_json: string;
  created: string;
  reviewed: boolean;
}

export class Route96 {
  constructor(
@ -14,14 +31,13 @@ export class Route96 {

  async getSelf() {
    const rsp = await this.#req("admin/self", "GET");
    const data =
      await this.#handleResponse<AdminResponse<AdminSelf>>(rsp);
    const data = await this.#handleResponse<AdminResponse<AdminSelf>>(rsp);
    return data;
  }

  async listFiles(page = 0, count = 10) {
  async listFiles(page = 0, count = 10, mime: string | undefined) {
    const rsp = await this.#req(
      `admin/files?page=${page}&count=${count}`,
      `admin/files?page=${page}&count=${count}${mime ? `&mime_type=${mime}` : ""}`,
      "GET",
    );
    const data = await this.#handleResponse<AdminResponseFileList>(rsp);
@ -32,6 +48,25 @@ export class Route96 {
    };
  }

  async listReports(page = 0, count = 10) {
    const rsp = await this.#req(
      `admin/reports?page=${page}&count=${count}`,
      "GET",
    );
    const data = await this.#handleResponse<AdminResponseReportList>(rsp);
    return {
      ...data,
      ...data.data,
      files: data.data.files,
    };
  }

  async acknowledgeReport(reportId: number) {
    const rsp = await this.#req(`admin/reports/${reportId}`, "DELETE");
    const data = await this.#handleResponse<AdminResponse<void>>(rsp);
    return data;
  }

  async #handleResponse<T extends AdminResponseBase>(rsp: Response) {
    if (rsp.ok) {
      return (await rsp.json()) as T;
@ -87,3 +122,10 @@ export type AdminResponseFileList = AdminResponse<{
  count: number;
  files: Array<NostrEvent>;
}>;

export type AdminResponseReportList = AdminResponse<{
  total: number;
  page: number;
  count: number;
  files: Array<Report>;
}>;

@ -41,7 +41,7 @@ export class Blossom {
    );
    const tags = [["x", bytesToString("hex", new Uint8Array(hash))]];

    const rsp = await this.#req("media", "PUT", "upload", file, tags);
    const rsp = await this.#req("media", "PUT", "media", file, tags);
    if (rsp.ok) {
      return (await rsp.json()) as BlobDescriptor;
    } else {
@ -51,9 +51,16 @@ export class Blossom {
  }

  async mirror(url: string) {
    const rsp = await this.#req("mirror", "PUT", "mirror", JSON.stringify({ url }), undefined, {
      "content-type": "application/json"
    });
    const rsp = await this.#req(
      "mirror",
      "PUT",
      "mirror",
      JSON.stringify({ url }),
      undefined,
      {
        "content-type": "application/json",
      },
    );
    if (rsp.ok) {
      return (await rsp.json()) as BlobDescriptor;
    } else {

@ -1,7 +1,8 @@
import { NostrEvent } from "@snort/system";
import { NostrEvent, NostrLink } from "@snort/system";
import { useState } from "react";
import { FormatBytes } from "../const";
import classNames from "classnames";
import Profile from "../components/profile";

interface FileInfo {
  id: string;
@ -9,6 +10,7 @@ interface FileInfo {
  name?: string;
  type?: string;
  size?: number;
  uploader?: Array<string>;
}

export default function FileList({
@ -30,15 +32,17 @@ export default function FileList({
  }

  function renderInner(f: FileInfo) {
    if (f.type?.startsWith("image/") || !f.type) {
    if (
      f.type?.startsWith("image/") ||
      f.type?.startsWith("video/") ||
      !f.type
    ) {
      return (
        <img src={f.url} className="w-full h-full object-contain object-center" loading="lazy" />
      );
    } else if (f.type?.startsWith("video/")) {
      return (
        <div className="w-full h-full flex items-center justify-center">
          Video
        </div>
        <img
          src={f.url.replace(`/${f.id}`, `/thumb/${f.id}`)}
          className="w-full h-full object-contain object-center"
          loading="lazy"
        />
      );
    }
  }
@ -54,6 +58,7 @@ export default function FileList({
        name: f.content,
        type: f.tags.find((a) => a[0] === "m")?.at(1),
        size: Number(f.tags.find((a) => a[0] === "size")?.at(1)),
        uploader: "uploader" in f ? (f.uploader as Array<string>) : undefined,
      };
    } else {
      return {
@ -74,12 +79,14 @@ export default function FileList({
      ret.push(
        <div
          onClick={() => onPage?.(x)}
          className={classNames("bg-neutral-700 hover:bg-neutral-600 min-w-8 text-center cursor-pointer font-bold",
          className={classNames(
            "bg-neutral-700 hover:bg-neutral-600 min-w-8 text-center cursor-pointer font-bold",
            {
              "rounded-l-md": x === start,
              "rounded-r-md": (x + 1) === n,
              "rounded-r-md": x + 1 === n,
              "bg-neutral-400": page === x,
            })}
            },
          )}
        >
          {x + 1}
        </div>,
@ -102,24 +109,39 @@ export default function FileList({
        >
          <div className="absolute flex flex-col items-center justify-center w-full h-full text-wrap text-sm break-all text-center opacity-0 hover:opacity-100 hover:bg-black/80">
            <div>
              {(info.name?.length ?? 0) === 0 ? "Untitled" : info.name}
              {(info.name?.length ?? 0) === 0
                ? "Untitled"
                : info.name!.length > 20
                  ? `${info.name?.substring(0, 10)}...${info.name?.substring(info.name.length - 10)}`
                  : info.name}
            </div>
            <div>
              {info.size && !isNaN(info.size)
                ? FormatBytes(info.size, 2)
                : ""}
            </div>
            <div>{info.type}</div>
            <div className="flex gap-2">
              <a href={info.url} className="underline" target="_blank">
                Link
              </a>
              {onDelete && <a href="#" onClick={e => {
                e.preventDefault();
                onDelete?.(info.id)
              }} className="underline">
                Delete
              </a>}
              {onDelete && (
                <a
                  href="#"
                  onClick={(e) => {
                    e.preventDefault();
                    onDelete?.(info.id);
                  }}
                  className="underline"
                >
                  Delete
                </a>
              )}
            </div>
            {info.uploader &&
              info.uploader.map((a) => (
                <Profile link={NostrLink.publicKey(a)} size={20} />
              ))}
          </div>
          {renderInner(info)}
        </div>
@ -134,6 +156,9 @@ export default function FileList({
      <table className="table-auto text-sm">
        <thead>
          <tr>
            <th className="border border-neutral-400 bg-neutral-500 py-1 px-2">
              Preview
            </th>
            <th className="border border-neutral-400 bg-neutral-500 py-1 px-2">
              Name
            </th>
@ -143,6 +168,11 @@
            <th className="border border-neutral-400 bg-neutral-500 py-1 px-2">
              Size
            </th>
            {files.some((i) => "uploader" in i) && (
              <th className="border border-neutral-400 bg-neutral-500 py-1 px-2">
                Uploader
              </th>
            )}
            <th className="border border-neutral-400 bg-neutral-500 py-1 px-2">
              Actions
            </th>
@ -153,6 +183,9 @@
            const info = getInfo(a);
            return (
              <tr key={info.id}>
                <td className="border border-neutral-500 py-1 px-2 w-8 h-8">
                  {renderInner(info)}
                </td>
                <td className="border border-neutral-500 py-1 px-2 break-all">
                  {(info.name?.length ?? 0) === 0 ? "<Untitled>" : info.name}
                </td>
@ -164,17 +197,30 @@
                    ? FormatBytes(info.size, 2)
                    : ""}
                </td>
                {info.uploader && (
                  <td className="border border-neutral-500 py-1 px-2">
                    {info.uploader.map((a) => (
                      <Profile link={NostrLink.publicKey(a)} size={20} />
                    ))}
                  </td>
                )}
                <td className="border border-neutral-500 py-1 px-2">
                  <div className="flex gap-2">
                    <a href={info.url} className="underline" target="_blank">
                      Link
                    </a>
                    {onDelete && <a href="#" onClick={e => {
                      e.preventDefault();
                      onDelete?.(info.id)
                    }} className="underline">
                      Delete
                    </a>}
                    {onDelete && (
                      <a
                        href="#"
                        onClick={(e) => {
                          e.preventDefault();
                          onDelete?.(info.id);
                        }}
                        className="underline"
                      >
                        Delete
                      </a>
                    )}
                  </div>
                </td>
              </tr>

141
ui_src/src/views/reports.tsx
Normal file

@ -0,0 +1,141 @@
import { NostrLink } from "@snort/system";
import classNames from "classnames";
import Profile from "../components/profile";
import { Report } from "../upload/admin";

export default function ReportList({
  reports,
  pages,
  page,
  onPage,
  onAcknowledge,
  onDeleteFile,
}: {
  reports: Array<Report>;
  pages?: number;
  page?: number;
  onPage?: (n: number) => void;
  onAcknowledge?: (reportId: number) => void;
  onDeleteFile?: (fileId: string) => void;
}) {
  if (reports.length === 0) {
    return <b>No Reports</b>;
  }

  function pageButtons(page: number, n: number) {
    const ret = [];
    const start = 0;

    for (let x = start; x < n; x++) {
      ret.push(
        <div
          key={x}
          onClick={() => onPage?.(x)}
          className={classNames(
            "bg-neutral-700 hover:bg-neutral-600 min-w-8 text-center cursor-pointer font-bold",
            {
              "rounded-l-md": x === start,
              "rounded-r-md": x + 1 === n,
              "bg-neutral-400": page === x,
            },
          )}
        >
          {x + 1}
        </div>,
      );
    }

    return ret;
  }

  function getReporterPubkey(eventJson: string): string | null {
    try {
      const event = JSON.parse(eventJson);
      return event.pubkey;
    } catch {
      return null;
    }
  }

  function getReportReason(eventJson: string): string {
    try {
      const event = JSON.parse(eventJson);
      return event.content || "No reason provided";
    } catch {
      return "Invalid event data";
    }
  }

  function formatDate(dateString: string): string {
    return new Date(dateString).toLocaleString();
  }

  return (
    <>
      <table className="w-full border-collapse border border-neutral-500">
        <thead>
          <tr className="bg-neutral-700">
            <th className="border border-neutral-500 py-2 px-4 text-left">Report ID</th>
            <th className="border border-neutral-500 py-2 px-4 text-left">File ID</th>
            <th className="border border-neutral-500 py-2 px-4 text-left">Reporter</th>
            <th className="border border-neutral-500 py-2 px-4 text-left">Reason</th>
            <th className="border border-neutral-500 py-2 px-4 text-left">Created</th>
            <th className="border border-neutral-500 py-2 px-4 text-left">Actions</th>
          </tr>
        </thead>
        <tbody>
          {reports.map((report) => {
            const reporterPubkey = getReporterPubkey(report.event_json);
            const reason = getReportReason(report.event_json);

            return (
              <tr key={report.id} className="hover:bg-neutral-700">
                <td className="border border-neutral-500 py-2 px-4">{report.id}</td>
                <td className="border border-neutral-500 py-2 px-4 font-mono text-sm">
                  {report.file_id.substring(0, 12)}...
                </td>
                <td className="border border-neutral-500 py-2 px-4">
                  {reporterPubkey ? (
                    <Profile link={NostrLink.publicKey(reporterPubkey)} size={20} />
                  ) : (
                    "Unknown"
                  )}
                </td>
                <td className="border border-neutral-500 py-2 px-4 max-w-xs truncate">
                  {reason}
                </td>
                <td className="border border-neutral-500 py-2 px-4">
                  {formatDate(report.created)}
                </td>
                <td className="border border-neutral-500 py-2 px-4">
                  <div className="flex gap-2">
                    <button
                      onClick={() => onAcknowledge?.(report.id)}
                      className="bg-blue-600 hover:bg-blue-700 px-2 py-1 rounded text-sm"
                    >
                      Acknowledge
                    </button>
                    <button
                      onClick={() => onDeleteFile?.(report.file_id)}
                      className="bg-red-600 hover:bg-red-700 px-2 py-1 rounded text-sm"
                    >
                      Delete File
                    </button>
                  </div>
                </td>
              </tr>
            );
          })}
        </tbody>
      </table>

      {pages !== undefined && (
        <>
          <div className="flex justify-center mt-4">
            <div className="flex gap-1">{pageButtons(page ?? 0, pages)}</div>
          </div>
        </>
      )}
    </>
  );
}

@ -8,29 +8,25 @@ import usePublisher from "../hooks/publisher";
import { Nip96, Nip96FileList } from "../upload/nip96";
import { AdminSelf, Route96 } from "../upload/admin";
import { FormatBytes } from "../const";
import Report from "../report.json";

export default function Upload() {
  const [type, setType] = useState<"blossom" | "nip96">("blossom");
  const [noCompress, setNoCompress] = useState(false);
  const [showLegacy, setShowLegacy] = useState(false);
  const [toUpload, setToUpload] = useState<File>();
  const [self, setSelf] = useState<AdminSelf>();
  const [error, setError] = useState<string>();
  const [bulkPrgress, setBulkProgress] = useState<number>();
  const [results, setResults] = useState<Array<object>>([]);
  const [listedFiles, setListedFiles] = useState<Nip96FileList>();
  const [adminListedFiles, setAdminListedFiles] = useState<Nip96FileList>();
  const [listedPage, setListedPage] = useState(0);
  const [adminListedPage, setAdminListedPage] = useState(0);
  const [mimeFilter, setMimeFilter] = useState<string>();

  const login = useLogin();
  const pub = usePublisher();

  const legacyFiles = Report as Record<string, Array<string>>;
  const myLegacyFiles = login ? (legacyFiles[login.pubkey] ?? []) : [];

  const url = import.meta.env.VITE_API_URL || `${location.protocol}//${location.host}`;
  const url =
    import.meta.env.VITE_API_URL || `${location.protocol}//${location.host}`;
  async function doUpload() {
    if (!pub) return;
    if (!toUpload) return;
@ -38,7 +34,9 @@ export default function Upload() {
    setError(undefined);
    if (type === "blossom") {
      const uploader = new Blossom(url, pub);
      const result = noCompress ? await uploader.upload(toUpload) : await uploader.media(toUpload);
      const result = noCompress
        ? await uploader.upload(toUpload)
        : await uploader.media(toUpload);
      setResults((s) => [...s, result]);
    }
    if (type === "nip96") {
@ -82,7 +80,7 @@ export default function Upload() {
    try {
      setError(undefined);
      const uploader = new Route96(url, pub);
      const result = await uploader.listFiles(n, 50);
      const result = await uploader.listFiles(n, 50, mimeFilter);
      setAdminListedFiles(result);
    } catch (e) {
      if (e instanceof Error) {
@ -112,27 +110,13 @@
    }
  }

  async function migrateLegacy() {
    if (!pub) return;
    const uploader = new Blossom(url, pub);
    let ctr = 0;
    for (const f of myLegacyFiles) {
      try {
        await uploader.mirror(`https://void.cat/d/${f}`);
      } catch (e) {
        console.error(e);
      }
      setBulkProgress(ctr++ / myLegacyFiles.length);
    }
  }

  useEffect(() => {
    listUploads(listedPage);
  }, [listedPage]);

  useEffect(() => {
    listAllUploads(adminListedPage);
  }, [adminListedPage]);
  }, [adminListedPage, mimeFilter]);

  useEffect(() => {
    if (pub && !self) {
@ -187,40 +171,26 @@
        </Button>
        <Button
          className="flex-1"
          onClick={doUpload} disabled={login === undefined}>
          onClick={doUpload}
          disabled={login === undefined}
        >
          Upload
        </Button>
      </div>
      <hr />
      {!listedFiles && <Button disabled={login === undefined} onClick={() => listUploads(0)}>
        List Uploads
      </Button>}
      {!listedFiles && (
        <Button disabled={login === undefined} onClick={() => listUploads(0)}>
          List Uploads
        </Button>
      )}


      {self && <div className="flex justify-between font-medium">
        <div>Uploads: {self.file_count.toLocaleString()}</div>
        <div>Total Size: {FormatBytes(self.total_size)}</div>
      </div>}

      {login && myLegacyFiles.length > 0 && (
        <div className="flex flex-col gap-4 font-bold">
          You have {myLegacyFiles.length.toLocaleString()} files which can be migrated from void.cat
          <div className="flex gap-2">
            <Button onClick={() => migrateLegacy()}>
              Migrate Files
            </Button>
            <Button onClick={() => setShowLegacy(s => !s)}>
              {!showLegacy ? "Show Files" : "Hide Files"}
            </Button>
          </div>
          {bulkPrgress !== undefined && <progress value={bulkPrgress} />}
      {self && (
        <div className="flex justify-between font-medium">
          <div>Uploads: {self.file_count.toLocaleString()}</div>
          <div>Total Size: {FormatBytes(self.total_size)}</div>
        </div>
      )}
      {showLegacy && (
        <FileList
          files={myLegacyFiles.map(f => ({ id: f, url: `https://void.cat/d/${f}` }))}
        />
      )}

      {listedFiles && (
        <FileList
          files={listedFiles.files}
@ -239,6 +209,18 @@
      <hr />
      <h3>Admin File List:</h3>
      <Button onClick={() => listAllUploads(0)}>List All Uploads</Button>
      <div>
        <select value={mimeFilter} onChange={e => setMimeFilter(e.target.value)}>
          <option value={""}>All</option>
          <option>image/webp</option>
          <option>image/jpeg</option>
          <option>image/jpg</option>
          <option>image/png</option>
          <option>image/gif</option>
          <option>video/mp4</option>
          <option>video/mov</option>
        </select>
      </div>
      {adminListedFiles && (
        <FileList
          files={adminListedFiles.files}
@ -248,8 +230,7 @@
          onDelete={async (x) => {
            await deleteFile(x);
            await listAllUploads(adminListedPage);
          }
          }
          }}
        />
      )}
    </>

@ -1 +1 @@
{"root":["./src/App.tsx","./src/const.ts","./src/login.ts","./src/main.tsx","./src/vite-env.d.ts","./src/components/button.tsx","./src/components/profile.tsx","./src/hooks/login.ts","./src/hooks/publisher.ts","./src/upload/admin.ts","./src/upload/blossom.ts","./src/upload/index.ts","./src/upload/nip96.ts","./src/views/files.tsx","./src/views/header.tsx","./src/views/upload.tsx"],"version":"5.6.2"}
{"root":["./src/App.tsx","./src/const.ts","./src/login.ts","./src/main.tsx","./src/vite-env.d.ts","./src/components/button.tsx","./src/components/profile.tsx","./src/hooks/login.ts","./src/hooks/publisher.ts","./src/upload/admin.ts","./src/upload/blossom.ts","./src/upload/index.ts","./src/upload/nip96.ts","./src/views/files.tsx","./src/views/header.tsx","./src/views/reports.tsx","./src/views/upload.tsx"],"version":"5.6.2"}