refactor: convert to workspace

kieran 2025-01-29 11:48:57 +00:00
parent 20c9d107b7
commit 9045bb93e4
No known key found for this signature in database
GPG Key ID: DE71CEB3925BE941
56 changed files with 6215 additions and 1123 deletions

Cargo.lock (generated, Executable file → Normal file, 1559 lines)

File diff suppressed because it is too large

Cargo.toml

@@ -1,81 +1,20 @@
-[package]
-name = "zap-stream-core"
-version = "0.1.0"
-edition = "2021"
-
-[[bin]]
-name = "zap-stream-core"
-path = "src/bin/zap_stream_core.rs"
-
-[features]
-default = ["test-pattern", "srt", "rtmp"]
-srt = ["dep:srt-tokio"]
-rtmp = ["dep:rml_rtmp"]
-local-overseer = [] # WIP
-webhook-overseer = [] # WIP
-zap-stream = [
-    "dep:nostr-sdk",
-    "dep:zap-stream-db",
-    "dep:fedimint-tonic-lnd",
-    "dep:reqwest",
-    "dep:base64",
-    "dep:sha2",
-    "tokio/fs",
-]
-test-pattern = [
-    "dep:resvg",
-    "dep:usvg",
-    "dep:tiny-skia",
-    "dep:fontdue",
-    "dep:ringbuf",
-    "zap-stream-db/test-pattern"
-]
-
-[dependencies]
+[workspace]
+resolver = "2"
+members = [
+    "crates/core",
+    "crates/zap-stream",
+    "crates/zap-stream-db"
+]
+
+[workspace.dependencies]
 ffmpeg-rs-raw = { git = "https://git.v0l.io/Kieran/ffmpeg-rs-raw.git", rev = "df69b2f05da4279e36ad55086d77b45b2caf5174" }
 tokio = { version = "1.36.0", features = ["rt", "rt-multi-thread", "macros"] }
 anyhow = { version = "^1.0.91", features = ["backtrace"] }
-pretty_env_logger = "0.5.0"
-tokio-stream = "0.1.14"
-futures-util = "0.3.30"
 async-trait = "0.1.77"
 log = "0.4.21"
 uuid = { version = "1.8.0", features = ["v4", "serde"] }
 serde = { version = "1.0.197", features = ["derive"] }
-config = { version = "0.14.0", features = ["yaml"] }
 url = "2.5.0"
-itertools = "0.13.0"
-rand = "0.8.5"
-clap = { version = "4.5.16", features = ["derive"] }
-libc = "0.2.162"
-m3u8-rs = "6.0.0"
+itertools = "0.14.0"
 chrono = "^0.4.38"
 hex = "0.4.3"
-hyper = { version = "1.5.1", features = ["server"] }
-hyper-util = { version = "0.1.10", features = ["tokio"] }
-bytes = "1.8.0"
-http-body-util = "0.1.2"
-tokio-util = "0.7.13"
-
-# srt
-srt-tokio = { version = "0.4.3", optional = true }
-# rtmp
-rml_rtmp = { version = "0.8.0", optional = true }
-# test-pattern
-resvg = { version = "0.44.0", optional = true }
-usvg = { version = "0.44.0", optional = true }
-tiny-skia = { version = "0.11.4", optional = true }
-fontdue = { version = "0.9.2", optional = true }
-ringbuf = { version = "0.4.7", optional = true }
-
-# zap-stream
-zap-stream-db = { path = "zap-stream-db", optional = true }
-nostr-sdk = { version = "0.36.0", optional = true }
-fedimint-tonic-lnd = { version = "0.2.0", optional = true, default-features = false, features = ["invoicesrpc", "versionrpc"] }
-reqwest = { version = "0.12.9", optional = true, features = ["stream"] }
-base64 = { version = "0.22.1", optional = true }
-sha2 = { version = "0.10.8", optional = true }


@@ -1,4 +1,5 @@
 - RTMP?
 - Setup multi-variant output
 - API parity https://git.v0l.io/Kieran/zap.stream/issues/7
 - HLS-LL
+- Delete old segments (HLS+N94)

crates/core/Cargo.lock (generated, Executable file, 4818 lines)

File diff suppressed because it is too large

crates/core/Cargo.toml (new file, 44 lines)

@@ -0,0 +1,44 @@
[package]
name = "zap-stream-core"
version = "0.1.0"
edition = "2021"

[features]
default = ["test-pattern", "srt", "rtmp"]
srt = ["dep:srt-tokio"]
rtmp = ["dep:rml_rtmp"]
local-overseer = [] # WIP
webhook-overseer = [] # WIP
test-pattern = [
    "dep:resvg",
    "dep:usvg",
    "dep:tiny-skia",
    "dep:fontdue",
    "dep:ringbuf",
]

[dependencies]
ffmpeg-rs-raw.workspace = true
tokio.workspace = true
anyhow.workspace = true
async-trait.workspace = true
log.workspace = true
uuid.workspace = true
serde.workspace = true
hex.workspace = true
itertools.workspace = true
futures-util = "0.3.30"
m3u8-rs = "6.0.0"

# srt
srt-tokio = { version = "0.4.3", optional = true }
# rtmp
rml_rtmp = { version = "0.8.0", optional = true }
# test-pattern
resvg = { version = "0.44.0", optional = true }
usvg = { version = "0.44.0", optional = true }
tiny-skia = { version = "0.11.4", optional = true }
fontdue = { version = "0.9.2", optional = true }
ringbuf = { version = "0.4.7", optional = true }
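Each optional dependency above is compiled only when its feature is enabled; the gating pattern used throughout the crate looks roughly like the following sketch (module names here are illustrative, not a verbatim quote of the source):

// Illustrative sketch of feature gating: the SRT ingress module (and its
// srt-tokio dependency) only compiles when building with `--features srt`.
#[cfg(feature = "srt")]
pub mod srt;

#[cfg(feature = "rtmp")]
pub mod rtmp;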


@@ -47,7 +47,7 @@ relay {
     port = 7777
 
     # Set OS-limit on maximum number of open files/sockets (if 0, don't attempt to set) (restart required)
-    nofiles = 1000000
+    nofiles = 0
 
     # HTTP header that contains the client's real IP, before reverse proxying (ie x-real-ip) (MUST be all lower-case)
     realIpHeader = ""

crates/core/src/lib.rs (new file, 6 lines)

@@ -0,0 +1,6 @@
pub mod egress;
pub mod ingress;
pub mod mux;
pub mod overseer;
pub mod pipeline;
pub mod variant;

crates/core/src/overseer/local.rs

@@ -20,6 +20,10 @@ impl LocalOverseer {
 
 #[async_trait]
 impl Overseer for LocalOverseer {
+    async fn check_streams(&self) -> Result<()> {
+        todo!()
+    }
+
     async fn start_stream(
         &self,
         _connection: &ConnectionInfo,

crates/core/src/overseer/mod.rs (new file)

@@ -0,0 +1,84 @@
use crate::ingress::ConnectionInfo;
use crate::pipeline::PipelineConfig;
use anyhow::Result;
use async_trait::async_trait;
use std::cmp::PartialEq;
use std::path::PathBuf;
use uuid::Uuid;

#[cfg(feature = "local-overseer")]
mod local;

#[cfg(feature = "webhook-overseer")]
mod webhook;

#[cfg(feature = "zap-stream")]
mod zap_stream;

/// A copy of [ffmpeg_rs_raw::DemuxerInfo] without internal ptr
#[derive(PartialEq, Clone)]
pub struct IngressInfo {
    pub bitrate: usize,
    pub streams: Vec<IngressStream>,
}

/// A copy of [ffmpeg_rs_raw::StreamInfo] without ptr
#[derive(PartialEq, Clone)]
pub struct IngressStream {
    pub index: usize,
    pub stream_type: IngressStreamType,
    pub codec: isize,
    pub format: isize,
    pub width: usize,
    pub height: usize,
    pub fps: f32,
    pub sample_rate: usize,
    pub language: String,
}

#[derive(PartialEq, Eq, Clone)]
pub enum IngressStreamType {
    Video,
    Audio,
    Subtitle,
}

#[async_trait]
/// The control process that oversees streaming operations
pub trait Overseer: Send + Sync {
    /// Check all streams
    async fn check_streams(&self) -> Result<()>;

    /// Set up a new streaming pipeline
    async fn start_stream(
        &self,
        connection: &ConnectionInfo,
        stream_info: &IngressInfo,
    ) -> Result<PipelineConfig>;

    /// A new segment (HLS etc.) was generated for a stream variant
    ///
    /// This handler is usually used for distribution / billing
    async fn on_segment(
        &self,
        pipeline_id: &Uuid,
        variant_id: &Uuid,
        index: u64,
        duration: f32,
        path: &PathBuf,
    ) -> Result<()>;

    /// At a regular interval, pipeline will emit one of the frames for processing as a
    /// thumbnail
    async fn on_thumbnail(
        &self,
        pipeline_id: &Uuid,
        width: usize,
        height: usize,
        path: &PathBuf,
    ) -> Result<()>;

    /// Stream is finished
    async fn on_end(&self, pipeline_id: &Uuid) -> Result<()>;
}
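To orient readers of the new crate boundary, a minimal no-op implementor of the trait above might look as follows; `NullOverseer` is a hypothetical sketch for illustration (it is not part of this commit), but every method signature matches the trait exactly as defined in this file.

use crate::ingress::ConnectionInfo;
use crate::overseer::{IngressInfo, Overseer};
use crate::pipeline::PipelineConfig;
use anyhow::{bail, Result};
use async_trait::async_trait;
use std::path::PathBuf;
use uuid::Uuid;

/// Hypothetical no-op overseer; accepts no streams and ignores all events.
pub struct NullOverseer;

#[async_trait]
impl Overseer for NullOverseer {
    async fn check_streams(&self) -> Result<()> {
        Ok(()) // nothing to reconcile
    }

    async fn start_stream(
        &self,
        _connection: &ConnectionInfo,
        _stream_info: &IngressInfo,
    ) -> Result<PipelineConfig> {
        bail!("null overseer rejects all streams")
    }

    async fn on_segment(
        &self,
        _pipeline_id: &Uuid,
        _variant_id: &Uuid,
        _index: u64,
        _duration: f32,
        _path: &PathBuf,
    ) -> Result<()> {
        Ok(())
    }

    async fn on_thumbnail(
        &self,
        _pipeline_id: &Uuid,
        _width: usize,
        _height: usize,
        _path: &PathBuf,
    ) -> Result<()> {
        Ok(())
    }

    async fn on_end(&self, _pipeline_id: &Uuid) -> Result<()> {
        Ok(())
    }
}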

crates/core/src/overseer/webhook.rs

@@ -21,6 +21,10 @@ impl WebhookOverseer {
 
 #[async_trait]
 impl Overseer for WebhookOverseer {
+    async fn check_streams(&self) -> Result<()> {
+        todo!()
+    }
+
     async fn start_stream(
         &self,
         connection: &ConnectionInfo,

(Image file changed: 39 KiB before, 39 KiB after)

crates/zap-stream-db/Cargo.toml

@@ -8,8 +8,8 @@ default = []
 test-pattern = []
 
 [dependencies]
-anyhow = "^1.0.70"
-chrono = { version = "0.4.38", features = ["serde"] }
-sqlx = { version = "0.8.1", features = ["runtime-tokio", "migrate", "mysql", "chrono"] }
-log = "0.4.22"
-uuid = { version = "1.11.0", features = ["v4"] }
+anyhow.workspace = true
+chrono.workspace = true
+uuid.workspace = true
+sqlx = { version = "0.8.1", features = ["runtime-tokio", "migrate", "mysql", "chrono"] }


@@ -42,6 +42,16 @@ impl ZapStreamDb {
             .map_err(anyhow::Error::new)?)
     }
 
+    /// Update a user's balance
+    pub async fn update_user_balance(&self, uid: u64, diff: i64) -> Result<()> {
+        sqlx::query("update user set balance = balance + ? where id = ?")
+            .bind(diff)
+            .bind(uid)
+            .execute(&self.db)
+            .await?;
+        Ok(())
+    }
+
     pub async fn upsert_user(&self, pubkey: &[u8; 32]) -> Result<u64> {
         let res = sqlx::query("insert ignore into user(pubkey) values(?) returning id")
             .bind(pubkey.as_slice())
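Because `update_user_balance` takes a signed diff, the same query serves both credits and debits; a usage sketch (hypothetical amounts, units assumed to be millisats given the `balance / 1000` logging later in this commit):

// Sketch: credit a top-up, then debit a delivered segment.
// `db` is a ZapStreamDb handle, `uid` a row id from upsert_user.
db.update_user_balance(uid, 50_000).await?;  // +50 sats, in millisats
db.update_user_balance(uid, -10_000).await?; // -10 sats, in millisats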


@@ -1,7 +1,6 @@
 use chrono::{DateTime, Utc};
 use sqlx::{FromRow, Type};
 use std::fmt::{Display, Formatter};
-use uuid::Uuid;
 
 #[derive(Debug, Clone, FromRow)]
 pub struct User {

crates/zap-stream/Cargo.lock (generated, new file, 7 lines)

@@ -0,0 +1,7 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 4

[[package]]
name = "zap-stream"
version = "0.1.0"

crates/zap-stream/Cargo.toml (new file, 43 lines)

@@ -0,0 +1,43 @@
[package]
name = "zap-stream"
version = "0.1.0"
edition = "2021"

[features]
default = ["srt", "rtmp"]
srt = ["zap-stream-core/srt"]
rtmp = ["zap-stream-core/rtmp"]
test-pattern = ["zap-stream-core/test-pattern", "zap-stream-db/test-pattern"]

[dependencies]
zap-stream-db = { path = "../zap-stream-db" }
zap-stream-core = { path = "../core" }
uuid.workspace = true
ffmpeg-rs-raw.workspace = true
anyhow.workspace = true
log.workspace = true
tokio.workspace = true
async-trait.workspace = true
serde.workspace = true
chrono.workspace = true
hex.workspace = true
url.workspace = true

# http stuff
hyper = { version = "1.5.1", features = ["server"] }
bytes = "1.8.0"
http-body-util = "0.1.2"
tokio-util = "0.7.13"
hyper-util = "0.1.10"

# direct deps
config = { version = "0.15.6", features = ["yaml"] }
nostr-sdk = { version = "0.38.0" }
fedimint-tonic-lnd = { version = "0.2.0", default-features = false, features = ["invoicesrpc", "versionrpc"] }
reqwest = { version = "0.12.9", features = ["stream", "json"] }
base64 = { version = "0.22.1" }
sha2 = { version = "0.10.8" }
pretty_env_logger = "0.5.0"
clap = { version = "4.5.16", features = ["derive"] }
futures-util = "0.3.31"


@@ -0,0 +1,138 @@
##
## Default strfry config
##
# Directory that contains the strfry LMDB database (restart required)
db = "./strfry-db/"
dbParams {
# Maximum number of threads/processes that can simultaneously have LMDB transactions open (restart required)
maxreaders = 256
# Size of mmap() to use when loading LMDB (default is 10TB, does *not* correspond to disk-space used) (restart required)
mapsize = 10995116277760
# Disables read-ahead when accessing the LMDB mapping. Reduces IO activity when DB size is larger than RAM. (restart required)
noReadAhead = false
}
events {
# Maximum size of normalised JSON, in bytes
maxEventSize = 65536
# Events newer than this will be rejected
rejectEventsNewerThanSeconds = 900
# Events older than this will be rejected
rejectEventsOlderThanSeconds = 94608000
# Ephemeral events older than this will be rejected
rejectEphemeralEventsOlderThanSeconds = 60
# Ephemeral events will be deleted from the DB when older than this
ephemeralEventsLifetimeSeconds = 300
# Maximum number of tags allowed
maxNumTags = 2000
# Maximum size for tag values, in bytes
maxTagValSize = 1024
}
relay {
# Interface to listen on. Use 0.0.0.0 to listen on all interfaces (restart required)
bind = "0.0.0.0"
# Port to open for the nostr websocket protocol (restart required)
port = 7777
# Set OS-limit on maximum number of open files/sockets (if 0, don't attempt to set) (restart required)
nofiles = 1000000
# HTTP header that contains the client's real IP, before reverse proxying (ie x-real-ip) (MUST be all lower-case)
realIpHeader = ""
info {
# NIP-11: Name of this server. Short/descriptive (< 30 characters)
name = "strfry default"
# NIP-11: Detailed information about relay, free-form
description = "This is a strfry instance."
# NIP-11: Administrative nostr pubkey, for contact purposes
pubkey = ""
# NIP-11: Alternative administrative contact (email, website, etc)
contact = ""
}
# Maximum accepted incoming websocket frame size (should be larger than max event) (restart required)
maxWebsocketPayloadSize = 131072
# Websocket-level PING message frequency (should be less than any reverse proxy idle timeouts) (restart required)
autoPingSeconds = 55
# If TCP keep-alive should be enabled (detect dropped connections to upstream reverse proxy)
enableTcpKeepalive = false
# How much uninterrupted CPU time a REQ query should get during its DB scan
queryTimesliceBudgetMicroseconds = 10000
# Maximum records that can be returned per filter
maxFilterLimit = 500
# Maximum number of subscriptions (concurrent REQs) a connection can have open at any time
maxSubsPerConnection = 20
writePolicy {
# If non-empty, path to an executable script that implements the writePolicy plugin logic
plugin = "/app/write-policy.py"
}
compression {
# Use permessage-deflate compression if supported by client. Reduces bandwidth, but slight increase in CPU (restart required)
enabled = true
# Maintain a sliding window buffer for each connection. Improves compression, but uses more memory (restart required)
slidingWindow = true
}
logging {
# Dump all incoming messages
dumpInAll = false
# Dump all incoming EVENT messages
dumpInEvents = false
# Dump all incoming REQ/CLOSE messages
dumpInReqs = false
# Log performance metrics for initial REQ database scans
dbScanPerf = false
# Log reason for invalid event rejection? Can be disabled to silence excessive logging
invalidEvents = true
}
numThreads {
# Ingester threads: route incoming requests, validate events/sigs (restart required)
ingester = 3
# reqWorker threads: Handle initial DB scan for events (restart required)
reqWorker = 3
# reqMonitor threads: Handle filtering of new events (restart required)
reqMonitor = 3
# negentropy threads: Handle negentropy protocol messages (restart required)
negentropy = 2
}
negentropy {
# Support negentropy protocol messages
enabled = true
# Maximum records that sync will process before returning an error
maxSyncEvents = 1000000
}
}

crates/zap-stream/src/blossom.rs

@@ -59,15 +59,11 @@ impl Blossom {
     ) -> Result<BlobDescriptor> {
         let mut f = File::open(from_file).await?;
         let hash = Self::hash_file(&mut f).await?;
-        let auth_event = EventBuilder::new(
-            Kind::Custom(24242),
-            "Upload blob",
-            [
-                Tag::hashtag("upload"),
-                Tag::parse(&["x", &hash])?,
-                Tag::expiration(Timestamp::now().add(60)),
-            ],
-        );
+        let auth_event = EventBuilder::new(Kind::Custom(24242), "Upload blob").tags([
+            Tag::hashtag("upload"),
+            Tag::parse(["x", &hash])?,
+            Tag::expiration(Timestamp::now().add(60)),
+        ]);
 
         let auth_event = auth_event.sign_with_keys(keys)?;
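The `x` tag carries the blob's SHA-256 hex digest. `Self::hash_file` is not shown in this hunk; a minimal sketch of such a helper (assuming sha2 and hex, both already dependencies of this crate) could look like:

use sha2::{Digest, Sha256};
use tokio::fs::File;
use tokio::io::AsyncReadExt;

// Sketch: stream the file through SHA-256 and hex-encode the digest,
// which becomes the value of the Blossom auth event's "x" tag.
async fn hash_file(f: &mut File) -> anyhow::Result<String> {
    let mut hasher = Sha256::new();
    let mut buf = [0u8; 4096];
    loop {
        let n = f.read(&mut buf).await?;
        if n == 0 {
            break;
        }
        hasher.update(&buf[..n]);
    }
    Ok(hex::encode(hasher.finalize()))
}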

crates/zap-stream/src/http.rs

@@ -1,4 +1,3 @@
-use crate::overseer::Overseer;
 use bytes::Bytes;
 use futures_util::TryStreamExt;
 use http_body_util::combinators::BoxBody;
@@ -13,16 +12,18 @@ use std::pin::Pin;
 use std::sync::Arc;
 use tokio::fs::File;
 use tokio_util::io::ReaderStream;
+use zap_stream_core::overseer::Overseer;
+
+use crate::overseer::ZapStreamOverseer;
 
 #[derive(Clone)]
 pub struct HttpServer {
     index: String,
     files_dir: PathBuf,
-    overseer: Arc<dyn Overseer>,
+    overseer: Arc<ZapStreamOverseer>,
 }
 
 impl HttpServer {
-    pub fn new(index: String, files_dir: PathBuf, overseer: Arc<dyn Overseer>) -> Self {
+    pub fn new(index: String, files_dir: PathBuf, overseer: Arc<ZapStreamOverseer>) -> Self {
         Self {
             index,
             files_dir,

crates/zap-stream/src/main.rs

@@ -14,8 +14,6 @@ use tokio::net::TcpListener;
 use tokio::task::JoinHandle;
 use tokio::time::sleep;
 use url::Url;
-use zap_stream_core::background::BackgroundMonitor;
-use zap_stream_core::http::HttpServer;
 #[cfg(feature = "rtmp")]
 use zap_stream_core::ingress::rtmp;
 #[cfg(feature = "srt")]
@@ -25,7 +23,16 @@ use zap_stream_core::ingress::test;
 use zap_stream_core::ingress::{file, tcp};
 use zap_stream_core::overseer::Overseer;
-use zap_stream_core::settings::Settings;
+use crate::http::HttpServer;
+use crate::monitor::BackgroundMonitor;
+use crate::overseer::ZapStreamOverseer;
+use crate::settings::Settings;
+
+mod blossom;
+mod http;
+mod monitor;
+mod overseer;
+mod settings;
 
 #[derive(Parser, Debug)]
 struct Args {}
@@ -103,7 +110,7 @@ async fn main() -> Result<()> {
 fn try_create_listener(
     u: &str,
     out_dir: &str,
-    overseer: &Arc<dyn Overseer>,
+    overseer: &Arc<ZapStreamOverseer>,
 ) -> Result<JoinHandle<Result<()>>> {
     let url: Url = u.parse()?;
     match url.scheme() {
@@ -113,7 +120,7 @@ fn try_create_listener(
             format!("{}:{}", url.host().unwrap(), url.port().unwrap()),
             overseer.clone(),
         ))),
-        #[cfg(feature = "srt")]
+        #[cfg(feature = "rtmp")]
         "rtmp" => Ok(tokio::spawn(rtmp::listen(
             out_dir.to_string(),
             format!("{}:{}", url.host().unwrap(), url.port().unwrap()),

crates/zap-stream/src/monitor.rs

@@ -1,14 +1,15 @@
-use crate::overseer::Overseer;
+use crate::overseer::ZapStreamOverseer;
 use anyhow::Result;
 use std::sync::Arc;
+use zap_stream_core::overseer::Overseer;
 
 /// Monitor stream status, perform any necessary cleanup
 pub struct BackgroundMonitor {
-    overseer: Arc<dyn Overseer>,
+    overseer: Arc<ZapStreamOverseer>,
 }
 
 impl BackgroundMonitor {
-    pub fn new(overseer: Arc<dyn Overseer>) -> Self {
+    pub fn new(overseer: Arc<ZapStreamOverseer>) -> Self {
         Self { overseer }
     }

crates/zap-stream/src/overseer.rs

@@ -1,11 +1,10 @@
 use crate::blossom::{BlobDescriptor, Blossom};
-use crate::egress::hls::HlsEgress;
-use crate::egress::EgressConfig;
-use crate::ingress::ConnectionInfo;
-use crate::overseer::{get_default_variants, IngressInfo, Overseer};
-use crate::pipeline::{EgressType, PipelineConfig};
-use crate::settings::LndSettings;
-use crate::variant::StreamMapping;
+use zap_stream_core::egress::hls::HlsEgress;
+use zap_stream_core::egress::EgressConfig;
+use zap_stream_core::ingress::ConnectionInfo;
+use zap_stream_core::overseer::{IngressInfo, IngressStreamType, Overseer};
+use zap_stream_core::pipeline::{EgressType, PipelineConfig};
+use zap_stream_core::variant::{StreamMapping, VariantStream};
 use anyhow::{anyhow, bail, Result};
 use async_trait::async_trait;
 use base64::alphabet::STANDARD;
@@ -32,11 +31,16 @@ use std::fs::create_dir_all;
 use std::path::PathBuf;
 use std::str::FromStr;
 use std::sync::Arc;
+use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVPixelFormat::AV_PIX_FMT_YUV420P;
 use tokio::sync::RwLock;
 use url::Url;
 use uuid::Uuid;
+use zap_stream_core::variant::audio::AudioVariant;
+use zap_stream_core::variant::mapping::VariantMapping;
+use zap_stream_core::variant::video::VideoVariant;
 use zap_stream_db::sqlx::Encode;
 use zap_stream_db::{UserStream, UserStreamState, ZapStreamDb};
+
+use crate::settings::LndSettings;
 
 const STREAM_EVENT_KIND: u16 = 30_313;
@@ -77,12 +81,26 @@ impl ZapStreamOverseer {
         let db = ZapStreamDb::new(db).await?;
         db.migrate().await?;
 
+        #[cfg(debug_assertions)]
+        {
+            let uid = db.upsert_user(&[0; 32]).await?;
+            db.update_user_balance(uid, 100_000_000).await?;
+            let user = db.get_user(uid).await?;
+            info!(
+                "ZERO pubkey: uid={},key={},balance={}",
+                user.id,
+                user.stream_key,
+                user.balance / 1000
+            );
+        }
+
         let mut lnd = fedimint_tonic_lnd::connect(
             lnd.address.clone(),
             PathBuf::from(&lnd.cert),
             PathBuf::from(&lnd.macaroon),
         )
         .await?;
 
         let version = lnd
             .versioner()
@@ -115,6 +133,52 @@ impl ZapStreamOverseer {
         })
     }
 
+    pub(crate) async fn api(&self, req: Request<Incoming>) -> Result<Response<BoxBody<Bytes, anyhow::Error>>> {
+        let base = Response::builder()
+            .header("server", "zap-stream-core")
+            .header("access-control-allow-origin", "*")
+            .header("access-control-allow-headers", "*")
+            .header("access-control-allow-methods", "HEAD, GET");
+
+        Ok(match (req.method(), req.uri().path()) {
+            (&Method::GET, "/api/v1/account") => {
+                self.check_nip98_auth(req)?;
+                base.body(Default::default())?
+            }
+            (&Method::PATCH, "/api/v1/account") => {
+                bail!("Not implemented")
+            }
+            (&Method::GET, "/api/v1/topup") => {
+                bail!("Not implemented")
+            }
+            (&Method::PATCH, "/api/v1/event") => {
+                bail!("Not implemented")
+            }
+            (&Method::POST, "/api/v1/withdraw") => {
+                bail!("Not implemented")
+            }
+            (&Method::POST, "/api/v1/account/forward") => {
+                bail!("Not implemented")
+            }
+            (&Method::DELETE, "/api/v1/account/forward/<id>") => {
+                bail!("Not implemented")
+            }
+            (&Method::GET, "/api/v1/account/history") => {
+                bail!("Not implemented")
+            }
+            (&Method::GET, "/api/v1/account/keys") => {
+                bail!("Not implemented")
+            }
+            _ => {
+                if req.method() == Method::OPTIONS {
+                    base.body(Default::default())?
+                } else {
+                    base.status(404).body(Default::default())?
+                }
+            }
+        })
+    }
+
     fn stream_to_event_builder(&self, stream: &UserStream) -> Result<EventBuilder> {
         let mut tags = vec![
             Tag::parse(&["d".to_string(), stream.id.to_string()])?,
@@ -159,60 +223,56 @@ impl ZapStreamOverseer {
         let kind = Kind::from(STREAM_EVENT_KIND);
         let coord = Coordinate::new(kind, self.keys.public_key).identifier(&stream.id);
-        tags.push(Tag::parse(&[
+        tags.push(Tag::parse([
             "alt",
             &format!("Watch live on https://zap.stream/{}", coord.to_bech32()?),
         ])?);
-        Ok(EventBuilder::new(kind, "", tags))
+        Ok(EventBuilder::new(kind, "").tags(tags))
     }
 
     fn blob_to_event_builder(&self, stream: &BlobDescriptor) -> Result<EventBuilder> {
         let tags = if let Some(tags) = stream.nip94.as_ref() {
             tags.iter()
-                .map_while(|(k, v)| Tag::parse(&[k, v]).ok())
+                .map_while(|(k, v)| Tag::parse([k, v]).ok())
                 .collect()
         } else {
             let mut tags = vec![
-                Tag::parse(&["x", &stream.sha256])?,
-                Tag::parse(&["url", &stream.url])?,
-                Tag::parse(&["size", &stream.size.to_string()])?,
+                Tag::parse(["x", &stream.sha256])?,
+                Tag::parse(["url", &stream.url])?,
+                Tag::parse(["size", &stream.size.to_string()])?,
             ];
             if let Some(m) = stream.mime_type.as_ref() {
-                tags.push(Tag::parse(&["m", m])?)
+                tags.push(Tag::parse(["m", m])?)
             }
             tags
         };
 
-        Ok(EventBuilder::new(Kind::FileMetadata, "", tags))
+        Ok(EventBuilder::new(Kind::FileMetadata, "").tags(tags))
     }
 
     async fn publish_stream_event(&self, stream: &UserStream, pubkey: &Vec<u8>) -> Result<Event> {
         let extra_tags = vec![
-            Tag::parse(&["p", hex::encode(pubkey).as_str(), "", "host"])?,
-            Tag::parse(&[
+            Tag::parse(["p", hex::encode(pubkey).as_str(), "", "host"])?,
+            Tag::parse([
                 "streaming",
                 self.map_to_stream_public_url(stream, "live.m3u8")?.as_str(),
             ])?,
-            Tag::parse(&[
+            Tag::parse([
                 "image",
                 self.map_to_stream_public_url(stream, "thumb.webp")?
                     .as_str(),
             ])?,
-            Tag::parse(&["service", self.map_to_public_url("api/v1")?.as_str()])?,
+            Tag::parse(["service", self.map_to_public_url("api/v1")?.as_str()])?,
         ];
         let ev = self
             .stream_to_event_builder(stream)?
-            .add_tags(extra_tags)
+            .tags(extra_tags)
             .sign_with_keys(&self.keys)?;
         self.client.send_event(ev.clone()).await?;
         Ok(ev)
     }
 
-    fn map_to_stream_public_url(
-        &self,
-        stream: &UserStream,
-        path: &str,
-    ) -> Result<String> {
+    fn map_to_stream_public_url(&self, stream: &UserStream, path: &str) -> Result<String> {
         self.map_to_public_url(&format!("{}/{}", stream.id, path))
     }
@@ -252,52 +312,6 @@ struct AccountInfo {
 }
 
 #[async_trait]
 impl Overseer for ZapStreamOverseer {
-    async fn api(&self, req: Request<Incoming>) -> Result<Response<BoxBody<Bytes, anyhow::Error>>> {
-        let base = Response::builder()
-            .header("server", "zap-stream-core")
-            .header("access-control-allow-origin", "*")
-            .header("access-control-allow-headers", "*")
-            .header("access-control-allow-methods", "HEAD, GET");
-
-        Ok(match (req.method(), req.uri().path()) {
-            (&Method::GET, "/api/v1/account") => {
-                self.check_nip98_auth(req)?;
-                base.body(Default::default())?
-            }
-            (&Method::PATCH, "/api/v1/account") => {
-                bail!("Not implemented")
-            }
-            (&Method::GET, "/api/v1/topup") => {
-                bail!("Not implemented")
-            }
-            (&Method::PATCH, "/api/v1/event") => {
-                bail!("Not implemented")
-            }
-            (&Method::POST, "/api/v1/withdraw") => {
-                bail!("Not implemented")
-            }
-            (&Method::POST, "/api/v1/account/forward") => {
-                bail!("Not implemented")
-            }
-            (&Method::DELETE, "/api/v1/account/forward/<id>") => {
-                bail!("Not implemented")
-            }
-            (&Method::GET, "/api/v1/account/history") => {
-                bail!("Not implemented")
-            }
-            (&Method::GET, "/api/v1/account/keys") => {
-                bail!("Not implemented")
-            }
-            _ => {
-                if req.method() == Method::OPTIONS {
-                    base.body(Default::default())?
-                } else {
-                    base.status(404).body(Default::default())?
-                }
-            }
-        })
-    }
-
     async fn check_streams(&self) -> Result<()> {
         let active_streams = self.db.list_live_streams().await?;
         for stream in active_streams {
@@ -397,12 +411,12 @@ impl Overseer for ZapStreamOverseer {
             pipeline_id
         );
 
         let mut n94 = self.blob_to_event_builder(blob)?.add_tags([
-            Tag::parse(&["a", &a_tag])?,
-            Tag::parse(&["d", variant_id.to_string().as_str()])?,
-            Tag::parse(&["duration", duration.to_string().as_str()])?,
+            Tag::parse(["a", &a_tag])?,
+            Tag::parse(["d", variant_id.to_string().as_str()])?,
+            Tag::parse(["duration", duration.to_string().as_str()])?,
         ]);
         for b in blobs.iter().skip(1) {
-            n94 = n94.add_tags(Tag::parse(&["url", &b.url]));
+            n94 = n94.tag(Tag::parse(["url", &b.url])?);
         }
         let n94 = n94.sign_with_keys(&self.keys)?;
         let cc = self.client.clone();
@@ -444,3 +458,65 @@ impl Overseer for ZapStreamOverseer {
         Ok(())
     }
 }
+
+fn get_default_variants(info: &IngressInfo) -> Result<Vec<VariantStream>> {
+    let mut vars: Vec<VariantStream> = vec![];
+    if let Some(video_src) = info
+        .streams
+        .iter()
+        .find(|c| c.stream_type == IngressStreamType::Video)
+    {
+        vars.push(VariantStream::CopyVideo(VariantMapping {
+            id: Uuid::new_v4(),
+            src_index: video_src.index,
+            dst_index: 0,
+            group_id: 0,
+        }));
+        vars.push(VariantStream::Video(VideoVariant {
+            mapping: VariantMapping {
+                id: Uuid::new_v4(),
+                src_index: video_src.index,
+                dst_index: 1,
+                group_id: 1,
+            },
+            width: 1280,
+            height: 720,
+            fps: video_src.fps,
+            bitrate: 3_000_000,
+            codec: "libx264".to_string(),
+            profile: 100,
+            level: 51,
+            keyframe_interval: video_src.fps as u16 * 2,
+            pixel_format: AV_PIX_FMT_YUV420P as u32,
+        }));
+    }
+
+    if let Some(audio_src) = info
+        .streams
+        .iter()
+        .find(|c| c.stream_type == IngressStreamType::Audio)
+    {
+        vars.push(VariantStream::CopyAudio(VariantMapping {
+            id: Uuid::new_v4(),
+            src_index: audio_src.index,
+            dst_index: 2,
+            group_id: 0,
+        }));
+        vars.push(VariantStream::Audio(AudioVariant {
+            mapping: VariantMapping {
+                id: Uuid::new_v4(),
+                src_index: audio_src.index,
+                dst_index: 3,
+                group_id: 1,
+            },
+            bitrate: 192_000,
+            codec: "aac".to_string(),
+            channels: 2,
+            sample_rate: 48_000,
+            sample_fmt: "fltp".to_owned(),
+        }));
+    }
+
+    Ok(vars)
+}
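For a typical video+audio ingest, the ladder above yields four variants: pass-through copies in group 0 and a 720p/AAC transcode pair in group 1. A test-style sketch of that behaviour, using the IngressInfo/IngressStream types from crates/core (field values here are hypothetical):

// Sketch: a 1080p30 + 48 kHz audio ingest should produce
// [CopyVideo, Video(720p), CopyAudio, Audio(aac)].
let info = IngressInfo {
    bitrate: 6_000_000,
    streams: vec![
        IngressStream {
            index: 0,
            stream_type: IngressStreamType::Video,
            codec: 27, // hypothetical codec id (H.264 in FFmpeg)
            format: 0,
            width: 1920,
            height: 1080,
            fps: 30.0,
            sample_rate: 0,
            language: String::new(),
        },
        IngressStream {
            index: 1,
            stream_type: IngressStreamType::Audio,
            codec: 86018, // hypothetical codec id (AAC in FFmpeg)
            format: 0,
            width: 0,
            height: 0,
            fps: 0.0,
            sample_rate: 48_000,
            language: String::new(),
        },
    ],
};
let vars = get_default_variants(&info)?;
assert_eq!(vars.len(), 4);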

crates/zap-stream/src/settings.rs

@@ -1,4 +1,7 @@
+use crate::overseer::ZapStreamOverseer;
 use serde::{Deserialize, Serialize};
+use std::sync::Arc;
+use zap_stream_core::overseer::Overseer;
 
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct Settings {
@@ -55,3 +58,33 @@ pub struct LndSettings {
     pub cert: String,
     pub macaroon: String,
 }
+
+impl Settings {
+    pub async fn get_overseer(&self) -> anyhow::Result<Arc<ZapStreamOverseer>> {
+        match &self.overseer {
+            OverseerConfig::ZapStream {
+                nsec: private_key,
+                database,
+                lnd,
+                relays,
+                blossom,
+                cost,
+            } => Ok(Arc::new(
+                ZapStreamOverseer::new(
+                    &self.output_dir,
+                    &self.public_url,
+                    private_key,
+                    database,
+                    lnd,
+                    relays,
+                    blossom,
+                    *cost,
+                )
+                .await?,
+            )),
+            _ => {
+                panic!("Unsupported overseer");
+            }
+        }
+    }
+}
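Since the return type is now the concrete Arc&lt;ZapStreamOverseer&gt; rather than Arc&lt;dyn Overseer&gt;, call sites can reach zap-stream-specific methods such as api() directly. A hedged startup sketch (settings loading elided; index_html and files_dir are placeholder values):

// Sketch: build the overseer once, then share the Arc with every consumer.
let overseer = settings.get_overseer().await?;
let http = HttpServer::new(index_html, files_dir, overseer.clone());
let monitor = BackgroundMonitor::new(overseer.clone());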

(Image file changed: 118 KiB before, 118 KiB after)

src/background/mod.rs (deleted)

@@ -1,2 +0,0 @@
mod monitor;
pub use monitor::*;

src/lib.rs (deleted)

@@ -1,11 +0,0 @@
pub mod background;
#[cfg(feature = "zap-stream")]
pub mod blossom;
pub mod egress;
pub mod http;
pub mod ingress;
pub mod mux;
pub mod overseer;
pub mod pipeline;
pub mod settings;
pub mod variant;

src/overseer/mod.rs (deleted)

@@ -1,207 +0,0 @@
use crate::ingress::ConnectionInfo;
#[cfg(feature = "local-overseer")]
use crate::overseer::local::LocalOverseer;
#[cfg(feature = "webhook-overseer")]
use crate::overseer::webhook::WebhookOverseer;
#[cfg(feature = "zap-stream")]
use crate::overseer::zap_stream::ZapStreamOverseer;
use crate::pipeline::PipelineConfig;
#[cfg(any(
feature = "local-overseer",
feature = "webhook-overseer",
feature = "zap-stream"
))]
use crate::settings::OverseerConfig;
use crate::settings::Settings;
use crate::variant::audio::AudioVariant;
use crate::variant::mapping::VariantMapping;
use crate::variant::video::VideoVariant;
use crate::variant::VariantStream;
use anyhow::Result;
use async_trait::async_trait;
use bytes::Bytes;
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVPixelFormat::AV_PIX_FMT_YUV420P;
use http_body_util::combinators::BoxBody;
use http_body_util::Full;
use hyper::body::Incoming;
use hyper::{Request, Response};
use std::cmp::PartialEq;
use std::path::PathBuf;
use std::sync::Arc;
use uuid::Uuid;
#[cfg(feature = "local-overseer")]
mod local;
#[cfg(feature = "webhook-overseer")]
mod webhook;
#[cfg(feature = "zap-stream")]
mod zap_stream;
/// A copy of [ffmpeg_rs_raw::DemuxerInfo] without internal ptr
#[derive(PartialEq, Clone)]
pub struct IngressInfo {
pub bitrate: usize,
pub streams: Vec<IngressStream>,
}
/// A copy of [ffmpeg_rs_raw::StreamInfo] without ptr
#[derive(PartialEq, Clone)]
pub struct IngressStream {
pub index: usize,
pub stream_type: IngressStreamType,
pub codec: isize,
pub format: isize,
pub width: usize,
pub height: usize,
pub fps: f32,
pub sample_rate: usize,
pub language: String,
}
#[derive(PartialEq, Eq, Clone)]
pub enum IngressStreamType {
Video,
Audio,
Subtitle,
}
#[async_trait]
/// The control process that oversees streaming operations
pub trait Overseer: Send + Sync {
/// Add any API routes to the web server
async fn api(&self, req: Request<Incoming>) -> Result<Response<BoxBody<Bytes, anyhow::Error>>>;
/// Check all streams
async fn check_streams(&self) -> Result<()>;
/// Set up a new streaming pipeline
async fn start_stream(
&self,
connection: &ConnectionInfo,
stream_info: &IngressInfo,
) -> Result<PipelineConfig>;
/// A new segment (HLS etc.) was generated for a stream variant
///
/// This handler is usually used for distribution / billing
async fn on_segment(
&self,
pipeline_id: &Uuid,
variant_id: &Uuid,
index: u64,
duration: f32,
path: &PathBuf,
) -> Result<()>;
/// At a regular interval, pipeline will emit one of the frames for processing as a
/// thumbnail
async fn on_thumbnail(
&self,
pipeline_id: &Uuid,
width: usize,
height: usize,
path: &PathBuf,
) -> Result<()>;
/// Stream is finished
async fn on_end(&self, pipeline_id: &Uuid) -> Result<()>;
}
impl Settings {
pub async fn get_overseer(&self) -> Result<Arc<dyn Overseer>> {
match &self.overseer {
#[cfg(feature = "local-overseer")]
OverseerConfig::Local => Ok(Arc::new(LocalOverseer::new())),
#[cfg(feature = "webhook-overseer")]
OverseerConfig::Webhook { url } => Ok(Arc::new(WebhookOverseer::new(&url))),
#[cfg(feature = "zap-stream")]
OverseerConfig::ZapStream {
nsec: private_key,
database,
lnd,
relays,
blossom,
cost,
} => Ok(Arc::new(
ZapStreamOverseer::new(
&self.output_dir,
&self.public_url,
private_key,
database,
lnd,
relays,
blossom,
*cost,
)
.await?,
)),
_ => {
panic!("Unsupported overseer");
}
}
}
}
pub(crate) fn get_default_variants(info: &IngressInfo) -> Result<Vec<VariantStream>> {
let mut vars: Vec<VariantStream> = vec![];
if let Some(video_src) = info
.streams
.iter()
.find(|c| c.stream_type == IngressStreamType::Video)
{
vars.push(VariantStream::CopyVideo(VariantMapping {
id: Uuid::new_v4(),
src_index: video_src.index,
dst_index: 0,
group_id: 0,
}));
vars.push(VariantStream::Video(VideoVariant {
mapping: VariantMapping {
id: Uuid::new_v4(),
src_index: video_src.index,
dst_index: 1,
group_id: 1,
},
width: 1280,
height: 720,
fps: video_src.fps,
bitrate: 3_000_000,
codec: "libx264".to_string(),
profile: 100,
level: 51,
keyframe_interval: video_src.fps as u16 * 2,
pixel_format: AV_PIX_FMT_YUV420P as u32,
}));
}
if let Some(audio_src) = info
.streams
.iter()
.find(|c| c.stream_type == IngressStreamType::Audio)
{
vars.push(VariantStream::CopyAudio(VariantMapping {
id: Uuid::new_v4(),
src_index: audio_src.index,
dst_index: 2,
group_id: 0,
}));
vars.push(VariantStream::Audio(AudioVariant {
mapping: VariantMapping {
id: Uuid::new_v4(),
src_index: audio_src.index,
dst_index: 3,
group_id: 1,
},
bitrate: 192_000,
codec: "aac".to_string(),
channels: 2,
sample_rate: 48_000,
sample_fmt: "fltp".to_owned(),
}));
}
Ok(vars)
}