feat: overseer
This commit is contained in:
parent 04df558a2d
commit 60199cfa06
Cargo.lock (generated, 2139 lines changed)
File diff suppressed because it is too large.

Cargo.toml (19 lines changed)
@@ -8,36 +8,43 @@ name = "zap-stream-core"
 path = "src/bin/zap_stream_core.rs"

 [features]
-default = ["test-source"]
+default = ["test-pattern"]
 srt = ["dep:srt-tokio"]
-test-source = ["dep:resvg", "dep:usvg", "dep:tiny-skia", "dep:fontdue", "dep:ringbuf"]
+zap-stream = ["dep:nostr-sdk", "dep:sqlx", "dep:fedimint-tonic-lnd", "dep:chrono"]
+test-pattern = ["dep:resvg", "dep:usvg", "dep:tiny-skia", "dep:fontdue", "dep:ringbuf"]

 [dependencies]
 ffmpeg-rs-raw = { git = "https://git.v0l.io/Kieran/ffmpeg-rs-raw.git", rev = "0abe0c5229adeb64b013d1895c7eba3d917f05a4" }

 tokio = { version = "1.36.0", features = ["rt", "rt-multi-thread", "macros"] }
 anyhow = { version = "^1.0.91", features = ["backtrace"] }
 pretty_env_logger = "0.5.0"

 tokio-stream = "0.1.14"
 futures-util = "0.3.30"
 async-trait = "0.1.77"
 log = "0.4.21"
 uuid = { version = "1.8.0", features = ["v4", "serde"] }
 serde = { version = "1.0.197", features = ["derive"] }
-config = { version = "0.14.0", features = ["toml"] }
+config = { version = "0.14.0", features = ["yaml"] }
 url = "2.5.0"
 itertools = "0.13.0"
 rand = "0.8.5"
 clap = { version = "4.5.16", features = ["derive"] }
 warp = "0.3.7"
 libc = "0.2.162"
+m3u8-rs = "6.0.0"

+# test-pattern
 srt-tokio = { version = "0.4.3", optional = true }
 resvg = { version = "0.44.0", optional = true }
 usvg = { version = "0.44.0", optional = true }
 tiny-skia = { version = "0.11.4", optional = true }
 fontdue = { version = "0.9.2", optional = true }
 ringbuf = { version = "0.4.7", optional = true }
-m3u8-rs = "6.0.0"

+# zap-stream
+nostr-sdk = { version = "0.36.0", optional = true }
+sqlx = { version = "0.8.2", optional = true, features = ["runtime-tokio", "migrate", "mysql", "chrono"] }
+fedimint-tonic-lnd = { version = "0.2.0", optional = true, default-features = false, features = ["invoicesrpc", "versionrpc"] }
+chrono = { version = "0.4.38", optional = true, features = ["serde"] }

@@ -1,5 +1,11 @@
 # zap.stream core

-Pure rust zap.stream core streaming server
+Rust zap.stream core streaming server

 ![diagram](./zap.stream.svg)

+## Building
+
+By default, the `zap-stream` feature is not built, which means that a `webhook` service
+is required to control access to the service.

config.toml (12 lines changed)
@@ -1,12 +0,0 @@
-# List of endpoints to listen on
-# currently supporting srt/tcp
-endpoints = [
-  "srt://127.0.0.1:3333",
-  "tcp://127.0.0.1:3334"
-]
-
-# Output directory for egress
-output_dir = "./out"
-
-# Webhook system url (required)
-webhook_url = "http://localhost:5873/api/v1/stream-core"

config.yaml (new executable file, 42 lines)
@@ -0,0 +1,42 @@
# List of endpoints to listen on
# currently supporting srt/tcp/file/test-pattern
# All the endpoints must be valid URI's
endpoints:
  - "srt://127.0.0.1:3333"
  - "tcp://127.0.0.1:3334"
  - "test-pattern:"

# Output directory for recording / hls
output_dir: "./out"

# Overseer is the main control structure which controls access to the service
#
# ** ONLY 1 OVERSEER CAN BE CONFIGURED AT A TIME **
#
# Supported overseers:
#   static:
#     egress-types:
#       - hls
#       - recorder
#   webhook:
#     url: <endpoint-url>
#   zap-stream:
#     private-key: "nsec1234"
#     relays:
#       - "wss://relay.com"
#     lnd:
#       address: <ip:port>
#       cert: <path-to-tls-cert>
#       macaroon: <path-to-macaroon>
#     database: <database-connection-string>
#
overseer:
  zap-stream:
    nsec: "nsec1wya428srvpu96n4h78gualaj7wqw4ecgatgja8d5ytdqrxw56r2se440y4"
    relays:
      - "ws://localhost:7766"
    database: "mysql://root:root@localhost:3368/zap-stream?max_connections=2"
    lnd:
      address: "https://127.0.0.1:10001"
      cert: "/home/kieran/.polar/networks/1/volumes/lnd/alice/tls.cert"
      macaroon: "/home/kieran/.polar/networks/1/volumes/lnd/alice/data/chain/bitcoin/regtest/admin.macaroon"

@@ -1,12 +1,17 @@
 name: zap-stream-core
 services:
-  app:
-    build:
-      context: .
+  db:
+    image: mariadb
     environment:
-      - "RUST_LOG=info"
+      - "MARIADB_ROOT_PASSWORD=root"
+      - "MARIADB_DATABASE=zap-stream"
     ports:
-      - "3333:3333"
-      - "3334:3334"
-    volumes:
-      - "./config.toml:/app/config.toml:ro"
+      - "3368:3306"
+    #volumes:
+    #- "db:/var/lib/mysql"
+  relay:
+    image: scsibug/nostr-rs-relay
+    ports:
+      - "7766:8080"
+volumes:
+  db:

@@ -8,26 +8,20 @@ use url::Url;
 use zap_stream_core::egress::http::listen_out_dir;
 #[cfg(feature = "srt")]
 use zap_stream_core::ingress::srt;
-use zap_stream_core::ingress::{file, tcp, test};
+#[cfg(feature = "test-pattern")]
+use zap_stream_core::ingress::test;
+use zap_stream_core::ingress::{file, tcp};
 use zap_stream_core::settings::Settings;

 #[derive(Parser, Debug)]
-struct Args {
-    /// Add file input at startup
-    #[arg(long)]
-    file: Option<String>,
-
-    /// Add input test pattern at startup
-    #[cfg(feature = "test-source")]
-    #[arg(long)]
-    test_pattern: bool,
-}
+struct Args {}

 #[tokio::main]
 async fn main() -> anyhow::Result<()> {
     pretty_env_logger::init();

-    let args = Args::parse();
+    let _args = Args::parse();

     unsafe {
         //ffmpeg_sys_next::av_log_set_level(ffmpeg_sys_next::AV_LOG_DEBUG);
@@ -35,20 +29,32 @@ async fn main() -> anyhow::Result<()> {
     }

     let builder = Config::builder()
-        .add_source(config::File::with_name("config.toml"))
+        .add_source(config::File::with_name("config.yaml"))
         .add_source(config::Environment::with_prefix("APP"))
         .build()?;

     let settings: Settings = builder.try_deserialize()?;
+    let overseer = settings.get_overseer().await?;

     let mut listeners = vec![];
     for e in &settings.endpoints {
         let u: Url = e.parse()?;
-        let addr = format!("{}:{}", u.host_str().unwrap(), u.port().unwrap());
         match u.scheme() {
             #[cfg(feature = "srt")]
-            "srt" => listeners.push(tokio::spawn(srt::listen(addr, settings.clone()))),
-            "tcp" => listeners.push(tokio::spawn(tcp::listen(addr, settings.clone()))),
+            "srt" => listeners.push(tokio::spawn(srt::listen(
+                u.host().unwrap().to_string(),
+                overseer.clone(),
+            ))),
+            "tcp" => listeners.push(tokio::spawn(tcp::listen(
+                u.host().unwrap().to_string(),
+                overseer.clone(),
+            ))),
+            "file" => listeners.push(tokio::spawn(file::listen(
+                u.path().parse()?,
+                overseer.clone(),
+            ))),
+            #[cfg(feature = "test-pattern")]
+            "test-pattern" => listeners.push(tokio::spawn(test::listen(overseer.clone()))),
             _ => {
                 error!("Unknown endpoint config: {e}");
             }
@@ -56,17 +62,9 @@ async fn main() -> anyhow::Result<()> {
     }
     listeners.push(tokio::spawn(listen_out_dir(
         "0.0.0.0:8080".to_owned(),
-        settings.clone(),
+        settings.output_dir,
     )));

-    if let Some(p) = args.file {
-        listeners.push(tokio::spawn(file::listen(p.parse()?, settings.clone())));
-    }
-    #[cfg(feature = "test-source")]
-    if args.test_pattern {
-        listeners.push(tokio::spawn(test::listen(settings.clone())));
-    }
-
     for handle in listeners {
         if let Err(e) = handle.await {
             error!("{e}");

@@ -1,308 +1,24 @@
The old in-file HLS implementation (the HlsEgress struct with its own id, the HlsVariantStream
enum with group/index/id accessors and Display impl, HlsVariant, SegmentInfo, the segment
splitting and playlist writing logic, to_mapping, and the old Egress impl returning Result<()>)
is removed here; it now lives in src/mux/hls.rs as HlsMuxer, shown further below. The file is
reduced to a thin alias over the new muxer:

use anyhow::Result;
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVPacket;
use std::fmt::Display;
use uuid::Uuid;

use crate::egress::{Egress, EgressResult};
use crate::mux::HlsMuxer;

/// Alias the muxer directly
pub type HlsEgress = HlsMuxer;

impl Egress for HlsMuxer {
    unsafe fn process_pkt(
        &mut self,
        packet: *mut AVPacket,
        variant: &Uuid,
    ) -> Result<EgressResult> {
        if let Some(ns) = self.mux_packet(packet, variant)? {
            Ok(EgressResult::NewSegment(ns))
        } else {
            Ok(EgressResult::None)
        }
    }
}

@@ -3,15 +3,11 @@ use std::net::SocketAddr;
 use anyhow::Error;
 use warp::{cors, Filter};

-use crate::settings::Settings;
-
-pub async fn listen_out_dir(addr: String, settings: Settings) -> Result<(), Error> {
+pub async fn listen_out_dir(addr: String, dir: String) -> Result<(), Error> {
     let addr: SocketAddr = addr.parse()?;
     let cors = cors().allow_any_origin().allow_methods(vec!["GET"]);

-    let warp_out = warp::get()
-        .and(warp::fs::dir(settings.output_dir.clone()))
-        .with(cors);
+    let warp_out = warp::get().and(warp::fs::dir(dir)).with(cors);

     warp::serve(warp_out).run(addr).await;
     Ok(())

@@ -3,6 +3,7 @@ use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVPacket;
 use serde::{Deserialize, Serialize};
 use std::collections::HashSet;
 use std::fmt::{Display, Formatter};
+use std::path::PathBuf;
 use uuid::Uuid;

 pub mod hls;
@@ -31,5 +32,27 @@ impl Display for EgressConfig {
 }

 pub trait Egress {
-    unsafe fn process_pkt(&mut self, packet: *mut AVPacket, variant: &Uuid) -> Result<()>;
+    unsafe fn process_pkt(&mut self, packet: *mut AVPacket, variant: &Uuid)
+        -> Result<EgressResult>;
+}
+
+#[derive(Debug, Clone)]
+pub enum EgressResult {
+    /// Nothing to report
+    None,
+    /// A new segment was created
+    NewSegment(NewSegment),
+}
+
+/// Basic details of new segment created by a muxer
+#[derive(Debug, Clone)]
+pub struct NewSegment {
+    /// The id of the variant (video or audio)
+    pub variant: Uuid,
+    /// Segment index
+    pub idx: u64,
+    /// Duration in seconds
+    pub duration: f32,
+    /// Path on disk to the segment file
+    pub path: PathBuf,
 }

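For reference, a minimal sketch of an implementor against the widened trait; the NullEgress type
is hypothetical and only for illustration, it is not part of this commit:

use anyhow::Result;
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVPacket;
use uuid::Uuid;

use crate::egress::{Egress, EgressResult};

/// Hypothetical egress that drops every packet (illustration only).
pub struct NullEgress;

impl Egress for NullEgress {
    unsafe fn process_pkt(
        &mut self,
        _packet: *mut AVPacket,
        _variant: &Uuid,
    ) -> Result<EgressResult> {
        // No muxer behind this sink, so there is never a new segment to report.
        Ok(EgressResult::None)
    }
}
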
@@ -5,7 +5,7 @@ use std::fs;
 use std::path::PathBuf;
 use uuid::Uuid;

-use crate::egress::{Egress, EgressConfig};
+use crate::egress::{Egress, EgressConfig, EgressResult};

 pub struct RecorderEgress {
     id: Uuid,
@@ -39,11 +39,14 @@ impl RecorderEgress {
 }

 impl Egress for RecorderEgress {
-    unsafe fn process_pkt(&mut self, packet: *mut AVPacket, variant: &Uuid) -> Result<()> {
+    unsafe fn process_pkt(
+        &mut self,
+        packet: *mut AVPacket,
+        variant: &Uuid,
+    ) -> Result<EgressResult> {
         if self.config.variants.contains(variant) {
-            self.muxer.write_packet(packet)
-        } else {
-            Ok(())
+            self.muxer.write_packet(packet)?;
         }
+        Ok(EgressResult::None)
     }
 }

@@ -1,19 +1,21 @@
+use crate::ingress::{spawn_pipeline, ConnectionInfo};
+use crate::overseer::Overseer;
+use crate::settings::Settings;
 use anyhow::Result;
 use log::info;
 use std::path::PathBuf;
+use std::sync::Arc;

-use crate::ingress::{spawn_pipeline, ConnectionInfo};
-use crate::settings::Settings;
-
-pub async fn listen(path: PathBuf, settings: Settings) -> Result<()> {
+pub async fn listen(path: PathBuf, overseer: Arc<dyn Overseer>) -> Result<()> {
     info!("Sending file {}", path.to_str().unwrap());

     let info = ConnectionInfo {
         ip_addr: "127.0.0.1:6969".to_string(),
         endpoint: "file-input".to_owned(),
+        key: "".to_string(),
     };
     let file = std::fs::File::open(path)?;
-    spawn_pipeline(info, settings, Box::new(file));
+    spawn_pipeline(info, overseer.clone(), Box::new(file)).await;

     Ok(())
 }

@@ -1,16 +1,18 @@
+use crate::overseer::Overseer;
 use crate::pipeline::runner::PipelineRunner;
 use crate::settings::Settings;
-use crate::webhook::Webhook;
 use anyhow::Result;
 use log::{error, info};
 use serde::{Deserialize, Serialize};
 use std::io::Read;
+use std::sync::Arc;
+use tokio::runtime::Handle;

 pub mod file;
 #[cfg(feature = "srt")]
 pub mod srt;
 pub mod tcp;
-#[cfg(feature = "test-source")]
+#[cfg(feature = "test-pattern")]
 pub mod test;

 #[derive(Clone, Debug, Serialize, Deserialize)]
@@ -20,29 +22,31 @@ pub struct ConnectionInfo {

     /// IP address of the connection
     pub ip_addr: String,
+
+    /// Stream key
+    pub key: String,
 }

-pub(crate) fn spawn_pipeline(
+pub async fn spawn_pipeline(
     info: ConnectionInfo,
-    settings: Settings,
+    seer: Arc<dyn Overseer>,
     reader: Box<dyn Read + Send>,
 ) {
     info!("New client connected: {}", &info.ip_addr);
+    let handle = Handle::current();
+    let seer = seer.clone();
     std::thread::spawn(move || unsafe {
-        if let Err(e) = spawn_pipeline_inner(info, settings, reader) {
-            error!("{}", e);
-        }
+        match PipelineRunner::new(handle, seer, info, reader) {
+            Ok(mut pl) => loop {
+                if let Err(e) = pl.run() {
+                    error!("Pipeline run failed: {}", e);
+                    break;
+                }
+            },
+            Err(e) => {
+                error!("Failed to create PipelineRunner: {}", e);
+                return;
+            }
+        };
     });
 }
-
-unsafe fn spawn_pipeline_inner(
-    info: ConnectionInfo,
-    settings: Settings,
-    reader: Box<dyn Read + Send>,
-) -> Result<()> {
-    let webhook = Webhook::new(settings.clone());
-    let mut pl = PipelineRunner::new(info, webhook, reader)?;
-    loop {
-        pl.run()?
-    }
-}

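The Handle::current() captured before the thread is spawned presumably exists so the blocking
pipeline thread can re-enter the async runtime when it needs to call the Overseer; a minimal
sketch of that pattern under that assumption (the helper name notify_segment is hypothetical
and not part of this commit):

use std::path::PathBuf;
use std::sync::Arc;

use anyhow::Result;
use tokio::runtime::Handle;
use uuid::Uuid;

use crate::overseer::Overseer;

/// Hypothetical helper: block the pipeline thread on an async Overseer call
/// using the runtime handle captured in spawn_pipeline.
fn notify_segment(
    handle: &Handle,
    overseer: &Arc<dyn Overseer>,
    pipeline: &Uuid,
    variant: &Uuid,
    idx: u64,
    duration: f32,
    path: &PathBuf,
) -> Result<()> {
    handle.block_on(overseer.new_segment(pipeline, variant, idx, duration, path))
}
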
@@ -1,14 +1,15 @@
 use crate::ingress::{spawn_pipeline, ConnectionInfo};
+use crate::overseer::Overseer;
 use crate::pipeline::runner::PipelineRunner;
 use crate::settings::Settings;
-use crate::webhook::Webhook;
 use anyhow::Result;
 use futures_util::{StreamExt, TryStreamExt};
 use log::{error, info, warn};
 use srt_tokio::{SrtListener, SrtSocket};
+use std::sync::Arc;
 use tokio::sync::mpsc::unbounded_channel;

-pub async fn listen(listen_addr: String, settings: Settings) -> Result<()> {
+pub async fn listen(listen_addr: String, overseer: Arc<dyn Overseer>) -> Result<()> {
     let (_binding, mut packets) = SrtListener::builder().bind(listen_addr.clone()).await?;

     info!("SRT listening on: {}", listen_addr.clone());
@@ -18,7 +19,7 @@ pub async fn listen(listen_addr: String, settings: Settings) -> Result<()> {
             endpoint: listen_addr.clone(),
             ip_addr: socket.settings().remote.to_string(),
         };
-        spawn_pipeline(info, settings.clone(), Box::new(socket));
+        spawn_pipeline(info, overseer.clone(), Box::new(socket)).await;
     }
     Ok(())
 }

@@ -1,11 +1,13 @@
 use anyhow::Result;
 use log::info;
+use std::sync::Arc;
 use tokio::net::TcpListener;

 use crate::ingress::{spawn_pipeline, ConnectionInfo};
+use crate::overseer::Overseer;
 use crate::settings::Settings;

-pub async fn listen(addr: String, settings: Settings) -> Result<()> {
+pub async fn listen(addr: String, overseer: Arc<dyn Overseer>) -> Result<()> {
     let listener = TcpListener::bind(addr.clone()).await?;

     info!("TCP listening on: {}", addr.clone());
@@ -13,9 +15,10 @@ pub async fn listen(addr: String, settings: Settings) -> Result<()> {
         let info = ConnectionInfo {
             ip_addr: ip.to_string(),
             endpoint: addr.clone(),
+            key: "".to_string(),
         };
         let socket = socket.into_std()?;
-        spawn_pipeline(info, settings.clone(), Box::new(socket));
+        spawn_pipeline(info, overseer.clone(), Box::new(socket)).await;
     }
     Ok(())
 }

@@ -1,4 +1,5 @@
 use crate::ingress::{spawn_pipeline, ConnectionInfo};
+use crate::overseer::Overseer;
 use crate::settings::Settings;
 use anyhow::Result;
 use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVCodecID::AV_CODEC_ID_H264;
@@ -15,20 +16,20 @@ use log::info;
 use ringbuf::traits::{Observer, Split};
 use ringbuf::{HeapCons, HeapRb};
 use std::io::Read;
-use std::ops::Add;
+use std::sync::Arc;
 use std::time::{Duration, Instant};
 use tiny_skia::Pixmap;
-use warp::Buf;

-pub async fn listen(settings: Settings) -> Result<()> {
+pub async fn listen(overseer: Arc<dyn Overseer>) -> Result<()> {
     info!("Test pattern enabled");

     let info = ConnectionInfo {
-        endpoint: "test-source".to_string(),
-        ip_addr: "".to_string(),
+        endpoint: "test-pattern".to_string(),
+        ip_addr: "test-pattern".to_string(),
+        key: "test-pattern".to_string(),
     };
     let src = TestPatternSrc::new()?;
-    spawn_pipeline(info, settings, Box::new(src));
+    spawn_pipeline(info, overseer.clone(), Box::new(src)).await;
     Ok(())
 }

@@ -1,6 +1,7 @@
 pub mod egress;
 pub mod ingress;
+pub mod mux;
+pub mod overseer;
 pub mod pipeline;
 pub mod settings;
 pub mod variant;
-pub mod webhook;

src/mux/hls.rs (new file, 302 lines)
@@ -0,0 +1,302 @@
use crate::egress::{EgressResult, NewSegment};
use crate::variant::{StreamMapping, VariantStream};
use anyhow::{bail, Result};
use ffmpeg_rs_raw::ffmpeg_sys_the_third::{
    av_free, av_opt_set, av_q2d, av_write_frame, avio_flush, avio_open, AVPacket, AVIO_FLAG_WRITE,
    AV_PKT_FLAG_KEY,
};
use ffmpeg_rs_raw::{cstr, Encoder, Muxer};
use itertools::Itertools;
use log::{info, warn};
use m3u8_rs::MediaSegment;
use std::fmt::Display;
use std::fs::File;
use std::path::PathBuf;
use std::ptr;
use uuid::Uuid;

pub enum HlsVariantStream {
    Video {
        group: usize,
        index: usize,
        id: Uuid,
    },
    Audio {
        group: usize,
        index: usize,
        id: Uuid,
    },
    Subtitle {
        group: usize,
        index: usize,
        id: Uuid,
    },
}

impl HlsVariantStream {
    pub fn id(&self) -> &Uuid {
        match self {
            HlsVariantStream::Video { id, .. } => id,
            HlsVariantStream::Audio { id, .. } => id,
            HlsVariantStream::Subtitle { id, .. } => id,
        }
    }
}

impl Display for HlsVariantStream {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            HlsVariantStream::Video { index, .. } => write!(f, "v:{}", index),
            HlsVariantStream::Audio { index, .. } => write!(f, "a:{}", index),
            HlsVariantStream::Subtitle { index, .. } => write!(f, "s:{}", index),
        }
    }
}

pub struct HlsVariant {
    /// Name of this variant (720p)
    pub name: String,
    /// MPEG-TS muxer for this variant
    pub mux: Muxer,
    /// List of streams ids in this variant
    pub streams: Vec<HlsVariantStream>,
    /// Segment length in seconds
    pub segment_length: f32,
    /// Current segment index
    pub idx: u64,
    /// Output directory (base)
    pub out_dir: String,
    /// List of segments to be included in the playlist
    pub segments: Vec<SegmentInfo>,
}

struct SegmentInfo(u64, f32);

impl SegmentInfo {
    fn to_media_segment(&self) -> MediaSegment {
        MediaSegment {
            uri: HlsVariant::segment_name(self.0),
            duration: self.1,
            title: Some("no desc".to_string()),
            ..MediaSegment::default()
        }
    }

    fn filename(&self) -> String {
        HlsVariant::segment_name(self.0)
    }
}

impl HlsVariant {
    pub fn new<'a>(
        out_dir: &'a str,
        segment_length: f32,
        group: usize,
        encoded_vars: impl Iterator<Item = (&'a VariantStream, &'a Encoder)>,
    ) -> Result<Self> {
        let name = format!("stream_{}", group);
        let first_seg = Self::map_segment_path(out_dir, &name, 1);
        std::fs::create_dir_all(PathBuf::from(&first_seg).parent().unwrap())?;

        let mut mux = unsafe {
            Muxer::builder()
                .with_output_path(first_seg.as_str(), Some("mpegts"))?
                .build()?
        };
        let mut streams = Vec::new();
        for (var, enc) in encoded_vars {
            match var {
                VariantStream::Video(v) => unsafe {
                    let stream = mux.add_stream_encoder(enc)?;
                    streams.push(HlsVariantStream::Video {
                        group,
                        index: (*stream).index as usize,
                        id: v.id(),
                    })
                },
                VariantStream::Audio(a) => unsafe {
                    let stream = mux.add_stream_encoder(enc)?;
                    streams.push(HlsVariantStream::Audio {
                        group,
                        index: (*stream).index as usize,
                        id: a.id(),
                    })
                },
                VariantStream::Subtitle(s) => unsafe {
                    let stream = mux.add_stream_encoder(enc)?;
                    streams.push(HlsVariantStream::Subtitle {
                        group,
                        index: (*stream).index as usize,
                        id: s.id(),
                    })
                },
                _ => panic!("unsupported variant stream"),
            }
        }
        unsafe {
            mux.open(None)?;
        }
        Ok(Self {
            name: name.clone(),
            segment_length,
            mux,
            streams,
            idx: 1,
            segments: Vec::from([SegmentInfo(1, segment_length)]),
            out_dir: out_dir.to_string(),
        })
    }

    pub fn segment_name(idx: u64) -> String {
        format!("{}.ts", idx)
    }

    pub fn out_dir(&self) -> PathBuf {
        PathBuf::from(&self.out_dir).join(&self.name)
    }

    pub fn map_segment_path(out_dir: &str, name: &str, idx: u64) -> String {
        PathBuf::from(out_dir)
            .join(name)
            .join(Self::segment_name(idx))
            .to_string_lossy()
            .to_string()
    }

    /// Mux a packet created by the encoder for this variant
    pub unsafe fn mux_packet(&mut self, pkt: *mut AVPacket) -> Result<Option<NewSegment>> {
        // time of this packet in seconds
        let pkt_time = (*pkt).pts as f32 * av_q2d((*pkt).time_base) as f32;
        // what segment this pkt should be in (index)
        let pkt_seg = 1 + (pkt_time / self.segment_length).floor() as u64;

        let mut result = None;
        let can_split = (*pkt).flags & AV_PKT_FLAG_KEY == AV_PKT_FLAG_KEY;
        if pkt_seg != self.idx && can_split {
            result = Some(self.split_next_seg()?);
        }
        self.mux.write_packet(pkt)?;
        Ok(result)
    }

    unsafe fn split_next_seg(&mut self) -> Result<NewSegment> {
        self.idx += 1;

        // Manually reset muxer avio
        let ctx = self.mux.context();
        av_write_frame(ctx, ptr::null_mut());
        avio_flush((*ctx).pb);
        av_free((*ctx).url as *mut _);

        let next_seg_url = Self::map_segment_path(&*self.out_dir, &self.name, self.idx);
        (*ctx).url = cstr!(next_seg_url.as_str());

        let ret = avio_open(&mut (*ctx).pb, (*ctx).url, AVIO_FLAG_WRITE);
        if ret < 0 {
            bail!("Failed to re-init avio");
        }

        // tell muxer it needs to write headers again
        av_opt_set(
            (*ctx).priv_data,
            cstr!("events_flags"),
            cstr!("resend_headers"),
            0,
        );

        // TODO: calc actual duration
        let duration = 2.0;
        info!("Writing segment {}", &next_seg_url);
        if let Err(e) = self.add_segment(self.idx, duration) {
            warn!("Failed to update playlist: {}", e);
        }

        /// Get the video variant for this group
        /// since this could actually be audio which would not be useful for
        /// [Overseer] impl
        let video_var = self
            .streams
            .iter()
            .find(|a| matches!(*a, HlsVariantStream::Video { .. }))
            .map_or(Default::default(), |v| v.id().clone());

        Ok(NewSegment {
            variant: video_var,
            idx: self.idx - 1, // emit result of the previously completed segment,
            duration,
            path: PathBuf::from(next_seg_url),
        })
    }

    fn add_segment(&mut self, idx: u64, duration: f32) -> Result<()> {
        self.segments.push(SegmentInfo(idx, duration));

        const MAX_SEGMENTS: usize = 10;

        if self.segments.len() > MAX_SEGMENTS {
            let n_drain = self.segments.len() - MAX_SEGMENTS;
            let seg_dir = PathBuf::from(self.out_dir());
            for seg in self.segments.drain(..n_drain) {
                // delete file
                let seg_path = seg_dir.join(seg.filename());
                std::fs::remove_file(seg_path)?;
            }
        }
        self.write_playlist()
    }

    fn write_playlist(&mut self) -> Result<()> {
        let mut pl = m3u8_rs::MediaPlaylist::default();
        pl.target_duration = self.segment_length as u64;
        pl.segments = self.segments.iter().map(|s| s.to_media_segment()).collect();
        pl.version = Some(3);
        pl.media_sequence = self.segments.first().map(|s| s.0).unwrap_or(0);

        let mut f_out = File::create(self.out_dir().join("live.m3u8"))?;
        pl.write_to(&mut f_out)?;
        Ok(())
    }
}

pub struct HlsMuxer {
    variants: Vec<HlsVariant>,
}

impl HlsMuxer {
    pub fn new<'a>(
        out_dir: &str,
        segment_length: f32,
        encoders: impl Iterator<Item = (&'a VariantStream, &'a Encoder)>,
    ) -> Result<Self> {
        let id = Uuid::new_v4();
        let base = PathBuf::from(out_dir)
            .join(id.to_string())
            .to_string_lossy()
            .to_string();

        let mut vars = Vec::new();
        for (k, group) in &encoders
            .sorted_by(|a, b| a.0.group_id().cmp(&b.0.group_id()))
            .chunk_by(|a| a.0.group_id())
        {
            let var = HlsVariant::new(&base, segment_length, k, group)?;
            vars.push(var);
        }

        Ok(Self { variants: vars })
    }

    /// Mux an encoded packet from [Encoder]
    pub unsafe fn mux_packet(
        &mut self,
        pkt: *mut AVPacket,
        variant: &Uuid,
    ) -> Result<Option<NewSegment>> {
        for var in self.variants.iter_mut() {
            if var.streams.iter().any(|s| s.id() == variant) {
                return var.mux_packet(pkt);
            }
        }
        bail!("Packet doesnt match any variants");
    }
}

src/mux/mod.rs (new file, 2 lines)
@@ -0,0 +1,2 @@
mod hls;
pub use hls::*;

src/overseer/mod.rs (new file, 209 lines)
@@ -0,0 +1,209 @@
use crate::egress::EgressConfig;
use crate::ingress::ConnectionInfo;
use crate::overseer::webhook::WebhookOverseer;
#[cfg(feature = "zap-stream")]
use crate::overseer::zap_stream::ZapStreamOverseer;
use crate::pipeline::{EgressType, PipelineConfig};
use crate::settings::{OverseerConfig, Settings};
use crate::variant::audio::AudioVariant;
use crate::variant::mapping::VariantMapping;
use crate::variant::video::VideoVariant;
use crate::variant::{StreamMapping, VariantStream};
use anyhow::Result;
use async_trait::async_trait;
use chrono::Utc;
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVPixelFormat::AV_PIX_FMT_YUV420P;
use std::cmp::PartialEq;
use std::path::PathBuf;
use std::sync::Arc;
use uuid::Uuid;

mod webhook;
#[cfg(feature = "zap-stream")]
mod zap_stream;

/// A copy of [ffmpeg_rs_raw::DemuxerInfo] without internal ptr
#[derive(PartialEq, Clone)]
pub struct IngressInfo {
    pub bitrate: usize,
    pub streams: Vec<IngressStream>,
}

/// A copy of [ffmpeg_rs_raw::StreamInfo] without ptr
#[derive(PartialEq, Clone)]
pub struct IngressStream {
    pub index: usize,
    pub stream_type: IngressStreamType,
    pub codec: isize,
    pub format: isize,
    pub width: usize,
    pub height: usize,
    pub fps: f32,
    pub sample_rate: usize,
    pub language: String,
}

#[derive(PartialEq, Eq, Clone)]
pub enum IngressStreamType {
    Video,
    Audio,
    Subtitle,
}

#[async_trait]
/// The control process that oversees streaming operations
pub trait Overseer: Send + Sync {
    /// Set up a new streaming pipeline
    async fn configure_pipeline(
        &self,
        connection: &ConnectionInfo,
        stream_info: &IngressInfo,
    ) -> Result<PipelineConfig>;

    /// A new segment (HLS etc.) was generated for a stream variant
    ///
    /// This handler is usually used for distribution / billing
    async fn new_segment(
        &self,
        pipeline: &Uuid,
        variant_id: &Uuid,
        index: u64,
        duration: f32,
        path: &PathBuf,
    ) -> Result<()>;
}

impl Settings {
    pub async fn get_overseer(&self) -> Result<Arc<dyn Overseer>> {
        match &self.overseer {
            OverseerConfig::Static { egress_types } => Ok(Arc::new(StaticOverseer::new(
                &self.output_dir,
                egress_types,
            ))),
            OverseerConfig::Webhook { url } => Ok(Arc::new(WebhookOverseer::new(&url))),
            OverseerConfig::ZapStream {
                nsec: private_key,
                database,
                lnd,
                relays,
            } => {
                #[cfg(not(feature = "zap-stream"))]
                panic!("zap.stream overseer is not enabled");

                #[cfg(feature = "zap-stream")]
                Ok(Arc::new(
                    ZapStreamOverseer::new(private_key, database, lnd, relays).await?,
                ))
            }
        }
    }
}

pub(crate) fn get_default_variants(info: &IngressInfo) -> Result<Vec<VariantStream>> {
    let mut vars: Vec<VariantStream> = vec![];
    if let Some(video_src) = info
        .streams
        .iter()
        .find(|c| c.stream_type == IngressStreamType::Video)
    {
        vars.push(VariantStream::CopyVideo(VariantMapping {
            id: Uuid::new_v4(),
            src_index: video_src.index,
            dst_index: 0,
            group_id: 0,
        }));
        vars.push(VariantStream::Video(VideoVariant {
            mapping: VariantMapping {
                id: Uuid::new_v4(),
                src_index: video_src.index,
                dst_index: 1,
                group_id: 1,
            },
            width: 1280,
            height: 720,
            fps: video_src.fps,
            bitrate: 3_000_000,
            codec: 27,
            profile: 100,
            level: 51,
            keyframe_interval: video_src.fps as u16 * 2,
            pixel_format: AV_PIX_FMT_YUV420P as u32,
        }));
    }

    if let Some(audio_src) = info
        .streams
        .iter()
        .find(|c| c.stream_type == IngressStreamType::Audio)
    {
        vars.push(VariantStream::CopyAudio(VariantMapping {
            id: Uuid::new_v4(),
            src_index: audio_src.index,
            dst_index: 2,
            group_id: 0,
        }));
        vars.push(VariantStream::Audio(AudioVariant {
            mapping: VariantMapping {
                id: Uuid::new_v4(),
                src_index: audio_src.index,
                dst_index: 3,
                group_id: 1,
            },
            bitrate: 192_000,
            codec: 86018,
            channels: 2,
            sample_rate: 48_000,
            sample_fmt: "fltp".to_owned(),
        }));
    }

    Ok(vars)
}

/// Simple static file output without any access controls
struct StaticOverseer {}

impl StaticOverseer {
    fn new(out_dir: &str, egress_types: &Vec<String>) -> Self {
        Self {}
    }
}

#[async_trait]
impl Overseer for StaticOverseer {
    async fn configure_pipeline(
        &self,
        connection: &ConnectionInfo,
        stream_info: &IngressInfo,
    ) -> Result<PipelineConfig> {
        let vars = get_default_variants(stream_info)?;
        let var_ids = vars.iter().map(|v| v.id()).collect();
        Ok(PipelineConfig {
            id: Utc::now().timestamp() as u64,
            variants: vars,
            egress: vec![
                /*EgressType::Recorder(EgressConfig {
                    name: "REC".to_owned(),
                    out_dir: self.config.output_dir.clone(),
                    variants: var_ids,
                }),*/
                EgressType::HLS(EgressConfig {
                    name: "HLS".to_owned(),
                    // TODO: this is temp, webhook should not need full config
                    out_dir: "out".to_string(),
                    variants: var_ids,
                }),
            ],
        })
    }

    async fn new_segment(
        &self,
        pipeline: &Uuid,
        variant_id: &Uuid,
        index: u64,
        duration: f32,
        path: &PathBuf,
    ) -> Result<()> {
        todo!()
    }
}

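Any other control plane can be plugged in by implementing the same trait; a minimal sketch under
that assumption (the LoggingOverseer type is hypothetical, accepts every stream with the default
variants and only logs segments; it is not part of this commit):

use std::path::PathBuf;

use anyhow::Result;
use async_trait::async_trait;
use log::info;
use uuid::Uuid;

use crate::ingress::ConnectionInfo;
use crate::overseer::{get_default_variants, IngressInfo, Overseer};
use crate::pipeline::PipelineConfig;

/// Hypothetical overseer with no access control: build default variants, log segments.
struct LoggingOverseer;

#[async_trait]
impl Overseer for LoggingOverseer {
    async fn configure_pipeline(
        &self,
        connection: &ConnectionInfo,
        stream_info: &IngressInfo,
    ) -> Result<PipelineConfig> {
        info!("configuring pipeline for {}", connection.ip_addr);
        Ok(PipelineConfig {
            id: 0,
            variants: get_default_variants(stream_info)?,
            egress: vec![],
        })
    }

    async fn new_segment(
        &self,
        pipeline: &Uuid,
        variant_id: &Uuid,
        index: u64,
        duration: f32,
        path: &PathBuf,
    ) -> Result<()> {
        info!(
            "pipeline {} variant {} wrote segment {} ({:.1}s) at {}",
            pipeline, variant_id, index, duration, path.display()
        );
        Ok(())
    }
}
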
src/overseer/webhook.rs (new file, 42 lines)
@@ -0,0 +1,42 @@
use crate::ingress::ConnectionInfo;
use crate::overseer::{IngressInfo, Overseer};
use crate::pipeline::PipelineConfig;
use anyhow::Result;
use async_trait::async_trait;
use std::path::PathBuf;
use uuid::Uuid;

#[derive(Clone)]
pub struct WebhookOverseer {
    url: String,
}

impl WebhookOverseer {
    pub fn new(url: &str) -> Self {
        Self {
            url: url.to_string(),
        }
    }
}

#[async_trait]
impl Overseer for WebhookOverseer {
    async fn configure_pipeline(
        &self,
        connection: &ConnectionInfo,
        stream_info: &IngressInfo,
    ) -> Result<PipelineConfig> {
        todo!()
    }

    async fn new_segment(
        &self,
        pipeline: &Uuid,
        variant_id: &Uuid,
        index: u64,
        duration: f32,
        path: &PathBuf,
    ) -> Result<()> {
        todo!()
    }
}

114
src/overseer/zap_stream/db.rs
Normal file
114
src/overseer/zap_stream/db.rs
Normal file
@ -0,0 +1,114 @@
use anyhow::Result;
use chrono::{DateTime, Utc};
use nostr_sdk::{Client, Event, EventBuilder, Kind, Tag};
use sqlx::{FromRow, Type};
use std::fmt::{Display, Formatter};

#[derive(Debug, Clone, FromRow)]
pub struct User {
    pub id: u64,
    pub pubkey: [u8; 32],
    pub created: DateTime<Utc>,
    pub balance: i64,
    pub tos_accepted: DateTime<Utc>,
    pub stream_key: String,
    pub is_admin: bool,
    pub is_blocked: bool,
}

#[derive(Default, Debug, Clone, Type)]
#[repr(u8)]
pub enum UserStreamState {
    #[default]
    Unknown = 0,
    Planned = 1,
    Live = 2,
    Ended = 3,
}

impl Display for UserStreamState {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        match self {
            UserStreamState::Unknown => write!(f, "unknown"),
            UserStreamState::Planned => write!(f, "planned"),
            UserStreamState::Live => write!(f, "live"),
            UserStreamState::Ended => write!(f, "ended"),
        }
    }
}

#[derive(Debug, Clone, Default, FromRow)]
pub struct UserStream {
    pub id: u64,
    pub user_id: u64,
    pub starts: DateTime<Utc>,
    pub ends: Option<DateTime<Utc>>,
    pub state: UserStreamState,
    pub title: Option<String>,
    pub summary: Option<String>,
    pub image: Option<String>,
    pub thumb: Option<String>,
    pub tags: Option<String>,
    pub content_warning: Option<String>,
    pub goal: Option<String>,
    pub pinned: Option<String>,
    pub cost: u64,
    pub duration: f32,
    pub fee: Option<u32>,
    pub event: Option<String>,
}

impl UserStream {
    pub(crate) fn to_event_builder(&self) -> Result<EventBuilder> {
        let mut tags = vec![
            Tag::parse(&["d".to_string(), self.id.to_string()])?,
            Tag::parse(&["status".to_string(), self.state.to_string()])?,
            Tag::parse(&["starts".to_string(), self.starts.timestamp().to_string()])?,
        ];
        if let Some(ref ends) = self.ends {
            tags.push(Tag::parse(&[
                "ends".to_string(),
                ends.timestamp().to_string(),
            ])?);
        }
        if let Some(ref title) = self.title {
            tags.push(Tag::parse(&["title".to_string(), title.to_string()])?);
        }
        if let Some(ref summary) = self.summary {
            tags.push(Tag::parse(&["summary".to_string(), summary.to_string()])?);
        }
        if let Some(ref image) = self.image {
            tags.push(Tag::parse(&["image".to_string(), image.to_string()])?);
        }
        if let Some(ref thumb) = self.thumb {
            tags.push(Tag::parse(&["thumb".to_string(), thumb.to_string()])?);
        }
        if let Some(ref content_warning) = self.content_warning {
            tags.push(Tag::parse(&[
                "content_warning".to_string(),
                content_warning.to_string(),
            ])?);
        }
        if let Some(ref goal) = self.goal {
            tags.push(Tag::parse(&["goal".to_string(), goal.to_string()])?);
        }
        if let Some(ref pinned) = self.pinned {
            tags.push(Tag::parse(&["pinned".to_string(), pinned.to_string()])?);
        }
        if let Some(ref tags_csv) = self.tags {
            for tag in tags_csv.split(',') {
                tags.push(Tag::parse(&["t".to_string(), tag.to_string()])?);
            }
        }
        Ok(EventBuilder::new(Kind::from(30_313), "", tags))
    }

    pub(super) async fn publish_stream_event(&self, client: &Client) -> Result<Event> {
        let ev = self
            .to_event_builder()?
            .sign(&client.signer().await?)
            .await?;
        client.send_event(ev.clone()).await?;
        Ok(ev)
    }
}
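A quick illustration of how the overseer side might use the types above to close out a stream; this is a hedged sketch, not part of the commit. It only calls `publish_stream_event` and the fields defined in this file; the `client` argument is assumed to be the connected nostr_sdk::Client held by the overseer.

// Sketch only, not from this commit.
use nostr_sdk::JsonUtil;

async fn end_stream_example(
    mut stream: UserStream,
    client: &nostr_sdk::Client,
) -> anyhow::Result<()> {
    stream.state = UserStreamState::Ended;
    stream.ends = Some(chrono::Utc::now());
    // Kind 30313 falls in the addressable range, so re-publishing with the
    // same "d" tag (the stream id) replaces the earlier "live" event on relays.
    let ev = stream.publish_stream_event(client).await?;
    stream.event = Some(ev.as_json());
    Ok(())
}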
201 src/overseer/zap_stream/mod.rs Normal file
@ -0,0 +1,201 @@
use crate::egress::hls::HlsEgress;
use crate::egress::EgressConfig;
use crate::ingress::ConnectionInfo;
use crate::overseer::zap_stream::db::{UserStream, UserStreamState};
use crate::overseer::{get_default_variants, IngressInfo, Overseer};
use crate::pipeline::{EgressType, PipelineConfig};
use crate::settings::LndSettings;
use crate::variant::StreamMapping;
use anyhow::{anyhow, Result};
use async_trait::async_trait;
use chrono::{DateTime, Utc};
use fedimint_tonic_lnd::verrpc::VersionRequest;
use log::info;
use nostr_sdk::bitcoin::PrivateKey;
use nostr_sdk::{JsonUtil, Keys};
use sqlx::{MySqlPool, Row};
use std::env::temp_dir;
use std::fs::create_dir_all;
use std::path::PathBuf;
use std::str::FromStr;
use uuid::Uuid;

mod db;

/// zap.stream NIP-53 overseer
#[derive(Clone)]
pub struct ZapStreamOverseer {
    db: MySqlPool,
    lnd: fedimint_tonic_lnd::Client,
    client: nostr_sdk::Client,
    keys: Keys,
}

impl ZapStreamOverseer {
    pub async fn new(
        private_key: &str,
        db: &str,
        lnd: &LndSettings,
        relays: &Vec<String>,
    ) -> Result<Self> {
        let db = MySqlPool::connect(db).await?;

        info!("Connected to database, running migrations");
        // automatically run migrations
        sqlx::migrate!().run(&db).await?;

        let mut lnd = fedimint_tonic_lnd::connect(
            lnd.address.clone(),
            PathBuf::from(&lnd.cert),
            PathBuf::from(&lnd.macaroon),
        )
        .await?;

        let version = lnd
            .versioner()
            .get_version(VersionRequest::default())
            .await?;
        info!("LND connected: v{}", version.into_inner().version);

        let keys = Keys::from_str(private_key)?;
        let client = nostr_sdk::ClientBuilder::new().signer(keys.clone()).build();
        for r in relays {
            client.add_relay(r).await?;
        }
        client.connect().await;

        Ok(Self {
            db,
            lnd,
            client,
            keys,
        })
    }

    /// Find user by stream key, typical first lookup from ingress
    async fn find_user_stream_key(&self, key: &str) -> Result<Option<u64>> {
        #[cfg(feature = "test-pattern")]
        if key == "test-pattern" {
            // use the 00 pubkey for test sources
            return Ok(Some(self.upsert_user(&[0; 32]).await?));
        }

        Ok(sqlx::query("select id from user where stream_key = ?")
            .bind(key)
            .fetch_optional(&self.db)
            .await?
            .map(|r| r.try_get(0).unwrap()))
    }

    async fn upsert_user(&self, pubkey: &[u8; 32]) -> Result<u64> {
        let res = sqlx::query("insert ignore into user(pubkey) values(?) returning id")
            .bind(pubkey.as_slice())
            .fetch_optional(&self.db)
            .await?;
        match res {
            None => sqlx::query("select id from user where pubkey = ?")
                .bind(pubkey.as_slice())
                .fetch_one(&self.db)
                .await?
                .try_get(0)
                .map_err(anyhow::Error::new),
            Some(res) => res.try_get(0).map_err(anyhow::Error::new),
        }
    }

    async fn create_stream(&self, user_stream: &UserStream) -> Result<u64> {
        sqlx::query(
            "insert into user_stream (user_id, state, starts) values (?, ?, ?) returning id",
        )
        .bind(&user_stream.user_id)
        .bind(&user_stream.state)
        .bind(&user_stream.starts)
        .fetch_one(&self.db)
        .await?
        .try_get(0)
        .map_err(anyhow::Error::new)
    }

    async fn update_stream(&self, user_stream: &UserStream) -> Result<()> {
        sqlx::query(
            "update user_stream set state = ?, starts = ?, ends = ?, title = ?, summary = ?, image = ?, thumb = ?, tags = ?, content_warning = ?, goal = ?, pinned = ?, fee = ?, event = ? where id = ?",
        )
        .bind(&user_stream.state)
        .bind(&user_stream.starts)
        .bind(&user_stream.ends)
        .bind(&user_stream.title)
        .bind(&user_stream.summary)
        .bind(&user_stream.image)
        .bind(&user_stream.thumb)
        .bind(&user_stream.tags)
        .bind(&user_stream.content_warning)
        .bind(&user_stream.goal)
        .bind(&user_stream.pinned)
        .bind(&user_stream.fee)
        .bind(&user_stream.event)
        .bind(&user_stream.id)
        .execute(&self.db)
        .await
        .map_err(anyhow::Error::new)?;
        Ok(())
    }
}

#[async_trait]
impl Overseer for ZapStreamOverseer {
    async fn configure_pipeline(
        &self,
        connection: &ConnectionInfo,
        stream_info: &IngressInfo,
    ) -> Result<PipelineConfig> {
        let uid = self
            .find_user_stream_key(&connection.key)
            .await?
            .ok_or_else(|| anyhow::anyhow!("User not found"))?;

        let out_dir = temp_dir().join("zap-stream");
        create_dir_all(&out_dir)?;

        let variants = get_default_variants(&stream_info)?;

        let mut egress = vec![];
        egress.push(EgressType::HLS(EgressConfig {
            name: "nip94-hls".to_string(),
            out_dir: out_dir.to_string_lossy().to_string(),
            variants: variants.iter().map(|v| v.id()).collect(),
        }));

        // insert new stream record
        let mut new_stream = UserStream {
            id: 0,
            user_id: uid,
            starts: Utc::now(),
            state: UserStreamState::Live,
            ..Default::default()
        };
        let stream_id = self.create_stream(&new_stream).await?;
        new_stream.id = stream_id;

        let stream_event = new_stream.publish_stream_event(&self.client).await?;
        new_stream.event = Some(stream_event.as_json());
        self.update_stream(&new_stream).await?;

        Ok(PipelineConfig {
            id: stream_id,
            variants,
            egress,
        })
    }

    async fn new_segment(
        &self,
        pipeline: &Uuid,
        variant_id: &Uuid,
        index: u64,
        duration: f32,
        path: &PathBuf,
    ) -> Result<()> {
        todo!()
    }
}
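For orientation, a hedged sketch of how this overseer could be wired up at startup; it is not part of the commit. Only `ZapStreamOverseer::new` and `LndSettings` come from the diff, and the nsec, database URL, LND paths and relay list are placeholder values.

// Sketch only, not from this commit.
use std::sync::Arc;

async fn start_overseer_example() -> anyhow::Result<Arc<ZapStreamOverseer>> {
    let lnd = crate::settings::LndSettings {
        address: "https://127.0.0.1:10009".to_string(),
        cert: "/path/to/tls.cert".to_string(),
        macaroon: "/path/to/admin.macaroon".to_string(),
    };
    let overseer = ZapStreamOverseer::new(
        "nsec1...",                               // placeholder key
        "mysql://user:pass@localhost/zap_stream", // placeholder DB URL
        &lnd,
        &vec!["wss://relay.damus.io".to_string()],
    )
    .await?;
    Ok(Arc::new(overseer))
}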
@ -45,7 +45,7 @@ impl Display for EgressType
 
 #[derive(Clone, Debug, Serialize, Deserialize, Default)]
 pub struct PipelineConfig {
-    pub id: Uuid,
+    pub id: u64,
     /// Transcoded/Copied stream config
     pub variants: Vec<VariantStream>,
     /// Output muxers
@ -2,34 +2,41 @@ use std::collections::{HashMap, HashSet};
 use std::io::Read;
 use std::mem::transmute;
 use std::ops::Sub;
+use std::sync::Arc;
 use std::time::Instant;
 
 use crate::egress::hls::HlsEgress;
 use crate::egress::recorder::RecorderEgress;
 use crate::egress::Egress;
 use crate::ingress::ConnectionInfo;
+use crate::overseer::{IngressInfo, IngressStream, IngressStreamType, Overseer};
 use crate::pipeline::{EgressType, PipelineConfig};
 use crate::variant::{StreamMapping, VariantStream};
-use crate::webhook::Webhook;
-use anyhow::Result;
+use anyhow::{bail, Result};
 use ffmpeg_rs_raw::ffmpeg_sys_the_third::{
     av_frame_free, av_get_sample_fmt, av_packet_free, av_rescale_q,
 };
 use ffmpeg_rs_raw::{
-    cstr, get_frame_from_hw, Decoder, Demuxer, DemuxerInfo, Encoder, Resample, Scaler,
+    cstr, get_frame_from_hw, Decoder, Demuxer, DemuxerInfo, Encoder, Resample, Scaler, StreamType,
 };
 use itertools::Itertools;
 use log::{info, warn};
+use tokio::runtime::Handle;
 use uuid::Uuid;
 
 /// Pipeline runner is the main entry process for stream transcoding
+///
 /// Each client connection spawns a new [PipelineRunner] and it should be run in its own thread
-/// using [ingress::spawn_pipeline]
+/// using [crate::ingress::spawn_pipeline]
 pub struct PipelineRunner {
+    /// Async runtime handle
+    handle: Handle,
+
+    /// Input stream connection info
     connection: ConnectionInfo,
 
     /// Configuration for this pipeline (variants, egress config etc.)
-    config: PipelineConfig,
+    config: Option<PipelineConfig>,
 
     /// Singleton demuxer for this input
     demuxer: Demuxer,
@ -54,18 +61,24 @@ pub struct PipelineRunner {
 
     fps_counter_start: Instant,
     frame_ctr: u64,
-    webhook: Webhook,
 
-    info: Option<DemuxerInfo>,
+    /// Info about the input stream
+    info: Option<IngressInfo>,
+
+    /// Overseer managing this pipeline
+    overseer: Arc<dyn Overseer>,
 }
 
 impl PipelineRunner {
     pub fn new(
+        handle: Handle,
+        overseer: Arc<dyn Overseer>,
         connection: ConnectionInfo,
-        webhook: Webhook,
         recv: Box<dyn Read + Send>,
     ) -> Result<Self> {
         Ok(Self {
+            handle,
+            overseer,
            connection,
            config: Default::default(),
            demuxer: Demuxer::new_custom_io(recv, None)?,
@ -77,7 +90,6 @@ impl PipelineRunner {
             fps_counter_start: Instant::now(),
             egress: Vec::new(),
             frame_ctr: 0,
-            webhook,
             info: None,
         })
     }
@ -86,6 +98,12 @@ impl PipelineRunner {
     pub unsafe fn run(&mut self) -> Result<()> {
         self.setup()?;
 
+        let config = if let Some(ref config) = self.config {
+            config
+        } else {
+            bail!("Pipeline not configured, cannot run")
+        };
+
         // run transcoder pipeline
         let (mut pkt, stream) = self.demuxer.get_packet()?;
         let src_index = (*stream).index;
@ -106,8 +124,7 @@ impl PipelineRunner {
         (*frame).time_base = (*stream).time_base;
 
         // Get the variants which want this pkt
-        let pkt_vars = self
-            .config
+        let pkt_vars = config
             .variants
             .iter()
             .filter(|v| v.src_index() == src_index as usize);
@ -188,14 +205,49 @@ impl PipelineRunner {
         }
 
         let info = self.demuxer.probe_input()?;
+
+        // convert to internal type
+        let i_info = IngressInfo {
+            bitrate: info.bitrate,
+            streams: info
+                .streams
+                .iter()
+                .map(|s| IngressStream {
+                    index: s.index,
+                    stream_type: match s.stream_type {
+                        StreamType::Video => IngressStreamType::Video,
+                        StreamType::Audio => IngressStreamType::Audio,
+                        StreamType::Subtitle => IngressStreamType::Subtitle,
+                    },
+                    codec: s.codec,
+                    format: s.format,
+                    width: s.width,
+                    height: s.height,
+                    fps: s.fps,
+                    sample_rate: s.sample_rate,
+                    language: s.language.clone(),
+                })
+                .collect(),
+        };
+
+        let cfg = self.handle.block_on(async {
+            self.overseer
+                .configure_pipeline(&self.connection, &i_info)
+                .await
+        })?;
+        self.config = Some(cfg);
+        self.info = Some(i_info);
+
         self.setup_pipeline(&info)?;
-        self.info = Some(info);
         Ok(())
     }
 
-    unsafe fn setup_pipeline(&mut self, info: &DemuxerInfo) -> Result<()> {
-        let cfg = self.webhook.start(info);
-        self.config = cfg.clone();
+    unsafe fn setup_pipeline(&mut self, demux_info: &DemuxerInfo) -> Result<()> {
+        let cfg = if let Some(ref cfg) = self.config {
+            cfg
+        } else {
+            bail!("Cannot setup pipeline without config");
+        };
 
         // src stream indexes
         let inputs: HashSet<usize> = cfg.variants.iter().map(|e| e.src_index()).collect();
@ -205,7 +257,11 @@ impl PipelineRunner {
 
         // setup decoders
         for input_idx in inputs {
-            let stream = info.streams.iter().find(|f| f.index == input_idx).unwrap();
+            let stream = demux_info
+                .streams
+                .iter()
+                .find(|f| f.index == input_idx)
+                .unwrap();
             self.decoder.setup_decoder(stream, None)?;
         }
 
@ -219,7 +275,7 @@ impl PipelineRunner {
                 VariantStream::Audio(a) => {
                     let enc = a.try_into()?;
                     let rs = Resample::new(
-                        av_get_sample_fmt(cstr!(a.sample_fmt.as_bytes())),
+                        av_get_sample_fmt(cstr!(a.sample_fmt.as_str())),
                         a.sample_rate as _,
                         a.channels as _,
                     );
@ -230,10 +286,10 @@ impl PipelineRunner {
             }
         }
 
-        // Setup copy streams
+        // TODO: Setup copy streams
 
         // Setup egress
-        for e in cfg.egress {
+        for e in &cfg.egress {
             match e {
                 EgressType::HLS(ref c) => {
                     let encoders = self.encoders.iter().filter_map(|(k, v)| {
@ -252,7 +308,7 @@ impl PipelineRunner {
                     let encoders = self
                         .encoders
                         .iter()
-                        .filter(|(k, v)| c.variants.contains(k))
+                        .filter(|(k, _v)| c.variants.contains(k))
                         .map(|(_, v)| v);
                     let rec = RecorderEgress::new(c.clone(), encoders)?;
                     self.egress.push(Box::new(rec));
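The runner changes above lean on one pattern worth calling out: the pipeline thread is plain blocking code, so it now keeps a tokio runtime Handle and calls Handle::block_on to reach the async Overseer. A hedged, self-contained sketch of that pattern follows; the names in it are illustrative and not from the crate.

// Sketch only, not from this commit.
use tokio::runtime::Handle;

async fn do_async_work() -> u32 {
    42
}

fn blocking_worker(handle: Handle) -> u32 {
    // Fine here because this runs on a plain std thread; tokio panics if
    // block_on is used on one of the runtime's own worker threads.
    handle.block_on(async { do_async_work().await })
}

#[tokio::main]
async fn main() {
    let handle = Handle::current(); // capture a handle inside the runtime
    let join = std::thread::spawn(move || blocking_worker(handle));
    println!("got {}", join.join().unwrap());
}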
@ -1,3 +1,4 @@
+use crate::pipeline::EgressType;
 use serde::{Deserialize, Serialize};
 
 #[derive(Debug, Clone, Serialize, Deserialize)]
@ -9,9 +10,38 @@ pub struct Settings {
     /// - rtmp://localhost:1935
     pub endpoints: Vec<String>,
 
-    /// Output directory for egress
+    /// Where to store output (static files)
     pub output_dir: String,
 
-    /// Webhook configuration URL
-    pub webhook_url: String,
+    /// Overseer service see [crate::overseer::Overseer] for more info
+    pub overseer: OverseerConfig,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+#[serde(rename_all = "kebab-case")]
+pub enum OverseerConfig {
+    /// Static output
+    Static {
+        /// Types of output
+        egress_types: Vec<String>,
+    },
+    /// Control system via external API
+    Webhook {
+        /// Webhook service URL
+        url: String,
+    },
+    /// NIP-53 service (i.e. zap.stream backend)
+    ZapStream {
+        database: String,
+        lnd: LndSettings,
+        relays: Vec<String>,
+        nsec: String,
+    },
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct LndSettings {
+    pub address: String,
+    pub cert: String,
+    pub macaroon: String,
 }
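To show how the new Settings shape could be consumed, here is a hedged sketch that parses a YAML snippet with the `config` crate (yaml feature). It assumes the default externally-tagged serde representation for OverseerConfig and that Settings has no other required fields; the endpoint, paths, database URL and nsec are placeholders.

// Sketch only, not from this commit.
fn load_example_settings() -> anyhow::Result<crate::settings::Settings> {
    let yaml = r#"
endpoints:
  - "srt://127.0.0.1:3333"
output_dir: "./out"
overseer:
  zap-stream:
    database: "mysql://user:pass@localhost/zap_stream"
    lnd:
      address: "https://127.0.0.1:10009"
      cert: "/path/to/tls.cert"
      macaroon: "/path/to/admin.macaroon"
    relays:
      - "wss://relay.damus.io"
    nsec: "nsec1..."
"#;
    // Parse the YAML string into the Settings struct defined above.
    let settings: crate::settings::Settings = config::Config::builder()
        .add_source(config::File::from_str(yaml, config::FileFormat::Yaml))
        .build()?
        .try_deserialize()?;
    Ok(settings)
}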
@ -1,99 +0,0 @@
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVPixelFormat::AV_PIX_FMT_YUV420P;
use ffmpeg_rs_raw::{DemuxerInfo, StreamType};
use uuid::Uuid;

use crate::egress::EgressConfig;
use crate::pipeline::{EgressType, PipelineConfig};
use crate::settings::Settings;
use crate::variant::audio::AudioVariant;
use crate::variant::mapping::VariantMapping;
use crate::variant::video::VideoVariant;
use crate::variant::{StreamMapping, VariantStream};

#[derive(Clone)]
pub struct Webhook {
    config: Settings,
}

impl Webhook {
    pub fn new(config: Settings) -> Self {
        Self { config }
    }

    pub fn start(&self, stream_info: &DemuxerInfo) -> PipelineConfig {
        let mut vars: Vec<VariantStream> = vec![];
        if let Some(video_src) = stream_info
            .streams
            .iter()
            .find(|c| c.stream_type == StreamType::Video)
        {
            vars.push(VariantStream::CopyVideo(VariantMapping {
                id: Uuid::new_v4(),
                src_index: video_src.index,
                dst_index: 0,
                group_id: 0,
            }));
            vars.push(VariantStream::Video(VideoVariant {
                mapping: VariantMapping {
                    id: Uuid::new_v4(),
                    src_index: video_src.index,
                    dst_index: 1,
                    group_id: 1,
                },
                width: 1280,
                height: 720,
                fps: video_src.fps,
                bitrate: 3_000_000,
                codec: 27,
                profile: 100,
                level: 51,
                keyframe_interval: video_src.fps as u16 * 2,
                pixel_format: AV_PIX_FMT_YUV420P as u32,
            }));
        }

        if let Some(audio_src) = stream_info
            .streams
            .iter()
            .find(|c| c.stream_type == StreamType::Audio)
        {
            vars.push(VariantStream::CopyAudio(VariantMapping {
                id: Uuid::new_v4(),
                src_index: audio_src.index,
                dst_index: 2,
                group_id: 0,
            }));
            vars.push(VariantStream::Audio(AudioVariant {
                mapping: VariantMapping {
                    id: Uuid::new_v4(),
                    src_index: audio_src.index,
                    dst_index: 3,
                    group_id: 1,
                },
                bitrate: 192_000,
                codec: 86018,
                channels: 2,
                sample_rate: 48_000,
                sample_fmt: "fltp".to_owned(),
            }));
        }

        let var_ids = vars.iter().map(|v| v.id()).collect();
        PipelineConfig {
            id: Uuid::new_v4(),
            variants: vars,
            egress: vec![
                /*EgressType::Recorder(EgressConfig {
                    name: "REC".to_owned(),
                    out_dir: self.config.output_dir.clone(),
                    variants: var_ids,
                }),*/
                EgressType::HLS(EgressConfig {
                    name: "HLS".to_owned(),
                    out_dir: self.config.output_dir.clone(),
                    variants: var_ids,
                }),
            ],
        }
    }
}