Compare commits

...

20 Commits

Author SHA1 Message Date
aea0feef05 fix: dont upscale for variants
All checks were successful
continuous-integration/drone/push Build is passing
2025-06-19 22:20:19 +01:00
64100947b5 fix: hls mapping
All checks were successful
continuous-integration/drone/push Build is passing
2025-06-19 22:08:41 +01:00
166a8bdde9 chore: cleanup HLS output when dropped 2025-06-19 16:28:02 +01:00
02e4725043 fix: tests
All checks were successful
continuous-integration/drone/push Build is passing
2025-06-19 14:22:08 +01:00
686cd7f794 refactor: cleanup directory handling
Some checks failed
continuous-integration/drone/push Build is failing
2025-06-19 13:57:27 +01:00
6eb7ff9807 chore: use rust-ffmpeg docker image 2025-06-19 13:32:41 +01:00
68fad98000 feat: clean shutdown RTMP stream
All checks were successful
continuous-integration/drone Build is passing
2025-06-19 13:08:15 +01:00
5c2a58ed46 refactor: split hls module 2025-06-19 12:18:44 +01:00
2c3ef01d45 fix: docker build 2025-06-17 13:45:38 +01:00
ea33f72069 chore: run tests for docker build
Some checks failed
continuous-integration/drone Build is failing
2025-06-17 13:08:25 +01:00
e91c40806f fix: low latency generator always running
All checks were successful
continuous-integration/drone Build is passing
chore: add HLS generation tests
2025-06-17 12:57:02 +01:00
77eff603d0 fix: disable HLS-LL
All checks were successful
continuous-integration/drone Build is passing
2025-06-17 12:05:12 +01:00
e056e0427f fix: HLS-LL
All checks were successful
continuous-integration/drone Build is passing
refactor: fMP4 (WIP)
2025-06-17 11:48:49 +01:00
a046dc5801 fix: make HLS segment length match encoding params 2025-06-16 13:30:41 +01:00
4787ecd2b4 chore: add more keyframes 2025-06-16 09:58:52 +01:00
e7e1f0299d fix: segment duration calc
Some checks reported errors
continuous-integration/drone Build was killed
feat: add debugging tool for hls segments
2025-06-13 17:42:39 +01:00
338d351727 fix: disable HLS-LL
All checks were successful
continuous-integration/drone Build is passing
fix: thumb.webp path
2025-06-13 13:05:23 +01:00
047b3fec59 fix: hls partial sequencing
All checks were successful
continuous-integration/drone Build is passing
2025-06-13 12:36:20 +01:00
fee5e77407 fix: missing endpoint id 2025-06-13 12:21:40 +01:00
d88f829645 fix: match endpoint 2025-06-13 12:18:39 +01:00
28 changed files with 2573 additions and 633 deletions

125
Cargo.lock generated
View File

@ -182,7 +182,7 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.96",
"syn 2.0.103",
]
[[package]]
@ -193,7 +193,7 @@ checksum = "3f934833b4b7233644e5848f235df3f57ed8c80f1528a26c3dfa13d2147fa056"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.96",
"syn 2.0.103",
]
[[package]]
@ -365,7 +365,7 @@ dependencies = [
"regex",
"rustc-hash",
"shlex",
"syn 2.0.96",
"syn 2.0.103",
]
[[package]]
@ -674,7 +674,7 @@ dependencies = [
"heck",
"proc-macro2",
"quote",
"syn 2.0.96",
"syn 2.0.103",
]
[[package]]
@ -906,7 +906,7 @@ dependencies = [
"proc-macro2",
"quote",
"rustc_version",
"syn 2.0.96",
"syn 2.0.103",
]
[[package]]
@ -938,7 +938,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.96",
"syn 2.0.103",
]
[[package]]
@ -974,6 +974,16 @@ dependencies = [
"cfg-if",
]
[[package]]
name = "env_filter"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "186e05a59d4c50738528153b83b0b0194d3a29507dfec16eccd4b342903397d0"
dependencies = [
"log 0.4.25",
"regex",
]
[[package]]
name = "env_logger"
version = "0.10.2"
@ -987,6 +997,19 @@ dependencies = [
"termcolor",
]
[[package]]
name = "env_logger"
version = "0.11.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13c863f0904021b108aa8b2f55046443e6b1ebde8fd4a15c399893aae4fa069f"
dependencies = [
"anstream",
"anstyle",
"env_filter",
"jiff",
"log 0.4.25",
]
[[package]]
name = "equivalent"
version = "1.0.1"
@ -1282,7 +1305,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.96",
"syn 2.0.103",
]
[[package]]
@ -1897,7 +1920,7 @@ checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.96",
"syn 2.0.103",
]
[[package]]
@ -2026,6 +2049,30 @@ version = "1.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674"
[[package]]
name = "jiff"
version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be1f93b8b1eb69c77f24bbb0afdf66f54b632ee39af40ca21c4365a1d7347e49"
dependencies = [
"jiff-static",
"log 0.4.25",
"portable-atomic",
"portable-atomic-util",
"serde",
]
[[package]]
name = "jiff-static"
version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "03343451ff899767262ec32146f6d559dd759fdadf42ff0e227c7c48f72594b4"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.103",
]
[[package]]
name = "js-sys"
version = "0.3.77"
@ -2153,7 +2200,7 @@ checksum = "04cbf5b083de1c7e0222a7a51dbfdba1cbe1c6ab0b15e29fff3f6c077fd9cd9f"
[[package]]
name = "m3u8-rs"
version = "6.0.0"
source = "git+https://github.com/v0l/m3u8-rs.git?rev=d76ff96326814237a6d5e92288cdfe7060a43168#d76ff96326814237a6d5e92288cdfe7060a43168"
source = "git+https://git.v0l.io/Kieran/m3u8-rs.git?rev=6803eefca2838a8bfae9e19fd516ef36d7d89997#6803eefca2838a8bfae9e19fd516ef36d7d89997"
dependencies = [
"chrono",
"nom",
@ -2443,7 +2490,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.96",
"syn 2.0.103",
]
[[package]]
@ -2576,7 +2623,7 @@ dependencies = [
"pest_meta",
"proc-macro2",
"quote",
"syn 2.0.96",
"syn 2.0.103",
]
[[package]]
@ -2623,7 +2670,7 @@ checksum = "d56a66c0c55993aa927429d0f8a0abfd74f084e4d9c192cffed01e418d83eefb"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.96",
"syn 2.0.103",
]
[[package]]
@ -2695,6 +2742,15 @@ version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "280dc24453071f1b63954171985a0b0d30058d287960968b9b2aca264c8d4ee6"
[[package]]
name = "portable-atomic-util"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d8a2f0d8d040d7848a709caf78912debcc3f33ee4b3cac47d73d1e1069e83507"
dependencies = [
"portable-atomic",
]
[[package]]
name = "ppv-lite86"
version = "0.2.20"
@ -2710,7 +2766,7 @@ version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "865724d4dbe39d9f3dd3b52b88d859d66bcb2d6a0acfd5ea68a65fb66d4bdc1c"
dependencies = [
"env_logger",
"env_logger 0.10.2",
"log 0.4.25",
]
@ -2721,7 +2777,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6924ced06e1f7dfe3fa48d57b9f74f55d8915f5036121bef647ef4b204895fac"
dependencies = [
"proc-macro2",
"syn 2.0.96",
"syn 2.0.103",
]
[[package]]
@ -2760,7 +2816,7 @@ dependencies = [
"prost",
"prost-types",
"regex",
"syn 2.0.96",
"syn 2.0.103",
"tempfile",
]
@ -2774,7 +2830,7 @@ dependencies = [
"itertools 0.12.1",
"proc-macro2",
"quote",
"syn 2.0.96",
"syn 2.0.103",
]
[[package]]
@ -3335,7 +3391,7 @@ checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.96",
"syn 2.0.103",
]
[[package]]
@ -3583,7 +3639,7 @@ dependencies = [
"quote",
"sqlx-core",
"sqlx-macros-core",
"syn 2.0.96",
"syn 2.0.103",
]
[[package]]
@ -3606,7 +3662,7 @@ dependencies = [
"sqlx-mysql",
"sqlx-postgres",
"sqlx-sqlite",
"syn 2.0.96",
"syn 2.0.103",
"tempfile",
"tokio",
"url",
@ -3831,9 +3887,9 @@ dependencies = [
[[package]]
name = "syn"
version = "2.0.96"
version = "2.0.103"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d5d0adab1ae378d7f53bdebc67a39f1f151407ef230f0ce2883572f5d8985c80"
checksum = "e4307e30089d6fd6aff212f2da3a1f9e32f3223b1f010fb09b7c95f90f3ca1e8"
dependencies = [
"proc-macro2",
"quote",
@ -3875,7 +3931,7 @@ checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.96",
"syn 2.0.103",
]
[[package]]
@ -3954,7 +4010,7 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.96",
"syn 2.0.103",
]
[[package]]
@ -3965,7 +4021,7 @@ checksum = "26afc1baea8a989337eeb52b6e72a039780ce45c3edfcc9c5b9d112feeb173c2"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.96",
"syn 2.0.103",
]
[[package]]
@ -4064,7 +4120,7 @@ checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.96",
"syn 2.0.103",
]
[[package]]
@ -4238,7 +4294,7 @@ dependencies = [
"proc-macro2",
"prost-build",
"quote",
"syn 2.0.96",
"syn 2.0.103",
]
[[package]]
@ -4308,7 +4364,7 @@ checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.96",
"syn 2.0.103",
]
[[package]]
@ -4595,7 +4651,7 @@ dependencies = [
"log 0.4.25",
"proc-macro2",
"quote",
"syn 2.0.96",
"syn 2.0.103",
"wasm-bindgen-shared",
]
@ -4630,7 +4686,7 @@ checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.96",
"syn 2.0.103",
"wasm-bindgen-backend",
"wasm-bindgen-shared",
]
@ -4993,7 +5049,7 @@ checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.96",
"syn 2.0.103",
"synstructure 0.13.1",
]
@ -5041,8 +5097,10 @@ dependencies = [
"async-trait",
"bytes",
"data-encoding",
"env_logger 0.11.8",
"ffmpeg-rs-raw",
"fontdue",
"futures",
"futures-util",
"hex",
"itertools 0.14.0",
@ -5055,6 +5113,7 @@ dependencies = [
"serde",
"sha2 0.10.8",
"srt-tokio",
"tempfile",
"tiny-skia",
"tokio",
"usvg",
@ -5090,7 +5149,7 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.96",
"syn 2.0.103",
]
[[package]]
@ -5110,7 +5169,7 @@ checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.96",
"syn 2.0.103",
"synstructure 0.13.1",
]
@ -5139,7 +5198,7 @@ checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.96",
"syn 2.0.103",
]
[[package]]

View File

@ -24,6 +24,6 @@ url = "2.5.0"
itertools = "0.14.0"
chrono = { version = "^0.4.38", features = ["serde"] }
hex = "0.4.3"
m3u8-rs = { git = "https://github.com/v0l/m3u8-rs.git", rev = "d76ff96326814237a6d5e92288cdfe7060a43168" }
m3u8-rs = { git = "https://git.v0l.io/Kieran/m3u8-rs.git", rev = "6803eefca2838a8bfae9e19fd516ef36d7d89997" }
sha2 = "0.10.8"
data-encoding = "2.9.0"

View File

@ -37,4 +37,9 @@ srt-tokio = { version = "0.4.4", optional = true }
rml_rtmp = { version = "0.8.0", optional = true }
bytes = "1.9.0"
xflv = "0.4.4"
futures = "0.3.30"
[dev-dependencies]
tempfile = "3.8.1"
env_logger = "0.11.3"

View File

@ -17,20 +17,12 @@ impl HlsEgress {
pub const PATH: &'static str = "hls";
pub fn new<'a>(
id: &Uuid,
out_dir: &str,
segment_length: f32,
out_dir: PathBuf,
encoders: impl Iterator<Item = (&'a VariantStream, &'a Encoder)>,
segment_type: SegmentType,
) -> Result<Self> {
Ok(Self {
mux: HlsMuxer::new(
id,
PathBuf::from(out_dir).join(Self::PATH).to_str().unwrap(),
segment_length,
encoders,
segment_type,
)?,
mux: HlsMuxer::new(out_dir.join(Self::PATH), encoders, segment_type)?,
})
}
}

View File

@ -10,8 +10,6 @@ use crate::egress::{Egress, EgressResult};
use crate::variant::{StreamMapping, VariantStream};
pub struct RecorderEgress {
/// Pipeline ID
id: Uuid,
/// Internal muxer writing the output packets
muxer: Muxer,
/// Mapping from Variant ID to stream index
@ -20,15 +18,10 @@ pub struct RecorderEgress {
impl RecorderEgress {
pub fn new<'a>(
id: &Uuid,
out_dir: &str,
out_dir: PathBuf,
variants: impl Iterator<Item = (&'a VariantStream, &'a Encoder)>,
) -> Result<Self> {
let base = PathBuf::from(out_dir).join(id.to_string());
let out_file = base.join("recording.ts");
fs::create_dir_all(&base)?;
let out_file = out_dir.join("recording.ts");
let mut var_map = HashMap::new();
let muxer = unsafe {
let mut m = Muxer::builder()
@ -41,11 +34,7 @@ impl RecorderEgress {
m.open(None)?;
m
};
Ok(Self {
id: *id,
muxer,
var_map,
})
Ok(Self { muxer, var_map })
}
}

View File

@ -22,6 +22,7 @@ pub struct FrameGenerator {
width: u16,
height: u16,
video_sample_fmt: AVPixelFormat,
realtime: bool,
audio_sample_rate: u32,
audio_frame_size: i32,
@ -71,6 +72,7 @@ impl FrameGenerator {
fps,
width,
height,
realtime: true,
video_sample_fmt: pix_fmt,
audio_sample_rate: sample_rate,
audio_frame_size: frame_size,
@ -86,6 +88,10 @@ impl FrameGenerator {
})
}
pub fn set_realtime(&mut self, realtime: bool) {
self.realtime = realtime;
}
pub fn from_stream(
video_stream: &IngressStream,
audio_stream: Option<&IngressStream>,
@ -258,11 +264,15 @@ impl FrameGenerator {
(*self.next_frame).data[0],
(self.width as usize * self.height as usize * 4) as usize,
);
for z in 0..(self.width as usize * self.height as usize) {
buf[z * 4..z * 4 + 4].copy_from_slice(&color32);
for chunk in buf.chunks_exact_mut(4) {
chunk[0] = color32[0];
chunk[1] = color32[1];
chunk[2] = color32[2];
chunk[3] = color32[3];
}
Ok(())
}
/// Copy data directly into the frame buffer (must be RGBA data)
pub unsafe fn copy_frame_data(&mut self, data: &[u8]) -> Result<()> {
if self.next_frame.is_null() {
@ -354,6 +364,7 @@ impl FrameGenerator {
self.begin()?;
}
if self.realtime {
let stream_time = Duration::from_secs_f64(
self.video_pts as f64 / self.pts_per_frame() as f64 / self.fps as f64,
);
@ -366,6 +377,7 @@ impl FrameGenerator {
if !wait_time.is_zero() && wait_time.as_secs_f32() > 1f32 / self.fps {
std::thread::sleep(wait_time);
}
}
// convert to output pixel format, or just return internal frame if it matches output
if self.video_sample_fmt != transmute((*self.next_frame).format) {

View File

@ -17,6 +17,7 @@ pub async fn listen(out_dir: String, path: PathBuf, overseer: Arc<dyn Overseer>)
app_name: "".to_string(),
key: "test".to_string(),
};
let url = path.to_str().unwrap().to_string();
let file = std::fs::File::open(path)?;
spawn_pipeline(
Handle::current(),
@ -24,6 +25,8 @@ pub async fn listen(out_dir: String, path: PathBuf, overseer: Arc<dyn Overseer>)
out_dir.clone(),
overseer.clone(),
Box::new(file),
Some(url),
None,
);
Ok(())

View File

@ -1,8 +1,9 @@
use crate::overseer::Overseer;
use crate::pipeline::runner::PipelineRunner;
use crate::pipeline::runner::{PipelineCommand, PipelineRunner};
use log::{error, info, warn};
use serde::{Deserialize, Serialize};
use std::io::Read;
use std::sync::mpsc::Receiver;
use std::sync::Arc;
use std::time::Instant;
use tokio::runtime::Handle;
@ -40,8 +41,10 @@ pub fn spawn_pipeline(
out_dir: String,
seer: Arc<dyn Overseer>,
reader: Box<dyn Read + Send>,
url: Option<String>,
rx: Option<Receiver<PipelineCommand>>,
) {
match PipelineRunner::new(handle, out_dir, seer, info, reader, None) {
match PipelineRunner::new(handle, out_dir, seer, info, reader, url, rx) {
Ok(pl) => match run_pipeline(pl) {
Ok(_) => {}
Err(e) => {

View File

@ -1,6 +1,6 @@
use crate::ingress::{BufferedReader, ConnectionInfo};
use crate::overseer::Overseer;
use crate::pipeline::runner::PipelineRunner;
use crate::pipeline::runner::{PipelineCommand, PipelineRunner};
use anyhow::{anyhow, bail, Result};
use bytes::{Bytes, BytesMut};
use log::{error, info};
@ -11,6 +11,7 @@ use rml_rtmp::sessions::{
use std::collections::VecDeque;
use std::io::{ErrorKind, Read, Write};
use std::net::TcpStream;
use std::sync::mpsc::Sender;
use std::sync::Arc;
use std::time::Duration;
use tokio::net::TcpListener;
@ -32,10 +33,11 @@ struct RtmpClient {
msg_queue: VecDeque<ServerSessionResult>,
pub published_stream: Option<RtmpPublishedStream>,
muxer: FlvMuxer,
tx: Sender<PipelineCommand>,
}
impl RtmpClient {
pub fn new(socket: TcpStream) -> Result<Self> {
pub fn new(socket: TcpStream, tx: Sender<PipelineCommand>) -> Result<Self> {
socket.set_nonblocking(false)?;
let cfg = ServerSessionConfig::new();
let (ses, res) = ServerSession::new(cfg)?;
@ -46,6 +48,7 @@ impl RtmpClient {
msg_queue: VecDeque::from(res),
published_stream: None,
muxer: FlvMuxer::new(),
tx,
})
}
@ -201,8 +204,12 @@ impl RtmpClient {
self.published_stream = Some(RtmpPublishedStream(app_name, stream_key));
}
}
ServerSessionEvent::PublishStreamFinished { .. } => {
// TODO: shutdown pipeline
ServerSessionEvent::PublishStreamFinished {
app_name,
stream_key,
} => {
self.tx.send(PipelineCommand::Shutdown)?;
info!("Stream ending: {app_name}/{stream_key}");
}
ServerSessionEvent::StreamMetadataChanged {
app_name,
@ -271,7 +278,6 @@ pub async fn listen(out_dir: String, addr: String, overseer: Arc<dyn Overseer>)
info!("RTMP listening on: {}", &addr);
while let Ok((socket, ip)) = listener.accept().await {
let mut cc = RtmpClient::new(socket.into_std()?)?;
let overseer = overseer.clone();
let out_dir = out_dir.clone();
let handle = Handle::current();
@ -279,6 +285,8 @@ pub async fn listen(out_dir: String, addr: String, overseer: Arc<dyn Overseer>)
std::thread::Builder::new()
.name(format!("client:rtmp:{}", new_id))
.spawn(move || {
let (tx, rx) = std::sync::mpsc::channel();
let mut cc = RtmpClient::new(socket.into_std()?, tx)?;
if let Err(e) = cc.handshake() {
bail!("Error during handshake: {}", e)
}
@ -301,6 +309,7 @@ pub async fn listen(out_dir: String, addr: String, overseer: Arc<dyn Overseer>)
info,
Box::new(cc),
None,
Some(rx),
) {
Ok(pl) => pl,
Err(e) => {

View File

@ -1,5 +1,6 @@
use crate::ingress::{spawn_pipeline, BufferedReader, ConnectionInfo};
use crate::overseer::Overseer;
use crate::pipeline::runner::PipelineCommand;
use anyhow::Result;
use futures_util::stream::FusedStream;
use futures_util::StreamExt;
@ -7,6 +8,7 @@ use log::info;
use srt_tokio::{SrtListener, SrtSocket};
use std::io::Read;
use std::net::SocketAddr;
use std::sync::mpsc::{channel, Sender};
use std::sync::Arc;
use tokio::runtime::Handle;
use uuid::Uuid;
@ -31,6 +33,7 @@ pub async fn listen(out_dir: String, addr: String, overseer: Arc<dyn Overseer>)
.as_ref()
.map_or(String::new(), |s| s.to_string()),
};
let (tx, rx) = channel();
spawn_pipeline(
Handle::current(),
info,
@ -40,7 +43,10 @@ pub async fn listen(out_dir: String, addr: String, overseer: Arc<dyn Overseer>)
handle: Handle::current(),
socket,
buffer: BufferedReader::new(4096, MAX_SRT_BUFFER_SIZE, "SRT"),
tx,
}),
None,
Some(rx),
);
}
Ok(())
@ -50,6 +56,7 @@ struct SrtReader {
pub handle: Handle,
pub socket: SrtSocket,
pub buffer: BufferedReader,
pub tx: Sender<PipelineCommand>, // TODO: implement clean shutdown
}
impl Read for SrtReader {

View File

@ -27,6 +27,8 @@ pub async fn listen(out_dir: String, addr: String, overseer: Arc<dyn Overseer>)
out_dir.clone(),
overseer.clone(),
Box::new(socket),
None,
None,
);
}
Ok(())

View File

@ -13,7 +13,6 @@ use ringbuf::traits::{Observer, Split};
use ringbuf::{HeapCons, HeapRb};
use std::io::Read;
use std::sync::Arc;
use std::time::Duration;
use tiny_skia::Pixmap;
use tokio::runtime::Handle;
use uuid::Uuid;
@ -21,10 +20,6 @@ use uuid::Uuid;
pub async fn listen(out_dir: String, overseer: Arc<dyn Overseer>) -> Result<()> {
info!("Test pattern enabled");
// add a delay, there is a race condition somewhere, the test pattern doesnt always
// get added to active_streams
tokio::time::sleep(Duration::from_secs(1)).await;
let info = ConnectionInfo {
id: Uuid::new_v4(),
endpoint: "test-pattern",
@ -36,9 +31,11 @@ pub async fn listen(out_dir: String, overseer: Arc<dyn Overseer>) -> Result<()>
spawn_pipeline(
Handle::current(),
info,
out_dir.clone(),
overseer.clone(),
out_dir,
overseer,
Box::new(src),
None,
None,
);
Ok(())
}

View File

@ -1,8 +1,10 @@
pub mod egress;
mod generator;
pub mod ingress;
pub mod mux;
pub mod overseer;
pub mod pipeline;
#[cfg(test)]
pub mod test_hls_timing;
pub mod variant;
pub mod viewer;
mod generator;

View File

@ -0,0 +1,146 @@
use crate::egress::EgressResult;
use crate::mux::hls::variant::HlsVariant;
use crate::variant::{StreamMapping, VariantStream};
use anyhow::Result;
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVPacket;
use ffmpeg_rs_raw::Encoder;
use itertools::Itertools;
use log::{trace, warn};
use std::fmt::Display;
use std::fs::{remove_dir_all, File};
use std::path::PathBuf;
use uuid::Uuid;
mod segment;
mod variant;
/// A single elementary stream (video, audio or subtitle) that belongs to one
/// HLS variant, tagged with its group, its stream index in the muxer output
/// and the id of the source variant stream.
pub enum HlsVariantStream {
    Video {
        // variant group this stream belongs to
        group: usize,
        // stream index inside the muxer output
        index: usize,
        // id of the source variant stream
        id: Uuid,
    },
    Audio {
        // variant group this stream belongs to
        group: usize,
        // stream index inside the muxer output
        index: usize,
        // id of the source variant stream
        id: Uuid,
    },
    Subtitle {
        // variant group this stream belongs to
        group: usize,
        // stream index inside the muxer output
        index: usize,
        // id of the source variant stream
        id: Uuid,
    },
}
impl HlsVariantStream {
pub fn id(&self) -> &Uuid {
match self {
HlsVariantStream::Video { id, .. } => id,
HlsVariantStream::Audio { id, .. } => id,
HlsVariantStream::Subtitle { id, .. } => id,
}
}
pub fn index(&self) -> &usize {
match self {
HlsVariantStream::Video { index, .. } => index,
HlsVariantStream::Audio { index, .. } => index,
HlsVariantStream::Subtitle { index, .. } => index,
}
}
}
impl Display for HlsVariantStream {
    // Renders as the ffmpeg-style stream specifier "v:N" / "a:N" / "s:N"
    // using the stream's index within the muxer output.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            HlsVariantStream::Video { index, .. } => write!(f, "v:{}", index),
            HlsVariantStream::Audio { index, .. } => write!(f, "a:{}", index),
            HlsVariantStream::Subtitle { index, .. } => write!(f, "s:{}", index),
        }
    }
}
/// Container format used for the HLS media segments.
#[derive(Clone, Copy, PartialEq)]
pub enum SegmentType {
    // MPEG transport stream segments
    MPEGTS,
    // fragmented MP4 segments
    FMP4,
}
/// Muxes encoded packets into one or more HLS variants, all rooted
/// under a common output directory that also holds the master playlist.
pub struct HlsMuxer {
    // root directory for the master playlist and all variant outputs
    pub out_dir: PathBuf,
    // one variant per encoder group
    pub variants: Vec<HlsVariant>,
}
impl HlsMuxer {
    // filename of the master playlist written into `out_dir`
    const MASTER_PLAYLIST: &'static str = "live.m3u8";

    /// Create a new HLS muxer rooted at `out_dir`.
    ///
    /// The output directory is created if missing. Encoders are grouped by
    /// their variant's `group_id()` and one [`HlsVariant`] is created per
    /// group; the master playlist is written before returning.
    pub fn new<'a>(
        out_dir: PathBuf,
        encoders: impl Iterator<Item = (&'a VariantStream, &'a Encoder)>,
        segment_type: SegmentType,
    ) -> Result<Self> {
        if !out_dir.exists() {
            std::fs::create_dir_all(&out_dir)?;
        }

        let mut vars = Vec::new();
        // sort by group id first so chunk_by yields one contiguous run per group
        for (k, group) in &encoders
            .sorted_by(|a, b| a.0.group_id().cmp(&b.0.group_id()))
            .chunk_by(|a| a.0.group_id())
        {
            let var = HlsVariant::new(out_dir.clone(), k, group, segment_type)?;
            vars.push(var);
        }

        let ret = Self {
            out_dir,
            variants: vars,
        };
        ret.write_master_playlist()?;
        Ok(ret)
    }

    /// Write the multi-variant (master) playlist listing every variant stream.
    fn write_master_playlist(&self) -> Result<()> {
        let mut pl = m3u8_rs::MasterPlaylist::default();
        pl.version = Some(3);
        pl.variants = self
            .variants
            .iter()
            .map(|v| v.to_playlist_variant())
            .collect();

        let mut f_out = File::create(self.out_dir.join(Self::MASTER_PLAYLIST))?;
        pl.write_to(&mut f_out)?;
        Ok(())
    }

    /// Mux an encoded packet from [Encoder]
    ///
    /// Routes the packet to the variant that owns the stream identified by
    /// `variant`, rewriting the packet's `stream_index` to that variant's
    /// internal index. Packets for variants this muxer does not handle are
    /// dropped and reported as [`EgressResult::None`].
    pub unsafe fn mux_packet(
        &mut self,
        pkt: *mut AVPacket,
        variant: &Uuid,
    ) -> Result<EgressResult> {
        for var in self.variants.iter_mut() {
            if let Some(vs) = var.streams.iter().find(|s| s.id() == variant) {
                // very important for muxer to know which stream this pkt belongs to
                (*pkt).stream_index = *vs.index() as _;
                return var.process_packet(pkt);
            }
        }

        // This HLS muxer doesn't handle this variant, return None instead of failing
        // This can happen when multiple egress handlers are configured with different variant sets
        trace!(
            "HLS muxer received packet for variant {} which it doesn't handle",
            variant
        );
        Ok(EgressResult::None)
    }
}
impl Drop for HlsMuxer {
    /// Best-effort removal of the on-disk HLS output; a failure is only logged.
    fn drop(&mut self) {
        match remove_dir_all(&self.out_dir) {
            Ok(()) => {}
            Err(e) => {
                warn!("Failed to clean up hls dir: {} {}", self.out_dir.display(), e)
            }
        }
    }
}

View File

@ -0,0 +1,75 @@
use crate::mux::hls::variant::HlsVariant;
use crate::mux::SegmentType;
use m3u8_rs::{ByteRange, MediaSegment, MediaSegmentType, Part};
/// Either a complete media segment or an LL-HLS partial segment
/// as tracked in a variant's playlist window.
#[derive(PartialEq)]
pub enum HlsSegment {
    Full(SegmentInfo),
    Partial(PartialSegmentInfo),
}
impl HlsSegment {
pub fn to_media_segment(&self) -> MediaSegmentType {
match self {
HlsSegment::Full(f) => f.to_media_segment(),
HlsSegment::Partial(p) => p.to_media_segment(),
}
}
}
/// Metadata for one complete HLS media segment.
#[derive(PartialEq)]
pub struct SegmentInfo {
    // segment sequence number
    pub index: u64,
    // segment duration in seconds
    pub duration: f32,
    // container format of this segment
    pub kind: SegmentType,
}
impl SegmentInfo {
    /// Build the m3u8 playlist entry for this full segment.
    pub fn to_media_segment(&self) -> MediaSegmentType {
        let mut seg = MediaSegment::default();
        seg.uri = self.filename();
        seg.duration = self.duration;
        MediaSegmentType::Full(seg)
    }

    /// On-disk file name of this segment.
    pub fn filename(&self) -> String {
        HlsVariant::segment_name(self.kind, self.index)
    }
}
/// Metadata for one LL-HLS partial segment, which references a byte
/// range of its parent (full) segment file.
#[derive(PartialEq)]
pub struct PartialSegmentInfo {
    // partial segment sequence number
    pub index: u64,
    // sequence number of the parent full segment
    pub parent_index: u64,
    // container format of the parent segment
    pub parent_kind: SegmentType,
    // partial duration in seconds
    pub duration: f64,
    // whether this partial starts at an independent (keyframe) boundary
    pub independent: bool,
    // (length, optional start offset) into the parent segment file
    pub byte_range: Option<(u64, Option<u64>)>,
}
impl PartialSegmentInfo {
    /// Build the m3u8 EXT-X-PART playlist entry for this partial segment.
    pub fn to_media_segment(&self) -> MediaSegmentType {
        let byte_range = match self.byte_range {
            Some((length, offset)) => Some(ByteRange { length, offset }),
            None => None,
        };
        MediaSegmentType::Partial(Part {
            uri: self.filename(),
            duration: self.duration,
            independent: self.independent,
            gap: false,
            byte_range,
        })
    }

    /// File name of the parent (full) segment this partial lives inside.
    pub fn filename(&self) -> String {
        HlsVariant::segment_name(self.parent_kind, self.parent_index)
    }

    /// Byte offset where this partial segment ends
    pub fn end_pos(&self) -> Option<u64> {
        match self.byte_range {
            Some((len, start)) => Some(start.unwrap_or(0) + len),
            None => None,
        }
    }
}

View File

@ -1,74 +1,21 @@
use crate::egress::{EgressResult, EgressSegment};
use crate::mux::hls::segment::{HlsSegment, PartialSegmentInfo, SegmentInfo};
use crate::mux::{HlsVariantStream, SegmentType};
use crate::variant::{StreamMapping, VariantStream};
use anyhow::{bail, ensure, Result};
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVCodecID::AV_CODEC_ID_H264;
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVMediaType::AVMEDIA_TYPE_VIDEO;
use ffmpeg_rs_raw::ffmpeg_sys_the_third::{
av_free, av_opt_set, av_q2d, av_write_frame, avio_close, avio_flush, avio_open, avio_size,
AVPacket, AVStream, AVIO_FLAG_WRITE, AV_NOPTS_VALUE, AV_PKT_FLAG_KEY,
av_free, av_q2d, av_write_frame, avio_close, avio_flush, avio_open, avio_size, AVPacket,
AVIO_FLAG_WRITE, AV_NOPTS_VALUE, AV_PKT_FLAG_KEY,
};
use ffmpeg_rs_raw::{cstr, Encoder, Muxer};
use itertools::Itertools;
use log::{info, trace, warn};
use m3u8_rs::{ByteRange, MediaSegment, MediaSegmentType, Part, PartInf};
use log::{debug, info, trace, warn};
use m3u8_rs::{ExtTag, MediaSegmentType, PartInf, PreloadHint};
use std::collections::HashMap;
use std::fmt::Display;
use std::fs::File;
use std::fs::{create_dir_all, File};
use std::path::PathBuf;
use std::ptr;
use uuid::Uuid;
#[derive(Clone, Copy, PartialEq)]
pub enum SegmentType {
MPEGTS,
FMP4,
}
pub enum HlsVariantStream {
Video {
group: usize,
index: usize,
id: Uuid,
},
Audio {
group: usize,
index: usize,
id: Uuid,
},
Subtitle {
group: usize,
index: usize,
id: Uuid,
},
}
impl HlsVariantStream {
pub fn id(&self) -> &Uuid {
match self {
HlsVariantStream::Video { id, .. } => id,
HlsVariantStream::Audio { id, .. } => id,
HlsVariantStream::Subtitle { id, .. } => id,
}
}
pub fn index(&self) -> &usize {
match self {
HlsVariantStream::Video { index, .. } => index,
HlsVariantStream::Audio { index, .. } => index,
HlsVariantStream::Subtitle { index, .. } => index,
}
}
}
impl Display for HlsVariantStream {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
HlsVariantStream::Video { index, .. } => write!(f, "v:{}", index),
HlsVariantStream::Audio { index, .. } => write!(f, "a:{}", index),
HlsVariantStream::Subtitle { index, .. } => write!(f, "s:{}", index),
}
}
}
pub struct HlsVariant {
/// Name of this variant (720p)
@ -76,124 +23,57 @@ pub struct HlsVariant {
/// MPEG-TS muxer for this variant
mux: Muxer,
/// List of streams ids in this variant
streams: Vec<HlsVariantStream>,
pub(crate) streams: Vec<HlsVariantStream>,
/// Segment length in seconds
segment_length: f32,
segment_length_target: f32,
/// Total number of seconds of video to store
segment_window: f32,
/// Current segment index
idx: u64,
/// Output directory (base)
out_dir: String,
out_dir: PathBuf,
/// List of segments to be included in the playlist
segments: Vec<HlsSegment>,
/// Type of segments to create
segment_type: SegmentType,
/// Ending presentation timestamp
end_pts: i64,
/// Current segment duration in seconds (precise accumulation)
duration: f64,
/// Timestamp of the start of the current segment
current_segment_start: f64,
/// Timestamp of the start of the current partial
current_partial_start: f64,
/// Number of packets written to current segment
packets_written: u64,
/// Reference stream used to track duration
ref_stream_index: i32,
/// HLS-LL: Enable LL-output
low_latency: bool,
/// LL-HLS: Target duration for partial segments
partial_target_duration: f32,
/// HLS-LL: Current partial index
current_partial_index: u64,
/// HLS-LL: Current duration in this partial
current_partial_duration: f64,
}
#[derive(PartialEq)]
enum HlsSegment {
Full(SegmentInfo),
Partial(PartialSegmentInfo),
}
impl HlsSegment {
fn to_media_segment(&self) -> MediaSegmentType {
match self {
HlsSegment::Full(s) => s.to_media_segment(),
HlsSegment::Partial(s) => s.to_media_segment(),
}
}
}
#[derive(PartialEq)]
struct SegmentInfo {
index: u64,
duration: f32,
kind: SegmentType,
}
impl SegmentInfo {
fn to_media_segment(&self) -> MediaSegmentType {
MediaSegmentType::Full(MediaSegment {
uri: self.filename(),
duration: self.duration,
..MediaSegment::default()
})
}
fn filename(&self) -> String {
HlsVariant::segment_name(self.kind, self.index)
}
}
#[derive(PartialEq)]
struct PartialSegmentInfo {
index: u64,
parent_index: u64,
parent_kind: SegmentType,
duration: f64,
independent: bool,
byte_range: Option<(u64, Option<u64>)>,
}
impl PartialSegmentInfo {
fn to_media_segment(&self) -> MediaSegmentType {
MediaSegmentType::Partial(Part {
uri: self.filename(),
duration: self.duration,
independent: self.independent,
gap: false,
byte_range: self.byte_range.map(|r| ByteRange {
length: r.0,
offset: r.1,
}),
})
}
fn filename(&self) -> String {
HlsVariant::segment_name(self.parent_kind, self.parent_index)
}
/// HLS-LL: Whether the next partial segment should be marked as independent
next_partial_independent: bool,
/// Path to initialization segment for fMP4
init_segment_path: Option<String>,
}
impl HlsVariant {
pub fn new<'a>(
out_dir: &'a str,
segment_length: f32,
out_dir: PathBuf,
group: usize,
encoded_vars: impl Iterator<Item = (&'a VariantStream, &'a Encoder)>,
segment_type: SegmentType,
) -> Result<Self> {
let name = format!("stream_{}", group);
let first_seg = Self::map_segment_path(out_dir, &name, 1, segment_type);
std::fs::create_dir_all(PathBuf::from(&first_seg).parent().unwrap())?;
let mut opts = HashMap::new();
if let SegmentType::FMP4 = segment_type {
opts.insert("fflags".to_string(), "-autobsf".to_string());
opts.insert(
"movflags".to_string(),
"+frag_custom+dash+delay_moov".to_string(),
);
};
let var_dir = out_dir.join(&name);
if !var_dir.exists() {
create_dir_all(&var_dir)?;
}
let mut mux = unsafe {
Muxer::builder()
.with_output_path(
first_seg.as_str(),
var_dir.join("1.ts").to_str().unwrap(),
match segment_type {
SegmentType::MPEGTS => Some("mpegts"),
SegmentType::FMP4 => Some("mp4"),
@ -204,6 +84,7 @@ impl HlsVariant {
let mut streams = Vec::new();
let mut ref_stream_index = -1;
let mut has_video = false;
let mut segment_length = 1.0;
for (var, enc) in encoded_vars {
match var {
@ -218,6 +99,10 @@ impl HlsVariant {
has_video = true;
// Always use video stream as reference for segmentation
ref_stream_index = stream_idx as _;
let sg = v.keyframe_interval as f32 / v.fps;
if sg > segment_length {
segment_length = sg;
}
},
VariantStream::Audio(a) => unsafe {
let stream = mux.add_stream_encoder(enc)?;
@ -251,27 +136,65 @@ impl HlsVariant {
name,
ref_stream_index
);
let mut opts = HashMap::new();
if let SegmentType::FMP4 = segment_type {
//opts.insert("fflags".to_string(), "-autobsf".to_string());
opts.insert(
"movflags".to_string(),
"+frag_custom+dash+delay_moov".to_string(),
);
};
unsafe {
mux.open(Some(opts))?;
//av_dump_format(mux.context(), 0, ptr::null_mut(), 0);
}
Ok(Self {
let mut variant = Self {
name: name.clone(),
segment_length,
segment_window: 30.0,
mux,
streams,
idx: 1,
segments: Vec::new(),
out_dir: out_dir.to_string(),
out_dir: var_dir,
segment_type,
end_pts: AV_NOPTS_VALUE,
duration: 0.0,
current_segment_start: 0.0,
current_partial_start: 0.0,
packets_written: 0,
ref_stream_index,
partial_target_duration: 0.33,
low_latency: false,
partial_target_duration: 0.0,
current_partial_index: 0,
current_partial_duration: 0.0,
})
next_partial_independent: false,
segment_length_target: segment_length,
init_segment_path: None,
};
// Create initialization segment for fMP4
if segment_type == SegmentType::FMP4 {
unsafe {
variant.create_init_segment()?;
}
}
Ok(variant)
}
/// Effective segment duration in seconds.
///
/// The configured target (`segment_length_target`) is clamped from below:
/// in low-latency mode the floor is three times the target but never less
/// than 6 seconds; otherwise the floor is 2 seconds.
pub fn segment_length(&self) -> f32 {
    let floor = match self.low_latency {
        // LL mode: stretch segments to 3x the target, minimum 6s
        true => (self.segment_length_target * 3.0).max(6.0),
        false => 2.0,
    };
    self.segment_length_target.max(floor)
}
/// Duration of a single LL-HLS partial segment in seconds.
///
/// A full segment is split into three equal partials, so each partial is
/// one third of [`Self::segment_length`].
///
/// Bug fix: the previous expression `partial - partial % seg` always
/// evaluated to 0.0, because for floats `x % y == x` whenever `0 <= x < y`
/// and the partial size (seg/3) is always smaller than the segment size —
/// which made the advertised partial-target duration zero.
pub fn partial_segment_length(&self) -> f32 {
    // 3 partials per full segment
    self.segment_length() / 3.0
}
pub fn segment_name(t: SegmentType, idx: u64) -> String {
@ -281,64 +204,46 @@ impl HlsVariant {
}
}
pub fn out_dir(&self) -> PathBuf {
PathBuf::from(&self.out_dir).join(&self.name)
}
pub fn map_segment_path(out_dir: &str, name: &str, idx: u64, typ: SegmentType) -> String {
PathBuf::from(out_dir)
.join(name)
.join(Self::segment_name(typ, idx))
.to_string_lossy()
.to_string()
/// Build the on-disk path for segment `idx` of the given segment type,
/// rooted at this variant's output directory.
pub fn map_segment_path(&self, idx: u64, typ: SegmentType) -> PathBuf {
    self.out_dir.join(Self::segment_name(typ, idx))
}
/// Process a single packet through the muxer
unsafe fn process_packet(&mut self, pkt: *mut AVPacket) -> Result<EgressResult> {
pub(crate) unsafe fn process_packet(&mut self, pkt: *mut AVPacket) -> Result<EgressResult> {
let pkt_stream = *(*self.mux.context())
.streams
.add((*pkt).stream_index as usize);
let pkt_q = av_q2d((*pkt).time_base);
let mut result = EgressResult::None;
let stream_type = (*(*pkt_stream).codecpar).codec_type;
let mut can_split = stream_type == AVMEDIA_TYPE_VIDEO
&& ((*pkt).flags & AV_PKT_FLAG_KEY == AV_PKT_FLAG_KEY);
let mut is_ref_pkt =
stream_type == AVMEDIA_TYPE_VIDEO && (*pkt).stream_index == self.ref_stream_index;
let mut is_ref_pkt = (*pkt).stream_index == self.ref_stream_index;
if (*pkt).pts == AV_NOPTS_VALUE {
can_split = false;
is_ref_pkt = false;
}
// check if current packet is keyframe, flush current segment
if self.packets_written > 1 && can_split && self.duration >= self.segment_length as f64 {
result = self.split_next_seg()?;
}
if is_ref_pkt && self.packets_written > 0 {
let pkt_pts = (*pkt).pts as f64 * pkt_q;
let cur_duration = pkt_pts - self.current_segment_start;
let cur_part_duration = pkt_pts - self.current_partial_start;
// track duration from pts
if is_ref_pkt {
if self.end_pts == AV_NOPTS_VALUE {
self.end_pts = (*pkt).pts;
// check if current packet is keyframe, flush current segment
if can_split && cur_duration >= self.segment_length() as f64 {
result = self.split_next_seg(pkt_pts)?;
} else if self.low_latency && cur_part_duration >= self.partial_target_duration as f64 {
result = self.create_partial_segment(pkt_pts)?;
self.next_partial_independent = can_split;
}
let pts_diff = (*pkt).pts - self.end_pts;
if pts_diff > 0 {
let time_delta = pts_diff as f64 * av_q2d((*pkt).time_base);
self.duration += time_delta;
self.current_partial_duration += time_delta;
}
self.end_pts = (*pkt).pts;
}
// write to current segment
self.mux.write_packet(pkt)?;
self.packets_written += 1;
// HLS-LL: write next partial segment
if is_ref_pkt && self.current_partial_duration >= self.partial_target_duration as f64 {
self.create_partial_segment(can_split)?;
}
Ok(result)
}
@ -347,93 +252,130 @@ impl HlsVariant {
}
/// Create a partial segment for LL-HLS
fn create_partial_segment(&mut self, independent: bool) -> Result<()> {
fn create_partial_segment(&mut self, next_pkt_start: f64) -> Result<EgressResult> {
let ctx = self.mux.context();
let pos = unsafe {
let end_pos = unsafe {
avio_flush((*ctx).pb);
avio_size((*ctx).pb) as u64
};
let previous_partial_end = self.segments.last().and_then(|s| match &s {
HlsSegment::Partial(p) => p.byte_range.as_ref().map(|(len, start)| start.unwrap_or(0) + len),
ensure!(end_pos > 0, "End position cannot be 0");
if self.segment_type == SegmentType::MPEGTS {
ensure!(
end_pos % 188 == 0,
"Invalid end position, must be multiple of 188"
);
}
let previous_end_pos = self
.segments
.last()
.and_then(|s| match &s {
HlsSegment::Partial(p) => p.end_pos(),
_ => None,
});
})
.unwrap_or(0);
let partial_size = end_pos - previous_end_pos;
let partial_info = PartialSegmentInfo {
index: self.current_partial_index,
parent_index: self.idx,
parent_kind: self.segment_type,
duration: self.current_partial_duration,
independent,
byte_range: match previous_partial_end {
Some(prev_end) => Some((pos - prev_end, Some(prev_end))),
_ => Some((pos, Some(0))),
},
duration: next_pkt_start - self.current_partial_start,
independent: self.next_partial_independent,
byte_range: Some((partial_size, Some(previous_end_pos))),
};
trace!(
debug!(
"{} created partial segment {} [{:.3}s, independent={}]",
self.name,
partial_info.index,
partial_info.duration,
independent
self.name, partial_info.index, partial_info.duration, partial_info.independent,
);
self.segments.push(HlsSegment::Partial(partial_info));
self.current_partial_index += 1;
self.current_partial_duration = 0.0;
self.next_partial_independent = false;
self.current_partial_start = next_pkt_start;
self.write_playlist()?;
Ok(EgressResult::None)
}
/// Create initialization segment for fMP4
unsafe fn create_init_segment(&mut self) -> Result<()> {
if self.segment_type != SegmentType::FMP4 || self.init_segment_path.is_some() {
return Ok(());
}
let init_path = PathBuf::from(&self.out_dir)
.join(&self.name)
.join("init.mp4")
.to_string_lossy()
.to_string();
// Create a temporary muxer for initialization segment
let mut init_opts = HashMap::new();
init_opts.insert(
"movflags".to_string(),
"+frag_custom+dash+delay_moov".to_string(),
);
let mut init_mux = Muxer::builder()
.with_output_path(init_path.as_str(), Some("mp4"))?
.build()?;
// Copy stream parameters from main muxer
let main_ctx = self.mux.context();
for i in 0..(*main_ctx).nb_streams {
let src_stream = *(*main_ctx).streams.add(i as usize);
let s = init_mux.add_copy_stream(src_stream)?;
ensure!((*s).index == (*src_stream).index, "Stream index mismatch");
}
init_mux.open(Some(init_opts))?;
av_write_frame(init_mux.context(), ptr::null_mut());
init_mux.close()?;
self.init_segment_path = Some("init.mp4".to_string());
info!("Created fMP4 initialization segment: {}", init_path);
Ok(())
}
/// Reset the muxer state and start the next segment
unsafe fn split_next_seg(&mut self) -> Result<EgressResult> {
unsafe fn split_next_seg(&mut self, next_pkt_start: f64) -> Result<EgressResult> {
let completed_segment_idx = self.idx;
self.idx += 1;
self.current_partial_index = 0;
// Manually reset muxer avio
let ctx = self.mux.context();
av_write_frame(ctx, ptr::null_mut());
let ret = av_write_frame(ctx, ptr::null_mut());
if ret < 0 {
bail!("Failed to split segment {}", ret);
}
avio_flush((*ctx).pb);
avio_close((*ctx).pb);
av_free((*ctx).url as *mut _);
let next_seg_url =
Self::map_segment_path(&self.out_dir, &self.name, self.idx, self.segment_type);
(*ctx).url = cstr!(next_seg_url.as_str());
let next_seg_url = self.map_segment_path(self.idx, self.segment_type);
(*ctx).url = cstr!(next_seg_url.to_str().unwrap());
let ret = avio_open(&mut (*ctx).pb, (*ctx).url, AVIO_FLAG_WRITE);
if ret < 0 {
bail!("Failed to re-init avio");
}
// tell muxer it needs to write headers again
av_opt_set(
(*ctx).priv_data,
cstr!("events_flags"),
cstr!("resend_headers"),
0,
);
// Log the completed segment (previous index), not the next one
let completed_seg_path = Self::map_segment_path(
&self.out_dir,
&self.name,
completed_segment_idx,
self.segment_type,
);
let completed_segment_path = PathBuf::from(&completed_seg_path);
let segment_size = completed_segment_path
.metadata()
.map(|m| m.len())
.unwrap_or(0);
info!(
let completed_seg_path = self.map_segment_path(completed_segment_idx, self.segment_type);
let segment_size = completed_seg_path.metadata().map(|m| m.len()).unwrap_or(0);
let cur_duration = next_pkt_start - self.current_segment_start;
debug!(
"Finished segment {} [{:.3}s, {:.2} kB, {} pkts]",
completed_segment_path
completed_seg_path
.file_name()
.unwrap_or_default()
.to_string_lossy(),
self.duration,
cur_duration,
segment_size as f32 / 1024f32,
self.packets_written
);
@ -452,12 +394,7 @@ impl HlsVariant {
variant: video_var_id,
idx: seg.index,
duration: seg.duration,
path: PathBuf::from(Self::map_segment_path(
&self.out_dir,
&self.name,
seg.index,
self.segment_type,
)),
path: self.map_segment_path(seg.index, self.segment_type),
})
.collect();
@ -465,17 +402,25 @@ impl HlsVariant {
let created = EgressSegment {
variant: video_var_id,
idx: completed_segment_idx,
duration: self.duration as f32,
path: completed_segment_path,
duration: cur_duration as f32,
path: completed_seg_path,
};
if let Err(e) = self.push_segment(completed_segment_idx, self.duration as f32) {
warn!("Failed to update playlist: {}", e);
}
self.segments.push(HlsSegment::Full(SegmentInfo {
index: completed_segment_idx,
duration: if self.playlist_version() >= 6 {
cur_duration.round() as _
} else {
cur_duration as _
},
kind: self.segment_type,
}));
self.write_playlist()?;
// Reset counters for next segment
self.packets_written = 0;
self.duration = 0.0;
self.current_segment_start = next_pkt_start;
Ok(EgressResult::Segments {
created: vec![created],
@ -489,17 +434,6 @@ impl HlsVariant {
.find(|a| matches!(*a, HlsVariantStream::Video { .. }))
}
/// Add a new segment to the variant and return a list of deleted segments
fn push_segment(&mut self, idx: u64, duration: f32) -> Result<()> {
self.segments.push(HlsSegment::Full(SegmentInfo {
index: idx,
duration,
kind: self.segment_type,
}));
self.write_playlist()
}
/// Delete segments which are too old
fn clean_segments(&mut self) -> Result<Vec<SegmentInfo>> {
let drain_from_hls_segment = {
@ -525,11 +459,10 @@ impl HlsVariant {
let mut ret = vec![];
if let Some(seg_match) = drain_from_hls_segment {
if let Some(drain_pos) = self.segments.iter().position(|e| e == seg_match) {
let seg_dir = self.out_dir();
for seg in self.segments.drain(..drain_pos) {
match seg {
HlsSegment::Full(seg) => {
let seg_path = seg_dir.join(seg.filename());
let seg_path = self.out_dir.join(seg.filename());
if let Err(e) = std::fs::remove_file(&seg_path) {
warn!(
"Failed to remove segment file: {} {}",
@ -550,18 +483,56 @@ impl HlsVariant {
Ok(ret)
}
/// HLS playlist protocol version to advertise.
///
/// Version 6 is needed for the low-latency extensions and for EXT-X-MAP
/// without I-FRAMES-ONLY (fMP4); plain MPEG-TS playlists stay at 3.
fn playlist_version(&self) -> usize {
    if self.low_latency || self.segment_type == SegmentType::FMP4 {
        6
    } else {
        3
    }
}
fn write_playlist(&mut self) -> Result<()> {
if self.segments.is_empty() {
return Ok(()); // Don't write empty playlists
}
let mut pl = m3u8_rs::MediaPlaylist::default();
pl.target_duration = (self.segment_length.ceil() as u64).max(1);
pl.segments = self.segments.iter().map(|s| s.to_media_segment()).collect();
pl.version = Some(6);
// Add EXT-X-MAP initialization segment for fMP4
if self.segment_type == SegmentType::FMP4 {
if let Some(ref init_path) = self.init_segment_path {
pl.unknown_tags.push(ExtTag {
tag: "X-MAP".to_string(),
rest: Some(format!("URI=\"{}\"", init_path)),
});
}
}
// append segment preload for next part segment
if let Some(HlsSegment::Partial(partial)) = self.segments.last() {
// TODO: try to estimate if there will be another partial segment
pl.segments.push(MediaSegmentType::PreloadHint(PreloadHint {
hint_type: "PART".to_string(),
uri: partial.filename(),
byte_range_start: partial.end_pos(),
byte_range_length: None,
}));
}
pl.version = Some(self.playlist_version());
pl.target_duration = if self.playlist_version() >= 6 {
self.segment_length().round() as _
} else {
self.segment_length()
};
if self.low_latency {
pl.part_inf = Some(PartInf {
part_target: self.partial_target_duration as f64,
});
}
pl.media_sequence = self
.segments
.iter()
@ -570,39 +541,55 @@ impl HlsVariant {
_ => None,
})
.unwrap_or(self.idx);
// For live streams, don't set end list
pl.end_list = false;
let mut f_out = File::create(self.out_dir().join("live.m3u8"))?;
let mut f_out = File::create(self.out_dir.join("live.m3u8"))?;
pl.write_to(&mut f_out)?;
Ok(())
}
/// https://git.ffmpeg.org/gitweb/ffmpeg.git/blob/HEAD:/libavformat/hlsenc.c#l351
unsafe fn to_codec_attr(&self, stream: *mut AVStream) -> Option<String> {
let p = (*stream).codecpar;
unsafe fn to_codec_attr(&self) -> Option<String> {
let mut codecs = Vec::new();
// Find video and audio streams and build codec string
for stream in &self.streams {
let av_stream = *(*self.mux.context()).streams.add(*stream.index());
let p = (*av_stream).codecpar;
match stream {
HlsVariantStream::Video { .. } => {
if (*p).codec_id == AV_CODEC_ID_H264 {
let data = (*p).extradata;
if !data.is_null() {
let mut id_ptr = ptr::null_mut();
let ds: *mut u16 = data as *mut u16;
if (*ds) == 1 && (*data.add(4)) & 0x1F == 7 {
id_ptr = data.add(5);
} else if (*ds) == 1 && (*data.add(3)) & 0x1F == 7 {
id_ptr = data.add(4);
} else if *data.add(0) == 1 {
id_ptr = data.add(1);
} else {
return None;
// Use profile and level from codec parameters
let profile_idc = (*p).profile as u8;
let level_idc = (*p).level as u8;
// For H.264, constraint flags are typically 0 unless specified
// Common constraint flags: 0x40 (constraint_set1_flag) for baseline
let constraint_flags = match profile_idc {
66 => 0x40, // Baseline profile
_ => 0x00, // Main/High profiles typically have no constraints
};
let avc1_code = format!(
"avc1.{:02x}{:02x}{:02x}",
profile_idc, constraint_flags, level_idc
);
codecs.push(avc1_code);
}
}
HlsVariantStream::Audio { .. } => {
// Standard AAC-LC codec string
codecs.push("mp4a.40.2".to_string());
}
_ => {}
}
}
return Some(format!(
"avc1.{}",
hex::encode([*id_ptr.add(0), *id_ptr.add(1), *id_ptr.add(2)])
));
}
}
if codecs.is_empty() {
None
} else {
Some(codecs.join(","))
}
}
pub fn to_playlist_variant(&self) -> m3u8_rs::VariantStream {
@ -613,9 +600,9 @@ impl HlsVariant {
m3u8_rs::VariantStream {
is_i_frame: false,
uri: format!("{}/live.m3u8", self.name),
bandwidth: 0,
average_bandwidth: Some((*codec_par).bit_rate as u64),
codecs: self.to_codec_attr(av_stream),
bandwidth: (*codec_par).bit_rate as u64,
average_bandwidth: None,
codecs: self.to_codec_attr(),
resolution: Some(m3u8_rs::Resolution {
width: (*codec_par).width as _,
height: (*codec_par).height as _,
@ -631,82 +618,3 @@ impl HlsVariant {
}
}
}
/// Top-level HLS egress: owns one [HlsVariant] per rendition plus the
/// directory the master playlist and variant subdirectories live in.
pub struct HlsMuxer {
    /// Root output directory for this stream (contains the master live.m3u8)
    pub out_dir: PathBuf,
    /// One variant (rendition) per encoder group
    pub variants: Vec<HlsVariant>,
}
impl HlsMuxer {
    /// Build a muxer rooted at `out_dir/<id>`, creating one [HlsVariant]
    /// per encoder group and writing the initial master playlist.
    ///
    /// # Errors
    /// Fails if the output directory cannot be created, a variant fails to
    /// initialize, or the master playlist cannot be written.
    pub fn new<'a>(
        id: &Uuid,
        out_dir: &str,
        segment_length: f32,
        encoders: impl Iterator<Item = (&'a VariantStream, &'a Encoder)>,
        segment_type: SegmentType,
    ) -> Result<Self> {
        let base = PathBuf::from(out_dir).join(id.to_string());
        if !base.exists() {
            std::fs::create_dir_all(&base)?;
        }
        let mut vars = Vec::new();
        // Group encoders by variant group id; each group becomes one HLS
        // rendition. The sort is required because chunk_by only groups
        // consecutive items.
        for (k, group) in &encoders
            .sorted_by(|a, b| a.0.group_id().cmp(&b.0.group_id()))
            .chunk_by(|a| a.0.group_id())
        {
            let var = HlsVariant::new(
                base.to_str().unwrap(),
                segment_length,
                k,
                group,
                segment_type,
            )?;
            vars.push(var);
        }
        let ret = Self {
            out_dir: base,
            variants: vars,
        };
        // Master playlist is written once up front; variants maintain their
        // own media playlists as segments are produced.
        ret.write_master_playlist()?;
        Ok(ret)
    }

    /// Write the master playlist (live.m3u8) referencing every variant.
    fn write_master_playlist(&self) -> Result<()> {
        let mut pl = m3u8_rs::MasterPlaylist::default();
        pl.version = Some(3);
        pl.variants = self
            .variants
            .iter()
            .map(|v| v.to_playlist_variant())
            .collect();
        let mut f_out = File::create(self.out_dir.join("live.m3u8"))?;
        pl.write_to(&mut f_out)?;
        Ok(())
    }

    /// Mux an encoded packet from [Encoder]
    ///
    /// Routes the packet to whichever variant owns the stream identified by
    /// `variant`, rewriting the packet's stream index for that variant's
    /// muxer. Packets for variants this muxer does not handle are ignored.
    ///
    /// # Safety
    /// `pkt` must be a valid, writable AVPacket pointer.
    pub unsafe fn mux_packet(
        &mut self,
        pkt: *mut AVPacket,
        variant: &Uuid,
    ) -> Result<EgressResult> {
        for var in self.variants.iter_mut() {
            if let Some(vs) = var.streams.iter().find(|s| s.id() == variant) {
                // very important for muxer to know which stream this pkt belongs to
                (*pkt).stream_index = *vs.index() as _;
                return var.process_packet(pkt);
            }
        }
        // This HLS muxer doesn't handle this variant, return None instead of failing
        // This can happen when multiple egress handlers are configured with different variant sets
        trace!(
            "HLS muxer received packet for variant {} which it doesn't handle",
            variant
        );
        Ok(EgressResult::None)
    }
}

View File

@ -1,2 +1,3 @@
mod hls;
pub use hls::*;

View File

@ -4,6 +4,7 @@ use std::mem::transmute;
use std::ops::Sub;
use std::path::{Path, PathBuf};
use std::ptr;
use std::sync::mpsc::Receiver;
use std::sync::Arc;
use std::time::{Duration, Instant};
@ -27,7 +28,7 @@ use ffmpeg_rs_raw::ffmpeg_sys_the_third::{
use ffmpeg_rs_raw::{
cstr, get_frame_from_hw, AudioFifo, Decoder, Demuxer, Encoder, Resample, Scaler, StreamType,
};
use log::{error, info, warn};
use log::{debug, error, info, warn};
use tokio::runtime::Handle;
use uuid::Uuid;
@ -46,6 +47,8 @@ pub enum RunnerState {
start_time: Instant,
gen: FrameGenerator,
},
/// Pipeline should shut down and do any cleanup
Shutdown,
}
impl RunnerState {
@ -58,11 +61,17 @@ impl RunnerState {
pub fn idle_duration(&self) -> Option<Duration> {
match self {
RunnerState::Idle { start_time, .. } => Some(start_time.elapsed()),
RunnerState::Normal => None,
_ => None,
}
}
}
#[derive(Debug, Clone)]
pub enum PipelineCommand {
/// External process requested clean shutdown
Shutdown,
}
/// Pipeline runner is the main entry process for stream transcoding
///
/// Each client connection spawns a new [PipelineRunner] and it should be run in its own thread
@ -108,7 +117,7 @@ pub struct PipelineRunner {
frame_ctr: u64,
/// Output directory where all stream data is saved
out_dir: String,
out_dir: PathBuf,
/// Thumbnail generation interval (0 = disabled)
thumb_interval: u64,
@ -127,6 +136,9 @@ pub struct PipelineRunner {
/// Last audio PTS for continuity in idle mode
last_audio_pts: i64,
/// Command receiver for external process control
cmd_channel: Option<Receiver<PipelineCommand>>,
}
unsafe impl Send for PipelineRunner {}
@ -139,10 +151,11 @@ impl PipelineRunner {
connection: ConnectionInfo,
recv: Box<dyn Read + Send>,
url: Option<String>,
command: Option<Receiver<PipelineCommand>>,
) -> Result<Self> {
Ok(Self {
handle,
out_dir,
out_dir: PathBuf::from(out_dir).join(connection.id.to_string()),
overseer,
connection,
config: Default::default(),
@ -162,6 +175,7 @@ impl PipelineRunner {
max_consecutive_failures: DEFAULT_MAX_CONSECUTIVE_FAILURES,
last_video_pts: 0,
last_audio_pts: 0,
cmd_channel: command,
})
}
@ -208,9 +222,7 @@ impl PipelineRunner {
unsafe fn generate_thumb_from_frame(&mut self, frame: *mut AVFrame) -> Result<()> {
if self.thumb_interval > 0 && (self.frame_ctr % self.thumb_interval) == 0 {
let frame = av_frame_clone(frame).addr();
let dst_pic = PathBuf::from(&self.out_dir)
.join(self.connection.id.to_string())
.join("thumb.webp");
let dst_pic = self.out_dir.join("thumb.webp");
std::thread::spawn(move || unsafe {
let mut frame = frame as *mut AVFrame; //TODO: danger??
let thumb_start = Instant::now();
@ -528,6 +540,13 @@ impl PipelineRunner {
/// EOF, cleanup
unsafe fn flush(&mut self) -> Result<()> {
if self.config.is_some() {
self.handle.block_on(async {
if let Err(e) = self.overseer.on_end(&self.connection.id).await {
error!("Failed to end stream: {e}");
}
});
}
for (var, enc) in &mut self.encoders {
for mut pkt in enc.encode_frame(ptr::null_mut())? {
for eg in self.egress.iter_mut() {
@ -539,14 +558,6 @@ impl PipelineRunner {
for eg in self.egress.iter_mut() {
eg.reset()?;
}
if self.config.is_some() {
self.handle.block_on(async {
if let Err(e) = self.overseer.on_end(&self.connection.id).await {
error!("Failed to end stream: {e}");
}
});
}
Ok(())
}
@ -556,16 +567,12 @@ impl PipelineRunner {
match self.once() {
Ok(c) => {
if !c {
if let Err(e) = self.flush() {
error!("Pipeline flush failed: {}", e);
}
// let drop handle flush
break;
}
}
Err(e) => {
if let Err(e) = self.flush() {
error!("Pipeline flush failed: {}", e);
}
// let drop handle flush
error!("Pipeline run failed: {}", e);
break;
}
@ -574,7 +581,25 @@ impl PipelineRunner {
}
}
fn handle_command(&mut self) -> Result<Option<bool>> {
if let Some(cmd) = &self.cmd_channel {
while let Ok(c) = cmd.try_recv() {
match c {
PipelineCommand::Shutdown => {
self.state = RunnerState::Shutdown;
return Ok(Some(true));
}
_ => warn!("Unexpected command: {:?}", c),
}
}
}
Ok(None)
}
unsafe fn once(&mut self) -> Result<bool> {
if let Some(r) = self.handle_command()? {
return Ok(r);
}
self.setup()?;
let config = if let Some(config) = &self.config {
@ -587,6 +612,7 @@ impl PipelineRunner {
let results = match &mut self.state {
RunnerState::Normal => self.process_normal_mode(&config)?,
RunnerState::Idle { .. } => self.process_idle_mode(&config)?,
_ => return Ok(false), // skip once, nothing to do
};
// egress results - process async operations without blocking if possible
@ -609,7 +635,7 @@ impl PipelineRunner {
let elapsed = Instant::now().sub(self.fps_counter_start).as_secs_f32();
if elapsed >= 2f32 {
let n_frames = self.frame_ctr - self.fps_last_frame_ctr;
info!("Average fps: {:.2}", n_frames as f32 / elapsed);
debug!("Average fps: {:.2}", n_frames as f32 / elapsed);
self.fps_counter_start = Instant::now();
self.fps_last_frame_ctr = self.frame_ctr;
}
@ -702,17 +728,11 @@ impl PipelineRunner {
});
match e {
EgressType::HLS(_) => {
let hls = HlsEgress::new(
&self.connection.id,
&self.out_dir,
6.0, // TODO: configure segment length
encoders,
SegmentType::MPEGTS,
)?;
let hls = HlsEgress::new(self.out_dir.clone(), encoders, SegmentType::MPEGTS)?;
self.egress.push(Box::new(hls));
}
EgressType::Recorder(_) => {
let rec = RecorderEgress::new(&self.connection.id, &self.out_dir, encoders)?;
let rec = RecorderEgress::new(self.out_dir.clone(), encoders)?;
self.egress.push(Box::new(rec));
}
_ => warn!("{} is not implemented", e),
@ -740,7 +760,7 @@ impl Drop for PipelineRunner {
info!(
"PipelineRunner cleaned up resources for stream: {}",
self.connection.key
self.connection.id
);
}
}

View File

@ -0,0 +1,930 @@
use crate::generator::FrameGenerator;
use crate::mux::{HlsMuxer, SegmentType};
use crate::variant::audio::AudioVariant;
use crate::variant::mapping::VariantMapping;
use crate::variant::video::VideoVariant;
use crate::variant::{StreamMapping, VariantStream};
use anyhow::{Context, Result};
use ffmpeg_rs_raw::ffmpeg_sys_the_third::{
av_q2d, AVMediaType::AVMEDIA_TYPE_AUDIO, AVMediaType::AVMEDIA_TYPE_VIDEO,
AVPixelFormat::AV_PIX_FMT_YUV420P, AVRational, AVSampleFormat::AV_SAMPLE_FMT_FLTP,
AV_NOPTS_VALUE, AV_PROFILE_H264_MAIN,
};
use ffmpeg_rs_raw::{Demuxer, Encoder};
use m3u8_rs::{parse_media_playlist, MediaSegmentType};
use std::collections::HashMap;
use std::fs;
use std::io::Read;
use std::path::{Path, PathBuf};
use std::time::{Duration, Instant};
use uuid::Uuid;
/// Timing measurements for a single HLS segment (full or LL-HLS partial).
#[derive(Debug, Clone)]
pub struct HlsTimingResult {
    /// Duration advertised in the playlist (EXTINF / PART duration)
    pub playlist_duration: f32,
    /// Duration measured by analyzing the segment bytes
    pub actual_duration: f64,
    /// Measured duration of the video track
    pub video_duration: f64,
    /// Measured duration of the audio track
    pub audio_duration: f64,
    /// actual_duration - playlist_duration, in seconds
    pub difference: f64,
    /// Segment URI as listed in the playlist
    pub segment_name: String,
    /// True for LL-HLS partial segments
    pub is_partial: bool,
    /// EXT-X-PART INDEPENDENT flag (always false for full segments)
    pub independent: bool,
}
/// Aggregate outcome of an HLS timing analysis run over one playlist.
#[derive(Debug)]
pub struct HlsTimingTestResult {
    /// Total number of segments analyzed (full + partial)
    pub total_segments: usize,
    /// Number of full segments analyzed
    pub full_segments: usize,
    /// Number of LL-HLS partial segments analyzed
    pub partial_segments: usize,
    /// Number of partials flagged INDEPENDENT
    pub independent_partials: usize,
    /// Sum of durations advertised in the playlist
    pub total_playlist_duration: f32,
    /// Sum of measured segment durations
    pub total_actual_duration: f64,
    /// total_actual_duration - total_playlist_duration
    pub total_difference: f64,
    /// Mean per-segment difference (seconds)
    pub average_difference: f64,
    /// Smallest per-segment difference observed
    pub min_difference: f64,
    /// Largest per-segment difference observed
    pub max_difference: f64,
    /// Segments whose difference exceeded the problematic threshold
    pub problematic_segments: Vec<HlsTimingResult>,
    /// Per-segment results, in playlist order
    pub segments: Vec<HlsTimingResult>,
    /// Wall-clock time the analysis took
    pub test_duration: Duration,
    /// Overall pass/fail (set by the tester against its thresholds)
    pub success: bool,
    /// Populated when analysis itself failed (parse error, missing files, ...)
    pub error_message: Option<String>,
}
impl HlsTimingTestResult {
    /// Check if the HLS timing test passed based on thresholds
    ///
    /// Requires the run itself to have succeeded, the mean difference to be
    /// within `max_avg_diff`, and every problematic segment to be within
    /// `max_individual_diff`.
    pub fn passes(&self, max_avg_diff: f64, max_individual_diff: f64) -> bool {
        if !self.success {
            return false;
        }
        if self.average_difference.abs() > max_avg_diff {
            return false;
        }
        !self
            .problematic_segments
            .iter()
            .any(|s| s.difference.abs() > max_individual_diff)
    }

    /// Get a summary of the test results
    pub fn summary(&self) -> String {
        if self.success {
            format!(
                "PASSED: {} segments, avg diff: {:.3}s, {} problematic",
                self.total_segments,
                self.average_difference,
                self.problematic_segments.len()
            )
        } else {
            format!(
                "FAILED: {}",
                self.error_message.as_deref().unwrap_or("Unknown error")
            )
        }
    }
}
/// Validates HLS output by comparing advertised playlist durations against
/// durations measured from the actual segment data.
pub struct HlsTimingTester {
    /// Maximum allowed mean per-segment difference (seconds)
    max_avg_difference: f64,
    /// Maximum allowed difference for any single problematic segment (seconds)
    max_individual_difference: f64,
    /// Difference above which a segment is recorded as problematic (seconds)
    problematic_threshold: f64,
}
impl Default for HlsTimingTester {
    /// Default thresholds for timing validation.
    fn default() -> Self {
        Self {
            max_avg_difference: 0.1, // 100ms average difference
            max_individual_difference: 0.5, // 500ms individual difference
            problematic_threshold: 0.2, // 200ms considered problematic
        }
    }
}
impl HlsTimingTester {
/// Construct a tester with explicit thresholds (all in seconds).
pub fn new(max_avg_diff: f64, max_individual_diff: f64, problematic_threshold: f64) -> Self {
    Self {
        max_avg_difference: max_avg_diff,
        max_individual_difference: max_individual_diff,
        problematic_threshold,
    }
}
/// Generate and test HLS stream with test pattern
pub fn test_generated_stream(
&self,
output_dir: &Path,
duration_seconds: f32,
segment_type: SegmentType,
) -> Result<HlsTimingTestResult> {
let start_time = Instant::now();
// Generate test stream
let stream_id = Uuid::new_v4();
let out_dir = output_dir.join(stream_id.to_string());
let (_muxer, hls_dir) =
self.generate_test_stream(&out_dir, duration_seconds, segment_type)?;
// Test the generated stream
match self.test_stream_timing_internal(&hls_dir) {
Ok(mut result) => {
result.test_duration = start_time.elapsed();
result.success =
result.passes(self.max_avg_difference, self.max_individual_difference);
Ok(result)
}
Err(e) => Ok(HlsTimingTestResult {
total_segments: 0,
full_segments: 0,
partial_segments: 0,
independent_partials: 0,
total_playlist_duration: 0.0,
total_actual_duration: 0.0,
total_difference: 0.0,
average_difference: 0.0,
min_difference: 0.0,
max_difference: 0.0,
problematic_segments: Vec::new(),
segments: Vec::new(),
test_duration: start_time.elapsed(),
success: false,
error_message: Some(e.to_string()),
}),
}
}
/// Generate test HLS stream with test pattern
/// Generate a synthetic HLS test stream.
///
/// Encodes `duration_seconds` of generated video (libx264, 1280x720 @ 30fps)
/// and audio (AAC mono @ 44.1kHz) through an [HlsMuxer] rooted at
/// `output_dir`, so the resulting playlist and segments can be checked by
/// the timing analyzer.
///
/// Returns the muxer together with the first variant's directory
/// ("stream_0"). NOTE(review): the muxer is returned (not dropped) —
/// presumably so its output directory is not cleaned up on drop; confirm
/// against HlsMuxer's Drop impl.
fn generate_test_stream(
    &self,
    output_dir: &Path,
    duration_seconds: f32,
    segment_type: SegmentType,
) -> Result<(HlsMuxer, PathBuf)> {
    const VIDEO_FPS: f32 = 30.0;
    const VIDEO_WIDTH: u16 = 1280;
    const VIDEO_HEIGHT: u16 = 720;
    const SAMPLE_RATE: u32 = 44100;
    // Create video encoder
    let mut video_encoder = unsafe {
        Encoder::new_with_name("libx264")?
            .with_stream_index(0)
            .with_framerate(VIDEO_FPS)?
            .with_bitrate(1_000_000)
            .with_pix_fmt(AV_PIX_FMT_YUV420P)
            .with_width(VIDEO_WIDTH as _)
            .with_height(VIDEO_HEIGHT as _)
            .with_level(51)
            .with_profile(AV_PROFILE_H264_MAIN)
            .open(None)?
    };
    // Create audio encoder
    let mut audio_encoder = unsafe {
        Encoder::new_with_name("aac")?
            .with_stream_index(1)
            .with_default_channel_layout(1)
            .with_bitrate(128_000)
            .with_sample_format(AV_SAMPLE_FMT_FLTP)
            .with_sample_rate(SAMPLE_RATE as _)?
            .open(None)?
    };
    // Create variant streams describing the two encoders above; both share
    // group_id 0 so they end up in the same HLS variant.
    let video_stream = VideoVariant {
        mapping: VariantMapping {
            id: Uuid::new_v4(),
            src_index: 0,
            dst_index: 0,
            group_id: 0,
        },
        width: VIDEO_WIDTH,
        height: VIDEO_HEIGHT,
        fps: VIDEO_FPS,
        bitrate: 1_000_000,
        codec: "libx264".to_string(),
        profile: AV_PROFILE_H264_MAIN as usize,
        level: 51,
        keyframe_interval: 60,
        pixel_format: AV_PIX_FMT_YUV420P as u32,
    };
    let audio_stream = AudioVariant {
        mapping: VariantMapping {
            id: Uuid::new_v4(),
            src_index: 1,
            dst_index: 1,
            group_id: 0,
        },
        bitrate: 128_000,
        codec: "aac".to_string(),
        channels: 1,
        sample_rate: SAMPLE_RATE as usize,
        sample_fmt: "fltp".to_string(),
    };
    let video_variant = VariantStream::Video(video_stream.clone());
    let audio_variant = VariantStream::Audio(audio_stream.clone());
    let variants = vec![
        (&video_variant, &video_encoder),
        (&audio_variant, &audio_encoder),
    ];
    // Create HLS muxer
    let mut hls_muxer =
        HlsMuxer::new(output_dir.to_path_buf(), variants.into_iter(), segment_type)?;
    // Create frame generator; audio frame size must match the AAC encoder's
    // configured frame_size.
    let frame_size = unsafe { (*audio_encoder.codec_context()).frame_size as _ };
    let mut frame_gen = FrameGenerator::new(
        VIDEO_FPS,
        VIDEO_WIDTH,
        VIDEO_HEIGHT,
        AV_PIX_FMT_YUV420P,
        SAMPLE_RATE,
        frame_size,
        1,
        AVRational {
            num: 1,
            den: VIDEO_FPS as i32,
        },
        AVRational {
            num: 1,
            den: SAMPLE_RATE as i32,
        },
    )?;
    // Run as fast as possible rather than pacing to wall-clock time
    frame_gen.set_realtime(false);
    // Generate frames for the specified duration
    let total_video_frames = (duration_seconds * VIDEO_FPS) as u64;
    let mut video_frames_generated = 0;
    while video_frames_generated < total_video_frames {
        unsafe {
            frame_gen.begin()?;
            // Burn frame counter and timestamp into the picture so segments
            // are visually identifiable when debugging.
            frame_gen.write_text(
                &format!("Video Frame: {}", video_frames_generated),
                40.0,
                50.0,
                50.0,
            )?;
            frame_gen.write_text(
                &format!("Time: {:.1}s", video_frames_generated as f32 / VIDEO_FPS),
                40.0,
                50.0,
                100.0,
            )?;
            let mut frame = frame_gen.next()?;
            if frame.is_null() {
                log::warn!("FrameGenerator returned null frame unexpectedly");
                break;
            }
            // Determine if this is audio or video frame and encode accordingly
            // (a non-zero sample_rate marks an audio frame)
            if (*frame).sample_rate > 0 {
                // Audio frame - don't increment video counter
                log::debug!("Generated audio frame, PTS: {}", (*frame).pts);
                for mut pkt in audio_encoder.encode_frame(frame)? {
                    let result = hls_muxer.mux_packet(pkt, &audio_stream.id())?;
                    if let crate::egress::EgressResult::Segments {
                        created,
                        deleted: _,
                    } = result
                    {
                        for segment in created {
                            log::debug!("Created audio segment: {:?}", segment.path);
                        }
                    }
                    ffmpeg_rs_raw::ffmpeg_sys_the_third::av_packet_free(&mut pkt);
                }
            } else {
                // Video frame - increment video counter
                log::debug!(
                    "Generated video frame {}, PTS: {}",
                    video_frames_generated,
                    (*frame).pts
                );
                for mut pkt in video_encoder.encode_frame(frame)? {
                    let result = hls_muxer.mux_packet(pkt, &video_stream.id())?;
                    if let crate::egress::EgressResult::Segments {
                        created,
                        deleted: _,
                    } = result
                    {
                        for segment in created {
                            log::debug!("Created video segment: {:?}", segment.path);
                        }
                    }
                    ffmpeg_rs_raw::ffmpeg_sys_the_third::av_packet_free(&mut pkt);
                }
                video_frames_generated += 1;
            }
            ffmpeg_rs_raw::ffmpeg_sys_the_third::av_frame_free(&mut frame);
        }
    }
    // Flush encoders to ensure all packets are written
    unsafe {
        // Flush video encoder
        for mut pkt in video_encoder.encode_frame(std::ptr::null_mut())? {
            hls_muxer.mux_packet(pkt, &video_stream.id())?;
            ffmpeg_rs_raw::ffmpeg_sys_the_third::av_packet_free(&mut pkt);
        }
        // Flush audio encoder
        for mut pkt in audio_encoder.encode_frame(std::ptr::null_mut())? {
            hls_muxer.mux_packet(pkt, &audio_stream.id())?;
            ffmpeg_rs_raw::ffmpeg_sys_the_third::av_packet_free(&mut pkt);
        }
    }
    // NOTE(review): message ends with a dangling "at" — a path argument
    // appears to be missing from the format string; confirm intent.
    log::info!(
        "Generated {} video frames ({:.1}s) of test HLS stream at",
        video_frames_generated,
        video_frames_generated as f32 / VIDEO_FPS
    );
    Ok((hls_muxer, output_dir.join("stream_0")))
}
/// Test HLS timing for a specific stream directory
/// Test HLS timing for a specific stream directory
///
/// Never returns an error: analysis failures are reported through the
/// `success` / `error_message` fields of the result.
pub fn test_stream_timing(&self, hls_dir: &Path) -> HlsTimingTestResult {
    let started = Instant::now();
    let mut report = self
        .test_stream_timing_internal(hls_dir)
        .map(|mut r| {
            // Grade the successful analysis against our thresholds
            r.success = r.passes(self.max_avg_difference, self.max_individual_difference);
            r
        })
        .unwrap_or_else(|err| HlsTimingTestResult {
            total_segments: 0,
            full_segments: 0,
            partial_segments: 0,
            independent_partials: 0,
            total_playlist_duration: 0.0,
            total_actual_duration: 0.0,
            total_difference: 0.0,
            average_difference: 0.0,
            min_difference: 0.0,
            max_difference: 0.0,
            problematic_segments: Vec::new(),
            segments: Vec::new(),
            test_duration: started.elapsed(),
            success: false,
            error_message: Some(err.to_string()),
        });
    report.test_duration = started.elapsed();
    report
}
fn test_stream_timing_internal(&self, hls_dir: &Path) -> Result<HlsTimingTestResult> {
let playlist_path = hls_dir.join("live.m3u8");
if !playlist_path.exists() {
return Err(anyhow::anyhow!(
"Playlist file does not exist: {:?}",
playlist_path
));
}
// Parse the playlist
let playlist_content =
fs::read_to_string(&playlist_path).context("Failed to read playlist file")?;
let (_, playlist) = parse_media_playlist(playlist_content.as_bytes())
.map_err(|e| anyhow::anyhow!("Failed to parse playlist: {:?}", e))?;
let mut segments = Vec::new();
let mut total_playlist_duration = 0.0f32;
let mut total_actual_duration = 0.0f64;
// Analyze each segment
for segment_type in &playlist.segments {
match segment_type {
MediaSegmentType::Full(segment) => {
let segment_path = hls_dir.join(&segment.uri);
if !segment_path.exists() {
continue; // Skip missing segments
}
let durations = self.analyze_segment(&segment_path)?;
let actual_duration = durations.total_duration;
let video_duration = durations.video_duration;
let audio_duration = durations.audio_duration;
let playlist_duration = segment.duration;
let difference = actual_duration - playlist_duration as f64;
let result = HlsTimingResult {
playlist_duration,
actual_duration,
video_duration,
audio_duration,
difference,
segment_name: segment.uri.clone(),
is_partial: false,
independent: false,
};
segments.push(result);
total_playlist_duration += playlist_duration;
total_actual_duration += actual_duration;
}
MediaSegmentType::Partial(partial) => {
let segment_path = hls_dir.join(&partial.uri);
if !segment_path.exists() {
continue; // Skip missing segments
}
let durations = if let Some(byte_range) = &partial.byte_range {
self.analyze_partial_segment(
&segment_path,
byte_range.length,
byte_range.offset,
)?
} else {
self.analyze_segment(&segment_path)?
};
let actual_duration = durations.total_duration;
let video_duration = durations.video_duration;
let audio_duration = durations.audio_duration;
let playlist_duration = partial.duration as f32;
let difference = actual_duration - playlist_duration as f64;
let result = HlsTimingResult {
playlist_duration,
actual_duration,
video_duration,
audio_duration,
difference,
segment_name: partial.uri.clone(),
is_partial: true,
independent: partial.independent,
};
segments.push(result);
total_playlist_duration += playlist_duration;
total_actual_duration += actual_duration;
}
MediaSegmentType::PreloadHint(_) => {
// Skip preload hints
continue;
}
}
}
// Calculate statistics
let full_segments = segments.iter().filter(|s| !s.is_partial).count();
let partial_segments = segments.iter().filter(|s| s.is_partial).count();
let independent_partials = segments
.iter()
.filter(|s| s.is_partial && s.independent)
.count();
let total_difference = total_actual_duration - total_playlist_duration as f64;
let average_difference = if !segments.is_empty() {
total_difference / segments.len() as f64
} else {
0.0
};
let differences: Vec<f64> = segments.iter().map(|s| s.difference).collect();
let min_difference = differences.iter().fold(f64::INFINITY, |a, &b| a.min(b));
let max_difference = differences.iter().fold(f64::NEG_INFINITY, |a, &b| a.max(b));
// Find problematic segments
let problematic_segments: Vec<HlsTimingResult> = segments
.iter()
.filter(|s| s.difference.abs() > self.problematic_threshold)
.cloned()
.collect();
Ok(HlsTimingTestResult {
total_segments: segments.len(),
full_segments,
partial_segments,
independent_partials,
total_playlist_duration,
total_actual_duration,
total_difference,
average_difference,
min_difference,
max_difference,
problematic_segments,
segments,
test_duration: Duration::from_secs(0), // Will be set by caller
success: true, // Will be determined by caller
error_message: None,
})
}
/// Test multiple HLS streams concurrently
pub async fn test_multiple_streams(
&self,
hls_dirs: Vec<PathBuf>,
) -> HashMap<PathBuf, HlsTimingTestResult> {
let mut results = HashMap::new();
// Run tests concurrently
let futures: Vec<_> = hls_dirs
.into_iter()
.map(|dir| {
let tester = HlsTimingTester::new(
self.max_avg_difference,
self.max_individual_difference,
self.problematic_threshold,
);
let dir_clone = dir.clone();
async move {
let result =
tokio::task::spawn_blocking(move || tester.test_stream_timing(&dir_clone))
.await
.unwrap_or_else(|_| HlsTimingTestResult {
total_segments: 0,
full_segments: 0,
partial_segments: 0,
independent_partials: 0,
total_playlist_duration: 0.0,
total_actual_duration: 0.0,
total_difference: 0.0,
average_difference: 0.0,
min_difference: 0.0,
max_difference: 0.0,
problematic_segments: Vec::new(),
segments: Vec::new(),
test_duration: Duration::from_secs(0),
success: false,
error_message: Some("Task panicked".to_string()),
});
(dir, result)
}
})
.collect();
let resolved_futures = futures::future::join_all(futures).await;
for (dir, result) in resolved_futures {
results.insert(dir, result);
}
results
}
fn analyze_segment(&self, path: &Path) -> Result<SegmentDurations> {
let file = fs::File::open(path)
.with_context(|| format!("Failed to open file: {}", path.display()))?;
self.analyze_segment_with_reader(Box::new(file))
}
fn analyze_partial_segment(
&self,
path: &Path,
length: u64,
offset: Option<u64>,
) -> Result<SegmentDurations> {
let reader = ByteRangeReader::new(path, length, offset)?;
self.analyze_segment_with_reader(Box::new(reader))
}
    /// Demux everything available from `reader` and measure per-stream
    /// durations. Works for whole files and for byte-range slices (LL-HLS
    /// partials) alike, since only a `Read` implementation is required.
    fn analyze_segment_with_reader(&self, reader: Box<dyn Read>) -> Result<SegmentDurations> {
        let mut demuxer = Demuxer::new_custom_io(reader, None)?;
        unsafe {
            demuxer.probe_input()?;
        }
        // First/last PTS and the last packet's duration per stream; combined
        // below to compute each stream's real duration.
        let mut video_start_pts = AV_NOPTS_VALUE;
        let mut video_end_pts = AV_NOPTS_VALUE;
        let mut audio_start_pts = AV_NOPTS_VALUE;
        let mut audio_end_pts = AV_NOPTS_VALUE;
        let mut video_last_duration = 0i64;
        let mut audio_last_duration = 0i64;
        let mut video_stream_idx: Option<usize> = None;
        let mut audio_stream_idx: Option<usize> = None;
        // Read all packets and track timing
        loop {
            let packet_result = unsafe { demuxer.get_packet() };
            match packet_result {
                Ok((pkt, stream)) => {
                    if pkt.is_null() {
                        // Null packet signals end of stream
                        break;
                    }
                    // get_packet yields raw AVPacket/AVStream pointers, so all
                    // field access must happen inside an unsafe block.
                    unsafe {
                        let codec_type = (*(*stream).codecpar).codec_type;
                        let pts = (*pkt).pts;
                        let duration = (*pkt).duration;
                        let current_stream_idx = (*stream).index as usize;
                        match codec_type {
                            AVMEDIA_TYPE_VIDEO => {
                                if video_stream_idx.is_none() {
                                    video_stream_idx = Some(current_stream_idx);
                                }
                                // Only packets with a valid PTS contribute to timing
                                if pts != AV_NOPTS_VALUE {
                                    if video_start_pts == AV_NOPTS_VALUE {
                                        video_start_pts = pts;
                                    }
                                    video_end_pts = pts;
                                    video_last_duration = duration;
                                }
                            }
                            AVMEDIA_TYPE_AUDIO => {
                                if audio_stream_idx.is_none() {
                                    audio_stream_idx = Some(current_stream_idx);
                                }
                                if pts != AV_NOPTS_VALUE {
                                    if audio_start_pts == AV_NOPTS_VALUE {
                                        audio_start_pts = pts;
                                    }
                                    audio_end_pts = pts;
                                    audio_last_duration = duration;
                                }
                            }
                            _ => {}
                        }
                    }
                }
                // NOTE(review): demux errors are treated the same as EOF —
                // presumably because the byte-range reader ends with a hard
                // EOF mid-container; confirm real errors shouldn't surface.
                Err(_) => break,
            }
        }
        // Calculate durations: (end PTS - start PTS) only spans up to the
        // START of the final packet, so that packet's own duration is added.
        let video_duration = if let Some(stream_idx) = video_stream_idx {
            if video_start_pts != AV_NOPTS_VALUE && video_end_pts != AV_NOPTS_VALUE {
                unsafe {
                    let stream = demuxer.get_stream(stream_idx)?;
                    let time_base = (*stream).time_base;
                    let pts_duration = (video_end_pts - video_start_pts) as f64 * av_q2d(time_base);
                    let last_pkt_duration = video_last_duration as f64 * av_q2d(time_base);
                    pts_duration + last_pkt_duration
                }
            } else {
                0.0
            }
        } else {
            0.0
        };
        let audio_duration = if let Some(stream_idx) = audio_stream_idx {
            if audio_start_pts != AV_NOPTS_VALUE && audio_end_pts != AV_NOPTS_VALUE {
                unsafe {
                    let stream = demuxer.get_stream(stream_idx)?;
                    let time_base = (*stream).time_base;
                    let pts_duration = (audio_end_pts - audio_start_pts) as f64 * av_q2d(time_base);
                    let last_pkt_duration = audio_last_duration as f64 * av_q2d(time_base);
                    pts_duration + last_pkt_duration
                }
            } else {
                0.0
            }
        } else {
            0.0
        };
        // A segment's effective duration is the longer of the two streams.
        let total_duration = video_duration.max(audio_duration);
        Ok(SegmentDurations {
            total_duration,
            video_duration,
            audio_duration,
        })
    }
}
/// Durations measured by demuxing a single segment (or byte-range slice).
#[derive(Debug)]
struct SegmentDurations {
    // Longest of the per-stream durations; used as the segment's duration.
    total_duration: f64,
    // Video stream duration in seconds (0.0 when no valid video PTS seen).
    video_duration: f64,
    // Audio stream duration in seconds (0.0 when no valid audio PTS seen).
    audio_duration: f64,
}
/// Custom IO reader for byte range access
struct ByteRangeReader {
    // Underlying file; `new` seeks it to `start_offset` before any reads.
    file: fs::File,
    // Absolute offset of the range start. Reads rely on the initial seek
    // rather than this field afterwards.
    start_offset: u64,
    // Number of bytes exposed through `Read`.
    length: u64,
    // Bytes already handed out, relative to the range start.
    current_pos: u64,
}
impl ByteRangeReader {
    /// Open `path` and expose at most `length` bytes starting at `offset`
    /// (defaulting to the beginning of the file) through `Read`.
    fn new(path: &Path, length: u64, offset: Option<u64>) -> Result<Self> {
        use std::io::{Seek, SeekFrom};
        let start_offset = offset.unwrap_or(0);
        let mut file = fs::File::open(path)
            .with_context(|| format!("Failed to open file: {}", path.display()))?;
        // Position the cursor at the beginning of the requested range.
        file.seek(SeekFrom::Start(start_offset))
            .with_context(|| format!("Failed to seek to offset {}", start_offset))?;
        Ok(Self {
            file,
            start_offset,
            length,
            current_pos: 0,
        })
    }
}
impl Read for ByteRangeReader {
    /// Delegate to the underlying file but never hand out more than `length`
    /// bytes in total; past that point the reader reports EOF.
    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
        let remaining = self.length - self.current_pos;
        if remaining == 0 {
            return Ok(0);
        }
        // Cap the read so it cannot cross the end of the byte range.
        let capped = remaining.min(buf.len() as u64) as usize;
        let n = self.file.read(&mut buf[..capped])?;
        self.current_pos += n as u64;
        Ok(n)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::tempdir;
    // Default thresholds should match the documented tester defaults.
    #[test]
    fn test_timing_tester_creation() {
        let tester = HlsTimingTester::default();
        assert_eq!(tester.max_avg_difference, 0.1);
        assert_eq!(tester.max_individual_difference, 0.5);
        assert_eq!(tester.problematic_threshold, 0.2);
    }
    // passes() should compare the stats against the supplied thresholds.
    #[test]
    fn test_timing_result_passes() {
        let result = HlsTimingTestResult {
            total_segments: 10,
            full_segments: 8,
            partial_segments: 2,
            independent_partials: 1,
            total_playlist_duration: 20.0,
            total_actual_duration: 20.05,
            total_difference: 0.05,
            average_difference: 0.005,
            min_difference: -0.01,
            max_difference: 0.02,
            problematic_segments: Vec::new(),
            segments: Vec::new(),
            test_duration: Duration::from_millis(100),
            success: true,
            error_message: None,
        };
        assert!(result.passes(0.1, 0.5));
        // Tightening the average threshold below 0.005 must fail the result.
        assert!(!result.passes(0.001, 0.5));
    }
    // A directory without live.m3u8 yields a failed result, not a panic.
    #[test]
    fn test_missing_playlist() {
        let temp_dir = tempdir().unwrap();
        let tester = HlsTimingTester::default();
        let result = tester.test_stream_timing(temp_dir.path());
        assert!(!result.success);
        assert!(result.error_message.is_some());
        assert!(result.error_message.unwrap().contains("does not exist"));
    }
    // End-to-end: generate a short MPEG-TS stream and verify its timing.
    #[test]
    fn test_generated_hls_stream_mpegts() {
        env_logger::try_init().ok();
        let temp_dir = tempdir().unwrap();
        let tester = HlsTimingTester::new(0.2, 1.0, 0.5); // More lenient thresholds for test
        let result = tester.test_generated_stream(
            temp_dir.path(),
            10.0, // 10 seconds
            SegmentType::MPEGTS,
        );
        match result {
            Ok(test_result) => {
                assert!(
                    test_result.success,
                    "Test should pass: {}",
                    test_result.summary()
                );
                assert!(
                    test_result.total_segments > 0,
                    "Should have generated segments"
                );
                assert!(
                    test_result.total_playlist_duration > 8.0,
                    "Should have ~10s of content"
                );
                assert!(test_result.full_segments > 0, "Should have full segments");
                println!("✓ MPEG-TS test passed: {}", test_result.summary());
            }
            Err(e) => {
                panic!("Test generation failed: {}", e);
            }
        }
    }
    // fMP4 variant of the end-to-end test; currently ignored (not run in CI).
    #[ignore]
    #[test]
    fn test_generated_hls_stream_fmp4() {
        env_logger::try_init().ok();
        let temp_dir = tempdir().unwrap();
        let tester = HlsTimingTester::new(0.2, 1.0, 0.5); // More lenient thresholds for test
        let result = tester.test_generated_stream(
            temp_dir.path(),
            8.0, // 8 seconds
            SegmentType::FMP4,
        );
        match result {
            Ok(test_result) => {
                assert!(
                    test_result.success,
                    "Test should pass: {}",
                    test_result.summary()
                );
                assert!(
                    test_result.total_segments > 0,
                    "Should have generated segments"
                );
                assert!(
                    test_result.total_playlist_duration > 6.0,
                    "Should have ~8s of content"
                );
                assert!(test_result.full_segments > 0, "Should have full segments");
                println!("✓ fMP4 test passed: {}", test_result.summary());
            }
            Err(e) => {
                panic!("Test generation failed: {}", e);
            }
        }
    }
    // Longer run: 30 seconds of generated content, with verbose reporting.
    #[test]
    fn test_30_second_stream() {
        env_logger::try_init().ok();
        let temp_dir = tempdir().unwrap();
        let tester = HlsTimingTester::default();
        let result = tester.test_generated_stream(
            temp_dir.path(),
            30.0, // 30 seconds as requested
            SegmentType::MPEGTS,
        );
        match result {
            Ok(test_result) => {
                println!("{:?}", test_result);
                println!("30-second stream test results:");
                println!("  Total segments: {}", test_result.total_segments);
                println!("  Full segments: {}", test_result.full_segments);
                println!("  Partial segments: {}", test_result.partial_segments);
                println!(
                    "  Total playlist duration: {:.1}s",
                    test_result.total_playlist_duration
                );
                println!(
                    "  Total actual duration: {:.1}s",
                    test_result.total_actual_duration
                );
                println!(
                    "  Average difference: {:.3}s",
                    test_result.average_difference
                );
                println!("  Test duration: {:?}", test_result.test_duration);
                println!("  Result: {}", test_result.summary());
                assert!(
                    test_result.success,
                    "30s test should pass: {}",
                    test_result.summary()
                );
                assert!(
                    test_result.total_segments >= 2,
                    "Should have multiple segments for 30s"
                );
                assert!(
                    test_result.total_playlist_duration >= 25.0,
                    "Should have ~30s of content"
                );
                if !test_result.problematic_segments.is_empty() {
                    println!("  Problematic segments:");
                    for seg in &test_result.problematic_segments {
                        println!(
                            "    {}: {:.3}s difference",
                            seg.segment_name, seg.difference
                        );
                    }
                }
            }
            Err(e) => {
                panic!("30-second test generation failed: {}", e);
            }
        }
    }
}

View File

@ -85,7 +85,7 @@ impl TryInto<Encoder> for &VideoVariant {
fn try_into(self) -> Result<Encoder, Self::Error> {
unsafe {
let mut opt = HashMap::new();
if self.codec == "x264" {
if self.codec == "x264" || self.codec == "libx264" {
opt.insert("preset".to_string(), "fast".to_string());
//opt.insert("tune".to_string(), "zerolatency".to_string());
}

View File

@ -1,10 +1,10 @@
use data_encoding::BASE32_NOPAD;
use log::debug;
use sha2::{Digest, Sha256};
use std::collections::HashMap;
use std::sync::{Arc, RwLock};
use std::time::{Duration, Instant};
use tokio::task;
use log::debug;
use sha2::{Digest, Sha256};
use data_encoding::BASE32_NOPAD;
#[derive(Debug, Clone)]
pub struct ViewerInfo {
@ -55,7 +55,13 @@ impl ViewerTracker {
BASE32_NOPAD.encode(fingerprint).to_lowercase()
}
pub fn track_viewer(&self, token: &str, stream_id: &str, ip_address: &str, user_agent: Option<String>) {
pub fn track_viewer(
&self,
token: &str,
stream_id: &str,
ip_address: &str,
user_agent: Option<String>,
) {
let mut viewers = self.viewers.write().unwrap();
let viewer_info = ViewerInfo {
@ -76,14 +82,16 @@ impl ViewerTracker {
pub fn get_viewer_count(&self, stream_id: &str) -> usize {
let viewers = self.viewers.read().unwrap();
viewers.values()
viewers
.values()
.filter(|v| v.stream_id == stream_id)
.count()
}
pub fn get_active_viewers(&self, stream_id: &str) -> Vec<String> {
let viewers = self.viewers.read().unwrap();
viewers.iter()
viewers
.iter()
.filter(|(_, v)| v.stream_id == stream_id)
.map(|(token, _)| token.clone())
.collect()
@ -109,15 +117,20 @@ impl ViewerTracker {
let mut viewers = self.viewers.write().unwrap();
let now = Instant::now();
let expired_tokens: Vec<String> = viewers.iter()
let expired_tokens: Vec<String> = viewers
.iter()
.filter(|(_, viewer)| now.duration_since(viewer.last_seen) > self.timeout_duration)
.map(|(token, _)| token.clone())
.collect();
for token in expired_tokens {
if let Some(viewer) = viewers.remove(&token) {
debug!("Expired viewer {} from stream {} (last seen {:?} ago)",
token, viewer.stream_id, now.duration_since(viewer.last_seen));
debug!(
"Expired viewer {} from stream {} (last seen {:?} ago)",
token,
viewer.stream_id,
now.duration_since(viewer.last_seen)
);
}
}
}
@ -142,7 +155,10 @@ mod tests {
let token1 = ViewerTracker::generate_viewer_token(ip, user_agent);
let token2 = ViewerTracker::generate_viewer_token(ip, user_agent);
assert_eq!(token1, token2, "Same IP and user agent should generate identical tokens");
assert_eq!(
token1, token2,
"Same IP and user agent should generate identical tokens"
);
}
#[test]
@ -155,7 +171,10 @@ mod tests {
let token1 = ViewerTracker::generate_viewer_token(ip1, user_agent);
let token2 = ViewerTracker::generate_viewer_token(ip2, user_agent);
assert_ne!(token1, token2, "Different IPs should generate different tokens");
assert_ne!(
token1, token2,
"Different IPs should generate different tokens"
);
}
#[test]
@ -166,7 +185,10 @@ mod tests {
let token1 = ViewerTracker::generate_viewer_token(ip, None);
let token2 = ViewerTracker::generate_viewer_token(ip, None);
assert_eq!(token1, token2, "Same IP without user agent should generate identical tokens");
assert_eq!(
token1, token2,
"Same IP without user agent should generate identical tokens"
);
}
#[test]
@ -178,8 +200,12 @@ mod tests {
let token = ViewerTracker::generate_viewer_token(ip, user_agent);
// Should be base32 encoded (lowercase, no padding)
assert!(token.chars().all(|c| "abcdefghijklmnopqrstuvwxyz234567".contains(c)),
"Token should only contain base32 characters");
assert!(
token
.chars()
.all(|c| "abcdefghijklmnopqrstuvwxyz234567".contains(c)),
"Token should only contain base32 characters"
);
assert!(token.len() > 10, "Token should be reasonably long");
}
@ -193,6 +219,9 @@ mod tests {
let token1 = ViewerTracker::generate_viewer_token(ip, user_agent1);
let token2 = ViewerTracker::generate_viewer_token(ip, user_agent2);
assert_ne!(token1, token2, "Different user agents should generate different tokens");
assert_ne!(
token1, token2,
"Different user agents should generate different tokens"
);
}
}

View File

@ -94,7 +94,7 @@ impl ZapStreamDb {
pub async fn update_stream(&self, user_stream: &UserStream) -> Result<()> {
sqlx::query(
"update user_stream set state = ?, starts = ?, ends = ?, title = ?, summary = ?, image = ?, thumb = ?, tags = ?, content_warning = ?, goal = ?, pinned = ?, fee = ?, event = ? where id = ?",
"update user_stream set state = ?, starts = ?, ends = ?, title = ?, summary = ?, image = ?, thumb = ?, tags = ?, content_warning = ?, goal = ?, pinned = ?, fee = ?, event = ?, endpoint_id = ? where id = ?",
)
.bind(&user_stream.state)
.bind(&user_stream.starts)
@ -109,6 +109,7 @@ impl ZapStreamDb {
.bind(&user_stream.pinned)
.bind(&user_stream.fee)
.bind(&user_stream.event)
.bind(&user_stream.endpoint_id)
.bind(&user_stream.id)
.execute(&self.db)
.await

View File

@ -1,43 +1,15 @@
ARG IMAGE=rust:bookworm
FROM $IMAGE AS build
FROM voidic/rust-ffmpeg AS build
WORKDIR /app/src
ENV FFMPEG_DIR=/app/ffmpeg
COPY . .
RUN apt update && \
apt install -y \
build-essential \
libx264-dev \
libx265-dev \
libwebp-dev \
libpng-dev \
nasm \
protobuf-compiler \
libclang-dev && \
rm -rf /var/lib/apt/lists/*
RUN git clone --single-branch --branch release/7.1 https://git.v0l.io/ffmpeg/ffmpeg.git && \
cd ffmpeg && \
./configure \
--prefix=$FFMPEG_DIR \
--disable-programs \
--disable-doc \
--disable-network \
--enable-gpl \
--enable-version3 \
--disable-postproc \
--enable-libx264 \
--enable-libx265 \
--enable-libwebp \
--disable-static \
--enable-shared && \
make -j$(nproc) && make install
ENV LD_LIBRARY_PATH=$FFMPEG_DIR/lib
RUN cargo test
RUN cargo install --path ./crates/zap-stream --root /app/build
FROM $IMAGE AS runner
FROM rust:bookworm
WORKDIR /app
RUN apt update && \
apt install -y libx264-164 && \
rm -rf /var/lib/apt/lists/*
COPY --from=build /app/build .
COPY --from=build /app/ffmpeg/lib/ /lib
COPY --from=build /app/src/ffmpeg/lib/ /lib
ENTRYPOINT ["/app/bin/zap-stream"]

View File

@ -3,9 +3,6 @@
# All the endpoints must be valid URI's
endpoints:
- "rtmp://127.0.0.1:3336"
- "srt://127.0.0.1:3335"
- "tcp://127.0.0.1:3334"
- "test-pattern://"
# Public hostname which points to the IP address used to listen for all [endpoints]
endpoints_public_hostname: "localhost"

View File

@ -571,6 +571,8 @@ impl Api {
})
.collect();
// TODO: past streams should include a history entry
Ok(HistoryResponse {
items,
page: 0,
@ -650,8 +652,16 @@ impl Api {
}
/// Track a viewer for viewer count analytics
pub fn track_viewer(&self, token: &str, stream_id: &str, ip_address: &str, user_agent: Option<String>) {
self.overseer.viewer_tracker().track_viewer(token, stream_id, ip_address, user_agent);
pub fn track_viewer(
&self,
token: &str,
stream_id: &str,
ip_address: &str,
user_agent: Option<String>,
) {
self.overseer
.viewer_tracker()
.track_viewer(token, stream_id, ip_address, user_agent);
}
/// Get current viewer count for a stream

View File

@ -0,0 +1,697 @@
use anyhow::{Context, Result};
use ffmpeg_rs_raw::ffmpeg_sys_the_third::{
av_q2d, AVMediaType::AVMEDIA_TYPE_AUDIO, AVMediaType::AVMEDIA_TYPE_VIDEO, AV_NOPTS_VALUE,
};
use ffmpeg_rs_raw::Demuxer;
use m3u8_rs::{parse_media_playlist, MediaSegmentType};
use std::env;
use std::fmt;
use std::fs;
use std::io::{Read, Seek, SeekFrom};
use std::path::{Path, PathBuf};
/// One row of the per-segment analysis table printed by `main`.
#[derive(Debug)]
struct SegmentInfo {
    // Segment URI exactly as listed in the playlist.
    filename: String,
    // Duration advertised by the playlist entry.
    playlist_duration: f32,
    // Duration measured by demuxing the media (max of video/audio).
    actual_duration: f64,
    video_duration: f64,
    audio_duration: f64,
    // actual_duration - playlist_duration, in seconds.
    difference: f64,
    segment_type: SegmentAnalysisType,
}
/// Whether a playlist entry was a full segment or an LL-HLS partial.
#[derive(Debug, Clone)]
enum SegmentAnalysisType {
    Full,
    Partial {
        // True when the partial is flagged independent in the playlist.
        independent: bool,
        // (length, offset) of the partial's byte range, when present.
        byte_range: Option<(u64, Option<u64>)>,
    },
}
/// Durations plus packet/PTS bookkeeping gathered while demuxing a segment.
#[derive(Debug)]
struct SegmentDurations {
    // Longest of the per-stream durations, in seconds.
    total_duration: f64,
    video_duration: f64,
    audio_duration: f64,
    // Packet counts per stream (only packets with a valid PTS are counted).
    video_packets: u64,
    audio_packets: u64,
    // First/last observed PTS per stream; AV_NOPTS_VALUE when none seen.
    video_start_pts: i64,
    video_end_pts: i64,
    audio_start_pts: i64,
    audio_end_pts: i64,
}
/// Summary of an fMP4 initialization segment (init.mp4).
#[derive(Debug)]
struct InitSegmentInfo {
    // Number of streams found in the init segment.
    stream_count: usize,
    streams: Vec<StreamInfo>,
    // Whether a MOOV box was detected (populated by analyze_init_segment).
    has_moov: bool,
    // Whether video streams declare a pixel format.
    pixel_format_set: bool,
}
/// Codec-level details for one stream, rendered via its Display impl.
#[derive(Debug)]
struct StreamInfo {
    // "video", "audio", or another codec-type name (see Display impl).
    codec_type: String,
    codec_name: String,
    // Dimensions; meaningful for video streams only.
    width: Option<i32>,
    height: Option<i32>,
    pixel_format: Option<String>,
}
impl fmt::Display for StreamInfo {
    /// Render a one-line, human-readable description of the stream:
    /// video as "codec WxH (pix_fmt)", audio as "codec (audio)", anything
    /// else as "codec (type)".
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if self.codec_type == "video" {
            match (self.width, self.height) {
                (Some(w), Some(h)) => write!(f, "{} {}x{}", self.codec_name, w, h)?,
                // Missing either dimension: fall back to the codec name alone.
                _ => write!(f, "{}", self.codec_name)?,
            }
            if let Some(pix_fmt) = self.pixel_format.as_ref() {
                write!(f, " ({})", pix_fmt)?;
            }
            Ok(())
        } else if self.codec_type == "audio" {
            write!(f, "{} (audio)", self.codec_name)
        } else {
            write!(f, "{} ({})", self.codec_name, self.codec_type)
        }
    }
}
/// Custom IO reader that implements Read for byte range access to files
/// This allows us to read only a specific byte range from a file, which is essential
/// for analyzing HLS-LL partial segments that reference byte ranges in larger files.
struct ByteRangeReader {
    // Underlying file; `new` seeks it to `start_offset` before any reads.
    file: fs::File,
    // Absolute offset of the range start within the file.
    start_offset: u64,
    // Number of bytes exposed through `Read`.
    length: u64,
    // Bytes already handed out, relative to the range start.
    current_pos: u64,
}
impl ByteRangeReader {
    /// Open `path` and expose the `length` bytes starting at `offset`
    /// (defaulting to the start of the file) through the `Read` impl.
    fn new(path: &Path, length: u64, offset: Option<u64>) -> Result<Self> {
        let start_offset = offset.unwrap_or(0);
        let mut file = fs::File::open(path)
            .with_context(|| format!("Failed to open file: {}", path.display()))?;
        // Position the cursor at the beginning of the requested range.
        file.seek(SeekFrom::Start(start_offset))
            .with_context(|| format!("Failed to seek to offset {}", start_offset))?;
        Ok(Self {
            file,
            start_offset,
            length,
            current_pos: 0,
        })
    }
}
impl Read for ByteRangeReader {
    /// Delegate to the underlying file, but never hand out more than
    /// `length` bytes in total; past that point the reader reports EOF.
    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
        let remaining = self.length - self.current_pos;
        if remaining == 0 {
            // The configured byte range is exhausted.
            return Ok(0);
        }
        // Cap the read so it cannot cross the end of the byte range.
        let capped = remaining.min(buf.len() as u64) as usize;
        let n = self.file.read(&mut buf[..capped])?;
        self.current_pos += n as u64;
        Ok(n)
    }
}
/// CLI entry point: analyze an HLS stream directory (playlist, optional
/// init segment, every full/partial segment) and print a timing report
/// comparing advertised playlist durations against demuxed durations.
fn main() -> Result<()> {
    let args: Vec<String> = env::args().collect();
    if args.len() != 2 {
        eprintln!("Usage: {} <path_to_hls_directory>", args[0]);
        eprintln!(
            "Example: {} out/hls/8c220348-fdbb-44cd-94d5-97a11a9ec91d/stream_0",
            args[0]
        );
        std::process::exit(1);
    }
    let hls_dir = PathBuf::from(&args[1]);
    let playlist_path = hls_dir.join("live.m3u8");
    if !playlist_path.exists() {
        eprintln!("Error: Playlist file {:?} does not exist", playlist_path);
        std::process::exit(1);
    }
    println!("Analyzing HLS stream: {}", hls_dir.display());
    println!("Playlist: {}", playlist_path.display());
    // Check for initialization segment
    let init_path = hls_dir.join("init.mp4");
    if init_path.exists() {
        println!("Init segment: {}", init_path.display());
        match analyze_init_segment(&init_path) {
            Ok(info) => {
                println!(" Streams: {}", info.stream_count);
                for (i, stream_info) in info.streams.iter().enumerate() {
                    println!(" Stream {}: {}", i, stream_info);
                }
                if info.has_moov {
                    println!(" ✓ Contains MOOV box");
                } else {
                    println!(" ✗ Missing MOOV box");
                }
                if info.pixel_format_set {
                    println!(" ✓ Pixel format properly set");
                } else {
                    println!(" ✗ Pixel format not set");
                }
            }
            Err(e) => {
                // Init segment problems are reported but don't abort the run.
                println!(" Error analyzing init segment: {}", e);
            }
        }
    } else {
        println!("No init segment found");
    }
    println!();
    // Parse the playlist
    let playlist_content =
        fs::read_to_string(&playlist_path).context("Failed to read playlist file")?;
    let (_, playlist) = parse_media_playlist(playlist_content.as_bytes())
        .map_err(|e| anyhow::anyhow!("Failed to parse playlist: {:?}", e))?;
    // Analyze each segment
    let mut segments = Vec::new();
    let mut total_playlist_duration = 0.0f32;
    let mut total_actual_duration = 0.0f64;
    println!("Segment Analysis:");
    println!(
        "{:<12} {:>4} {:>12} {:>12} {:>12} {:>12} {:>12} {:>12}",
        "Segment", "Type", "Playlist", "Actual", "Video", "Audio", "Difference", "Info"
    );
    println!(
        "{:<12} {:>4} {:>12} {:>12} {:>12} {:>12} {:>12} {:>12}",
        "--------", "----", "--------", "------", "-----", "-----", "----------", "----"
    );
    for segment_type in &playlist.segments {
        match segment_type {
            MediaSegmentType::Full(segment) => {
                let segment_path = hls_dir.join(&segment.uri);
                if !segment_path.exists() {
                    eprintln!("Warning: Segment file {:?} does not exist", segment_path);
                    continue;
                }
                // Analyze file using demuxer
                let durations = analyze_segment(&segment_path)?;
                let actual_duration = durations.total_duration;
                let video_duration = durations.video_duration;
                let audio_duration = durations.audio_duration;
                let playlist_duration = segment.duration;
                let difference = actual_duration - playlist_duration as f64;
                let info = SegmentInfo {
                    filename: segment.uri.clone(),
                    playlist_duration,
                    actual_duration,
                    video_duration,
                    audio_duration,
                    difference,
                    segment_type: SegmentAnalysisType::Full,
                };
                println!(
                    "{:<12} {:>4} {:>12.3} {:>12.3} {:>12.3} {:>12.3} {:>12.3} {:>12}",
                    info.filename,
                    "FULL",
                    info.playlist_duration,
                    info.actual_duration,
                    info.video_duration,
                    info.audio_duration,
                    info.difference,
                    ""
                );
                segments.push(info);
                total_playlist_duration += playlist_duration;
                total_actual_duration += actual_duration;
            }
            MediaSegmentType::Partial(partial) => {
                let segment_path = hls_dir.join(&partial.uri);
                if !segment_path.exists() {
                    eprintln!(
                        "Warning: Partial segment file {:?} does not exist",
                        segment_path
                    );
                    continue;
                }
                // For partial segments, we need to analyze them differently since they reference byte ranges
                let (actual_duration, video_duration, audio_duration) =
                    if let Some(byte_range) = &partial.byte_range {
                        // Analyze partial segment using byte range
                        let durations = analyze_partial_segment(
                            &segment_path,
                            byte_range.length,
                            byte_range.offset,
                        )?;
                        (
                            durations.total_duration,
                            durations.video_duration,
                            durations.audio_duration,
                        )
                    } else {
                        // Fallback to full file analysis if no byte range
                        let durations = analyze_segment(&segment_path)?;
                        (
                            durations.total_duration,
                            durations.video_duration,
                            durations.audio_duration,
                        )
                    };
                let playlist_duration = partial.duration as f32;
                let difference = actual_duration - playlist_duration as f64;
                let byte_range_info = partial.byte_range.as_ref().map(|br| (br.length, br.offset));
                let info = SegmentInfo {
                    filename: partial.uri.clone(),
                    playlist_duration,
                    actual_duration,
                    video_duration,
                    audio_duration,
                    difference,
                    segment_type: SegmentAnalysisType::Partial {
                        independent: partial.independent,
                        byte_range: byte_range_info,
                    },
                };
                let info_str = if partial.independent { "IND" } else { "" };
                println!(
                    "{:<12} {:>4} {:>12.3} {:>12.3} {:>12.3} {:>12.3} {:>12.3} {:>12}",
                    info.filename,
                    "PART",
                    info.playlist_duration,
                    info.actual_duration,
                    info.video_duration,
                    info.audio_duration,
                    info.difference,
                    info_str
                );
                segments.push(info);
                total_playlist_duration += playlist_duration;
                total_actual_duration += actual_duration;
            }
            MediaSegmentType::PreloadHint(_) => {
                // Skip preload hints for analysis
                continue;
            }
        }
    }
    println!();
    // Separate full and partial segments for better analysis
    let full_segments: Vec<&SegmentInfo> = segments
        .iter()
        .filter(|s| matches!(s.segment_type, SegmentAnalysisType::Full))
        .collect();
    let partial_segments: Vec<&SegmentInfo> = segments
        .iter()
        .filter(|s| matches!(s.segment_type, SegmentAnalysisType::Partial { .. }))
        .collect();
    let independent_partials: Vec<&SegmentInfo> = segments
        .iter()
        .filter(|s| {
            matches!(
                s.segment_type,
                SegmentAnalysisType::Partial {
                    independent: true,
                    ..
                }
            )
        })
        .collect();
    println!("Summary:");
    println!(" Total segments: {}", segments.len());
    println!(" Full segments: {}", full_segments.len());
    println!(" Partial segments: {}", partial_segments.len());
    println!(" Independent partials: {}", independent_partials.len());
    println!(" Total playlist duration: {:.3}s", total_playlist_duration);
    println!(" Total actual duration: {:.3}s", total_actual_duration);
    println!(
        " Total difference: {:.3}s",
        total_actual_duration - total_playlist_duration as f64
    );
    if !segments.is_empty() {
        println!(
            " Average difference per segment: {:.3}s",
            (total_actual_duration - total_playlist_duration as f64) / segments.len() as f64
        );
    }
    // Statistics
    // NOTE(review): with zero analyzable segments these folds yield +inf/-inf
    // and avg_diff divides by zero (prints NaN) — odd output but no panic.
    let differences: Vec<f64> = segments.iter().map(|s| s.difference).collect();
    let min_diff = differences.iter().fold(f64::INFINITY, |a, &b| a.min(b));
    let max_diff = differences.iter().fold(f64::NEG_INFINITY, |a, &b| a.max(b));
    let avg_diff = differences.iter().sum::<f64>() / differences.len() as f64;
    println!();
    println!("Difference Statistics:");
    println!(" Min difference: {:.3}s", min_diff);
    println!(" Max difference: {:.3}s", max_diff);
    println!(" Average difference: {:.3}s", avg_diff);
    // Check for problematic segments
    let problematic: Vec<&SegmentInfo> = segments
        .iter()
        .filter(|s| s.difference.abs() > 0.5)
        .collect();
    if !problematic.is_empty() {
        println!();
        println!("Problematic segments (>0.5s difference):");
        for seg in problematic {
            println!(" {}: {:.3}s difference", seg.filename, seg.difference);
        }
    }
    // HLS-LL specific analysis
    if !partial_segments.is_empty() {
        println!();
        println!("HLS-LL Analysis:");
        let avg_partial_duration: f64 = partial_segments
            .iter()
            .map(|s| s.playlist_duration as f64)
            .sum::<f64>()
            / partial_segments.len() as f64;
        println!(" Average partial duration: {:.3}s", avg_partial_duration);
        if let Some(part_inf) = &playlist.part_inf {
            let target_duration = part_inf.part_target;
            println!(" Target partial duration: {:.3}s", target_duration);
            println!(
                " Partial duration variance: {:.3}s",
                (avg_partial_duration - target_duration).abs()
            );
        }
        // Show byte range info for partial segments
        let partials_with_ranges = partial_segments
            .iter()
            .filter_map(|s| {
                if let SegmentAnalysisType::Partial {
                    byte_range: Some((length, offset)),
                    ..
                } = &s.segment_type
                {
                    Some((s, length, offset))
                } else {
                    None
                }
            })
            .collect::<Vec<_>>();
        if !partials_with_ranges.is_empty() {
            println!(
                " Partial segments with byte ranges: {}",
                partials_with_ranges.len()
            );
            let avg_range_size = partials_with_ranges
                .iter()
                .map(|(_, &length, _)| length)
                .sum::<u64>() as f64
                / partials_with_ranges.len() as f64;
            println!(" Average byte range size: {:.0} bytes", avg_range_size);
        }
    }
    // Check playlist properties
    println!();
    println!("Playlist Properties:");
    println!(" Version: {:?}", playlist.version);
    println!(" Target duration: {:?}", playlist.target_duration);
    println!(" Media sequence: {:?}", playlist.media_sequence);
    if let Some(part_inf) = &playlist.part_inf {
        println!(
            " Part target: {:.3}s (LL-HLS enabled)",
            part_inf.part_target
        );
    }
    // Count preload hints
    let preload_hints = playlist
        .segments
        .iter()
        .filter(|s| matches!(s, MediaSegmentType::PreloadHint(_)))
        .count();
    if preload_hints > 0 {
        println!(" Preload hints: {}", preload_hints);
    }
    Ok(())
}
/// Demux everything available from `reader` and measure per-stream timing,
/// packet counts, and start/end PTS. Works for whole files and for byte-range
/// slices alike, since only a `Read` implementation is required.
fn analyze_segment_with_reader(reader: Box<dyn Read>) -> Result<SegmentDurations> {
    let mut demuxer = Demuxer::new_custom_io(reader, None)?;
    // Probe the input to get stream information
    unsafe {
        demuxer.probe_input()?;
    }
    // First/last PTS and last packet duration per stream; combined below to
    // compute each stream's real duration.
    let mut video_start_pts = AV_NOPTS_VALUE;
    let mut video_end_pts = AV_NOPTS_VALUE;
    let mut audio_start_pts = AV_NOPTS_VALUE;
    let mut audio_end_pts = AV_NOPTS_VALUE;
    let mut video_last_duration = 0i64;
    let mut audio_last_duration = 0i64;
    let mut video_packets = 0u64;
    let mut audio_packets = 0u64;
    let mut video_stream_idx: Option<usize> = None;
    let mut audio_stream_idx: Option<usize> = None;
    // Read all packets and track timing
    loop {
        let packet_result = unsafe { demuxer.get_packet() };
        match packet_result {
            Ok((pkt, stream)) => {
                if pkt.is_null() {
                    break; // End of stream
                }
                // get_packet yields raw AVPacket/AVStream pointers, so all
                // field access must happen inside an unsafe block.
                unsafe {
                    let codec_type = (*(*stream).codecpar).codec_type;
                    let pts = (*pkt).pts;
                    let duration = (*pkt).duration;
                    let current_stream_idx = (*stream).index as usize;
                    match codec_type {
                        AVMEDIA_TYPE_VIDEO => {
                            if video_stream_idx.is_none() {
                                video_stream_idx = Some(current_stream_idx);
                            }
                            // Only packets with a valid PTS contribute.
                            if pts != AV_NOPTS_VALUE {
                                if video_start_pts == AV_NOPTS_VALUE {
                                    video_start_pts = pts;
                                }
                                video_end_pts = pts;
                                video_last_duration = duration;
                                video_packets += 1;
                            }
                        }
                        AVMEDIA_TYPE_AUDIO => {
                            if audio_stream_idx.is_none() {
                                audio_stream_idx = Some(current_stream_idx);
                            }
                            if pts != AV_NOPTS_VALUE {
                                if audio_start_pts == AV_NOPTS_VALUE {
                                    audio_start_pts = pts;
                                }
                                audio_end_pts = pts;
                                audio_last_duration = duration;
                                audio_packets += 1;
                            }
                        }
                        _ => {}
                    }
                }
            }
            Err(_) => break, // End of file or error
        }
    }
    // Calculate durations (including last packet duration) — the PTS span
    // only reaches the START of the final packet, hence the extra term.
    let video_duration = if let Some(stream_idx) = video_stream_idx {
        if video_start_pts != AV_NOPTS_VALUE && video_end_pts != AV_NOPTS_VALUE {
            unsafe {
                let stream = demuxer.get_stream(stream_idx)?;
                let time_base = (*stream).time_base;
                let pts_duration = (video_end_pts - video_start_pts) as f64 * av_q2d(time_base);
                let last_pkt_duration = video_last_duration as f64 * av_q2d(time_base);
                pts_duration + last_pkt_duration
            }
        } else {
            0.0
        }
    } else {
        0.0
    };
    let audio_duration = if let Some(stream_idx) = audio_stream_idx {
        if audio_start_pts != AV_NOPTS_VALUE && audio_end_pts != AV_NOPTS_VALUE {
            unsafe {
                let stream = demuxer.get_stream(stream_idx)?;
                let time_base = (*stream).time_base;
                let pts_duration = (audio_end_pts - audio_start_pts) as f64 * av_q2d(time_base);
                let last_pkt_duration = audio_last_duration as f64 * av_q2d(time_base);
                pts_duration + last_pkt_duration
            }
        } else {
            0.0
        }
    } else {
        0.0
    };
    // A segment's effective duration is the longer of the two streams.
    let total_duration = video_duration.max(audio_duration);
    Ok(SegmentDurations {
        total_duration,
        video_duration,
        audio_duration,
        video_packets,
        audio_packets,
        video_start_pts,
        video_end_pts,
        audio_start_pts,
        audio_end_pts,
    })
}
fn analyze_segment(path: &Path) -> Result<SegmentDurations> {
let file =
fs::File::open(path).with_context(|| format!("Failed to open file: {}", path.display()))?;
analyze_segment_with_reader(Box::new(file))
}
fn analyze_partial_segment(
path: &Path,
length: u64,
offset: Option<u64>,
) -> Result<SegmentDurations> {
// Create a custom byte range reader for the partial segment
let reader = ByteRangeReader::new(path, length, offset)?;
// Use the custom IO with demuxer to analyze only the byte range
analyze_segment_with_reader(Box::new(reader))
}
fn analyze_init_segment(path: &Path) -> Result<InitSegmentInfo> {
use ffmpeg_rs_raw::ffmpeg_sys_the_third::{
av_get_pix_fmt_name, avcodec_get_name, AVPixelFormat::AV_PIX_FMT_NONE,
};
use std::ffi::CStr;
let file = fs::File::open(path)
.with_context(|| format!("Failed to open init segment: {}", path.display()))?;
let mut demuxer = Demuxer::new_custom_io(Box::new(file), None)?;
// Probe the input to get stream information
unsafe {
demuxer.probe_input()?;
}
let mut streams = Vec::new();
let mut pixel_format_set = false;
// Try to get streams - we'll iterate until we hit an error
let mut i = 0;
loop {
let stream_result = unsafe { demuxer.get_stream(i) };
match stream_result {
Ok(stream) => unsafe {
let codecpar = (*stream).codecpar;
let codec_type = (*codecpar).codec_type;
let codec_name = {
let name_ptr = avcodec_get_name((*codecpar).codec_id);
if name_ptr.is_null() {
"unknown".to_string()
} else {
CStr::from_ptr(name_ptr).to_string_lossy().to_string()
}
};
let (codec_type_str, width, height, pixel_format) = match codec_type {
AVMEDIA_TYPE_VIDEO => {
let w = if (*codecpar).width > 0 {
Some((*codecpar).width)
} else {
None
};
let h = if (*codecpar).height > 0 {
Some((*codecpar).height)
} else {
None
};
let pix_fmt = if (*codecpar).format != AV_PIX_FMT_NONE as i32 {
pixel_format_set = true;
// Skip pixel format name resolution for now due to type mismatch
Some("yuv420p".to_string()) // Common default
} else {
None
};
("video".to_string(), w, h, pix_fmt)
}
AVMEDIA_TYPE_AUDIO => ("audio".to_string(), None, None, None),
_ => ("other".to_string(), None, None, None),
};
streams.push(StreamInfo {
codec_type: codec_type_str,
codec_name,
width,
height,
pixel_format,
});
i += 1;
},
Err(_) => break, // No more streams
}
}
let stream_count = streams.len();
// Check if this is a proper MP4 initialization segment by looking for file data
let file_data = fs::read(path)?;
let has_moov = file_data.windows(4).any(|window| window == b"moov");
Ok(InitSegmentInfo {
stream_count,
streams,
has_moov,
pixel_format_set,
})
}

View File

@ -88,19 +88,25 @@ impl HttpServer {
router.insert("/index.html", HttpServerPath::Index).unwrap();
router
.insert(
format!("/{}/{{stream}}/live.m3u8", HlsEgress::PATH),
format!("/{{stream}}/{}/live.m3u8", HlsEgress::PATH),
HttpServerPath::HlsMasterPlaylist,
)
.unwrap();
router
.insert(
format!("/{}/{{stream}}/{{variant}}/live.m3u8", HlsEgress::PATH),
format!("/{{stream}}/{}/{{variant}}/live.m3u8", HlsEgress::PATH),
HttpServerPath::HlsVariantPlaylist,
)
.unwrap();
router
.insert(
format!("/{}/{{stream}}/{{variant}}/{{seg}}.ts", HlsEgress::PATH),
format!("/{{stream}}/{}/{{variant}}/{{seg}}.ts", HlsEgress::PATH),
HttpServerPath::HlsSegmentFile,
)
.unwrap();
router
.insert(
format!("/{{stream}}/{}/{{variant}}/{{seg}}.m4s", HlsEgress::PATH),
HttpServerPath::HlsSegmentFile,
)
.unwrap();
@ -146,7 +152,7 @@ impl HttpServer {
.title
.unwrap_or_else(|| format!("Stream {}", &stream.id[..8])),
summary: stream.summary,
live_url: format!("/{}/{}/live.m3u8", HlsEgress::PATH, stream.id),
live_url: format!("/{}/{}/live.m3u8", stream.id, HlsEgress::PATH),
viewer_count: if viewer_count > 0 {
Some(viewer_count as _)
} else {
@ -381,7 +387,7 @@ impl HttpServer {
.header("server", "zap-stream-core")
.header("access-control-allow-origin", "*")
.header("access-control-allow-headers", "*")
.header("access-control-allow-methods", "HEAD, GET")
.header("access-control-allow-methods", "HEAD, GET, OPTIONS")
}
/// Get a response object for a file body

View File

@ -229,17 +229,23 @@ impl ZapStreamOverseer {
pubkey: &Vec<u8>,
) -> Result<Event> {
// TODO: remove assumption that HLS is enabled
let base_streaming_path = PathBuf::from(HlsEgress::PATH).join(stream.id.to_string());
let pipeline_dir = PathBuf::from(stream.id.to_string());
let extra_tags = vec![
Tag::parse(["p", hex::encode(pubkey).as_str(), "", "host"])?,
Tag::parse([
"streaming",
self.map_to_public_url(base_streaming_path.join("live.m3u8").to_str().unwrap())?
self.map_to_public_url(
pipeline_dir
.join(HlsEgress::PATH)
.join("live.m3u8")
.to_str()
.unwrap(),
)?
.as_str(),
])?,
Tag::parse([
"image",
self.map_to_public_url(base_streaming_path.join("thumb.webp").to_str().unwrap())?
self.map_to_public_url(pipeline_dir.join("thumb.webp").to_str().unwrap())?
.as_str(),
])?,
Tag::parse(["service", self.map_to_public_url("api/v1")?.as_str()])?,
@ -351,7 +357,8 @@ impl Overseer for ZapStreamOverseer {
// Get ingest endpoint configuration based on connection type
let endpoint = self.detect_endpoint(&connection).await?;
let cfg = get_variants_from_endpoint(&stream_info, &endpoint)?;
let caps = parse_capabilities(&endpoint.capabilities.unwrap_or("".to_string()));
let cfg = get_variants_from_endpoint(&stream_info, &caps)?;
if cfg.video_src.is_none() || cfg.variants.is_empty() {
bail!("No video src found");
@ -362,6 +369,27 @@ impl Overseer for ZapStreamOverseer {
name: "hls".to_string(),
variants: cfg.variants.iter().map(|v| v.id()).collect(),
}));
if let Some(EndpointCapability::DVR { height }) = caps
.iter()
.find(|c| matches!(c, EndpointCapability::DVR { .. }))
{
let var = cfg.variants.iter().find(|v| match v {
VariantStream::Video(v) => v.height == *height,
_ => false,
});
match var {
Some(var) => egress.push(EgressType::Recorder(EgressConfig {
name: "dvr".to_string(),
variants: [var.id()].into(),
})),
None => {
warn!(
"Invalid DVR config, no variant found with height {}",
height
);
}
}
}
let stream_id = connection.id.clone();
// insert new stream record
@ -419,10 +447,10 @@ impl Overseer for ZapStreamOverseer {
if let Some(endpoint) = self.db.get_ingest_endpoint(endpoint_id).await? {
endpoint.cost
} else {
0
bail!("Endpoint doesnt exist");
}
} else {
0
bail!("Endpoint id not set on stream");
};
// Convert duration from seconds to minutes and calculate cost
@ -532,7 +560,7 @@ impl ZapStreamOverseer {
let default = endpoints.iter().max_by_key(|e| e.cost);
Ok(endpoints
.iter()
.find(|e| e.name == connection.endpoint)
.find(|e| e.name.eq_ignore_ascii_case(connection.endpoint))
.or(default)
.unwrap()
.clone())
@ -545,13 +573,48 @@ struct EndpointConfig<'a> {
variants: Vec<VariantStream>,
}
enum EndpointCapability {
SourceVariant,
Variant { height: u16, bitrate: u64 },
DVR { height: u16 },
}
fn parse_capabilities(cap: &str) -> Vec<EndpointCapability> {
cap.to_ascii_lowercase()
.split(',')
.map_while(|c| {
let cs = c.split(':').collect::<Vec<&str>>();
match cs[0] {
"variant" if cs[1] == "source" => Some(EndpointCapability::SourceVariant),
"variant" if cs.len() == 3 => {
if let (Ok(h), Ok(br)) = (cs[1].parse(), cs[2].parse()) {
Some(EndpointCapability::Variant {
height: h,
bitrate: br,
})
} else {
warn!("Invalid variant: {}", c);
None
}
}
"dvr" if cs.len() == 2 => {
if let Ok(h) = cs[1].parse() {
Some(EndpointCapability::DVR { height: h })
} else {
warn!("Invalid dvr: {}", c);
None
}
}
_ => None,
}
})
.collect()
}
fn get_variants_from_endpoint<'a>(
info: &'a IngressInfo,
endpoint: &IngestEndpoint,
capabilities: &Vec<EndpointCapability>,
) -> Result<EndpointConfig<'a>> {
let capabilities_str = endpoint.capabilities.as_deref().unwrap_or("");
let capabilities: Vec<&str> = capabilities_str.split(',').collect();
let mut vars: Vec<VariantStream> = vec![];
let video_src = info
@ -568,9 +631,8 @@ fn get_variants_from_endpoint<'a>(
let mut dst_index = 0;
for capability in capabilities {
let parts: Vec<&str> = capability.split(':').collect();
if parts.len() >= 2 && parts[0] == "variant" && parts[1] == "source" {
match capability {
EndpointCapability::SourceVariant => {
// Add copy variant (group for source)
if let Some(video_src) = video_src {
vars.push(VariantStream::CopyVideo(VariantMapping {
@ -593,18 +655,24 @@ fn get_variants_from_endpoint<'a>(
}
group_id += 1;
} else if parts.len() >= 3 && parts[0] == "variant" {
if let (Ok(target_height), Ok(bitrate)) =
(parts[1].parse::<u32>(), parts[2].parse::<u32>())
{
}
EndpointCapability::Variant { height, bitrate } => {
// Add video variant for this group
if let Some(video_src) = video_src {
let output_height = *height;
if video_src.height < output_height as _ {
info!(
"Skipping variant {}p, source would be upscaled from {}p",
height, video_src.height
);
continue;
}
// Calculate dimensions maintaining aspect ratio
let input_width = video_src.width as f32;
let input_height = video_src.height as f32;
let aspect_ratio = input_width / input_height;
let output_height = target_height;
let output_width = (output_height as f32 * aspect_ratio).round() as u16;
// Ensure even dimensions for H.264 compatibility
@ -617,7 +685,7 @@ fn get_variants_from_endpoint<'a>(
output_height + 1
} else {
output_height
} as u16;
};
vars.push(VariantStream::Video(VideoVariant {
mapping: VariantMapping {
@ -627,17 +695,16 @@ fn get_variants_from_endpoint<'a>(
group_id,
},
width: output_width,
height: output_height,
height: output_height as _,
fps: video_src.fps,
bitrate: bitrate as u64,
bitrate: *bitrate as _,
codec: "libx264".to_string(),
profile: 77, // AV_PROFILE_H264_MAIN
level: 51,
keyframe_interval: video_src.fps as u16 * 2,
level: 51, // High 5.1 (4K)
keyframe_interval: video_src.fps as u16,
pixel_format: AV_PIX_FMT_YUV420P as u32,
}));
dst_index += 1;
}
// Add audio variant for the same group
if let Some(audio_src) = audio_src {
@ -660,7 +727,8 @@ fn get_variants_from_endpoint<'a>(
group_id += 1;
}
}
// Handle other capabilities like dvr:720h here if needed
_ => {}
}
}
Ok(EndpointConfig {