Compare commits

...

13 Commits

Author SHA1 Message Date
e7e1f0299d fix: segment duration calc
All checks were successful
continuous-integration/drone Build is passing
feat: add debugging tool for hls segments
2025-06-13 17:42:39 +01:00
338d351727 fix: disable HLS-LL
All checks were successful
continuous-integration/drone Build is passing
fix: thumb.webp path
2025-06-13 13:05:23 +01:00
047b3fec59 fix: hls partial sequencing
All checks were successful
continuous-integration/drone Build is passing
2025-06-13 12:36:20 +01:00
fee5e77407 fix: missing endpoint id 2025-06-13 12:21:40 +01:00
d88f829645 fix: match endpoint 2025-06-13 12:18:39 +01:00
ca70bf964c feat: HLS-LL
Some checks reported errors
continuous-integration/drone Build was killed
2025-06-13 11:30:52 +01:00
cc973f0d9b chore: format thread names 2025-06-12 17:29:22 +01:00
a7ff18b34c fix: add default stream info to stream 2025-06-12 17:25:28 +01:00
09577cc2c8 fix: rtmp ingest
All checks were successful
continuous-integration/drone Build is passing
fix: idle placeholder stream
2025-06-12 14:56:59 +01:00
ad20fbc052 refactor: cleanup rtmp setup 2025-06-12 09:44:25 +01:00
3a38b05630 fix: ffmpeg-rs-raw ref
All checks were successful
continuous-integration/drone Build is passing
2025-06-09 16:40:37 +01:00
1c651108ea fix: various buffering / av sync issues
Some checks reported errors
continuous-integration/drone Build was killed
2025-06-09 16:33:46 +01:00
5d7da09801 refactor: frame gen 2025-06-09 13:08:03 +01:00
27 changed files with 2814 additions and 1349 deletions

2
.gitignore vendored
View File

@@ -1,3 +1,3 @@
**/target
.idea/
out/
**/out/

430
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -10,10 +10,10 @@ members = [
opt-level = 3
lto = true
codegen-units = 1
panic = "abort"
panic = "unwind"
[workspace.dependencies]
ffmpeg-rs-raw = { git = "https://git.v0l.io/Kieran/ffmpeg-rs-raw.git", rev = "29ab0547478256c574766b4acc6fcda8ebf4cae6" }
ffmpeg-rs-raw = { git = "https://git.v0l.io/Kieran/ffmpeg-rs-raw.git", rev = "056f30c5f9784e5765394469f0d6bef827284b98" }
tokio = { version = "1.36.0", features = ["rt", "rt-multi-thread", "macros"] }
anyhow = { version = "^1.0.91", features = ["backtrace"] }
async-trait = "0.1.77"
@ -24,6 +24,6 @@ url = "2.5.0"
itertools = "0.14.0"
chrono = { version = "^0.4.38", features = ["serde"] }
hex = "0.4.3"
m3u8-rs = "6.0.0"
m3u8-rs = { git = "https://git.v0l.io/Kieran/m3u8-rs.git", rev = "5b7aa0c65994b5ab2780b7ed27d84c03bc32d19f" }
sha2 = "0.10.8"
data-encoding = "2.9.0"

View File

@ -4,18 +4,9 @@ version = "0.1.0"
edition = "2021"
[features]
default = ["test-pattern", "srt", "rtmp"]
default = ["srt", "rtmp"]
srt = ["dep:srt-tokio"]
rtmp = ["dep:rml_rtmp"]
local-overseer = [] # WIP
webhook-overseer = [] # WIP
test-pattern = [
"dep:resvg",
"dep:usvg",
"dep:tiny-skia",
"dep:fontdue",
"dep:ringbuf",
]
[dependencies]
ffmpeg-rs-raw.workspace = true
@ -27,20 +18,23 @@ uuid.workspace = true
serde.workspace = true
hex.workspace = true
itertools.workspace = true
futures-util = "0.3.30"
m3u8-rs.workspace = true
sha2.workspace = true
data-encoding.workspace = true
futures-util = "0.3.30"
resvg = "0.45.1"
usvg = "0.45.1"
tiny-skia = "0.11.4"
fontdue = "0.9.2"
ringbuf = "0.4.7"
libc = "0.2.169"
# srt
srt-tokio = { version = "0.4.3", optional = true }
srt-tokio = { version = "0.4.4", optional = true }
# rtmp
rml_rtmp = { version = "0.8.0", optional = true }
bytes = "1.9.0"
xflv = "0.4.4"
# test-pattern
resvg = { version = "0.44.0", optional = true }
usvg = { version = "0.44.0", optional = true }
tiny-skia = { version = "0.11.4", optional = true }
fontdue = { version = "0.9.2", optional = true }
ringbuf = { version = "0.4.7", optional = true }

View File

@@ -1,24 +1,51 @@
use anyhow::Result;
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVPacket;
use ffmpeg_rs_raw::Encoder;
use std::path::PathBuf;
use uuid::Uuid;
use crate::egress::{Egress, EgressResult};
use crate::mux::HlsMuxer;
use crate::mux::{HlsMuxer, SegmentType};
use crate::variant::VariantStream;
/// Alias the muxer directly
pub type HlsEgress = HlsMuxer;
pub struct HlsEgress {
mux: HlsMuxer,
}
impl Egress for HlsMuxer {
impl HlsEgress {
pub const PATH: &'static str = "hls";
pub fn new<'a>(
id: &Uuid,
out_dir: &str,
segment_length: f32,
encoders: impl Iterator<Item = (&'a VariantStream, &'a Encoder)>,
segment_type: SegmentType,
) -> Result<Self> {
Ok(Self {
mux: HlsMuxer::new(
id,
PathBuf::from(out_dir).join(Self::PATH).to_str().unwrap(),
segment_length,
encoders,
segment_type,
)?,
})
}
}
impl Egress for HlsEgress {
unsafe fn process_pkt(
&mut self,
packet: *mut AVPacket,
variant: &Uuid,
) -> Result<EgressResult> {
self.mux_packet(packet, variant)
self.mux.mux_packet(packet, variant)
}
unsafe fn reset(&mut self) -> Result<()> {
for var in &mut self.variants {
for var in &mut self.mux.variants {
var.reset()?
}
Ok(())

View File

@@ -0,0 +1,568 @@
use crate::overseer::IngressStream;
use anyhow::{bail, Result};
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVColorSpace::AVCOL_SPC_RGB;
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVPictureType::AV_PICTURE_TYPE_NONE;
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVPixelFormat::AV_PIX_FMT_RGBA;
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVSampleFormat::AV_SAMPLE_FMT_FLTP;
use ffmpeg_rs_raw::ffmpeg_sys_the_third::{
av_channel_layout_default, av_frame_alloc, av_frame_free, av_frame_get_buffer, av_q2d,
av_rescale_q, AVFrame, AVPixelFormat, AVRational, AVStream,
};
use ffmpeg_rs_raw::Scaler;
use fontdue::layout::{CoordinateSystem, Layout, TextStyle};
use fontdue::Font;
use std::mem::transmute;
use std::ops::Sub;
use std::time::{Duration, Instant};
use std::{ptr, slice};
/// Frame generator
///
/// Produces placeholder video frames (composed internally as RGBA and
/// converted to the target pixel format on output) and synthetic audio
/// frames, tracking separate PTS clocks for video and audio.
pub struct FrameGenerator {
    // Output frame rate in frames per second.
    fps: f32,
    // Output video dimensions in pixels.
    width: u16,
    height: u16,
    // Pixel format frames are converted to before being returned.
    video_sample_fmt: AVPixelFormat,
    // Audio sample rate in Hz; 0 disables audio generation.
    audio_sample_rate: u32,
    // Samples per generated audio frame.
    audio_frame_size: i32,
    // Number of audio channels.
    audio_channels: u8,
    // Running PTS clocks, expressed in the timebases below.
    video_pts: i64,
    audio_pts: i64,
    // Timebases for frame generation
    video_timebase: AVRational,
    audio_timebase: AVRational,
    // internal
    // Frame currently being composed; null until `begin()` allocates it.
    next_frame: *mut AVFrame,
    scaler: Scaler,
    font: Font,
    // Wall-clock reference used to pace frame production in `next()`.
    start: Instant,
}
impl Drop for FrameGenerator {
fn drop(&mut self) {
unsafe {
if !self.next_frame.is_null() {
av_frame_free(&mut self.next_frame);
self.next_frame = std::ptr::null_mut();
}
}
}
}
impl FrameGenerator {
pub fn new(
fps: f32,
width: u16,
height: u16,
pix_fmt: AVPixelFormat,
sample_rate: u32,
frame_size: i32,
channels: u8,
video_timebase: AVRational,
audio_timebase: AVRational,
) -> Result<Self> {
let font = include_bytes!("../SourceCodePro-Regular.ttf") as &[u8];
let font = Font::from_bytes(font, Default::default()).unwrap();
Ok(Self {
fps,
width,
height,
video_sample_fmt: pix_fmt,
audio_sample_rate: sample_rate,
audio_frame_size: frame_size,
audio_channels: channels,
video_pts: 0,
audio_pts: 0,
video_timebase,
audio_timebase,
font,
start: Instant::now(),
scaler: Scaler::default(),
next_frame: ptr::null_mut(),
})
}
/// Build a generator from ingress stream descriptions.
///
/// Derives the timebases from the stream rates: 1/fps for video and,
/// when an audio stream is present, 1/sample_rate for audio.
pub fn from_stream(
    video_stream: &IngressStream,
    audio_stream: Option<&IngressStream>,
) -> Result<Self> {
    // No need to wrap in Ok(..?): forward the inner Result directly.
    Self::from_stream_with_timebase(
        video_stream,
        audio_stream,
        AVRational {
            num: 1,
            den: video_stream.fps as i32,
        },
        audio_stream.map(|s| AVRational {
            num: 1,
            den: s.sample_rate as i32,
        }),
    )
}
/// Build a generator from ingress stream descriptions with explicit
/// timebases (used when the caller already knows the container/encoder
/// timebases rather than deriving them from the rates).
pub fn from_stream_with_timebase(
    video_stream: &IngressStream,
    audio_stream: Option<&IngressStream>,
    video_timebase: AVRational,
    audio_timebase: Option<AVRational>,
) -> Result<Self> {
    Self::new(
        video_stream.fps,
        video_stream.width as _,
        video_stream.height as _,
        // SAFETY: assumes IngressStream.format holds a raw AVPixelFormat
        // discriminant — TODO confirm against IngressStream's producer.
        unsafe { transmute(video_stream.format as i32) },
        audio_stream.map(|i| i.sample_rate as _).unwrap_or(0),
        // 1024 samples per audio frame is the generator's fixed frame size.
        if audio_stream.is_none() { 0 } else { 1024 },
        audio_stream.map(|i| i.channels as _).unwrap_or(0),
        video_timebase,
        audio_timebase.unwrap_or(AVRational { num: 1, den: 1 }),
    )
}
/// Build a generator directly from libav stream pointers.
///
/// # Safety
/// `video_stream` (and `audio_stream`, when `Some` and non-null) must
/// point to valid, initialized `AVStream`s for the duration of this call.
pub unsafe fn from_av_streams(
    video_stream: *const AVStream,
    audio_stream: Option<*const AVStream>,
) -> Result<Self> {
    if video_stream.is_null() {
        bail!("Video stream cannot be null");
    }

    let video_codec_par = (*video_stream).codecpar;
    let video_timebase = (*video_stream).time_base;

    // Extract video stream properties
    let width = (*video_codec_par).width as u16;
    let height = (*video_codec_par).height as u16;
    // SAFETY: codecpar.format holds a raw AVPixelFormat discriminant for
    // video streams.
    let pix_fmt = unsafe { transmute((*video_codec_par).format) };

    // Derive FPS from the stream's real frame rate (r_frame_rate).
    let fps = av_q2d((*video_stream).r_frame_rate) as f32;

    // Extract audio stream properties if available; fall back to "no
    // audio" (rate 0) with a nominal 1/44100 timebase otherwise.
    let (sample_rate, channels, audio_timebase) = if let Some(audio_stream) = audio_stream {
        if !audio_stream.is_null() {
            let audio_codec_par = (*audio_stream).codecpar;
            let audio_tb = (*audio_stream).time_base;
            (
                (*audio_codec_par).sample_rate as u32,
                (*audio_codec_par).ch_layout.nb_channels as u8,
                audio_tb,
            )
        } else {
            (0, 0, AVRational { num: 1, den: 44100 })
        }
    } else {
        (0, 0, AVRational { num: 1, den: 44100 })
    };

    // Fixed 1024-sample audio frames when audio is enabled.
    let frame_size = if sample_rate > 0 { 1024 } else { 0 };

    Ok(Self::new(
        fps,
        width,
        height,
        pix_fmt,
        sample_rate,
        frame_size,
        channels,
        video_timebase,
        audio_timebase,
    )?)
}
/// Current video frame index derived from the accumulated video PTS.
///
/// NOTE(review): divides by `pts_per_frame()`; with a sub-1 fps the
/// integer math there can evaluate to 0 — confirm callers only use
/// integer frame rates >= 1.
pub fn frame_no(&self) -> u64 {
    (self.video_pts / self.pts_per_frame()) as u64
}
/// Set the starting PTS values for video and audio
///
/// Also back-dates the wall-clock reference so that pacing in `next()`
/// treats the supplied video PTS as time that has already elapsed.
pub fn set_starting_pts(&mut self, video_pts: i64, audio_pts: i64) {
    self.video_pts = video_pts;
    self.audio_pts = audio_pts;
    // pts -> frames (/pts_per_frame) -> seconds (/fps), subtracted from now.
    self.start = Instant::now().sub(Duration::from_secs_f64(
        video_pts as f64 / self.pts_per_frame() as f64 / self.fps as f64,
    ));
}
/// Create a new frame for composing text / images
///
/// Lazily allocates the pending frame; calling `begin()` while a frame is
/// already pending is a no-op. The pending frame is always RGBA and is
/// converted to the configured output format when emitted by `next()`.
pub fn begin(&mut self) -> Result<()> {
    if self.next_frame.is_null() {
        unsafe {
            let mut src_frame = av_frame_alloc();
            if src_frame.is_null() {
                bail!("Failed to allocate placeholder video frame");
            }

            (*src_frame).width = self.width as _;
            (*src_frame).height = self.height as _;
            (*src_frame).pict_type = AV_PICTURE_TYPE_NONE;
            // Mark as a key frame so downstream encoders can start cleanly.
            (*src_frame).key_frame = 1;
            (*src_frame).colorspace = AVCOL_SPC_RGB;
            //internally always use RGBA, we convert frame to target pixel format at the end
            (*src_frame).format = AV_PIX_FMT_RGBA as _;
            (*src_frame).pts = self.video_pts;
            (*src_frame).duration = self.pts_per_frame() as _;
            (*src_frame).time_base = self.video_timebase;
            if av_frame_get_buffer(src_frame, 0) < 0 {
                // Free the shell frame before bailing to avoid a leak.
                av_frame_free(&mut src_frame);
                bail!("Failed to get frame buffer");
            }
            self.next_frame = src_frame;
        }
    }
    Ok(())
}
/// Render a text string into the pending frame at the given position.
pub fn write_text(&mut self, msg: &str, size: f32, x: f32, y: f32) -> Result<()> {
    if self.next_frame.is_null() {
        bail!("Must call begin() before writing text")
    }

    // Lay the string out top-down, then rasterize it into the frame.
    let style = TextStyle::new(msg, size, 0);
    let mut text_layout = Layout::new(CoordinateSystem::PositiveYDown);
    text_layout.append(&[&self.font], &style);
    self.write_layout(text_layout, x, y)
}
/// Write text layout into frame
///
/// Rasterizes each glyph (subpixel RGB coverage) into the pending RGBA
/// frame. Pixels that would land outside the frame are clipped instead of
/// being written out of bounds — the previous version wrote past the end
/// of the frame buffer for text positioned near or beyond the frame edge.
fn write_layout(&mut self, layout: Layout, x: f32, y: f32) -> Result<()> {
    let width = self.width as usize;
    let height = self.height as usize;
    for g in layout.glyphs() {
        let (metrics, bitmap) = self.font.rasterize_config_subpixel(g.key);
        for y1 in 0..metrics.height {
            let dst_y = y as usize + y1 + g.y as usize;
            if dst_y >= height {
                continue; // clip rows below the frame
            }
            for x1 in 0..metrics.width {
                let dst_x = x as usize + x1 + g.x as usize;
                if dst_x >= width {
                    continue; // clip columns right of the frame
                }
                // Source bitmap is 3 bytes/pixel (RGB coverage);
                // destination is 4 bytes/pixel (RGBA), alpha untouched.
                let offset_src = (x1 + y1 * metrics.width) * 3;
                unsafe {
                    let offset_dst =
                        4 * dst_x + dst_y * (*self.next_frame).linesize[0] as usize;
                    let pixel_dst = (*self.next_frame).data[0].add(offset_dst);
                    *pixel_dst.offset(0) = bitmap[offset_src];
                    *pixel_dst.offset(1) = bitmap[offset_src + 1];
                    *pixel_dst.offset(2) = bitmap[offset_src + 2];
                }
            }
        }
    }
    Ok(())
}
/// Flood-fill the pending frame with a single RGBA color.
pub unsafe fn fill_color(&mut self, color32: [u8; 4]) -> Result<()> {
    if self.next_frame.is_null() {
        bail!("Must call begin() before writing frame data")
    }
    let pixel_count = self.width as usize * self.height as usize;
    let buf = slice::from_raw_parts_mut((*self.next_frame).data[0], pixel_count * 4);
    // Walk the buffer one RGBA pixel (4 bytes) at a time.
    for px in buf.chunks_exact_mut(4) {
        px.copy_from_slice(&color32);
    }
    Ok(())
}
/// Copy data directly into the frame buffer (must be RGBA data)
///
/// The frame must have been initialized with `begin()`. `data` may be
/// shorter than the full frame buffer; only `data.len()` bytes are
/// written (the previous version panicked in that case, because
/// `copy_from_slice` requires equal lengths).
pub unsafe fn copy_frame_data(&mut self, data: &[u8]) -> Result<()> {
    if self.next_frame.is_null() {
        bail!("Must call begin() before writing frame data")
    }
    let buf = slice::from_raw_parts_mut(
        (*self.next_frame).data[0],
        self.width as usize * self.height as usize * 4,
    );
    if buf.len() < data.len() {
        bail!("Frame buffer is too small");
    }
    // Copy into the matching prefix so a smaller source is accepted.
    buf[..data.len()].copy_from_slice(data);
    Ok(())
}
/// Number of PTS ticks spanned by one video frame in the video timebase.
///
/// Computed in floating point so fractional frame rates (e.g. 29.97) are
/// not truncated to the wrong tick count, and clamped to at least 1 tick
/// so callers that divide by this value cannot hit a divide-by-zero
/// (the previous integer form did `fps as i64`, which truncates and is 0
/// for fps < 1). Integer frame rates produce the same result as before.
fn pts_per_frame(&self) -> i64 {
    let ticks =
        self.video_timebase.den as f64 / (self.video_timebase.num as f64 * self.fps as f64);
    (ticks.round() as i64).max(1)
}
/// Convert a sample count into PTS ticks in the audio timebase.
fn pts_of_nb_samples(&self, n: i64) -> i64 {
    // samples -> seconds, then seconds -> ticks via the timebase length.
    let seconds = n as f64 / self.audio_sample_rate as f64;
    let tick_seconds = unsafe { av_q2d(self.audio_timebase) };
    (seconds / tick_seconds) as _
}
/// Generate audio to stay synchronized with video frames
unsafe fn generate_audio_frame(&mut self) -> Result<*mut AVFrame> {
const FREQUENCY: f32 = 440.0; // A4 note
// audio is disabled if sample rate is 0
if self.audio_sample_rate == 0 {
return Ok(ptr::null_mut());
}
// Calculate audio PTS needed to stay ahead of next video frame
let next_video_pts = self.video_pts + self.pts_per_frame();
// Convert video PTS to audio timebase to see how much audio we need
let audio_pts_needed =
av_rescale_q(next_video_pts, self.video_timebase, self.audio_timebase);
// Generate audio if we don't have enough to cover the next video frame
if self.audio_pts < audio_pts_needed {
let audio_frame = av_frame_alloc();
(*audio_frame).format = AV_SAMPLE_FMT_FLTP as _;
(*audio_frame).nb_samples = self.audio_frame_size as _;
(*audio_frame).duration = self.audio_frame_size as _;
(*audio_frame).sample_rate = self.audio_sample_rate as _;
(*audio_frame).pts = self.audio_pts;
(*audio_frame).time_base = self.audio_timebase;
(*audio_frame).duration = self.pts_of_nb_samples(self.audio_frame_size as _);
av_channel_layout_default(&mut (*audio_frame).ch_layout, self.audio_channels as _);
av_frame_get_buffer(audio_frame, 0);
// Generate sine wave samples for all channels
for ch in 0..self.audio_channels {
let data = (*audio_frame).data[ch as usize] as *mut f32;
for i in 0..self.audio_frame_size {
let sample_time =
(self.audio_pts + i as i64) as f32 / self.audio_sample_rate as f32;
let sample_value =
(2.0 * std::f32::consts::PI * FREQUENCY * sample_time).sin() * 0.5;
*data.add(i as _) = sample_value;
}
}
return Ok(audio_frame);
}
Ok(ptr::null_mut())
}
/// Return the next frame for encoding (blocking)
///
/// Emits pending audio first so the audio clock stays ahead of video,
/// then paces video frames against wall-clock time before returning a
/// frame in the configured output pixel format. Ownership of the returned
/// frame passes to the caller, who must free it.
pub unsafe fn next(&mut self) -> Result<*mut AVFrame> {
    // set start time to now if this is the first call to next()
    if self.video_pts == 0 {
        self.start = Instant::now();
    }

    // try to get audio frames before video frames (non-blocking)
    let audio_frame = self.generate_audio_frame()?;
    if !audio_frame.is_null() {
        self.audio_pts += (*audio_frame).duration;
        return Ok(audio_frame);
    }

    // auto-init frame
    if self.next_frame.is_null() {
        self.begin()?;
    }

    // Stream time represented by the current video PTS
    // (pts -> frames -> seconds).
    let stream_time = Duration::from_secs_f64(
        self.video_pts as f64 / self.pts_per_frame() as f64 / self.fps as f64,
    );
    let real_time = self.start.elapsed();
    let wait_time = if stream_time > real_time {
        stream_time - real_time
    } else {
        Duration::new(0, 0)
    };
    // NOTE(review): sleeps only when more than one frame ahead of real
    // time — presumably to allow slight bursting; confirm this is intended
    // rather than sleeping on any positive wait.
    if !wait_time.is_zero() && wait_time.as_secs_f32() > 1f32 / self.fps {
        std::thread::sleep(wait_time);
    }

    // convert to output pixel format, or just return internal frame if it matches output
    if self.video_sample_fmt != transmute((*self.next_frame).format) {
        let out_frame = self.scaler.process_frame(
            self.next_frame,
            self.width,
            self.height,
            self.video_sample_fmt,
        )?;
        // Advance the video clock by the emitted frame's duration before
        // releasing the internal RGBA frame.
        self.video_pts += (*self.next_frame).duration;
        av_frame_free(&mut self.next_frame);
        self.next_frame = ptr::null_mut();
        Ok(out_frame)
    } else {
        // Formats match: hand the internal frame to the caller directly.
        let ret = self.next_frame;
        self.video_pts += (*self.next_frame).duration;
        self.next_frame = ptr::null_mut();
        Ok(ret)
    }
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVPixelFormat::AV_PIX_FMT_YUV420P;

    // Verifies that generated audio always stays ahead of the video clock
    // and that the overall audio/video sample ratio matches the configured
    // rates over a 2-second run.
    #[test]
    fn test_frame_timing_synchronization() {
        unsafe {
            let fps = 30.0;
            let sample_rate = 44100;
            let frame_size = 1024;
            let channels = 2;

            let mut gen = FrameGenerator::new(
                fps,
                1280,
                720,
                AV_PIX_FMT_YUV420P,
                sample_rate,
                frame_size,
                channels,
                AVRational {
                    num: 1,
                    den: fps as i32,
                },
                AVRational {
                    num: 1,
                    den: sample_rate as i32,
                },
            )
            .unwrap();

            let samples_per_frame = sample_rate as f64 / fps as f64; // Expected: 1470 samples per frame
            println!("Expected samples per video frame: {:.2}", samples_per_frame);

            let mut audio_frames = 0;
            let mut video_frames = 0;
            let mut total_audio_samples = 0;

            // Generate frames for 2 seconds (60 video frames at 30fps)
            for i in 0..120 {
                let mut frame = gen.next().unwrap();

                // Audio frames are distinguished by a non-zero sample_rate.
                if (*frame).sample_rate > 0 {
                    // Audio frame
                    audio_frames += 1;
                    total_audio_samples += (*frame).nb_samples as u64;
                    println!(
                        "Frame {}: AUDIO - PTS: {}, samples: {}, total_samples: {}",
                        i,
                        (*frame).pts,
                        (*frame).nb_samples,
                        total_audio_samples
                    );
                } else {
                    // Video frame
                    video_frames += 1;
                    let expected_audio_samples = (video_frames as f64 * samples_per_frame) as u64;
                    let audio_deficit = if total_audio_samples >= expected_audio_samples {
                        0
                    } else {
                        expected_audio_samples - total_audio_samples
                    };
                    println!("Frame {}: VIDEO - PTS: {}, frame_idx: {}, expected_audio: {}, actual_audio: {}, deficit: {}",
                             i, (*frame).pts, video_frames, expected_audio_samples, total_audio_samples, audio_deficit);

                    // Verify we have enough audio for this video frame
                    assert!(
                        total_audio_samples >= expected_audio_samples,
                        "Video frame {} needs {} audio samples but only have {}",
                        video_frames,
                        expected_audio_samples,
                        total_audio_samples
                    );
                }

                av_frame_free(&mut frame);
            }

            println!("\nSummary:");
            println!("Video frames: {}", video_frames);
            println!("Audio frames: {}", audio_frames);
            println!("Total audio samples: {}", total_audio_samples);
            println!(
                "Expected audio samples for {} video frames: {:.2}",
                video_frames,
                video_frames as f64 * samples_per_frame
            );

            // Verify the ratio is correct
            let expected_total_audio = video_frames as f64 * samples_per_frame;
            let sample_accuracy = (total_audio_samples as f64 - expected_total_audio).abs();
            println!("Sample accuracy (difference): {:.2}", sample_accuracy);

            // Allow for some tolerance due to frame size constraints
            assert!(
                sample_accuracy < frame_size as f64,
                "Audio sample count too far from expected: got {}, expected {:.2}, diff {:.2}",
                total_audio_samples,
                expected_total_audio,
                sample_accuracy
            );
        }
    }

    // Verifies that PTS values advance by a constant step for both audio
    // (frame_size ticks) and video (1 tick in a 1/fps timebase).
    #[test]
    fn test_pts_progression() {
        unsafe {
            let fps = 30.0;
            let sample_rate = 44100;

            let mut gen = FrameGenerator::new(
                fps,
                1280,
                720,
                AV_PIX_FMT_YUV420P,
                sample_rate,
                1024,
                2,
                AVRational {
                    num: 1,
                    den: fps as i32,
                },
                AVRational {
                    num: 1,
                    den: sample_rate as i32,
                },
            )
            .unwrap();

            let mut last_audio_pts = -1i64;
            let mut last_video_pts = -1i64;
            let mut audio_pts_gaps = Vec::new();
            let mut video_pts_gaps = Vec::new();

            // Generate 60 frames to test PTS progression
            for _ in 0..60 {
                let mut frame = gen.next().unwrap();

                if (*frame).sample_rate > 0 {
                    // Audio frame - check PTS progression
                    if last_audio_pts >= 0 {
                        let gap = (*frame).pts - last_audio_pts;
                        audio_pts_gaps.push(gap);
                        println!("Audio PTS gap: {}", gap);
                    }
                    last_audio_pts = (*frame).pts;
                } else {
                    // Video frame - check PTS progression
                    if last_video_pts >= 0 {
                        let gap = (*frame).pts - last_video_pts;
                        video_pts_gaps.push(gap);
                        println!("Video PTS gap: {}", gap);
                    }
                    last_video_pts = (*frame).pts;
                }

                av_frame_free(&mut frame);
            }

            // Verify audio PTS gaps are consistent (should be 1024 samples)
            for gap in &audio_pts_gaps {
                assert_eq!(
                    *gap, 1024,
                    "Audio PTS should increment by frame_size (1024)"
                );
            }

            // Verify video PTS gaps are consistent (should be 1 frame)
            for gap in &video_pts_gaps {
                assert_eq!(*gap, 1, "Video PTS should increment by 1 frame");
            }

            println!("PTS progression test passed - all gaps are consistent");
        }
    }
}

View File

@ -5,13 +5,15 @@ use log::info;
use std::path::PathBuf;
use std::sync::Arc;
use tokio::runtime::Handle;
use uuid::Uuid;
pub async fn listen(out_dir: String, path: PathBuf, overseer: Arc<dyn Overseer>) -> Result<()> {
info!("Sending file: {}", path.display());
let info = ConnectionInfo {
id: Uuid::new_v4(),
ip_addr: "127.0.0.1:6969".to_string(),
endpoint: "file-input".to_owned(),
endpoint: "file-input",
app_name: "".to_string(),
key: "test".to_string(),
};

View File

@ -1,10 +1,12 @@
use crate::overseer::Overseer;
use crate::pipeline::runner::PipelineRunner;
use log::{error, info};
use log::{error, info, warn};
use serde::{Deserialize, Serialize};
use std::io::Read;
use std::sync::Arc;
use std::time::Instant;
use tokio::runtime::Handle;
use uuid::Uuid;
pub mod file;
#[cfg(feature = "rtmp")]
@ -12,13 +14,15 @@ pub mod rtmp;
#[cfg(feature = "srt")]
pub mod srt;
pub mod tcp;
#[cfg(feature = "test-pattern")]
pub mod test;
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ConnectionInfo {
/// Endpoint of the ingress
pub endpoint: String,
/// Unique ID of this connection / pipeline
pub id: Uuid,
/// Name of the ingest point
pub endpoint: &'static str,
/// IP address of the connection
pub ip_addr: String,
@ -37,33 +41,105 @@ pub fn spawn_pipeline(
seer: Arc<dyn Overseer>,
reader: Box<dyn Read + Send>,
) {
info!("New client connected: {}", &info.ip_addr);
let seer = seer.clone();
let out_dir = out_dir.to_string();
std::thread::spawn(move || unsafe {
match PipelineRunner::new(handle, out_dir, seer, info, reader) {
Ok(mut pl) => loop {
match pl.run() {
Ok(c) => {
if !c {
if let Err(e) = pl.flush() {
error!("Pipeline flush failed: {}", e);
}
break;
}
}
Err(e) => {
if let Err(e) = pl.flush() {
error!("Pipeline flush failed: {}", e);
}
error!("Pipeline run failed: {}", e);
break;
}
}
},
match PipelineRunner::new(handle, out_dir, seer, info, reader, None) {
Ok(pl) => match run_pipeline(pl) {
Ok(_) => {}
Err(e) => {
error!("Failed to create PipelineRunner: {}", e);
error!("Failed to run PipelineRunner: {}", e);
}
},
Err(e) => {
error!("Failed to create PipelineRunner: {}", e);
}
});
}
}
pub fn run_pipeline(mut pl: PipelineRunner) -> anyhow::Result<()> {
info!("New client connected: {}", &pl.connection.ip_addr);
std::thread::Builder::new()
.name(format!(
"client:{}:{}",
pl.connection.endpoint, pl.connection.id
))
.spawn(move || {
pl.run();
})?;
Ok(())
}
/// Common buffered reader functionality for ingress sources
pub struct BufferedReader {
pub buf: Vec<u8>,
pub max_buffer_size: usize,
pub last_buffer_log: Instant,
pub bytes_processed: u64,
pub packets_received: u64,
pub source_name: &'static str,
}
impl BufferedReader {
pub fn new(capacity: usize, max_size: usize, source_name: &'static str) -> Self {
Self {
buf: Vec::with_capacity(capacity),
max_buffer_size: max_size,
last_buffer_log: Instant::now(),
bytes_processed: 0,
packets_received: 0,
source_name,
}
}
/// Add data to buffer with size limit and performance tracking
pub fn add_data(&mut self, data: &[u8]) {
// Inline buffer management to avoid borrow issues
if self.buf.len() + data.len() > self.max_buffer_size {
let bytes_to_drop = (self.buf.len() + data.len()) - self.max_buffer_size;
warn!(
"{} buffer full ({} bytes), dropping {} oldest bytes",
self.source_name,
self.buf.len(),
bytes_to_drop
);
self.buf.drain(..bytes_to_drop);
}
self.buf.extend(data);
// Update performance counters
self.bytes_processed += data.len() as u64;
self.packets_received += 1;
// Log buffer status every 5 seconds
if self.last_buffer_log.elapsed().as_secs() >= 5 {
let buffer_util = (self.buf.len() as f32 / self.max_buffer_size as f32) * 100.0;
let elapsed = self.last_buffer_log.elapsed();
let mbps = (self.bytes_processed as f64 * 8.0) / (elapsed.as_secs_f64() * 1_000_000.0);
let pps = self.packets_received as f64 / elapsed.as_secs_f64();
info!(
"{} ingress: {:.1} Mbps, {:.1} packets/sec, buffer: {}% ({}/{} bytes)",
self.source_name,
mbps,
pps,
buffer_util as u32,
self.buf.len(),
self.max_buffer_size
);
// Reset counters
self.last_buffer_log = Instant::now();
self.bytes_processed = 0;
self.packets_received = 0;
}
}
/// Read data from buffer
pub fn read_buffered(&mut self, buf: &mut [u8]) -> usize {
let to_drain = buf.len().min(self.buf.len());
if to_drain > 0 {
let drain = self.buf.drain(..to_drain);
buf[..to_drain].copy_from_slice(drain.as_slice());
}
to_drain
}
}

View File

@ -1,111 +1,80 @@
use crate::ingress::{spawn_pipeline, ConnectionInfo};
use crate::ingress::{BufferedReader, ConnectionInfo};
use crate::overseer::Overseer;
use anyhow::{bail, Result};
use log::{error, info, warn};
use crate::pipeline::runner::PipelineRunner;
use anyhow::{anyhow, bail, Result};
use bytes::{Bytes, BytesMut};
use log::{error, info};
use rml_rtmp::handshake::{Handshake, HandshakeProcessResult, PeerType};
use rml_rtmp::sessions::{
ServerSession, ServerSessionConfig, ServerSessionEvent, ServerSessionResult,
};
use std::collections::VecDeque;
use std::io::{ErrorKind, Read, Write};
use std::net::TcpStream;
use std::sync::Arc;
use std::time::Duration;
use tokio::io::{AsyncReadExt, AsyncWriteExt};
use tokio::net::{TcpListener, TcpStream};
use tokio::net::TcpListener;
use tokio::runtime::Handle;
use tokio::time::Instant;
use uuid::Uuid;
use xflv::errors::FlvMuxerError;
use xflv::muxer::FlvMuxer;
const MAX_MEDIA_BUFFER_SIZE: usize = 10 * 1024 * 1024; // 10MB limit
#[derive(PartialEq, Eq, Clone, Hash)]
struct RtmpPublishedStream(String, String);
struct RtmpClient {
socket: std::net::TcpStream,
media_buf: Vec<u8>,
socket: TcpStream,
buffer: BufferedReader,
session: ServerSession,
msg_queue: VecDeque<ServerSessionResult>,
reader_buf: [u8; 4096],
pub published_stream: Option<RtmpPublishedStream>,
last_buffer_log: Instant,
bytes_processed: u64,
frames_received: u64,
muxer: FlvMuxer,
}
impl RtmpClient {
/// Add data to media buffer with size limit to prevent unbounded growth
fn add_to_media_buffer(&mut self, data: &[u8]) {
if self.media_buf.len() + data.len() > MAX_MEDIA_BUFFER_SIZE {
let bytes_to_drop = (self.media_buf.len() + data.len()) - MAX_MEDIA_BUFFER_SIZE;
warn!("RTMP buffer full ({} bytes), dropping {} oldest bytes",
self.media_buf.len(), bytes_to_drop);
self.media_buf.drain(..bytes_to_drop);
}
self.media_buf.extend(data);
// Update performance counters
self.bytes_processed += data.len() as u64;
self.frames_received += 1;
// Log buffer status every 5 seconds
if self.last_buffer_log.elapsed().as_secs() >= 5 {
let buffer_util = (self.media_buf.len() as f32 / MAX_MEDIA_BUFFER_SIZE as f32) * 100.0;
let elapsed = self.last_buffer_log.elapsed();
let mbps = (self.bytes_processed as f64 * 8.0) / (elapsed.as_secs_f64() * 1_000_000.0);
let fps = self.frames_received as f64 / elapsed.as_secs_f64();
info!(
"RTMP ingress: {:.1} Mbps, {:.1} frames/sec, buffer: {}% ({}/{} bytes)",
mbps, fps, buffer_util as u32, self.media_buf.len(), MAX_MEDIA_BUFFER_SIZE
);
// Reset counters
self.last_buffer_log = Instant::now();
self.bytes_processed = 0;
self.frames_received = 0;
}
pub fn new(socket: TcpStream) -> Result<Self> {
socket.set_nonblocking(false)?;
let cfg = ServerSessionConfig::new();
let (ses, res) = ServerSession::new(cfg)?;
Ok(Self {
socket,
session: ses,
buffer: BufferedReader::new(1024 * 1024, MAX_MEDIA_BUFFER_SIZE, "RTMP"),
msg_queue: VecDeque::from(res),
published_stream: None,
muxer: FlvMuxer::new(),
})
}
async fn start(mut socket: TcpStream) -> Result<Self> {
pub fn handshake(&mut self) -> Result<()> {
let mut hs = Handshake::new(PeerType::Server);
let exchange = hs.generate_outbound_p0_and_p1()?;
socket.write_all(&exchange).await?;
self.socket.write_all(&exchange)?;
let mut buf = [0; 4096];
loop {
let r = socket.read(&mut buf).await?;
let r = self.socket.read(&mut buf)?;
if r == 0 {
bail!("EOF reached while reading");
}
match hs.process_bytes(&buf[..r])? {
HandshakeProcessResult::InProgress { response_bytes } => {
socket.write_all(&response_bytes).await?;
self.socket.write_all(&response_bytes)?;
}
HandshakeProcessResult::Completed {
response_bytes,
remaining_bytes,
} => {
socket.write_all(&response_bytes).await?;
self.socket.write_all(&response_bytes)?;
let cfg = ServerSessionConfig::new();
let (mut ses, mut res) = ServerSession::new(cfg)?;
let q = ses.handle_input(&remaining_bytes)?;
res.extend(q);
let ret = Self {
socket: socket.into_std()?,
media_buf: vec![],
session: ses,
msg_queue: VecDeque::from(res),
reader_buf: [0; 4096],
published_stream: None,
last_buffer_log: Instant::now(),
bytes_processed: 0,
frames_received: 0,
};
return Ok(ret);
let q = self.session.handle_input(&remaining_bytes)?;
self.msg_queue.extend(q);
return Ok(());
}
}
}
@ -123,27 +92,28 @@ impl RtmpClient {
Ok(())
}
fn read_data(&mut self) -> Result<()> {
let r = match self.socket.read(&mut self.reader_buf) {
fn read_data(&mut self) -> Result<Option<usize>> {
let mut buf = [0; 4096];
let r = match self.socket.read(&mut buf) {
Ok(r) => r,
Err(e) => {
return match e.kind() {
ErrorKind::WouldBlock => Ok(()),
ErrorKind::Interrupted => Ok(()),
ErrorKind::WouldBlock => Ok(None),
ErrorKind::Interrupted => Ok(None),
_ => Err(anyhow::Error::new(e)),
};
}
};
if r == 0 {
bail!("EOF");
return Ok(Some(0));
}
let mx = self.session.handle_input(&self.reader_buf[..r])?;
let mx = self.session.handle_input(&buf[..r])?;
if !mx.is_empty() {
self.msg_queue.extend(mx);
self.process_msg_queue()?;
}
Ok(())
Ok(Some(r))
}
fn process_msg_queue(&mut self) -> Result<()> {
@ -154,18 +124,52 @@ impl RtmpClient {
}
ServerSessionResult::RaisedEvent(ev) => self.handle_event(ev)?,
ServerSessionResult::UnhandleableMessageReceived(m) => {
// Log unhandleable messages for debugging
// Log unhandleable messages for debugging
error!("Received unhandleable message with {} bytes", m.data.len());
// Only append data if it looks like valid media data
if !m.data.is_empty() && m.data.len() > 4 {
self.add_to_media_buffer(&m.data);
}
}
}
}
Ok(())
}
fn write_flv_header(&mut self, metadata: &rml_rtmp::sessions::StreamMetadata) -> Result<()> {
let has_video = metadata.video_codec_id.is_some();
let has_audio = metadata.audio_codec_id.is_some();
self.muxer
.write_flv_header(has_audio, has_video)
.map_err(|e| anyhow!("failed to write flv header {}", e))?;
self.muxer
.write_previous_tag_size(0)
.map_err(|e| anyhow!("failed to write flv header {}", e))?;
// Extract data from the muxer
let data = self.muxer.writer.extract_current_bytes();
self.buffer.add_data(&data);
info!(
"FLV header written with audio: {}, video: {}",
has_audio, has_video
);
Ok(())
}
fn write_flv_tag(
&mut self,
tag_type: u8,
timestamp: u32,
data: Bytes,
) -> Result<(), FlvMuxerError> {
let body_len = data.len();
self.muxer
.write_flv_tag_header(tag_type, body_len as _, timestamp)?;
self.muxer.write_flv_tag_body(BytesMut::from(data))?;
self.muxer.write_previous_tag_size((11 + body_len) as _)?;
let flv_data = self.muxer.writer.extract_current_bytes();
self.buffer.add_data(&flv_data);
Ok(())
}
fn handle_event(&mut self, event: ServerSessionEvent) -> Result<()> {
match event {
ServerSessionEvent::ClientChunkSizeChanged { new_chunk_size } => {
@ -197,7 +201,9 @@ impl RtmpClient {
self.published_stream = Some(RtmpPublishedStream(app_name, stream_key));
}
}
ServerSessionEvent::PublishStreamFinished { .. } => {}
ServerSessionEvent::PublishStreamFinished { .. } => {
// TODO: shutdown pipeline
}
ServerSessionEvent::StreamMetadataChanged {
app_name,
stream_key,
@ -207,22 +213,19 @@ impl RtmpClient {
"Metadata configured: {}/{} {:?}",
app_name, stream_key, metadata
);
self.write_flv_header(&metadata)?;
}
ServerSessionEvent::AudioDataReceived { data, .. } => {
// Validate audio data before adding to buffer
if !data.is_empty() {
self.add_to_media_buffer(&data);
} else {
error!("Received empty audio data");
}
ServerSessionEvent::AudioDataReceived {
data, timestamp, ..
} => {
self.write_flv_tag(8, timestamp.value, data)
.map_err(|e| anyhow!("failed to write flv tag: {}", e))?;
}
ServerSessionEvent::VideoDataReceived { data, .. } => {
// Validate video data before adding to buffer
if !data.is_empty() {
self.add_to_media_buffer(&data);
} else {
error!("Received empty video data");
}
ServerSessionEvent::VideoDataReceived {
data, timestamp, ..
} => {
self.write_flv_tag(9, timestamp.value, data)
.map_err(|e| anyhow!("failed to write flv tag: {}", e))?;
}
ServerSessionEvent::UnhandleableAmf0Command { .. } => {}
ServerSessionEvent::PlayStreamRequested { request_id, .. } => {
@ -241,18 +244,25 @@ impl RtmpClient {
impl Read for RtmpClient {
fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
// block this thread until something comes into [media_buf]
while self.media_buf.is_empty() {
if let Err(e) = self.read_data() {
error!("Error reading data: {}", e);
return Ok(0);
};
// Block until we have enough data to fill the buffer
while self.buffer.buf.len() < buf.len() {
match self.read_data() {
Ok(Some(0)) => {
let r = self.buffer.read_buffered(buf);
if r == 0 {
return Err(std::io::Error::other(anyhow!("EOF")));
}
return Ok(r);
}
Err(e) => {
error!("Error reading data: {}", e);
return Ok(0);
}
_ => continue,
}
}
let to_read = buf.len().min(self.media_buf.len());
let drain = self.media_buf.drain(..to_read);
buf[..to_read].copy_from_slice(drain.as_slice());
Ok(to_read)
Ok(self.buffer.read_buffered(buf))
}
}
@ -261,32 +271,44 @@ pub async fn listen(out_dir: String, addr: String, overseer: Arc<dyn Overseer>)
info!("RTMP listening on: {}", &addr);
while let Ok((socket, ip)) = listener.accept().await {
let mut cc = RtmpClient::start(socket).await?;
let addr = addr.clone();
let mut cc = RtmpClient::new(socket.into_std()?)?;
let overseer = overseer.clone();
let out_dir = out_dir.clone();
let handle = Handle::current();
let new_id = Uuid::new_v4();
std::thread::Builder::new()
.name("rtmp-client".to_string())
.name(format!("client:rtmp:{}", new_id))
.spawn(move || {
if let Err(e) = cc.read_until_publish_request(Duration::from_secs(10)) {
error!("{}", e);
} else {
let pr = cc.published_stream.as_ref().unwrap();
let info = ConnectionInfo {
ip_addr: ip.to_string(),
endpoint: addr.clone(),
app_name: pr.0.clone(),
key: pr.1.clone(),
};
spawn_pipeline(
handle,
info,
out_dir.clone(),
overseer.clone(),
Box::new(cc),
);
if let Err(e) = cc.handshake() {
bail!("Error during handshake: {}", e)
}
if let Err(e) = cc.read_until_publish_request(Duration::from_secs(10)) {
bail!("Error waiting for publish request: {}", e)
}
let pr = cc.published_stream.as_ref().unwrap();
let info = ConnectionInfo {
id: new_id,
ip_addr: ip.to_string(),
endpoint: "rtmp",
app_name: pr.0.clone(),
key: pr.1.clone(),
};
let mut pl = match PipelineRunner::new(
handle,
out_dir,
overseer,
info,
Box::new(cc),
None,
) {
Ok(pl) => pl,
Err(e) => {
bail!("Failed to create PipelineRunner {}", e)
}
};
pl.run();
Ok(())
})?;
}
Ok(())

View File

@ -1,15 +1,15 @@
use crate::ingress::{spawn_pipeline, ConnectionInfo};
use crate::ingress::{spawn_pipeline, BufferedReader, ConnectionInfo};
use crate::overseer::Overseer;
use anyhow::Result;
use futures_util::stream::FusedStream;
use futures_util::StreamExt;
use log::{info, warn};
use log::info;
use srt_tokio::{SrtListener, SrtSocket};
use std::io::Read;
use std::net::SocketAddr;
use std::sync::Arc;
use std::time::Instant;
use tokio::runtime::Handle;
use uuid::Uuid;
const MAX_SRT_BUFFER_SIZE: usize = 10 * 1024 * 1024; // 10MB limit
@ -21,7 +21,8 @@ pub async fn listen(out_dir: String, addr: String, overseer: Arc<dyn Overseer>)
while let Some(request) = packets.incoming().next().await {
let socket = request.accept(None).await?;
let info = ConnectionInfo {
endpoint: addr.clone(),
id: Uuid::new_v4(),
endpoint: "srt",
ip_addr: socket.settings().remote.to_string(),
app_name: "".to_string(),
key: socket
@ -38,10 +39,7 @@ pub async fn listen(out_dir: String, addr: String, overseer: Arc<dyn Overseer>)
Box::new(SrtReader {
handle: Handle::current(),
socket,
buf: Vec::with_capacity(4096),
last_buffer_log: Instant::now(),
bytes_processed: 0,
packets_received: 0,
buffer: BufferedReader::new(4096, MAX_SRT_BUFFER_SIZE, "SRT"),
}),
);
}
@ -51,56 +49,21 @@ pub async fn listen(out_dir: String, addr: String, overseer: Arc<dyn Overseer>)
struct SrtReader {
pub handle: Handle,
pub socket: SrtSocket,
pub buf: Vec<u8>,
last_buffer_log: Instant,
bytes_processed: u64,
packets_received: u64,
pub buffer: BufferedReader,
}
impl Read for SrtReader {
fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
let (mut rx, _) = self.socket.split_mut();
while self.buf.len() < buf.len() {
while self.buffer.buf.len() < buf.len() {
if rx.is_terminated() {
return Ok(0);
}
if let Some((_, data)) = self.handle.block_on(rx.next()) {
let data_slice = data.iter().as_slice();
// Inline buffer management to avoid borrow issues
if self.buf.len() + data_slice.len() > MAX_SRT_BUFFER_SIZE {
let bytes_to_drop = (self.buf.len() + data_slice.len()) - MAX_SRT_BUFFER_SIZE;
warn!("SRT buffer full ({} bytes), dropping {} oldest bytes",
self.buf.len(), bytes_to_drop);
self.buf.drain(..bytes_to_drop);
}
self.buf.extend(data_slice);
// Update performance counters
self.bytes_processed += data_slice.len() as u64;
self.packets_received += 1;
// Log buffer status every 5 seconds
if self.last_buffer_log.elapsed().as_secs() >= 5 {
let buffer_util = (self.buf.len() as f32 / MAX_SRT_BUFFER_SIZE as f32) * 100.0;
let elapsed = self.last_buffer_log.elapsed();
let mbps = (self.bytes_processed as f64 * 8.0) / (elapsed.as_secs_f64() * 1_000_000.0);
let pps = self.packets_received as f64 / elapsed.as_secs_f64();
info!(
"SRT ingress: {:.1} Mbps, {:.1} packets/sec, buffer: {}% ({}/{} bytes)",
mbps, pps, buffer_util as u32, self.buf.len(), MAX_SRT_BUFFER_SIZE
);
// Reset counters
self.last_buffer_log = Instant::now();
self.bytes_processed = 0;
self.packets_received = 0;
}
self.buffer.add_data(data_slice);
}
}
let drain = self.buf.drain(..buf.len());
buf.copy_from_slice(drain.as_slice());
Ok(buf.len())
Ok(self.buffer.read_buffered(buf))
}
}

View File

@ -5,6 +5,7 @@ use log::info;
use std::sync::Arc;
use tokio::net::TcpListener;
use tokio::runtime::Handle;
use uuid::Uuid;
pub async fn listen(out_dir: String, addr: String, overseer: Arc<dyn Overseer>) -> Result<()> {
let listener = TcpListener::bind(&addr).await?;
@ -12,12 +13,14 @@ pub async fn listen(out_dir: String, addr: String, overseer: Arc<dyn Overseer>)
info!("TCP listening on: {}", &addr);
while let Ok((socket, ip)) = listener.accept().await {
let info = ConnectionInfo {
id: Uuid::new_v4(),
ip_addr: ip.to_string(),
endpoint: addr.clone(),
endpoint: "tcp",
app_name: "".to_string(),
key: "no-key-tcp".to_string(),
key: "test".to_string(),
};
let socket = socket.into_std()?;
socket.set_nonblocking(false)?;
spawn_pipeline(
Handle::current(),
info,

View File

@ -1,31 +1,33 @@
use crate::generator::FrameGenerator;
use crate::ingress::{spawn_pipeline, ConnectionInfo};
use crate::overseer::Overseer;
use anyhow::Result;
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVColorSpace::AVCOL_SPC_RGB;
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVPictureType::AV_PICTURE_TYPE_NONE;
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVPixelFormat::{AV_PIX_FMT_RGBA, AV_PIX_FMT_YUV420P};
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVPixelFormat::AV_PIX_FMT_YUV420P;
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVSampleFormat::AV_SAMPLE_FMT_FLTP;
use ffmpeg_rs_raw::ffmpeg_sys_the_third::{
av_frame_alloc, av_frame_free, av_frame_get_buffer, av_packet_free, AVRational,
AV_PROFILE_H264_MAIN,
av_frame_free, av_packet_free, AVRational, AV_PROFILE_H264_MAIN,
};
use ffmpeg_rs_raw::{Encoder, Muxer, Scaler};
use fontdue::layout::{CoordinateSystem, Layout, TextStyle};
use fontdue::Font;
use ffmpeg_rs_raw::{Encoder, Muxer};
use log::info;
use ringbuf::traits::{Observer, Split};
use ringbuf::{HeapCons, HeapRb};
use std::io::Read;
use std::sync::Arc;
use std::time::{Duration, Instant};
use std::time::Duration;
use tiny_skia::Pixmap;
use tokio::runtime::Handle;
use uuid::Uuid;
pub async fn listen(out_dir: String, overseer: Arc<dyn Overseer>) -> Result<()> {
info!("Test pattern enabled");
// add a delay, there is a race condition somewhere, the test pattern doesn't always
// get added to active_streams
tokio::time::sleep(Duration::from_secs(1)).await;
let info = ConnectionInfo {
endpoint: "test-pattern".to_string(),
id: Uuid::new_v4(),
endpoint: "test-pattern",
ip_addr: "test-pattern".to_string(),
app_name: "".to_string(),
key: "test".to_string(),
@ -42,33 +44,31 @@ pub async fn listen(out_dir: String, overseer: Arc<dyn Overseer>) -> Result<()>
}
struct TestPatternSrc {
gen: FrameGenerator,
video_encoder: Encoder,
audio_encoder: Encoder,
scaler: Scaler,
muxer: Muxer,
background: Pixmap,
font: [Font; 1],
frame_no: u64,
audio_sample_no: u64,
start: Instant,
muxer: Muxer,
reader: HeapCons<u8>,
}
unsafe impl Send for TestPatternSrc {}
const VIDEO_FPS: f32 = 30.0;
const VIDEO_WIDTH: u16 = 1280;
const VIDEO_HEIGHT: u16 = 720;
const SAMPLE_RATE: u32 = 44100;
impl TestPatternSrc {
pub fn new() -> Result<Self> {
let scaler = Scaler::new();
let video_encoder = unsafe {
Encoder::new_with_name("libx264")?
.with_stream_index(0)
.with_framerate(VIDEO_FPS)?
.with_bitrate(1_000_000)
.with_pix_fmt(AV_PIX_FMT_YUV420P)
.with_width(1280)
.with_height(720)
.with_width(VIDEO_WIDTH as _)
.with_height(VIDEO_HEIGHT as _)
.with_level(51)
.with_profile(AV_PROFILE_H264_MAIN)
.open(None)?
@ -80,22 +80,20 @@ impl TestPatternSrc {
.with_default_channel_layout(1)
.with_bitrate(128_000)
.with_sample_format(AV_SAMPLE_FMT_FLTP)
.with_sample_rate(44100)?
.with_sample_rate(SAMPLE_RATE as _)?
.open(None)?
};
let svg_data = include_bytes!("../../test.svg");
let tree = usvg::Tree::from_data(svg_data, &Default::default())?;
let mut pixmap = Pixmap::new(1280, 720).unwrap();
let mut pixmap = Pixmap::new(VIDEO_WIDTH as _, VIDEO_HEIGHT as _).unwrap();
let render_ts = tiny_skia::Transform::from_scale(
pixmap.width() as f32 / tree.size().width(),
pixmap.height() as f32 / tree.size().height(),
);
resvg::render(&tree, render_ts, &mut pixmap.as_mut());
let font = include_bytes!("../../SourceCodePro-Regular.ttf") as &[u8];
let font = Font::from_bytes(font, Default::default()).unwrap();
let buf = HeapRb::new(1024 * 1024);
let (writer, reader) = buf.split();
@ -109,140 +107,59 @@ impl TestPatternSrc {
m
};
let frame_size = unsafe { (*audio_encoder.codec_context()).frame_size as _ };
Ok(Self {
gen: FrameGenerator::new(
VIDEO_FPS,
VIDEO_WIDTH,
VIDEO_HEIGHT,
AV_PIX_FMT_YUV420P,
SAMPLE_RATE,
frame_size,
1,
AVRational {
num: 1,
den: VIDEO_FPS as i32,
},
AVRational {
num: 1,
den: SAMPLE_RATE as i32,
},
)?,
video_encoder,
audio_encoder,
scaler,
muxer,
background: pixmap,
font: [font],
frame_no: 0,
audio_sample_no: 0,
start: Instant::now(),
reader,
})
}
pub unsafe fn next_pkt(&mut self) -> Result<()> {
let stream_time = Duration::from_secs_f64(self.frame_no as f64 / VIDEO_FPS as f64);
let real_time = Instant::now().duration_since(self.start);
let wait_time = if stream_time > real_time {
stream_time - real_time
} else {
Duration::new(0, 0)
};
if !wait_time.is_zero() && wait_time.as_secs_f32() > 1f32 / VIDEO_FPS {
std::thread::sleep(wait_time);
self.gen.begin()?;
self.gen.copy_frame_data(self.background.data())?;
self.gen
.write_text(&format!("frame={}", self.gen.frame_no()), 40.0, 5.0, 5.0)?;
let mut frame = self.gen.next()?;
if frame.is_null() {
return Ok(());
}
let mut src_frame = unsafe {
let src_frame = av_frame_alloc();
(*src_frame).width = 1280;
(*src_frame).height = 720;
(*src_frame).pict_type = AV_PICTURE_TYPE_NONE;
(*src_frame).key_frame = 1;
(*src_frame).colorspace = AVCOL_SPC_RGB;
(*src_frame).format = AV_PIX_FMT_RGBA as _;
(*src_frame).pts = self.frame_no as i64;
(*src_frame).duration = 1;
av_frame_get_buffer(src_frame, 0);
self.background
.data()
.as_ptr()
.copy_to((*src_frame).data[0] as *mut _, 1280 * 720 * 4);
src_frame
};
let mut layout = Layout::new(CoordinateSystem::PositiveYDown);
layout.clear();
layout.append(
&self.font,
&TextStyle::new(&format!("frame={}", self.frame_no), 40.0, 0),
);
for g in layout.glyphs() {
let (metrics, bitmap) = self.font[0].rasterize_config_subpixel(g.key);
for y in 0..metrics.height {
for x in 0..metrics.width {
let dst_x = x + g.x as usize;
let dst_y = y + g.y as usize;
let offset_src = (x + y * metrics.width) * 3;
unsafe {
let offset_dst = 4 * dst_x + dst_y * (*src_frame).linesize[0] as usize;
let pixel_dst = (*src_frame).data[0].add(offset_dst);
*pixel_dst.offset(0) = bitmap[offset_src];
*pixel_dst.offset(1) = bitmap[offset_src + 1];
*pixel_dst.offset(2) = bitmap[offset_src + 2];
}
}
}
}
// scale/encode video
let mut frame = self
.scaler
.process_frame(src_frame, 1280, 720, AV_PIX_FMT_YUV420P)?;
for mut pkt in self.video_encoder.encode_frame(frame)? {
self.muxer.write_packet(pkt)?;
av_packet_free(&mut pkt);
}
av_frame_free(&mut frame);
av_frame_free(&mut src_frame);
// Generate and encode audio (sine wave)
self.generate_audio_frame()?;
self.frame_no += 1;
Ok(())
}
/// Generate audio to stay synchronized with video frames
unsafe fn generate_audio_frame(&mut self) -> Result<()> {
const SAMPLE_RATE: f32 = 44100.0;
const FREQUENCY: f32 = 440.0; // A4 note
const SAMPLES_PER_FRAME: usize = 1024; // Fixed AAC frame size
// Calculate how many audio samples we should have by now
// At 30fps, each video frame = 1/30 sec = 1470 audio samples at 44.1kHz
let audio_samples_per_video_frame = (SAMPLE_RATE / VIDEO_FPS) as u64; // ~1470 samples
let target_audio_samples = self.frame_no * audio_samples_per_video_frame;
// Generate audio frames to catch up to the target
while self.audio_sample_no < target_audio_samples {
let mut audio_frame = av_frame_alloc();
(*audio_frame).format = AV_SAMPLE_FMT_FLTP as _;
(*audio_frame).nb_samples = SAMPLES_PER_FRAME as _;
(*audio_frame).ch_layout.nb_channels = 1;
(*audio_frame).sample_rate = SAMPLE_RATE as _;
(*audio_frame).pts = self.audio_sample_no as i64;
(*audio_frame).duration = 1;
(*audio_frame).time_base = AVRational {
num: 1,
den: SAMPLE_RATE as _,
};
av_frame_get_buffer(audio_frame, 0);
// Generate sine wave samples
let data = (*audio_frame).data[0] as *mut f32;
for i in 0..SAMPLES_PER_FRAME {
let sample_time = (self.audio_sample_no + i as u64) as f32 / SAMPLE_RATE;
let sample_value =
(2.0 * std::f32::consts::PI * FREQUENCY * sample_time).sin() * 0.5;
*data.add(i) = sample_value;
}
// Encode audio frame
for mut pkt in self.audio_encoder.encode_frame(audio_frame)? {
// if sample_rate is set this frame is audio
if (*frame).sample_rate > 0 {
for mut pkt in self.audio_encoder.encode_frame(frame)? {
self.muxer.write_packet(pkt)?;
av_packet_free(&mut pkt);
}
} else {
for mut pkt in self.video_encoder.encode_frame(frame)? {
self.muxer.write_packet(pkt)?;
av_packet_free(&mut pkt);
}
self.audio_sample_no += SAMPLES_PER_FRAME as u64;
av_frame_free(&mut audio_frame);
}
av_frame_free(&mut frame);
Ok(())
}
}

View File

@ -5,3 +5,4 @@ pub mod overseer;
pub mod pipeline;
pub mod variant;
pub mod viewer;
mod generator;

View File

@ -1,16 +1,16 @@
use crate::egress::{EgressResult, EgressSegment};
use crate::variant::{StreamMapping, VariantStream};
use anyhow::{bail, Result};
use anyhow::{bail, ensure, Result};
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVCodecID::AV_CODEC_ID_H264;
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVMediaType::AVMEDIA_TYPE_VIDEO;
use ffmpeg_rs_raw::ffmpeg_sys_the_third::{
av_free, av_opt_set, av_q2d, av_write_frame, avio_close, avio_flush, avio_open, AVPacket,
AVStream, AVIO_FLAG_WRITE, AV_PKT_FLAG_KEY,
av_free, av_interleaved_write_frame, av_opt_set, av_q2d, avio_close, avio_flush, avio_open,
avio_size, AVPacket, AVStream, AVIO_FLAG_WRITE, AV_NOPTS_VALUE, AV_PKT_FLAG_KEY,
};
use ffmpeg_rs_raw::{cstr, Encoder, Muxer};
use itertools::Itertools;
use log::{info, warn};
use m3u8_rs::MediaSegment;
use log::{info, trace, warn};
use m3u8_rs::{ByteRange, MediaSegment, MediaSegmentType, Part, PartInf, PreloadHint};
use std::collections::HashMap;
use std::fmt::Display;
use std::fs::File;
@ -18,7 +18,7 @@ use std::path::PathBuf;
use std::ptr;
use uuid::Uuid;
#[derive(Clone, Copy)]
#[derive(Clone, Copy, PartialEq)]
pub enum SegmentType {
MPEGTS,
FMP4,
@ -72,41 +72,74 @@ impl Display for HlsVariantStream {
pub struct HlsVariant {
/// Name of this variant (720p)
pub name: String,
name: String,
/// MPEG-TS muxer for this variant
pub mux: Muxer,
mux: Muxer,
/// List of streams ids in this variant
pub streams: Vec<HlsVariantStream>,
streams: Vec<HlsVariantStream>,
/// Segment length in seconds
pub segment_length: f32,
/// Total number of segments to store for this variant
pub segment_window: Option<u16>,
segment_length: f32,
/// Total number of seconds of video to store
segment_window: f32,
/// Current segment index
pub idx: u64,
/// Current segment start time in seconds (duration)
pub pkt_start: f32,
idx: u64,
/// Output directory (base)
pub out_dir: String,
out_dir: String,
/// List of segments to be included in the playlist
pub segments: Vec<SegmentInfo>,
segments: Vec<HlsSegment>,
/// Type of segments to create
pub segment_type: SegmentType,
segment_type: SegmentType,
/// Timestamp of the previous packet
last_pkt_pts: i64,
/// Timestamp of the start of the current segment
current_segment_start: f64,
/// Current segment duration in seconds (precise accumulation)
duration: f64,
/// Number of packets written to current segment
packets_written: u64,
/// Reference stream used to track duration
ref_stream_index: i32,
/// HLS-LL: Enable LL-output
low_latency: bool,
/// LL-HLS: Target duration for partial segments
partial_target_duration: f32,
/// HLS-LL: Current partial index
current_partial_index: u64,
/// HLS-LL: Current duration in this partial
current_partial_duration: f64,
/// HLS-LL: Whether the next partial segment should be marked as independent
next_partial_independent: bool,
}
/// One entry in a variant's playlist: either a completed (full) segment,
/// or an LL-HLS partial segment that references a byte range inside its
/// parent segment file (see `PartialSegmentInfo::byte_range`).
#[derive(PartialEq)]
enum HlsSegment {
    Full(SegmentInfo),
    Partial(PartialSegmentInfo),
}

impl HlsSegment {
    /// Convert this playlist entry into its m3u8 representation,
    /// delegating to the inner full/partial segment info.
    fn to_media_segment(&self) -> MediaSegmentType {
        match self {
            HlsSegment::Full(f) => f.to_media_segment(),
            HlsSegment::Partial(p) => p.to_media_segment(),
        }
    }
}
#[derive(PartialEq)]
struct SegmentInfo {
pub index: u64,
pub duration: f32,
pub kind: SegmentType,
index: u64,
duration: f32,
kind: SegmentType,
}
impl SegmentInfo {
fn to_media_segment(&self) -> MediaSegment {
MediaSegment {
fn to_media_segment(&self) -> MediaSegmentType {
MediaSegmentType::Full(MediaSegment {
uri: self.filename(),
duration: self.duration,
title: None,
..MediaSegment::default()
}
})
}
fn filename(&self) -> String {
@ -114,6 +147,42 @@ impl SegmentInfo {
}
}
/// LL-HLS partial segment (`EXT-X-PART`) metadata. A partial does not get
/// its own file: it points at a byte range of the parent (full) segment
/// that is still being written.
#[derive(PartialEq)]
struct PartialSegmentInfo {
    // Monotonic index of this partial within the stream
    index: u64,
    // Index of the parent full segment this partial lives inside
    parent_index: u64,
    // Segment type of the parent (MPEGTS/FMP4); determines the filename
    parent_kind: SegmentType,
    // Duration of this partial in seconds
    duration: f64,
    // True when the partial starts with a keyframe (INDEPENDENT=YES)
    independent: bool,
    // (length, offset) into the parent segment file; offset None means 0
    byte_range: Option<(u64, Option<u64>)>,
}

impl PartialSegmentInfo {
    /// Build the `EXT-X-PART` playlist entry for this partial.
    fn to_media_segment(&self) -> MediaSegmentType {
        MediaSegmentType::Partial(Part {
            uri: self.filename(),
            duration: self.duration,
            independent: self.independent,
            gap: false,
            byte_range: self.byte_range.map(|r| ByteRange {
                length: r.0,
                offset: r.1,
            }),
        })
    }

    /// Partials share the parent segment's file, so the URI is the
    /// parent's filename (the byte range selects the partial within it).
    fn filename(&self) -> String {
        HlsVariant::segment_name(self.parent_kind, self.parent_index)
    }

    /// Byte offset where this partial segment ends
    fn end_pos(&self) -> Option<u64> {
        self.byte_range
            .as_ref()
            .map(|(len, start)| start.unwrap_or(0) + len)
    }
}
impl HlsVariant {
pub fn new<'a>(
out_dir: &'a str,
@ -146,23 +215,34 @@ impl HlsVariant {
.build()?
};
let mut streams = Vec::new();
let mut ref_stream_index = -1;
let mut has_video = false;
for (var, enc) in encoded_vars {
match var {
VariantStream::Video(v) => unsafe {
let stream = mux.add_stream_encoder(enc)?;
let stream_idx = (*stream).index as usize;
streams.push(HlsVariantStream::Video {
group,
index: (*stream).index as usize,
index: stream_idx,
id: v.id(),
})
});
has_video = true;
// Always use video stream as reference for segmentation
ref_stream_index = stream_idx as _;
},
VariantStream::Audio(a) => unsafe {
let stream = mux.add_stream_encoder(enc)?;
let stream_idx = (*stream).index as usize;
streams.push(HlsVariantStream::Audio {
group,
index: (*stream).index as usize,
index: stream_idx,
id: a.id(),
})
});
if !has_video && ref_stream_index == -1 {
ref_stream_index = stream_idx as _;
}
},
VariantStream::Subtitle(s) => unsafe {
let stream = mux.add_stream_encoder(enc)?;
@ -175,20 +255,38 @@ impl HlsVariant {
_ => bail!("unsupported variant stream"),
}
}
ensure!(
ref_stream_index != -1,
"No reference stream found, cant create variant"
);
trace!(
"{} will use stream index {} as reference for segmentation",
name,
ref_stream_index
);
unsafe {
mux.open(Some(opts))?;
}
Ok(Self {
name: name.clone(),
segment_length,
segment_window: Some(10), //TODO: configure window
segment_window: 30.0,
mux,
streams,
idx: 1,
pkt_start: 0.0,
segments: Vec::new(), // Start with empty segments list
segments: Vec::new(),
out_dir: out_dir.to_string(),
segment_type,
last_pkt_pts: AV_NOPTS_VALUE,
duration: 0.0,
packets_written: 0,
ref_stream_index,
partial_target_duration: 0.33,
current_partial_index: 0,
current_partial_duration: 0.0,
current_segment_start: 0.0,
next_partial_independent: false,
low_latency: false,
})
}
@ -211,41 +309,63 @@ impl HlsVariant {
.to_string()
}
/// Mux a packet created by the encoder for this variant
pub unsafe fn mux_packet(&mut self, pkt: *mut AVPacket) -> Result<EgressResult> {
// Simply process the packet directly - no reordering needed
// FFmpeg's interleaving system should handle packet ordering upstream
self.process_packet(pkt)
}
/// Process a single packet through the muxer
unsafe fn process_packet(&mut self, pkt: *mut AVPacket) -> Result<EgressResult> {
let mut result = EgressResult::None;
let pkt_stream = *(*self.mux.context())
.streams
.add((*pkt).stream_index as usize);
// Match FFmpeg's segmentation logic exactly
let can_split = (*pkt).flags & AV_PKT_FLAG_KEY == AV_PKT_FLAG_KEY
&& (*(*pkt_stream).codecpar).codec_type == AVMEDIA_TYPE_VIDEO;
let pkt_q = av_q2d((*pkt).time_base);
let mut result = EgressResult::None;
let stream_type = (*(*pkt_stream).codecpar).codec_type;
let mut can_split = stream_type == AVMEDIA_TYPE_VIDEO
&& ((*pkt).flags & AV_PKT_FLAG_KEY == AV_PKT_FLAG_KEY);
let mut is_ref_pkt =
stream_type == AVMEDIA_TYPE_VIDEO && (*pkt).stream_index == self.ref_stream_index;
if can_split {
let pkt_q = av_q2d((*pkt).time_base);
let pkt_time = (*pkt).pts as f32 * pkt_q as f32;
let relative_time = pkt_time - self.pkt_start;
// FFmpeg checks: pkt->pts - vs->end_pts > 0 to prevent zero duration
// and av_compare_ts for target duration
let has_positive_duration = relative_time > 0.0;
let target_duration_reached = relative_time >= self.segment_length;
if has_positive_duration && target_duration_reached {
result = self.split_next_seg(pkt_time)?;
}
if (*pkt).pts == AV_NOPTS_VALUE {
can_split = false;
is_ref_pkt = false;
}
// Write packet directly like FFmpeg's ff_write_chained
// HLS-LL: write prev partial segment
if self.low_latency && self.current_partial_duration >= self.partial_target_duration as f64
{
self.create_partial_segment()?;
// HLS-LL: Mark next partial as independent if this packet is a keyframe
if can_split {
self.next_partial_independent = true;
}
}
// check if current packet is keyframe, flush current segment
if self.packets_written > 1 && can_split && self.duration >= self.segment_length as f64 {
result = self.split_next_seg((*pkt).pts as f64 * pkt_q)?;
}
// track duration from pts
if is_ref_pkt {
if self.last_pkt_pts == AV_NOPTS_VALUE {
self.last_pkt_pts = (*pkt).pts;
}
let time_delta = if (*pkt).duration != 0 {
(*pkt).duration as f64 * pkt_q
} else {
((*pkt).pts - self.last_pkt_pts) as f64 * pkt_q
};
if time_delta > 0.0 {
self.duration += time_delta;
if self.low_latency {
self.current_partial_duration += time_delta;
}
}
self.last_pkt_pts = (*pkt).pts;
}
// write to current segment
self.mux.write_packet(pkt)?;
self.packets_written += 1;
Ok(result)
}
@ -253,14 +373,61 @@ impl HlsVariant {
self.mux.close()
}
/// Create a partial segment for LL-HLS
fn create_partial_segment(&mut self) -> Result<()> {
let ctx = self.mux.context();
let end_pos = unsafe {
avio_flush((*ctx).pb);
avio_size((*ctx).pb) as u64
};
let previous_end_pos = self
.segments
.last()
.and_then(|s| match &s {
HlsSegment::Partial(p) => p.end_pos(),
_ => None,
})
.unwrap_or(0);
let independent = self.next_partial_independent;
let partial_size = end_pos - previous_end_pos;
let partial_info = PartialSegmentInfo {
index: self.current_partial_index,
parent_index: self.idx,
parent_kind: self.segment_type,
duration: self.current_partial_duration,
independent,
byte_range: Some((partial_size, Some(previous_end_pos))),
};
trace!(
"{} created partial segment {} [{:.3}s, independent={}]",
self.name,
partial_info.index,
partial_info.duration,
independent
);
self.segments.push(HlsSegment::Partial(partial_info));
self.current_partial_index += 1;
self.current_partial_duration = 0.0;
self.next_partial_independent = false;
self.write_playlist()?;
Ok(())
}
/// Reset the muxer state and start the next segment
unsafe fn split_next_seg(&mut self, pkt_time: f32) -> Result<EgressResult> {
unsafe fn split_next_seg(&mut self, next_pkt_start: f64) -> Result<EgressResult> {
let completed_segment_idx = self.idx;
self.idx += 1;
// Manually reset muxer avio
let ctx = self.mux.context();
av_write_frame(ctx, ptr::null_mut());
let ret = av_interleaved_write_frame(ctx, ptr::null_mut());
if ret < 0 {
bail!("Failed to split segment {}", ret);
}
avio_flush((*ctx).pb);
avio_close((*ctx).pb);
av_free((*ctx).url as *mut _);
@ -282,7 +449,6 @@ impl HlsVariant {
0,
);
let duration = pkt_time - self.pkt_start;
// Log the completed segment (previous index), not the next one
let completed_seg_path = Self::map_segment_path(
&self.out_dir,
@ -295,14 +461,17 @@ impl HlsVariant {
.metadata()
.map(|m| m.len())
.unwrap_or(0);
let cur_duration = next_pkt_start - self.current_segment_start;
info!(
"Finished segment {} [{:.3}s, {} bytes]",
"Finished segment {} [{:.3}s, {:.2} kB, {} pkts]",
completed_segment_path
.file_name()
.unwrap_or_default()
.to_string_lossy(),
duration,
segment_size
cur_duration,
segment_size as f32 / 1024f32,
self.packets_written
);
let video_var_id = self
@ -332,14 +501,23 @@ impl HlsVariant {
let created = EgressSegment {
variant: video_var_id,
idx: completed_segment_idx,
duration,
duration: cur_duration as f32,
path: completed_segment_path,
};
if let Err(e) = self.push_segment(completed_segment_idx, duration) {
warn!("Failed to update playlist: {}", e);
}
self.pkt_start = pkt_time;
self.segments.push(HlsSegment::Full(SegmentInfo {
index: completed_segment_idx,
duration: cur_duration as f32,
kind: self.segment_type,
}));
self.write_playlist()?;
// Reset counters for next segment
self.packets_written = 0;
self.duration = 0.0;
self.current_segment_start = next_pkt_start;
Ok(EgressResult::Segments {
created: vec![created],
deleted,
@ -352,38 +530,53 @@ impl HlsVariant {
.find(|a| matches!(*a, HlsVariantStream::Video { .. }))
}
/// Add a new segment to the variant and return a list of deleted segments
fn push_segment(&mut self, idx: u64, duration: f32) -> Result<()> {
self.segments.push(SegmentInfo {
index: idx,
duration,
kind: self.segment_type,
});
self.write_playlist()
}
/// Delete segments which are too old
fn clean_segments(&mut self) -> Result<Vec<SegmentInfo>> {
const MAX_SEGMENTS: usize = 10;
let mut ret = vec![];
if self.segments.len() > MAX_SEGMENTS {
let n_drain = self.segments.len() - MAX_SEGMENTS;
let seg_dir = self.out_dir();
for seg in self.segments.drain(..n_drain) {
// delete file
let seg_path = seg_dir.join(seg.filename());
if let Err(e) = std::fs::remove_file(&seg_path) {
warn!(
"Failed to remove segment file: {} {}",
seg_path.display(),
e
);
let drain_from_hls_segment = {
let mut acc = 0.0;
let mut seg_match = None;
for seg in self
.segments
.iter()
.filter(|e| matches!(e, HlsSegment::Full(_)))
.rev()
{
if acc >= self.segment_window {
seg_match = Some(seg);
break;
}
acc += match seg {
HlsSegment::Full(seg) => seg.duration,
_ => 0.0,
};
}
seg_match
};
let mut ret = vec![];
if let Some(seg_match) = drain_from_hls_segment {
if let Some(drain_pos) = self.segments.iter().position(|e| e == seg_match) {
let seg_dir = self.out_dir();
for seg in self.segments.drain(..drain_pos) {
match seg {
HlsSegment::Full(seg) => {
let seg_path = seg_dir.join(seg.filename());
if let Err(e) = std::fs::remove_file(&seg_path) {
warn!(
"Failed to remove segment file: {} {}",
seg_path.display(),
e
);
}
trace!("Removed segment file: {}", seg_path.display());
ret.push(seg);
}
_ => {}
}
}
ret.push(seg);
}
}
Ok(ret)
}
@ -393,12 +586,33 @@ impl HlsVariant {
}
let mut pl = m3u8_rs::MediaPlaylist::default();
// Round up target duration to ensure compliance
pl.target_duration = (self.segment_length.ceil() as u64).max(1);
pl.segments = self.segments.iter().map(|s| s.to_media_segment()).collect();
pl.version = Some(3);
pl.media_sequence = self.segments.first().map(|s| s.index).unwrap_or(0);
// For live streams, don't set end list
// append segment preload for next part segment
if let Some(HlsSegment::Partial(partial)) = self.segments.last() {
// TODO: try to estimate if there will be another partial segment
pl.segments.push(MediaSegmentType::PreloadHint(PreloadHint {
hint_type: "PART".to_string(),
uri: partial.filename(),
byte_range_start: partial.end_pos(),
byte_range_length: None,
}));
}
pl.version = Some(if self.low_latency { 6 } else { 3 });
if self.low_latency {
pl.part_inf = Some(PartInf {
part_target: self.partial_target_duration as f64,
});
}
pl.media_sequence = self
.segments
.iter()
.find_map(|s| match s {
HlsSegment::Full(ss) => Some(ss.index),
_ => None,
})
.unwrap_or(self.idx);
pl.end_list = false;
let mut f_out = File::create(self.out_dir().join("live.m3u8"))?;
@ -475,6 +689,9 @@ impl HlsMuxer {
) -> Result<Self> {
let base = PathBuf::from(out_dir).join(id.to_string());
if !base.exists() {
std::fs::create_dir_all(&base)?;
}
let mut vars = Vec::new();
for (k, group) in &encoders
.sorted_by(|a, b| a.0.group_id().cmp(&b.0.group_id()))
@ -522,9 +739,16 @@ impl HlsMuxer {
if let Some(vs) = var.streams.iter().find(|s| s.id() == variant) {
// very important for muxer to know which stream this pkt belongs to
(*pkt).stream_index = *vs.index() as _;
return var.mux_packet(pkt);
return var.process_packet(pkt);
}
}
bail!("Packet doesnt match any variants");
// This HLS muxer doesn't handle this variant, return None instead of failing
// This can happen when multiple egress handlers are configured with different variant sets
trace!(
"HLS muxer received packet for variant {} which it doesn't handle",
variant
);
Ok(EgressResult::None)
}
}

View File

@ -8,12 +8,6 @@ use std::cmp::PartialEq;
use std::path::PathBuf;
use uuid::Uuid;
#[cfg(feature = "local-overseer")]
mod local;
#[cfg(feature = "webhook-overseer")]
mod webhook;
/// A copy of [ffmpeg_rs_raw::DemuxerInfo] without internal ptr
#[derive(PartialEq, Clone)]
pub struct IngressInfo {
@ -32,6 +26,7 @@ pub struct IngressStream {
pub height: usize,
pub fps: f32,
pub sample_rate: usize,
pub channels: u8,
pub language: String,
}

View File

@ -4,10 +4,8 @@ use crate::egress::EgressConfig;
use crate::overseer::IngressInfo;
use crate::variant::VariantStream;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
pub mod runner;
pub mod placeholder;
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum EgressType {
@ -41,20 +39,23 @@ impl Display for EgressType {
}
}
#[derive(Clone, Debug, Serialize, Deserialize, Default)]
#[derive(Clone)]
pub struct PipelineConfig {
pub id: Uuid,
/// Transcoded/Copied stream config
pub variants: Vec<VariantStream>,
/// Output muxers
pub egress: Vec<EgressType>,
/// Source stream information for placeholder generation
pub ingress_info: Option<IngressInfo>,
pub ingress_info: IngressInfo,
/// Primary source video stream
pub video_src: usize,
/// Primary audio source stream
pub audio_src: Option<usize>,
}
impl Display for PipelineConfig {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
write!(f, "\nPipeline Config ID={}", self.id)?;
write!(f, "\nPipeline Config:")?;
write!(f, "\nVariants:")?;
for v in &self.variants {
write!(f, "\n\t{}", v)?;

View File

@ -1,188 +0,0 @@
use anyhow::{bail, Result};
use crate::variant::video::VideoVariant;
use crate::variant::audio::AudioVariant;
use crate::overseer::{IngressStream, IngressStreamType};
use ffmpeg_rs_raw::ffmpeg_sys_the_third::{
av_frame_alloc, av_frame_get_buffer, av_frame_free, av_get_sample_fmt, AVFrame,
AVPixelFormat, AVSampleFormat
};
use std::ffi::CString;
/// Placeholder frame generator for idle mode when stream disconnects
pub struct PlaceholderGenerator;

impl PlaceholderGenerator {
    /// Generate a black placeholder video frame sized from ingress stream info.
    ///
    /// The PTS is derived from `frame_index` and the stream frame rate,
    /// falling back to 30 fps when the ingress reports a non-positive rate.
    ///
    /// # Safety
    /// Returns a raw AVFrame the caller owns and must release with `av_frame_free`.
    pub unsafe fn generate_video_frame_from_stream(
        stream: &IngressStream,
        stream_time_base: (i32, i32),
        frame_index: u64
    ) -> Result<*mut AVFrame> {
        // must be `mut`: av_frame_free takes `&mut` so it can null the pointer
        let mut frame = av_frame_alloc();
        if frame.is_null() {
            bail!("Failed to allocate placeholder video frame");
        }

        (*frame).format = AVPixelFormat::AV_PIX_FMT_YUV420P as i32;
        (*frame).width = stream.width as i32;
        (*frame).height = stream.height as i32;
        (*frame).time_base.num = stream_time_base.0;
        (*frame).time_base.den = stream_time_base.1;

        // Set PTS based on frame rate and total frame index
        // (fps is f32 — widen explicitly before dividing into f64)
        let fps = if stream.fps > 0.0 { stream.fps } else { 30.0 };
        let time_base_f64 = stream_time_base.0 as f64 / stream_time_base.1 as f64;
        (*frame).pts = (frame_index as f64 / fps as f64 / time_base_f64) as i64;

        if av_frame_get_buffer(frame, 0) < 0 {
            av_frame_free(&mut frame);
            bail!("Failed to allocate buffer for placeholder video frame");
        }

        // Fill with black (Y=16, U=V=128 for limited range YUV420P)
        // NOTE(review): writes width*height bytes ignoring linesize padding —
        // rows with alignment padding keep uninitialized bytes; confirm harmless
        let y_size = ((*frame).width * (*frame).height) as usize;
        let uv_size = y_size / 4;

        if !(*frame).data[0].is_null() {
            std::ptr::write_bytes((*frame).data[0], 16, y_size);
        }
        if !(*frame).data[1].is_null() {
            std::ptr::write_bytes((*frame).data[1], 128, uv_size);
        }
        if !(*frame).data[2].is_null() {
            std::ptr::write_bytes((*frame).data[2], 128, uv_size);
        }

        Ok(frame)
    }

    /// Generate a silent placeholder audio frame based on ingress stream info.
    ///
    /// `sample_fmt` is an FFmpeg sample format name (e.g. "fltp"); `channels`
    /// is the desired channel count. PTS advances by 1024 samples per frame.
    ///
    /// # Safety
    /// Returns a raw AVFrame the caller owns and must release with `av_frame_free`.
    pub unsafe fn generate_audio_frame_from_stream(
        stream: &IngressStream,
        stream_time_base: (i32, i32),
        frame_index: u64,
        sample_fmt: &str,
        channels: u32
    ) -> Result<*mut AVFrame> {
        // must be `mut`: av_frame_free takes `&mut` so it can null the pointer
        let mut frame = av_frame_alloc();
        if frame.is_null() {
            bail!("Failed to allocate placeholder audio frame");
        }

        // Use the provided sample format
        let sample_fmt_cstr = CString::new(sample_fmt)
            .map_err(|_| anyhow::anyhow!("Invalid sample format string"))?;
        let sample_fmt_int = av_get_sample_fmt(sample_fmt_cstr.as_ptr());
        (*frame).format = sample_fmt_int;
        (*frame).channels = channels as i32;
        (*frame).sample_rate = stream.sample_rate as i32;
        (*frame).nb_samples = 1024; // Standard audio frame size
        (*frame).time_base.num = stream_time_base.0;
        (*frame).time_base.den = stream_time_base.1;

        // Set PTS based on sample rate and frame index
        let samples_per_second = stream.sample_rate as f64;
        let time_base_f64 = stream_time_base.0 as f64 / stream_time_base.1 as f64;
        (*frame).pts = ((frame_index * 1024) as f64 / samples_per_second / time_base_f64) as i64;

        if av_frame_get_buffer(frame, 0) < 0 {
            av_frame_free(&mut frame);
            bail!("Failed to allocate buffer for placeholder audio frame");
        }

        // Fill with silence (zeros) across every allocated plane
        for i in 0..8 {
            if !(*frame).data[i].is_null() && (*frame).linesize[i] > 0 {
                std::ptr::write_bytes((*frame).data[i], 0, (*frame).linesize[i] as usize);
            }
        }

        Ok(frame)
    }

    /// Generate a black placeholder video frame sized from a variant config.
    ///
    /// Same PTS scheme as [`Self::generate_video_frame_from_stream`] but the
    /// dimensions and fps come from the output variant instead of the ingress.
    ///
    /// # Safety
    /// Returns a raw AVFrame the caller owns and must release with `av_frame_free`.
    pub unsafe fn generate_video_frame(
        variant: &VideoVariant,
        stream_time_base: (i32, i32),
        frame_index: u64
    ) -> Result<*mut AVFrame> {
        // must be `mut`: av_frame_free takes `&mut` so it can null the pointer
        let mut frame = av_frame_alloc();
        if frame.is_null() {
            bail!("Failed to allocate placeholder video frame");
        }

        (*frame).format = AVPixelFormat::AV_PIX_FMT_YUV420P as i32;
        (*frame).width = variant.width as i32;
        (*frame).height = variant.height as i32;
        (*frame).time_base.num = stream_time_base.0;
        (*frame).time_base.den = stream_time_base.1;

        // Set PTS based on frame rate and total frame index
        let fps = if variant.fps > 0.0 { variant.fps } else { 30.0 };
        let time_base_f64 = stream_time_base.0 as f64 / stream_time_base.1 as f64;
        (*frame).pts = (frame_index as f64 / fps as f64 / time_base_f64) as i64;

        if av_frame_get_buffer(frame, 0) < 0 {
            av_frame_free(&mut frame);
            bail!("Failed to allocate buffer for placeholder video frame");
        }

        // Fill with black (Y=16, U=V=128 for limited range YUV420P)
        let y_size = ((*frame).width * (*frame).height) as usize;
        let uv_size = y_size / 4;

        if !(*frame).data[0].is_null() {
            std::ptr::write_bytes((*frame).data[0], 16, y_size);
        }
        if !(*frame).data[1].is_null() {
            std::ptr::write_bytes((*frame).data[1], 128, uv_size);
        }
        if !(*frame).data[2].is_null() {
            std::ptr::write_bytes((*frame).data[2], 128, uv_size);
        }

        Ok(frame)
    }

    /// Generate a silent placeholder audio frame from a variant config.
    ///
    /// Sample format, channel count and sample rate come from the audio
    /// variant; PTS advances by 1024 samples per frame.
    ///
    /// # Safety
    /// Returns a raw AVFrame the caller owns and must release with `av_frame_free`.
    pub unsafe fn generate_audio_frame(
        variant: &AudioVariant,
        stream_time_base: (i32, i32),
        frame_index: u64
    ) -> Result<*mut AVFrame> {
        // must be `mut`: av_frame_free takes `&mut` so it can null the pointer
        let mut frame = av_frame_alloc();
        if frame.is_null() {
            bail!("Failed to allocate placeholder audio frame");
        }

        // Use the sample format from the variant configuration
        let sample_fmt_cstr = CString::new(variant.sample_fmt.as_str())
            .map_err(|_| anyhow::anyhow!("Invalid sample format string"))?;
        let sample_fmt_int = av_get_sample_fmt(sample_fmt_cstr.as_ptr());
        (*frame).format = sample_fmt_int;
        (*frame).channels = variant.channels as i32;
        (*frame).sample_rate = variant.sample_rate as i32;
        (*frame).nb_samples = 1024; // Standard audio frame size
        (*frame).time_base.num = stream_time_base.0;
        (*frame).time_base.den = stream_time_base.1;

        // Set PTS based on sample rate and frame index
        let samples_per_second = variant.sample_rate as f64;
        let time_base_f64 = stream_time_base.0 as f64 / stream_time_base.1 as f64;
        (*frame).pts = ((frame_index * 1024) as f64 / samples_per_second / time_base_f64) as i64;

        if av_frame_get_buffer(frame, 0) < 0 {
            av_frame_free(&mut frame);
            bail!("Failed to allocate buffer for placeholder audio frame");
        }

        // Fill with silence (zeros) across every allocated plane
        for i in 0..8 {
            if !(*frame).data[i].is_null() && (*frame).linesize[i] > 0 {
                std::ptr::write_bytes((*frame).data[i], 0, (*frame).linesize[i] as usize);
            }
        }

        Ok(frame)
    }
}

File diff suppressed because it is too large Load Diff

View File

@ -1,7 +1,6 @@
use std::collections::HashMap;
use std::sync::{Arc, RwLock};
use std::time::{Duration, Instant};
use uuid::Uuid;
use tokio::task;
use log::debug;
use sha2::{Digest, Sha256};

View File

@ -94,7 +94,7 @@ impl ZapStreamDb {
pub async fn update_stream(&self, user_stream: &UserStream) -> Result<()> {
sqlx::query(
"update user_stream set state = ?, starts = ?, ends = ?, title = ?, summary = ?, image = ?, thumb = ?, tags = ?, content_warning = ?, goal = ?, pinned = ?, fee = ?, event = ? where id = ?",
"update user_stream set state = ?, starts = ?, ends = ?, title = ?, summary = ?, image = ?, thumb = ?, tags = ?, content_warning = ?, goal = ?, pinned = ?, fee = ?, event = ?, endpoint_id = ? where id = ?",
)
.bind(&user_stream.state)
.bind(&user_stream.starts)
@ -109,6 +109,7 @@ impl ZapStreamDb {
.bind(&user_stream.pinned)
.bind(&user_stream.fee)
.bind(&user_stream.event)
.bind(&user_stream.endpoint_id)
.bind(&user_stream.id)
.execute(&self.db)
.await

View File

@ -7,7 +7,7 @@ edition = "2021"
default = ["srt", "rtmp", "test-pattern"]
srt = ["zap-stream-core/srt"]
rtmp = ["zap-stream-core/rtmp"]
test-pattern = ["zap-stream-core/test-pattern", "zap-stream-db/test-pattern"]
test-pattern = ["zap-stream-db/test-pattern"]
[dependencies]
zap-stream-db = { path = "../zap-stream-db" }
@ -43,4 +43,5 @@ pretty_env_logger = "0.5.0"
clap = { version = "4.5.16", features = ["derive"] }
futures-util = "0.3.31"
matchit = "0.8.4"
mustache = "0.9.0"
mustache = "0.9.0"
http-range-header = "0.4.2"

View File

@ -9,45 +9,55 @@
color: white;
font-family: monospace;
}
.container {
padding: 20px;
max-width: 1200px;
margin: 0 auto;
}
.stream-list {
margin: 20px 0;
}
.stream-item {
background: #333;
margin: 10px 0;
padding: 15px;
border-radius: 5px;
}
.stream-title {
font-size: 18px;
font-weight: bold;
margin-bottom: 5px;
}
.stream-link {
color: #00ff00;
text-decoration: none;
}
.stream-link:hover {
text-decoration: underline;
}
.video-player {
margin: 20px 0;
max-width: 800px;
}
video {
width: 100%;
max-width: 800px;
background: #000;
}
.no-streams {
color: #999;
font-style: italic;
}
.player-section {
margin-top: 30px;
border-top: 1px solid #555;
@ -59,19 +69,24 @@
<body>
<div class="container">
<h1>Welcome to {{public_url}}</h1>
<h2>Active Streams</h2>
{{#has_streams}}
<div class="stream-list">
{{#streams}}
<div class="stream-item">
<div class="stream-title">{{title}}</div>
{{#summary}}<div class="stream-summary">{{summary}}</div>{{/summary}}
{{#summary}}
<div class="stream-summary">{{summary}}</div>
{{/summary}}
<div>
<a href="{{live_url}}" class="stream-link">📺 {{live_url}}</a>
{{#viewer_count}}<span style="margin-left: 15px;">👥 {{viewer_count}} viewers</span>{{/viewer_count}}
<a href="{{live_url}}" class="stream-link">{{live_url}}</a>
{{#viewer_count}}<span style="margin-left: 15px;">{{viewer_count}} viewers</span>{{/viewer_count}}
</div>
<button onclick="playStream('{{live_url}}')" style="margin-top: 5px; background: #00ff00; color: black; border: none; padding: 5px 10px; cursor: pointer;">Play</button>
<button onclick="playStream('{{live_url}}')"
style="margin-top: 5px; background: #00ff00; color: black; border: none; padding: 5px 10px; cursor: pointer;">
Play
</button>
</div>
{{/streams}}
</div>
@ -79,15 +94,19 @@
{{^has_streams}}
<div class="no-streams">No active streams</div>
{{/has_streams}}
<div class="player-section">
<h2>Stream Player</h2>
<div class="video-player">
<video id="video-player" controls></video>
</div>
<div style="margin-top: 10px;">
<input type="text" id="stream-url" placeholder="Enter stream URL (e.g., /stream-id/live.m3u8)" style="width: 400px; padding: 5px; margin-right: 10px;">
<button onclick="playCustomStream()" style="background: #00ff00; color: black; border: none; padding: 5px 10px; cursor: pointer;">Play URL</button>
<input type="text" id="stream-url" placeholder="Enter stream URL (e.g., /stream-id/live.m3u8)"
style="width: 400px; padding: 5px; margin-right: 10px;">
<button onclick="playCustomStream()"
style="background: #00ff00; color: black; border: none; padding: 5px 10px; cursor: pointer;">Play
URL
</button>
</div>
</div>
</div>
@ -104,12 +123,12 @@
hls = new Hls();
hls.loadSource(url);
hls.attachMedia(video);
hls.on(Hls.Events.MANIFEST_PARSED, function() {
hls.on(Hls.Events.MANIFEST_PARSED, function () {
video.play();
});
} else if (video.canPlayType('application/vnd.apple.mpegurl')) {
video.src = url;
video.addEventListener('loadedmetadata', function() {
video.addEventListener('loadedmetadata', function () {
video.play();
});
} else {

View File

@ -0,0 +1,275 @@
use anyhow::{Context, Result};
use ffmpeg_rs_raw::ffmpeg_sys_the_third::{
av_q2d, AV_NOPTS_VALUE, AVMediaType::AVMEDIA_TYPE_VIDEO, AVMediaType::AVMEDIA_TYPE_AUDIO,
};
use ffmpeg_rs_raw::Demuxer;
use m3u8_rs::{parse_media_playlist, MediaSegmentType};
use std::env;
use std::fs;
use std::path::{Path, PathBuf};
/// Per-segment comparison of advertised vs measured duration,
/// as printed by the per-segment analysis table in `main`.
#[derive(Debug)]
struct SegmentInfo {
    // Segment URI as listed in the playlist
    filename: String,
    // Duration advertised by the playlist (EXTINF), seconds
    playlist_duration: f32,
    // Measured duration: max of video and audio track durations, seconds
    actual_duration: f64,
    // Measured video track duration, seconds
    video_duration: f64,
    // Measured audio track duration, seconds
    audio_duration: f64,
    // actual_duration - playlist_duration (positive = playlist under-reports)
    difference: f64,
}
/// Timing data measured by demuxing a single segment file
/// (produced by `analyze_segment`).
#[derive(Debug)]
struct SegmentDurations {
    // max(video_duration, audio_duration), seconds
    total_duration: f64,
    // Video PTS span plus last packet duration, seconds
    video_duration: f64,
    // Audio PTS span plus last packet duration, seconds
    audio_duration: f64,
    // Packet counts (only packets with a valid PTS are counted);
    // carried for Debug output, not read by the summary printing
    video_packets: u64,
    audio_packets: u64,
    // Raw first/last PTS values per track (AV_NOPTS_VALUE when absent);
    // carried for Debug output
    video_start_pts: i64,
    video_end_pts: i64,
    audio_start_pts: i64,
    audio_end_pts: i64,
}
/// Entry point of the HLS segment debugging tool.
///
/// Usage: one argument, the directory containing a `live.m3u8` media
/// playlist. Every full segment listed in the playlist is demuxed and its
/// measured duration is compared against the duration the playlist
/// advertises; per-segment rows, aggregate statistics, and the playlist
/// properties are printed to stdout.
fn main() -> Result<()> {
    let args: Vec<String> = env::args().collect();
    if args.len() != 2 {
        eprintln!("Usage: {} <path_to_hls_directory>", args[0]);
        eprintln!("Example: {} out/hls/8c220348-fdbb-44cd-94d5-97a11a9ec91d/stream_0", args[0]);
        std::process::exit(1);
    }

    let hls_dir = PathBuf::from(&args[1]);
    let playlist_path = hls_dir.join("live.m3u8");

    if !playlist_path.exists() {
        eprintln!("Error: Playlist file {:?} does not exist", playlist_path);
        std::process::exit(1);
    }

    println!("Analyzing HLS stream: {}", hls_dir.display());
    println!("Playlist: {}", playlist_path.display());
    println!();

    // Parse the playlist
    let playlist_content = fs::read_to_string(&playlist_path)
        .context("Failed to read playlist file")?;
    let (_, playlist) = parse_media_playlist(playlist_content.as_bytes())
        .map_err(|e| anyhow::anyhow!("Failed to parse playlist: {:?}", e))?;

    // Analyze each segment (partial/LL-HLS entries are skipped; only full
    // segments carry an EXTINF duration to compare against)
    let mut segments = Vec::new();
    let mut total_playlist_duration = 0.0f32;
    let mut total_actual_duration = 0.0f64;

    println!("Segment Analysis:");
    println!("{:<12} {:>12} {:>12} {:>12} {:>12} {:>12}",
             "Segment", "Playlist", "Actual", "Video", "Audio", "Difference");
    println!("{:<12} {:>12} {:>12} {:>12} {:>12} {:>12}",
             "--------", "--------", "------", "-----", "-----", "----------");

    for segment_type in &playlist.segments {
        if let MediaSegmentType::Full(segment) = segment_type {
            let segment_path = hls_dir.join(&segment.uri);

            if !segment_path.exists() {
                eprintln!("Warning: Segment file {:?} does not exist", segment_path);
                continue;
            }

            // Analyze file using demuxer
            let durations = analyze_segment(&segment_path)?;
            let actual_duration = durations.total_duration;
            let video_duration = durations.video_duration;
            let audio_duration = durations.audio_duration;

            let playlist_duration = segment.duration;
            let difference = actual_duration - playlist_duration as f64;

            let info = SegmentInfo {
                filename: segment.uri.clone(),
                playlist_duration,
                actual_duration,
                video_duration,
                audio_duration,
                difference,
            };

            println!("{:<12} {:>12.3} {:>12.3} {:>12.3} {:>12.3} {:>12.3}",
                     info.filename,
                     info.playlist_duration,
                     info.actual_duration,
                     info.video_duration,
                     info.audio_duration,
                     info.difference);

            segments.push(info);
            total_playlist_duration += playlist_duration;
            total_actual_duration += actual_duration;
        }
    }

    println!();
    if segments.is_empty() {
        // Guard the statistics below: dividing by segments.len() == 0 would
        // print NaN averages and the min/max folds would print infinities.
        println!("No segments found on disk for this playlist.");
    } else {
        println!("Summary:");
        println!("  Total segments: {}", segments.len());
        println!("  Total playlist duration: {:.3}s", total_playlist_duration);
        println!("  Total actual duration: {:.3}s", total_actual_duration);
        println!("  Total difference: {:.3}s", total_actual_duration - total_playlist_duration as f64);
        println!("  Average difference per segment: {:.3}s",
                 (total_actual_duration - total_playlist_duration as f64) / segments.len() as f64);

        // Statistics
        let differences: Vec<f64> = segments.iter().map(|s| s.difference).collect();
        let min_diff = differences.iter().fold(f64::INFINITY, |a, &b| a.min(b));
        let max_diff = differences.iter().fold(f64::NEG_INFINITY, |a, &b| a.max(b));
        let avg_diff = differences.iter().sum::<f64>() / differences.len() as f64;

        println!();
        println!("Difference Statistics:");
        println!("  Min difference: {:.3}s", min_diff);
        println!("  Max difference: {:.3}s", max_diff);
        println!("  Average difference: {:.3}s", avg_diff);

        // Check for problematic segments
        let problematic: Vec<&SegmentInfo> = segments.iter()
            .filter(|s| s.difference.abs() > 0.5)
            .collect();

        if !problematic.is_empty() {
            println!();
            println!("Problematic segments (>0.5s difference):");
            for seg in problematic {
                println!("  {}: {:.3}s difference", seg.filename, seg.difference);
            }
        }
    }

    // Check playlist properties
    println!();
    println!("Playlist Properties:");
    println!("  Version: {:?}", playlist.version);
    println!("  Target duration: {:?}", playlist.target_duration);
    println!("  Media sequence: {:?}", playlist.media_sequence);

    if let Some(part_inf) = &playlist.part_inf {
        println!("  Part target: {:.3}s (LL-HLS enabled)", part_inf.part_target);
    }

    Ok(())
}
fn analyze_segment(path: &Path) -> Result<SegmentDurations> {
let mut demuxer = Demuxer::new(path.to_str().unwrap())?;
// Probe the input to get stream information
unsafe {
demuxer.probe_input()?;
}
let mut video_start_pts = AV_NOPTS_VALUE;
let mut video_end_pts = AV_NOPTS_VALUE;
let mut audio_start_pts = AV_NOPTS_VALUE;
let mut audio_end_pts = AV_NOPTS_VALUE;
let mut video_last_duration = 0i64;
let mut audio_last_duration = 0i64;
let mut video_packets = 0u64;
let mut audio_packets = 0u64;
let mut video_stream_idx: Option<usize> = None;
let mut audio_stream_idx: Option<usize> = None;
// Read all packets and track timing
loop {
let packet_result = unsafe { demuxer.get_packet() };
match packet_result {
Ok((pkt, stream)) => {
if pkt.is_null() {
break; // End of stream
}
unsafe {
let codec_type = (*(*stream).codecpar).codec_type;
let pts = (*pkt).pts;
let duration = (*pkt).duration;
let current_stream_idx = (*stream).index as usize;
match codec_type {
AVMEDIA_TYPE_VIDEO => {
if video_stream_idx.is_none() {
video_stream_idx = Some(current_stream_idx);
}
if pts != AV_NOPTS_VALUE {
if video_start_pts == AV_NOPTS_VALUE {
video_start_pts = pts;
}
video_end_pts = pts;
video_last_duration = duration;
video_packets += 1;
}
}
AVMEDIA_TYPE_AUDIO => {
if audio_stream_idx.is_none() {
audio_stream_idx = Some(current_stream_idx);
}
if pts != AV_NOPTS_VALUE {
if audio_start_pts == AV_NOPTS_VALUE {
audio_start_pts = pts;
}
audio_end_pts = pts;
audio_last_duration = duration;
audio_packets += 1;
}
}
_ => {}
}
}
}
Err(_) => break, // End of file or error
}
}
// Calculate durations (including last packet duration)
let video_duration = if let Some(stream_idx) = video_stream_idx {
if video_start_pts != AV_NOPTS_VALUE && video_end_pts != AV_NOPTS_VALUE {
unsafe {
let stream = demuxer.get_stream(stream_idx)?;
let time_base = (*stream).time_base;
let pts_duration = (video_end_pts - video_start_pts) as f64 * av_q2d(time_base);
let last_pkt_duration = video_last_duration as f64 * av_q2d(time_base);
pts_duration + last_pkt_duration
}
} else {
0.0
}
} else {
0.0
};
let audio_duration = if let Some(stream_idx) = audio_stream_idx {
if audio_start_pts != AV_NOPTS_VALUE && audio_end_pts != AV_NOPTS_VALUE {
unsafe {
let stream = demuxer.get_stream(stream_idx)?;
let time_base = (*stream).time_base;
let pts_duration = (audio_end_pts - audio_start_pts) as f64 * av_q2d(time_base);
let last_pkt_duration = audio_last_duration as f64 * av_q2d(time_base);
pts_duration + last_pkt_duration
}
} else {
0.0
}
} else {
0.0
};
let total_duration = video_duration.max(audio_duration);
Ok(SegmentDurations {
total_duration,
video_duration,
audio_duration,
video_packets,
audio_packets,
video_start_pts,
video_end_pts,
audio_start_pts,
audio_end_pts,
})
}

View File

@ -1,28 +1,39 @@
use crate::api::Api;
use anyhow::{bail, Result};
use anyhow::{bail, ensure, Context, Result};
use base64::Engine;
use bytes::Bytes;
use chrono::{DateTime, Utc};
use futures_util::TryStreamExt;
use http_body_util::combinators::BoxBody;
use http_body_util::{BodyExt, Full, StreamBody};
use http_range_header::{
parse_range_header, EndPosition, StartPosition, SyntacticallyCorrectRange,
};
use hyper::body::{Frame, Incoming};
use hyper::http::response::Builder;
use hyper::service::Service;
use hyper::{Method, Request, Response};
use log::error;
use hyper::{Request, Response, StatusCode};
use log::{error, warn};
use matchit::Router;
use nostr_sdk::{serde_json, Alphabet, Event, Kind, PublicKey, SingleLetterTag, TagKind};
use serde::{Serialize, Deserialize};
use serde::Serialize;
use std::future::Future;
use std::io::SeekFrom;
use std::ops::Range;
use std::path::PathBuf;
use std::pin::Pin;
use std::pin::{pin, Pin};
use std::sync::Arc;
use std::task::Poll;
use std::time::{Duration, Instant};
use tokio::fs::File;
use tokio::fs::File;
use tokio::io::{AsyncRead, AsyncSeek, ReadBuf};
use tokio::sync::RwLock;
use tokio_util::io::ReaderStream;
use uuid::Uuid;
use zap_stream_core::egress::hls::HlsEgress;
use zap_stream_core::viewer::ViewerTracker;
#[derive(Serialize)]
#[derive(Serialize, Clone)]
struct StreamData {
id: String,
title: String,
@ -33,7 +44,7 @@ struct StreamData {
viewer_count: Option<u64>,
}
#[derive(Serialize)]
#[derive(Serialize, Clone)]
struct IndexTemplateData {
public_url: String,
has_streams: bool,
@ -41,11 +52,19 @@ struct IndexTemplateData {
streams: Vec<StreamData>,
}
struct CachedStreams {
pub struct CachedStreams {
data: IndexTemplateData,
cached_at: Instant,
}
#[derive(Clone)]
pub enum HttpServerPath {
Index,
HlsMasterPlaylist,
HlsVariantPlaylist,
HlsSegmentFile,
}
pub type StreamCache = Arc<RwLock<Option<CachedStreams>>>;
#[derive(Clone)]
@ -54,24 +73,52 @@ pub struct HttpServer {
files_dir: PathBuf,
api: Api,
stream_cache: StreamCache,
router: Router<HttpServerPath>,
}
impl HttpServer {
pub fn new(index_template: String, files_dir: PathBuf, api: Api, stream_cache: StreamCache) -> Self {
pub fn new(
index_template: String,
files_dir: PathBuf,
api: Api,
stream_cache: StreamCache,
) -> Self {
let mut router = Router::new();
router.insert("/", HttpServerPath::Index).unwrap();
router.insert("/index.html", HttpServerPath::Index).unwrap();
router
.insert(
format!("/{}/{{stream}}/live.m3u8", HlsEgress::PATH),
HttpServerPath::HlsMasterPlaylist,
)
.unwrap();
router
.insert(
format!("/{}/{{stream}}/{{variant}}/live.m3u8", HlsEgress::PATH),
HttpServerPath::HlsVariantPlaylist,
)
.unwrap();
router
.insert(
format!("/{}/{{stream}}/{{variant}}/{{seg}}.ts", HlsEgress::PATH),
HttpServerPath::HlsSegmentFile,
)
.unwrap();
Self {
index_template,
files_dir,
api,
stream_cache,
router,
}
}
async fn get_cached_or_fetch_streams(&self) -> Result<IndexTemplateData> {
Self::get_cached_or_fetch_streams_static(&self.stream_cache, &self.api).await
}
async fn get_cached_or_fetch_streams_static(stream_cache: &StreamCache, api: &Api) -> Result<IndexTemplateData> {
const CACHE_DURATION: Duration = Duration::from_secs(60); // 1 minute
async fn get_cached_or_fetch_streams_static(
stream_cache: &StreamCache,
api: &Api,
) -> Result<IndexTemplateData> {
const CACHE_DURATION: Duration = Duration::from_secs(10);
// Check if we have valid cached data
{
@ -86,18 +133,25 @@ impl HttpServer {
// Cache is expired or missing, fetch new data
let active_streams = api.get_active_streams().await?;
let public_url = api.get_public_url();
let template_data = if !active_streams.is_empty() {
let streams: Vec<StreamData> = active_streams
.into_iter()
.map(|stream| {
let viewer_count = api.get_viewer_count(&stream.id);
// TODO: remove HLS assumption
StreamData {
id: stream.id.clone(),
title: stream.title.unwrap_or_else(|| format!("Stream {}", &stream.id[..8])),
title: stream
.title
.unwrap_or_else(|| format!("Stream {}", &stream.id[..8])),
summary: stream.summary,
live_url: format!("/{}/live.m3u8", stream.id),
viewer_count: if viewer_count > 0 { Some(viewer_count) } else { None },
live_url: format!("/{}/{}/live.m3u8", HlsEgress::PATH, stream.id),
viewer_count: if viewer_count > 0 {
Some(viewer_count as _)
} else {
None
},
}
})
.collect();
@ -127,26 +181,97 @@ impl HttpServer {
Ok(template_data)
}
async fn render_index(&self) -> Result<String> {
let template_data = self.get_cached_or_fetch_streams().await?;
let template = mustache::compile_str(&self.index_template)?;
let rendered = template.render_to_string(&template_data)?;
Ok(rendered)
async fn handle_index(
api: Api,
stream_cache: StreamCache,
template: String,
) -> Result<Response<BoxBody<Bytes, anyhow::Error>>, anyhow::Error> {
// Compile template outside async move for better performance
let template = match mustache::compile_str(&template) {
Ok(t) => t,
Err(e) => {
error!("Failed to compile template: {}", e);
return Ok(Self::base_response().status(500).body(BoxBody::default())?);
}
};
let template_data = Self::get_cached_or_fetch_streams_static(&stream_cache, &api).await;
match template_data {
Ok(data) => match template.render_to_string(&data) {
Ok(index_html) => Ok(Self::base_response()
.header("content-type", "text/html")
.body(
Full::new(Bytes::from(index_html))
.map_err(|e| match e {})
.boxed(),
)?),
Err(e) => {
error!("Failed to render template: {}", e);
Ok(Self::base_response().status(500).body(BoxBody::default())?)
}
},
Err(e) => {
error!("Failed to fetch template data: {}", e);
Ok(Self::base_response().status(500).body(BoxBody::default())?)
}
}
}
async fn handle_hls_playlist(
api: &Api,
async fn handle_hls_segment(
req: &Request<Incoming>,
playlist_path: &PathBuf,
segment_path: PathBuf,
) -> Result<Response<BoxBody<Bytes, anyhow::Error>>, anyhow::Error> {
// Extract stream ID from path (e.g., /uuid/live.m3u8 -> uuid)
let path_parts: Vec<&str> = req.uri().path().trim_start_matches('/').split('/').collect();
if path_parts.len() < 2 {
return Ok(Response::builder().status(404).body(BoxBody::default())?);
let mut response = Self::base_response().header("accept-ranges", "bytes");
if let Some(r) = req.headers().get("range") {
if let Ok(ranges) = parse_range_header(r.to_str()?) {
if ranges.ranges.len() > 1 {
warn!("Multipart ranges are not supported, fallback to non-range request");
Self::path_to_response(segment_path).await
} else {
let file = File::open(&segment_path).await?;
let metadata = file.metadata().await?;
let single_range = ranges.ranges.first().unwrap();
let range = match RangeBody::get_range(metadata.len(), single_range) {
Ok(r) => r,
Err(e) => {
warn!("Failed to get range: {}", e);
return Ok(response
.status(StatusCode::RANGE_NOT_SATISFIABLE)
.body(BoxBody::default())?);
}
};
let r_body = RangeBody::new(file, metadata.len(), range.clone());
response = response.status(StatusCode::PARTIAL_CONTENT);
let headers = r_body.get_headers();
for (k, v) in headers {
response = response.header(k, v);
}
let f_stream = ReaderStream::new(r_body);
let body = StreamBody::new(
f_stream
.map_ok(Frame::data)
.map_err(|e| anyhow::anyhow!("Failed to read body: {}", e)),
)
.boxed();
Ok(response.body(body)?)
}
} else {
Ok(Self::base_response().status(400).body(BoxBody::default())?)
}
} else {
Self::path_to_response(segment_path).await
}
let stream_id = path_parts[0];
}
async fn handle_hls_master_playlist(
api: Api,
req: &Request<Incoming>,
stream_id: &str,
playlist_path: PathBuf,
) -> Result<Response<BoxBody<Bytes, anyhow::Error>>, anyhow::Error> {
// Get client IP and User-Agent for tracking
let client_ip = Self::get_client_ip(req);
let user_agent = req
@ -179,21 +304,20 @@ impl HttpServer {
// Read the playlist file
let playlist_content = tokio::fs::read(playlist_path).await?;
// Parse and modify playlist to add viewer token to URLs
let modified_content = Self::add_viewer_token_to_playlist(&playlist_content, &viewer_token)?;
Ok(Response::builder()
// Parse and modify playlist to add viewer token to URLs
let modified_content =
Self::add_viewer_token_to_playlist(&playlist_content, &viewer_token)?;
let response = Self::base_response()
.header("content-type", "application/vnd.apple.mpegurl")
.header("server", "zap-stream-core")
.header("access-control-allow-origin", "*")
.header("access-control-allow-headers", "*")
.header("access-control-allow-methods", "HEAD, GET")
.body(
Full::new(Bytes::from(modified_content))
.map_err(|e| match e {})
.boxed(),
)?)
)?;
Ok(response)
}
fn get_client_ip(req: &Request<Incoming>) -> String {
@ -205,32 +329,33 @@ impl HttpServer {
}
}
}
if let Some(real_ip) = req.headers().get("x-real-ip") {
if let Ok(ip_str) = real_ip.to_str() {
return ip_str.to_string();
}
}
// Fallback to connection IP (note: in real deployment this might be a proxy)
"unknown".to_string()
// use random string as IP to avoid broken view tracker due to proxying
Uuid::new_v4().to_string()
}
fn add_viewer_token_to_playlist(content: &[u8], viewer_token: &str) -> Result<String> {
// Parse the M3U8 playlist using the m3u8-rs crate
let (_, playlist) = m3u8_rs::parse_playlist(content)
.map_err(|e| anyhow::anyhow!("Failed to parse M3U8 playlist: {}", e))?;
match playlist {
m3u8_rs::Playlist::MasterPlaylist(mut master) => {
// For master playlists, add viewer token to variant streams
for variant in &mut master.variants {
variant.uri = Self::add_token_to_url(&variant.uri, viewer_token);
}
// Write the modified playlist back to string
let mut output = Vec::new();
master.write_to(&mut output)
master
.write_to(&mut output)
.map_err(|e| anyhow::anyhow!("Failed to write master playlist: {}", e))?;
String::from_utf8(output)
.map_err(|e| anyhow::anyhow!("Failed to convert playlist to string: {}", e))
@ -242,7 +367,7 @@ impl HttpServer {
}
}
}
fn add_token_to_url(url: &str, viewer_token: &str) -> String {
if url.contains('?') {
format!("{}&vt={}", url, viewer_token)
@ -250,6 +375,27 @@ impl HttpServer {
format!("{}?vt={}", url, viewer_token)
}
}
fn base_response() -> Builder {
Response::builder()
.header("server", "zap-stream-core")
.header("access-control-allow-origin", "*")
.header("access-control-allow-headers", "*")
.header("access-control-allow-methods", "HEAD, GET, OPTIONS")
}
/// Get a response object for a file body
async fn path_to_response(path: PathBuf) -> Result<Response<BoxBody<Bytes, anyhow::Error>>> {
let f = File::open(&path).await?;
let f_stream = ReaderStream::new(f);
let body = StreamBody::new(
f_stream
.map_ok(Frame::data)
.map_err(|e| anyhow::anyhow!("Failed to read body: {}", e)),
)
.boxed();
Ok(Self::base_response().body(body)?)
}
}
impl Service<Request<Incoming>> for HttpServer {
@ -258,93 +404,50 @@ impl Service<Request<Incoming>> for HttpServer {
type Future = Pin<Box<dyn Future<Output = Result<Self::Response, Self::Error>> + Send>>;
fn call(&self, req: Request<Incoming>) -> Self::Future {
// check is index.html
if req.method() == Method::GET && req.uri().path() == "/"
|| req.uri().path() == "/index.html"
{
let stream_cache = self.stream_cache.clone();
let api = self.api.clone();
// Compile template outside async move for better performance
let template = match mustache::compile_str(&self.index_template) {
Ok(t) => t,
Err(e) => {
error!("Failed to compile template: {}", e);
let path = req.uri().path().to_owned();
// request path as a file path pointing to the output directory
let dst_path = self.files_dir.join(req.uri().path()[1..].to_string());
if let Ok(m) = self.router.at(&path) {
match m.value {
HttpServerPath::Index => {
let api = self.api.clone();
let cache = self.stream_cache.clone();
let template = self.index_template.clone();
return Box::pin(async move { Self::handle_index(api, cache, template).await });
}
HttpServerPath::HlsMasterPlaylist => {
let api = self.api.clone();
let stream_id = m.params.get("stream").map(|s| s.to_string());
let file_path = dst_path.clone();
return Box::pin(async move {
Ok(Response::builder()
.status(500)
.body(BoxBody::default()).unwrap())
let stream_id = stream_id.context("stream id missing")?;
Ok(
Self::handle_hls_master_playlist(api, &req, &stream_id, file_path)
.await?,
)
});
}
};
return Box::pin(async move {
// Use the existing method to get cached template data
let template_data = Self::get_cached_or_fetch_streams_static(&stream_cache, &api).await;
match template_data {
Ok(data) => {
match template.render_to_string(&data) {
Ok(index_html) => Ok(Response::builder()
.header("content-type", "text/html")
.header("server", "zap-stream-core")
.body(
Full::new(Bytes::from(index_html))
.map_err(|e| match e {})
.boxed(),
)?),
Err(e) => {
error!("Failed to render template: {}", e);
Ok(Response::builder()
.status(500)
.body(BoxBody::default())?)
}
}
}
Err(e) => {
error!("Failed to fetch template data: {}", e);
Ok(Response::builder()
.status(500)
.body(BoxBody::default())?)
}
HttpServerPath::HlsVariantPlaylist => {
// let file handler handle this one, may be used later for HLS-LL to create
// delta updates
}
});
HttpServerPath::HlsSegmentFile => {
// handle segment file (range requests)
let file_path = dst_path.clone();
return Box::pin(async move {
Ok(Self::handle_hls_segment(&req, file_path).await?)
});
}
}
}
// check if mapped to file
let dst_path = self.files_dir.join(req.uri().path()[1..].to_string());
// check if mapped to file (not handled route)
if dst_path.exists() {
let api_clone = self.api.clone();
return Box::pin(async move {
let rsp = Response::builder()
.header("server", "zap-stream-core")
.header("access-control-allow-origin", "*")
.header("access-control-allow-headers", "*")
.header("access-control-allow-methods", "HEAD, GET");
if req.method() == Method::HEAD {
return Ok(rsp.body(BoxBody::default())?);
}
// Handle HLS playlists with viewer tracking
if req.uri().path().ends_with("/live.m3u8") {
return Self::handle_hls_playlist(&api_clone, &req, &dst_path).await;
}
// Handle regular files
let f = File::open(&dst_path).await?;
let f_stream = ReaderStream::new(f);
let body = StreamBody::new(
f_stream
.map_ok(Frame::data)
.map_err(|e| Self::Error::new(e)),
)
.boxed();
Ok(rsp.body(body)?)
});
return Box::pin(async move { Self::path_to_response(dst_path).await });
}
// otherwise handle in overseer
// fallback to api handler
let api = self.api.clone();
Box::pin(async move {
match api.handler(req).await {
@ -415,12 +518,21 @@ pub fn check_nip98_auth(req: &Request<Incoming>, public_url: &str) -> Result<Aut
// Construct full URI using public_url + path + query
let request_uri = match req.uri().query() {
Some(query) => format!("{}{}?{}", public_url.trim_end_matches('/'), req.uri().path(), query),
Some(query) => format!(
"{}{}?{}",
public_url.trim_end_matches('/'),
req.uri().path(),
query
),
None => format!("{}{}", public_url.trim_end_matches('/'), req.uri().path()),
};
if !url_tag.eq_ignore_ascii_case(&request_uri) {
bail!("Invalid nostr event, URL tag invalid. Expected: {}, Got: {}", request_uri, url_tag);
bail!(
"Invalid nostr event, URL tag invalid. Expected: {}, Got: {}",
request_uri,
url_tag
);
}
// Check method tag
@ -440,3 +552,110 @@ pub fn check_nip98_auth(req: &Request<Incoming>, public_url: &str) -> Result<Aut
event,
})
}
/// Range request handler over file handle
///
/// Wraps an open [`File`] and exposes only the bytes of an inclusive
/// byte range, for serving HTTP `Range:` requests (206 responses).
/// Both bounds are inclusive, matching the `content-range` math in
/// `get_headers`.
struct RangeBody {
    // Open file handle the range bytes are read from
    file: File,
    // First byte offset of the range, inclusive
    range_start: u64,
    // Last byte offset of the range, inclusive
    range_end: u64,
    // Number of range bytes already emitted, relative to `range_start`
    current_offset: u64,
    // True while a seek/read operation on `file` is still in flight
    poll_complete: bool,
    // Total size of the underlying file in bytes
    file_size: u64,
}
const MAX_UNBOUNDED_RANGE: u64 = 1024 * 1024;
impl RangeBody {
    /// Creates a range body over `file` (which is `file_size` bytes long),
    /// serving the inclusive byte range `range`.
    pub fn new(file: File, file_size: u64, range: Range<u64>) -> Self {
        Self {
            file,
            file_size,
            range_start: range.start,
            range_end: range.end,
            current_offset: 0,
            poll_complete: false,
        }
    }

    /// Resolves a parsed `Range:` header into an inclusive byte range
    /// within a file of `file_size` bytes.
    ///
    /// Open-ended ranges (`bytes=N-`, i.e. `EndPosition::LastByte`) are
    /// capped at [`MAX_UNBOUNDED_RANGE`] bytes past the start.
    ///
    /// # Errors
    /// Returns an error when the start is past the end of the file, the
    /// end exceeds the file size, or the range is inverted (start > end).
    pub fn get_range(file_size: u64, header: &SyntacticallyCorrectRange) -> Result<Range<u64>> {
        let range_start = match header.start {
            StartPosition::Index(i) => {
                ensure!(i < file_size, "Range start out of range");
                i
            }
            // suffix range "bytes=-N": last N bytes of the file
            StartPosition::FromLast(i) => file_size.saturating_sub(i),
        };
        let range_end = match header.end {
            EndPosition::Index(i) => {
                ensure!(i <= file_size, "Range end out of range");
                i
            }
            EndPosition::LastByte => {
                (file_size.saturating_sub(1)).min(range_start + MAX_UNBOUNDED_RANGE)
            }
        };
        // An inverted range (e.g. "bytes=5-3") would underflow the
        // length arithmetic in `get_headers`; reject it up front.
        ensure!(range_start <= range_end, "Range start past range end");
        Ok(range_start..range_end)
    }

    /// Headers for a `206 Partial Content` response: the
    /// `content-length` of the range and the matching `content-range`
    /// descriptor (`bytes start-end/total`).
    pub fn get_headers(&self) -> Vec<(&'static str, String)> {
        // +1 because both bounds of the range are inclusive
        let r_len = (self.range_end - self.range_start) + 1;
        vec![
            ("content-length", r_len.to_string()),
            (
                "content-range",
                format!(
                    "bytes {}-{}/{}",
                    self.range_start, self.range_end, self.file_size
                ),
            ),
        ]
    }
}
impl AsyncRead for RangeBody {
    /// Streams at most the bytes of the configured inclusive range,
    /// seeking the file to the correct absolute offset before each read.
    fn poll_read(
        mut self: Pin<&mut Self>,
        cx: &mut std::task::Context<'_>,
        buf: &mut ReadBuf<'_>,
    ) -> Poll<std::io::Result<()>> {
        // Absolute file offset of the next byte to serve.
        let range_start = self.range_start + self.current_offset;
        // Bytes left in the inclusive range. `(end + 1) - start`
        // (saturating) reaches exactly 0 once the range is exhausted;
        // the previous `end - start + 1` form never could, so EOF was
        // never signalled at the range boundary.
        let range_len = (self.range_end + 1).saturating_sub(range_start);
        if range_len == 0 || buf.remaining() == 0 {
            return Poll::Ready(Ok(()));
        }

        // when no pending poll, seek to starting position
        if !self.poll_complete {
            let pinned = pin!(&mut self.file);
            pinned.start_seek(SeekFrom::Start(range_start))?;
            self.poll_complete = true;
        }

        // check poll completion of the in-flight seek/read
        if self.poll_complete {
            let pinned = pin!(&mut self.file);
            match pinned.poll_complete(cx) {
                Poll::Ready(Ok(_)) => {
                    self.poll_complete = false;
                }
                Poll::Ready(Err(e)) => return Poll::Ready(Err(e)),
                Poll::Pending => return Poll::Pending,
            }
        }

        // Limit the read so we never emit data past `range_end`; an
        // unrestricted read into `buf` could overshoot the range and
        // the advertised content-length.
        let limit = range_len.min(buf.remaining() as u64) as usize;
        let read = {
            let mut limited = buf.take(limit);
            let pinned = pin!(&mut self.file);
            match pinned.poll_read(cx, &mut limited) {
                // Advance by what was actually read, not what was
                // requested; a short read must not make the next seek
                // skip bytes that were never delivered.
                Poll::Ready(Ok(_)) => limited.filled().len(),
                Poll::Ready(Err(e)) => return Poll::Ready(Err(e)),
                Poll::Pending => {
                    self.poll_complete = true;
                    return Poll::Pending;
                }
            }
        };
        // SAFETY: `limited` is a view over `buf`'s unfilled region, so
        // its first `read` bytes are now initialized in `buf` as well.
        unsafe { buf.assume_init(read) };
        buf.advance(read);
        self.current_offset += read as u64;
        Poll::Ready(Ok(()))
    }
}

View File

@ -29,7 +29,6 @@ use crate::monitor::BackgroundMonitor;
use crate::overseer::ZapStreamOverseer;
use crate::settings::Settings;
use zap_stream_core::ingress::{file, tcp};
use zap_stream_core::overseer::Overseer;
mod api;
mod blossom;
@ -76,7 +75,12 @@ async fn main() -> Result<()> {
// Create shared stream cache
let stream_cache: StreamCache = Arc::new(RwLock::new(None));
// HTTP server
let server = HttpServer::new(index_template.to_string(), PathBuf::from(settings.output_dir), api, stream_cache);
let server = HttpServer::new(
index_template.to_string(),
PathBuf::from(settings.output_dir),
api,
stream_cache,
);
tasks.push(tokio::spawn(async move {
let listener = TcpListener::bind(&http_addr).await?;

View File

@ -15,16 +15,17 @@ use std::sync::Arc;
use tokio::sync::RwLock;
use url::Url;
use uuid::Uuid;
use zap_stream_core::egress::hls::HlsEgress;
use zap_stream_core::egress::{EgressConfig, EgressSegment};
use zap_stream_core::ingress::ConnectionInfo;
use zap_stream_core::overseer::{IngressInfo, IngressStreamType, Overseer};
use zap_stream_core::overseer::{IngressInfo, IngressStream, IngressStreamType, Overseer};
use zap_stream_core::pipeline::{EgressType, PipelineConfig};
use zap_stream_core::variant::audio::AudioVariant;
use zap_stream_core::variant::mapping::VariantMapping;
use zap_stream_core::variant::video::VideoVariant;
use zap_stream_core::variant::{StreamMapping, VariantStream};
use zap_stream_core::viewer::ViewerTracker;
use zap_stream_db::{UserStream, UserStreamState, ZapStreamDb};
use zap_stream_db::{IngestEndpoint, UserStream, UserStreamState, ZapStreamDb};
const STREAM_EVENT_KIND: u16 = 30_311;
@ -43,8 +44,6 @@ struct ActiveStreamInfo {
/// zap.stream NIP-53 overseer
#[derive(Clone)]
pub struct ZapStreamOverseer {
/// Dir where HTTP server serves files from
out_dir: String,
/// Database instance for accounts/streams
db: ZapStreamDb,
/// LND node connection
@ -68,7 +67,6 @@ pub struct ZapStreamOverseer {
impl ZapStreamOverseer {
pub async fn new(
out_dir: &String,
public_url: &String,
private_key: &str,
db: &str,
@ -114,7 +112,6 @@ impl ZapStreamOverseer {
client.connect().await;
let overseer = Self {
out_dir: out_dir.clone(),
db,
lnd,
client,
@ -231,16 +228,29 @@ impl ZapStreamOverseer {
stream: &UserStream,
pubkey: &Vec<u8>,
) -> Result<Event> {
// TODO: remove assumption that HLS is enabled
let extra_tags = vec![
Tag::parse(["p", hex::encode(pubkey).as_str(), "", "host"])?,
Tag::parse([
"streaming",
self.map_to_stream_public_url(stream, "live.m3u8")?.as_str(),
self.map_to_public_url(
PathBuf::from(HlsEgress::PATH)
.join(stream.id.to_string())
.join("live.m3u8")
.to_str()
.unwrap(),
)?
.as_str(),
])?,
Tag::parse([
"image",
self.map_to_stream_public_url(stream, "thumb.webp")?
.as_str(),
self.map_to_public_url(
PathBuf::from(stream.id.to_string())
.join("thumb.webp")
.to_str()
.unwrap(),
)?
.as_str(),
])?,
Tag::parse(["service", self.map_to_public_url("api/v1")?.as_str()])?,
];
@ -252,10 +262,6 @@ impl ZapStreamOverseer {
Ok(ev)
}
fn map_to_stream_public_url(&self, stream: &UserStream, path: &str) -> Result<String> {
self.map_to_public_url(&format!("{}/{}", stream.id, path))
}
fn map_to_public_url(&self, path: &str) -> Result<String> {
let u: Url = self.public_url.parse()?;
Ok(u.join(path)?.to_string())
@ -353,32 +359,34 @@ impl Overseer for ZapStreamOverseer {
}
// Get ingest endpoint configuration based on connection type
let endpoint_id = self
.detect_endpoint(&connection)
.await?
.ok_or_else(|| anyhow::anyhow!("No ingest endpoints configured"))?;
let endpoint = self
.db
.get_ingest_endpoint(endpoint_id)
.await?
.ok_or_else(|| anyhow::anyhow!("Ingest endpoint not found"))?;
let endpoint = self.detect_endpoint(&connection).await?;
let variants = get_variants_from_endpoint(&stream_info, &endpoint)?;
let cfg = get_variants_from_endpoint(&stream_info, &endpoint)?;
if cfg.video_src.is_none() || cfg.variants.is_empty() {
bail!("No video src found");
}
let mut egress = vec![];
egress.push(EgressType::HLS(EgressConfig {
name: "hls".to_string(),
variants: variants.iter().map(|v| v.id()).collect(),
variants: cfg.variants.iter().map(|v| v.id()).collect(),
}));
let stream_id = Uuid::new_v4();
let stream_id = connection.id.clone();
// insert new stream record
let mut new_stream = UserStream {
id: stream_id.to_string(),
user_id: uid,
starts: Utc::now(),
state: UserStreamState::Live,
endpoint_id: Some(endpoint_id),
endpoint_id: Some(endpoint.id),
title: user.title.clone(),
summary: user.summary.clone(),
thumb: user.image.clone(),
content_warning: user.content_warning.clone(),
goal: user.goal.clone(),
tags: user.tags.clone(),
..Default::default()
};
let stream_event = self.publish_stream_event(&new_stream, &user.pubkey).await?;
@ -398,9 +406,11 @@ impl Overseer for ZapStreamOverseer {
self.db.update_stream(&new_stream).await?;
Ok(PipelineConfig {
id: stream_id,
variants,
variants: cfg.variants,
egress,
ingress_info: stream_info.clone(),
video_src: cfg.video_src.unwrap().index,
audio_src: cfg.audio_src.map(|s| s.index),
})
}
@ -419,10 +429,10 @@ impl Overseer for ZapStreamOverseer {
if let Some(endpoint) = self.db.get_ingest_endpoint(endpoint_id).await? {
endpoint.cost
} else {
0
bail!("Endpoint doesnt exist");
}
} else {
0
bail!("Endpoint id not set on stream");
};
// Convert duration from seconds to minutes and calculate cost
@ -433,7 +443,7 @@ impl Overseer for ZapStreamOverseer {
.tick_stream(pipeline_id, stream.user_id, duration, cost)
.await?;
if bal <= 0 {
bail!("Not enough balance");
bail!("Balance has run out");
}
// Update last segment time for this stream
@ -514,6 +524,7 @@ impl Overseer for ZapStreamOverseer {
viewer_states.remove(&stream.id);
stream.state = UserStreamState::Ended;
stream.ends = Some(Utc::now());
let event = self.publish_stream_event(&stream, &user.pubkey).await?;
stream.event = Some(event.as_json());
self.db.update_stream(&stream).await?;
@ -525,25 +536,29 @@ impl Overseer for ZapStreamOverseer {
impl ZapStreamOverseer {
/// Detect which ingest endpoint should be used based on connection info
async fn detect_endpoint(&self, connection: &ConnectionInfo) -> Result<Option<u64>> {
// Get all ingest endpoints and match by name against connection endpoint
async fn detect_endpoint(&self, connection: &ConnectionInfo) -> Result<IngestEndpoint> {
let endpoints = self.db.get_ingest_endpoints().await?;
for endpoint in &endpoints {
if endpoint.name == connection.endpoint {
return Ok(Some(endpoint.id));
}
}
// No matching endpoint found, use the most expensive one
Ok(endpoints.into_iter().max_by_key(|e| e.cost).map(|e| e.id))
let default = endpoints.iter().max_by_key(|e| e.cost);
Ok(endpoints
.iter()
.find(|e| e.name.eq_ignore_ascii_case(connection.endpoint))
.or(default)
.unwrap()
.clone())
}
}
fn get_variants_from_endpoint(
info: &IngressInfo,
endpoint: &zap_stream_db::IngestEndpoint,
) -> Result<Vec<VariantStream>> {
struct EndpointConfig<'a> {
video_src: Option<&'a IngressStream>,
audio_src: Option<&'a IngressStream>,
variants: Vec<VariantStream>,
}
fn get_variants_from_endpoint<'a>(
info: &'a IngressInfo,
endpoint: &IngestEndpoint,
) -> Result<EndpointConfig<'a>> {
let capabilities_str = endpoint.capabilities.as_deref().unwrap_or("");
let capabilities: Vec<&str> = capabilities_str.split(',').collect();
@ -658,5 +673,9 @@ fn get_variants_from_endpoint(
// Handle other capabilities like dvr:720h here if needed
}
Ok(vars)
Ok(EndpointConfig {
audio_src,
video_src,
variants: vars,
})
}

View File

@ -70,7 +70,6 @@ impl Settings {
blossom,
} => Ok(Arc::new(
ZapStreamOverseer::new(
&self.output_dir,
&self.public_url,
private_key,
database,