feat: hls progress

parent 0da9bd996f
commit e111e50199
Cargo.lock (generated, 2 changed lines)
@@ -1022,7 +1022,7 @@ dependencies = [
 [[package]]
 name = "ffmpeg-rs-raw"
 version = "0.1.0"
-source = "git+https://git.v0l.io/Kieran/ffmpeg-rs-raw.git?rev=0abe0c5229adeb64b013d1895c7eba3d917f05a4#0abe0c5229adeb64b013d1895c7eba3d917f05a4"
+source = "git+https://git.v0l.io/Kieran/ffmpeg-rs-raw.git?rev=c2ae78acbcbe315137aea94c77b0db7ea538a709#c2ae78acbcbe315137aea94c77b0db7ea538a709"
 dependencies = [
  "anyhow",
  "ffmpeg-sys-the-third",
@@ -18,12 +18,11 @@ zap-stream = [
     "tokio/fs",
     "dep:base64",
     "dep:sha2",
-    "dep:hex"
 ]
 test-pattern = ["dep:resvg", "dep:usvg", "dep:tiny-skia", "dep:fontdue", "dep:ringbuf", "zap-stream-db/test-pattern"]
 
 [dependencies]
-ffmpeg-rs-raw = { git = "https://git.v0l.io/Kieran/ffmpeg-rs-raw.git", rev = "0abe0c5229adeb64b013d1895c7eba3d917f05a4" }
+ffmpeg-rs-raw = { git = "https://git.v0l.io/Kieran/ffmpeg-rs-raw.git", rev = "c2ae78acbcbe315137aea94c77b0db7ea538a709" }
 tokio = { version = "1.36.0", features = ["rt", "rt-multi-thread", "macros"] }
 anyhow = { version = "^1.0.91", features = ["backtrace"] }
 pretty_env_logger = "0.5.0"
@@ -42,6 +41,7 @@ warp = "0.3.7"
 libc = "0.2.162"
 m3u8-rs = "6.0.0"
 chrono = "^0.4.38"
+hex = "0.4.3"
 
 # test-pattern
 srt-tokio = { version = "0.4.3", optional = true }
@@ -58,4 +58,3 @@ fedimint-tonic-lnd = { version = "0.2.0", optional = true, default-features = fa
 reqwest = { version = "0.12.9", optional = true, features = ["stream"] }
 base64 = { version = "0.22.1", optional = true }
 sha2 = { version = "0.10.8", optional = true }
-hex = { version = "0.4.3", optional = true }
TODO.md (new file, 2 added lines)
@@ -0,0 +1,2 @@
+- Setup multi-variant output
+- Manage event lifecycle (close stream)
@@ -9,6 +9,12 @@ endpoints:
 # Output directory for recording / hls
 output_dir: "./out"
 
+# Public URL for serving files for [output_dir]
+public_url: "http://localhost:8080"
+
+# Bind address for http server serving files from [output_dir]
+listen_http: "127.0.0.1:8080"
+
 # Overseer is the main control structure which controls access to the service
 #
 # ** ONLY 1 OVERSEER CAN BE CONFIGURED AT A TIME **
@@ -1,8 +1,12 @@
+use anyhow::{bail, Result};
 use clap::Parser;
 use config::Config;
-use ffmpeg_rs_raw::ffmpeg_sys_the_third::av_version_info;
-use ffmpeg_rs_raw::rstr;
+use ffmpeg_rs_raw::ffmpeg_sys_the_third::{av_log_set_callback, av_version_info};
+use ffmpeg_rs_raw::{av_log_redirect, rstr};
 use log::{error, info};
+use std::path::PathBuf;
+use std::sync::Arc;
+use tokio::task::JoinHandle;
 use url::Url;
 
 use zap_stream_core::egress::http::listen_out_dir;
@@ -12,19 +16,20 @@ use zap_stream_core::ingress::srt;
 use zap_stream_core::ingress::test;
 
 use zap_stream_core::ingress::{file, tcp};
+use zap_stream_core::overseer::Overseer;
 use zap_stream_core::settings::Settings;
 
 #[derive(Parser, Debug)]
 struct Args {}
 
 #[tokio::main]
-async fn main() -> anyhow::Result<()> {
+async fn main() -> Result<()> {
     pretty_env_logger::init();
 
     let _args = Args::parse();
 
     unsafe {
-        //ffmpeg_sys_next::av_log_set_level(ffmpeg_sys_next::AV_LOG_DEBUG);
+        av_log_set_callback(Some(av_log_redirect));
         info!("FFMPEG version={}", rstr!(av_version_info()));
     }
 
@@ -38,38 +43,55 @@ async fn main() -> anyhow::Result<()> {
 
     let mut listeners = vec![];
     for e in &settings.endpoints {
-        let u: Url = e.parse()?;
-        match u.scheme() {
-            #[cfg(feature = "srt")]
-            "srt" => listeners.push(tokio::spawn(srt::listen(
-                u.host().unwrap().to_string(),
-                overseer.clone(),
-            ))),
-            "tcp" => listeners.push(tokio::spawn(tcp::listen(
-                u.host().unwrap().to_string(),
-                overseer.clone(),
-            ))),
-            "file" => listeners.push(tokio::spawn(file::listen(
-                u.path().parse()?,
-                overseer.clone(),
-            ))),
-            #[cfg(feature = "test-pattern")]
-            "test-pattern" => listeners.push(tokio::spawn(test::listen(overseer.clone()))),
-            _ => {
-                error!("Unknown endpoint config: {e}");
-            }
+        match try_create_listener(e, &settings.output_dir, &overseer) {
+            Ok(l) => listeners.push(l),
+            Err(e) => error!("{}", e),
         }
     }
     listeners.push(tokio::spawn(listen_out_dir(
-        "0.0.0.0:8080".to_owned(),
+        settings.listen_http,
         settings.output_dir,
     )));
 
     for handle in listeners {
-        if let Err(e) = handle.await {
+        if let Err(e) = handle.await? {
             error!("{e}");
         }
     }
     info!("Server closed");
     Ok(())
 }
+
+fn try_create_listener(
+    u: &str,
+    out_dir: &str,
+    overseer: &Arc<dyn Overseer>,
+) -> Result<JoinHandle<Result<()>>> {
+    let url: Url = u.parse()?;
+    match url.scheme() {
+        #[cfg(feature = "srt")]
+        "srt" => Ok(tokio::spawn(srt::listen(
+            out_dir.to_string(),
+            format!("{}:{}", url.host().unwrap(), url.port().unwrap()),
+            overseer.clone(),
+        ))),
+        "tcp" => Ok(tokio::spawn(tcp::listen(
+            out_dir.to_string(),
+            format!("{}:{}", url.host().unwrap(), url.port().unwrap()),
+            overseer.clone(),
+        ))),
+        "file" => Ok(tokio::spawn(file::listen(
+            out_dir.to_string(),
+            PathBuf::from(url.path()),
+            overseer.clone(),
+        ))),
+        #[cfg(feature = "test-pattern")]
+        "test-pattern" => Ok(tokio::spawn(test::listen(
+            out_dir.to_string(),
+            overseer.clone(),
+        ))),
+        _ => {
+            bail!("Unknown endpoint config: {u}");
+        }
    }
}
@@ -51,7 +51,12 @@ impl Blossom {
         Ok(hex::encode(hash))
     }
 
-    pub async fn upload(&self, from_file: &PathBuf, keys: &Keys) -> Result<BlobDescriptor> {
+    pub async fn upload(
+        &self,
+        from_file: &PathBuf,
+        keys: &Keys,
+        mime: Option<&str>,
+    ) -> Result<BlobDescriptor> {
         let mut f = File::open(from_file).await?;
         let hash = Self::hash_file(&mut f).await?;
         let auth_event = EventBuilder::new(
@@ -69,7 +74,7 @@ impl Blossom {
         let rsp: BlobDescriptor = self
             .client
             .put(self.url.join("/upload").unwrap())
-            .header("Content-Type", "application/octet-stream")
+            .header("Content-Type", mime.unwrap_or("application/octet-stream"))
             .header(
                 "Authorization",
                 &format!(
@@ -2,7 +2,6 @@ use anyhow::Result;
 use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVPacket;
 use serde::{Deserialize, Serialize};
 use std::collections::HashSet;
-use std::fmt::{Display, Formatter};
 use std::path::PathBuf;
 use uuid::Uuid;
 
@@ -13,24 +12,10 @@ pub mod recorder;
 #[derive(Clone, Debug, Serialize, Deserialize)]
 pub struct EgressConfig {
     pub name: String,
-    pub out_dir: String,
     /// Which variants will be used in this muxer
     pub variants: HashSet<Uuid>,
 }
 
-impl Display for EgressConfig {
-    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
-        write!(f, "{}: out_dir={}", self.name, self.out_dir)?;
-        if !self.variants.is_empty() {
-            write!(f, "\n\tStreams: ")?;
-            for v in &self.variants {
-                write!(f, "\n\t\t{}", v)?;
-            }
-        }
-        Ok(())
-    }
-}
-
 pub trait Egress {
     unsafe fn process_pkt(&mut self, packet: *mut AVPacket, variant: &Uuid)
         -> Result<EgressResult>;
@@ -1,40 +1,51 @@
 use anyhow::Result;
 use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVPacket;
 use ffmpeg_rs_raw::{Encoder, Muxer};
+use std::collections::HashMap;
 use std::fs;
 use std::path::PathBuf;
 use uuid::Uuid;
 
-use crate::egress::{Egress, EgressConfig, EgressResult};
+use crate::egress::{Egress, EgressResult};
+use crate::variant::{StreamMapping, VariantStream};
 
 pub struct RecorderEgress {
+    /// Pipeline ID
     id: Uuid,
-    config: EgressConfig,
+    /// Internal muxer writing the output packets
     muxer: Muxer,
+    /// Mapping from Variant ID to stream index
+    var_map: HashMap<Uuid, i32>,
 }
 
 impl RecorderEgress {
     pub fn new<'a>(
-        config: EgressConfig,
-        variants: impl Iterator<Item = &'a Encoder>,
+        id: &Uuid,
+        out_dir: &str,
+        variants: impl Iterator<Item = (&'a VariantStream, &'a Encoder)>,
     ) -> Result<Self> {
-        let id = Uuid::new_v4();
-        let base = PathBuf::from(&config.out_dir).join(id.to_string());
+        let base = PathBuf::from(out_dir).join(id.to_string());
 
         let out_file = base.join("recording.ts");
         fs::create_dir_all(&base)?;
 
+        let mut var_map = HashMap::new();
         let muxer = unsafe {
            let mut m = Muxer::builder()
                .with_output_path(out_file.to_str().unwrap(), None)?
                .build()?;
-            for var in variants {
-                m.add_stream_encoder(var)?;
+            for (var, enc) in variants {
+                let stream = m.add_stream_encoder(enc)?;
+                var_map.insert(var.id(), (*stream).index);
             }
            m.open(None)?;
            m
        };
-        Ok(Self { id, config, muxer })
+        Ok(Self {
+            id: id.clone(),
+            muxer,
+            var_map,
+        })
     }
 }
 
@@ -44,7 +55,10 @@ impl Egress for RecorderEgress {
         packet: *mut AVPacket,
         variant: &Uuid,
     ) -> Result<EgressResult> {
-        if self.config.variants.contains(variant) {
+        if let Some(stream) = self.var_map.get(variant) {
+            // very important for muxer to know which stream this pkt belongs to
+            (*packet).stream_index = *stream;
+
             self.muxer.write_packet(packet)?;
         }
         Ok(EgressResult::None)
@@ -5,16 +5,16 @@ use log::info;
 use std::path::PathBuf;
 use std::sync::Arc;
 
-pub async fn listen(path: PathBuf, overseer: Arc<dyn Overseer>) -> Result<()> {
-    info!("Sending file {}", path.to_str().unwrap());
+pub async fn listen(out_dir: String, path: PathBuf, overseer: Arc<dyn Overseer>) -> Result<()> {
+    info!("Sending file: {}", path.display());
 
     let info = ConnectionInfo {
         ip_addr: "127.0.0.1:6969".to_string(),
         endpoint: "file-input".to_owned(),
-        key: "".to_string(),
+        key: "test".to_string(),
     };
     let file = std::fs::File::open(path)?;
-    spawn_pipeline(info, overseer.clone(), Box::new(file)).await;
+    spawn_pipeline(info, out_dir.clone(), overseer.clone(), Box::new(file)).await;
 
     Ok(())
 }
@@ -27,14 +27,16 @@ pub struct ConnectionInfo {
 
 pub async fn spawn_pipeline(
     info: ConnectionInfo,
+    out_dir: String,
     seer: Arc<dyn Overseer>,
     reader: Box<dyn Read + Send>,
 ) {
     info!("New client connected: {}", &info.ip_addr);
     let handle = Handle::current();
     let seer = seer.clone();
+    let out_dir = out_dir.to_string();
     std::thread::spawn(move || unsafe {
-        match PipelineRunner::new(handle, seer, info, reader) {
+        match PipelineRunner::new(handle, out_dir, seer, info, reader) {
             Ok(mut pl) => loop {
                 if let Err(e) = pl.run() {
                     error!("Pipeline run failed: {}", e);
@@ -9,17 +9,18 @@ use srt_tokio::{SrtListener, SrtSocket};
 use std::sync::Arc;
 use tokio::sync::mpsc::unbounded_channel;
 
-pub async fn listen(listen_addr: String, overseer: Arc<dyn Overseer>) -> Result<()> {
-    let (_binding, mut packets) = SrtListener::builder().bind(listen_addr.clone()).await?;
+pub async fn listen(out_dir: String, addr: String, overseer: Arc<dyn Overseer>) -> Result<()> {
+    let (_binding, mut packets) = SrtListener::builder().bind(&addr).await?;
 
-    info!("SRT listening on: {}", listen_addr.clone());
+    info!("SRT listening on: {}", &addr);
     while let Some(request) = packets.incoming().next().await {
         let mut socket = request.accept(None).await?;
         let info = ConnectionInfo {
-            endpoint: listen_addr.clone(),
+            endpoint: addr.clone(),
             ip_addr: socket.settings().remote.to_string(),
+            key: "".to_string(),
         };
-        spawn_pipeline(info, overseer.clone(), Box::new(socket)).await;
+        spawn_pipeline(info, out_dir.clone(), overseer.clone(), Box::new(socket)).await;
     }
     Ok(())
 }
@@ -6,10 +6,10 @@ use tokio::net::TcpListener;
 use crate::ingress::{spawn_pipeline, ConnectionInfo};
 use crate::overseer::Overseer;
 
-pub async fn listen(addr: String, overseer: Arc<dyn Overseer>) -> Result<()> {
-    let listener = TcpListener::bind(addr.clone()).await?;
+pub async fn listen(out_dir: String, addr: String, overseer: Arc<dyn Overseer>) -> Result<()> {
+    let listener = TcpListener::bind(&addr).await?;
 
-    info!("TCP listening on: {}", addr.clone());
+    info!("TCP listening on: {}", &addr);
     while let Ok((socket, ip)) = listener.accept().await {
         let info = ConnectionInfo {
             ip_addr: ip.to_string(),
@@ -17,7 +17,7 @@ pub async fn listen(addr: String, overseer: Arc<dyn Overseer>) -> Result<()> {
             key: "".to_string(),
         };
         let socket = socket.into_std()?;
-        spawn_pipeline(info, overseer.clone(), Box::new(socket)).await;
+        spawn_pipeline(info, out_dir.clone(), overseer.clone(), Box::new(socket)).await;
     }
     Ok(())
 }
@@ -1,12 +1,11 @@
 use crate::ingress::{spawn_pipeline, ConnectionInfo};
 use crate::overseer::Overseer;
 use anyhow::Result;
-use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVCodecID::AV_CODEC_ID_H264;
 use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVColorSpace::AVCOL_SPC_RGB;
 use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVPictureType::AV_PICTURE_TYPE_NONE;
 use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVPixelFormat::{AV_PIX_FMT_RGBA, AV_PIX_FMT_YUV420P};
 use ffmpeg_rs_raw::ffmpeg_sys_the_third::{
-    av_frame_alloc, av_frame_get_buffer, AV_PROFILE_H264_MAIN,
+    av_frame_alloc, av_frame_free, av_frame_get_buffer, av_packet_free, AV_PROFILE_H264_MAIN,
 };
 use ffmpeg_rs_raw::{Encoder, Muxer, Scaler};
 use fontdue::layout::{CoordinateSystem, Layout, TextStyle};
@@ -19,16 +18,16 @@ use std::sync::Arc;
 use std::time::{Duration, Instant};
 use tiny_skia::Pixmap;
 
-pub async fn listen(overseer: Arc<dyn Overseer>) -> Result<()> {
+pub async fn listen(out_dir: String, overseer: Arc<dyn Overseer>) -> Result<()> {
     info!("Test pattern enabled");
 
     let info = ConnectionInfo {
         endpoint: "test-pattern".to_string(),
         ip_addr: "test-pattern".to_string(),
-        key: "test-pattern".to_string(),
+        key: "test".to_string(),
     };
     let src = TestPatternSrc::new()?;
-    spawn_pipeline(info, overseer.clone(), Box::new(src)).await;
+    spawn_pipeline(info, out_dir.clone(), overseer.clone(), Box::new(src)).await;
     Ok(())
 }
 
@@ -49,9 +48,9 @@ impl TestPatternSrc {
     pub fn new() -> Result<Self> {
         let scaler = Scaler::new();
         let encoder = unsafe {
-            Encoder::new(AV_CODEC_ID_H264)?
+            Encoder::new_with_name("libx264")?
                 .with_stream_index(0)
-                .with_framerate(30.0)
+                .with_framerate(30.0)?
                 .with_bitrate(1_000_000)
                 .with_pix_fmt(AV_PIX_FMT_YUV420P)
                 .with_width(1280)
@@ -64,7 +63,10 @@ impl TestPatternSrc {
         let svg_data = include_bytes!("../../test.svg");
         let tree = usvg::Tree::from_data(svg_data, &Default::default())?;
         let mut pixmap = Pixmap::new(1280, 720).unwrap();
-        let render_ts = tiny_skia::Transform::from_scale(1f32, 1f32);
+        let render_ts = tiny_skia::Transform::from_scale(
+            pixmap.width() as f32 / tree.size().width(),
+            pixmap.height() as f32 / tree.size().height(),
+        );
         resvg::render(&tree, render_ts, &mut pixmap.as_mut());
 
         let font = include_bytes!("../../SourceCodePro-Regular.ttf") as &[u8];
@@ -108,7 +110,7 @@ impl TestPatternSrc {
 
         self.frame_no += 1;
 
-        let src_frame = unsafe {
+        let mut src_frame = unsafe {
             let src_frame = av_frame_alloc();
 
             (*src_frame).width = 1280;
@@ -152,12 +154,15 @@ impl TestPatternSrc {
         }
 
         // scale/encode
-        let frame = self
+        let mut frame = self
             .scaler
             .process_frame(src_frame, 1280, 720, AV_PIX_FMT_YUV420P)?;
-        for pkt in self.encoder.encode_frame(frame)? {
+        for mut pkt in self.encoder.encode_frame(frame)? {
             self.muxer.write_packet(pkt)?;
+            av_packet_free(&mut pkt);
         }
+        av_frame_free(&mut frame);
+        av_frame_free(&mut src_frame);
         Ok(())
     }
 }
src/mux/hls.rs (136 changed lines)
@@ -1,9 +1,10 @@
 use crate::egress::NewSegment;
 use crate::variant::{StreamMapping, VariantStream};
 use anyhow::{bail, Result};
+use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVCodecID::AV_CODEC_ID_H264;
 use ffmpeg_rs_raw::ffmpeg_sys_the_third::{
-    av_free, av_opt_set, av_q2d, av_write_frame, avio_flush, avio_open, AVPacket, AVIO_FLAG_WRITE,
-    AV_PKT_FLAG_KEY,
+    av_free, av_opt_set, av_q2d, av_write_frame, avio_flush, avio_open, AVPacket, AVStream,
+    AVIO_FLAG_WRITE, AV_PKT_FLAG_KEY,
 };
 use ffmpeg_rs_raw::{cstr, Encoder, Muxer};
 use itertools::Itertools;
@@ -41,6 +42,14 @@ impl HlsVariantStream {
             HlsVariantStream::Subtitle { id, .. } => id,
         }
     }
+
+    pub fn index(&self) -> &usize {
+        match self {
+            HlsVariantStream::Video { index, .. } => index,
+            HlsVariantStream::Audio { index, .. } => index,
+            HlsVariantStream::Subtitle { index, .. } => index,
+        }
+    }
 }
 
 impl Display for HlsVariantStream {
@@ -64,6 +73,8 @@ pub struct HlsVariant {
     pub segment_length: f32,
     /// Current segment index
     pub idx: u64,
+    /// Current segment start time in seconds (duration)
+    pub pkt_start: f32,
     /// Output directory (base)
     pub out_dir: String,
     /// List of segments to be included in the playlist
@@ -142,6 +153,7 @@ impl HlsVariant {
             mux,
             streams,
             idx: 1,
+            pkt_start: 0.0,
             segments: Vec::from([SegmentInfo(1, segment_length)]),
             out_dir: out_dir.to_string(),
         })
@@ -165,21 +177,22 @@ impl HlsVariant {
 
     /// Mux a packet created by the encoder for this variant
     pub unsafe fn mux_packet(&mut self, pkt: *mut AVPacket) -> Result<Option<NewSegment>> {
+        let pkt_q = av_q2d((*pkt).time_base);
         // time of this packet in seconds
-        let pkt_time = (*pkt).pts as f32 * av_q2d((*pkt).time_base) as f32;
+        let pkt_time = (*pkt).pts as f32 * pkt_q as f32;
         // what segment this pkt should be in (index)
         let pkt_seg = 1 + (pkt_time / self.segment_length).floor() as u64;
 
         let mut result = None;
         let can_split = (*pkt).flags & AV_PKT_FLAG_KEY == AV_PKT_FLAG_KEY;
         if pkt_seg != self.idx && can_split {
-            result = Some(self.split_next_seg()?);
+            result = Some(self.split_next_seg(pkt_time)?);
         }
         self.mux.write_packet(pkt)?;
         Ok(result)
     }
 
-    unsafe fn split_next_seg(&mut self) -> Result<NewSegment> {
+    unsafe fn split_next_seg(&mut self, pkt_time: f32) -> Result<NewSegment> {
         self.idx += 1;
 
         // Manually reset muxer avio
@@ -204,9 +217,8 @@ impl HlsVariant {
             0,
         );
 
-        // TODO: calc actual duration
-        let duration = 2.0;
-        info!("Writing segment {}", &next_seg_url);
+        let duration = pkt_time - self.pkt_start;
+        info!("Writing segment {} [{}s]", &next_seg_url, duration);
         if let Err(e) = self.add_segment(self.idx, duration) {
             warn!("Failed to update playlist: {}", e);
         }
@@ -214,20 +226,24 @@ impl HlsVariant {
         /// Get the video variant for this group
         /// since this could actually be audio which would not be useful for
         /// [Overseer] impl
-        let video_var = self
-            .streams
-            .iter()
-            .find(|a| matches!(*a, HlsVariantStream::Video { .. }))
-            .map_or(Default::default(), |v| v.id().clone());
+        let video_var = self.video_stream().unwrap_or(self.streams.first().unwrap());
 
         // emit result of the previously completed segment,
         let prev_seg = self.idx - 1;
-        Ok(NewSegment {
-            variant: video_var,
+        let ret = NewSegment {
+            variant: *video_var.id(),
             idx: prev_seg,
             duration,
             path: PathBuf::from(Self::map_segment_path(&*self.out_dir, &self.name, prev_seg)),
-        })
+        };
+        self.pkt_start = pkt_time;
+        Ok(ret)
+    }
+
+    fn video_stream(&self) -> Option<&HlsVariantStream> {
+        self.streams
+            .iter()
+            .find(|a| matches!(*a, HlsVariantStream::Video { .. }))
     }
 
     fn add_segment(&mut self, idx: u64, duration: f32) -> Result<()> {
|
|||||||
pl.write_to(&mut f_out)?;
|
pl.write_to(&mut f_out)?;
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// https://git.ffmpeg.org/gitweb/ffmpeg.git/blob/HEAD:/libavformat/hlsenc.c#l351
|
||||||
|
unsafe fn to_codec_attr(&self, stream: *mut AVStream) -> Option<String> {
|
||||||
|
let p = (*stream).codecpar;
|
||||||
|
if (*p).codec_id == AV_CODEC_ID_H264 {
|
||||||
|
let data = (*p).extradata;
|
||||||
|
if !data.is_null() {
|
||||||
|
let mut id_ptr = ptr::null_mut();
|
||||||
|
let ds: *mut u16 = data as *mut u16;
|
||||||
|
if (*ds) == 1 && (*data.add(4)) & 0x1F == 7 {
|
||||||
|
id_ptr = data.add(5);
|
||||||
|
} else if (*ds) == 1 && (*data.add(3)) & 0x1F == 7 {
|
||||||
|
id_ptr = data.add(4);
|
||||||
|
} else if *data.add(0) == 1 {
|
||||||
|
id_ptr = data.add(1);
|
||||||
|
} else {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
return Some(format!(
|
||||||
|
"avc1.{}",
|
||||||
|
hex::encode([*id_ptr.add(0), *id_ptr.add(1), *id_ptr.add(2)])
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn to_playlist_variant(&self) -> m3u8_rs::VariantStream {
|
||||||
|
unsafe {
|
||||||
|
let pes = self.video_stream().unwrap_or(self.streams.first().unwrap());
|
||||||
|
let av_stream = *(*self.mux.context()).streams.add(*pes.index());
|
||||||
|
let codec_par = (*av_stream).codecpar;
|
||||||
|
m3u8_rs::VariantStream {
|
||||||
|
is_i_frame: false,
|
||||||
|
uri: format!("{}/live.m3u8", self.name),
|
||||||
|
bandwidth: 0,
|
||||||
|
average_bandwidth: Some((*codec_par).bit_rate as u64),
|
||||||
|
codecs: self.to_codec_attr(av_stream),
|
||||||
|
resolution: Some(m3u8_rs::Resolution {
|
||||||
|
width: (*codec_par).width as _,
|
||||||
|
height: (*codec_par).height as _,
|
||||||
|
}),
|
||||||
|
frame_rate: Some(av_q2d((*codec_par).framerate)),
|
||||||
|
hdcp_level: None,
|
||||||
|
audio: None,
|
||||||
|
video: None,
|
||||||
|
subtitles: None,
|
||||||
|
closed_captions: None,
|
||||||
|
other_attributes: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct HlsMuxer {
|
pub struct HlsMuxer {
|
||||||
|
out_dir: PathBuf,
|
||||||
variants: Vec<HlsVariant>,
|
variants: Vec<HlsVariant>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl HlsMuxer {
|
impl HlsMuxer {
|
||||||
pub fn new<'a>(
|
pub fn new<'a>(
|
||||||
|
id: &Uuid,
|
||||||
out_dir: &str,
|
out_dir: &str,
|
||||||
segment_length: f32,
|
segment_length: f32,
|
||||||
encoders: impl Iterator<Item = (&'a VariantStream, &'a Encoder)>,
|
encoders: impl Iterator<Item = (&'a VariantStream, &'a Encoder)>,
|
||||||
) -> Result<Self> {
|
) -> Result<Self> {
|
||||||
let id = Uuid::new_v4();
|
let base = PathBuf::from(out_dir).join(id.to_string());
|
||||||
let base = PathBuf::from(out_dir)
|
|
||||||
.join(id.to_string())
|
|
||||||
.to_string_lossy()
|
|
||||||
.to_string();
|
|
||||||
|
|
||||||
let mut vars = Vec::new();
|
let mut vars = Vec::new();
|
||||||
for (k, group) in &encoders
|
for (k, group) in &encoders
|
||||||
.sorted_by(|a, b| a.0.group_id().cmp(&b.0.group_id()))
|
.sorted_by(|a, b| a.0.group_id().cmp(&b.0.group_id()))
|
||||||
.chunk_by(|a| a.0.group_id())
|
.chunk_by(|a| a.0.group_id())
|
||||||
{
|
{
|
||||||
let var = HlsVariant::new(&base, segment_length, k, group)?;
|
let var = HlsVariant::new(base.to_str().unwrap(), segment_length, k, group)?;
|
||||||
vars.push(var);
|
vars.push(var);
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(Self { variants: vars })
|
let ret = Self {
|
||||||
|
out_dir: base,
|
||||||
|
variants: vars,
|
||||||
|
};
|
||||||
|
ret.write_master_playlist()?;
|
||||||
|
Ok(ret)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write_master_playlist(&self) -> Result<()> {
|
||||||
|
let mut pl = m3u8_rs::MasterPlaylist::default();
|
||||||
|
pl.version = Some(3);
|
||||||
|
pl.variants = self
|
||||||
|
.variants
|
||||||
|
.iter()
|
||||||
|
.map(|v| v.to_playlist_variant())
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let mut f_out = File::create(self.out_dir.join("live.m3u8"))?;
|
||||||
|
pl.write_to(&mut f_out)?;
|
||||||
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Mux an encoded packet from [Encoder]
|
/// Mux an encoded packet from [Encoder]
|
||||||
@ -295,7 +381,9 @@ impl HlsMuxer {
|
|||||||
variant: &Uuid,
|
variant: &Uuid,
|
||||||
) -> Result<Option<NewSegment>> {
|
) -> Result<Option<NewSegment>> {
|
||||||
for var in self.variants.iter_mut() {
|
for var in self.variants.iter_mut() {
|
||||||
if var.streams.iter().any(|s| s.id() == variant) {
|
if let Some(vs) = var.streams.iter().find(|s| s.id() == variant) {
|
||||||
|
// very important for muxer to know which stream this pkt belongs to
|
||||||
|
(*pkt).stream_index = *vs.index() as _;
|
||||||
return var.mux_packet(pkt);
|
return var.mux_packet(pkt);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
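The CODECS attribute produced by to_codec_attr above follows the RFC 6381 "avc1.PPCCLL" convention: the profile_idc, constraint-flags byte and level_idc taken from the start of the H.264 SPS, hex encoded. A standalone sketch of just the formatting step (example values only):

    /// e.g. profile 0x64 (High), constraints 0x00, level 0x2a (4.2) -> "avc1.64002a",
    /// the same string that hex::encode of the three SPS bytes produces above.
    fn avc1_codec_attr(profile_idc: u8, constraint_flags: u8, level_idc: u8) -> String {
        format!("avc1.{:02x}{:02x}{:02x}", profile_idc, constraint_flags, level_idc)
    }

The master playlist written by write_master_playlist then lists one entry per HlsVariant, each pointing at "{name}/live.m3u8" under the muxer's output directory.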
@@ -71,6 +71,16 @@ pub trait Overseer: Send + Sync {
         duration: f32,
         path: &PathBuf,
     ) -> Result<()>;
+
+    /// At a regular interval, pipeline will emit one of the frames for processing as a
+    /// thumbnail
+    async fn on_thumbnail(
+        &self,
+        pipeline_id: &Uuid,
+        width: usize,
+        height: usize,
+        path: &PathBuf,
+    ) -> Result<()>;
 }
 
 impl Settings {
@@ -86,13 +96,23 @@ impl Settings {
                 database,
                 lnd,
                 relays,
+                blossom,
             } => {
                 #[cfg(not(feature = "zap-stream"))]
                 panic!("zap.stream overseer is not enabled");
 
                 #[cfg(feature = "zap-stream")]
                 Ok(Arc::new(
-                    ZapStreamOverseer::new(private_key, database, lnd, relays).await?,
+                    ZapStreamOverseer::new(
+                        &self.output_dir,
+                        &self.public_url,
+                        private_key,
+                        database,
+                        lnd,
+                        relays,
+                        blossom,
+                    )
+                    .await?,
                 ))
             }
         }
@@ -123,7 +143,7 @@ pub(crate) fn get_default_variants(info: &IngressInfo) -> Result<Vec<VariantStre
             height: 720,
             fps: video_src.fps,
             bitrate: 3_000_000,
-            codec: 27,
+            codec: "libx264".to_string(),
             profile: 100,
             level: 51,
             keyframe_interval: video_src.fps as u16 * 2,
@@ -150,10 +170,10 @@ pub(crate) fn get_default_variants(info: &IngressInfo) -> Result<Vec<VariantStre
                 group_id: 1,
             },
             bitrate: 192_000,
-            codec: 86018,
+            codec: "libfdk_aac".to_string(),
             channels: 2,
             sample_rate: 48_000,
-            sample_fmt: "fltp".to_owned(),
+            sample_fmt: "s16".to_owned(),
         }));
     }
 
@@ -188,8 +208,6 @@ impl Overseer for StaticOverseer {
             }),*/
             EgressType::HLS(EgressConfig {
                 name: "HLS".to_owned(),
-                // TODO: this is temp, webhook should not need full config
-                out_dir: "out".to_string(),
                 variants: var_ids,
             }),
         ],
@@ -206,4 +224,14 @@ impl Overseer for StaticOverseer {
     ) -> Result<()> {
         todo!()
     }
+
+    async fn on_thumbnail(
+        &self,
+        pipeline_id: &Uuid,
+        width: usize,
+        height: usize,
+        path: &PathBuf,
+    ) -> Result<()> {
+        todo!()
+    }
 }
@@ -39,4 +39,14 @@ impl Overseer for WebhookOverseer {
     ) -> Result<()> {
         todo!()
     }
+
+    async fn on_thumbnail(
+        &self,
+        pipeline_id: &Uuid,
+        width: usize,
+        height: usize,
+        path: &PathBuf,
+    ) -> Result<()> {
+        todo!()
+    }
 }
@@ -10,33 +10,52 @@ use anyhow::{anyhow, Result};
 use async_trait::async_trait;
 use chrono::Utc;
 use fedimint_tonic_lnd::verrpc::VersionRequest;
+use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVCodecID::AV_CODEC_ID_MJPEG;
+use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVFrame;
+use ffmpeg_rs_raw::Encoder;
 use futures_util::FutureExt;
 use log::info;
 use nostr_sdk::bitcoin::PrivateKey;
-use nostr_sdk::{Client, Event, EventBuilder, JsonUtil, Keys, Kind, Tag};
+use nostr_sdk::prelude::Coordinate;
+use nostr_sdk::{Client, Event, EventBuilder, JsonUtil, Keys, Kind, Tag, ToBech32};
 use std::env::temp_dir;
 use std::fs::create_dir_all;
 use std::path::PathBuf;
 use std::str::FromStr;
+use url::Url;
 use uuid::Uuid;
+use zap_stream_db::sqlx::Encode;
 use zap_stream_db::{UserStream, UserStreamState, ZapStreamDb};
 
-const STREAM_EVENT_KIND: u16 = 30_313;
+const STREAM_EVENT_KIND: u16 = 30_311;
 
 /// zap.stream NIP-53 overseer
 pub struct ZapStreamOverseer {
+    /// Dir where HTTP server serves files from
+    out_dir: String,
+    /// Database instance for accounts/streams
     db: ZapStreamDb,
+    /// LND node connection
     lnd: fedimint_tonic_lnd::Client,
+    /// Nostr client for publishing events
     client: Client,
+    /// Nostr keys used to sign events
     keys: Keys,
+    /// List of blossom servers to upload segments to
+    blossom_servers: Vec<Blossom>,
+    /// Public facing URL pointing to [out_dir]
+    public_url: String,
 }
 
 impl ZapStreamOverseer {
     pub async fn new(
+        out_dir: &String,
+        public_url: &String,
         private_key: &str,
         db: &str,
         lnd: &LndSettings,
         relays: &Vec<String>,
+        blossom_servers: &Option<Vec<String>>,
     ) -> Result<Self> {
         let db = ZapStreamDb::new(db).await?;
         db.migrate().await?;
@@ -62,10 +81,18 @@ impl ZapStreamOverseer {
         client.connect().await;
 
         Ok(Self {
+            out_dir: out_dir.clone(),
             db,
             lnd,
             client,
             keys,
+            blossom_servers: blossom_servers
+                .as_ref()
+                .unwrap_or(&Vec::new())
+                .into_iter()
+                .map(|b| Blossom::new(b))
+                .collect(),
+            public_url: public_url.clone(),
         })
     }
 }
@@ -83,15 +110,11 @@ impl Overseer for ZapStreamOverseer {
             .await?
             .ok_or_else(|| anyhow::anyhow!("User not found"))?;
 
-        let out_dir = temp_dir().join("zap-stream");
-        create_dir_all(&out_dir)?;
-
         let variants = get_default_variants(&stream_info)?;
 
         let mut egress = vec![];
         egress.push(EgressType::HLS(EgressConfig {
-            name: "nip94-hls".to_string(),
-            out_dir: out_dir.to_string_lossy().to_string(),
+            name: "hls".to_string(),
             variants: variants.iter().map(|v| v.id()).collect(),
         }));
 
@@ -103,7 +126,8 @@ impl Overseer for ZapStreamOverseer {
             state: UserStreamState::Live,
             ..Default::default()
         };
-        let stream_event = publish_stream_event(&new_stream, &self.client).await?;
+        let stream_event =
+            publish_stream_event(&new_stream, &self.client, &self.keys, &self.public_url).await?;
         new_stream.event = Some(stream_event.as_json());
 
         self.db.insert_stream(&new_stream).await?;
@@ -122,27 +146,43 @@ impl Overseer for ZapStreamOverseer {
         duration: f32,
         path: &PathBuf,
     ) -> Result<()> {
-        let blossom = Blossom::new("http://localhost:8881/");
-        let blob = blossom.upload(path, &self.keys).await?;
-
-        let a_tag = format!(
-            "{}:{}:{}",
-            pipeline_id,
-            self.keys.public_key.to_hex(),
-            STREAM_EVENT_KIND
-        );
-        // publish nip94 tagged to stream
-        let n96 = blob_to_event_builder(&blob)?
-            .add_tags(Tag::parse(&["a", &a_tag]))
-            .sign_with_keys(&self.keys)?;
-        self.client.send_event(n96).await?;
-        info!("Published N96 segment for {}", a_tag);
+        // Upload to blossom servers if configured
+        let mut blobs = vec![];
+        for b in &self.blossom_servers {
+            blobs.push(b.upload(path, &self.keys, Some("video/mp2t")).await?);
+        }
+        if let Some(blob) = blobs.first() {
+            let a_tag = format!(
+                "{}:{}:{}",
+                STREAM_EVENT_KIND,
+                self.keys.public_key.to_hex(),
+                pipeline_id
+            );
+            let mut n94 = blob_to_event_builder(blob)?.add_tags(Tag::parse(&["a", &a_tag]));
+            for b in blobs.iter().skip(1) {
+                n94 = n94.add_tags(Tag::parse(&["url", &b.url]));
+            }
+            let n94 = n94.sign_with_keys(&self.keys)?;
+            self.client.send_event(n94).await?;
+            info!("Published N94 segment for {}", a_tag);
+        }
+
+        Ok(())
+    }
 
+    async fn on_thumbnail(
+        &self,
+        pipeline_id: &Uuid,
+        width: usize,
+        height: usize,
+        pixels: &PathBuf,
+    ) -> Result<()> {
+        // nothing to do
         Ok(())
     }
 }
 
-fn stream_to_event_builder(this: &UserStream) -> Result<EventBuilder> {
+fn stream_to_event_builder(this: &UserStream, keys: &Keys) -> Result<EventBuilder> {
     let mut tags = vec![
         Tag::parse(&["d".to_string(), this.id.to_string()])?,
         Tag::parse(&["status".to_string(), this.state.to_string()])?,
@@ -183,11 +223,39 @@ fn stream_to_event_builder(this: &UserStream) -> Result<EventBuilder> {
             tags.push(Tag::parse(&["t".to_string(), tag.to_string()])?);
         }
     }
-    Ok(EventBuilder::new(Kind::from(STREAM_EVENT_KIND), "", tags))
+
+    let kind = Kind::from(STREAM_EVENT_KIND);
+    let coord = Coordinate::new(kind, keys.public_key).identifier(this.id);
+    tags.push(Tag::parse(&[
+        "alt",
+        &format!("Watch live on https://zap.stream/{}", coord.to_bech32()?),
+    ])?);
+    Ok(EventBuilder::new(kind, "", tags))
 }
 
-async fn publish_stream_event(this: &UserStream, client: &Client) -> Result<Event> {
-    let ev = stream_to_event_builder(this)?
+fn stream_url_mapping(this: &UserStream, public_url: &str) -> Result<String> {
+    let u: Url = public_url.parse()?;
+    // hls muxer always writes the master playlist like this
+    Ok(u.join(&format!("/{}/live.m3u8", this.id))?.to_string())
+}
+
+fn image_url_mapping(this: &UserStream, public_url: &str) -> Result<String> {
+    let u: Url = public_url.parse()?;
+    // pipeline always writes a thumbnail like this
+    Ok(u.join(&format!("/{}/thumb.webp", this.id))?.to_string())
+}
+
+async fn publish_stream_event(
+    this: &UserStream,
+    client: &Client,
+    keys: &Keys,
+    public_url: &str,
+) -> Result<Event> {
+    let ev = stream_to_event_builder(this, keys)?
+        .add_tags([
+            Tag::parse(&["streaming", stream_url_mapping(this, public_url)?.as_str()])?,
+            Tag::parse(&["image", image_url_mapping(this, public_url)?.as_str()])?,
+        ])
         .sign(&client.signer().await?)
         .await?;
     client.send_event(ev.clone()).await?;
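The `a` tag attached to each segment event above is a NIP-01 addressable-event coordinate: kind, author pubkey (hex) and the d-tag identifier (here the stream/pipeline UUID), joined by colons. A standalone sketch with placeholder values:

    /// e.g. segment_a_tag(30_311, "<author-pubkey-hex>", "<stream-uuid>")
    /// yields "30311:<author-pubkey-hex>:<stream-uuid>".
    fn segment_a_tag(kind: u16, pubkey_hex: &str, stream_d_tag: &str) -> String {
        format!("{}:{}:{}", kind, pubkey_hex, stream_d_tag)
    }

The diff also corrects the field order here: the previous code placed the pipeline id where the kind belongs.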
@@ -31,15 +31,11 @@ impl EgressType {
 
 impl Display for EgressType {
     fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
-        write!(
-            f,
-            "{}",
-            match self {
-                EgressType::HLS(c) => format!("{}", c),
-                EgressType::Recorder(c) => format!("{}", c),
-                EgressType::RTMPForwarder(c) => format!("{}", c),
-            }
-        )
+        match self {
+            EgressType::HLS(_) => write!(f, "HLS"),
+            EgressType::Recorder(_) => write!(f, "Recorder"),
+            EgressType::RTMPForwarder(_) => write!(f, "RTMPForwarder"),
+        }
     }
 }
 
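The pipeline-runner hunks below add periodic thumbnail extraction; the gate is a pure timestamp check, sketched here on bare values (names are illustrative, not part of the crate):

    /// Mirrors the check below: convert the frame PTS to whole seconds using the
    /// stream time base (num/den), then fire once per minute, skipping t=0.
    /// Like the code below, every frame landing in a qualifying second passes.
    fn should_write_thumb(pts: i64, time_base_num: i32, time_base_den: i32) -> bool {
        let pts_sec = (pts as f64 * time_base_num as f64 / time_base_den as f64).floor() as u64;
        pts_sec % 60 == 0 && pts_sec != 0
    }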
@ -2,6 +2,7 @@ use std::collections::{HashMap, HashSet};
|
|||||||
use std::io::Read;
|
use std::io::Read;
|
||||||
use std::mem::transmute;
|
use std::mem::transmute;
|
||||||
use std::ops::Sub;
|
use std::ops::Sub;
|
||||||
|
use std::path::PathBuf;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
use std::time::Instant;
|
use std::time::Instant;
|
||||||
|
|
||||||
@ -13,11 +14,15 @@ use crate::overseer::{IngressInfo, IngressStream, IngressStreamType, Overseer};
|
|||||||
use crate::pipeline::{EgressType, PipelineConfig};
|
use crate::pipeline::{EgressType, PipelineConfig};
|
||||||
use crate::variant::{StreamMapping, VariantStream};
|
use crate::variant::{StreamMapping, VariantStream};
|
||||||
use anyhow::{bail, Result};
|
use anyhow::{bail, Result};
|
||||||
|
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVCodecID::AV_CODEC_ID_WEBP;
|
||||||
|
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVPictureType::AV_PICTURE_TYPE_NONE;
|
||||||
|
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVPixelFormat::AV_PIX_FMT_YUV420P;
|
||||||
use ffmpeg_rs_raw::ffmpeg_sys_the_third::{
|
use ffmpeg_rs_raw::ffmpeg_sys_the_third::{
|
||||||
av_frame_free, av_get_sample_fmt, av_packet_free, av_rescale_q,
|
av_frame_free, av_get_sample_fmt, av_packet_free, av_q2d, av_rescale_q, AVMediaType,
|
||||||
};
|
};
|
||||||
use ffmpeg_rs_raw::{
|
use ffmpeg_rs_raw::{
|
||||||
cstr, get_frame_from_hw, Decoder, Demuxer, DemuxerInfo, Encoder, Resample, Scaler, StreamType,
|
cstr, get_frame_from_hw, AudioFifo, Decoder, Demuxer, DemuxerInfo, Encoder, Resample, Scaler,
|
||||||
|
StreamType,
|
||||||
};
|
};
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use log::{error, info, warn};
|
use log::{error, info, warn};
|
||||||
@ -47,8 +52,8 @@ pub struct PipelineRunner {
|
|||||||
/// Scaler for a variant (variant_id, Scaler)
|
/// Scaler for a variant (variant_id, Scaler)
|
||||||
scalers: HashMap<Uuid, Scaler>,
|
scalers: HashMap<Uuid, Scaler>,
|
||||||
|
|
||||||
/// Resampler for a variant (variant_id, Resample)
|
/// Resampler for a variant (variant_id, Resample+FIFO)
|
||||||
resampler: HashMap<Uuid, Resample>,
|
resampler: HashMap<Uuid, (Resample, AudioFifo)>,
|
||||||
|
|
||||||
/// Encoder for a variant (variant_id, Encoder)
|
/// Encoder for a variant (variant_id, Encoder)
|
||||||
encoders: HashMap<Uuid, Encoder>,
|
encoders: HashMap<Uuid, Encoder>,
|
||||||
@ -59,25 +64,28 @@ pub struct PipelineRunner {
|
|||||||
/// All configured egress'
|
/// All configured egress'
|
||||||
egress: Vec<Box<dyn Egress>>,
|
egress: Vec<Box<dyn Egress>>,
|
||||||
|
|
||||||
fps_counter_start: Instant,
|
|
||||||
frame_ctr: u64,
|
|
||||||
|
|
||||||
/// Info about the input stream
|
/// Info about the input stream
|
||||||
info: Option<IngressInfo>,
|
info: Option<IngressInfo>,
|
||||||
|
|
||||||
/// Overseer managing this pipeline
|
/// Overseer managing this pipeline
|
||||||
overseer: Arc<dyn Overseer>,
|
overseer: Arc<dyn Overseer>,
|
||||||
|
|
||||||
|
fps_counter_start: Instant,
|
||||||
|
frame_ctr: u64,
|
||||||
|
out_dir: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl PipelineRunner {
|
impl PipelineRunner {
|
||||||
pub fn new(
|
pub fn new(
|
||||||
handle: Handle,
|
handle: Handle,
|
||||||
|
out_dir: String,
|
||||||
overseer: Arc<dyn Overseer>,
|
overseer: Arc<dyn Overseer>,
|
||||||
connection: ConnectionInfo,
|
connection: ConnectionInfo,
|
||||||
recv: Box<dyn Read + Send>,
|
recv: Box<dyn Read + Send>,
|
||||||
) -> Result<Self> {
|
) -> Result<Self> {
|
||||||
Ok(Self {
|
Ok(Self {
|
||||||
handle,
|
handle,
|
||||||
|
out_dir,
|
||||||
overseer,
|
overseer,
|
||||||
connection,
|
connection,
|
||||||
config: Default::default(),
|
config: Default::default(),
|
||||||
@ -118,12 +126,39 @@ impl PipelineRunner {
|
|||||||
|
|
||||||
let mut egress_results = vec![];
|
let mut egress_results = vec![];
|
||||||
for frame in frames {
|
for frame in frames {
|
||||||
self.frame_ctr += 1;
|
|
||||||
|
|
||||||
// Copy frame from GPU if using hwaccel decoding
|
// Copy frame from GPU if using hwaccel decoding
|
||||||
let mut frame = get_frame_from_hw(frame)?;
|
let mut frame = get_frame_from_hw(frame)?;
|
||||||
(*frame).time_base = (*stream).time_base;
|
(*frame).time_base = (*stream).time_base;
|
||||||
|
|
||||||
|
let p = (*stream).codecpar;
|
||||||
|
if (*p).codec_type == AVMediaType::AVMEDIA_TYPE_VIDEO {
|
||||||
|
let pts_sec = ((*frame).pts as f64 * av_q2d((*stream).time_base)).floor() as u64;
|
||||||
|
// write thumbnail every 1min
|
||||||
|
if pts_sec % 60 == 0 && pts_sec != 0 {
|
||||||
|
let dst_pic = PathBuf::from(&self.out_dir)
|
||||||
|
.join(config.id.to_string())
|
||||||
|
.join("thumb.webp");
|
||||||
|
let mut sw = Scaler::new();
|
||||||
|
let mut frame = sw.process_frame(
|
||||||
|
frame,
|
||||||
|
(*frame).width as _,
|
||||||
|
(*frame).height as _,
|
||||||
|
AV_PIX_FMT_YUV420P,
|
||||||
|
)?;
|
||||||
|
Encoder::new(AV_CODEC_ID_WEBP)?
|
||||||
|
.with_height((*frame).height)
|
||||||
|
.with_width((*frame).width)
|
||||||
|
.with_pix_fmt(transmute((*frame).format))
|
||||||
|
.open(None)?
|
||||||
|
.save_picture(frame, dst_pic.to_str().unwrap())?;
|
||||||
|
info!("Saved thumb to: {}", dst_pic.display());
|
||||||
|
av_frame_free(&mut frame);
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: fix this, multiple video streams in
|
||||||
|
self.frame_ctr += 1;
|
||||||
|
}
|
||||||
|
|
||||||
// Get the variants which want this pkt
|
// Get the variants which want this pkt
|
||||||
let pkt_vars = config
|
let pkt_vars = config
|
||||||
.variants
|
.variants
|
||||||
@ -148,11 +183,21 @@ impl PipelineRunner {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
VariantStream::Audio(a) => {
|
VariantStream::Audio(a) => {
|
||||||
if let Some(r) = self.resampler.get_mut(&a.id()) {
|
if let Some((r, f)) = self.resampler.get_mut(&a.id()) {
|
||||||
let frame_size = (*enc.codec_context()).frame_size;
|
let frame_size = (*enc.codec_context()).frame_size;
|
||||||
// TODO: resample audio fifo
|
|
||||||
new_frame = true;
|
new_frame = true;
|
||||||
r.process_frame(frame, frame_size)?
|
let mut resampled_frame = r.process_frame(frame, frame_size)?;
|
||||||
|
if let Some(ret) =
|
||||||
|
f.buffer_frame(resampled_frame, frame_size as usize)?
|
||||||
|
{
|
||||||
|
av_frame_free(&mut resampled_frame);
|
||||||
|
// assume timebase of the encoder
|
||||||
|
//(*ret).time_base = (*enc.codec_context()).time_base;
|
||||||
|
ret
|
||||||
|
} else {
|
||||||
|
av_frame_free(&mut resampled_frame);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
frame
|
frame
|
||||||
}
|
}
|
||||||
@ -163,6 +208,7 @@ impl PipelineRunner {
|
|||||||
// before encoding frame, rescale timestamps
|
// before encoding frame, rescale timestamps
|
||||||
if !frame.is_null() {
|
if !frame.is_null() {
|
||||||
let enc_ctx = enc.codec_context();
|
let enc_ctx = enc.codec_context();
|
||||||
|
(*frame).pict_type = AV_PICTURE_TYPE_NONE;
|
||||||
(*frame).pts =
|
(*frame).pts =
|
||||||
av_rescale_q((*frame).pts, (*frame).time_base, (*enc_ctx).time_base);
|
av_rescale_q((*frame).pts, (*frame).time_base, (*enc_ctx).time_base);
|
||||||
(*frame).pkt_dts =
|
(*frame).pkt_dts =
|
||||||
@@ -288,12 +334,10 @@ impl PipelineRunner {
                 }
                 VariantStream::Audio(a) => {
                     let enc = a.try_into()?;
-                    let rs = Resample::new(
-                        av_get_sample_fmt(cstr!(a.sample_fmt.as_str())),
-                        a.sample_rate as _,
-                        a.channels as _,
-                    );
-                    self.resampler.insert(out_stream.id(), rs);
+                    let fmt = av_get_sample_fmt(cstr!(a.sample_fmt.as_str()));
+                    let rs = Resample::new(fmt, a.sample_rate as _, a.channels as _);
+                    let f = AudioFifo::new(fmt, a.channels as _)?;
+                    self.resampler.insert(out_stream.id(), (rs, f));
                     self.encoders.insert(out_stream.id(), enc);
                 }
                 _ => continue,
@@ -304,27 +348,22 @@ impl PipelineRunner {
 
             // Setup egress
             for e in &cfg.egress {
+                let c = e.config();
+                let encoders = self.encoders.iter().filter_map(|(k, v)| {
+                    if c.variants.contains(k) {
+                        let var = cfg.variants.iter().find(|x| x.id() == *k)?;
+                        Some((var, v))
+                    } else {
+                        None
+                    }
+                });
                 match e {
-                    EgressType::HLS(ref c) => {
-                        let encoders = self.encoders.iter().filter_map(|(k, v)| {
-                            if c.variants.contains(k) {
-                                let var = cfg.variants.iter().find(|x| x.id() == *k)?;
-                                Some((var, v))
-                            } else {
-                                None
-                            }
-                        });
-
-                        let hls = HlsEgress::new(&c.out_dir, 2.0, encoders)?;
+                    EgressType::HLS(_) => {
+                        let hls = HlsEgress::new(&cfg.id, &self.out_dir, 2.0, encoders)?;
                         self.egress.push(Box::new(hls));
                     }
-                    EgressType::Recorder(ref c) => {
-                        let encoders = self
-                            .encoders
-                            .iter()
-                            .filter(|(k, _v)| c.variants.contains(k))
-                            .map(|(_, v)| v);
-                        let rec = RecorderEgress::new(c.clone(), encoders)?;
+                    EgressType::Recorder(_) => {
+                        let rec = RecorderEgress::new(&cfg.id, &self.out_dir, encoders)?;
                         self.egress.push(Box::new(rec));
                     }
                     _ => warn!("{} is not implemented", e),
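
The hoisted encoder selection pairs each chosen encoder with its variant metadata so every egress type can consume the same view. A sketch of that selection with plain std types (u64 ids instead of Uuid, stand-in value types, not the project's actual structs):

    use std::collections::{HashMap, HashSet};

    #[derive(Debug)]
    struct Variant {
        id: u64,
        name: &'static str,
    }

    fn select<'a>(
        encoders: &'a HashMap<u64, String>, // variant id -> encoder (stand-in)
        variants: &'a [Variant],            // all configured variants
        wanted: &'a HashSet<u64>,           // the egress config's variant ids
    ) -> impl Iterator<Item = (&'a Variant, &'a String)> {
        encoders.iter().filter_map(move |(k, v)| {
            if wanted.contains(k) {
                // skip ids that have no matching variant definition
                let var = variants.iter().find(|x| x.id == *k)?;
                Some((var, v))
            } else {
                None
            }
        })
    }

    fn main() {
        let encoders = HashMap::from([(1, "h264".to_string()), (2, "aac".to_string())]);
        let variants = [Variant { id: 1, name: "video" }, Variant { id: 2, name: "audio" }];
        let wanted = HashSet::from([1]);
        for (var, enc) in select(&encoders, &variants, &wanted) {
            println!("{} -> {}", var.name, enc);
        }
    }
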
@@ -12,6 +12,12 @@ pub struct Settings {
     /// Where to store output (static files)
     pub output_dir: String,
 
+    /// Public facing URL that maps to [output_dir]
+    pub public_url: String,
+
+    /// Binding address for http server serving files from [output_dir]
+    pub listen_http: String,
+
     /// Overseer service see [crate::overseer::Overseer] for more info
     pub overseer: OverseerConfig,
 }
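
For context, listen_http and public_url describe the HTTP side of the output directory: bind on listen_http, serve the files, and advertise them under public_url. A minimal sketch of such a static file server, shown with warp purely as an illustration (the values and the wiring are assumptions, not the project's actual server code):

    use std::net::SocketAddr;

    #[tokio::main]
    async fn main() {
        // Assumed values for illustration.
        let addr: SocketAddr = "127.0.0.1:8080".parse().unwrap();
        // Serve everything under the output directory (playlists, segments, thumbnails).
        let files = warp::fs::dir("./out");
        warp::serve(files).run(addr).await;
    }
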
@@ -31,10 +37,16 @@ pub enum OverseerConfig {
     },
     /// NIP-53 service (i.e. zap.stream backend)
     ZapStream {
+        /// MYSQL database connection string
         database: String,
+        /// LND node connection details
         lnd: LndSettings,
+        /// Relays to publish events to
         relays: Vec<String>,
+        /// Nsec to sign nostr events
         nsec: String,
+        /// Blossom servers
+        blossom: Option<Vec<String>>,
     },
 }
 
@@ -1,8 +1,7 @@
-use ffmpeg_rs_raw::ffmpeg_sys_the_third::{av_get_sample_fmt, avcodec_get_name};
-use ffmpeg_rs_raw::{cstr, rstr, Encoder};
+use ffmpeg_rs_raw::ffmpeg_sys_the_third::av_get_sample_fmt;
+use ffmpeg_rs_raw::{cstr, Encoder};
 use serde::{Deserialize, Serialize};
 use std::fmt::{Display, Formatter};
-use std::intrinsics::transmute;
 use uuid::Uuid;
 
 use crate::variant::{StreamMapping, VariantMapping};
@@ -16,8 +15,8 @@ pub struct AudioVariant {
     /// Bitrate of this stream
     pub bitrate: u64,
 
-    /// AVCodecID
-    pub codec: usize,
+    /// Codec name
+    pub codec: String,
 
     /// Number of channels
     pub channels: u16,
@@ -36,7 +35,7 @@ impl Display for AudioVariant {
             "Audio #{}->{}: {}, {}kbps",
             self.mapping.src_index,
             self.mapping.dst_index,
-            unsafe { rstr!(avcodec_get_name(transmute(self.codec as i32))) },
+            self.codec,
             self.bitrate / 1000
         )
     }
@@ -67,8 +66,8 @@ impl TryInto<Encoder> for &AudioVariant {
 
     fn try_into(self) -> Result<Encoder, Self::Error> {
         unsafe {
-            let enc = Encoder::new(transmute(self.codec as u32))?
-                .with_sample_rate(self.sample_rate as _)
+            let enc = Encoder::new_with_name(&self.codec)?
+                .with_sample_rate(self.sample_rate as _)?
                 .with_bitrate(self.bitrate as _)
                 .with_default_channel_layout(self.channels as _)
                 .with_sample_format(av_get_sample_fmt(cstr!(self.sample_fmt.as_bytes())))
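
The `?` that now follows with_sample_rate marks it as a fallible setter in the builder chain. A minimal sketch of that shape with stand-in types (not the ffmpeg-rs-raw API):

    struct Enc {
        sample_rate: u32,
        bitrate: u32,
    }

    impl Enc {
        // Fallible setter: validates, so the chain needs `?` at this step.
        fn with_sample_rate(mut self, rate: u32) -> anyhow::Result<Self> {
            anyhow::ensure!(rate > 0, "invalid sample rate");
            self.sample_rate = rate;
            Ok(self)
        }
        // Infallible setter: chains as before.
        fn with_bitrate(mut self, bitrate: u32) -> Self {
            self.bitrate = bitrate;
            self
        }
    }

    fn main() -> anyhow::Result<()> {
        let enc = Enc { sample_rate: 0, bitrate: 0 }
            .with_sample_rate(48_000)?
            .with_bitrate(192_000);
        println!("{} Hz @ {} bps", enc.sample_rate, enc.bitrate);
        Ok(())
    }
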
@@ -1,7 +1,5 @@
-use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVCodecID::AV_CODEC_ID_H264;
 use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVColorSpace::AVCOL_SPC_BT709;
-use ffmpeg_rs_raw::ffmpeg_sys_the_third::{avcodec_get_name, AVCodecID};
-use ffmpeg_rs_raw::{rstr, Encoder};
+use ffmpeg_rs_raw::Encoder;
 use serde::{Deserialize, Serialize};
 use std::collections::HashMap;
 use std::fmt::{Display, Formatter};
@@ -28,8 +26,8 @@ pub struct VideoVariant {
     /// Bitrate of this stream
     pub bitrate: u64,
 
-    /// AVCodecID
-    pub codec: usize,
+    /// Codec name
+    pub codec: String,
 
     /// Codec profile
     pub profile: usize,
@@ -51,7 +49,7 @@ impl Display for VideoVariant {
             "Video #{}->{}: {}, {}x{}, {}fps, {}kbps",
             self.mapping.src_index,
             self.mapping.dst_index,
-            unsafe { rstr!(avcodec_get_name(transmute(self.codec as i32))) },
+            self.codec,
             self.width,
             self.height,
             self.fps,
@@ -87,18 +85,18 @@ impl TryInto<Encoder> for &VideoVariant {
     fn try_into(self) -> Result<Encoder, Self::Error> {
         unsafe {
             let mut opt = HashMap::new();
-            if self.codec == transmute::<AVCodecID, u32>(AV_CODEC_ID_H264) as usize {
+            if self.codec == "x264" {
                 opt.insert("preset".to_string(), "fast".to_string());
                 //opt.insert("tune".to_string(), "zerolatency".to_string());
             }
-            let enc = Encoder::new(transmute(self.codec as u32))?
+            let enc = Encoder::new_with_name(&self.codec)?
                 .with_bitrate(self.bitrate as _)
                 .with_width(self.width as _)
                 .with_height(self.height as _)
                 .with_pix_fmt(transmute(self.pixel_format))
                 .with_profile(transmute(self.profile as i32))
                 .with_level(transmute(self.level as i32))
-                .with_framerate(self.fps)
+                .with_framerate(self.fps)?
                 .with_options(|ctx| {
                     (*ctx).gop_size = self.keyframe_interval as _;
                     (*ctx).keyint_min = self.keyframe_interval as _;
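
gop_size and keyint_min come from keyframe_interval, expressed in frames. For keyframe-aligned segmenting of the kind HLS needs (segments can only start on a keyframe), that interval is typically the frame rate times the target segment length in seconds; the small arithmetic sketch below is an assumption about intent, not code taken from the crate:

    /// Hypothetical helper: GOP length in frames for a target segment duration.
    fn gop_size(fps: f32, segment_seconds: f32) -> i32 {
        (fps * segment_seconds).round() as i32
    }

    fn main() {
        // 30 fps with 2-second segments -> a keyframe every 60 frames.
        assert_eq!(gop_size(30.0, 2.0), 60);
    }
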
@@ -1,6 +1,7 @@
 use crate::UserStream;
 use anyhow::Result;
 use sqlx::{MySqlPool, Row};
+use uuid::Uuid;
 
 pub struct ZapStreamDb {
     db: MySqlPool,
@@ -20,7 +21,7 @@ impl ZapStreamDb {
     /// Find user by stream key, typical first lookup from ingress
     pub async fn find_user_stream_key(&self, key: &str) -> Result<Option<u64>> {
         #[cfg(feature = "test-pattern")]
        if key == "test" {
             // use the 00 pubkey for test sources
             return Ok(Some(self.upsert_user(&[0; 32]).await?));
         }
@@ -83,4 +84,12 @@ impl ZapStreamDb {
             .map_err(anyhow::Error::new)?;
         Ok(())
     }
+
+    pub async fn get_stream(&self, id: &Uuid) -> Result<UserStream> {
+        Ok(sqlx::query_as("select * from user_stream where id = ?")
+            .bind(id)
+            .fetch_one(&self.db)
+            .await
+            .map_err(anyhow::Error::new)?)
+    }
 }
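
get_stream is a straight sqlx query_as fetch by id. A self-contained sketch of the same shape follows; the row struct and column list are hypothetical (the real UserStream has more fields), and the id is bound as a string here rather than relying on sqlx's uuid support:

    use sqlx::{FromRow, MySqlPool};
    use uuid::Uuid;

    #[derive(Debug, FromRow)]
    struct StreamRow {
        // Assumed columns, for illustration only.
        id: String,
        user_id: u64,
    }

    async fn fetch_stream(db: &MySqlPool, id: &Uuid) -> anyhow::Result<StreamRow> {
        Ok(sqlx::query_as("select id, user_id from user_stream where id = ?")
            // bound as a string to avoid assuming sqlx feature flags
            .bind(id.to_string())
            .fetch_one(db)
            .await?)
    }
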