Refactor test pattern input

kieran 2024-09-02 11:13:21 +01:00
parent 57e3eed69e
commit 65d8964632
Signed by: Kieran
GPG Key ID: DE71CEB3925BE941
13 changed files with 328 additions and 163 deletions

Cargo.lock (generated)

@@ -61,6 +61,55 @@ version = "0.2.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f"
[[package]]
name = "anstream"
version = "0.6.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64e15c1ab1f89faffbf04a634d5e1962e9074f2741eef6d97f3c4e322426d526"
dependencies = [
"anstyle",
"anstyle-parse",
"anstyle-query",
"anstyle-wincon",
"colorchoice",
"is_terminal_polyfill",
"utf8parse",
]
[[package]]
name = "anstyle"
version = "1.0.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1"
[[package]]
name = "anstyle-parse"
version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eb47de1e80c2b463c735db5b217a0ddc39d612e7ac9e2e96a5aed1f57616c1cb"
dependencies = [
"utf8parse",
]
[[package]]
name = "anstyle-query"
version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d36fc52c7f6c869915e99412912f22093507da8d9e942ceaf66fe4b7c14422a"
dependencies = [
"windows-sys 0.52.0",
]
[[package]]
name = "anstyle-wincon"
version = "3.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5bf74e1b6e971609db8ca7a9ce79fd5768ab6ae46441c572e46cf596f59e57f8"
dependencies = [
"anstyle",
"windows-sys 0.52.0",
]
[[package]]
name = "anyhow"
version = "1.0.80"
@@ -248,12 +297,58 @@ dependencies = [
"libloading",
]
[[package]]
name = "clap"
version = "4.5.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed6719fffa43d0d87e5fd8caeab59be1554fb028cd30edc88fc4369b17971019"
dependencies = [
"clap_builder",
"clap_derive",
]
[[package]]
name = "clap_builder"
version = "4.5.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "216aec2b177652e3846684cbfe25c9964d18ec45234f0f5da5157b207ed1aab6"
dependencies = [
"anstream",
"anstyle",
"clap_lex",
"strsim",
]
[[package]]
name = "clap_derive"
version = "4.5.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "501d359d5f3dcaf6ecdeee48833ae73ec6e42723a1e52419c79abf9507eec0a0"
dependencies = [
"heck",
"proc-macro2",
"quote",
"syn 2.0.52",
]
[[package]]
name = "clap_lex"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97"
[[package]]
name = "color_quant"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d7b894f5411737b7867f4827955924d7c254fc9f4d91a6aad6b097804b1018b"
[[package]]
name = "colorchoice"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3fd119d74b830634cea2a0f58bbd0d54540518a14397557951e79340abc28c0"
[[package]]
name = "config"
version = "0.14.0"
@@ -706,6 +801,12 @@ dependencies = [
"http",
]
[[package]]
name = "heck"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
[[package]]
name = "hermit-abi"
version = "0.3.9"
@@ -847,6 +948,12 @@ dependencies = [
"windows-sys 0.52.0",
]
[[package]]
name = "is_terminal_polyfill"
version = "1.70.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf"
[[package]]
name = "itertools"
version = "0.12.1"
@@ -1634,6 +1741,7 @@ dependencies = [
"anyhow",
"async-trait",
"bytes",
"clap",
"config",
"ffmpeg-sys-next",
"fontdue",
@@ -1674,6 +1782,12 @@ dependencies = [
"float-cmp",
]
[[package]]
name = "strsim"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
[[package]]
name = "subtle"
version = "2.5.0"
@@ -2085,6 +2199,12 @@ version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9"
[[package]]
name = "utf8parse"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
[[package]]
name = "uuid"
version = "1.8.0"


@@ -27,3 +27,4 @@ resvg = "0.43.0"
usvg = "0.43.0"
tiny-skia = "0.11.4"
fontdue = "0.9.2"
clap = { version = "4.5.16", features = ["derive"] }


@@ -2,14 +2,12 @@ use std::collections::HashMap;
use std::ptr;
use anyhow::Error;
use ffmpeg_sys_next::{av_frame_alloc, AVCodec, avcodec_alloc_context3, avcodec_find_decoder, avcodec_free_context, avcodec_open2, avcodec_parameters_copy, avcodec_parameters_to_context, avcodec_receive_frame, avcodec_send_packet, AVCodecContext, AVERROR, AVERROR_EOF, AVPacket};
use ffmpeg_sys_next::{av_frame_alloc, AVCodec, avcodec_alloc_context3, avcodec_find_decoder, avcodec_free_context, avcodec_open2, avcodec_parameters_to_context, avcodec_receive_frame, avcodec_send_packet, AVCodecContext, AVERROR, AVERROR_EOF, AVPacket};
use ffmpeg_sys_next::AVPictureType::AV_PICTURE_TYPE_NONE;
use tokio::sync::broadcast;
use tokio::sync::mpsc::UnboundedReceiver;
use crate::encode::set_encoded_pkt_timing;
use crate::pipeline::{AVFrameSource, AVPacketSource, PipelinePayload};
use crate::variant::{VariantStream, VideoVariant};
struct CodecContext {
pub context: *mut AVCodecContext,


@@ -10,7 +10,6 @@ use tokio::sync::mpsc::error::TryRecvError;
use tokio::time::Instant;
use crate::demux::info::{DemuxStreamInfo, StreamChannelType, StreamInfoChannel};
use crate::encode::set_encoded_pkt_timing;
use crate::pipeline::{AVPacketSource, PipelinePayload};
use crate::utils::get_ffmpeg_error_msg;


@@ -8,7 +8,6 @@ use libc::EAGAIN;
use log::info;
use tokio::sync::mpsc::UnboundedSender;
use crate::encode::set_encoded_pkt_timing;
use crate::ipc::Rx;
use crate::pipeline::{AVFrameSource, AVPacketSource, PipelinePayload, PipelineProcessor};
use crate::utils::get_ffmpeg_error_msg;


@@ -2,10 +2,13 @@ use std::mem::transmute;
use std::ptr;
use anyhow::Error;
use ffmpeg_sys_next::{av_packet_alloc, av_packet_free, av_packet_rescale_ts, AVCodec, avcodec_alloc_context3, avcodec_find_encoder, avcodec_open2, avcodec_receive_packet, avcodec_send_frame, AVCodecContext, AVERROR, AVFrame, AVRational};
use ffmpeg_sys_next::{
av_packet_alloc, av_packet_free, av_packet_rescale_ts, AVCodec,
avcodec_alloc_context3, avcodec_find_encoder, avcodec_open2, avcodec_receive_packet, avcodec_send_frame,
AVCodecContext, AVERROR, AVFrame, AVRational,
};
use libc::EAGAIN;
use tokio::sync::mpsc::UnboundedSender;
use crate::encode::dump_pkt_info;
use crate::ipc::Rx;
use crate::pipeline::{AVFrameSource, AVPacketSource, PipelinePayload, PipelineProcessor};
@@ -122,14 +125,20 @@ where
while let Ok(pkg) = self.chan_in.try_recv_next() {
match pkg {
PipelinePayload::AvFrame(frm, ref src) => unsafe {
let in_stream = match src {
AVFrameSource::Decoder(s) => *s,
let (in_stream, idx) = match src {
AVFrameSource::Decoder(s) => (*s, (*(*s)).index as usize),
AVFrameSource::None(s) => (ptr::null_mut(), *s),
_ => {
return Err(Error::msg(format!("Cannot process frame from: {:?}", src)))
}
};
if self.variant.src_index == (*in_stream).index as usize {
self.process_frame(frm, &(*in_stream).time_base)?;
if self.variant.src_index == idx {
let tb = if in_stream.is_null() {
self.variant.time_base()
} else {
(*in_stream).time_base
};
self.process_frame(frm, &tb)?;
}
},
PipelinePayload::Flush => unsafe {


@@ -1,26 +1,27 @@
use std::{ptr, slice};
use std::mem::transmute;
use std::slice;
use std::ops::Add;
use std::time::{Duration, SystemTime};
use std::time::{Duration, Instant};
use ffmpeg_sys_next::{
av_frame_alloc, av_frame_copy_props, av_frame_free, av_frame_get_buffer, av_packet_alloc,
av_packet_free, AV_PROFILE_H264_MAIN, av_q2d, avcodec_alloc_context3, avcodec_find_encoder,
avcodec_open2, avcodec_receive_packet, avcodec_send_frame, AVERROR, AVRational,
EAGAIN, SWS_BILINEAR, sws_getContext, sws_scale_frame,
av_frame_alloc, av_frame_get_buffer, AV_PROFILE_H264_MAIN,
};
use ffmpeg_sys_next::AVCodecID::AV_CODEC_ID_H264;
use ffmpeg_sys_next::AVColorSpace::{AVCOL_SPC_BT709, AVCOL_SPC_RGB};
use ffmpeg_sys_next::AVColorSpace::AVCOL_SPC_RGB;
use ffmpeg_sys_next::AVPictureType::AV_PICTURE_TYPE_NONE;
use ffmpeg_sys_next::AVPixelFormat::{AV_PIX_FMT_RGBA, AV_PIX_FMT_YUV420P};
use fontdue::layout::{CoordinateSystem, Layout, TextStyle};
use libc::memcpy;
use log::{error, info};
use log::{error, info, warn};
use tokio::runtime::Runtime;
use tokio::sync::mpsc::unbounded_channel;
use uuid::Uuid;
use crate::encode::video::VideoEncoder;
use crate::ingress::ConnectionInfo;
use crate::pipeline::{AVFrameSource, PipelinePayload, PipelineProcessor};
use crate::pipeline::builder::PipelineBuilder;
use crate::scale::Scaler;
use crate::variant::VideoVariant;
pub async fn listen(builder: PipelineBuilder) -> Result<(), anyhow::Error> {
info!("Test pattern enabled");
@@ -44,81 +45,86 @@ pub async fn listen(builder: PipelineBuilder) -> Result<(), anyhow::Error> {
break;
}
});
unsafe {
let codec = avcodec_find_encoder(AV_CODEC_ID_H264);
let enc_ctx = avcodec_alloc_context3(codec);
(*enc_ctx).width = WIDTH;
(*enc_ctx).height = HEIGHT;
(*enc_ctx).pix_fmt = AV_PIX_FMT_YUV420P;
(*enc_ctx).colorspace = AVCOL_SPC_BT709;
(*enc_ctx).bit_rate = 1_000_000;
(*enc_ctx).framerate = AVRational { num: FPS, den: 1 };
(*enc_ctx).gop_size = 30;
(*enc_ctx).level = 40;
(*enc_ctx).profile = AV_PROFILE_H264_MAIN;
(*enc_ctx).time_base = AVRational { num: 1, den: FPS };
(*enc_ctx).pkt_timebase = (*enc_ctx).time_base;
let (frame_in, frames_in_rx) = unbounded_channel();
let (sws_tx, sws_rx) = unbounded_channel();
let (frames_out_tx, mut frames_out) = unbounded_channel();
let var = VideoVariant {
id: Uuid::new_v4(),
src_index: 0,
dst_index: 0,
width: WIDTH as u16,
height: HEIGHT as u16,
fps: FPS as u16,
bitrate: 2_000_000,
codec: AV_CODEC_ID_H264 as usize,
profile: AV_PROFILE_H264_MAIN as usize,
level: 40,
keyframe_interval: 2,
pixel_format: AV_PIX_FMT_YUV420P as u32,
};
let mut sws = Scaler::new(frames_in_rx, sws_tx, var.clone());
let mut enc = VideoEncoder::new(sws_rx, frames_out_tx, var.clone());
avcodec_open2(enc_ctx, codec, ptr::null_mut());
let svg_data = std::fs::read("./test.svg").unwrap();
let tree = usvg::Tree::from_data(&svg_data, &Default::default()).unwrap();
let mut pixmap = tiny_skia::Pixmap::new(WIDTH as u32, HEIGHT as u32).unwrap();
let render_ts = tiny_skia::Transform::from_scale(1f32, 1f32);
resvg::render(&tree, render_ts, &mut pixmap.as_mut());
let src_frame = av_frame_alloc();
(*src_frame).width = WIDTH;
(*src_frame).height = HEIGHT;
(*src_frame).pict_type = AV_PICTURE_TYPE_NONE;
(*src_frame).key_frame = 1;
(*src_frame).colorspace = AVCOL_SPC_RGB;
(*src_frame).format = AV_PIX_FMT_RGBA as libc::c_int;
(*src_frame).time_base = (*enc_ctx).time_base;
av_frame_get_buffer(src_frame, 1);
let font = include_bytes!("../../SourceCodePro-Regular.ttf") as &[u8];
let scp = fontdue::Font::from_bytes(font, Default::default()).unwrap();
let mut layout = Layout::new(CoordinateSystem::PositiveYDown);
let fonts = &[&scp];
let sws = sws_getContext(
WIDTH as libc::c_int,
HEIGHT as libc::c_int,
transmute((*src_frame).format),
WIDTH as libc::c_int,
HEIGHT as libc::c_int,
(*enc_ctx).pix_fmt,
SWS_BILINEAR,
ptr::null_mut(),
ptr::null_mut(),
ptr::null_mut(),
);
let svg_data = std::fs::read("./test.svg").unwrap();
let tree = usvg::Tree::from_data(&svg_data, &Default::default()).unwrap();
let mut pixmap = tiny_skia::Pixmap::new(WIDTH as u32, HEIGHT as u32).unwrap();
let render_ts = tiny_skia::Transform::from_scale(1f32, 1f32);
resvg::render(&tree, render_ts, &mut pixmap.as_mut());
let start = Instant::now();
let mut frame_number: u64 = 0;
loop {
let stream_time = Duration::from_secs_f64(frame_number as f64 / FPS as f64);
let real_time = Instant::now().duration_since(start);
let wait_time = if stream_time > real_time {
stream_time - real_time
} else {
Duration::new(0, 0)
};
if !wait_time.is_zero() {
std::thread::sleep(wait_time);
}
let font = include_bytes!("../../SourceCodePro-Regular.ttf") as &[u8];
let scp = fontdue::Font::from_bytes(font, Default::default()).unwrap();
let mut layout = Layout::new(CoordinateSystem::PositiveYDown);
let fonts = &[&scp];
frame_number += 1;
let start = SystemTime::now();
let mut frame_number: u64 = 0;
loop {
frame_number += 1;
let src_frame = unsafe {
let src_frame = av_frame_alloc();
(*src_frame).width = WIDTH;
(*src_frame).height = HEIGHT;
(*src_frame).pict_type = AV_PICTURE_TYPE_NONE;
(*src_frame).key_frame = 1;
(*src_frame).colorspace = AVCOL_SPC_RGB;
(*src_frame).format = AV_PIX_FMT_RGBA as libc::c_int;
(*src_frame).pts = frame_number as i64;
(*src_frame).duration = 1;
av_frame_get_buffer(src_frame, 0);
memcpy(
(*src_frame).data[0] as *mut libc::c_void,
pixmap.data().as_ptr() as *const libc::c_void,
(WIDTH * HEIGHT * 4) as libc::size_t,
);
layout.clear();
layout.append(
fonts,
&TextStyle::new(&format!("frame={}", frame_number), 40.0, 0),
);
for g in layout.glyphs() {
let (metrics, bitmap) = scp.rasterize_config_subpixel(g.key);
for y in 0..metrics.height {
for x in 0..metrics.width {
let dst_x = x + g.x as usize;
let dst_y = y + g.y as usize;
let offset_src = (x + y * metrics.width) * 3;
src_frame
};
layout.clear();
layout.append(
fonts,
&TextStyle::new(&format!("frame={}", frame_number), 40.0, 0),
);
for g in layout.glyphs() {
let (metrics, bitmap) = scp.rasterize_config_subpixel(g.key);
for y in 0..metrics.height {
for x in 0..metrics.width {
let dst_x = x + g.x as usize;
let dst_y = y + g.y as usize;
let offset_src = (x + y * metrics.width) * 3;
unsafe {
let offset_dst =
4 * dst_x + dst_y * (*src_frame).linesize[0] as usize;
let pixel_dst = (*src_frame).data[0].add(offset_dst);
@@ -128,49 +134,47 @@ pub async fn listen(builder: PipelineBuilder) -> Result<(), anyhow::Error> {
}
}
}
}
let mut dst_frame = av_frame_alloc();
av_frame_copy_props(dst_frame, src_frame);
sws_scale_frame(sws, dst_frame, src_frame);
// scale/encode
if let Err(e) =
frame_in.send(PipelinePayload::AvFrame(src_frame, AVFrameSource::None(0)))
{
error!("Failed to send frames to encoder: {}", e);
pipeline.join().unwrap();
return;
}
if let Err(e) = sws.process() {
error!("Failed to scale frame: {}", e);
pipeline.join().unwrap();
return;
}
match enc.process() {
Ok(_) => {
while let Ok(p) = frames_out.try_recv() {
match p {
PipelinePayload::AvPacket(pkt, _) => unsafe {
let buf = bytes::Bytes::from(slice::from_raw_parts(
(*pkt).data,
(*pkt).size as usize,
));
if let Err(e) = tx.send(buf) {
error!("Failed to send test pkt: {}", e);
// encode
let mut ret = avcodec_send_frame(enc_ctx, dst_frame);
av_frame_free(&mut dst_frame);
while ret > 0 || ret == AVERROR(libc::EAGAIN) {
let mut av_pkt = av_packet_alloc();
ret = avcodec_receive_packet(enc_ctx, av_pkt);
if ret != 0 {
if ret == AVERROR(EAGAIN) {
av_packet_free(&mut av_pkt);
break;
pipeline.join().unwrap();
return;
}
},
_ => {
warn!("Unknown payload from encoder: {:?}", p);
}
}
error!("Encoder failed: {}", ret);
break;
}
let buf = bytes::Bytes::from(slice::from_raw_parts(
(*av_pkt).data,
(*av_pkt).size as usize,
));
if let Err(e) = tx.send(buf) {
error!("Failed to send test pkt: {}", e);
pipeline.join().unwrap();
return ;
}
}
let stream_time = Duration::from_secs_f64(
frame_number as libc::c_double * av_q2d((*enc_ctx).time_base),
);
let real_time = SystemTime::now().duration_since(start).unwrap();
let wait_time = if stream_time > real_time {
stream_time - real_time
} else {
Duration::new(0, 0)
};
if !wait_time.is_zero() {
std::thread::sleep(wait_time);
Err(e) => {
error!("Failed to encode: {}", e);
pipeline.join().unwrap();
return;
}
}
}
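
The rewritten loop above paces itself by comparing the stream clock against the wall clock instead of sleeping a fixed amount per frame. A minimal standalone sketch of that rule (the frame counter and FPS value stand in for the constants used in the file):

```rust
use std::time::Duration;

/// Sketch of the pacing rule in the test-pattern loop: sleep only while the
/// stream clock (frame_number / fps) is ahead of the wall clock, so a slow
/// frame is never "compensated" with extra delay later.
fn pacing_delay(frame_number: u64, fps: u32, elapsed: Duration) -> Duration {
    let stream_time = Duration::from_secs_f64(frame_number as f64 / fps as f64);
    stream_time.saturating_sub(elapsed)
}

// e.g. at 30 fps, frame 90 is due at t = 3.0 s; with 2.9 s of wall time
// elapsed this returns 100 ms, and zero once the loop is running behind.
```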


@@ -1,5 +1,6 @@
use std::ffi::CStr;
use clap::Parser;
use config::Config;
use log::{error, info};
use url::Url;
@@ -24,11 +25,24 @@ mod utils;
mod variant;
mod webhook;
#[derive(Parser, Debug)]
struct Args {
/// Add file input at startup
#[arg(long)]
file: Option<String>,
/// Add input test pattern at startup
#[arg(long)]
test_pattern: bool,
}
/// Test: ffmpeg -re -f lavfi -i testsrc -g 2 -r 30 -pix_fmt yuv420p -s 1280x720 -c:v h264 -b:v 2000k -f mpegts srt://localhost:3333
#[tokio::main]
async fn main() -> anyhow::Result<()> {
pretty_env_logger::init();
let args = Args::parse();
unsafe {
//ffmpeg_sys_next::av_log_set_level(ffmpeg_sys_next::AV_LOG_DEBUG);
info!(
@@ -64,11 +78,16 @@ async fn main() -> anyhow::Result<()> {
"0.0.0.0:8080".to_owned(),
settings.clone(),
)));
/*listeners.push(tokio::spawn(ingress::file::listen(
"/home/kieran/waypoint_flight.mp4".parse().unwrap(),
builder.clone(),
)));*/
listeners.push(tokio::spawn(ingress::test::listen(builder.clone())));
if let Some(p) = args.file {
listeners.push(tokio::spawn(ingress::file::listen(
p.parse().unwrap(),
builder.clone(),
)));
}
if args.test_pattern {
listeners.push(tokio::spawn(ingress::test::listen(builder.clone())));
}
for handle in listeners {
if let Err(e) = handle.await {
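
The derive-based Args struct above maps each field to a long flag, so test_pattern becomes --test-pattern. A small self-contained sketch of that mapping, parsing from an explicit argument list (the leading program name and the file path are placeholders that only serve this example):

```rust
use clap::Parser;

#[derive(Parser, Debug)]
struct Args {
    /// Add file input at startup
    #[arg(long)]
    file: Option<String>,
    /// Add input test pattern at startup
    #[arg(long)]
    test_pattern: bool,
}

fn main() {
    // clap's derive turns the snake_case field into the --test-pattern flag.
    let args = Args::parse_from(["core", "--test-pattern", "--file", "input.mp4"]);
    assert!(args.test_pattern);
    assert_eq!(args.file.as_deref(), Some("input.mp4"));
}
```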


@@ -84,6 +84,8 @@ pub enum AVFrameSource {
Scaler(*mut AVStream),
/// Flush frame (empty)
Flush,
/// No context provided, dst_stream manually matched
None(usize),
}
#[derive(Debug, PartialEq)]
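
The new None(usize) variant lets a producer without an AVStream (such as the test-pattern input above) tag frames with just an index; the encoder change earlier in this commit then falls back to the variant's own time base instead of dereferencing a stream. A condensed sketch of that dispatch, with the FFI types replaced by stand-ins so it compiles on its own:

```rust
/// Stand-in for AVRational so the sketch is self-contained.
#[derive(Clone, Copy)]
struct Rational { num: i32, den: i32 }

enum FrameSource {
    /// Frame came from a decoder: the stream supplies index and time base.
    Decoder { index: usize, time_base: Rational },
    /// No stream context: only the destination index is known.
    None(usize),
}

/// Mirrors the encoder change in this commit: use the stream's time base when
/// one exists, otherwise fall back to the variant's own time base.
fn pick_time_base(src: &FrameSource, variant_tb: Rational) -> (usize, Rational) {
    match src {
        FrameSource::Decoder { index, time_base } => (*index, *time_base),
        FrameSource::None(idx) => (*idx, variant_tb),
    }
}

fn main() {
    let tb = Rational { num: 1, den: 30 };
    let (idx, used) = pick_time_base(&FrameSource::None(0), tb);
    assert_eq!(idx, 0);
    assert_eq!((used.num, used.den), (1, 30));
}
```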


@@ -1,4 +1,4 @@
use crate::tag_frame::TagFrame;
use std::ops::Sub;
use std::time::Instant;
use anyhow::Error;
@@ -16,6 +16,7 @@ use crate::encode::audio::AudioEncoder;
use crate::encode::video::VideoEncoder;
use crate::pipeline::{EgressType, PipelineConfig, PipelinePayload, PipelineProcessor};
use crate::scale::Scaler;
use crate::tag_frame::TagFrame;
use crate::variant::VariantStream;
use crate::webhook::Webhook;
@@ -60,30 +61,11 @@ impl PipelineRunner {
}
pub fn run(&mut self) -> Result<(), Error> {
/*if let Some(info) = &self.stream_info {
if let Some(v_stream) = info
.channels
.iter()
.find(|s| s.channel_type == StreamChannelType::Video)
{
let duration = self.frame_no as f64 / v_stream.fps as f64;
let target_time = self.started.add(Duration::from_secs_f64(duration));
let now = Instant::now();
if now < target_time {
let poll_sleep = target_time - now;
std::thread::sleep(poll_sleep);
}
}
}*/
if let Some(cfg) = self.demuxer.process()? {
self.configure_pipeline(cfg)?;
}
let frames = self.decoder.process()?;
if let Some(v) = self.frame_no.checked_add(frames as u64) {
self.frame_no = v;
} else {
panic!("Frame number overflowed, maybe you need a bigger number!");
}
self.frame_no += frames as u64;
// (scalar)-encoder chains
for sw in &mut self.encoders {
@@ -95,6 +77,13 @@ impl PipelineRunner {
for eg in &mut self.egress {
eg.process()?;
}
let elapsed = Instant::now().sub(self.started).as_secs_f32();
if elapsed >= 2f32 {
info!("Average fps: {:.2}", self.frame_no as f32 / elapsed);
self.started = Instant::now();
self.frame_no = 0;
}
Ok(())
}
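
With the old pacing block removed, the runner now only reports throughput: every two seconds it logs the average fps over that window and resets the counter. A tiny sketch of the same bookkeeping in isolation:

```rust
use std::time::Instant;

/// Rolling window matching the logic added to PipelineRunner::run above.
struct FpsWindow {
    started: Instant,
    frames: u64,
}

impl FpsWindow {
    fn tick(&mut self, new_frames: u64) -> Option<f32> {
        self.frames += new_frames;
        let elapsed = self.started.elapsed().as_secs_f32();
        if elapsed >= 2.0 {
            let avg = self.frames as f32 / elapsed; // e.g. 60 frames / 2.0 s = 30.00
            self.started = Instant::now();
            self.frames = 0;
            Some(avg)
        } else {
            None
        }
    }
}
```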


@@ -1,30 +1,32 @@
use std::ffi::CStr;
use std::mem::transmute;
use std::ptr;
use anyhow::Error;
use ffmpeg_sys_next::{
av_frame_alloc, av_frame_copy_props, AVFrame, SWS_BILINEAR, sws_freeContext, sws_getContext,
sws_scale_frame, SwsContext,
av_frame_alloc, av_frame_copy_props, av_get_pix_fmt_name, AVFrame,
SWS_BILINEAR, sws_freeContext, sws_getContext, sws_scale_frame, SwsContext,
};
use tokio::sync::broadcast;
use log::info;
use tokio::sync::mpsc::UnboundedSender;
use crate::ipc::Rx;
use crate::pipeline::{AVFrameSource, PipelinePayload, PipelineProcessor};
use crate::utils::get_ffmpeg_error_msg;
use crate::variant::VideoVariant;
pub struct Scaler {
pub struct Scaler<T> {
variant: VideoVariant,
ctx: *mut SwsContext,
chan_in: broadcast::Receiver<PipelinePayload>,
chan_in: T,
chan_out: UnboundedSender<PipelinePayload>,
}
unsafe impl Send for Scaler {}
unsafe impl<TRecv> Send for Scaler<TRecv> {}
unsafe impl Sync for Scaler {}
unsafe impl<TRecv> Sync for Scaler<TRecv> {}
impl Drop for Scaler {
impl<TRecv> Drop for Scaler<TRecv> {
fn drop(&mut self) {
unsafe {
sws_freeContext(self.ctx);
@@ -33,9 +35,12 @@ impl Drop for Scaler {
}
}
impl Scaler {
impl<TRecv> Scaler<TRecv>
where
TRecv: Rx<PipelinePayload>,
{
pub fn new(
chan_in: broadcast::Receiver<PipelinePayload>,
chan_in: TRecv,
chan_out: UnboundedSender<PipelinePayload>,
variant: VideoVariant,
) -> Self {
@@ -52,8 +57,6 @@ impl Scaler {
frame: *mut AVFrame,
src: &AVFrameSource,
) -> Result<(), Error> {
let dst_fmt = transmute((*frame).format);
if self.ctx.is_null() {
let ctx = sws_getContext(
(*frame).width,
@@ -61,7 +64,7 @@ impl Scaler {
transmute((*frame).format),
self.variant.width as libc::c_int,
self.variant.height as libc::c_int,
dst_fmt,
transmute(self.variant.pixel_format),
SWS_BILINEAR,
ptr::null_mut(),
ptr::null_mut(),
@@ -70,6 +73,19 @@ impl Scaler {
if ctx.is_null() {
return Err(Error::msg("Failed to create scalar context"));
}
info!(
"Scalar config: {}x{}@{} => {}x{}@{}",
(*frame).width,
(*frame).height,
CStr::from_ptr(av_get_pix_fmt_name(transmute((*frame).format)))
.to_str()
.unwrap(),
self.variant.width,
self.variant.height,
CStr::from_ptr(av_get_pix_fmt_name(transmute(self.variant.pixel_format)))
.to_str()
.unwrap()
);
self.ctx = ctx;
}
@@ -90,18 +106,22 @@ impl Scaler {
}
}
impl PipelineProcessor for Scaler {
impl<TRecv> PipelineProcessor for Scaler<TRecv>
where
TRecv: Rx<PipelinePayload>,
{
fn process(&mut self) -> Result<(), Error> {
while let Ok(pkg) = self.chan_in.try_recv() {
while let Ok(pkg) = self.chan_in.try_recv_next() {
match pkg {
PipelinePayload::AvFrame(frm, ref src) => unsafe {
let idx = match src {
AVFrameSource::Decoder(s) => (**s).index,
AVFrameSource::Decoder(s) => (**s).index as usize,
AVFrameSource::None(s) => *s,
_ => {
return Err(Error::msg(format!("Cannot process frame from: {:?}", src)))
}
};
if self.variant.src_index == idx as usize {
if self.variant.src_index == idx {
self.process_frame(frm, src)?;
}
},
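
Making Scaler generic over its input via the crate's Rx trait is what lets the test-pattern path feed it from a plain mpsc channel (Scaler::new(frames_in_rx, ...)) while other paths keep their existing receivers. The trait itself is not shown in this diff; a plausible minimal shape, inferred from the try_recv_next() calls (the real error type is unknown, so a unit error stands in):

```rust
use tokio::sync::{broadcast, mpsc};

/// Assumed shape of crate::ipc::Rx, reconstructed from how this diff uses it.
pub trait Rx<T> {
    fn try_recv_next(&mut self) -> Result<T, ()>;
}

impl<T: Clone> Rx<T> for broadcast::Receiver<T> {
    fn try_recv_next(&mut self) -> Result<T, ()> {
        self.try_recv().map_err(|_| ())
    }
}

impl<T> Rx<T> for mpsc::UnboundedReceiver<T> {
    fn try_recv_next(&mut self) -> Result<T, ()> {
        self.try_recv().map_err(|_| ())
    }
}
```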


@@ -125,6 +125,9 @@ pub struct VideoVariant {
/// Keyframe interval in seconds
pub keyframe_interval: u16,
/// Pixel Format
pub pixel_format: u32,
}
impl Display for VideoVariant {


@@ -1,3 +1,4 @@
use ffmpeg_sys_next::AVPixelFormat::AV_PIX_FMT_YUV420P;
use uuid::Uuid;
use crate::demux::info::{DemuxStreamInfo, StreamChannelType};
@@ -40,6 +41,7 @@ impl Webhook {
profile: 100,
level: 51,
keyframe_interval: 2,
pixel_format: AV_PIX_FMT_YUV420P as u32
}));
}