Mirror of https://github.com/v0l/zap-stream-core.git
Synced 2025-06-22 14:28:05 +00:00

Compare commits: 09577cc2c8...main (27 commits)

Commits (SHA1):
6c45eece0f
add82b6933
e6bddcf641
b1ebf75244
aea0feef05
64100947b5
166a8bdde9
02e4725043
686cd7f794
6eb7ff9807
68fad98000
5c2a58ed46
2c3ef01d45
ea33f72069
e91c40806f
77eff603d0
e056e0427f
a046dc5801
4787ecd2b4
e7e1f0299d
338d351727
047b3fec59
fee5e77407
d88f829645
ca70bf964c
cc973f0d9b
a7ff18b34c
Cargo.lock (generated): 133 changed lines
@@ -182,7 +182,7 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.103",
 ]
 
 [[package]]
@@ -193,7 +193,7 @@ checksum = "3f934833b4b7233644e5848f235df3f57ed8c80f1528a26c3dfa13d2147fa056"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.103",
 ]
 
 [[package]]
@@ -365,7 +365,7 @@ dependencies = [
  "regex",
  "rustc-hash",
  "shlex",
- "syn 2.0.96",
+ "syn 2.0.103",
 ]
 
 [[package]]
@@ -674,7 +674,7 @@ dependencies = [
  "heck",
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.103",
 ]
 
 [[package]]
@@ -906,7 +906,7 @@ dependencies = [
  "proc-macro2",
  "quote",
  "rustc_version",
- "syn 2.0.96",
+ "syn 2.0.103",
 ]
 
 [[package]]
@@ -938,7 +938,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.103",
 ]
 
 [[package]]
@@ -974,6 +974,16 @@ dependencies = [
  "cfg-if",
 ]
 
+[[package]]
+name = "env_filter"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "186e05a59d4c50738528153b83b0b0194d3a29507dfec16eccd4b342903397d0"
+dependencies = [
+ "log 0.4.25",
+ "regex",
+]
+
 [[package]]
 name = "env_logger"
 version = "0.10.2"
@@ -987,6 +997,19 @@ dependencies = [
  "termcolor",
 ]
 
+[[package]]
+name = "env_logger"
+version = "0.11.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "13c863f0904021b108aa8b2f55046443e6b1ebde8fd4a15c399893aae4fa069f"
+dependencies = [
+ "anstream",
+ "anstyle",
+ "env_filter",
+ "jiff",
+ "log 0.4.25",
+]
+
 [[package]]
 name = "equivalent"
 version = "1.0.1"
@@ -1282,7 +1305,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.103",
 ]
 
 [[package]]
@@ -1612,6 +1635,12 @@ dependencies = [
  "pin-project-lite",
 ]
 
+[[package]]
+name = "http-range-header"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9171a2ea8a68358193d15dd5d70c1c10a2afc3e7e4c5bc92bc9f025cebd7359c"
+
 [[package]]
 name = "httparse"
 version = "1.10.0"
@@ -1891,7 +1920,7 @@ checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.103",
 ]
 
 [[package]]
@@ -2020,6 +2049,30 @@ version = "1.0.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674"
 
+[[package]]
+name = "jiff"
+version = "0.2.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "be1f93b8b1eb69c77f24bbb0afdf66f54b632ee39af40ca21c4365a1d7347e49"
+dependencies = [
+ "jiff-static",
+ "log 0.4.25",
+ "portable-atomic",
+ "portable-atomic-util",
+ "serde",
+]
+
+[[package]]
+name = "jiff-static"
+version = "0.2.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "03343451ff899767262ec32146f6d559dd759fdadf42ff0e227c7c48f72594b4"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.103",
+]
+
 [[package]]
 name = "js-sys"
 version = "0.3.77"
@@ -2147,8 +2200,7 @@ checksum = "04cbf5b083de1c7e0222a7a51dbfdba1cbe1c6ab0b15e29fff3f6c077fd9cd9f"
 [[package]]
 name = "m3u8-rs"
 version = "6.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f03cd3335fb5f2447755d45cda9c70f76013626a9db44374973791b0926a86c3"
+source = "git+https://git.v0l.io/Kieran/m3u8-rs.git?rev=6803eefca2838a8bfae9e19fd516ef36d7d89997#6803eefca2838a8bfae9e19fd516ef36d7d89997"
 dependencies = [
  "chrono",
  "nom",
@@ -2438,7 +2490,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.103",
 ]
 
 [[package]]
@@ -2571,7 +2623,7 @@ dependencies = [
  "pest_meta",
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.103",
 ]
 
 [[package]]
@@ -2618,7 +2670,7 @@ checksum = "d56a66c0c55993aa927429d0f8a0abfd74f084e4d9c192cffed01e418d83eefb"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.103",
 ]
 
 [[package]]
@@ -2690,6 +2742,15 @@ version = "1.10.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "280dc24453071f1b63954171985a0b0d30058d287960968b9b2aca264c8d4ee6"
 
+[[package]]
+name = "portable-atomic-util"
+version = "0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d8a2f0d8d040d7848a709caf78912debcc3f33ee4b3cac47d73d1e1069e83507"
+dependencies = [
+ "portable-atomic",
+]
+
 [[package]]
 name = "ppv-lite86"
 version = "0.2.20"
@@ -2705,7 +2766,7 @@ version = "0.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "865724d4dbe39d9f3dd3b52b88d859d66bcb2d6a0acfd5ea68a65fb66d4bdc1c"
 dependencies = [
- "env_logger",
+ "env_logger 0.10.2",
  "log 0.4.25",
 ]
 
@@ -2716,7 +2777,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6924ced06e1f7dfe3fa48d57b9f74f55d8915f5036121bef647ef4b204895fac"
 dependencies = [
  "proc-macro2",
- "syn 2.0.96",
+ "syn 2.0.103",
 ]
 
 [[package]]
@@ -2755,7 +2816,7 @@ dependencies = [
  "prost",
  "prost-types",
  "regex",
- "syn 2.0.96",
+ "syn 2.0.103",
  "tempfile",
 ]
 
@@ -2769,7 +2830,7 @@ dependencies = [
  "itertools 0.12.1",
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.103",
 ]
 
 [[package]]
@@ -3330,7 +3391,7 @@ checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.103",
 ]
 
 [[package]]
@@ -3578,7 +3639,7 @@ dependencies = [
  "quote",
  "sqlx-core",
  "sqlx-macros-core",
- "syn 2.0.96",
+ "syn 2.0.103",
 ]
 
 [[package]]
@@ -3601,7 +3662,7 @@ dependencies = [
  "sqlx-mysql",
  "sqlx-postgres",
  "sqlx-sqlite",
- "syn 2.0.96",
+ "syn 2.0.103",
  "tempfile",
  "tokio",
  "url",
@@ -3826,9 +3887,9 @@ dependencies = [
 
 [[package]]
 name = "syn"
-version = "2.0.96"
+version = "2.0.103"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d5d0adab1ae378d7f53bdebc67a39f1f151407ef230f0ce2883572f5d8985c80"
+checksum = "e4307e30089d6fd6aff212f2da3a1f9e32f3223b1f010fb09b7c95f90f3ca1e8"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -3870,7 +3931,7 @@ checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.103",
 ]
 
 [[package]]
@@ -3949,7 +4010,7 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.103",
 ]
 
 [[package]]
@@ -3960,7 +4021,7 @@ checksum = "26afc1baea8a989337eeb52b6e72a039780ce45c3edfcc9c5b9d112feeb173c2"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.103",
 ]
 
 [[package]]
@@ -4059,7 +4120,7 @@ checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.103",
 ]
 
 [[package]]
@@ -4233,7 +4294,7 @@ dependencies = [
  "proc-macro2",
  "prost-build",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.103",
 ]
 
 [[package]]
@@ -4303,7 +4364,7 @@ checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.103",
 ]
 
 [[package]]
@@ -4590,7 +4651,7 @@ dependencies = [
  "log 0.4.25",
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.103",
  "wasm-bindgen-shared",
 ]
 
@@ -4625,7 +4686,7 @@ checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.103",
  "wasm-bindgen-backend",
  "wasm-bindgen-shared",
 ]
@@ -4988,7 +5049,7 @@ checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.103",
  "synstructure 0.13.1",
 ]
 
@@ -5008,6 +5069,7 @@ dependencies = [
  "futures-util",
  "hex",
  "http-body-util",
+ "http-range-header",
  "hyper 1.6.0",
  "hyper-util",
  "log 0.4.25",
@@ -5035,8 +5097,10 @@ dependencies = [
  "async-trait",
  "bytes",
  "data-encoding",
+ "env_logger 0.11.8",
  "ffmpeg-rs-raw",
  "fontdue",
+ "futures",
  "futures-util",
  "hex",
  "itertools 0.14.0",
@@ -5049,6 +5113,7 @@ dependencies = [
  "serde",
  "sha2 0.10.8",
  "srt-tokio",
+ "tempfile",
  "tiny-skia",
  "tokio",
  "usvg",
@@ -5084,7 +5149,7 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.103",
 ]
 
 [[package]]
@@ -5104,7 +5169,7 @@ checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.103",
  "synstructure 0.13.1",
 ]
 
@@ -5133,7 +5198,7 @@ checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.96",
+ "syn 2.0.103",
 ]
 
 [[package]]
@@ -24,6 +24,6 @@ url = "2.5.0"
 itertools = "0.14.0"
 chrono = { version = "^0.4.38", features = ["serde"] }
 hex = "0.4.3"
-m3u8-rs = "6.0.0"
+m3u8-rs = { git = "https://git.v0l.io/Kieran/m3u8-rs.git", rev = "6803eefca2838a8bfae9e19fd516ef36d7d89997" }
 sha2 = "0.10.8"
 data-encoding = "2.9.0"
@@ -37,4 +37,9 @@ srt-tokio = { version = "0.4.4", optional = true }
 rml_rtmp = { version = "0.8.0", optional = true }
 bytes = "1.9.0"
 xflv = "0.4.4"
+futures = "0.3.30"
+
+[dev-dependencies]
+tempfile = "3.8.1"
+env_logger = "0.11.3"
@@ -1,24 +1,42 @@
 use anyhow::Result;
 use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVPacket;
+use std::path::PathBuf;
 use uuid::Uuid;
 
-use crate::egress::{Egress, EgressResult};
-use crate::mux::HlsMuxer;
+use crate::egress::{Egress, EgressResult, EncoderOrSourceStream};
+use crate::mux::{HlsMuxer, SegmentType};
+use crate::variant::VariantStream;
 
 /// Alias the muxer directly
-pub type HlsEgress = HlsMuxer;
+pub struct HlsEgress {
+    mux: HlsMuxer,
+}
 
-impl Egress for HlsMuxer {
+impl HlsEgress {
+    pub const PATH: &'static str = "hls";
+
+    pub fn new<'a>(
+        out_dir: PathBuf,
+        encoders: impl Iterator<Item = (&'a VariantStream, EncoderOrSourceStream<'a>)>,
+        segment_type: SegmentType,
+    ) -> Result<Self> {
+        Ok(Self {
+            mux: HlsMuxer::new(out_dir.join(Self::PATH), encoders, segment_type)?,
+        })
+    }
+}
+
+impl Egress for HlsEgress {
     unsafe fn process_pkt(
         &mut self,
         packet: *mut AVPacket,
         variant: &Uuid,
     ) -> Result<EgressResult> {
-        self.mux_packet(packet, variant)
+        self.mux.mux_packet(packet, variant)
     }
 
     unsafe fn reset(&mut self) -> Result<()> {
-        for var in &mut self.variants {
+        for var in &mut self.mux.variants {
             var.reset()?
         }
         Ok(())
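The hunk above turns the old `pub type HlsEgress = HlsMuxer` alias into a wrapper struct that owns the muxer and nests all HLS output under an `hls/` subdirectory of the pipeline's output directory. A minimal, self-contained sketch of that path-joining behaviour, using an illustrative stand-in struct rather than the real `HlsEgress`/`HlsMuxer` types:

use std::path::PathBuf;

// Illustrative stand-in; only the PATH-joining behaviour from the diff is shown.
struct DemoEgress {
    out_dir: PathBuf,
}

impl DemoEgress {
    const PATH: &'static str = "hls";

    fn new(out_dir: PathBuf) -> Self {
        // HlsEgress::new likewise joins its PATH constant onto the caller's out_dir.
        Self {
            out_dir: out_dir.join(Self::PATH),
        }
    }
}

fn main() {
    let egress = DemoEgress::new(PathBuf::from("/tmp/stream-1234"));
    assert_eq!(egress.out_dir, PathBuf::from("/tmp/stream-1234/hls"));
    println!("segments would be written under {}", egress.out_dir.display());
}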
@@ -1,5 +1,6 @@
 use anyhow::Result;
-use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVPacket;
+use ffmpeg_rs_raw::ffmpeg_sys_the_third::{AVPacket, AVStream};
+use ffmpeg_rs_raw::Encoder;
 use serde::{Deserialize, Serialize};
 use std::collections::HashSet;
 use std::path::PathBuf;
@@ -44,3 +45,8 @@ pub struct EgressSegment {
     /// Path on disk to the segment file
     pub path: PathBuf,
 }
+
+pub enum EncoderOrSourceStream<'a> {
+    Encoder(&'a Encoder),
+    SourceStream(*mut AVStream),
+}
@@ -1,17 +1,14 @@
 use anyhow::Result;
 use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVPacket;
-use ffmpeg_rs_raw::{Encoder, Muxer};
+use ffmpeg_rs_raw::Muxer;
 use std::collections::HashMap;
-use std::fs;
 use std::path::PathBuf;
 use uuid::Uuid;
 
-use crate::egress::{Egress, EgressResult};
+use crate::egress::{Egress, EgressResult, EncoderOrSourceStream};
 use crate::variant::{StreamMapping, VariantStream};
 
 pub struct RecorderEgress {
-    /// Pipeline ID
-    id: Uuid,
     /// Internal muxer writing the output packets
     muxer: Muxer,
     /// Mapping from Variant ID to stream index
@@ -19,33 +16,37 @@ pub struct RecorderEgress {
 }
 
 impl RecorderEgress {
+    pub const FILENAME: &'static str = "recording.mp4";
+
     pub fn new<'a>(
-        id: &Uuid,
-        out_dir: &str,
-        variants: impl Iterator<Item = (&'a VariantStream, &'a Encoder)>,
+        out_dir: PathBuf,
+        variants: impl Iterator<Item = (&'a VariantStream, EncoderOrSourceStream<'a>)>,
     ) -> Result<Self> {
-        let base = PathBuf::from(out_dir).join(id.to_string());
-
-        let out_file = base.join("recording.ts");
-        fs::create_dir_all(&base)?;
+        let out_file = out_dir.join(Self::FILENAME);
 
         let mut var_map = HashMap::new();
         let muxer = unsafe {
             let mut m = Muxer::builder()
                 .with_output_path(out_file.to_str().unwrap(), None)?
                 .build()?;
             for (var, enc) in variants {
-                let stream = m.add_stream_encoder(enc)?;
-                var_map.insert(var.id(), (*stream).index);
+                match enc {
+                    EncoderOrSourceStream::Encoder(enc) => {
+                        let stream = m.add_stream_encoder(enc)?;
+                        var_map.insert(var.id(), (*stream).index);
+                    }
+                    EncoderOrSourceStream::SourceStream(stream) => {
+                        let stream = m.add_copy_stream(stream)?;
+                        var_map.insert(var.id(), (*stream).index);
+                    }
+                }
             }
-            m.open(None)?;
+            let mut options = HashMap::new();
+            options.insert("movflags".to_string(), "faststart".to_string());
+
+            m.open(Some(options))?;
             m
         };
-        Ok(Self {
-            id: *id,
-            muxer,
-            var_map,
-        })
+        Ok(Self { muxer, var_map })
     }
 }
 
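Two behavioural changes ride along with this hunk: a variant can now be either re-encoded or copied straight from the source stream (via `EncoderOrSourceStream`), and the recorder writes `recording.mp4` with the `movflags=faststart` option, which relocates the moov atom to the front of the file when the muxer is closed so playback can begin before the whole file is available. A self-contained sketch of the dispatch shape, with local stand-ins instead of the crate's `Encoder` and `AVStream` types:

use std::collections::HashMap;

// Local stand-in enum; the real code matches on EncoderOrSourceStream.
enum EncoderOrSource<'a> {
    Encoder(&'a str),
    SourceStream(&'a str),
}

fn add_to_muxer(input: EncoderOrSource) -> String {
    // RecorderEgress::new performs the same match: encoded variants get an
    // encoder-backed stream, while source streams are added as copy streams.
    match input {
        EncoderOrSource::Encoder(name) => format!("add_stream_encoder({name})"),
        EncoderOrSource::SourceStream(name) => format!("add_copy_stream({name})"),
    }
}

fn main() {
    println!("{}", add_to_muxer(EncoderOrSource::Encoder("h264-720p")));
    println!("{}", add_to_muxer(EncoderOrSource::SourceStream("source-video")));

    // The muxer options handed to open(), as in the diff above.
    let mut options = HashMap::new();
    options.insert("movflags".to_string(), "faststart".to_string());
    println!("muxer options: {:?}", options);
}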
@@ -22,6 +22,7 @@ pub struct FrameGenerator {
     width: u16,
     height: u16,
     video_sample_fmt: AVPixelFormat,
+    realtime: bool,
 
     audio_sample_rate: u32,
     audio_frame_size: i32,
@@ -71,6 +72,7 @@ impl FrameGenerator {
             fps,
             width,
             height,
+            realtime: true,
             video_sample_fmt: pix_fmt,
             audio_sample_rate: sample_rate,
             audio_frame_size: frame_size,
@@ -86,6 +88,10 @@ impl FrameGenerator {
         })
     }
 
+    pub fn set_realtime(&mut self, realtime: bool) {
+        self.realtime = realtime;
+    }
+
     pub fn from_stream(
         video_stream: &IngressStream,
         audio_stream: Option<&IngressStream>,
@@ -258,11 +264,15 @@ impl FrameGenerator {
             (*self.next_frame).data[0],
             (self.width as usize * self.height as usize * 4) as usize,
         );
-        for z in 0..(self.width as usize * self.height as usize) {
-            buf[z * 4..z * 4 + 4].copy_from_slice(&color32);
+        for chunk in buf.chunks_exact_mut(4) {
+            chunk[0] = color32[0];
+            chunk[1] = color32[1];
+            chunk[2] = color32[2];
+            chunk[3] = color32[3];
         }
         Ok(())
     }
 
     /// Copy data directly into the frame buffer (must be RGBA data)
     pub unsafe fn copy_frame_data(&mut self, data: &[u8]) -> Result<()> {
         if self.next_frame.is_null() {
@@ -354,17 +364,19 @@ impl FrameGenerator {
             self.begin()?;
         }
 
-        let stream_time = Duration::from_secs_f64(
-            self.video_pts as f64 / self.pts_per_frame() as f64 / self.fps as f64,
-        );
-        let real_time = self.start.elapsed();
-        let wait_time = if stream_time > real_time {
-            stream_time - real_time
-        } else {
-            Duration::new(0, 0)
-        };
-        if !wait_time.is_zero() && wait_time.as_secs_f32() > 1f32 / self.fps {
-            std::thread::sleep(wait_time);
+        if self.realtime {
+            let stream_time = Duration::from_secs_f64(
+                self.video_pts as f64 / self.pts_per_frame() as f64 / self.fps as f64,
+            );
+            let real_time = self.start.elapsed();
+            let wait_time = if stream_time > real_time {
+                stream_time - real_time
+            } else {
+                Duration::new(0, 0)
+            };
+            if !wait_time.is_zero() && wait_time.as_secs_f32() > 1f32 / self.fps {
+                std::thread::sleep(wait_time);
+            }
         }
 
         // convert to output pixel format, or just return internal frame if it matches output
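The `realtime` flag added here lets the frame generator either pace itself against the wall clock (live test pattern) or emit frames as fast as possible, for example when driven from a file. A self-contained sketch of the same pacing idea, with made-up frame numbers and fps standing in for the generator's internal pts bookkeeping:

use std::time::{Duration, Instant};

// Stand-alone illustration of the pacing logic that now only runs when
// `realtime` is true; the real code derives stream_time from video_pts.
fn pace(realtime: bool, frame_index: u64, fps: f32, start: Instant) {
    if !realtime {
        return; // non-realtime ingest can generate frames without sleeping
    }
    let stream_time = Duration::from_secs_f64(frame_index as f64 / fps as f64);
    let real_time = start.elapsed();
    let wait_time = stream_time.saturating_sub(real_time);
    if !wait_time.is_zero() && wait_time.as_secs_f32() > 1.0 / fps {
        std::thread::sleep(wait_time);
    }
}

fn main() {
    let start = Instant::now();
    for i in 0..10 {
        pace(true, i, 30.0, start); // paced at roughly 30 fps
    }
    println!("generated 10 frames in {:?}", start.elapsed());
}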
@@ -13,10 +13,11 @@ pub async fn listen(out_dir: String, path: PathBuf, overseer: Arc<dyn Overseer>)
     let info = ConnectionInfo {
         id: Uuid::new_v4(),
         ip_addr: "127.0.0.1:6969".to_string(),
-        endpoint: "file-input".to_owned(),
+        endpoint: "file-input",
         app_name: "".to_string(),
         key: "test".to_string(),
     };
+    let url = path.to_str().unwrap().to_string();
     let file = std::fs::File::open(path)?;
     spawn_pipeline(
         Handle::current(),
@@ -24,6 +25,8 @@ pub async fn listen(out_dir: String, path: PathBuf, overseer: Arc<dyn Overseer>)
         out_dir.clone(),
         overseer.clone(),
         Box::new(file),
+        Some(url),
+        None,
     );
 
     Ok(())
@@ -1,8 +1,9 @@
 use crate::overseer::Overseer;
-use crate::pipeline::runner::PipelineRunner;
+use crate::pipeline::runner::{PipelineCommand, PipelineRunner};
 use log::{error, info, warn};
 use serde::{Deserialize, Serialize};
 use std::io::Read;
+use std::sync::mpsc::Receiver;
 use std::sync::Arc;
 use std::time::Instant;
 use tokio::runtime::Handle;
@@ -21,8 +22,8 @@ pub struct ConnectionInfo {
     /// Unique ID of this connection / pipeline
     pub id: Uuid,
 
-    /// Endpoint of the ingress
-    pub endpoint: String,
+    /// Name of the ingest point
+    pub endpoint: &'static str,
 
     /// IP address of the connection
     pub ip_addr: String,
@@ -40,8 +41,10 @@ pub fn spawn_pipeline(
     out_dir: String,
     seer: Arc<dyn Overseer>,
     reader: Box<dyn Read + Send>,
+    url: Option<String>,
+    rx: Option<Receiver<PipelineCommand>>,
 ) {
-    match PipelineRunner::new(handle, out_dir, seer, info, reader, None) {
+    match PipelineRunner::new(handle, out_dir, seer, info, reader, url, rx) {
         Ok(pl) => match run_pipeline(pl) {
             Ok(_) => {}
             Err(e) => {
@@ -58,7 +61,10 @@ pub fn run_pipeline(mut pl: PipelineRunner) -> anyhow::Result<()> {
     info!("New client connected: {}", &pl.connection.ip_addr);
 
     std::thread::Builder::new()
-        .name(format!("pipeline-{}", pl.connection.id))
+        .name(format!(
+            "client:{}:{}",
+            pl.connection.endpoint, pl.connection.id
+        ))
         .spawn(move || {
             pl.run();
         })?;
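`spawn_pipeline` now threads an optional `std::sync::mpsc` receiver of `PipelineCommand` into the runner, and an ingest that wants a clean shutdown creates the channel, keeps the sender, and passes the receiver along (the RTMP handler below sends `PipelineCommand::Shutdown` when publishing finishes). How `PipelineRunner` itself consumes the receiver is not visible in this diff; the following self-contained sketch only illustrates the channel wiring, with a locally defined `Command` enum standing in for `PipelineCommand`:

use std::sync::mpsc::{channel, Receiver, Sender};
use std::thread;

// Local stand-in for PipelineCommand; only Shutdown is sent in this diff.
enum Command {
    Shutdown,
}

fn run_pipeline(rx: Option<Receiver<Command>>) {
    loop {
        // A runner holding an Option<Receiver> can poll it between packets;
        // the actual PipelineRunner handling is an assumption here.
        if let Some(rx) = &rx {
            if let Ok(Command::Shutdown) = rx.try_recv() {
                println!("shutdown requested, stopping pipeline");
                return;
            }
        }
        thread::yield_now();
    }
}

fn main() {
    let (tx, rx): (Sender<Command>, Receiver<Command>) = channel();
    let handle = thread::spawn(move || run_pipeline(Some(rx)));
    tx.send(Command::Shutdown).expect("pipeline hung up");
    handle.join().unwrap();
}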
@@ -1,6 +1,6 @@
 use crate::ingress::{BufferedReader, ConnectionInfo};
 use crate::overseer::Overseer;
-use crate::pipeline::runner::PipelineRunner;
+use crate::pipeline::runner::{PipelineCommand, PipelineRunner};
 use anyhow::{anyhow, bail, Result};
 use bytes::{Bytes, BytesMut};
 use log::{error, info};
@@ -11,6 +11,7 @@ use rml_rtmp::sessions::{
 use std::collections::VecDeque;
 use std::io::{ErrorKind, Read, Write};
 use std::net::TcpStream;
+use std::sync::mpsc::Sender;
 use std::sync::Arc;
 use std::time::Duration;
 use tokio::net::TcpListener;
@@ -32,10 +33,11 @@ struct RtmpClient {
     msg_queue: VecDeque<ServerSessionResult>,
     pub published_stream: Option<RtmpPublishedStream>,
     muxer: FlvMuxer,
+    tx: Sender<PipelineCommand>,
 }
 
 impl RtmpClient {
-    pub fn new(socket: TcpStream) -> Result<Self> {
+    pub fn new(socket: TcpStream, tx: Sender<PipelineCommand>) -> Result<Self> {
         socket.set_nonblocking(false)?;
         let cfg = ServerSessionConfig::new();
         let (ses, res) = ServerSession::new(cfg)?;
@@ -46,6 +48,7 @@ impl RtmpClient {
             msg_queue: VecDeque::from(res),
             published_stream: None,
             muxer: FlvMuxer::new(),
+            tx,
         })
     }
 
@@ -201,7 +204,13 @@ impl RtmpClient {
                     self.published_stream = Some(RtmpPublishedStream(app_name, stream_key));
                 }
             }
-            ServerSessionEvent::PublishStreamFinished { .. } => {}
+            ServerSessionEvent::PublishStreamFinished {
+                app_name,
+                stream_key,
+            } => {
+                self.tx.send(PipelineCommand::Shutdown)?;
+                info!("Stream ending: {app_name}/{stream_key}");
+            }
             ServerSessionEvent::StreamMetadataChanged {
                 app_name,
                 stream_key,
@@ -269,14 +278,15 @@ pub async fn listen(out_dir: String, addr: String, overseer: Arc<dyn Overseer>)
 
     info!("RTMP listening on: {}", &addr);
     while let Ok((socket, ip)) = listener.accept().await {
-        let mut cc = RtmpClient::new(socket.into_std()?)?;
-        let addr = addr.clone();
         let overseer = overseer.clone();
        let out_dir = out_dir.clone();
         let handle = Handle::current();
+        let new_id = Uuid::new_v4();
         std::thread::Builder::new()
-            .name("rtmp-client".to_string())
+            .name(format!("client:rtmp:{}", new_id))
             .spawn(move || {
+                let (tx, rx) = std::sync::mpsc::channel();
+                let mut cc = RtmpClient::new(socket.into_std()?, tx)?;
                 if let Err(e) = cc.handshake() {
                     bail!("Error during handshake: {}", e)
                 }
@@ -286,9 +296,9 @@ pub async fn listen(out_dir: String, addr: String, overseer: Arc<dyn Overseer>)
 
                 let pr = cc.published_stream.as_ref().unwrap();
                 let info = ConnectionInfo {
-                    id: Uuid::new_v4(),
+                    id: new_id,
                     ip_addr: ip.to_string(),
-                    endpoint: addr.clone(),
+                    endpoint: "rtmp",
                     app_name: pr.0.clone(),
                     key: pr.1.clone(),
                 };
@@ -299,14 +309,13 @@ pub async fn listen(out_dir: String, addr: String, overseer: Arc<dyn Overseer>)
                     info,
                     Box::new(cc),
                     None,
+                    Some(rx),
                 ) {
                     Ok(pl) => pl,
                     Err(e) => {
                         bail!("Failed to create PipelineRunner {}", e)
                     }
                 };
-                //pl.set_demuxer_format("flv");
-                //pl.set_demuxer_buffer_size(1024 * 64);
                 pl.run();
                 Ok(())
             })?;
@@ -1,5 +1,6 @@
 use crate::ingress::{spawn_pipeline, BufferedReader, ConnectionInfo};
 use crate::overseer::Overseer;
+use crate::pipeline::runner::PipelineCommand;
 use anyhow::Result;
 use futures_util::stream::FusedStream;
 use futures_util::StreamExt;
@@ -7,6 +8,7 @@ use log::info;
 use srt_tokio::{SrtListener, SrtSocket};
 use std::io::Read;
 use std::net::SocketAddr;
+use std::sync::mpsc::{channel, Sender};
 use std::sync::Arc;
 use tokio::runtime::Handle;
 use uuid::Uuid;
@@ -22,7 +24,7 @@ pub async fn listen(out_dir: String, addr: String, overseer: Arc<dyn Overseer>)
         let socket = request.accept(None).await?;
         let info = ConnectionInfo {
             id: Uuid::new_v4(),
-            endpoint: addr.clone(),
+            endpoint: "srt",
             ip_addr: socket.settings().remote.to_string(),
             app_name: "".to_string(),
             key: socket
@@ -31,6 +33,7 @@ pub async fn listen(out_dir: String, addr: String, overseer: Arc<dyn Overseer>)
                 .as_ref()
                 .map_or(String::new(), |s| s.to_string()),
         };
+        let (tx, rx) = channel();
         spawn_pipeline(
             Handle::current(),
             info,
@@ -40,7 +43,10 @@ pub async fn listen(out_dir: String, addr: String, overseer: Arc<dyn Overseer>)
                 handle: Handle::current(),
                 socket,
                 buffer: BufferedReader::new(4096, MAX_SRT_BUFFER_SIZE, "SRT"),
+                tx,
             }),
+            None,
+            Some(rx),
         );
     }
     Ok(())
@@ -50,6 +56,7 @@ struct SrtReader {
     pub handle: Handle,
     pub socket: SrtSocket,
     pub buffer: BufferedReader,
+    pub tx: Sender<PipelineCommand>, // TODO: implement clean shutdown
 }
 
 impl Read for SrtReader {
@@ -15,7 +15,7 @@ pub async fn listen(out_dir: String, addr: String, overseer: Arc<dyn Overseer>)
         let info = ConnectionInfo {
             id: Uuid::new_v4(),
             ip_addr: ip.to_string(),
-            endpoint: addr.clone(),
+            endpoint: "tcp",
             app_name: "".to_string(),
             key: "test".to_string(),
         };
@@ -27,6 +27,8 @@ pub async fn listen(out_dir: String, addr: String, overseer: Arc<dyn Overseer>)
             out_dir.clone(),
             overseer.clone(),
             Box::new(socket),
+            None,
+            None,
         );
     }
     Ok(())
@@ -4,14 +4,15 @@ use crate::overseer::Overseer;
 use anyhow::Result;
 use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVPixelFormat::AV_PIX_FMT_YUV420P;
 use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVSampleFormat::AV_SAMPLE_FMT_FLTP;
-use ffmpeg_rs_raw::ffmpeg_sys_the_third::{av_frame_free, av_packet_free, AV_PROFILE_H264_MAIN, AVRational};
+use ffmpeg_rs_raw::ffmpeg_sys_the_third::{
+    av_frame_free, av_packet_free, AVRational, AV_PROFILE_H264_MAIN,
+};
 use ffmpeg_rs_raw::{Encoder, Muxer};
 use log::info;
 use ringbuf::traits::{Observer, Split};
 use ringbuf::{HeapCons, HeapRb};
 use std::io::Read;
 use std::sync::Arc;
-use std::time::Duration;
 use tiny_skia::Pixmap;
 use tokio::runtime::Handle;
 use uuid::Uuid;
@@ -19,13 +20,9 @@ use uuid::Uuid;
 pub async fn listen(out_dir: String, overseer: Arc<dyn Overseer>) -> Result<()> {
     info!("Test pattern enabled");
 
-    // add a delay, there is a race condition somewhere, the test pattern doesnt always
-    // get added to active_streams
-    tokio::time::sleep(Duration::from_secs(1)).await;
-
     let info = ConnectionInfo {
         id: Uuid::new_v4(),
-        endpoint: "test-pattern".to_string(),
+        endpoint: "test-pattern",
         ip_addr: "test-pattern".to_string(),
         app_name: "".to_string(),
         key: "test".to_string(),
@@ -34,9 +31,11 @@ pub async fn listen(out_dir: String, overseer: Arc<dyn Overseer>) -> Result<()>
     spawn_pipeline(
         Handle::current(),
         info,
-        out_dir.clone(),
-        overseer.clone(),
+        out_dir,
+        overseer,
         Box::new(src),
+        None,
+        None,
     );
     Ok(())
 }
@@ -115,8 +114,14 @@ impl TestPatternSrc {
                 SAMPLE_RATE,
                 frame_size,
                 1,
-                AVRational { num: 1, den: VIDEO_FPS as i32 },
-                AVRational { num: 1, den: SAMPLE_RATE as i32 },
+                AVRational {
+                    num: 1,
+                    den: VIDEO_FPS as i32,
+                },
+                AVRational {
+                    num: 1,
+                    den: SAMPLE_RATE as i32,
+                },
             )?,
             video_encoder,
             audio_encoder,
@@ -1,8 +1,10 @@
 pub mod egress;
+mod generator;
 pub mod ingress;
 pub mod mux;
 pub mod overseer;
 pub mod pipeline;
+#[cfg(test)]
+pub mod test_hls_timing;
 pub mod variant;
 pub mod viewer;
-mod generator;
@ -1,584 +0,0 @@
|
|||||||
use crate::egress::{EgressResult, EgressSegment};
|
|
||||||
use crate::variant::{StreamMapping, VariantStream};
|
|
||||||
use anyhow::{bail, ensure, Result};
|
|
||||||
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVCodecID::AV_CODEC_ID_H264;
|
|
||||||
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVMediaType::AVMEDIA_TYPE_VIDEO;
|
|
||||||
use ffmpeg_rs_raw::ffmpeg_sys_the_third::{
|
|
||||||
av_free, av_opt_set, av_q2d, av_write_frame, avio_close, avio_flush, avio_open, AVPacket,
|
|
||||||
AVStream, AVIO_FLAG_WRITE, AV_NOPTS_VALUE, AV_PKT_FLAG_KEY,
|
|
||||||
};
|
|
||||||
use ffmpeg_rs_raw::{cstr, Encoder, Muxer};
|
|
||||||
use itertools::Itertools;
|
|
||||||
use log::{info, trace, warn};
|
|
||||||
use m3u8_rs::MediaSegment;
|
|
||||||
use std::collections::HashMap;
|
|
||||||
use std::fmt::Display;
|
|
||||||
use std::fs::File;
|
|
||||||
use std::path::PathBuf;
|
|
||||||
use std::ptr;
|
|
||||||
use uuid::Uuid;
|
|
||||||
|
|
||||||
#[derive(Clone, Copy)]
|
|
||||||
pub enum SegmentType {
|
|
||||||
MPEGTS,
|
|
||||||
FMP4,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub enum HlsVariantStream {
|
|
||||||
Video {
|
|
||||||
group: usize,
|
|
||||||
index: usize,
|
|
||||||
id: Uuid,
|
|
||||||
},
|
|
||||||
Audio {
|
|
||||||
group: usize,
|
|
||||||
index: usize,
|
|
||||||
id: Uuid,
|
|
||||||
},
|
|
||||||
Subtitle {
|
|
||||||
group: usize,
|
|
||||||
index: usize,
|
|
||||||
id: Uuid,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
impl HlsVariantStream {
|
|
||||||
pub fn id(&self) -> &Uuid {
|
|
||||||
match self {
|
|
||||||
HlsVariantStream::Video { id, .. } => id,
|
|
||||||
HlsVariantStream::Audio { id, .. } => id,
|
|
||||||
HlsVariantStream::Subtitle { id, .. } => id,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn index(&self) -> &usize {
|
|
||||||
match self {
|
|
||||||
HlsVariantStream::Video { index, .. } => index,
|
|
||||||
HlsVariantStream::Audio { index, .. } => index,
|
|
||||||
HlsVariantStream::Subtitle { index, .. } => index,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Display for HlsVariantStream {
|
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
|
||||||
match self {
|
|
||||||
HlsVariantStream::Video { index, .. } => write!(f, "v:{}", index),
|
|
||||||
HlsVariantStream::Audio { index, .. } => write!(f, "a:{}", index),
|
|
||||||
HlsVariantStream::Subtitle { index, .. } => write!(f, "s:{}", index),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct HlsVariant {
|
|
||||||
/// Name of this variant (720p)
|
|
||||||
name: String,
|
|
||||||
/// MPEG-TS muxer for this variant
|
|
||||||
mux: Muxer,
|
|
||||||
/// List of streams ids in this variant
|
|
||||||
streams: Vec<HlsVariantStream>,
|
|
||||||
/// Segment length in seconds
|
|
||||||
segment_length: f32,
|
|
||||||
/// Total number of segments to store for this variant
|
|
||||||
segment_window: Option<u16>,
|
|
||||||
/// Current segment index
|
|
||||||
idx: u64,
|
|
||||||
/// Output directory (base)
|
|
||||||
out_dir: String,
|
|
||||||
/// List of segments to be included in the playlist
|
|
||||||
segments: Vec<SegmentInfo>,
|
|
||||||
/// Type of segments to create
|
|
||||||
segment_type: SegmentType,
|
|
||||||
/// Ending presentation timestamp
|
|
||||||
end_pts: i64,
|
|
||||||
/// Current segment duration in seconds (precise accumulation)
|
|
||||||
duration: f64,
|
|
||||||
/// Number of packets written to current segment
|
|
||||||
packets_written: u64,
|
|
||||||
/// Reference stream used to track duration
|
|
||||||
ref_stream_index: i32,
|
|
||||||
}
|
|
||||||
|
|
||||||
struct SegmentInfo {
|
|
||||||
index: u64,
|
|
||||||
duration: f32,
|
|
||||||
kind: SegmentType,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl SegmentInfo {
|
|
||||||
fn to_media_segment(&self) -> MediaSegment {
|
|
||||||
MediaSegment {
|
|
||||||
uri: self.filename(),
|
|
||||||
duration: self.duration,
|
|
||||||
title: None,
|
|
||||||
..MediaSegment::default()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn filename(&self) -> String {
|
|
||||||
HlsVariant::segment_name(self.kind, self.index)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl HlsVariant {
|
|
||||||
pub fn new<'a>(
|
|
||||||
out_dir: &'a str,
|
|
||||||
segment_length: f32,
|
|
||||||
group: usize,
|
|
||||||
encoded_vars: impl Iterator<Item = (&'a VariantStream, &'a Encoder)>,
|
|
||||||
segment_type: SegmentType,
|
|
||||||
) -> Result<Self> {
|
|
||||||
let name = format!("stream_{}", group);
|
|
||||||
let first_seg = Self::map_segment_path(out_dir, &name, 1, segment_type);
|
|
||||||
std::fs::create_dir_all(PathBuf::from(&first_seg).parent().unwrap())?;
|
|
||||||
|
|
||||||
let mut opts = HashMap::new();
|
|
||||||
if let SegmentType::FMP4 = segment_type {
|
|
||||||
opts.insert("fflags".to_string(), "-autobsf".to_string());
|
|
||||||
opts.insert(
|
|
||||||
"movflags".to_string(),
|
|
||||||
"+frag_custom+dash+delay_moov".to_string(),
|
|
||||||
);
|
|
||||||
};
|
|
||||||
let mut mux = unsafe {
|
|
||||||
Muxer::builder()
|
|
||||||
.with_output_path(
|
|
||||||
first_seg.as_str(),
|
|
||||||
match segment_type {
|
|
||||||
SegmentType::MPEGTS => Some("mpegts"),
|
|
||||||
SegmentType::FMP4 => Some("mp4"),
|
|
||||||
},
|
|
||||||
)?
|
|
||||||
.build()?
|
|
||||||
};
|
|
||||||
let mut streams = Vec::new();
|
|
||||||
let mut ref_stream_index = -1;
|
|
||||||
let mut has_video = false;
|
|
||||||
|
|
||||||
for (var, enc) in encoded_vars {
|
|
||||||
match var {
|
|
||||||
VariantStream::Video(v) => unsafe {
|
|
||||||
let stream = mux.add_stream_encoder(enc)?;
|
|
||||||
let stream_idx = (*stream).index as usize;
|
|
||||||
streams.push(HlsVariantStream::Video {
|
|
||||||
group,
|
|
||||||
index: stream_idx,
|
|
||||||
id: v.id(),
|
|
||||||
});
|
|
||||||
has_video = true;
|
|
||||||
// Always use video stream as reference for segmentation
|
|
||||||
ref_stream_index = stream_idx as _;
|
|
||||||
},
|
|
||||||
VariantStream::Audio(a) => unsafe {
|
|
||||||
let stream = mux.add_stream_encoder(enc)?;
|
|
||||||
let stream_idx = (*stream).index as usize;
|
|
||||||
streams.push(HlsVariantStream::Audio {
|
|
||||||
group,
|
|
||||||
index: stream_idx,
|
|
||||||
id: a.id(),
|
|
||||||
});
|
|
||||||
if !has_video && ref_stream_index == -1 {
|
|
||||||
ref_stream_index = stream_idx as _;
|
|
||||||
}
|
|
||||||
},
|
|
||||||
VariantStream::Subtitle(s) => unsafe {
|
|
||||||
let stream = mux.add_stream_encoder(enc)?;
|
|
||||||
streams.push(HlsVariantStream::Subtitle {
|
|
||||||
group,
|
|
||||||
index: (*stream).index as usize,
|
|
||||||
id: s.id(),
|
|
||||||
})
|
|
||||||
},
|
|
||||||
_ => bail!("unsupported variant stream"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
ensure!(
|
|
||||||
ref_stream_index != -1,
|
|
||||||
"No reference stream found, cant create variant"
|
|
||||||
);
|
|
||||||
trace!(
|
|
||||||
"{} will use stream index {} as reference for segmentation",
|
|
||||||
name,
|
|
||||||
ref_stream_index
|
|
||||||
);
|
|
||||||
unsafe {
|
|
||||||
mux.open(Some(opts))?;
|
|
||||||
}
|
|
||||||
Ok(Self {
|
|
||||||
name: name.clone(),
|
|
||||||
segment_length,
|
|
||||||
segment_window: Some(10), //TODO: configure window
|
|
||||||
mux,
|
|
||||||
streams,
|
|
||||||
idx: 1,
|
|
||||||
segments: Vec::new(), // Start with empty segments list
|
|
||||||
out_dir: out_dir.to_string(),
|
|
||||||
segment_type,
|
|
||||||
end_pts: AV_NOPTS_VALUE,
|
|
||||||
duration: 0.0,
|
|
||||||
packets_written: 0,
|
|
||||||
ref_stream_index,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn segment_name(t: SegmentType, idx: u64) -> String {
|
|
||||||
match t {
|
|
||||||
SegmentType::MPEGTS => format!("{}.ts", idx),
|
|
||||||
SegmentType::FMP4 => format!("{}.m4s", idx),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn out_dir(&self) -> PathBuf {
|
|
||||||
PathBuf::from(&self.out_dir).join(&self.name)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn map_segment_path(out_dir: &str, name: &str, idx: u64, typ: SegmentType) -> String {
|
|
||||||
PathBuf::from(out_dir)
|
|
||||||
.join(name)
|
|
||||||
.join(Self::segment_name(typ, idx))
|
|
||||||
.to_string_lossy()
|
|
||||||
.to_string()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Process a single packet through the muxer
|
|
||||||
unsafe fn process_packet(&mut self, pkt: *mut AVPacket) -> Result<EgressResult> {
|
|
||||||
let pkt_stream = *(*self.mux.context())
|
|
||||||
.streams
|
|
||||||
.add((*pkt).stream_index as usize);
|
|
||||||
|
|
||||||
let mut result = EgressResult::None;
|
|
||||||
let stream_type = (*(*pkt_stream).codecpar).codec_type;
|
|
||||||
let mut can_split = stream_type == AVMEDIA_TYPE_VIDEO
|
|
||||||
&& ((*pkt).flags & AV_PKT_FLAG_KEY == AV_PKT_FLAG_KEY);
|
|
||||||
let mut is_ref_pkt =
|
|
||||||
stream_type == AVMEDIA_TYPE_VIDEO && (*pkt).stream_index == self.ref_stream_index;
|
|
||||||
|
|
||||||
if (*pkt).pts == AV_NOPTS_VALUE {
|
|
||||||
can_split = false;
|
|
||||||
is_ref_pkt = false;
|
|
||||||
}
|
|
||||||
|
|
||||||
// check if current packet is keyframe, flush current segment
|
|
||||||
if self.packets_written > 0 && can_split {
|
|
||||||
trace!(
|
|
||||||
"{} segmentation check: pts={}, duration={:.3}, timebase={}/{}, target={:.3}",
|
|
||||||
self.name,
|
|
||||||
(*pkt).pts,
|
|
||||||
self.duration,
|
|
||||||
(*pkt).time_base.num,
|
|
||||||
(*pkt).time_base.den,
|
|
||||||
self.segment_length
|
|
||||||
);
|
|
||||||
|
|
||||||
if self.duration >= self.segment_length as f64 {
|
|
||||||
result = self.split_next_seg()?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// track duration from pts
|
|
||||||
if is_ref_pkt {
|
|
||||||
if self.end_pts == AV_NOPTS_VALUE {
|
|
||||||
self.end_pts = (*pkt).pts;
|
|
||||||
}
|
|
||||||
let pts_diff = (*pkt).pts - self.end_pts;
|
|
||||||
if pts_diff > 0 {
|
|
||||||
self.duration += pts_diff as f64 * av_q2d((*pkt).time_base);
|
|
||||||
}
|
|
||||||
self.end_pts = (*pkt).pts;
|
|
||||||
}
|
|
||||||
|
|
||||||
self.mux.write_packet(pkt)?;
|
|
||||||
self.packets_written += 1;
|
|
||||||
Ok(result)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub unsafe fn reset(&mut self) -> Result<()> {
|
|
||||||
self.mux.close()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Reset the muxer state and start the next segment
|
|
||||||
unsafe fn split_next_seg(&mut self) -> Result<EgressResult> {
|
|
||||||
let completed_segment_idx = self.idx;
|
|
||||||
self.idx += 1;
|
|
||||||
|
|
||||||
// Manually reset muxer avio
|
|
||||||
let ctx = self.mux.context();
|
|
||||||
av_write_frame(ctx, ptr::null_mut());
|
|
||||||
avio_flush((*ctx).pb);
|
|
||||||
avio_close((*ctx).pb);
|
|
||||||
av_free((*ctx).url as *mut _);
|
|
||||||
|
|
||||||
let next_seg_url =
|
|
||||||
Self::map_segment_path(&self.out_dir, &self.name, self.idx, self.segment_type);
|
|
||||||
(*ctx).url = cstr!(next_seg_url.as_str());
|
|
||||||
|
|
||||||
let ret = avio_open(&mut (*ctx).pb, (*ctx).url, AVIO_FLAG_WRITE);
|
|
||||||
if ret < 0 {
|
|
||||||
            bail!("Failed to re-init avio");
        }

        // tell muxer it needs to write headers again
        av_opt_set(
            (*ctx).priv_data,
            cstr!("events_flags"),
            cstr!("resend_headers"),
            0,
        );

        // Log the completed segment (previous index), not the next one
        let completed_seg_path = Self::map_segment_path(
            &self.out_dir,
            &self.name,
            completed_segment_idx,
            self.segment_type,
        );
        let completed_segment_path = PathBuf::from(&completed_seg_path);
        let segment_size = completed_segment_path
            .metadata()
            .map(|m| m.len())
            .unwrap_or(0);
        info!(
            "Finished segment {} [{:.3}s, {:.2} kB, {} pkts]",
            completed_segment_path
                .file_name()
                .unwrap_or_default()
                .to_string_lossy(),
            self.duration,
            segment_size as f32 / 1024f32,
            self.packets_written
        );

        let video_var_id = self
            .video_stream()
            .unwrap_or(self.streams.first().unwrap())
            .id()
            .clone();

        // cleanup old segments
        let deleted = self
            .clean_segments()?
            .into_iter()
            .map(|seg| EgressSegment {
                variant: video_var_id,
                idx: seg.index,
                duration: seg.duration,
                path: PathBuf::from(Self::map_segment_path(
                    &self.out_dir,
                    &self.name,
                    seg.index,
                    self.segment_type,
                )),
            })
            .collect();

        // emit result of the previously completed segment,
        let created = EgressSegment {
            variant: video_var_id,
            idx: completed_segment_idx,
            duration: self.duration as f32,
            path: completed_segment_path,
        };

        if let Err(e) = self.push_segment(completed_segment_idx, self.duration as f32) {
            warn!("Failed to update playlist: {}", e);
        }

        self.packets_written = 0;
        self.duration = 0.0;

        Ok(EgressResult::Segments {
            created: vec![created],
            deleted,
        })
    }

    fn video_stream(&self) -> Option<&HlsVariantStream> {
        self.streams
            .iter()
            .find(|a| matches!(*a, HlsVariantStream::Video { .. }))
    }

    /// Add a new segment to the variant and return a list of deleted segments
    fn push_segment(&mut self, idx: u64, duration: f32) -> Result<()> {
        self.segments.push(SegmentInfo {
            index: idx,
            duration,
            kind: self.segment_type,
        });

        self.write_playlist()
    }

    /// Delete segments which are too old
    fn clean_segments(&mut self) -> Result<Vec<SegmentInfo>> {
        const MAX_SEGMENTS: usize = 10;

        let mut ret = vec![];
        if self.segments.len() > MAX_SEGMENTS {
            let n_drain = self.segments.len() - MAX_SEGMENTS;
            let seg_dir = self.out_dir();
            for seg in self.segments.drain(..n_drain) {
                // delete file
                let seg_path = seg_dir.join(seg.filename());
                if let Err(e) = std::fs::remove_file(&seg_path) {
                    warn!(
                        "Failed to remove segment file: {} {}",
                        seg_path.display(),
                        e
                    );
                }
                trace!("Removed segment file: {}", seg_path.display());
                ret.push(seg);
            }
        }
        Ok(ret)
    }

    fn write_playlist(&mut self) -> Result<()> {
        if self.segments.is_empty() {
            return Ok(()); // Don't write empty playlists
        }

        let mut pl = m3u8_rs::MediaPlaylist::default();
        // Round up target duration to ensure compliance
        pl.target_duration = (self.segment_length.ceil() as u64).max(1);
        pl.segments = self.segments.iter().map(|s| s.to_media_segment()).collect();
        pl.version = Some(3);
        pl.media_sequence = self.segments.first().map(|s| s.index).unwrap_or(0);
        // For live streams, don't set end list
        pl.end_list = false;

        let mut f_out = File::create(self.out_dir().join("live.m3u8"))?;
        pl.write_to(&mut f_out)?;
        Ok(())
    }

    /// https://git.ffmpeg.org/gitweb/ffmpeg.git/blob/HEAD:/libavformat/hlsenc.c#l351
    unsafe fn to_codec_attr(&self, stream: *mut AVStream) -> Option<String> {
        let p = (*stream).codecpar;
        if (*p).codec_id == AV_CODEC_ID_H264 {
            let data = (*p).extradata;
            if !data.is_null() {
                let mut id_ptr = ptr::null_mut();
                let ds: *mut u16 = data as *mut u16;
                if (*ds) == 1 && (*data.add(4)) & 0x1F == 7 {
                    id_ptr = data.add(5);
                } else if (*ds) == 1 && (*data.add(3)) & 0x1F == 7 {
                    id_ptr = data.add(4);
                } else if *data.add(0) == 1 {
                    id_ptr = data.add(1);
                } else {
                    return None;
                }

                return Some(format!(
                    "avc1.{}",
                    hex::encode([*id_ptr.add(0), *id_ptr.add(1), *id_ptr.add(2)])
                ));
            }
        }
        None
    }
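The method above reads the three bytes following the SPS NAL header (profile_idc, the constraint flags and level_idc) out of the H.264 extradata and hex-encodes them into an RFC 6381 codec tag. A minimal standalone sketch of that encoding, assuming the `hex` crate used above; the helper name and byte values are illustrative only, not part of this file:

// Hypothetical helper mirroring the hex-encoding above.
fn avc1_codec_string(profile_idc: u8, constraint_flags: u8, level_idc: u8) -> String {
    format!("avc1.{}", hex::encode([profile_idc, constraint_flags, level_idc]))
}

fn main() {
    // 0x64 / 0x00 / 0x28 = High profile, no constraint flags, level 4.0
    assert_eq!(avc1_codec_string(0x64, 0x00, 0x28), "avc1.640028");
}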
    pub fn to_playlist_variant(&self) -> m3u8_rs::VariantStream {
        unsafe {
            let pes = self.video_stream().unwrap_or(self.streams.first().unwrap());
            let av_stream = *(*self.mux.context()).streams.add(*pes.index());
            let codec_par = (*av_stream).codecpar;
            m3u8_rs::VariantStream {
                is_i_frame: false,
                uri: format!("{}/live.m3u8", self.name),
                bandwidth: 0,
                average_bandwidth: Some((*codec_par).bit_rate as u64),
                codecs: self.to_codec_attr(av_stream),
                resolution: Some(m3u8_rs::Resolution {
                    width: (*codec_par).width as _,
                    height: (*codec_par).height as _,
                }),
                frame_rate: Some(av_q2d((*codec_par).framerate)),
                hdcp_level: None,
                audio: None,
                video: None,
                subtitles: None,
                closed_captions: None,
                other_attributes: None,
            }
        }
    }
}

pub struct HlsMuxer {
    pub out_dir: PathBuf,
    pub variants: Vec<HlsVariant>,
}

impl HlsMuxer {
    pub fn new<'a>(
        id: &Uuid,
        out_dir: &str,
        segment_length: f32,
        encoders: impl Iterator<Item = (&'a VariantStream, &'a Encoder)>,
        segment_type: SegmentType,
    ) -> Result<Self> {
        let base = PathBuf::from(out_dir).join(id.to_string());

        let mut vars = Vec::new();
        for (k, group) in &encoders
            .sorted_by(|a, b| a.0.group_id().cmp(&b.0.group_id()))
            .chunk_by(|a| a.0.group_id())
        {
            let var = HlsVariant::new(
                base.to_str().unwrap(),
                segment_length,
                k,
                group,
                segment_type,
            )?;
            vars.push(var);
        }

        let ret = Self {
            out_dir: base,
            variants: vars,
        };
        ret.write_master_playlist()?;
        Ok(ret)
    }

    fn write_master_playlist(&self) -> Result<()> {
        let mut pl = m3u8_rs::MasterPlaylist::default();
        pl.version = Some(3);
        pl.variants = self
            .variants
            .iter()
            .map(|v| v.to_playlist_variant())
            .collect();

        let mut f_out = File::create(self.out_dir.join("live.m3u8"))?;
        pl.write_to(&mut f_out)?;
        Ok(())
    }

    /// Mux an encoded packet from [Encoder]
    pub unsafe fn mux_packet(
        &mut self,
        pkt: *mut AVPacket,
        variant: &Uuid,
    ) -> Result<EgressResult> {
        for var in self.variants.iter_mut() {
            if let Some(vs) = var.streams.iter().find(|s| s.id() == variant) {
                // very important for muxer to know which stream this pkt belongs to
                (*pkt).stream_index = *vs.index() as _;
                return var.process_packet(pkt);
            }
        }

        // This HLS muxer doesn't handle this variant, return None instead of failing
        // This can happen when multiple egress handlers are configured with different variant sets
        trace!(
            "HLS muxer received packet for variant {} which it doesn't handle",
            variant
        );
        Ok(EgressResult::None)
    }
}
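HlsMuxer::new above builds one HlsVariant per group_id by sorting the (variant, encoder) pairs and chunking them with itertools, and the refactored mod.rs below keeps the same pattern. A small standalone sketch of that grouping, assuming itertools with chunk_by (the rename of group_by in recent versions); the (group_id, label) tuples are made up for illustration:

use itertools::Itertools;

fn main() {
    // Hypothetical (group_id, label) pairs standing in for (VariantStream, Encoder) entries
    let vars = vec![(1usize, "audio-aac"), (0, "video-720p"), (0, "audio-aac"), (1, "video-1080p")];

    for (group_id, members) in &vars
        .into_iter()
        .sorted_by(|a, b| a.0.cmp(&b.0))
        .chunk_by(|a| a.0)
    {
        // Each group becomes one HLS variant playlist in the real constructor
        let labels: Vec<_> = members.map(|m| m.1).collect();
        println!("group {}: {:?}", group_id, labels);
    }
}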
161 crates/core/src/mux/hls/mod.rs Normal file
@@ -0,0 +1,161 @@
use crate::egress::{EgressResult, EncoderOrSourceStream};
use crate::mux::hls::variant::HlsVariant;
use crate::variant::{StreamMapping, VariantStream};
use anyhow::Result;
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVPacket;
use ffmpeg_rs_raw::Encoder;
use itertools::Itertools;
use log::{trace, warn};
use std::fmt::Display;
use std::fs::{remove_dir_all, File};
use std::ops::Sub;
use std::path::PathBuf;
use tokio::time::Instant;
use uuid::Uuid;

mod segment;
mod variant;

pub enum HlsVariantStream {
    Video {
        group: usize,
        index: usize,
        id: Uuid,
    },
    Audio {
        group: usize,
        index: usize,
        id: Uuid,
    },
    Subtitle {
        group: usize,
        index: usize,
        id: Uuid,
    },
}

impl HlsVariantStream {
    pub fn id(&self) -> &Uuid {
        match self {
            HlsVariantStream::Video { id, .. } => id,
            HlsVariantStream::Audio { id, .. } => id,
            HlsVariantStream::Subtitle { id, .. } => id,
        }
    }

    pub fn index(&self) -> &usize {
        match self {
            HlsVariantStream::Video { index, .. } => index,
            HlsVariantStream::Audio { index, .. } => index,
            HlsVariantStream::Subtitle { index, .. } => index,
        }
    }
}

impl Display for HlsVariantStream {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            HlsVariantStream::Video { index, .. } => write!(f, "v:{}", index),
            HlsVariantStream::Audio { index, .. } => write!(f, "a:{}", index),
            HlsVariantStream::Subtitle { index, .. } => write!(f, "s:{}", index),
        }
    }
}

#[derive(Clone, Copy, PartialEq)]
pub enum SegmentType {
    MPEGTS,
    FMP4,
}

pub struct HlsMuxer {
    pub out_dir: PathBuf,
    pub variants: Vec<HlsVariant>,

    last_master_write: Instant,
}

impl HlsMuxer {
    pub const MASTER_PLAYLIST: &'static str = "live.m3u8";

    const MASTER_WRITE_INTERVAL: f32 = 60.0;

    pub fn new<'a>(
        out_dir: PathBuf,
        encoders: impl Iterator<Item = (&'a VariantStream, EncoderOrSourceStream<'a>)>,
        segment_type: SegmentType,
    ) -> Result<Self> {
        if !out_dir.exists() {
            std::fs::create_dir_all(&out_dir)?;
        }
        let mut vars = Vec::new();
        for (k, group) in &encoders
            .sorted_by(|a, b| a.0.group_id().cmp(&b.0.group_id()))
            .chunk_by(|a| a.0.group_id())
        {
            let var = HlsVariant::new(out_dir.clone(), k, group, segment_type)?;
            vars.push(var);
        }

        let mut ret = Self {
            out_dir,
            variants: vars,
            last_master_write: Instant::now(),
        };
        ret.write_master_playlist()?;
        Ok(ret)
    }

    fn write_master_playlist(&mut self) -> Result<()> {
        let mut pl = m3u8_rs::MasterPlaylist::default();
        pl.version = Some(3);
        pl.variants = self
            .variants
            .iter()
            .map(|v| v.to_playlist_variant())
            .collect();

        let mut f_out = File::create(self.out_dir.join(Self::MASTER_PLAYLIST))?;
        pl.write_to(&mut f_out)?;
        self.last_master_write = Instant::now();
        Ok(())
    }

    /// Mux an encoded packet from [Encoder]
    pub unsafe fn mux_packet(
        &mut self,
        pkt: *mut AVPacket,
        variant: &Uuid,
    ) -> Result<EgressResult> {
        if Instant::now().sub(self.last_master_write).as_secs_f32() > Self::MASTER_WRITE_INTERVAL {
            self.write_master_playlist()?;
        }
        for var in self.variants.iter_mut() {
            if let Some(vs) = var.streams.iter().find(|s| s.id() == variant) {
                // very important for muxer to know which stream this pkt belongs to
                (*pkt).stream_index = *vs.index() as _;
                return var.process_packet(pkt);
            }
        }

        // This HLS muxer doesn't handle this variant, return None instead of failing
        // This can happen when multiple egress handlers are configured with different variant sets
        trace!(
            "HLS muxer received packet for variant {} which it doesn't handle",
            variant
        );
        Ok(EgressResult::None)
    }
}

impl Drop for HlsMuxer {
    fn drop(&mut self) {
        if let Err(e) = remove_dir_all(&self.out_dir) {
            warn!(
                "Failed to clean up hls dir: {} {}",
                self.out_dir.display(),
                e
            );
        }
    }
}
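mux_packet above refreshes the master playlist at most once every MASTER_WRITE_INTERVAL seconds by comparing against last_master_write. A minimal sketch of that throttle pattern, using std::time::Instant instead of the tokio re-export; the type and method names here are illustrative, not part of the crate:

use std::time::Instant;

struct Throttled {
    last_write: Instant,
}

impl Throttled {
    const WRITE_INTERVAL_SECS: f32 = 60.0;

    fn maybe_write(&mut self) {
        // Only rewrite the master playlist once the interval has elapsed
        if self.last_write.elapsed().as_secs_f32() > Self::WRITE_INTERVAL_SECS {
            // ... write live.m3u8 here ...
            self.last_write = Instant::now();
        }
    }
}

fn main() {
    let mut t = Throttled { last_write: Instant::now() };
    t.maybe_write(); // no-op until 60 seconds have passed
}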
75 crates/core/src/mux/hls/segment.rs Normal file
@@ -0,0 +1,75 @@
use crate::mux::hls::variant::HlsVariant;
use crate::mux::SegmentType;
use m3u8_rs::{ByteRange, MediaSegment, MediaSegmentType, Part};

#[derive(PartialEq)]
pub enum HlsSegment {
    Full(SegmentInfo),
    Partial(PartialSegmentInfo),
}

impl HlsSegment {
    pub fn to_media_segment(&self) -> MediaSegmentType {
        match self {
            HlsSegment::Full(f) => f.to_media_segment(),
            HlsSegment::Partial(p) => p.to_media_segment(),
        }
    }
}

#[derive(PartialEq)]
pub struct SegmentInfo {
    pub index: u64,
    pub duration: f32,
    pub kind: SegmentType,
}

impl SegmentInfo {
    pub fn to_media_segment(&self) -> MediaSegmentType {
        MediaSegmentType::Full(MediaSegment {
            uri: self.filename(),
            duration: self.duration,
            ..MediaSegment::default()
        })
    }

    pub fn filename(&self) -> String {
        HlsVariant::segment_name(self.kind, self.index)
    }
}

#[derive(PartialEq)]
pub struct PartialSegmentInfo {
    pub index: u64,
    pub parent_index: u64,
    pub parent_kind: SegmentType,
    pub duration: f64,
    pub independent: bool,
    pub byte_range: Option<(u64, Option<u64>)>,
}

impl PartialSegmentInfo {
    pub fn to_media_segment(&self) -> MediaSegmentType {
        MediaSegmentType::Partial(Part {
            uri: self.filename(),
            duration: self.duration,
            independent: self.independent,
            gap: false,
            byte_range: self.byte_range.map(|r| ByteRange {
                length: r.0,
                offset: r.1,
            }),
        })
    }

    pub fn filename(&self) -> String {
        HlsVariant::segment_name(self.parent_kind, self.parent_index)
    }

    /// Byte offset where this partial segment ends
    pub fn end_pos(&self) -> Option<u64> {
        self.byte_range
            .as_ref()
            .map(|(len, start)| start.unwrap_or(0) + len)
    }
}
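PartialSegmentInfo::end_pos above resolves a (length, optional start offset) byte range into the absolute end offset, which the playlist writer uses to chain consecutive partial segments and to place the preload hint. A small sketch of that arithmetic on plain tuples; the byte counts are made-up examples:

// (length, optional start offset), as stored in PartialSegmentInfo::byte_range
fn end_pos(byte_range: &Option<(u64, Option<u64>)>) -> Option<u64> {
    byte_range.as_ref().map(|(len, start)| start.unwrap_or(0) + len)
}

fn main() {
    let first = Some((188 * 500, Some(0)));          // 94 kB partial starting at byte 0
    let second = Some((188 * 400, end_pos(&first))); // next partial starts where the first ended
    assert_eq!(end_pos(&first), Some(94_000));
    assert_eq!(end_pos(&second), Some(94_000 + 75_200));
}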
660 crates/core/src/mux/hls/variant.rs Normal file
@@ -0,0 +1,660 @@
use crate::egress::{EgressResult, EgressSegment, EncoderOrSourceStream};
|
||||||
|
use crate::mux::hls::segment::{HlsSegment, PartialSegmentInfo, SegmentInfo};
|
||||||
|
use crate::mux::{HlsVariantStream, SegmentType};
|
||||||
|
use crate::variant::{StreamMapping, VariantStream};
|
||||||
|
use anyhow::{bail, ensure, Result};
|
||||||
|
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVCodecID::AV_CODEC_ID_H264;
|
||||||
|
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVMediaType::AVMEDIA_TYPE_VIDEO;
|
||||||
|
use ffmpeg_rs_raw::ffmpeg_sys_the_third::{
|
||||||
|
av_free, av_get_bits_per_pixel, av_pix_fmt_desc_get, av_q2d, av_write_frame, avio_close,
|
||||||
|
avio_flush, avio_open, avio_size, AVPacket, AVIO_FLAG_WRITE, AV_NOPTS_VALUE, AV_PKT_FLAG_KEY,
|
||||||
|
};
|
||||||
|
use ffmpeg_rs_raw::{cstr, Encoder, Muxer};
|
||||||
|
use log::{debug, info, trace, warn};
|
||||||
|
use m3u8_rs::{ExtTag, MediaSegmentType, PartInf, PreloadHint};
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use std::fs::{create_dir_all, File};
|
||||||
|
use std::mem::transmute;
|
||||||
|
use std::path::PathBuf;
|
||||||
|
use std::ptr;
|
||||||
|
|
||||||
|
pub struct HlsVariant {
|
||||||
|
/// Name of this variant (720p)
|
||||||
|
name: String,
|
||||||
|
/// MPEG-TS muxer for this variant
|
||||||
|
mux: Muxer,
|
||||||
|
/// List of streams ids in this variant
|
||||||
|
pub(crate) streams: Vec<HlsVariantStream>,
|
||||||
|
/// Segment length in seconds
|
||||||
|
segment_length_target: f32,
|
||||||
|
/// Total number of seconds of video to store
|
||||||
|
segment_window: f32,
|
||||||
|
/// Current segment index
|
||||||
|
idx: u64,
|
||||||
|
/// Output directory (base)
|
||||||
|
out_dir: PathBuf,
|
||||||
|
/// List of segments to be included in the playlist
|
||||||
|
segments: Vec<HlsSegment>,
|
||||||
|
/// Type of segments to create
|
||||||
|
segment_type: SegmentType,
|
||||||
|
/// Timestamp of the start of the current segment
|
||||||
|
current_segment_start: f64,
|
||||||
|
/// Timestamp of the start of the current partial
|
||||||
|
current_partial_start: f64,
|
||||||
|
/// Number of packets written to current segment
|
||||||
|
packets_written: u64,
|
||||||
|
/// Reference stream used to track duration
|
||||||
|
ref_stream_index: i32,
|
||||||
|
/// HLS-LL: Enable LL-output
|
||||||
|
low_latency: bool,
|
||||||
|
/// LL-HLS: Target duration for partial segments
|
||||||
|
partial_target_duration: f32,
|
||||||
|
/// HLS-LL: Current partial index
|
||||||
|
current_partial_index: u64,
|
||||||
|
/// HLS-LL: Whether the next partial segment should be marked as independent
|
||||||
|
next_partial_independent: bool,
|
||||||
|
/// Path to initialization segment for fMP4
|
||||||
|
init_segment_path: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl HlsVariant {
|
||||||
|
pub fn new<'a>(
|
||||||
|
out_dir: PathBuf,
|
||||||
|
group: usize,
|
||||||
|
encoded_vars: impl Iterator<Item = (&'a VariantStream, EncoderOrSourceStream<'a>)>,
|
||||||
|
segment_type: SegmentType,
|
||||||
|
) -> Result<Self> {
|
||||||
|
let name = format!("stream_{}", group);
|
||||||
|
|
||||||
|
let var_dir = out_dir.join(&name);
|
||||||
|
if !var_dir.exists() {
|
||||||
|
create_dir_all(&var_dir)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut mux = unsafe {
|
||||||
|
Muxer::builder()
|
||||||
|
.with_output_path(
|
||||||
|
var_dir.join("1.ts").to_str().unwrap(),
|
||||||
|
match segment_type {
|
||||||
|
SegmentType::MPEGTS => Some("mpegts"),
|
||||||
|
SegmentType::FMP4 => Some("mp4"),
|
||||||
|
},
|
||||||
|
)?
|
||||||
|
.build()?
|
||||||
|
};
|
||||||
|
let mut streams = Vec::new();
|
||||||
|
let mut ref_stream_index = -1;
|
||||||
|
let mut has_video = false;
|
||||||
|
let mut segment_length = 1.0;
|
||||||
|
|
||||||
|
for (var, enc) in encoded_vars {
|
||||||
|
match enc {
|
||||||
|
EncoderOrSourceStream::Encoder(enc) => match var {
|
||||||
|
VariantStream::Video(v) => unsafe {
|
||||||
|
let stream = mux.add_stream_encoder(enc)?;
|
||||||
|
let stream_idx = (*stream).index as usize;
|
||||||
|
streams.push(HlsVariantStream::Video {
|
||||||
|
group,
|
||||||
|
index: stream_idx,
|
||||||
|
id: v.id(),
|
||||||
|
});
|
||||||
|
has_video = true;
|
||||||
|
ref_stream_index = stream_idx as _;
|
||||||
|
let sg = v.keyframe_interval as f32 / v.fps;
|
||||||
|
if sg > segment_length {
|
||||||
|
segment_length = sg;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
VariantStream::Audio(a) => unsafe {
|
||||||
|
let stream = mux.add_stream_encoder(enc)?;
|
||||||
|
let stream_idx = (*stream).index as usize;
|
||||||
|
streams.push(HlsVariantStream::Audio {
|
||||||
|
group,
|
||||||
|
index: stream_idx,
|
||||||
|
id: a.id(),
|
||||||
|
});
|
||||||
|
if !has_video && ref_stream_index == -1 {
|
||||||
|
ref_stream_index = stream_idx as _;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
VariantStream::Subtitle(s) => unsafe {
|
||||||
|
let stream = mux.add_stream_encoder(enc)?;
|
||||||
|
streams.push(HlsVariantStream::Subtitle {
|
||||||
|
group,
|
||||||
|
index: (*stream).index as usize,
|
||||||
|
id: s.id(),
|
||||||
|
})
|
||||||
|
},
|
||||||
|
_ => bail!("unsupported variant stream"),
|
||||||
|
},
|
||||||
|
EncoderOrSourceStream::SourceStream(stream) => match var {
|
||||||
|
VariantStream::CopyVideo(v) => unsafe {
|
||||||
|
let stream = mux.add_copy_stream(stream)?;
|
||||||
|
let stream_idx = (*stream).index as usize;
|
||||||
|
streams.push(HlsVariantStream::Video {
|
||||||
|
group,
|
||||||
|
index: stream_idx,
|
||||||
|
id: v.id(),
|
||||||
|
});
|
||||||
|
has_video = true;
|
||||||
|
ref_stream_index = stream_idx as _;
|
||||||
|
},
|
||||||
|
VariantStream::CopyAudio(a) => unsafe {
|
||||||
|
let stream = mux.add_copy_stream(stream)?;
|
||||||
|
let stream_idx = (*stream).index as usize;
|
||||||
|
streams.push(HlsVariantStream::Audio {
|
||||||
|
group,
|
||||||
|
index: stream_idx,
|
||||||
|
id: a.id(),
|
||||||
|
});
|
||||||
|
if !has_video && ref_stream_index == -1 {
|
||||||
|
ref_stream_index = stream_idx as _;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
_ => bail!("unsupported variant stream"),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
ensure!(
|
||||||
|
ref_stream_index != -1,
|
||||||
|
"No reference stream found, cant create variant"
|
||||||
|
);
|
||||||
|
trace!(
|
||||||
|
"{} will use stream index {} as reference for segmentation",
|
||||||
|
name,
|
||||||
|
ref_stream_index
|
||||||
|
);
|
||||||
|
|
||||||
|
let mut opts = HashMap::new();
|
||||||
|
if let SegmentType::FMP4 = segment_type {
|
||||||
|
//opts.insert("fflags".to_string(), "-autobsf".to_string());
|
||||||
|
opts.insert(
|
||||||
|
"movflags".to_string(),
|
||||||
|
"+frag_custom+dash+delay_moov".to_string(),
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
unsafe {
|
||||||
|
mux.open(Some(opts))?;
|
||||||
|
//av_dump_format(mux.context(), 0, ptr::null_mut(), 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut variant = Self {
|
||||||
|
name: name.clone(),
|
||||||
|
segment_window: 30.0,
|
||||||
|
mux,
|
||||||
|
streams,
|
||||||
|
idx: 1,
|
||||||
|
segments: Vec::new(),
|
||||||
|
out_dir: var_dir,
|
||||||
|
segment_type,
|
||||||
|
current_segment_start: 0.0,
|
||||||
|
current_partial_start: 0.0,
|
||||||
|
packets_written: 0,
|
||||||
|
ref_stream_index,
|
||||||
|
low_latency: false,
|
||||||
|
partial_target_duration: 0.0,
|
||||||
|
current_partial_index: 0,
|
||||||
|
next_partial_independent: false,
|
||||||
|
segment_length_target: segment_length,
|
||||||
|
init_segment_path: None,
|
||||||
|
};
|
||||||
|
|
||||||
|
// Create initialization segment for fMP4
|
||||||
|
if segment_type == SegmentType::FMP4 {
|
||||||
|
unsafe {
|
||||||
|
variant.create_init_segment()?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(variant)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn segment_length(&self) -> f32 {
|
||||||
|
let min_segment_length = if self.low_latency {
|
||||||
|
(self.segment_length_target * 3.0).max(6.0) // make segments 3x longer in LL mode or minimum 6s
|
||||||
|
} else {
|
||||||
|
2.0
|
||||||
|
};
|
||||||
|
self.segment_length_target.max(min_segment_length)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn partial_segment_length(&self) -> f32 {
|
||||||
|
let seg_size = self.segment_length();
|
||||||
|
let partial_seg_size = seg_size / 3.0; // 3 segments min
|
||||||
|
partial_seg_size - partial_seg_size % seg_size
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn segment_name(t: SegmentType, idx: u64) -> String {
|
||||||
|
match t {
|
||||||
|
SegmentType::MPEGTS => format!("{}.ts", idx),
|
||||||
|
SegmentType::FMP4 => format!("{}.m4s", idx),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn map_segment_path(&self, idx: u64, typ: SegmentType) -> PathBuf {
|
||||||
|
self.out_dir.join(Self::segment_name(typ, idx))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Process a single packet through the muxer
|
||||||
|
pub(crate) unsafe fn process_packet(&mut self, pkt: *mut AVPacket) -> Result<EgressResult> {
|
||||||
|
let pkt_stream = *(*self.mux.context())
|
||||||
|
.streams
|
||||||
|
.add((*pkt).stream_index as usize);
|
||||||
|
|
||||||
|
let pkt_q = av_q2d((*pkt).time_base);
|
||||||
|
let mut result = EgressResult::None;
|
||||||
|
let stream_type = (*(*pkt_stream).codecpar).codec_type;
|
||||||
|
let mut can_split = stream_type == AVMEDIA_TYPE_VIDEO
|
||||||
|
&& ((*pkt).flags & AV_PKT_FLAG_KEY == AV_PKT_FLAG_KEY);
|
||||||
|
let mut is_ref_pkt = (*pkt).stream_index == self.ref_stream_index;
|
||||||
|
|
||||||
|
if (*pkt).pts == AV_NOPTS_VALUE {
|
||||||
|
can_split = false;
|
||||||
|
is_ref_pkt = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
if is_ref_pkt && self.packets_written > 0 {
|
||||||
|
let pkt_pts = (*pkt).pts as f64 * pkt_q;
|
||||||
|
let cur_duration = pkt_pts - self.current_segment_start;
|
||||||
|
let cur_part_duration = pkt_pts - self.current_partial_start;
|
||||||
|
|
||||||
|
// check if current packet is keyframe, flush current segment
|
||||||
|
if can_split && cur_duration >= self.segment_length() as f64 {
|
||||||
|
result = self.split_next_seg(pkt_pts)?;
|
||||||
|
} else if self.low_latency && cur_part_duration >= self.partial_target_duration as f64 {
|
||||||
|
result = self.create_partial_segment(pkt_pts)?;
|
||||||
|
self.next_partial_independent = can_split;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// write to current segment
|
||||||
|
self.mux.write_packet(pkt)?;
|
||||||
|
self.packets_written += 1;
|
||||||
|
|
||||||
|
Ok(result)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub unsafe fn reset(&mut self) -> Result<()> {
|
||||||
|
self.mux.close()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create a partial segment for LL-HLS
|
||||||
|
fn create_partial_segment(&mut self, next_pkt_start: f64) -> Result<EgressResult> {
|
||||||
|
let ctx = self.mux.context();
|
||||||
|
let end_pos = unsafe {
|
||||||
|
avio_flush((*ctx).pb);
|
||||||
|
avio_size((*ctx).pb) as u64
|
||||||
|
};
|
||||||
|
|
||||||
|
ensure!(end_pos > 0, "End position cannot be 0");
|
||||||
|
if self.segment_type == SegmentType::MPEGTS {
|
||||||
|
ensure!(
|
||||||
|
end_pos % 188 == 0,
|
||||||
|
"Invalid end position, must be multiple of 188"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
let previous_end_pos = self
|
||||||
|
.segments
|
||||||
|
.last()
|
||||||
|
.and_then(|s| match &s {
|
||||||
|
HlsSegment::Partial(p) => p.end_pos(),
|
||||||
|
_ => None,
|
||||||
|
})
|
||||||
|
.unwrap_or(0);
|
||||||
|
let partial_size = end_pos - previous_end_pos;
|
||||||
|
let partial_info = PartialSegmentInfo {
|
||||||
|
index: self.current_partial_index,
|
||||||
|
parent_index: self.idx,
|
||||||
|
parent_kind: self.segment_type,
|
||||||
|
duration: next_pkt_start - self.current_partial_start,
|
||||||
|
independent: self.next_partial_independent,
|
||||||
|
byte_range: Some((partial_size, Some(previous_end_pos))),
|
||||||
|
};
|
||||||
|
|
||||||
|
debug!(
|
||||||
|
"{} created partial segment {} [{:.3}s, independent={}]",
|
||||||
|
self.name, partial_info.index, partial_info.duration, partial_info.independent,
|
||||||
|
);
|
||||||
|
self.segments.push(HlsSegment::Partial(partial_info));
|
||||||
|
self.current_partial_index += 1;
|
||||||
|
self.next_partial_independent = false;
|
||||||
|
self.current_partial_start = next_pkt_start;
|
||||||
|
|
||||||
|
self.write_playlist()?;
|
||||||
|
|
||||||
|
Ok(EgressResult::None)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create initialization segment for fMP4
|
||||||
|
unsafe fn create_init_segment(&mut self) -> Result<()> {
|
||||||
|
if self.segment_type != SegmentType::FMP4 || self.init_segment_path.is_some() {
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
let init_path = PathBuf::from(&self.out_dir)
|
||||||
|
.join(&self.name)
|
||||||
|
.join("init.mp4")
|
||||||
|
.to_string_lossy()
|
||||||
|
.to_string();
|
||||||
|
|
||||||
|
// Create a temporary muxer for initialization segment
|
||||||
|
let mut init_opts = HashMap::new();
|
||||||
|
init_opts.insert(
|
||||||
|
"movflags".to_string(),
|
||||||
|
"+frag_custom+dash+delay_moov".to_string(),
|
||||||
|
);
|
||||||
|
|
||||||
|
let mut init_mux = Muxer::builder()
|
||||||
|
.with_output_path(init_path.as_str(), Some("mp4"))?
|
||||||
|
.build()?;
|
||||||
|
|
||||||
|
// Copy stream parameters from main muxer
|
||||||
|
let main_ctx = self.mux.context();
|
||||||
|
for i in 0..(*main_ctx).nb_streams {
|
||||||
|
let src_stream = *(*main_ctx).streams.add(i as usize);
|
||||||
|
let s = init_mux.add_copy_stream(src_stream)?;
|
||||||
|
ensure!((*s).index == (*src_stream).index, "Stream index mismatch");
|
||||||
|
}
|
||||||
|
|
||||||
|
init_mux.open(Some(init_opts))?;
|
||||||
|
av_write_frame(init_mux.context(), ptr::null_mut());
|
||||||
|
init_mux.close()?;
|
||||||
|
|
||||||
|
self.init_segment_path = Some("init.mp4".to_string());
|
||||||
|
info!("Created fMP4 initialization segment: {}", init_path);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Reset the muxer state and start the next segment
|
||||||
|
unsafe fn split_next_seg(&mut self, next_pkt_start: f64) -> Result<EgressResult> {
|
||||||
|
let completed_segment_idx = self.idx;
|
||||||
|
self.idx += 1;
|
||||||
|
self.current_partial_index = 0;
|
||||||
|
|
||||||
|
// Manually reset muxer avio
|
||||||
|
let ctx = self.mux.context();
|
||||||
|
let ret = av_write_frame(ctx, ptr::null_mut());
|
||||||
|
if ret < 0 {
|
||||||
|
bail!("Failed to split segment {}", ret);
|
||||||
|
}
|
||||||
|
avio_flush((*ctx).pb);
|
||||||
|
avio_close((*ctx).pb);
|
||||||
|
av_free((*ctx).url as *mut _);
|
||||||
|
|
||||||
|
let next_seg_url = self.map_segment_path(self.idx, self.segment_type);
|
||||||
|
(*ctx).url = cstr!(next_seg_url.to_str().unwrap());
|
||||||
|
|
||||||
|
let ret = avio_open(&mut (*ctx).pb, (*ctx).url, AVIO_FLAG_WRITE);
|
||||||
|
if ret < 0 {
|
||||||
|
bail!("Failed to re-init avio");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Log the completed segment (previous index), not the next one
|
||||||
|
let completed_seg_path = self.map_segment_path(completed_segment_idx, self.segment_type);
|
||||||
|
let segment_size = completed_seg_path.metadata().map(|m| m.len()).unwrap_or(0);
|
||||||
|
|
||||||
|
let cur_duration = next_pkt_start - self.current_segment_start;
|
||||||
|
debug!(
|
||||||
|
"Finished segment {} [{:.3}s, {:.2} kB, {} pkts]",
|
||||||
|
completed_seg_path
|
||||||
|
.file_name()
|
||||||
|
.unwrap_or_default()
|
||||||
|
.to_string_lossy(),
|
||||||
|
cur_duration,
|
||||||
|
segment_size as f32 / 1024f32,
|
||||||
|
self.packets_written
|
||||||
|
);
|
||||||
|
|
||||||
|
let video_var_id = self
|
||||||
|
.video_stream()
|
||||||
|
.unwrap_or(self.streams.first().unwrap())
|
||||||
|
.id()
|
||||||
|
.clone();
|
||||||
|
|
||||||
|
// cleanup old segments
|
||||||
|
let deleted = self
|
||||||
|
.clean_segments()?
|
||||||
|
.into_iter()
|
||||||
|
.map(|seg| EgressSegment {
|
||||||
|
variant: video_var_id,
|
||||||
|
idx: seg.index,
|
||||||
|
duration: seg.duration,
|
||||||
|
path: self.map_segment_path(seg.index, self.segment_type),
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
// emit result of the previously completed segment,
|
||||||
|
let created = EgressSegment {
|
||||||
|
variant: video_var_id,
|
||||||
|
idx: completed_segment_idx,
|
||||||
|
duration: cur_duration as f32,
|
||||||
|
path: completed_seg_path,
|
||||||
|
};
|
||||||
|
|
||||||
|
self.segments.push(HlsSegment::Full(SegmentInfo {
|
||||||
|
index: completed_segment_idx,
|
||||||
|
duration: if self.playlist_version() >= 6 {
|
||||||
|
cur_duration.round() as _
|
||||||
|
} else {
|
||||||
|
cur_duration as _
|
||||||
|
},
|
||||||
|
kind: self.segment_type,
|
||||||
|
}));
|
||||||
|
|
||||||
|
self.write_playlist()?;
|
||||||
|
|
||||||
|
// Reset counters for next segment
|
||||||
|
self.packets_written = 0;
|
||||||
|
self.current_segment_start = next_pkt_start;
|
||||||
|
|
||||||
|
Ok(EgressResult::Segments {
|
||||||
|
created: vec![created],
|
||||||
|
deleted,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn video_stream(&self) -> Option<&HlsVariantStream> {
|
||||||
|
self.streams
|
||||||
|
.iter()
|
||||||
|
.find(|a| matches!(*a, HlsVariantStream::Video { .. }))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Delete segments which are too old
|
||||||
|
fn clean_segments(&mut self) -> Result<Vec<SegmentInfo>> {
|
||||||
|
let drain_from_hls_segment = {
|
||||||
|
let mut acc = 0.0;
|
||||||
|
let mut seg_match = None;
|
||||||
|
for seg in self
|
||||||
|
.segments
|
||||||
|
.iter()
|
||||||
|
.filter(|e| matches!(e, HlsSegment::Full(_)))
|
||||||
|
.rev()
|
||||||
|
{
|
||||||
|
if acc >= self.segment_window {
|
||||||
|
seg_match = Some(seg);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
acc += match seg {
|
||||||
|
HlsSegment::Full(seg) => seg.duration,
|
||||||
|
_ => 0.0,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
seg_match
|
||||||
|
};
|
||||||
|
let mut ret = vec![];
|
||||||
|
if let Some(seg_match) = drain_from_hls_segment {
|
||||||
|
if let Some(drain_pos) = self.segments.iter().position(|e| e == seg_match) {
|
||||||
|
for seg in self.segments.drain(..drain_pos) {
|
||||||
|
match seg {
|
||||||
|
HlsSegment::Full(seg) => {
|
||||||
|
let seg_path = self.out_dir.join(seg.filename());
|
||||||
|
if let Err(e) = std::fs::remove_file(&seg_path) {
|
||||||
|
warn!(
|
||||||
|
"Failed to remove segment file: {} {}",
|
||||||
|
seg_path.display(),
|
||||||
|
e
|
||||||
|
);
|
||||||
|
}
|
||||||
|
trace!("Removed segment file: {}", seg_path.display());
|
||||||
|
|
||||||
|
ret.push(seg);
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(ret)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn playlist_version(&self) -> usize {
|
||||||
|
if self.low_latency {
|
||||||
|
6
|
||||||
|
} else if self.segment_type == SegmentType::FMP4 {
|
||||||
|
6 // EXT-X-MAP without I-FRAMES-ONLY
|
||||||
|
} else {
|
||||||
|
3
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write_playlist(&mut self) -> Result<()> {
|
||||||
|
if self.segments.is_empty() {
|
||||||
|
return Ok(()); // Don't write empty playlists
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut pl = m3u8_rs::MediaPlaylist::default();
|
||||||
|
pl.segments = self.segments.iter().map(|s| s.to_media_segment()).collect();
|
||||||
|
|
||||||
|
// Add EXT-X-MAP initialization segment for fMP4
|
||||||
|
if self.segment_type == SegmentType::FMP4 {
|
||||||
|
if let Some(ref init_path) = self.init_segment_path {
|
||||||
|
pl.unknown_tags.push(ExtTag {
|
||||||
|
tag: "X-MAP".to_string(),
|
||||||
|
rest: Some(format!("URI=\"{}\"", init_path)),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// append segment preload for next part segment
|
||||||
|
if let Some(HlsSegment::Partial(partial)) = self.segments.last() {
|
||||||
|
// TODO: try to estimate if there will be another partial segment
|
||||||
|
pl.segments.push(MediaSegmentType::PreloadHint(PreloadHint {
|
||||||
|
hint_type: "PART".to_string(),
|
||||||
|
uri: partial.filename(),
|
||||||
|
byte_range_start: partial.end_pos(),
|
||||||
|
byte_range_length: None,
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
pl.version = Some(self.playlist_version());
|
||||||
|
pl.target_duration = if self.playlist_version() >= 6 {
|
||||||
|
self.segment_length().round() as _
|
||||||
|
} else {
|
||||||
|
self.segment_length()
|
||||||
|
};
|
||||||
|
if self.low_latency {
|
||||||
|
pl.part_inf = Some(PartInf {
|
||||||
|
part_target: self.partial_target_duration as f64,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
pl.media_sequence = self
|
||||||
|
.segments
|
||||||
|
.iter()
|
||||||
|
.find_map(|s| match s {
|
||||||
|
HlsSegment::Full(ss) => Some(ss.index),
|
||||||
|
_ => None,
|
||||||
|
})
|
||||||
|
.unwrap_or(self.idx);
|
||||||
|
pl.end_list = false;
|
||||||
|
|
||||||
|
let mut f_out = File::create(self.out_dir.join("live.m3u8"))?;
|
||||||
|
pl.write_to(&mut f_out)?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
unsafe fn to_codec_attr(&self) -> Option<String> {
|
||||||
|
let mut codecs = Vec::new();
|
||||||
|
|
||||||
|
// Find video and audio streams and build codec string
|
||||||
|
for stream in &self.streams {
|
||||||
|
let av_stream = *(*self.mux.context()).streams.add(*stream.index());
|
||||||
|
let p = (*av_stream).codecpar;
|
||||||
|
|
||||||
|
match stream {
|
||||||
|
HlsVariantStream::Video { .. } => {
|
||||||
|
if (*p).codec_id == AV_CODEC_ID_H264 {
|
||||||
|
// Use profile and level from codec parameters
|
||||||
|
let profile_idc = (*p).profile as u8;
|
||||||
|
let level_idc = (*p).level as u8;
|
||||||
|
|
||||||
|
// For H.264, constraint flags are typically 0 unless specified
|
||||||
|
// Common constraint flags: 0x40 (constraint_set1_flag) for baseline
|
||||||
|
let constraint_flags = match profile_idc {
|
||||||
|
66 => 0x40, // Baseline profile
|
||||||
|
_ => 0x00, // Main/High profiles typically have no constraints
|
||||||
|
};
|
||||||
|
|
||||||
|
let avc1_code = format!(
|
||||||
|
"avc1.{:02x}{:02x}{:02x}",
|
||||||
|
profile_idc, constraint_flags, level_idc
|
||||||
|
);
|
||||||
|
codecs.push(avc1_code);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
HlsVariantStream::Audio { .. } => {
|
||||||
|
// Standard AAC-LC codec string
|
||||||
|
codecs.push("mp4a.40.2".to_string());
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if codecs.is_empty() {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
Some(codecs.join(","))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn to_playlist_variant(&self) -> m3u8_rs::VariantStream {
|
||||||
|
unsafe {
|
||||||
|
let pes = self.video_stream().unwrap_or(self.streams.first().unwrap());
|
||||||
|
let av_stream = *(*self.mux.context()).streams.add(*pes.index());
|
||||||
|
let codec_par = (*av_stream).codecpar;
|
||||||
|
let bitrate = (*codec_par).bit_rate as u64;
|
||||||
|
let fps = av_q2d((*codec_par).framerate);
|
||||||
|
m3u8_rs::VariantStream {
|
||||||
|
is_i_frame: false,
|
||||||
|
uri: format!("{}/live.m3u8", self.name),
|
||||||
|
bandwidth: if bitrate == 0 {
|
||||||
|
// make up bitrate when unknown (copy streams)
|
||||||
|
// this is the bitrate as a raw decoded stream, it's not accurate at all
|
||||||
|
// It only serves the purpose of ordering the copy streams as having the highest bitrate
|
||||||
|
let pix_desc = av_pix_fmt_desc_get(transmute((*codec_par).format));
|
||||||
|
(*codec_par).width as u64
|
||||||
|
* (*codec_par).height as u64
|
||||||
|
* av_get_bits_per_pixel(pix_desc) as u64
|
||||||
|
} else {
|
||||||
|
bitrate
|
||||||
|
},
|
||||||
|
average_bandwidth: None,
|
||||||
|
codecs: self.to_codec_attr(),
|
||||||
|
resolution: Some(m3u8_rs::Resolution {
|
||||||
|
width: (*codec_par).width as _,
|
||||||
|
height: (*codec_par).height as _,
|
||||||
|
}),
|
||||||
|
frame_rate: if fps > 0.0 { Some(fps) } else { None },
|
||||||
|
hdcp_level: None,
|
||||||
|
audio: None,
|
||||||
|
video: None,
|
||||||
|
subtitles: None,
|
||||||
|
closed_captions: None,
|
||||||
|
other_attributes: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@@ -1,2 +1,3 @@
 mod hls;
 pub use hls::*;
@ -4,12 +4,13 @@ use std::mem::transmute;
|
|||||||
use std::ops::Sub;
|
use std::ops::Sub;
|
||||||
use std::path::{Path, PathBuf};
|
use std::path::{Path, PathBuf};
|
||||||
use std::ptr;
|
use std::ptr;
|
||||||
|
use std::sync::mpsc::Receiver;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
use std::time::{Duration, Instant};
|
use std::time::{Duration, Instant};
|
||||||
|
|
||||||
use crate::egress::hls::HlsEgress;
|
use crate::egress::hls::HlsEgress;
|
||||||
use crate::egress::recorder::RecorderEgress;
|
use crate::egress::recorder::RecorderEgress;
|
||||||
use crate::egress::{Egress, EgressResult};
|
use crate::egress::{Egress, EgressResult, EncoderOrSourceStream};
|
||||||
use crate::generator::FrameGenerator;
|
use crate::generator::FrameGenerator;
|
||||||
use crate::ingress::ConnectionInfo;
|
use crate::ingress::ConnectionInfo;
|
||||||
use crate::mux::SegmentType;
|
use crate::mux::SegmentType;
|
||||||
@ -21,18 +22,18 @@ use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVCodecID::AV_CODEC_ID_WEBP;
|
|||||||
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVPictureType::AV_PICTURE_TYPE_NONE;
|
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVPictureType::AV_PICTURE_TYPE_NONE;
|
||||||
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVPixelFormat::AV_PIX_FMT_YUV420P;
|
use ffmpeg_rs_raw::ffmpeg_sys_the_third::AVPixelFormat::AV_PIX_FMT_YUV420P;
|
||||||
use ffmpeg_rs_raw::ffmpeg_sys_the_third::{
|
use ffmpeg_rs_raw::ffmpeg_sys_the_third::{
|
||||||
av_frame_clone, av_frame_free, av_get_sample_fmt, av_packet_free, av_rescale_q, AVFrame,
|
av_frame_clone, av_frame_free, av_get_sample_fmt, av_packet_clone, av_packet_free,
|
||||||
AVPacket, AV_NOPTS_VALUE,
|
av_rescale_q, AVFrame, AVPacket, AV_NOPTS_VALUE,
|
||||||
};
|
};
|
||||||
use ffmpeg_rs_raw::{
|
use ffmpeg_rs_raw::{
|
||||||
cstr, get_frame_from_hw, AudioFifo, Decoder, Demuxer, Encoder, Resample, Scaler, StreamType,
|
cstr, get_frame_from_hw, AudioFifo, Decoder, Demuxer, Encoder, Resample, Scaler, StreamType,
|
||||||
};
|
};
|
||||||
use log::{error, info, warn};
|
use log::{debug, error, info, warn};
|
||||||
use tokio::runtime::Handle;
|
use tokio::runtime::Handle;
|
||||||
use uuid::Uuid;
|
use uuid::Uuid;
|
||||||
|
|
||||||
/// Idle mode timeout in seconds
|
/// Idle mode timeout in seconds
|
||||||
const IDLE_TIMEOUT_SECS: u64 = 600;
|
const IDLE_TIMEOUT_SECS: u64 = 60;
|
||||||
|
|
||||||
/// Circuit breaker threshold for consecutive decode failures
|
/// Circuit breaker threshold for consecutive decode failures
|
||||||
const DEFAULT_MAX_CONSECUTIVE_FAILURES: u32 = 50;
|
const DEFAULT_MAX_CONSECUTIVE_FAILURES: u32 = 50;
|
||||||
@ -46,6 +47,8 @@ pub enum RunnerState {
|
|||||||
start_time: Instant,
|
start_time: Instant,
|
||||||
gen: FrameGenerator,
|
gen: FrameGenerator,
|
||||||
},
|
},
|
||||||
|
/// Pipeline should shut down and do any cleanup
|
||||||
|
Shutdown,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl RunnerState {
|
impl RunnerState {
|
||||||
@ -58,11 +61,17 @@ impl RunnerState {
|
|||||||
pub fn idle_duration(&self) -> Option<Duration> {
|
pub fn idle_duration(&self) -> Option<Duration> {
|
||||||
match self {
|
match self {
|
||||||
RunnerState::Idle { start_time, .. } => Some(start_time.elapsed()),
|
RunnerState::Idle { start_time, .. } => Some(start_time.elapsed()),
|
||||||
RunnerState::Normal => None,
|
_ => None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub enum PipelineCommand {
|
||||||
|
/// External process requested clean shutdown
|
||||||
|
Shutdown,
|
||||||
|
}
|
||||||
|
|
||||||
/// Pipeline runner is the main entry process for stream transcoding
|
/// Pipeline runner is the main entry process for stream transcoding
|
||||||
///
|
///
|
||||||
/// Each client connection spawns a new [PipelineRunner] and it should be run in its own thread
|
/// Each client connection spawns a new [PipelineRunner] and it should be run in its own thread
|
||||||
@ -92,9 +101,6 @@ pub struct PipelineRunner {
|
|||||||
/// Encoder for a variant (variant_id, Encoder)
|
/// Encoder for a variant (variant_id, Encoder)
|
||||||
encoders: HashMap<Uuid, Encoder>,
|
encoders: HashMap<Uuid, Encoder>,
|
||||||
|
|
||||||
/// Simple mapping to copy streams
|
|
||||||
copy_stream: HashMap<Uuid, Uuid>,
|
|
||||||
|
|
||||||
/// All configured egress'
|
/// All configured egress'
|
||||||
egress: Vec<Box<dyn Egress>>,
|
egress: Vec<Box<dyn Egress>>,
|
||||||
|
|
||||||
@ -108,7 +114,7 @@ pub struct PipelineRunner {
|
|||||||
frame_ctr: u64,
|
frame_ctr: u64,
|
||||||
|
|
||||||
/// Output directory where all stream data is saved
|
/// Output directory where all stream data is saved
|
||||||
out_dir: String,
|
out_dir: PathBuf,
|
||||||
|
|
||||||
/// Thumbnail generation interval (0 = disabled)
|
/// Thumbnail generation interval (0 = disabled)
|
||||||
thumb_interval: u64,
|
thumb_interval: u64,
|
||||||
@ -127,6 +133,9 @@ pub struct PipelineRunner {
|
|||||||
|
|
||||||
/// Last audio PTS for continuity in idle mode
|
/// Last audio PTS for continuity in idle mode
|
||||||
last_audio_pts: i64,
|
last_audio_pts: i64,
|
||||||
|
|
||||||
|
/// Command receiver for external process control
|
||||||
|
cmd_channel: Option<Receiver<PipelineCommand>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
unsafe impl Send for PipelineRunner {}
|
unsafe impl Send for PipelineRunner {}
|
||||||
@ -139,10 +148,11 @@ impl PipelineRunner {
|
|||||||
connection: ConnectionInfo,
|
connection: ConnectionInfo,
|
||||||
recv: Box<dyn Read + Send>,
|
recv: Box<dyn Read + Send>,
|
||||||
url: Option<String>,
|
url: Option<String>,
|
||||||
|
command: Option<Receiver<PipelineCommand>>,
|
||||||
) -> Result<Self> {
|
) -> Result<Self> {
|
||||||
Ok(Self {
|
Ok(Self {
|
||||||
handle,
|
handle,
|
||||||
out_dir,
|
out_dir: PathBuf::from(out_dir).join(connection.id.to_string()),
|
||||||
overseer,
|
overseer,
|
||||||
connection,
|
connection,
|
||||||
config: Default::default(),
|
config: Default::default(),
|
||||||
@ -151,7 +161,6 @@ impl PipelineRunner {
|
|||||||
scalers: Default::default(),
|
scalers: Default::default(),
|
||||||
resampler: Default::default(),
|
resampler: Default::default(),
|
||||||
encoders: Default::default(),
|
encoders: Default::default(),
|
||||||
copy_stream: Default::default(),
|
|
||||||
fps_counter_start: Instant::now(),
|
fps_counter_start: Instant::now(),
|
||||||
egress: Vec::new(),
|
egress: Vec::new(),
|
||||||
frame_ctr: 0,
|
frame_ctr: 0,
|
||||||
@ -162,6 +171,7 @@ impl PipelineRunner {
|
|||||||
max_consecutive_failures: DEFAULT_MAX_CONSECUTIVE_FAILURES,
|
max_consecutive_failures: DEFAULT_MAX_CONSECUTIVE_FAILURES,
|
||||||
last_video_pts: 0,
|
last_video_pts: 0,
|
||||||
last_audio_pts: 0,
|
last_audio_pts: 0,
|
||||||
|
cmd_channel: command,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -208,9 +218,7 @@ impl PipelineRunner {
|
|||||||
unsafe fn generate_thumb_from_frame(&mut self, frame: *mut AVFrame) -> Result<()> {
|
unsafe fn generate_thumb_from_frame(&mut self, frame: *mut AVFrame) -> Result<()> {
|
||||||
if self.thumb_interval > 0 && (self.frame_ctr % self.thumb_interval) == 0 {
|
if self.thumb_interval > 0 && (self.frame_ctr % self.thumb_interval) == 0 {
|
||||||
let frame = av_frame_clone(frame).addr();
|
let frame = av_frame_clone(frame).addr();
|
||||||
let dst_pic = PathBuf::from(&self.out_dir)
|
let dst_pic = self.out_dir.join("thumb.webp");
|
||||||
.join(self.connection.id.to_string())
|
|
||||||
.join("thumb.webp");
|
|
||||||
std::thread::spawn(move || unsafe {
|
std::thread::spawn(move || unsafe {
|
||||||
let mut frame = frame as *mut AVFrame; //TODO: danger??
|
let mut frame = frame as *mut AVFrame; //TODO: danger??
|
||||||
let thumb_start = Instant::now();
|
let thumb_start = Instant::now();
|
||||||
@ -354,51 +362,65 @@ impl PipelineRunner {
|
|||||||
|
|
||||||
// Process all packets (original or converted)
|
// Process all packets (original or converted)
|
||||||
let mut egress_results = vec![];
|
let mut egress_results = vec![];
|
||||||
// TODO: For copy streams, skip decoder
|
// only process via decoder if there is more than 1 encoder
|
||||||
let frames = match self.decoder.decode_pkt(packet) {
|
if !self.encoders.is_empty() {
|
||||||
Ok(f) => {
|
let frames = match self.decoder.decode_pkt(packet) {
|
||||||
// Reset failure counter on successful decode
|
Ok(f) => {
|
||||||
self.consecutive_decode_failures = 0;
|
// Reset failure counter on successful decode
|
||||||
f
|
self.consecutive_decode_failures = 0;
|
||||||
}
|
f
|
||||||
Err(e) => {
|
|
||||||
self.consecutive_decode_failures += 1;
|
|
||||||
|
|
||||||
// Enhanced error logging with context
|
|
||||||
let packet_info = if !packet.is_null() {
|
|
||||||
format!(
|
|
||||||
"stream_idx={}, size={}, pts={}, dts={}",
|
|
||||||
(*packet).stream_index,
|
|
||||||
(*packet).size,
|
|
||||||
(*packet).pts,
|
|
||||||
(*packet).dts
|
|
||||||
)
|
|
||||||
} else {
|
|
||||||
"null packet".to_string()
|
|
||||||
};
|
|
||||||
|
|
||||||
warn!(
|
|
||||||
"Error decoding packet ({}): {}. Consecutive failures: {}/{}. Skipping packet.",
|
|
||||||
packet_info, e, self.consecutive_decode_failures, self.max_consecutive_failures
|
|
||||||
);
|
|
||||||
|
|
||||||
return self.handle_decode_failure(&config);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
for (frame, stream_idx) in frames {
|
|
||||||
let stream = self.demuxer.get_stream(stream_idx as usize)?;
|
|
||||||
// Adjust frame pts time without start_offset
|
|
||||||
// Egress streams don't have a start time offset
|
|
||||||
if !stream.is_null() {
|
|
||||||
if (*stream).start_time != AV_NOPTS_VALUE {
|
|
||||||
(*frame).pts -= (*stream).start_time;
|
|
||||||
}
|
}
|
||||||
(*frame).time_base = (*stream).time_base;
|
Err(e) => {
|
||||||
}
|
self.consecutive_decode_failures += 1;
|
||||||
|
|
||||||
let results = self.process_frame(&config, stream_idx as usize, frame)?;
|
// Enhanced error logging with context
|
||||||
egress_results.extend(results);
|
let packet_info = if !packet.is_null() {
|
||||||
|
format!(
|
||||||
|
"stream_idx={}, size={}, pts={}, dts={}",
|
||||||
|
(*packet).stream_index,
|
||||||
|
(*packet).size,
|
||||||
|
(*packet).pts,
|
||||||
|
(*packet).dts
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
"null packet".to_string()
|
||||||
|
};
|
||||||
|
|
||||||
|
warn!(
|
||||||
|
"Error decoding packet ({}): {}. Consecutive failures: {}/{}. Skipping packet.",
|
||||||
|
packet_info, e, self.consecutive_decode_failures, self.max_consecutive_failures
|
||||||
|
);
|
||||||
|
|
||||||
|
return self.handle_decode_failure(&config);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
for (frame, stream_idx) in frames {
|
||||||
|
let stream = self.demuxer.get_stream(stream_idx as usize)?;
|
||||||
|
// Adjust frame pts time without start_offset
|
||||||
|
// Egress streams don't have a start time offset
|
||||||
|
if !stream.is_null() {
|
||||||
|
if (*stream).start_time != AV_NOPTS_VALUE {
|
||||||
|
(*frame).pts -= (*stream).start_time;
|
||||||
|
}
|
||||||
|
(*frame).time_base = (*stream).time_base;
|
||||||
|
}
|
||||||
|
|
||||||
|
let results = self.process_frame(&config, stream_idx as usize, frame)?;
|
||||||
|
egress_results.extend(results);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// egress (mux) copy variants
|
||||||
|
for var in config.variants {
|
||||||
|
match var {
|
||||||
|
VariantStream::CopyVideo(v) | VariantStream::CopyAudio(v)
|
||||||
|
if v.src_index == (*packet).stream_index as _ =>
|
||||||
|
{
|
||||||
|
egress_results.extend(Self::egress_packet(&mut self.egress, packet, &v.id())?);
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(egress_results)
|
Ok(egress_results)
|
||||||
@ -424,7 +446,6 @@ impl PipelineRunner {
|
|||||||
let enc = if let Some(enc) = self.encoders.get_mut(&var.id()) {
|
let enc = if let Some(enc) = self.encoders.get_mut(&var.id()) {
|
||||||
enc
|
enc
|
||||||
} else {
|
} else {
|
||||||
warn!("Frame had nowhere to go in {} :/", var.id());
|
|
||||||
continue;
|
continue;
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -500,7 +521,6 @@ impl PipelineRunner {
|
|||||||
encoder: &mut Encoder,
|
encoder: &mut Encoder,
|
||||||
frame: *mut AVFrame,
|
frame: *mut AVFrame,
|
||||||
) -> Result<Vec<EgressResult>> {
|
) -> Result<Vec<EgressResult>> {
|
||||||
let mut ret = vec![];
|
|
||||||
// before encoding frame, rescale timestamps
|
// before encoding frame, rescale timestamps
|
||||||
if !frame.is_null() {
|
if !frame.is_null() {
|
||||||
let enc_ctx = encoder.codec_context();
|
let enc_ctx = encoder.codec_context();
|
||||||
@ -514,20 +534,37 @@ impl PipelineRunner {
|
|||||||
}
|
}
|
||||||
|
|
||||||
let packets = encoder.encode_frame(frame)?;
|
let packets = encoder.encode_frame(frame)?;
|
||||||
// pass new packets to egress
|
let mut ret = vec![];
|
||||||
for mut pkt in packets {
|
for pkt in packets {
|
||||||
for eg in egress.iter_mut() {
|
ret.extend(Self::egress_packet(egress, pkt, &var.id())?);
|
||||||
let er = eg.process_pkt(pkt, &var.id())?;
|
|
||||||
ret.push(er);
|
|
||||||
}
|
|
||||||
av_packet_free(&mut pkt);
|
|
||||||
}
|
}
|
||||||
|
Ok(ret)
|
||||||
|
}
|
||||||
|
|
||||||
|
unsafe fn egress_packet(
|
||||||
|
egress: &mut Vec<Box<dyn Egress>>,
|
||||||
|
mut pkt: *mut AVPacket,
|
||||||
|
variant: &Uuid,
|
||||||
|
) -> Result<Vec<EgressResult>> {
|
||||||
|
let mut ret = vec![];
|
||||||
|
for eg in egress.iter_mut() {
|
||||||
|
let mut pkt_clone = av_packet_clone(pkt);
|
||||||
|
let er = eg.process_pkt(pkt_clone, variant)?;
|
||||||
|
av_packet_free(&mut pkt_clone);
|
||||||
|
ret.push(er);
|
||||||
|
}
|
||||||
Ok(ret)
|
Ok(ret)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// EOF, cleanup
|
/// EOF, cleanup
|
||||||
unsafe fn flush(&mut self) -> Result<()> {
|
unsafe fn flush(&mut self) -> Result<()> {
|
||||||
|
if self.config.is_some() {
|
||||||
|
self.handle.block_on(async {
|
||||||
|
if let Err(e) = self.overseer.on_end(&self.connection.id).await {
|
||||||
|
error!("Failed to end stream: {e}");
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
for (var, enc) in &mut self.encoders {
|
for (var, enc) in &mut self.encoders {
|
||||||
for mut pkt in enc.encode_frame(ptr::null_mut())? {
|
for mut pkt in enc.encode_frame(ptr::null_mut())? {
|
||||||
for eg in self.egress.iter_mut() {
|
for eg in self.egress.iter_mut() {
|
||||||
@ -539,14 +576,6 @@ impl PipelineRunner {
|
|||||||
for eg in self.egress.iter_mut() {
|
for eg in self.egress.iter_mut() {
|
||||||
eg.reset()?;
|
eg.reset()?;
|
||||||
}
|
}
|
||||||
|
|
||||||
if self.config.is_some() {
|
|
||||||
self.handle.block_on(async {
|
|
||||||
if let Err(e) = self.overseer.on_end(&self.connection.id).await {
|
|
||||||
error!("Failed to end stream: {e}");
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -556,16 +585,12 @@ impl PipelineRunner {
|
|||||||
match self.once() {
|
match self.once() {
|
||||||
Ok(c) => {
|
Ok(c) => {
|
||||||
if !c {
|
if !c {
|
||||||
if let Err(e) = self.flush() {
|
// let drop handle flush
|
||||||
error!("Pipeline flush failed: {}", e);
|
|
||||||
}
|
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
if let Err(e) = self.flush() {
|
// let drop handle flush
|
||||||
error!("Pipeline flush failed: {}", e);
|
|
||||||
}
|
|
||||||
error!("Pipeline run failed: {}", e);
|
error!("Pipeline run failed: {}", e);
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
@ -574,7 +599,25 @@ impl PipelineRunner {
        }
    }

    fn handle_command(&mut self) -> Result<Option<bool>> {
        if let Some(cmd) = &self.cmd_channel {
            while let Ok(c) = cmd.try_recv() {
                match c {
                    PipelineCommand::Shutdown => {
                        self.state = RunnerState::Shutdown;
                        return Ok(Some(true));
                    }
                    _ => warn!("Unexpected command: {:?}", c),
                }
            }
        }
        Ok(None)
    }

    unsafe fn once(&mut self) -> Result<bool> {
        if let Some(r) = self.handle_command()? {
            return Ok(r);
        }
        self.setup()?;

        let config = if let Some(config) = &self.config {
@ -587,6 +630,7 @@ impl PipelineRunner {
        let results = match &mut self.state {
            RunnerState::Normal => self.process_normal_mode(&config)?,
            RunnerState::Idle { .. } => self.process_idle_mode(&config)?,
            _ => return Ok(false), // skip once, nothing to do
        };

        // egress results - process async operations without blocking if possible
@ -609,7 +653,7 @@ impl PipelineRunner {
        let elapsed = Instant::now().sub(self.fps_counter_start).as_secs_f32();
        if elapsed >= 2f32 {
            let n_frames = self.frame_ctr - self.fps_last_frame_ctr;
            info!("Average fps: {:.2}", n_frames as f32 / elapsed);
            debug!("Average fps: {:.2}", n_frames as f32 / elapsed);
            self.fps_counter_start = Instant::now();
            self.fps_last_frame_ctr = self.frame_ctr;
        }
@ -687,32 +731,33 @@ impl PipelineRunner {
            }
        }

        // TODO: Setup copy streams

        // Setup egress
        for e in &cfg.egress {
            let c = e.config();
            let encoders = self.encoders.iter().filter_map(|(k, v)| {
            let vars = c
                if c.variants.contains(k) {
                .variants
                    let var = cfg.variants.iter().find(|x| x.id() == *k)?;
                .iter()
                    Some((var, v))
                .map_while(|x| cfg.variants.iter().find(|z| z.id() == *x));
            let variant_mapping = vars.map_while(|v| {
                if let Some(e) = self.encoders.get(&v.id()) {
                    Some((v, EncoderOrSourceStream::Encoder(e)))
                } else {
                    None
                    Some((
                        v,
                        EncoderOrSourceStream::SourceStream(unsafe {
                            self.demuxer.get_stream(v.src_index()).ok()?
                        }),
                    ))
                }
            });
            match e {
                EgressType::HLS(_) => {
                    let hls = HlsEgress::new(
                    let hls =
                        &self.connection.id,
                        HlsEgress::new(self.out_dir.clone(), variant_mapping, SegmentType::MPEGTS)?;
                        &self.out_dir,
                        2.0, // TODO: configure segment length
                        encoders,
                        SegmentType::MPEGTS,
                    )?;
                    self.egress.push(Box::new(hls));
                }
                EgressType::Recorder(_) => {
                    let rec = RecorderEgress::new(&self.connection.id, &self.out_dir, encoders)?;
                    let rec = RecorderEgress::new(self.out_dir.clone(), variant_mapping)?;
                    self.egress.push(Box::new(rec));
                }
                _ => warn!("{} is not implemented", e),
@ -735,12 +780,11 @@ impl Drop for PipelineRunner {
        self.encoders.clear();
        self.scalers.clear();
        self.resampler.clear();
        self.copy_stream.clear();
        self.egress.clear();

        info!(
            "PipelineRunner cleaned up resources for stream: {}",
            self.connection.key
            self.connection.id
        );
    }
}
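The handle_command() helper added above drains a non-blocking command channel at the top of every once() iteration. As a rough illustration of how a controller could feed that channel, here is a minimal sketch; the actual channel type behind cmd_channel and the full PipelineCommand enum are not shown in this diff, so both are assumptions.

use std::sync::mpsc;

// Hypothetical stand-in for the crate's PipelineCommand; only Shutdown appears in the hunk.
#[derive(Debug)]
enum PipelineCommand {
    Shutdown,
}

fn main() {
    // The runner would hold `rx` as its `cmd_channel`.
    let (tx, rx) = mpsc::channel::<PipelineCommand>();

    // Controller side: request a shutdown.
    tx.send(PipelineCommand::Shutdown).ok();

    // Runner side: drain without blocking, mirroring handle_command().
    while let Ok(cmd) = rx.try_recv() {
        match cmd {
            PipelineCommand::Shutdown => println!("would set RunnerState::Shutdown and stop"),
        }
    }
}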
937
crates/core/src/test_hls_timing.rs
Normal file
@ -0,0 +1,937 @@
use crate::egress::EncoderOrSourceStream;
use crate::generator::FrameGenerator;
use crate::mux::{HlsMuxer, SegmentType};
use crate::variant::audio::AudioVariant;
use crate::variant::mapping::VariantMapping;
use crate::variant::video::VideoVariant;
use crate::variant::{StreamMapping, VariantStream};
use anyhow::{Context, Result};
use ffmpeg_rs_raw::ffmpeg_sys_the_third::{
    av_q2d, AVMediaType::AVMEDIA_TYPE_AUDIO, AVMediaType::AVMEDIA_TYPE_VIDEO,
    AVPixelFormat::AV_PIX_FMT_YUV420P, AVRational, AVSampleFormat::AV_SAMPLE_FMT_FLTP,
    AV_NOPTS_VALUE, AV_PROFILE_H264_MAIN,
};
use ffmpeg_rs_raw::{Demuxer, Encoder};
use m3u8_rs::{parse_media_playlist, MediaSegmentType};
use std::collections::HashMap;
use std::fs;
use std::io::Read;
use std::path::{Path, PathBuf};
use std::time::{Duration, Instant};
use uuid::Uuid;

#[derive(Debug, Clone)]
pub struct HlsTimingResult {
    pub playlist_duration: f32,
    pub actual_duration: f64,
    pub video_duration: f64,
    pub audio_duration: f64,
    pub difference: f64,
    pub segment_name: String,
    pub is_partial: bool,
    pub independent: bool,
}

#[derive(Debug)]
pub struct HlsTimingTestResult {
    pub total_segments: usize,
    pub full_segments: usize,
    pub partial_segments: usize,
    pub independent_partials: usize,
    pub total_playlist_duration: f32,
    pub total_actual_duration: f64,
    pub total_difference: f64,
    pub average_difference: f64,
    pub min_difference: f64,
    pub max_difference: f64,
    pub problematic_segments: Vec<HlsTimingResult>,
    pub segments: Vec<HlsTimingResult>,
    pub test_duration: Duration,
    pub success: bool,
    pub error_message: Option<String>,
}

impl HlsTimingTestResult {
    /// Check if the HLS timing test passed based on thresholds
    pub fn passes(&self, max_avg_diff: f64, max_individual_diff: f64) -> bool {
        self.success
            && self.average_difference.abs() <= max_avg_diff
            && self
                .problematic_segments
                .iter()
                .all(|s| s.difference.abs() <= max_individual_diff)
    }

    /// Get a summary of the test results
    pub fn summary(&self) -> String {
        if !self.success {
            return format!(
                "FAILED: {}",
                self.error_message.as_deref().unwrap_or("Unknown error")
            );
        }

        format!(
            "PASSED: {} segments, avg diff: {:.3}s, {} problematic",
            self.total_segments,
            self.average_difference,
            self.problematic_segments.len()
        )
    }
}

pub struct HlsTimingTester {
    max_avg_difference: f64,
    max_individual_difference: f64,
    problematic_threshold: f64,
}

impl Default for HlsTimingTester {
    fn default() -> Self {
        Self {
            max_avg_difference: 0.1, // 100ms average difference
            max_individual_difference: 0.5, // 500ms individual difference
            problematic_threshold: 0.2, // 200ms considered problematic
        }
    }
}

impl HlsTimingTester {
    pub fn new(max_avg_diff: f64, max_individual_diff: f64, problematic_threshold: f64) -> Self {
        Self {
            max_avg_difference: max_avg_diff,
            max_individual_difference: max_individual_diff,
            problematic_threshold,
        }
    }

    /// Generate and test HLS stream with test pattern
    pub fn test_generated_stream(
        &self,
        output_dir: &Path,
        duration_seconds: f32,
        segment_type: SegmentType,
    ) -> Result<HlsTimingTestResult> {
        let start_time = Instant::now();

        // Generate test stream
        let stream_id = Uuid::new_v4();
        let out_dir = output_dir.join(stream_id.to_string());
        let (_muxer, hls_dir) =
            self.generate_test_stream(&out_dir, duration_seconds, segment_type)?;

        // Test the generated stream
        match self.test_stream_timing_internal(&hls_dir) {
            Ok(mut result) => {
                result.test_duration = start_time.elapsed();
                result.success =
                    result.passes(self.max_avg_difference, self.max_individual_difference);
                Ok(result)
            }
            Err(e) => Ok(HlsTimingTestResult {
                total_segments: 0,
                full_segments: 0,
                partial_segments: 0,
                independent_partials: 0,
                total_playlist_duration: 0.0,
                total_actual_duration: 0.0,
                total_difference: 0.0,
                average_difference: 0.0,
                min_difference: 0.0,
                max_difference: 0.0,
                problematic_segments: Vec::new(),
                segments: Vec::new(),
                test_duration: start_time.elapsed(),
                success: false,
                error_message: Some(e.to_string()),
            }),
        }
    }

    /// Generate test HLS stream with test pattern
    fn generate_test_stream(
        &self,
        output_dir: &Path,
        duration_seconds: f32,
        segment_type: SegmentType,
    ) -> Result<(HlsMuxer, PathBuf)> {
        const VIDEO_FPS: f32 = 30.0;
        const VIDEO_WIDTH: u16 = 1280;
        const VIDEO_HEIGHT: u16 = 720;
        const SAMPLE_RATE: u32 = 44100;

        // Create video encoder
        let mut video_encoder = unsafe {
            Encoder::new_with_name("libx264")?
                .with_stream_index(0)
                .with_framerate(VIDEO_FPS)?
                .with_bitrate(1_000_000)
                .with_pix_fmt(AV_PIX_FMT_YUV420P)
                .with_width(VIDEO_WIDTH as _)
                .with_height(VIDEO_HEIGHT as _)
                .with_level(51)
                .with_profile(AV_PROFILE_H264_MAIN)
                .open(None)?
        };

        // Create audio encoder
        let mut audio_encoder = unsafe {
            Encoder::new_with_name("aac")?
                .with_stream_index(1)
                .with_default_channel_layout(1)
                .with_bitrate(128_000)
                .with_sample_format(AV_SAMPLE_FMT_FLTP)
                .with_sample_rate(SAMPLE_RATE as _)?
                .open(None)?
        };

        // Create variant streams
        let video_stream = VideoVariant {
            mapping: VariantMapping {
                id: Uuid::new_v4(),
                src_index: 0,
                dst_index: 0,
                group_id: 0,
            },
            width: VIDEO_WIDTH,
            height: VIDEO_HEIGHT,
            fps: VIDEO_FPS,
            bitrate: 1_000_000,
            codec: "libx264".to_string(),
            profile: AV_PROFILE_H264_MAIN as usize,
            level: 51,
            keyframe_interval: 60,
            pixel_format: AV_PIX_FMT_YUV420P as u32,
        };

        let audio_stream = AudioVariant {
            mapping: VariantMapping {
                id: Uuid::new_v4(),
                src_index: 1,
                dst_index: 1,
                group_id: 0,
            },
            bitrate: 128_000,
            codec: "aac".to_string(),
            channels: 1,
            sample_rate: SAMPLE_RATE as usize,
            sample_fmt: "fltp".to_string(),
        };

        let video_variant = VariantStream::Video(video_stream.clone());
        let audio_variant = VariantStream::Audio(audio_stream.clone());
        let variants = vec![
            (
                &video_variant,
                EncoderOrSourceStream::Encoder(&video_encoder),
            ),
            (
                &audio_variant,
                EncoderOrSourceStream::Encoder(&audio_encoder),
            ),
        ];

        // Create HLS muxer
        let mut hls_muxer =
            HlsMuxer::new(output_dir.to_path_buf(), variants.into_iter(), segment_type)?;

        // Create frame generator
        let frame_size = unsafe { (*audio_encoder.codec_context()).frame_size as _ };
        let mut frame_gen = FrameGenerator::new(
            VIDEO_FPS,
            VIDEO_WIDTH,
            VIDEO_HEIGHT,
            AV_PIX_FMT_YUV420P,
            SAMPLE_RATE,
            frame_size,
            1,
            AVRational {
                num: 1,
                den: VIDEO_FPS as i32,
            },
            AVRational {
                num: 1,
                den: SAMPLE_RATE as i32,
            },
        )?;
        frame_gen.set_realtime(false);

        // Generate frames for the specified duration
        let total_video_frames = (duration_seconds * VIDEO_FPS) as u64;
        let mut video_frames_generated = 0;

        while video_frames_generated < total_video_frames {
            unsafe {
                frame_gen.begin()?;
                frame_gen.write_text(
                    &format!("Video Frame: {}", video_frames_generated),
                    40.0,
                    50.0,
                    50.0,
                )?;
                frame_gen.write_text(
                    &format!("Time: {:.1}s", video_frames_generated as f32 / VIDEO_FPS),
                    40.0,
                    50.0,
                    100.0,
                )?;

                let mut frame = frame_gen.next()?;
                if frame.is_null() {
                    log::warn!("FrameGenerator returned null frame unexpectedly");
                    break;
                }

                // Determine if this is audio or video frame and encode accordingly
                if (*frame).sample_rate > 0 {
                    // Audio frame - don't increment video counter
                    log::debug!("Generated audio frame, PTS: {}", (*frame).pts);
                    for mut pkt in audio_encoder.encode_frame(frame)? {
                        let result = hls_muxer.mux_packet(pkt, &audio_stream.id())?;
                        if let crate::egress::EgressResult::Segments {
                            created,
                            deleted: _,
                        } = result
                        {
                            for segment in created {
                                log::debug!("Created audio segment: {:?}", segment.path);
                            }
                        }
                        ffmpeg_rs_raw::ffmpeg_sys_the_third::av_packet_free(&mut pkt);
                    }
                } else {
                    // Video frame - increment video counter
                    log::debug!(
                        "Generated video frame {}, PTS: {}",
                        video_frames_generated,
                        (*frame).pts
                    );
                    for mut pkt in video_encoder.encode_frame(frame)? {
                        let result = hls_muxer.mux_packet(pkt, &video_stream.id())?;
                        if let crate::egress::EgressResult::Segments {
                            created,
                            deleted: _,
                        } = result
                        {
                            for segment in created {
                                log::debug!("Created video segment: {:?}", segment.path);
                            }
                        }
                        ffmpeg_rs_raw::ffmpeg_sys_the_third::av_packet_free(&mut pkt);
                    }
                    video_frames_generated += 1;
                }

                ffmpeg_rs_raw::ffmpeg_sys_the_third::av_frame_free(&mut frame);
            }
        }

        // Flush encoders to ensure all packets are written
        unsafe {
            // Flush video encoder
            for mut pkt in video_encoder.encode_frame(std::ptr::null_mut())? {
                hls_muxer.mux_packet(pkt, &video_stream.id())?;
                ffmpeg_rs_raw::ffmpeg_sys_the_third::av_packet_free(&mut pkt);
            }

            // Flush audio encoder
            for mut pkt in audio_encoder.encode_frame(std::ptr::null_mut())? {
                hls_muxer.mux_packet(pkt, &audio_stream.id())?;
                ffmpeg_rs_raw::ffmpeg_sys_the_third::av_packet_free(&mut pkt);
            }
        }

        log::info!(
            "Generated {} video frames ({:.1}s) of test HLS stream at",
            video_frames_generated,
            video_frames_generated as f32 / VIDEO_FPS
        );

        Ok((hls_muxer, output_dir.join("stream_0")))
    }

    /// Test HLS timing for a specific stream directory
    pub fn test_stream_timing(&self, hls_dir: &Path) -> HlsTimingTestResult {
        let start_time = Instant::now();

        match self.test_stream_timing_internal(hls_dir) {
            Ok(mut result) => {
                result.test_duration = start_time.elapsed();
                result.success =
                    result.passes(self.max_avg_difference, self.max_individual_difference);
                result
            }
            Err(e) => HlsTimingTestResult {
                total_segments: 0,
                full_segments: 0,
                partial_segments: 0,
                independent_partials: 0,
                total_playlist_duration: 0.0,
                total_actual_duration: 0.0,
                total_difference: 0.0,
                average_difference: 0.0,
                min_difference: 0.0,
                max_difference: 0.0,
                problematic_segments: Vec::new(),
                segments: Vec::new(),
                test_duration: start_time.elapsed(),
                success: false,
                error_message: Some(e.to_string()),
            },
        }
    }

    fn test_stream_timing_internal(&self, hls_dir: &Path) -> Result<HlsTimingTestResult> {
        let playlist_path = hls_dir.join("live.m3u8");

        if !playlist_path.exists() {
            return Err(anyhow::anyhow!(
                "Playlist file does not exist: {:?}",
                playlist_path
            ));
        }

        // Parse the playlist
        let playlist_content =
            fs::read_to_string(&playlist_path).context("Failed to read playlist file")?;

        let (_, playlist) = parse_media_playlist(playlist_content.as_bytes())
            .map_err(|e| anyhow::anyhow!("Failed to parse playlist: {:?}", e))?;

        let mut segments = Vec::new();
        let mut total_playlist_duration = 0.0f32;
        let mut total_actual_duration = 0.0f64;

        // Analyze each segment
        for segment_type in &playlist.segments {
            match segment_type {
                MediaSegmentType::Full(segment) => {
                    let segment_path = hls_dir.join(&segment.uri);

                    if !segment_path.exists() {
                        continue; // Skip missing segments
                    }

                    let durations = self.analyze_segment(&segment_path)?;
                    let actual_duration = durations.total_duration;
                    let video_duration = durations.video_duration;
                    let audio_duration = durations.audio_duration;
                    let playlist_duration = segment.duration;
                    let difference = actual_duration - playlist_duration as f64;

                    let result = HlsTimingResult {
                        playlist_duration,
                        actual_duration,
                        video_duration,
                        audio_duration,
                        difference,
                        segment_name: segment.uri.clone(),
                        is_partial: false,
                        independent: false,
                    };

                    segments.push(result);
                    total_playlist_duration += playlist_duration;
                    total_actual_duration += actual_duration;
                }
                MediaSegmentType::Partial(partial) => {
                    let segment_path = hls_dir.join(&partial.uri);

                    if !segment_path.exists() {
                        continue; // Skip missing segments
                    }

                    let durations = if let Some(byte_range) = &partial.byte_range {
                        self.analyze_partial_segment(
                            &segment_path,
                            byte_range.length,
                            byte_range.offset,
                        )?
                    } else {
                        self.analyze_segment(&segment_path)?
                    };

                    let actual_duration = durations.total_duration;
                    let video_duration = durations.video_duration;
                    let audio_duration = durations.audio_duration;
                    let playlist_duration = partial.duration as f32;
                    let difference = actual_duration - playlist_duration as f64;

                    let result = HlsTimingResult {
                        playlist_duration,
                        actual_duration,
                        video_duration,
                        audio_duration,
                        difference,
                        segment_name: partial.uri.clone(),
                        is_partial: true,
                        independent: partial.independent,
                    };

                    segments.push(result);
                    total_playlist_duration += playlist_duration;
                    total_actual_duration += actual_duration;
                }
                MediaSegmentType::PreloadHint(_) => {
                    // Skip preload hints
                    continue;
                }
            }
        }

        // Calculate statistics
        let full_segments = segments.iter().filter(|s| !s.is_partial).count();
        let partial_segments = segments.iter().filter(|s| s.is_partial).count();
        let independent_partials = segments
            .iter()
            .filter(|s| s.is_partial && s.independent)
            .count();
        let total_difference = total_actual_duration - total_playlist_duration as f64;
        let average_difference = if !segments.is_empty() {
            total_difference / segments.len() as f64
        } else {
            0.0
        };

        let differences: Vec<f64> = segments.iter().map(|s| s.difference).collect();
        let min_difference = differences.iter().fold(f64::INFINITY, |a, &b| a.min(b));
        let max_difference = differences.iter().fold(f64::NEG_INFINITY, |a, &b| a.max(b));

        // Find problematic segments
        let problematic_segments: Vec<HlsTimingResult> = segments
            .iter()
            .filter(|s| s.difference.abs() > self.problematic_threshold)
            .cloned()
            .collect();

        Ok(HlsTimingTestResult {
            total_segments: segments.len(),
            full_segments,
            partial_segments,
            independent_partials,
            total_playlist_duration,
            total_actual_duration,
            total_difference,
            average_difference,
            min_difference,
            max_difference,
            problematic_segments,
            segments,
            test_duration: Duration::from_secs(0), // Will be set by caller
            success: true, // Will be determined by caller
            error_message: None,
        })
    }

    /// Test multiple HLS streams concurrently
    pub async fn test_multiple_streams(
        &self,
        hls_dirs: Vec<PathBuf>,
    ) -> HashMap<PathBuf, HlsTimingTestResult> {
        let mut results = HashMap::new();

        // Run tests concurrently
        let futures: Vec<_> = hls_dirs
            .into_iter()
            .map(|dir| {
                let tester = HlsTimingTester::new(
                    self.max_avg_difference,
                    self.max_individual_difference,
                    self.problematic_threshold,
                );
                let dir_clone = dir.clone();
                async move {
                    let result =
                        tokio::task::spawn_blocking(move || tester.test_stream_timing(&dir_clone))
                            .await
                            .unwrap_or_else(|_| HlsTimingTestResult {
                                total_segments: 0,
                                full_segments: 0,
                                partial_segments: 0,
                                independent_partials: 0,
                                total_playlist_duration: 0.0,
                                total_actual_duration: 0.0,
                                total_difference: 0.0,
                                average_difference: 0.0,
                                min_difference: 0.0,
                                max_difference: 0.0,
                                problematic_segments: Vec::new(),
                                segments: Vec::new(),
                                test_duration: Duration::from_secs(0),
                                success: false,
                                error_message: Some("Task panicked".to_string()),
                            });
                    (dir, result)
                }
            })
            .collect();

        let resolved_futures = futures::future::join_all(futures).await;

        for (dir, result) in resolved_futures {
            results.insert(dir, result);
        }

        results
    }

    fn analyze_segment(&self, path: &Path) -> Result<SegmentDurations> {
        let file = fs::File::open(path)
            .with_context(|| format!("Failed to open file: {}", path.display()))?;
        self.analyze_segment_with_reader(Box::new(file))
    }

    fn analyze_partial_segment(
        &self,
        path: &Path,
        length: u64,
        offset: Option<u64>,
    ) -> Result<SegmentDurations> {
        let reader = ByteRangeReader::new(path, length, offset)?;
        self.analyze_segment_with_reader(Box::new(reader))
    }

    fn analyze_segment_with_reader(&self, reader: Box<dyn Read>) -> Result<SegmentDurations> {
        let mut demuxer = Demuxer::new_custom_io(reader, None)?;

        unsafe {
            demuxer.probe_input()?;
        }

        let mut video_start_pts = AV_NOPTS_VALUE;
        let mut video_end_pts = AV_NOPTS_VALUE;
        let mut audio_start_pts = AV_NOPTS_VALUE;
        let mut audio_end_pts = AV_NOPTS_VALUE;
        let mut video_last_duration = 0i64;
        let mut audio_last_duration = 0i64;
        let mut video_stream_idx: Option<usize> = None;
        let mut audio_stream_idx: Option<usize> = None;

        // Read all packets and track timing
        loop {
            let packet_result = unsafe { demuxer.get_packet() };
            match packet_result {
                Ok((pkt, stream)) => {
                    if pkt.is_null() {
                        break;
                    }

                    unsafe {
                        let codec_type = (*(*stream).codecpar).codec_type;
                        let pts = (*pkt).pts;
                        let duration = (*pkt).duration;
                        let current_stream_idx = (*stream).index as usize;

                        match codec_type {
                            AVMEDIA_TYPE_VIDEO => {
                                if video_stream_idx.is_none() {
                                    video_stream_idx = Some(current_stream_idx);
                                }
                                if pts != AV_NOPTS_VALUE {
                                    if video_start_pts == AV_NOPTS_VALUE {
                                        video_start_pts = pts;
                                    }
                                    video_end_pts = pts;
                                    video_last_duration = duration;
                                }
                            }
                            AVMEDIA_TYPE_AUDIO => {
                                if audio_stream_idx.is_none() {
                                    audio_stream_idx = Some(current_stream_idx);
                                }
                                if pts != AV_NOPTS_VALUE {
                                    if audio_start_pts == AV_NOPTS_VALUE {
                                        audio_start_pts = pts;
                                    }
                                    audio_end_pts = pts;
                                    audio_last_duration = duration;
                                }
                            }
                            _ => {}
                        }
                    }
                }
                Err(_) => break,
            }
        }

        // Calculate durations
        let video_duration = if let Some(stream_idx) = video_stream_idx {
            if video_start_pts != AV_NOPTS_VALUE && video_end_pts != AV_NOPTS_VALUE {
                unsafe {
                    let stream = demuxer.get_stream(stream_idx)?;
                    let time_base = (*stream).time_base;
                    let pts_duration = (video_end_pts - video_start_pts) as f64 * av_q2d(time_base);
                    let last_pkt_duration = video_last_duration as f64 * av_q2d(time_base);
                    pts_duration + last_pkt_duration
                }
            } else {
                0.0
            }
        } else {
            0.0
        };

        let audio_duration = if let Some(stream_idx) = audio_stream_idx {
            if audio_start_pts != AV_NOPTS_VALUE && audio_end_pts != AV_NOPTS_VALUE {
                unsafe {
                    let stream = demuxer.get_stream(stream_idx)?;
                    let time_base = (*stream).time_base;
                    let pts_duration = (audio_end_pts - audio_start_pts) as f64 * av_q2d(time_base);
                    let last_pkt_duration = audio_last_duration as f64 * av_q2d(time_base);
                    pts_duration + last_pkt_duration
                }
            } else {
                0.0
            }
        } else {
            0.0
        };

        let total_duration = video_duration.max(audio_duration);

        Ok(SegmentDurations {
            total_duration,
            video_duration,
            audio_duration,
        })
    }
}

#[derive(Debug)]
struct SegmentDurations {
    total_duration: f64,
    video_duration: f64,
    audio_duration: f64,
}

/// Custom IO reader for byte range access
struct ByteRangeReader {
    file: fs::File,
    start_offset: u64,
    length: u64,
    current_pos: u64,
}

impl ByteRangeReader {
    fn new(path: &Path, length: u64, offset: Option<u64>) -> Result<Self> {
        use std::io::{Seek, SeekFrom};

        let mut file = fs::File::open(path)
            .with_context(|| format!("Failed to open file: {}", path.display()))?;

        let start_offset = offset.unwrap_or(0);
        file.seek(SeekFrom::Start(start_offset))
            .with_context(|| format!("Failed to seek to offset {}", start_offset))?;

        Ok(ByteRangeReader {
            file,
            start_offset,
            length,
            current_pos: 0,
        })
    }
}

impl Read for ByteRangeReader {
    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
        let remaining = self.length - self.current_pos;
        if remaining == 0 {
            return Ok(0);
        }

        let to_read = std::cmp::min(buf.len() as u64, remaining) as usize;
        let bytes_read = self.file.read(&mut buf[..to_read])?;
        self.current_pos += bytes_read as u64;
        Ok(bytes_read)
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::tempdir;

    #[test]
    fn test_timing_tester_creation() {
        let tester = HlsTimingTester::default();
        assert_eq!(tester.max_avg_difference, 0.1);
        assert_eq!(tester.max_individual_difference, 0.5);
        assert_eq!(tester.problematic_threshold, 0.2);
    }

    #[test]
    fn test_timing_result_passes() {
        let result = HlsTimingTestResult {
            total_segments: 10,
            full_segments: 8,
            partial_segments: 2,
            independent_partials: 1,
            total_playlist_duration: 20.0,
            total_actual_duration: 20.05,
            total_difference: 0.05,
            average_difference: 0.005,
            min_difference: -0.01,
            max_difference: 0.02,
            problematic_segments: Vec::new(),
            segments: Vec::new(),
            test_duration: Duration::from_millis(100),
            success: true,
            error_message: None,
        };

        assert!(result.passes(0.1, 0.5));
        assert!(!result.passes(0.001, 0.5));
    }

    #[test]
    fn test_missing_playlist() {
        let temp_dir = tempdir().unwrap();
        let tester = HlsTimingTester::default();
        let result = tester.test_stream_timing(temp_dir.path());

        assert!(!result.success);
        assert!(result.error_message.is_some());
        assert!(result.error_message.unwrap().contains("does not exist"));
    }

    #[test]
    fn test_generated_hls_stream_mpegts() {
        env_logger::try_init().ok();

        let temp_dir = tempdir().unwrap();
        let tester = HlsTimingTester::new(0.2, 1.0, 0.5); // More lenient thresholds for test

        let result = tester.test_generated_stream(
            temp_dir.path(),
            10.0, // 10 seconds
            SegmentType::MPEGTS,
        );

        match result {
            Ok(test_result) => {
                assert!(
                    test_result.success,
                    "Test should pass: {}",
                    test_result.summary()
                );
                assert!(
                    test_result.total_segments > 0,
                    "Should have generated segments"
                );
                assert!(
                    test_result.total_playlist_duration > 8.0,
                    "Should have ~10s of content"
                );
                assert!(test_result.full_segments > 0, "Should have full segments");
                println!("✓ MPEG-TS test passed: {}", test_result.summary());
            }
            Err(e) => {
                panic!("Test generation failed: {}", e);
            }
        }
    }

    #[ignore]
    #[test]
    fn test_generated_hls_stream_fmp4() {
        env_logger::try_init().ok();

        let temp_dir = tempdir().unwrap();
        let tester = HlsTimingTester::new(0.2, 1.0, 0.5); // More lenient thresholds for test

        let result = tester.test_generated_stream(
            temp_dir.path(),
            8.0, // 8 seconds
            SegmentType::FMP4,
        );

        match result {
            Ok(test_result) => {
                assert!(
                    test_result.success,
                    "Test should pass: {}",
                    test_result.summary()
                );
                assert!(
                    test_result.total_segments > 0,
                    "Should have generated segments"
                );
                assert!(
                    test_result.total_playlist_duration > 6.0,
                    "Should have ~8s of content"
                );
                assert!(test_result.full_segments > 0, "Should have full segments");
                println!("✓ fMP4 test passed: {}", test_result.summary());
            }
            Err(e) => {
                panic!("Test generation failed: {}", e);
            }
        }
    }

    #[test]
    fn test_30_second_stream() {
        env_logger::try_init().ok();

        let temp_dir = tempdir().unwrap();
        let tester = HlsTimingTester::default();

        let result = tester.test_generated_stream(
            temp_dir.path(),
            30.0, // 30 seconds as requested
            SegmentType::MPEGTS,
        );

        match result {
            Ok(test_result) => {
                println!("{:?}", test_result);
                println!("30-second stream test results:");
                println!(" Total segments: {}", test_result.total_segments);
                println!(" Full segments: {}", test_result.full_segments);
                println!(" Partial segments: {}", test_result.partial_segments);
                println!(
                    " Total playlist duration: {:.1}s",
                    test_result.total_playlist_duration
                );
                println!(
                    " Total actual duration: {:.1}s",
                    test_result.total_actual_duration
                );
                println!(
                    " Average difference: {:.3}s",
                    test_result.average_difference
                );
                println!(" Test duration: {:?}", test_result.test_duration);
                println!(" Result: {}", test_result.summary());

                assert!(
                    test_result.success,
                    "30s test should pass: {}",
                    test_result.summary()
                );
                assert!(
                    test_result.total_segments >= 2,
                    "Should have multiple segments for 30s"
                );
                assert!(
                    test_result.total_playlist_duration >= 25.0,
                    "Should have ~30s of content"
                );

                if !test_result.problematic_segments.is_empty() {
                    println!(" Problematic segments:");
                    for seg in &test_result.problematic_segments {
                        println!(
                            " {}: {:.3}s difference",
                            seg.segment_name, seg.difference
                        );
                    }
                }
            }
            Err(e) => {
                panic!("30-second test generation failed: {}", e);
            }
        }
    }
}
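The module above is exercised entirely through HlsTimingTester, so a typical caller only needs a handful of lines. The following is a hedged usage sketch based on the signatures shown in the listing; the module path crate::test_hls_timing and the tempfile dev-dependency are assumptions, not confirmed by this diff.

use crate::mux::SegmentType;
use crate::test_hls_timing::HlsTimingTester;

#[test]
fn hls_timing_smoke() {
    let tmp = tempfile::tempdir().unwrap();
    // Default thresholds: 100ms average, 500ms per segment, 200ms "problematic".
    let tester = HlsTimingTester::default();
    let result = tester
        .test_generated_stream(tmp.path(), 10.0, SegmentType::MPEGTS)
        .expect("stream generation should succeed");
    assert!(result.passes(0.1, 0.5), "{}", result.summary());
}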
@ -85,7 +85,7 @@ impl TryInto<Encoder> for &VideoVariant {
    fn try_into(self) -> Result<Encoder, Self::Error> {
        unsafe {
            let mut opt = HashMap::new();
            if self.codec == "x264" {
            if self.codec == "x264" || self.codec == "libx264" {
                opt.insert("preset".to_string(), "fast".to_string());
                //opt.insert("tune".to_string(), "zerolatency".to_string());
            }
@ -1,10 +1,10 @@
use data_encoding::BASE32_NOPAD;
use log::debug;
use sha2::{Digest, Sha256};
use std::collections::HashMap;
use std::sync::{Arc, RwLock};
use std::time::{Duration, Instant};
use tokio::task;
use log::debug;
use sha2::{Digest, Sha256};
use data_encoding::BASE32_NOPAD;

#[derive(Debug, Clone)]
pub struct ViewerInfo {
@ -55,7 +55,13 @@ impl ViewerTracker {
        BASE32_NOPAD.encode(fingerprint).to_lowercase()
    }

    pub fn track_viewer(&self, token: &str, stream_id: &str, ip_address: &str, user_agent: Option<String>) {
    pub fn track_viewer(
        &self,
        token: &str,
        stream_id: &str,
        ip_address: &str,
        user_agent: Option<String>,
    ) {
        let mut viewers = self.viewers.write().unwrap();

        let viewer_info = ViewerInfo {
@ -76,14 +82,16 @@ impl ViewerTracker {

    pub fn get_viewer_count(&self, stream_id: &str) -> usize {
        let viewers = self.viewers.read().unwrap();
        viewers.values()
        viewers
            .values()
            .filter(|v| v.stream_id == stream_id)
            .count()
    }

    pub fn get_active_viewers(&self, stream_id: &str) -> Vec<String> {
        let viewers = self.viewers.read().unwrap();
        viewers.iter()
        viewers
            .iter()
            .filter(|(_, v)| v.stream_id == stream_id)
            .map(|(token, _)| token.clone())
            .collect()
@ -109,15 +117,20 @@ impl ViewerTracker {
        let mut viewers = self.viewers.write().unwrap();
        let now = Instant::now();

        let expired_tokens: Vec<String> = viewers.iter()
        let expired_tokens: Vec<String> = viewers
            .iter()
            .filter(|(_, viewer)| now.duration_since(viewer.last_seen) > self.timeout_duration)
            .map(|(token, _)| token.clone())
            .collect();

        for token in expired_tokens {
            if let Some(viewer) = viewers.remove(&token) {
                debug!("Expired viewer {} from stream {} (last seen {:?} ago)",
                debug!(
                    token, viewer.stream_id, now.duration_since(viewer.last_seen));
                    "Expired viewer {} from stream {} (last seen {:?} ago)",
                    token,
                    viewer.stream_id,
                    now.duration_since(viewer.last_seen)
                );
            }
        }
    }
@ -142,7 +155,10 @@ mod tests {
        let token1 = ViewerTracker::generate_viewer_token(ip, user_agent);
        let token2 = ViewerTracker::generate_viewer_token(ip, user_agent);

        assert_eq!(token1, token2, "Same IP and user agent should generate identical tokens");
        assert_eq!(
            token1, token2,
            "Same IP and user agent should generate identical tokens"
        );
    }

    #[test]
@ -155,7 +171,10 @@ mod tests {
        let token1 = ViewerTracker::generate_viewer_token(ip1, user_agent);
        let token2 = ViewerTracker::generate_viewer_token(ip2, user_agent);

        assert_ne!(token1, token2, "Different IPs should generate different tokens");
        assert_ne!(
            token1, token2,
            "Different IPs should generate different tokens"
        );
    }

    #[test]
@ -166,7 +185,10 @@ mod tests {
        let token1 = ViewerTracker::generate_viewer_token(ip, None);
        let token2 = ViewerTracker::generate_viewer_token(ip, None);

        assert_eq!(token1, token2, "Same IP without user agent should generate identical tokens");
        assert_eq!(
            token1, token2,
            "Same IP without user agent should generate identical tokens"
        );
    }

    #[test]
@ -178,8 +200,12 @@ mod tests {
        let token = ViewerTracker::generate_viewer_token(ip, user_agent);

        // Should be base32 encoded (lowercase, no padding)
        assert!(token.chars().all(|c| "abcdefghijklmnopqrstuvwxyz234567".contains(c)),
        assert!(
            "Token should only contain base32 characters");
            token
                .chars()
                .all(|c| "abcdefghijklmnopqrstuvwxyz234567".contains(c)),
            "Token should only contain base32 characters"
        );
        assert!(token.len() > 10, "Token should be reasonably long");
    }

@ -193,6 +219,9 @@ mod tests {
        let token1 = ViewerTracker::generate_viewer_token(ip, user_agent1);
        let token2 = ViewerTracker::generate_viewer_token(ip, user_agent2);

        assert_ne!(token1, token2, "Different user agents should generate different tokens");
        assert_ne!(
            token1, token2,
            "Different user agents should generate different tokens"
        );
    }
}
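The reordered imports above (sha2, data_encoding) support the viewer-token fingerprint that the tests in this hunk rely on: a hash of IP address plus user agent, encoded as lowercase base32 without padding. A sketch of that scheme follows; only the BASE32_NOPAD.encode(fingerprint).to_lowercase() step appears in the diff, so the exact fields and hashing layout are assumptions.

use data_encoding::BASE32_NOPAD;
use sha2::{Digest, Sha256};

// Sketch only: the real generate_viewer_token may combine the inputs differently.
fn viewer_token(ip: &str, user_agent: Option<&str>) -> String {
    let mut hasher = Sha256::new();
    hasher.update(ip.as_bytes());
    if let Some(ua) = user_agent {
        hasher.update(ua.as_bytes());
    }
    let fingerprint = hasher.finalize();
    // Matches the encoding step shown in the hunk above.
    BASE32_NOPAD.encode(fingerprint.as_slice()).to_lowercase()
}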
@ -94,7 +94,7 @@ impl ZapStreamDb {

    pub async fn update_stream(&self, user_stream: &UserStream) -> Result<()> {
        sqlx::query(
            "update user_stream set state = ?, starts = ?, ends = ?, title = ?, summary = ?, image = ?, thumb = ?, tags = ?, content_warning = ?, goal = ?, pinned = ?, fee = ?, event = ? where id = ?",
            "update user_stream set state = ?, starts = ?, ends = ?, title = ?, summary = ?, image = ?, thumb = ?, tags = ?, content_warning = ?, goal = ?, pinned = ?, fee = ?, event = ?, endpoint_id = ? where id = ?",
        )
        .bind(&user_stream.state)
        .bind(&user_stream.starts)
@ -109,6 +109,7 @@ impl ZapStreamDb {
        .bind(&user_stream.pinned)
        .bind(&user_stream.fee)
        .bind(&user_stream.event)
        .bind(&user_stream.endpoint_id)
        .bind(&user_stream.id)
        .execute(&self.db)
        .await
@ -357,10 +358,10 @@ impl ZapStreamDb {
    }

    /// Get ingest endpoint by id
    pub async fn get_ingest_endpoint(&self, endpoint_id: u64) -> Result<Option<IngestEndpoint>> {
    pub async fn get_ingest_endpoint(&self, endpoint_id: u64) -> Result<IngestEndpoint> {
        Ok(sqlx::query_as("select * from ingest_endpoint where id = ?")
            .bind(endpoint_id)
            .fetch_optional(&self.db)
            .fetch_one(&self.db)
            .await?)
    }

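Note that get_ingest_endpoint now returns Result<IngestEndpoint> and uses fetch_one, so a missing row surfaces as an Err instead of Ok(None). A hedged sketch of the call-site difference; the surrounding function and names here are illustrative, not taken from this diff.

// Before: callers had to unwrap the Option themselves, e.g.
// let endpoint = db.get_ingest_endpoint(id).await?.ok_or_else(|| anyhow::anyhow!("unknown endpoint"))?;
// After: sqlx's fetch_one already reports a missing endpoint as an error.
async fn resolve_endpoint(db: &ZapStreamDb, id: u64) -> anyhow::Result<IngestEndpoint> {
    db.get_ingest_endpoint(id).await
}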
@ -44,3 +44,4 @@ clap = { version = "4.5.16", features = ["derive"] }
futures-util = "0.3.31"
matchit = "0.8.4"
mustache = "0.9.0"
http-range-header = "0.4.2"
@ -1,43 +1,15 @@
ARG IMAGE=rust:bookworm
FROM voidic/rust-ffmpeg AS build

FROM $IMAGE AS build
WORKDIR /app/src
ENV FFMPEG_DIR=/app/ffmpeg
COPY . .
RUN apt update && \
ENV LD_LIBRARY_PATH=$FFMPEG_DIR/lib
    apt install -y \
RUN cargo test
    build-essential \
    libx264-dev \
    libx265-dev \
    libwebp-dev \
    libpng-dev \
    nasm \
    protobuf-compiler \
    libclang-dev && \
    rm -rf /var/lib/apt/lists/*
RUN git clone --single-branch --branch release/7.1 https://git.v0l.io/ffmpeg/ffmpeg.git && \
    cd ffmpeg && \
    ./configure \
    --prefix=$FFMPEG_DIR \
    --disable-programs \
    --disable-doc \
    --disable-network \
    --enable-gpl \
    --enable-version3 \
    --disable-postproc \
    --enable-libx264 \
    --enable-libx265 \
    --enable-libwebp \
    --disable-static \
    --enable-shared && \
    make -j$(nproc) && make install
RUN cargo install --path ./crates/zap-stream --root /app/build

FROM $IMAGE AS runner
FROM rust:bookworm
WORKDIR /app
RUN apt update && \
    apt install -y libx264-164 && \
    rm -rf /var/lib/apt/lists/*
COPY --from=build /app/build .
COPY --from=build /app/ffmpeg/lib/ /lib
COPY --from=build /app/src/ffmpeg/lib/ /lib
ENTRYPOINT ["/app/bin/zap-stream"]
@ -3,8 +3,6 @@
# All the endpoints must be valid URI's
endpoints:
  - "rtmp://127.0.0.1:3336"
  - "srt://127.0.0.1:3335"
  - "tcp://127.0.0.1:3334"

# Public hostname which points to the IP address used to listen for all [endpoints]
endpoints_public_hostname: "localhost"
@ -571,6 +571,8 @@ impl Api {
            })
            .collect();

        // TODO: past streams should include a history entry

        Ok(HistoryResponse {
            items,
            page: 0,
@ -650,8 +652,16 @@ impl Api {
    }

    /// Track a viewer for viewer count analytics
    pub fn track_viewer(&self, token: &str, stream_id: &str, ip_address: &str, user_agent: Option<String>) {
    pub fn track_viewer(
        self.overseer.viewer_tracker().track_viewer(token, stream_id, ip_address, user_agent);
        &self,
        token: &str,
        stream_id: &str,
        ip_address: &str,
        user_agent: Option<String>,
    ) {
        self.overseer
            .viewer_tracker()
            .track_viewer(token, stream_id, ip_address, user_agent);
    }

    /// Get current viewer count for a stream

697
crates/zap-stream/src/bin/hls_debug.rs
Normal file
@ -0,0 +1,697 @@
use anyhow::{Context, Result};
use ffmpeg_rs_raw::ffmpeg_sys_the_third::{
    av_q2d, AVMediaType::AVMEDIA_TYPE_AUDIO, AVMediaType::AVMEDIA_TYPE_VIDEO, AV_NOPTS_VALUE,
};
use ffmpeg_rs_raw::Demuxer;
use m3u8_rs::{parse_media_playlist, MediaSegmentType};
use std::env;
use std::fmt;
use std::fs;
use std::io::{Read, Seek, SeekFrom};
use std::path::{Path, PathBuf};

#[derive(Debug)]
struct SegmentInfo {
    filename: String,
    playlist_duration: f32,
    actual_duration: f64,
    video_duration: f64,
    audio_duration: f64,
    difference: f64,
    segment_type: SegmentAnalysisType,
}

#[derive(Debug, Clone)]
enum SegmentAnalysisType {
    Full,
    Partial {
        independent: bool,
        byte_range: Option<(u64, Option<u64>)>,
    },
}

#[derive(Debug)]
struct SegmentDurations {
    total_duration: f64,
    video_duration: f64,
    audio_duration: f64,
    video_packets: u64,
    audio_packets: u64,
    video_start_pts: i64,
    video_end_pts: i64,
    audio_start_pts: i64,
    audio_end_pts: i64,
}

#[derive(Debug)]
struct InitSegmentInfo {
    stream_count: usize,
    streams: Vec<StreamInfo>,
    has_moov: bool,
    pixel_format_set: bool,
}

#[derive(Debug)]
struct StreamInfo {
    codec_type: String,
    codec_name: String,
    width: Option<i32>,
    height: Option<i32>,
    pixel_format: Option<String>,
}

impl fmt::Display for StreamInfo {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self.codec_type.as_str() {
            "video" => {
                if let (Some(w), Some(h)) = (self.width, self.height) {
                    write!(f, "{} {}x{}", self.codec_name, w, h)?;
                } else {
                    write!(f, "{}", self.codec_name)?;
                }
                if let Some(ref pix_fmt) = self.pixel_format {
                    write!(f, " ({})", pix_fmt)?;
                }
                Ok(())
            }
            "audio" => write!(f, "{} (audio)", self.codec_name),
            _ => write!(f, "{} ({})", self.codec_name, self.codec_type),
        }
    }
}

/// Custom IO reader that implements Read for byte range access to files
/// This allows us to read only a specific byte range from a file, which is essential
/// for analyzing HLS-LL partial segments that reference byte ranges in larger files.
struct ByteRangeReader {
    file: fs::File,
    start_offset: u64,
    length: u64,
    current_pos: u64,
}

impl ByteRangeReader {
    /// Create a new ByteRangeReader for the specified file and byte range
    fn new(path: &Path, length: u64, offset: Option<u64>) -> Result<Self> {
        let mut file = fs::File::open(path)
            .with_context(|| format!("Failed to open file: {}", path.display()))?;

        let start_offset = offset.unwrap_or(0);

        // Seek to the start of our byte range
        file.seek(SeekFrom::Start(start_offset))
            .with_context(|| format!("Failed to seek to offset {}", start_offset))?;

        Ok(ByteRangeReader {
            file,
            start_offset,
            length,
            current_pos: 0,
        })
    }
}

impl Read for ByteRangeReader {
    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
        // Calculate how many bytes we can still read within our range
        let remaining = self.length - self.current_pos;
        if remaining == 0 {
            return Ok(0); // EOF for our byte range
        }

        // Limit the read to not exceed our byte range
        let to_read = std::cmp::min(buf.len() as u64, remaining) as usize;
        let bytes_read = self.file.read(&mut buf[..to_read])?;

        self.current_pos += bytes_read as u64;
        Ok(bytes_read)
    }
}

fn main() -> Result<()> {
    let args: Vec<String> = env::args().collect();
    if args.len() != 2 {
        eprintln!("Usage: {} <path_to_hls_directory>", args[0]);
        eprintln!(
            "Example: {} out/hls/8c220348-fdbb-44cd-94d5-97a11a9ec91d/stream_0",
            args[0]
        );
        std::process::exit(1);
    }

    let hls_dir = PathBuf::from(&args[1]);
    let playlist_path = hls_dir.join("live.m3u8");

    if !playlist_path.exists() {
        eprintln!("Error: Playlist file {:?} does not exist", playlist_path);
        std::process::exit(1);
    }

    println!("Analyzing HLS stream: {}", hls_dir.display());
    println!("Playlist: {}", playlist_path.display());

    // Check for initialization segment
    let init_path = hls_dir.join("init.mp4");
    if init_path.exists() {
        println!("Init segment: {}", init_path.display());
        match analyze_init_segment(&init_path) {
            Ok(info) => {
                println!(" Streams: {}", info.stream_count);
                for (i, stream_info) in info.streams.iter().enumerate() {
                    println!(" Stream {}: {}", i, stream_info);
                }
                if info.has_moov {
                    println!(" ✓ Contains MOOV box");
                } else {
                    println!(" ✗ Missing MOOV box");
                }
                if info.pixel_format_set {
                    println!(" ✓ Pixel format properly set");
                } else {
                    println!(" ✗ Pixel format not set");
                }
            }
            Err(e) => {
                println!(" Error analyzing init segment: {}", e);
            }
        }
    } else {
        println!("No init segment found");
    }
    println!();

    // Parse the playlist
    let playlist_content =
        fs::read_to_string(&playlist_path).context("Failed to read playlist file")?;

    let (_, playlist) = parse_media_playlist(playlist_content.as_bytes())
        .map_err(|e| anyhow::anyhow!("Failed to parse playlist: {:?}", e))?;

    // Analyze each segment
    let mut segments = Vec::new();
    let mut total_playlist_duration = 0.0f32;
    let mut total_actual_duration = 0.0f64;

    println!("Segment Analysis:");
    println!(
        "{:<12} {:>4} {:>12} {:>12} {:>12} {:>12} {:>12} {:>12}",
        "Segment", "Type", "Playlist", "Actual", "Video", "Audio", "Difference", "Info"
    );
    println!(
        "{:<12} {:>4} {:>12} {:>12} {:>12} {:>12} {:>12} {:>12}",
        "--------", "----", "--------", "------", "-----", "-----", "----------", "----"
    );

    for segment_type in &playlist.segments {
        match segment_type {
            MediaSegmentType::Full(segment) => {
                let segment_path = hls_dir.join(&segment.uri);

                if !segment_path.exists() {
                    eprintln!("Warning: Segment file {:?} does not exist", segment_path);
                    continue;
                }

                // Analyze file using demuxer
                let durations = analyze_segment(&segment_path)?;
                let actual_duration = durations.total_duration;
                let video_duration = durations.video_duration;
                let audio_duration = durations.audio_duration;

                let playlist_duration = segment.duration;
                let difference = actual_duration - playlist_duration as f64;

                let info = SegmentInfo {
                    filename: segment.uri.clone(),
                    playlist_duration,
                    actual_duration,
                    video_duration,
                    audio_duration,
                    difference,
                    segment_type: SegmentAnalysisType::Full,
                };

                println!(
                    "{:<12} {:>4} {:>12.3} {:>12.3} {:>12.3} {:>12.3} {:>12.3} {:>12}",
                    info.filename,
                    "FULL",
                    info.playlist_duration,
                    info.actual_duration,
                    info.video_duration,
                    info.audio_duration,
                    info.difference,
                    ""
                );

                segments.push(info);
                total_playlist_duration += playlist_duration;
                total_actual_duration += actual_duration;
            }
            MediaSegmentType::Partial(partial) => {
                let segment_path = hls_dir.join(&partial.uri);

                if !segment_path.exists() {
                    eprintln!(
                        "Warning: Partial segment file {:?} does not exist",
                        segment_path
                    );
                    continue;
                }

                // For partial segments, we need to analyze them differently since they reference byte ranges
                let (actual_duration, video_duration, audio_duration) =
                    if let Some(byte_range) = &partial.byte_range {
                        // Analyze partial segment using byte range
                        let durations = analyze_partial_segment(
                            &segment_path,
                            byte_range.length,
                            byte_range.offset,
                        )?;
                        (
                            durations.total_duration,
                            durations.video_duration,
                            durations.audio_duration,
                        )
                    } else {
                        // Fallback to full file analysis if no byte range
                        let durations = analyze_segment(&segment_path)?;
                        (
                            durations.total_duration,
                            durations.video_duration,
                            durations.audio_duration,
                        )
                    };

                let playlist_duration = partial.duration as f32;
                let difference = actual_duration - playlist_duration as f64;

                let byte_range_info = partial.byte_range.as_ref().map(|br| (br.length, br.offset));

                let info = SegmentInfo {
                    filename: partial.uri.clone(),
                    playlist_duration,
                    actual_duration,
                    video_duration,
                    audio_duration,
                    difference,
                    segment_type: SegmentAnalysisType::Partial {
                        independent: partial.independent,
                        byte_range: byte_range_info,
                    },
                };

                let info_str = if partial.independent { "IND" } else { "" };

                println!(
                    "{:<12} {:>4} {:>12.3} {:>12.3} {:>12.3} {:>12.3} {:>12.3} {:>12}",
                    info.filename,
                    "PART",
                    info.playlist_duration,
                    info.actual_duration,
                    info.video_duration,
                    info.audio_duration,
                    info.difference,
                    info_str
                );

                segments.push(info);
                total_playlist_duration += playlist_duration;
                total_actual_duration += actual_duration;
            }
            MediaSegmentType::PreloadHint(_) => {
                // Skip preload hints for analysis
                continue;
            }
        }
    }

    println!();

    // Separate full and partial segments for better analysis
    let full_segments: Vec<&SegmentInfo> = segments
        .iter()
        .filter(|s| matches!(s.segment_type, SegmentAnalysisType::Full))
        .collect();
    let partial_segments: Vec<&SegmentInfo> = segments
        .iter()
        .filter(|s| matches!(s.segment_type, SegmentAnalysisType::Partial { .. }))
        .collect();
    let independent_partials: Vec<&SegmentInfo> = segments
        .iter()
        .filter(|s| {
            matches!(
                s.segment_type,
                SegmentAnalysisType::Partial {
                    independent: true,
                    ..
                }
            )
        })
        .collect();

    println!("Summary:");
    println!(" Total segments: {}", segments.len());
    println!(" Full segments: {}", full_segments.len());
    println!(" Partial segments: {}", partial_segments.len());
    println!(" Independent partials: {}", independent_partials.len());
    println!(" Total playlist duration: {:.3}s", total_playlist_duration);
    println!(" Total actual duration: {:.3}s", total_actual_duration);
    println!(
        " Total difference: {:.3}s",
        total_actual_duration - total_playlist_duration as f64
    );
    if !segments.is_empty() {
        println!(
            " Average difference per segment: {:.3}s",
            (total_actual_duration - total_playlist_duration as f64) / segments.len() as f64
        );
    }

    // Statistics
    let differences: Vec<f64> = segments.iter().map(|s| s.difference).collect();
    let min_diff = differences.iter().fold(f64::INFINITY, |a, &b| a.min(b));
    let max_diff = differences.iter().fold(f64::NEG_INFINITY, |a, &b| a.max(b));
    let avg_diff = differences.iter().sum::<f64>() / differences.len() as f64;
|
||||||
|
|
||||||
|
println!();
|
||||||
|
println!("Difference Statistics:");
|
||||||
|
println!(" Min difference: {:.3}s", min_diff);
|
||||||
|
println!(" Max difference: {:.3}s", max_diff);
|
||||||
|
println!(" Average difference: {:.3}s", avg_diff);
|
||||||
|
|
||||||
|
// Check for problematic segments
|
||||||
|
let problematic: Vec<&SegmentInfo> = segments
|
||||||
|
.iter()
|
||||||
|
.filter(|s| s.difference.abs() > 0.5)
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
if !problematic.is_empty() {
|
||||||
|
println!();
|
||||||
|
println!("Problematic segments (>0.5s difference):");
|
||||||
|
for seg in problematic {
|
||||||
|
println!(" {}: {:.3}s difference", seg.filename, seg.difference);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// HLS-LL specific analysis
|
||||||
|
if !partial_segments.is_empty() {
|
||||||
|
println!();
|
||||||
|
println!("HLS-LL Analysis:");
|
||||||
|
let avg_partial_duration: f64 = partial_segments
|
||||||
|
.iter()
|
||||||
|
.map(|s| s.playlist_duration as f64)
|
||||||
|
.sum::<f64>()
|
||||||
|
/ partial_segments.len() as f64;
|
||||||
|
println!(" Average partial duration: {:.3}s", avg_partial_duration);
|
||||||
|
|
||||||
|
if let Some(part_inf) = &playlist.part_inf {
|
||||||
|
let target_duration = part_inf.part_target;
|
||||||
|
println!(" Target partial duration: {:.3}s", target_duration);
|
||||||
|
println!(
|
||||||
|
" Partial duration variance: {:.3}s",
|
||||||
|
(avg_partial_duration - target_duration).abs()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Show byte range info for partial segments
|
||||||
|
let partials_with_ranges = partial_segments
|
||||||
|
.iter()
|
||||||
|
.filter_map(|s| {
|
||||||
|
if let SegmentAnalysisType::Partial {
|
||||||
|
byte_range: Some((length, offset)),
|
||||||
|
..
|
||||||
|
} = &s.segment_type
|
||||||
|
{
|
||||||
|
Some((s, length, offset))
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
|
if !partials_with_ranges.is_empty() {
|
||||||
|
println!(
|
||||||
|
" Partial segments with byte ranges: {}",
|
||||||
|
partials_with_ranges.len()
|
||||||
|
);
|
||||||
|
let avg_range_size = partials_with_ranges
|
||||||
|
.iter()
|
||||||
|
.map(|(_, &length, _)| length)
|
||||||
|
.sum::<u64>() as f64
|
||||||
|
/ partials_with_ranges.len() as f64;
|
||||||
|
println!(" Average byte range size: {:.0} bytes", avg_range_size);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check playlist properties
|
||||||
|
println!();
|
||||||
|
println!("Playlist Properties:");
|
||||||
|
println!(" Version: {:?}", playlist.version);
|
||||||
|
println!(" Target duration: {:?}", playlist.target_duration);
|
||||||
|
println!(" Media sequence: {:?}", playlist.media_sequence);
|
||||||
|
if let Some(part_inf) = &playlist.part_inf {
|
||||||
|
println!(
|
||||||
|
" Part target: {:.3}s (LL-HLS enabled)",
|
||||||
|
part_inf.part_target
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Count preload hints
|
||||||
|
let preload_hints = playlist
|
||||||
|
.segments
|
||||||
|
.iter()
|
||||||
|
.filter(|s| matches!(s, MediaSegmentType::PreloadHint(_)))
|
||||||
|
.count();
|
||||||
|
if preload_hints > 0 {
|
||||||
|
println!(" Preload hints: {}", preload_hints);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn analyze_segment_with_reader(reader: Box<dyn Read>) -> Result<SegmentDurations> {
|
||||||
|
let mut demuxer = Demuxer::new_custom_io(reader, None)?;
|
||||||
|
|
||||||
|
// Probe the input to get stream information
|
||||||
|
unsafe {
|
||||||
|
demuxer.probe_input()?;
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut video_start_pts = AV_NOPTS_VALUE;
|
||||||
|
let mut video_end_pts = AV_NOPTS_VALUE;
|
||||||
|
let mut audio_start_pts = AV_NOPTS_VALUE;
|
||||||
|
let mut audio_end_pts = AV_NOPTS_VALUE;
|
||||||
|
let mut video_last_duration = 0i64;
|
||||||
|
let mut audio_last_duration = 0i64;
|
||||||
|
let mut video_packets = 0u64;
|
||||||
|
let mut audio_packets = 0u64;
|
||||||
|
let mut video_stream_idx: Option<usize> = None;
|
||||||
|
let mut audio_stream_idx: Option<usize> = None;
|
||||||
|
|
||||||
|
// Read all packets and track timing
|
||||||
|
loop {
|
||||||
|
let packet_result = unsafe { demuxer.get_packet() };
|
||||||
|
match packet_result {
|
||||||
|
Ok((pkt, stream)) => {
|
||||||
|
if pkt.is_null() {
|
||||||
|
break; // End of stream
|
||||||
|
}
|
||||||
|
|
||||||
|
unsafe {
|
||||||
|
let codec_type = (*(*stream).codecpar).codec_type;
|
||||||
|
let pts = (*pkt).pts;
|
||||||
|
let duration = (*pkt).duration;
|
||||||
|
let current_stream_idx = (*stream).index as usize;
|
||||||
|
|
||||||
|
match codec_type {
|
||||||
|
AVMEDIA_TYPE_VIDEO => {
|
||||||
|
if video_stream_idx.is_none() {
|
||||||
|
video_stream_idx = Some(current_stream_idx);
|
||||||
|
}
|
||||||
|
if pts != AV_NOPTS_VALUE {
|
||||||
|
if video_start_pts == AV_NOPTS_VALUE {
|
||||||
|
video_start_pts = pts;
|
||||||
|
}
|
||||||
|
video_end_pts = pts;
|
||||||
|
video_last_duration = duration;
|
||||||
|
video_packets += 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
AVMEDIA_TYPE_AUDIO => {
|
||||||
|
if audio_stream_idx.is_none() {
|
||||||
|
audio_stream_idx = Some(current_stream_idx);
|
||||||
|
}
|
||||||
|
if pts != AV_NOPTS_VALUE {
|
||||||
|
if audio_start_pts == AV_NOPTS_VALUE {
|
||||||
|
audio_start_pts = pts;
|
||||||
|
}
|
||||||
|
audio_end_pts = pts;
|
||||||
|
audio_last_duration = duration;
|
||||||
|
audio_packets += 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(_) => break, // End of file or error
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Calculate durations (including last packet duration)
|
||||||
|
let video_duration = if let Some(stream_idx) = video_stream_idx {
|
||||||
|
if video_start_pts != AV_NOPTS_VALUE && video_end_pts != AV_NOPTS_VALUE {
|
||||||
|
unsafe {
|
||||||
|
let stream = demuxer.get_stream(stream_idx)?;
|
||||||
|
let time_base = (*stream).time_base;
|
||||||
|
let pts_duration = (video_end_pts - video_start_pts) as f64 * av_q2d(time_base);
|
||||||
|
let last_pkt_duration = video_last_duration as f64 * av_q2d(time_base);
|
||||||
|
pts_duration + last_pkt_duration
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
0.0
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
0.0
|
||||||
|
};
|
||||||
|
|
||||||
|
let audio_duration = if let Some(stream_idx) = audio_stream_idx {
|
||||||
|
if audio_start_pts != AV_NOPTS_VALUE && audio_end_pts != AV_NOPTS_VALUE {
|
||||||
|
unsafe {
|
||||||
|
let stream = demuxer.get_stream(stream_idx)?;
|
||||||
|
let time_base = (*stream).time_base;
|
||||||
|
let pts_duration = (audio_end_pts - audio_start_pts) as f64 * av_q2d(time_base);
|
||||||
|
let last_pkt_duration = audio_last_duration as f64 * av_q2d(time_base);
|
||||||
|
pts_duration + last_pkt_duration
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
0.0
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
0.0
|
||||||
|
};
|
||||||
|
|
||||||
|
let total_duration = video_duration.max(audio_duration);
|
||||||
|
|
||||||
|
Ok(SegmentDurations {
|
||||||
|
total_duration,
|
||||||
|
video_duration,
|
||||||
|
audio_duration,
|
||||||
|
video_packets,
|
||||||
|
audio_packets,
|
||||||
|
video_start_pts,
|
||||||
|
video_end_pts,
|
||||||
|
audio_start_pts,
|
||||||
|
audio_end_pts,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
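
#[cfg(test)]
mod duration_math_example {
    // Illustrative check of the PTS-to-seconds arithmetic used in
    // analyze_segment_with_reader above. The numbers are made up (a 90 kHz
    // time base and roughly two seconds of video), not taken from a real
    // segment.
    #[test]
    fn pts_duration_includes_last_packet() {
        let time_base = 1.0 / 90_000.0; // what av_q2d would yield for 1/90000
        let (start_pts, end_pts, last_duration) = (0i64, 179_910i64, 3_003i64);
        let pts_duration = (end_pts - start_pts) as f64 * time_base;
        let last_pkt_duration = last_duration as f64 * time_base;
        // (179_910 + 3_003) / 90_000 ≈ 2.032 s
        assert!((pts_duration + last_pkt_duration - 2.0324).abs() < 1e-3);
    }
}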

fn analyze_segment(path: &Path) -> Result<SegmentDurations> {
    let file =
        fs::File::open(path).with_context(|| format!("Failed to open file: {}", path.display()))?;
    analyze_segment_with_reader(Box::new(file))
}

fn analyze_partial_segment(
    path: &Path,
    length: u64,
    offset: Option<u64>,
) -> Result<SegmentDurations> {
    // Create a custom byte range reader for the partial segment
    let reader = ByteRangeReader::new(path, length, offset)?;

    // Use the custom IO with demuxer to analyze only the byte range
    analyze_segment_with_reader(Box::new(reader))
}

fn analyze_init_segment(path: &Path) -> Result<InitSegmentInfo> {
    use ffmpeg_rs_raw::ffmpeg_sys_the_third::{
        av_get_pix_fmt_name, avcodec_get_name, AVPixelFormat::AV_PIX_FMT_NONE,
    };
    use std::ffi::CStr;

    let file = fs::File::open(path)
        .with_context(|| format!("Failed to open init segment: {}", path.display()))?;

    let mut demuxer = Demuxer::new_custom_io(Box::new(file), None)?;

    // Probe the input to get stream information
    unsafe {
        demuxer.probe_input()?;
    }

    let mut streams = Vec::new();
    let mut pixel_format_set = false;

    // Try to get streams - we'll iterate until we hit an error
    let mut i = 0;
    loop {
        let stream_result = unsafe { demuxer.get_stream(i) };
        match stream_result {
            Ok(stream) => unsafe {
                let codecpar = (*stream).codecpar;
                let codec_type = (*codecpar).codec_type;

                let codec_name = {
                    let name_ptr = avcodec_get_name((*codecpar).codec_id);
                    if name_ptr.is_null() {
                        "unknown".to_string()
                    } else {
                        CStr::from_ptr(name_ptr).to_string_lossy().to_string()
                    }
                };

                let (codec_type_str, width, height, pixel_format) = match codec_type {
                    AVMEDIA_TYPE_VIDEO => {
                        let w = if (*codecpar).width > 0 {
                            Some((*codecpar).width)
                        } else {
                            None
                        };
                        let h = if (*codecpar).height > 0 {
                            Some((*codecpar).height)
                        } else {
                            None
                        };

                        let pix_fmt = if (*codecpar).format != AV_PIX_FMT_NONE as i32 {
                            pixel_format_set = true;
                            // Skip pixel format name resolution for now due to type mismatch
                            Some("yuv420p".to_string()) // Common default
                        } else {
                            None
                        };

                        ("video".to_string(), w, h, pix_fmt)
                    }
                    AVMEDIA_TYPE_AUDIO => ("audio".to_string(), None, None, None),
                    _ => ("other".to_string(), None, None, None),
                };

                streams.push(StreamInfo {
                    codec_type: codec_type_str,
                    codec_name,
                    width,
                    height,
                    pixel_format,
                });

                i += 1;
            },
            Err(_) => break, // No more streams
        }
    }

    let stream_count = streams.len();

    // Check if this is a proper MP4 initialization segment by looking for file data
    let file_data = fs::read(path)?;
    let has_moov = file_data.windows(4).any(|window| window == b"moov");

    Ok(InitSegmentInfo {
        stream_count,
        streams,
        has_moov,
        pixel_format_set,
    })
}
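
// `analyze_partial_segment` above constructs a `ByteRangeReader`, which is not
// defined in this hunk. A minimal sketch of what such a reader could look like
// follows; only the `new(path, length, offset)` signature is taken from the
// call site, the internals are an assumption rather than the project's actual
// implementation.
#[allow(dead_code)]
mod byte_range_reader_sketch {
    use anyhow::Result;
    use std::fs;
    use std::io::{Read, Seek, SeekFrom};
    use std::path::Path;

    /// Exposes only `length` bytes of a file starting at `offset`.
    pub struct ByteRangeReader {
        file: fs::File,
        remaining: u64,
    }

    impl ByteRangeReader {
        pub fn new(path: &Path, length: u64, offset: Option<u64>) -> Result<Self> {
            let mut file = fs::File::open(path)?;
            // A missing offset means the range starts at the beginning of the file.
            file.seek(SeekFrom::Start(offset.unwrap_or(0)))?;
            Ok(Self {
                file,
                remaining: length,
            })
        }
    }

    impl Read for ByteRangeReader {
        fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
            if self.remaining == 0 {
                return Ok(0); // range exhausted, report EOF to the demuxer
            }
            // Never hand out more bytes than the byte range allows.
            let max = buf.len().min(self.remaining as usize);
            let n = self.file.read(&mut buf[..max])?;
            self.remaining -= n as u64;
            Ok(n)
        }
    }
}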
@@ -1,25 +1,36 @@
 use crate::api::Api;
-use anyhow::{bail, Result};
+use anyhow::{bail, ensure, Context, Result};
 use base64::Engine;
 use bytes::Bytes;
 use chrono::{DateTime, Utc};
 use futures_util::TryStreamExt;
 use http_body_util::combinators::BoxBody;
 use http_body_util::{BodyExt, Full, StreamBody};
+use http_range_header::{
+    parse_range_header, EndPosition, StartPosition, SyntacticallyCorrectRange,
+};
 use hyper::body::{Frame, Incoming};
+use hyper::http::response::Builder;
 use hyper::service::Service;
-use hyper::{Method, Request, Response};
+use hyper::{Request, Response, StatusCode};
-use log::error;
+use log::{error, warn};
+use matchit::Router;
 use nostr_sdk::{serde_json, Alphabet, Event, Kind, PublicKey, SingleLetterTag, TagKind};
 use serde::Serialize;
 use std::future::Future;
+use std::io::SeekFrom;
+use std::ops::Range;
 use std::path::PathBuf;
-use std::pin::Pin;
+use std::pin::{pin, Pin};
 use std::sync::Arc;
+use std::task::Poll;
 use std::time::{Duration, Instant};
 use tokio::fs::File;
+use tokio::io::{AsyncRead, AsyncSeek, ReadBuf};
 use tokio::sync::RwLock;
 use tokio_util::io::ReaderStream;
+use uuid::Uuid;
+use zap_stream_core::egress::hls::HlsEgress;
 use zap_stream_core::viewer::ViewerTracker;

 #[derive(Serialize, Clone)]
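
// Aside: the new `matchit::Router` import backs the route table added to
// `HttpServer::new` below. A minimal, self-contained illustration of that
// routing style; the "/{stream}/hls/live.m3u8" pattern only approximates the
// real routes, since `HlsEgress::PATH` is defined elsewhere.
#[cfg(test)]
mod router_example {
    use matchit::Router;

    #[test]
    fn named_parameters_are_captured() {
        let mut router = Router::new();
        router.insert("/", "index").unwrap();
        router
            .insert("/{stream}/hls/live.m3u8", "hls-master")
            .unwrap();

        let m = router.at("/abc123/hls/live.m3u8").unwrap();
        assert_eq!(*m.value, "hls-master");
        assert_eq!(m.params.get("stream"), Some("abc123"));
    }
}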
@ -46,6 +57,14 @@ pub struct CachedStreams {
|
|||||||
cached_at: Instant,
|
cached_at: Instant,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub enum HttpServerPath {
|
||||||
|
Index,
|
||||||
|
HlsMasterPlaylist,
|
||||||
|
HlsVariantPlaylist,
|
||||||
|
HlsSegmentFile,
|
||||||
|
}
|
||||||
|
|
||||||
pub type StreamCache = Arc<RwLock<Option<CachedStreams>>>;
|
pub type StreamCache = Arc<RwLock<Option<CachedStreams>>>;
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
@ -54,6 +73,7 @@ pub struct HttpServer {
|
|||||||
files_dir: PathBuf,
|
files_dir: PathBuf,
|
||||||
api: Api,
|
api: Api,
|
||||||
stream_cache: StreamCache,
|
stream_cache: StreamCache,
|
||||||
|
router: Router<HttpServerPath>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl HttpServer {
|
impl HttpServer {
|
||||||
@ -63,18 +83,43 @@ impl HttpServer {
|
|||||||
api: Api,
|
api: Api,
|
||||||
stream_cache: StreamCache,
|
stream_cache: StreamCache,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
|
let mut router = Router::new();
|
||||||
|
router.insert("/", HttpServerPath::Index).unwrap();
|
||||||
|
router.insert("/index.html", HttpServerPath::Index).unwrap();
|
||||||
|
router
|
||||||
|
.insert(
|
||||||
|
format!("/{{stream}}/{}/live.m3u8", HlsEgress::PATH),
|
||||||
|
HttpServerPath::HlsMasterPlaylist,
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
router
|
||||||
|
.insert(
|
||||||
|
format!("/{{stream}}/{}/{{variant}}/live.m3u8", HlsEgress::PATH),
|
||||||
|
HttpServerPath::HlsVariantPlaylist,
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
router
|
||||||
|
.insert(
|
||||||
|
format!("/{{stream}}/{}/{{variant}}/{{seg}}.ts", HlsEgress::PATH),
|
||||||
|
HttpServerPath::HlsSegmentFile,
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
router
|
||||||
|
.insert(
|
||||||
|
format!("/{{stream}}/{}/{{variant}}/{{seg}}.m4s", HlsEgress::PATH),
|
||||||
|
HttpServerPath::HlsSegmentFile,
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
Self {
|
Self {
|
||||||
index_template,
|
index_template,
|
||||||
files_dir,
|
files_dir,
|
||||||
api,
|
api,
|
||||||
stream_cache,
|
stream_cache,
|
||||||
|
router,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn get_cached_or_fetch_streams(&self) -> Result<IndexTemplateData> {
|
|
||||||
Self::get_cached_or_fetch_streams_static(&self.stream_cache, &self.api).await
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn get_cached_or_fetch_streams_static(
|
async fn get_cached_or_fetch_streams_static(
|
||||||
stream_cache: &StreamCache,
|
stream_cache: &StreamCache,
|
||||||
api: &Api,
|
api: &Api,
|
||||||
@ -100,13 +145,14 @@ impl HttpServer {
|
|||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|stream| {
|
.map(|stream| {
|
||||||
let viewer_count = api.get_viewer_count(&stream.id);
|
let viewer_count = api.get_viewer_count(&stream.id);
|
||||||
|
// TODO: remove HLS assumption
|
||||||
StreamData {
|
StreamData {
|
||||||
id: stream.id.clone(),
|
id: stream.id.clone(),
|
||||||
title: stream
|
title: stream
|
||||||
.title
|
.title
|
||||||
.unwrap_or_else(|| format!("Stream {}", &stream.id[..8])),
|
.unwrap_or_else(|| format!("Stream {}", &stream.id[..8])),
|
||||||
summary: stream.summary,
|
summary: stream.summary,
|
||||||
live_url: format!("/{}/live.m3u8", stream.id),
|
live_url: format!("/{}/{}/live.m3u8", stream.id, HlsEgress::PATH),
|
||||||
viewer_count: if viewer_count > 0 {
|
viewer_count: if viewer_count > 0 {
|
||||||
Some(viewer_count as _)
|
Some(viewer_count as _)
|
||||||
} else {
|
} else {
|
||||||
@ -141,31 +187,97 @@ impl HttpServer {
|
|||||||
Ok(template_data)
|
Ok(template_data)
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn render_index(&self) -> Result<String> {
|
async fn handle_index(
|
||||||
let template_data = self.get_cached_or_fetch_streams().await?;
|
api: Api,
|
||||||
let template = mustache::compile_str(&self.index_template)?;
|
stream_cache: StreamCache,
|
||||||
let rendered = template.render_to_string(&template_data)?;
|
template: String,
|
||||||
Ok(rendered)
|
) -> Result<Response<BoxBody<Bytes, anyhow::Error>>, anyhow::Error> {
|
||||||
|
// Compile template outside async move for better performance
|
||||||
|
let template = match mustache::compile_str(&template) {
|
||||||
|
Ok(t) => t,
|
||||||
|
Err(e) => {
|
||||||
|
error!("Failed to compile template: {}", e);
|
||||||
|
return Ok(Self::base_response().status(500).body(BoxBody::default())?);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let template_data = Self::get_cached_or_fetch_streams_static(&stream_cache, &api).await;
|
||||||
|
|
||||||
|
match template_data {
|
||||||
|
Ok(data) => match template.render_to_string(&data) {
|
||||||
|
Ok(index_html) => Ok(Self::base_response()
|
||||||
|
.header("content-type", "text/html")
|
||||||
|
.body(
|
||||||
|
Full::new(Bytes::from(index_html))
|
||||||
|
.map_err(|e| match e {})
|
||||||
|
.boxed(),
|
||||||
|
)?),
|
||||||
|
Err(e) => {
|
||||||
|
error!("Failed to render template: {}", e);
|
||||||
|
Ok(Self::base_response().status(500).body(BoxBody::default())?)
|
||||||
|
}
|
||||||
|
},
|
||||||
|
Err(e) => {
|
||||||
|
error!("Failed to fetch template data: {}", e);
|
||||||
|
Ok(Self::base_response().status(500).body(BoxBody::default())?)
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn handle_hls_playlist(
|
async fn handle_hls_segment(
|
||||||
api: &Api,
|
|
||||||
req: &Request<Incoming>,
|
req: &Request<Incoming>,
|
||||||
playlist_path: &PathBuf,
|
segment_path: PathBuf,
|
||||||
) -> Result<Response<BoxBody<Bytes, anyhow::Error>>, anyhow::Error> {
|
) -> Result<Response<BoxBody<Bytes, anyhow::Error>>, anyhow::Error> {
|
||||||
// Extract stream ID from path (e.g., /uuid/live.m3u8 -> uuid)
|
let mut response = Self::base_response().header("accept-ranges", "bytes");
|
||||||
let path_parts: Vec<&str> = req
|
|
||||||
.uri()
|
if let Some(r) = req.headers().get("range") {
|
||||||
.path()
|
if let Ok(ranges) = parse_range_header(r.to_str()?) {
|
||||||
.trim_start_matches('/')
|
if ranges.ranges.len() > 1 {
|
||||||
.split('/')
|
warn!("Multipart ranges are not supported, fallback to non-range request");
|
||||||
.collect();
|
Self::path_to_response(segment_path).await
|
||||||
if path_parts.len() < 2 {
|
} else {
|
||||||
return Ok(Response::builder().status(404).body(BoxBody::default())?);
|
let file = File::open(&segment_path).await?;
|
||||||
|
let metadata = file.metadata().await?;
|
||||||
|
let single_range = ranges.ranges.first().unwrap();
|
||||||
|
let range = match RangeBody::get_range(metadata.len(), single_range) {
|
||||||
|
Ok(r) => r,
|
||||||
|
Err(e) => {
|
||||||
|
warn!("Failed to get range: {}", e);
|
||||||
|
return Ok(response
|
||||||
|
.status(StatusCode::RANGE_NOT_SATISFIABLE)
|
||||||
|
.body(BoxBody::default())?);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
let r_body = RangeBody::new(file, metadata.len(), range.clone());
|
||||||
|
|
||||||
|
response = response.status(StatusCode::PARTIAL_CONTENT);
|
||||||
|
let headers = r_body.get_headers();
|
||||||
|
for (k, v) in headers {
|
||||||
|
response = response.header(k, v);
|
||||||
|
}
|
||||||
|
let f_stream = ReaderStream::new(r_body);
|
||||||
|
let body = StreamBody::new(
|
||||||
|
f_stream
|
||||||
|
.map_ok(Frame::data)
|
||||||
|
.map_err(|e| anyhow::anyhow!("Failed to read body: {}", e)),
|
||||||
|
)
|
||||||
|
.boxed();
|
||||||
|
Ok(response.body(body)?)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
Ok(Self::base_response().status(400).body(BoxBody::default())?)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
Self::path_to_response(segment_path).await
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
let stream_id = path_parts[0];
|
async fn handle_hls_master_playlist(
|
||||||
|
api: Api,
|
||||||
|
req: &Request<Incoming>,
|
||||||
|
stream_id: &str,
|
||||||
|
playlist_path: PathBuf,
|
||||||
|
) -> Result<Response<BoxBody<Bytes, anyhow::Error>>, anyhow::Error> {
|
||||||
// Get client IP and User-Agent for tracking
|
// Get client IP and User-Agent for tracking
|
||||||
let client_ip = Self::get_client_ip(req);
|
let client_ip = Self::get_client_ip(req);
|
||||||
let user_agent = req
|
let user_agent = req
|
||||||
@ -203,17 +315,15 @@ impl HttpServer {
|
|||||||
let modified_content =
|
let modified_content =
|
||||||
Self::add_viewer_token_to_playlist(&playlist_content, &viewer_token)?;
|
Self::add_viewer_token_to_playlist(&playlist_content, &viewer_token)?;
|
||||||
|
|
||||||
Ok(Response::builder()
|
let response = Self::base_response()
|
||||||
.header("content-type", "application/vnd.apple.mpegurl")
|
.header("content-type", "application/vnd.apple.mpegurl")
|
||||||
.header("server", "zap-stream-core")
|
|
||||||
.header("access-control-allow-origin", "*")
|
|
||||||
.header("access-control-allow-headers", "*")
|
|
||||||
.header("access-control-allow-methods", "HEAD, GET")
|
|
||||||
.body(
|
.body(
|
||||||
Full::new(Bytes::from(modified_content))
|
Full::new(Bytes::from(modified_content))
|
||||||
.map_err(|e| match e {})
|
.map_err(|e| match e {})
|
||||||
.boxed(),
|
.boxed(),
|
||||||
)?)
|
)?;
|
||||||
|
|
||||||
|
Ok(response)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_client_ip(req: &Request<Incoming>) -> String {
|
fn get_client_ip(req: &Request<Incoming>) -> String {
|
||||||
@ -232,8 +342,8 @@ impl HttpServer {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Fallback to connection IP (note: in real deployment this might be a proxy)
|
// use random string as IP to avoid broken view tracker due to proxying
|
||||||
"unknown".to_string()
|
Uuid::new_v4().to_string()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn add_viewer_token_to_playlist(content: &[u8], viewer_token: &str) -> Result<String> {
|
fn add_viewer_token_to_playlist(content: &[u8], viewer_token: &str) -> Result<String> {
|
||||||
@ -271,6 +381,27 @@ impl HttpServer {
|
|||||||
format!("{}?vt={}", url, viewer_token)
|
format!("{}?vt={}", url, viewer_token)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn base_response() -> Builder {
|
||||||
|
Response::builder()
|
||||||
|
.header("server", "zap-stream-core")
|
||||||
|
.header("access-control-allow-origin", "*")
|
||||||
|
.header("access-control-allow-headers", "*")
|
||||||
|
.header("access-control-allow-methods", "HEAD, GET, OPTIONS")
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get a response object for a file body
|
||||||
|
async fn path_to_response(path: PathBuf) -> Result<Response<BoxBody<Bytes, anyhow::Error>>> {
|
||||||
|
let f = File::open(&path).await?;
|
||||||
|
let f_stream = ReaderStream::new(f);
|
||||||
|
let body = StreamBody::new(
|
||||||
|
f_stream
|
||||||
|
.map_ok(Frame::data)
|
||||||
|
.map_err(|e| anyhow::anyhow!("Failed to read body: {}", e)),
|
||||||
|
)
|
||||||
|
.boxed();
|
||||||
|
Ok(Self::base_response().body(body)?)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Service<Request<Incoming>> for HttpServer {
|
impl Service<Request<Incoming>> for HttpServer {
|
||||||
@ -279,89 +410,50 @@ impl Service<Request<Incoming>> for HttpServer {
|
|||||||
type Future = Pin<Box<dyn Future<Output = Result<Self::Response, Self::Error>> + Send>>;
|
type Future = Pin<Box<dyn Future<Output = Result<Self::Response, Self::Error>> + Send>>;
|
||||||
|
|
||||||
fn call(&self, req: Request<Incoming>) -> Self::Future {
|
fn call(&self, req: Request<Incoming>) -> Self::Future {
|
||||||
// check is index.html
|
let path = req.uri().path().to_owned();
|
||||||
if req.method() == Method::GET && req.uri().path() == "/"
|
// request path as a file path pointing to the output directory
|
||||||
|| req.uri().path() == "/index.html"
|
let dst_path = self.files_dir.join(req.uri().path()[1..].to_string());
|
||||||
{
|
|
||||||
let stream_cache = self.stream_cache.clone();
|
|
||||||
let api = self.api.clone();
|
|
||||||
|
|
||||||
// Compile template outside async move for better performance
|
if let Ok(m) = self.router.at(&path) {
|
||||||
let template = match mustache::compile_str(&self.index_template) {
|
match m.value {
|
||||||
Ok(t) => t,
|
HttpServerPath::Index => {
|
||||||
Err(e) => {
|
let api = self.api.clone();
|
||||||
error!("Failed to compile template: {}", e);
|
let cache = self.stream_cache.clone();
|
||||||
|
let template = self.index_template.clone();
|
||||||
|
return Box::pin(async move { Self::handle_index(api, cache, template).await });
|
||||||
|
}
|
||||||
|
HttpServerPath::HlsMasterPlaylist => {
|
||||||
|
let api = self.api.clone();
|
||||||
|
let stream_id = m.params.get("stream").map(|s| s.to_string());
|
||||||
|
let file_path = dst_path.clone();
|
||||||
return Box::pin(async move {
|
return Box::pin(async move {
|
||||||
Ok(Response::builder()
|
let stream_id = stream_id.context("stream id missing")?;
|
||||||
.status(500)
|
Ok(
|
||||||
.body(BoxBody::default())
|
Self::handle_hls_master_playlist(api, &req, &stream_id, file_path)
|
||||||
.unwrap())
|
.await?,
|
||||||
|
)
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
};
|
HttpServerPath::HlsVariantPlaylist => {
|
||||||
|
// let file handler handle this one, may be used later for HLS-LL to create
|
||||||
return Box::pin(async move {
|
// delta updates
|
||||||
// Use the existing method to get cached template data
|
|
||||||
let template_data =
|
|
||||||
Self::get_cached_or_fetch_streams_static(&stream_cache, &api).await;
|
|
||||||
|
|
||||||
match template_data {
|
|
||||||
Ok(data) => match template.render_to_string(&data) {
|
|
||||||
Ok(index_html) => Ok(Response::builder()
|
|
||||||
.header("content-type", "text/html")
|
|
||||||
.header("server", "zap-stream-core")
|
|
||||||
.body(
|
|
||||||
Full::new(Bytes::from(index_html))
|
|
||||||
.map_err(|e| match e {})
|
|
||||||
.boxed(),
|
|
||||||
)?),
|
|
||||||
Err(e) => {
|
|
||||||
error!("Failed to render template: {}", e);
|
|
||||||
Ok(Response::builder().status(500).body(BoxBody::default())?)
|
|
||||||
}
|
|
||||||
},
|
|
||||||
Err(e) => {
|
|
||||||
error!("Failed to fetch template data: {}", e);
|
|
||||||
Ok(Response::builder().status(500).body(BoxBody::default())?)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
});
|
HttpServerPath::HlsSegmentFile => {
|
||||||
|
// handle segment file (range requests)
|
||||||
|
let file_path = dst_path.clone();
|
||||||
|
return Box::pin(async move {
|
||||||
|
Ok(Self::handle_hls_segment(&req, file_path).await?)
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// check if mapped to file
|
// check if mapped to file (not handled route)
|
||||||
let dst_path = self.files_dir.join(req.uri().path()[1..].to_string());
|
|
||||||
if dst_path.exists() {
|
if dst_path.exists() {
|
||||||
let api_clone = self.api.clone();
|
return Box::pin(async move { Self::path_to_response(dst_path).await });
|
||||||
return Box::pin(async move {
|
|
||||||
let rsp = Response::builder()
|
|
||||||
.header("server", "zap-stream-core")
|
|
||||||
.header("access-control-allow-origin", "*")
|
|
||||||
.header("access-control-allow-headers", "*")
|
|
||||||
.header("access-control-allow-methods", "HEAD, GET");
|
|
||||||
|
|
||||||
if req.method() == Method::HEAD {
|
|
||||||
return Ok(rsp.body(BoxBody::default())?);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Handle HLS playlists with viewer tracking
|
|
||||||
if req.uri().path().ends_with("/live.m3u8") {
|
|
||||||
return Self::handle_hls_playlist(&api_clone, &req, &dst_path).await;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Handle regular files
|
|
||||||
let f = File::open(&dst_path).await?;
|
|
||||||
let f_stream = ReaderStream::new(f);
|
|
||||||
let body = StreamBody::new(
|
|
||||||
f_stream
|
|
||||||
.map_ok(Frame::data)
|
|
||||||
.map_err(|e| Self::Error::new(e)),
|
|
||||||
)
|
|
||||||
.boxed();
|
|
||||||
Ok(rsp.body(body)?)
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// otherwise handle in overseer
|
// fallback to api handler
|
||||||
let api = self.api.clone();
|
let api = self.api.clone();
|
||||||
Box::pin(async move {
|
Box::pin(async move {
|
||||||
match api.handler(req).await {
|
match api.handler(req).await {
|
||||||
@ -466,3 +558,110 @@ pub fn check_nip98_auth(req: &Request<Incoming>, public_url: &str) -> Result<Aut
|
|||||||
event,
|
event,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||

/// Range request handler over file handle
struct RangeBody {
    file: File,
    range_start: u64,
    range_end: u64,
    current_offset: u64,
    poll_complete: bool,
    file_size: u64,
}

const MAX_UNBOUNDED_RANGE: u64 = 1024 * 1024;
impl RangeBody {
    pub fn new(file: File, file_size: u64, range: Range<u64>) -> Self {
        Self {
            file,
            file_size,
            range_start: range.start,
            range_end: range.end,
            current_offset: 0,
            poll_complete: false,
        }
    }

    pub fn get_range(file_size: u64, header: &SyntacticallyCorrectRange) -> Result<Range<u64>> {
        let range_start = match header.start {
            StartPosition::Index(i) => {
                ensure!(i < file_size, "Range start out of range");
                i
            }
            StartPosition::FromLast(i) => file_size.saturating_sub(i),
        };
        let range_end = match header.end {
            EndPosition::Index(i) => {
                ensure!(i <= file_size, "Range end out of range");
                i
            }
            EndPosition::LastByte => {
                (file_size.saturating_sub(1)).min(range_start + MAX_UNBOUNDED_RANGE)
            }
        };
        Ok(range_start..range_end)
    }

    pub fn get_headers(&self) -> Vec<(&'static str, String)> {
        let r_len = (self.range_end - self.range_start) + 1;
        vec![
            ("content-length", r_len.to_string()),
            (
                "content-range",
                format!(
                    "bytes {}-{}/{}",
                    self.range_start, self.range_end, self.file_size
                ),
            ),
        ]
    }
}

impl AsyncRead for RangeBody {
    fn poll_read(
        mut self: Pin<&mut Self>,
        cx: &mut std::task::Context<'_>,
        buf: &mut ReadBuf<'_>,
    ) -> Poll<std::io::Result<()>> {
        let range_start = self.range_start + self.current_offset;
        let range_len = self.range_end.saturating_sub(range_start) + 1;
        let bytes_to_read = buf.remaining().min(range_len as usize) as u64;

        if bytes_to_read == 0 {
            return Poll::Ready(Ok(()));
        }

        // when no pending poll, seek to starting position
        if !self.poll_complete {
            let pinned = pin!(&mut self.file);
            pinned.start_seek(SeekFrom::Start(range_start))?;
            self.poll_complete = true;
        }

        // check poll completion
        if self.poll_complete {
            let pinned = pin!(&mut self.file);
            match pinned.poll_complete(cx) {
                Poll::Ready(Ok(_)) => {
                    self.poll_complete = false;
                }
                Poll::Ready(Err(e)) => return Poll::Ready(Err(e)),
                Poll::Pending => return Poll::Pending,
            }
        }

        // Read data from the file
        let pinned = pin!(&mut self.file);
        match pinned.poll_read(cx, buf) {
            Poll::Ready(Ok(_)) => {
                self.current_offset += bytes_to_read;
                Poll::Ready(Ok(()))
            }
            Poll::Ready(Err(e)) => Poll::Ready(Err(e)),
            Poll::Pending => {
                self.poll_complete = true;
                Poll::Pending
            }
        }
    }
}
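
#[cfg(test)]
mod range_body_example {
    use super::*;
    use http_range_header::parse_range_header;

    // Illustrative check of the range arithmetic in `RangeBody::get_range`
    // above, using a hypothetical 10 000-byte file; the expected values follow
    // directly from the code, the inputs themselves are made up.
    #[test]
    fn bounded_and_unbounded_ranges() {
        let file_size = 10_000u64;

        // "bytes=0-499": both ends given, kept as-is (end is inclusive).
        let parsed = parse_range_header("bytes=0-499").unwrap();
        let r = RangeBody::get_range(file_size, parsed.ranges.first().unwrap()).unwrap();
        assert_eq!(r, 0..499);

        // "bytes=9500-": open-ended, clamped to the last byte (or to
        // MAX_UNBOUNDED_RANGE past the start, whichever comes first).
        let parsed = parse_range_header("bytes=9500-").unwrap();
        let r = RangeBody::get_range(file_size, parsed.ranges.first().unwrap()).unwrap();
        assert_eq!(r, 9500..9999);
    }
}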
@ -15,6 +15,8 @@ use std::sync::Arc;
|
|||||||
use tokio::sync::RwLock;
|
use tokio::sync::RwLock;
|
||||||
use url::Url;
|
use url::Url;
|
||||||
use uuid::Uuid;
|
use uuid::Uuid;
|
||||||
|
use zap_stream_core::egress::hls::HlsEgress;
|
||||||
|
use zap_stream_core::egress::recorder::RecorderEgress;
|
||||||
use zap_stream_core::egress::{EgressConfig, EgressSegment};
|
use zap_stream_core::egress::{EgressConfig, EgressSegment};
|
||||||
use zap_stream_core::ingress::ConnectionInfo;
|
use zap_stream_core::ingress::ConnectionInfo;
|
||||||
use zap_stream_core::overseer::{IngressInfo, IngressStream, IngressStreamType, Overseer};
|
use zap_stream_core::overseer::{IngressInfo, IngressStream, IngressStreamType, Overseer};
|
||||||
@ -227,19 +229,54 @@ impl ZapStreamOverseer {
|
|||||||
stream: &UserStream,
|
stream: &UserStream,
|
||||||
pubkey: &Vec<u8>,
|
pubkey: &Vec<u8>,
|
||||||
) -> Result<Event> {
|
) -> Result<Event> {
|
||||||
let extra_tags = vec![
|
// TODO: remove assumption that HLS is enabled
|
||||||
|
let pipeline_dir = PathBuf::from(stream.id.to_string());
|
||||||
|
let mut extra_tags = vec![
|
||||||
Tag::parse(["p", hex::encode(pubkey).as_str(), "", "host"])?,
|
Tag::parse(["p", hex::encode(pubkey).as_str(), "", "host"])?,
|
||||||
Tag::parse([
|
|
||||||
"streaming",
|
|
||||||
self.map_to_stream_public_url(stream, "live.m3u8")?.as_str(),
|
|
||||||
])?,
|
|
||||||
Tag::parse([
|
Tag::parse([
|
||||||
"image",
|
"image",
|
||||||
self.map_to_stream_public_url(stream, "thumb.webp")?
|
self.map_to_public_url(pipeline_dir.join("thumb.webp").to_str().unwrap())?
|
||||||
.as_str(),
|
.as_str(),
|
||||||
])?,
|
])?,
|
||||||
Tag::parse(["service", self.map_to_public_url("api/v1")?.as_str()])?,
|
Tag::parse(["service", self.map_to_public_url("api/v1")?.as_str()])?,
|
||||||
];
|
];
|
||||||
|
match stream.state {
|
||||||
|
UserStreamState::Live => {
|
||||||
|
extra_tags.push(Tag::parse([
|
||||||
|
"streaming",
|
||||||
|
self.map_to_public_url(
|
||||||
|
pipeline_dir
|
||||||
|
.join(HlsEgress::PATH)
|
||||||
|
.join("live.m3u8")
|
||||||
|
.to_str()
|
||||||
|
.unwrap(),
|
||||||
|
)?
|
||||||
|
.as_str(),
|
||||||
|
])?);
|
||||||
|
}
|
||||||
|
UserStreamState::Ended => {
|
||||||
|
if let Some(ep) = stream.endpoint_id {
|
||||||
|
let endpoint = self.db.get_ingest_endpoint(ep).await?;
|
||||||
|
let caps = parse_capabilities(&endpoint.capabilities);
|
||||||
|
let has_recording = caps
|
||||||
|
.iter()
|
||||||
|
.any(|c| matches!(c, EndpointCapability::DVR { .. }));
|
||||||
|
if has_recording {
|
||||||
|
extra_tags.push(Tag::parse([
|
||||||
|
"recording",
|
||||||
|
self.map_to_public_url(
|
||||||
|
pipeline_dir
|
||||||
|
.join(RecorderEgress::FILENAME)
|
||||||
|
.to_str()
|
||||||
|
.unwrap(),
|
||||||
|
)?
|
||||||
|
.as_str(),
|
||||||
|
])?);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
let ev = self
|
let ev = self
|
||||||
.stream_to_event_builder(stream)?
|
.stream_to_event_builder(stream)?
|
||||||
.tags(extra_tags)
|
.tags(extra_tags)
|
||||||
@ -248,10 +285,6 @@ impl ZapStreamOverseer {
|
|||||||
Ok(ev)
|
Ok(ev)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn map_to_stream_public_url(&self, stream: &UserStream, path: &str) -> Result<String> {
|
|
||||||
self.map_to_public_url(&format!("{}/{}", stream.id, path))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn map_to_public_url(&self, path: &str) -> Result<String> {
|
fn map_to_public_url(&self, path: &str) -> Result<String> {
|
||||||
let u: Url = self.public_url.parse()?;
|
let u: Url = self.public_url.parse()?;
|
||||||
Ok(u.join(path)?.to_string())
|
Ok(u.join(path)?.to_string())
|
||||||
@ -351,7 +384,8 @@ impl Overseer for ZapStreamOverseer {
|
|||||||
// Get ingest endpoint configuration based on connection type
|
// Get ingest endpoint configuration based on connection type
|
||||||
let endpoint = self.detect_endpoint(&connection).await?;
|
let endpoint = self.detect_endpoint(&connection).await?;
|
||||||
|
|
||||||
let cfg = get_variants_from_endpoint(&stream_info, &endpoint)?;
|
let caps = parse_capabilities(&endpoint.capabilities);
|
||||||
|
let cfg = get_variants_from_endpoint(&stream_info, &caps)?;
|
||||||
|
|
||||||
if cfg.video_src.is_none() || cfg.variants.is_empty() {
|
if cfg.video_src.is_none() || cfg.variants.is_empty() {
|
||||||
bail!("No video src found");
|
bail!("No video src found");
|
||||||
@ -362,6 +396,34 @@ impl Overseer for ZapStreamOverseer {
|
|||||||
name: "hls".to_string(),
|
name: "hls".to_string(),
|
||||||
variants: cfg.variants.iter().map(|v| v.id()).collect(),
|
variants: cfg.variants.iter().map(|v| v.id()).collect(),
|
||||||
}));
|
}));
|
||||||
|
if let Some(EndpointCapability::DVR { height }) = caps
|
||||||
|
.iter()
|
||||||
|
.find(|c| matches!(c, EndpointCapability::DVR { .. }))
|
||||||
|
{
|
||||||
|
let var = cfg.variants.iter().find(|v| match v {
|
||||||
|
VariantStream::Video(v) => v.height == *height,
|
||||||
|
_ => false,
|
||||||
|
});
|
||||||
|
match var {
|
||||||
|
Some(var) => {
|
||||||
|
// take all streams in the same group as the matching video resolution (video+audio)
|
||||||
|
let vars_in_group = cfg
|
||||||
|
.variants
|
||||||
|
.iter()
|
||||||
|
.filter(|v| v.group_id() == var.group_id());
|
||||||
|
egress.push(EgressType::Recorder(EgressConfig {
|
||||||
|
name: "dvr".to_string(),
|
||||||
|
variants: vars_in_group.map(|v| v.id()).collect(),
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
None => {
|
||||||
|
warn!(
|
||||||
|
"Invalid DVR config, no variant found with height {}",
|
||||||
|
height
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
let stream_id = connection.id.clone();
|
let stream_id = connection.id.clone();
|
||||||
// insert new stream record
|
// insert new stream record
|
||||||
@ -371,6 +433,12 @@ impl Overseer for ZapStreamOverseer {
|
|||||||
starts: Utc::now(),
|
starts: Utc::now(),
|
||||||
state: UserStreamState::Live,
|
state: UserStreamState::Live,
|
||||||
endpoint_id: Some(endpoint.id),
|
endpoint_id: Some(endpoint.id),
|
||||||
|
title: user.title.clone(),
|
||||||
|
summary: user.summary.clone(),
|
||||||
|
thumb: user.image.clone(),
|
||||||
|
content_warning: user.content_warning.clone(),
|
||||||
|
goal: user.goal.clone(),
|
||||||
|
tags: user.tags.clone(),
|
||||||
..Default::default()
|
..Default::default()
|
||||||
};
|
};
|
||||||
let stream_event = self.publish_stream_event(&new_stream, &user.pubkey).await?;
|
let stream_event = self.publish_stream_event(&new_stream, &user.pubkey).await?;
|
||||||
@ -410,13 +478,10 @@ impl Overseer for ZapStreamOverseer {
|
|||||||
|
|
||||||
// Get the cost per minute from the ingest endpoint, or use default
|
// Get the cost per minute from the ingest endpoint, or use default
|
||||||
let cost_per_minute = if let Some(endpoint_id) = stream.endpoint_id {
|
let cost_per_minute = if let Some(endpoint_id) = stream.endpoint_id {
|
||||||
if let Some(endpoint) = self.db.get_ingest_endpoint(endpoint_id).await? {
|
let ep = self.db.get_ingest_endpoint(endpoint_id).await?;
|
||||||
endpoint.cost
|
ep.cost
|
||||||
} else {
|
|
||||||
0
|
|
||||||
}
|
|
||||||
} else {
|
} else {
|
||||||
0
|
bail!("Endpoint id not set on stream");
|
||||||
};
|
};
|
||||||
|
|
||||||
// Convert duration from seconds to minutes and calculate cost
|
// Convert duration from seconds to minutes and calculate cost
|
||||||
@ -427,7 +492,7 @@ impl Overseer for ZapStreamOverseer {
|
|||||||
.tick_stream(pipeline_id, stream.user_id, duration, cost)
|
.tick_stream(pipeline_id, stream.user_id, duration, cost)
|
||||||
.await?;
|
.await?;
|
||||||
if bal <= 0 {
|
if bal <= 0 {
|
||||||
bail!("Not enough balance");
|
bail!("Balance has run out");
|
||||||
}
|
}
|
||||||
|
|
||||||
// Update last segment time for this stream
|
// Update last segment time for this stream
|
||||||
@ -508,6 +573,7 @@ impl Overseer for ZapStreamOverseer {
|
|||||||
viewer_states.remove(&stream.id);
|
viewer_states.remove(&stream.id);
|
||||||
|
|
||||||
stream.state = UserStreamState::Ended;
|
stream.state = UserStreamState::Ended;
|
||||||
|
stream.ends = Some(Utc::now());
|
||||||
let event = self.publish_stream_event(&stream, &user.pubkey).await?;
|
let event = self.publish_stream_event(&stream, &user.pubkey).await?;
|
||||||
stream.event = Some(event.as_json());
|
stream.event = Some(event.as_json());
|
||||||
self.db.update_stream(&stream).await?;
|
self.db.update_stream(&stream).await?;
|
||||||
@ -525,7 +591,7 @@ impl ZapStreamOverseer {
|
|||||||
let default = endpoints.iter().max_by_key(|e| e.cost);
|
let default = endpoints.iter().max_by_key(|e| e.cost);
|
||||||
Ok(endpoints
|
Ok(endpoints
|
||||||
.iter()
|
.iter()
|
||||||
.find(|e| e.name == connection.endpoint)
|
.find(|e| e.name.eq_ignore_ascii_case(connection.endpoint))
|
||||||
.or(default)
|
.or(default)
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.clone())
|
.clone())
|
||||||
@ -538,13 +604,52 @@ struct EndpointConfig<'a> {
|
|||||||
variants: Vec<VariantStream>,
|
variants: Vec<VariantStream>,
|
||||||
}
|
}
|
||||||

enum EndpointCapability {
    SourceVariant,
    Variant { height: u16, bitrate: u64 },
    DVR { height: u16 },
}

fn parse_capabilities(cap: &Option<String>) -> Vec<EndpointCapability> {
    if let Some(cap) = cap {
        cap.to_ascii_lowercase()
            .split(',')
            .map_while(|c| {
                let cs = c.split(':').collect::<Vec<&str>>();
                match cs[0] {
                    "variant" if cs[1] == "source" => Some(EndpointCapability::SourceVariant),
                    "variant" if cs.len() == 3 => {
                        if let (Ok(h), Ok(br)) = (cs[1].parse(), cs[2].parse()) {
                            Some(EndpointCapability::Variant {
                                height: h,
                                bitrate: br,
                            })
                        } else {
                            warn!("Invalid variant: {}", c);
                            None
                        }
                    }
                    "dvr" if cs.len() == 2 => {
                        if let Ok(h) = cs[1].parse() {
                            Some(EndpointCapability::DVR { height: h })
                        } else {
                            warn!("Invalid dvr: {}", c);
                            None
                        }
                    }
                    _ => None,
                }
            })
            .collect()
    } else {
        vec![]
    }
}
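
#[cfg(test)]
mod capability_parsing_example {
    use super::*;

    // Illustrative capabilities string; the actual values stored per ingest
    // endpoint are not shown in this diff. Based on the parser above,
    // "variant:source,variant:720:3000000,dvr:720" enables a source
    // passthrough variant, a 720p/3 Mbps transcode and DVR recording at 720p.
    #[test]
    fn parses_source_variant_and_dvr() {
        let caps =
            parse_capabilities(&Some("variant:source,variant:720:3000000,dvr:720".to_string()));
        assert!(matches!(caps[0], EndpointCapability::SourceVariant));
        assert!(matches!(
            caps[1],
            EndpointCapability::Variant {
                height: 720,
                bitrate: 3_000_000
            }
        ));
        assert!(matches!(caps[2], EndpointCapability::DVR { height: 720 }));
    }
}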
fn get_variants_from_endpoint<'a>(
|
fn get_variants_from_endpoint<'a>(
|
||||||
info: &'a IngressInfo,
|
info: &'a IngressInfo,
|
||||||
endpoint: &IngestEndpoint,
|
capabilities: &Vec<EndpointCapability>,
|
||||||
) -> Result<EndpointConfig<'a>> {
|
) -> Result<EndpointConfig<'a>> {
|
||||||
let capabilities_str = endpoint.capabilities.as_deref().unwrap_or("");
|
|
||||||
let capabilities: Vec<&str> = capabilities_str.split(',').collect();
|
|
||||||
|
|
||||||
let mut vars: Vec<VariantStream> = vec![];
|
let mut vars: Vec<VariantStream> = vec![];
|
||||||
|
|
||||||
let video_src = info
|
let video_src = info
|
||||||
@ -561,43 +666,48 @@ fn get_variants_from_endpoint<'a>(
|
|||||||
let mut dst_index = 0;
|
let mut dst_index = 0;
|
||||||
|
|
||||||
for capability in capabilities {
|
for capability in capabilities {
|
||||||
let parts: Vec<&str> = capability.split(':').collect();
|
match capability {
|
||||||
|
EndpointCapability::SourceVariant => {
|
||||||
|
// Add copy variant (group for source)
|
||||||
|
if let Some(video_src) = video_src {
|
||||||
|
vars.push(VariantStream::CopyVideo(VariantMapping {
|
||||||
|
id: Uuid::new_v4(),
|
||||||
|
src_index: video_src.index,
|
||||||
|
dst_index,
|
||||||
|
group_id,
|
||||||
|
}));
|
||||||
|
dst_index += 1;
|
||||||
|
}
|
||||||
|
|
||||||
if parts.len() >= 2 && parts[0] == "variant" && parts[1] == "source" {
|
if let Some(audio_src) = audio_src {
|
||||||
// Add copy variant (group for source)
|
vars.push(VariantStream::CopyAudio(VariantMapping {
|
||||||
if let Some(video_src) = video_src {
|
id: Uuid::new_v4(),
|
||||||
vars.push(VariantStream::CopyVideo(VariantMapping {
|
src_index: audio_src.index,
|
||||||
id: Uuid::new_v4(),
|
dst_index,
|
||||||
src_index: video_src.index,
|
group_id,
|
||||||
dst_index,
|
}));
|
||||||
group_id,
|
dst_index += 1;
|
||||||
}));
|
}
|
||||||
dst_index += 1;
|
|
||||||
|
group_id += 1;
|
||||||
}
|
}
|
||||||
|
EndpointCapability::Variant { height, bitrate } => {
|
||||||
if let Some(audio_src) = audio_src {
|
|
||||||
vars.push(VariantStream::CopyAudio(VariantMapping {
|
|
||||||
id: Uuid::new_v4(),
|
|
||||||
src_index: audio_src.index,
|
|
||||||
dst_index,
|
|
||||||
group_id,
|
|
||||||
}));
|
|
||||||
dst_index += 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
group_id += 1;
|
|
||||||
} else if parts.len() >= 3 && parts[0] == "variant" {
|
|
||||||
if let (Ok(target_height), Ok(bitrate)) =
|
|
||||||
(parts[1].parse::<u32>(), parts[2].parse::<u32>())
|
|
||||||
{
|
|
||||||
// Add video variant for this group
|
// Add video variant for this group
|
||||||
if let Some(video_src) = video_src {
|
if let Some(video_src) = video_src {
|
||||||
|
let output_height = *height;
|
||||||
|
if video_src.height < output_height as _ {
|
||||||
|
info!(
|
||||||
|
"Skipping variant {}p, source would be upscaled from {}p",
|
||||||
|
height, video_src.height
|
||||||
|
);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
// Calculate dimensions maintaining aspect ratio
|
// Calculate dimensions maintaining aspect ratio
|
||||||
let input_width = video_src.width as f32;
|
let input_width = video_src.width as f32;
|
||||||
let input_height = video_src.height as f32;
|
let input_height = video_src.height as f32;
|
||||||
let aspect_ratio = input_width / input_height;
|
let aspect_ratio = input_width / input_height;
|
||||||
|
|
||||||
let output_height = target_height;
|
|
||||||
let output_width = (output_height as f32 * aspect_ratio).round() as u16;
|
let output_width = (output_height as f32 * aspect_ratio).round() as u16;
|
||||||
|
|
||||||
// Ensure even dimensions for H.264 compatibility
|
// Ensure even dimensions for H.264 compatibility
|
||||||
@ -610,7 +720,7 @@ fn get_variants_from_endpoint<'a>(
|
|||||||
output_height + 1
|
output_height + 1
|
||||||
} else {
|
} else {
|
||||||
output_height
|
output_height
|
||||||
} as u16;
|
};
|
||||||
|
|
||||||
vars.push(VariantStream::Video(VideoVariant {
|
vars.push(VariantStream::Video(VideoVariant {
|
||||||
mapping: VariantMapping {
|
mapping: VariantMapping {
|
||||||
@ -620,40 +730,40 @@ fn get_variants_from_endpoint<'a>(
|
|||||||
group_id,
|
group_id,
|
||||||
},
|
},
|
||||||
width: output_width,
|
width: output_width,
|
||||||
height: output_height,
|
height: output_height as _,
|
||||||
fps: video_src.fps,
|
fps: video_src.fps,
|
||||||
bitrate: bitrate as u64,
|
bitrate: *bitrate as _,
|
||||||
codec: "libx264".to_string(),
|
codec: "libx264".to_string(),
|
||||||
profile: 77, // AV_PROFILE_H264_MAIN
|
profile: 77, // AV_PROFILE_H264_MAIN
|
||||||
level: 51,
|
level: 51, // High 5.1 (4K)
|
||||||
keyframe_interval: video_src.fps as u16 * 2,
|
keyframe_interval: video_src.fps as u16,
|
||||||
pixel_format: AV_PIX_FMT_YUV420P as u32,
|
pixel_format: AV_PIX_FMT_YUV420P as u32,
|
||||||
}));
|
}));
|
||||||
dst_index += 1;
|
dst_index += 1;
|
||||||
}
|
|
||||||
|
|
||||||
// Add audio variant for the same group
|
// Add audio variant for the same group
|
||||||
if let Some(audio_src) = audio_src {
|
if let Some(audio_src) = audio_src {
|
||||||
vars.push(VariantStream::Audio(AudioVariant {
|
vars.push(VariantStream::Audio(AudioVariant {
|
||||||
mapping: VariantMapping {
|
mapping: VariantMapping {
|
||||||
id: Uuid::new_v4(),
|
id: Uuid::new_v4(),
|
||||||
src_index: audio_src.index,
|
src_index: audio_src.index,
|
||||||
dst_index,
|
dst_index,
|
||||||
group_id,
|
group_id,
|
||||||
},
|
},
|
||||||
bitrate: 192_000,
|
bitrate: 192_000,
|
||||||
codec: "aac".to_string(),
|
codec: "aac".to_string(),
|
||||||
channels: 2,
|
channels: 2,
|
||||||
sample_rate: 48_000,
|
sample_rate: 48_000,
|
||||||
sample_fmt: "fltp".to_owned(),
|
sample_fmt: "fltp".to_owned(),
|
||||||
}));
|
}));
|
||||||
dst_index += 1;
|
dst_index += 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
group_id += 1;
|
group_id += 1;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
_ => {}
|
||||||
}
|
}
|
||||||
// Handle other capabilities like dvr:720h here if needed
|
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(EndpointConfig {
|
Ok(EndpointConfig {
|
||||||
|