feat: Added async archive recording upload

This commit is contained in:
florian 2023-07-08 20:57:17 +02:00
parent e16dab2428
commit 0c323aefd3
7 changed files with 52 additions and 19 deletions

View File

@ -129,9 +129,13 @@ docker run --name srs-s3-upload -d -p 1935:1935 \
In a streaming application, use the following settings (assuming the Docker image is running on the local machine):
* Server: `rtmp://localhost`
* Stream Key: `123456` (use any text you like)
* Stream Key: `abcdef` (use any text you like)
When you start the stream, you will see the HLS data being uploaded to the S3 storage bucket. The stream will be accessible from the URL: `https://stream.mydomain.com/123456/stream.m3u8`
When you start the stream, you will see the HLS data being uploaded to the S3 storage bucket.
* The stream will be accessible from the URL: `https://stream.mydomain.com/abcdef/stream.m3u8`
* Generated thumbnails are at: `https://stream.mydomain.com/abcdef/thumbnail.jpg`
![](docs/manual.png)
The directory in the S3/R2 bucket will be created based on the stream key. It is recommended to *change the stream key* for each stream. This prevents stale, previously cached video segments from an earlier stream being served in place of the new ones.

View File

@ -24,10 +24,9 @@ rtc_server {
vhost __defaultVhost__ {
hls {
enabled on;
hls_path ./hls
hls_path ./hls;
hls_fragment 2;
hls_window 30;
#hls_td_ratio 1.2; # 1.5 is needed, 1.2 did not work
hls_ts_file [stream]/[seq].ts;
hls_m3u8_file [stream]/stream.m3u8;
}
@ -48,10 +47,8 @@ vhost __defaultVhost__ {
dvr {
# DVR currently saves multiple times; we need to figure out how to save only
# once, or how to sync the file with S3 more efficiently.
enabled off;
enabled on;
dvr_path ./archive/[stream]_[2006]-[01]-[02]T[15][04][05].mp4;
dvr_plan segment;
dvr_duration 30;
dvr_wait_keyframe on;
}
http_hooks {
@ -59,5 +56,4 @@ vhost __defaultVhost__ {
on_hls http://127.0.0.1:3000/api/v1/hls;
on_dvr http://127.0.0.1:3000/api/v1/dvrs;
}
}

BIN
docs/manual.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 171 KiB

View File

@ -2,6 +2,14 @@
cd /usr/local/srs/upload
npm start &
# Clear HLS temp directory if needed
if [ -d "/usr/local/srs/hls" ]; then
rm -rf /usr/local/srs/hls/*
fi
# Start SRS
cd /usr/local/srs
./objs/srs -c conf/mysrs.conf

View File

@ -9,3 +9,4 @@ export const PORT = process.env.PORT || 3000;
export const CREATE_THUMBNAIL = process.env.CREATE_THUMBNAIL == undefined || process.env.CREATE_THUMBNAIL === 'true';
export const BUCKET_CLEANUP = process.env.BUCKET_CLEANUP == undefined || process.env.BUCKET_CLEANUP === 'true';
export const UPLOAD_RECORDING = process.env.UPLOAD_VOD == undefined || process.env.UPLOAD_VOD === 'true';

View File

@ -1,8 +1,10 @@
import express from 'express';
import { DVRUpdateEvent, HLSUpdateEvent } from './types';
import { createS3Client, deleteFile, uploadFile } from './s3';
import { S3_BUCKET_NAME, PORT, MAX_TS_FILES_TO_KEEP, BUCKET_CLEANUP, CREATE_THUMBNAIL } from './env';
import { S3_BUCKET_NAME, PORT, MAX_TS_FILES_TO_KEEP, BUCKET_CLEANUP, CREATE_THUMBNAIL, UPLOAD_RECORDING } from './env';
import { createThumbnail } from './thumbnail';
import { Worker } from 'worker_threads';
import path from 'path';
const app = express();
app.use(express.json()); // for parsing application/json
@ -70,19 +72,21 @@ app.post('/api/v1/hls', async (req, res, next) => {
});
// Webhook handler for SRS `on_dvr` events (wired up in mysrs.conf via
// `on_dvr http://127.0.0.1:3000/api/v1/dvrs`): pushes the recorded MP4 to S3.
// NOTE(review): this span is diff residue with the +/- markers stripped — it
// shows BOTH the removed synchronous upload and the added worker-based upload.
app.post('/api/v1/dvrs', async (req, res, next) => {
// console.debug('POST /api/v1/hls called: Received HLS update event.');
// console.debug('POST /api/v1/dvrs called: Received DVR update event.');
const dvrEvent = req.body as DVRUpdateEvent;
//await processOnHlsEvent(hlsEvent);
// FIXME: single-quoted string — `${dvrEvent.file}` is NOT interpolated; this
// logs the literal placeholder text. Should be a template literal (backticks).
console.log('Received DVR event. Uploading ${dvrEvent.file} to S3.');
//console.log(JSON.stringify(hlsEvent));
// Old (removed) path: blocking upload — the webhook response waits for the
// entire MP4 to reach S3 before replying to SRS.
await uploadFile(
s3Client,
`${dvrEvent.cwd}/${dvrEvent.file}`,
S3_BUCKET_NAME,
dvrEvent.file.replace(/^\.\//, ''), // Remove leading ./
'video/mp4'
);
// NOTE(review): UPLOAD_RECORDING is driven by the UPLOAD_VOD env var (see
// env.ts) — the constant/env-var name mismatch is easy to misconfigure.
if (UPLOAD_RECORDING) {
// Queue the mp4 upload using a worker thread. The upload can take a
// long time and the webhook would timeout after 30 seconds.
// NOTE(review): this resolves a `.ts` path as the worker entry point;
// presumably ts-node makes that loadable — confirm Node can start it.
// NOTE(review): no 'error'/'exit' listeners are attached to the worker,
// so a failed upload is invisible to this handler.
const worker = new Worker(path.resolve(__dirname, './upload-worker.ts'), {
workerData: {
source: `${dvrEvent.cwd}/${dvrEvent.file}`,
target: dvrEvent.file.replace(/^\.\//, ''), // Remove leading ./
contentType: 'video/mp4',
},
});
}
res.send('0'); // srs needs 0 == OK
});

20
src/upload-worker.ts Normal file
View File

@ -0,0 +1,20 @@
// Worker-thread entry point: uploads one recorded archive file to S3 and
// deletes the local copy on success. Inputs arrive via workerData:
//   source      - local path of the recorded file to upload
//   target      - object key to write in the S3 bucket
//   contentType - MIME type for the uploaded object
require('ts-node').register(); // needed to run typescript in worker threads
const { S3_BUCKET_NAME } = require('./env');
const { createS3Client, uploadFile } = require('./s3');
const { workerData } = require('worker_threads');
const fs = require('fs');
const s3Client = createS3Client();
const { source, target, contentType } = workerData; // as { source: string; target: string; contentType: string };
const upload = async () => {
  console.log(`- Worker: uploading file ${source} to ${target} with content type ${contentType}...`);
  await uploadFile(s3Client, source, S3_BUCKET_NAME, target, contentType);
  console.log(`- Worker: finished uploading file ${source} to ${target}`);
  // Delete the local archive only after the upload has succeeded, so a
  // failed upload keeps the file on disk for a retry.
  console.log(`- Worker: deleting upload archive file ${source}.`);
  fs.unlinkSync(source);
};
// Just upload with all the data passed in through the workerData.
// Catch failures explicitly: a floating promise would otherwise surface as an
// unhandled rejection that kills the worker with no useful log, and the
// parent (which attaches no 'error' listener) would never learn why.
upload().catch((err) => {
  console.error(`- Worker: upload of ${source} to ${target} failed:`, err);
  process.exitCode = 1; // signal failure to the parent via the worker 'exit' code
});