#!/usr/bin/env bash
START_TIME=$SECONDS

set -e

echo "-----START GENERATING HLS STREAM-----"

# Usage create-vod-hls.sh SOURCE_FILE [OUTPUT_NAME]
[[ ! "${1}" ]] && echo "Usage: create-vod-hls.sh SOURCE_FILE [OUTPUT_NAME]" && exit 1

# comment/add lines here to control which renditions would be created
renditions=(
# resolution  bitrate  audio-rate
  "426x240    400k     128k"
  "640x360    800k     128k"
  "842x480    1400k    192k"
  "1280x720   2800k    192k"
  "1920x1080  5000k    256k"
)

segment_target_duration=10       # try to create a new segment every 10 seconds
max_bitrate_ratio=1.07           # maximum accepted bitrate fluctuations
rate_monitor_buffer_ratio=1.5    # maximum buffer size between bitrate conformance checks

#########################################################################

source="${1}"
target="${2}"
if [[ ! "${target}" ]]; then
  target="${source##*/}" # leave only last component of path
  target="${target%.*}"  # strip extension
fi
mkdir -p ${target}

# ----CUSTOM----
sourceResolution="$(ffprobe -v error -select_streams v:0 -show_entries stream=width,height -of csv=s=x:p=0 ${source})"
# echo ${sourceResolution}
arrIN=(${sourceResolution//x/ })
sourceWidth="${arrIN[0]}"
sourceHeight="${arrIN[1]}"
echo ${sourceWidth}
echo ${sourceHeight}

sourceAudioBitRate="$(ffprobe -v error -select_streams a:0 -show_entries stream=bit_rate -of csv=s=x:p=0 ${source})"
sourceAudioBitRateFormatted=$((sourceAudioBitRate / 1000))
# ----END CUSTOM----

key_frames_interval="$(echo `ffprobe ${source} 2>&1 | grep -oE '[[:digit:]]+(.[[:digit:]]+)? fps' | grep -oE '[[:digit:]]+(.[[:digit:]]+)?'`*2 | bc || echo '')"
key_frames_interval=${key_frames_interval:-50}
key_frames_interval=$(echo `printf "%.1f\n" $(bc -l <<<"$key_frames_interval/10")`*10 | bc) # round
key_frames_interval=${key_frames_interval%.*} # truncate to integer

# static parameters that are similar for all renditions
static_params="-c:a aac -ar 48000 -c:v h264 -profile:v main -crf 19 -sc_threshold 0"
static_params+=" -g ${key_frames_interval} -keyint_min ${key_frames_interval} -hls_time ${segment_target_duration}"
static_params+=" -hls_playlist_type vod"

# misc params
misc_params="-hide_banner -y"

master_playlist="#EXTM3U
#EXT-X-VERSION:3
"
cmd=""
resolutionValid=0
prevHeight=0
for rendition in "${renditions[@]}"; do
  # drop extraneous spaces
  rendition="${rendition/[[:space:]]+/ }"

  # rendition fields
  resolution="$(echo ${rendition} | cut -d ' ' -f 1)"
  bitrate="$(echo ${rendition} | cut -d ' ' -f 2)"
  audiorate="$(echo ${rendition} | cut -d ' ' -f 3)"
  audioBitRateFormatted=${audiorate%?} # remove "k" at the last index

  # take highest possible audio bit rate
  if [ $audioBitRateFormatted -gt $sourceAudioBitRateFormatted ]; then
    audiorate=${sourceAudioBitRateFormatted}k
  fi

  # calculated fields
  width="$(echo ${resolution} | grep -oE '^[[:digit:]]+')"
  height="$(echo ${resolution} | grep -oE '[[:digit:]]+$')"
  maxrate="$(echo "`echo ${bitrate} | grep -oE '[[:digit:]]+'`*${max_bitrate_ratio}" | bc)"
  bufsize="$(echo "`echo ${bitrate} | grep -oE '[[:digit:]]+'`*${rate_monitor_buffer_ratio}" | bc)"
  bandwidth="$(echo ${bitrate} | grep -oE '[[:digit:]]+')000"
  name="${height}p"

  if [ $sourceHeight -le $prevHeight ]; then
    echo "video source has height smaller than output height (${height})"
    break
  fi

  widthParam=0
  heightParam=0
  if [ $(((width / sourceWidth) * sourceHeight)) -gt $height ]; then
    widthParam=-2
    heightParam=$height
  else
    widthParam=$width
    heightParam=-2
  fi

  cmd+=" ${static_params} -vf scale=w=${widthParam}:h=${heightParam}"
  cmd+=" -b:v ${bitrate} -maxrate ${maxrate%.*}k -bufsize ${bufsize%.*}k -b:a ${audiorate}"
  cmd+=" -hls_segment_filename ${target}/${name}_%03d.ts ${target}/${name}.m3u8"

  # add rendition entry in the master playlist
  master_playlist+="#EXT-X-STREAM-INF:BANDWIDTH=${bandwidth},RESOLUTION=${resolution}\n${name}.m3u8\n"

  resolutionValid=1
  prevHeight=${height}
done

if [ $resolutionValid -eq 1 ]; then
  # start conversion
  echo -e "Executing command:\nffmpeg ${misc_params} -i ${source} ${cmd}\n"
  ffmpeg ${misc_params} -i ${source} ${cmd}

  # create master playlist file
  echo -e "${master_playlist}" > ${target}/playlist.m3u8

  echo "Done - encoded HLS is at ${target}/"
else
  echo "Video source is too small"
  exit 1
fi

ELAPSED_TIME=$(($SECONDS - $START_TIME))
echo "Elapsed time: ${ELAPSED_TIME}"

echo "-----FINISH GENERATING HLS STREAM-----"
Thank you very much for this script. I use it because I want to stream a full-screen background video (10s video loop) on a static webpage.
My problem is that, if I use the script as-is, the video starts in very low quality (240p). I tried reducing segment_target_duration and got better results, but for some reason I can never get more than 5 segments, even if I set segment_target_duration to 1s (or 0.5s). Because of that I always get low quality (240p) for 2 seconds, then 480p. When the video has finished loading the quality is 720p, and then the loop restarts at 240p; it never plays in 1080p. I've also tried with 240p, 360p and 480p removed (lines 12, 13 and 14). That's better, but I would like the video to load in 1080p as fast as possible, as it looks quite pixelated at 720p (it's a time-lapse with a lot of detail). Also, the loading sometimes freezes on mobile (4G).
Is it possible to adjust some settings to get short segments (1s or less), or anything else that could help me improve the stream?
My goal is a user experience similar to what I get when I open a video on YouTube, where it starts almost instantly with good quality.
As I understand it, you want it to load the highest resolution (if possible) right from the start. For this you need to adjust the bandwidth to match your needs: make the bandwidth lower and it'll load a higher resolution on start. Right now the bandwidth is calculated from the bitrate. You have 2 options to do this:
- directly modify renditions (line 10), but modifying renditions means other settings will change accordingly
- update the bandwidth calculation (line 87), as sketched below
Check the output playlist.m3u8, it'll show the bandwidth ladder.
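For illustration, a minimal sketch of option 2. This assumes you edit the bandwidth line inside the rendition loop; dividing by 2 is only an example value, not a recommendation:

```bash
# original calculation inside the rendition loop:
#   bandwidth="$(echo ${bitrate} | grep -oE '[[:digit:]]+')000"

# sketch: advertise a lower BANDWIDTH so ABR players pick a higher rendition sooner
# (the divisor is arbitrary -- tune it for your audience's connections)
bandwidth="$(( $(echo ${bitrate} | grep -oE '[[:digit:]]+') * 1000 / 2 ))"
```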
-----START GENERATING HLS STREAM-----
create-vod-hls.sh: line 44: Cannot open libmwv206dec.so, libmwv206dec.so: cannot open shared object file: No such file or directory
Cannot open libmwv206dec.so, libmwv206dec.so: cannot open shared object file: No such file or directory
Cannot open libmwv206dec.so, libmwv206dec.so: cannot open shared object file: No such file or directory
128000: syntax error: invalid arithmetic operator (error token is ".so, libmwv206dec.so: cannot open shared object file: No such file or directory
Cannot open libmwv206dec.so, libmwv206dec.so: cannot open shared object file: No such file or directory
Cannot open libmwv206dec.so, libmwv206dec.so: cannot open shared object file: No such file or directory
128000")
create-vod-hls.sh: line 78: [: 128: unary operator expected
create-vod-hls.sh: line 90: [: open: integer expression expected
create-vod-hls.sh: line 98: (width / sourceWidth) * sourceHeight: division by 0 (error token is "sourceWidth) * sourceHeight")
Video source is too small

I'm having this error.
The error says what it is: Video source is too small. Make sure your video dimensions are at least 240p, or create new renditions that match your video dimensions (line 10), for example as sketched below.
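For example, a hypothetical extra entry at the top of the ladder for very small sources (the bitrates here are guesses, adjust to taste):

```bash
renditions=(
# resolution  bitrate  audio-rate
  "256x144    200k     64k"    # hypothetical entry for sources smaller than 240p
  "426x240    400k     128k"
  # ...rest of the ladder unchanged
)
```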
Hi @maitrungduc1410,
I want to change the frame rate of each rendition as well. For that I tried changing the frame rate via the fps filter as well as the -r option, but I can't see a frame rate attribute in the mediainfo output, only frame rate mode = variable.
Can you help? Sample command tried:
ffmpeg -hide_banner -y -i beach.mp4 -c:a aac -ar 48000 -c:v h264 -profile:v main -crf 19 -sc_threshold 0 -g 60 -keyint_min 60 -hls_time 10 -hls_playlist_type vod -filter_complex drawtext=text=1920x1080-5000k@30:[email protected]:fontsize=30:x=30:y=200,scale=w=1920:h=-2 -r:v 30 -b:v 5000k -maxrate 5350k -bufsize 7500k -b:a 125k -hls_segment_filename beach/1080p_%03d.ts beach/1080p.m3u8
What I suggest is to first convert your original video to the frame rate you want:
ffmpeg -i <input> -filter:v fps=30 <output>
Then use my script as usual to generate the HLS stream.
P/S: read my note below.
Note about adding more effects/filters, subtitles, watermarks, ...
If you ever want to apply more effects to your video when generating the HLS stream (change sharpness, add a color filter, add subtitles, add a watermark, ...), you should do that in a pre-processing step. Keep my script focused on what it was born for: generating HLS, and only that. There are some limitations with -vf (video filter), which is used in this script, and if you applied effects here you would have to apply them for every rendition, duplicating the work multiple times.
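For example, a hypothetical pre-processing pass (all file names are placeholders) that burns a watermark into an intermediate file before running the script:

```bash
# burn a watermark into an intermediate file first (input.mp4/logo.png are placeholders)...
ffmpeg -i input.mp4 -i logo.png \
  -filter_complex "overlay=W-w-10:H-h-10" \
  -c:v libx264 -crf 18 -c:a copy preprocessed.mp4

# ...then generate the HLS renditions from that single pre-processed file as usual
./create-vod-hls.sh preprocessed.mp4 my-video
```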
Hi @maitrungduc1410,
to fix the error @throne1986 reported, just add
LC_NUMERIC="en_US.UTF-8"
to the beginning of the script. bc always returns a decimal point . (dot), but printf parses numbers according to the locale settings, and in some languages the decimal separator is a , (comma).
Cheers and thanks for this script.
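For instance, placed right at the top of the script (a minimal sketch, nothing else changes):

```bash
#!/usr/bin/env bash
# force a "." decimal separator so printf agrees with the output of bc
LC_NUMERIC="en_US.UTF-8"

START_TIME=$SECONDS
set -e
# ...rest of create-vod-hls.sh unchanged
```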
Thank you
I have an mkv video with multiple audio tracks, one in English and the other in Hindi. When I use this code it works perfectly and converts the video into a multi-quality playlist, but the output playlist contains only one audio track. Can you please assist me with how to get all the audio tracks from my original mkv file into the converted m3u8 playlist?
This answer seems to be what you're looking for. The idea is quite simple (a rough sketch follows the steps):
- Step 1: generate HLS
- Step 2: generate audio track 1
- Step 3: generate audio track 2
- ...
- Step n: generate audio track (n - 1)
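For illustration only, a rough sketch of what steps 2 and 3 could look like, assuming an mkv with two audio streams (all file names here are made up). You would then reference the extra playlists from playlist.m3u8 with #EXT-X-MEDIA:TYPE=AUDIO entries and an AUDIO attribute on each #EXT-X-STREAM-INF line:

```bash
# step 2: English audio (stream a:0) as its own audio-only HLS playlist
ffmpeg -i input.mkv -map 0:a:0 -c:a aac -b:a 128k \
  -hls_time 10 -hls_playlist_type vod \
  -hls_segment_filename out/audio_en_%03d.ts out/audio_en.m3u8

# step 3: Hindi audio (stream a:1)
ffmpeg -i input.mkv -map 0:a:1 -c:a aac -b:a 128k \
  -hls_time 10 -hls_playlist_type vod \
  -hls_segment_filename out/audio_hi_%03d.ts out/audio_hi.m3u8
```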
That's great, thank you for your help. BTW, can you add this to this fork in the next update? It would help many people.
Hey, nice script (I guess). But why do I always see only "WEBVTT" inside my .vtt files? I would expect to see my subtitles there; at least the input source has a subtitle track in the embedded tx3g format.
It would be awesome to have a version of the script that simply processes every track within the mp4 container and adds it to the HLS stream. Why not include all video streams and audio tracks? Another thing is stereo vs. 5.1 sound, any idea how to implement that?
Thanks in advance
Can you please share how you did that (the multi-audio steps above)? I need it badly but am unable to figure it out. And what about subtitles? Please share.
Thanks for putting this out. I'm currently working on a Node.js project that uses dustjs as the templating engine. How do I generate the HLS manifest and DASH manifest by using Node's exec command to run the shell script within a Docker container?
Here's my simple video transcoder server that uses NodeJS (Express + Multer) and generates HLS using the exec command. The project is dockerized and can run with Docker. You'll need an Azure storage account in order to run it, but you may modify my code to remove that part if it's not needed.
Hey there, do you plan on doing a version that is S3 compatible? Thank you, I really appreciate this script you wrote.
I don't have much time at the moment to make an S3 version for the server, so you'll need to do it yourself. All you have to do is update the queue.js file, lines 53 and 54: https://gitlab.com/maitrungduc1410/video-transcode-server/-/blob/master/queue.js#L54
You replace the content with something like this:
// declare these on top of queue.js
const { S3 } = require('aws-sdk')
const { createReadStream } = require('fs')

const s3 = new S3({
  endpoint: 'some endpoint',
  accessKeyId: 'accessKeyId',
  secretAccessKey: 'secretAccessKey',
});

// below is content for line 53 and 54
const uploadParams = {
  Bucket: 'some bucket',
  Key: file,
  Body: createReadStream(filepath),
  ACL: "public-read",
};

const data = await s3.upload(uploadParams).promise()
Really appreciate the fast response, I will try to make these modifications :)
@JohnTrabusca you're welcome :)
@maitrungduc1410 I'm facing an issue where multer seems not to be saving the file. I'm running the app with pm2 and here's the error:
0|Encoder | /home/encoder/video_transcode/storage/uploads/outputProcess_1d72f6b8f33c7e8069a360c3_1628878083521.mp4 storage/outputs/test-ppppp-80bee0336fd6dc147f863d731628878083716
0|Encoder | JobID 5 has started
0|Encoder | -----START GENERATING HLS STREAM-----
0|Encoder | /home/encoder/video_transcode/storage/uploads/outputProcess_1d72f6b8f33c7e8069a360c3_1628878083521.mp4: No such file or directory
0|Encoder | JobID 5 Failed
0|Encoder | Error: Error when generating HLS stream!
0|Encoder | at ChildProcess.<anonymous> (/home/encoder/video_transcode/hls.js:21:16)
0|Encoder | at ChildProcess.emit (events.js:400:28)
0|Encoder | at Process.ChildProcess._handle.onexit (internal/child_process.js:277:12)
What could be causing this? I already set storage to 755 recursively, and the files are owned by the account I created, not root.
Thanks in Advance.
EDIT:
The file does reach the server via POST:
0|Encoder | {
0|Encoder | fieldname: 'file',
0|Encoder | originalname: 'sample_1280x720_surfing_with_audio.mp4',
0|Encoder | encoding: '7bit',
0|Encoder | mimetype: 'video/mp4',
0|Encoder | destination: './storage/uploads/',
0|Encoder | filename: 'cf4eebbc6931ea7bdc960a66_1628884364395.mp4',
0|Encoder | path: 'storage/uploads/cf4eebbc6931ea7bdc960a66_1628884364395.mp4',
0|Encoder | size: 71753110
0|Encoder | }
Fixed the issue, I wasn't feeding the right mp4 to the generate step since I had removed the preprocessing.
Had an issue with this script because I use a locale with a comma as the decimal separator.
Fixed it like this:
key_frames_interval="$(echo `ffprobe ${source} 2>&1 | grep -oE '[[:digit:]]+(.[[:digit:]]+)? fps' | grep -oE '[[:digit:]]+(.[[:digit:]]+)?'`*2 | bc || echo '')"
key_frames_interval=${key_frames_interval:-50}
key_frames_interval=${key_frames_interval/,/\.}
key_frames_interval=$(bc -l <<<"$key_frames_interval/10")
key_frames_interval=${key_frames_interval/\./,}
key_frames_interval=$(echo `printf "%.1f\n" ${key_frames_interval}`)
key_frames_interval=${key_frames_interval/,/\.}
key_frames_interval=$(echo ${key_frames_interval}*10 | bc) # round
key_frames_interval=${key_frames_interval%.*} # truncate to integer
@fivethreeo there is actually an easier fix, see my comment above.
Thanks for this script, it does a really good job.
I'm working on a project where, besides creating the HLS stream, I also want to encrypt it.
I already managed to do it with another ffmpeg script I made, but with this one I don't see where I could insert the -hls_key_info_file keyinfo.drm parameter (where keyinfo.drm is a text file that contains my AES-128 encryption key info). Could you guide me on this?
@dlobjoie This is what I've done for my script. Hope this might help you.
cmd+=" ${static_params} -vf scale=w=${widthParam}:h=${heightParam}"
cmd+=" -b:v ${bitrate} -maxrate ${maxrate%.*}k -bufsize ${bufsize%.*}k -b:a ${audiorate}"
# Add this line for encryption
cmd+=" -hls_key_info_file KEY_INFO_FILE"
cmd+=" -hls_segment_filename ${target}/${name}_%03d.html ${target}/${name}.m3u8"
@xubmuajkub Thank you very much, I had been looking for this for a long time.
Did you also automate adding the path to the key when creating the playlist? If yes, how did you do it?
@dlobjoie Just follow the steps they describe here: https://hlsbook.net/how-to-encrypt-hls-video-with-ffmpeg/
Thanks @xubmuajkub that's where I started my HLS adventure a few months ago :)
@dlobjoie in my case I just want simple encryption, so I only use one key file for every video.
Hi @maitrungduc1410,
Thanks for your amazing script.
Is there a way to add FRAME-RATE, CODECS, and AUDIO for every resolution?
In the case where we want multiple renditions at the same frame size, but at different bitrates, the script doesn't account for it, and the master m3u8 points to the same rendition playlist, e.g.
#EXTM3U
#EXT-X-VERSION:3
#EXT-X-STREAM-INF:BANDWIDTH=400000,RESOLUTION=384x216
216p.m3u8
#EXT-X-STREAM-INF:BANDWIDTH=700000,RESOLUTION=512x288
288p.m3u8
#EXT-X-STREAM-INF:BANDWIDTH=1100000,RESOLUTION=720x404
404p.m3u8
#EXT-X-STREAM-INF:BANDWIDTH=1750000,RESOLUTION=720x404
404p.m3u8
#EXT-X-STREAM-INF:BANDWIDTH=2750000,RESOLUTION=1280x720
720p.m3u8
#EXT-X-STREAM-INF:BANDWIDTH=3500000,RESOLUTION=1280x720
720p.m3u8
#EXT-X-STREAM-INF:BANDWIDTH=4300000,RESOLUTION=1920x1080
1080p.m3u8
#EXT-X-STREAM-INF:BANDWIDTH=5800000,RESOLUTION=1920x1080
1080p.m3u8
Is there a way to solve this; perhaps outputting each rendition and playlist in a separate folder?
Thanks!
Is there a way to add FRAME-RATE, CODECS, and AUDIO for every resolution?
FRAME-RATE: not sure
CODECS: now supported: https://trac.ffmpeg.org/ticket/8904
AUDIO: seems possible: https://stackoverflow.com/questions/60017730/create-hls-streamable-audio-file-from-mp3
Is there a way to solve this; perhaps outputting each rendition and playlist in a separate folder?
My script only improves and fixes bugs in the original script. For your purpose you may need to write your own logic, for example along the lines of the sketch below.
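A rough, untested sketch: make the rendition name unique by including the bitrate, so that same-height entries no longer overwrite each other:

```bash
# inside the rendition loop, replace name="${height}p" with something unique per rendition:
name="${height}p_${bitrate}"   # e.g. 404p_1100k and 404p_1750k

# the segment/playlist lines then no longer collide:
cmd+=" -hls_segment_filename ${target}/${name}_%03d.ts ${target}/${name}.m3u8"
master_playlist+="#EXT-X-STREAM-INF:BANDWIDTH=${bandwidth},RESOLUTION=${resolution}\n${name}.m3u8\n"
```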
Encoded video is not working on some Android TVs, specifically Sony Bravia with TV OS version 7. The H.264 codec is supposed to work on OS version 6 or later, but it is not working. Can anyone give me more insight?
Oh, I just kept that value from the original script. Anyway, I did some searching and can't find anything official; some folks on Stack Overflow and Quora say it's 858 x 480.