# What is This Script?
# This script broadcasts your AzuraCast radio signal as a remote video stream, using a
# static video file that loops in the background while the currently playing track is
# drawn dynamically on top of it.
#
# This script replaces the previous "radio-video-stream" project; it lets you manage files
# directly from within AzuraCast and requires no changes to your Docker configuration.
# 
# To use this script, you must be running AzuraCast 0.19.0 or any later version (including Rolling Release builds).
# 
# Before Using This Script:
#  - Upload a video file to a folder under your station's media library (in the example below, named "videostream")
#  - Choose a font to display Now Playing data and upload a TTF version of that font to the same folder
#
# How to Use This Script:
#  - Copy the section below.
#  - Visit your station's "Edit Liquidsoap Configuration" page (under "Broadcasting" in newer versions).
#  - Paste the code into the bottom-most configuration section.
#  - Customize it as necessary with your station's media directory and font settings.
#  - Click "Save Changes" and then "Restart Broadcasting".
#  - Enjoy!

#
# VIDEO STREAM 
#

# Edit This: Station Base Directory
station_base_dir = "/var/azuracast/stations/station_name"
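# For reference: on a typical Docker installation, a station's base directory is
# "/var/azuracast/stations/" followed by the station's short name (its URL stub).
# For example, for a hypothetical station with the short name "my_station":
#
#   station_base_dir = "/var/azuracast/stations/my_station"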

# Edit This: YouTube Stream Key
youtube_key = "abcd-1234-abcd-1234"
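# The key itself comes from YouTube; it is normally shown in YouTube Studio's
# "Go live" dashboard as the "Stream key". The value above is only a placeholder.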

# Path to the video file that will loop behind the Now Playing text (you have to provide this)
video_file = station_base_dir ^ "/media/videostream/video.mp4"

# Path to a font (TTF) file that will be used to draw the Now Playing text (you have to provide this)
font_file = station_base_dir ^ "/media/videostream/font.ttf"

# A text file automatically generated by AzuraCast in the station's "config" directory,
# containing the current Now Playing text.
nowplaying_file = station_base_dir ^ "/config/nowplaying.txt"

# Text size, position, and color for the Now Playing overlay
font_size = "50"
font_x = "340"
font_y = "990"
font_color = "white"
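# These values are handed to ffmpeg's drawtext filter as-is, so expressions should also
# work. As a hedged example (commented out so it does not override the values above),
# drawtext's built-in w/h/text_w variables can center the text horizontally and pin it
# near the bottom edge:
#
#   font_x = "(w-text_w)/2"
#   font_y = "h-100"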

# Function to overlay the Now Playing text on top of the video stream.
def add_nowplaying_text(s) =
  def mkfilter(graph) =
    let {video = video_track} = source.tracks(s)
    video_track = ffmpeg.filter.video.input(graph, video_track)
    video_track = ffmpeg.filter.drawtext(
      fontfile=font_file, fontsize=font_size, x=font_x, y=font_y,
      fontcolor=font_color, textfile=nowplaying_file, reload=5,
      graph, video_track
    )
    video_track = ffmpeg.filter.video.output(graph, video_track)

    source({
      video = video_track
    })
  end

  ffmpeg.filter.create(mkfilter)
end

videostream = single(video_file)
videostream = add_nowplaying_text(videostream)
videostream = source.mux.video(video=videostream, radio)
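# Optional: to rotate through several background videos instead of looping one file,
# you could swap the single() line above for a playlist() over a folder that contains
# only video files. A minimal sketch (the "videoloops" folder name is just an example):
#
#   videostream = playlist(station_base_dir ^ "/media/videoloops")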

# Output to YouTube
enc = %ffmpeg(
    format="mpegts", 
    %video.raw(
        codec="libx264",
        pixel_format="yuv420p",
        b="300k",
        preset="superfast",
        r=25,
        g=50
    ),
    %audio(
        codec="aac",
        samplerate=44100,
        channels=2,
        b="96k",
        profile="aac_low"
    )
)
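# The settings above favor low CPU usage over picture quality. As a rough, hedged
# example, a higher-quality stream might raise the values inside %video.raw(...) to
# something like:
#
#   b="2500k", r=30, g=60
#
# Keeping "g" (the keyframe interval, in frames) at about twice "r" produces a keyframe
# roughly every two seconds, which live streaming platforms generally expect.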

output.youtube.live.hls(key=youtube_key, fallible=true, encoder=enc, videostream)