Demux an SRT/MPEG-TS stream using GStreamer: srtclientsrc ! tsparse ! tsdemux, with the demuxed audio and video elementary streams routed through queues into filesinks (audio.raw / video.raw).
/*
 * gcc -Wall gst-tsdemux.c -o gst-tsdemux $(pkg-config --cflags --libs gstreamer-1.0)
 */
#include <gst/gst.h>
#include "utils.h"

/* Holds references to all pipeline elements so the pad-added
 * callback can reach the audio/video queues. */
typedef struct _CustomData {
  GstElement *pipeline;
  GstElement *source;
  GstElement *parser;
  GstElement *demux;
  GstElement *aq;
  GstElement *vq;
  GstElement *audiosink;
  GstElement *videosink;
} CustomData;

/* tsdemux creates its source pads dynamically, once the PAT/PMT of the
 * incoming transport stream has been parsed. Link each new pad to the
 * matching queue based on its media type. */
static void on_pad_added(GstElement *element, GstPad *pad, CustomData *data) {
  GstPad *sinkpad_v, *sinkpad_a;
  GstCaps *caps;
  const GstStructure *str;
  const gchar *mediaType;

  sinkpad_v = gst_element_get_static_pad(data->vq, "sink");
  sinkpad_a = gst_element_get_static_pad(data->aq, "sink");

  caps = gst_pad_query_caps(pad, NULL);
  str = gst_caps_get_structure(caps, 0);
  mediaType = gst_structure_get_name(str);

  if (g_str_has_prefix(mediaType, "video")) {
    gst_pad_link(pad, sinkpad_v);
  } else if (g_str_has_prefix(mediaType, "audio")) {
    gst_pad_link(pad, sinkpad_a);
  }

  gst_caps_unref(caps);
  gst_object_unref(sinkpad_v);
  gst_object_unref(sinkpad_a);
}

int main(int argc, char *argv[])
{
  CustomData data;
  GstBus *bus;
  GstMessage *msg;
  GstStateChangeReturn ret;

  gst_init(&argc, &argv);

  /* Pipeline: srtclientsrc ! tsparse ! tsdemux, followed by one
   * queue ! filesink branch each for audio and video. */
  data.source = gst_element_factory_make("srtclientsrc", "source");
  data.parser = gst_element_factory_make("tsparse", "parser");
  data.demux = gst_element_factory_make("tsdemux", "demux");
  data.aq = gst_element_factory_make("queue", "audioqueue");
  data.vq = gst_element_factory_make("queue", "videoqueue");
  data.audiosink = gst_element_factory_make("filesink", "audiosink");
  data.videosink = gst_element_factory_make("filesink", "videosink");
  data.pipeline = gst_pipeline_new("pipeline");

  if (!GST_IS_PIPELINE(data.pipeline) || !data.source || !data.parser || !data.demux
      || !data.aq || !data.vq || !data.audiosink || !data.videosink) {
    g_printerr("Failed to create pipeline.\n");
    return -1;
  }

  gst_bin_add_many(GST_BIN(data.pipeline), data.source, data.parser, data.demux, data.aq, data.vq, data.audiosink, data.videosink, NULL);

  /* Static links; the demuxer's source pads are linked later in on_pad_added(). */
  if (!gst_element_link(data.source, data.parser)
      || !gst_element_link(data.parser, data.demux)
      || !gst_element_link(data.aq, data.audiosink)
      || !gst_element_link(data.vq, data.videosink))
  {
    g_printerr("Elements could not be linked.\n");
    gst_object_unref(data.pipeline);
    return -1;
  }

  if (!g_signal_connect(data.demux, "pad-added", G_CALLBACK(on_pad_added), &data)) {
    g_printerr("Could not set up dynamic pipeline.\n");
    gst_object_unref(data.pipeline);
    return -1;
  }

  /* Connect to an SRT listener on localhost and write the demuxed
   * elementary streams to files. */
  g_object_set(data.source, "uri", "srt://127.0.0.1:1234?pkt_size=1316", NULL);
  g_object_set(data.demux, "emit-stats", TRUE, NULL);
  g_object_set(data.audiosink, "location", "./audio.raw", NULL);
  g_object_set(data.videosink, "location", "./video.raw", NULL);
  g_object_set(data.parser, "set-timestamps", TRUE, NULL);

  ret = gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr("Unable to set the pipeline to the playing state.\n");
    gst_object_unref(data.pipeline);
    return -1;
  }

  /* Block until an error or end-of-stream message arrives on the bus. */
  bus = gst_element_get_bus(data.pipeline);
  msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
  if (msg != NULL) {
    print_message(msg);
    gst_message_unref(msg);
  }

  gst_object_unref(bus);
  gst_element_set_state(data.pipeline, GST_STATE_NULL);
  gst_object_unref(data.pipeline);
  return 0;
}
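The gist does not ship utils.h; the only symbol the program needs from it is print_message(). As a hedged sketch (an assumption, not the author's actual helper), a utils.h along these lines, built only on standard GStreamer calls, is enough to compile and run the example with the gcc command from the header comment:

/* utils.h -- minimal stand-in for the missing helper; an assumption, not the original file. */
#ifndef UTILS_H
#define UTILS_H

#include <gst/gst.h>

/* Print the details of an ERROR or EOS bus message. */
static void print_message(GstMessage *msg) {
  GError *err = NULL;
  gchar *debug_info = NULL;

  switch (GST_MESSAGE_TYPE(msg)) {
    case GST_MESSAGE_ERROR:
      gst_message_parse_error(msg, &err, &debug_info);
      g_printerr("Error from %s: %s\n", GST_OBJECT_NAME(msg->src), err->message);
      g_printerr("Debug info: %s\n", debug_info ? debug_info : "none");
      g_clear_error(&err);
      g_free(debug_info);
      break;
    case GST_MESSAGE_EOS:
      g_print("End-Of-Stream reached.\n");
      break;
    default:
      g_print("Unexpected message of type %s\n", GST_MESSAGE_TYPE_NAME(msg));
      break;
  }
}

#endif /* UTILS_H */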