I am trying to stream an MP4 video in a loop over RTSP. To achieve this I modified the example code from test-appsrc2.c. The idea is to restart the pipeline once I have received the EOS (end of stream) for the generating pipeline.
I successfully receive the EOS, but I am unable either to rewind the pipeline or to play another video. What am I doing wrong?
#include <gst/gst.h>
#include <gst/app/app.h>
#include <gst/rtsp-server/rtsp-server.h>
#include <thread>
/* Per-media state shared between the generator pipeline and the RTSP
 * media pipeline's app elements. Freed via ctx_free(). */
typedef struct
{
GstElement *generator_pipe; /* file -> demux -> parse -> appsink pipeline */
GstElement *vid_appsink;    /* "vid" appsink inside generator_pipe */
GstElement *vid_appsrc;     /* "videosrc" appsrc inside the RTSP media pipeline */
GstElement *aud_appsink;    /* unused here — no audio branch is created */
GstElement *aud_appsrc;     /* unused here — no audio branch is created */
} MyContext;
/* Bus watch on the generator pipeline (user data is the pipeline itself).
 *
 * On EOS the pipeline is set to NULL, the source location is swapped to
 * the next file, and the pipeline is restarted so the stream loops.
 * Returns TRUE so the watch stays installed.
 *
 * NOTE(review): restarting resets buffer running times to 0 while the
 * consuming RTSP appsrc pipeline keeps its clock running — downstream may
 * discard the "old" timestamps; a growing timestamp offset applied in
 * need_data() is likely required for seamless looping. TODO confirm.
 */
gboolean bus_callback(GstBus *bus, GstMessage *msg, gpointer data)
{
    using namespace std::chrono_literals;
    GstElement *pipeline = GST_ELEMENT(data);

    switch (GST_MESSAGE_TYPE(msg)) {
    case GST_MESSAGE_EOS: {
        g_print("GST_MESSAGE_EOS: %s\n",
                gst_message_type_get_name(GST_MESSAGE_TYPE(msg)));
        gst_element_set_state(pipeline, GST_STATE_NULL);
        std::this_thread::sleep_for(100ms);

        GstElement *vin = gst_bin_get_by_name(GST_BIN(pipeline), "vin");
        g_print("vin: %p\n", vin);
        if (vin) {
            gst_util_set_object_arg(G_OBJECT(vin), "location", "video02.mp4");
            /* gst_bin_get_by_name() returns a new reference (transfer full):
             * drop it, or the element leaks on every loop iteration. */
            gst_object_unref(vin);
        }
        gst_element_set_state(pipeline, GST_STATE_PLAYING);
        break;
    }
    case GST_MESSAGE_STATE_CHANGED:
        /* Too chatty to log; intentionally ignored. */
        break;
    default:
        g_print("got message %s\n",
                gst_message_type_get_name(GST_MESSAGE_TYPE(msg)));
        break;
    }
    return TRUE;
}
/* called when we need to give data to an appsrc */
/* "need-data" handler for the RTSP appsrc: pull one encoded sample from
 * the generator pipeline's appsink and push it into the appsrc.
 * PTS/DTS are rebased to running time so the outgoing stream starts at 0. */
static void need_data (GstElement * appsrc, guint unused, MyContext * ctx)
{
    GstSample *sample =
        gst_app_sink_pull_sample (GST_APP_SINK (ctx->vid_appsink));
    if (sample == NULL)
        return;                     /* appsink is at EOS or flushing */

    GstBuffer *buffer = gst_sample_get_buffer (sample);
    if (buffer) {
        GstSegment *seg = gst_sample_get_segment (sample);
        GstClockTime pts, dts;

        /* Convert the PTS/DTS to running time so they start from 0 */
        pts = GST_BUFFER_PTS (buffer);
        if (GST_CLOCK_TIME_IS_VALID (pts))
            pts = gst_segment_to_running_time (seg, GST_FORMAT_TIME, pts);
        dts = GST_BUFFER_DTS (buffer);
        if (GST_CLOCK_TIME_IS_VALID (dts))
            dts = gst_segment_to_running_time (seg, GST_FORMAT_TIME, dts);

        /* Copy so we can adjust the timestamps on a writable buffer. */
        buffer = gst_buffer_copy (buffer);
        GST_BUFFER_PTS (buffer) = pts;
        GST_BUFFER_DTS (buffer) = dts;

        GstFlowReturn ret;
        g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret);
        /* The "push-buffer" action signal does NOT take ownership of the
         * buffer (unlike gst_app_src_push_buffer()), so we must drop our
         * copy here — the original code leaked one buffer per frame. */
        gst_buffer_unref (buffer);
        if (ret != GST_FLOW_OK)
            g_print ("push-buffer failed: %s\n", gst_flow_get_name (ret));
    }
    /* we don't need the appsink sample anymore */
    gst_sample_unref (sample);
}
/* Destroy-notify attached to the media object: shut down the generator
 * pipeline and release every reference held by the context. */
static void ctx_free (MyContext * ctx)
{
    g_print ("ctx_free\n");

    /* Stop the pipeline before releasing it. */
    gst_element_set_state (ctx->generator_pipe, GST_STATE_NULL);

    gst_object_unref (ctx->vid_appsrc);
    gst_object_unref (ctx->vid_appsink);
    gst_object_unref (ctx->generator_pipe);

    g_free (ctx);
}
/* called when a new media pipeline is constructed. We can query the
 * pipeline and configure our appsrc: build a generator pipeline
 * (filesrc -> qtdemux -> h264parse -> appsink) and bridge its output
 * into the "videosrc" appsrc of the RTSP media pipeline. */
static void media_configure (GstRTSPMediaFactory * factory, GstRTSPMedia * media, gpointer user_data)
{
    GstElement *element, *appsrc, *appsink;
    GstCaps *caps;
    GError *error = NULL;
    MyContext *ctx = g_new0 (MyContext, 1);

    /* BUG FIX: gst-launch syntax links elements with '!', not ':'. */
    gchar *pipeline_description = g_strdup_printf (
        "filesrc name=vin location=%s ! qtdemux ! h264parse ! "
        "appsink name=vid max-buffers=3 drop=false",
        (char *) user_data);

    /* This pipeline generates H264 video. The appsink is kept small so that
     * if delivery is slow, encoded buffers are dropped as needed. */
    ctx->generator_pipe = gst_parse_launch (pipeline_description, &error);
    g_free (pipeline_description);      /* was leaked in the original code */
    if (error != NULL) {
        g_printerr ("gst_parse_launch: %s\n", error->message);
        g_clear_error (&error);
    }
    if (ctx->generator_pipe == NULL) {
        /* Nothing to stream from — bail out before dereferencing NULL. */
        g_free (ctx);
        return;
    }

    /* make sure the data is freed when the media is gone */
    g_object_set_data_full (G_OBJECT (media), "rtsp-extra-data", ctx,
        (GDestroyNotify) ctx_free);

    /* get the element (bin) used for providing the streams of the media */
    element = gst_rtsp_media_get_element (media);

    /* configure the caps of the video */
    // TODO identify the caps from the stream instead of hard-coding them
    caps = gst_caps_new_simple ("video/x-h264",
        "stream-format", G_TYPE_STRING, "byte-stream",
        "alignment", G_TYPE_STRING, "au",
        "width", G_TYPE_INT, 2880, "height", G_TYPE_INT, 1860,
        "framerate", GST_TYPE_FRACTION, 30, 1, NULL);

    /* Find the appsrc/appsink pair and wire them together. */
    ctx->vid_appsrc = appsrc =
        gst_bin_get_by_name_recurse_up (GST_BIN (element), "videosrc");
    ctx->vid_appsink = appsink =
        gst_bin_get_by_name (GST_BIN (ctx->generator_pipe), "vid");
    gst_util_set_object_arg (G_OBJECT (appsrc), "format", "time");
    g_object_set (G_OBJECT (appsrc), "caps", caps, NULL);
    g_object_set (G_OBJECT (appsink), "caps", caps, NULL);

    /* install the callback that will be called when a buffer is needed */
    g_signal_connect (appsrc, "need-data", (GCallback) need_data, ctx);
    gst_caps_unref (caps);

    /* Watch the generator bus so EOS can restart the pipeline (looping). */
    GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (ctx->generator_pipe));
    gst_bus_add_watch (bus, bus_callback, ctx->generator_pipe);
    gst_object_unref (bus);

    gst_element_set_state (ctx->generator_pipe, GST_STATE_PLAYING);
    gst_object_unref (element);
}
int main (int argc, char *argv[])
{
GMainLoop *loop;
GstRTSPServer *server;
GstRTSPMountPoints *mounts;
GstRTSPMediaFactory *factory;
if(argc < 2) {
g_print("The video filename is missing\n");
g_print("%s <filename>\n",argv[0]);
return 1;
}
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* create a server instance */
server = gst_rtsp_server_new ();
/* get the mount points for this server, every server has a default object
* that be used to map uri mount points to media factories */
mounts = gst_rtsp_server_get_mount_points (server);
/* make a media factory for a test stream. The default media factory can use
* gst-launch syntax to create pipelines.
* any launch line works as long as it contains elements named pay%d. Each
* element with pay%d names will be a stream */
factory = gst_rtsp_media_factory_new ();
gst_rtsp_media_factory_set_launch (factory, "( appsrc name=videosrc ! h264parse ! rtph264pay name=pay0 pt=96 )");
gst_rtsp_media_factory_set_shared(factory, TRUE);
/* notify when our media is ready, This is called whenever someone asks for
* the media and a new pipeline with our appsrc is created */
g_signal_connect (factory, "media-configure", (GCallback) media_configure, argv[1]);
/* attach the test factory to the /test url */
gst_rtsp_mount_points_add_factory (mounts, "/test", factory);
/* don't need the ref to the mounts anymore */
g_object_unref (mounts);
/* attach the server to the default maincontext */
gst_rtsp_server_attach (server, NULL);
/* start serving */
g_print ("stream ready at rtsp://127.0.0.1:8554/test\n");
g_main_loop_run (loop);
return 0;
}
The log of my application, run with GST_DEBUG=4,
can be found here: https://gist.github.com/graugans/a989a78dd7f2c4083e881bb46ce04651
I am on Ubuntu 20.04 with GStreamer 1.16.3.