GStreamer pipeline in C


In a previous post, we implemented live streaming of WebM to browser, using GStreamer and Node.js. In this post, we replace the GStreamer pipeline we spawned in that post, with a native executable that does exactly the same thing.

Here’s the code for the pipeline in C. You can build the code using instructions in this post.

#include <gst/gst.h>
#include <glib.h>

#include <stdlib.h>  /* atoi() */

/* GStreamer bus watch: stops the main loop when the pipeline posts
 * an end-of-stream or error message.  Installed with gst_bus_add_watch();
 * `data` is the GMainLoop to quit.  Always returns TRUE so the watch
 * stays installed. */
static gboolean
bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;
  GstMessageType type = GST_MESSAGE_TYPE (msg);

  if (type == GST_MESSAGE_EOS) {
    g_print ("End of stream\n");
    g_main_loop_quit (loop);
  } else if (type == GST_MESSAGE_ERROR) {
    GError *error = NULL;
    gchar *debug = NULL;

    /* Extract the error; the debug string is not printed, only freed. */
    gst_message_parse_error (msg, &error, &debug);
    g_free (debug);

    g_printerr ("Error: %s\n", error->message);
    g_error_free (error);

    g_main_loop_quit (loop);
  }

  return TRUE;
}

int
main (int argc, char *argv[])
{
  GMainLoop *loop;

  GstElement *pipeline, *videosrc, *colorspace, *videoenc,
    *videoq, *audiosrc, *conv, *audioenc, *audioq, *muxer, *sink;

  GstBus *bus;

  /* Initialisation */
  gst_init (&argc, &argv);

  loop = g_main_loop_new (NULL, FALSE);

  /* Check input arguments */
  if (argc != 2) {
    g_printerr ("Usage: %s \n", argv[0]);
    return -1;
  }

  /* Create gstreamer elements */
  pipeline = gst_pipeline_new ("audio-player");
  videosrc = gst_element_factory_make ("videotestsrc", "videosrc");
  colorspace = gst_element_factory_make ("ffmpegcolorspace", "colorspace");
  videoenc = gst_element_factory_make ("vp8enc", "videoenc");
  videoq = gst_element_factory_make ("queue2", "videoq");
  audiosrc = gst_element_factory_make ("audiotestsrc", "audiosrc");
  conv = gst_element_factory_make ("audioconvert", "converter");
  audioenc = gst_element_factory_make ("vorbisenc", "audioenc");
  audioq = gst_element_factory_make ("queue2", "audioq");
  muxer = gst_element_factory_make ("webmmux", "mux");
  sink = gst_element_factory_make ("tcpclientsink", "sink");

  if (!pipeline || !videosrc || !colorspace || !videoenc
    || !videoq || !audiosrc || !conv || !audioenc || !audioq
    || !muxer || !sink) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }

  /* Set up the pipeline */

  /* we set the port number to the sink element */
  g_object_set (G_OBJECT (sink), "port", atoi(argv[1]),
    "host", "localhost", NULL);

  /* set the properties of other elements */
  g_object_set (G_OBJECT (videosrc), "horizontal-speed", 1, NULL);
  g_object_set (G_OBJECT (videosrc), "is-live", 1, NULL);
  g_object_set (G_OBJECT (videoenc), "speed", 2, NULL);
  g_object_set (G_OBJECT (audiosrc), "is-live", 1, NULL);
  g_object_set (G_OBJECT (muxer), "streamable", 1, NULL);

  /* we add a message handler */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  /* we add all elements into the pipeline */
  gst_bin_add_many (GST_BIN (pipeline),
    videosrc, colorspace, videoenc, videoq, audiosrc, conv,
    audioenc, audioq, muxer, sink, NULL);

  /* we link the elements together */
  gst_element_link_many (videosrc, colorspace, videoenc,
    videoq, muxer, NULL);
  gst_element_link_many (audiosrc, conv, audioenc, audioq,
    muxer, NULL);
  gst_element_link(muxer, sink);

  /* Set the pipeline to "playing" state*/
  g_print ("Streaming to port: %s\n", argv[1]);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Iterate */
  g_print ("Running...\n");
  g_main_loop_run (loop);

  /* Out of the main loop, clean up nicely */
  g_print ("Returned, stopping playback\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);

  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));

  return 0;
}

To test, replace the cmd variable with the name of the executable compiled above, e.g.

cmd = './a.out';

Replace args with just one parameter, the muxPort

args = muxPort;

Then, execute Node.js.

11 thoughts on “GStreamer pipeline in C”

  1. Hello Devendra, I have implemented a pipeline to receive RTSP from an RTSP source, i.e., a camera, but I am getting an internal data flow error. My code is as follows:-

    #include <gst/gst.h>
    #include <glib.h>
    
    /* Bus watch: quits the main loop on end-of-stream or on error. */
    static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data)
    {
      GMainLoop *loop = (GMainLoop *) data;
      switch (GST_MESSAGE_TYPE (msg)) {
        case GST_MESSAGE_EOS:
          g_print ("End of stream\n");
          g_main_loop_quit (loop);
          break;
        case GST_MESSAGE_ERROR: {
          gchar *debug;
          GError *error;
          gst_message_parse_error (msg, &error, &debug);
          g_free (debug);
          g_printerr ("Error: %s\n", error->message);
          g_error_free (error);
          g_main_loop_quit (loop);
          break;
          }
        default:
          break;
        }
        return TRUE;
      }
    
    /* Plays an RTSP URL (argv[1]) into a fakesink. */
    int main (int argc, char *argv[])
    {
      GMainLoop *loop;
      GstElement *pipeline, *source, *sink;
      GstBus *bus;
    
      gst_init (&argc, &argv);
      loop = g_main_loop_new (NULL, FALSE);
    
      if (argc != 2) {
        /* NOTE(review): exits silently here; printing a usage line would
         * make the failure visible to the user. */
         
        return -1;
      }
    
      pipeline = gst_pipeline_new ("network-player");
      source = gst_element_factory_make ("rtspsrc","file-source");
      sink = gst_element_factory_make ("fakesink","nakli");
    
      if (!pipeline || !source || !sink) {
        g_printerr ("One element could not be created. Exiting.\n");
        return -1;
      }
    
      g_object_set (G_OBJECT (source), "location", argv[1], NULL);
      printf("The address is %s\n",argv[1] );
    
      bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
      gst_bus_add_watch (bus, bus_call, loop);
      gst_object_unref (bus);
    
      gst_bin_add_many (GST_BIN (pipeline),source, sink, NULL);
    
      /* NOTE(review): likely cause of the reported "Internal data flow
       * error": rtspsrc has no static source pads -- its pads appear only
       * after the RTSP session is negotiated ("sometimes" pads), so this
       * static link cannot succeed.  Connect to rtspsrc's "pad-added"
       * signal and link the new pad to the sink there.  Also note the
       * return value of gst_element_link_many() is not checked. */
      gst_element_link_many (source, sink, NULL);  
    
    
      /* Set the pipeline to "playing" state*/
    
      g_print ("Now playing: %s\n", argv[1]);
      gst_element_set_state (pipeline, GST_STATE_PLAYING);
    
      /* Iterate */
      g_print ("Running...\n");
      g_main_loop_run (loop);
    
      /* Out of the main loop, clean up nicely */
      g_print ("Returned, stopping playback\n");
      gst_element_set_state (pipeline, GST_STATE_NULL);
      g_print ("Deleting pipeline\n");
      gst_object_unref (GST_OBJECT (pipeline));
      return 0;
    }
    

    But I am getting the following error:-

    Now playing: rtsp://admin:admin123@192.168.1.24:554/axis-media/media.amp
    Running…
    Error: Internal data flow error.
    Returned, stopping playback
    Deleting pipeline

    How to solve this problem.

    1. Hi David, you can try setting the GST_DEBUG environment variable to 3 e.g. export GST_DEBUG=3. Then run your program. GStreamer will print debug messages that may help us figure out what is going wrong.

  2. Hi Devendra, I have a trouble. Can you please tell me where the problem is for converting the following GStreamer pipeline into C code. It is actually for TexasInstruments dm814x processor. It works perfectly well.

    “gst-launch -v filesrc location=vbunny.mov ! qtdemux name=bunny
    bunny.video_00 ! queue ! h264parse access-unit=true ! omx_h264dec ! omx_scaler ! ‘video/x-raw-yuv,width=800,height=480’ ! omx_ctrl display-device=LCD ! omx_videosink display-device=LCD ”

    I wrote the code as follows with my knowledge of GStreamer C which I acquired reading articles. Please tell me my mistakes

    #include <gst/gst.h>
    #include <glib.h>
    
    /* Bus watch: quits the main loop on end-of-stream or on error. */
    static gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer data){
      GMainLoop *loop = (GMainLoop *)data;
      switch(GST_MESSAGE_TYPE(msg)) {
        case GST_MESSAGE_EOS:
          g_print("END of Stream\n");
          g_main_loop_quit(loop);
          break;
        case GST_MESSAGE_ERROR: {
          gchar *debug;
          GError *error;
          gst_message_parse_error(msg, &error, &debug);
          g_free(debug);
          g_printerr("Error: %s,\n ", error->message);
          g_error_free(error);
          g_main_loop_quit(loop);
          break;
        }
        default:
          break;
      }
      return TRUE;
    }
    
    //static void add_pad(GstElement *element, GstPad *pad gpointer data){
    //  GstPad *sinkpad;
    //  GstElement *decoder = (GstElement *)data;
    //}
    
    /* Intended to mirror:
     *   filesrc ! qtdemux ! queue ! h264parse ! omx_h264dec ! omx_scaler
     *     ! omx_ctrl ! omx_videosink  (TI dm814x OMX elements) */
    int main(int argc, char *argv[]) {
      GMainLoop *loop;
      GstElement *pipeline, *videosrc,  *scaler, *ctrl, *videoq, *muxer, *sink, *parser, *videodec;
      GstBus *bus;
    
    
      /* Initialisation */
      gst_init (&argc, &argv);
      loop = g_main_loop_new (NULL, FALSE);
    
      /* Check input arguments */
      if (argc != 2) {
        g_printerr ("Usage: %s \n", argv[0]);
        return -1;
      }
    
      pipeline = gst_pipeline_new("mov-player");
      videosrc = gst_element_factory_make("filesrc", "videosource");
      muxer = gst_element_factory_make("qtdemux", "muxer");
      videoq = gst_element_factory_make("queue","videoqueue");
      parser = gst_element_factory_make("h264parse","H264Parser");
      videodec = gst_element_factory_make("omx_h264dec", "H264Decoder");
      ctrl = gst_element_factory_make("omx_ctrl", "DisplayDeviceCtrl");
      sink = gst_element_factory_make("omx_videosink", "DisplayDeviceOutput");
      scaler = gst_element_factory_make("omx_scaler", "LCDscaler");
    
      /* NOTE(review): parser is never checked here -- add !parser. */
      if (!pipeline || !videosrc || !ctrl || !videodec || !videoq
            || !muxer || !sink || !scaler ) {
        g_printerr ("One of the elements could not be created. Exiting.\n");
        return -1;
      }
      else
        g_print("Elements are created\n");
    
      g_object_set (G_OBJECT (videosrc), "location", argv[1], NULL);
      /* NOTE(review): in the gst-launch line "access-unit=true" is a
       * boolean; here the C string "true" is passed where a gboolean is
       * expected in the varargs -- use TRUE instead. */
      g_object_set(G_OBJECT (parser), "access-unit","true", NULL);
      g_object_set(G_OBJECT(ctrl), "display-device", "LCD", NULL);
      g_object_set(G_OBJECT(sink), "display-device", "LCD", NULL);
    
      bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
      gst_bus_add_watch (bus, bus_call, loop);
      gst_object_unref (bus);
    
      /* BUG(review): gst_bin_add_many() takes a NULL-terminated argument
       * list; the trailing NULL sentinel is missing here, which is
       * undefined behavior. */
      gst_bin_add_many(GST_BIN(pipeline), videosrc, muxer, videoq, parser, videodec, scaler, ctrl, sink );
    
      /* NOTE(review): compared with the target pipeline, the demuxer is
       * never linked to the queue (qtdemux pads are dynamic -- use a
       * "pad-added" handler), the queue is never linked to the parser, the
       * parser is never linked to the decoder, and videodec->videoq below
       * reverses the intended queue ! parse ! decode order. */
      gst_element_link(videosrc, muxer);
      gst_element_link(videodec, videoq);
      gst_element_link(videoq, scaler);
      gst_element_link(scaler, ctrl);
      gst_element_link(ctrl, sink);
    
      // Set the pipeline to state playing, and run the main loop.
      g_print ("Playing.\n");
      gst_element_set_state (pipeline, GST_STATE_PLAYING);
      g_main_loop_run (loop);
    
      // Finished playback, cleanup.
      g_print ("Playback Finished.\n");
      gst_element_set_state (pipeline, GST_STATE_NULL);
      gst_object_unref (GST_OBJECT (pipeline));
      return 1;
    }
    

    Please help me!

    1. Hi Swaroop! I see that you are linking a very specific sink pad of qtdemux to the source pad of queue, then the sink pad of queue to the source pad of h264parse. In code you need to do the same. I don’t see the (de)muxer linked to the queue, and the queue to the parser.

  3. Hi Devendra, I thought its a better idea to first work on PC Gstreamer and then to jump into TI Gstreamer. So, I decided to work the same on normal PC. The following pipeline works perfectly.

    ‘gst-launch filesrc location=big_buck_bunny_1080p_h264.mov ! qtdemux name=demux demux.video_00 ! decodebin ! ffmpegcolorspace ! autovideosink’

    The following is the C code I tried and it ends up with an error also below… Please guide me.

    #include <gst/gst.h>
    #include <glib.h>
    
    /* NOTE(review): strcmp() is used in on_pad_added() below, but
     * <string.h> is never included -- add it. */
    GstElement *pipeline, *src, *dmux, *vq, *vbin, *colorspace, *vscale, *vsink;
    /* Bus watch: quits the main loop on end-of-stream or on error. */
    static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data)
    {
      GMainLoop *loop = (GMainLoop *) data;
    
      switch (GST_MESSAGE_TYPE (msg)) {
        case GST_MESSAGE_EOS:
          g_print ("End of stream\n");
          g_main_loop_quit (loop);
          break;
        case GST_MESSAGE_ERROR: {
          gchar *debug;
          GError *error;
          gst_message_parse_error (msg, &error, &debug);
          g_free (debug);
          g_printerr ("Error: %s\n", error->message);
          g_error_free (error);
          g_main_loop_quit (loop);
          break;
        }
        default:
          break;
      }
    
    return TRUE;
    }
    
    /* Links qtdemux's dynamically created "video_00" pad to decodebin. */
    static void on_pad_added (GstElement *element, GstPad *pad, gpointer data)
    {
      gchar *name;
      name = gst_pad_get_name (pad);
      g_print ("A new pad %s was created\n", name);
    
      if(strcmp(name,"video_00")==0){
        g_print("Video Pad is being processed \n");
        GstPad *sinkpad;
        sinkpad = gst_element_get_static_pad(vbin,"sink");
        g_assert(sinkpad);
        gst_pad_link(pad, sinkpad);
        gst_object_unref(sinkpad);
      }
      /* NOTE(review): pads other than video_00 (e.g. the audio_00 pad seen
       * in the error log) are left unlinked; in GStreamer 0.10 an unlinked
       * demuxer pad can stall the stream -- consider a fakesink for it. */
    
      g_free (name);
    }
    
    int main (int argc, char *argv[])
    {
      GMainLoop *loop;
      GstBus *bus;
    
      /* Initialisation */
      gst_init (&argc, &argv);
      loop = g_main_loop_new (NULL, FALSE);
    
      /* Create gstreamer elements */
      /*pipeline = gst_parse_launch ("filesrc location=big_buck_bunny_1080p_h264.mov ! qtdemux name=demux
      demux.video_00 ! queue ! decodebin ! ffmpegcolorspace ! videoscale ! autovideosink", &error); */
      pipeline = gst_pipeline_new("mov-player");
      src = gst_element_factory_make("filesrc", "file-source");
      dmux = gst_element_factory_make("qtdemux", "qt-demuxer");
      vq = gst_element_factory_make("queue","video-queue");
      vbin = gst_element_factory_make("decodebin", "dbin");
      colorspace = gst_element_factory_make("ffmpegcolorspace", "color-space");
      vscale = gst_element_factory_make("videoscale", "video-scale");
      vsink = gst_element_factory_make("autovideosink", "video-sink");
    
      if (!pipeline || !src || !dmux || !vq || !vbin || !colorspace || !vscale || !vsink ) {
        g_printerr ("One of the elements could not be created. Exiting.\n");
        return -1;
      }
    
      /* Set up the pipeline */
      /* NOTE(review): argv[1] is used without checking argc -- running
       * with no argument dereferences a null pointer. */
      g_object_set (G_OBJECT (src), "location", argv[1], NULL);
    
      /* we add a message handler */
      bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
      gst_bus_add_watch (bus, bus_call, loop);
      gst_object_unref (bus);
    
      /* Adding elements to the pipeline */
      /* NOTE(review): vq is created and checked above but never added to
       * the bin and never linked; the gst-launch line this mirrors has a
       * queue between the demuxer and decodebin. */
      gst_bin_add_many(GST_BIN(pipeline), src, dmux, vbin, colorspace, vscale, vsink, NULL);
    
      /* Linking all the elements */
      /* NOTE(review): decodebin's source pad is itself dynamic, so the
       * static vbin->colorspace link may never complete; handle decodebin's
       * pad-added as well, or (as reported in the replies) use an explicit
       * decoder such as ffdec_h264 in place of decodebin. */
      gst_element_link(src, dmux);
      gst_element_link_many(vbin, colorspace, vscale, vsink, NULL);
      g_signal_connect(dmux, "pad-added", G_CALLBACK(on_pad_added), NULL);
    
      /* Set the pipeline to "playing" state*/
      g_print ("Playing: %s\n", argv[1]);
      gst_element_set_state (pipeline, GST_STATE_PLAYING);
    
      /* Iterate */
      g_print ("Running...\n");
      g_main_loop_run (loop);
    
      /* Out of the main loop, clean up nicely */
      g_print ("Returned, stopping playback\n");
      gst_element_set_state (pipeline, GST_STATE_NULL);
    
      g_print ("Deleting pipeline\n");
      gst_object_unref (GST_OBJECT (pipeline));
    
      return 0;
    }

    The ERROR is as follows:
    Playing: big_buck_bunny_1080p_h264.mov
    Running…
    A new pad video_00 was created
    Video Pad is being processed
    A new pad audio_00 was created
    Error: GStreamer encountered a general stream error.
    Returned, stopping playback
    Deleting pipeline

    Please tell me where the mistake is…
    Greetings, Swaroop

    1. I tried executing your code but couldn’t get the pipeline to play with decodebin no matter what. On a hunch I replaced it with ffdec_h264 and the pipeline played perfectly.

  4. Hi Devendra,

    PERFECT! Million Thanks…. seems in C code we need to specify the decoder…..Hope I can do the previous work too and catch you soon… This is a big thing for me :) ….. Time to celebrate :)

  5. Hi! guys I am interested in building a raw video and raw audio stream using gstreamer and C. I have built this pipeline using gstreamer:

    gst-launch -v v4l2src ! ffmpegcolorspace ! video/x-raw-yuv,width=320,height=240,framerate=\(fraction\)15/1 ! queue ! videorate ! video/x-raw-yuv,framerate=15/1 ! tcpserversink host=127.0.0.1 port=5000 sync=false

    I am missing the usb audio stream, And the most important part is how I can incorporate this pipeline to the C code shown in this page. If you guys have any clue it’s going to be very helpful.

    Thanks!!!

  6. Hi,

    I am trying some pipelines with Gstreamer and found your blog.
    Could you clarify me this:
    gst-launch-1.0 -v filesrc location=/home/root/Video1/20160719110600A ! matroskademux ! queue ! h264parse ! vpudec frame_plus=0 ! imxvideoconvert_ipu ! video/x-raw, width=1280, height=720, framerate=30/1,format=I420 ! imxvpuenc_h264 gop-size=30 bitrate=2000 ! h264parse ! matroskamux ! filesink location=/home/root/Video1/20160815063727C

    I try this pipeline to transcode one mkv file but I got the following error
    ERROR: from element /GstPipeline:pipeline0/GstMatroskaDemux:matroskademux0: GStreamer encountered a general stream error.
    Additional debug info:
    /home/pod1hc/data/zcam/zcam-custom/build/tmp/work/cortexa9hf-vfp-neon-poky-linux-gnueabi/gstreamer1.0-plugins-good/1.4.1-r0/gst-plugins-good-1.4.1/gst/matroska/matroska-demux.c(4488): gst_matroska_demux_loop (): /GstPipeline:pipeline0/GstMatroskaDemux:matroskademux0:
    stream stopped, reason not-negotiated
    ERROR: pipeline doesn’t want to preroll.
    Setting pipeline to NULL .

    As I enable DEBUG log, it shows me this
    no such pad ‘video_%u’ in element “matroskademux0”

    I don’t know why this happens because gst-inspect matroskademux show that the plugin has the pad

Leave a Reply

Fill in your details below or click an icon to log in:

WordPress.com Logo

You are commenting using your WordPress.com account. Log Out / Change )

Twitter picture

You are commenting using your Twitter account. Log Out / Change )

Facebook photo

You are commenting using your Facebook account. Log Out / Change )

Google+ photo

You are commenting using your Google+ account. Log Out / Change )

Connecting to %s