728x90

명령 결과를 변수에 저장하고 싶을 때 아래처럼 사용합니다. 예를 들어 특정 프로세스의 PID 값을 활용하고 싶을 때 아래처럼 popen을 통해 값을 저장합니다.

/* Runs "pidof -s Process" and parses its output as a PID.
 * BUG FIXES vs. original: the popen() result was never checked for NULL,
 * the fgets() return value was ignored (buf could be read uninitialized),
 * and the pipe was never closed — pclose() is required to release the
 * stream and reap the child process. */
void GetCommandResult()
{
    char buf[512] = {0};

    FILE* pCmd = popen("pidof -s Process", "r");
    if (pCmd == NULL) {
        return; /* could not create the pipe / spawn the shell */
    }

    pid_t pid = 0;
    if (fgets(buf, sizeof(buf), pCmd) != NULL) {
        pid = (pid_t)strtoul(buf, NULL, 10);
    }

    pclose(pCmd); /* closes the stream and waits for the child */

    (void)pid; /* example only: a real caller would use the PID here */
}

 

728x90
728x90

Application과 gstreamer의 pipeline 간에 데이터를 주고받는 내용에 대해 살펴봅니다. 이 예제와 basic tutorial 7이 큰 차이가 없어 이 포스팅에서 한번에 다루겠습니다.

 

Basic tutorial 8: Short-cutting the pipeline

Basic tutorial 8: Short-cutting the pipeline Please port this tutorial to python! Please port this tutorial to javascript! Goal Pipelines constructed with GStreamer do not need to be completely closed. Data can be injected into the pipeline and extracted f

gstreamer.freedesktop.org

Application에서 pipeline으로 데이터를 넣을 수 있는 elements를 appsrc, 그 반대를 appsink라고 합니다. 데이터를 넣는 appsrc는 pull mode / push mode가 있는데 pull의 경우 주기적으로 데이터를 넣어주고 push의 경우 원할 때 넣는 방식으로 진행됩니다. 데이터는 Gstbuffer를 통해 파이프라인을 통과합니다.

QT -= gui

CONFIG += c++17 console
CONFIG -= app_bundle

# You can make your code fail to compile if it uses deprecated APIs.
# In order to do so, uncomment the following line.
#DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000    # disables all the APIs deprecated before Qt 6.0.0

SOURCES += \
        main.cpp

DEPENDPATH += \
    $$PWD/include

INCLUDEPATH += \
    $$PWD/include \
    $$PWD/include/gstreamer-1.0 \
    $$PWD/include/glib-2.0/ \
    $$PWD/include/glib-2.0/include \
    $$PWD/include/orc-0.4 \
    $$PWD/lib/glib-2.0/include \

win32: LIBS += -L$$PWD/lib/ -lgstreamer-1.0 -lgobject-2.0 -lglib-2.0 -lintl -lgstaudio-1.0 -lgstbase-1.0

DESTDIR += \
    $$PWD/bin

# Default rules for deployment.
qnx: target.path = /tmp/$${TARGET}/bin
else: unix:!android: target.path = /opt/$${TARGET}/bin
!isEmpty(target.path): INSTALLS += target

gstaudio-1.0 dll 사용이 필요해 pro 파일도 일부 수정하였습니다.

#include <QCoreApplication>
#include <QDebug>
#include <gst/gst.h>
#include <gst/audio/audio.h>
#include <string.h>

#define CHUNK_SIZE 1024   /* Amount of bytes we are sending in each buffer */
#define SAMPLE_RATE 44100 /* Samples per second we are sending */

/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
  GstElement *pipeline, *app_source, *tee, *audio_queue, *audio_convert1, *audio_resample, *audio_sink;
  GstElement *video_queue, *audio_convert2, *visual, *video_convert, *video_sink;
  GstElement *app_queue, *app_sink;

  guint64 num_samples;   /* Number of samples generated so far (for timestamp generation) */
  gfloat a, b, c, d;     /* For waveform generation */

  guint sourceid;        /* To control the GSource */

  GMainLoop *main_loop;  /* GLib's Main Loop */
} CustomData;

/* This method is called by the idle GSource in the mainloop, to feed CHUNK_SIZE bytes into appsrc.
 * The idle handler is added to the mainloop when appsrc requests us to start sending data (need-data signal)
 * and is removed when appsrc has enough data (enough-data signal).
 * Returns TRUE to stay installed as an idle source, FALSE to stop being called.
 */
static gboolean push_data (CustomData *data) {
  GstBuffer *buffer;
  GstFlowReturn ret;
  int i;
  GstMapInfo map;
  gint16 *raw;
  gint num_samples = CHUNK_SIZE / 2; /* Because each sample is 16 bits */
  gfloat freq;

  /* Create a new empty buffer */
  buffer = gst_buffer_new_and_alloc (CHUNK_SIZE);

  /* Set its timestamp and duration (derived from the running sample count) */
  GST_BUFFER_TIMESTAMP (buffer) = gst_util_uint64_scale (data->num_samples, GST_SECOND, SAMPLE_RATE);
  GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale (num_samples, GST_SECOND, SAMPLE_RATE);

  /* Generate some psychodelic waveforms */
  gst_buffer_map (buffer, &map, GST_MAP_WRITE);
  raw = (gint16 *)map.data;
  data->c += data->d;
  data->d -= data->c / 1000;   /* c/d slowly sweep the tone frequency */
  freq = 1100 + 1000 * data->d;
  for (i = 0; i < num_samples; i++) {
    data->a += data->b;
    data->b -= data->a / freq; /* a/b form a crude oscillator at 'freq' */
    raw[i] = (gint16)(500 * data->a);
  }
  gst_buffer_unmap (buffer, &map);
  data->num_samples += num_samples; /* advance count for the next timestamp */

  /* Push the buffer into the appsrc (the signal takes its own reference) */
  g_signal_emit_by_name (data->app_source, "push-buffer", buffer, &ret);

  /* Free the buffer now that we are done with it */
  gst_buffer_unref (buffer);

  if (ret != GST_FLOW_OK) {
    /* We got some error, stop sending data */
    return FALSE;
  }

  return TRUE;
}

/* This signal callback triggers when appsrc needs data. Here, we add an idle handler
 * to the mainloop to start pushing data into the appsrc.
 * sourceid doubles as an "already feeding" flag: 0 means no idle handler installed. */
static void start_feed (GstElement *source, guint size, CustomData *data) {
  if (data->sourceid == 0) {
    g_print ("Start feeding\n");
    data->sourceid = g_idle_add ((GSourceFunc) push_data, data);
  }
}

/* This callback triggers when appsrc has enough data and we can stop sending.
 * We remove the idle handler from the mainloop */
static void stop_feed (GstElement *source, CustomData *data) {
  if (data->sourceid != 0) {
    g_print ("Stop feeding\n");
    g_source_remove (data->sourceid);
    data->sourceid = 0; /* mark that no idle handler is installed anymore */
  }
}

/* The appsink has received a buffer */
static GstFlowReturn new_sample (GstElement *sink, CustomData *data) {
  GstSample *sample;

  /* Retrieve the buffer: "pull-sample" hands us a reference we must release */
  g_signal_emit_by_name (sink, "pull-sample", &sample);
  if (sample) {
    /* The only thing we do in this example is print a * to indicate a received buffer */
    g_print ("*");
    gst_sample_unref (sample);
    return GST_FLOW_OK;
  }

  return GST_FLOW_ERROR;
}

/* This function is called when an error message is posted on the bus */
static void error_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
  GError *err;
  gchar *debug_info;

  /* Print error details on the screen (we own err and debug_info afterwards) */
  gst_message_parse_error (msg, &err, &debug_info);
  g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
  g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
  g_clear_error (&err);
  g_free (debug_info);

  /* Unrecoverable error: quit the main loop so main() proceeds to cleanup */
  g_main_loop_quit (data->main_loop);
}

int main(int argc, char *argv[])
{
    QCoreApplication a(argc, argv);

    CustomData data;
      GstPad *tee_audio_pad, *tee_video_pad, *tee_app_pad;
      GstPad *queue_audio_pad, *queue_video_pad, *queue_app_pad;
      GstAudioInfo info;
      GstCaps *audio_caps;
      GstBus *bus;

      /* Initialize custom data structure */
      memset (&data, 0, sizeof (data));
      data.b = 1; /* For waveform generation */
      data.d = 1;

      /* Initialize GStreamer */
      gst_init (&argc, &argv);

      /* Create the elements */
      data.app_source = gst_element_factory_make ("appsrc", "audio_source");
      data.tee = gst_element_factory_make ("tee", "tee");
      data.audio_queue = gst_element_factory_make ("queue", "audio_queue");
      data.audio_convert1 = gst_element_factory_make ("audioconvert", "audio_convert1");
      data.audio_resample = gst_element_factory_make ("audioresample", "audio_resample");
      data.audio_sink = gst_element_factory_make ("autoaudiosink", "audio_sink");
      data.video_queue = gst_element_factory_make ("queue", "video_queue");
      data.audio_convert2 = gst_element_factory_make ("audioconvert", "audio_convert2");
      data.visual = gst_element_factory_make ("wavescope", "visual");
      data.video_convert = gst_element_factory_make ("videoconvert", "video_convert");
      data.video_sink = gst_element_factory_make ("autovideosink", "video_sink");
      data.app_queue = gst_element_factory_make ("queue", "app_queue");
      data.app_sink = gst_element_factory_make ("appsink", "app_sink");

      /* Create the empty pipeline */
      data.pipeline = gst_pipeline_new ("test-pipeline");

      if (!data.pipeline || !data.app_source || !data.tee || !data.audio_queue || !data.audio_convert1 ||
          !data.audio_resample || !data.audio_sink || !data.video_queue || !data.audio_convert2 || !data.visual ||
          !data.video_convert || !data.video_sink || !data.app_queue || !data.app_sink) {
        g_printerr ("Not all elements could be created.\n");
        return -1;
      }

      /* Configure wavescope */
      g_object_set (data.visual, "shader", 0, "style", 0, NULL);

      /* Configure appsrc */
      gst_audio_info_set_format (&info, GST_AUDIO_FORMAT_S16, SAMPLE_RATE, 1, NULL);
      audio_caps = gst_audio_info_to_caps (&info);
      g_object_set (data.app_source, "caps", audio_caps, "format", GST_FORMAT_TIME, NULL);
      g_signal_connect (data.app_source, "need-data", G_CALLBACK (start_feed), &data);
      g_signal_connect (data.app_source, "enough-data", G_CALLBACK (stop_feed), &data);

      /* Configure appsink */
      g_object_set (data.app_sink, "emit-signals", TRUE, "caps", audio_caps, NULL);
      g_signal_connect (data.app_sink, "new-sample", G_CALLBACK (new_sample), &data);
      gst_caps_unref (audio_caps);

      /* Link all elements that can be automatically linked because they have "Always" pads */
      gst_bin_add_many (GST_BIN (data.pipeline), data.app_source, data.tee, data.audio_queue, data.audio_convert1, data.audio_resample,
          data.audio_sink, data.video_queue, data.audio_convert2, data.visual, data.video_convert, data.video_sink, data.app_queue,
          data.app_sink, NULL);
      if (gst_element_link_many (data.app_source, data.tee, NULL) != TRUE ||
          gst_element_link_many (data.audio_queue, data.audio_convert1, data.audio_resample, data.audio_sink, NULL) != TRUE ||
          gst_element_link_many (data.video_queue, data.audio_convert2, data.visual, data.video_convert, data.video_sink, NULL) != TRUE ||
          gst_element_link_many (data.app_queue, data.app_sink, NULL) != TRUE) {
        g_printerr ("Elements could not be linked.\n");
        gst_object_unref (data.pipeline);
        return -1;
      }

      /* Manually link the Tee, which has "Request" pads */
      tee_audio_pad = gst_element_request_pad_simple (data.tee, "src_%u");
      g_print ("Obtained request pad %s for audio branch.\n", gst_pad_get_name (tee_audio_pad));
      queue_audio_pad = gst_element_get_static_pad (data.audio_queue, "sink");
      tee_video_pad = gst_element_request_pad_simple (data.tee, "src_%u");
      g_print ("Obtained request pad %s for video branch.\n", gst_pad_get_name (tee_video_pad));
      queue_video_pad = gst_element_get_static_pad (data.video_queue, "sink");
      tee_app_pad = gst_element_request_pad_simple (data.tee, "src_%u");
      g_print ("Obtained request pad %s for app branch.\n", gst_pad_get_name (tee_app_pad));
      queue_app_pad = gst_element_get_static_pad (data.app_queue, "sink");
      if (gst_pad_link (tee_audio_pad, queue_audio_pad) != GST_PAD_LINK_OK ||
          gst_pad_link (tee_video_pad, queue_video_pad) != GST_PAD_LINK_OK ||
          gst_pad_link (tee_app_pad, queue_app_pad) != GST_PAD_LINK_OK) {
        g_printerr ("Tee could not be linked\n");
        gst_object_unref (data.pipeline);
        return -1;
      }
      gst_object_unref (queue_audio_pad);
      gst_object_unref (queue_video_pad);
      gst_object_unref (queue_app_pad);

      /* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
      bus = gst_element_get_bus (data.pipeline);
      gst_bus_add_signal_watch (bus);
      g_signal_connect (G_OBJECT (bus), "message::error", (GCallback)error_cb, &data);
      gst_object_unref (bus);

      /* Start playing the pipeline */
      gst_element_set_state (data.pipeline, GST_STATE_PLAYING);

      /* Create a GLib Main Loop and set it to run */
      data.main_loop = g_main_loop_new (NULL, FALSE);
      g_main_loop_run (data.main_loop);

      /* Release the request pads from the Tee, and unref them */
      gst_element_release_request_pad (data.tee, tee_audio_pad);
      gst_element_release_request_pad (data.tee, tee_video_pad);
      gst_element_release_request_pad (data.tee, tee_app_pad);
      gst_object_unref (tee_audio_pad);
      gst_object_unref (tee_video_pad);
      gst_object_unref (tee_app_pad);

      /* Free resources */
      gst_element_set_state (data.pipeline, GST_STATE_NULL);
      gst_object_unref (data.pipeline);


    return a.exec();
}

 

예제의 pipeline은 아래와 같습니다. Tee라는 elements를 통해 하나의 appsrc에서 오는 데이터를 3 갈래로 나눠줍니다.

/* Create the elements */
data.app_source = gst_element_factory_make ("appsrc", "audio_source");
data.tee = gst_element_factory_make ("tee", "tee");
data.audio_queue = gst_element_factory_make ("queue", "audio_queue");
data.audio_convert1 = gst_element_factory_make ("audioconvert", "audio_convert1");
data.audio_resample = gst_element_factory_make ("audioresample", "audio_resample");
data.audio_sink = gst_element_factory_make ("autoaudiosink", "audio_sink");
data.video_queue = gst_element_factory_make ("queue", "video_queue");
data.audio_convert2 = gst_element_factory_make ("audioconvert", "audio_convert2");
data.visual = gst_element_factory_make ("wavescope", "visual");
data.video_convert = gst_element_factory_make ("videoconvert", "video_convert");
data.video_sink = gst_element_factory_make ("autovideosink", "video_sink");
data.app_queue = gst_element_factory_make ("queue", "app_queue");
data.app_sink = gst_element_factory_make ("appsink", "app_sink");

queue를 사용하면 multi thread 환경처럼 사용할 수 있습니다.

appsrc elements에 audio caps를 설정하고 set해줍니다.

/* Configure appsrc */
gst_audio_info_set_format (&info, GST_AUDIO_FORMAT_S16, SAMPLE_RATE, 1, NULL);
audio_caps = gst_audio_info_to_caps (&info);
g_object_set (data.app_source, "caps", audio_caps, "format", GST_FORMAT_TIME, NULL);
g_signal_connect (data.app_source, "need-data", G_CALLBACK (start_feed), &data);
g_signal_connect (data.app_source, "enough-data", G_CALLBACK (stop_feed), &data);

여기서 need-data, enough-data signal이 나오는데 source가 data가 필요할때 need-data signal을 이제 충분히 데이터를 받았을 때 enough-data signal을 발생합니다. 이 기준은 내부 데이터 queue에 있는 데이터 양으로 판단합니다.

appsrc 내부 queue의 데이터가 필요할 때 g_idle_add를 통해 등록한 GLib 함수를 호출하게 됩니다. 이 함수는 mainloop가 idle 상태 즉 우선순위가 급한 일이 없을 때 호출됩니다. 

/* This signal callback triggers when appsrc needs data. Here, we add an idle handler
 * to the mainloop to start pushing data into the appsrc.
 * sourceid == 0 means no idle handler is currently installed. */
static void start_feed (GstElement *source, guint size, CustomData *data) {
    if (data->sourceid == 0) {
        g_print ("Start feeding\n");
        data->sourceid = g_idle_add ((GSourceFunc) push_data, data);
    }
}

push_data 함수에서는 buffer를 생성하여 wave 데이터를 생성하고 push-buffer signal을 발생시켜 buffer를 app_src elements의 source pad로 전달합니다. 이 source pad의 buffer는 결국 Tee->App_queue를 통해 app_sink로 전달될 것입니다.

/* This method is called by the idle GSource in the mainloop, to feed CHUNK_SIZE bytes into appsrc.
 * The idle handler is added to the mainloop when appsrc requests us to start sending data (need-data signal)
 * and is removed when appsrc has enough data (enough-data signal).
 * Returns TRUE to stay installed as an idle source, FALSE to stop being called.
 */
static gboolean push_data (CustomData *data) {
    GstBuffer *buffer;
    GstFlowReturn ret;
    int i;
    GstMapInfo map;
    gint16 *raw;
    gint num_samples = CHUNK_SIZE / 2; /* Because each sample is 16 bits */
    gfloat freq;

    /* Create a new empty buffer */
    buffer = gst_buffer_new_and_alloc (CHUNK_SIZE);

    /* Set its timestamp and duration (derived from the running sample count) */
    GST_BUFFER_TIMESTAMP (buffer) = gst_util_uint64_scale (data->num_samples, GST_SECOND, SAMPLE_RATE);
    GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale (num_samples, GST_SECOND, SAMPLE_RATE);

    /* Generate some psychodelic waveforms */
    gst_buffer_map (buffer, &map, GST_MAP_WRITE);
    raw = (gint16 *)map.data;
    data->c += data->d;
    data->d -= data->c / 1000;   /* c/d slowly sweep the tone frequency */
    freq = 1100 + 1000 * data->d;
    for (i = 0; i < num_samples; i++) {
        data->a += data->b;
        data->b -= data->a / freq; /* a/b form a crude oscillator at 'freq' */
        raw[i] = (gint16)(500 * data->a);
    }
    gst_buffer_unmap (buffer, &map);
    data->num_samples += num_samples; /* advance count for the next timestamp */

    /* Push the buffer into the appsrc (the signal takes its own reference) */
    g_signal_emit_by_name (data->app_source, "push-buffer", buffer, &ret);

    /* Free the buffer now that we are done with it */
    gst_buffer_unref (buffer);

    if (ret != GST_FLOW_OK) {
        /* We got some error, stop sending data */
        return FALSE;
    }

    return TRUE;
}

app_sink에 new-sample signal을 등록해서 sample이 들어올 때 new_sample 함수가 호출됩니다. 

/* Configure appsink */
g_object_set (data.app_sink, "emit-signals", TRUE, "caps", audio_caps, NULL);
g_signal_connect (data.app_sink, "new-sample", G_CALLBACK (new_sample), &data);
gst_caps_unref (audio_caps);

/* The appsink has received a buffer */
static GstFlowReturn new_sample (GstElement *sink, CustomData *data) {
    GstSample *sample;

    /* Retrieve the buffer: "pull-sample" hands us a reference we must release */
    g_signal_emit_by_name (sink, "pull-sample", &sample);
    if (sample) {
        /* The only thing we do in this example is print a * to indicate a received buffer */
        g_print ("*");
        gst_sample_unref (sample);
        return GST_FLOW_OK;
    }

    return GST_FLOW_ERROR;
}

내부 queue가 어느정도 찼다고 판단되면 stop_feed 함수가 호출되고 등록한 함수를 제거합니다.

/* This callback triggers when appsrc has enough data and we can stop sending.
 * We remove the idle handler from the mainloop */
static void stop_feed (GstElement *source, CustomData *data) {
    if (data->sourceid != 0) {
        g_print ("Stop feeding\n");
        g_source_remove (data->sourceid);
        data->sourceid = 0; /* mark that no idle handler is installed anymore */
    }
}

위와 같이 GLib를 사용하기 위해서는 main loop를 run상태로 설정해줘야 합니다.

/* Create a GLib Main Loop and set it to run */
data.main_loop = g_main_loop_new (NULL, FALSE);
g_main_loop_run (data.main_loop);
728x90
728x90

이전 [Qt] gstreamer(3) - Pad, GSignal 를 통해 Pad에 대해 간단히 다뤄보았습니다. 이번 튜토리얼에서는 Pad에 대해 좀 더 자세히 살펴봅니다.

 

Basic tutorial 6: Media formats and Pad Capabilities

Please port this tutorial to python! Please port this tutorial to javascript! Goal Pad Capabilities are a fundamental element of GStreamer, although most of the time they are invisible because the framework handles them automatically. This somewhat theoret

gstreamer.freedesktop.org

Pad는 이전에도 살펴봤듯이 elements들의 입력과 출력을 담당합니다. Pad는 여러 기능(타입)들을 지원하고 오디오 데이터의 경우 범위로 지정할 수 있지만 Pad 간 통과하면서 같은 타입으로 고정됩니다.

Pad는 Pad templates로부터 생성되고 Pad template는 Pad가 가질 수 있는 기능들을 나타냅니다. 

#include <QCoreApplication>
#include <QDebug>
#include "gst/gst.h"

/* Functions below print the Capabilities in a human-friendly format */
/* Callback for gst_structure_foreach: prints one "name: value" field.
 * pfx is the indentation prefix forwarded from print_caps. */
static gboolean print_field (GQuark field, const GValue * value, gpointer pfx) {
    gchar *str = gst_value_serialize (value);

    g_print ("%s  %15s: %s\n", (gchar *) pfx, g_quark_to_string (field), str);
    g_free (str); /* gst_value_serialize returns a newly-allocated string */
    return TRUE;  /* keep iterating over the remaining fields */
}

/* Prints every structure contained in the given caps, one line per field.
 * pfx is prepended to each output line for indentation. */
static void print_caps (const GstCaps * caps, const gchar * pfx) {
    guint i;

    g_return_if_fail (caps != NULL);

    /* ANY / EMPTY are special caps with no structures to enumerate */
    if (gst_caps_is_any (caps)) {
        g_print ("%sANY\n", pfx);
        return;
    }
    if (gst_caps_is_empty (caps)) {
        g_print ("%sEMPTY\n", pfx);
        return;
    }

    for (i = 0; i < gst_caps_get_size (caps); i++) {
        GstStructure *structure = gst_caps_get_structure (caps, i);

        g_print ("%s%s\n", pfx, gst_structure_get_name (structure));
        gst_structure_foreach (structure, print_field, (gpointer) pfx);
    }
}

/* Prints information about a Pad Template, including its Capabilities */
static void print_pad_templates_information (GstElementFactory * factory) {
    const GList *pads;
    GstStaticPadTemplate *padtemplate;

    g_print ("Pad Templates for %s:\n", gst_element_factory_get_longname (factory));
    if (!gst_element_factory_get_num_pad_templates (factory)) {
        g_print ("  none\n");
        return;
    }

    /* The list is owned by the factory; do not modify or free it */
    pads = gst_element_factory_get_static_pad_templates (factory);
    while (pads) {
        padtemplate = static_cast<GstStaticPadTemplate *>(pads->data);
        pads = g_list_next (pads);

        /* Direction: whether the pad produces (SRC) or consumes (SINK) data */
        if (padtemplate->direction == GST_PAD_SRC)
            g_print ("  SRC template: '%s'\n", padtemplate->name_template);
        else if (padtemplate->direction == GST_PAD_SINK)
            g_print ("  SINK template: '%s'\n", padtemplate->name_template);
        else
            g_print ("  UNKNOWN!!! template: '%s'\n", padtemplate->name_template);

        /* Presence: Always / Sometimes / On request */
        if (padtemplate->presence == GST_PAD_ALWAYS)
            g_print ("    Availability: Always\n");
        else if (padtemplate->presence == GST_PAD_SOMETIMES)
            g_print ("    Availability: Sometimes\n");
        else if (padtemplate->presence == GST_PAD_REQUEST)
            g_print ("    Availability: On request\n");
        else
            g_print ("    Availability: UNKNOWN!!!\n");

        if (padtemplate->static_caps.string) {
            GstCaps *caps;
            g_print ("    Capabilities:\n");
            /* gst_static_caps_get returns a reference we must unref */
            caps = gst_static_caps_get (&padtemplate->static_caps);
            print_caps (caps, "      ");
            gst_caps_unref (caps);

        }

        g_print ("\n");
    }
}

/* Shows the CURRENT capabilities of the requested pad in the given element */
static void print_pad_capabilities (GstElement *element, gchar *pad_name) {
    GstPad *pad = NULL;
    GstCaps *caps = NULL;

    /* Retrieve pad */
    pad = gst_element_get_static_pad (element, pad_name);
    if (!pad) {
        g_printerr ("Could not retrieve pad '%s'\n", pad_name);
        return;
    }

    /* Retrieve negotiated caps (or acceptable caps if negotiation is not finished yet) */
    caps = gst_pad_get_current_caps (pad);
    if (!caps)
        caps = gst_pad_query_caps (pad, NULL);

    /* Print and free (both call paths above return a reference we own) */
    g_print ("Caps for the %s pad:\n", pad_name);
    print_caps (caps, "      ");
    gst_caps_unref (caps);
    gst_object_unref (pad);
}

int main(int argc, char *argv[])
{
    QCoreApplication a(argc, argv);

    GstElement *pipeline, *source, *sink;
    GstElementFactory *source_factory, *sink_factory;
    GstBus *bus;
    GstMessage *msg;
    GstStateChangeReturn ret;
    gboolean terminate = FALSE;

    /* Initialize GStreamer */
    gst_init (&argc, &argv);

    /* Create the element factories first (instead of gst_element_factory_make)
     * so their pad templates can be inspected before instantiating elements */
    source_factory = gst_element_factory_find ("audiotestsrc");
    sink_factory = gst_element_factory_find ("autoaudiosink");
    if (!source_factory || !sink_factory) {
        g_printerr ("Not all element factories could be created.\n");
        return -1;
    }

    /* Print information about the pad templates of these factories */
    print_pad_templates_information (source_factory);
    print_pad_templates_information (sink_factory);

    /* Ask the factories to instantiate actual elements */
    source = gst_element_factory_create (source_factory, "source");
    sink = gst_element_factory_create (sink_factory, "sink");

    /* Create the empty pipeline */
    pipeline = gst_pipeline_new ("test-pipeline");

    if (!pipeline || !source || !sink) {
        g_printerr ("Not all elements could be created.\n");
        return -1;
    }

    /* Build the pipeline */
    gst_bin_add_many (GST_BIN (pipeline), source, sink, NULL);
    if (gst_element_link (source, sink) != TRUE) {
        g_printerr ("Elements could not be linked.\n");
        gst_object_unref (pipeline);
        return -1;
    }

    /* Print initial negotiated caps (in NULL state) */
    g_print ("In NULL state:\n");
    print_pad_capabilities (sink, const_cast<gchar*>("sink"));

    /* Start playing */
    ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        g_printerr ("Unable to set the pipeline to the playing state (check the bus for error messages).\n");
    }

    /* Wait until error, EOS or State Change (blocking pop with no timeout) */
    bus = gst_element_get_bus (pipeline);
    do {
        msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS | GST_MESSAGE_STATE_CHANGED));

        /* Parse message */
        if (msg != NULL) {
            GError *err;
            gchar *debug_info;

            switch (GST_MESSAGE_TYPE (msg)) {
            case GST_MESSAGE_ERROR:
                /* err and debug_info are transferred to us; free both */
                gst_message_parse_error (msg, &err, &debug_info);
                g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
                g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
                g_clear_error (&err);
                g_free (debug_info);
                terminate = TRUE;
                break;
            case GST_MESSAGE_EOS:
                g_print ("End-Of-Stream reached.\n");
                terminate = TRUE;
                break;
            case GST_MESSAGE_STATE_CHANGED:
                /* We are only interested in state-changed messages from the pipeline */
                if (GST_MESSAGE_SRC (msg) == GST_OBJECT (pipeline)) {
                    GstState old_state, new_state, pending_state;
                    gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
                    g_print ("\nPipeline state changed from %s to %s:\n",
                             gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
                    /* Print the current capabilities of the sink element */
                    print_pad_capabilities (sink, const_cast<gchar*>("sink"));
                }
                break;
            default:
                /* We should not reach here because we only asked for ERRORs, EOS and STATE_CHANGED */
                g_printerr ("Unexpected message received.\n");
                break;
            }
            gst_message_unref (msg);
        }
    } while (!terminate);

    /* Free resources */
    gst_object_unref (bus);
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);
    gst_object_unref (source_factory);
    gst_object_unref (sink_factory);

    return a.exec();
}

이전 튜토리얼까지는 gst_element_factory_make를 호출해 elements를 생성했습니다. 이 예제에서는 gst_element_factory_find 함수를 통해 element factory를 먼저 생성한 후 이 factory를 통해 elements를 생성합니다.

/* Create the element factories */
source_factory = gst_element_factory_find ("audiotestsrc");
sink_factory = gst_element_factory_find ("autoaudiosink");
if (!source_factory || !sink_factory) {
    g_printerr ("Not all element factories could be created.\n");
    return -1;
}

/* Print information about the pad templates of these factories */
print_pad_templates_information (source_factory);
print_pad_templates_information (sink_factory);

/* Ask the factories to instantiate actual elements */
source = gst_element_factory_create (source_factory, "source");
sink = gst_element_factory_create (sink_factory, "sink");

그리고 그 factory의 pad template 정보들을 아래 함수를 통해 보여주고 있습니다.

/* Prints information about a Pad Template, including its Capabilities */
static void print_pad_templates_information (GstElementFactory * factory) {
    const GList *pads;
    GstStaticPadTemplate *padtemplate;

    g_print ("Pad Templates for %s:\n", gst_element_factory_get_longname (factory));
    if (!gst_element_factory_get_num_pad_templates (factory)) {
        g_print ("  none\n");
        return;
    }

    /* The list is owned by the factory; do not modify or free it */
    pads = gst_element_factory_get_static_pad_templates (factory);
    while (pads) {
        padtemplate = static_cast<GstStaticPadTemplate *>(pads->data);
        pads = g_list_next (pads);

        /* Direction: whether the pad produces (SRC) or consumes (SINK) data */
        if (padtemplate->direction == GST_PAD_SRC)
            g_print ("  SRC template: '%s'\n", padtemplate->name_template);
        else if (padtemplate->direction == GST_PAD_SINK)
            g_print ("  SINK template: '%s'\n", padtemplate->name_template);
        else
            g_print ("  UNKNOWN!!! template: '%s'\n", padtemplate->name_template);

        /* Presence: Always / Sometimes / On request */
        if (padtemplate->presence == GST_PAD_ALWAYS)
            g_print ("    Availability: Always\n");
        else if (padtemplate->presence == GST_PAD_SOMETIMES)
            g_print ("    Availability: Sometimes\n");
        else if (padtemplate->presence == GST_PAD_REQUEST)
            g_print ("    Availability: On request\n");
        else
            g_print ("    Availability: UNKNOWN!!!\n");

        if (padtemplate->static_caps.string) {
            GstCaps *caps;
            g_print ("    Capabilities:\n");
            /* gst_static_caps_get returns a reference we must unref */
            caps = gst_static_caps_get (&padtemplate->static_caps);
            print_caps (caps, "      ");
            gst_caps_unref (caps);

        }

        g_print ("\n");
    }
}

Pad의 현재 capabilities를 gst_pad_get_current_caps통해 확인하고 만약 실패한다면 query를 통해 가져옵니다.

/* Shows the CURRENT capabilities of the requested pad in the given element */
static void print_pad_capabilities (GstElement *element, gchar *pad_name) {
    GstPad *pad = NULL;
    GstCaps *caps = NULL;

    /* Retrieve pad */
    pad = gst_element_get_static_pad (element, pad_name);
    if (!pad) {
        g_printerr ("Could not retrieve pad '%s'\n", pad_name);
        return;
    }

    /* Retrieve negotiated caps (or acceptable caps if negotiation is not finished yet) */
    caps = gst_pad_get_current_caps (pad);
    if (!caps)
        caps = gst_pad_query_caps (pad, NULL);

    /* Print and free (both call paths above return a reference we own) */
    g_print ("Caps for the %s pad:\n", pad_name);
    print_caps (caps, "      ");
    gst_caps_unref (caps);
    gst_object_unref (pad);
}

728x90
728x90

Time 형태로 생긴 데이터가 있다고 했을 때(ex) 12:34:56) 이 데이터를 QTime으로 변환 후 Seconds로 변환하기 위해서는 아래 코드를 사용합니다.

// Parses a "hh:mm:ss" string and returns the number of seconds since midnight.
int QTimeToSeconds(QString sTime)
{
    const QTime parsed = QTime::fromString(sTime, "hh:mm:ss");
    return QTime(0, 0, 0).secsTo(parsed);
}

이제 이 Seconds 값을 다시 QTime 형태로 바꾸기 위해선 아래처럼 사용합니다.

// Converts a second count back into a QTime offset from midnight.
QTime SecondsToQTime(int nSeconds)
{
    return QTime(0, 0, 0).addSecs(nSeconds);
}

 

728x90
728x90

이전 튜토리얼까지 진행하면서 gstreamer에 대한 기초를 살펴봤습니다. 이를 활용해서 간단한 미디어 플레이어를 제작해보는 예제를 진행하려고 합니다. Tutorial 5는 GTK를 기반으로 예제를 진행하기 때문에 많은 참고는 할 수 없지만 여기서 중요한 함수를 확인할 수 있습니다.

 

Basic tutorial 5: GUI toolkit integration

Please port this tutorial to python! Please port this tutorial to javascript! Goal This tutorial shows how to integrate GStreamer in a Graphical User Interface (GUI) toolkit like GTK+. Basically, GStreamer takes care of media playback while the GUI toolkit

gstreamer.freedesktop.org

이전까지 예제를 실행하면 Direct 3D 11 renders라는 창이 뜨면서 영상이 재생되었습니다. 이 창을 저희 UI에 이식시켜야 하는게 가장 문제였는데 아래 함수를 통해 이 문제를 해결할 수 있습니다.

 

gst_video_overlay_set_window_handle을 통해 위젯의 handle 값을 넘겨 해당 위젯에서 Play(Draw)가 가능합니다.

그리고 해당 함수를 사용하기 위해서는 lgstvideo-1.0 라이브러리를 사용해야 하기 때문에 project 파일도 아래처럼 추가하였습니다.

QT       += core gui

greaterThan(QT_MAJOR_VERSION, 4): QT += widgets

CONFIG += c++17

# You can make your code fail to compile if it uses deprecated APIs.
# In order to do so, uncomment the following line.
#DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000    # disables all the APIs deprecated before Qt 6.0.0

SOURCES += \
    MsgBusCheckThread.cpp \
    StreamMgr.cpp \
    main.cpp \
    GstreamerPlayer.cpp

HEADERS += \
    Common.h \
    GstreamerPlayer.h \
    MsgBusCheckThread.h \
    StreamMgr.h

FORMS += \
    GstreamerPlayer.ui

INCLUDEPATH += \
    $$PWD/include \
    $$PWD/include/gstreamer-1.0 \
    $$PWD/include/glib-2.0/ \
    $$PWD/include/glib-2.0/include \
    $$PWD/lib/glib-2.0/include \
    $$PWD/include/orc-0.4 \


win32: LIBS += -L$$PWD/lib/ -lgstreamer-1.0 -lgobject-2.0 -lglib-2.0 -lintl -lgstvideo-1.0 -lgstbase-1.0

DESTDIR += \
    $$PWD/bin

# Default rules for deployment.
qnx: target.path = /tmp/$${TARGET}/bin
else: unix:!android: target.path = /opt/$${TARGET}/bin
!isEmpty(target.path): INSTALLS += target

실행화면은 아래와 같습니다. 실제 play 영상은 QFrame 안에 재생이 되고 있고 Play, Pause, Stop 버튼과 재생 길이를 확인할 수 있는 Slider와 그 밑에는 현재 영상 시점, 영상 길이를 알 수 있는 Label로 이루어져 있습니다.

GstreamerPlayer MainWindow 코드입니다. StreamMgr를 통해 Gstreamer를 control 합니다. play 시에 QFrame의 Handle 값을 넘겨주게 됩니다. StreamMgr를 통해 받아온 영상 시간 관련 정보를 label과 slider에 표시해줍니다.

#include "GstreamerPlayer.h"
#include "ui_GstreamerPlayer.h"
#include <QTime>

// Builds the UI, creates the stream manager, and wires the buttons plus the
// manager's duration/position signals to the corresponding slots.
GstreamerPlayer::GstreamerPlayer(QWidget *parent)
    : QMainWindow(parent)
    , ui(new Ui::GstreamerPlayer)
    , pMgr(NULL)
{
    ui->setupUi(this);

    pMgr = new StreamMgr(this);

    connect(ui->btnPlay,    SIGNAL(clicked(bool)),            this, SLOT(Slot_Play()));
    connect(ui->btnPause,   SIGNAL(clicked(bool)),            this, SLOT(Slot_Pause()));
    connect(ui->btnStop,    SIGNAL(clicked(bool)),            this, SLOT(Slot_Stop()));
    connect(pMgr,           SIGNAL(emit_changeDuration(int)), this, SLOT(Slot_StreamDuration(int)));
    connect(pMgr,           SIGNAL(emit_currentTime(int)),    this, SLOT(Slot_CurrentTime(int)));
}

// Releases the UI and the stream manager owned by this window.
GstreamerPlayer::~GstreamerPlayer()
{
    delete ui;
    delete pMgr;
}

// Starts (or resumes) playback inside the QFrame.
//
// Fix: the native window handle is handed to the video overlay BEFORE the
// pipeline is set to PLAYING.  GstVideoOverlay requires the handle to be in
// place before the video sink allocates its output window; setting it after
// Play() races the sink and can pop up a separate top-level video window.
void GstreamerPlayer::Slot_Play()
{
    if ( pMgr == NULL ) { return; }
    pMgr->SetWindowHandle(ui->frame->winId());
    // QFrame의 Handle 값 전달
    pMgr->Play();
}

// Forwards the pause request to the stream manager, if one exists.
void GstreamerPlayer::Slot_Pause()
{
    if ( pMgr != NULL ) {
        pMgr->Pause();
    }
}

// Forwards the stop request to the stream manager, if one exists.
void GstreamerPlayer::Slot_Stop()
{
    if ( pMgr != NULL ) {
        pMgr->Stop();
    }
}

void GstreamerPlayer::Slot_StreamDuration(int nDuration)
{
    ui->bar->setRange(0, nDuration);
    QTime time(0,0,0);
    time = time.addSecs(nDuration);
    ui->lbDuration->setText(time.toString());
}

void GstreamerPlayer::Slot_CurrentTime(int nTime)
{
    ui->bar->setValue(nTime);
    QTime time(0,0,0);
    time = time.addSecs(nTime);
    ui->lbCurrent->setText(time.toString());
}

전체적인 Gstreamer 관리는 StreamMgr이란 클래스에서 이뤄집니다. 이 클래스에서 체크해야 할 점은 msg를 처리하기 위해 Thread를 따로 생성했다는 것과, GstreamerPlayer에서 전달받은 Handle 값을 gst_video_overlay_set_window_handle에 넘겨준다는 것입니다.

#include <QDebug>
#include "StreamMgr.h"
#include "gst/video/videooverlay.h"

// Sets up GStreamer (Init) and starts a dedicated worker thread that polls
// the pipeline bus, so bus handling never blocks the UI thread.  Timing
// updates from the worker are re-emitted through this object's signals.
StreamMgr::StreamMgr(QObject *parent)
    : QObject{parent}
    , pThread(NULL)
{
    Init();

    pThread = new MsgBusCheckThread();

    // NOTE(review): 'new' throws on failure rather than returning NULL, so
    // the pThread check is defensive only; bus/data can be NULL when Init()
    // failed to create the playbin element.
    if ( pThread == NULL || bus == NULL || data == NULL ) { return; }

    connect(pThread, SIGNAL(emit_changeDuration(int)), this, SLOT(slot_getDuration(int)));
    connect(pThread, SIGNAL(emit_currentTime(int)),    this, SLOT(slot_getTime(int)));

    // Hand the bus and the shared pipeline state to the worker before start().
    pThread->SetBus(bus);
    pThread->setData(data);
    pThread->start();
}

// Stops the bus-watcher thread and releases the GStreamer resources that
// Init() acquired.
//
// Fixes over the original: NULL-guard before dereferencing pThread, the
// thread object itself is deleted (it was leaked), requestInterruption()
// gives a cooperative run() loop a chance to exit before wait(), and the
// bus / playbin references obtained in Init() are dropped.
StreamMgr::~StreamMgr()
{
    if ( pThread != NULL ) {
        if ( pThread->isRunning() ) {
            pThread->requestInterruption(); // cooperative stop for run()
            pThread->quit();
            pThread->wait();
        }
        delete pThread;
        pThread = NULL;
    }

    if ( bus != NULL ) {
        gst_object_unref (bus);
        bus = NULL;
    }

    if ( data != NULL ) {
        if ( data->playbin != NULL ) {
            // Bring the pipeline down cleanly before dropping our reference.
            gst_element_set_state (data->playbin, GST_STATE_NULL);
            gst_object_unref (data->playbin);
        }
        delete data;
        data = NULL;
    }
}

void StreamMgr::Init()
{
    /* Initialize GStreamer */
    gst_init (NULL, NULL);

    data = new CustomData;
    if ( data == NULL ) { return; }

    /* Create the elements */
    data->playbin = gst_element_factory_make ("playbin", "playbin");

    if (!data->playbin) {
        g_printerr ("Not all elements could be created.\n");
        return;
    }

    /* Set the URI to play */
    g_object_set (data->playbin, "uri", "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL);

    /* Listen to the bus */
    bus = gst_element_get_bus (data->playbin);
}

// Relays the worker thread's duration update (seconds) to the UI layer.
void StreamMgr::slot_getDuration(int nDuration)
{
    emit emit_changeDuration(nDuration);
}

// Relays the worker thread's position update (seconds) to the UI layer.
void StreamMgr::slot_getTime(int nTime)
{
    emit emit_currentTime(nTime);
}

// Requests a pipeline state change (PLAYING / PAUSED / READY ...).
//
// Fixes over the original: the error message now names the state that was
// actually requested (it unconditionally said "playing"), and playbin is NO
// LONGER unreffed on failure -- the pointer stays stored in 'data' and is
// reused by later Play()/Pause()/Stop() calls, so unreffing here caused a
// use-after-free; ownership is released exactly once in the destructor.
void StreamMgr::SetPlayBinState(GstState State)
{
    if ( data == NULL || data->playbin == NULL ) { return; }

    ret = gst_element_set_state (data->playbin, State);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        g_printerr ("Unable to set the pipeline to the %s state.\n",
                    gst_element_state_get_name (State));
    }
}

// Pauses playback (pipeline -> PAUSED).
void StreamMgr::Pause()
{
    SetPlayBinState(GST_STATE_PAUSED);
}

// Starts or resumes playback (pipeline -> PLAYING).
void StreamMgr::Play()
{
    SetPlayBinState(GST_STATE_PLAYING);
}

// Stops playback (pipeline -> READY keeps the pipeline built but idle).
void StreamMgr::Stop()
{
    SetPlayBinState(GST_STATE_READY);
}

// Tells playbin's video overlay to render into the native window identified
// by nID (the QFrame's winId() passed down from the UI).
void StreamMgr::SetWindowHandle(qint64 nID)
{
    gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY (data->playbin), static_cast<guintptr>(nID));
}

마지막으로 msg를 처리할 Thread입니다. 나머지 코드는 이전 튜토리얼과 크게 다를것이 없고 gstTimeToSecond를 통해 GST_TIME_FORMAT 형태의 시간 표시를 QTime을 통해 Seconds 값으로 변경해줍니다.

#include "MsgBusCheckThread.h"
#include <QDebug>
#include <QTime>

// Worker thread that watches a GStreamer bus.  All members start NULL and
// must be supplied via SetBus()/setData() before start() is called.
MsgBusCheckThread::MsgBusCheckThread()
    : QThread()
    , bus(NULL)
    , msg(NULL)
    , data(NULL)
{
}

void MsgBusCheckThread::run()
{
    if ( bus == NULL ) { return; }

    while (true) {
        msg = gst_bus_timed_pop_filtered (bus, 100 * GST_MSECOND, (GstMessageType)(GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS | GST_MESSAGE_DURATION));

        /* Parse message */
        if (msg != NULL) {
            HandleMessage (data, msg);
        } else {
            /* We got no message, this means the timeout expired */
            gint64 current = -1;

            /* Query the current position of the stream */
            if (gst_element_query_position (data->playbin, GST_FORMAT_TIME, &current)) {
                int nCurrent = gstTimeToSecond(current);
                emit emit_currentTime(nCurrent);
            }

            /* If we didn't know it yet, query the stream duration */
            if (!GST_CLOCK_TIME_IS_VALID (data->duration)) {
                if (gst_element_query_duration (data->playbin, GST_FORMAT_TIME, &data->duration)) {
                    int nDuration = gstTimeToSecond(data->duration);
                    emit emit_changeDuration(nDuration);
                }
            }
        }
    }
}

// Dispatches a single bus message.  ERROR/EOS set data->terminate; DURATION
// invalidates the cached length so run() re-queries it; STATE_CHANGED from
// playbin itself updates data->playing and, on entering PLAYING, probes
// whether the stream is seekable.  The message is ALWAYS unreffed here, so
// callers must not touch it afterwards.
void MsgBusCheckThread::HandleMessage(CustomData *data, GstMessage *msg)
{
    GError *err;
    gchar *debug_info;

    switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_ERROR:
        gst_message_parse_error (msg, &err, &debug_info);
        g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
        g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
        g_clear_error (&err);
        g_free (debug_info);
        data->terminate = TRUE;
        break;
    case GST_MESSAGE_EOS:
        g_print ("\nEnd-Of-Stream reached.\n");
        data->terminate = TRUE;
        break;
    case GST_MESSAGE_DURATION:
        /* The duration has changed, mark the current one as invalid */
        data->duration = GST_CLOCK_TIME_NONE;
        break;
    case GST_MESSAGE_STATE_CHANGED: {
        GstState old_state, new_state, pending_state;
        gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
        /* State-changed messages arrive from every element; only the
         * pipeline-level (playbin) transitions are interesting here. */
        if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->playbin)) {
            g_print ("Pipeline state changed from %s to %s:\n",
                     gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));

            /* Remember whether we are in the PLAYING state or not */
            data->playing = (new_state == GST_STATE_PLAYING);

            if (data->playing) {
                /* We just moved to PLAYING. Check if seeking is possible */
                GstQuery *query;
                gint64 start, end;
                query = gst_query_new_seeking (GST_FORMAT_TIME);
                if (gst_element_query (data->playbin, query)) {
                    gst_query_parse_seeking (query, NULL, &data->seek_enabled, &start, &end);
                    if (data->seek_enabled) {
                        g_print ("Seeking is ENABLED from %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT "\n",
                                 GST_TIME_ARGS (start), GST_TIME_ARGS (end));
                    } else {
                        g_print ("Seeking is DISABLED for this stream.\n");
                    }
                }
                else {
                    g_printerr ("Seeking query failed.");
                }
                gst_query_unref (query);
            }
        }
    } break;
    default:
        /* We should not reach here */
        g_printerr ("Unexpected message received.\n");
        break;
    }
    gst_message_unref (msg);
}

// Converts a GStreamer timestamp (nanoseconds) to whole seconds.
//
// Fix: the original printed the time as "h:mm:ss.ns" and re-parsed it with
// QTime, which breaks for invalid times (GST_CLOCK_TIME_NONE prints as
// 99:99:99.999999999, which QTime rejects) and for media 24 hours or longer
// (QTime wraps at midnight).  Integer division by GST_SECOND yields the
// same floor-to-seconds result for all valid times, without string parsing.
int MsgBusCheckThread::gstTimeToSecond(gint64 nTime)
{
    if (nTime < 0 || !GST_CLOCK_TIME_IS_VALID ((GstClockTime) nTime)) {
        return 0;   /* unknown/invalid time reports as position 0 */
    }
    return static_cast<int>(nTime / GST_SECOND);
}

// Stores the bus to poll.  The caller retains ownership of the reference;
// must be called before start().
void MsgBusCheckThread::SetBus(GstBus *pBus)
{
    bus = pBus;
}

// Stores the shared pipeline state (not owned); must be called before
// start().  run() reads and writes fields of this struct.
void MsgBusCheckThread::setData(CustomData *pData)
{
    data = pData;
}

자세한 코드는 아래 링크 참고 부탁드립니다. Seek 및 기타 기능은 다음 기회에 추가해보도록 하겠습니다.

 

GitHub - psy1064/Gstreamer-Qt: Gstreamer example using Qt5

Gstreamer example using Qt5. Contribute to psy1064/Gstreamer-Qt development by creating an account on GitHub.

github.com

 

728x90
728x90

이전 포스팅에 이어 Tutorial 4을 진행하면서 elemtents에 query를 통해 control 하는 방법에 대해 살펴봅니다.

 

Basic tutorial 4: Time management

Basic tutorial 4: Time management Please port this tutorial to python! Please port this tutorial to javascript! Goal This tutorial shows how to use GStreamer time-related facilities. In particular: How to query the pipeline for information like stream posi

gstreamer.freedesktop.org

#include <QCoreApplication>
#include <QDebug>
#include "gst/gst.h"

/* Structure to contain all our information, so we can pass it around */
typedef struct _CustomData {
  GstElement *playbin;  /* Our one and only element */
  gboolean playing;      /* Are we in the PLAYING state? */
  gboolean terminate;    /* Should we terminate execution? */
  gboolean seek_enabled; /* Is seeking enabled for this media? */
  gboolean seek_done;    /* Have we performed the seek already? */
  gint64 duration;       /* How long does this media last, in nanoseconds */
} CustomData;

/* Forward definition of the message processing function */
static void handle_message (CustomData *data, GstMessage *msg);

/* GStreamer basic tutorial 4 (time management) wrapped in a Qt console app:
 * plays a URI with playbin, polls the bus every 100 ms, prints the current
 * position/duration, and performs one seek to 30 s once 10 s have played.
 *
 * Fix: the original ended with 'return a.exec();'.  At that point playback
 * has already finished and all resources are freed, but nothing ever posts
 * quit() to the Qt event loop, so the process would hang forever.  The
 * program now exits directly. */
int main(int argc, char *argv[])
{
    /* Constructed only for parity with the other Qt samples; this tutorial
     * drives its own polling loop and never needs the Qt event loop. */
    QCoreApplication a(argc, argv);

    CustomData data;
    GstBus *bus;
    GstMessage *msg;
    GstStateChangeReturn ret;

    data.playing = FALSE;
    data.terminate = FALSE;
    data.seek_enabled = FALSE;
    data.seek_done = FALSE;
    data.duration = GST_CLOCK_TIME_NONE;   /* unknown until queried */

    /* Initialize GStreamer */
    gst_init (&argc, &argv);

    /* Create the elements: playbin builds the whole pipeline by itself */
    data.playbin = gst_element_factory_make ("playbin", "playbin");

    if (!data.playbin) {
      g_printerr ("Not all elements could be created.\n");
      return -1;
    }

    /* Set the URI to play */
    g_object_set (data.playbin, "uri", "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL);

    /* Start playing */
    ret = gst_element_set_state (data.playbin, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
      g_printerr ("Unable to set the pipeline to the playing state.\n");
      gst_object_unref (data.playbin);
      return -1;
    }

    /* Listen to the bus.  The 100 ms timeout lets us refresh the position
     * display between messages. */
    bus = gst_element_get_bus (data.playbin);
    do {
      msg = gst_bus_timed_pop_filtered (bus, 100 * GST_MSECOND,
          (GstMessageType)(GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS | GST_MESSAGE_DURATION));

      /* Parse message */
      if (msg != NULL) {
        handle_message (&data, msg);
      } else {
        /* We got no message, this means the timeout expired */
        if (data.playing) {
          gint64 current = -1;

          /* Query the current position of the stream */
          if (!gst_element_query_position (data.playbin, GST_FORMAT_TIME, &current)) {
            g_printerr ("Could not query current position.\n");
          }

          /* If we didn't know it yet, query the stream duration */
          if (!GST_CLOCK_TIME_IS_VALID (data.duration)) {
            if (!gst_element_query_duration (data.playbin, GST_FORMAT_TIME, &data.duration)) {
              g_printerr ("Could not query current duration.\n");
            }
          }

          /* Print current position and total duration */
          g_print ("Position %" GST_TIME_FORMAT " / %" GST_TIME_FORMAT "\r",
              GST_TIME_ARGS (current), GST_TIME_ARGS (data.duration));

          /* If seeking is enabled, we have not done it yet, and the time is right, seek */
          if (data.seek_enabled && !data.seek_done && current > 10 * GST_SECOND) {
            g_print ("\nReached 10s, performing seek...\n");
            gst_element_seek_simple (data.playbin, GST_FORMAT_TIME,
                (GstSeekFlags)(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT), 30 * GST_SECOND);
            data.seek_done = TRUE;
          }
        }
      }
    } while (!data.terminate);

    /* Free resources */
    gst_object_unref (bus);
    gst_element_set_state (data.playbin, GST_STATE_NULL);
    gst_object_unref (data.playbin);

    return 0;
}

/* Dispatches one bus message.  ERROR/EOS set data->terminate so main()'s
 * loop exits; DURATION invalidates the cached length so it is re-queried;
 * STATE_CHANGED from the pipeline itself updates data->playing and, on
 * entering PLAYING, probes whether seeking is possible.  The message is
 * always unreffed here -- callers must not touch it afterwards. */
static void handle_message (CustomData *data, GstMessage *msg) {
  GError *err;
  gchar *debug_info;

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_ERROR:
      gst_message_parse_error (msg, &err, &debug_info);
      g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
      g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
      g_clear_error (&err);
      g_free (debug_info);
      data->terminate = TRUE;
      break;
    case GST_MESSAGE_EOS:
      g_print ("\nEnd-Of-Stream reached.\n");
      data->terminate = TRUE;
      break;
    case GST_MESSAGE_DURATION:
      /* The duration has changed, mark the current one as invalid */
      data->duration = GST_CLOCK_TIME_NONE;
      break;
    case GST_MESSAGE_STATE_CHANGED: {
      GstState old_state, new_state, pending_state;
      gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
      /* Every element posts state-changed; only pipeline-level ones matter. */
      if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->playbin)) {
        g_print ("Pipeline state changed from %s to %s:\n",
            gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));

        /* Remember whether we are in the PLAYING state or not */
        data->playing = (new_state == GST_STATE_PLAYING);

        if (data->playing) {
          /* We just moved to PLAYING. Check if seeking is possible */
          GstQuery *query;
          gint64 start, end;
          query = gst_query_new_seeking (GST_FORMAT_TIME);
          if (gst_element_query (data->playbin, query)) {
            gst_query_parse_seeking (query, NULL, &data->seek_enabled, &start, &end);
            if (data->seek_enabled) {
              g_print ("Seeking is ENABLED from %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT "\n",
                  GST_TIME_ARGS (start), GST_TIME_ARGS (end));
            } else {
              g_print ("Seeking is DISABLED for this stream.\n");
            }
          }
          else {
            g_printerr ("Seeking query failed.");
          }
          gst_query_unref (query);
        }
      }
    } break;
    default:
      /* We should not reach here */
      g_printerr ("Unexpected message received.\n");
      break;
  }
  gst_message_unref (msg);
}

예제를 위해 첫 번째 튜토리얼에서 사용했던 playbin elements를 사용합니다.

/* Create the elements */
data.playbin = gst_element_factory_make ("playbin", "playbin");

if (!data.playbin) {
  g_printerr ("Not all elements could be created.\n");
  return -1;
}

/* Set the URI to play */
g_object_set (data.playbin, "uri", "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL);

/* Start playing */
ret = gst_element_set_state (data.playbin, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
  g_printerr ("Unable to set the pipeline to the playing state.\n");
  gst_object_unref (data.playbin);
  return -1;
}

이전 튜토리얼까지는 bus로부터 message를 받아오는 함수인 gst_bus_timed_pop_filtered에 timeout을 지정하지 않아 무한 대기하였고 2번째 파라미터에 시간을 지정해주면 timeout이 일어나면 NULL을 반환하게 됩니다.

bus = gst_element_get_bus (data.playbin);
do {
  msg = gst_bus_timed_pop_filtered (bus, 100 * GST_MSECOND,
      (GstMessageType)(GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS | GST_MESSAGE_DURATION));

  /* Parse message */
  if (msg != NULL) {
    handle_message (&data, msg);
  } else {
    /* We got no message, this means the timeout expired */
    if (data.playing) {
      gint64 current = -1;

      /* Query the current position of the stream */
      if (!gst_element_query_position (data.playbin, GST_FORMAT_TIME, &current)) {
        g_printerr ("Could not query current position.\n");
      }

      /* If we didn't know it yet, query the stream duration */
      if (!GST_CLOCK_TIME_IS_VALID (data.duration)) {
        if (!gst_element_query_duration (data.playbin, GST_FORMAT_TIME, &data.duration)) {
          g_printerr ("Could not query current duration.\n");
        }
      }

      /* Print current position and total duration */
      g_print ("Position %" GST_TIME_FORMAT " / %" GST_TIME_FORMAT "\r",
          GST_TIME_ARGS (current), GST_TIME_ARGS (data.duration));

      /* If seeking is enabled, we have not done it yet, and the time is right, seek */
      if (data.seek_enabled && !data.seek_done && current > 10 * GST_SECOND) {
        g_print ("\nReached 10s, performing seek...\n");
        gst_element_seek_simple (data.playbin, GST_FORMAT_TIME,
            (GstSeekFlags)(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT), 30 * GST_SECOND);
        data.seek_done = TRUE;
      }
    }
  }
} while (!data.terminate);

여기서 handle_message 함수를 먼저 보겠습니다. STATE_CHANGE case를 보면 playbin의 state를 체크하고 있습니다. 체크했을 때 state가 playing일 경우 영상의 시간 길이(범위)를 query를 통해 가져옵니다.

static void handle_message (CustomData *data, GstMessage *msg) {
  GError *err;
  gchar *debug_info;

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_ERROR:
      gst_message_parse_error (msg, &err, &debug_info);
      g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
      g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
      g_clear_error (&err);
      g_free (debug_info);
      data->terminate = TRUE;
      break;
    case GST_MESSAGE_EOS:
      g_print ("\nEnd-Of-Stream reached.\n");
      data->terminate = TRUE;
      break;
    case GST_MESSAGE_DURATION:
      /* The duration has changed, mark the current one as invalid */
      data->duration = GST_CLOCK_TIME_NONE;
      break;
    case GST_MESSAGE_STATE_CHANGED: {
      GstState old_state, new_state, pending_state;
      gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
      if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->playbin)) {
        g_print ("Pipeline state changed from %s to %s:\n",
            gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));

        /* Remember whether we are in the PLAYING state or not */
        data->playing = (new_state == GST_STATE_PLAYING);

        if (data->playing) {
          /* We just moved to PLAYING. Check if seeking is possible */
          GstQuery *query;
          gint64 start, end;
          query = gst_query_new_seeking (GST_FORMAT_TIME);
          if (gst_element_query (data->playbin, query)) {
            gst_query_parse_seeking (query, NULL, &data->seek_enabled, &start, &end);
            if (data->seek_enabled) {
              g_print ("Seeking is ENABLED from %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT "\n",
                  GST_TIME_ARGS (start), GST_TIME_ARGS (end));
            } else {
              g_print ("Seeking is DISABLED for this stream.\n");
            }
          }
          else {
            g_printerr ("Seeking query failed.");
          }
          gst_query_unref (query);
        }
      }
    } break;
    default:
      /* We should not reach here */
      g_printerr ("Unexpected message received.\n");
      break;
  }
  gst_message_unref (msg);
}

다시 do-while쪽으로 넘어가보겠습니다. gst_bus_timed_pop_filtered에 등록한 메시지 타입이 발생하지 않으면 msg가 NULL 이되어 else로 넘어가게 됩니다.

handle_message에서 체크한대로 play 상태라면 gst_element_query_position를 통해 현재 영상의 position 값을 가져오고 

g_print로 현재 상태를 계속 출력합니다.

 

영상이 10초가 지났으면 gst_element_seek_simple 함수를 통해 영상의 position을 변경합니다.

변경하게 되면 playbin의 상태가 PAUSED -> PLAYING으로 변하기 때문에 다시 handle_message 함수를 타게 됩니다.

 

 

프로그램을 시작하면 아래와 같이 흘러갑니다.

10초 경과 후 영상의 position을 변경합니다.

그 후 영상이 종료될 때까지 재생합니다.

728x90
728x90

기존에 Qt Widget을 이용하여 UI를 구성하는 방법 말고도 Qml을 이용하여 UI를 구성하는 방법에 대해 공부해보려 합니다.

 

Qt Creator를 실행 후 Create Project > Qt Quick Application을 선택합니다.

프로젝트를 생성하면 main.cpp 파일과 main.qml 파일이 자동으로 생성됩니다.

#include <QGuiApplication>
#include <QQmlApplicationEngine>


// Boots a minimal Qt Quick application: loads main.qml from the resource
// system and enters the Qt event loop.
int main(int argc, char *argv[])
{
    QGuiApplication app(argc, argv);

    QQmlApplicationEngine engine;
    const QUrl url(u"qrc:/untitled/main.qml"_qs);
    // objectCreated fires for every root object the engine creates; a NULL
    // obj for our URL means the QML failed to load, so exit with an error
    // code (queued, because the event loop hasn't started yet).
    QObject::connect(&engine, &QQmlApplicationEngine::objectCreated,
                     &app, [url](QObject *obj, const QUrl &objUrl) {
        if (!obj && url == objUrl)
            QCoreApplication::exit(-1);
    }, Qt::QueuedConnection);
    engine.load(url);

    return app.exec();
}
import QtQuick

Window {
    width: 640
    height: 480
    visible: true
    title: qsTr("Hello World")
}

이 상태로 바로 빌드 후 Run을 하면 빈 화면의 윈도우 창을 볼 수 있습니다.

QML은 마크업 언어로 elements라고 불리는 태그들로 ui를 구성하게 됩니다. Qt Quick으로 작업하면 프론트엔드는 QML과 자바스크립트로 시스템과의 인터페이스나 무거운 작업은 Qt C++로 구성하게 됩니다. 

 

기본적인 elements들은 QtQuick 모듈에 있으며 qml 가장 상단에 QtQuick을 import 함으로써 사용할 수 있습니다.

import QtQuick

각 elements들은 property를 가지게 되고 예로 실행된 Window element들의 width, height, visible, title과 같은 property 값을 넣어 명시해주고 있습니다.

Window {
    width: 640
    height: 480
    visible: true
    title: qsTr("Hello World")
}

elements들의 property는 공식 문서 또는 F1키를 눌러 확인할 수 있습니다.

이제 main.cpp에서 QGuiApplication 생성 후 생성한 qml 파일을 QQmlApplicationEngine로 load하여 실행하게 됩니다. 그 이후 app.exec를 통해 QApplication의 이벤트 루프가 돌게됩니다.

728x90
728x90

이전 포스팅에 이어 Tutorial 3을 진행하면서 pad, GSignal 등의 개념을 살펴봅니다. 이 튜토리얼에서는 demuxer를 이용해 하나의 source에서 오는 media를 2개의 출력으로 분리하는 예제를 보여주면서 pad와 GSignal 등의 개념을 소개합니다.

 

Basic tutorial 3: Dynamic pipelines

Basic tutorial 3: Dynamic pipelines Please port this tutorial to python! Please port this tutorial to javascript! Goal This tutorial shows the rest of the basic concepts required to use GStreamer, which allow building the pipeline "on the fly", as informat

gstreamer.freedesktop.org

#include <QCoreApplication>
#include <QDebug>
#include "gst/gst.h"

/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
    GstElement *pipeline;
    GstElement *source;
    GstElement *convert;
    GstElement *resample;
    GstElement *sink;
} CustomData;

/* Handler for the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *pad, CustomData *data);

/* GStreamer basic tutorial 3 (dynamic pipelines) wrapped in a Qt console
 * app: builds uridecodebin -> audioconvert -> audioresample -> autoaudiosink,
 * links the decoder's audio pad at runtime via the "pad-added" signal, and
 * pumps bus messages until EOS or an error.
 *
 * Fix: the original ended with 'return a.exec();'.  By that point playback
 * has finished and the pipeline is freed, but nothing ever posts quit() to
 * the Qt event loop, so the process would hang forever.  Exit directly. */
int main(int argc, char *argv[])
{
    /* Constructed only for parity with the other Qt samples; the tutorial
     * runs its own bus-polling loop and never needs the Qt event loop. */
    QCoreApplication a(argc, argv);
    
    CustomData data;
    GstBus *bus;
    GstMessage *msg;
    GstStateChangeReturn ret;
    gboolean terminate = FALSE;
    
    /* Initialize GStreamer */
    gst_init (&argc, &argv);
    
    /* Create the elements */
    data.source = gst_element_factory_make ("uridecodebin", "source");
    data.convert = gst_element_factory_make ("audioconvert", "convert");
    data.resample = gst_element_factory_make ("audioresample", "resample");
    data.sink = gst_element_factory_make ("autoaudiosink", "sink");
    
    /* Create the empty pipeline */
    data.pipeline = gst_pipeline_new ("test-pipeline");
    
    if (!data.pipeline || !data.source || !data.convert || !data.resample || !data.sink) {
        g_printerr ("Not all elements could be created.\n");
        return -1;
    }
    
    /* Build the pipeline. Note that we are NOT linking the source at this
   * point. We will do it later. */
    gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.convert, data.resample, data.sink, NULL);
    if (!gst_element_link_many (data.convert, data.resample, data.sink, NULL)) {
        g_printerr ("Elements could not be linked.\n");
        gst_object_unref (data.pipeline);
        return -1;
    }
    
    /* Set the URI to play */
    g_object_set (data.source, "uri", "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL);
    
    /* Connect to the pad-added signal: uridecodebin creates its source pads
     * only after it has inspected the media, so linking happens there. */
    g_signal_connect (data.source, "pad-added", G_CALLBACK (pad_added_handler), &data);
    
    /* Start playing */
    ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        g_printerr ("Unable to set the pipeline to the playing state.\n");
        gst_object_unref (data.pipeline);
        return -1;
    }
    
    /* Listen to the bus */
    bus = gst_element_get_bus (data.pipeline);
    do {
        msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
                                          (GstMessageType) (GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
        
        /* Parse message */
        if (msg != NULL) {
            GError *err;
            gchar *debug_info;
            
            switch (GST_MESSAGE_TYPE (msg)) {
            case GST_MESSAGE_ERROR:
                gst_message_parse_error (msg, &err, &debug_info);
                g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
                g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
                g_clear_error (&err);
                g_free (debug_info);
                terminate = TRUE;
                break;
            case GST_MESSAGE_EOS:
                g_print ("End-Of-Stream reached.\n");
                terminate = TRUE;
                break;
            case GST_MESSAGE_STATE_CHANGED:
                /* We are only interested in state-changed messages from the pipeline */
                if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data.pipeline)) {
                    GstState old_state, new_state, pending_state;
                    gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
                    g_print ("Pipeline state changed from %s to %s:\n",
                             gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
                }
                break;
            default:
                /* We should not reach here */
                g_printerr ("Unexpected message received.\n");
                break;
            }
            gst_message_unref (msg);
        }
    } while (!terminate);
    
    /* Free resources */
    gst_object_unref (bus);
    gst_element_set_state (data.pipeline, GST_STATE_NULL);
    gst_object_unref (data.pipeline);
    
    return 0;
}

/* This function will be called by the pad-added signal */
/* This function will be called by the pad-added signal.
 * Links uridecodebin's newly created source pad to audioconvert's sink pad,
 * but only for raw audio pads and only if we are not linked already; video
 * pads and duplicate calls are ignored.  Cleanup of the caps and sink-pad
 * references is funneled through the exit label. */
static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *data) {
    GstPad *sink_pad = gst_element_get_static_pad (data->convert, "sink");
    GstPadLinkReturn ret;
    GstCaps *new_pad_caps = NULL;
    GstStructure *new_pad_struct = NULL;
    const gchar *new_pad_type = NULL;
    
    g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src));
    
    /* If our converter is already linked, we have nothing to do here */
    if (gst_pad_is_linked (sink_pad)) {
        g_print ("We are already linked. Ignoring.\n");
        goto exit;
    }
    
    /* Check the new pad's type via its caps' first structure name */
    new_pad_caps = gst_pad_get_current_caps (new_pad);
    new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
    new_pad_type = gst_structure_get_name (new_pad_struct);
    if (!g_str_has_prefix (new_pad_type, "audio/x-raw")) {
        g_print ("It has type '%s' which is not raw audio. Ignoring.\n", new_pad_type);
        goto exit;
    }
    
    /* Attempt the link */
    ret = gst_pad_link (new_pad, sink_pad);
    if (GST_PAD_LINK_FAILED (ret)) {
        g_print ("Type is '%s' but link failed.\n", new_pad_type);
    } else {
        g_print ("Link succeeded (type '%s').\n", new_pad_type);
    }
    
exit:
    /* Unreference the new pad's caps, if we got them */
    if (new_pad_caps != NULL)
        gst_caps_unref (new_pad_caps);
    
    /* Unreference the sink pad */
    gst_object_unref (sink_pad);
}

elements들은 다른 elements와 통신할 때 pad라는 port를 이용합니다. 아래와 같이 source, sink는 그에 맞는 pad를 가지고 있으며 중간에 오는 filter나 기타 element들은 각자의 pad를 가지게 됩니다.

demuxer는 하나의 sink에서 온 데이터를 audio, video 2개의 source pad로 내보낼 수 있습니다.

source elements는 uridecodebin elements로 생성합니다. uri로 지정한 파일을 raw audio와 video stream으로 변환할 때 사용합니다. audioconvert elements는 raw audio 데이터를 다른 format으로 변환하는 데 사용합니다. audioresample을 통해 raw audio buffer를 resampling 해서 품질을 높입니다. 마지막으로 autoaudiosink elements로 생성합니다. 여기서 sink, source를 제외한 두 elements들이 filter 역할을 하게 됩니다.

data.source = gst_element_factory_make ("uridecodebin", "source");
data.convert = gst_element_factory_make ("audioconvert", "convert");
data.resample = gst_element_factory_make ("audioresample", "resample");
data.sink = gst_element_factory_make ("autoaudiosink", "sink");

이전 튜토리얼에서 playbin을 통해 uri을 setting 해줬고 이 예제에서는 source elements에 uri property 값을 set 해주게 됩니다.

/* Set the URI to play */
g_object_set (data.source, "uri", "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL);

다음으로 이 예제에서 가장 중요하다고 생각하는 GSignal 관련 내용입니다. 내부 동작 방식은 다르겠지만 Qt에서 제공하는 signal-slot과 비슷하게 사용하면 될 거 같습니다.

/* Connect to the pad-added signal */
g_signal_connect (data.source, "pad-added", G_CALLBACK (pad_added_handler), &data);

elements에서 signal이 발생했을 때 호출할 function을 넘겨주고 그 함수에서 사용할 데이터를 넘겨주게 됩니다.

source elements에 새 pad가 추가될 때 호출이 됩니다. 이 중 raw audio 형식만 골라 audioconvert elements의 sink pad와 link 시켜주는 함수입니다.

/* This function will be called by the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *data) {
    GstPad *sink_pad = gst_element_get_static_pad (data->convert, "sink");
    GstPadLinkReturn ret;
    GstCaps *new_pad_caps = NULL;
    GstStructure *new_pad_struct = NULL;
    const gchar *new_pad_type = NULL;

    g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src));

    /* If our converter is already linked, we have nothing to do here */
    if (gst_pad_is_linked (sink_pad)) {
        g_print ("We are already linked. Ignoring.\n");
        goto exit;
    }

    /* Check the new pad's type */
    new_pad_caps = gst_pad_get_current_caps (new_pad);
    new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
    new_pad_type = gst_structure_get_name (new_pad_struct);
    if (!g_str_has_prefix (new_pad_type, "audio/x-raw")) {
        g_print ("It has type '%s' which is not raw audio. Ignoring.\n", new_pad_type);
        goto exit;
    }

    /* Attempt the link */
    ret = gst_pad_link (new_pad, sink_pad);
    if (GST_PAD_LINK_FAILED (ret)) {
        g_print ("Type is '%s' but link failed.\n", new_pad_type);
    } else {
        g_print ("Link succeeded (type '%s').\n", new_pad_type);
    }

exit:
    /* Unreference the new pad's caps, if we got them */
    if (new_pad_caps != NULL)
        gst_caps_unref (new_pad_caps);

    /* Unreference the sink pad */
    gst_object_unref (sink_pad);
}

이 코드를 실행하면 튜토리얼에서 진행했던 동영상의 음성만 들리는 것을 확인할 수 있습니다. 그리고 출력 창에는 아래처럼 디버깅 메시지가 찍히게 됩니다.

Pipeline state 관련 내용은 아래 표를 참고하시면 됩니다.

728x90

+ Recent posts