Gstreamer appsrc到文件为空

时间:2019-07-03 08:43:42

标签: video gstreamer h.264 zynq h.265

我正在使用Xilinx Petalinux和Vivado 2018.2工具,这些工具针对具有(视频编解码器)VCU的Zynqmp设备。

我正在Vivado SDK中开发基于gstreamer的应用程序,目标是构建下一个管道:

  1. 捕获来自USB3相机的RAW视频帧(不能使用v4l2,该相机使用它自己的API来捕获帧)。将帧包装到GstBuffer并将其推送到 appsrc 管道元素。
  2. 使用硬件VCU(H.264 / H.265)压缩视频。(omxh264enc 元素)
  3. 将其保存到文件中。(filesink 元素)

目前,我可以连接相机,获取帧并将它们包装为GstBuffer类型。

问题是生成的“ output.h264”文件为空。

代码的相关部分是:

    /* Create pipeline */
/* NOTE(review): the launch string has a space after "caps=" and after
 * "location=" — confirm gst_parse_launch actually applies these as the
 * intended property values rather than silently dropping them. */
pipeline = gst_parse_launch("appsrc is-live=TRUE name=xsource  caps= video/x-raw,format=Y800,width=1280,height=1024 ! omxh264enc ! filesink location= media/test/output.h264", NULL);
if(!pipeline)
    goto finish;

/* we add a message handler */
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
bus_watch_id = gst_bus_add_watch (bus, bus_call, NULL);
gst_object_unref (bus);

/* Look up the appsrc element by the name given in the launch string. */
appsrc=gst_bin_get_by_name(GST_BIN(pipeline), "xsource");

gst_element_set_state(pipeline, GST_STATE_PLAYING);

/* Grab a single frame from the camera API and push it into appsrc. */
if(xiGetImage(xiH, 5000, &image) == XI_OK) //Get just one frame
{

    unsigned long buffer_size = image.width*image.height;

    buffer = gst_buffer_new();
    /* Zero-copy wrap of the camera frame; no free-callback is installed, so
     * image.bp must stay valid until the pipeline is done with the buffer. */
    gst_buffer_insert_memory(buffer, -1, gst_memory_new_wrapped(GST_MEMORY_FLAG_READONLY, (guint8*)image.bp, buffer_size, 0, buffer_size, NULL, NULL));
    ret = gst_app_src_push_buffer(GST_APP_SRC(appsrc), buffer);
    if(ret != GST_FLOW_OK){
        break;  /* NOTE(review): `break` outside any loop — will not compile as shown */
    }

}

我检查(在SDK调试模式下)内存和缓冲区不为空,因此将相机接口和缓冲区推入 appsrc 的方法似乎运行良好。我怀疑问题可能出在管道链定义中,但是我尝试了许多配置,但没有成功...

任何想法/线索将不胜感激。

编辑:

按照建议,我尝试等待EOS确认以及代码末尾的错误消息检查:

/* Signal appsrc that no more buffers will be pushed. */
gst_app_src_end_of_stream(GST_APP_SRC(appsrc));

  /* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg =
gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* NOTE(review): neither msg nor bus is unreffed in this snippet —
 * gst_message_unref(msg) / gst_object_unref(bus) are missing. */

gst_element_set_state(pipeline, GST_STATE_NULL);
gst_object_unref(GST_OBJECT(pipeline));

我也尝试加载更多帧以查看是否有帮助,我尝试这样加载500个:

    while(xiGetImage(xiH, 5000, &image) == XI_OK)
{
    unsigned long buffer_size = image.width*image.height;

    buffer = gst_buffer_new();
    /* Zero-copy wrap of the camera frame.
     * NOTE(review): image.bp is presumably reused by the camera driver on the
     * next xiGetImage call while earlier buffers may still be queued in the
     * pipeline — consider copying the frame data. Also, no PTS/duration is set
     * on the buffers; the encoder may need timestamps — TODO confirm. */
    gst_buffer_insert_memory(buffer, -1, gst_memory_new_wrapped(GST_MEMORY_FLAG_READONLY, (guint8*)image.bp, buffer_size, 0, buffer_size, NULL, NULL));

    ret = gst_app_src_push_buffer(GST_APP_SRC(appsrc), buffer);
    if(ret != GST_FLOW_OK){
        break;
    }

    /* Stop after roughly 500 frames. */
    if(frames > 500)
    {
         break;
    }else{
        frames++;
    }

}

但不幸的是,它没有帮助,仍然有空文件且没有错误。

还有其他想法/线索吗?

谢谢。

2 个答案:

答案 0 :(得分:0)

大多数视频编码器在只收到一帧时不会产生任何输出缓冲区。它们需要向前预读若干帧,或者在内部管道中存在一定的延迟。

因此也许在其中添加更多缓冲区会有帮助。

否则,只有当您正确地告诉编码器不会再有更多视频帧时,编码器才可能输出数据。这意味着:将EOS发送到管道,并等待EOS事件到达总线。到那时,编码器有望已将这一帧推送到文件接收器中。

编辑:我注意到您已经发送了EOS。您可能想要在将管道的状态设置为NULL之前等待总线上的EOS。

答案 1 :(得分:0)

我有类似的问题。我看到的每个示例都以拉取方式使用 need-data 回调,而不是像您的代码中那样主动推送帧。

  • 完成流式传输后,我会执行g_signal_emit_by_name (appsrc, "end-of-stream", &ret);
  • 一直运行,直到在管道总线上看到GST_MESSAGE_EOS。
  • 然后调用gst_app_src_end_of_stream(GST_APP_SRC(appsrc));
  • 然后设置gst_element_set_state (pipeline, GST_STATE_NULL);

完整来源:

// based on https://gstreamer.freedesktop.org/documentation/application-development/advanced/pipeline-manipulation.html?gi-language=c
// and https://gist.github.com/floe/e35100f091315b86a5bf
// compile with:
// g++ -Wall $(pkg-config --cflags gstreamer-1.0) -o gst gst.cpp $(pkg-config --libs gstreamer-1.0) -lgstapp-1.0

#include <gst/gst.h>
#include <gst/app/gstappsrc.h>

#include <stdint.h>
#include <iostream>
using namespace std;

// Main-loop control flag: cleared on EOS, pipeline error, or a failed push.
// NOTE(review): this is written from GStreamer callbacks that may run on a
// streaming thread — consider an atomic type if that applies here.
bool run = true;

// Output video geometry and frame rate (RGB16 => 2 bytes per pixel).
const int WIDTH = 1280;
const int HEIGHT = 720;
const int FRAME_RATE = 10;

// Two static test frames: solid white and solid black (filled in main()).
uint16_t b_white[WIDTH*HEIGHT];
uint16_t b_black[WIDTH*HEIGHT];

/* Push one frame (alternating all-white / all-black) into appsrc.
 * After 3 seconds of media time, emits end-of-stream instead so the
 * encoder flushes its delayed frames and EOS reaches the bus. */
static void prepare_buffer(GstAppSrc* appsrc) {

    static gboolean white = FALSE;
    static GstClockTime timestamp = 0;
    GstBuffer *buffer;
    guint size;
    GstFlowReturn ret;
    double now = ((double)timestamp / (double)GST_SECOND);
    cout << now << endl;

    if (now > 3.0) {
        /* We are EOS: signal end-of-stream and stop producing frames. */
        g_signal_emit_by_name (appsrc, "end-of-stream", &ret);
        return;
    }

    size = WIDTH * HEIGHT * 2;  /* RGB16: 2 bytes per pixel */

    /* b_white/b_black are fully initialized once in main(); the original
     * refilled b_white on every call — redundant per-frame work, removed. */
    buffer = gst_buffer_new_wrapped_full((GstMemoryFlags)0,
            (gpointer)(white ? b_white : b_black), size, 0, size, NULL, NULL);
    /* NOTE(review): the two static frame arrays are wrapped without a copy;
     * this assumes the encoder consumes each buffer before the same array is
     * pushed again — confirm, or copy the data per frame. */

    white = !white;

    /* Timestamp the buffer so downstream elements can pace/encode it. */
    GST_BUFFER_PTS (buffer) = timestamp;
    GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, FRAME_RATE);
    timestamp += GST_BUFFER_DURATION (buffer);

    /* This can also trigger callbacks including cb_need_data! */
    ret = gst_app_src_push_buffer(appsrc, buffer);
    if (ret != GST_FLOW_OK) {
        run = false;  /* stop the main loop on a push failure */
    }
}

/* "need-data" signal handler: appsrc wants another buffer; delegate the
 * actual frame production to prepare_buffer(). Extra arguments unused. */
static void cb_need_data (GstElement *appsrc, guint unused_size, gpointer user_data) {
    GstAppSrc *src = (GstAppSrc *) appsrc;
    prepare_buffer (src);
}

/* Bus message handler: logs every message type and clears `run` when the
 * pipeline finishes (EOS) or fails (ERROR). Returns TRUE to keep watching. */
static gboolean on_pipeline_message (GstBus * bus, GstMessage * message, GMainLoop *loop)
{
    (void) bus;   /* unused */
    (void) loop;  /* unused: the g_main_loop_quit path was removed */

    cout << GST_MESSAGE_TYPE_NAME(message) << endl;

    switch (GST_MESSAGE_TYPE (message)) {
        case GST_MESSAGE_EOS:
            g_print ("Received End of Stream message\n");
            run = false;
            break;
        case GST_MESSAGE_ERROR: {
            /* Without this case the original looped forever on a pipeline
             * error; report it and stop the main loop. */
            GError *err = NULL;
            gchar *dbg = NULL;
            gst_message_parse_error (message, &err, &dbg);
            g_printerr ("Pipeline error: %s\n", err ? err->message : "unknown");
            if (err) g_error_free (err);
            g_free (dbg);
            run = false;
            break;
        }
        default:
            /* Other message types (state changes, ...) are only logged. */
            break;
    }
    return TRUE;
}

/* Build appsrc -> videoconvert -> x264enc -> matroskamux -> filesink,
 * feed it synthetic frames via the "need-data" callback, and write test.mkv. */
gint main (gint argc, gchar *argv[]) {

    GstElement *pipeline, *appsrc, *conv;

    /* Pre-fill the two solid test frames (RGB16). */
    for (int i = 0; i < WIDTH*HEIGHT; i++) { b_black[i] = 0; b_white[i] = 0xFFFF; }

    /* init GStreamer */
    gst_init (&argc, &argv);

    /* setup pipeline */
    pipeline = gst_pipeline_new ("pipeline");
    appsrc = gst_element_factory_make ("appsrc", "source");
    conv = gst_element_factory_make ("videoconvert", "conv");
    GstElement *enc = gst_element_factory_make ("x264enc", "enc");
    GstElement *mux = gst_element_factory_make ("matroskamux", "mux");
    GstElement *outFile = gst_element_factory_make ("filesink", "outFile");
    if (!pipeline || !appsrc || !conv || !enc || !mux || !outFile) {
        g_printerr ("Failed to create a GStreamer element\n");
        return 1;
    }

    /* Caps describing the raw frames prepare_buffer() will push. */
    g_object_set (G_OBJECT (appsrc), "caps",
            gst_caps_new_simple ("video/x-raw",
                         "format", G_TYPE_STRING, "RGB16",
                         "width", G_TYPE_INT, WIDTH,
                         "height", G_TYPE_INT, HEIGHT,
                         "framerate", GST_TYPE_FRACTION, FRAME_RATE, 1,
                         NULL), NULL);
    gst_bin_add_many (GST_BIN (pipeline), appsrc, conv, enc, mux, outFile, NULL);
    if (!gst_element_link_many (appsrc, conv, enc, mux, outFile, NULL)) {
        g_printerr ("Failed to link pipeline elements\n");
        gst_object_unref (GST_OBJECT (pipeline));
        return 1;
    }

    /* setup appsrc: timestamped stream, frames pulled via "need-data". */
    g_object_set (G_OBJECT (appsrc),
        "stream-type", 0, // GST_APP_STREAM_TYPE_STREAM
        "format", GST_FORMAT_TIME,
        "is-live", FALSE, // gboolean property: pass FALSE, not C++ `false`
        NULL);
    g_signal_connect (appsrc, "need-data", G_CALLBACK (cb_need_data), NULL);

    g_object_set (G_OBJECT (enc),
        "qp-min", 18,
        NULL);

    g_object_set (G_OBJECT (outFile),
        "location", "test.mkv",
        NULL);

    // play
    gst_element_set_state (pipeline, GST_STATE_PLAYING);

    GstBus *bus = gst_element_get_bus (pipeline);

    /* Pump the bus until EOS or an error clears `run`. The original used a
     * 0ms timeout, which busy-spun at 100% CPU; a short blocking timeout
     * behaves the same without the spin ("need-data" keeps firing
     * independently of this loop). */
    while (run) {
        GstMessage *msg = gst_bus_timed_pop_filtered (bus, 100 * GST_MSECOND,
            (GstMessageType)
            (GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
        if (msg == nullptr) continue;

        on_pipeline_message (bus, msg, nullptr);
        gst_message_unref (msg); /* popped messages are owned by us; was leaked */
    }

    gst_object_unref (bus);

    /* clean up: EOS was already emitted from prepare_buffer(); this second
     * end-of-stream is a harmless safety net at this point. */
    gst_app_src_end_of_stream(GST_APP_SRC(appsrc));
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (GST_OBJECT (pipeline));

    return 0;
}

相关问题