gstreamer appsink + appsrc 畫面分割 (frame splitting): pull decoded frames from one pipeline via appsink, then push the top and bottom halves of each frame into two separate appsrc display pipelines.
#include <gst/gst.h>
#include <stdio.h>
#include <gst/gstbuffer.h>

#define VIDEO_WIDTH  1920
#define VIDEO_HEIGHT 1080
/* Raw caps format pushed into the appsrc pipelines (curly quotes in the
 * original paste broke compilation). */
#define VIDEO_FORMAT "RGB"
/* NOTE(review): RGB is 3 bytes per pixel, not 4, and this constant is not
 * used anywhere below — confirm intent before relying on it. */
#define PIXEL_SIZE 4

/* pipeline0: file -> decode -> appsink (producer);
 * pipeline1/pipeline2: appsrc -> display (top / bottom half consumers). */
GstElement *pipeline0, *pipeline1, *pipeline2;
/*
 * appsink "new-sample" callback: pulls one decoded RGB frame, splits it into
 * two halves by byte size (top / bottom of the image for packed raw video)
 * and pushes each half into its own appsrc pipeline (pipeline1 / pipeline2).
 *
 * sink_elm: the appsink that fired the signal.
 * param:    user data from g_signal_connect (unused here).
 * Returns the GstFlowReturn reported by the last "push-buffer" emission.
 */
static GstFlowReturn
on_new_sample_from_sink (GstElement *sink_elm, void *param)
{
  GstSample *sample = NULL;
  GstElement *source;
  GstBuffer *buffer, *app_buffer;
  GstFlowReturn ret = GST_FLOW_ERROR;
  gsize half;

  (void) param; /* unused */

  printf ("on_new_sample_from_sink() call!\n");

  /* "pull-sample" is an action signal whose return value is the sample; it
   * takes no GstFlowReturn out-parameter (the original passed a bogus &ret). */
  g_signal_emit_by_name (sink_elm, "pull-sample", &sample);
  if (sample == NULL)
    return GST_FLOW_ERROR; /* e.g. appsink is at EOS */

  /* Buffer is borrowed from the sample — valid only while we hold the sample. */
  buffer = gst_sample_get_buffer (sample);
  half = gst_buffer_get_size (buffer) / 2;

  /* Top half -> pipeline1. Deep-copy so the pushed buffer does not reference
   * memory owned by the sample we are about to drop. */
  app_buffer = gst_buffer_copy_region (buffer,
      (GstBufferCopyFlags) (GST_BUFFER_COPY_ALL | GST_BUFFER_COPY_DEEP),
      0, half);
  source = gst_bin_get_by_name (GST_BIN (pipeline1), "appsrc1");
  g_signal_emit_by_name (source, "push-buffer", app_buffer, &ret);
  printf ("app_buffer size = %" G_GSIZE_FORMAT " ret=%d\n",
      gst_buffer_get_size (app_buffer), ret);
  gst_object_unref (source);
  gst_buffer_unref (app_buffer);

  /* Bottom half -> pipeline2. */
  app_buffer = gst_buffer_copy_region (buffer,
      (GstBufferCopyFlags) (GST_BUFFER_COPY_ALL | GST_BUFFER_COPY_DEEP),
      half, half);
  source = gst_bin_get_by_name (GST_BIN (pipeline2), "appsrc2");
  g_signal_emit_by_name (source, "push-buffer", app_buffer, &ret);
  gst_object_unref (source);
  gst_buffer_unref (app_buffer);

  /* Release the sample only after BOTH copies were taken: the original code
   * unreffed it before the second gst_buffer_copy_region, reading from a
   * buffer whose backing sample was already freed (use-after-free). */
  gst_sample_unref (sample);

  return ret;
}

/*
 * Builds three pipelines: one decoder feeding an appsink, and two
 * appsrc-based display pipelines that each show one half of every frame
 * (split performed in on_new_sample_from_sink), then runs the main loop.
 */
gint
main (gint argc, gchar *argv[])   /* was `gchar argv[]` — gst_init needs char*** */
{
  /* init GStreamer */
  gst_init (&argc, &argv);

  /* Decode pipeline: file -> demux -> H.264 decode -> RGB -> appsink.
   * Inner quotes must be escaped inside the C string literal; the pasted
   * original used unescaped/curly quotes and could not compile. */
  pipeline0 = gst_parse_launch (
      "filesrc location=/opt/niliu.mp4 ! typefind ! qtdemux ! video/x-h264 "
      "! avdec_h264 ! videoconvert "
      "! appsink name=appsink caps=\"video/x-raw,format=RGB\"", NULL);

  GstElement *appsink = gst_bin_get_by_name (GST_BIN (pipeline0), "appsink");
  /* emit-signals=TRUE makes appsink fire "new-sample"; sync=TRUE paces
   * delivery against the clock. */
  g_object_set (G_OBJECT (appsink), "emit-signals", TRUE, "sync", TRUE, NULL);
  g_signal_connect (appsink, "new-sample",
      G_CALLBACK (on_new_sample_from_sink), NULL);
  gst_object_unref (appsink); /* gst_bin_get_by_name returns a new ref */

  /* Display pipeline for the top half of each frame. */
  pipeline1 = gst_parse_launch (
      "appsrc name=appsrc1 ! queue ! videoconvert ! autovideosink name=video_sink",
      NULL);

  GstElement *appsrc1 = gst_bin_get_by_name (GST_BIN (pipeline1), "appsrc1");
  g_object_set (G_OBJECT (appsrc1), "caps",
      gst_caps_new_simple ("video/x-raw",
          "format", G_TYPE_STRING, VIDEO_FORMAT,
          "width", G_TYPE_INT, VIDEO_WIDTH,
          "height", G_TYPE_INT, VIDEO_HEIGHT / 2,
          /* "framerate", GST_TYPE_FRACTION, 25, 1, */
          NULL), NULL);
  gst_object_unref (appsrc1);

  /* Display pipeline for the bottom half. */
  pipeline2 = gst_parse_launch (
      "appsrc name=appsrc2 ! queue ! videoconvert ! autovideosink name=video_sink1",
      NULL);

  GstElement *appsrc2 = gst_bin_get_by_name (GST_BIN (pipeline2), "appsrc2");
  g_object_set (G_OBJECT (appsrc2), "caps",
      gst_caps_new_simple ("video/x-raw",
          "format", G_TYPE_STRING, VIDEO_FORMAT,
          "width", G_TYPE_INT, VIDEO_WIDTH,
          "height", G_TYPE_INT, VIDEO_HEIGHT / 2,
          "framerate", GST_TYPE_FRACTION, 25, 1,
          NULL), NULL);
  gst_object_unref (appsrc2);

  gst_element_set_state (pipeline0, GST_STATE_PLAYING);
  gst_element_set_state (pipeline1, GST_STATE_PLAYING);
  gst_element_set_state (pipeline2, GST_STATE_PLAYING);

  g_main_loop_run (g_main_loop_new (NULL, FALSE));

  return 0;
}
--------------------- 
作者:__一縷陽光__ 
來源:CSDN 
原文:https://blog.csdn.net/zxr1521904712/article/details/83995082 
版權宣告:本文為博主原創文章,轉載請附上博文連結!