GStreamer pipeline works with gst-launch but not in code: copying an MJPEG stream from an IP camera

I want to pull the MJPEG stream from an intercom (it is effectively an IP camera). From the console, gst-launch works fine:
gst-launch-1.0 souphttpsrc location="http://192.168.1.191/api/camera/snapshot?width=640&height=480&fps=10" timeout=5 ! multipartdemux ! jpegdec ! videoconvert ! ximagesink
However, when I try to build an application that does the same thing, it does not work. This is the output when I run my program:
Pipeline state changed from NULL to READY:
Pipeline state changed from READY to PAUSED:
Error received from element video_demux: Could not demultiplex stream.
Debugging information: multipartdemux.c(475): multipart_parse_header(): /GstPipeline:new-pipeline/GstMultipartDemux:video_demux:
Boundary not found in the multipart header
Any ideas what I am missing? My code:
#include <gst/gst.h>
#include <glib.h>
/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
  GstElement *pipeline;
  GstElement *source;
  GstElement *v_demux;
  GstElement *v_decoder;
  GstElement *v_convert;
  GstElement *v_sink;
} CustomData;
/* Handler for the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *pad, CustomData *data);
/** Main function */
int main(int argc, char *argv[]) {
  CustomData data;
  GstBus *bus;
  GstMessage *msg;
  GstStateChangeReturn ret;
  gboolean terminate = FALSE;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* The stream URL is expected as the first command-line argument */
  if (argc < 2) {
    g_printerr ("Usage: %s <mjpeg-url>\n", argv[0]);
    return -1;
  }

  /* Create the elements
   *
   * souphttpsrc -> multipartdemux (~>) jpegdec -> videoconvert -> ximagesink
   *
   * ~> Sometimes pad
   */
  data.source = gst_element_factory_make ("souphttpsrc", "video_source");
  data.v_demux = gst_element_factory_make ("multipartdemux", "video_demux");
  data.v_decoder = gst_element_factory_make ("jpegdec", "video_decoder");
  data.v_convert = gst_element_factory_make ("videoconvert", "video_convert");
  data.v_sink = gst_element_factory_make ("ximagesink", "video_sink");

  /* Create the empty pipeline */
  data.pipeline = gst_pipeline_new ("new-pipeline");
  if (!data.pipeline || !data.source ||
      !data.v_demux || !data.v_decoder || !data.v_convert || !data.v_sink) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  /* Configure elements */
  g_object_set (G_OBJECT (data.source), "location", argv[1], NULL);
  g_object_set (G_OBJECT (data.source), "timeout", 5, NULL);

  /* Link all elements that can be linked statically because they have "Always" pads;
   * multipartdemux is linked to jpegdec later, in the pad-added handler */
  gst_bin_add_many (GST_BIN (data.pipeline), data.source,
      data.v_demux, data.v_decoder, data.v_convert, data.v_sink,
      NULL);
  if (gst_element_link_many (data.source, data.v_demux, NULL) != TRUE ||
      gst_element_link_many (data.v_decoder, data.v_convert, data.v_sink, NULL) != TRUE) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (data.pipeline);
    return -1;
  }

  /* Connect to the pad-added signal */
  g_signal_connect (data.v_demux, "pad-added", G_CALLBACK (pad_added_handler), &data);

  /* Start playing */
  ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (data.pipeline);
    return -1;
  }
  /* Listen to the bus */
  bus = gst_element_get_bus (data.pipeline);
  do {
    msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
        GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

    /* Parse message */
    if (msg != NULL) {
      GError *err;
      gchar *debug_info;

      switch (GST_MESSAGE_TYPE (msg)) {
        case GST_MESSAGE_ERROR:
          gst_message_parse_error (msg, &err, &debug_info);
          g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
          g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
          g_clear_error (&err);
          g_free (debug_info);
          terminate = TRUE;
          break;
        case GST_MESSAGE_EOS:
          g_print ("End-Of-Stream reached.\n");
          terminate = TRUE;
          break;
        case GST_MESSAGE_STATE_CHANGED:
          /* We are only interested in state-changed messages from the pipeline */
          if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data.pipeline)) {
            GstState old_state, new_state, pending_state;
            gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
            g_print ("Pipeline state changed from %s to %s:\n",
                gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
          }
          break;
        default:
          /* We should not reach here */
          g_printerr ("Unexpected message received.\n");
          break;
      }
      gst_message_unref (msg);
    }
  } while (!terminate);

  /* Free resources */
  gst_object_unref (bus);
  gst_element_set_state (data.pipeline, GST_STATE_NULL);
  gst_object_unref (data.pipeline);
  return 0;
}
/* This function will be called by the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *data) {
  GstPad *sink_pad = NULL;
  GstPadLinkReturn ret;
  GstCaps *new_pad_caps = NULL;
  GstStructure *new_pad_struct = NULL;
  const gchar *new_pad_type = NULL;

  g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src));

  /* Get information about the new pad's type */
  new_pad_caps = gst_pad_get_current_caps (new_pad);
  new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
  new_pad_type = gst_structure_get_name (new_pad_struct);

  /* Get the sink pad of the decoder; multipartdemux exposes the JPEG parts
   * with "image/jpeg" caps, so accept both "image" and "video" prefixes */
  if (g_str_has_prefix (new_pad_type, "video") || g_str_has_prefix (new_pad_type, "image")) {
    sink_pad = gst_element_get_static_pad (data->v_decoder, "sink");
  } else {
    g_print ("  It has type '%s', which we do not handle -> ignoring\n", new_pad_type);
    gst_caps_unref (new_pad_caps);
    return;
  }

  /* If our decoder is already linked, we have nothing to do here */
  if (gst_pad_is_linked (sink_pad)) {
    g_print ("  We are already linked. Ignoring.\n");
    gst_caps_unref (new_pad_caps);
    gst_object_unref (sink_pad);
    return;
  }

  ret = gst_pad_link (new_pad, sink_pad);
  if (GST_PAD_LINK_FAILED (ret)) {
    g_print ("  Type is '%s' but link failed.\n", new_pad_type);
  } else {
    g_print ("  Link succeeded (type '%s').\n", new_pad_type);
  }

  /* Unreference the new pad's caps, if we got them */
  if (new_pad_caps != NULL) {
    gst_caps_unref (new_pad_caps);
  }

  /* Unreference the sink pad */
  if (sink_pad != NULL) {
    gst_object_unref (sink_pad);
  }
}
Thanks in advance.
I also tried it without multipartdemux, but with a similar result: it works with gst-launch but not from code. – Omsitelta
I don't know why it doesn't work, but you might find it easier to create the pipeline with gst_parse_launch() instead of building it by hand. Maybe that will make it work (see the sketch at the end of this thread). –
Thanks for the comment, but because of the nature of the whole application (the snippet I posted here is only the GStreamer part), I have a video/audio server and I am working on an embedded system where I need to keep the process optimized, so gst_parse_launch is not an option. Thanks anyway for the suggestion. – Omsitelta
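For reference, the gst_parse_launch() approach suggested in the comment above would look roughly like the sketch below. This is only a minimal illustration, not the poster's actual code: the pipeline description simply mirrors the working gst-launch-1.0 command, the URL is taken from argv[1], and error handling is kept to a minimum.

#include <gst/gst.h>

int main (int argc, char *argv[]) {
  GstElement *pipeline;
  GstBus *bus;
  GstMessage *msg;
  GError *error = NULL;
  gchar *desc;

  gst_init (&argc, &argv);

  if (argc < 2) {
    g_printerr ("Usage: %s <mjpeg-url>\n", argv[0]);
    return -1;
  }

  /* Reuse the pipeline description that works with gst-launch-1.0 */
  desc = g_strdup_printf (
      "souphttpsrc location=\"%s\" timeout=5 ! multipartdemux ! "
      "jpegdec ! videoconvert ! ximagesink", argv[1]);

  pipeline = gst_parse_launch (desc, &error);
  g_free (desc);
  if (error != NULL) {
    g_printerr ("Could not build pipeline: %s\n", error->message);
    g_clear_error (&error);
    if (pipeline == NULL)
      return -1;
  }

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Block until an error or end-of-stream is posted on the bus */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
  if (msg != NULL)
    gst_message_unref (msg);

  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}

As with the original program, the URL has to be quoted when it is passed on the command line, since the & characters in it would otherwise be interpreted by the shell.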