GStreamer pipeline works with gst-launch, but not in code: playing an MJPEG stream from an IP camera

I want to play back an MJPEG stream from an intercom (for this purpose it behaves like an IP camera). The following pipeline works with gst-launch on the console:

gst-launch-1.0 souphttpsrc location="http://192.168.1.191/api/camera/snapshot?width=640&height=480&fps=10" timeout=5 ! multipartdemux ! jpegdec ! videoconvert ! ximagesink 

However, when I try to build an application that does the same thing, it does not work.

My code:

#include <gst/gst.h> 
#include <glib.h> 

/* Structure to contain all our information, so we can pass it to callbacks */ 
typedef struct _CustomData { 
    GstElement *pipeline; 
    GstElement *source; 
    GstElement *v_demux; 
    GstElement *v_decoder; 
    GstElement *v_convert; 
    GstElement *v_sink; 
} CustomData; 

/* Handler for the pad-added signal */ 
static void pad_added_handler (GstElement *src, GstPad *pad, CustomData *data); 

/** Main function */ 
int main (int argc, char *argv[]) {
    CustomData data;
    GstBus *bus;
    GstMessage *msg;
    GstStateChangeReturn ret;
    gboolean terminate = FALSE;

    /* Initialize GStreamer */
    gst_init (&argc, &argv);

    if (argc < 2) {
        g_printerr ("Usage: %s <MJPEG stream URL>\n", argv[0]);
        return -1;
    }

    /* Create the elements
     *
     * souphttpsrc -> multipartdemux (~>) jpegdec -> videoconvert -> ximagesink
     *
     * ~> "Sometimes" pad, linked in pad_added_handler
     */
    data.source = gst_element_factory_make ("souphttpsrc", "video_source");
    data.v_demux = gst_element_factory_make ("multipartdemux", "video_demux");
    data.v_decoder = gst_element_factory_make ("jpegdec", "video_decoder");
    data.v_convert = gst_element_factory_make ("videoconvert", "video_convert");
    data.v_sink = gst_element_factory_make ("ximagesink", "video_sink");

    /* Create the empty pipeline */
    data.pipeline = gst_pipeline_new ("new-pipeline");

    if (!data.pipeline || !data.source ||
            !data.v_demux || !data.v_decoder || !data.v_convert || !data.v_sink) {
        g_printerr ("Not all elements could be created.\n");
        return -1;
    }

    /* Configure elements */
    g_object_set (G_OBJECT (data.source), "location", argv[1], NULL);
    g_object_set (G_OBJECT (data.source), "timeout", 5, NULL);

    /* Link all elements that can be linked right away because they have "Always" pads */
    gst_bin_add_many (GST_BIN (data.pipeline), data.source,
        data.v_demux, data.v_decoder, data.v_convert, data.v_sink, NULL);
    if (gst_element_link_many (data.source, data.v_demux, NULL) != TRUE ||
            gst_element_link_many (data.v_decoder, data.v_convert, data.v_sink, NULL) != TRUE) {
        g_printerr ("Elements could not be linked.\n");
        gst_object_unref (data.pipeline);
        return -1;
    }

    /* Connect to the pad-added signal */
    g_signal_connect (data.v_demux, "pad-added", G_CALLBACK (pad_added_handler), &data);

    /* Start playing */
    ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        g_printerr ("Unable to set the pipeline to the playing state.\n");
        gst_object_unref (data.pipeline);
        return -1;
    }

    /* Listen to the bus */
    bus = gst_element_get_bus (data.pipeline);
    do {
        msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
            GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

        /* Parse message */
        if (msg != NULL) {
            GError *err;
            gchar *debug_info;

            switch (GST_MESSAGE_TYPE (msg)) {
                case GST_MESSAGE_ERROR:
                    gst_message_parse_error (msg, &err, &debug_info);
                    g_printerr ("Error received from element %s: %s\n",
                        GST_OBJECT_NAME (msg->src), err->message);
                    g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
                    g_clear_error (&err);
                    g_free (debug_info);
                    terminate = TRUE;
                    break;
                case GST_MESSAGE_EOS:
                    g_print ("End-Of-Stream reached.\n");
                    terminate = TRUE;
                    break;
                case GST_MESSAGE_STATE_CHANGED:
                    /* We are only interested in state-changed messages from the pipeline */
                    if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data.pipeline)) {
                        GstState old_state, new_state, pending_state;
                        gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
                        g_print ("Pipeline state changed from %s to %s:\n",
                            gst_element_state_get_name (old_state),
                            gst_element_state_get_name (new_state));
                    }
                    break;
                default:
                    /* We should not reach here because we only asked for ERROR, EOS and STATE_CHANGED */
                    g_printerr ("Unexpected message received.\n");
                    break;
            }
            gst_message_unref (msg);
        }
    } while (!terminate);

    /* Free resources */
    gst_object_unref (bus);
    gst_element_set_state (data.pipeline, GST_STATE_NULL);
    gst_object_unref (data.pipeline);
    return 0;
}

/* This function will be called by the pad-added signal */ 
static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *data) {
    GstPad *sink_pad = NULL;
    GstPadLinkReturn ret;
    GstCaps *new_pad_caps = NULL;
    GstStructure *new_pad_struct = NULL;
    const gchar *new_pad_type = NULL;

    g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src));

    /* Get information about the new pad's type */
    new_pad_caps = gst_pad_get_current_caps (new_pad);
    new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
    new_pad_type = gst_structure_get_name (new_pad_struct);

    /* Get the decoder's sink pad. Note that multipartdemux announces JPEG
     * parts with "image/jpeg" caps, so "image" has to be accepted as well. */
    if (g_str_has_prefix (new_pad_type, "video") || g_str_has_prefix (new_pad_type, "image")) {
        sink_pad = gst_element_get_static_pad (data->v_decoder, "sink");
    } else {
        g_print ("  It has type '%s', which we are not interested in. Ignoring.\n", new_pad_type);
        gst_caps_unref (new_pad_caps);  /* do not leak the caps on this early return */
        return;
    }

    /* If the decoder is already linked, we have nothing to do here */
    if (gst_pad_is_linked (sink_pad)) {
        g_print ("  We are already linked. Ignoring.\n");
        gst_caps_unref (new_pad_caps);
        gst_object_unref (sink_pad);
        return;
    }

    ret = gst_pad_link (new_pad, sink_pad);
    if (GST_PAD_LINK_FAILED (ret)) {
        g_print ("  Type is '%s' but link failed.\n", new_pad_type);
    } else {
        g_print ("  Link succeeded (type '%s').\n", new_pad_type);
    }

    /* Unreference the new pad's caps, if we got them */
    if (new_pad_caps != NULL) {
        gst_caps_unref (new_pad_caps);
    }

    /* Unreference the sink pad */
    if (sink_pad != NULL) {
        gst_object_unref (sink_pad);
    }
}

This is the output when I run the program:

Pipeline state changed from NULL to READY: 
Pipeline state changed from READY to PAUSED: 
Error received from element video_demux: Could not demultiplex stream. 
Debugging information: multipartdemux.c(475): multipart_parse_header(): /GstPipeline:new-pipeline/GstMultipartDemux:video_demux: 
Boundary not found in the multipart header 
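
The error says that multipartdemux cannot find the multipart boundary in the HTTP response. For what it's worth, multipartdemux has a "boundary" property, so in principle the boundary string could be set explicitly instead of being auto-detected; an untested sketch, with a placeholder boundary string:

/* Untested sketch: set the multipart boundary on the demuxer explicitly.
 * "myboundary" is a placeholder; the real value would have to come from the
 * camera's Content-Type header (e.g. "multipart/x-mixed-replace; boundary=..."). */
g_object_set (G_OBJECT (data.v_demux), "boundary", "myboundary", NULL);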

Any idea what I am missing?

Thanks in advance.

I also tried it without multipartdemux, but with a similar result. It works with gst-launch, but not in code. – Omsitelta

I don't know why it doesn't work, but you might find it easier to use gst_parse_launch() to build the pipeline instead of doing it by hand. And maybe it will work then. –
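
For reference, a minimal sketch of that gst_parse_launch() suggestion (not the poster's code: the camera URL is hard-coded and error handling is reduced to the essentials) could look like this:

#include <gst/gst.h>

int main (int argc, char *argv[]) {
    GstElement *pipeline;
    GstBus *bus;
    GstMessage *msg;
    GError *error = NULL;

    gst_init (&argc, &argv);

    /* Same pipeline description as the working gst-launch-1.0 command */
    pipeline = gst_parse_launch (
        "souphttpsrc location=\"http://192.168.1.191/api/camera/snapshot?width=640&height=480&fps=10\" timeout=5 "
        "! multipartdemux ! jpegdec ! videoconvert ! ximagesink", &error);
    if (pipeline == NULL) {
        g_printerr ("Could not build pipeline: %s\n", error ? error->message : "unknown");
        g_clear_error (&error);
        return -1;
    }

    gst_element_set_state (pipeline, GST_STATE_PLAYING);

    /* Block until an error or end-of-stream arrives on the bus */
    bus = gst_element_get_bus (pipeline);
    msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
        GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
    if (msg != NULL)
        gst_message_unref (msg);

    gst_object_unref (bus);
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);
    return 0;
}

Since gst_parse_launch() builds the same element chain as the working gst-launch-1.0 command, it can at least serve as a cross-check, even if the final application has to construct the pipeline manually.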

Thanks for your comment, but because of the nature of the whole application (the snippet I posted here is only the GStreamer part), where I have a video/audio server and am working with embedded systems (and I need to optimize the process), gst_parse_launch is not an option. Thanks anyway for your suggestion. – Omsitelta

Answer

I have sometimes found that adding queues helps, maybe one before the jpegdec? Perhaps also try a jpegparse before the jpegdec.
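
A rough, untested sketch of how those suggestions could slot into the code from the question, reusing its CustomData fields (the names v_queue and v_parse are made up here):

/* Extend the chain to
 * souphttpsrc -> multipartdemux (~>) queue -> jpegparse -> jpegdec -> videoconvert -> ximagesink */
GstElement *v_queue = gst_element_factory_make ("queue", "video_queue");
GstElement *v_parse = gst_element_factory_make ("jpegparse", "video_parse");

gst_bin_add_many (GST_BIN (data.pipeline), v_queue, v_parse, NULL);

/* Link queue -> jpegparse -> jpegdec; the existing
 * jpegdec -> videoconvert -> ximagesink links from main() stay as they are. */
if (gst_element_link_many (v_queue, v_parse, data.v_decoder, NULL) != TRUE) {
    g_printerr ("Queue/parser could not be linked.\n");
}

/* pad_added_handler would then link the demuxer's new pad to the queue's
 * sink pad instead of the decoder's:
 *     sink_pad = gst_element_get_static_pad (v_queue, "sink");
 * which means the queue (or a pointer to it) has to be reachable from CustomData. */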
