gstreamer 纠错
GStreamer 基础教程 3(动态管道)中的示例代码在 Windows 下不能正常运行。原因是 uridecodebin 解码后会动态创建两个 source pad:一个是 video,一个是 audio。因此 pad-added 回调需要同时处理这两种 pad,并分别链接到音频和视频分支。代码需要改为:
#include /* Structure to contain all our information, so we can pass it to callbacks */typedef struct _CustomData{ GstElement* pipeline; GstElement* source; // audio GstElement* audioConvert; GstElement* audioResample; GstElement* audioSink; // video GstElement* videoConvert; GstElement* videoSink;} CustomData; /* Handler for the pad-added signal */static void pad_added_handler (GstElement *src, GstPad *pad, CustomData *data); int main(int argc, char *argv[]) { CustomData data; GstBus *bus; GstMessage *msg; GstStateChangeReturn ret; gboolean terminate = FALSE; /* Initialize GStreamer */ gst_init (&argc, &argv); /* Create the elements */ data.source = gst_element_factory_make("uridecodebin", "source"); // audio data.audioConvert = gst_element_factory_make("audioconvert", "convert"); data.audioResample = gst_element_factory_make("audioresample", "resample"); data.audioSink = gst_element_factory_make("autoaudiosink", "sink"); // video data.videoConvert = gst_element_factory_make("videoconvert", "videoconvert"); data.videoSink = gst_element_factory_make("autovideosink", "video-sink"); data.pipeline = gst_pipeline_new("test-pipeline"); if (!data.pipeline || !data.source || !data.audioConvert || !data.audioResample || !data.audioSink || !data.videoConvert || !data.videoSink) { g_printerr("Not all elements could be created.\n"); return -1; } /* Build the pipeline. Note that we are NOT linking the source at this * point. We will do it later. 
*/ gst_bin_add_many(GST_BIN(data.pipeline), data.source, data.audioConvert, data.audioResample, data.audioSink, NULL); if (!gst_element_link_many(data.audioConvert, data.audioResample, data.audioSink, NULL)) { g_printerr("Audio elements could not be linked.\n"); gst_object_unref(data.pipeline); return -1; } gst_bin_add_many(GST_BIN(data.pipeline), data.source, data.videoConvert, data.videoSink, NULL); if (!gst_element_link_many(data.videoConvert, data.videoSink, NULL)) { g_printerr("Video elements could not be linked.\n"); gst_object_unref(data.pipeline); return -1; } /* Set the URI to play */ g_object_set (data.source, "uri", "NULL); /* Connect to the pad-added signal */ g_signal_connect (data.source, "pad-added", G_CALLBACK (pad_added_handler), &data); /* Start playing */ ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING); if (ret == GST_STATE_CHANGE_FAILURE) { g_printerr ("Unable to set the pipeline to the playing state.\n"); gst_object_unref (data.pipeline); return -1; } /* Listen to the bus */ bus = gst_element_get_bus (data.pipeline); do { msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS); /* Parse message */ if (msg != NULL) { GError *err; gchar *debug_info; switch (GST_MESSAGE_TYPE (msg)) { case GST_MESSAGE_ERROR: gst_message_parse_error (msg, &err, &debug_info); g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message); g_printerr ("Debugging information: %s\n", debug_info ? 
debug_info : "none"); g_clear_error (&err); g_free (debug_info); terminate = TRUE; break; case GST_MESSAGE_EOS: g_print ("End-Of-Stream reached.\n"); terminate = TRUE; break; case GST_MESSAGE_STATE_CHANGED: /* We are only interested in state-changed messages from the pipeline */ if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data.pipeline)) { GstState old_state, new_state, pending_state; gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state); g_print ("Pipeline state changed from %s to %s:\n", gst_element_state_get_name (old_state), gst_element_state_get_name (new_state)); } break; default: /* We should not reach here */ g_printerr ("Unexpected message received.\n"); break; } gst_message_unref (msg); } } while (!terminate); /* Free resources */ gst_object_unref (bus); gst_element_set_state (data.pipeline, GST_STATE_NULL); gst_object_unref (data.pipeline); return 0;} static void pad_added_handler(GstElement* src, GstPad* new_pad, CustomData* data) { GstPad* audioPad = NULL; GstPad* videoPad = NULL; gboolean audioType = FALSE; GstPadLinkReturn ret; GstCaps* new_pad_caps = NULL; GstStructure* new_pad_struct = NULL; const gchar* new_pad_type = NULL; g_print("\n ==========> Begin link\n"); g_print("\nReceived new pad '%s' from '%s':\n", GST_PAD_NAME(new_pad), GST_ELEMENT_NAME(src)); /* Check the new pad's type */ new_pad_caps = gst_pad_get_current_caps(new_pad); new_pad_struct = gst_caps_get_structure(new_pad_caps, 0); new_pad_type = gst_structure_get_name(new_pad_struct); g_print("\n\n--------------> Testing audio/video type --------------> \n\n", new_pad_type); if (g_str_has_prefix(new_pad_type, "audio/x-raw")) { g_print("Found Raw Audio Type: '%s'\n", new_pad_type); audioType = TRUE; goto link; } else if (g_str_has_prefix(new_pad_type, "video/x-raw")) { g_print("Found Raw Video Type: '%s'\n", new_pad_type); audioType = FALSE; goto link; } else { g_print("It has type '%s' which is not raw audio/video type. 
Ignoring.\n", new_pad_type); goto exit; }link: /* Attempt the link */ if (audioType) { audioPad = gst_element_get_static_pad(data->audioConvert, "sink"); if (gst_pad_is_linked(audioPad)) { g_print("We are already linked. Ignoring.\n"); goto exit; } // link audio ret = gst_pad_link(new_pad, audioPad); } else { videoPad = gst_element_get_static_pad(data->videoConvert, "sink"); if (gst_pad_is_linked(videoPad)) { g_print("We are already linked. Ignoring.\n"); goto exit; } // link video ret = gst_pad_link(new_pad, videoPad); } if (GST_PAD_LINK_FAILED(ret)) { g_print("Type is '%s' but link failed.\n", new_pad_type); } else { g_print("Link succeeded (type '%s').\n", new_pad_type); } if (audioType) { gst_object_unref(audioPad); } else { gst_object_unref(videoPad); }exit: /* Unreference the new pad's caps, if we got them */ if (new_pad_caps != NULL) gst_caps_unref (new_pad_caps); }
版权声明:本文内容由网络用户投稿,版权归原作者所有,本站不拥有其著作权,亦不承担相应法律责任。如果您发现本站中有涉嫌抄袭或描述失实的内容,请联系我们jiasou666@gmail.com 处理,核实后本网站将在24小时内删除侵权内容。
暂时没有评论,来抢沙发吧~