import gi
gi.require_version('Gst', '1.0')
from gi.repository import Gst, GLib

Gst.init(None)

def on_pad_added(src, pad, dst):
    # rtspsrc creates its source pads at runtime; link each new pad to the
    # sink pad of the queue passed in as user data.
    apad = dst.get_static_pad("sink")
    if not apad.is_linked():
        pad.link(apad)

# Build and run the pipeline; if the main loop ever exits, rebuild it from scratch.
while True:
    pipe = Gst.Pipeline.new("test")
    video_source = Gst.ElementFactory.make("rtspsrc", "video_source")
    audio_source = Gst.ElementFactory.make("rtspsrc", "audio_source")
    video_depay = Gst.ElementFactory.make("rtph264depay", "video_depay")
    audio_depay = Gst.ElementFactory.make("rtppcmudepay", "audio_depay")
    video_parse = Gst.ElementFactory.make("h264parse", "parse")
    video_decode = Gst.ElementFactory.make("vaapih264dec", "video_decode")
    audio_decode = Gst.ElementFactory.make("mulawdec", "audio_decode")
    video_convert = Gst.ElementFactory.make("videoconvert", "video_convert")
    audio_convert = Gst.ElementFactory.make("audioconvert", "audio_convert")
    audio_resample = Gst.ElementFactory.make("audioresample", "audio_resample")
    video_queue = Gst.ElementFactory.make("queue", "video_queue")
    audio_queue = Gst.ElementFactory.make("queue", "audio_queue")
    video_sink = Gst.ElementFactory.make("autovideosink", "video_sink")
    audio_sink = Gst.ElementFactory.make("autoaudiosink", "audio_sink")

    elements_video = [video_source, video_queue, video_depay, video_parse, video_decode, video_convert, video_sink]
    elements_audio = [audio_source, audio_queue, audio_depay, audio_decode, audio_convert, audio_resample, audio_sink]

    video_source.set_property("location", "rtsp://192.168.50.246")
    video_source.set_property("user-id", "admin")
    video_source.set_property("user-pw", "admin12345")
    audio_source.set_property("location", "rtsp://192.168.50.246")
    audio_source.set_property("user-id", "admin")
    audio_source.set_property("user-pw", "admin12345")

    # rtspsrc has no static source pads, so the source -> queue links are made
    # in on_pad_added() once the pads appear.
    video_source.connect("pad-added", on_pad_added, video_queue)
    audio_source.connect("pad-added", on_pad_added, audio_queue)

    for element in elements_video:
        pipe.add(element)
    for element in elements_audio:
        pipe.add(element)

    # Static links between the elements that already have their pads.
    video_queue.link(video_depay)
    video_depay.link(video_parse)
    video_parse.link(video_decode)
    video_decode.link(video_convert)
    video_convert.link(video_sink)
    audio_queue.link(audio_depay)
    audio_depay.link(audio_decode)
    audio_decode.link(audio_convert)
    audio_convert.link(audio_resample)
    audio_resample.link(audio_sink)

    pipe.set_state(Gst.State.PLAYING)
    Gst.debug_set_active(True)
    Gst.debug_set_default_threshold(4)
    mainloop = GLib.MainLoop()
    mainloop.run()
- Dec 09 Mon 2019 13:25
Receive RTSP streaming (Python program)
- Dec 09 Mon 2019 13:19
Receive RTSP streaming (C program)
#include <gst/gst.h>

/* rtspsrc creates its source pads at runtime; link each new pad to the
 * "sink" pad of the queue passed in through the data pointer. */
void on_pad_added (GstElement *element, GstPad *pad, gpointer data) {
  gchar *pad_name = gst_pad_get_name (pad);
  gst_element_link_pads (element, pad_name, GST_ELEMENT (data), "sink");
  g_free (pad_name);
}

int main (int argc, char *argv[]) {
  GstElement *pipeline, *audio_source, *audio_queue, *audio_convert, *audio_resample, *audio_sink;
  GstElement *video_queue, *video_convert, *video_sink, *video_source, *video_depay, *video_parse, *video_decode;
  GstElement *audio_depay, *audio_decode;
  GstBus *bus;
  GstMessage *msg;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Create the elements */
  audio_source = gst_element_factory_make ("rtspsrc", "audio_source");
  audio_depay = gst_element_factory_make ("rtppcmudepay", "audio_depay");
  audio_decode = gst_element_factory_make ("mulawdec", "audio_decode");
  audio_queue = gst_element_factory_make ("queue", "audio_queue");
  audio_convert = gst_element_factory_make ("audioconvert", "audio_convert");
  audio_resample = gst_element_factory_make ("audioresample", "audio_resample");
  audio_sink = gst_element_factory_make ("autoaudiosink", "audio_sink");
  video_queue = gst_element_factory_make ("queue", "video_queue");
  video_convert = gst_element_factory_make ("videoconvert", "video_convert");
  video_sink = gst_element_factory_make ("autovideosink", "video_sink");
  video_source = gst_element_factory_make ("rtspsrc", "video_source");
  video_depay = gst_element_factory_make ("rtph264depay", "video_depay");
  video_parse = gst_element_factory_make ("h264parse", "h264parse");
  video_decode = gst_element_factory_make ("vaapih264dec", "hw_h264_decode");

  /* Create the empty pipeline */
  pipeline = gst_pipeline_new ("test-pipeline");

  if (!pipeline || !audio_source || !audio_depay || !audio_decode || !audio_queue ||
      !audio_convert || !audio_resample || !audio_sink ||
      !video_source || !video_depay || !video_parse || !video_decode ||
      !video_queue || !video_convert || !video_sink) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  /* Add the elements to the pipeline */
  gst_bin_add_many (GST_BIN (pipeline), audio_source, audio_queue, audio_convert, audio_resample, audio_sink,
      audio_depay, audio_decode, NULL);
  gst_bin_add_many (GST_BIN (pipeline), video_queue, video_convert, video_sink, video_source, video_depay,
      video_parse, video_decode, NULL);

  /* Link all elements that can be linked now because they have "Always" pads;
   * the rtspsrc pads are linked later in on_pad_added() */
  if (gst_element_link_many (video_queue, video_depay, video_parse, video_decode, video_convert, video_sink, NULL) != TRUE) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (pipeline);
    return -1;
  }
  if (gst_element_link_many (audio_queue, audio_depay, audio_decode, audio_convert, audio_resample, audio_sink, NULL) != TRUE) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  /* Configure the RTSP sources and hook up their dynamic pads */
  g_signal_connect (video_source, "pad-added", G_CALLBACK (on_pad_added), video_queue);
  g_signal_connect (audio_source, "pad-added", G_CALLBACK (on_pad_added), audio_queue);
  g_object_set (G_OBJECT (video_source), "location", "rtsp://192.168.50.246", "user-id", "admin", "user-pw", "admin12345", NULL);
  g_object_set (G_OBJECT (audio_source), "location", "rtsp://192.168.50.246", "user-id", "admin", "user-pw", "admin12345", NULL);

  /* Start playing the pipeline */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Wait until error or EOS */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

  /* Free resources */
  if (msg != NULL)
    gst_message_unref (msg);
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}
- May 05 Sun 2019 14:13
Launching Caffe in a Python 3 environment on Ubuntu 16.04
- Apr 11 Thu 2019 14:18
GStreamer application - receiving RTSP video
Prototype:
rtspsrc location=rtsp://192.168.50.246 user-id=admin user-pw=admin protocols=4 ! rtph264depay ! h264parse ! avdec_h264 ! videoscale ! videorate ! videoconvert ! autovideosink
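A minimal Python sketch of running this prototype with Gst.parse_launch instead of the gst-launch command line (the URL and credentials are the ones from the prototype above; substitute your own camera's values):

import gi
gi.require_version('Gst', '1.0')
from gi.repository import Gst, GLib

Gst.init(None)

# Same prototype pipeline, parsed as a launch string instead of assembled element by element.
pipeline = Gst.parse_launch(
    "rtspsrc location=rtsp://192.168.50.246 user-id=admin user-pw=admin protocols=4 ! "
    "rtph264depay ! h264parse ! avdec_h264 ! videoscale ! videorate ! videoconvert ! autovideosink")
pipeline.set_state(Gst.State.PLAYING)
GLib.MainLoop().run()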
- Apr 03 Wed 2019 09:22
GStreamer tutorial (3): When would you use a Dynamic pad, a Request pad, or a Ghost pad?
- Apr 02 Tue 2019 18:35
GStreamer tutorial (2): What does the Bus do?
- Apr 02 Tue 2019 18:00
GStreamer tutorial (1): Terminology used in GStreamer
- Apr 02 Tue 2019 17:40
GStreamer tutorial (0): What you need to know before learning GStreamer
- Mar 21 Fri 2014 21:27
NVRs at Secutech 2014
- Jan 10 Fri 2014 23:52
Is ONVIF actually any good???