import gi
gi.require_version('Gst', '1.0')
from gi.repository import Gst, GLib

Gst.init(None)
def on_pad_added(src, pad, dst):
    # rtspsrc creates its source pads at runtime; link each new pad to the
    # downstream queue while its sink pad is still free.
    sink_pad = dst.get_static_pad("sink")
    if not sink_pad.is_linked():
        pad.link(sink_pad)

def main():
    pipe = Gst.Pipeline.new("test")
    video_source = Gst.ElementFactory.make("rtspsrc", "video_source")
    audio_source = Gst.ElementFactory.make("rtspsrc", "audio_source")
    video_depay = Gst.ElementFactory.make("rtph264depay", "video_depay")
    audio_depay = Gst.ElementFactory.make("rtppcmudepay", "audio_depay")
    video_parse = Gst.ElementFactory.make("h264parse", "parse")
    video_decode = Gst.ElementFactory.make("vaapih264dec", "video_decode")
    audio_decode = Gst.ElementFactory.make("mulawdec", "audio_decode")
    video_convert = Gst.ElementFactory.make("videoconvert", "video_convert")
    audio_convert = Gst.ElementFactory.make("audioconvert", "audio_convert")
    audio_resample = Gst.ElementFactory.make("audioresample", "audio_resample")
    video_queue = Gst.ElementFactory.make("queue", "video_queue")
    audio_queue = Gst.ElementFactory.make("queue", "audio_queue")
    video_sink = Gst.ElementFactory.make("autovideosink", "video_sink")
    audio_sink = Gst.ElementFactory.make("autoaudiosink", "audio_sink")


    elements_video = [video_source, video_queue, video_depay, video_parse, video_decode, video_convert, video_sink]
    
    elements_audio = [audio_source, audio_queue, audio_depay, audio_decode, audio_convert, audio_resample, audio_sink]
    
    
    video_source.set_property("location", "rtsp://192.168.50.246")
    video_source.set_property("user-id", "admin")
    video_source.set_property("user-pw", "admin12345")
    audio_source.set_property("location", "rtsp://192.168.50.246")
    audio_source.set_property("user-id", "admin")
    audio_source.set_property("user-pw", "admin12345")
    
    video_source.connect("pad-added", on_pad_added, video_queue)
    audio_source.connect("pad-added", on_pad_added, audio_queue)
    
    for element in elements_video:
        pipe.add(element)
    for element in elements_audio:
        pipe.add(element)
    # rtspsrc has no static source pads, so the sources are linked to their
    # queues in on_pad_added instead of here.
    video_queue.link(video_depay)
    video_depay.link(video_parse)
    video_parse.link(video_decode)

    video_decode.link(video_convert)
    video_convert.link(video_sink)
    audio_queue.link(audio_depay)
    audio_depay.link(audio_decode)
    audio_decode.link(audio_convert)
    audio_convert.link(audio_resample)
    audio_resample.link(audio_sink)

    # Enable verbose GStreamer debug output (level 4 = INFO).
    Gst.debug_set_active(True)
    Gst.debug_set_default_threshold(4)

    pipe.set_state(Gst.State.PLAYING)
    GLib.MainLoop().run()


if __name__ == "__main__":
    main()



#include <gst/gst.h>

/* rtspsrc creates its source pads at runtime; link each new pad to the
 * downstream element's sink pad. */
void on_pad_added (GstElement *element, GstPad *pad, gpointer data) {
  gchar *pad_name = gst_pad_get_name (pad);
  gst_element_link_pads (element, pad_name, GST_ELEMENT (data), "sink");
  g_free (pad_name);
}

int main(int argc, char *argv[]) {
  GstElement *pipeline, *audio_source, *audio_queue, *audio_convert, *audio_resample, *audio_sink;
  GstElement *video_queue, *video_convert, *video_sink, *video_source, *video_depay, *video_parse, *video_decode;
  GstElement *audio_depay, *audio_decode;
  GstBus *bus;
  GstMessage *msg;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Create the elements */
  audio_source = gst_element_factory_make ("rtspsrc", "audio_source");
  audio_depay = gst_element_factory_make("rtppcmudepay", "audio_depay");
  audio_decode = gst_element_factory_make("mulawdec", "audio_decode");
  audio_queue = gst_element_factory_make ("queue", "audio_queue");
  audio_convert = gst_element_factory_make ("audioconvert", "audio_convert");
  audio_resample = gst_element_factory_make ("audioresample", "audio_resample");
  audio_sink = gst_element_factory_make ("autoaudiosink", "audio_sink");
  video_queue = gst_element_factory_make ("queue", "video_queue");

  video_convert = gst_element_factory_make ("videoconvert", "video_convert");
  video_sink = gst_element_factory_make ("autovideosink", "video_sink");
  video_source = gst_element_factory_make("rtspsrc", "video_source");
  video_depay = gst_element_factory_make("rtph264depay", "video_depay");
  video_parse = gst_element_factory_make("h264parse", "h264parse");
  video_decode = gst_element_factory_make("vaapih264dec", "hw_h264_decode");


  /* Create the empty pipeline */
  pipeline = gst_pipeline_new ("test-pipeline");

  if (!pipeline || !audio_source || !audio_depay || !audio_decode || !audio_queue ||
      !audio_convert || !audio_resample || !audio_sink ||
      !video_source || !video_depay || !video_parse || !video_decode ||
      !video_queue || !video_convert || !video_sink) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }


  /* Link all elements that can be automatically linked because they have "Always" pads */

  gst_bin_add_many (GST_BIN (pipeline), audio_source, audio_queue, audio_convert, audio_resample, audio_sink,
      audio_depay, audio_decode, NULL);

  gst_bin_add_many (GST_BIN (pipeline), video_queue, video_convert, video_sink, video_source, video_depay, video_parse, video_decode, NULL);
  if (gst_element_link_many (video_queue, video_depay, video_parse, video_decode, video_convert, video_sink, NULL) != TRUE) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  if (gst_element_link_many (audio_queue, audio_depay, audio_decode, audio_convert, audio_resample, audio_sink, NULL) != TRUE) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  g_signal_connect (video_source, "pad-added", G_CALLBACK (on_pad_added), video_queue);
  g_signal_connect (audio_source, "pad-added", G_CALLBACK (on_pad_added), audio_queue);

  /* Configure the RTSP sources */
  g_object_set (G_OBJECT (video_source), "location", "rtsp://192.168.50.246",
      "user-id", "admin", "user-pw", "admin12345", NULL);
  g_object_set (G_OBJECT (audio_source), "location", "rtsp://192.168.50.246",
      "user-id", "admin", "user-pw", "admin12345", NULL);

  /* Start playing the pipeline */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Wait until error or EOS */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);


  /* Free resources */
  if (msg != NULL)
    gst_message_unref (msg);
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);

  gst_object_unref (pipeline);
  return 0;
}


Download Caffe, then build and install it:

mkdir build


Prototype

gst-launch-1.0 rtspsrc location=rtsp://192.168.50.246 user-id=admin user-pw=admin protocols=4 ! rtph264depay ! h264parse ! avdec_h264 ! videoscale ! videorate ! videoconvert ! autovideosink
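
For quick experiments, the same prototype can also be driven from Python through Gst.parse_launch; a minimal sketch (the camera URL and credentials are placeholders, and protocols=4 forces TCP transport):

import gi
gi.require_version('Gst', '1.0')
from gi.repository import Gst, GLib

Gst.init(None)

# Same prototype pipeline; protocols=4 restricts rtspsrc to TCP transport.
pipeline = Gst.parse_launch(
    "rtspsrc location=rtsp://192.168.50.246 user-id=admin user-pw=admin "
    "protocols=4 ! rtph264depay ! h264parse ! avdec_h264 ! "
    "videoscale ! videorate ! videoconvert ! autovideosink")

pipeline.set_state(Gst.State.PLAYING)
GLib.MainLoop().run()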


As described in the documentation on the official website:

Dynamic Pad
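
A minimal sketch of the dynamic-pad pattern, along the lines of the pad-added examples above: uridecodebin creates its source pads only after it has analysed the stream, so linking is deferred to the callback (the media URI here is a hypothetical placeholder):

import gi
gi.require_version('Gst', '1.0')
from gi.repository import Gst, GLib

Gst.init(None)

def on_pad_added(src, new_pad, convert):
    # Only link pads that carry audio; uridecodebin may also emit video pads.
    caps = new_pad.get_current_caps()
    if caps and caps.get_structure(0).get_name().startswith("audio/"):
        sink_pad = convert.get_static_pad("sink")
        if not sink_pad.is_linked():
            new_pad.link(sink_pad)

pipeline = Gst.Pipeline.new("dynamic-pad-demo")
src = Gst.ElementFactory.make("uridecodebin", "src")
convert = Gst.ElementFactory.make("audioconvert", "convert")
sink = Gst.ElementFactory.make("autoaudiosink", "sink")
for element in (src, convert, sink):
    pipeline.add(element)
convert.link(sink)  # static ("Always") pads can be linked immediately

src.set_property("uri", "file:///path/to/media.webm")  # placeholder URI
src.connect("pad-added", on_pad_added, convert)

pipeline.set_state(Gst.State.PLAYING)
GLib.MainLoop().run()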


For the detailed explanation, see here.

The Bus is normally used for monitoring: when the Bus callback receives a message signal, we can react according to the message's type.
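
A minimal self-contained sketch of such a Bus watch, using a videotestsrc pipeline as a stand-in; each message type gets its own branch in the callback:

import gi
gi.require_version('Gst', '1.0')
from gi.repository import Gst, GLib

Gst.init(None)

def on_message(bus, message, loop):
    # React according to the message type.
    if message.type == Gst.MessageType.ERROR:
        err, debug = message.parse_error()
        print("ERROR:", err.message)
        loop.quit()
    elif message.type == Gst.MessageType.EOS:
        print("End of stream")
        loop.quit()

pipeline = Gst.parse_launch("videotestsrc num-buffers=100 ! autovideosink")
loop = GLib.MainLoop()

bus = pipeline.get_bus()
bus.add_signal_watch()                    # make the bus emit "message" signals
bus.connect("message", on_message, loop)

pipeline.set_state(Gst.State.PLAYING)
loop.run()
pipeline.set_state(Gst.State.NULL)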


What follows is my own understanding; you can cross-check it against the official site.

In the GStreamer architecture, a pipeline usually contains one or more Bins and two or more Elements, …
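
A tiny sketch of that structure: a Pipeline is itself a specialised Bin, and the Elements added to it become its children:

import gi
gi.require_version('Gst', '1.0')
from gi.repository import Gst

Gst.init(None)

pipeline = Gst.Pipeline.new("demo")             # a Pipeline is a top-level Bin
src = Gst.ElementFactory.make("videotestsrc", "src")
sink = Gst.ElementFactory.make("autovideosink", "sink")

pipeline.add(src)                               # Elements become children of the Bin
pipeline.add(sink)
src.link(sink)

print(isinstance(pipeline, Gst.Bin))            # True: Pipeline subclasses Bin
print(pipeline.get_by_name("src") is src)       # True: the Bin tracks its children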


I want to write a series of articles on GStreamer, because I genuinely don't understand why it has so little coverage when so many companies are using it.

So I'm starting to write on this blog again.


It's over ~ after only three hours I ran out of things to see.

Not much changed this year ~ or should I say, has NVR already matured?


Defining their own protocol was bad enough, and then the stream itself goes over RTSP.

I have to say, every IP camera vendor's RTSP implementation is quite a spectacle.

