gstreamer将RTSP转jpg图片保存

简介: gstreamer将RTSP转jpg图片保存

命令行:

gst-launch-1.0 rtspsrc location="rtsp://admin:admin@127.0.0.1:554/h264/ch1/sub/av_stream" ! rtph264depay ! h264parse ! openh264dec ! videorate ! jpegenc ! multifilesink location="img_%06d.jpg"

代码方式一:

void Test()
{
  GstElement *pipeline;
  GstBus *bus;
  GstMessage *msg;
  gst_init(NULL, NULL);
  pipeline =
    gst_parse_launch
    ("gst-launch-1.0 rtspsrc location = rtsp://admin:admin@127.0.0.1:554/h264/ch1/sub/av_stream !rtph264depay !h264parse !openh264dec !videorate !jpegenc !multifilesink location = img_%06.jpg", NULL);
  /* Start playing */
  GstStateChangeReturn res = gst_element_set_state(pipeline, GST_STATE_PLAYING);
  if (res == GST_STATE_CHANGE_FAILURE)
  {
    g_printerr("Unable to set the pipeline to the playing state.\n");
    gst_object_unref(pipeline);
    Sleep(1000);
    return;
  }
  bus = gst_element_get_bus(pipeline);
  msg =
    gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE,
      (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
  if (msg != NULL)
    gst_message_unref(msg);
  gst_object_unref(bus);
  gst_element_set_state(pipeline, GST_STATE_NULL);
  gst_object_unref(pipeline);
  return;
}

代码方式二:

#include <gst/gst.h>

#include <Windows.h>

#include <chrono>
#include <cstdio>
#include <iostream>
#include <thread>
// Globals shared between the Init() worker thread and main().
GstMessage *msg;         // last message popped from the pipeline bus
GMainLoop  *_loop;       // unused in this sample; reserved for a GLib main loop
GstBus*     _bus;        // bus of _pipeline, polled in Init()
GstElement* _pipeline;   // top-level pipeline, driven from main() as well
GstElement* _source;     // rtspsrc
GstElement* _rtpbin;     // unused in this sample
GstElement* _depay;      // rtph264depay
GstElement* _parse;      // h264parse
GstElement* _dec;        // openh264dec
GstElement* _rate;       // videorate
GstElement* _enc;        // jpegenc
GstElement* _capsfilter; // capsfilter between encoder and sink
GstElement* _sink;       // multifilesink writing one JPEG per buffer
// Error type thrown when a required GStreamer object could not be created.
struct Exception : std::exception {};

// Passes `pointer` through unchanged; throws Exception when it is null.
// Used to guard factory calls that report failure by returning NULL.
template<typename T>
T* chk(T* pointer) {
  if (!pointer) {
    throw Exception();
  }
  return pointer;
}
// rtspsrc "pad-added" handler: links the dynamically created source pad
// to the depayloader sink pad that was registered as user_data.
void _onPadAdded(GstElement *src, GstPad *src_pad, gpointer user_data)
{
  gst_pad_link(src_pad, static_cast<GstPad*>(user_data));
}
void Init()
{
  gst_init(NULL, NULL);
  _pipeline = chk(gst_pipeline_new("pipline"));
  _source = chk(gst_element_factory_make("rtspsrc", "src"));
  _depay = chk(gst_element_factory_make("rtph264depay", "depay"));
  _parse = chk(gst_element_factory_make("h264parse", "parse"));
  _dec = chk(gst_element_factory_make("openh264dec", "dec"));
  _rate = chk(gst_element_factory_make("videorate", "rate"));
  _enc = chk(gst_element_factory_make("jpegenc", "enc"));
  _capsfilter = chk(gst_element_factory_make("capsfilter", "filter"));
  _sink = chk(gst_element_factory_make("multifilesink", "sink"));
  GstPad* depay_sink = gst_element_get_static_pad(_depay, "sink");
  GstCaps* depay_sink_caps = gst_caps_new_simple("application/x-rtp",
    //"format", G_TYPE_STRING, "rgb",
    //"width", G_TYPE_INT, 1920,
    //"height", G_TYPE_INT, 1080,
    "framerate", GST_TYPE_FRACTION, 1, 1,
     NULL);
  gst_pad_use_fixed_caps(depay_sink);
  gst_pad_set_caps(depay_sink, depay_sink_caps);
  gst_object_unref(depay_sink);
  g_object_set(_source, "latency", 0, NULL);
  g_object_set(_capsfilter, "caps-change-mode", 1, NULL);
  g_object_set(_sink, "location", "E:/pic/img_%d.jpg", NULL);
  gst_bin_add_many(GST_BIN(_pipeline), _source, _depay, _parse, _dec, _rate, _enc, _capsfilter, _sink, NULL);
  g_signal_connect(_source, "pad-added", G_CALLBACK(&_onPadAdded), gst_element_get_static_pad(_depay, "sink"));
  gboolean bsuccess = gst_element_link_many(_depay, _parse, _dec, _rate, _enc, _capsfilter, _sink, NULL);
  if (!bsuccess) {
    g_print("Failed to link one or more elements!\n");
    gst_element_unlink_many(_depay, _parse, _dec, _rate, _enc, _capsfilter, _sink, NULL);
    //Sleep(1000);
    //continue;
  }
  g_object_set(_source, "location", "rtsp://admin:admin@127.0.0.1:554/h264/ch1/sub/av_stream", NULL);
  while (1)
  {
    _bus = gst_element_get_bus(_pipeline);
    msg =
      gst_bus_timed_pop_filtered(_bus, GST_CLOCK_TIME_NONE,
      (GstMessageType)(GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
    if (msg != NULL)
    {
      GError *err;
      gchar *debug_info;
      switch (GST_MESSAGE_TYPE(msg))
      {
      case GST_MESSAGE_ERROR:
      {
        gst_message_parse_error(msg, &err, &debug_info);
        g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME(msg->src), err->message);
        g_printerr("Debugging information: %s\n", debug_info ? debug_info : "none");
        g_clear_error(&err);
        g_free(debug_info);
        break;
      }
      break;
      case GST_MESSAGE_EOS:
      {
        g_print("End-Of-Streamreached.\n");
        break;
      }
      break;
      case GST_MESSAGE_STATE_CHANGED:
      {
        /* We are only interested in state-changed messages from the pipeline */
        if (GST_MESSAGE_SRC(msg) == GST_OBJECT(_pipeline))
        {
          GstState old_state, new_state, pending_state;
          gst_message_parse_state_changed(msg,
            &old_state,
            &new_state,
            &pending_state);
          g_print("Pipeline state changed from %s to %s:\n",
            gst_element_state_get_name(old_state),
            gst_element_state_get_name(new_state));
          char buf[216] = { 0 };
          sprintf(buf, "Pipeline state changed from %s to %s:\n",
            gst_element_state_get_name(old_state),
            gst_element_state_get_name(new_state));
          if (pending_state == GST_STATE_NULL)
          {
            break;
          }
        }
      }
      break;
      default:
      {
        /* We shouldnot reach here */
        g_printerr("Unexpected message received.\n");
        break;
      }
      }
    }
  }
  if (msg != NULL)
    gst_message_unref(msg);
  gst_object_unref(_bus);
  gst_element_set_state(_pipeline, GST_STATE_NULL);
  gst_object_unref(_pipeline);
  return;
}
void PausePictureCapture()
{
  GstStateChangeReturn res = gst_element_set_state(_pipeline, GST_STATE_PAUSED);
  if (res == GST_STATE_CHANGE_FAILURE)
  {
    g_printerr("Unable to set the pipeline to the paused state.\n");
    gst_object_unref(_pipeline);
  }
}
void StartPictureCapture()
{
  GstStateChangeReturn res = gst_element_set_state(_pipeline, GST_STATE_PLAYING);
  if (res == GST_STATE_CHANGE_FAILURE)
  {
    g_printerr("Unable to set the pipeline to the playing state.\n");
    gst_object_unref(_pipeline);
  }
}
// Builds the pipeline on a detached worker thread (Init() blocks on the
// bus), then alternates PLAYING/PAUSED every 10 seconds, ten times.
// Uses std::this_thread::sleep_for instead of the Win32-only Sleep().
int main()
{
  std::thread([]() {
    Init();
  }).detach();
  // Crude startup synchronization: give the worker time to construct the
  // pipeline before driving state changes. NOTE(review): a condition
  // variable or future would be deterministic; kept simple for the demo.
  std::this_thread::sleep_for(std::chrono::seconds(10));
  for (int i = 0; i < 10; i++)
  {
    StartPictureCapture();
    std::this_thread::sleep_for(std::chrono::seconds(10));
    PausePictureCapture();
    std::this_thread::sleep_for(std::chrono::seconds(10));
  }
  getchar();  // keep the process alive until the user presses Enter
  return 0;
}
相关文章
|
8月前
ffmpeg 命令提取音视频数据-ffmpeg导出h265裸流-ffmpeg导出h264裸流
ffmpeg 命令提取音视频数据-ffmpeg导出h265裸流-ffmpeg导出h264裸流
207 0
|
3月前
|
编解码 计算机视觉 Python
Opencv学习笔记(九):通过CV2将摄像头视频流保存为视频文件
使用OpenCV库通过CV2将摄像头视频流保存为视频文件,包括定义视频编码格式、设置保存路径、通过write写入视频文件,并提供了相应的Python代码示例。
189 0
|
存储 Cloud Native Linux
音视频 ffmpeg命令图片与视频互转
音视频 ffmpeg命令图片与视频互转
|
Linux
Linux下采集摄像头的图像再保存为JPG图片存放到本地(YUYV转JPG)
Linux下采集摄像头的图像再保存为JPG图片存放到本地(YUYV转JPG)
2081 1
Linux下采集摄像头的图像再保存为JPG图片存放到本地(YUYV转JPG)
|
图形学
Qt&Vtk-003-读取jpg、png、dicom等格式图片
本文其实才能算是真正的Qt与Vtk结合,具体实现JPG、PNG、TIFF、DICOM、BMP及一个3D Cube显示。
703 1
Qt&Vtk-003-读取jpg、png、dicom等格式图片
|
Web App开发 存储 iOS开发
一日一技:把webp图片保存为png
一日一技:把webp图片保存为png
269 0
|
编解码 计算机视觉
使用ffmpeg将图片拼接为视频
本文介绍下如何使用ffmpeg将大量图片拼接成一个视频,并介绍其中部分参数的含义。
1225 0
使用ffmpeg将图片拼接为视频
使用ffmpeg合并两个视频文件
使用ffmpeg合并两个视频文件
268 0
|
编译器 C语言 Windows
FFMPEG视频开发:Window系统下载部署FFMPEG库并获取摄像头数据保存为MP4文件存放到本地(使用FFMPEG本身接口获取摄像头数据)
FFMPEG视频开发:Window系统下载部署FFMPEG库并获取摄像头数据保存为MP4文件存放到本地(使用FFMPEG本身接口获取摄像头数据)
480 0
FFMPEG视频开发:Window系统下载部署FFMPEG库并获取摄像头数据保存为MP4文件存放到本地(使用FFMPEG本身接口获取摄像头数据)