I. Image Capture
I could not find corresponding code for this feature in the VC++ server-side demo, but Google turned up a snippet from a CSDN help thread:
if (ret == 0)
{
CString str;
str.Format("ch%02d_%s.jpg", iLastSelect, csStartTime);
FILE *pFile = fopen(str.GetBuffer(0), "wb"); // GetBuffer(0) exposes the CString's internal char buffer
if (pFile)
{
fwrite(bb, cc, 1, pFile); // save the image data (bb = buffer, cc = size, as named in the CSDN post)
fclose(pFile);
}
}
Another snippet: http://topic.csdn.net/t/20060721/09/4894821.html
C#:
byte[] imageBuf = new byte[704 * 576 * 2]; // buffer declaration assumed; not shown in the original post
int size = 704 * 576 * 2;
HikVisionSDK.GetJpegImage(ChannelHandle, imageBuf, out size, 100);
using (MemoryStream ms = new MemoryStream(imageBuf))
{
Image image = Image.FromStream(ms, true);
image.Save("C:\\1.jpg");
}
Pay close attention to the parameters of GetJpegImage!! Also note: this example revealed that the earlier (GetJpegImage/GetOriginalImage) API declarations were wrong, so please update them to the following!!
public static extern int GetOriginalImage(IntPtr hChannelHandle, byte[] ImageBuf, out int Size);
public static extern int GetJpegImage(IntPtr hChannelHandle, byte[] ImageBuf, out int Size, uint nQuality);
How to save as a bmp is left for you to try; it should work much the same way :)
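To put the corrected declaration to use, here is a minimal C# sketch of a capture-to-JPEG helper. It reuses HikVisionSDK, the 704 * 576 * 2 buffer size and the quality value 100 from the snippets above; treating a return value of 0 as success is an assumption, so verify it against the SDK documentation.
C#:
using System;
using System.Drawing;
using System.IO;
static class SnapshotHelper
{
    // Hypothetical helper based on the corrected GetJpegImage declaration above.
    public static bool SaveJpeg(IntPtr channelHandle, string path, uint quality)
    {
        byte[] imageBuf = new byte[704 * 576 * 2];   // generous buffer for one frame
        int size;                                    // receives the actual JPEG length
        int ret = HikVisionSDK.GetJpegImage(channelHandle, imageBuf, out size, quality);
        if (ret != 0 || size <= 0)                   // assumed: 0 means success
            return false;
        // Wrap only the valid part of the buffer, then decode and save it.
        using (MemoryStream ms = new MemoryStream(imageBuf, 0, size))
        using (Image image = Image.FromStream(ms, true))
        {
            image.Save(path);                        // e.g. "C:\\1.jpg"
        }
        return true;
    }
}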
II. Recording
A recording file consists of three parts: the file header, the data stream, and the file trailer. The code is given first, followed by an explanation.
VC++:
The fragments below come, in order, from CHKVisionDlg::OnStart(), StreamDirectReadCallback, and CHKVisionDlg::OnStop():
m_bDspPreset[i] = TRUE;
if (m_bDspPreset[i]){
char fileName[256];
sprintf(fileName, "d:\\stream%d_%d.264", i, gFileNum++ / GetTotalDSPs());
gFileHandle[i] = _open(fileName, _O_CREAT | _O_BINARY | _O_WRONLY | _O_TRUNC, _S_IREAD | _S_IWRITE);
if (gFileHandle[i] == -1){
TRACE("channel %d file open error\n", i);
return;
}
gChannelFrames[i] = 0 ;
gChannelTotalLength[i] = 0 ;
gChannelFramesLost[i] = 0 ;
gChannelOverflow[i] = 0 ;
gCurrentFileLen[i] = 0 ;
_write(gFileHandle[i], FileHeader[i], FileHeaderLen);
// cannot be started again until stopped first
// m_bDspPreset[i] = FALSE;
gCaptureStartedNum ++ ;
// let the threads have chance to run
// Sleep(500);
} else
gFileHandle[i] = - 1 ;
}
{
// CHKVisionDlg * lpDlg = (CHKVisionDlg*)context;
// return lpDlg->ProcCallBack(channelNum, DataBuf, Length, frameType);
int i,status = 0 ;
CString ctip;
int nframetype = 0 ;
// if cap images we need clean the queue here
// if (!bCapture)
// return 0;
// no errors
if (frameType > 0 ) {
if (frameType == PktSysHeader){
// store the file header
memcpy(FileHeader[channelNum], DataBuf, Length);
FileHeaderLen = Length;
TRACE( " channel %d get the file header !\n " ,channelNum);
}
if (frameType == PktIFrames || frameType == PktSubIFrames){
status = 1 ;
}
else {
status = 0 ;
}
if (frameType == PktMotionDetection){
// m_VideoWin.DrawVect(channelNum, (char *)DataBuf, Length);
return 0 ;
}
if (frameType == PktOrigImage){
return 0 ;
}
}
if (Length == 0 ){
TRACE( " no data ?\n " );
return 0 ;
}
// if(frameType == PktIFrames){
// int iii=1;
// }
ULONG currentTime = timeGetTime();
gChannelTotalLength[channelNum] += Length;
gCurrentFileLen[channelNum] += Length;
if (currentTime > StartTime + 1000 ){
CString str,str2;
str.Format( " %d " , (gChannelTotalLength[dcurrentwin] * 8 / (currentTime - StartTime)));
for (i = 0 ;i < g_nChannelTotal;i ++ )
gChannelTotalLength[i] = 0 ;
StartTime = currentTime;
CHKVisionDlg * pMain = (CHKVisionDlg * )AfxGetMainWnd();
pMain -> GetDlgItem(IDC_BPS) -> SetWindowText((LPCTSTR)str);
}
// if (m_sframe && channelNum ==0)
// {
// if((frameType == PktSFrames && nframetype ==4 )||(frameType == PktSysHeader))
// {
// MP4_ServerWriteData(channelNum,(unsigned char *)DataBuf, Length,frameType,status);
// }
// }
// MP4_ServerWriteData(channelNum,(unsigned char *)DataBuf, Length,frameType,status);
if (frameType == PktAudioFrames)
{
_write(gFileHandleQcif[channelNum],DataBuf,Length);
MP4_ServerWriteDataEx(channelNum,(unsigned char * )DataBuf, Length,frameType,status, 1 );
_write(gFileHandle[channelNum], DataBuf, Length);
MP4_ServerWriteDataEx(channelNum,(unsigned char * )DataBuf, Length,frameType,status, 0 );
} else if (frameType == PktSubIFrames || frameType == PktSubPFrames || frameType == PktSubBBPFrames || frameType == PktSubSysHeader)
{
_write(gFileHandleQcif[channelNum],DataBuf,Length);
MP4_ServerWriteDataEx(channelNum,(unsigned char * )DataBuf, Length,frameType,status, 1 );
} else
{
// _write(gFileHandle[channelNum], DataBuf, Length);
MP4_ServerWriteDataEx(channelNum,(unsigned char * )DataBuf, Length,frameType,status, 0 );
}
return 0 ;
}
if (m_bDspPreset[i]){
ASSERT(gFileHandle[i] != - 1 );
// StopVideoCapture(ChannelHandle[i]);
// lseek(gFileHandle[i], 0, SEEK_SET);
// FRAMES_STATISTICS fs;
// GetFramesStatistics(ChannelHandle[i], &fs);
// ULONG frames = fs.AudioFrames + fs.VideoFrames;
// TRACE("channel %i has %x frames written\n", i, frames);
#define END_CODE 0x00000002
ULONG endCode = END_CODE;
_write(gFileHandle[i], & endCode, sizeof (ULONG));
_close(gFileHandle[i]);
/// add v34
if (bEncodeCifAndQcif[i])
_close(gFileHandleQcif[i]);
gCaptureStartedNum -- ;
}
}
Code notes:
1. The StartCap and StopCap button handlers show that their main job is writing the file header and the file trailer; note the _write calls.
2. The previous chapter covered the callback StreamDirectReadCallback, which mainly writes the data into memory. The code here shows the callback writing to memory and to the file at the same time, with a .264 file as the output. The callback fires continuously from the moment the server starts (i.e. allows client access). Breakpoint debugging shows that frameType == PktSysHeader marks the file header and occurs only once, so clicking StartCap simply writes that cached header data to the file. Opening a .264 file in UltraEdit shows that the first few characters always start with 4HKH (a small check is sketched right after these notes).
3. Note that gFileHandle is an array of file handles; once a file has been opened, the callback keeps writing data to that handle!!
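As a quick sanity check of note 2, a short sketch like the following can read the first bytes of a recorded file and compare them with the 4HKH tag mentioned above; the file name is only an example matching the d:\stream%d_%d.264 pattern used in OnStart().
C#:
using System;
using System.IO;
using System.Text;
static class HeaderCheck
{
    // Minimal sketch: verify that a saved .264 file starts with the expected tag.
    static void Main()
    {
        byte[] magic = new byte[4];
        using (FileStream fs = File.OpenRead("d:\\stream0_0.264"))   // example file name
        {
            fs.Read(magic, 0, magic.Length);
        }
        string tag = Encoding.ASCII.GetString(magic);
        Console.WriteLine(tag == "4HKH" ? "Header tag looks correct." : "Unexpected header tag: " + tag);
    }
}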
C#:
byte[] FileHeader;
// Length of the file header
int FileHeaderLen;
// Whether capture has started: 0 = not started, 1 = started
volatile int CaptureState;
/// <summary>
/// Start recording
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void btnStart_Click( object sender, EventArgs e)
{
// Write the file header
using (FileStream fs = new FileStream("C:\\hik.264", FileMode.Create))
{
BinaryWriter bw = new BinaryWriter(fs);
bw.Write(FileHeader);
bw.Flush();
bw.Close();
}
CaptureState = 1 ;
}
uint endCode = 0x00000002 ;
/// <summary>
/// Stop recording
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void btnStop_Click( object sender, EventArgs e)
{
CaptureState = 0 ;
using (FileStream fs = new FileStream( " C:\\hik.264 " , FileMode.Append))
{
BinaryWriter bw = new BinaryWriter(fs);
bw.Write(endCode);
bw.Close();
}
}
public int STREAM_DIRECT_READ_CALLBACK1( int channelNum, IntPtr DataBuf, int Length, FrameType_t frameType, IntPtr context)
{
// int status = 0;
// HikServer.MP4_ServerWriteDataEx(channelNum, DataBuf, Length, (int)frameType, status, 0);
// return 0;
int status = 0 ;
if (frameType > 0 )
{
if (frameType == FrameType_t.PktSysHeader)
{
FileHeader = new byte [Length];
Marshal.Copy(DataBuf, FileHeader, 0 , Length);
FileHeaderLen = Length;
}
if (frameType == FrameType_t.PktIFrames || frameType == FrameType_t.PktSubIFrames)
status = 1 ;
else
status = 0 ;
if (frameType == FrameType_t.PktMotionDetection || frameType == FrameType_t.PktOrigImage)
return 0 ;
}
if (Length == 0 )
{
// TRACE("no data ?\n");
return 0 ;
}
if (frameType == FrameType_t.PktAudioFrames)
{
WriterVideoCapture(Length, DataBuf);
// Write to the file
// _write(gFileHandleQcif[channelNum],DataBuf,Length);
// HikServer.MP4_ServerWriteDataEx(channelNum, DataBuf, Length, (int)frameType, status, 1);
// _write(gFileHandle[channelNum], DataBuf, Length);
HikServer.MP4_ServerWriteDataEx(channelNum, DataBuf, Length, ( int )frameType, status, 0 );
}
else if (frameType == FrameType_t.PktSubIFrames || frameType == FrameType_t.PktSubPFrames || frameType == FrameType_t.PktSubBBPFrames || frameType == FrameType_t.PktSubSysHeader)
{
// _write(gFileHandleQcif[channelNum],DataBuf,Length);
HikServer.MP4_ServerWriteDataEx(channelNum, DataBuf, Length, ( int )frameType, status, 1 );
}
else
{
WriterVideoCapture(Length, DataBuf);
HikServer.MP4_ServerWriteDataEx(channelNum, DataBuf, Length, ( int )frameType, status, 0 );
}
return 0 ;
}
/// <summary>
/// Write the stream data into the video file
/// </summary>
/// <param name="length"></param>
/// <param name="dataBuf"></param>
private void WriterVideoCapture( int length, IntPtr dataBuf)
{
if (CaptureState == 1 )
{
using (FileStream fs = new FileStream( " C:\\hik.264 " , FileMode.Append))
{
BinaryWriter bw = new BinaryWriter(fs);
byte [] byteBuf = new byte [length];
Marshal.Copy(dataBuf, byteBuf, 0 , length);
bw.Write(byteBuf);
bw.Flush();
bw.Close();
}
}
}
Code notes:
1. The callback STREAM_DIRECT_READ_CALLBACK1 is modified from the one in the previous article, again rewritten by following the VC++ source code.
2. The CaptureState variable controls whether STREAM_DIRECT_READ_CALLBACK1 writes to the file.
3. btnStart_Click and btnStop_Click correspond to the Start Recording and Stop Recording buttons on the UI.
4. Note the file modes: FileMode.Create when recording starts, FileMode.Append for the continuous writes.
Additional notes:
1. When recording, be sure to handle the size of a single recording file and the case where disk space runs out; ideally also consider splitting recordings by time period (a minimal rollover sketch follows this list).
2. Watch the FileHeader variable that caches the file header: if you record continuously into separate files, you may find that the first file plays but the later ones do not. This may be caused by the data type holding the header; try switching it from byte[] to IntPtr.
3. The SDK samples include a player together with its source code. Open the player and drag the .264 file into it to play it back; if it reports an error, your recording is broken!!
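For points 1 and 2 above, here is a minimal rollover sketch (not the article's implementation): every output file starts with the cached FileHeader, ends with the 0x00000002 end code, and a new file is opened once the current one exceeds a size limit. The 256 MB limit and the file naming scheme are assumptions for illustration only.
C#:
using System;
using System.IO;
class RollingRecorder : IDisposable
{
    private const long MaxFileSize = 256L * 1024 * 1024;   // assumed per-file limit
    private readonly byte[] _fileHeader;                    // cached header (FileHeader above)
    private FileStream _fs;
    private int _fileIndex;

    public RollingRecorder(byte[] fileHeader)
    {
        _fileHeader = fileHeader;
        OpenNextFile();
    }

    // Called from the stream callback for every frame that should be recorded.
    public void WriteFrame(byte[] frame, int length)
    {
        if (_fs.Length + length > MaxFileSize)
        {
            CloseCurrentFile();          // finish the current file with the end code
            OpenNextFile();              // the new file gets the header again
        }
        _fs.Write(frame, 0, length);
    }

    private void OpenNextFile()
    {
        string path = string.Format("C:\\hik_{0}.264", _fileIndex++);   // assumed naming scheme
        _fs = new FileStream(path, FileMode.Create);
        _fs.Write(_fileHeader, 0, _fileHeader.Length);
    }

    private void CloseCurrentFile()
    {
        byte[] endCode = BitConverter.GetBytes(0x00000002u);   // file trailer
        _fs.Write(endCode, 0, endCode.Length);
        _fs.Dispose();
    }

    public void Dispose()
    {
        CloseCurrentFile();
    }
}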
This article is reproduced from over140's 51CTO blog. Original link: http://blog.51cto.com/over140/586651. Please contact the original author before reprinting.