My first project: recording a video database with the Kinect

Date: 2021-03-10 16:44:58

Kinect depth video denoising

kinectmod32.dll

http://pan.baidu.com/s/1DsGqX

After downloading, rename it to kinect.dll

and use it to replace Redist\OpenNI2\Drivers\kinect.dll
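To quickly check that the replaced driver is actually being picked up, a minimal sketch like the one below (it uses the same OpenNI2 calls as the full program further down) should be able to open the Kinect; if Drivers\kinect.dll is missing or wrong, Device::open fails and getExtendedError() explains why:

#include <iostream>
#include "OpenNI.h"

// Minimal sketch: check that OpenNI2 finds the Kinect through the replaced driver.
int main()
{
    openni::Status rc = openni::OpenNI::initialize();
    if (rc != openni::STATUS_OK)
    {
        std::cerr << "initialize failed: " << openni::OpenNI::getExtendedError() << std::endl;
        return 1;
    }

    openni::Device device;
    rc = device.open(openni::ANY_DEVICE);   // fails if Drivers\kinect.dll is missing or wrong
    if (rc != openni::STATUS_OK)
    {
        std::cerr << "open failed: " << openni::OpenNI::getExtendedError() << std::endl;
    }
    else
    {
        std::cout << "Kinect opened OK" << std::endl;
        device.close();
    }

    openni::OpenNI::shutdown();
    return 0;
}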

Code:

 /*
* =====================================================================================
*
* Filename: first_auoto_record.cpp
*
* Description:
* Automatic Kinect recording, used to collect a video database. Each recorded clip is about 5 minutes long (determined by the simulated-keyboard delay in the worker thread, so the exact length differs between machines).
*
*
*
* Version: 1.0
* Created: 2013/10/14 16:37:10
* Revision: none
* Compiler: gcc
*
* Author: @礼杨_HDU (), yuliyang@qq.com
* Organization:
*
* =====================================================================================
*/
#include <stdlib.h>
#include <windows.h>
#include <iostream>
#include <conio.h>
#include <string>
#include "OpenNI.h"
#include "opencv2/core/core.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp" #define RESOLUTION 640,480
#define RECORDRESOLUTION 590,440
#define ADRESOLUTION 45,40,590,440
#define FPS 20
#define GRAYTH 10
#define REPAIRERANGE 5
#define COLORTH 10

using namespace std;
using namespace cv;
using namespace openni;
/*
* === FUNCTION ======================================================================
* Name: ThreadFunction (worker thread)
* Description: sends keyboard messages to this process from a worker thread
* =====================================================================================
*/
DWORD WINAPI ThreadFunction(LPVOID lpParameter)
{
printf("in thread......");
while (TRUE){
INPUT Input1 = { 0 };
Input1.type = INPUT_KEYBOARD;
Input1.ki.dwFlags = KEYEVENTF_EXTENDEDKEY;// note: the original set mi.dwFlags; for INPUT_KEYBOARD the ki member is the right one
Input1.ki.wVk = 0x20; // space
SendInput( 1, &Input1, sizeof( INPUT ) );
INPUT Input2 = { 0 };
Input2.type = INPUT_KEYBOARD;
Input2.ki.dwFlags = KEYEVENTF_EXTENDEDKEY;
Input2.ki.wVk = 0x0d; // Enter
SendInput( 1, &Input2, sizeof( INPUT ) );
Sleep(5*60*1000);//send a key press every 5 minutes, i.e. one recorded clip per interval
}
return true;
}
/********************************************
Global variable
********************************************/
//Openni status
Status result = STATUS_OK;
// open device
Device device;
//OpenNI2 image
VideoFrameRef oniDepthImg;
VideoFrameRef oniColorImg;
// create stream
VideoStream oniDepthStream;
VideoStream oniColorStream;
// set video mode
VideoMode modeDepth;
VideoMode modeColor;
//OpenCV image
cv::Mat cvDepthImg;
cv::Mat cvDepthImg2;
cv::Mat cvColorImg;
cv::Mat cvColorImg2;
//OpenCV adjusted image
cv::Mat cvAdjustDepthImg;
cv::Mat cvAdjustColorImg;
//Resolution
Size se=Size(RESOLUTION);
Size recordse=Size(RECORDRESOLUTION);
/*********************************************
function declaration
*********************************************/
void CheckOpenNIError( Status result, string status )
{
if( result != STATUS_OK )
cerr << status << " Error: " << OpenNI::getExtendedError() << endl;
}
void iniOpenNI(void );//initialize OpenNI
void releaseResource(void );//release resources
void OringinCapture(void );//grab frames and save depth/color snapshots on a key press
void RemoveNoiseCapture(void );//grab frames and save depth/color snapshots (denoising variant)
void OringinRecord(void );//plain recording (640,480): depth video (no denoising) and color video
void RemoveNoiseRecord(void );//record the denoised output: raw depth video, denoised depth video and color video
void RemoveNoise(void );//remove noise from the depth image
bool pixelRepaire(int ,int ,int );//repair a hole pixel
bool rangeRepaire(int ,int ,int );//neighbourhood repair
void RepaireTest(void );
void Test(void );

int main( int argc, char** argv )
{
HANDLE hT=CreateThread(NULL,0,ThreadFunction,NULL,0,NULL);
iniOpenNI();
//OringinCapture();
//RemoveNoiseCapture();
//OringinRecord();
RemoveNoiseRecord();
releaseResource();
//RepaireTest();
//Test();
return 0;
}
//
void iniOpenNI()
{
// initialize OpenNI2
result = OpenNI::initialize();
CheckOpenNIError( result, "initialize context" );
//open video device
result = device.open( openni::ANY_DEVICE );
CheckOpenNIError( result, "initialize context" );
//create depth stream
result = oniDepthStream.create( device, openni::SENSOR_DEPTH );
CheckOpenNIError( result, "initialize context" );
//set depth mode
modeDepth.setResolution( RESOLUTION );
modeDepth.setFps( FPS );
modeDepth.setPixelFormat( PIXEL_FORMAT_DEPTH_1_MM );
oniDepthStream.setVideoMode(modeDepth);
// start depth stream
result = oniDepthStream.start();
CheckOpenNIError( result, "initialize context" );
// create color stream
result = oniColorStream.create( device, openni::SENSOR_COLOR );
CheckOpenNIError( result, "initialize context" );
// set color video mode
modeColor.setResolution( RESOLUTION );
modeColor.setFps( FPS );
modeColor.setPixelFormat( PIXEL_FORMAT_RGB888 );
oniColorStream.setVideoMode( modeColor);
// start color stream
result = oniColorStream.start();
CheckOpenNIError( result, "initialize context" );
// set depth and color image registration mode
if( device.isImageRegistrationModeSupported(IMAGE_REGISTRATION_DEPTH_TO_COLOR ) )
{
cout << "support" << endl;
device.setImageRegistrationMode( IMAGE_REGISTRATION_DEPTH_TO_COLOR );
}
}
//
void releaseResource()
{
//OpenNI2 destroy
oniDepthStream.destroy();
oniColorStream.destroy();
device.close();
OpenNI::shutdown();
}
//
void OringinCapture()
{
char DepthFilename[255]; // buffer size assumed; the original value was lost in transcription
char ColorFilename[255];
int n=0;
while(true)
{
if( oniColorStream.readFrame( &oniColorImg ) == STATUS_OK )
{
// convert data into OpenCV type
cv::Mat cvRGBImg( oniColorImg.getHeight(), oniColorImg.getWidth(), CV_8UC3, (void*)oniColorImg.getData() );
cvtColor(cvRGBImg,cvColorImg,CV_RGB2GRAY);
//cv::cvtColor( cvRGBImg, cvColorImg, CV_RGB2BGR );
//colorVideoWriter.write(cvColorImg);
}
if( oniDepthStream.readFrame( &oniDepthImg ) == STATUS_OK )
{
cv::Mat cvRawImg16U( oniDepthImg.getHeight(), oniDepthImg.getWidth(), CV_16UC1, (void*)oniDepthImg.getData() );
cvRawImg16U.convertTo( cvDepthImg, CV_8UC1, 255.0/(oniDepthStream.getMaxPixelValue()));
//【5】
// convert depth image GRAY to BGR
//cv::cvtColor(cvDepthImg,cvDepthImg,CV_GRAY2BGR);
//depthVideoWriter.write(cvDepthImg);
}
cvAdjustDepthImg=Mat(cvDepthImg,Rect(ADRESOLUTION));
cvAdjustColorImg=Mat(cvColorImg,Rect(ADRESOLUTION));
if(_kbhit())//_kbhit()
{
n++;
sprintf(DepthFilename,"depthimage%03d.jpg",n);
sprintf(ColorFilename,"colorimage%03d.jpg",n);
imwrite(DepthFilename,cvAdjustDepthImg);
imwrite(ColorFilename,cvAdjustColorImg);
cout << "已经保存了" << n << "副图片" << endl;
system("PAUSE");
}
imshow("depth",cvAdjustDepthImg);
imshow("color",cvAdjustColorImg);
waitKey(30); // delay value assumed; the original value was lost
}
}
//
void RemoveNoiseCapture()
{
char DepthFilename[255]; // buffer size assumed; the original value was lost
char ColorFilename[255];
int n=0;
while(true)
{
if( oniColorStream.readFrame( &oniColorImg ) == STATUS_OK )
{
// convert data into OpenCV type
cv::Mat cvRGBImg( oniColorImg.getHeight(), oniColorImg.getWidth(), CV_8UC3, (void*)oniColorImg.getData() );
cvtColor(cvRGBImg,cvColorImg,CV_RGB2GRAY);
//cv::cvtColor( cvRGBImg, cvColorImg, CV_RGB2BGR );
//colorVideoWriter.write(cvColorImg);
}
if( oniDepthStream.readFrame( &oniDepthImg ) == STATUS_OK )
{
cv::Mat cvRawImg16U( oniDepthImg.getHeight(), oniDepthImg.getWidth(), CV_16UC1, (void*)oniDepthImg.getData() );
cvRawImg16U.convertTo( cvDepthImg, CV_8UC1, 255.0/(oniDepthStream.getMaxPixelValue()));
//【5】
// convert depth image GRAY to BGR
//cv::cvtColor(cvDepthImg,cvDepthImg,CV_GRAY2BGR);
//depthVideoWriter.write(cvDepthImg);
}
cvAdjustDepthImg=Mat(cvDepthImg,Rect(ADRESOLUTION));
cvAdjustColorImg=Mat(cvColorImg,Rect(ADRESOLUTION));
if(_kbhit())//_kbhit()
{
n++;
sprintf(DepthFilename,"depthimage%03d.jpg",n);
sprintf(ColorFilename,"colorimage%03d.jpg",n);
imwrite(DepthFilename,cvAdjustDepthImg);
imwrite(ColorFilename,cvAdjustColorImg);
cout << "已经保存了" << n << "副图片" << endl;
system("PAUSE");
}
imshow("depth",cvAdjustDepthImg);
imshow("color",cvAdjustColorImg);
waitKey(30); // delay value assumed; the original value was lost
}
}
//
void OringinRecord()
{
int n=0;
int operation;
char DepthFilename[255]; // buffer size assumed; the original value was lost
char ColorFilename[255];
while(true)
{
n++;
sprintf(DepthFilename,"oringindepthvideo%03d.avi",n);
sprintf(ColorFilename,"oringincolorvideo%03d.avi",n);
VideoWriter colorVideoWriter=VideoWriter(ColorFilename,CV_FOURCC('X','V','I','D'),FPS,se);
VideoWriter depthVideoWriter=VideoWriter(DepthFilename,CV_FOURCC('X','V','I','D'),FPS,se);
namedWindow("彩色图",);
namedWindow("深度图",);
while(true)
{
if( oniColorStream.readFrame( &oniColorImg ) == STATUS_OK )
{
// convert data into OpenCV type
cv::Mat cvRGBImg( oniColorImg.getHeight(), oniColorImg.getWidth(), CV_8UC3, (void*)oniColorImg.getData() );
cv::cvtColor( cvRGBImg, cvColorImg, CV_RGB2BGR );
colorVideoWriter.write(cvColorImg);
cv::imshow( "彩色图", cvColorImg );
}
if( oniDepthStream.readFrame( &oniDepthImg ) == STATUS_OK )
{
cv::Mat cvRawImg16U( oniDepthImg.getHeight(), oniDepthImg.getWidth(), CV_16UC1, (void*)oniDepthImg.getData() );
cvRawImg16U.convertTo( cvDepthImg, CV_8UC1, 255.0/(oniDepthStream.getMaxPixelValue()));
//【5】
// convert depth image GRAY to BGR
cv::cvtColor(cvDepthImg,cvDepthImg,CV_GRAY2BGR);
depthVideoWriter.write(cvDepthImg);
cv::imshow( "深度图", cvDepthImg );
}
int key;
key=waitKey(30); // delay value assumed; the original value was lost
if(key==32) // break on the simulated space key (0x20); the original key code was lost
{
break;
}
}
destroyWindow("彩色图");
destroyWindow("深度图");
cout << "已经录制了" << n << "段视频" << endl;
}
}
/*
* === FUNCTION ======================================================================
* Name: removenoiserecord
* Description: records the color video, the denoised gray video, the raw (not denoised) depth video and the denoised depth video
*
* =====================================================================================
*/
void RemoveNoiseRecord()
{
int n=0;
char DepthFilename[255]; // buffer sizes assumed; the original values were lost
char ColorFilename[255];
char removeDepthFilename[255];
char removeColorFilename[255];
Mat t1;
Mat t2;
while(true)
{
n++;
sprintf(removeDepthFilename,"removedepthvideo%03d.avi",n);
sprintf(removeColorFilename,"removecolorvideo%03d.avi",n);
sprintf(DepthFilename,"oringindepthvideo%03d.avi",n); /* 未去噪的深度图像 */
sprintf(ColorFilename,"oringincolorvideo%03d.avi",n); /* 未去噪声的彩色图像 */ VideoWriter removecolorVideoWriter=VideoWriter(removeColorFilename,CV_FOURCC('X','V','I','D'),FPS,recordse);
VideoWriter removedepthVideoWriter=VideoWriter(removeDepthFilename,CV_FOURCC('X','V','I','D'),FPS,recordse); VideoWriter colorVideoWriter=VideoWriter(ColorFilename,CV_FOURCC('X','V','I','D'),FPS,se);
VideoWriter depthVideoWriter=VideoWriter(DepthFilename,CV_FOURCC('X','V','I','D'),FPS,se);//
namedWindow("去噪灰度图",);
namedWindow("去噪深度图",);
namedWindow("未去噪彩色图",);
namedWindow("未去噪深度图",);
while(true)
{
if( oniColorStream.readFrame( &oniColorImg ) == STATUS_OK )
{
// convert data into OpenCV type
cv::Mat cvRGBImg( oniColorImg.getHeight(), oniColorImg.getWidth(), CV_8UC3, (void*)oniColorImg.getData() );
cv::cvtColor( cvRGBImg, cvColorImg2, CV_RGB2BGR );
cvColorImg2=Mat(cvColorImg2,Rect(ADRESOLUTION));
colorVideoWriter.write(cvColorImg2);
cv::imshow( "未去噪彩色图", cvColorImg2 );
cvtColor(cvRGBImg,cvColorImg,CV_RGB2GRAY);
//colorVideoWriter.write(cvColorImg);
}
if( oniDepthStream.readFrame( &oniDepthImg ) == STATUS_OK )
{
cv::Mat cvRawImg16U( oniDepthImg.getHeight(), oniDepthImg.getWidth(), CV_16UC1, (void*)oniDepthImg.getData() );
cvRawImg16U.convertTo( cvDepthImg, CV_8UC1, 255.0/(oniDepthStream.getMaxPixelValue()));
cv::cvtColor(cvDepthImg,cvDepthImg2,CV_GRAY2BGR);
cvDepthImg2=Mat(cvDepthImg2,Rect(ADRESOLUTION));
depthVideoWriter.write(cvDepthImg2);
cv::imshow( "未去噪深度图", cvDepthImg2 );
}
//crop the images to the adjusted size
cvAdjustDepthImg=Mat(cvDepthImg,Rect(ADRESOLUTION));
cvAdjustColorImg=Mat(cvColorImg,Rect(ADRESOLUTION));
RemoveNoise();
cvtColor(cvAdjustColorImg,cvAdjustColorImg,CV_GRAY2BGR);
cvtColor(cvAdjustDepthImg,cvAdjustDepthImg,CV_GRAY2BGR);
removecolorVideoWriter.write(cvAdjustColorImg);
removedepthVideoWriter.write(cvAdjustDepthImg);
imshow("去噪灰度图",cvAdjustColorImg);
imshow("去噪深度图",cvAdjustDepthImg);
int key;
key=waitKey(30); // delay value assumed; the original value was lost
if(key==32) // start a new clip when the simulated space key (0x20) arrives; the original key code was lost
{
break;
}
}
destroyWindow("去噪灰度图");
destroyWindow("去噪深度图");
destroyWindow("未去噪彩色图");
destroyWindow("未去噪深度图"); cout << "已经录制了" << n << "段视频" << endl;
}
}
//
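/*
* RemoveNoise: fills hole pixels (depth <= GRAYTH) in the cropped depth image
* cvAdjustDepthImg, guided by the registered gray image cvAdjustColorImg:
* - in dark regions of the gray image it first tries pixelRepaire(), then copies
*   the nearest valid depth value found straight below or above;
* - along object boundaries (brighter regions) it searches to the right for a
*   pixel whose gray value matches within COLORTH and whose depth is valid;
* - any pixel still unrepaired falls back to pixelRepaire() (color-matched
*   neighbourhood average) and finally rangeRepaire() (plain neighbourhood average).
*/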
void RemoveNoise()
{
clock_t start,finish;
double totaltime=0.0;
start=clock();
for(int j=(cvAdjustDepthImg.rows-1);j>=0;j--)//depthImage.rows: number of rows
{
const uchar* mj=cvAdjustDepthImg.ptr<uchar>(j);
for(int i=(cvAdjustDepthImg.cols-1);i>=0;i--)//depthImage.cols: number of columns
{
//fill holes
if(mj[i]<=GRAYTH)
{
uchar colorpixel=cvAdjustColorImg.at<uchar>(j,i);
bool reResult=false;
//handle dark and non-dark regions of the gray image separately
if(colorpixel<GRAYTH*3) // multiplier assumed; the original value was lost
{
//pixel-level repair
for(int k=1;k<REPAIRERANGE*2;k++) // multiplier assumed; the original value was lost
{
reResult=pixelRepaire(i,j,k);
if(reResult)
break;
}
//go down
if(!reResult)
{
for(int k=;k<=;k++)
{
if((j+k)<)
{
if(cvAdjustDepthImg.at<uchar>(j+k,i)>GRAYTH)
{
cvAdjustDepthImg.at<uchar>(j,i)=cvAdjustDepthImg.at<uchar>(j+k,i);
reResult=true;
break;
}
}
else
{
break;
}
}
}
//go up
if(!reResult)
{
for(int k=;k<=;k++)
{
if((j-k)>=)
{
if(cvAdjustDepthImg.at<uchar>(j-k,i)>GRAYTH)
{
cvAdjustDepthImg.at<uchar>(j,i)=cvAdjustDepthImg.at<uchar>(j-k,i);
reResult=true;
break;
}
}
else
{
break;
}
}
}
}
else
{
//remove noise along object boundaries
for(int k=1;k<30;k++)
{
if((i+k)<590 && !reResult) // 590 = width of the cropped image
{
if(abs(cvAdjustColorImg.at<uchar>(j,i+k)-colorpixel)<=COLORTH && cvAdjustDepthImg.at<uchar>(j,i+k)>GRAYTH)
{
cvAdjustDepthImg.at<uchar>(j,i)=cvAdjustDepthImg.at<uchar>(j,i+k);
reResult=true;
}
}
else
{
break;
}
}
}
//look for a usable pixel around this one (requiring the gray values to match)
if(!reResult)
{
for(int k=1;k<REPAIRERANGE;k++)
{
reResult=pixelRepaire(i,j,k);
if(reResult)
break;
}
}
//look for a usable pixel in a neighbourhood (ignoring the gray image)
if(!reResult)
{
for(int k=1;k<REPAIRERANGE*2;k++) // multiplier assumed; the original value was lost
{
reResult=rangeRepaire(i,j,k);
if(reResult)
break;
}
}
}
}
}
finish=clock();
totaltime=(double)(finish-start)/CLOCKS_PER_SEC;
//cout<<"\n此帧图像的去噪时间为"<< totaltime << "秒!"<< endl;
}
//pixel-level repair
bool pixelRepaire(int i,int j,int repaireRange)
{
uchar colorpixel=cvAdjustColorImg.at<uchar>(j,i);
int x=0;
int y=0;
int n=0;//number of qualifying pixels
int sum=0;//sum of the grey values of the qualifying pixels
for(y=j-repaireRange;y<=j+repaireRange;y++)
{
if(y>= && y<)
{
//上下边界寻找
if(y==(j-repaireRange) || y==(j+repaireRange))
{
for(x=i-repaireRange;x<=i+repaireRange;x++)
{
if(x>= && x<)
{
if(abs(cvAdjustColorImg.at<uchar>(y,x)-colorpixel)<=COLORTH && cvAdjustDepthImg.at<uchar>(y,x)>GRAYTH)
{
n++;
sum=sum+cvAdjustDepthImg.at<uchar>(y,x);
}
}
}
}
//search the left and right columns of the square ring
else
{
//left
x=i-repaireRange;
if(x>=0 && x<590)
{
if(abs(cvAdjustColorImg.at<uchar>(y,x)-colorpixel)<=COLORTH && cvAdjustDepthImg.at<uchar>(y,x)>GRAYTH)
{
n++;
sum=sum+cvAdjustDepthImg.at<uchar>(y,x);
}
}
//right
x=i+repaireRange;
if(x>=0 && x<590)
{
if(abs(cvAdjustColorImg.at<uchar>(y,x)-colorpixel)<=COLORTH && cvAdjustDepthImg.at<uchar>(y,x)>GRAYTH)
{
n++;
sum=sum+cvAdjustDepthImg.at<uchar>(y,x);
}
}
}
}
}
if(n<repaireRange*)
{return false;}
else
{
cvAdjustDepthImg.at<uchar>(j,i)=(uchar)(sum/n);
return true;
}
}
//neighbourhood (range) repair
bool rangeRepaire(int i,int j,int repaireRange)
{
uchar colorpixel=cvAdjustColorImg.at<uchar>(j,i);
int x=0;
int y=0;
int n=0;
int sum=0;
for(y=j-repaireRange;y<=j+repaireRange;y++)
{
if(y>= && y<)
{
for(x=i-repaireRange;x<=i+repaireRange;x++)
{
if(x>= && x<)
{
if(cvAdjustDepthImg.at<uchar>(y,x)>GRAYTH)
{
n++;
sum=sum+cvAdjustDepthImg.at<uchar>(y,x);
}
}
}
}
}
if(n<=repaireRange*)
{
return false;
}
else
{
cvAdjustDepthImg.at<uchar>(j,i)=(uchar)(sum/n);
return true;
}
}
//
void RepaireTest()
{
cvAdjustColorImg=imread("colorimage005.jpg",);
cvAdjustDepthImg=imread("depthimage005.jpg",);
Mat oriDepthImg=cvAdjustDepthImg.clone();
clock_t start,finish;
double totaltime=0.0;
start=clock();
for(int j=(cvAdjustDepthImg.rows-1);j>=0;j--)//depthImage.rows: number of rows
{
const uchar* mj=cvAdjustDepthImg.ptr<uchar>(j);
for(int i=(cvAdjustDepthImg.cols-1);i>=0;i--)//depthImage.cols: number of columns
{
//fill holes
if(mj[i]<=GRAYTH)
{
uchar colorpixel=cvAdjustColorImg.at<uchar>(j,i);
bool reResult=false;
//handle dark and non-dark regions of the gray image separately
if(colorpixel<GRAYTH*3) // multiplier assumed; the original value was lost
{
//pixel-level repair
for(int k=1;k<REPAIRERANGE*2;k++) // multiplier assumed; the original value was lost
{
reResult=pixelRepaire(i,j,k);
if(reResult)
break;
}
//go down
if(!reResult)
{
for(int k=;k<=;k++)
{
if((j+k)<)
{
if(cvAdjustDepthImg.at<uchar>(j+k,i)>GRAYTH)
{
cvAdjustDepthImg.at<uchar>(j,i)=cvAdjustDepthImg.at<uchar>(j+k,i);
reResult=true;
break;
}
}
else
{
break;
}
}
}
//go up
if(!reResult)
{
for(int k=;k<=;k++)
{
if((j-k)>=)
{
if(cvAdjustDepthImg.at<uchar>(j-k,i)>GRAYTH)
{
cvAdjustDepthImg.at<uchar>(j,i)=cvAdjustDepthImg.at<uchar>(j-k,i);
reResult=true;
break;
}
}
else
{
break;
}
}
}
}
else
{
//remove noise along object boundaries
for(int k=1;k<30;k++)
{
if((i+k)<590 && !reResult)
{
if(abs(cvAdjustColorImg.at<uchar>(j,i+k)-colorpixel)<=COLORTH && cvAdjustDepthImg.at<uchar>(j,i+k)>GRAYTH)
{
cvAdjustDepthImg.at<uchar>(j,i)=cvAdjustDepthImg.at<uchar>(j,i+k);
reResult=true;
}
}
else
{
break;
}
}
}
//look for a usable pixel around this one (requiring the gray values to match)
if(!reResult)
{
for(int k=1;k<REPAIRERANGE;k++)
{
reResult=pixelRepaire(i,j,k);
if(reResult)
break;
}
}
//look for a usable pixel in a neighbourhood (ignoring the gray image)
if(!reResult)
{
for(int k=1;k<REPAIRERANGE*2;k++) // multiplier assumed; the original value was lost
{
reResult=rangeRepaire(i,j,k);
if(reResult)
break;
}
}
}
}
}
// for(int j=(cvAdjustDepthImg.rows-1);j>=0;j--)//depthImage.rows,行数
// {
// const uchar* mj=cvAdjustDepthImg.ptr<uchar>(j);
// for(int i=(cvAdjustDepthImg.cols-1);i>=0;i--)//depthImage.cols,列数
// {
// //修复空洞
// if(mj[i]<=GRAYTH)
// {
// uchar colorpixel=cvAdjustColorImg.at<uchar>(j,i);
// bool reResult=false;
// //从右向左寻找可利用点
// for(int k=1;k<30;k++)
// {
// if((i+k)<590 && !reResult)
// {
// if(abs(cvAdjustColorImg.at<uchar>(j,i+k)-colorpixel)<=COLORTH && cvAdjustDepthImg.at<uchar>(j,i+k)>GRAYTH)
// {
// cvAdjustDepthImg.at<uchar>(j,i)=cvAdjustDepthImg.at<uchar>(j,i+k);
// reResult=true;
// }
// }
// else
// {
// break;
// }
// }
// //像素点修复
// if(!reResult)
// {
// for(int k=1;k<REPAIRERANGE;k++)
// {
// reResult=pixelRepaire(i,j,k);
// if(reResult)
// break;
// }
// }
// }
// }
// }
//ccc=cvAdjustDepthImg.clone();
/*
for(int j=(cvAdjustDepthImg.rows-1);j>=0;j--)//depthImage.rows,行数
{
const uchar* mj=cvAdjustDepthImg.ptr<uchar>(j);
for(int i=(cvAdjustDepthImg.cols-1);i>=0;i--)//depthImage.cols,列数
{
//修复空洞
if(mj[i]<=GRAYTH)
{
bool reResult;
for(int k=1;k<=REPAIRERANGE;k++)
{
reResult=rangeRepaire(i,j,k);
if(reResult)
break;
}
}
}
}
*/
//Mat xxx;
//Mat zzz=cvAdjustDepthImg.clone();
//GaussianBlur(cvAdjustDepthImg,xxx,Size(3,3),0,0);
//bilateralFilter(cvAdjustDepthImg,xxx,3,5*2,5*2);
finish=clock();
totaltime=(double)(finish-start)/CLOCKS_PER_SEC;
cout<<"\n此帧图像的去噪时间为"<< totaltime << "秒!"<< endl;
imshow("去噪深度",cvAdjustDepthImg);
imshow("深度",oriDepthImg);
imshow("彩色",cvAdjustColorImg);
waitKey();
//system("PAUSE"); }
//
void Test()
{
//cvColorImg=imread("colorimage003.jpg",0);
//cout << (int)cvColorImg.at<uchar>(369,272) << endl ;
//cout << (int)cvColorImg.at<uchar>(73,394) << endl ;
//cout << (int)cvColorImg.at<uchar>(428,256) << endl ;
//cout << (int)cvColorImg.at<uchar>(326,553) << endl ;
cvAdjustColorImg=imread("colorimage003.jpg",);
cvAdjustDepthImg=imread("depthimage003.jpg",);
Mat t;
addWeighted(cvAdjustDepthImg,0.8,cvAdjustColorImg,0.2,,t);
imshow("",t);
waitKey();
}

One problem I ran into while recording was that the resulting video files were only about 6 KB. It cost me half a day to track down, and it turned out to be a frame-size problem.

If the recorded raw video is also to be cropped to RECORDRESOLUTION (590,440), then the size parameter of colorVideoWriter must not be se; it has to be recordse. Otherwise you get the 6 KB problem, because OpenCV's VideoWriter will not encode frames whose size differs from the size given at construction, so the file never grows beyond its header.
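As a minimal sketch of that fix (reusing the names from the listing above; the 45,40,590,440 numbers come from ADRESOLUTION and RECORDRESOLUTION), the size passed to the VideoWriter constructor has to equal the size of every frame handed to write():

#include "opencv2/core/core.hpp"
#include "opencv2/highgui/highgui.hpp"

// Sketch only: write one cropped frame. Assumes `frame` is a 640x480 BGR image
// taken from the Kinect color or depth stream as in the listing above.
void writeCroppedFrame(cv::VideoWriter& writer, const cv::Mat& frame)
{
    // Crop to 590x440, i.e. Rect(ADRESOLUTION) = Rect(45,40,590,440).
    cv::Mat cropped(frame, cv::Rect(45, 40, 590, 440));
    // The writer must have been constructed with Size(590,440) (recordse).
    // If it was constructed with Size(640,480) (se), this call is silently
    // dropped and the .avi file never grows past its ~6 KB header.
    writer.write(cropped);
}

// Construction site (sketch):
// cv::VideoWriter colorVideoWriter("removecolorvideo001.avi",
//                                  CV_FOURCC('X','V','I','D'), 20, cv::Size(590, 440));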