- Initial environment: Win10 (64-bit) + VS2015 + OpenCV 2.4.13
- Hardware: Kinect v1
- Final environment: Win10 (64-bit) + VS2015 + OpenCV 2.4.13 + Kinect SDK 1.8 + OpenNI 2.2 + NITE 2.2
1. Setting up the OpenNI2 environment (Kinect SDK 1.8 + OpenNI 2.2 + NITE 2.2)
Reference links:
- (1) http://blog.csdn.net/chenxin_130/article/details/8580636
- (2) https://social.msdn.microsoft.com/Forums/en-US/bae80fb0-fce5-468e-a292-fe46381af3e5/how-to-install-openni-22-nite-22-kinect-sdk-18-windows-7-3264-bit?forum=kinectsdk
- (3) http://www.cnblogs.com/tornadomeet/archive/2012/11/16/2772681.html
(1) Do not plug the Kinect into the computer at first! Install Kinect for Windows SDK 1.8 first (official download page), then connect the Kinect. You can verify the installation in Device Manager or by running the SDK samples. (If you connect the Kinect before installing the SDK, Windows automatically installs an incorrect driver, and Device Manager will later show the Kinect only as an "Unknown device". In that case, uninstall the unknown device's driver, unplug the Kinect, install the SDK, and then plug the Kinect back in.)
(2) Once the driver is installed, install OpenNI 2.2 first (64-bit and 32-bit OpenNI 2.2 downloads on CSDN), then NITE 2.2 (64-bit and 32-bit NITE 2.2 downloads on CSDN). The installers can be downloaded from reference link (2) or found on CSDN. Link (2) recommends installing both the 64-bit and the 32-bit versions of OpenNI 2.2 and NITE 2.2 on a 64-bit machine, but when I installed both versions the Kinect could no longer be opened, so install only the 64-bit or only the 32-bit OpenNI and NITE.
(3) Every time you run an exe, copy all files under the OpenNI2\Redist\ folder into the same directory as the exe.
(4) If Kinect initialization fails, try opening Notepad as administrator, open "C:\Program Files (x86)\OpenNI2\Redist\OpenNI2\Drivers\PS1080.ini", and change ";UsbInterface=2" to "UsbInterface=0" (remember to remove the ";" in front of UsbInterface; the relevant ini lines are shown below). (When testing an OpenNI2 sample such as NiViewer, edit the "OpenNI2\Drivers\PS1080.ini" under the sample's own directory instead.)
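For reference, this is the relevant part of PS1080.ini before and after the edit:

; before: the setting is commented out
;UsbInterface=2
; after: uncommented and forced to 0
UsbInterface=0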
(5) Configure the VS development environment; see reference link (1). (A quick code-level check of the whole setup is sketched below.)
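Once everything is installed, a quick way to confirm from code that OpenNI2 can see the Kinect is to enumerate the attached devices. This is a minimal sketch of my own (not from the reference posts), built against the same OpenNI2 include/lib setup as the programs below:

#include <iostream>
#include "OpenNI.h"

int main()
{
    if (openni::OpenNI::initialize() != openni::STATUS_OK)
    {
        std::cerr << openni::OpenNI::getExtendedError() << std::endl;
        return 1;
    }
    // list every device OpenNI2 can open; the Kinect should appear here
    openni::Array<openni::DeviceInfo> deviceList;
    openni::OpenNI::enumerateDevices(&deviceList);
    std::cout << deviceList.getSize() << " device(s) found" << std::endl;
    for (int i = 0; i < deviceList.getSize(); ++i)
        std::cout << deviceList[i].getName() << " @ " << deviceList[i].getUri() << std::endl;
    openni::OpenNI::shutdown();
    return 0;
}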
2. Capturing Kinect v1 images
(1) Capturing color and depth images
Reference link: http://blog.csdn.net/chenxin_130/article/details/8619909
(2) Capturing infrared (IR) images
The code is as follows:
#include <stdlib.h>
#include <iostream>
#include <string>
#include "OpenNI.h"
#include "opencv2/core/core.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp"

using namespace std;
using namespace cv;
using namespace openni;

void CheckOpenNIError(Status result, string status)
{
    if (result != STATUS_OK)
        cerr << status << " Error: " << OpenNI::getExtendedError() << endl;
}

int main(int argc, char** argv)
{
    Status result = STATUS_OK;

    // OpenNI2 image
    VideoFrameRef oniIrImg;

    // OpenCV image
    cv::Mat cvIrImg;
    cv::namedWindow("Ir");
    char key = 0;

    //【1】
    // initialize OpenNI2
    result = OpenNI::initialize();
    CheckOpenNIError(result, "initialize context");

    // open device
    Device device;
    result = device.open(openni::ANY_DEVICE);

    //【2】
    // create IR stream
    VideoStream oniIrStream;
    result = oniIrStream.create(device, openni::SENSOR_IR);

    //【3】
    // set IR video mode
    VideoMode modeIr;
    modeIr.setResolution(640, 480);
    modeIr.setFps(30);
    modeIr.setPixelFormat(PIXEL_FORMAT_GRAY16);
    oniIrStream.setVideoMode(modeIr);

    // start IR stream
    result = oniIrStream.start();

    while (key != 27)  // quit on ESC
    {
        if (oniIrStream.readFrame(&oniIrImg) == STATUS_OK)
        {
            // wrap the 16-bit IR buffer in a cv::Mat, then convert to 8-bit for display
            cv::Mat cvRawImg16U(oniIrImg.getHeight(), oniIrImg.getWidth(), CV_16UC1, (void*)oniIrImg.getData());
            cvRawImg16U.convertTo(cvIrImg, CV_8U);
            cv::imshow("Ir", cvIrImg);
        }
        key = cv::waitKey(30);
    }

    // OpenCV cleanup
    cv::destroyWindow("Ir");

    // OpenNI2 cleanup
    oniIrStream.destroy();
    device.close();
    OpenNI::shutdown();
    return 0;
}
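One caveat about the conversion above: convertTo(cvIrImg, CV_8U) without a scale factor saturates every 16-bit value above 255, so bright scenes wash out. As far as I know the Kinect v1 IR stream effectively carries about 10 bits of data, so a scaled or normalized conversion (a sketch under that assumption, replacing the convertTo line above) usually looks better:

// assuming ~10-bit IR values (0-1023), scale into the full 8-bit range
cvRawImg16U.convertTo(cvIrImg, CV_8U, 255.0 / 1023.0);
// or stretch whatever range the current frame actually uses
cv::normalize(cvRawImg16U, cvIrImg, 0, 255, cv::NORM_MINMAX, CV_8U);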
3. Running the program
Note: assuming the project lives at "..\Visual Studio 2015\Projects\GetKinectIRImageProject\" and the program is built in Debug x86 mode, copy all files under "C:\Program Files (x86)\OpenNI2\Redist" into "..\Visual Studio 2015\Projects\GetKinectIRImageProject\Debug\".
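Copying the Redist files by hand after every rebuild gets tedious; a post-build event (Project Properties → Build Events → Post-Build Event) can automate it. A sketch, assuming the default OpenNI2 install path:

xcopy "C:\Program Files (x86)\OpenNI2\Redist\*" "$(TargetDir)" /S /Y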

4. Reading and saving color (RGB) and infrared (IR) images
/*************************
OpenNI2 Deep, Color and Fusion Image
Author: Xin Chen, 2013.2
Blog: http://blog.csdn.net/chenxin_130
*************************/
/*************************
Added IR image capture and image saving
Author: LuHaiyan, 2017.2
Blog: http://blog.csdn.net/u011988573
*************************/
// otherwise this error occurs: "This function or variable may be unsafe. Consider using localtime_s instead"
#pragma warning(disable:4996)
#include <time.h>   // timestamp for the output directory name
#include <direct.h> // _mkdir, to create the output directory
#include <stdlib.h>
#include <iostream>
#include <string>
#include "OpenNI.h"
#include "opencv2/core/core.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp"
using namespace std;
using namespace cv;
using namespace openni;
/*
As noted in the blog at http://www.cnblogs.com/tornadomeet/archive/2012/11/16/2772681.html,
Microsoft's officially recommended working range for the Kinect is 1220mm (4') to 3810mm (12.5').
In his post on Kinect + OpenNI depth values, heresy measured that the depth precision
is roughly 3mm at a distance of 1.0m and roughly 3cm at 3.0m, so within the recommended
range the precision is around 3mm at 1.2m and worse than 3cm at 3.6m;
in short, the farther the object, the lower the depth precision.
Also note that the depth value (the z coordinate) obtained through OpenNI is in millimeters.
A depth value is normally represented with 12 bits (13 bits would actually be possible),
i.e. a maximum of 4095, corresponding to 4.095m. To stretch captured depth data into a
grayscale image, multiply by a factor of 255/4095 (usually approximated as 256/4096 for
speed, so each step of 1 in the converted gray value corresponds to a 16mm change in depth;
at around 1m, where the raw precision is about 3mm, this normalization degrades the
precision even further). A depth value of 0 means no depth could be detected at that pixel.
*/
#define DEPTH_SCALE_FACTOR 256./4096.
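// Worked example: a pixel measured at 2000mm maps to 2000 * 256/4096 = 125 in the
// 8-bit grayscale image, and the whole 0..4095mm range compresses into 0..255.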
string getTime()
{
    time_t rawtime;
    struct tm* timeinfo;
    char buffer[80];  // large enough for "%Y_%m_%d_%H_%M_%S"
    time(&rawtime);
    timeinfo = localtime(&rawtime);
    strftime(buffer, sizeof(buffer), "%Y_%m_%d_%H_%M_%S", timeinfo);
    string time_now = buffer;
    cout << time_now << endl;
    return time_now;
}
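// A safer variant of the conversion above (my sketch, not from the original post)
// uses localtime_s and makes the #pragma warning(disable:4996) unnecessary:
//     struct tm timeinfo_s;
//     localtime_s(&timeinfo_s, &rawtime);
//     strftime(buffer, sizeof(buffer), "%Y_%m_%d_%H_%M_%S", &timeinfo_s);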
void CheckOpenNIError(Status result, string status)
{
    if (result != STATUS_OK)
        cerr << status << " Error: " << OpenNI::getExtendedError() << endl;
}
int main(int argc, char** argv)
{
    Status result = STATUS_OK;

    // OpenNI2 images
    //VideoFrameRef oniDepthImg;
    VideoFrameRef oniColorImg;
    VideoFrameRef oniIrImg;

    // OpenCV images
    //cv::Mat cvDepthImg;
    cv::Mat cvBGRImg;
    //cv::Mat cvFusionImg;
    cv::Mat cvIrImg;
    //cv::namedWindow("depth");
    cv::namedWindow("rgb");
    //cv::namedWindow("fusion");
    cv::namedWindow("Ir");

    //【1】
    // initialize OpenNI2
    result = OpenNI::initialize();
    CheckOpenNIError(result, "initialize context");

    // open device
    Device device;
    result = device.open(openni::ANY_DEVICE);
    //【2】
    // create depth stream
    //VideoStream oniDepthStream;
    //result = oniDepthStream.create(device, openni::SENSOR_DEPTH);

    //【3】
    // set depth video mode
    //VideoMode modeDepth;
    //modeDepth.setResolution(640, 480);
    //modeDepth.setFps(30);
    //modeDepth.setPixelFormat(PIXEL_FORMAT_DEPTH_1_MM);
    //oniDepthStream.setVideoMode(modeDepth);

    // start depth stream
    //result = oniDepthStream.start();
    // create IR stream
    VideoStream oniIrStream;
    result = oniIrStream.create(device, openni::SENSOR_IR);

    //【3】
    // set IR video mode
    VideoMode modeIr;
    modeIr.setResolution(640, 480);
    modeIr.setFps(30);
    modeIr.setPixelFormat(PIXEL_FORMAT_GRAY16);
    oniIrStream.setVideoMode(modeIr);

    // start IR stream (deferred: the IR stream is started on demand when saving)
    //result = oniIrStream.start();

    // create color stream
    VideoStream oniColorStream;
    result = oniColorStream.create(device, openni::SENSOR_COLOR);

    // set color video mode
    VideoMode modeColor;
    modeColor.setResolution(640, 480);
    modeColor.setFps(30);
    modeColor.setPixelFormat(PIXEL_FORMAT_RGB888);
    oniColorStream.setVideoMode(modeColor);
    //【4】
    // set depth and color image registration mode
    /*if (device.isImageRegistrationModeSupported(IMAGE_REGISTRATION_DEPTH_TO_COLOR))
    {
        device.setImageRegistrationMode(IMAGE_REGISTRATION_DEPTH_TO_COLOR);
    }*/

    // start color stream
    result = oniColorStream.start();

    // mkdir: create a time-stamped output directory
    /*string path_of_all = "F:\\Data_kinect\\test\\";
    string path_of_rgb_d = path_of_all + getTime();
    _mkdir(const_cast<char*>(path_of_rgb_d.c_str()));*/
    char pathOfAll[256] = "F:\\Data_kinect\\test\\";
    strcat(pathOfAll, getTime().c_str());
    strcat(pathOfAll, "\\");
    _mkdir(pathOfAll);
    char path[256];
    char path_rgb[64];
    char path_ir[64];
    //string path_of_rgb = path_of_rgb_d + "\\rgb_image";
    //string path_of_depth = path_of_rgb_d + "\\depth_image";
    //string path_of_ir = path_of_rgb_d + "\\ir_image";
    //string path_of_fusion = path_of_rgb_d + "\\fusion_image";
    //_mkdir(const_cast<char*>(path_of_rgb.c_str()));
    //_mkdir(const_cast<char*>(path_of_depth.c_str()));
    //_mkdir(const_cast<char*>(path_of_fusion.c_str()));
    //_mkdir(const_cast<char*>(path_of_ir.c_str()));

    // image index
    int frame_index = 0;
    char key = 'a';
    while (key != 27)  // quit on ESC
    {
        // read frame
        if (oniColorStream.readFrame(&oniColorImg) == STATUS_OK)
        {
            // convert data into OpenCV type
            cv::Mat cvRGBImg(oniColorImg.getHeight(), oniColorImg.getWidth(), CV_8UC3, (void*)oniColorImg.getData());
            cv::cvtColor(cvRGBImg, cvBGRImg, CV_RGB2BGR);
            flip(cvBGRImg, cvBGRImg, 1);  // mirror horizontally
            cv::imshow("rgb", cvBGRImg);
        }
        //if (oniDepthStream.readFrame(&oniDepthImg) == STATUS_OK)
        //{
        //    cv::Mat cvRawImg16U(oniDepthImg.getHeight(), oniDepthImg.getWidth(), CV_16UC1, (void*)oniDepthImg.getData());
        //    //cvRawImg16U.convertTo(cvDepthImg, CV_8U, 255.0 / (oniDepthStream.getMaxPixelValue()));
        //    cvRawImg16U.convertTo(cvDepthImg, CV_8U, DEPTH_SCALE_FACTOR);
        //    //cvRawImg16U.convertTo(cvDepthImg, CV_8U);
        //    //inverse depth color
        //    /*for (int i = 0; i < cvDepthImg.rows; i++)
        //    {
        //        uchar* pData = cvDepthImg.ptr<uchar>(i);
        //        for (int j = 0; j < cvDepthImg.cols; j++)
        //        {
        //            pData[j] = cv::saturate_cast<uchar>(255 - pData[j]);
        //        }
        //    }*/
        //    //【5】
        //    // convert depth image GRAY to BGR
        //    //cv::cvtColor(cvDepthImg, cvFusionImg, CV_GRAY2BGR);
        //    cv::imshow("depth", cvDepthImg);
        //}

        //【6】
        //cv::addWeighted(cvBGRImg, 0.5, cvFusionImg, 1.0, 0, cvFusionImg);
        //cv::imshow("fusion", cvFusionImg);
        key = waitKey(30);
        if (key == 's') {
            // save images to the output directory
            /*stringstream stream;
            stream << frame_index;
            string frame_index_str = stream.str();*/
            /*string rgb_path = path_of_rgb + "\\";
            rgb_path += frame_index_str + ".jpg";*/
            /*string depth_path = path_of_depth + "\\";
            depth_path += frame_index_str + ".jpg";*/
            //string fusion_path = path_of_fusion + "\\";
            //fusion_path += frame_index_str + ".jpg";
            /*string ir_path = path_of_ir + "\\";
            ir_path += frame_index_str + ".jpg";*/
            strcpy(path, pathOfAll);
            sprintf(path_rgb, "%04d_rgb.jpg", frame_index);
            strcat(path, path_rgb);
            cout << "rgb:" << path << endl;
            imwrite(path, cvBGRImg);
            //imwrite(depth_path, cvDepthImg);
            //imwrite(fusion_path, cvFusionImg);

            // switch streams: stop color, grab one IR frame (see the note after this listing)
            oniColorStream.stop();
            oniIrStream.start();
            if (oniIrStream.readFrame(&oniIrImg) == STATUS_OK)
            {
                cv::Mat cvRawImg16U(oniIrImg.getHeight(), oniIrImg.getWidth(), CV_16UC1, (void*)oniIrImg.getData());
                cvRawImg16U.convertTo(cvIrImg, CV_8U);
                flip(cvIrImg, cvIrImg, 1);  // mirror horizontally to match the RGB image
                cv::imshow("Ir", cvIrImg);
            }
            strcpy(path, pathOfAll);
            sprintf(path_ir, "%04d_ir.jpg", frame_index);
            strcat(path, path_ir);
            cout << "ir:" << path << endl;
            imwrite(path, cvIrImg);
            cout << "Index " << frame_index << " rgb and ir image saved." << endl;
            oniIrStream.stop();
            oniColorStream.start();
            frame_index++;
        }
    }
    // OpenCV cleanup
    //cv::destroyWindow("depth");
    cv::destroyWindow("rgb");
    //cv::destroyWindow("fusion");
    cv::destroyWindow("Ir");

    // OpenNI2 cleanup
    //oniDepthStream.destroy();
    oniColorStream.destroy();
    oniIrStream.destroy();
    device.close();
    OpenNI::shutdown();
    return 0;
}
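A note on the save logic: on the Kinect v1 (at least through OpenNI2) the color and IR streams cannot run at the same time, which is why the program previews RGB continuously and, only when 's' is pressed, stops the color stream, starts the IR stream, grabs a single IR frame, saves both images, and then switches back. A side effect is that each saved RGB/IR pair is captured a short moment apart rather than simultaneously.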