This post wraps the operations of the Astra S depth camera one more time (here two cameras each produce an RGB image and a point cloud). The project files and the third-party library files can be downloaded from my resources.

Project include directories:

// change these to match your own installation paths
D:\Program Files\ThirdParty\OpenNI2\Include
D:\Program Files\ThirdParty\OpenCV241\Include
D:\Program Files\ThirdParty\Eigen3.2.5

Library directories:

// change these to match your own installation paths
D:\Program Files\ThirdParty\OpenNI2\Lib
D:\Program Files\ThirdParty\OpenCV241\Lib

Additional dependencies:

glut32.lib
OpenNI2.lib
opencv_calib3d2410.lib
opencv_core2410.lib
opencv_highgui2410.lib

Header file (CCameraOperate.h):

#include <OpenNI.h>
#include <iostream>
#include <ctime>
#include "opencv/highgui.h"
#include "opencv2/imgproc/imgproc_c.h"
#include "opencv2/core/core.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/calib3d/calib3d.hpp"
#include <fstream>
#include <omp.h>
#include <algorithm>   // std::min (used in FrameToRGB)
#include <cstring>     // memcpy
#include <cstdio>      // printf

#define SAMPLE_READ_WAIT_TIMEOUT 2000 // 2000 ms

using namespace std;
using namespace cv;
using namespace openni;

typedef struct {
    uint8_t b;   /* Blue value of this pixel.  */
    uint8_t g;   /* Green value of this pixel. */
    uint8_t r;   /* Red value of this pixel.   */
} OniBGR888Pixel;

class CameraOperate {
private:
    openni::Device device1;
    openni::Device device2;
    VideoStream depth1, color1;
    openni::VideoFrameRef m_depth_frame1;
    openni::VideoFrameRef m_RGB_frame1;
    openni::VideoFrameRef m_depth_frame2;
    openni::VideoFrameRef m_RGB_frame2;
    openni::VideoFrameRef frame1;
    VideoStream depth2, color2;
    openni::VideoFrameRef frame2;
    int readyStream;
    float vertex[1024][1280][3];
    int mHeight;
    int mWidth;

public:
    CameraOperate();
    ~CameraOperate();
    int InitializeDevice();                                   // initialize the OpenNI environment
    int OpenDevice(const char* uri, int Num);                 // open a device
    int CloseDevice(int Num);                                 // close a device
    int CreateRGBStream(int Num);                             // create the RGB stream
    int CreateDepthStream(int Num);                           // create the depth stream
    int StartRGBStream(int Num);                              // start the RGB stream
    int SatrtDepthStream(int Num);                            // start the depth stream
    int StopRGBStream(int Num);                               // stop the RGB stream
    int StopDepthStream(int Num);                             // stop the depth stream
    int DestoryRGBStream();                                   // stop and destroy the RGB stream
    int DestoryDepthStream(int Num);                          // stop and destroy the depth stream
    int ReadRGBDepth_Frame(Mat& color, Mat& depth, int Num);  // read one RGB frame and one depth frame
    void ConvertProjectiveToWorld(int u, int v, int z, float& px, float& py, float& pz);
    void TransformPointToPoint(float dst[3], const float src[3]);
    void ConvertWorldToProjective(float x, float y, float z, float& u, float& v);
    int FrameToDepth(const VideoFrameRef& frame, Mat& depth);
    void MappingDepth2Color(cv::Mat &src, cv::Mat &dst);
    int MatToPointCloud(Mat depthMap, Mat colorMap);
    int SavePointCloud(const char* fileName);
    int FrameToRGB(const VideoFrameRef &frame, Mat &color);
};
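
For reference, a minimal single-camera use of this class looks roughly like the sketch below (my own example, not part of the original project; it assumes a 640x480 mode, uses openni::ANY_DEVICE as the URI, and trims error handling). The full two-camera loop is in the main program further down.

// Hypothetical single-camera sketch built on the class above
#include "CCameraOperate.h"

int main() {
    CameraOperate camera;
    camera.InitializeDevice();                    // initialize OpenNI
    camera.OpenDevice(openni::ANY_DEVICE, 1);     // open the first device found
    camera.CreateRGBStream(1);
    camera.CreateDepthStream(1);
    camera.StartRGBStream(1);
    camera.SatrtDepthStream(1);

    Mat color(480, 640, CV_8UC3);
    Mat depth(480, 640, CV_16UC1);
    Mat registered(480, 640, CV_16UC1);
    if (camera.ReadRGBDepth_Frame(color, depth, 1) == 0) {
        camera.MappingDepth2Color(depth, registered);   // register depth to the color view
        camera.MatToPointCloud(registered, color);
        camera.SavePointCloud("PointCloud.txt");
        imwrite("Image.jpg", color);
    }

    camera.StopDepthStream(1);
    camera.StopRGBStream(1);
    camera.DestoryDepthStream(1);
    camera.DestoryRGBStream();
    camera.CloseDevice(1);
    openni::OpenNI::shutdown();
    return 0;
}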

Implementation file (.cpp):

#include "CCameraOperate.h"//构造函数CameraOperate::CameraOperate()
{readyStream = -1;mHeight = 0;mWidth = 0;
}//析构函数CameraOperate::~CameraOperate(){}//初始化函数int CameraOperate::InitializeDevice(){openni::Status rc = openni::STATUS_OK;rc = openni::OpenNI::initialize();if (rc != openni::STATUS_OK){printf("Initialize failed\n%s\n", openni::OpenNI::getExtendedError());return 1;}return 0;}//开启设备int CameraOperate::OpenDevice(const char* uri,int Num){if (Num == 1){openni::Status rc = openni::STATUS_OK;rc = device1.open(uri);if (rc != openni::STATUS_OK){printf("Couldn't open device1\n%s\n", openni::OpenNI::getExtendedError());return 2;}}else{openni::Status rc = openni::STATUS_OK;rc = device2.open(uri);if (rc != openni::STATUS_OK){printf("Couldn't open device2\n%s\n", openni::OpenNI::getExtendedError());return 2;}}return 0;}//关闭设备int  CameraOperate::CloseDevice(int Num){if (Num == 1){device1.close();}else{device2.close();}return 0;}//创建RGB_IR流int CameraOperate::CreateRGBStream(int Num){if (Num == 1){openni::Status rc = openni::STATUS_OK;rc = color1.create(device1, openni::SENSOR_COLOR);if (rc != openni::STATUS_OK){printf("Couldn't Create RGB Stream\n%s\n", openni::OpenNI::getExtendedError());return 3;}}else{openni::Status rc = openni::STATUS_OK;rc = color2.create(device2, openni::SENSOR_COLOR);if (rc != openni::STATUS_OK){printf("Couldn't Create RGB Stream\n%s\n", openni::OpenNI::getExtendedError());return 3;}}return 0;}int CameraOperate::CreateDepthStream(int Num){if (Num == 1){openni::Status rc = openni::STATUS_OK;rc = depth1.create(device1, openni::SENSOR_DEPTH);if (rc != openni::STATUS_OK){printf("Couldn't Create Depth Stream\n%s\n", openni::OpenNI::getExtendedError());return 4;}}else{openni::Status rc = openni::STATUS_OK;rc = depth2.create(device2, openni::SENSOR_DEPTH);if (rc != openni::STATUS_OK){printf("Couldn't Create Depth Stream\n%s\n", openni::OpenNI::getExtendedError());return 4;}}return 0;}int CameraOperate::StartRGBStream(int Num){if (Num==1){openni::Status rc = color1.start();if (rc != openni::STATUS_OK){printf("Couldn't start the rgb stream\n%s\n", openni::OpenNI::getExtendedError());return 5;}}else{openni::Status rc = color2.start();if (rc != openni::STATUS_OK){printf("Couldn't start the rgb stream\n%s\n", openni::OpenNI::getExtendedError());return 5;}}return 0;}int CameraOperate::SatrtDepthStream(int Num){if (Num == 1){openni::Status rc = openni::STATUS_OK;if (!depth1.isValid()){int ba = 0;return -1;}rc = depth1.start();if (rc != openni::STATUS_OK){printf("Couldn't start the depth stream\n%s\n", openni::OpenNI::getExtendedError());return 6;}}else{openni::Status rc = openni::STATUS_OK;if (!depth2.isValid()){int ba = 0;return -1;}rc = depth2.start();if (rc != openni::STATUS_OK){printf("Couldn't start the depth stream\n%s\n", openni::OpenNI::getExtendedError());return 6;}}return 0;}//停止RGBStreamint CameraOperate::StopRGBStream(int Num){if (Num == 1){color1.stop();}else{color2.stop();}return 0;}//停止DepthStreamint CameraOperate::StopDepthStream(int Num){if (Num == 1){if (!depth1.isValid()){int ba = 0;return -1;}depth1.stop();}else{if (!depth2.isValid()){int ba = 0;return -1;}depth2.stop();}return 0;}//销毁RGBStreamint CameraOperate::DestoryRGBStream(){color1.destroy();return 0;}//销毁DepthStreamint CameraOperate::DestoryDepthStream(int Num){if (Num == 1){depth1.destroy();}else{depth2.destroy();}return 0;}//读取RGB、Depth一帧的数据并转换成为Mat形式的Color和Depth矩阵int CameraOperate::ReadRGBDepth_Frame(Mat& BGRcolor1, Mat& depth3,int Num){if (Num==1){openni::Status rc = openni::STATUS_OK;Mat  cvRGBImg1(480, 640, CV_8UC3);VideoStream* streams_Depth1 =&depth1 ;int readyStream_Depth1 = -1;rc = 
OpenNI::waitForAnyStream(&streams_Depth1, 1, &readyStream_Depth1, SAMPLE_READ_WAIT_TIMEOUT);if (rc == STATUS_OK){depth1.readFrame(&m_depth_frame1);//color1.readFrame(&frame1);if (m_depth_frame1.isValid()){FrameToDepth(m_depth_frame1, depth3);}}VideoStream* streams_RGB1 = &color1;int readyStream_RGB1 = -1;rc = OpenNI::waitForAnyStream(&streams_RGB1, 1, &readyStream_RGB1, SAMPLE_READ_WAIT_TIMEOUT);if (rc == STATUS_OK){color1.readFrame(&m_RGB_frame1);//color1.readFrame(&frame1);if (m_RGB_frame1.isValid()){FrameToRGB(m_RGB_frame1, BGRcolor1);}}}else{openni::Status rc = openni::STATUS_OK;Mat  cvRGBImg2(480, 640, CV_8UC3);VideoStream* streams_Depth2 = &depth2;int readyStream_Depth2 = -1;rc = OpenNI::waitForAnyStream(&streams_Depth2, 1, &readyStream_Depth2, SAMPLE_READ_WAIT_TIMEOUT);if (rc == STATUS_OK){depth2.readFrame(&m_depth_frame2);//color1.readFrame(&frame1);if (m_depth_frame2.isValid()){FrameToDepth(m_depth_frame2, depth3);}}VideoStream* streams_RGB2 = &color2;int readyStream_RGB2 = -1;rc = OpenNI::waitForAnyStream(&streams_RGB2, 1, &readyStream_RGB2, SAMPLE_READ_WAIT_TIMEOUT);if (rc == STATUS_OK){color2.readFrame(&m_RGB_frame2);if (m_RGB_frame2.isValid()){FrameToRGB(m_RGB_frame2, BGRcolor1);}}}return 0;}void CameraOperate::ConvertProjectiveToWorld(int u, int v, int z, float& px, float& py, float& pz){float ifx, ify;ifx = 1. / 550.31324;ify = 1. / 530.95258;float tx = (u - 308.68595) * ifx;float ty = (v - 239.44117) * ify;float x0 = tx;float y0 = ty;for (int j = 0; j < 5; j++){double r2 = tx * tx + ty * ty;double icdist = (1) / (1 + ((0.00000000 * r2 + 0.31352)*r2 - 0.07364)*r2);double deltaX = 2 * (-0.01153) * tx*ty + (-0.00525) * (r2 + 2 * tx*tx);double deltaY = (-0.01153) * (r2 + 2 * ty*ty) + 2 * (-0.00525) * tx*ty;tx = (x0 - deltaX)*icdist;ty = (y0 - deltaY)*icdist;}px = z * tx;py = z * ty;pz = z;}void CameraOperate::TransformPointToPoint(float dst[3], const float src[3]){dst[0] = 0.99990161 * src[0] + -0.00319713 * src[1] + -0.01365809 * src[2] + 0.02504946;dst[1] = 0.00327611 * src[0] + 0.99997802 * src[1] + 0.00576428 * src[2] + 0.00006017;dst[2] = 0.01363936 * src[0] + -0.00580846 * src[1] + 0.99989011 * src[2] + -0.00260149;}void CameraOperate::ConvertWorldToProjective(float x, float y, float z, float& u, float& v){float tx = x / z;float ty = y / z;float r2 = tx*tx + ty*ty;float f = 1 + -0.01595141 * r2 + 0.02629677 * r2*r2 + 0.00000000 * r2*r2*r2;tx *= f;ty *= f;float dx = tx + 2 * 0.00000000 * tx*ty + 0.00000000 * (r2 + 2 * tx*tx);float dy = ty + 2 * 0.00000000 * tx*ty + 0.00000000 * (r2 + 2 * ty*ty);tx = dx;ty = dy;u = tx * 524.18651391 + 320.79175117;v = ty * 486.50458757 + 240.99268524;}int CameraOperate::FrameToDepth(const VideoFrameRef& frame, Mat& depth){uint16_t *pPixel;for (int y = 0; y < 480; y++){pPixel = ((uint16_t*)((char*)frame.getData() + ((int)(y)* frame.getStrideInBytes())));uint16_t* data = (uint16_t*)depth.ptr<uchar>(y);for (int x = 0; x < 640; x++){*data++ = (*pPixel);pPixel++;}}return 0;}void CameraOperate::MappingDepth2Color(cv::Mat &src, cv::Mat &dst){double  z;float pixel[2];float point[3], to_point[3];uint16_t u, v, d;uint16_t u_rgb = 0, v_rgb = 0;cv::Mat newdepth(dst.rows, dst.cols, CV_16UC1, cv::Scalar(0));for (v = 0; v < src.rows; v++){for (u = 0; u < src.cols; u++){d = src.at<uint16_t>(v, u);z = (double)d;ConvertProjectiveToWorld(u, v, z, point[0], point[1], point[2]);TransformPointToPoint(to_point, point);ConvertWorldToProjective(to_point[0], to_point[1], to_point[2], pixel[0], pixel[1]);u_rgb = (uint16_t)(pixel[0]);v_rgb = (uint16_t)(pixel[1]);if 
(u_rgb < 0 || u_rgb >= newdepth.cols || v_rgb < 0 || v_rgb >= newdepth.rows) continue;uint16_t *val = (uint16_t *)newdepth.ptr<uchar>(v_rgb)+u_rgb;*val = d;}}dst = newdepth;}int CameraOperate::MatToPointCloud(Mat depthMap, Mat colorMap){int depthW = depthMap.cols;int depthH = depthMap.rows;mWidth = depthW;mHeight = depthH;// update vertex
#pragma omp parallel forfor (int v = 0; v < depthH; v++){unsigned short* pDepth = (unsigned short*)depthMap.ptr<uchar>(v);for (int u = 0; u < depthW; u++){int z = *pDepth++;float px = 0;float py = 0;float pz = 0;ConvertProjectiveToWorld(u, v, z, px, py, pz);            //调用ConvertProjectiveToWorld函数把深度转换成世界坐标系vertex[v][u][0] = px;vertex[v][u][1] = py;vertex[v][u][2] = (float)z;}}return 0;}int CameraOperate::SavePointCloud(const char* fileName){ofstream ouF;ouF.open(fileName, ofstream::out);if (!ouF){cerr << "failed to open the file : " << fileName << endl;return -1;}/*ouF << "ply" << std::endl;ouF << "format ascii 1.0" << std::endl;ouF << "comment made by Orbbec " << std::endl;ouF << "comment Orbbec Co.,Ltd." << std::endl;ouF << "element vertex " << mWidth * mHeight << std::endl;ouF << "property float32 x" << std::endl;ouF << "property float32 y" << std::endl;ouF << "property float32 z" << std::endl;ouF << "property uint8 red" << std::endl;ouF << "property uint8 green" << std::endl;ouF << "property uint8 blue" << std::endl;ouF << "element face 0" << std::endl;ouF << "property list uint8 int32 vertex_index" << std::endl;ouF << "end_header" << std::endl;*/for (int v = 0; v < mHeight; v++){for (int u = 0; u < mWidth; u++){float x = vertex[v][u][0];float y = vertex[v][u][1];float z = vertex[v][u][2];ouF << x << " " << y << " " << z << std::endl;//ouF << x << " " << y << " " << z << " " << r << " " << g << " " << b << std::endl;}}ouF.close();return 0;}int CameraOperate::FrameToRGB(const VideoFrameRef &frame, Mat &color){double resizeFactor = std::min((640 / (double)frame.getWidth()), (480 / (double)frame.getHeight()));unsigned int texture_x = (unsigned int)(640 - (resizeFactor * frame.getWidth())) / 2;unsigned int texture_y = (unsigned int)(480 - (resizeFactor * frame.getHeight())) / 2;OniBGR888Pixel ss; for (unsigned int y = 0; y < (480 - 2 * texture_y); ++y){uint8_t* data = (uint8_t*)color.ptr<uchar>(y);for (unsigned int x = 0; x < (640 - 2 * texture_x); ++x){OniRGB888Pixel* streamPixel = (OniRGB888Pixel*)((char*)frame.getData() + ((int)(y / resizeFactor) * frame.getStrideInBytes())) + (int)(x / resizeFactor);ss.b = streamPixel->b;ss.g = streamPixel->g;ss.r = streamPixel->r;memcpy(data, &ss, 3);data = data + 3;}}return 0;}
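
The depth-to-color registration above chains three steps: ConvertProjectiveToWorld back-projects a depth pixel with the depth camera's intrinsics (iterating five times to undo lens distortion), TransformPointToPoint applies the depth-to-color rotation and translation, and ConvertWorldToProjective reprojects with the color camera's intrinsics. Ignoring distortion, the geometry reduces to the sketch below; the fx/fy/cx/cy, R, and t parameters are placeholders standing in for the hard-coded calibration values used above.

// Simplified, distortion-free version of the mapping chain; placeholder calibration values,
// shown only to make the geometry explicit (not part of the original project).
void RegisterDepthPixel(float u, float v, float z,
                        float fx_d, float fy_d, float cx_d, float cy_d,   // depth intrinsics
                        const float R[9], const float t[3],               // depth->color extrinsics
                        float fx_c, float fy_c, float cx_c, float cy_c,   // color intrinsics
                        float& u_c, float& v_c) {
    // back-project into the depth camera frame
    float X = (u - cx_d) / fx_d * z;
    float Y = (v - cy_d) / fy_d * z;
    float Z = z;
    // rotate and translate into the color camera frame
    float Xc = R[0] * X + R[1] * Y + R[2] * Z + t[0];
    float Yc = R[3] * X + R[4] * Y + R[5] * Z + t[1];
    float Zc = R[6] * X + R[7] * Y + R[8] * Z + t[2];
    // reproject with the color intrinsics
    u_c = fx_c * Xc / Zc + cx_c;
    v_c = fy_c * Yc / Zc + cy_c;
}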

Main program:

/*****************************************************************************
*                                                                            *
*  OpenNI 2.x Alpha                                                          *
*  Copyright (C) 2012 PrimeSense Ltd.                                        *
*                                                                            *
*  This file is part of OpenNI.                                              *
*                                                                            *
*  Licensed under the Apache License, Version 2.0 (the "License");           *
*  you may not use this file except in compliance with the License.          *
*  You may obtain a copy of the License at                                   *
*                                                                            *
*      http://www.apache.org/licenses/LICENSE-2.0                            *
*                                                                            *
*  Unless required by applicable law or agreed to in writing, software       *
*  distributed under the License is distributed on an "AS IS" BASIS,         *
*  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  *
*  See the License for the specific language governing permissions and       *
*  limitations under the License.                                            *
*                                                                            *
*****************************************************************************/
#include <OpenNI.h>
#include <iostream>
#include <ctime>
#include "opencv/highgui.h"
#include "opencv2/imgproc/imgproc_c.h"
#include "opencv2/core/core.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/calib3d/calib3d.hpp"
#include <fstream>
#include <omp.h>
#include "CCameraOperate.h"#define SAMPLE_READ_WAIT_TIMEOUT 2000 //2000ms
#define REC_WIDTH   640;
#define REC_HEIGHT  480;
#define CONFIG_INI          "./config.ini"
#define CAMERA_PARAMS_INI   "./camera_params.ini"using namespace openni;
using namespace std;
using namespace cv;void WriteLog(double RunTime, double NotStartStream1, double WaitFailed1, double NotStartStream2, double WaitFailed2, double RunNum)
{SYSTEMTIME st;GetLocalTime(&st);FILE *fp;fp = fopen("log.txt", "at");fprintf(fp, "MyLogInfo: %d:%d:%d:%d\n ", st.wHour, st.wMinute, st.wSecond, st.wMilliseconds);fprintf(fp, "运行时间:%f\n", RunTime);fprintf(fp, "运行次数:%f \n", RunNum);fprintf(fp, "设备1流开始失败次数:%f  设备1等待失败次数:%f \n", NotStartStream1, WaitFailed1);fprintf(fp, "设备2流开始失败次数:%f  设备2等待失败次数:%f \n", NotStartStream2, WaitFailed2);fclose(fp);
}int main(int argc, char** argv)
{openni::Status rc = openni::STATUS_OK;openni::Status rc1 = openni::STATUS_OK;openni::Status rc2 = openni::STATUS_OK;//传感器1的矩阵Mat     cvRGBImg1(480, 640, CV_8UC3);Mat     cvBGRImg1(480, 640, CV_8UC3);Mat     cvIRImg1(480, 640, CV_16UC1);Mat     cvDepthImg1(480, 640, CV_16UC1);Mat     CaliDepth1(480, 640, CV_16UC1);//传感器2的矩阵Mat     cvRGBImg2(480, 640, CV_8UC3);Mat     cvIRImg2(480, 640, CV_16UC1);Mat     cvDepthImg2(480, 640, CV_16UC1);Mat     CaliDepth2(480, 640, CV_16UC1);Mat     cvBGRImg2(480, 640, CV_8UC3);CameraOperate camera;
//  CameraOperate camera2;rc = openni::OpenNI::initialize();//初始化OpenNI环境if (rc != openni::STATUS_OK){printf("%s: Initialize failed\n%s\n", argv[0], openni::OpenNI::getExtendedError());return 1;}openni::Array<openni::DeviceInfo> deviceList;openni::OpenNI::enumerateDevices(&deviceList);//获取到设备列表const char* device1Uri;//设备标识const char* device2Uri;switch (argc){case 1:if (deviceList.getSize() < 2){printf("Missing devices\n");openni::OpenNI::shutdown();return 1;}device1Uri = deviceList[1].getUri();device2Uri = deviceList[0].getUri();break;case 2:if (deviceList.getSize() < 1){printf("Missing devices\n");openni::OpenNI::shutdown();return 1;}device1Uri = argv[1];if (strcmp(deviceList[0].getUri(), device1Uri) != 0)device2Uri = deviceList[0].getUri();elsedevice2Uri = deviceList[1].getUri();break;default:device1Uri = argv[1];device2Uri = argv[2];}SYSTEMTIME sys;double Time0 = 0;double Time1 = 0;double Time11 = 0;double Time2 = 0;double Time22 = 0;double Time33 = 0;double Sum = 0;GetLocalTime(&sys);time_t t1;time_t t2;time(&t1);double NotStartStream1 = 0;                       //设备1不能打开流的次数double NotStartStream2 = 0;                       //设备2不能打开流的次数double WaitFailed1 = 0;                            //设备1等待超时的次数double WaitFailed2 = 0;                            //设备1等待超时的次数int OpenStatue1=camera.OpenDevice(device1Uri,1);int CreateDepthStatue1 = camera.CreateRGBStream(1);int CreateColorStatue1 = camera.CreateDepthStream(1);int StartRGBStatue1 = camera.StartRGBStream(1);if ((OpenStatue1 || CreateDepthStatue1 || CreateColorStatue1 || StartRGBStatue1)!=0){printf("设备1初始化有问题!");}int OpenStatue2 = camera.OpenDevice(device2Uri,2);int CreateDepthStatue2 = camera.CreateRGBStream(2);int CreateColorStatue2 = camera.CreateDepthStream(2);int StartRGBStatue2 = camera.StartRGBStream(2);if ((OpenStatue2 || CreateDepthStatue2 || CreateColorStatue2 || StartRGBStatue2) != 0){printf("设备2初始化有问题!");}while (1){if (rc1 != openni::STATUS_OK){int OpenStatue1 = camera.OpenDevice(device1Uri, 1);int CreateColorStatue1 = camera.CreateDepthStream(1);}if (rc2 != openni::STATUS_OK){int OpenStatue2 = camera.OpenDevice(device1Uri, 2);int CreateColorStatue2 = camera.CreateDepthStream(2);}while (1){GetLocalTime(&sys);                                                                          //得到系统时间Time0 = sys.wMinute * 60 * 1000 + sys.wSecond * 1000 + sys.wMilliseconds;/*******************设备1的启读取********************************************/int OpenDeviceStatue1 = camera.SatrtDepthStream(1);if (OpenDeviceStatue1 != 0){printf("%s: Couldn't start depth1 stream %d on device %s\n%s\n", argv[0], openni::SENSOR_DEPTH, device1Uri, openni::OpenNI::getExtendedError());NotStartStream1++;rc1 = rc;camera.StopDepthStream(1);camera.DestoryDepthStream(1);camera.CloseDevice(1);break;                                                                                   //调到主循环外面重新启动设备1}int ReadFrameStatue1 = camera.ReadRGBDepth_Frame(cvBGRImg1, cvDepthImg1, 1);if (ReadFrameStatue1 == 0){IplImage Image1 = IplImage(cvBGRImg1);cvSaveImage("Image1.jpg", &Image1);camera.MappingDepth2Color(cvDepthImg1, CaliDepth1);camera.MatToPointCloud(CaliDepth1, cvRGBImg1);camera.SavePointCloud("PointCloud1.txt");rc1 = openni::STATUS_OK;}int StopStatue1=camera.StopDepthStream(1);if (StopStatue1 != 0){printf("Stop failed! 
(timeout is %d ms)\n%s\n", SAMPLE_READ_WAIT_TIMEOUT, OpenNI::getExtendedError());}//camera1.DestoryDepthStream(1);GetLocalTime(&sys);Time1 = sys.wMinute * 60 * 1000 + sys.wSecond * 1000 + sys.wMilliseconds;Time11 = Time1 - Time0;cout << "设备1花费时间为:" << Time11 << endl;/*******************设备2的启读取********************************************/int OpenDeviceStatue2 = camera.SatrtDepthStream(2);if (OpenDeviceStatue2 != 0){printf("%s: Couldn't start depth2 stream %d on device %s\n%s\n", argv[0], openni::SENSOR_DEPTH, device1Uri, openni::OpenNI::getExtendedError());NotStartStream2++;DWORD dw = GetLastError();rc2 = rc;camera.StopDepthStream(2);camera.DestoryDepthStream(2);camera.CloseDevice(2);break;                                                                                   //调到主循环外面重新启动设备1}int ReadFrameStatue2 = camera.ReadRGBDepth_Frame(cvBGRImg2, cvDepthImg2, 2);if (ReadFrameStatue2 == 0){IplImage Image2 = IplImage(cvBGRImg2);cvSaveImage("Image2.jpg", &Image2);camera.MappingDepth2Color(cvDepthImg2, CaliDepth2);camera.MatToPointCloud(CaliDepth2, cvRGBImg2);camera.SavePointCloud("PointCloud2.txt");rc2 = openni::STATUS_OK;}int StopStatue2 = camera.StopDepthStream(2);if (StopStatue2 != 0){printf("Stop failed! (timeout is %d ms)\n%s\n", SAMPLE_READ_WAIT_TIMEOUT, OpenNI::getExtendedError());}GetLocalTime(&sys);Time2 = sys.wMinute * 60 * 1000 + sys.wSecond * 1000 + sys.wMilliseconds;Time22 = Time2 - Time1;cout << "设备2花费时间为:" << Time22 << endl;Time33 = Time2 - Time0;cout << "设备1,2花费时间为:" << Time33 << endl;cout << "运行次数" << Sum << endl;Sum++;time(&t2);cout << "运行时间为:" << t2 - t1 << endl;cout << endl;if ((int)Sum % 100 == 0){WriteLog(t2 - t1, NotStartStream1, WaitFailed1, NotStartStream2, WaitFailed2, Sum);}}}}
