1. Saving/Loading the Map

The method first: just add the following two lines at the very top of the camera settings .yaml file that the node loads.

System.LoadAtlasFromFile: "MH01_to_MH05_stereo_inertial.osa"
System.SaveAtlasToFile: "MH01_to_MH05_stereo_inertial.osa"

The first line loads the map file "MH01_to_MH05_stereo_inertial.osa" from disk; the second saves the map to a local file of that name. On the first (mapping) run, comment out the first line and keep only the second; when you later load the map for relocalization, do the opposite. In my tests, enabling both lines at the same time throws an error.
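For context, here is a minimal sketch of how the top of the settings file looks on a mapping run (%YAML:1.0 is the standard first line of ORB-SLAM3 settings files; all the usual camera/ORB parameters follow unchanged below):

%YAML:1.0

# Mapping run: save only. For a relocalization run, comment out the save line and uncomment the load line instead.
# System.LoadAtlasFromFile: "MH01_to_MH05_stereo_inertial.osa"
System.SaveAtlasToFile: "MH01_to_MH05_stereo_inertial.osa"

# ... camera, ORB extractor and viewer parameters as before ...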

2. Publishing the Pose

Take the stereo node ros_stereo.cc as the example. The pose is returned by mpSLAM->TrackStereo, and the return type is Sophus::SE3f, so store it in a variable Tcw_SE3f:

Sophus::SE3f Tcw_SE3f=mpSLAM->TrackStereo(imLeft,imRight,cv_ptrLeft->header.stamp.toSec());

I wasn't familiar with Sophus::SE3f or how to pull the fields I needed out of it, but I do know how to handle cv::Mat, so I converted the Sophus::SE3f into a cv::Mat:

cv::Mat Tcw;
Sophus::SE3f Tcw_SE3f=mpSLAM->TrackStereo(imLeft,imRight,cv_ptrLeft->header.stamp.toSec());
Eigen::Matrix4f Tcw_Matrix = Tcw_SE3f.matrix();
cv::eigen2cv(Tcw_Matrix, Tcw);

Now Tcw is a cv::Mat.
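(As an aside: if you would rather skip the cv::Mat round trip, Sophus can hand you the rotation and translation directly. A minimal sketch, assuming the Sophus bundled with ORB-SLAM3; Twc, twc and qwc are just illustrative names:)

// Tcw is world-to-camera; invert it to get the camera pose in the world frame.
Sophus::SE3f Twc = Tcw_SE3f.inverse();
Eigen::Vector3f twc = Twc.translation();               // camera position in the world frame
Eigen::Quaternionf qwc = Twc.so3().unit_quaternion();  // camera orientation as a unit quaternion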
Then publish Tcw:

PublishPose(Tcw);

Construct the publisher:

ros::Publisher PosPub = nh.advertise<geometry_msgs::PoseStamped>("ORB_SLAM3/pose", 5);
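Once the node is running, you can sanity-check the output from another terminal (with the default node handle the topic resolves to /ORB_SLAM3/pose):

rostopic echo /ORB_SLAM3/pose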

Inside PublishPose, the key code that extracts the data and fills the message is:

geometry_msgs::PoseStamped poseMSG;
if(!Tcw.empty())
{
    // Invert Tcw (world-to-camera) to get the camera pose in the world frame.
    cv::Mat Rwc = Tcw.rowRange(0,3).colRange(0,3).t();
    cv::Mat twc = -Rwc*Tcw.rowRange(0,3).col(3);
    vector<float> q = ORB_SLAM3::Converter::toQuaternion(Rwc);

    // Fill the message; the sign/axis swaps remap the camera axes to the output convention used here.
    poseMSG.pose.position.x = -twc.at<float>(0);
    poseMSG.pose.position.y = -twc.at<float>(2);
    poseMSG.pose.position.z =  twc.at<float>(1);
    poseMSG.pose.orientation.x = q[0];
    poseMSG.pose.orientation.y = q[1];
    poseMSG.pose.orientation.z = q[2];
    poseMSG.pose.orientation.w = q[3];
    poseMSG.header.frame_id = "VSLAM";
    poseMSG.header.stamp = ros::Time::now();

    pPosPub->publish(poseMSG);
}

The complete ros_stereo.cc code is as follows:

#include <Eigen/Dense>
#include <opencv2/core/eigen.hpp>
#include<iostream>
#include<algorithm>
#include<fstream>
#include<chrono>
#include<ros/ros.h>
#include <cv_bridge/cv_bridge.h>
#include <message_filters/subscriber.h>
#include <message_filters/time_synchronizer.h>
#include <message_filters/sync_policies/approximate_time.h>
#include<opencv2/core/core.hpp>
#include"../../../include/System.h"
#include <tf/transform_broadcaster.h>
#include "Converter.h"
#include <geometry_msgs/PoseStamped.h> // explicit include for the published message type

using namespace std;

class ImageGrabber
{
public:
    ImageGrabber(ORB_SLAM3::System* pSLAM):mpSLAM(pSLAM){}

    void GrabStereo(const sensor_msgs::ImageConstPtr& msgLeft, const sensor_msgs::ImageConstPtr& msgRight);
    void PublishPose(cv::Mat Tcw);

    ORB_SLAM3::System* mpSLAM;
    bool do_rectify;
    cv::Mat M1l,M2l,M1r,M2r;
    ros::Publisher* pPosPub;
};

void ImageGrabber::PublishPose(cv::Mat Tcw)
{
    geometry_msgs::PoseStamped poseMSG;
    if(!Tcw.empty())
    {
        // Invert Tcw (world-to-camera) to get the camera pose in the world frame.
        cv::Mat Rwc = Tcw.rowRange(0,3).colRange(0,3).t();
        cv::Mat twc = -Rwc*Tcw.rowRange(0,3).col(3);
        vector<float> q = ORB_SLAM3::Converter::toQuaternion(Rwc);

        poseMSG.pose.position.x = -twc.at<float>(0);
        poseMSG.pose.position.y = -twc.at<float>(2);
        poseMSG.pose.position.z =  twc.at<float>(1);
        poseMSG.pose.orientation.x = q[0];
        poseMSG.pose.orientation.y = q[1];
        poseMSG.pose.orientation.z = q[2];
        poseMSG.pose.orientation.w = q[3];
        poseMSG.header.frame_id = "VSLAM";
        poseMSG.header.stamp = ros::Time::now();

        pPosPub->publish(poseMSG);
    }
}

int main(int argc, char **argv)
{
    ros::init(argc, argv, "RGBD");
    ros::start();

    if(argc != 4)
    {
        cerr << endl << "Usage: rosrun ORB_SLAM3 Stereo path_to_vocabulary path_to_settings do_rectify" << endl;
        ros::shutdown();
        return 1;
    }

    // Create SLAM system. It initializes all system threads and gets ready to process frames.
    ORB_SLAM3::System SLAM(argv[1],argv[2],ORB_SLAM3::System::STEREO,true);

    ImageGrabber igb(&SLAM);

    stringstream ss(argv[3]);
    ss >> boolalpha >> igb.do_rectify;

    if(igb.do_rectify)
    {
        // Load settings related to stereo calibration
        cv::FileStorage fsSettings(argv[2], cv::FileStorage::READ);
        if(!fsSettings.isOpened())
        {
            cerr << "ERROR: Wrong path to settings" << endl;
            return -1;
        }

        cv::Mat K_l, K_r, P_l, P_r, R_l, R_r, D_l, D_r;
        fsSettings["LEFT.K"] >> K_l;
        fsSettings["RIGHT.K"] >> K_r;
        fsSettings["LEFT.P"] >> P_l;
        fsSettings["RIGHT.P"] >> P_r;
        fsSettings["LEFT.R"] >> R_l;
        fsSettings["RIGHT.R"] >> R_r;
        fsSettings["LEFT.D"] >> D_l;
        fsSettings["RIGHT.D"] >> D_r;

        int rows_l = fsSettings["LEFT.height"];
        int cols_l = fsSettings["LEFT.width"];
        int rows_r = fsSettings["RIGHT.height"];
        int cols_r = fsSettings["RIGHT.width"];

        if(K_l.empty() || K_r.empty() || P_l.empty() || P_r.empty() || R_l.empty() || R_r.empty() ||
           D_l.empty() || D_r.empty() || rows_l==0 || rows_r==0 || cols_l==0 || cols_r==0)
        {
            cerr << "ERROR: Calibration parameters to rectify stereo are missing!" << endl;
            return -1;
        }

        cv::initUndistortRectifyMap(K_l,D_l,R_l,P_l.rowRange(0,3).colRange(0,3),cv::Size(cols_l,rows_l),CV_32F,igb.M1l,igb.M2l);
        cv::initUndistortRectifyMap(K_r,D_r,R_r,P_r.rowRange(0,3).colRange(0,3),cv::Size(cols_r,rows_r),CV_32F,igb.M1r,igb.M2r);
    }

    ros::NodeHandle nh;

    message_filters::Subscriber<sensor_msgs::Image> left_sub(nh, "/zed2i/zed_node/left/image_rect_color", 1);
    message_filters::Subscriber<sensor_msgs::Image> right_sub(nh, "/zed2i/zed_node/right/image_rect_color", 1);
    typedef message_filters::sync_policies::ApproximateTime<sensor_msgs::Image, sensor_msgs::Image> sync_pol;
    message_filters::Synchronizer<sync_pol> sync(sync_pol(10), left_sub,right_sub);
    sync.registerCallback(boost::bind(&ImageGrabber::GrabStereo,&igb,_1,_2));

    ros::Publisher PosPub = nh.advertise<geometry_msgs::PoseStamped>("ORB_SLAM3/pose", 5);
    igb.pPosPub = &(PosPub);

    ros::spin();

    // Stop all threads
    // Save camera trajectory
    SLAM.SaveKeyFrameTrajectoryTUM("KeyFrameTrajectory_TUM_Format.txt");
    SLAM.SaveTrajectoryTUM("FrameTrajectory_TUM_Format.txt");
    SLAM.SaveTrajectoryKITTI("FrameTrajectory_KITTI_Format.txt");
    SLAM.Shutdown();

    ros::shutdown();

    return 0;
}

void ImageGrabber::GrabStereo(const sensor_msgs::ImageConstPtr& msgLeft, const sensor_msgs::ImageConstPtr& msgRight)
{
    // Copy the ros image message to cv::Mat.
    cv_bridge::CvImageConstPtr cv_ptrLeft;
    try
    {
        cv_ptrLeft = cv_bridge::toCvShare(msgLeft);
    }
    catch (cv_bridge::Exception& e)
    {
        ROS_ERROR("cv_bridge exception: %s", e.what());
        return;
    }

    cv_bridge::CvImageConstPtr cv_ptrRight;
    try
    {
        cv_ptrRight = cv_bridge::toCvShare(msgRight);
    }
    catch (cv_bridge::Exception& e)
    {
        ROS_ERROR("cv_bridge exception: %s", e.what());
        return;
    }

    if(do_rectify)
    {
        cv::Mat Tcw;
        cv::Mat imLeft, imRight;
        cv::remap(cv_ptrLeft->image,imLeft,M1l,M2l,cv::INTER_LINEAR);
        cv::remap(cv_ptrRight->image,imRight,M1r,M2r,cv::INTER_LINEAR);
        Sophus::SE3f Tcw_SE3f = mpSLAM->TrackStereo(imLeft,imRight,cv_ptrLeft->header.stamp.toSec());
        Eigen::Matrix4f Tcw_Matrix = Tcw_SE3f.matrix();
        cv::eigen2cv(Tcw_Matrix, Tcw);
        // mpSLAM->TrackStereo(imLeft,imRight,cv_ptrLeft->header.stamp.toSec());
        PublishPose(Tcw);
        // cout << "Tcw:" << Tcw.matrix() << endl;
    }
    else
    {
        cv::Mat Tcw;
        Sophus::SE3f Tcw_SE3f = mpSLAM->TrackStereo(cv_ptrLeft->image,cv_ptrRight->image,cv_ptrLeft->header.stamp.toSec());
        Eigen::Matrix4f Tcw_Matrix = Tcw_SE3f.matrix();
        cv::eigen2cv(Tcw_Matrix, Tcw);
        // mpSLAM->TrackStereo(cv_ptrLeft->image,cv_ptrRight->image,cv_ptrLeft->header.stamp.toSec());
        PublishPose(Tcw);
        // cout << "Tcw:" << Tcw.matrix() << endl;
    }
}
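For reference, here is a minimal sketch of a separate node that consumes the published pose; the node name, callback, and package setup are illustrative and not part of the modification above:

#include <ros/ros.h>
#include <geometry_msgs/PoseStamped.h>

// Print each pose published by the ORB-SLAM3 node.
void poseCallback(const geometry_msgs::PoseStamped::ConstPtr& msg)
{
    ROS_INFO("pose: x=%.3f y=%.3f z=%.3f",
             msg->pose.position.x, msg->pose.position.y, msg->pose.position.z);
}

int main(int argc, char** argv)
{
    ros::init(argc, argv, "orb_slam3_pose_listener");
    ros::NodeHandle nh;
    ros::Subscriber sub = nh.subscribe("ORB_SLAM3/pose", 10, poseCallback);
    ros::spin();
    return 0;
}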

Correspondingly, the modified ros_stereo_inertial.cc looks like this:

/**
* This file is part of ORB-SLAM3
*
* Copyright (C) 2017-2021 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
*
* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
* If not, see <http://www.gnu.org/licenses/>.
*/

#include <Eigen/Dense>
#include <opencv2/core/eigen.hpp>
#include<iostream>
#include<algorithm>
#include<fstream>
#include<chrono>
#include<vector>
#include<queue>
#include<thread>
#include<mutex>
#include "geometry_msgs/PoseStamped.h"
#include<ros/ros.h>
#include<cv_bridge/cv_bridge.h>
#include<sensor_msgs/Imu.h>
#include<opencv2/core/core.hpp>

#include"../../../include/System.h"
#include"../include/ImuTypes.h"
#include "Converter.h" // for ORB_SLAM3::Converter::toQuaternion, as in ros_stereo.cc above

using namespace std;

class ImuGrabber
{
public:
    ImuGrabber(){};
    void GrabImu(const sensor_msgs::ImuConstPtr &imu_msg);

    queue<sensor_msgs::ImuConstPtr> imuBuf;
    std::mutex mBufMutex;
};

class ImageGrabber
{
public:
    ImageGrabber(ORB_SLAM3::System* pSLAM, ImuGrabber *pImuGb, const bool bRect, const bool bClahe): mpSLAM(pSLAM), mpImuGb(pImuGb), do_rectify(bRect), mbClahe(bClahe){}

    void PublishPose(cv::Mat Tcw);
    void GrabImageLeft(const sensor_msgs::ImageConstPtr& msg);
    void GrabImageRight(const sensor_msgs::ImageConstPtr& msg);
    cv::Mat GetImage(const sensor_msgs::ImageConstPtr &img_msg);
    void SyncWithImu();

    queue<sensor_msgs::ImageConstPtr> imgLeftBuf, imgRightBuf;
    std::mutex mBufMutexLeft,mBufMutexRight;

    ORB_SLAM3::System* mpSLAM;
    ImuGrabber *mpImuGb;

    const bool do_rectify;
    cv::Mat M1l,M2l,M1r,M2r;

    const bool mbClahe;
    cv::Ptr<cv::CLAHE> mClahe = cv::createCLAHE(3.0, cv::Size(8, 8));

    ros::Publisher* pPosPub;
};

void ImageGrabber::PublishPose(cv::Mat Tcw)
{
    geometry_msgs::PoseStamped poseMSG;
    if(!Tcw.empty())
    {
        cv::Mat Rwc = Tcw.rowRange(0,3).colRange(0,3).t(); // Rwc and twc are the rotation and translation of the camera in the world frame
        cv::Mat twc = -Rwc*Tcw.rowRange(0,3).col(3);
        vector<float> q = ORB_SLAM3::Converter::toQuaternion(Rwc);

        /*
        cv::Mat Rwc = Tcw.rowRange(0,3).colRange(0,3).t();
        cv::Mat twc = -Rwc*Tcw.rowRange(0,3).col(3);
        tf::Matrix3x3 M(Rwc.at<float>(0,0),Rwc.at<float>(0,1),Rwc.at<float>(0,2),
                        Rwc.at<float>(1,0),Rwc.at<float>(1,1),Rwc.at<float>(1,2),
                        Rwc.at<float>(2,0),Rwc.at<float>(2,1),Rwc.at<float>(2,2));
        tf::Vector3 V(twc.at<float>(0), twc.at<float>(1), twc.at<float>(2));
        tf::Transform tfTcw(M,V);
        //mTfBr.sendTransform(tf::StampedTransform(tfTcw,ros::Time::now(), "ORB_SLAM/World", "ORB_SLAM/Camera"));
        */

        poseMSG.pose.position.x = -twc.at<float>(0);
        cout << "PublishPose position.x = " << poseMSG.pose.position.x << endl;
        poseMSG.pose.position.y = -twc.at<float>(2);
        cout << "PublishPose position.y = " << poseMSG.pose.position.y << endl;
        poseMSG.pose.position.z = twc.at<float>(1);
        cout << "PublishPose position.z = " << poseMSG.pose.position.z << endl;
        poseMSG.pose.orientation.x = q[0];
        cout << "PublishPose orientation.x = " << poseMSG.pose.orientation.x << endl;
        poseMSG.pose.orientation.y = q[1];
        cout << "PublishPose orientation.y = " << poseMSG.pose.orientation.y << endl;
        poseMSG.pose.orientation.z = q[2];
        cout << "PublishPose orientation.z = " << poseMSG.pose.orientation.z << endl;
        poseMSG.pose.orientation.w = q[3];
        cout << "PublishPose orientation.w = " << poseMSG.pose.orientation.w << endl;
        poseMSG.header.frame_id = "VSLAM";
        poseMSG.header.stamp = ros::Time::now();

        // poseMSG.pose.position.x = twc.at<float>(0);
        // cout << "PublishPose position.x = " << poseMSG.pose.position.x << endl;
        // poseMSG.pose.position.y = twc.at<float>(2);
        // cout << "PublishPose position.y = " << poseMSG.pose.position.y << endl;
        // poseMSG.pose.position.z = twc.at<float>(1);
        // cout << "PublishPose position.z = " << poseMSG.pose.position.z << endl;
        // poseMSG.pose.orientation.x = q[0];
        // cout << "PublishPose orientation.x = " << poseMSG.pose.orientation.x << endl;
        // poseMSG.pose.orientation.y = q[1];
        // cout << "PublishPose orientation.y = " << poseMSG.pose.orientation.y << endl;
        // poseMSG.pose.orientation.z = q[2];
        // cout << "PublishPose orientation.z = " << poseMSG.pose.orientation.z << endl;
        // poseMSG.pose.orientation.w = q[3];
        // cout << "PublishPose orientation.w = " << poseMSG.pose.orientation.w << endl;
        // poseMSG.header.frame_id = "VSLAM";
        // poseMSG.header.stamp = ros::Time::now();

        pPosPub->publish(poseMSG);

        // geometry_msgs::PoseStamped camera_pose;
        // camera_pose.header.frame_id="stereo";
        // camera_pose.header.stamp=ros::Time::now();
        // camera_pose.pose.position.x = poseMSG.pose.position.y;
        // camera_pose.pose.position.y = -poseMSG.pose.position.x;
        // camera_pose.pose.position.z =  poseMSG.pose.position.z;
        // camera_pose.pose.orientation = poseMSG.pose.orientation;
        // pPosPub->publish(camera_pose);

        //mlbLost.push_back(mState==LOST);
    }
}

int main(int argc, char **argv)
{
    ros::init(argc, argv, "Stereo_Inertial");
    ros::start();
    ros::NodeHandle n("~");
    ros::console::set_logger_level(ROSCONSOLE_DEFAULT_NAME, ros::console::levels::Info);
    bool bEqual = false;
    if(argc < 4 || argc > 5)
    {
        cerr << endl << "Usage: rosrun ORB_SLAM3 Stereo_Inertial path_to_vocabulary path_to_settings do_rectify [do_equalize]" << endl;
        ros::shutdown();
        return 1;
    }

    std::string sbRect(argv[3]);
    if(argc==5)
    {
        std::string sbEqual(argv[4]);
        if(sbEqual == "true")
            bEqual = true;
    }

    // Create SLAM system. It initializes all system threads and gets ready to process frames.
    ORB_SLAM3::System SLAM(argv[1],argv[2],ORB_SLAM3::System::IMU_STEREO,true);

    ImuGrabber imugb;
    ImageGrabber igb(&SLAM,&imugb,sbRect == "true",bEqual);

    if(igb.do_rectify)
    {
        // Load settings related to stereo calibration
        cv::FileStorage fsSettings(argv[2], cv::FileStorage::READ);
        if(!fsSettings.isOpened())
        {
            cerr << "ERROR: Wrong path to settings" << endl;
            return -1;
        }

        cv::Mat K_l, K_r, P_l, P_r, R_l, R_r, D_l, D_r;
        fsSettings["LEFT.K"] >> K_l;
        fsSettings["RIGHT.K"] >> K_r;
        fsSettings["LEFT.P"] >> P_l;
        fsSettings["RIGHT.P"] >> P_r;
        fsSettings["LEFT.R"] >> R_l;
        fsSettings["RIGHT.R"] >> R_r;
        fsSettings["LEFT.D"] >> D_l;
        fsSettings["RIGHT.D"] >> D_r;

        int rows_l = fsSettings["LEFT.height"];
        int cols_l = fsSettings["LEFT.width"];
        int rows_r = fsSettings["RIGHT.height"];
        int cols_r = fsSettings["RIGHT.width"];

        if(K_l.empty() || K_r.empty() || P_l.empty() || P_r.empty() || R_l.empty() || R_r.empty() ||
           D_l.empty() || D_r.empty() || rows_l==0 || rows_r==0 || cols_l==0 || cols_r==0)
        {
            cerr << "ERROR: Calibration parameters to rectify stereo are missing!" << endl;
            return -1;
        }

        cv::initUndistortRectifyMap(K_l,D_l,R_l,P_l.rowRange(0,3).colRange(0,3),cv::Size(cols_l,rows_l),CV_32F,igb.M1l,igb.M2l);
        cv::initUndistortRectifyMap(K_r,D_r,R_r,P_r.rowRange(0,3).colRange(0,3),cv::Size(cols_r,rows_r),CV_32F,igb.M1r,igb.M2r);
    }

    // Maximum delay, 5 seconds
    ros::Subscriber sub_imu = n.subscribe("/zed2i/zed_node/imu/data", 1000, &ImuGrabber::GrabImu, &imugb);
    ros::Subscriber sub_img_left = n.subscribe("/zed2i/zed_node/left/image_rect_color", 100, &ImageGrabber::GrabImageLeft,&igb);
    ros::Subscriber sub_img_right = n.subscribe("/zed2i/zed_node/right/image_rect_color", 100, &ImageGrabber::GrabImageRight,&igb);

    std::thread sync_thread(&ImageGrabber::SyncWithImu,&igb);

    ros::Publisher PosPub = n.advertise<geometry_msgs::PoseStamped>("ORB_SLAM3_imu/pose", 5);
    igb.pPosPub = &(PosPub);

    ros::spin();

    return 0;
}

void ImageGrabber::GrabImageLeft(const sensor_msgs::ImageConstPtr &img_msg)
{
    mBufMutexLeft.lock();
    if (!imgLeftBuf.empty())
        imgLeftBuf.pop();
    imgLeftBuf.push(img_msg);
    mBufMutexLeft.unlock();
}

void ImageGrabber::GrabImageRight(const sensor_msgs::ImageConstPtr &img_msg)
{
    mBufMutexRight.lock();
    if (!imgRightBuf.empty())
        imgRightBuf.pop();
    imgRightBuf.push(img_msg);
    mBufMutexRight.unlock();
}

cv::Mat ImageGrabber::GetImage(const sensor_msgs::ImageConstPtr &img_msg)
{
    // Copy the ros image message to cv::Mat.
    cv_bridge::CvImageConstPtr cv_ptr;
    try
    {
        cv_ptr = cv_bridge::toCvShare(img_msg, sensor_msgs::image_encodings::MONO8);
    }
    catch (cv_bridge::Exception& e)
    {
        ROS_ERROR("cv_bridge exception: %s", e.what());
    }

    if(cv_ptr->image.type()==0)
    {
        return cv_ptr->image.clone();
    }
    else
    {
        std::cout << "Error type" << std::endl;
        return cv_ptr->image.clone();
    }
}

void ImageGrabber::SyncWithImu()
{
    const double maxTimeDiff = 0.01;
    while(1)
    {
        cv::Mat imLeft, imRight;
        double tImLeft = 0, tImRight = 0;
        if (!imgLeftBuf.empty()&&!imgRightBuf.empty()&&!mpImuGb->imuBuf.empty())
        {
            tImLeft = imgLeftBuf.front()->header.stamp.toSec();
            tImRight = imgRightBuf.front()->header.stamp.toSec();

            this->mBufMutexRight.lock();
            while((tImLeft-tImRight)>maxTimeDiff && imgRightBuf.size()>1)
            {
                imgRightBuf.pop();
                tImRight = imgRightBuf.front()->header.stamp.toSec();
            }
            this->mBufMutexRight.unlock();

            this->mBufMutexLeft.lock();
            while((tImRight-tImLeft)>maxTimeDiff && imgLeftBuf.size()>1)
            {
                imgLeftBuf.pop();
                tImLeft = imgLeftBuf.front()->header.stamp.toSec();
            }
            this->mBufMutexLeft.unlock();

            if((tImLeft-tImRight)>maxTimeDiff || (tImRight-tImLeft)>maxTimeDiff)
            {
                // std::cout << "big time difference" << std::endl;
                continue;
            }
            if(tImLeft>mpImuGb->imuBuf.back()->header.stamp.toSec())
                continue;

            this->mBufMutexLeft.lock();
            imLeft = GetImage(imgLeftBuf.front());
            imgLeftBuf.pop();
            this->mBufMutexLeft.unlock();

            this->mBufMutexRight.lock();
            imRight = GetImage(imgRightBuf.front());
            imgRightBuf.pop();
            this->mBufMutexRight.unlock();

            vector<ORB_SLAM3::IMU::Point> vImuMeas;
            mpImuGb->mBufMutex.lock();
            if(!mpImuGb->imuBuf.empty())
            {
                // Load imu measurements from buffer
                vImuMeas.clear();
                while(!mpImuGb->imuBuf.empty() && mpImuGb->imuBuf.front()->header.stamp.toSec()<=tImLeft)
                {
                    double t = mpImuGb->imuBuf.front()->header.stamp.toSec();
                    cv::Point3f acc(mpImuGb->imuBuf.front()->linear_acceleration.x, mpImuGb->imuBuf.front()->linear_acceleration.y, mpImuGb->imuBuf.front()->linear_acceleration.z);
                    cv::Point3f gyr(mpImuGb->imuBuf.front()->angular_velocity.x, mpImuGb->imuBuf.front()->angular_velocity.y, mpImuGb->imuBuf.front()->angular_velocity.z);
                    vImuMeas.push_back(ORB_SLAM3::IMU::Point(acc,gyr,t));
                    mpImuGb->imuBuf.pop();
                }
            }
            mpImuGb->mBufMutex.unlock();

            if(mbClahe)
            {
                mClahe->apply(imLeft,imLeft);
                mClahe->apply(imRight,imRight);
            }

            if(do_rectify)
            {
                cv::remap(imLeft,imLeft,M1l,M2l,cv::INTER_LINEAR);
                cv::remap(imRight,imRight,M1r,M2r,cv::INTER_LINEAR);
            }

            cv::Mat Tcw;
            Sophus::SE3f Tcw_SE3f = mpSLAM->TrackStereo(imLeft,imRight,tImLeft,vImuMeas);
            Eigen::Matrix4f Tcw_Matrix = Tcw_SE3f.matrix();
            cv::eigen2cv(Tcw_Matrix, Tcw);
            // mpSLAM->TrackStereo(imLeft,imRight,cv_ptrLeft->header.stamp.toSec());
            PublishPose(Tcw);

            std::chrono::milliseconds tSleep(1);
            std::this_thread::sleep_for(tSleep);
        }
    }
}

void ImuGrabber::GrabImu(const sensor_msgs::ImuConstPtr &imu_msg)
{
    mBufMutex.lock();
    imuBuf.push(imu_msg);
    mBufMutex.unlock();
    return;
}
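To run the node and check the output, something like the following works; the settings path is illustrative (use your own calibration file), and pass true/false for do_rectify depending on whether your image topics are already rectified. Note that because main() creates a private node handle with ros::NodeHandle n("~"), the pose topic is published under the node name unless remapped:

rosrun ORB_SLAM3 Stereo_Inertial Vocabulary/ORBvoc.txt /path/to/your_stereo_inertial_settings.yaml false
rostopic echo /Stereo_Inertial/ORB_SLAM3_imu/pose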

Closing grumbles
At first I read on the project page that ORB-SLAM3 V1.0 added map saving/loading, but after running the program once I couldn't find any saved map. Searching the code for "savemap" turned up nothing either; it took me quite a while to realize the function names had changed: the map is no longer called a "map" but an "atlas".
Also, ORB-SLAM3 has been out for about two years and nobody seems to have added a pose-publishing feature. At least I couldn't find one on GitHub, and the one candidate I did find looked buggy; maybe someone has done it properly but just hasn't open-sourced it.
Finally, I'm far from an expert and have only been teaching myself C++ for a few months, so corrections are welcome; in any case the code above compiles, runs, and the features behave as expected.
