/**
 * This file is part of ORB-SLAM3
 *
 * Copyright (C) 2017-2020 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
 * Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
 *
 * ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
 * License as published by the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
 * the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along with ORB-SLAM3.
 * If not, see <http://www.gnu.org/licenses/>.
 */

/*
 * Copyright (C) 2021, Yubao Liu, AISL, TOYOHASHI UNIVERSITY of TECHNOLOGY
 * Email: yubao.liu.ra@tut.jp
 *
 */

#ifndef TRACKING_H
#define TRACKING_H

#include <opencv2/core/core.hpp>
#include <opencv2/features2d/features2d.hpp>
#include <opencv2/video/tracking.hpp>

#include "Atlas.h"
#include "Frame.h"
#include "FrameDrawer.h"
#include "ImuTypes.h"
#include "Initializer.h"
#include "KeyFrameDatabase.h"
#include "LocalMapping.h"
#include "LoopClosing.h"
#include "MapDrawer.h"
#include "ORBVocabulary.h"
#include "ORBextractor.h"
#include "System.h"
#include "Viewer.h"

#include "GeometricCamera.h"

#include <mutex>
#include <unordered_set>

// myx: receive the time of each request
#include <std_msgs/Float32.h>

namespace ORB_SLAM3 {

class Viewer;
class FrameDrawer;
class Atlas;
class LocalMapping;
class LoopClosing;
class System;

class Tracking {
private:
    /*myx: delay
    ros::NodeHandle nh_;
    ros::Subscriber time_sub_;
    float time_of_request_ = 0; // stores the time value received on /time_of_request
    // callback used to receive data from /time_of_request
    */
public:
    /*myx: delay
    void timeCallback(const std_msgs::Float32::ConstPtr& msg); // callback
    */
    Tracking(System* pSys, ORBVocabulary* pVoc, FrameDrawer* pFrameDrawer, MapDrawer* pMapDrawer, Atlas* pAtlas,
        KeyFrameDatabase* pKFDB, const string& strSettingPath, const int sensor, const string& _nameSeq = std::string());

    ~Tracking();

    // Preprocess the input and call Track(). Extracts features and performs stereo matching.
    cv::Mat GrabImageStereo(const cv::Mat& imRectLeft, const cv::Mat& imRectRight, const double& timestamp, string filename);
    cv::Mat GrabImageRGBD(const cv::Mat& imRGB, const cv::Mat& imD, const double& timestamp, string filename);
    cv::Mat GrabImageMonocular(const cv::Mat& im, const double& timestamp, string filename);
    // cv::Mat GrabImageImuMonocular(const cv::Mat& im, const double& timestamp);
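
    // Usage sketch (illustrative only): the System front-end typically forwards each
    // incoming frame to one of the GrabImage* methods and receives the estimated camera
    // pose Tcw, which is empty when the frame could not be tracked, e.g.:
    //
    //     cv::Mat Tcw = mpTracker->GrabImageRGBD(imRGB, imD, timestamp, filename);
    //     if (Tcw.empty()) {
    //         // not initialized yet, or tracking was lost for this frame
    //     }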

    void GrabImuData(const IMU::Point& imuMeasurement);

    void SetLocalMapper(LocalMapping* pLocalMapper);
    void SetLoopClosing(LoopClosing* pLoopClosing);
    void SetViewer(Viewer* pViewer);
    void SetStepByStep(bool bSet);

    // Load new settings
    // The focal length should be similar or scale prediction will fail when projecting points
    void ChangeCalibration(const string& strSettingPath);

    // Use this function if you have deactivated local mapping and you only want to localize the camera.
    void InformOnlyTracking(const bool& flag);
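
    // Sketch of the intended call pattern (an assumption, mirroring how the System class
    // usually exposes localization-only mode): pause local mapping, then flip this flag:
    //
    //     mpLocalMapper->RequestStop();         // pause map building
    //     mpTracker->InformOnlyTracking(true);  // track against the existing map only
    //     ...
    //     mpTracker->InformOnlyTracking(false); // back to full SLAM
    //     mpLocalMapper->Release();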

    void UpdateFrameIMU(const float s, const IMU::Bias& b, KeyFrame* pCurrentKeyFrame);
    KeyFrame* GetLastKeyFrame()
    {
        return mpLastKeyFrame;
    }

    void CreateMapInAtlas();
    std::mutex mMutexTracks;

    //--
    void NewDataset();
    int GetNumberDataset();
    int GetMatchesInliers();

    // semantic
    void SemanticUpdateLocalKeyFrames(KeyFrame& currentKF);
    std::vector<KeyFrame*> mvpSemanticLocalKeyFrames;
    std::vector<MapPoint*> mvpSemanticLocalMapPoints;
    void SemanticUpdateLocalPoints(KeyFrame& currentKF);
    // void SemanticSearchLocalPoints(KeyFrame& currentKF);

public:
    // Tracking states
    enum eTrackingState {
        SYSTEM_NOT_READY = -1,
        NO_IMAGES_YET = 0,
        NOT_INITIALIZED = 1,
        OK = 2,
        RECENTLY_LOST = 3,
        LOST = 4,
        OK_KLT = 5
    };

    eTrackingState mState;
    eTrackingState mLastProcessedState;

    // Input sensor
    int mSensor;

    // Current Frame
    Frame mCurrentFrame;
    Frame mLastFrame;

    cv::Mat mImGray;

    // TODO: save the color image used for semantic segmentation
    cv::Mat mImRGB;
    Frame* mpCurrentFrame;
    // void SemanticTracking(Frame* lastFrame, Frame* currentFrame);

    // Semantic
    // void SemanticTracking(KeyFrame* lastKF, KeyFrame* currentFrame);
    void PoseOptimization(KeyFrame* currentKF, bool bUpdateMap);
    void PoseGraphOptimizer(KeyFrame* LastKF, KeyFrame* currentKF, bool bUpdateMap);
    void SemanticBA(KeyFrame* currentKF);

    void PredictFeatureProbability(KeyFrame* lastKF, KeyFrame* curKF);
    // void PredictMovingProbability(KeyFrame* lastKF, Frame* curKF);
    void PredictSemanticMask(KeyFrame* lastKF, KeyFrame* currentKF);

    // Initialization Variables (Monocular)
    std::vector<int> mvIniLastMatches;
    std::vector<int> mvIniMatches;
    std::vector<cv::Point2f> mvbPrevMatched;
    std::vector<cv::Point3f> mvIniP3D;
    Frame mInitialFrame;

    // Lists used to recover the full camera trajectory at the end of the execution.
    // Basically we store the reference keyframe for each frame and its relative transformation.
    list<cv::Mat> mlRelativeFramePoses;
    list<KeyFrame*> mlpReferences;
    list<double> mlFrameTimes;
    list<bool> mlbLost;
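
    // Recovery sketch (illustrative, assuming mlRelativeFramePoses stores Tcr, the pose
    // of each frame relative to its reference keyframe): walking the lists in parallel
    // reconstructs every absolute frame pose at the end of the run, e.g.:
    //
    //     auto lit = mlRelativeFramePoses.begin();
    //     auto lRit = mlpReferences.begin();
    //     for (; lit != mlRelativeFramePoses.end(); ++lit, ++lRit) {
    //         cv::Mat Trw = (*lRit)->GetPose(); // reference keyframe pose, world -> camera
    //         cv::Mat Tcw = (*lit) * Trw;       // absolute pose of this frame
    //     }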

    // frames with estimated pose
    int mTrackedFr;
    bool mbStep;

    // True if local mapping is deactivated and we are performing only localization
    bool mbOnlyTracking;

    void Reset(bool bLocMap = false);
    void ResetActiveMap(bool bLocMap = false);

    float mMeanTrack;
    bool mbInitWith3KFs;
    double t0;    // time-stamp of first read frame
    double t0vis; // time-stamp of first inserted keyframe
    double t0IMU; // time-stamp of IMU initialization

    vector<MapPoint*> GetLocalMapMPS();

    //TEST--
    cv::Mat M1l, M2l;
    cv::Mat M1r, M2r;

    bool mbWriteStats;

protected:
    // Main tracking function. It is independent of the input sensor.
    void Track();
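
    // Simplified view (an assumption, for orientation only) of how the helpers below are
    // typically combined inside Track(); the real implementation adds IMU prediction,
    // Atlas map changes and several recovery paths:
    //
    //     if (mState == NOT_INITIALIZED) { StereoInitialization(); /* or MonocularInitialization() */ }
    //     else {
    //         bool bOK = !mVelocity.empty() ? TrackWithMotionModel() : TrackReferenceKeyFrame();
    //         if (!bOK) bOK = Relocalization();
    //         if (bOK) bOK = TrackLocalMap();
    //         if (bOK && NeedNewKeyFrame()) CreateNewKeyFrame();
    //     }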

    // Map initialization for stereo and RGB-D
    void StereoInitialization();

    // Map initialization for monocular
    void MonocularInitialization();
    void CreateNewMapPoints();
    cv::Mat ComputeF12(KeyFrame*& pKF1, KeyFrame*& pKF2);
    void CreateInitialMapMonocular();

    void CheckReplacedInLastFrame();
    bool TrackReferenceKeyFrame();
    void UpdateLastFrame();
    bool TrackWithMotionModel();
    bool PredictStateIMU();

    bool Relocalization();

    void UpdateLocalMap();
    void UpdateLocalPoints();
    void UpdateLocalKeyFrames();

    bool TrackLocalMap();
    bool TrackLocalMap_old();
    void SearchLocalPoints();

    bool NeedNewKeyFrame();
    void CreateNewKeyFrame();

    // Perform preintegration from last frame
    void PreintegrateIMU();

    // Reset IMU biases and compute frame velocity
    void ResetFrameIMU();
    void ComputeGyroBias(const vector<Frame*>& vpFs, float& bwx, float& bwy, float& bwz);
    void ComputeVelocitiesAccBias(const vector<Frame*>& vpFs, float& bax, float& bay, float& baz);

    bool mbMapUpdated;

    // IMU preintegration from the last frame
    IMU::Preintegrated* mpImuPreintegratedFromLastKF;

    // Queue of IMU measurements between frames
    std::list<IMU::Point> mlQueueImuData;

    // Vector of IMU measurements from previous to current frame (to be filled by PreintegrateIMU)
    std::vector<IMU::Point> mvImuFromLastFrame;
    std::mutex mMutexImuQueue;

    // IMU calibration parameters
    IMU::Calib* mpImuCalib;

    // Last bias estimation (at keyframe creation)
    IMU::Bias mLastBias;

    // In case of performing only localization, this flag is true when there are no matches to
    // points in the map. Tracking will still continue if there are enough matches with temporal points.
    // In that case we are doing visual odometry. The system will try to do relocalization to recover
    // "zero-drift" localization to the map.
    bool mbVO;
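
    // Illustrative decision logic (an assumption, not the literal implementation): in
    // localization-only mode the flag typically selects between map tracking and pure VO:
    //
    //     if (mbOnlyTracking && mbVO) {
    //         // too few map matches: follow temporal points (visual odometry)
    //         // and keep attempting Relocalization() to re-anchor to the map
    //     }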

    // Other Thread Pointers
    LocalMapping* mpLocalMapper;
    LoopClosing* mpLoopClosing;

    // ORB
    ORBextractor *mpORBextractorLeft, *mpORBextractorRight;
    ORBextractor* mpIniORBextractor;

    // BoW
    ORBVocabulary* mpORBVocabulary;
    KeyFrameDatabase* mpKeyFrameDB;

    // Initialization (only for monocular)
    Initializer* mpInitializer;
    bool mbSetInit;

    // Local Map
    KeyFrame* mpReferenceKF;
    std::vector<KeyFrame*> mvpLocalKeyFrames;
    std::vector<MapPoint*> mvpLocalMapPoints;

    // System
    System* mpSystem;

    // Drawers
    Viewer* mpViewer;
    FrameDrawer* mpFrameDrawer;
    MapDrawer* mpMapDrawer;
    bool bStepByStep;

    // Atlas
    Atlas* mpAtlas;

    // Calibration matrix
    cv::Mat mK;
    cv::Mat mDistCoef;
    float mbf;

    // New KeyFrame rules (according to fps)
    int mMinFrames;
    int mMaxFrames;

    int mnFirstImuFrameId;
    int mnFramesToResetIMU;

    // Threshold close/far points
    // Points seen as close by the stereo/RGBD sensor are considered reliable
    // and inserted from just one frame. Far points require a match in two keyframes.
    float mThDepth;
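
    // Sketch of how such a threshold is commonly applied (illustrative; the value is
    // typically derived from the stereo baseline and a ThDepth entry in the settings):
    //
    //     bool bClose = (z > 0 && z < mThDepth); // z: depth of a stereo/RGB-D point
    //     // close points may be inserted from a single frame; far points need a
    //     // second keyframe observation before they are trusted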

    // For RGB-D inputs only. For some datasets (e.g. TUM) the depthmap values are scaled.
    float mDepthMapFactor;
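
    // Usage sketch (an assumption, following the common RGB-D convention): raw 16-bit
    // depth images are converted to metric float depth before tracking, e.g. for
    // TUM-style data where 5000 raw units correspond to one meter:
    //
    //     if (fabs(mDepthMapFactor) > 1e-5 && imDepth.type() != CV_32F)
    //         imDepth.convertTo(imDepth, CV_32F, mDepthMapFactor); // mDepthMapFactor ~= 1/5000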

    // Current matches in frame
    int mnMatchesInliers;

    // Last Frame, KeyFrame and Relocalisation Info
    KeyFrame* mpLastKeyFrame;
    unsigned int mnLastKeyFrameId;
    unsigned int mnLastRelocFrameId;
    double mTimeStampLost;
    double time_recently_lost;

    unsigned int mnFirstFrameId;
    unsigned int mnInitialFrameId;
    unsigned int mnLastInitFrameId;

    bool mbCreatedMap;

    // Motion Model
    cv::Mat mVelocity;

    // Color order (true RGB, false BGR, ignored if grayscale)
    bool mbRGB;
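
    // Illustrative use (an assumption): when a 3-channel image arrives, the flag selects
    // the correct grayscale conversion, e.g.:
    //
    //     if (mImGray.channels() == 3)
    //         cv::cvtColor(mImGray, mImGray, mbRGB ? cv::COLOR_RGB2GRAY : cv::COLOR_BGR2GRAY);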

    list<MapPoint*> mlpTemporalPoints;

    //int nMapChangeIndex;

    int mnNumDataset;

    ofstream f_track_stats;

    ofstream f_track_times;
    double mTime_PreIntIMU;
    double mTime_PosePred;
    double mTime_LocalMapTrack;
    double mTime_NewKF_Dec;

    GeometricCamera *mpCamera, *mpCamera2;

    int initID, lastID;

    cv::Mat mTlr;

public:
    cv::Mat mImRight;
};

} // namespace ORB_SLAM3

#endif // TRACKING_H