/**
 * This file is part of ORB-SLAM2.
 *
 * Copyright (C) 2014-2016 Raúl Mur-Artal (University of Zaragoza)
 * For more information see <https://github.com/raulmur/ORB_SLAM2>
 *
 * ORB-SLAM2 is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * ORB-SLAM2 is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with ORB-SLAM2. If not, see <http://www.gnu.org/licenses/>.
 */

#include "System.h"
#include "Converter.h"
// NOTE: the angle-bracket include targets were lost in this copy of the file; the headers
// below are the ones stock ORB-SLAM2 System.cc pulls in (<unistd.h> assumed for usleep).
#include <thread>
#include <pangolin/pangolin.h>
#include <iomanip>
#include <unistd.h>

namespace ORB_SLAM2
{

System::System(const string &strVocFile, const string &strSettingsFile, const eSensor sensor,
               const bool bUseViewer)
    : mSensor(sensor), mpViewer(static_cast<Viewer *>(NULL)), mbReset(false),
      mbActivateLocalizationMode(false), mbDeactivateLocalizationMode(false)
{
    // Output welcome message
    cout << endl
         << "ORB-SLAM2 Copyright (C) 2014-2016 Raul Mur-Artal, University of Zaragoza." << endl
         << "This program comes with ABSOLUTELY NO WARRANTY;" << endl
         << "This is free software, and you are welcome to redistribute it" << endl
         << "under certain conditions. See LICENSE.txt." << endl
         << endl;

    cout << "Input sensor was set to: ";

    if (mSensor == MONOCULAR)
        cout << "Monocular" << endl;
    else if (mSensor == STEREO)
        cout << "Stereo" << endl;
    else if (mSensor == RGBD)
        cout << "RGB-D" << endl;

    // Check settings file
    cv::FileStorage fsSettings(strSettingsFile.c_str(), cv::FileStorage::READ);
    if (!fsSettings.isOpened())
    {
        cerr << "Failed to open settings file at: " << strSettingsFile << endl;
        exit(-1);
    }

    // Load ORB Vocabulary
    cout << endl << "Loading ORB Vocabulary. This could take a while..." << endl;

    mpVocabulary = new ORBVocabulary();
    bool bVocLoad = mpVocabulary->loadFromTextFile(strVocFile);
    if (!bVocLoad)
    {
        cerr << "Wrong path to vocabulary." << endl;
        cerr << "Failed to open at: " << strVocFile << endl;
        exit(-1);
    }
    cout << "Vocabulary loaded!" << endl << endl;

    // Create KeyFrame Database
    mpKeyFrameDatabase = new KeyFrameDatabase(*mpVocabulary);

    // Create the Map
    mpMap = new Map();
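    // Summary of the thread layout created below: Tracking lives in the caller's thread and
    // is driven through TrackStereo/TrackRGBD/TrackMonocular; LocalMapping, LoopClosing, the
    // Semantic_Map builder and (optionally) the Viewer each run in their own std::thread.
    // The Set* calls at the end of the constructor wire the components together.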
    // Create Drawers. These are used by the Viewer
    mpFrameDrawer = new FrameDrawer(mpMap);
    mpMapDrawer = new MapDrawer(mpMap, strSettingsFile);

    // Initialize the Tracking thread
    //(it will live in the main thread of execution, the one that called this constructor)
    mpTracker = new Tracking(this, mpVocabulary, mpFrameDrawer, mpMapDrawer,
                             mpMap, mpKeyFrameDatabase, strSettingsFile, mSensor);

    // Initialize the Local Mapping thread and launch
    mpLocalMapper = new LocalMapping(mpMap, mSensor == MONOCULAR);
    mptLocalMapping = new thread(&ORB_SLAM2::LocalMapping::Run, mpLocalMapper);

    // Initialize the Loop Closing thread and launch
    mpLoopCloser = new LoopClosing(mpMap, mpKeyFrameDatabase, mpVocabulary, mSensor != MONOCULAR);
    mptLoopClosing = new thread(&ORB_SLAM2::LoopClosing::Run, mpLoopCloser);

    // Semantic map
    mpSemantic_Maper = new Semantic_Map(strSettingsFile);
    mptSemantic_Mapping = new thread(&ORB_SLAM2::Semantic_Map::Run, mpSemantic_Maper);

    // Initialize the Viewer thread and launch
    if (bUseViewer)
    {
        mpViewer = new Viewer(this, mpFrameDrawer, mpMapDrawer, mpTracker, strSettingsFile);
        mptViewer = new thread(&Viewer::Run, mpViewer);
        mpTracker->SetViewer(mpViewer);
    }

    // Set pointers between threads
    mpTracker->SetLocalMapper(mpLocalMapper);
    mpTracker->SetLoopClosing(mpLoopCloser);

    mpLocalMapper->SetTracker(mpTracker);
    mpLocalMapper->SetLoopCloser(mpLoopCloser);
    // mpLocalMapper->SetViewer(mpSemantic_Maper); // draw from the keyframe information received in LocalMapper

    mpLoopCloser->SetTracker(mpTracker);
    mpLoopCloser->SetLocalMapper(mpLocalMapper);

    mpLocalMapper->Set_semantic_map(mpSemantic_Maper); // Semantic map
}

// NOTE: the nested element type of vvLabel was lost in this copy of the file;
// vector<vector<float>> is an assumption here (and in TrackRGBD below) and must
// match the declaration in System.h.
cv::Mat System::TrackStereo(const cv::Mat &imLeft, const cv::Mat &imRight, const double &timestamp,
                            vector<vector<float>> &vvLabel, cv::Mat &imbev, double &delta_t)
{
    if (mSensor != STEREO)
    {
        cerr << "ERROR: you called TrackStereo but input sensor was not set to STEREO." << endl;
        exit(-1);
    }

    // Check mode change
    {
        unique_lock<mutex> lock(mMutexMode);
        if (mbActivateLocalizationMode)
        {
            mpLocalMapper->RequestStop();

            // Wait until Local Mapping has effectively stopped
            while (!mpLocalMapper->isStopped())
            {
                usleep(1000);
            }

            mpTracker->InformOnlyTracking(true);
            mbActivateLocalizationMode = false;
        }
        if (mbDeactivateLocalizationMode)
        {
            mpTracker->InformOnlyTracking(false);
            mpLocalMapper->Release();
            mbDeactivateLocalizationMode = false;
        }
    }

    // Check reset
    {
        unique_lock<mutex> lock(mMutexReset);
        if (mbReset)
        {
            mpTracker->Reset();
            mbReset = false;
        }
    }

    cv::Mat Tcw = mpTracker->GrabImageStereo(imLeft, imRight, timestamp, vvLabel, imbev, delta_t);

    unique_lock<mutex> lock2(mMutexState);
    mTrackingState = mpTracker->mState;
    mTrackedMapPoints = mpTracker->mCurrentFrame.mvpMapPoints;
    mTrackedKeyPointsUn = mpTracker->mCurrentFrame.mvKeysUn;
    return Tcw;
}

cv::Mat System::TrackRGBD(const cv::Mat &im, const cv::Mat &depthmap, const double &timestamp,
                          const vector<vector<float>> &vvLabel)
{
    if (mSensor != RGBD)
    {
        cerr << "ERROR: you called TrackRGBD but input sensor was not set to RGBD." << endl;
        exit(-1);
    }
    // Check mode change
    {
        unique_lock<mutex> lock(mMutexMode);
        if (mbActivateLocalizationMode)
        {
            mpLocalMapper->RequestStop();

            // Wait until Local Mapping has effectively stopped
            while (!mpLocalMapper->isStopped())
            {
                usleep(1000);
            }

            mpTracker->InformOnlyTracking(true);
            mbActivateLocalizationMode = false;
        }
        if (mbDeactivateLocalizationMode)
        {
            mpTracker->InformOnlyTracking(false);
            mpLocalMapper->Release();
            mbDeactivateLocalizationMode = false;
        }
    }

    // Check reset
    {
        unique_lock<mutex> lock(mMutexReset);
        if (mbReset)
        {
            mpTracker->Reset();
            mbReset = false;
        }
    }

    cv::Mat Tcw = mpTracker->GrabImageRGBD(im, depthmap, timestamp, vvLabel);

    unique_lock<mutex> lock2(mMutexState);
    mTrackingState = mpTracker->mState;
    mTrackedMapPoints = mpTracker->mCurrentFrame.mvpMapPoints;
    mTrackedKeyPointsUn = mpTracker->mCurrentFrame.mvKeysUn;
    return Tcw;
}

cv::Mat System::TrackMonocular(const cv::Mat &im, const double &timestamp)
{
    if (mSensor != MONOCULAR)
    {
        cerr << "ERROR: you called TrackMonocular but input sensor was not set to Monocular." << endl;
        exit(-1);
    }

    // Check mode change
    {
        unique_lock<mutex> lock(mMutexMode);
        if (mbActivateLocalizationMode)
        {
            mpLocalMapper->RequestStop();

            // Wait until Local Mapping has effectively stopped
            while (!mpLocalMapper->isStopped())
            {
                usleep(1000);
            }

            mpTracker->InformOnlyTracking(true);
            mbActivateLocalizationMode = false;
        }
        if (mbDeactivateLocalizationMode)
        {
            mpTracker->InformOnlyTracking(false);
            mpLocalMapper->Release();
            mbDeactivateLocalizationMode = false;
        }
    }

    // Check reset
    {
        unique_lock<mutex> lock(mMutexReset);
        if (mbReset)
        {
            mpTracker->Reset();
            mbReset = false;
        }
    }

    cv::Mat Tcw = mpTracker->GrabImageMonocular(im, timestamp);

    unique_lock<mutex> lock2(mMutexState);
    mTrackingState = mpTracker->mState;
    mTrackedMapPoints = mpTracker->mCurrentFrame.mvpMapPoints;
    mTrackedKeyPointsUn = mpTracker->mCurrentFrame.mvKeysUn;
    return Tcw;
}

void System::ActivateLocalizationMode()
{
    unique_lock<mutex> lock(mMutexMode);
    mbActivateLocalizationMode = true;
}

void System::DeactivateLocalizationMode()
{
    unique_lock<mutex> lock(mMutexMode);
    mbDeactivateLocalizationMode = true;
}

bool System::MapChanged()
{
    static int n = 0;
    int curn = mpMap->GetLastBigChangeIdx();
    if (n < curn)
    {
        n = curn;
        return true;
    }
    else
        return false;
}

void System::Reset()
{
    unique_lock<mutex> lock(mMutexReset);
    mbReset = true;
}

void System::Shutdown()
{
    mpLocalMapper->RequestFinish();
    mpLoopCloser->RequestFinish();
    mpSemantic_Maper->RequestFinish();
    if (mpViewer)
    {
        mpViewer->RequestFinish();
        while (!mpViewer->isFinished())
            usleep(5000);
    }

    // Wait until all threads have effectively stopped
    while (!mpLocalMapper->isFinished() || !mpLoopCloser->isFinished() || mpLoopCloser->isRunningGBA())
    {
        usleep(5000);
    }
    while (!mpSemantic_Maper->isFinished())
    {
        usleep(5000);
    }

    if (mpViewer)
        pangolin::BindToContext("ORB-SLAM2: Map Viewer");
}

void System::SaveTrajectoryTUM(const string &filename)
{
    cout << endl << "Saving camera trajectory to " << filename << " ..." << endl;
    if (mSensor == MONOCULAR)
    {
        cerr << "ERROR: SaveTrajectoryTUM cannot be used for monocular." << endl;
        return;
    }

    vector<KeyFrame *> vpKFs = mpMap->GetAllKeyFrames();
    sort(vpKFs.begin(), vpKFs.end(), KeyFrame::lId);

    // Transform all keyframes so that the first keyframe is at the origin.
    // After a loop closure the first keyframe might not be at the origin.
    cv::Mat Two = vpKFs[0]->GetPoseInverse();

    ofstream f;
    f.open(filename.c_str());
    f << fixed;

    // Frame pose is stored relative to its reference keyframe (which is optimized by BA and pose graph).
    // We need to get first the keyframe pose and then concatenate the relative transformation.
    // Frames not localized (tracking failure) are not saved.

    // For each frame we have a reference keyframe (lRit), the timestamp (lT) and a flag
    // which is true when tracking failed (lbL).
    list<KeyFrame *>::iterator lRit = mpTracker->mlpReferences.begin();
    list<double>::iterator lT = mpTracker->mlFrameTimes.begin();
    list<bool>::iterator lbL = mpTracker->mlbLost.begin();
    for (list<cv::Mat>::iterator lit = mpTracker->mlRelativeFramePoses.begin(),
                                 lend = mpTracker->mlRelativeFramePoses.end();
         lit != lend; lit++, lRit++, lT++, lbL++)
    {
        if (*lbL)
            continue;

        KeyFrame *pKF = *lRit;

        cv::Mat Trw = cv::Mat::eye(4, 4, CV_32F);

        // If the reference keyframe was culled, traverse the spanning tree to get a suitable keyframe.
        while (pKF->isBad())
        {
            Trw = Trw * pKF->mTcp;
            pKF = pKF->GetParent();
        }

        Trw = Trw * pKF->GetPose() * Two;

        cv::Mat Tcw = (*lit) * Trw;
        cv::Mat Rwc = Tcw.rowRange(0, 3).colRange(0, 3).t();
        cv::Mat twc = -Rwc * Tcw.rowRange(0, 3).col(3);

        vector<float> q = Converter::toQuaternion(Rwc);

        f << setprecision(6) << *lT << " " << setprecision(9)
          << twc.at<float>(0) << " " << twc.at<float>(1) << " " << twc.at<float>(2) << " "
          << q[0] << " " << q[1] << " " << q[2] << " " << q[3] << endl;
    }
    f.close();
    cout << endl << "trajectory saved!" << endl;
}

void System::SaveKeyFrameTrajectoryTUM(const string &filename)
{
    cout << endl << "Saving keyframe trajectory to " << filename << " ..." << endl;

    vector<KeyFrame *> vpKFs = mpMap->GetAllKeyFrames();
    sort(vpKFs.begin(), vpKFs.end(), KeyFrame::lId);

    // Transform all keyframes so that the first keyframe is at the origin.
    // After a loop closure the first keyframe might not be at the origin.
    // cv::Mat Two = vpKFs[0]->GetPoseInverse();

    ofstream f;
    f.open(filename.c_str());
    f << fixed;

    for (size_t i = 0; i < vpKFs.size(); i++)
    {
        KeyFrame *pKF = vpKFs[i];

        // pKF->SetPose(pKF->GetPose()*Two);

        if (pKF->isBad())
            continue;

        cv::Mat R = pKF->GetRotation().t();
        vector<float> q = Converter::toQuaternion(R);
        cv::Mat t = pKF->GetCameraCenter();
        f << setprecision(6) << pKF->mTimeStamp << setprecision(7)
          << " " << t.at<float>(0) << " " << t.at<float>(1) << " " << t.at<float>(2)
          << " " << q[0] << " " << q[1] << " " << q[2] << " " << q[3] << endl;
    }

    f.close();
    cout << endl << "trajectory saved!" << endl;
}

void System::SaveTrajectoryKITTI(const string &filename)
{
    cout << endl << "Saving camera trajectory to " << filename << " ..." << endl;
    if (mSensor == MONOCULAR)
    {
        cerr << "ERROR: SaveTrajectoryKITTI cannot be used for monocular." << endl;
        return;
    }

    cout << endl << "Saving Car pose" << endl;
    // NOTE: the element type of vpCar was lost in this copy of the file; MapCar * is assumed
    // here and must match the return type of Map::GetAllMapCars() in this project's headers.
    vector<MapCar *> vpCar = mpMap->GetAllMapCars();
    cout << endl << "vpCar.size() = " << vpCar.size() << endl;
    for (size_t i = 0; i < vpCar.size(); i++)
    {
        vpCar[i]->Save_car_v();
        if (vpCar[i]->isBad())
        {
            continue;
        }
        // vpCar[i]->Save_car_v();
    }
    cout << endl << "Successfully saved Car pose" << endl;

    vector<KeyFrame *> vpKFs = mpMap->GetAllKeyFrames();
    sort(vpKFs.begin(), vpKFs.end(), KeyFrame::lId);

    // Transform all keyframes so that the first keyframe is at the origin.
    // After a loop closure the first keyframe might not be at the origin.
    cv::Mat Two = vpKFs[0]->GetPoseInverse();

    ofstream f;
    f.open(filename.c_str());
    f << fixed;

    // Frame pose is stored relative to its reference keyframe (which is optimized by BA and pose graph).
    // We need to get first the keyframe pose and then concatenate the relative transformation.
    // Frames not localized (tracking failure) are not saved.

    // For each frame we have a reference keyframe (lRit), the timestamp (lT) and a flag
    // which is true when tracking failed (lbL).
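    // Each output row is the 3x4 camera-to-world matrix [Rwc | twc] in KITTI format. The
    // world-frame pose is recovered by chaining the stored relative pose (*lit) with the
    // reference keyframe pose (and Two, so the first keyframe sits at the origin), then
    // inverting: Rwc = Rcw^T, twc = -Rwc * tcw.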
    list<KeyFrame *>::iterator lRit = mpTracker->mlpReferences.begin();
    list<double>::iterator lT = mpTracker->mlFrameTimes.begin();
    for (list<cv::Mat>::iterator lit = mpTracker->mlRelativeFramePoses.begin(),
                                 lend = mpTracker->mlRelativeFramePoses.end();
         lit != lend; lit++, lRit++, lT++)
    {
        ORB_SLAM2::KeyFrame *pKF = *lRit;

        cv::Mat Trw = cv::Mat::eye(4, 4, CV_32F);

        while (pKF->isBad())
        {
            // cout << "bad parent" << endl;
            Trw = Trw * pKF->mTcp;
            pKF = pKF->GetParent();
        }

        Trw = Trw * pKF->GetPose() * Two;

        cv::Mat Tcw = (*lit) * Trw;
        cv::Mat Rwc = Tcw.rowRange(0, 3).colRange(0, 3).t();
        cv::Mat twc = -Rwc * Tcw.rowRange(0, 3).col(3);

        f << setprecision(9)
          << Rwc.at<float>(0, 0) << " " << Rwc.at<float>(0, 1) << " " << Rwc.at<float>(0, 2) << " " << twc.at<float>(0) << " "
          << Rwc.at<float>(1, 0) << " " << Rwc.at<float>(1, 1) << " " << Rwc.at<float>(1, 2) << " " << twc.at<float>(1) << " "
          << Rwc.at<float>(2, 0) << " " << Rwc.at<float>(2, 1) << " " << Rwc.at<float>(2, 2) << " " << twc.at<float>(2) << endl;
    }
    f.close();
    cout << endl << "trajectory saved!" << endl;
}

int System::GetTrackingState()
{
    unique_lock<mutex> lock(mMutexState);
    return mTrackingState;
}

vector<MapPoint *> System::GetTrackedMapPoints()
{
    unique_lock<mutex> lock(mMutexState);
    return mTrackedMapPoints;
}

vector<cv::KeyPoint> System::GetTrackedKeyPointsUn()
{
    unique_lock<mutex> lock(mMutexState);
    return mTrackedKeyPointsUn;
}

} // namespace ORB_SLAM2
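/*
 * Usage sketch (not part of this translation unit): a minimal stereo driver showing how an
 * application might construct System, feed it frames, and shut it down. File paths, the
 * dataset loader (nImages, vstrLeft, vstrRight, vTimestamps), LoadLabels, and the contents of
 * the per-frame label container vvLabel (assumed vector<vector<float>>, one detection per
 * inner vector) are placeholders, not part of this project.
 *
 *   #include "System.h"
 *   #include <opencv2/opencv.hpp>
 *
 *   int main()
 *   {
 *       ORB_SLAM2::System SLAM("ORBvoc.txt", "KITTI00-02.yaml", ORB_SLAM2::System::STEREO, true);
 *
 *       for (int i = 0; i < nImages; i++)
 *       {
 *           cv::Mat imLeft  = cv::imread(vstrLeft[i],  cv::IMREAD_UNCHANGED);
 *           cv::Mat imRight = cv::imread(vstrRight[i], cv::IMREAD_UNCHANGED);
 *           std::vector<std::vector<float>> vvLabel = LoadLabels(i); // hypothetical per-frame detections
 *           cv::Mat imbev;                                           // bird's-eye-view image filled by the tracker
 *           double delta_t = 0.0;
 *           cv::Mat Tcw = SLAM.TrackStereo(imLeft, imRight, vTimestamps[i], vvLabel, imbev, delta_t);
 *       }
 *
 *       SLAM.Shutdown();
 *       SLAM.SaveTrajectoryKITTI("CameraTrajectory.txt");
 *       return 0;
 *   }
 */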