SLProject  4.2.000
A platform-independent 3D computer graphics framework for desktop OS, Android, iOS, and web browsers
WAIModeOrbSlam2.cpp
#include <WAIModeOrbSlam2.h>
#include <AverageTiming.h>
#include <Utils.h>

WAI::ModeOrbSlam2::ModeOrbSlam2(ORB_SLAM2::KPextractor* kpExtractor,
                                ORB_SLAM2::KPextractor* kpIniExtractor,
                                cv::Mat                 cameraMat,
                                cv::Mat                 distortionMat,
                                const Params&           params,
                                std::string             orbVocFile,
                                bool                    applyMinAccScoreFilter)
  : mpExtractor(kpExtractor),
    mpIniExtractor(kpIniExtractor),
    _params(params),
    _applyMinAccScoreFilter(applyMinAccScoreFilter)
{
    //we have to reset global static stuff here
    WAIKeyFrame::nNextId = 0; //will be updated when a map is loaded
    WAIFrame::nNextId    = 0;
    WAIMapPoint::nNextId = 0;
    // Tell WAIFrame to compute image bounds on first instantiation
    WAIFrame::mbInitialComputations = true;

    cameraMat.convertTo(_cameraMat, CV_32F);
    distortionMat.convertTo(_distortionMat, CV_32F);

    //load visual vocabulary for relocalization
    if (!WAIOrbVocabulary::initialize(orbVocFile))
        throw std::runtime_error("ModeOrbSlam2: could not find vocabulary file: " + orbVocFile);
    mpVocabulary = WAIOrbVocabulary::get();

    //instantiate and load slam map
    mpKeyFrameDatabase = new WAIKeyFrameDB(*mpVocabulary);

    _map = new WAIMap("Map");

    _initialized = (_map->KeyFramesInMap() > 0);

    //instantiate local mapping and loop closing
    mpLocalMapper = new ORB_SLAM2::LocalMapping(_map, 1, mpVocabulary, _params.cullRedundantPerc);
    mpLoopCloser  = new ORB_SLAM2::LoopClosing(_map, mpKeyFrameDatabase, mpVocabulary, false, false);

    mpLocalMapper->SetLoopCloser(mpLoopCloser);
    mpLoopCloser->SetLocalMapper(mpLocalMapper);

    if (!_params.serial)
    {
        mptLocalMapping = new std::thread(&LocalMapping::Run, mpLocalMapper);
        //if (!_params.fixOldKfs)
        mptLoopClosing = new std::thread(&LoopClosing::Run, mpLoopCloser);
    }

    _state = TrackingState_Initializing;
    _pose  = cv::Mat(4, 4, CV_32F);
}

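// Usage sketch (illustrative only, not part of the original file): a minimal
// frame loop driving this mode. The calibration values, the vocabulary path
// and the nested Params type are placeholder assumptions, not verified API.
#if 0
void exampleRunLoop(ORB_SLAM2::KPextractor* extractor,
                    ORB_SLAM2::KPextractor* iniExtractor)
{
    cv::Mat cameraMat     = (cv::Mat_<float>(3, 3) << 800.0f, 0.0f, 320.0f, 0.0f, 800.0f, 240.0f, 0.0f, 0.0f, 1.0f);
    cv::Mat distortionMat = cv::Mat::zeros(1, 5, CV_32F);

    WAI::ModeOrbSlam2::Params params; // assumed defaults: mapping enabled, parallel threads
    WAI::ModeOrbSlam2         mode(extractor, iniExtractor, cameraMat, distortionMat, params, "ORBvoc.bin", false);

    cv::VideoCapture cap(0);
    cv::Mat          rgb, gray, pose;
    while (cap.read(rgb))
    {
        cv::cvtColor(rgb, gray, cv::COLOR_BGR2GRAY);
        if (mode.update(gray, rgb) && mode.getPose(&pose))
        {
            // pose now holds Tcw, the world-to-camera transform of this frame
        }
    }
}
#endif
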
WAI::ModeOrbSlam2::ModeOrbSlam2(ORB_SLAM2::KPextractor* kpExtractor,
                                ORB_SLAM2::KPextractor* kpIniExtractor,
                                ORB_SLAM2::KPextractor* kpMarkerExtractor,
                                std::string             markerFile,
                                cv::Mat                 cameraMat,
                                cv::Mat                 distortionMat,
                                const Params&           params,
                                std::string             orbVocFile,
                                bool                    applyMinAccScoreFilter)
  : mpExtractor(kpExtractor),
    mpIniExtractor(kpIniExtractor),
    _markerExtractor(kpMarkerExtractor),
    _params(params),
    _applyMinAccScoreFilter(applyMinAccScoreFilter)
{
    //we have to reset global static stuff here
    WAIKeyFrame::nNextId = 0; //will be updated when a map is loaded
    WAIFrame::nNextId    = 0;
    WAIMapPoint::nNextId = 0;
    // Tell WAIFrame to compute image bounds on first instantiation
    WAIFrame::mbInitialComputations = true;

    cameraMat.convertTo(_cameraMat, CV_32F);
    distortionMat.convertTo(_distortionMat, CV_32F);

    //load visual vocabulary for relocalization
    if (!WAIOrbVocabulary::initialize(orbVocFile))
        throw std::runtime_error("ModeOrbSlam2: could not find vocabulary file: " + orbVocFile);
    mpVocabulary = WAIOrbVocabulary::get();

    //instantiate and load slam map
    mpKeyFrameDatabase = new WAIKeyFrameDB(*mpVocabulary);

    _map = new WAIMap("Map");

    _initialized = (_map->KeyFramesInMap() > 0);

    //instantiate local mapping and loop closing
    mpLocalMapper = new ORB_SLAM2::LocalMapping(_map, 1, mpVocabulary, _params.cullRedundantPerc);
    mpLoopCloser  = new ORB_SLAM2::LoopClosing(_map, mpKeyFrameDatabase, mpVocabulary, false, false);

    mpLocalMapper->SetLoopCloser(mpLoopCloser);
    mpLoopCloser->SetLocalMapper(mpLocalMapper);

    if (!_params.serial)
    {
        mptLocalMapping = new std::thread(&LocalMapping::Run, mpLocalMapper);
        //if (!_params.fixOldKfs)
        mptLoopClosing = new std::thread(&LoopClosing::Run, mpLoopCloser);
    }

    _state = TrackingState_Initializing;
    _pose  = cv::Mat(4, 4, CV_32F);

    if (_markerExtractor && !markerFile.empty())
        _markerFrame = createMarkerFrame(markerFile, _markerExtractor);
}

void WAI::ModeOrbSlam2::setVocabulary(std::string orbVocFile)
{
    requestStateIdle();
    WAIOrbVocabulary::free();
    WAIOrbVocabulary::initialize(orbVocFile);
    mpVocabulary = WAIOrbVocabulary::get();
    mpKeyFrameDatabase->changeVocabulary(*mpVocabulary, getKeyFrames());
    if (mpLocalMapper)
        mpLocalMapper->SetVocabulary(mpVocabulary);
    if (mpLoopCloser)
        mpLoopCloser->SetVocabulary(mpVocabulary);
    resume();
}

WAI::ModeOrbSlam2::~ModeOrbSlam2()
{
    if (!_params.serial)
    {
        mpLocalMapper->RequestFinish();
        //if (!_params.fixOldKfs)
        mpLoopCloser->RequestFinish();

        // Wait until all threads have effectively stopped
        mptLocalMapping->join();
        if (mptLoopClosing)
            mptLoopClosing->join();
    }

    if (mpLocalMapper) delete mpLocalMapper;
    if (mpLoopCloser) delete mpLoopCloser;
}

bool WAI::ModeOrbSlam2::getPose(cv::Mat* pose)
{
    bool result = false;

    if (_state == TrackingState_TrackingOK && _poseSet)
    {
        *pose  = _pose;
        result = true;
    }

    return result;
}

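// Pose convention (sketch, assuming the usual ORB-SLAM2 layout): _pose stores
// Tcw, the 4x4 world-to-camera transform. A caller that wants the camera
// position in world coordinates inverts it like this:
#if 0
cv::Mat Rcw        = pose.rowRange(0, 3).colRange(0, 3); // rotation world -> camera
cv::Mat tcw        = pose.rowRange(0, 3).col(3);         // translation world -> camera
cv::Mat camCenterW = -Rcw.t() * tcw;                     // camera center in the world frame
#endif
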
bool WAI::ModeOrbSlam2::update(cv::Mat& imageGray, cv::Mat& imageRGB)
{
    _poseSet = false;
    stateTransition();

    switch (_state)
    {
        case TrackingState_Initializing:
        {
            initialize(imageGray, imageRGB);
        }
        break;

        case TrackingState_TrackingOK:
        case TrackingState_TrackingLost:
        {
            //relocalize or track 3d points
            track3DPts(imageGray, imageRGB);
        }
        break;

        case TrackingState_Idle:
        case TrackingState_None:
        default:
        {
        }
        break;
    }

    return _poseSet;
}

int WAI::ModeOrbSlam2::getMapPointCount()
{
    int result = _map->MapPointsInMap();

    return result;
}

int WAI::ModeOrbSlam2::getMapPointMatchesCount()
{
    int result = mnMatchesInliers;

    return result;
}

int WAI::ModeOrbSlam2::getKeyFrameCount()
{
    int result = _map->KeyFramesInMap();

    return result;
}

std::string WAI::ModeOrbSlam2::getLoopCloseStatus()
{
    std::string result = mpLoopCloser->getStatusString();

    return result;
}

int WAI::ModeOrbSlam2::getLoopCloseCount()
{
    int result = _map->getNumLoopClosings();

    return result;
}

int WAI::ModeOrbSlam2::getKeyFramesInLoopCloseBufferCount()
{
    int result = mpLoopCloser->numOfKfsInQueue();

    return result;
}

int WAI::ModeOrbSlam2::getNMapMatches()
{
    std::lock_guard<std::mutex> guard(_nMapMatchesLock);
    return mnMatchesInliers;
}
//-----------------------------------------------------------------------------
int WAI::ModeOrbSlam2::getNumKeyFrames()
{
    std::lock_guard<std::mutex> guard(_mapLock);
    return _map->KeyFramesInMap();
}
//-----------------------------------------------------------------------------
float WAI::ModeOrbSlam2::poseDifference()
{
    std::lock_guard<std::mutex> guard(_poseDiffLock);
    return _poseDifference;
}

float WAI::ModeOrbSlam2::getMeanReprojectionError()
{
    return _meanReprojectionError;
}

std::string WAI::ModeOrbSlam2::getPrintableState()
{
    std::string printableState = "";

    switch (_state)
    {
        case TrackingState_Initializing:
        {
            printableState = "INITIALIZING";
        }
        break;

        case TrackingState_Idle:
        {
            printableState = "IDLE";
        }
        break;

        case TrackingState_TrackingLost:
        {
            printableState = "TRACKING_LOST"; //motion model tracking
        }
        break;

        case TrackingState_TrackingOK:
        {
            printableState = "TRACKING_OK";
        }
        break;

        case TrackingState_None:
        {
            printableState = "TRACKING_NONE";
        }
        break;

        default:
        {
            printableState = "";
        }
        break;
    }

    return printableState;
}

std::string WAI::ModeOrbSlam2::getPrintableType()
{
    switch (_trackingType)
    {
        case TrackingType_MotionModel:
            return "Motion Model";
        case TrackingType_OptFlow:
            return "Optical Flow";
        case TrackingType_ORBSLAM:
            return "ORB-SLAM";
        case TrackingType_None:
        default:
            return "None";
    }
}

std::vector<WAIMapPoint*> WAI::ModeOrbSlam2::getMapPoints()
{
    std::lock_guard<std::mutex> guard(_mapLock);

    std::vector<WAIMapPoint*> result = _map->GetAllMapPoints();

    return result;
}

std::vector<WAIMapPoint*> WAI::ModeOrbSlam2::getMarkerCornerMapPoints()
{
    std::vector<WAIMapPoint*> result;

    if (_mpUL)
    {
        if (_mpUL->isBad())
        {
            Utils::log("WAI", "_mpUL->isBad()!!!");
        }
        result.push_back(_mpUL);
    }
    if (_mpUR)
    {
        if (_mpUR->isBad())
        {
            Utils::log("WAI", "_mpUR->isBad()!!!");
        }
        result.push_back(_mpUR);
    }
    if (_mpLL)
    {
        if (_mpLL->isBad())
        {
            Utils::log("WAI", "_mpLL->isBad()!!!");
        }
        result.push_back(_mpLL);
    }
    if (_mpLR)
    {
        if (_mpLR->isBad())
        {
            Utils::log("WAI", "_mpLR->isBad()!!!");
        }
        result.push_back(_mpLR);
    }

    return result;
}

std::vector<WAIMapPoint*> WAI::ModeOrbSlam2::getMatchedMapPoints()
{
    std::lock_guard<std::mutex> guard(_mapLock);

    std::vector<WAIMapPoint*> result;

    if (_optFlowOK)
    {
        result = _optFlowMapPtsLastFrame;
    }
    else
    {
        for (int i = 0; i < mCurrentFrame.N; i++)
        {
            if (mCurrentFrame.mvpMapPoints[i])
            {
                if (mCurrentFrame.mvpMapPoints[i]->Observations() > 0)
                    result.push_back(mCurrentFrame.mvpMapPoints[i]);
            }
        }
    }

    return result;
}

std::pair<std::vector<cv::Vec3f>, std::vector<cv::Vec2f>> WAI::ModeOrbSlam2::getMatchedCorrespondances()
{
    std::lock_guard<std::mutex> guard(_mapLock);

    std::vector<cv::Vec3f> points3d;
    std::vector<cv::Vec2f> points2d;

    for (int i = 0; i < mCurrentFrame.N; i++)
    {
        WAIMapPoint* mp = mCurrentFrame.mvpMapPoints[i];
        if (mp)
        {
            if (mp->Observations() > 0)
            {
                WAI::V3   _v = mp->worldPosVec();
                cv::Vec3f v;
                v[0] = _v.x;
                v[1] = _v.y;
                v[2] = _v.z;
                points3d.push_back(v);
                points2d.push_back(mCurrentFrame.mvKeysUn[i].pt);
            }
        }
    }

    return std::pair<std::vector<cv::Vec3f>, std::vector<cv::Vec2f>>(points3d, points2d);
}

std::pair<std::vector<cv::Vec3f>, std::vector<cv::Vec2f>> WAI::ModeOrbSlam2::getCorrespondances()
{
    std::lock_guard<std::mutex> guard(_mapLock);

    std::vector<cv::Vec3f> points3d;
    std::vector<cv::Vec2f> points2d;

    for (int i = 0; i < mCurrentFrame.N; i++)
    {
        WAIMapPoint* mp = mCurrentFrame.mvpMapPoints[i];
        if (mp)
        {
            WAI::V3   _v = mp->worldPosVec();
            cv::Vec3f v;
            v[0] = _v.x;
            v[1] = _v.y;
            v[2] = _v.z;
            points3d.push_back(v);
            points2d.push_back(mCurrentFrame.mvKeys[i].pt);
        }
    }

    return std::pair<std::vector<cv::Vec3f>, std::vector<cv::Vec2f>>(points3d, points2d);
}

std::vector<WAIMapPoint*> WAI::ModeOrbSlam2::getLocalMapPoints()
{
    std::lock_guard<std::mutex> guard(_mapLock);
    std::vector<WAIMapPoint*>   result = mvpLocalMapPoints;

    return result;
}

std::vector<WAIKeyFrame*> WAI::ModeOrbSlam2::getKeyFrames()
{
    std::lock_guard<std::mutex> guard(_mapLock);
    std::vector<WAIKeyFrame*>   result = _map->GetAllKeyFrames();

    return result;
}

bool WAI::ModeOrbSlam2::getTrackOptFlow()
{
    std::lock_guard<std::mutex> guard(_optFlowLock);
    return _params.trackOptFlow;
}

void WAI::ModeOrbSlam2::setTrackOptFlow(bool flag)
{
    std::lock_guard<std::mutex> guard(_optFlowLock);
    _params.trackOptFlow = flag;
    _optFlowOK           = false;
}

void WAI::ModeOrbSlam2::disableMapping()
{
    _params.onlyTracking = true;
    if (!_params.serial)
    {
        mpLocalMapper->RequestStop();
        while (!mpLocalMapper->isStopped())
        {
            std::this_thread::sleep_for(std::chrono::microseconds(10));
        }
    }
    mpLocalMapper->InterruptBA();
}

void WAI::ModeOrbSlam2::enableMapping()
{
    _params.onlyTracking = false;
    resume();
}

void WAI::ModeOrbSlam2::stateTransition()
{
    std::lock_guard<std::mutex> guard(_mutexStates);

    //store last state
    //mLastProcessedState = _state;

    if (_idleRequested)
    {
        _state         = TrackingState_Idle;
        _idleRequested = false;
    }
    else if (_state == TrackingState_Idle)
    {
        if (_resumeRequested)
        {
            if (!_initialized)
            {
                _state = TrackingState_Initializing;
            }
            else
            {
                _state = TrackingState_TrackingLost;
            }

            _resumeRequested = false;
        }
    }
    else if (_state == TrackingState_Initializing)
    {
        if (_initialized)
        {
            if (_bOK)
            {
                _state = TrackingState_TrackingOK;
            }
            else
            {
                _state = TrackingState_TrackingLost;
            }
        }
    }
    else if (_state == TrackingState_TrackingOK)
    {
        if (!_bOK)
        {
            _state = TrackingState_TrackingLost;
        }
    }
    else if (_state == TrackingState_TrackingLost)
    {
        if (_bOK)
        {
            _state = TrackingState_TrackingOK;
        }
    }
}

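// State machine summary (derived from the transitions above):
//
//   Idle ---------resume, !_initialized--------> Initializing
//   Idle ---------resume, _initialized---------> TrackingLost
//   Initializing --_initialized && _bOK--------> TrackingOK
//   Initializing --_initialized && !_bOK-------> TrackingLost
//   TrackingOK -----------!_bOK----------------> TrackingLost
//   TrackingLost ----------_bOK----------------> TrackingOK
//   any state ----------idle request-----------> Idle
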
void WAI::ModeOrbSlam2::initialize(cv::Mat& imageGray, cv::Mat& imageRGB)
{
    //1. if there are more than 100 keypoints in the current frame, the Initializer is instantiated
    //2. if there are less than 100 keypoints in the next frame, the Initializer is deinstantiated again
    //3. else if there are more than 100 keypoints we try to match the keypoints in the current with the initial frame
    //4. if we found less than 100 matches between the current and the initial keypoints, the Initializer is deinstantiated
    //5. else we try to initialize: that means a homography and a fundamental matrix are calculated in parallel and 3D points are triangulated initially
    //6. if the initialization (by homography or fundamental matrix) was successful, an initial map is created:
    //  - two keyframes are generated from the initial and the current frame and added to keyframe database and map
    //  - a mappoint is instantiated from the triangulated 3D points and all necessary members are calculated (distinctive descriptor, depth and normal, add observation reference of keyframes)
    //  - a global bundle adjustment is applied
    //  - the two new keyframes are added to the local mapper and the local mapper is started twice
    //  - the tracking state is changed to TRACKING/INITIALIZED

    // NOTE(dgj1): was originally 100
    int matchesNeeded = 100;

    // Get Map Mutex -> Map cannot be changed
    std::unique_lock<std::mutex> lock(_map->mMutexMapUpdate, std::defer_lock);
    if (!_params.serial)
    {
        lock.lock();
    }

    mCurrentFrame = WAIFrame(imageGray,
                             0.0,
                             mpIniExtractor,
                             _cameraMat,
                             _distortionMat,
                             mpVocabulary,
                             _params.retainImg);

    if (!mpInitializer)
    {
// NOTE(dgj1): find matches with marker if necessary
// We only initialize if the first frame has enough matches to the marker
// TODO(dgj1): rethink this constraint
#if 0
        if (_createMarkerMap)
        {
            ORBmatcher               matcher(0.9, true);
            std::vector<cv::Point2f> prevMatched(_markerFrame.mvKeysUn.size());
            for (size_t i = 0; i < _markerFrame.mvKeysUn.size(); i++)
                prevMatched[i] = _markerFrame.mvKeysUn[i].pt;

            std::vector<int> markerMatchesToCurrentFrame;
            int              nmatches = matcher.SearchForInitialization(_markerFrame, mCurrentFrame, prevMatched, markerMatchesToCurrentFrame, 100);

            if (nmatches <= 30)
            {
                return;
            }
        }
#endif

        // Set Reference Frame
        if (mCurrentFrame.mvKeys.size() > matchesNeeded)
        {
            mInitialFrame = WAIFrame(mCurrentFrame);
            mLastFrame    = WAIFrame(mCurrentFrame);
            mvbPrevMatched.resize(mCurrentFrame.mvKeysUn.size());
            //ghm1: we store the undistorted keypoints of the initial frame in an extra vector
            //todo: why not use mInitialFrame.mvKeysUn????
            for (size_t i = 0; i < mCurrentFrame.mvKeysUn.size(); i++)
                mvbPrevMatched[i] = mCurrentFrame.mvKeysUn[i].pt;

            // TODO(jan): is this necessary?
            if (mpInitializer)
                delete mpInitializer;

            mpInitializer = new ORB_SLAM2::Initializer(mCurrentFrame, 1.0, 200);
            //ghm1: clear mvIniMatches. it contains the index of the matched keypoint in the current frame
            fill(mvIniMatches.begin(), mvIniMatches.end(), -1);

            return;
        }
    }
    else
    {
        // Try to initialize
        if ((int)mCurrentFrame.mvKeys.size() <= matchesNeeded)
        {
            delete mpInitializer;
            mpInitializer = static_cast<Initializer*>(NULL);
            fill(mvIniMatches.begin(), mvIniMatches.end(), -1);
            return;
        }

        // Find correspondences
        ORBmatcher matcher(0.9, true);
        int        nmatches = matcher.SearchForInitialization(mInitialFrame, mCurrentFrame, mvbPrevMatched, mvIniMatches, 100);

        // Check if there are enough correspondences
        if (nmatches < matchesNeeded)
        {
            delete mpInitializer;
            mpInitializer = static_cast<Initializer*>(NULL);
            return;
        }

        for (unsigned int i = 0; i < mInitialFrame.mvKeys.size(); i++)
        {
            cv::rectangle(imageRGB,
                          mInitialFrame.mvKeys[i].pt,
                          cv::Point(mInitialFrame.mvKeys[i].pt.x + 3, mInitialFrame.mvKeys[i].pt.y + 3),
                          cv::Scalar(0, 0, 255));
        }

        //ghm1: decorate image with tracked matches
        for (unsigned int i = 0; i < mvIniMatches.size(); i++)
        {
            if (mvIniMatches[i] >= 0)
            {
                cv::line(imageRGB,
                         mInitialFrame.mvKeys[i].pt,
                         mCurrentFrame.mvKeys[mvIniMatches[i]].pt,
                         cv::Scalar(0, 255, 0));
            }
        }

        cv::Mat      Rcw;            // Current Camera Rotation
        cv::Mat      tcw;            // Current Camera Translation
        vector<bool> vbTriangulated; // Triangulated Correspondences (mvIniMatches)

        if (mpInitializer->Initialize(mCurrentFrame, mvIniMatches, Rcw, tcw, mvIniP3D, vbTriangulated))
        {
            //cv::imwrite("mCurrentFrame.png", mCurrentFrame.imgGray);
            //cv::imwrite("mInitialFrame.png", mInitialFrame.imgGray);

            //std::cout << "Initialize num matches: " << mvIniMatches.size() << std::endl;
            for (size_t i = 0, iend = mvIniMatches.size(); i < iend; i++)
            {
                if (mvIniMatches[i] >= 0 && !vbTriangulated[i])
                {
                    mvIniMatches[i] = -1;
                    nmatches--;
                }
            }

            //std::cout << "ModeOrbSlam2::initialize" << std::endl;
            //std::cout << "R: " << Rcw << std::endl;
            //std::cout << "t: " << tcw << std::endl;

            // Set Frame Poses
            mInitialFrame.SetPose(cv::Mat::eye(4, 4, CV_32F));
            cv::Mat Tcw = cv::Mat::eye(4, 4, CV_32F);
            Rcw.copyTo(Tcw.rowRange(0, 3).colRange(0, 3));
            tcw.copyTo(Tcw.rowRange(0, 3).col(3));
            mCurrentFrame.SetPose(Tcw);

            bool mapInitializedSuccessfully = createInitialMapMonocular(matchesNeeded);

            if (mapInitializedSuccessfully)
            {
                //mark tracking as initialized
                _initialized = true;
                _bOK         = true;
            }

            //ghm1: in the original implementation the initialization is defined in the track() function and this part is always called at the end!
            // Store frame pose information to retrieve the complete camera trajectory afterwards.
            if (!mCurrentFrame.mTcw.empty() && mCurrentFrame.mpReferenceKF)
            {
                cv::Mat Tcr = mCurrentFrame.mTcw * mCurrentFrame.mpReferenceKF->GetPoseInverse();
                mlRelativeFramePoses.push_back(Tcr);
                mlpReferences.push_back(mpReferenceKF);
                mlFrameTimes.push_back(mCurrentFrame.mTimeStamp);
                mlbLost.push_back(_state == TrackingState_TrackingLost);
            }
            else if (mlRelativeFramePoses.size())
            {
                // This can happen if tracking is lost
                mlRelativeFramePoses.push_back(mlRelativeFramePoses.back());
                mlpReferences.push_back(mlpReferences.back());
                mlFrameTimes.push_back(mlFrameTimes.back());
                mlbLost.push_back(_state == TrackingState_TrackingLost);
            }
        }
    }
}

bool WAI::ModeOrbSlam2::posInGrid(const cv::KeyPoint& kp, int& posX, int& posY, int minX, int minY)
{
    posX = (int)round((kp.pt.x - minX) * _optFlowGridElementWidthInv);
    posY = (int)round((kp.pt.y - minY) * _optFlowGridElementHeightInv);

    //Keypoint's coordinates are undistorted, which could cause them to fall outside the image
    if (posX < 0 || posX >= OPTFLOW_GRID_COLS || posY < 0 || posY >= OPTFLOW_GRID_ROWS)
        return false;

    return true;
}

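// Sketch of where the inverse cell sizes used above would come from (an
// assumption, analogous to the frame grid in ORB_SLAM2; minX/maxX/minY/maxY
// are the undistorted image bounds and are not defined here):
#if 0
_optFlowGridElementWidthInv  = (float)OPTFLOW_GRID_COLS / (float)(maxX - minX);
_optFlowGridElementHeightInv = (float)OPTFLOW_GRID_ROWS / (float)(maxY - minY);
// posInGrid then maps a pixel x in [minX, maxX) to a column in [0, OPTFLOW_GRID_COLS).
#endif
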
void WAI::ModeOrbSlam2::track3DPts(cv::Mat& imageGray, cv::Mat& imageRGB)
{
    mCurrentFrame = WAIFrame(imageGray,
                             0.0,
                             mpExtractor,
                             _cameraMat,
                             _distortionMat,
                             mpVocabulary,
                             _params.retainImg);

    // Get Map Mutex -> Map cannot be changed
    std::unique_lock<std::mutex> lock(_map->mMutexMapUpdate, std::defer_lock);
    if (!_params.serial)
    {
        lock.lock();
    }

    //mLastProcessedState = mState;
    //bool bOK;
    _bOK = false;
    //trackingType = TrackingType_None;

    if (!_params.onlyTracking)
    {
        // Local Mapping is activated. This is the normal behaviour, unless
        // you explicitly activate the "only tracking" mode.

        if (_state == TrackingState_TrackingOK)
        {
            // Local Mapping might have changed some MapPoints tracked in last frame
            checkReplacedInLastFrame();

            if (mVelocity.empty() || mCurrentFrame.mnId < mnLastRelocFrameId + 2)
            {
                _bOK = trackReferenceKeyFrame();
                //trackingType = TrackingType_ORBSLAM;
            }
            else
            {
                _bOK = trackWithMotionModel();
                //trackingType = TrackingType_MotionModel;

                if (!_bOK)
                {
                    _bOK = trackReferenceKeyFrame();
                    //trackingType = TrackingType_ORBSLAM;
                }
            }
        }
        else
        {
            _bOK = relocalization(mCurrentFrame, mpKeyFrameDatabase, &mnLastRelocFrameId, *_map, _applyMinAccScoreFilter, false);
        }
    }
    else
    {
        // Localization Mode: Local Mapping is deactivated
        if (_state == TrackingState_TrackingLost)
        {
            _bOK       = relocalization(mCurrentFrame, mpKeyFrameDatabase, &mnLastRelocFrameId, *_map, _applyMinAccScoreFilter);
            _optFlowOK = false;
            //cout << "Relocalization: " << bOK << endl;
        }
        else
        {
            //if NOT visual odometry tracking
            if (!mbVO) // In last frame we tracked enough MapPoints from the Map
            {
                if (!mVelocity.empty())
                {
                    //we have a valid motion model
                    _bOK = trackWithMotionModel();
                    //trackingType = TrackingType_MotionModel;
                    //cout << "TrackWithMotionModel: " << bOK << endl;
                }
                else
                {
                    //we have NO valid motion model
                    // All keyframes that observe a map point are included in the local map.
                    // Every current frame gets a reference keyframe assigned which is the keyframe
                    // from the local map that shares most matches with the current frame's local map point matches.
                    // It is updated in UpdateLocalKeyFrames().
                    _bOK = trackReferenceKeyFrame();
                    //trackingType = TrackingType_ORBSLAM;
                    //cout << "TrackReferenceKeyFrame" << endl;
                }
            }
            else // In last frame we tracked mainly "visual odometry" points.
            {
                // We compute two camera poses, one from motion model and one doing relocalization.
                // If relocalization is successful we choose that solution, otherwise we retain
                // the "visual odometry" solution.
                bool                 bOKMM    = false;
                bool                 bOKReloc = false;
                vector<WAIMapPoint*> vpMPsMM;
                vector<bool>         vbOutMM;
                cv::Mat              TcwMM;
                if (!mVelocity.empty())
                {
                    bOKMM   = trackWithMotionModel();
                    vpMPsMM = mCurrentFrame.mvpMapPoints;
                    TcwMM   = mCurrentFrame.mTcw.clone();
                }
                bOKReloc = relocalization(mCurrentFrame, mpKeyFrameDatabase, &mnLastRelocFrameId, *_map, _applyMinAccScoreFilter);
                //the relocalization method is not valid but the velocity model method is
                if (bOKMM && !bOKReloc)
                {
                    mCurrentFrame.SetPose(TcwMM);
                    mCurrentFrame.mvpMapPoints = vpMPsMM;

                    if (mbVO)
                    {
                        for (int i = 0; i < mCurrentFrame.N; i++)
                        {
                            //if (mCurrentFrame.mvpMapPoints[i] && !mCurrentFrame.mvbOutlier[i])
                            if (mCurrentFrame.mvpMapPoints[i])
                            {
                                mCurrentFrame.mvpMapPoints[i]->IncreaseFound();
                            }
                        }
                    }
                }
                else if (bOKReloc)
                {
                    mbVO = false;
                }

                _bOK = bOKReloc || bOKMM;
                //trackingType = TrackingType_None;
            }
        }
    }

    // If we have an initial estimation of the camera pose and matching, track the local map.
    if (!_params.onlyTracking)
    {
        if (_bOK)
        {
            _bOK = trackLocalMap();
        }
    }
    else
    {
        // mbVO true means that there are few matches to MapPoints in the map. We cannot retrieve
        // a local map and therefore we do not perform TrackLocalMap(). Once the system relocalizes
        // the camera we will use the local map again.
        if (_bOK && !mbVO)
            _bOK = trackLocalMap();
    }

    if (_params.trackOptFlow && _bOK && _state == TrackingState_TrackingOK)
    {
        //We always run the optical flow additionally, because it gives
        //a more stable pose. We use this pose if successful.
        _optFlowOK = trackWithOptFlow();
    }

    //if (bOK)
    //    mState = OK;
    //else
    //    mState = LOST;

    // If tracking was good
    if (_bOK)
    {
        // Update motion model
        if (!mLastFrame.mTcw.empty())
        {
            //cout << "mLastFrame.mTcw: " << mLastFrame.mTcw << endl;
            cv::Mat LastTwc = cv::Mat::eye(4, 4, CV_32F);
            //cout << "LastTwc eye: " << LastTwc << endl;
            mLastFrame.GetRotationInverse().copyTo(LastTwc.rowRange(0, 3).colRange(0, 3)); //mRwc
            //cout << "LastTwc rot: " << LastTwc << endl;
            const auto& cc = mLastFrame.GetCameraCenter(); //this is the translation of the frame w.r.t the world
            //cout << cc << endl;
            cc.copyTo(LastTwc.rowRange(0, 3).col(3));
            //cout << "LastTwc total: " << LastTwc << endl;
            //this concatenates the motion difference between the last and the before-last frame (so it is no real velocity but a transformation)
            mVelocity = mCurrentFrame.mTcw * LastTwc;
        }
        else
        {
            mVelocity = cv::Mat();
        }

        //set current pose
        {
            cv::Mat Tcw;
            if (_optFlowOK)
            {
                Tcw = _optFlowTcw.clone();
            }
            else
            {
                Tcw = mCurrentFrame.mTcw.clone();
            }

            _pose    = Tcw;
            _poseSet = true;
        }

        // Clean VO matches
        /*
        for (int i = 0; i < mCurrentFrame.N; i++)
        {
            WAIMapPoint* pMP = mCurrentFrame.mvpMapPoints[i];
            if (pMP)
            {
                if (pMP->Observations() < 1)
                {
                    mCurrentFrame.mvbOutlier[i]   = false;
                    mCurrentFrame.mvpMapPoints[i] = static_cast<WAIMapPoint*>(NULL);
                }
            }
        }
        */

        //ghm1: manual local mapping of current frame
        if (needNewKeyFrame())
        {
            createNewKeyFrame();

            if (_params.serial)
            {
                //call local mapper
                mpLocalMapper->RunOnce();
                //normally the loop closing would feed the keyframe database, but we do it here
                //mpKeyFrameDatabase->add(mpLastKeyFrame);

                //loop closing
                //if (!_params.fixOldKfs)
                mpLoopCloser->RunOnce();
            }

            //update visualization of map, it may have changed because of global bundle adjustment.
            //map points will be updated with next decoration.
            _mapHasChanged = true;
        }

        // We allow points with high innovation (considered outliers by the Huber Function)
        // to pass to the new keyframe, so that bundle adjustment will finally decide
        // if they are outliers or not. We don't want the next frame to estimate its position
        // with those points so we discard them in the frame.
        /*
        for (int i = 0; i < mCurrentFrame.N; i++)
        {
            if (mCurrentFrame.mvpMapPoints[i] && mCurrentFrame.mvbOutlier[i])
            {
                mCurrentFrame.mvpMapPoints[i] = static_cast<WAIMapPoint*>(NULL);
            }
        }
        */
    }

    if (!mCurrentFrame.mpReferenceKF)
    {
        mCurrentFrame.mpReferenceKF = mpReferenceKF;
    }

    decorate(imageRGB);

    mLastFrame = WAIFrame(mCurrentFrame);

    // Store frame pose information to retrieve the complete camera trajectory afterwards.
    if (mCurrentFrame.mpReferenceKF && !mCurrentFrame.mTcw.empty())
    {
        cv::Mat Tcr = mCurrentFrame.mTcw * mCurrentFrame.mpReferenceKF->GetPoseInverse(); //Tcr = Tcw * Twr (current wrt reference = world wrt current * reference wrt world)
        //relative frame poses are used to refer a frame to its reference frame
        mlRelativeFramePoses.push_back(Tcr);
        mlpReferences.push_back(mpReferenceKF);
        mlFrameTimes.push_back(mCurrentFrame.mTimeStamp);
        mlbLost.push_back(_state == TrackingState_TrackingLost);
    }
    else if (mlRelativeFramePoses.size() && mlpReferences.size() && mlFrameTimes.size())
    {
        // This can happen if tracking is lost
        mlRelativeFramePoses.push_back(mlRelativeFramePoses.back());
        mlpReferences.push_back(mlpReferences.back());
        mlFrameTimes.push_back(mlFrameTimes.back());
        mlbLost.push_back(_state == TrackingState_TrackingLost);
    }
}

bool WAI::ModeOrbSlam2::createInitialMapMonocular(int mapPointsNeeded)
{
    //ghm1: reset nNextId to 0! This is important otherwise the first keyframe cannot be identified via its id and a lot of stuff gets messed up!
    //One problem we identified is in UpdateConnections: the first one is not allowed to have a parent,
    //because the second will set the first as a parent too. We get problems later during culling.
    //This also fixes a problem in first GlobalBundleAdjustment which messed up the map after a reset.
    WAIKeyFrame::nNextId = 0;

    // Create KeyFrames
    WAIKeyFrame* pKFini = new WAIKeyFrame(mInitialFrame, mpKeyFrameDatabase);
    WAIKeyFrame* pKFcur = new WAIKeyFrame(mCurrentFrame, mpKeyFrameDatabase);

    pKFini->ComputeBoW(mpVocabulary);
    pKFcur->ComputeBoW(mpVocabulary);

    // Insert KFs in the map
    _map->AddKeyFrame(pKFini);
    _map->AddKeyFrame(pKFcur);

    // Create MapPoints and associate to keyframes
    for (size_t i = 0; i < mvIniMatches.size(); i++)
    {
        if (mvIniMatches[i] < 0)
            continue;

        //Create MapPoint.
        cv::Mat worldPos(mvIniP3D[i]);

        WAIMapPoint* pMP = new WAIMapPoint(worldPos, pKFcur);

        pKFini->AddMapPoint(pMP, i);
        pKFcur->AddMapPoint(pMP, mvIniMatches[i]);

        pMP->AddObservation(pKFini, i);
        pMP->AddObservation(pKFcur, mvIniMatches[i]);

        pMP->ComputeDistinctiveDescriptors();
        pMP->UpdateNormalAndDepth();

        //Fill Current Frame structure
        mCurrentFrame.mvpMapPoints[mvIniMatches[i]] = pMP;

        //Add to Map
        _map->AddMapPoint(pMP);
    }

    // Update Connections
    pKFini->UpdateConnections();
    pKFcur->UpdateConnections();

    //cout << "New Map created with " << _map->MapPointsInMap() << " points" << endl;

    // Bundle Adjustment
    Optimizer::GlobalBundleAdjustemnt(_map, 20);

    // Set median depth to 1
    float medianDepth    = pKFini->ComputeSceneMedianDepth(2);
    float invMedianDepth = 1.0f / medianDepth;

    if (medianDepth < 0 || pKFcur->TrackedMapPoints(1) < mapPointsNeeded)
    {
        Utils::log("WAI", "Wrong initialization, resetting...");
        reset();
        return false;
    }

    // Scale initial baseline
    cv::Mat Tc2w               = pKFcur->GetPose();
    Tc2w.col(3).rowRange(0, 3) = Tc2w.col(3).rowRange(0, 3) * invMedianDepth;
    pKFcur->SetPose(Tc2w);

    // Scale points
    vector<WAIMapPoint*> vpAllMapPoints = pKFini->GetMapPointMatches();
    for (size_t iMP = 0; iMP < vpAllMapPoints.size(); iMP++)
    {
        if (vpAllMapPoints[iMP])
        {
            WAIMapPoint* pMP = vpAllMapPoints[iMP];
            pMP->SetWorldPos(pMP->GetWorldPos() * invMedianDepth);
        }
    }

    mpLocalMapper->InsertKeyFrame(pKFini);
    mpLocalMapper->InsertKeyFrame(pKFcur);

    mCurrentFrame.SetPose(pKFcur->GetPose());
    mnLastKeyFrameId = mCurrentFrame.mnId;
    mpLastKeyFrame   = pKFcur;

    mvpLocalKeyFrames.push_back(pKFcur);
    mvpLocalKeyFrames.push_back(pKFini);
    mvpLocalMapPoints = _map->GetAllMapPoints();

    mpReferenceKF               = pKFcur;
    mCurrentFrame.mpReferenceKF = pKFcur;

    mLastFrame = WAIFrame(mCurrentFrame);

    _map->SetReferenceMapPoints(mvpLocalMapPoints);

    _map->mvpKeyFrameOrigins.push_back(pKFini);

    //ghm1: run local mapping once
    if (_params.serial)
    {
        mpLocalMapper->RunOnce();
        //todo: why two times??
        mpLocalMapper->RunOnce();
    }

    // Bundle Adjustment
    //Utils::log("WAI","Number of Map points after local mapping: %i", _map->MapPointsInMap());

    //ghm1: add keyframe to scene graph. this position is wrong after bundle adjustment!
    //set map dirty, the map will be updated in next decoration
    _mapHasChanged = true;
    return true;
}

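// Note on the median-depth normalization above: monocular initialization
// recovers structure only up to an unknown global scale, so the map is
// normalized such that the median scene depth seen from the first keyframe
// equals 1. Scaling the baseline (the translation of pKFcur) and every map
// point by the same factor invMedianDepth leaves all reprojections unchanged.
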
void WAI::ModeOrbSlam2::checkReplacedInLastFrame()
{
    for (int i = 0; i < mLastFrame.N; i++)
    {
        WAIMapPoint* pMP = mLastFrame.mvpMapPoints[i];

        if (pMP)
        {
            WAIMapPoint* pRep = pMP->GetReplaced();
            if (pRep)
            {
                mLastFrame.mvpMapPoints[i] = pRep;
            }
        }
    }
}

bool WAI::ModeOrbSlam2::needNewKeyFrame()
{
    if (_params.onlyTracking)
        return false;

    // If Local Mapping is frozen by a Loop Closure do not insert keyframes
    if (mpLocalMapper->isStopped() || mpLocalMapper->stopRequested())
        return false;

    const int nKFs = _map->KeyFramesInMap();

    // Do not insert keyframes if not enough frames have passed from last relocalization,
    // unless less than 30 keyframes are in the map
    if (mCurrentFrame.mnId < mnLastRelocFrameId + mMaxFrames && nKFs > mMaxFrames)
        return false;

    // Tracked MapPoints in the reference keyframe
    int nMinObs = 3;
    if (nKFs <= 2)
        nMinObs = 2;
    int nRefMatches = mpReferenceKF->TrackedMapPoints(nMinObs);

    // Does Local Mapping accept keyframes?
    bool bLocalMappingIdle = mpLocalMapper->AcceptKeyFrames();

    // Thresholds
    float thRefRatio = 0.9f;
    if (nKFs < 2)
        thRefRatio = 0.4f;

    // Condition 1a: More than "MaxFrames" have passed from last keyframe insertion
    const bool c1a = mCurrentFrame.mnId >= mnLastKeyFrameId + mMaxFrames;
    // Condition 1b: More than "MinFrames" have passed and Local Mapping is idle
    const bool c1b = (mCurrentFrame.mnId >= mnLastKeyFrameId + mMinFrames && bLocalMappingIdle);
    // Condition 2: Few tracked points compared to reference keyframe. Lots of visual odometry compared to map matches.
    const bool c2 = ((mnMatchesInliers < nRefMatches * thRefRatio) && mnMatchesInliers > 15);

    if ((c1a || c1b) && c2)
    {
        // If the mapping accepts keyframes, insert keyframe.
        // Otherwise send a signal to interrupt BA
        if (bLocalMappingIdle)
        {
            return true;
        }
        else
        {
            mpLocalMapper->InterruptBA();
            return false;
        }
    }
    else
    {
        return false;
    }
}

void WAI::ModeOrbSlam2::createNewKeyFrame()
{
    if (!mpLocalMapper->SetNotStop(true))
        return;

    WAIKeyFrame* pKF = new WAIKeyFrame(mCurrentFrame, mpKeyFrameDatabase);

    mpReferenceKF               = pKF;
    mCurrentFrame.mpReferenceKF = pKF;
    mpLocalMapper->InsertKeyFrame(pKF);

    mpLocalMapper->SetNotStop(false);

    mnLastKeyFrameId = mCurrentFrame.mnId;
    mpLastKeyFrame   = pKF;
}

void WAI::ModeOrbSlam2::reset()
{
    Utils::log("WAI", "System Resetting");

    // Reset Local Mapping
    if (!_params.serial)
    {
        mpLocalMapper->RequestReset();
    }
    else
    {
        mpLocalMapper->reset();
    }

    //// Reset Loop Closing
    if (!_params.serial)
    {
        //if (!_params.fixOldKfs)
        mpLoopCloser->RequestReset();
    }
    else
    {
        mpLoopCloser->reset();
    }

    // Clear BoW Database
    mpKeyFrameDatabase->clear();

    // Clear Map (this erases MapPoints and KeyFrames)
    _map->clear();

    WAIKeyFrame::nNextId            = 0;
    WAIFrame::nNextId               = 0;
    WAIFrame::mbInitialComputations = true;
    WAIMapPoint::nNextId            = 0;

    _bOK         = false;
    _initialized = false;

    if (mpInitializer)
    {
        delete mpInitializer;
        mpInitializer = static_cast<Initializer*>(nullptr);
    }

    mlRelativeFramePoses.clear();
    mlpReferences.clear();
    mlFrameTimes.clear();
    mlbLost.clear();

    mCurrentFrame  = WAIFrame();
    mpLastKeyFrame = nullptr;
    mpReferenceKF  = nullptr;
    mvpLocalMapPoints.clear();
    mvpLocalKeyFrames.clear();
    mnMatchesInliers   = 0;
    mnLastKeyFrameId   = 0;
    mnLastRelocFrameId = 0;

    _state = TrackingState_Initializing;
}

bool WAI::ModeOrbSlam2::isInitialized()
{
    bool result = _initialized;

    return result;
}

void WAI::ModeOrbSlam2::pause()
{
    if (!_params.serial)
    {
        mpLocalMapper->RequestStop();
        while (!mpLocalMapper->isStopped())
        {
            std::this_thread::sleep_for(std::chrono::microseconds(10));
        }
    }

    requestStateIdle();
    while (!hasStateIdle())
    {
        std::this_thread::sleep_for(std::chrono::microseconds(10));
    }
}

void WAI::ModeOrbSlam2::resume()
{
    if (!_params.serial)
    {
        mpLocalMapper->Release();
        //mptLocalMapping = new thread(&LocalMapping::Run, mpLocalMapper);
        //mptLoopClosing  = new thread(&LoopClosing::Run, mpLoopCloser);
    }

    requestResume();
}

void WAI::ModeOrbSlam2::requestStateIdle()
{
    {
        std::unique_lock<std::mutex> guard(_mutexStates);
        resetRequests();
        _idleRequested = true;
    }

    stateTransition();
}

void WAI::ModeOrbSlam2::requestResume()
{
    {
        std::unique_lock<std::mutex> guard(_mutexStates);
        resetRequests();
        _resumeRequested = true;
    }

    stateTransition();
}

bool WAI::ModeOrbSlam2::hasStateIdle()
{
    std::unique_lock<std::mutex> guard(_mutexStates);

    bool result = (_state == TrackingState_Idle);

    return result;
}

void WAI::ModeOrbSlam2::resetRequests()
{
    _idleRequested   = false;
    _resumeRequested = false;
}

void WAI::ModeOrbSlam2::findMatches(std::vector<cv::Point2f>& vP2D, std::vector<cv::Point3f>& vP3Dw)
{
    // Compute Bag of Words Vector
    mCurrentFrame.ComputeBoW();

    // Relocalization is performed when tracking is lost
    // Track Lost: Query WAIKeyFrame Database for keyframe candidates for relocalization
    vector<WAIKeyFrame*> vpCandidateKFs = mpKeyFrameDatabase->DetectRelocalizationCandidates(&mCurrentFrame);

    if (vpCandidateKFs.empty())
        return;

    //vector<WAIKeyFrame*> vpCandidateKFs = mpKeyFrameDatabase->keyFrames();
    const int nKFs = vpCandidateKFs.size();

    // We first perform an ORB matching with each candidate
    ORBmatcher matcher(0.75, true);

    vector<vector<WAIMapPoint*>> vvpMapPointMatches;
    vvpMapPointMatches.resize(nKFs);

    for (int i = 0; i < nKFs; i++)
    {
        WAIKeyFrame* pKF      = vpCandidateKFs[i];
        int          nmatches = matcher.SearchByBoW(pKF, mCurrentFrame, vvpMapPointMatches[i]);
        if (nmatches < 15)
            continue;
        int idx = 0;

        for (size_t j = 0; j < vvpMapPointMatches[i].size(); j++)
        {
            WAIMapPoint* pMP = vvpMapPointMatches[i][j];

            if (pMP && pMP->Observations() > 1)
            {
                const cv::KeyPoint& kp = mCurrentFrame.mvKeys[j];
                vP2D.push_back(kp.pt);
                auto Pos = pMP->worldPosVec();
                vP3Dw.push_back(cv::Point3f(Pos.x, Pos.y, Pos.z));
            }
        }
    }
}

bool WAI::ModeOrbSlam2::relocalization(WAIFrame&      currentFrame,
                                       WAIKeyFrameDB* keyFrameDB,
                                       unsigned int*  lastRelocFrameId,
                                       WAIMap&        waiMap,
                                       bool           applyMinAccScoreFilter,
                                       bool           relocWithAllKFs)
{
    AVERAGE_TIMING_START("Relocalization");
    // Compute Bag of Words Vector
    currentFrame.ComputeBoW();

    // Relocalization is performed when tracking is lost
    // Track Lost: Query WAIKeyFrame Database for keyframe candidates for relocalization
    vector<WAIKeyFrame*> vpCandidateKFs;
    if (relocWithAllKFs)
    {
        //alternative candidate search (test code)
        vpCandidateKFs = waiMap.GetAllKeyFrames();
    }
    else
    {
        //TODO(luc): test with 2nd argument set to true
        vpCandidateKFs = keyFrameDB->DetectRelocalizationCandidates(&currentFrame, applyMinAccScoreFilter);
    }

    if (!vpCandidateKFs.size() && waiMap.KeyFramesInMap() < 5)
    {
        vpCandidateKFs = waiMap.GetAllKeyFrames();
    }

    //std::cout << "N after DetectRelocalizationCandidates: " << vpCandidateKFs.size() << std::endl;

    if (vpCandidateKFs.empty())
    {
        AVERAGE_TIMING_STOP("Relocalization");
        return false;
    }

    //vector<WAIKeyFrame*> vpCandidateKFs = mpKeyFrameDatabase->keyFrames();
    const int nKFs = vpCandidateKFs.size();

    // We first perform an ORB matching with each candidate
    // If enough matches are found we set up a PnP solver
    // Best match < 0.75 * second best match (default is 0.6)
    ORBmatcher matcher(0.75, true);

    vector<PnPsolver*> vpPnPsolvers;
    vpPnPsolvers.resize(nKFs);

    vector<vector<WAIMapPoint*>> vvpMapPointMatches;
    vvpMapPointMatches.resize(nKFs);

    vector<bool> vbDiscarded;
    vbDiscarded.resize(nKFs);

    int nCandidates = 0;

    for (int i = 0; i < nKFs; i++)
    {
        WAIKeyFrame* pKF = vpCandidateKFs[i];
        if (pKF->isBad())
            vbDiscarded[i] = true;
        else
        {
            int nmatches = matcher.SearchByBoW(pKF, currentFrame, vvpMapPointMatches[i]);
            //cout << "Num matches: " << nmatches << endl;
            if (nmatches < 15)
            {
                vbDiscarded[i] = true;
                continue;
            }
            else
            {
                PnPsolver* pSolver = new PnPsolver(currentFrame, vvpMapPointMatches[i]);
                pSolver->SetRansacParameters(0.99, 10, 300, 4, 0.5, 5.991);
                vpPnPsolvers[i] = pSolver;
                nCandidates++;
            }
        }
    }

    // Alternately perform some iterations of P4P RANSAC
    // until we find a camera pose supported by enough inliers
    bool       bMatch = false;
    ORBmatcher matcher2(0.9, true);

    while (nCandidates > 0 && !bMatch)
    {
        for (int i = 0; i < nKFs; i++)
        {
            if (vbDiscarded[i])
                continue;

            // Perform 5 Ransac Iterations
            vector<bool> vbInliers;
            vector<bool> vbOutliers;
            int          nInliers;
            bool         bNoMore;

            PnPsolver* pSolver = vpPnPsolvers[i];
            cv::Mat    Tcw     = pSolver->iterate(5, bNoMore, vbInliers, nInliers);

            // If Ransac reaches max. iterations discard keyframe
            if (bNoMore)
            {
                vbDiscarded[i] = true;
                nCandidates--;
            }

            // If a Camera Pose is computed, optimize
            if (!Tcw.empty())
            {
                Tcw.copyTo(currentFrame.mTcw);

                set<WAIMapPoint*> sFound;

                const int np = vbInliers.size();

                for (int j = 0; j < np; j++)
                {
                    if (vbInliers[j])
                    {
                        currentFrame.mvpMapPoints[j] = vvpMapPointMatches[i][j];
                        sFound.insert(vvpMapPointMatches[i][j]);
                    }
                    else
                        currentFrame.mvpMapPoints[j] = NULL;
                }

                int nGood = Optimizer::PoseOptimization(&currentFrame, vbOutliers);

                if (nGood < 10)
                    continue;

                /*
                for (int io = 0; io < currentFrame.N; io++)
                    if (currentFrame.mvbOutlier[io])
                        currentFrame.mvpMapPoints[io] = static_cast<WAIMapPoint*>(NULL);
                */

                // If few inliers, search by projection in a coarse window and optimize again:
                //ghm1: mappoints seen in the keyframe which was found as candidate via BoW-search are projected into
                //the current frame using the position that was calculated using the matches from the BoW matcher
                if (nGood < 50)
                {
                    int nadditional = matcher2.SearchByProjection(currentFrame, vpCandidateKFs[i], sFound, 10, 100);

                    if (nadditional + nGood >= 50)
                    {
                        nGood = Optimizer::PoseOptimization(&currentFrame, vbOutliers);

                        // If many inliers but still not enough, search by projection again in a narrower window
                        // (the camera has already been optimized with many points)
                        if (nGood > 30 && nGood < 50)
                        {
                            sFound.clear();
                            for (int ip = 0; ip < currentFrame.N; ip++)
                                if (currentFrame.mvpMapPoints[ip])
                                    sFound.insert(currentFrame.mvpMapPoints[ip]);
                            nadditional = matcher2.SearchByProjection(currentFrame, vpCandidateKFs[i], sFound, 3, 64);

                            // Final optimization
                            if (nGood + nadditional >= 50)
                            {
                                nGood = Optimizer::PoseOptimization(&currentFrame);
                            }
                        }
                    }
                }

                // If the pose is supported by enough inliers stop ransacs and continue
                if (nGood >= 50)
                {
                    bMatch = true;
                    break;
                }
            }
        }
    }

    AVERAGE_TIMING_STOP("Relocalization");
    if (!bMatch)
    {
        return false;
    }
    else
    {
        *lastRelocFrameId = currentFrame.mnId;
        return true;
    }
}

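// The loop above is the classic ORB-SLAM2 relocalization scheme: BoW candidate
// search -> EPnP RANSAC per candidate -> pose optimization -> guided
// reprojection search. A rough single-candidate analogue with plain OpenCV
// (sketch only; the real code alternates RANSAC rounds between candidates and
// refines with the g2o pose optimizer; points3d/points2d/cameraMat/distCoeffs
// are assumed inputs):
#if 0
cv::Mat          rvec, tvec;
std::vector<int> inliers;
bool ok = cv::solvePnPRansac(points3d,   // 3D map points (cv::Point3f)
                             points2d,   // matched keypoints (cv::Point2f)
                             cameraMat,
                             distCoeffs,
                             rvec, tvec,
                             false,      // no extrinsic guess
                             300,        // max RANSAC iterations
                             4.0f,       // reprojection error threshold [px]
                             0.99,       // confidence
                             inliers);
#endif
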
WAIFrame WAI::ModeOrbSlam2::getCurrentFrame()
{
    return mCurrentFrame;
}

bool WAI::ModeOrbSlam2::trackReferenceKeyFrame()
{
    //This routine is called if the current tracking state is OK but we have NO valid motion model
    //1. Compute the BoW vector for the current frame
    //2. using BoW we search mappoint matches (from reference keyframe) with orb in current frame (ORB that belong to the same vocabulary node (at a certain level))
    //3. if there are less than 15 matches return.
    //4. we use the pose found for the last frame as initial pose for the current frame
    //5. This pose is optimized using the matches to map points found by BoW search with reference frame
    //6. Matches classified as outliers by the optimization routine are updated in the mvpMapPoints vector in the current frame and the valid matches are counted
    //7. If there are more than 10 valid matches the reference frame tracking was successful.

    AVERAGE_TIMING_START("TrackReferenceKeyFrame");
    // Compute Bag of Words vector
    mCurrentFrame.ComputeBoW();

    // We first perform an ORB matching with the reference keyframe
    // If enough matches are found we set up a PnP solver
    ORBmatcher           matcher(0.7, true);
    vector<WAIMapPoint*> vpMapPointMatches;

    int nmatches = matcher.SearchByBoW(mpReferenceKF, mCurrentFrame, vpMapPointMatches);

    if (nmatches < 15)
    {
        AVERAGE_TIMING_STOP("TrackReferenceKeyFrame");
        return false;
    }

    mCurrentFrame.mvpMapPoints = vpMapPointMatches;
    mCurrentFrame.SetPose(mLastFrame.mTcw);

    nmatches = Optimizer::PoseOptimization(&mCurrentFrame);

    // Discard outliers
    /*
    int nmatchesMap = 0;
    for (int i = 0; i < mCurrentFrame.N; i++)
    {
        if (mCurrentFrame.mvpMapPoints[i])
        {
            if (mCurrentFrame.mvbOutlier[i])
            {
                WAIMapPoint* pMP = mCurrentFrame.mvpMapPoints[i];

                mCurrentFrame.mvpMapPoints[i] = static_cast<WAIMapPoint*>(NULL);
                mCurrentFrame.mvbOutlier[i]   = false;
                pMP->mbTrackInView            = false;
                pMP->mnLastFrameSeen          = mCurrentFrame.mnId;
                nmatches--;
            }
            else if (mCurrentFrame.mvpMapPoints[i]->Observations() > 0)
                nmatchesMap++;
        }
    }
    */

    AVERAGE_TIMING_STOP("TrackReferenceKeyFrame");
    //return nmatchesMap >= 10;
    return nmatches >= 10;
}

bool WAI::ModeOrbSlam2::trackLocalMap()
{
    // We have an estimation of the camera pose and some map points tracked in the frame.
    // We retrieve the local map and try to find matches to points in the local map.

    //(UpdateLocalKeyFrames())
    //1. For all matches to mappoints we search for the keyframes in which these mappoints have been observed
    //2. We set the keyframe with the most common matches to mappoints as reference keyframe. Simultaneously a list of localKeyFrames is maintained (mvpLocalKeyFrames)
    //(UpdateLocalPoints())
    //3. Pointers to map points are added to mvpLocalMapPoints and the id of the current frame is stored in the mappoint instance (mnTrackReferenceForFrame).
    //(SearchLocalPoints())
    //4. The local map found this way is searched for additional matches. We check if a point is not matched already, if it is in the frustum, and then the ORBMatcher is used to search feature matches by projection.
    //(ORBMatcher::searchByProjection())
    //5.
    //(this function)
    //6. The pose is optimized using the found additional matches and the already found pose as initial guess

    AVERAGE_TIMING_START("trackLocalMap");
    updateLocalMap();
    searchLocalPoints();

    // Optimize Pose
    Optimizer::PoseOptimization(&mCurrentFrame);
    mnMatchesInliers = 0;

    // Update MapPoints Statistics
    for (int i = 0; i < mCurrentFrame.N; i++)
    {
        if (mCurrentFrame.mvpMapPoints[i])
        {
            mCurrentFrame.mvpMapPoints[i]->IncreaseFound();
            if (!_params.onlyTracking)
            {
                if (mCurrentFrame.mvpMapPoints[i]->Observations() > 0)
                {
                    mnMatchesInliers++;
                }
            }
            else
            {
                mnMatchesInliers++;
            }
            //else if (mSensor == System::STEREO)
            //    mCurrentFrame.mvpMapPoints[i] = static_cast<WAIMapPoint*>(NULL);
        }
    }

    AVERAGE_TIMING_STOP("trackLocalMap");
    // Decide if the tracking was successful
    // More restrictive if there was a relocalization recently
    if (mCurrentFrame.mnId < mnLastRelocFrameId + mMaxFrames && mnMatchesInliers < 50)
    {
        //cout << "mnMatchesInliers: " << mnMatchesInliers << endl;
        return false;
    }

    if (mnMatchesInliers < 30)
        return false;
    else
        return true;
}

void WAI::ModeOrbSlam2::updateLocalMap()
{
    // This is for visualization
    //mpMap->SetReferenceMapPoints(mvpLocalMapPoints);

    // Update
    updateLocalKeyFrames();
    updateLocalPoints();
}

void WAI::ModeOrbSlam2::searchLocalPoints()
{
    // Do not search map points already matched
    for (vector<WAIMapPoint*>::iterator vit = mCurrentFrame.mvpMapPoints.begin(), vend = mCurrentFrame.mvpMapPoints.end(); vit != vend; vit++)
    {
        WAIMapPoint* pMP = *vit;
        if (pMP)
        {
            if (pMP->isBad())
            {
                *vit = static_cast<WAIMapPoint*>(NULL);
            }
            else
            {
                pMP->IncreaseVisible();
                pMP->mnLastFrameSeen = mCurrentFrame.mnId;
                pMP->mbTrackInView   = false;
            }
        }
    }

    int nToMatch = 0;

    // Project points in frame and check their visibility
    for (vector<WAIMapPoint*>::iterator vit = mvpLocalMapPoints.begin(), vend = mvpLocalMapPoints.end(); vit != vend; vit++)
    {
        WAIMapPoint* pMP = *vit;
        if (pMP->mnLastFrameSeen == mCurrentFrame.mnId)
            continue;
        if (pMP->isBad())
            continue;
        // Project (this fills WAIMapPoint variables for matching)
        if (mCurrentFrame.isInFrustum(pMP, 0.5))
        {
            //ghm1 test:
            //if (!_image.empty())
            //{
            //    WAIPoint2f ptProj(pMP->mTrackProjX, pMP->mTrackProjY);
            //    cv::rectangle(_image,
            //                  cv::Rect(ptProj.x - 3, ptProj.y - 3, 7, 7),
            //                  Scalar(0, 0, 255));
            //}
            pMP->IncreaseVisible();
            nToMatch++;
        }
    }

    if (nToMatch > 0)
    {
        ORBmatcher matcher(0.8);
        int        th = 1;
        // If the camera has been relocalized recently, perform a coarser search
        if (mCurrentFrame.mnId < mnLastRelocFrameId + 2)
            th = 5;
        matcher.SearchByProjection(mCurrentFrame, mvpLocalMapPoints, th);
    }
}

void WAI::ModeOrbSlam2::updateLocalKeyFrames()
{
    // Each map point votes for the keyframes in which it has been observed
    map<WAIKeyFrame*, int> keyframeCounter;
    for (int i = 0; i < mCurrentFrame.N; i++)
    {
        if (mCurrentFrame.mvpMapPoints[i])
        {
            WAIMapPoint* pMP = mCurrentFrame.mvpMapPoints[i];
            if (!pMP->isBad())
            {
                const map<WAIKeyFrame*, size_t> observations = pMP->GetObservations();
                for (map<WAIKeyFrame*, size_t>::const_iterator it = observations.begin(), itend = observations.end(); it != itend; it++)
                    keyframeCounter[it->first]++;
            }
            else
            {
                mCurrentFrame.mvpMapPoints[i] = NULL;
            }
        }
    }

    if (keyframeCounter.empty())
        return;

    int          max    = 0;
    WAIKeyFrame* pKFmax = static_cast<WAIKeyFrame*>(NULL);

    mvpLocalKeyFrames.clear();
    mvpLocalKeyFrames.reserve(3 * keyframeCounter.size());

    // All keyframes that observe a map point are included in the local map. Also check which keyframe shares most points
    for (map<WAIKeyFrame*, int>::const_iterator it = keyframeCounter.begin(), itEnd = keyframeCounter.end(); it != itEnd; it++)
    {
        WAIKeyFrame* pKF = it->first;

        if (pKF->isBad())
            continue;

        if (it->second > max)
        {
            max    = it->second;
            pKFmax = pKF;
        }

        mvpLocalKeyFrames.push_back(it->first);
        pKF->mnTrackReferenceForFrame = mCurrentFrame.mnId;
    }

    // Also include some not-already-included keyframes that are neighbors to already-included keyframes
    for (auto itKF = mvpLocalKeyFrames.begin(); itKF != mvpLocalKeyFrames.end(); itKF++)
    {
        // Limit the number of keyframes
        if (mvpLocalKeyFrames.size() > 80)
            break;

        WAIKeyFrame* pKF = *itKF;

        const vector<WAIKeyFrame*> vNeighs = pKF->GetBestCovisibilityKeyFrames(10);

        for (vector<WAIKeyFrame*>::const_iterator itNeighKF = vNeighs.begin(), itEndNeighKF = vNeighs.end(); itNeighKF != itEndNeighKF; itNeighKF++)
        {
            WAIKeyFrame* pNeighKF = *itNeighKF;
            if (!pNeighKF->isBad())
            {
                if (pNeighKF->mnTrackReferenceForFrame != mCurrentFrame.mnId)
                {
                    mvpLocalKeyFrames.push_back(pNeighKF);
                    pNeighKF->mnTrackReferenceForFrame = mCurrentFrame.mnId;
                    break;
                }
            }
        }

        const set<WAIKeyFrame*> spChilds = pKF->GetChilds();
        for (set<WAIKeyFrame*>::const_iterator sit = spChilds.begin(), send = spChilds.end(); sit != send; sit++)
        {
            WAIKeyFrame* pChildKF = *sit;
            if (!pChildKF->isBad())
            {
                if (pChildKF->mnTrackReferenceForFrame != mCurrentFrame.mnId)
                {
                    mvpLocalKeyFrames.push_back(pChildKF);
                    pChildKF->mnTrackReferenceForFrame = mCurrentFrame.mnId;
                    break;
                }
            }
        }

        WAIKeyFrame* pParent = pKF->GetParent();
        if (pParent)
        {
            if (pParent->mnTrackReferenceForFrame != mCurrentFrame.mnId)
            {
                mvpLocalKeyFrames.push_back(pParent);
                pParent->mnTrackReferenceForFrame = mCurrentFrame.mnId;
                break;
            }
        }
    }

    if (pKFmax)
    {
        mpReferenceKF               = pKFmax;
        mCurrentFrame.mpReferenceKF = mpReferenceKF;
    }
}

void WAI::ModeOrbSlam2::updateLocalPoints()
{
    mvpLocalMapPoints.clear();

    for (vector<WAIKeyFrame*>::const_iterator itKF = mvpLocalKeyFrames.begin(), itEndKF = mvpLocalKeyFrames.end(); itKF != itEndKF; itKF++)
    {
        WAIKeyFrame*               pKF   = *itKF;
        const vector<WAIMapPoint*> vpMPs = pKF->GetMapPointMatches();

        for (vector<WAIMapPoint*>::const_iterator itMP = vpMPs.begin(), itEndMP = vpMPs.end(); itMP != itEndMP; itMP++)
        {
            WAIMapPoint* pMP = *itMP;
            if (!pMP)
                continue;
            if (pMP->mnTrackReferenceForFrame == mCurrentFrame.mnId)
                continue;
            if (!pMP->isBad())
            {
                mvpLocalMapPoints.push_back(pMP);
                pMP->mnTrackReferenceForFrame = mCurrentFrame.mnId;
            }
        }
    }
}

1948 {
1949  //This method is called if tracking is OK and we have a valid motion model
1950  //1. UpdateLastFrame(): ...
1951  //2. We set an initial pose into current frame, which is the pose of the last frame corrected by the motion model (expected motion since last frame)
1952  //3. Reinitialization of the assotiated map points to key points in the current frame to NULL
1953  //4. We search for matches with associated mappoints from lastframe by projection to the current frame. A narrow window is used.
1954  //5. If we found less than 20 matches we search again as before but in a wider search window.
1955  //6. If we have still less than 20 matches tracking with motion model was unsuccessful
1956  //7. Else the pose is Optimized
1957  //8. Matches classified as outliers by the optimization routine are updated in the mvpMapPoints vector in the current frame and the valid matches are counted
1958  //9. If less than 10 matches to the local map remain the tracking with visual odometry is activated (mbVO = true) and that means no tracking with motion model or reference keyframe
1959  //10. The tracking with motion model was successful, if we found more than 20 matches to map points
1960  AVERAGE_TIMING_START("TrackWithMotionModel");
1961  ORBmatcher matcher(0.9, true);
1962 
1963  // Update last frame pose according to its reference keyframe
1964  // Create "visual odometry" points if in Localization Mode
1965  updateLastFrame();
1966 
1967  //this adds the motion differnce between the last and the before-last frame to the pose of the last frame to estimate the position of the current frame
1968  mCurrentFrame.SetPose(mVelocity * mLastFrame.mTcw);
1969 
1970  fill(mCurrentFrame.mvpMapPoints.begin(), mCurrentFrame.mvpMapPoints.end(), static_cast<WAIMapPoint*>(NULL));
1971 
1972  // Project points seen in previous frame
1973  int th = 15;
1974  int nmatches = matcher.SearchByProjection(mCurrentFrame, mLastFrame, th, true);
1975 
1976  // If few matches, uses a wider window search
1977  if (nmatches < 20)
1978  {
1979  fill(mCurrentFrame.mvpMapPoints.begin(), mCurrentFrame.mvpMapPoints.end(), static_cast<WAIMapPoint*>(NULL));
1980  nmatches = matcher.SearchByProjection(mCurrentFrame, mLastFrame, 2 * th, true);
1981  }
1982 
1983  if (nmatches < 20)
1984  {
1985  AVERAGE_TIMING_STOP("TrackWithMotionModel");
1986  return false;
1987  }
1988 
1989  // Optimize frame pose with all matches
1990  Optimizer::PoseOptimization(&mCurrentFrame);
1991 
1992  // Discard outliers
1993  int nmatchesMap = 0;
1994  for (int i = 0; i < mCurrentFrame.N; i++)
1995  {
1996  if (mCurrentFrame.mvpMapPoints[i])
1997  {
1998  if (mCurrentFrame.mvpMapPoints[i]->Observations() > 0)
1999  nmatchesMap++;
2000  /*
2001  if (mCurrentFrame.mvbOutlier[i])
2002  {
2003  WAIMapPoint* pMP = mCurrentFrame.mvpMapPoints[i];
2004 
2005  mCurrentFrame.mvpMapPoints[i] = static_cast<WAIMapPoint*>(NULL);
2006  mCurrentFrame.mvbOutlier[i] = false;
2007  pMP->mbTrackInView = false;
2008  pMP->mnLastFrameSeen = mCurrentFrame.mnId;
2009  nmatches--;
2010  }
2011  else if (mCurrentFrame.mvpMapPoints[i]->Observations() > 0)
2012  nmatchesMap++;
2013  */
2014  }
2015  }
2016 
2017  AVERAGE_TIMING_STOP("TrackWithMotionModel");
2018  if (_params.onlyTracking)
2019  {
2020  mbVO = nmatchesMap < 10;
2021  return nmatches > 20;
2022  }
2023 
2024  return nmatchesMap >= 10;
2025 }
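// Minimal sketch (not part of the original source) of the constant-velocity
// motion model used above: mVelocity is assumed to hold the relative transform
// between the two previous camera poses, so left-multiplying it onto the last
// pose predicts the current pose.
//
//     cv::Mat Tlast = mLastFrame.mTcw;        // world -> last camera
//     cv::Mat Tprev = beforeLastTcw;          // hypothetical: world -> frame before last
//     cv::Mat velocity = Tlast * Tprev.inv(); // motion between the two previous frames
//     cv::Mat Tpred = velocity * Tlast;       // prediction passed to SetPose() above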
2026 
2027 bool WAI::ModeOrbSlam2::trackWithOptFlow()
2028 { //parameters of this function:
2029  int addThres = 2;
2030  float maxReprojError = 10.0;
2031 
2032  if (mLastFrame.mvKeys.size() < 100)
2033  {
2034  return false;
2035  }
2036 
2037  std::vector<uint8_t> status;
2038  std::vector<float> err;
2039  cv::Size winSize(15, 15);
2040 
2041  cv::TermCriteria criteria(cv::TermCriteria::COUNT | cv::TermCriteria::EPS,
2042  1, // terminate after this many iterations, or
2043  0.03); // when the search window moves by less than this
2044 
2045  std::vector<cv::Point2f> keyPointCoordinatesLastFrame;
2046  vector<WAIMapPoint*> matchedMapPoints;
2047  vector<cv::KeyPoint> matchedKeyPoints;
2048 
2049  if (_optFlowOK)
2050  {
2051  //last time we successfully tracked with optical flow
2052  matchedMapPoints = _optFlowMapPtsLastFrame;
2053  matchedKeyPoints = _optFlowKeyPtsLastFrame;
2054  for (int i = 0; i < _optFlowKeyPtsLastFrame.size(); i++)
2055  {
2056  keyPointCoordinatesLastFrame.push_back(_optFlowKeyPtsLastFrame[i].pt);
2057  }
2058  }
2059  else
2060  {
2061  //this is the first run of optical flow after lost state
2062  for (int i = 0; i < mLastFrame.mvpMapPoints.size(); i++)
2063  {
2064  //if (mLastFrame.mvpMapPoints[i] && !mvbOutlier[i])
2065  if (mLastFrame.mvpMapPoints[i])
2066  {
2067  keyPointCoordinatesLastFrame.push_back(mLastFrame.mvKeys[i].pt);
2068 
2069  matchedMapPoints.push_back(mLastFrame.mvpMapPoints[i]);
2070  matchedKeyPoints.push_back(mLastFrame.mvKeys[i]);
2071  }
2072  }
2073  }
2074 
2075  if (!keyPointCoordinatesLastFrame.size())
2076  {
2077  return false;
2078  }
2079 
2080  // Find closest possible feature points based on optical flow
2081  std::vector<cv::Point2f> pred2DPoints(keyPointCoordinatesLastFrame.size());
2082 
2083  cv::calcOpticalFlowPyrLK(
2084  mLastFrame.imgGray, // Previous frame
2085  mCurrentFrame.imgGray, // Current frame
2086  keyPointCoordinatesLastFrame, // Input: keypoint coordinates in the previous frame
2087  pred2DPoints, // Output: predicted coordinates in the current frame (one output point per input point)
2088  status, // Output vector for keypoint correspondences (1 = match found)
2089  err, // Error size for each flow
2090  winSize, // Search window for each pyramid level
2091  3, // Max levels of pyramid creation
2092  criteria, // Configuration from above
2093  0, // Additional flags
2094  0.01); // Minimal Eigen threshold
2095 
2096  // Keep only the points that were successfully tracked by the optical flow
2097  std::vector<cv::Point2f> frame2DPoints;
2098  std::vector<cv::Point3f> model3DPoints;
2099  vector<WAIMapPoint*> trackedMapPoints;
2100  vector<cv::KeyPoint> trackedKeyPoints;
2101 
2102  mnMatchesInliers = 0;
2103 
2104  for (size_t i = 0; i < status.size(); i++)
2105  {
2106  if (status[i])
2107  {
2108  // calcOpticalFlowPyrLK returns exactly one output point per input point,
2109  // so the index-based association to the 3D map points stays intact
2110  frame2DPoints.push_back(pred2DPoints[i]);
2111  cv::Point3f p3(matchedMapPoints[i]->GetWorldPos());
2112  model3DPoints.push_back(p3);
2113 
2114  matchedKeyPoints[i].pt.x = pred2DPoints[i].x;
2115  matchedKeyPoints[i].pt.y = pred2DPoints[i].y;
2116 
2117  trackedMapPoints.push_back(matchedMapPoints[i]);
2118  trackedKeyPoints.push_back(matchedKeyPoints[i]);
2119 
2120  std::lock_guard<std::mutex> guard(_nMapMatchesLock);
2121  mnMatchesInliers++;
2122  }
2123  }
2124 
2125  //todo ghm1:
2126  //- insert tracked points into grid
2127  //- update grid with matches from the current frame's tracked map points
2128  //- how can we make sure that we do not track the same point multiple times?
2129  // -> we know the pointers to the map points and only add new tracking points whose map point is not in a grid cell yet
2130  //- we don't want to track too many points, so we prefer points with the most observations
2131  _optFlowGridElementWidthInv = static_cast<float>(OPTFLOW_GRID_COLS) / static_cast<float>(WAIFrame::mnMaxX - WAIFrame::mnMinX);
2132  _optFlowGridElementHeightInv = static_cast<float>(OPTFLOW_GRID_ROWS) / static_cast<float>(WAIFrame::mnMaxY - WAIFrame::mnMinY);
2133  std::vector<std::size_t> gridOptFlow[OPTFLOW_GRID_COLS][OPTFLOW_GRID_ROWS];
2134  std::vector<std::size_t> gridCurrFrame[OPTFLOW_GRID_COLS][OPTFLOW_GRID_ROWS];
2135  //insert optical flow points into grid
2136  for (int i = 0; i < trackedKeyPoints.size(); ++i)
2137  {
2138  int nGridPosX, nGridPosY;
2139  if (posInGrid(trackedKeyPoints[i], nGridPosX, nGridPosY, WAIFrame::mnMinX, WAIFrame::mnMinY))
2140  gridOptFlow[nGridPosX][nGridPosY].push_back(i);
2141  }
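// Hedged sketch (not the original implementation) of what posInGrid presumably
// does: scale the keypoint into grid coordinates using the inverse cell sizes
// computed above and reject positions outside the grid.
//
//     bool posInGrid(const cv::KeyPoint& kp, int& posX, int& posY, int minX, int minY)
//     {
//         posX = (int)((kp.pt.x - minX) * _optFlowGridElementWidthInv);
//         posY = (int)((kp.pt.y - minY) * _optFlowGridElementHeightInv);
//         return posX >= 0 && posX < OPTFLOW_GRID_COLS &&
//                posY >= 0 && posY < OPTFLOW_GRID_ROWS;
//     }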
2142  //insert current frame points into grid
2143  for (int i = 0; i < mCurrentFrame.mvpMapPoints.size(); ++i)
2144  {
2145  if (mCurrentFrame.mvpMapPoints[i] != NULL)
2146  {
2147  int nGridPosX, nGridPosY;
2148  if (posInGrid(mCurrentFrame.mvKeys[i], nGridPosX, nGridPosY, WAIFrame::mnMinX, WAIFrame::mnMinY))
2149  gridCurrFrame[nGridPosX][nGridPosY].push_back(i);
2150  }
2151  }
2152 
2153  //try to add tracking points from gridCurrFrame to trackedMapPoints and trackedKeyPoints where missing in gridOptFlow
2154  for (int i = 0; i < OPTFLOW_GRID_COLS; i++)
2155  {
2156  for (int j = 0; j < OPTFLOW_GRID_ROWS; j++)
2157  {
2158  const auto& optFlowCell = gridOptFlow[i][j];
2159  if (optFlowCell.size() < addThres)
2160  {
2161  const std::vector<size_t>& indices = gridCurrFrame[i][j];
2162  for (auto indexCF : indices)
2163  {
2164  const cv::KeyPoint& keyPt = mCurrentFrame.mvKeys[indexCF];
2165  WAIMapPoint* mapPt = mCurrentFrame.mvpMapPoints[indexCF];
2166  if (mapPt)
2167  {
2168  //check that this map point is not already referenced in this cell of gridOptFlow
2169  bool alreadyContained = false;
2170  for (auto indexOF : optFlowCell)
2171  {
2172  if (trackedMapPoints[indexOF] == mapPt)
2173  {
2174  alreadyContained = true;
2175  break;
2176  }
2177  }
2178 
2179  if (!alreadyContained)
2180  {
2181  //add to tracking set of mappoints and keypoints
2182  trackedKeyPoints.push_back(keyPt);
2183  trackedMapPoints.push_back(mapPt);
2184  frame2DPoints.push_back(keyPt.pt);
2185  model3DPoints.push_back(cv::Point3f(mapPt->GetWorldPos()));
2186  }
2187  }
2188  }
2189  }
2190  }
2191  }
2192 
2193  if (trackedKeyPoints.size() < matchedKeyPoints.size() * 0.75)
2194  {
2195  return false;
2196  }
2197 
2198  /////////////////////
2199  // Pose Estimation //
2200  /////////////////////
2201 
2202  cv::Mat rvec = cv::Mat::zeros(3, 1, CV_64FC1);
2203  cv::Mat tvec = cv::Mat::zeros(3, 1, CV_64FC1);
2204  cv::Mat om = mCurrentFrame.mTcw;
2205  cv::Rodrigues(om.rowRange(0, 3).colRange(0, 3), rvec);
2206  tvec = om.colRange(3, 4).rowRange(0, 3);
2207 
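// solvePnP estimates the camera pose from the 3D-2D correspondences; the last
// argument (useExtrinsicGuess = true) makes it refine the rvec/tvec initialized
// from mCurrentFrame.mTcw above instead of solving from scratch.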
2208  bool foundPose = cv::solvePnP(model3DPoints,
2209  frame2DPoints,
2210  _cameraMat,
2211  _distortionMat,
2212  rvec,
2213  tvec,
2214  true);
2215 
2216  if (foundPose)
2217  {
2218  cv::Mat Tcw = cv::Mat::eye(4, 4, CV_32F);
2219  Tcw.at<float>(0, 3) = tvec.at<float>(0, 0);
2220  Tcw.at<float>(1, 3) = tvec.at<float>(1, 0);
2221  Tcw.at<float>(2, 3) = tvec.at<float>(2, 0);
2222 
2223  cv::Mat Rcw = cv::Mat::zeros(3, 3, CV_32F);
2224  cv::Rodrigues(rvec, Rcw);
2225 
2226  Tcw.at<float>(0, 0) = Rcw.at<float>(0, 0);
2227  Tcw.at<float>(1, 0) = Rcw.at<float>(1, 0);
2228  Tcw.at<float>(2, 0) = Rcw.at<float>(2, 0);
2229  Tcw.at<float>(0, 1) = Rcw.at<float>(0, 1);
2230  Tcw.at<float>(1, 1) = Rcw.at<float>(1, 1);
2231  Tcw.at<float>(2, 1) = Rcw.at<float>(2, 1);
2232  Tcw.at<float>(0, 2) = Rcw.at<float>(0, 2);
2233  Tcw.at<float>(1, 2) = Rcw.at<float>(1, 2);
2234  Tcw.at<float>(2, 2) = Rcw.at<float>(2, 2);
2235  _optFlowTcw = Tcw;
2236 
2237  //remove points with bad reprojection error:
2238  //project mappoints onto image plane
2239  std::vector<cv::Point2f> projectedPts;
2240  cv::projectPoints(model3DPoints,
2241  rvec,
2242  tvec,
2243  _cameraMat,
2244  _distortionMat,
2245  projectedPts);
2246 
2247  _optFlowMapPtsLastFrame.clear();
2248  _optFlowKeyPtsLastFrame.clear();
2249  for (int i = 0; i < trackedMapPoints.size(); ++i)
2250  {
2251  //calculate reprojection error
2252  float error = cv::norm(cv::Mat(projectedPts[i]), cv::Mat(frame2DPoints[i]));
2253  if (error < maxReprojError)
2254  {
2255  _optFlowMapPtsLastFrame.push_back(trackedMapPoints[i]);
2256  _optFlowKeyPtsLastFrame.push_back(trackedKeyPoints[i]);
2257  }
2258  }
2259  //trackingType = TrackingType_OptFlow;
2260  }
2261 
2262  return foundPose;
2263 }
2264 
2265 void WAI::ModeOrbSlam2::updateLastFrame()
2266 {
2267  // Update pose according to reference keyframe
2268  WAIKeyFrame* pRef = mLastFrame.mpReferenceKF;
2269  //cout << "pRef pose: " << pRef->GetPose() << endl;
2270  cv::Mat Tlr = mlRelativeFramePoses.back();
2271  //GHM1:
2272  //l = last, w = world, r = reference
2273  //Tlr is the relative transformation of the last frame w.r.t. its reference keyframe
2274  //(because the relative pose for the current frame is added at the end of tracking)
2275  //Refer last frame pose to world: Tlw = Tlr * Trw
2276  //So it seems that the frame's pose does not always refer to the world frame...?
2277  mLastFrame.SetPose(Tlr * pRef->GetPose());
2278 }
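// Worked example of the composition above: with 4x4 pose matrices that map
// world -> camera, transforms chain right-to-left. If Trw = pRef->GetPose()
// maps world -> reference camera and Tlr maps reference -> last camera, then
// Tlw = Tlr * Trw maps world -> last camera, which is what SetPose() receives.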
2279 
2280 //void WAI::ModeOrbSlam2::globalBundleAdjustment()
2281 //{
2282 // Optimizer::GlobalBundleAdjustemnt(_map, 20);
2283 // //_mapNode->updateAll(*_map);
2284 //}
2285 
2286 #if 0
2287 size_t WAI::ModeOrbSlam2::getSizeOf()
2288 {
2289  size_t size = 0;
2290 
2291  size += sizeof(*this);
2292  //add size of local mapping
2293  //add size of loop closing
2294  //add size of
2295 
2296  return size;
2297 }
2298 #endif
2299 
2300 WAIKeyFrame* WAI::ModeOrbSlam2::currentKeyFrame()
2301 {
2302  WAIKeyFrame* result = mCurrentFrame.mpReferenceKF;
2303 
2304  return result;
2305 }
2306 
2307 void WAI::ModeOrbSlam2::decorate(cv::Mat& image)
2308 {
2309  //calculation of mean reprojection error of all matches
2310  calculateMeanReprojectionError();
2311  //calculate pose difference
2312  calculatePoseDifference();
2313  //decorateVideoWithKeyPoints(image);
2314  //decorateVideoWithKeyPointMatches(image);
2315  //decorate scene with matched map points and local map points
2316  //decorateScene();
2317 }
2318 
2319 void WAI::ModeOrbSlam2::calculateMeanReprojectionError()
2320 {
2321  //calculation of mean reprojection error
2322  double reprojectionError = 0.0;
2323  int n = 0;
2324 
2325  //current frame extrinsic
2326  const cv::Mat Rcw = mCurrentFrame.GetRotationCW();
2327  const cv::Mat tcw = mCurrentFrame.GetTranslationCW();
2328  //current frame intrinsics
2329  const float fx = mCurrentFrame.fx;
2330  const float fy = mCurrentFrame.fy;
2331  const float cx = mCurrentFrame.cx;
2332  const float cy = mCurrentFrame.cy;
2333 
2334  for (size_t i = 0; i < mCurrentFrame.N; i++)
2335  {
2336  if (mCurrentFrame.mvpMapPoints[i])
2337  {
2338  if (mCurrentFrame.mvpMapPoints[i]->Observations() > 0)
2339  {
2340  // 3D in absolute coordinates
2341  cv::Mat Pw = mCurrentFrame.mvpMapPoints[i]->GetWorldPos();
2342  // 3D in camera coordinates
2343  const cv::Mat Pc = Rcw * Pw + tcw;
2344  const float& PcX = Pc.at<float>(0);
2345  const float& PcY = Pc.at<float>(1);
2346  const float& PcZ = Pc.at<float>(2);
2347 
2348  // Check positive depth
2349  if (PcZ < 0.0f)
2350  continue;
2351 
2352  // Project in image and check it is not outside
2353  const float invz = 1.0f / PcZ;
2354  const float u = fx * PcX * invz + cx;
2355  const float v = fy * PcY * invz + cy;
2356 
2357  cv::Point2f ptProj(u, v);
2358  //Compare with the undistorted keypoints, since the projection above ignores lens distortion
2359  const auto& ptImg = mCurrentFrame.mvKeysUn[i].pt;
2360 
2361  ////draw projected point
2362  //cv::rectangle(image,
2363  // cv::Rect(ptProj.x - 3, ptProj.y - 3, 7, 7),
2364  // Scalar(255, 0, 0));
2365 
2366  reprojectionError += cv::norm(cv::Mat(ptImg), cv::Mat(ptProj));
2367  n++;
2368  }
2369  }
2370  }
2371 
2372  {
2373  std::lock_guard<std::mutex> guard(_meanProjErrorLock);
2374  if (n > 0)
2375  {
2376  _meanReprojectionError = reprojectionError / n;
2377  }
2378  else
2379  {
2380  _meanReprojectionError = -1;
2381  }
2382  }
2383 }
2384 
2385 void WAI::ModeOrbSlam2::calculatePoseDifference()
2386 {
2387  std::lock_guard<std::mutex> guard(_poseDiffLock);
2388  //calculation of L2 norm of the difference between the last and the current camera pose
2389  if (!mLastFrame.mTcw.empty() && !mCurrentFrame.mTcw.empty())
2390  _poseDifference = norm(mLastFrame.mTcw - mCurrentFrame.mTcw);
2391  else
2392  _poseDifference = -1.0;
2393 }
2394 
2395 void WAI::ModeOrbSlam2::decorateVideoWithKeyPoints(cv::Mat& image)
2396 {
2397  //show rectangle for all keypoints in current image
2398  for (size_t i = 0; i < mCurrentFrame.N; i++)
2399  {
2400  //Use distorted points because we have to undistort the image later
2402  const auto& pt = mCurrentFrame.mvKeys[i].pt;
2403  cv::rectangle(image,
2404  cv::Rect(pt.x - 3, pt.y - 3, 7, 7),
2405  cv::Scalar(0, 0, 255));
2406  }
2407 }
2408 
2409 void WAI::ModeOrbSlam2::decorateVideoWithKeyPointMatches(cv::Mat& image)
2410 {
2411  //show rectangle for key points in video that were matched to map points
2412  if (_optFlowOK)
2413  {
2414  for (size_t i = 0; i < _optFlowKeyPtsLastFrame.size(); i++)
2415  {
2416  //Use distorted points because we have to undistort the image later
2417  const auto& pt = _optFlowKeyPtsLastFrame[i].pt;
2418  cv::rectangle(image,
2419  cv::Rect(pt.x - 3, pt.y - 3, 7, 7),
2420  cv::Scalar(0, 255, 0));
2421  }
2422  }
2423  else
2424  {
2425  for (size_t i = 0; i < mCurrentFrame.N; i++)
2426  {
2427  if (mCurrentFrame.mvpMapPoints[i])
2428  {
2429  if (mCurrentFrame.mvpMapPoints[i]->Observations() > 0)
2430  {
2431  //Use distorted points because we have to undistort the image later
2432  const auto& pt = mCurrentFrame.mvKeys[i].pt;
2433  cv::rectangle(image,
2434  cv::Rect(pt.x - 3, pt.y - 3, 7, 7),
2435  cv::Scalar(0, 255, 0));
2436  }
2437  }
2438  }
2439  }
2440 }
2441 
2442 bool WAI::ModeOrbSlam2::findMarkerHomography(WAIFrame& markerFrame,
2443  WAIKeyFrame* kfCand,
2444  cv::Mat& homography,
2445  int minMatches)
2446 {
2447  bool result = false;
2448 
2449  ORBmatcher matcher(0.9, true);
2450 
2451  std::vector<int> markerMatchesToCurrentFrame;
2452  int nmatches = matcher.SearchForMarkerMap(markerFrame, *kfCand, markerMatchesToCurrentFrame);
2453 
2454  if (nmatches > minMatches)
2455  {
2456  std::vector<cv::Point2f> markerPoints;
2457  std::vector<cv::Point2f> framePoints;
2458 
2459  for (int j = 0; j < markerMatchesToCurrentFrame.size(); j++)
2460  {
2461  if (markerMatchesToCurrentFrame[j] >= 0)
2462  {
2463  markerPoints.push_back(markerFrame.mvKeysUn[j].pt);
2464  framePoints.push_back(kfCand->mvKeysUn[markerMatchesToCurrentFrame[j]].pt);
2465  }
2466  }
2467 
2468  homography = cv::findHomography(markerPoints,
2469  framePoints,
2470  cv::RANSAC);
2471 
2472  if (!homography.empty())
2473  {
2474  homography.convertTo(homography, CV_32F);
2475  result = true;
2476  }
2477  }
2478 
2479  return result;
2480 }
2481 
2482 WAIFrame WAI::ModeOrbSlam2::createMarkerFrame(std::string markerFile, KPextractor* markerExtractor)
2483 {
2484  cv::Mat markerImgGray = cv::imread(markerFile, cv::IMREAD_GRAYSCALE);
2485 
2486  float fyCam = _cameraMat.at<float>(1, 1);
2487  float cyCam = _cameraMat.at<float>(1, 2);
2488  float fov = 2.0f * atan2(cyCam, fyCam) * 180.0f / M_PI;
2489 
2490  float cx = (float)markerImgGray.cols * 0.5f;
2491  float cy = (float)markerImgGray.rows * 0.5f;
2492  float fy = cy / tanf(fov * 0.5f * M_PI / 180.0);
2493  float fx = fy;
2494 
2495  // TODO(dgj1): pass actual calibration for marker frame?
2496  cv::Mat markerCameraMat = (cv::Mat_<float>(3, 3) << fx, 0, cx, 0, fy, cy, 0, 0, 1);
2497  cv::Mat markerDistortionMat = cv::Mat::zeros(4, 1, CV_32F);
2498 
2499  WAIFrame result = WAIFrame(markerImgGray, 0.0f, markerExtractor, markerCameraMat, markerDistortionMat, mpVocabulary, true);
2502  return result;
2503 }
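// Worked example for the derivation above (hypothetical numbers): with
// fyCam = 415 and cyCam = 240, fov = 2 * atan2(240, 415) * 180 / pi ~= 60 deg.
// For a 960x720 marker image, cy = 360 and fy = 360 / tan(30 deg) ~= 623.5,
// so the marker frame is given the same vertical field of view as the camera.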
2504 
2505 bool WAI::ModeOrbSlam2::doMarkerMapPreprocessing(std::string markerFile,
2506  cv::Mat& nodeTransform,
2507  float markerWidthInM)
2508 {
2509  // Additional steps to save marker map
2510  // 1. Find matches to marker on two keyframes
2511  // 1.a Extract features from marker image
2512  WAIFrame markerFrame = createMarkerFrame(markerFile, _markerExtractor);
2513 
2514  // 1.b Find keyframes with enough matches to marker image
2515  std::vector<WAIKeyFrame*> kfs = _map->GetAllKeyFrames();
2516 
2517  WAIKeyFrame* matchedKf1 = nullptr;
2518  WAIKeyFrame* matchedKf2 = nullptr;
2519 
2520  cv::Mat ul = cv::Mat(cv::Point3f(0, 0, 1));
2521  cv::Mat ur = cv::Mat(cv::Point3f(markerFrame.imgGray.cols, 0, 1));
2522  cv::Mat ll = cv::Mat(cv::Point3f(0, markerFrame.imgGray.rows, 1));
2523  cv::Mat lr = cv::Mat(cv::Point3f(markerFrame.imgGray.cols, markerFrame.imgGray.rows, 1));
2524 
2525  cv::Mat ulKf1, urKf1, llKf1, lrKf1, ulKf2, urKf2, llKf2, lrKf2;
2526  cv::Mat ul3D, ur3D, ll3D, lr3D;
2527  cv::Mat AC, AB, n;
2528 
2529  for (int i1 = 0; i1 < kfs.size() - 1; i1++)
2530  {
2531  WAIKeyFrame* kfCand1 = kfs[i1];
2532 
2533  if (kfCand1->isBad()) continue;
2534 
2535  // 2. Calculate homography between the keyframes and marker
2536  cv::Mat homography1;
2537  if (findMarkerHomography(markerFrame, kfCand1, homography1, 50))
2538  {
2539  // 3.a Calculate position of the marker's corner points on the first keyframe in 2D
2540  // NOTE(dgj1): assumption that intrinsic camera parameters are the same
2541  // TODO(dgj1): think about this assumption
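// map each marker corner into the keyframe as p' = H * p (homogeneous
// coordinates) and dehomogenize by dividing by the third component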
2542  ulKf1 = homography1 * ul;
2543  ulKf1 /= ulKf1.at<float>(2, 0);
2544  urKf1 = homography1 * ur;
2545  urKf1 /= urKf1.at<float>(2, 0);
2546  llKf1 = homography1 * ll;
2547  llKf1 /= llKf1.at<float>(2, 0);
2548  lrKf1 = homography1 * lr;
2549  lrKf1 /= lrKf1.at<float>(2, 0);
2550 
2551  for (int i2 = i1 + 1; i2 < kfs.size(); i2++)
2552  {
2553  WAIKeyFrame* kfCand2 = kfs[i2];
2554 
2555  if (kfCand2->isBad()) continue;
2556 
2557  cv::Mat homography2;
2558  if (findMarkerHomography(markerFrame, kfCand2, homography2, 50))
2559  {
2560  // 3.b Calculate position of the marker's corner points on the second keyframe in 2D
2561  // NOTE(dgj1): assumption that intrinsic camera parameters are the same
2562  // TODO(dgj1): think about this assumption
2563  ulKf2 = homography2 * ul;
2564  ulKf2 /= ulKf2.at<float>(2, 0);
2565  urKf2 = homography2 * ur;
2566  urKf2 /= urKf2.at<float>(2, 0);
2567  llKf2 = homography2 * ll;
2568  llKf2 /= llKf2.at<float>(2, 0);
2569  lrKf2 = homography2 * lr;
2570  lrKf2 /= lrKf2.at<float>(2, 0);
2571 
2572  // 4. Triangulate positions of the marker's corner points
2573  cv::Mat Rcw1 = kfCand1->GetRotation();
2574  cv::Mat Rwc1 = Rcw1.t();
2575  cv::Mat tcw1 = kfCand1->GetTranslation();
2576  cv::Mat Tcw1(3, 4, CV_32F);
2577  Rcw1.copyTo(Tcw1.colRange(0, 3));
2578  tcw1.copyTo(Tcw1.col(3));
2579 
2580  const float& fx1 = kfCand1->fx;
2581  const float& fy1 = kfCand1->fy;
2582  const float& cx1 = kfCand1->cx;
2583  const float& cy1 = kfCand1->cy;
2584  const float& invfx1 = kfCand1->invfx;
2585  const float& invfy1 = kfCand1->invfy;
2586 
2587  cv::Mat Rcw2 = kfCand2->GetRotation();
2588  cv::Mat Rwc2 = Rcw2.t();
2589  cv::Mat tcw2 = kfCand2->GetTranslation();
2590  cv::Mat Tcw2(3, 4, CV_32F);
2591  Rcw2.copyTo(Tcw2.colRange(0, 3));
2592  tcw2.copyTo(Tcw2.col(3));
2593 
2594  const float& fx2 = kfCand2->fx;
2595  const float& fy2 = kfCand2->fy;
2596  const float& cx2 = kfCand2->cx;
2597  const float& cy2 = kfCand2->cy;
2598  const float& invfx2 = kfCand2->invfx;
2599  const float& invfy2 = kfCand2->invfy;
2600 
2601  {
2602  cv::Mat ul1 = (cv::Mat_<float>(3, 1) << (ulKf1.at<float>(0, 0) - cx1) * invfx1, (ulKf1.at<float>(1, 0) - cy1) * invfy1, 1.0);
2603  cv::Mat ul2 = (cv::Mat_<float>(3, 1) << (ulKf2.at<float>(0, 0) - cx2) * invfx2, (ulKf2.at<float>(1, 0) - cy2) * invfy2, 1.0);
2604 
2605  // Linear Triangulation Method
2606  cv::Mat A(4, 4, CV_32F);
2607  A.row(0) = ul1.at<float>(0) * Tcw1.row(2) - Tcw1.row(0);
2608  A.row(1) = ul1.at<float>(1) * Tcw1.row(2) - Tcw1.row(1);
2609  A.row(2) = ul2.at<float>(0) * Tcw2.row(2) - Tcw2.row(0);
2610  A.row(3) = ul2.at<float>(1) * Tcw2.row(2) - Tcw2.row(1);
2611 
2612  cv::Mat w, u, vt;
2613  cv::SVD::compute(A, w, u, vt, cv::SVD::MODIFY_A | cv::SVD::FULL_UV);
2614 
2615  ul3D = vt.row(3).t();
2616 
2617  if (ul3D.at<float>(3) != 0)
2618  {
2619  // Euclidean coordinates
2620  ul3D = ul3D.rowRange(0, 3) / ul3D.at<float>(3);
2621  }
2622  }
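// The block above is the standard linear (DLT) triangulation: each view
// contributes two rows, x * Tcw.row(2) - Tcw.row(0) and y * Tcw.row(2) - Tcw.row(1),
// built from the normalized ray (x, y, 1) and the 3x4 projection matrix. The
// homogeneous 3D point is the right singular vector of A for the smallest
// singular value (the last row of vt), dehomogenized by its fourth component.
// The same pattern is repeated below for the remaining three marker corners.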
2623 
2624  {
2625  cv::Mat ur1 = (cv::Mat_<float>(3, 1) << (urKf1.at<float>(0, 0) - cx1) * invfx1, (urKf1.at<float>(1, 0) - cy1) * invfy1, 1.0);
2626  cv::Mat ur2 = (cv::Mat_<float>(3, 1) << (urKf2.at<float>(0, 0) - cx2) * invfx2, (urKf2.at<float>(1, 0) - cy2) * invfy2, 1.0);
2627 
2628  // Linear Triangulation Method
2629  cv::Mat A(4, 4, CV_32F);
2630  A.row(0) = ur1.at<float>(0) * Tcw1.row(2) - Tcw1.row(0);
2631  A.row(1) = ur1.at<float>(1) * Tcw1.row(2) - Tcw1.row(1);
2632  A.row(2) = ur2.at<float>(0) * Tcw2.row(2) - Tcw2.row(0);
2633  A.row(3) = ur2.at<float>(1) * Tcw2.row(2) - Tcw2.row(1);
2634 
2635  cv::Mat w, u, vt;
2636  cv::SVD::compute(A, w, u, vt, cv::SVD::MODIFY_A | cv::SVD::FULL_UV);
2637 
2638  ur3D = vt.row(3).t();
2639 
2640  if (ur3D.at<float>(3) != 0)
2641  {
2642  // Euclidean coordinates
2643  ur3D = ur3D.rowRange(0, 3) / ur3D.at<float>(3);
2644  }
2645  }
2646 
2647  {
2648  cv::Mat ll1 = (cv::Mat_<float>(3, 1) << (llKf1.at<float>(0, 0) - cx1) * invfx1, (llKf1.at<float>(1, 0) - cy1) * invfy1, 1.0);
2649  cv::Mat ll2 = (cv::Mat_<float>(3, 1) << (llKf2.at<float>(0, 0) - cx2) * invfx2, (llKf2.at<float>(1, 0) - cy2) * invfy2, 1.0);
2650 
2651  // Linear Triangulation Method
2652  cv::Mat A(4, 4, CV_32F);
2653  A.row(0) = ll1.at<float>(0) * Tcw1.row(2) - Tcw1.row(0);
2654  A.row(1) = ll1.at<float>(1) * Tcw1.row(2) - Tcw1.row(1);
2655  A.row(2) = ll2.at<float>(0) * Tcw2.row(2) - Tcw2.row(0);
2656  A.row(3) = ll2.at<float>(1) * Tcw2.row(2) - Tcw2.row(1);
2657 
2658  cv::Mat w, u, vt;
2659  cv::SVD::compute(A, w, u, vt, cv::SVD::MODIFY_A | cv::SVD::FULL_UV);
2660 
2661  ll3D = vt.row(3).t();
2662 
2663  if (ll3D.at<float>(3) != 0)
2664  {
2665  // Euclidean coordinates
2666  ll3D = ll3D.rowRange(0, 3) / ll3D.at<float>(3);
2667  }
2668  }
2669 
2670  {
2671  cv::Mat lr1 = (cv::Mat_<float>(3, 1) << (lrKf1.at<float>(0, 0) - cx1) * invfx1, (lrKf1.at<float>(1, 0) - cy1) * invfy1, 1.0);
2672  cv::Mat lr2 = (cv::Mat_<float>(3, 1) << (lrKf2.at<float>(0, 0) - cx2) * invfx2, (lrKf2.at<float>(1, 0) - cy2) * invfy2, 1.0);
2673 
2674  // Linear Triangulation Method
2675  cv::Mat A(4, 4, CV_32F);
2676  A.row(0) = lr1.at<float>(0) * Tcw1.row(2) - Tcw1.row(0);
2677  A.row(1) = lr1.at<float>(1) * Tcw1.row(2) - Tcw1.row(1);
2678  A.row(2) = lr2.at<float>(0) * Tcw2.row(2) - Tcw2.row(0);
2679  A.row(3) = lr2.at<float>(1) * Tcw2.row(2) - Tcw2.row(1);
2680 
2681  cv::Mat w, u, vt;
2682  cv::SVD::compute(A, w, u, vt, cv::SVD::MODIFY_A | cv::SVD::FULL_UV);
2683 
2684  lr3D = vt.row(3).t();
2685 
2686  if (lr3D.at<float>(3) != 0)
2687  {
2688  // Euclidean coordinates
2689  lr3D = lr3D.rowRange(0, 3) / lr3D.at<float>(3);
2690  }
2691  }
2692 
2693  AC = ll3D - ul3D;
2694  AB = ur3D - ul3D;
2695 
2696  cv::Vec3f vAC = AC;
2697  cv::Vec3f vAB = AB;
2698 
2699  cv::Vec3f vn = vAB.cross(vAC);
2700  n = cv::Mat(vn);
2701 
2702  cv::Mat AD = lr3D - ul3D;
2703  cv::Vec3f vAD = AD;
2704 
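// d is the distance of the fourth corner (lr3D) from the plane spanned by the
// other three corners: only if all four triangulated corners are (nearly)
// coplanar is the marker considered consistently observed in both keyframes.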
2705  float d = cv::norm(vn.dot(vAD)) / cv::norm(vn);
2706  if (d < 0.01f)
2707  {
2708  matchedKf1 = kfCand1;
2709  matchedKf2 = kfCand2;
2710 
2711  break;
2712  }
2713  }
2714  }
2715  }
2716 
2717  if (matchedKf2) break;
2718  }
2719 
2720  if (!matchedKf1 || !matchedKf2)
2721  {
2722  return false;
2723  }
2724 
2725  // 5. Cull map points outside of the marker
2726  std::vector<WAIMapPoint*> mapPoints = _map->GetAllMapPoints();
2727 
2728  cv::Mat system = cv::Mat::zeros(3, 3, CV_32F);
2729  AC.copyTo(system.rowRange(0, 3).col(0));
2730  AB.copyTo(system.rowRange(0, 3).col(1));
2731  n.copyTo(system.rowRange(0, 3).col(2));
2732 
2733  cv::Mat systemInv = system.inv();
2734 
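// sol = systemInv * (p - ul3D) expresses a map point in the (AC, AB, n) basis
// anchored at the upper-left corner: the first two coordinates run from 0 to 1
// across the marker edges and the third measures the offset along the plane
// normal, so the test below keeps only points on or close to the marker plane.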
2735  for (int i = 0; i < mapPoints.size(); i++)
2736  {
2737  WAIMapPoint* mp = mapPoints[i];
2738 
2739  if (mp->isBad()) continue;
2740 
2741  cv::Mat sol = systemInv * (mp->GetWorldPos() - ul3D);
2742 
2743  if (sol.at<float>(0, 0) < 0 || sol.at<float>(0, 0) > 1 ||
2744  sol.at<float>(1, 0) < 0 || sol.at<float>(1, 0) > 1 ||
2745  sol.at<float>(2, 0) < -0.1f || sol.at<float>(2, 0) > 0.1f)
2746  {
2747  mp->SetBadFlag();
2748  }
2749  }
2750 #if 1
2751  for (int i = 0; i < kfs.size(); i++)
2752  {
2753  WAIKeyFrame* kf = kfs[i];
2754 
2755  if (kf->mnId == 0 || kf->isBad()) continue;
2756 
2757  int mpCount = 0;
2758 
2759  std::vector<WAIMapPoint*> mps = kf->GetMapPointMatches();
2760  for (int j = 0; j < mps.size(); j++)
2761  {
2762  WAIMapPoint* mp = mps[j];
2763 
2764  if (!mp || mp->isBad()) continue;
2765 
2766  mpCount++;
2767  }
2768 
2769  if (mpCount <= 0)
2770  {
2771  kf->SetBadFlag();
2772  }
2773  }
2774 
2775 #else
2776 
2777  // Cull redundant keyframes
2778  float cullRedundantPerc = 0.95f; //TODO(dgj1): make parametrizable
2779  for (int i = 0; i < kfs.size(); i++)
2780  {
2781  WAIKeyFrame* kf = kfs[i];
2782 
2783  vector<WAIKeyFrame*> vpLocalKeyFrames = kf->GetVectorCovisibleKeyFrames();
2784 
2785  for (vector<WAIKeyFrame*>::iterator vit = vpLocalKeyFrames.begin(), vend = vpLocalKeyFrames.end(); vit != vend; vit++)
2786  {
2787  WAIKeyFrame* pKF = *vit;
2788  //do not cull the first keyframe
2789  if (pKF->mnId == 0)
2790  continue;
2791  //do not cull fixed keyframes
2792  if (pKF->isFixed())
2793  continue;
2794 
2795  const vector<WAIMapPoint*> vpMapPoints = pKF->GetMapPointMatches();
2796 
2797  const int thObs = 3;
2798  int nRedundantObservations = 0;
2799  int nMPs = 0;
2800  for (size_t i = 0, iend = vpMapPoints.size(); i < iend; i++)
2801  {
2802  WAIMapPoint* pMP = vpMapPoints[i];
2803  if (pMP)
2804  {
2805  if (!pMP->isBad())
2806  {
2807  nMPs++;
2808  if (pMP->Observations() > thObs)
2809  {
2810  const int& scaleLevel = pKF->mvKeysUn[i].octave;
2811  const std::map<WAIKeyFrame*, size_t> observations = pMP->GetObservations();
2812  int nObs = 0;
2813  for (std::map<WAIKeyFrame*, size_t>::const_iterator mit = observations.begin(), mend = observations.end(); mit != mend; mit++)
2814  {
2815  WAIKeyFrame* pKFi = mit->first;
2816  if (pKFi == pKF)
2817  continue;
2818  const int& scaleLeveli = pKFi->mvKeysUn[mit->second].octave;
2819 
2820  if (scaleLeveli <= scaleLevel + 1)
2821  {
2822  nObs++;
2823  if (nObs >= thObs)
2824  {
2825  break;
2826  }
2827  }
2828  }
2829  if (nObs >= thObs)
2830  {
2831  nRedundantObservations++;
2832  }
2833  }
2834  }
2835  }
2836  }
2837 
2838  if (nMPs == 0 || nRedundantObservations > cullRedundantPerc * nMPs)
2839  {
2840  pKF->SetBadFlag();
2841  }
2842  }
2843  }
2844 #endif
2845  cv::Mat systemNorm = cv::Mat::zeros(3, 3, CV_32F);
2846  systemNorm.rowRange(0, 3).col(0) = system.rowRange(0, 3).col(1) / cv::norm(AB);
2847  systemNorm.rowRange(0, 3).col(1) = system.rowRange(0, 3).col(0) / cv::norm(AC);
2848  systemNorm.rowRange(0, 3).col(2) = system.rowRange(0, 3).col(2) / cv::norm(n);
2849 
2850  cv::Mat systemNormInv = systemNorm.inv();
2851 
2852  nodeTransform = cv::Mat::eye(4, 4, CV_32F);
2853  cv::Mat ul3Dinv = -systemNormInv * ul3D;
2854  ul3Dinv.copyTo(nodeTransform.rowRange(0, 3).col(3));
2855  systemNormInv.copyTo(nodeTransform.rowRange(0, 3).colRange(0, 3));
2856 
2857  cv::Mat scaleMat = cv::Mat::eye(4, 4, CV_32F);
2858  float markerWidthInRef = cv::norm(ul3D - ur3D);
2859  float scaleFactor = markerWidthInM / markerWidthInRef;
2860  scaleMat.at<float>(0, 0) = scaleFactor;
2861  scaleMat.at<float>(1, 1) = scaleFactor;
2862  scaleMat.at<float>(2, 2) = scaleFactor;
2863 
2864  nodeTransform = scaleMat * nodeTransform;
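// Worked example for the scaling above (hypothetical numbers): if the printed
// marker is 0.3 m wide (markerWidthInM) but its triangulated width in map units
// is 0.06 (markerWidthInRef), then scaleFactor = 5 and the transform scales the
// map into metric units.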
2865 
2866  if (_mpUL)
2867  {
2868  delete _mpUL;
2869  _mpUL = nullptr;
2870  }
2871  if (_mpUR)
2872  {
2873  delete _mpUR;
2874  _mpUR = nullptr;
2875  }
2876  if (_mpLL)
2877  {
2878  delete _mpLL;
2879  _mpLL = nullptr;
2880  }
2881  if (_mpLR)
2882  {
2883  delete _mpLR;
2884  _mpLR = nullptr;
2885  }
2886 
2887  _mpUL = new WAIMapPoint(0, ul3D, false);
2888  _mpUR = new WAIMapPoint(0, ur3D, false);
2889  _mpLL = new WAIMapPoint(0, ll3D, false);
2890  _mpLR = new WAIMapPoint(0, lr3D, false);
2891 
2892  return true;
2893 }
2894 
2895 int WAI::ModeOrbSlam2::getKeyPointCount()
2896 {
2897  int result = mCurrentFrame.N;
2898 
2899  return result;
2900 }