diff --git a/CMakeLists.txt b/CMakeLists.txt
index 8d12f9aa4e..eb36792fc4 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -33,6 +33,9 @@ SET(PROJECT_VERSION_PATCH ${RTABMAP_PATCH_VERSION})
 SET(PROJECT_SOVERSION "${PROJECT_VERSION_MAJOR}.${PROJECT_VERSION_MINOR}")
 
 ####### COMPILATION PARAMS #######
+set(CMAKE_CXX_STANDARD 17)
+set(CMAKE_CXX_STANDARD_REQUIRED ON)
+
 # In case of Makefiles if the user does not setup CMAKE_BUILD_TYPE, assume it's Release:
 IF(${CMAKE_GENERATOR} MATCHES ".*Makefiles")
 	IF("${CMAKE_BUILD_TYPE}" STREQUAL "")
diff --git a/corelib/include/rtabmap/core/FlannIndex.h b/corelib/include/rtabmap/core/FlannIndex.h
index a1cd253e56..a9b7f46358 100644
--- a/corelib/include/rtabmap/core/FlannIndex.h
+++ b/corelib/include/rtabmap/core/FlannIndex.h
@@ -29,8 +29,10 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 #define CORELIB_SRC_FLANNINDEX_H_
 
 #include "rtabmap/core/rtabmap_core_export.h" // DLL export/import defines
+#include <filesystem>
 #include <opencv2/opencv.hpp>
 #include <list>
+#include <fstream>
 
 namespace rtabmap {
 
@@ -40,11 +42,19 @@ class RTABMAP_CORE_EXPORT FlannIndex
 	FlannIndex();
 	virtual ~FlannIndex();
 
+	// serialize and save methods
+	void save(const std::filesystem::path& path);
+	void serialize(const std::filesystem::path& filename);
+
+	// deserialize and load methods
+	void load(const std::filesystem::path& dir);
+	void deserialize(const std::filesystem::path& file);
+
 	void release();
 
-	size_t indexedFeatures() const;
+	[[nodiscard]] size_t indexedFeatures() const;
 
 	// return Bytes
-	size_t memoryUsed() const;
+	[[nodiscard]] size_t memoryUsed() const;
 
 	// Note that useDistanceL1 doesn't have any effect if LSH is used
 	void buildLinearIndex(
@@ -71,8 +81,8 @@ class RTABMAP_CORE_EXPORT FlannIndex
 
 	bool isBuilt();
 
-	int featuresType() const {return featuresType_;}
-	int featuresDim() const {return featuresDim_;}
+	[[nodiscard]] int featuresType() const {return featuresType_;}
+	[[nodiscard]] int featuresDim() const {return featuresDim_;}
 
 	std::vector<unsigned int> addPoints(const cv::Mat & features);
 
diff --git a/corelib/include/rtabmap/core/Memory.h b/corelib/include/rtabmap/core/Memory.h
index aa11fe6dd7..4bb7ffe438 100644
--- a/corelib/include/rtabmap/core/Memory.h
+++ b/corelib/include/rtabmap/core/Memory.h
@@ -35,6 +35,7 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 #include "rtabmap/core/SensorData.h"
 #include "rtabmap/core/Link.h"
 #include "rtabmap/core/Features2d.h"
+#include <string_view>
 #include <typeinfo>
 #include <list>
 #include <map>
@@ -226,7 +227,7 @@ class RTABMAP_CORE_EXPORT Memory
 	void dumpMemoryTree(const char * fileNameTree) const;
 	virtual void dumpMemory(std::string directory) const;
 	virtual void dumpSignatures(const char * fileNameSign, bool words3D) const;
-	void dumpDictionary(const char * fileNameRef, const char * fileNameDesc) const;
+	void dumpDictionary(std::string_view dir) const;
 	unsigned long getMemoryUsed() const; //Bytes
 
 	void generateGraph(const std::string & fileName, const std::set<int> & ids = std::set<int>());
diff --git a/corelib/include/rtabmap/core/VWDictionary.h b/corelib/include/rtabmap/core/VWDictionary.h
index fbfa7ba542..5328413037 100644
--- a/corelib/include/rtabmap/core/VWDictionary.h
+++ b/corelib/include/rtabmap/core/VWDictionary.h
@@ -29,12 +29,15 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "rtabmap/core/rtabmap_core_export.h" // DLL export/import defines +#include #include #include #include #include #include +#include #include "rtabmap/core/Parameters.h" +#include namespace rtabmap { @@ -74,7 +77,8 @@ class RTABMAP_CORE_EXPORT VWDictionary } public: - VWDictionary(const ParametersMap & parameters = ParametersMap()); + explicit VWDictionary(const ParametersMap & parameters = ParametersMap()); + void setSavedIndex(const std::filesystem::path& dir); virtual ~VWDictionary(); virtual void parseParameters(const ParametersMap & parameters); @@ -86,60 +90,65 @@ class RTABMAP_CORE_EXPORT VWDictionary int signatureId); virtual void addWord(VisualWord * vw); - std::vector findNN(const std::list & vws) const; - std::vector findNN(const cv::Mat & descriptors) const; + [[nodiscard]] std::vector findNN(const std::list & vws) const; + [[nodiscard]] std::vector findNN(const cv::Mat & queryIn) const; void addWordRef(int wordId, int signatureId); void removeAllWordRef(int wordId, int signatureId); - const VisualWord * getWord(int id) const; - VisualWord * getUnusedWord(int id) const; + [[nodiscard]] const VisualWord * getWord(int id) const; + [[nodiscard]] VisualWord * getUnusedWord(int id) const; void setLastWordId(int id) {_lastWordId = id;} - const std::map & getVisualWords() const {return _visualWords;} - float getNndrRatio() const {return _nndrRatio;} - unsigned int getNotIndexedWordsCount() const {return (int)_notIndexedWords.size();} - int getLastIndexedWordId() const; - int getTotalActiveReferences() const {return _totalActiveReferences;} - unsigned int getIndexedWordsCount() const; - unsigned int getIndexMemoryUsed() const; // KB - unsigned long getMemoryUsed() const; //Bytes + [[nodiscard]] const std::map & getVisualWords() const {return _visualWords;} + [[nodiscard]] float getNndrRatio() const {return _nndrRatio;} + [[nodiscard]] unsigned int getNotIndexedWordsCount() const {return (int)_notIndexedWords.size();} + [[nodiscard]] int getLastIndexedWordId() const; + [[nodiscard]] int getTotalActiveReferences() const {return _totalActiveReferences;} + [[nodiscard]] unsigned int getIndexedWordsCount() const; + [[nodiscard]] unsigned int getIndexMemoryUsed() const; // KB + [[nodiscard]] unsigned long getMemoryUsed() const; //Bytes bool setNNStrategy(NNStrategy strategy); // Return true if the search tree has been re-initialized - bool isIncremental() const {return _incrementalDictionary;} - bool isIncrementalFlann() const {return _incrementalFlann;} + [[nodiscard]] bool isIncremental() const {return _incrementalDictionary;} + [[nodiscard]] bool isIncrementalFlann() const {return _incrementalFlann;} void setIncrementalDictionary(); void setFixedDictionary(const std::string & dictionaryPath); void exportDictionary(const char * fileNameReferences, const char * fileNameDescriptors) const; void clear(bool printWarningsIfNotEmpty = true); - std::vector getUnusedWords() const; - std::vector getUnusedWordIds() const; - unsigned int getUnusedWordsSize() const {return (int)_unusedWords.size();} + [[nodiscard]] std::vector getUnusedWords() const; + [[nodiscard]] std::vector getUnusedWordIds() const; + [[nodiscard]] unsigned int getUnusedWordsSize() const {return (int)_unusedWords.size();} void removeWords(const std::vector & words); // caller must delete the words void deleteUnusedWords(); + void save(std::string_view dir) const; + void load(std::string_view dir); + void loadMapIndex(const std::filesystem::path& file); + public: static cv::Mat convertBinTo32F(const cv::Mat & descriptorsIn, 
 			bool byteToFloat = true);
 	static cv::Mat convert32FToBin(const cv::Mat & descriptorsIn,
 			bool byteToFloat = true);
 
 protected:
 	int getNextId();
-
-protected:
+	void saveVars(const std::filesystem::path& filename) const;
 
 	std::map<int, VisualWord *> _visualWords; // <id, VisualWord*>
 	int _totalActiveReferences; // keep track of all references for updating the common signature
 
 private:
+	bool _useSavedFile{false};
 	bool _incrementalDictionary;
 	bool _incrementalFlann;
 	float _rebalancingFactor;
 	bool _byteToFloat;
 	float _nndrRatio;
+	std::string save_dir_;
 	std::string _dictionaryPath; // a pre-computed dictionary (.txt or .db)
 	std::string _newDictionaryPath; // a pre-computed dictionary (.txt or .db)
 	bool _newWordsComparedTogether;
 	int _lastWordId;
 	bool useDistanceL1_;
-	FlannIndex * _flannIndex;
+	FlannIndex * _flannIndex; // serialized via save()/load()
 	cv::Mat _dataTree;
 	NNStrategy _strategy;
 	std::map<int, int> _mapIndexId;
diff --git a/corelib/src/FlannIndex.cpp b/corelib/src/FlannIndex.cpp
index a593257780..141ff33892 100644
--- a/corelib/src/FlannIndex.cpp
+++ b/corelib/src/FlannIndex.cpp
@@ -33,7 +33,7 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 namespace rtabmap {
 
 FlannIndex::FlannIndex():
-		index_(0),
+		index_(nullptr),
 		nextIndex_(0),
 		featuresType_(0),
 		featuresDim_(0),
@@ -47,6 +47,33 @@ FlannIndex::~FlannIndex()
 	this->release();
 }
 
+void FlannIndex::save(const std::filesystem::path& path)
+{
+	UDEBUG("");
+	if(!index_)
+	{
+		UERROR("Cannot save FlannIndex when index_ is nullptr");
+		return;
+	}
+
+	if (featuresType_ == CV_8UC1)
+	{
+		static_cast<rtflann::Index<rtflann::Hamming<unsigned char> >*>(index_)->save(path);
+	}
+	else if (useDistanceL1_)
+	{
+		static_cast<rtflann::Index<rtflann::L1<float> >*>(index_)->save(path);
+	}
+	else if (featuresDim_ <= 3)
+	{
+		static_cast<rtflann::Index<rtflann::L2_Simple<float> >*>(index_)->save(path);
+	}
+	else
+	{
+		static_cast<rtflann::Index<rtflann::L2<float> >*>(index_)->save(path);
+	}
+}
+
 void FlannIndex::release()
 {
 	UDEBUG("");
@@ -54,24 +81,24 @@ void FlannIndex::release()
 	{
 		if(featuresType_ == CV_8UC1)
 		{
-			delete (rtflann::Index<rtflann::Hamming<unsigned char> >*)index_;
+			delete static_cast<rtflann::Index<rtflann::Hamming<unsigned char> >*>(index_);
 		}
 		else
 		{
 			if(useDistanceL1_)
 			{
-				delete (rtflann::Index<rtflann::L1<float> >*)index_;
+				delete static_cast<rtflann::Index<rtflann::L1<float> >*>(index_);
 			}
 			else if(featuresDim_ <= 3)
 			{
-				delete (rtflann::Index<rtflann::L2_Simple<float> >*)index_;
+				delete static_cast<rtflann::Index<rtflann::L2_Simple<float> >*>(index_);
 			}
 			else
 			{
-				delete (rtflann::Index<rtflann::L2<float> >*)index_;
+				delete static_cast<rtflann::Index<rtflann::L2<float> >*>(index_);
 			}
 		}
-		index_ = 0;
+		index_ = nullptr;
 	}
 	nextIndex_ = 0;
 	isLSH_ = false;
@@ -80,6 +107,118 @@ void FlannIndex::release()
 	UDEBUG("");
 }
 
+void FlannIndex::deserialize(const std::filesystem::path& file)
+{
+	std::ifstream inFile(file, std::ios::binary);
+	if (!inFile.is_open()) {
+		throw std::runtime_error("Unable to open file for reading");
+	}
+
+	int mapSize;
+	inFile.read(reinterpret_cast<char*>(&mapSize), sizeof(mapSize));
+	inFile.read(reinterpret_cast<char*>(&nextIndex_), sizeof(nextIndex_));
+	inFile.read(reinterpret_cast<char*>(&featuresType_), sizeof(featuresType_));
+	inFile.read(reinterpret_cast<char*>(&featuresDim_), sizeof(featuresDim_));
+	inFile.read(reinterpret_cast<char*>(&isLSH_), sizeof(isLSH_));
+	inFile.read(reinterpret_cast<char*>(&useDistanceL1_), sizeof(useDistanceL1_));
+	inFile.read(reinterpret_cast<char*>(&rebalancingFactor_), sizeof(rebalancingFactor_));
+
+	addedDescriptors_.clear();
+	for (int i = 0; i < mapSize; ++i) {
+		// Read the size of the compressed data
+		int compressedSize;
+		inFile.read(reinterpret_cast<char*>(&compressedSize), sizeof(compressedSize));
+		std::vector<char> compressedBuffer(compressedSize);
+		inFile.read(compressedBuffer.data(), compressedSize);
+
+		// Decompress
+		size_t decompressedSize = LZ4_compressBound(compressedSize); // Estimate decompressed size
+		std::vector<char> decompressedBuffer(decompressedSize);
+		[[maybe_unused]] int actualDecompressedSize = LZ4_decompress_safe(compressedBuffer.data(), decompressedBuffer.data(), compressedSize, (int)decompressedSize);
+		// UDEBUG("Descriptor size after decompression: %d", actualDecompressedSize);
+
+		// Extract data from decompressed buffer
+		char* dataPtr = decompressedBuffer.data();
+		int key, rows, cols, type, channels;
+		memcpy(&key, dataPtr, sizeof(key)); dataPtr += sizeof(key);
+		memcpy(&rows, dataPtr, sizeof(rows)); dataPtr += sizeof(rows);
+		memcpy(&cols, dataPtr, sizeof(cols)); dataPtr += sizeof(cols);
+		memcpy(&type, dataPtr, sizeof(type)); dataPtr += sizeof(type);
+		memcpy(&channels, dataPtr, sizeof(channels)); dataPtr += sizeof(channels);
+		cv::Mat mat(rows, cols, type, dataPtr);
+		addedDescriptors_[key] = mat.clone(); // Use clone if necessary
+	}
+}
+
+// Define a function to build the index
+template<typename Distance>
+rtflann::Index<Distance>* buildIndex(const rtflann::IndexParams& params)
+{
+	return new rtflann::Index<Distance>(params);
+}
+
+void FlannIndex::load(const std::filesystem::path& dir)
+{
+	auto start = std::chrono::high_resolution_clock::now();
+	auto var_file = dir / "vars.bin";
+	deserialize(var_file);
+
+	// Release any existing index before loading the saved one
+	if(index_)
+	{
+		if(featuresType_ == CV_8UC1)
+		{
+			delete static_cast<rtflann::Index<rtflann::Hamming<unsigned char> >*>(index_);
+		}
+		else
+		{
+			if(useDistanceL1_)
+			{
+				delete static_cast<rtflann::Index<rtflann::L1<float> >*>(index_);
+			}
+			else if(featuresDim_ <= 3)
+			{
+				delete static_cast<rtflann::Index<rtflann::L2_Simple<float> >*>(index_);
+			}
+			else
+			{
+				delete static_cast<rtflann::Index<rtflann::L2<float> >*>(index_);
+			}
+		}
+		index_ = nullptr;
+	}
+
+	auto index_file = dir / "index.bin";
+	rtflann::IndexParams params;
+	params["filename"] = index_file.string();
+	params["algorithm"] = rtflann::FLANN_INDEX_SAVED;
+	params["save_dataset"] = true;
+
+	if(featuresType_ == CV_8UC1)
+	{
+		index_ = buildIndex<rtflann::Hamming<unsigned char> >(params);
+		assert(index_);
+	}
+	else
+	{
+		if(useDistanceL1_)
+		{
+			index_ = buildIndex<rtflann::L1<float> >(params);
+		}
+		else if(featuresDim_ <= 3)
+		{
+			index_ = buildIndex<rtflann::L2_Simple<float> >(params);
+		}
+		else
+		{
+			index_ = buildIndex<rtflann::L2<float> >(params);
+		}
+	}
+	auto end = std::chrono::high_resolution_clock::now();
+	auto duration = std::chrono::duration_cast<std::chrono::milliseconds>(end - start);
+	std::cout << "Done building index from saved files at: " << index_file.string() << " within " << duration.count() << " ms" << std::endl;
+}
+
 size_t FlannIndex::indexedFeatures() const
 {
 	if(!index_)
@@ -146,7 +285,7 @@ void FlannIndex::buildLinearIndex(
 {
 	UDEBUG("");
 	this->release();
-	UASSERT(index_ == 0);
+	UASSERT(index_ == nullptr);
 	UASSERT(features.type() == CV_32FC1 || features.type() == CV_8UC1);
 	featuresType_ = features.type();
 	featuresDim_ = features.cols;
@@ -154,7 +293,7 @@ void FlannIndex::buildLinearIndex(
 	rebalancingFactor_ = rebalancingFactor;
 
 	rtflann::LinearIndexParams params;
-
+	params["save_dataset"] = true;
 	if(featuresType_ == CV_8UC1)
 	{
 		rtflann::Matrix<unsigned char> dataset(features.data, features.rows, features.cols);
@@ -207,7 +346,7 @@ void FlannIndex::buildKDTreeIndex(
 {
 	UDEBUG("");
 	this->release();
-	UASSERT(index_ == 0);
+	UASSERT(index_ == nullptr);
 	UASSERT(features.type() == CV_32FC1 || features.type() == CV_8UC1);
 	featuresType_ = features.type();
 	featuresDim_ = features.cols;
@@ -215,6 +354,7 @@ void FlannIndex::buildKDTreeIndex(
 	rebalancingFactor_ = rebalancingFactor;
 
 	rtflann::KDTreeIndexParams params(trees);
+	params["save_dataset"] = true;
 
 	if(featuresType_ == CV_8UC1)
 	{
@@ -269,7 +409,7 @@ void FlannIndex::buildKDTreeSingleIndex(
 {
 	UDEBUG("");
 	this->release();
-	UASSERT(index_ == 0);
+	UASSERT(index_ == nullptr);
 	UASSERT(features.type() == CV_32FC1 || features.type() == CV_8UC1);
 	featuresType_ = features.type();
 	featuresDim_ = features.cols;
@@ -331,15 +471,18 @@ void FlannIndex::buildLSHIndex(
 {
 	UDEBUG("");
 	this->release();
-	UASSERT(index_ == 0);
+	UASSERT(index_ == nullptr);
 	UASSERT(features.type() == CV_8UC1);
 	featuresType_ = features.type();
 	featuresDim_ = features.cols;
 	useDistanceL1_ = true;
 	rebalancingFactor_ = rebalancingFactor;
 
+	auto params = rtflann::LshIndexParams(12, 20, 2);
+	params["save_dataset"] = true;
+
 	rtflann::Matrix<unsigned char> dataset(features.data, features.rows, features.cols);
-	index_ = new rtflann::Index<rtflann::Hamming<unsigned char> >(dataset, rtflann::LshIndexParams(12, 20, 2));
+	index_ = new rtflann::Index<rtflann::Hamming<unsigned char> >(dataset, params);
 	((rtflann::Index<rtflann::Hamming<unsigned char> >*)index_)->buildIndex();
 
 	// incremental FLANN: we should add all headers separately in case we remove
@@ -362,7 +505,7 @@ void FlannIndex::buildLSHIndex(
 
 bool FlannIndex::isBuilt()
 {
-	return index_!=0;
+	return index_!=nullptr;
 }
 
 std::vector<unsigned int> FlannIndex::addPoints(const cv::Mat & features)
@@ -370,7 +513,7 @@ std::vector<unsigned int> FlannIndex::addPoints(const cv::Mat & features)
 	if(!index_)
 	{
 		UERROR("Flann index not yet created!");
-		return std::vector<unsigned int>();
+		return {};
 	}
 	UASSERT(features.type() == featuresType_);
 	UASSERT(features.cols == featuresDim_);
@@ -379,7 +522,7 @@ std::vector<unsigned int> FlannIndex::addPoints(const cv::Mat & features)
 	if(featuresType_ == CV_8UC1)
 	{
 		rtflann::Matrix<unsigned char> points(features.data, features.rows, features.cols);
-		rtflann::Index<rtflann::Hamming<unsigned char> > * index = (rtflann::Index<rtflann::Hamming<unsigned char> >*)index_;
+		auto * index = (rtflann::Index<rtflann::Hamming<unsigned char> >*)index_;
 		removedPts = index->removedCount();
 		index->addPoints(points, 0);
 		// Rebuild index if it is now X times in size
@@ -396,7 +539,7 @@ std::vector<unsigned int> FlannIndex::addPoints(const cv::Mat & features)
 		rtflann::Matrix<float> points((float*)features.data, features.rows, features.cols);
 		if(useDistanceL1_)
 		{
-			rtflann::Index<rtflann::L1<float> > * index = (rtflann::Index<rtflann::L1<float> >*)index_;
+			auto * index = (rtflann::Index<rtflann::L1<float> >*)index_;
 			removedPts = index->removedCount();
 			index->addPoints(points, 0);
 			// Rebuild index if it doubles in size
@@ -410,7 +553,7 @@ std::vector<unsigned int> FlannIndex::addPoints(const cv::Mat & features)
 		}
 		else if(featuresDim_ <= 3)
 		{
-			rtflann::Index<rtflann::L2_Simple<float> > * index = (rtflann::Index<rtflann::L2_Simple<float> >*)index_;
+			auto * index = (rtflann::Index<rtflann::L2_Simple<float> >*)index_;
 			removedPts = index->removedCount();
 			index->addPoints(points, 0);
 			// Rebuild index if it doubles in size
@@ -424,7 +567,7 @@ std::vector<unsigned int> FlannIndex::addPoints(const cv::Mat & features)
 		}
 		else
 		{
-			rtflann::Index<rtflann::L2<float> > * index = (rtflann::Index<rtflann::L2<float> >*)index_;
+			auto * index = (rtflann::Index<rtflann::L2<float> >*)index_;
 			removedPts = index->removedCount();
 			index->addPoints(points, 0);
 			// Rebuild index if it doubles in size
@@ -442,9 +585,9 @@ std::vector<unsigned int> FlannIndex::addPoints(const cv::Mat & features)
 	{
 		UASSERT(removedPts == removedIndexes_.size());
 		// clean not used features
-		for(std::list<int>::iterator iter=removedIndexes_.begin(); iter!=removedIndexes_.end(); ++iter)
+		for(int & removedIndex : removedIndexes_)
 		{
-			addedDescriptors_.erase(*iter);
+			addedDescriptors_.erase(removedIndex);
 		}
 		removedIndexes_.clear();
 	}
@@ -491,7 +634,7 @@ void FlannIndex::removePoint(unsigned int index)
 		((rtflann::Index<rtflann::L2<float> >*)index_)->removePoint(index);
 	}
 
-	removedIndexes_.push_back(index);
+	removedIndexes_.push_back((int)index);
 }
 
 void FlannIndex::knnSearch(
@@ -592,4 +735,46 @@ void FlannIndex::radiusSearch(
 	}
 }
 
+void FlannIndex::serialize(const std::filesystem::path& filename)
+{
+	std::ofstream outFile(filename, std::ios::binary);
+	if (!outFile.is_open()) {
+		throw std::runtime_error("Unable to open file for writing");
+	}
+	int mapSize = (int)addedDescriptors_.size();
outFile.write(reinterpret_cast(&mapSize), sizeof(mapSize)); + outFile.write(reinterpret_cast(&nextIndex_), sizeof(nextIndex_)); + outFile.write(reinterpret_cast(&featuresType_), sizeof(featuresType_)); + outFile.write(reinterpret_cast(&featuresDim_), sizeof(featuresDim_)); + outFile.write(reinterpret_cast(&isLSH_), sizeof(isLSH_)); + outFile.write(reinterpret_cast(&useDistanceL1_), sizeof(useDistanceL1_)); + outFile.write(reinterpret_cast(&rebalancingFactor_), sizeof(rebalancingFactor_)); + for (const auto& entry : addedDescriptors_) { + // Prepare data for compression + std::vector buffer; + int key = entry.first; + const cv::Mat& m = entry.second; + int rows = m.rows; + int cols = m.cols; + int type = m.type(); + int channels = m.channels(); + buffer.insert(buffer.end(), reinterpret_cast(&key), reinterpret_cast(&key) + sizeof(key)); + buffer.insert(buffer.end(), reinterpret_cast(&rows), reinterpret_cast(&rows) + sizeof(rows)); + buffer.insert(buffer.end(), reinterpret_cast(&cols), reinterpret_cast(&cols) + sizeof(cols)); + buffer.insert(buffer.end(), reinterpret_cast(&type), reinterpret_cast(&type) + sizeof(type)); + buffer.insert(buffer.end(), reinterpret_cast(&channels), reinterpret_cast(&channels) ) + sizeof(channels); + int dataSize = rows * cols * (int)m.elemSize(); + buffer.insert(buffer.end(), reinterpret_cast(m.data), reinterpret_cast(m.data) + dataSize); + + // Compress the buffer + size_t compressedSize = LZ4_compressBound((int)buffer.size()); + std::vector compressedBuffer(compressedSize); + int actualCompressedSize = LZ4_compress_default(buffer.data(), compressedBuffer.data(), (int)buffer.size(), (int)compressedSize); + + // Write the size of the compressed data followed by the compressed data itself + outFile.write(reinterpret_cast(&actualCompressedSize), sizeof(actualCompressedSize)); + outFile.write(compressedBuffer.data(), actualCompressedSize); + } + outFile.close(); +} } /* namespace rtabmap */ diff --git a/corelib/src/Memory.cpp b/corelib/src/Memory.cpp index 4e70acad47..308748fe0b 100644 --- a/corelib/src/Memory.cpp +++ b/corelib/src/Memory.cpp @@ -374,6 +374,7 @@ void Memory::loadDataFromDb(bool postInitClosingEvents) // Now load the dictionary if we have a connection if(postInitClosingEvents) UEventsManager::post(new RtabmapEventInit("Loading dictionary...")); + _vwd->setSavedIndex(std::filesystem::path(_dbDriver->getUrl()).parent_path() / "vocabulary"); UDEBUG("Loading dictionary..."); if(loadAllNodesInWM) { @@ -3589,17 +3590,17 @@ void Memory::removeVirtualLinks(int signatureId) void Memory::dumpMemory(std::string directory) const { UINFO("Dumping memory to directory \"%s\"", directory.c_str()); - this->dumpDictionary((directory+"/DumpMemoryWordRef.txt").c_str(), (directory+"/DumpMemoryWordDesc.txt").c_str()); + this->dumpDictionary(directory); this->dumpSignatures((directory + "/DumpMemorySign.txt").c_str(), false); this->dumpSignatures((directory + "/DumpMemorySign3.txt").c_str(), true); this->dumpMemoryTree((directory + "/DumpMemoryTree.txt").c_str()); } -void Memory::dumpDictionary(const char * fileNameRef, const char * fileNameDesc) const +void Memory::dumpDictionary(std::string_view dir) const { if(_vwd) { - _vwd->exportDictionary(fileNameRef, fileNameDesc); + _vwd->save(dir); } } diff --git a/corelib/src/Rtabmap.cpp b/corelib/src/Rtabmap.cpp index c815fbc8df..14cf4639e8 100644 --- a/corelib/src/Rtabmap.cpp +++ b/corelib/src/Rtabmap.cpp @@ -321,7 +321,7 @@ void Rtabmap::init(const ParametersMap & parameters, const std::string & databas _databasePath = 
 	_databasePath = databasePath;
 	if(!_databasePath.empty())
 	{
-		UASSERT(UFile::getExtension(_databasePath).compare("db") == 0);
+		UASSERT(UFile::getExtension(_databasePath) == "db");
 		UINFO("Using database \"%s\".", _databasePath.c_str());
 	}
 	else
@@ -4887,14 +4887,8 @@ void Rtabmap::dumpData() const
 	UDEBUG("");
 	if(_memory)
 	{
-		if(this->getWorkingDir().empty())
-		{
-			UERROR("Working directory not set.");
-		}
-		else
-		{
-			_memory->dumpMemory(this->getWorkingDir());
-		}
+		auto working_dir = std::filesystem::path(_databasePath).parent_path();
+		_memory->dumpDictionary((working_dir / "vocabulary").string());
 	}
 }
 
diff --git a/corelib/src/VWDictionary.cpp b/corelib/src/VWDictionary.cpp
index fc1bb172f8..d077b8146e 100644
--- a/corelib/src/VWDictionary.cpp
+++ b/corelib/src/VWDictionary.cpp
@@ -30,13 +30,10 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 #include "rtabmap/core/Signature.h"
 #include "rtabmap/core/DBDriver.h"
-#include "rtabmap/core/Parameters.h"
 #include "rtabmap/core/FlannIndex.h"
 #include "rtabmap/utilite/UtiLite.h"
 
-#include <fstream>
-
 #if CV_MAJOR_VERSION < 3
 #ifdef HAVE_OPENCV_GPU
 #include <opencv2/gpu/gpu.hpp>
@@ -48,9 +45,6 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 #endif
 #endif
 
-#include <fstream>
-#include <string>
-
 #define KDTREE_SIZE 4
 #define KNN_CHECKS 32
 
@@ -99,7 +93,7 @@ void VWDictionary::parseParameters(const ParametersMap & parameters)
 	bool incrementalDictionary = _incrementalDictionary;
 	if((iter=parameters.find(Parameters::kKpDictionaryPath())) != parameters.end())
 	{
-		_newDictionaryPath = (*iter).second.c_str();
+		_newDictionaryPath = (*iter).second;
 	}
 	if((iter=parameters.find(Parameters::kKpIncrementalDictionary())) != parameters.end())
 	{
@@ -110,7 +104,8 @@ void VWDictionary::parseParameters(const ParametersMap & parameters)
 	bool treeUpdated = false;
 	if((iter=parameters.find(Parameters::kKpNNStrategy())) != parameters.end())
 	{
-		NNStrategy nnStrategy = (NNStrategy)std::atoi((*iter).second.c_str());
+		// auto nnStrategy = (NNStrategy)std::atoi((*iter).second.c_str());
+		auto nnStrategy = static_cast<NNStrategy>(std::strtol((*iter).second.c_str(), nullptr, 10));
 		treeUpdated = this->setNNStrategy(nnStrategy);
 	}
 	if(!treeUpdated && byteToFloat!=_byteToFloat && _strategy == kNNFlannKdTree)
@@ -129,12 +124,33 @@ void VWDictionary::parseParameters(const ParametersMap & parameters)
 	_incrementalDictionary = incrementalDictionary;
 }
 
+void VWDictionary::setSavedIndex(const std::filesystem::path& dir)
+{
+	// check if the saved files can be used
+	_useSavedFile = std::filesystem::exists(dir / "index.bin") &&
+					std::filesystem::exists(dir / "vars.bin") &&
+					std::filesystem::exists(dir / "VWDictionary.bin") &&
+					!std::filesystem::exists(dir / "tmp.txt");
+
+	// remember the directory holding the saved files
+	save_dir_ = dir;
+
+	if(_useSavedFile)
+	{
+		UINFO("Using saved files at %s to load vocabulary!", save_dir_.c_str());
+	}
+	else
+	{
+		UINFO("Cannot find the necessary files at %s to load vocabulary, rebuilding flannIndex from database", save_dir_.c_str());
+	}
+}
+
 void VWDictionary::setIncrementalDictionary()
 {
 	if(!_incrementalDictionary)
 	{
 		_incrementalDictionary = true;
-		if(_visualWords.size())
+		if(!_visualWords.empty())
 		{
 			UWARN("Incremental dictionary set: already loaded visual words (%d) from the fixed dictionary will be included in the incremental one.", _visualWords.size());
 		}
 	}
@@ -148,24 +164,24 @@ void VWDictionary::setFixedDictionary(const std::string & dictionaryPath)
 	UDEBUG("");
 	if(!dictionaryPath.empty())
 	{
-		if((!_incrementalDictionary && _dictionaryPath.compare(dictionaryPath) != 0) ||
-		   _visualWords.size() == 0)
if((!_incrementalDictionary && _dictionaryPath != dictionaryPath) || + _visualWords.empty()) { UINFO("incremental=%d, oldPath=%s newPath=%s, visual words=%d", _incrementalDictionary?1:0, _dictionaryPath.c_str(), dictionaryPath.c_str(), (int)_visualWords.size()); - if(UFile::getExtension(dictionaryPath).compare("db") == 0) + if(UFile::getExtension(dictionaryPath) == "db") { UWARN("Loading fixed vocabulary \"%s\", this may take a while...", dictionaryPath.c_str()); DBDriver * driver = DBDriver::create(); if(driver->openConnection(dictionaryPath, false)) { driver->load(this, false); - for(std::map::iterator iter=_visualWords.begin(); iter!=_visualWords.end(); ++iter) + for(auto & _visualWord : _visualWords) { - iter->second->setSaved(true); + _visualWord.second->setSaved(true); } - _incrementalDictionary = _visualWords.size()==0; + _incrementalDictionary = _visualWords.empty(); driver->closeConnection(false); } else @@ -190,11 +206,11 @@ void VWDictionary::setFixedDictionary(const std::string & dictionaryPath) std::getline(file, str); strList = uSplitNumChar(str); int dimension = 0; - for(std::list::iterator iter = strList.begin(); iter != strList.end(); ++iter) + for(auto & iter : strList) { - if(uIsDigit(iter->at(0))) + if(uIsDigit(iter.at(0))) { - dimension = std::atoi(iter->c_str()); + dimension = static_cast(std::strtol(iter.c_str(), nullptr, 10)); break; } } @@ -214,8 +230,9 @@ void VWDictionary::setFixedDictionary(const std::string & dictionaryPath) if((int)strList.size() == dimension+1) { //first one is the visual word id - std::list::iterator iter = strList.begin(); - int id = std::atoi(iter->c_str()); + auto iter = strList.begin(); + auto id = static_cast(std::strtol(iter->c_str(), nullptr, 10)); + // int id = std::atoi(iter->c_str()); cv::Mat descriptor(1, dimension, CV_32F); ++iter; int i=0; @@ -230,7 +247,7 @@ void VWDictionary::setFixedDictionary(const std::string & dictionaryPath) UERROR("Loaded word has not the same size (%d) than descriptor size previously detected (%d).", i, dimension); } - VisualWord * vw = new VisualWord(id, descriptor, 0); + auto * vw = new VisualWord(id, descriptor, 0); vw->setSaved(true); _visualWords.insert(_visualWords.end(), std::pair(id, vw)); _notIndexedWords.insert(_notIndexedWords.end(), id); @@ -241,7 +258,7 @@ void VWDictionary::setFixedDictionary(const std::string & dictionaryPath) UWARN("Cannot parse line \"%s\"", str.c_str()); } } - if(_visualWords.size()) + if(!_visualWords.empty()) { UWARN("Loaded %d words!", (int)_visualWords.size()); } @@ -254,9 +271,9 @@ void VWDictionary::setFixedDictionary(const std::string & dictionaryPath) file.close(); } - if(_visualWords.size() == 0) + if(_visualWords.empty()) { - _incrementalDictionary = _visualWords.size()==0; + _incrementalDictionary = _visualWords.empty(); UWARN("No words loaded, cannot set a fixed dictionary.", (int)_visualWords.size()); } else @@ -277,7 +294,7 @@ void VWDictionary::setFixedDictionary(const std::string & dictionaryPath) UERROR("Cannot change to a fixed dictionary if there are already words (%d) in the incremental one.", _visualWords.size()); } } - else if(_incrementalDictionary && _visualWords.size()) + else if(_incrementalDictionary && !_visualWords.empty()) { UWARN("Cannot change to fixed dictionary, %d words already loaded as incremental", (int)_visualWords.size()); } @@ -346,7 +363,7 @@ bool VWDictionary::setNNStrategy(NNStrategy strategy) int VWDictionary::getLastIndexedWordId() const { - if(_mapIndexId.size()) + if(!_mapIndexId.empty()) { return 
_mapIndexId.rbegin()->second; } @@ -370,7 +387,7 @@ unsigned long VWDictionary::getMemoryUsed() const { long memoryUsage = sizeof(VWDictionary); memoryUsage += getIndexMemoryUsed(); - memoryUsage += _dataTree.total()*_dataTree.elemSize(); + memoryUsage += long( _dataTree.total()*_dataTree.elemSize() ); if(!_visualWords.empty()) { memoryUsage += _visualWords.size()*(sizeof(int) + _visualWords.rbegin()->second->getMemoryUsed() + sizeof(std::map::iterator)) + sizeof(std::map); @@ -411,7 +428,7 @@ cv::Mat VWDictionary::convertBinTo32F(const cv::Mat & descriptorsIn, bool byteTo for(int i=0; i(i); + auto * ptrOut = descriptorsOut.ptr(i); for(int j=0; j(i); + const auto * ptrIn = descriptorsIn.ptr(i); unsigned char * ptrOut = descriptorsOut.ptr(i); for(int j=0; jsetFixedDictionary(_newDictionaryPath); - if(!_incrementalDictionary && !_notIndexedWords.size()) + if(!_incrementalDictionary && _notIndexedWords.empty()) { // No need to update the search index if we // use a fixed dictionary and the index is @@ -482,29 +500,30 @@ void VWDictionary::update() } } - if(_notIndexedWords.size() || _visualWords.size() == 0 || _removedIndexedWords.size()) + if(!_notIndexedWords.empty() || _visualWords.empty() || !_removedIndexedWords.empty()) { if(_incrementalFlann && _strategy < kNNBruteForce && - _visualWords.size()) + !_visualWords.empty()) { ULOGGER_DEBUG("Incremental FLANN: Removing %d words...", (int)_removedIndexedWords.size()); - for(std::set::iterator iter=_removedIndexedWords.begin(); iter!=_removedIndexedWords.end(); ++iter) + for(int _removedIndexedWord : _removedIndexedWords) { - UASSERT(uContains(_mapIdIndex, *iter)); - UASSERT(uContains(_mapIndexId, _mapIdIndex.at(*iter))); - _flannIndex->removePoint(_mapIdIndex.at(*iter)); - _mapIndexId.erase(_mapIdIndex.at(*iter)); - _mapIdIndex.erase(*iter); + UASSERT(uContains(_mapIdIndex, _removedIndexedWord)); + UASSERT(uContains(_mapIndexId, _mapIdIndex.at(_removedIndexedWord))); + _flannIndex->removePoint(_mapIdIndex.at(_removedIndexedWord)); + _mapIndexId.erase(_mapIdIndex.at(_removedIndexedWord)); + _mapIdIndex.erase(_removedIndexedWord); } - ULOGGER_DEBUG("Incremental FLANN: Removing %d words... done!", (int)_removedIndexedWords.size()); + UINFO("Incremental FLANN: Removing %d words... done!", (int)_removedIndexedWords.size()); - if(_notIndexedWords.size()) + // not rebuild tree when init + if(!_notIndexedWords.empty() && !_useSavedFile) { - ULOGGER_DEBUG("Incremental FLANN: Inserting %d words...", (int)_notIndexedWords.size()); - for(std::set::iterator iter=_notIndexedWords.begin(); iter!=_notIndexedWords.end(); ++iter) + UINFO("Incremental FLANN: Inserting %d words...", (int)_notIndexedWords.size()); + for(const int& notIndexedWord : _notIndexedWords) { - VisualWord* w = uValue(_visualWords, *iter, (VisualWord*)0); + VisualWord* w = uValue(_visualWords, notIndexedWord, (VisualWord*)nullptr); UASSERT(w); cv::Mat descriptor; @@ -553,7 +572,7 @@ void VWDictionary::update() UASSERT(descriptor.cols == _flannIndex->featuresDim()); UASSERT(descriptor.type() == _flannIndex->featuresType()); UASSERT(descriptor.rows == 1); - index = _flannIndex->addPoints(descriptor).front(); + index = (int)_flannIndex->addPoints(descriptor).front(); } std::pair::iterator, bool> inserted; inserted = _mapIndexId.insert(std::pair(index, w->id())); @@ -561,21 +580,29 @@ void VWDictionary::update() inserted = _mapIdIndex.insert(std::pair(w->id(), index)); UASSERT(inserted.second); } - ULOGGER_DEBUG("Incremental FLANN: Inserting %d words... 
done!", (int)_notIndexedWords.size()); + UINFO("Incremental FLANN: Inserting %d words... done!", (int)_notIndexedWords.size()); + } + + // load tree and mapId when files are provided + // load only once initing -> set _useSavedFile = false after inited + if (_useSavedFile) + { + load(save_dir_); + _useSavedFile = false; } } else if(_strategy >= kNNBruteForce && - _notIndexedWords.size() && - _removedIndexedWords.size() == 0 && - _visualWords.size() && + !_notIndexedWords.empty() && + _removedIndexedWords.empty() && + !_visualWords.empty() && _dataTree.rows) { //just add not indexed words int i = _dataTree.rows; _dataTree.reserve(_dataTree.rows + _notIndexedWords.size()); - for(std::set::iterator iter=_notIndexedWords.begin(); iter!=_notIndexedWords.end(); ++iter) + for(int _notIndexedWord : _notIndexedWords) { - VisualWord* w = uValue(_visualWords, *iter, (VisualWord*)0); + VisualWord* w = uValue(_visualWords, _notIndexedWord, (VisualWord*)nullptr); UASSERT(w); UASSERT(w->getDescriptor().cols == _dataTree.cols); UASSERT(w->getDescriptor().type() == _dataTree.type()); @@ -593,7 +620,7 @@ void VWDictionary::update() _dataTree = cv::Mat(); _flannIndex->release(); - if(_visualWords.size()) + if(!_visualWords.empty()) { UTimer timer; timer.start(); @@ -625,9 +652,9 @@ void VWDictionary::update() UASSERT(dim > 0); // Create the data matrix - _dataTree = cv::Mat(_visualWords.size(), dim, type); // SURF descriptors are CV_32F - std::map::const_iterator iter = _visualWords.begin(); - for(unsigned int i=0; i < _visualWords.size(); ++i, ++iter) + _dataTree = cv::Mat((int)_visualWords.size(), dim, type); // SURF descriptors are CV_32F + auto iter = _visualWords.begin(); + for(auto i=0; i < (int)_visualWords.size(); ++i, ++iter) { cv::Mat descriptor; if(iter->second->getDescriptor().type() == CV_8U) @@ -649,7 +676,7 @@ void VWDictionary::update() UASSERT_MSG(descriptor.type() == type, uFormat("%d vs %d", descriptor.type(), type).c_str()); UASSERT_MSG(descriptor.cols == dim, uFormat("%d vs %d", descriptor.cols, dim).c_str()); - descriptor.copyTo(_dataTree.row(i)); + descriptor.copyTo(_dataTree.row((int)i)); _mapIndexId.insert(_mapIndexId.end(), std::pair(i, iter->second->id())); _mapIdIndex.insert(_mapIdIndex.end(), std::pair(iter->second->id(), i)); } @@ -677,7 +704,7 @@ void VWDictionary::update() ULOGGER_DEBUG("Time to create kd tree = %f s", timer.ticks()); } } - UDEBUG("Dictionary updated! (size=%d added=%d removed=%d)", + UINFO("Dictionary updated! 
(size=%d added=%d removed=%d)", _dataTree.rows, _notIndexedWords.size(), _removedIndexedWords.size()); } else @@ -687,6 +714,9 @@ void VWDictionary::update() _notIndexedWords.clear(); _removedIndexedWords.clear(); UDEBUG(""); + auto end = std::chrono::high_resolution_clock::now(); + auto duration = std::chrono::duration_cast(end - start); + UINFO("Update took: %d ms", duration.count()); } void VWDictionary::clear(bool printWarningsIfNotEmpty) @@ -694,18 +724,18 @@ void VWDictionary::clear(bool printWarningsIfNotEmpty) ULOGGER_DEBUG(""); if(printWarningsIfNotEmpty) { - if(_visualWords.size() && _incrementalDictionary) + if(!_visualWords.empty() && _incrementalDictionary) { UWARN("Visual dictionary would be already empty here (%d words still in dictionary).", (int)_visualWords.size()); } - if(_notIndexedWords.size()) + if(!_notIndexedWords.empty()) { UWARN("Not indexed words should be empty here (%d words still not indexed)", (int)_notIndexedWords.size()); } } - for(std::map::iterator i=_visualWords.begin(); i!=_visualWords.end(); ++i) + for(auto & _visualWord : _visualWords) { - delete (*i).second; + delete _visualWord.second; } _visualWords.clear(); _notIndexedWords.clear(); @@ -727,7 +757,7 @@ int VWDictionary::getNextId() void VWDictionary::addWordRef(int wordId, int signatureId) { - VisualWord * vw = 0; + VisualWord * vw = nullptr; vw = uValue(_visualWords, wordId, vw); if(vw) { @@ -744,12 +774,12 @@ void VWDictionary::addWordRef(int wordId, int signatureId) void VWDictionary::removeAllWordRef(int wordId, int signatureId) { - VisualWord * vw = 0; + VisualWord * vw = nullptr; vw = uValue(_visualWords, wordId, vw); if(vw) { _totalActiveReferences -= vw->removeAllRef(signatureId); - if(vw->getReferences().size() == 0) + if(vw->getReferences().empty()) { _unusedWords.insert(std::pair(vw->id(), vw)); } @@ -778,7 +808,7 @@ std::list VWDictionary::addNewWords( // verify we have the same features int dim = 0; int type = -1; - if(_visualWords.size()) + if(!_visualWords.empty()) { dim = _visualWords.begin()->second->getDescriptor().cols; type = _visualWords.begin()->second->getDescriptor().type(); @@ -858,13 +888,13 @@ std::list VWDictionary::addNewWords( if(_strategy == kNNFlannNaive || _strategy == kNNFlannKdTree || _strategy == kNNFlannLSH) { - _flannIndex->knnSearch(descriptors, results, dists, k, KNN_CHECKS); + _flannIndex->knnSearch(descriptors, results, dists, (int)k, KNN_CHECKS); } else if(_strategy == kNNBruteForce) { bruteForce = true; cv::BFMatcher matcher(descriptors.type()==CV_8U?cv::NORM_HAMMING:cv::NORM_L2SQR); - matcher.knnMatch(descriptors, _dataTree, matches, k); + matcher.knnMatch(descriptors, _dataTree, matches, (int)k); } else if(_strategy == kNNBruteForceGPU) { @@ -953,12 +983,12 @@ std::list VWDictionary::addNewWords( } } } - else if(bruteForce && matches.size()) + else if(bruteForce && !matches.empty()) { - for(unsigned int j=0; j= 0.0f && id != 0) { if(isL2NotSqr) @@ -983,10 +1013,10 @@ std::list VWDictionary::addNewWords( UASSERT(descriptors.cols == newWords.cols && descriptors.type() == newWords.type()); matcher.knnMatch(descriptors.row(i), newWords, matchesNewWords, newWords.rows>1?2:1); UASSERT(matchesNewWords.size() == 1); - for(unsigned int j=0; j= 0.0f && id != 0) { fullResults.insert(std::pair(d, id)); @@ -1001,7 +1031,7 @@ std::list VWDictionary::addNewWords( if(_incrementalDictionary) { bool badDist = false; - if(fullResults.size() == 0) + if(fullResults.empty()) { badDist = true; } @@ -1024,7 +1054,7 @@ std::list VWDictionary::addNewWords( if(badDist) { // use 
 				// use original descriptor
-				VisualWord * vw = new VisualWord(getNextId(), descriptorsIn.row(i), signatureId);
+				auto * vw = new VisualWord(getNextId(), descriptorsIn.row(i), signatureId);
 				_visualWords.insert(_visualWords.end(), std::pair<int, VisualWord *>(vw->id(), vw));
 				_notIndexedWords.insert(_notIndexedWords.end(), vw->id());
 				newWords.push_back(descriptors.row(i));
@@ -1047,7 +1077,7 @@ std::list<int> VWDictionary::addNewWords(
 				wordIds.push_back(fullResults.begin()->second);
 			}
 		}
-		else if(fullResults.size())
+		else if(!fullResults.empty())
 		{
 			// If the dictionary is not incremental, just take the nearest word
 			++dupWordsCountFromDict;
@@ -1063,7 +1093,7 @@ std::list<int> VWDictionary::addNewWords(
 			dupWordsCountFromDict+dupWordsCountFromLast, dupWordsCountFromLast);
 	UDEBUG("total time %fs", timer.ticks());
 
-	_totalActiveReferences += _notIndexedWords.size();
+	_totalActiveReferences += (int)_notIndexedWords.size();
 	return wordIds;
 }
 
@@ -1072,7 +1102,7 @@ std::vector<int> VWDictionary::findNN(const std::list<VisualWord *> & vws) const
 	UTimer timer;
 	timer.start();
 
-	if(_visualWords.size() && vws.size())
+	if(!_visualWords.empty() && !vws.empty())
 	{
 		int type = (*vws.begin())->getDescriptor().type();
 		int dim = (*vws.begin())->getDescriptor().cols;
@@ -1092,8 +1122,8 @@ std::vector<int> VWDictionary::findNN(const std::list<VisualWord *> & vws) const
 		// fill the request matrix
 		int index = 0;
 		VisualWord * vw;
-		cv::Mat query(vws.size(), dim, type);
-		for(std::list<VisualWord *>::const_iterator iter=vws.begin(); iter!=vws.end(); ++iter, ++index)
+		cv::Mat query((int)vws.size(), dim, type);
+		for(auto iter=vws.begin(); iter!=vws.end(); ++iter, ++index)
 		{
 			vw = *iter;
 			UASSERT(vw);
@@ -1116,7 +1146,7 @@ std::vector<int> VWDictionary::findNN(const cv::Mat & queryIn) const
 	std::vector<int> resultIds(queryIn.rows, 0);
 	unsigned int k=2; // k nearest neighbor
 
-	if(_visualWords.size() && queryIn.rows)
+	if(!_visualWords.empty() && queryIn.rows)
 	{
 		// verify we have the same features
 		int dim = _visualWords.begin()->second->getDescriptor().cols;
@@ -1185,13 +1215,13 @@ std::vector<int> VWDictionary::findNN(const cv::Mat & queryIn) const
 		if(_strategy == kNNFlannNaive || _strategy == kNNFlannKdTree || _strategy == kNNFlannLSH)
 		{
-			_flannIndex->knnSearch(query, results, dists, k, KNN_CHECKS);
+			_flannIndex->knnSearch(query, results, dists, (int)k, KNN_CHECKS);
 		}
 		else if(_strategy == kNNBruteForce)
 		{
 			bruteForce = true;
 			cv::BFMatcher matcher(query.type()==CV_8U?cv::NORM_HAMMING:cv::NORM_L2SQR);
-			matcher.knnMatch(query, _dataTree, matches, k);
+			matcher.knnMatch(query, _dataTree, matches, (int)k);
 		}
 		else if(_strategy == kNNBruteForceGPU)
 		{
@@ -1256,10 +1286,10 @@ std::vector<int> VWDictionary::findNN(const cv::Mat & queryIn) const
 		std::vector<std::vector<cv::DMatch> > matchesNotIndexed;
 		if(!_notIndexedWords.empty())
 		{
-			cv::Mat dataNotIndexed = cv::Mat::zeros(_notIndexedWords.size(), query.cols, query.type());
+			cv::Mat dataNotIndexed = cv::Mat::zeros((int)_notIndexedWords.size(), query.cols, query.type());
 			unsigned int index = 0;
 			VisualWord * vw;
-			for(std::set<int>::iterator iter = _notIndexedWords.begin(); iter != _notIndexedWords.end(); ++iter, ++index)
+			for(auto iter = _notIndexedWords.begin(); iter != _notIndexedWords.end(); ++iter, ++index)
 			{
 				vw = _visualWords.at(*iter);
@@ -1279,8 +1309,8 @@ std::vector<int> VWDictionary::findNN(const cv::Mat & queryIn) const
 				{
 					descriptor = vw->getDescriptor();
 				}
-				UASSERT(vw != 0 && descriptor.cols == query.cols && descriptor.type() == query.type());
-				descriptor.copyTo(dataNotIndexed.row(index));
+				UASSERT(vw != nullptr && descriptor.cols == query.cols && descriptor.type() == query.type());
+				descriptor.copyTo(dataNotIndexed.row((int)index));
 				mapIndexIdNotIndexed.insert(mapIndexIdNotIndexed.end(), std::pair<int, int>(index, vw->id()));
 			}
 
 			// Find nearest neighbor
@@ -1315,12 +1345,12 @@ std::vector<int> VWDictionary::findNN(const cv::Mat & queryIn) const
 				}
 			}
 		}
-		else if(bruteForce && matches.size())
+		else if(bruteForce && !matches.empty())
 		{
-			for(unsigned int j=0; j<matches.at(i).size(); ++j)
+			for(const auto & match : matches.at(i))
 			{
-				float d = matches.at(i).at(j).distance;
-				int id = uValue(_mapIndexId, matches.at(i).at(j).trainIdx, 0);
+				float d = match.distance;
+				int id = uValue(_mapIndexId, match.trainIdx, 0);
 				if(d >= 0.0f && id != 0)
 				{
 					if(isL2NotSqr)
@@ -1334,12 +1364,12 @@ std::vector<int> VWDictionary::findNN(const cv::Mat & queryIn) const
 		}
 
 		// not indexed..
-		if(matchesNotIndexed.size())
+		if(!matchesNotIndexed.empty())
 		{
-			for(unsigned int j=0; j<matchesNotIndexed.at(i).size(); ++j)
+			for(const auto & match : matchesNotIndexed.at(i))
 			{
-				float d = matchesNotIndexed.at(i).at(j).distance;
-				int id = uValue(mapIndexIdNotIndexed, matchesNotIndexed.at(i).at(j).trainIdx, 0);
+				float d = match.distance;
+				int id = uValue(mapIndexIdNotIndexed, match.trainIdx, 0);
 				if(d >= 0.0f && id != 0)
 				{
 					fullResults.insert(std::pair<float, int>(d, id));
@@ -1354,7 +1384,7 @@ std::vector<int> VWDictionary::findNN(const cv::Mat & queryIn) const
 		if(_incrementalDictionary)
 		{
 			bool badDist = false;
-			if(fullResults.size() == 0)
+			if(fullResults.empty())
 			{
 				badDist = true;
 			}
@@ -1379,7 +1409,7 @@ std::vector<int> VWDictionary::findNN(const cv::Mat & queryIn) const
 				resultIds[i] = fullResults.begin()->second; // Accepted
 			}
 		}
-		else if(fullResults.size())
+		else if(!fullResults.empty())
 		{
 			//Just take the nearest if the dictionary is not incremental
 			resultIds[i] = fullResults.begin()->second; // Accepted
@@ -1396,7 +1426,7 @@ void VWDictionary::addWord(VisualWord * vw)
 	{
 		_visualWords.insert(std::pair<int, VisualWord *>(vw->id(), vw));
 		_notIndexedWords.insert(vw->id());
-		if(vw->getReferences().size())
+		if(!vw->getReferences().empty())
 		{
 			_totalActiveReferences += uSum(uValues(vw->getReferences()));
 		}
@@ -1413,12 +1443,12 @@ void VWDictionary::addWord(VisualWord * vw)
 
 const VisualWord * VWDictionary::getWord(int id) const
 {
-	return uValue(_visualWords, id, (VisualWord *)0);
+	return uValue(_visualWords, id, (VisualWord *)nullptr);
 }
 
 VisualWord * VWDictionary::getUnusedWord(int id) const
 {
-	return uValue(_unusedWords, id, (VisualWord *)0);
+	return uValue(_unusedWords, id, (VisualWord *)nullptr);
 }
 
 std::vector<VisualWord*> VWDictionary::getUnusedWords() const
@@ -1434,13 +1464,13 @@ std::vector<int> VWDictionary::getUnusedWordIds() const
 void VWDictionary::removeWords(const std::vector<VisualWord*> & words)
 {
 	//UDEBUG("Removing %d words from dictionary (current size=%d)", (int)words.size(), (int)_visualWords.size());
-	for(unsigned int i=0; i<words.size(); ++i)
+	for(auto word : words)
 	{
-		_visualWords.erase(words[i]->id());
-		_unusedWords.erase(words[i]->id());
-		if(_notIndexedWords.erase(words[i]->id()) == 0)
+		_visualWords.erase(word->id());
+		_unusedWords.erase(word->id());
+		if(_notIndexedWords.erase(word->id()) == 0)
 		{
-			_removedIndexedWords.insert(words[i]->id());
+			_removedIndexedWords.insert(word->id());
 		}
 	}
 }
@@ -1449,9 +1479,9 @@ void VWDictionary::deleteUnusedWords()
 {
 	std::vector<VisualWord*> unusedWords = uValues(_unusedWords);
 	removeWords(unusedWords);
-	for(unsigned int i=0; i<unusedWords.size(); ++i)
+	for(auto & unusedWord : unusedWords)
 	{
-		delete unusedWords[i];
+		delete unusedWord;
 	}
 }
@@ -1490,41 +1520,120 @@ void VWDictionary::exportDictionary(const char * fileNameReferences, const char * fileNameDescriptors) const
-	for(std::map<int, VisualWord *>::const_iterator iter=_visualWords.begin(); iter!=_visualWords.end(); ++iter)
+	for(auto _visualWord : _visualWords)
 	{
 		// References
 		if(foutRef)
 		{
-			fprintf(foutRef, "%d ", (*iter).first);
-			const std::map<int, int> ref = (*iter).second->getReferences();
-			for(std::map<int, int>::const_iterator jter=ref.begin(); jter!=ref.end(); ++jter)
+			fprintf(foutRef, "%d ", _visualWord.first);
+			const std::map<int, int> ref = _visualWord.second->getReferences();
+			for(auto jter : ref)
 			{
-				for(int i=0; i<(*jter).second; ++i)
+				for(int i=0; i<jter.second; ++i)
 				{
-					fprintf(foutRef, "%d ", (*jter).first);
+					fprintf(foutRef, "%d ", jter.first);
 				}
 			}
 			fprintf(foutRef, "\n");
 		}
 
 		//Descriptors
 		if(foutDesc)
 		{
-			fprintf(foutDesc, "%d ", (*iter).first);
-			const float * desc = (const float *)(*iter).second->getDescriptor().data;
-			int dim = (*iter).second->getDescriptor().cols;
+			fprintf(foutDesc, "%d ", _visualWord.first);
+			const auto * desc = (const float *)_visualWord.second->getDescriptor().data;
+			int dim = _visualWord.second->getDescriptor().cols;
 			for(int i=0; i<dim; ++i)
 			{
 				fprintf(foutDesc, "%f ", desc[i]);
 			}
 			fprintf(foutDesc, "\n");
 		}
 	}
 
+void VWDictionary::save(std::string_view dir) const
+{
+	// serialize the flann index and its data points
+	std::filesystem::path index_path = std::filesystem::path(dir) / "index.bin";
+	UINFO("saving index and its data points at: %s", index_path.c_str());
+	_flannIndex->save(index_path);
+	UINFO("saved index and its data points at: %s", index_path.c_str());
+
serialization binary map: Index <-> Id + std::filesystem::path mapIdexId = std::filesystem::path(dir) / "VWDictionary.bin"; + UINFO("serializing class at : %s", mapIdexId.c_str()); + saveVars(mapIdexId); + UINFO("serialized class at : %s", mapIdexId.c_str()); + + // serialization flannIndex class + std::filesystem::path flannIndex_path = std::filesystem::path( dir ) / "vars.bin"; + UINFO("serializing class at : %s", flannIndex_path.c_str()); + _flannIndex->serialize(flannIndex_path); + UINFO("serialized class at : %s", flannIndex_path.c_str()); +} + +void VWDictionary::load(std::string_view dir) +{ + delete _flannIndex; + _flannIndex = new rtabmap::FlannIndex() ; + _flannIndex->load(dir); + loadMapIndex(std::filesystem::path(dir) / "VWDictionary.bin"); +} + +void VWDictionary::loadMapIndex(const std::filesystem::path& file) +{ + std::ifstream inFile(file, std::ios::binary); + if (!inFile.is_open()) { + throw std::runtime_error("Unable to open file for reading"); + } + + int size, key, value; + inFile.read(reinterpret_cast(&size), sizeof(size)); + _mapIndexId.clear(); + _mapIdIndex.clear(); + + for (int i = 0; i < size; ++i) { + inFile.read(reinterpret_cast(&key), sizeof(key)); + inFile.read(reinterpret_cast(&value), sizeof(value)); + _mapIndexId[key] = value; + _mapIdIndex[value] = key; + } + + inFile.close(); +} + +void VWDictionary::saveVars(const std::filesystem::path& filename) const +{ + std::ofstream outFile(filename, std::ios::binary); + if (!outFile.is_open()) { + throw std::runtime_error("Unable to open file for writing"); + } + + int size = (int)_mapIndexId.size(); + outFile.write(reinterpret_cast(&size), sizeof(size)); + for (const auto& pair : _mapIndexId) { + outFile.write(reinterpret_cast(&pair.first), sizeof(pair.first)); + outFile.write(reinterpret_cast(&pair.second), sizeof(pair.second)); + } + + outFile.close(); +} } // namespace rtabmap diff --git a/corelib/src/python/PyDetector.cpp b/corelib/src/python/PyDetector.cpp index 46f9f25619..d5ee6cb5a2 100644 --- a/corelib/src/python/PyDetector.cpp +++ b/corelib/src/python/PyDetector.cpp @@ -207,6 +207,13 @@ std::vector PyDetector::generateKeypointsImpl(const cv::Mat & imag cv::Mat descriptor = cv::Mat(1, dim, CV_32FC1, &c_out[i]).clone(); descriptors_.push_back(descriptor); } + if (keypoints.size() != (size_t)descriptors_.rows) + { + UWARN("keypoints size is not match descriptors size %d vs %d", static_cast(keypoints.size()), descriptors_.rows); + keypoints.clear(); + descriptors_ = cv::Mat(); + } + } } else