@inproceedings{24c9cae558af43eb91ef38b38e64e3f5,
  author    = {Chung, Chi Ming and Tseng, Yang Che and Hsu, Ya Ching and Shi, Xiang Qian and Hua, Yun Hung and Yeh, Jia Fong and Chen, Wen Chin and Chen, Yi Ting and Hsu, Winston H.},
  title     = {{Orbeez-SLAM}: A Real-time Monocular Visual {SLAM} with {ORB} Features and {NeRF}-realized Mapping},
  booktitle = {Proceedings - ICRA 2023},
  series    = {Proceedings - IEEE International Conference on Robotics and Automation},
  pages     = {9400--9406},
  publisher = {Institute of Electrical and Electronics Engineers Inc.},
  address   = {United States},
  year      = {2023},
  doi       = {10.1109/ICRA48891.2023.10160950},
  language  = {English},
  abstract  = {A spatial AI that can perform complex tasks through visual signals and cooperate with humans is highly anticipated. To achieve this, we need a visual SLAM that easily adapts to new scenes without pre-training and generates dense maps for downstream tasks in real-time. None of the previous learning-based and non-learning-based visual SLAMs satisfy all needs due to the intrinsic limitations of their components. In this work, we develop a visual SLAM named Orbeez-SLAM, which successfully collaborates with implicit neural representation and visual odometry to achieve our goals. Moreover, Orbeez-SLAM can work with the monocular camera since it only needs RGB inputs, making it widely applicable to the real world. Results show that our SLAM is up to 800x faster than the strong baseline with superior rendering outcomes. Code link: https://github.com/MarvinChung/Orbeez-SLAM.},
  note      = {Publisher Copyright: {\textcopyright} 2023 IEEE.; 2023 IEEE International Conference on Robotics and Automation, ICRA 2023 ; Conference date: 29-05-2023 Through 02-06-2023},
}