@article{RATO2022497,
title = {A sensor-to-pattern calibration framework for multi-modal industrial collaborative cells},
journal = {Journal of Manufacturing Systems},
volume = {64},
pages = {497-507},
year = {2022},
issn = {0278-6125},
doi = {10.1016/j.jmsy.2022.07.006},
url = {https://www.sciencedirect.com/science/article/pii/S0278612522001182},
author = {Daniela Rato and Miguel Oliveira and Vítor Santos and Manuel Gomes and Angel Sappa},
keywords = {Calibration, Collaborative cell, Multi-modal, Multi-sensor},
abstract = {Collaborative robotic industrial cells are workspaces where robots collaborate with human operators. In this context, safety is paramount, and for that a complete perception of the space where the collaborative robot is inserted is necessary. To ensure this, collaborative cells are equipped with a large set of sensors of multiple modalities, covering the entire work volume. However, the fusion of information from all these sensors requires an accurate extrinsic calibration. The calibration of such complex systems is challenging, due to the number of sensors and modalities, and also due to the small overlapping fields of view between the sensors, which are positioned to capture different viewpoints of the cell. This paper proposes a sensor-to-pattern methodology that can calibrate a complex system such as a collaborative cell in a single optimization procedure. Our methodology can tackle RGB and depth cameras, as well as LiDARs. Results show that our methodology is able to accurately calibrate a collaborative cell containing three RGB cameras, a depth camera and three 3D LiDARs.}
}