Disclaimer: The papers below are intended for private viewing by the page owner or those who otherwise have legitimate access to them. No part of them may be reproduced, stored in a retrieval system, broadcast, or transmitted in any form or by any electronic, mechanical, photocopying, recording, or other means without the prior permission of the respective publishers. If your organization has a valid subscription to the journals, click on the DOI link for the legitimate copy of the papers.
2024
- T. Nguyen, C. Hamesse, T. Dutrannois, T. Halleux, G. De Cubber, R. Haelterman, and B. Janssens, “Visual-based Localization Methods for Unmanned Aerial Vehicles in Landing Operation on Maritime Vessel," Acta IMEKO, vol. 13, iss. 4, p. 1–13, 2024.
[BibTeX] [Download PDF] [DOI]@article{nguyen_visual_2024, title = {Visual-based {Localization} {Methods} for {Unmanned} {Aerial} {Vehicles} in {Landing} {Operation} on {Maritime} {Vessel}}, volume = {13}, issn = {2221-870X}, url = {https://acta.imeko.org/index.php/acta-imeko/article/view/1575}, doi = {10.21014/actaimeko.v13i4.1575}, number = {4}, journal = {Acta IMEKO}, author = {Nguyen, Tien-Thanh and Hamesse, Charles and Dutrannois, Thomas and Halleux, Timothy and De Cubber, Geert and Haelterman, Rob and Janssens, Bart}, month = nov, year = {2024}, pages = {1--13}, unit= {meca-ras}, project= {MarLand} }
- P. Petsioti, M. Życzkowski, K. Brewczyński, K. Cichulski, K. Kamiński, R. Razvan, A. Mohamoud, C. Church, A. Koniaris, G. De Cubber, and D. Doroftei, “Methodological Approach for the Development of Standard C-UAS Scenarios," Open Research Europe, vol. 4, iss. 240, 2024.
[BibTeX] [Download PDF] [DOI]@Article{ 10.12688/openreseurope.18339.1, AUTHOR = {Petsioti, P. and Życzkowski, M. and Brewczyński, K. and Cichulski, K. and Kamiński, K. and Razvan, R. and Mohamoud, A. and Church, C. and Koniaris, A. and De Cubber, G. and Doroftei, D.}, TITLE = {Methodological Approach for the Development of Standard C-UAS Scenarios}, JOURNAL = {Open Research Europe}, VOLUME = {4}, YEAR = {2024}, NUMBER = {240}, DOI = {10.12688/openreseurope.18339.1}, URL = {https://open-research-europe.ec.europa.eu/articles/4-240/v1}, unit= {meca-ras}, project= {COURAGEOUS} }
- K. D. Brewczyński, M. Życzkowski, K. Cichulski, K. A. Kamiński, P. Petsioti, and G. De Cubber, “Methods for Assessing the Effectiveness of Modern Counter Unmanned Aircraft Systems," Remote Sensing, vol. 16, iss. 19, 2024.
[BibTeX] [Abstract] [Download PDF] [DOI]
Given the growing threat posed by the widespread availability of unmanned aircraft systems (UASs), which can be utilised for various unlawful activities, the need for a standardised method to evaluate the effectiveness of systems capable of detecting, tracking, and identifying (DTI) these devices has become increasingly urgent. This article draws upon research conducted under the European project COURAGEOUS, where 260 existing drone detection systems were analysed, and a methodology was developed for assessing the suitability of C-UASs in relation to specific threat scenarios. The article provides an overview of the most commonly employed technologies in C-UASs, such as radars, visible light cameras, thermal imaging cameras, laser range finders (lidars), and acoustic sensors. It explores the advantages and limitations of each technology, highlighting their reliance on different physical principles, and also briefly touches upon the legal implications associated with their deployment. The article presents the research framework and provides a structural description, alongside the functional and performance requirements, as well as the defined metrics. Furthermore, the methodology for testing the usability and effectiveness of individual C-UAS technologies in addressing specific threat scenarios is elaborated. Lastly, the article offers a concise list of prospective research directions concerning the analysis and evaluation of these technologies.
@Article{rs16193714, AUTHOR = {Brewczyński, Konrad D. and Życzkowski, Marek and Cichulski, Krzysztof and Kamiński, Kamil A. and Petsioti, Paraskevi and De Cubber, Geert}, TITLE = {Methods for Assessing the Effectiveness of Modern Counter Unmanned Aircraft Systems}, JOURNAL = {Remote Sensing}, VOLUME = {16}, YEAR = {2024}, NUMBER = {19}, ARTICLE-NUMBER = {3714}, URL = {https://www.mdpi.com/2072-4292/16/19/3714}, ISSN = {2072-4292}, ABSTRACT = {Given the growing threat posed by the widespread availability of unmanned aircraft systems (UASs), which can be utilised for various unlawful activities, the need for a standardised method to evaluate the effectiveness of systems capable of detecting, tracking, and identifying (DTI) these devices has become increasingly urgent. This article draws upon research conducted under the European project COURAGEOUS, where 260 existing drone detection systems were analysed, and a methodology was developed for assessing the suitability of C-UASs in relation to specific threat scenarios. The article provides an overview of the most commonly employed technologies in C-UASs, such as radars, visible light cameras, thermal imaging cameras, laser range finders (lidars), and acoustic sensors. It explores the advantages and limitations of each technology, highlighting their reliance on different physical principles, and also briefly touches upon the legal implications associated with their deployment. The article presents the research framework and provides a structural description, alongside the functional and performance requirements, as well as the defined metrics. Furthermore, the methodology for testing the usability and effectiveness of individual C-UAS technologies in addressing specific threat scenarios is elaborated. Lastly, the article offers a concise list of prospective research directions concerning the analysis and evaluation of these technologies.}, DOI = {10.3390/rs16193714}, unit= {meca-ras}, project= {COURAGEOUS}, url={https://www.mdpi.com/2072-4292/16/19/3714} }
- D. Doroftei, G. De Cubber, S. Lo Bue, and H. De Smet, “Quantitative Assessment of Drone Pilot Performance," Drones, vol. 8, iss. 9, 2024.
[BibTeX] [Abstract] [Download PDF] [DOI]
This paper introduces a quantitative methodology for assessing drone pilot performance, aiming to reduce drone-related incidents by understanding the human factors influencing performance. The challenge lies in balancing evaluations in operationally relevant environments with those in a standardized test environment for statistical relevance. The proposed methodology employs a novel virtual test environment that records not only basic flight metrics but also complex mission performance metrics, such as the video quality from a target. A group of Belgian Defence drone pilots were trained using this simulator system, yielding several practical results. These include a human-performance model linking human factors to pilot performance, an AI co-pilot providing real-time flight performance guidance, a tool for generating optimal flight trajectories, a mission planning tool for ideal pilot assignment, and a method for iterative training improvement based on quantitative input. The training results with real pilots demonstrate the methodology’s effectiveness in evaluating pilot performance for complex military missions, suggesting its potential as a valuable addition to new pilot training programs.
@Article{drones8090482, AUTHOR = {Doroftei, Daniela and De Cubber, Geert and Lo Bue, Salvatore and De Smet, Hans}, TITLE = {Quantitative Assessment of Drone Pilot Performance}, JOURNAL = {Drones}, VOLUME = {8}, YEAR = {2024}, unit= {meca-ras}, NUMBER = {9}, ARTICLE-NUMBER = {482}, URL = {https://www.mdpi.com/2504-446X/8/9/482}, ISSN = {2504-446X}, project= {ALPHONSE}, ABSTRACT = {This paper introduces a quantitative methodology for assessing drone pilot performance, aiming to reduce drone-related incidents by understanding the human factors influencing performance. The challenge lies in balancing evaluations in operationally relevant environments with those in a standardized test environment for statistical relevance. The proposed methodology employs a novel virtual test environment that records not only basic flight metrics but also complex mission performance metrics, such as the video quality from a target. A group of Belgian Defence drone pilots were trained using this simulator system, yielding several practical results. These include a human-performance model linking human factors to pilot performance, an AI co-pilot providing real-time flight performance guidance, a tool for generating optimal flight trajectories, a mission planning tool for ideal pilot assignment, and a method for iterative training improvement based on quantitative input. The training results with real pilots demonstrate the methodology’s effectiveness in evaluating pilot performance for complex military missions, suggesting its potential as a valuable addition to new pilot training programs.}, DOI = {10.3390/drones8090482} }
- M. Kegeleirs, D. G. Ramos, K. Hasselmann, L. Garattoni, G. Francesca, and M. Birattari, “Transferability in the automatic off-line design of robot swarms: from sim-to-real to embodiment and design-method transfer across different platforms," IEEE Robotics and Automation Letters, 2024.
[BibTeX] [Download PDF] [DOI]@article{kegeleirs2024transferability, title={Transferability in the automatic off-line design of robot swarms: from sim-to-real to embodiment and design-method transfer across different platforms}, author={Kegeleirs, Miquel and Ramos, David Garz{\'o}n and Hasselmann, Ken and Garattoni, Lorenzo and Francesca, Gianpiero and Birattari, Mauro}, journal={IEEE Robotics and Automation Letters}, year={2024}, doi={https://doi.org/10.1109/LRA.2024.3360013}, url={https://ieeexplore.ieee.org/document/10416330}, publisher={IEEE}, unit= {meca-ras}, project= {AIDEDEX} }
2023
- K. Hasselmann and Q. Lurkin, “Stimulating student engagement with an AI board game tournament," arXiv preprint arXiv:2304.11376, 2023.
[BibTeX]@article{hasselmann2023stimulating, title={Stimulating student engagement with an AI board game tournament}, author={Hasselmann, Ken and Lurkin, Quentin}, journal={arXiv preprint arXiv:2304.11376}, year={2023} }
- K. Hasselmann, A. Ligot, and M. Birattari, “Automatic modular design of robot swarms based on repertoires of behaviors generated via novelty search," Swarm and Evolutionary Computation, vol. 83, p. 101395, 2023.
[BibTeX]@article{hasselmann2023automatic, title={Automatic modular design of robot swarms based on repertoires of behaviors generated via novelty search}, author={Hasselmann, Ken and Ligot, Antoine and Birattari, Mauro}, journal={Swarm and Evolutionary Computation}, volume={83}, pages={101395}, year={2023}, publisher={Elsevier} }
2022
- D. Garzón Ramos, M. Salman, K. Ubeda Arriaza, K. Hasselmann, and M. Birattari, “MoCA: a modular RGB color arena for swarm robotics experiments," IRIDIA Technical Report Series, 2022.
[BibTeX]@article{garzon2022moca, title={MoCA: a modular RGB color arena for swarm robotics experiments}, author={Garz{\'o}n Ramos, David and Salman, Muhammad and Ubeda Arriaza, Keneth and Hasselmann, Ken and Birattari, Mauro}, journal={IRIDIA Technical Report Series}, year={2022} }
2021
- K. Hasselmann, A. Ligot, J. Ruddick, and M. Birattari, “Empirical assessment and comparison of neuro-evolutionary methods for the automatic off-line design of robot swarms," Nature communications, vol. 12, iss. 1, p. 4345, 2021.
[BibTeX]@article{hasselmann2021empirical, title={Empirical assessment and comparison of neuro-evolutionary methods for the automatic off-line design of robot swarms}, author={Hasselmann, Ken and Ligot, Antoine and Ruddick, Julian and Birattari, Mauro}, journal={Nature communications}, volume={12}, number={1}, pages={4345}, year={2021}, publisher={Nature Publishing Group UK London} }
- K. Mathiassen, F. E. Schneider, P. Bounker, A. Tiderko, G. De Cubber, M. Baksaas, J. Główka, R. Kozik, T. Nussbaumer, J. Röning, J. Pellenz, and A. Volk, “Demonstrating interoperability between unmanned ground systems and command and control systems," International Journal of Intelligent Defence Support Systems, vol. 6, iss. 2, pp. 100-129, 2021.
[BibTeX] [Download PDF] [DOI]@article{doi:10.1504/IJIDSS.2021.115236, author = {Mathiassen, Kim and Schneider, Frank E. and Bounker, Paul and Tiderko, Alexander and De Cubber, Geert and Baksaas, Magnus and Główka, Jakub and Kozik, Rafał and Nussbaumer, Thomas and Röning, Juha and Pellenz, Johannes and Volk, André}, title = {Demonstrating interoperability between unmanned ground systems and command and control systems}, journal = {International Journal of Intelligent Defence Support Systems}, volume = {6}, number = {2}, pages = {100-129}, year = {2021}, doi = {10.1504/IJIDSS.2021.115236}, url = {https://www.inderscienceonline.com/doi/abs/10.1504/IJIDSS.2021.115236}, eprint = {https://www.inderscienceonline.com/doi/pdf/10.1504/IJIDSS.2021.115236}, project = {ICARUS, iMUGs}, unit= {meca-ras} }
- G. De Cubber, R. Lahouli, D. Doroftei, and R. Haelterman, “Distributed coverage optimisation for a fleet of unmanned maritime systems," ACTA IMEKO, vol. 10, iss. 3, pp. 36-43, 2021.
[BibTeX] [Abstract] [Download PDF] [DOI]
Unmanned maritime systems (UMS) can provide important benefits for maritime law enforcement agencies for tasks such as area surveillance and patrolling, especially when they are able to work together as one coordinated system. In this context, this paper proposes a methodology that optimises the coverage of a fleet of UMS, thereby maximising the opportunities for identifying threats. Unlike traditional approaches to maritime coverage optimisation, which are also used, for example, in search and rescue operations when searching for victims at sea, this approach takes into consideration the limited seaworthiness of small UMS, compared with traditional large ships, by incorporating the danger level into the design of the optimiser.
@ARTICLE{cubberimeko2021, author={De Cubber, Geert and Lahouli, Rihab and Doroftei, Daniela and Haelterman, Rob}, journal={ACTA IMEKO}, title={Distributed coverage optimisation for a fleet of unmanned maritime systems}, year={2021}, volume={10}, number={3}, pages={36-43}, issn={2221-870X}, url={https://acta.imeko.org/index.php/acta-imeko/article/view/IMEKO-ACTA-10%20%282021%29-03-07/pdf}, project={MarSur, SSAVE}, publisher={IMEKO}, month=oct, abstract = {Unmanned maritime systems (UMS) can provide important benefits for maritime law enforcement agencies for tasks such as area surveillance and patrolling, especially when they are able to work together as one coordinated system. In this context, this paper proposes a methodology that optimises the coverage of a fleet of UMS, thereby maximising the opportunities for identifying threats. Unlike traditional approaches to maritime coverage optimisation, which are also used, for example, in search and rescue operations when searching for victims at sea, this approach takes into consideration the limited seaworthiness of small UMS, compared with traditional large ships, by incorporating the danger level into the design of the optimiser. }, doi={http://dx.doi.org/10.21014/acta_imeko.v10i3.1031}, unit= {meca-ras}}
2020
- K. Hasselmann and M. Birattari, “Modular automatic design of collective behaviors for robots endowed with local communication capabilities," PeerJ Computer Science, vol. 6, p. e291, 2020.
[BibTeX]@article{hasselmann2020modular, title={Modular automatic design of collective behaviors for robots endowed with local communication capabilities}, author={Hasselmann, Ken and Birattari, Mauro}, journal={PeerJ Computer Science}, volume={6}, pages={e291}, year={2020}, publisher={PeerJ Inc.} }
- M. Birattari, A. Ligot, and K. Hasselmann, “Disentangling automatic and semi-automatic approaches to the optimization-based design of control software for robot swarms," Nature Machine Intelligence, vol. 2, iss. 9, p. 494–499, 2020.
[BibTeX]@article{birattari2020disentangling, title={Disentangling automatic and semi-automatic approaches to the optimization-based design of control software for robot swarms}, author={Birattari, Mauro and Ligot, Antoine and Hasselmann, Ken}, journal={Nature Machine Intelligence}, volume={2}, number={9}, pages={494--499}, year={2020}, publisher={Nature Publishing Group UK London} }
- M. Salman, D. Garzón Ramos, K. Hasselmann, and M. Birattari, “Phormica: Photochromic pheromone release and detection system for stigmergic coordination in robot swarms," Frontiers in Robotics and AI, vol. 7, p. 591402, 2020.
[BibTeX]@article{salman2020phormica, title={Phormica: Photochromic pheromone release and detection system for stigmergic coordination in robot swarms}, author={Salman, Muhammad and Garz{\'o}n Ramos, David and Hasselmann, Ken and Birattari, Mauro}, journal={Frontiers in Robotics and AI}, volume={7}, pages={591402}, year={2020}, publisher={Frontiers Media SA} }
- H. Balta, J. Velagic, H. Beglerovic, G. De Cubber, and B. Siciliano, “3D Registration and Integrated Segmentation Framework for Heterogeneous Unmanned Robotic Systems," Remote Sensing, vol. 12, iss. 10, p. 1608, 2020.
[BibTeX] [Abstract] [Download PDF] [DOI]
The paper proposes a novel framework for registering and segmenting 3D point clouds of large-scale natural terrain and complex environments coming from a multisensor heterogeneous robotics system, consisting of unmanned aerial and ground vehicles. This framework involves data acquisition and pre-processing, 3D heterogeneous registration and integrated multi-sensor based segmentation modules. The first module provides robust and accurate homogeneous registrations of 3D environmental models based on sensors’ measurements acquired from the ground (UGV) and aerial (UAV) robots. For 3D UGV registration, we proposed a novel local minima escape ICP (LME-ICP) method, which is based on the well known iterative closest point (ICP) algorithm extending it by the introduction of our local minima estimation and local minima escape mechanisms. It did not require any prior known pose estimation information acquired from sensing systems like odometry, global positioning system (GPS), or inertial measurement units (IMU). The 3D UAV registration has been performed using the Structure from Motion (SfM) approach. In order to improve and speed up the process of outliers removal for large-scale outdoor environments, we introduced the Fast Cluster Statistical Outlier Removal (FCSOR) method. This method was used to filter out the noise and to downsample the input data, which will spare computational and memory resources for further processing steps. Then, we co-registered a point cloud acquired from a laser ranger (UGV) and a point cloud generated from images (UAV) generated by the SfM method. The 3D heterogeneous module consists of a semi-automated 3D scan registration system, developed with the aim to overcome the shortcomings of the existing fully automated 3D registration approaches. This semi-automated registration system is based on the novel Scale Invariant Registration Method (SIRM). The SIRM provides the initial scaling between two heterogenous point clouds and provides an adaptive mechanism for tuning the mean scale, based on the difference between two consecutive estimated point clouds’ alignment error values. Once aligned, the resulting homogeneous ground-aerial point cloud is further processed by a segmentation module. For this purpose, we have proposed a system for integrated multi-sensor based segmentation of 3D point clouds. This system followed a two steps sequence: ground-object segmentation and color-based region-growing segmentation. The experimental validation of the proposed 3D heterogeneous registration and integrated segmentation framework was performed on large-scale datasets representing unstructured outdoor environments, demonstrating the potential and benefits of the proposed semi-automated 3D registration system in real-world environments.
@Article{balta20203Dregistration, author = {Balta, Haris and Velagic, Jasmin and Beglerovic, Halil and De Cubber, Geert and Siciliano, Bruno}, journal = {Remote Sensing}, title = {3D Registration and Integrated Segmentation Framework for Heterogeneous Unmanned Robotic Systems}, year = {2020}, month = may, number = {10}, pages = {1608}, volume = {12}, abstract = {The paper proposes a novel framework for registering and segmenting 3D point clouds of large-scale natural terrain and complex environments coming from a multisensor heterogeneous robotics system, consisting of unmanned aerial and ground vehicles. This framework involves data acquisition and pre-processing, 3D heterogeneous registration and integrated multi-sensor based segmentation modules. The first module provides robust and accurate homogeneous registrations of 3D environmental models based on sensors’ measurements acquired from the ground (UGV) and aerial (UAV) robots. For 3D UGV registration, we proposed a novel local minima escape ICP (LME-ICP) method, which is based on the well known iterative closest point (ICP) algorithm extending it by the introduction of our local minima estimation and local minima escape mechanisms. It did not require any prior known pose estimation information acquired from sensing systems like odometry, global positioning system (GPS), or inertial measurement units (IMU). The 3D UAV registration has been performed using the Structure from Motion (SfM) approach. In order to improve and speed up the process of outliers removal for large-scale outdoor environments, we introduced the Fast Cluster Statistical Outlier Removal (FCSOR) method. This method was used to filter out the noise and to downsample the input data, which will spare computational and memory resources for further processing steps. Then, we co-registered a point cloud acquired from a laser ranger (UGV) and a point cloud generated from images (UAV) generated by the SfM method. The 3D heterogeneous module consists of a semi-automated 3D scan registration system, developed with the aim to overcome the shortcomings of the existing fully automated 3D registration approaches. This semi-automated registration system is based on the novel Scale Invariant Registration Method (SIRM). The SIRM provides the initial scaling between two heterogenous point clouds and provides an adaptive mechanism for tuning the mean scale, based on the difference between two consecutive estimated point clouds’ alignment error values. Once aligned, the resulting homogeneous ground-aerial point cloud is further processed by a segmentation module. For this purpose, we have proposed a system for integrated multi-sensor based segmentation of 3D point clouds. This system followed a two steps sequence: ground-object segmentation and color-based region-growing segmentation. The experimental validation of the proposed 3D heterogeneous registration and integrated segmentation framework was performed on large-scale datasets representing unstructured outdoor environments, demonstrating the potential and benefits of the proposed semi-automated 3D registration system in real-world environments.}, doi = {10.3390/rs12101608}, project = {NRTP,ICARUS,TIRAMISU,MarSur}, publisher = {MDPI}, url = {https://www.mdpi.com/2072-4292/12/10/1608/pdf}, unit= {meca-ras} }
- B. Pairet, F. Cantalloube, and L. Jacques, “MAYONNAISE: a morphological components analysis pipeline for circumstellar disks and exoplanets imaging in the near infrared," arXiv preprint arXiv:2008.05170, 2020.
[BibTeX]@article{pairet2020mayonnaise, title={MAYONNAISE: a morphological components analysis pipeline for circumstellar disks and exoplanets imaging in the near infrared}, author={Pairet, Beno{\^\i}t and Cantalloube, Faustine and Jacques, Laurent}, journal={arXiv preprint arXiv:2008.05170}, year={2020} }
- C. J. Jenkins and S. Papili, “Potential UXO seabed migration in the German Bight," Hydrographische Nachrichten, vol. 116, pp. 72-76, 2020.
[BibTeX] [DOI]@article{sonia01, title={Potential UXO seabed migration in the German Bight}, author={Jenkins, C.J. and Papili, S.}, journal={Hydrographische Nachrichten}, volume={116}, pages={72-76}, year={2020}, month={06}, doi={10.23784/HN116-11}, project = {DISCIMBA}, publisher={Elsevier} }
2019
- M. Birattari, A. Ligot, D. Bozhinoski, M. Brambilla, G. Francesca, L. Garattoni, D. Garzón Ramos, K. Hasselmann, M. Kegeleirs, J. Kuckling, and others, “Automatic off-line design of robot swarms: a manifesto," Frontiers in Robotics and AI, vol. 6, p. 59, 2019.
[BibTeX]@article{birattari2019automatic, title={Automatic off-line design of robot swarms: a manifesto}, author={Birattari, Mauro and Ligot, Antoine and Bozhinoski, Darko and Brambilla, Manuele and Francesca, Gianpiero and Garattoni, Lorenzo and Garz{\'o}n Ramos, David and Hasselmann, Ken and Kegeleirs, Miquel and Kuckling, Jonas and others}, journal={Frontiers in Robotics and AI}, volume={6}, pages={59}, year={2019}, publisher={Frontiers Media SA} }
- D. Doroftei and G. De Cubber, “Using a qualitative and quantitative validation methodology to evaluate a drone detection system," ACTA IMEKO, vol. 8, iss. 4, p. 20–27, 2019.
[BibTeX] [Abstract] [Download PDF] [DOI]
Now that the use of drones is becoming more common, the need to regulate the access to airspace for these systems is becoming more pressing. A necessary tool in order to do this is a means of detecting drones. Numerous parties have started the development of such drone detection systems. A big problem with these systems is that the evaluation of the performance of drone detection systems is a difficult operation that requires the careful consideration of all technical and non-technical aspects of the system under test. Indeed, weather conditions and small variations in the appearance of the targets can have a huge difference on the performance of the systems. In order to provide a fair evaluation, it is therefore paramount that a validation procedure that finds a compromise between the requirements of end users (who want tests to be performed in operational conditions) and platform developers (who want statistically relevant tests) is followed. Therefore, we propose in this article a qualitative and quantitative validation methodology for drone detection systems. The proposed validation methodology seeks to find this compromise between operationally relevant benchmarking (by providing qualitative benchmarking under varying environmental conditions) and statistically relevant evaluation (by providing quantitative score sheets under strictly described conditions).
@Article{doroftei2019using, author = {Doroftei, Daniela and De Cubber, Geert}, journal = {{ACTA} {IMEKO}}, title = {Using a qualitative and quantitative validation methodology to evaluate a drone detection system}, year = {2019}, month = dec, number = {4}, pages = {20--27}, volume = {8}, abstract = {Now that the use of drones is becoming more common, the need to regulate the access to airspace for these systems is becoming more pressing. A necessary tool in order to do this is a means of detecting drones. Numerous parties have started the development of such drone detection systems. A big problem with these systems is that the evaluation of the performance of drone detection systems is a difficult operation that requires the careful consideration of all technical and non-technical aspects of the system under test. Indeed, weather conditions and small variations in the appearance of the targets can have a huge difference on the performance of the systems. In order to provide a fair evaluation, it is therefore paramount that a validation procedure that finds a compromise between the requirements of end users (who want tests to be performed in operational conditions) and platform developers (who want statistically relevant tests) is followed. Therefore, we propose in this article a qualitative and quantitative validation methodology for drone detection systems. The proposed validation methodology seeks to find this compromise between operationally relevant benchmarking (by providing qualitative benchmarking under varying environmental conditions) and statistically relevant evaluation (by providing quantitative score sheets under strictly described conditions).}, doi = {10.21014/acta_imeko.v8i4.682}, pdf = {https://acta.imeko.org/index.php/acta-imeko/article/view/IMEKO-ACTA-08%20%282019%29-04-05/pdf}, project = {SafeShore}, publisher = {{IMEKO} International Measurement Confederation}, url = {https://acta.imeko.org/index.php/acta-imeko/article/view/IMEKO-ACTA-08%20%282019%29-04-05/pdf}, unit= {meca-ras} }
- N. Nauwynck, H. Balta, G. De Cubber, and H. Sahli, “A proof of concept of the in-flight launch of unmanned aerial vehicles in a search and rescue scenario," ACTA IMEKO, vol. 8, iss. 4, p. 13–19, 2019.
[BibTeX] [Abstract] [Download PDF] [DOI]
This article considers the development of a system to enable the in-flight-launch of one aerial system by another. The article discusses how an optimal release mechanism was developed taking into account the aerodynamics of one specific mothership and child Unmanned Aerial Vehicle (UAV). Furthermore, it discusses the PID-based control concept that was introduced in order to autonomously stabilise the child UAV after being released from the mothership UAV. Finally, the article demonstrates how the concept of a mothership and child UAV combination could be taken advantage of in the context of a search and rescue operation.
@Article{nauwynck2019proof, author = {Nauwynck, Niels and Balta, Haris and De Cubber, Geert and Sahli, Hichem}, journal = {{ACTA} {IMEKO}}, title = {A proof of concept of the in-flight launch of unmanned aerial vehicles in a search and rescue scenario}, year = {2019}, month = dec, number = {4}, pages = {13--19}, volume = {8}, abstract = {This article considers the development of a system to enable the in-flight-launch of one aerial system by another. The article discusses how an optimal release mechanism was developed taking into account the aerodynamics of one specific mothership and child Unmanned Aerial Vehicle (UAV). Furthermore, it discusses the PID-based control concept that was introduced in order to autonomously stabilise the child UAV after being released from the mothership UAV. Finally, the article demonstrates how the concept of a mothership and child UAV combination could be taken advantage of in the context of a search and rescue operation.}, doi = {10.21014/acta_imeko.v8i4.681}, publisher = {{IMEKO} International Measurement Confederation}, project = {ICARUS, NRTP}, url = {https://acta.imeko.org/index.php/acta-imeko/article/view/IMEKO-ACTA-08 (2019)-04-04}, unit= {meca-ras} }
- B. Pairet, F. Cantalloube, C. A. Gomez Gonzalez, O. Absil, and L. Jacques, “STIM map: detection map for exoplanets imaging beyond asymptotic Gaussian residual speckle noise," Monthly Notices of the Royal Astronomical Society, vol. 487, iss. 2, p. 2262–2277, 2019.
[BibTeX]@article{pairet2019stim, title={STIM map: detection map for exoplanets imaging beyond asymptotic Gaussian residual speckle noise}, author={Pairet, Beno{\^\i}t and Cantalloube, Faustine and Gomez Gonzalez, Carlos A and Absil, Olivier and Jacques, Laurent}, journal={Monthly Notices of the Royal Astronomical Society}, volume={487}, number={2}, pages={2262--2277}, year={2019}, publisher={Oxford University Press} }
- B. Pairet, L. Jacques, and F. Cantalloube, “Iterative Low-rank and rotating sparsity promotion for circumstellar disks imaging," Proceedings of SPARS’19, vol. 1, p. 1, 2019.
[BibTeX]@article{pairet2019iterative, title={Iterative Low-rank and rotating sparsity promotion for circumstellar disks imaging}, author={Pairet, Beno{\^\i}t and Jacques, Laurent and Cantalloube, Faustine}, journal={Proceedings of SPARS'19}, volume={1}, pages={1}, year={2019} }
- V. Christiaens, S. Casassus, O. Absil, F. Cantalloube, C. Gomez Gonzalez, J. Girard, R. Ramírez, B. Pairet, V. Salinas, D. J. Price, and others, “Separating extended disc features from the protoplanet in PDS 70 using VLT/SINFONI," Monthly Notices of the Royal Astronomical Society, vol. 486, iss. 4, p. 5819–5837, 2019.
[BibTeX]@article{christiaens2019separating, title={Separating extended disc features from the protoplanet in PDS 70 using VLT/SINFONI}, author={Christiaens, Valentin and Casassus, Simon and Absil, Olivier and Cantalloube, Faustine and Gomez Gonzalez, C and Girard, J and Ram{\'\i}rez, R and Pairet, B and Salinas, V and Price, Daniel J and others}, journal={Monthly Notices of the Royal Astronomical Society}, volume={486}, number={4}, pages={5819--5837}, year={2019}, publisher={Oxford University Press} }
- D. Mesa, M. Keppler, F. Cantalloube, L. Rodet, B. Charnay, R. Gratton, M. Langlois, A. Boccaletti, M. Bonnefoy, A. Vigan, B. Pairet, and others, “VLT/SPHERE exploration of the young multiplanetary system PDS70," Astronomy & Astrophysics, vol. 632, p. A25, 2019.
[BibTeX]@article{mesa2019vlt, title={VLT/SPHERE exploration of the young multiplanetary system PDS70}, author={Mesa, D and Keppler, M and Cantalloube, F and Rodet, L and Charnay, B and Gratton, R and Langlois, M and Boccaletti, A and Bonnefoy, M and Vigan, Arthur and Pairet, B and others}, journal={Astronomy \& Astrophysics}, volume={632}, pages={A25}, year={2019}, publisher={EDP Sciences} }
- F. Tedesco, S. Sarkar, and A. Casavola, “Turn-Based Supervision Architectures for Dynamic Networks involving Plug-and-Play Operations," IFAC-PapersOnLine, vol. 52, iss. 3, pp. 90-95, 2019.
[BibTeX] [Abstract] [Download PDF] [DOI]
In this note, we consider a novel supervision algorithm for the distributed management of interconnected linear systems sharing coupling coordination constraints. The proposed supervision approach is based on the recently developed Turn-Based Command Governor strategy (Casavola et al. (2018)) where the agents in the networks follow a particular round-robin policy in updating their commands. Such a strategy is here extended to deal with so called Plug-and-Play (PnP) operations. Such a problem has been faced in a formal way by providing the conditions that allow PnP operations without violating existing constraints. In this respect, the notation of Pluggability of systems that aim at joining the same network is introduced as a structural property. A final example is presented to illustrate the effectiveness of the proposed strategy.
@article{TEDESCO201990, title = {Turn-Based Supervision Architectures for Dynamic Networks involving Plug-and-Play Operations}, journal = {IFAC-PapersOnLine}, volume = {52}, number = {3}, pages = {90-95}, year = {2019}, note = {15th IFAC Symposium on Large Scale Complex Systems LSS 2019}, issn = {2405-8963}, doi = {https://doi.org/10.1016/j.ifacol.2019.06.016}, url = {https://www.sciencedirect.com/science/article/pii/S2405896319301004}, author = {Francesco Tedesco and Soumic Sarkar and Alessandro Casavola}, abstract = {In this note, we consider a novel supervision algorithm for the distributed management of interconnected linear systems sharing coupling coordination constraints. The proposed supervision approach is based on the recently developed Turn-Based Command Governor strategy (Casavola et al. (2018)) where the agents in the networks follow a particular round-robin policy in updating their commands. Such a strategy is here extended to deal with so called Plug-and-Play (PnP) operations. Such a problem has been faced in a formal way by providing the conditions that allow PnP operations without violating existing constraints. In this respect, the notation of Pluggability of systems that aim at joining the same network is introduced as a structural property. A final example is presented to illustrate the effectiveness of the proposed strategy.} }
2018
- K. Hasselmann, A. Ligot, G. Francesca, and M. Birattari, “Reference models for AutoMoDe," IRIDIA, Université libre de Bruxelles, Brussels, Belgium, Tech. Rep. TR/IRIDIA/2018-002, 2018.
[BibTeX]@article{hasselmann2018reference, title={Reference models for AutoMoDe}, author={Hasselmann, Ken and Ligot, Antoine and Francesca, Gianpiero and Birattari, M}, journal={IRIDIA, Universit{\'e} libre de Bruxelles, Brussels, Belgium, Tech. Rep. TR/IRIDIA/2018-002}, year={2018} }
- Y. Baudoin, D. Doroftei, G. de Cubber, J. Habumuremyi, H. Balta, and I. Doroftei, “Unmanned Ground and Aerial Robots Supporting Mine Action Activities," Journal of Physics: Conference Series, vol. 1065, iss. 17, p. 172009, 2018.
[BibTeX] [Abstract] [Download PDF] [DOI]
During humanitarian demining actions, teleoperation of sensors or multi-sensor heads can enhance the detection process by allowing more precise scanning, which is useful for the optimization of the signal processing algorithms. This chapter summarizes the technologies and experiences developed during 16 years through national and/or European-funded projects, illustrated by some contributions of our own laboratory, located at the Royal Military Academy of Brussels, focusing on the detection of unexploded devices and the implementation of mobile robotics systems on minefields.
@Article{baudoin2018unmanned, author = {Baudoin, Yvan and Doroftei, Daniela and de Cubber, Geert and Habumuremyi, Jean-Claude and Balta, Haris and Doroftei, Ioan}, title = {Unmanned Ground and Aerial Robots Supporting Mine Action Activities}, year = {2018}, month = aug, number = {17}, organization = {IOP Publishing}, pages = {172009}, publisher = {{IOP} Publishing}, volume = {1065}, abstract = {During the Humanitarian‐demining actions, teleoperation of sensors or multi‐sensor heads can enhance‐detection process by allowing more precise scanning, which is use‐ ful for the optimization of the signal processing algorithms. This chapter summarizes the technologies and experiences developed during 16 years through national and/or European‐funded projects, illustrated by some contributions of our own laboratory, located at the Royal Military Academy of Brussels, focusing on the detection of unexploded devices and the implementation of mobile robotics systems on minefields}, doi = {10.1088/1742-6596/1065/17/172009}, journal = {Journal of Physics: Conference Series}, project = {TIRAMISU}, url = {https://iopscience.iop.org/article/10.1088/1742-6596/1065/17/172009/pdf}, unit= {meca-ras} }
- I. Lahouli, R. Haelterman, J. Degroote, M. Shimoni, G. De Cubber, and R. Attia, “Accelerating existing non-blind image deblurring techniques through a strap-on limited-memory switched Broyden method," IEICE TRANSACTIONS on Information and Systems, vol. 1, iss. 1, p. 8, 2018.
[BibTeX] [Abstract] [Download PDF] [DOI]
Video surveillance from airborne platforms can suffer from many sources of blur, like vibration, low-end optics, uneven lighting conditions, etc. Many different algorithms have been developed in the past that aim to recover the deblurred image but often incur substantial CPU-time, which is not always available on-board. This paper shows how a strap-on quasi-Newton method can accelerate the convergence of existing iterative methods with little extra overhead while keeping the performance of the original algorithm, thus paving the way for (near) real-time applications using on-board processing.
@Article{lahouli2018accelerating, author = {Lahouli, Ichraf and Haelterman, Robby and Degroote, Joris and Shimoni, Michal and De Cubber, Geert and Attia, Rabah}, journal = {IEICE TRANSACTIONS on Information and Systems}, title = {Accelerating existing non-blind image deblurring techniques through a strap-on limited-memory switched {Broyden} method}, year = {2018}, number = {1}, pages = {8}, volume = {1}, abstract = {Video surveillance from airborne platforms can suffer from many sources of blur, like vibration, low-end optics, uneven lighting conditions, etc. Many different algorithms have been developed in the past that aim to recover the deblurred image but often incur substantial CPU-time, which is not always available on-board. This paper shows how a strap-on quasi-Newton method can accelerate the convergence of existing iterative methods with little extra overhead while keeping the performance of the original algorithm, thus paving the way for (near) real-time applications using on-board processing.}, doi = {10.1587/transinf.2017mvp0022}, file = {:lahouli2018accelerating - Accelerating Existing Non Blind Image Deblurring Techniques through a Strap on Limited Memory Switched Broyden Method.PDF:PDF}, publisher = {The Institute of Electronics, Information and Communication Engineers}, project = {SafeShore}, url = {https://www.jstage.jst.go.jp/article/transinf/E101.D/5/E101.D_2017MVP0022/_pdf/-char/en}, unit= {meca-ras} }
- I. Lahouli, E. Karakasis, R. Haelterman, Z. Chtourou, G. De Cubber, A. Gasteratos, and R. Attia, “Hot spot method for pedestrian detection using saliency maps, discrete Chebyshev moments and support vector machine," IET Image Processing, vol. 12, iss. 7, p. 1284–1291, 2018.
[BibTeX] [Abstract] [Download PDF] [DOI]
The increasing risks of border intrusions or attacks on sensitive facilities and the growing availability of surveillance cameras lead to extensive research efforts for robust detection of pedestrians using images. However, the surveillance of borders or sensitive facilities poses many challenges including the need to set up many cameras to cover the whole area of interest, the high bandwidth requirements for data streaming and the high-processing requirements. Driven by day and night capabilities of the thermal sensors and the distinguished thermal signature of humans, the authors propose a novel and robust method for the detection of pedestrians using thermal images. The method is composed of three steps: a detection which is based on a saliency map in conjunction with a contrast-enhancement technique, a shape description based on discrete Chebyshev moments and a classification step using a support vector machine classifier. The performance of the method is tested using two different thermal datasets and is compared with the conventional maximally stable extremal regions detector. The obtained results prove the robustness and the superiority of the proposed framework in terms of true and false positives rates and computational costs which make it suitable for low-performance processing platforms and real-time applications.
@Article{lahouli2018hot, author = {Lahouli, Ichraf and Karakasis, Evangelos and Haelterman, Robby and Chtourou, Zied and De Cubber, Geert and Gasteratos, Antonios and Attia, Rabah}, journal = {IET Image Processing}, title = {Hot spot method for pedestrian detection using saliency maps, discrete {Chebyshev} moments and support vector machine}, year = {2018}, number = {7}, pages = {1284--1291}, volume = {12}, abstract = {The increasing risks of border intrusions or attacks on sensitive facilities and the growing availability of surveillance cameras lead to extensive research efforts for robust detection of pedestrians using images. However, the surveillance of borders or sensitive facilities poses many challenges including the need to set up many cameras to cover the whole area of interest, the high bandwidth requirements for data streaming and the high-processing requirements. Driven by day and night capabilities of the thermal sensors and the distinguished thermal signature of humans, the authors propose a novel and robust method for the detection of pedestrians using thermal images. The method is composed of three steps: a detection which is based on a saliency map in conjunction with a contrast-enhancement technique, a shape description based on discrete Chebyshev moments and a classification step using a support vector machine classifier. The performance of the method is tested using two different thermal datasets and is compared with the conventional maximally stable extremal regions detector. The obtained results prove the robustness and the superiority of the proposed framework in terms of true and false positives rates and computational costs which make it suitable for low-performance processing platforms and real-time applications.}, doi = {10.1049/iet-ipr.2017.0221}, publisher = {IET Digital Library}, project = {SafeShore}, url = {https://ieeexplore.ieee.org/document/8387035}, unit= {meca-ras} }
- A. Durand-Petiteville, E. Le Flécher, V. Cadenat, T. Sentenac, and S. Vougioukas, “Tree detection with low-cost 3D sensors for autonomous navigation in orchards," Robotics and Autonomous Letters, p. 8, 2018.
[BibTeX]@article{RAL_2018, title={Tree detection with low-cost 3D sensors for autonomous navigation in orchards}, journal={Robotics and Autonomous Letters}, author={A Durand-Petiteville and E Le Flécher and V Cadenat and T Sentenac and S Vougioukas}, year={2018}, pages={8} }
2017
- M. Buric and G. De Cubber, “Counter Remotely Piloted Aircraft Systems," MTA Review, vol. 27, iss. 1, 2017.
[BibTeX] [Abstract] [Download PDF] [DOI]
An effective Counter Remotely Piloted Aircraft System is a major objective of many researchers and industry entities. Their activity is strongly impelled by the operational requirements of the Law Enforcement Authorities and naturally follows both the course of the latest terrorist events and technological developments. The design process of an effective Counter Remotely Piloted Aircraft System needs to benefit from a systemic approach, starting from the legal aspects and ending with the technical ones. From a technical point of view, the system has to work according to the five-phase “kill chain” model, starting with the detection phase, going on with the classification, prioritization, tracking and neutralization of the targets, and ending with the forensic phase.
@Article{buric2017counter, author = {Buric, Marian and De Cubber, Geert}, journal = {MTA Review}, title = {Counter Remotely Piloted Aircraft Systems}, year = {2017}, number = {1}, volume = {27}, abstract = {An effective Counter Remotely Aircraft System is a major objective of many researchers and industries entities. Their activity is strongly impelled by the operational requirements of the Law Enforcement Authorities and naturally follows both the course of the latest terrorist events and technological developments. The designing process of an effective Counter Remotely Aircraft System needs to benefit from a systemic approach, starting from the legal aspects, and ending with the technical ones. From a technical point of view, the system has to work according to the five “kill chain” model starting with the detection phase, going on with the classification, prioritization, tracking and neutralization of the targets and ending with the forensic phase.}, doi = {10.5281/zenodo.1115502}, keywords = {Counter Remotely Piloted Aircraft Systems, drone, drone detection tracking and neutralization, RPAS, SafeShore}, language = {en}, publisher = {Military Technical Academy Publishing House}, project = {SafeShore}, url = {http://mecatron.rma.ac.be/pub/2017/Counter Remotely Piloted Aircraft Systems.pdf}, unit= {meca-ras} }
2016
- H. Balta, J. Bedkowski, S. Govindaraj, K. Majek, P. Musialik, D. Serrano, K. Alexis, R. Siegwart, and G. De Cubber, “Integrated Data Management for a Fleet of Search-and-rescue Robots," Journal of Field Robotics, vol. 34, iss. 3, p. 539–582, 2016.
[BibTeX] [Abstract] [Download PDF] [DOI]
Search‐and‐rescue operations have recently been confronted with the introduction of robotic tools that assist the human search‐and‐rescue workers in their dangerous but life‐saving job of searching for human survivors after major catastrophes. However, the world of search and rescue is highly reliant on strict procedures for the transfer of messages, alarms, data, and command and control over the deployed assets. The introduction of robotic tools into this world causes an important structural change in this procedural toolchain. Moreover, the introduction of search‐and‐rescue robots acting as data gatherers could potentially lead to an information overload toward the human search‐and‐rescue workers, if the data acquired by these robotic tools are not managed in an intelligent way. With that in mind, we present in this paper an integrated data combination and data management architecture that is able to accommodate real‐time data gathered by a fleet of robotic vehicles on a crisis site, and we present and publish these data in a way that is easy to understand by end‐users. In the scope of this paper, a fleet of unmanned ground and aerial search‐and‐rescue vehicles is considered, developed within the scope of the European ICARUS project. As a first step toward the integrated data‐management methodology, the different robotic systems require an interoperable framework in order to pass data from one to another and toward the unified command and control station. As a second step, a data fusion methodology will be presented, combining the data acquired by the different heterogenic robotic systems. The computation needed for this process is done in a novel mobile data center and then (as a third step) published in a software as a service (SaaS) model. The SaaS model helps in providing access to robotic data over ubiquitous Ethernet connections. As a final step, we show how the presented data‐management architecture allows for reusing recorded exercises with real robots and rescue teams for training purposes and teaching search‐and‐rescue personnel how to handle the different robotic tools. The system was validated in two experiments. First, in the controlled environment of a military testing base, a fleet of unmanned ground and aerial vehicles was deployed in an earthquake‐response scenario. The data gathered by the different interoperable robotic systems were combined by a novel mobile data center and presented to the end‐user public. Second, an unmanned aerial system was deployed on an actual mission with an international relief team to help with the relief operations after major flooding in Bosnia in the spring of 2014. Due to the nature of the event (floods), no ground vehicles were deployed here, but all data acquired by the aerial system (mainly three‐dimensional maps) were stored in the ICARUS data center, where they were securely published for authorized personnel all over the world. This mission (which is, to our knowledge, the first recorded deployment of an unmanned aerial system by an official governmental international search‐and‐rescue team in another country) proved also the concept of the procedural integration of the ICARUS data management system into the existing procedural toolchain of the search and rescue workers, and this in an international context (deployment from Belgium to Bosnia). 
The feedback received from the search‐and‐rescue personnel on both validation exercises was highly positive, proving that the ICARUS data management system can efficiently increase the situational awareness of the search‐and‐rescue personnel.
@Article{balta2017integrated, author = {Haris Balta and Janusz Bedkowski and Shashank Govindaraj and Karol Majek and Pawel Musialik and Daniel Serrano and Kostas Alexis and Roland Siegwart and De Cubber, Geert}, journal = {Journal of Field Robotics}, title = {Integrated Data Management for a Fleet of Search-and-rescue Robots}, year = {2016}, month = jul, number = {3}, pages = {539--582}, volume = {34}, abstract = {Search‐and‐rescue operations have recently been confronted with the introduction of robotic tools that assist the human search‐and‐rescue workers in their dangerous but life‐saving job of searching for human survivors after major catastrophes. However, the world of search and rescue is highly reliant on strict procedures for the transfer of messages, alarms, data, and command and control over the deployed assets. The introduction of robotic tools into this world causes an important structural change in this procedural toolchain. Moreover, the introduction of search‐and‐rescue robots acting as data gatherers could potentially lead to an information overload toward the human search‐and‐rescue workers, if the data acquired by these robotic tools are not managed in an intelligent way. With that in mind, we present in this paper an integrated data combination and data management architecture that is able to accommodate real‐time data gathered by a fleet of robotic vehicles on a crisis site, and we present and publish these data in a way that is easy to understand by end‐users. In the scope of this paper, a fleet of unmanned ground and aerial search‐and‐rescue vehicles is considered, developed within the scope of the European ICARUS project. As a first step toward the integrated data‐management methodology, the different robotic systems require an interoperable framework in order to pass data from one to another and toward the unified command and control station. As a second step, a data fusion methodology will be presented, combining the data acquired by the different heterogenic robotic systems. The computation needed for this process is done in a novel mobile data center and then (as a third step) published in a software as a service (SaaS) model. The SaaS model helps in providing access to robotic data over ubiquitous Ethernet connections. As a final step, we show how the presented data‐management architecture allows for reusing recorded exercises with real robots and rescue teams for training purposes and teaching search‐and‐rescue personnel how to handle the different robotic tools. The system was validated in two experiments. First, in the controlled environment of a military testing base, a fleet of unmanned ground and aerial vehicles was deployed in an earthquake‐response scenario. The data gathered by the different interoperable robotic systems were combined by a novel mobile data center and presented to the end‐user public. Second, an unmanned aerial system was deployed on an actual mission with an international relief team to help with the relief operations after major flooding in Bosnia in the spring of 2014. Due to the nature of the event (floods), no ground vehicles were deployed here, but all data acquired by the aerial system (mainly three‐dimensional maps) were stored in the ICARUS data center, where they were securely published for authorized personnel all over the world. 
This mission (which is, to our knowledge, the first recorded deployment of an unmanned aerial system by an official governmental international search‐and‐rescue team in another country) proved also the concept of the procedural integration of the ICARUS data management system into the existing procedural toolchain of the search and rescue workers, and this in an international context (deployment from Belgium to Bosnia). The feedback received from the search‐and‐rescue personnel on both validation exercises was highly positive, proving that the ICARUS data management system can efficiently increase the situational awareness of the search‐and‐rescue personnel.}, doi = {10.1002/rob.21651}, publisher = {Wiley}, project = {ICARUS}, unit= {meca-ras}, url = {https://onlinelibrary.wiley.com/doi/abs/10.1002/rob.21651}, }
- T. T. Nguyen, J. De Baerdemaeker, and W. Saeys, “Detection of red and bicoloured apples on tree with an RGB-D camera," Biosystems Engineering, vol. 146, pp. 33-44, 2016.
[BibTeX] [DOI]@article{article_tt_002, author={Nguyen, T. Th. and De Baerdemaeker, J. and Saeys, W.}, title={Detection of red and bicoloured apples on tree with an RGB-D camera}, journal={Biosystems Engineering}, volume={146}, year={2016}, doi={https://doi.org/10.1016/j.biosystemseng.2016.01.007}, pages={33-44} }
- S. Sarkar and I. N. Kar, “Formation of multiple groups of mobile robots: multi-timescale convergence perspective," Nonlinear Dynamics, vol. 85, iss. 4, p. 2611–2627, 2016.
[BibTeX]@article{sarkar2016formation, title={Formation of multiple groups of mobile robots: multi-timescale convergence perspective}, author={Sarkar, Soumic and Kar, Indra Narayan}, journal={Nonlinear Dynamics}, volume={85}, number={4}, pages={2611--2627}, year={2016}, publisher={Springer} }
2015
- G. De Cubber, “Search and Rescue Robots," Belgisch Militair Tijdschrift, vol. 10, p. 50–60, 2015.
[BibTeX] [Abstract] [Download PDF]
This article provides an overview of the work on search and rescue robotics and more specifically the research performed within the ICARUS research project.
@Article{de2015search, author = {De Cubber, Geert}, journal = {Belgisch Militair Tijdschrift}, title = {Search and Rescue Robots}, year = {2015}, pages = {50--60}, volume = {10}, abstract = {This article provides an overview of the work on search and rescue robotics and more specifically the research performed within the ICARUS research project.}, publisher = {Defensie}, project = {ICARUS}, unit= {meca-ras}, url = {http://mecatron.rma.ac.be/pub/2015/rmb102.pdf}, }
2014
- C. Armbrust, G. De Cubber, and K. Berns, “ICARUS Control Systems for Search and Rescue Robots," Field and Assistive Robotics – Advances in Systems and Algorithms, 2014.
[BibTeX] [Abstract] [Download PDF]
This paper describes results of the European project ICARUS in the field of search and rescue robotics. It presents the software architectures of two unmanned ground vehicles (a small and a large one) developed in the context of the project. The architectures of the two vehicles share many similarities. This allows for component reuse and thus reduces the overall development effort. Hence, the main contribution of this paper is a set of design concepts that can serve as a basis for the development of different robot control systems.
@Article{armbrust2014icarus, author = {Armbrust, Christopher and De Cubber, Geert and Berns, Karsten}, journal = {Field and Assistive Robotics - Advances in Systems and Algorithms}, title = {{ICARUS} Control Systems for Search and Rescue Robots}, year = {2014}, abstract = {This paper describes results of the European project ICARUS in the field of search and rescue robotics. It presents the software architectures of two unmanned ground vehicles (a small and a large one) developed in the context of the project. The architectures of the two vehicles share many similarities. This allows for component reuse and thus reduces the overall development effort. Hence, the main contribution of this paper are design concepts that can serve as a basis for the development of different robot control systems.}, publisher = {Shaker Verlag}, project = {ICARUS}, url = {https://pdfs.semanticscholar.org/713d/8c8561eba9b577f17d3059155e1f3953893a.pdf}, unit= {meca-ras} }
- S. Sarkar and I. N. Kar, “On the Topologies Conforming Singular Perturbation in Formation Control of Nonholonomic Robots," IFAC Proceedings Volumes, vol. 47, iss. 1, p. 781–786, 2014.
[BibTeX]@article{sarkar2014topologies, title={On the Topologies Conforming Singular Perturbation in Formation Control of Nonholonomic Robots}, author={Sarkar, Soumic and Kar, Indra Narayan}, journal={IFAC Proceedings Volumes}, volume={47}, number={1}, pages={781--786}, year={2014}, publisher={Elsevier} }
- S. Papili, T. Wever, Y. Dupont, and V. Van Lancker, “Storm influence on the burial of objects in a shallow sandy shelf environment," Marine Geology, vol. 349, pp. 61-72, 2014.
[BibTeX] [DOI]@article{sonia08, title={Storm influence on the burial of objects in a shallow sandy shelf environment}, author={Papili, S. and Wever, T and Dupont, Y. and Van Lancker, V.}, journal={Marine Geology}, volume={349}, pages={61-72}, year={2014}, doi={10.1016/j.margeo.2014.01.004}, project = {DISCIMBA}, publisher={Elsevier} }
2013
- G. De Cubber and H. Sahli, “Augmented Lagrangian-based approach for dense three-dimensional structure and motion estimation from binocular image sequences," IET Computer Vision, 2013.
[BibTeX] [Abstract] [Download PDF] [DOI]
In this study, the authors propose a framework for stereo–motion integration for dense depth estimation. They formulate the stereo–motion depth reconstruction problem into a constrained minimisation one. A sequential unconstrained minimisation technique, namely, the augmented Lagrange multiplier (ALM) method has been implemented to address the resulting constrained optimisation problem. ALM has been chosen because of its relative insensitivity to whether the initial design points for a pseudo-objective function are feasible or not. The development of the method and results from solving the stereo–motion integration problem are presented. Although the authors' work is not the only one adopting the ALM framework in the computer vision context, to their knowledge the presented algorithm is the first to use this mathematical framework in a context of stereo–motion integration. This study describes how the stereo–motion integration problem was cast in a mathematical context and solved using the presented ALM method. Results on benchmark and real visual input data show the validity of the approach.
@Article{de2013augmented, author = {De Cubber, Geert and Sahli, Hichem}, journal = {IET Computer Vision}, title = {Augmented Lagrangian-based approach for dense three-dimensional structure and motion estimation from binocular image sequences}, year = {2013}, abstract = {In this study, the authors propose a framework for stereo–motion integration for dense depth estimation. They formulate the stereo–motion depth reconstruction problem into a constrained minimisation one. A sequential unconstrained minimisation technique, namely, the augmented Lagrange multiplier (ALM) method has been implemented to address the resulting constrained optimisation problem. ALM has been chosen because of its relative insensitivity to whether the initial design points for a pseudo-objective function are feasible or not. The development of the method and results from solving the stereo–motion integration problem are presented. Although the authors work is not the only one adopting the ALMs framework in the computer vision context, to thier knowledge the presented algorithm is the first to use this mathematical framework in a context of stereo–motion integration. This study describes how the stereo–motion integration problem was cast in a mathematical context and solved using the presented ALM method. Results on benchmark and real visual input data show the validity of the approach.}, doi = {10.1049/iet-cvi.2013.0017}, publisher = {IET Digital Library}, project = {ICARUS,ViewFinder,Mobiniss}, url = {https://digital-library.theiet.org/content/journals/10.1049/iet-cvi.2013.0017}, unit= {meca-ras} }
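For readers unfamiliar with the optimisation scheme named in the abstract above, the following is a generic sketch of an augmented Lagrange multiplier iteration for an equality-constrained problem min_x f(x) subject to h(x) = 0. It is illustrative only and does not reproduce the paper's actual stereo–motion functional.

```latex
% Generic ALM iteration (illustrative; the paper defines its own data and
% smoothness terms for the stereo-motion depth reconstruction problem).
\begin{align*}
  \mathcal{L}_{\rho}(x,\lambda) &= f(x) + \lambda^{\top} h(x)
      + \tfrac{\rho}{2}\,\lVert h(x)\rVert_2^{2}, \\
  x^{k+1}       &= \arg\min_{x}\; \mathcal{L}_{\rho_k}\!\bigl(x,\lambda^{k}\bigr), \\
  \lambda^{k+1} &= \lambda^{k} + \rho_k\, h\!\bigl(x^{k+1}\bigr), \qquad \rho_{k+1} \ge \rho_k .
\end{align*}
```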
2012
- J. Będkowski, A. Masłowski, and G. De Cubber, “Real time 3D localization and mapping for USAR robotic application," Industrial Robot: An International Journal, vol. 39, iss. 5, p. 464–474, 2012.
[BibTeX] [DOI]@Article{bkedkowski2012real, author = {B{\k{e}}dkowski, Janusz and Mas{\l}owski, Andrzej and De Cubber, Geert}, journal = {Industrial Robot: An International Journal}, title = {Real time {3D} localization and mapping for {USAR} robotic application}, year = {2012}, number = {5}, pages = {464--474}, volume = {39}, doi = {10.1108/01439911211249751}, project = {ICARUS}, publisher = {Emerald Group Publishing Limited}, unit= {meca-ras} }
- G. De Cubber and H. Sahli, “Partial differential equation-based dense 3D structure and motion estimation from monocular image sequences," IET computer vision, vol. 6, iss. 3, p. 174–185, 2012.
[BibTeX] [DOI]@Article{de2012partial, author = {De Cubber, Geert and Sahli, Hichem}, journal = {IET computer vision}, title = {Partial differential equation-based dense {3D} structure and motion estimation from monocular image sequences}, year = {2012}, number = {3}, pages = {174--185}, volume = {6}, doi = {10.1049/iet-cvi.2011.0174}, project = {ViewFinder, Mobiniss}, publisher = {IET Digital Library}, unit= {meca-ras,vu-etro} }
- J. Będkowski, G. De Cubber, and A. Masłowski, “6D SLAM with GPGPU computation," Pomiary Automatyka Robotyka, vol. 16, iss. 2, p. 275–280, 2012.
[BibTeX] [Abstract] [Download PDF]
The main goal was to improve a state-of-the-art 6D SLAM algorithm with a new GPGPU-based implementation of the data registration module. Data registration is based on the ICP (Iterative Closest Point) algorithm, which is fully implemented on the GPU with the NVIDIA FERMI architecture. In our research we focus on mobile robot inspection and intervention systems applicable in hazardous environments. The goal is to deliver a complete system capable of being used in real life. In this paper we demonstrate our achievements in the field of on-line robot localization and mapping, including an experiment in a real, large-scale environment. We compared two strategies of data alignment – simple ICP and ICP using a so-called meta scan.
@Article{bkedkowski20126d, author = {B{\k{e}}dkowski, Janusz and De Cubber, Geert and Mas{\l}owski, Andrzej}, journal = {Pomiary Automatyka Robotyka}, title = {{6D SLAM} with {GPGPU} computation}, year = {2012}, number = {2}, pages = {275--280}, volume = {16}, project = {ICARUS}, abstract = {The main goal was to improve a state of the art 6D SLAM algorithm with a new GPGPU-based implementation of data registration module. Data registration is based on ICP (Iterative Closest Point) algorithm that is fully implemented in the GPU with NVIDIA FERMI architecture. In our research we focus on mobile robot inspection intervention systems applicable in hazardous environments. The goal is to deliver a complete system capable of being used in real life. In this paper we demonstrate our achievements in the field of on line robot localization and mapping. We demonstrated an experiment in real large environment. We compared two strategies of data alingment - simple ICP and ICP using so called meta scan.}, url = {http://www.par.pl/en/content/download/14036/170476/file/275_280.pdf}, unit= {meca-ras} }
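The registration step mentioned in the abstract above is a classic ICP loop; the paper's contribution lies in its GPGPU implementation and the meta-scan strategy. As a minimal CPU-only sketch, assuming numpy and scipy are available and point clouds are given as Nx3 arrays, one point-to-point ICP could look like this:

```python
import numpy as np
from scipy.spatial import cKDTree

def icp_point_to_point(src, dst, iters=30):
    """Align source point cloud `src` (Nx3) to `dst` (Mx3).

    Illustrative point-to-point ICP: nearest-neighbour association
    followed by a closed-form (Kabsch/SVD) rigid update. Returns the
    4x4 transform mapping src into the dst frame.
    """
    T = np.eye(4)
    tree = cKDTree(dst)
    cur = src.copy()
    for _ in range(iters):
        # 1. Associate every source point with its closest destination point.
        _, idx = tree.query(cur)
        matched = dst[idx]
        # 2. Closed-form rigid motion between the matched sets (Kabsch).
        mu_s, mu_d = cur.mean(axis=0), matched.mean(axis=0)
        H = (cur - mu_s).T @ (matched - mu_d)
        U, _, Vt = np.linalg.svd(H)
        R = Vt.T @ U.T
        if np.linalg.det(R) < 0:          # guard against reflections
            Vt[-1, :] *= -1
            R = Vt.T @ U.T
        t = mu_d - R @ mu_s
        # 3. Apply the increment and accumulate it into T.
        cur = cur @ R.T + t
        step = np.eye(4)
        step[:3, :3], step[:3, 3] = R, t
        T = step @ T
    return T
```

The "meta scan" strategy compared in the paper, as commonly described in the 6D SLAM literature, registers each new scan against the union of previously aligned scans rather than only the most recent one.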
2011
- T. Nguyen, E. Laroche, L. Cuvillon, J. Gangloff, and O. Piccin, “Identification d’un modèle phénoménologique de robot à câbles," Journal Européen des Systèmes Automatisés (JESA), 2011.
[BibTeX]@article{article_tt_001, author={T.Th. {Nguyen} and E. {Laroche} and L. {Cuvillon} and J. {Gangloff} and O. {Piccin}}, title={Identification d’un modèle phénoménologique de robot à câbles.}, journal={Journal Européen des Systèmes Automatisés (JESA)}, year={2011} }
2010
- G. De Cubber, S. A. Berrabah, D. Doroftei, Y. Baudoin, and H. Sahli, “Combining Dense Structure from Motion and Visual SLAM in a Behavior-Based Robot Control Architecture," International Journal of Advanced Robotic Systems, vol. 7, iss. 1, 2010.
[BibTeX] [Abstract] [Download PDF] [DOI]
In this paper, we present a control architecture for an intelligent outdoor mobile robot. This enables the robot to navigate in a complex, natural outdoor environment, relying on only a single on-board camera as sensory input. This is achieved through a twofold analysis of the visual data stream: a dense structure from motion algorithm calculates a depth map of the environment and a visual simultaneous localization and mapping algorithm builds a map of the surroundings using image features. This information enables a behavior-based robot motion and path planner to navigate the robot through the environment. In this paper, we show the theoretical aspects of setting up this architecture.
@Article{de2010combining, author = {De Cubber, Geert and Sid Ahmed Berrabah and Daniela Doroftei and Yvan Baudoin and Hichem Sahli}, journal = {International Journal of Advanced Robotic Systems}, title = {Combining Dense Structure from Motion and Visual {SLAM} in a Behavior-Based Robot Control Architecture}, year = {2010}, month = mar, number = {1}, volume = {7}, abstract = {In this paper, we present a control architecture for an intelligent outdoor mobile robot. This enables the robot to navigate in a complex, natural outdoor environment, relying on only a single on-board camera as sensory input. This is achieved through a twofold analysis of the visual data stream: a dense structure from motion algorithm calculates a depth map of the environment and a visual simultaneous localization and mapping algorithm builds a map of the surroundings using image features. This information enables a behavior-based robot motion and path planner to navigate the robot through the environment. In this paper, we show the theoretical aspects of setting up this architecture.}, doi = {10.5772/7240}, publisher = {{SAGE} Publications}, project = {ViewFinder, Mobiniss}, url = {http://mecatron.rma.ac.be/pub/2010/e_from_motion_and_visual_slam_in_a_behavior-based_robot_control_architecture.pdf}, unit= {meca-ras,vub-etro} }
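To make the behaviour-based coupling in the abstract above concrete, here is a minimal, hypothetical sketch of how a depth-map-driven avoidance behaviour and a SLAM-pose-driven goal-seeking behaviour can be blended into one velocity command. The weights, thresholds, and helper functions are illustrative assumptions and are not taken from the paper.

```python
import numpy as np

def goal_seeking(pose_xyt, goal_xy):
    """Steer towards a goal expressed in the SLAM map frame (illustrative)."""
    dx, dy = goal_xy[0] - pose_xyt[0], goal_xy[1] - pose_xyt[1]
    err = np.arctan2(dy, dx) - pose_xyt[2]
    err = np.arctan2(np.sin(err), np.cos(err))        # wrap to [-pi, pi]
    return 0.5, np.clip(err, -1.0, 1.0)               # (v, omega)

def obstacle_avoidance(depth_row):
    """Turn away from the nearest obstacle in one row of the depth map.
    Convention assumed here: positive omega = counter-clockwise (left)."""
    i = int(np.argmin(depth_row))
    clearance = float(depth_row[i])
    turn = 1.0 if i > len(depth_row) // 2 else -1.0   # obstacle right -> steer left
    v = np.clip(clearance - 0.5, 0.0, 0.5)            # slow down when close
    return v, turn * np.exp(-clearance)

def blend(pose_xyt, goal_xy, depth_row, w_goal=0.6, w_avoid=0.4):
    """Weighted behaviour fusion, a common behaviour-based control scheme
    (the weights here are made-up values)."""
    vg, wg = goal_seeking(pose_xyt, goal_xy)
    va, wa = obstacle_avoidance(depth_row)
    return w_goal * vg + w_avoid * va, w_goal * wg + w_avoid * wa
```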
- D. Doroftei and E. Colon, “Decentralized Multi-Robot Coordination in an Urban Environment," European Journal of Mechanical and Environmental Engineering, vol. 1, 2010.
[BibTeX] [Abstract] [Download PDF]
In this paper, a novel control strategy is presented for multi‐robot coordination. An important aspect of the presented control architecture is that it is formulated in a decentralized context. This means that the robots cannot rely on traditional global path planning algorithms for navigation. The presented approach casts the multi‐robot control problem as a behavior‐based control problem.
@Article{doro2010decentralized, author = {Doroftei, Daniela and Colon, Eric}, journal = {European Journal of Mechanical en Environmental Engineering}, title = {Decentralized Multi-Robot Coordination in an Urban Environment}, year = {2010}, volume = {1}, abstract = {In this paper, a novel control strategy is presented for multi‐robot coordination. An important aspect of the presented control architecture is that it is formulated in a decentralized context. This means that the robots cannot rely on traditional global path planning algorithms for navigation. The presented approach casts the multi‐robot control problem as a behavior‐based control problem. }, project = {NMRS}, address = {Sheffield, UK}, url = {http://mecatron.rma.ac.be/pub/2010/EJMEE2010_doroftei_colon.pdf}, unit= {meca-ras} }
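As a generic illustration of the decentralised, behaviour-based idea summarised above (not the controller developed in the paper), each robot can compute its own velocity from purely local information, for example the positions of neighbours within communication range plus a shared goal point. All weights below are made-up values.

```python
import numpy as np

def local_velocity(own_pos, neighbour_pos, goal, r_sep=1.0,
                   w_sep=1.5, w_coh=0.5, w_goal=1.0):
    """Velocity command from local information only (illustrative weights)."""
    own_pos, goal = np.asarray(own_pos, float), np.asarray(goal, float)
    v = w_goal * (goal - own_pos)                          # goal-seeking behaviour
    if len(neighbour_pos):
        nb = np.asarray(neighbour_pos, float)
        v += w_coh * (nb.mean(axis=0) - own_pos)           # cohesion behaviour
        diff = own_pos - nb
        dist = np.maximum(np.linalg.norm(diff, axis=1, keepdims=True), 1e-9)
        close = (dist < r_sep).ravel()
        if close.any():                                    # separation behaviour
            v += w_sep * (diff[close] / dist[close] ** 2).sum(axis=0)
    return v
```

Because every term uses only locally available quantities, no global path planner or central coordinator is needed, which is the point made in the abstract.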
- A. Borghgraef, O. Barnich, F. D. Lapierre, M. Van Droogenbroeck, W. Philips, and M. Acheroy, “An Evaluation of Pixel-based Methods for the Detection of Floating Objects on the Sea Surface," EURASIP Journal on Applied Signal Processing (JASP), 2010.
[BibTeX] [DOI]@article{Borghgraef2010EURASIP, year = {2010}, author = {A. Borghgraef and O. Barnich and F.D. Lapierre and M. Van Droogenbroeck and W. Philips and M. Acheroy}, title = {An Evaluation of Pixel-based Methods for the Detection of Floating Objects on the Sea Surface}, journal = {EURASIP Journal on Applied Signal Processing (JASP)}, eid = {978451}, numpages = {11}, doi = {10.1155/2010/978451}, unit= {ciss}, keywords = SIC09 }
- F. D. Lapierre, A. Borghgraef, and M. Vandewal, “Statistical Real-time Model for Performance Prediction of Ship Detection from Microsatellite Electro-Optical Imagers," EURASIP Journal on Applied Signal Processing (JASP), 2010.
[BibTeX] [DOI]@article{Lapierre2010EURASIP, year = {2010}, author = {Fabian D. Lapierre and Alexander Borghgraef and Marijke Vandewal}, title = {Statistical Real-time Model for Performance Prediction of Ship Detection from Microsatellite Electro-Optical Imagers}, journal = {EURASIP Journal on Applied Signal Processing (JASP)}, eid = {475948}, numpages = {15}, doi = {10.1155/2010/475948}, unit= {ciss}, keywords = SIC09 }
2009
- D. Doroftei, E. Colon, Y. Baudoin, and H. Sahli, “Development of a behaviour-based control and software architecture for a visually guided mine detection robot," European Journal of Automated Systems (JESA), vol. 43, iss. 3, p. 295–314, 2009.
[BibTeX] [Abstract] [Download PDF]
Humanitarian demining is a labor-intensive and high-risk activity which could benefit from the development of a humanitarian mine detection robot, capable of scanning a minefield semi-automatically. The design of such outdoor autonomous robots requires the consideration and integration of multiple aspects: sensing, data fusion, path and motion planning, and robot control, embedded in a control and software architecture. This paper focuses on three main aspects of the design process: visual sensing using stereo and image motion analysis, design of a behaviour-based control architecture, and implementation of a modular software architecture.
@Article{doro2009development, author = {Doroftei, Daniela and Colon, Eric and Baudoin, Yvan and Sahli, Hichem}, journal = {European Journal of Automated Systems ({JESA})}, title = {Development of a behaviour-based control and software architecture for a visually guided mine detection robot}, year = {2009}, volume = {43}, number = {3}, abstract = { Humanitarian demining is a labor-intensive and high-risk which could benefit from the development of a humanitarian mine detection robot, capable of scanning a minefield semi-automatically. The design of such an outdoor autonomous robots requires the consideration and integration of multiple aspects: sensing, data fusion, path and motion planning and robot control embedded in a control and software architecture. This paper focuses on three main aspects of the design process: visual sensing using stereo and image motion analysis, design of a behaviour-based control architecture and implementation of a modular software architecture.}, pages = {295--314}, project = {Mobiniss, ViewFinder}, url = {http://mecatron.rma.ac.be/pub/2009/doc-article-hermes.pdf}, unit= {meca-ras} }
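The stereo sensing mentioned in the abstract above ultimately rests on the pinhole stereo relation Z = f·B/d. A minimal numpy sketch of that conversion follows; it is illustrative only, and the paper's stereo pipeline of course also involves matching and filtering steps not shown here.

```python
import numpy as np

def disparity_to_depth(disparity_px, focal_px, baseline_m):
    """Classic pinhole stereo relation Z = f * B / d (illustrative sketch).

    disparity_px : array of disparities in pixels (0 or less where unmatched).
    focal_px     : focal length in pixels.
    baseline_m   : distance between the two cameras in metres.
    """
    d = np.asarray(disparity_px, dtype=float)
    depth = np.full_like(d, np.inf)        # unmatched pixels get "infinite" depth
    valid = d > 0
    depth[valid] = focal_px * baseline_m / d[valid]
    return depth
```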
2008
- D. Doroftei, E. Colon, and G. De Cubber, “A Behaviour-Based Control and Software Architecture for the Visually Guided Robudem Outdoor Mobile Robot," Journal of Automation Mobile Robotics and Intelligent Systems, vol. 2, iss. 4, p. 19–24, 2008.
[BibTeX] [Abstract] [Download PDF]
The design of outdoor autonomous robots requires the careful consideration and integration of multiple aspects: sensors and sensor data fusion, design of a control and software architecture, design of a path planning algorithm, and robot control. This paper describes partial aspects of this research work, which is aimed at developing a semi-autonomous outdoor robot for risky interventions. It focuses on three main aspects of the design process: visual sensing using stereo vision and image motion analysis, design of a behaviour-based control architecture, and implementation of a modular software architecture.
@Article{doroftei2008behaviour, author = {Doroftei, Daniela and Colon, Eric and De Cubber, Geert}, journal = {Journal of Automation Mobile Robotics and Intelligent Systems}, title = {A Behaviour-Based Control and Software Architecture for the Visually Guided Robudem Outdoor Mobile Robot}, year = {2008}, issn = {1897-8649}, month = oct, number = {4}, pages = {19--24}, volume = {2}, abstract = {The design of outdoor autonomous robots requires the careful consideration and integration of multiple aspects: sensors and sensor data fusion, design of a control and software architecture, design of a path planning algorithm and robot control. This paper describes partial aspects of this research work, which is aimed at developing a semiautonomous outdoor robot for risky interventions. This paper focuses on three main aspects of the design process: visual sensing using stereo vision and image motion analysis, design of a behaviourbased control architecture and implementation of modular software architecture.}, project = {ViewFinder, Mobiniss}, url = {http://mecatron.rma.ac.be/pub/2008/XXX JAMRIS No8 - Doroftei.pdf}, unit= {meca-ras} }
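The "image motion analysis" part of the sensing described above is typically obtained from dense optical flow. As a small, generic example using OpenCV's Farneback method (the parameter values are common defaults, not those of the paper):

```python
import cv2
import numpy as np

def dense_flow(prev_gray, cur_gray):
    """Dense image motion between two 8-bit greyscale frames.

    Returns the per-pixel flow field plus its magnitude and direction,
    which a motion-analysis module could feed to the control layer.
    """
    flow = cv2.calcOpticalFlowFarneback(prev_gray, cur_gray, None,
                                        pyr_scale=0.5, levels=3, winsize=15,
                                        iterations=3, poly_n=5, poly_sigma=1.2,
                                        flags=0)
    magnitude, angle = cv2.cartToPolar(flow[..., 0], flow[..., 1])
    return flow, magnitude, angle
```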
- G. De Cubber, “Dense 3D structure and motion estimation as an aid for robot navigation," Journal of Automation Mobile Robotics and Intelligent Systems, vol. 2, iss. 4, p. 14–18, 2008.
[BibTeX] [Abstract] [Download PDF]
Three-dimensional scene reconstruction is an important tool in many applications varying from computer graphics to mobile robot navigation. In this paper, we focus on the robotics application, where the goal is to estimate the 3D rigid motion of a mobile robot and to reconstruct a dense three-dimensional scene representation. The reconstruction problem can be subdivided into a number of subproblems. First, the egomotion has to be estimated. For this, the camera (or robot) motion parameters are iteratively estimated by reconstruction of the epipolar geometry. Secondly, a dense depth map is calculated by fusing sparse depth information from point features and dense motion information from the optical flow in a variational framework. This depth map corresponds to a point cloud in 3D space, which can then be converted into a model to extract information for the robot navigation algorithm. Here, we present an integrated approach for the structure and egomotion estimation problem.
@Article{DeCubber2008, author = {De Cubber, Geert}, journal = {Journal of Automation Mobile Robotics and Intelligent Systems}, title = {Dense {3D} structure and motion estimation as an aid for robot navigation}, year = {2008}, issn = {1897-8649}, month = oct, number = {4}, pages = {14--18}, volume = {2}, abstract = {Three-dimensional scene reconstruction is an important tool in many applications varying from computer graphics to mobile robot navigation. In this paper, we focus on the robotics application, where the goal is to estimate the 3D rigid motion of a mobile robot and to reconstruct a dense three-dimensional scene representation. The reconstruction problem can be subdivided into a number of subproblems. First, the egomotion has to be estimated. For this, the camera (or robot) motion parameters are iteratively estimated by reconstruction of the epipolar geometry. Secondly, a dense depth map is calculated by fusing sparse depth information from point features and dense motion information from the optical flow in a variational framework. This depth map corresponds to a point cloud in 3D space, which can then be converted into a model to extract information for the robot navigation algorithm. Here, we present an integrated approach for the structure and egomotion estimation problem.}, project = {ViewFinder,Mobiniss}, url = {http://www.jamris.org/images/ISSUES/ISSUE-2008-04/002 JAMRIS No8 - De Cubber.pdf}, unit= {meca-ras} }
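The first subproblem listed in the abstract above, egomotion estimation via the epipolar geometry, can be sketched with standard OpenCV calls, assuming calibrated intrinsics K and already-matched feature points. The paper's own variational fusion of sparse depth and optical flow is not shown here.

```python
import cv2
import numpy as np

def estimate_egomotion(pts_prev, pts_cur, K):
    """Recover the relative camera rotation R and translation direction t
    between two consecutive frames from matched image points.

    pts_prev, pts_cur : Nx2 float arrays of matched pixel coordinates.
    K                 : 3x3 camera intrinsic matrix.
    Note: from a monocular sequence, t is only known up to scale.
    """
    E, inliers = cv2.findEssentialMat(pts_prev, pts_cur, K,
                                      method=cv2.RANSAC,
                                      prob=0.999, threshold=1.0)
    _, R, t, _ = cv2.recoverPose(E, pts_prev, pts_cur, K, mask=inliers)
    return R, t, inliers
```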
2007
- E. Colon, G. De Cubber, H. Ping, J. Habumuremyi, H. Sahli, and Y. Baudoin, “Integrated Robotic systems for Humanitarian Demining," International Journal of Advanced Robotic Systems, vol. 4, iss. 2, p. 24, 2007.
[BibTeX] [Abstract] [Download PDF] [DOI]
This paper summarises the main results of 10 years of research and development in Humanitarian Demining. The Hudem project focuses on mine detection systems and aims at providing different solutions to support mine detection operations. Robots using different kinds of locomotion systems have been designed and tested on dummy minefields. In order to control these robots, software interfaces, control algorithms, visual positioning and terrain following systems have also been developed. Typical data acquisition results obtained during trial campaigns with robots and data acquisition systems are reported. Lessons learned during the project and future work conclude this paper.
@Article{colon2007integrated, author = {Colon, Eric and De Cubber, Geert and Ping, Hong and Habumuremyi, Jean-Claude and Sahli, Hichem and Baudoin, Yvan}, journal = {International Journal of Advanced Robotic Systems}, title = {Integrated Robotic systems for Humanitarian Demining}, year = {2007}, month = jun, number = {2}, pages = {24}, volume = {4}, abstract = {This paper summarises the main results of 10 years of research and development in Humanitarian Demining. The Hudem project focuses on mine detection systems and aims at provided different solutions to support the mine detection operations. Robots using different kind of locomotion systems have been designed and tested on dummy minefields. In order to control these robots, software interfaces, control algorithms, visual positioning and terrain following systems have also been developed. Typical data acquisition results obtained during trial campaigns with robots and data acquisition systems are reported. Lessons learned during the project and future work conclude this paper.}, doi = {10.5772/5694}, publisher = {{SAGE} Publications}, project = {Mobiniss}, url = {http://mecatron.rma.ac.be/pub/2007/10.1.1.691.7544.pdf}, unit= {meca-ras} }
2006
- G. De Cubber, V. Enescu, H. Sahli, E. Demeester, M. Nuttin, and D. Vanhooydonck, “Active stereo vision-based mobile robot navigation for person tracking," Integrated Computer-Aided Engineering, vol. 13, p. 203–222, 2006.
[BibTeX] [Abstract] [Download PDF]
In this paper, we propose a mobile robot architecture for person tracking, consisting of an active stereo vision module (ASVM) and a navigation module (NM). The first uses a stereo head equipped with a pan-tilt mechanism to track a moving target (selected by an operator) and keep it centered in the visual field. Its output, i.e. the 3D position of the person, is fed to the NM, which drives the robot towards the target while avoiding obstacles. For this, a hybrid navigation algorithm is adopted with a reactive part that efficiently reacts to the most recent sensor data, and a deliberative part that generates a globally optimal path to a target destination, such as the person’s location. As a peculiarity of the system, there is no feedback from the NM or the robot motion controller (RMC) to the ASVM. While this imparts flexibility in combining the ASVM with a wide range of robot platforms, it puts considerable strain on the ASVM. Indeed, besides the changes in the target dynamics, it has to cope with the robot motion during obstacle avoidance. These disturbances are accommodated via a suitable stochastic dynamic model for the stereo head-target system. Robust tracking is achieved by combining a color-based particle filter with a method to update the color model of the target under changing illumination conditions. The main contributions of this paper lie in (1) devising a robust color-based 3D target tracking method, (2) proposing a hybrid deliberative/reactive navigation scheme, and (3) integrating them on a wheelchair platform for the final goal of person following. Experimental results are presented for ASVM separately and in combination with a wheelchair platform-based implementation of the NM.
@Article{2c2cd28d2aea4009ae0135448c005050, author = {De Cubber, Geert and Valentin Enescu and Hichem Sahli and Eric Demeester and Marnix Nuttin and Dirk Vanhooydonck}, journal = {Integrated Computer-Aided Engineering}, title = {Active stereo vision-based mobile robot navigation for person tracking}, year = {2006}, issn = {1069-2509}, month = jul, note = {Integrated Computer-Aided Engineering, Vol. ?, Nr. ?, pp. ?, .}, pages = {203--222}, volume = {13}, abstract = {In this paper, we propose a mobile robot architecture for person tracking, consisting of an active stereo vision module (ASVM) and a navigation module (NM). The first uses a stereo head equipped with a pan-tilt mechanism to track a moving target (selected by an operator) and keep it centered in the visual field. Its output, i.e. the 3D position of the person, is fed to the NM, which drives the robot towards the target while avoiding obstacles. For this, a hybrid navigation algorithm is adopted with a reactive part that efficiently reacts to the most recent sensor data, and a deliberative part that generates a globally optimal path to a target destination, such as the person's location. As a peculiarity of the system, there is no feedback from the NM or the robot motion controller (RMC) to the ASVM. While this imparts flexibility in combining the ASVM with a wide range of robot platforms, it puts considerable strain on the ASVM. Indeed, besides the changes in the target dynamics, it has to cope with the robot motion during obstacle avoidance. These disturbances are accommodated via a suitable stochastic dynamic model for the stereo head-target system. Robust tracking is achieved by combining a color-based particle filter with a method to update the color model of the target under changing illumination conditions. The main contributions of this paper lie in (1) devising a robust color-based 3D target tracking method, (2) proposing a hybrid deliberative/reactive navigation scheme, and (3) integrating them on a wheelchair platform for the final goal of person following. Experimental results are presented for ASVM separately and in combination with a wheelchair platform-based implementation of the NM.}, day = {24}, keywords = {mobile robot, active vision, stereo, navigation}, language = {English}, project = {Mobiniss, ViewFinder}, publisher = {IOS Press}, unit= {meca-ras,vub-etro}, url = {https://cris.vub.be/en/publications/active-stereo-visionbased-mobile-robot-navigation-for-person-tracking(2c2cd28d-2aea-4009-ae01-35448c005050)/export.html}, }
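A minimal sketch of the colour-based particle filter class used by the tracking module described above follows. The `colour_likelihood` scorer is an assumed callable, and the paper's adaptive colour-model update under changing illumination is deliberately omitted.

```python
import numpy as np

def particle_filter_step(particles, weights, colour_likelihood, motion_std=5.0):
    """One predict/update/resample cycle of a colour-based particle filter.

    particles         : Nx2 array of image-plane target hypotheses.
    weights           : length-N array summing to one.
    colour_likelihood : callable mapping an Nx2 array of positions to
                        per-particle colour similarity scores (assumed).
    """
    n = len(particles)
    # Predict: random-walk motion model.
    particles = particles + np.random.normal(0.0, motion_std, particles.shape)
    # Update: weight particles by colour similarity to the target model.
    weights = weights * colour_likelihood(particles)
    weights = weights / (weights.sum() + 1e-12)
    # Resample (systematic) when the effective sample size degenerates.
    if 1.0 / (weights ** 2).sum() < n / 2:
        positions = (np.arange(n) + np.random.uniform()) / n
        idx = np.minimum(np.searchsorted(np.cumsum(weights), positions), n - 1)
        particles, weights = particles[idx], np.full(n, 1.0 / n)
    estimate = np.average(particles, axis=0, weights=weights)
    return particles, weights, estimate
```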
2004
- G. De Cubber, S. A. Berrabah, and H. Sahli, “Color-based visual servoing under varying illumination conditions," Robotics and Autonomous Systems, vol. 47, iss. 4, p. 225–249, 2004.
[BibTeX] [Abstract] [Download PDF] [DOI]
Visual servoing, or the control of motion on the basis of image analysis in a closed loop, is increasingly recognized as an important tool in modern robotics. Here, we present a new model-driven approach to derive a description of the motion of a target object. This method can be subdivided into an illumination-invariant target detection stage and a servoing process which uses an adaptive Kalman filter to update the model of the non-linear system. This technique can be applied to any pan-tilt-zoom camera mounted on a mobile vehicle as well as to a static camera tracking moving environmental features.
@Article{de2004color, author = {De Cubber, Geert and Berrabah, Sid Ahmed and Sahli, Hichem}, journal = {Robotics and Autonomous Systems}, title = {Color-based visual servoing under varying illumination conditions}, year = {2004}, month = jul, number = {4}, pages = {225--249}, volume = {47}, abstract = {Visual servoing, or the control of motion on the basis of image analysis in a closed loop, is more and more recognized as an important tool in modern robotics. Here, we present a new model driven approach to derive a description of the motion of a target object. This method can be subdivided into an illumination invariant target detection stage and a servoing process which uses an adaptive Kalman filter to update the model of the non-linear system. This technique can be applied to any pan tilt zoom camera mounted on a mobile vehicle as well as to a static camera tracking moving environmental features.}, doi = {10.1016/j.robot.2004.03.015}, publisher = {Elsevier {BV}}, project = {Mobiniss}, url = {https://www.sciencedirect.com/science/article/abs/pii/S0921889004000570}, unit= {meca-ras,vub-etro} }
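As a stripped-down illustration of the filtering step described above, here is a plain constant-velocity Kalman filter on the target's image position. The adaptive noise estimation that makes the paper's filter suitable for the non-linear pan-tilt-zoom system is deliberately left out, and all noise values below are made-up.

```python
import numpy as np

class ConstantVelocityKF:
    """Linear Kalman filter on state [x, y, vx, vy] for a tracked image-plane
    target; the paper's adaptive tuning of Q and R is not shown."""

    def __init__(self, dt=1.0, q=1e-2, r=4.0):
        self.F = np.eye(4); self.F[0, 2] = self.F[1, 3] = dt   # motion model
        self.H = np.eye(2, 4)                                   # we observe (x, y)
        self.Q, self.R = q * np.eye(4), r * np.eye(2)
        self.x, self.P = np.zeros(4), np.eye(4) * 100.0

    def predict(self):
        self.x = self.F @ self.x
        self.P = self.F @ self.P @ self.F.T + self.Q
        return self.x[:2]

    def update(self, z):
        y = np.asarray(z, float) - self.H @ self.x              # innovation
        S = self.H @ self.P @ self.H.T + self.R
        K = self.P @ self.H.T @ np.linalg.inv(S)                # Kalman gain
        self.x = self.x + K @ y
        self.P = (np.eye(4) - K @ self.H) @ self.P
        return self.x[:2]
```

Typical usage alternates `kf.predict()` once per frame with `kf.update((u, v))` whenever the colour-based detector returns a target position.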