Conference Papers

Disclaimer: The papers below are intended for private viewing by the page owner or those who otherwise have legitimate access to them. No part of them may, in any form or by any electronic, mechanical, photocopying, recording, or other means, be reproduced, stored in a retrieval system, or broadcast or transmitted without the prior permission of the respective publishers. If your organization has a valid subscription to the journals, click on the DOI link for the legitimate copy of the papers.

2024

  • Y. Geers, T. Willems, C. Nita, T-T. Nguyen, and J. Aelterman, “Maritime surveillance using unmanned vehicles: deep learning-based vessel re-identification," in 2024 SPIE: Security + Defence, Artificial Intelligence for Security and Defence Applications II, 2024.
    [BibTeX] [Download PDF] [DOI]
    @inproceedings{spie2024,
    title     = {Maritime surveillance using unmanned vehicles: deep learning-based vessel re-identification},
    author    = {Geers, Yoni and Willems, Tim and Nita, Cornelia and Nguyen, T-T. and Aelterman, Jan},
    booktitle = {2024 SPIE: Security + Defence, Artificial Intelligence for Security and Defence Applications II},
    publisher = {SPIE},
    year      = {2024},
    volume    = {13206},
    location  = {Edinburgh, United Kingdom},
    doi       = {10.1117/12.3028805},
    url       = {https://www.spiedigitallibrary.org/conference-proceedings-of-spie/13206/3028805/Maritime-surveillance-using-unmanned-vehicles--deep-learning-based-vessel/10.1117/12.3028805.full},
    unit      = {meca-ras},
    project   = {MarLand}
    }

  • Z. Chekakta, N. Aouf, S. Govindaraj, F. Polisano, and G. De Cubber, “Towards Learning-Based Distributed Task Allocation Approach for Multi-Robot System," in 2024 10th International Conference on Automation, Robotics and Applications (ICARA), 2024, pp. 34-39.
    [BibTeX] [DOI]
    @inproceedings{10553196,
    author    = {Chekakta, Zakaria and Aouf, Nabil and Govindaraj, Shashank and Polisano, Fabio and De Cubber, Geert},
    booktitle = {2024 10th International Conference on Automation, Robotics and Applications (ICARA)},
    title     = {Towards Learning-Based Distributed Task Allocation Approach for Multi-Robot System},
    year      = {2024},
    pages     = {34--39},
    keywords  = {Sequential analysis;Automation;Accuracy;Robot kinematics;Prediction algorithms;Approximation algorithms;Resource management;Task Allocation;Multirobot System;Distributed Algorithms;Graph Convolutional Neural Networks},
    doi       = {10.1109/ICARA60736.2024.10553196},
    unit      = {meca-ras},
    project   = {AIDED}
    }

  • A. Miuccio, R. Manríquez-Cisterna, A. A. Ravankar, J. Victorio Salazar Luces, Y. Hirata, and P. Rocco, “A B-spline Approach for Improved Environmental Awareness in Virtual Walking System using Avatar Robot," in 2024 33rd IEEE International Conference on Robot and Human Interactive Communication (ROMAN), 2024, pp. 189-195.
    [BibTeX] [DOI]
    @inproceedings{10731425,
    author    = {Miuccio, Alessandra and Manríquez-Cisterna, Ricardo and Ravankar, Ankit A. and Victorio Salazar Luces, Jose and Hirata, Yasuhisa and Rocco, Paolo},
    booktitle = {2024 33rd IEEE International Conference on Robot and Human Interactive Communication (ROMAN)},
    title     = {A B-spline Approach for Improved Environmental Awareness in Virtual Walking System using Avatar Robot},
    year      = {2024},
    pages     = {189--195},
    keywords  = {Legged locomotion;Navigation;Avatars;Aerospace electronics;Cameras;Real-time systems;User experience;Safety;Splines (mathematics);Robots},
    doi       = {10.1109/RO-MAN60168.2024.10731425}
    }

  • A. Borghgraef, M. Vandewal, and G. De Cubber, “COURAGEOUS: test methods for counter-UAS systems," in Proceedings SPIE Sensors + Imaging, Target and Background Signatures X: Traditional Methods and Artificial Intelligence, 2024, p. 131990D.
    [BibTeX] [Download PDF] [DOI]
    @inproceedings{spie_alex,
    title        = {COURAGEOUS: test methods for counter-UAS systems},
    author       = {Borghgraef, Alexander and Vandewal, Marijke and De Cubber, Geert},
    year         = {2024},
    booktitle    = {Proceedings SPIE Sensors + Imaging, Target and Background Signatures X: Traditional Methods and Artificial Intelligence},
    publisher    = {SPIE},
    location     = {Edinburgh, United Kingdom},
    unit         = {meca-ras, ciss},
    project      = {COURAGEOUS},
    volume       = {13199},
    editor       = {Stein, Karin and Schleijpen, Ric},
    organization = {International Society for Optics and Photonics},
    pages        = {131990D},
    keywords     = {counter-UAS, drone, border protection, standardization, measurement campaign, law enforcement, DTI, evaluation methods},
    doi          = {10.1117/12.3030928}
    }

  • A. M. Casado Fauli, M. Malizia, K. Hasselmann, E. Le Flécher, G. De Cubber, and B. Lauwens, “HADRON: Human-friendly Control and Artificial Intelligence for Military Drone Operations," in Proceedings of the 33rd IEEE International Conference on Robot and Human Interactive Communication, IEEE RO-MAN 2024, 2024.
    [BibTeX] [Download PDF]
    @inproceedings{fauli2024hadronhumanfriendlycontrolartificial,
    title         = {HADRON: Human-friendly Control and Artificial Intelligence for Military Drone Operations},
    author        = {Casado Fauli, Ana Maria and Malizia, Mario and Hasselmann, Ken and Le Flécher, Emile and De Cubber, Geert and Lauwens, Ben},
    year          = {2024},
    booktitle     = {Proceedings of the 33rd IEEE International Conference on Robot and Human Interactive Communication, IEEE RO-MAN 2024},
    publisher     = {IEEE},
    location      = {Pasadena, USA},
    unit          = {meca-ras},
    project       = {HADRON},
    eprint        = {2408.07063},
    archiveprefix = {arXiv},
    primaryclass  = {cs.RO},
    url           = {https://arxiv.org/abs/2408.07063}
    }

  • M. Malizia, A. M. Casado Fauli, K. Hasselmann, E. Le Flécher, G. De Cubber, and R. Haelterman, “Assisted Explosive Ordnance Disposal: Teleoperated Robotic Systems with AI, Virtual Reality, and Semi-Autonomous Manipulation for Safer Demining Operations," in 20th International Symposium Mine Action, 2024, pp. 52-55.
    [BibTeX] [Download PDF]
    @inproceedings{maliziamineact2024,
    title     = {Assisted Explosive Ordnance Disposal: Teleoperated Robotic Systems with AI, Virtual Reality, and Semi-Autonomous Manipulation for Safer Demining Operations},
    author    = {Malizia, Mario and Casado Fauli, Ana Maria and Hasselmann, Ken and Le Flécher, Emile and De Cubber, Geert and Haelterman, Rob},
    booktitle = {20th International Symposium Mine Action},
    publisher = {CTRO-HR},
    year      = {2024},
    pages     = {52--55},
    location  = {Cavtat, Croatia},
    unit      = {meca-ras},
    url       = {https://www.ctro.hr/userfiles/files/MINE%20ACTION_2024_ONLIINE.pdf},
    project   = {BELGIAN}
    }

  • K. Hasselmann, M. Malizia, R. Caballero, F. Polisano, S. Govindaraj, J. Stigler, O. Ilchenko, M. Bajic, and G. De Cubber, “A multi-robot system for the detection of explosive devices," in IEEE ICRA Workshop on Field Robotics, 2024.
    [BibTeX] [Download PDF] [DOI]
    @inproceedings{Hasselmannetal2024ICRAWSFRO,
    author    = {Hasselmann, Ken and Malizia, Mario and Caballero, Rafael and Polisano, Fabio and Govindaraj, Shashank and Stigler, Jakob and Ilchenko, Oleksii and Bajic, Milan and De Cubber, Geert},
    title     = {A multi-robot system for the detection of explosive devices},
    booktitle = {IEEE ICRA Workshop on Field Robotics},
    year      = {2024},
    doi       = {10.48550/ARXIV.2404.14167},
    url       = {https://arxiv.org/abs/2404.14167},
    unit      = {meca-ras},
    project   = {AIDED, AIDEDEX}
    }

  • T-T. Nguyen, A. Crismer, G. De Cubber, B. Janssens, and H. Bruyninckx, “Landing UAV on Moving Surface Vehicle: Visual Tracking and Motion Prediction of Landing Deck," in 2024 IEEE/SICE International Symposium on System Integration (SII)., 2024.
    [BibTeX] [Download PDF] [DOI]
    @inproceedings{sii2024,
    title     = {Landing UAV on Moving Surface Vehicle: Visual Tracking and Motion Prediction of Landing Deck},
    author    = {Nguyen, T-T. and Crismer, A. and De Cubber, G. and Janssens, B. and Bruyninckx, H.},
    booktitle = {2024 IEEE/SICE International Symposium on System Integration (SII)},
    publisher = {IEEE},
    year      = {2024},
    location  = {Ha Long, Vietnam},
    unit      = {meca-ras},
    doi       = {10.1109/SII58957.2024.10417303},
    url       = {https://drive.google.com/file/d/1UiF4uPF9VkxgMxMX_JCBOFiZJPmoDSRv/view?usp=drive_link},
    project   = {MarLand}
    }

2023

  • Timothée Fréville, C. Hamesse., B. Pairet., and R. Haelterman., “Object Detection in Floor Plans for Automated VR Environment Generation," in Proceedings of the 18th International Joint Conference on Computer Vision, Imaging and Computer Graphics Theory and Applications (VISIGRAPP 2023) – Volume 5: VISAPP, 2023, pp. 480-486.
    [BibTeX] [DOI]
    @inproceedings{visapp23,
    author       = {Fréville, Timothée and Hamesse, Charles and Pairet, Benoît and Haelterman, Rob},
    title        = {Object Detection in Floor Plans for Automated VR Environment Generation},
    booktitle    = {Proceedings of the 18th International Joint Conference on Computer Vision, Imaging and Computer Graphics Theory and Applications (VISIGRAPP 2023) - Volume 5: VISAPP},
    year         = {2023},
    pages        = {480--486},
    publisher    = {SciTePress},
    organization = {INSTICC},
    doi          = {10.5220/0011629300003417},
    isbn         = {978-989-758-634-7},
    issn         = {2184-4321}
    }

  • T. Fréville, C. Hamesse, B. Pairet, and R. Haelterman, “AI to generate VR worlds from floorplans," in Defence Training Technology Exhibition and Technical Conference (IT²EC), 2023.
    [BibTeX]
    @inproceedings{IT2EC2023a,
    author    = {Fréville, Timothée and Hamesse, Charles and Pairet, Benoit and Haelterman, Rob},
    year      = {2023},
    month     = apr,
    title     = {AI to generate VR worlds from floorplans},
    booktitle = {Defence Training Technology Exhibition and Technical Conference (IT²EC)}
    }

  • B. Pairet, C. Hamesse, T. Fréville, and H. Rob, “From maps to 3D environments, automatically," in Defence Training Technology Exhibition and Technical Conference (IT²EC), 2023.
    [BibTeX]
    @inproceedings{IT2EC2023b,
    author    = {Pairet, Benoit and Hamesse, Charles and Fréville, Timothée and Haelterman, Rob},
    year      = {2023},
    month     = apr,
    booktitle = {Defence Training Technology Exhibition and Technical Conference (IT²EC)},
    title     = {From maps to 3D environments, automatically}
    }

  • C. Hamesse, T. Fréville, B. Pairet, and R. Haelterman, “Towards Dense 3D Mapping with Portable Systems," in Defence Training Technology Exhibition and Technical Conference (IT²EC), 2023.
    [BibTeX]
    @inproceedings{IT2EC2023c,
    author    = {Hamesse, Charles and Fréville, Timothée and Pairet, Benoit and Haelterman, Rob},
    year      = {2023},
    month     = apr,
    booktitle = {Defence Training Technology Exhibition and Technical Conference (IT²EC)},
    title     = {Towards Dense 3D Mapping with Portable Systems}
    }

  • G. De Cubber, P. Petsioti, R. Roman, A. Mohamoud, I. Maza, and C. Church, “The COURAGEOUS project efforts towards standardized test methods for assessing the performance of counter-drone solutions," in Proceedings of the 11th biennial Symposium on Non-Lethal Weapons, 2023, p. 44.
    [BibTeX] [Download PDF]
    @inproceedings{decubbercuas2023,
    title     = {The COURAGEOUS project efforts towards standardized test methods for assessing the performance of counter-drone solutions},
    author    = {De Cubber, Geert and Petsioti, P. and Roman, Razvan and Mohamoud, Ali and Maza, Ivan and Church, Christopher},
    booktitle = {Proceedings of the 11th biennial Symposium on Non-Lethal Weapons},
    publisher = {European Working Group on Non-Lethal Weapons},
    year      = {2023},
    location  = {Brussels, Belgium},
    unit      = {meca-ras},
    url       = {https://mecatron.rma.ac.be/pub/2024/Towards%20standardized%20test%20methods%20for%20assessing%20the%20performance%20of%20counter-drone%20solutions.pdf},
    pages     = {44},
    project   = {COURAGEOUS}
    }

  • G. De Cubber, E. Le Flécher, A. La Grappe, E. Ghisoni, E. Maroulis, P. Ouendo, D. Hawari, and D. Doroftei, “Dual Use Security Robotics: A Demining, Resupply and Reconnaissance Use Case," in IEEE International Conference on Safety, Security, and Rescue Robotics, 2023.
    [BibTeX] [Download PDF]
    @inproceedings{ssrr2023decubber,
    title     = {Dual Use Security Robotics: A Demining, Resupply and Reconnaissance Use Case},
    author    = {De Cubber, Geert and Le Flécher, Emile and La Grappe, Alexandre and Ghisoni, Enzo and Maroulis, Emmanouil and Ouendo, Pierre-Edouard and Hawari, Danial and Doroftei, Daniela},
    booktitle = {IEEE International Conference on Safety, Security, and Rescue Robotics},
    editor    = {Kimura, Tetsuya},
    publisher = {IEEE},
    year      = {2023},
    volume    = {1},
    project   = {AIDED, iMUGs, CUGS},
    location  = {Fukushima, Japan},
    unit      = {meca-ras},
    url       = {https://mecatron.rma.ac.be/pub/2023/SSRR2023-DeCubber.pdf}
    }

  • T-T. Nguyen, L. Somers, J. Van den Bosch, G. De Cubber, B. Janssens, and H. Bruyninckx, “Affordable and Customizable Research and Educational Aerial and Surface Vehicles Robot Platforms – first implementation," in 17th Mechatronics Forum International Conference., 2023.
    [BibTeX] [Download PDF]
    @inproceedings{mechatronics20203usv,
    title     = {Affordable and Customizable Research and Educational Aerial and Surface Vehicles Robot Platforms – first implementation},
    author    = {Nguyen, T-T. and Somers, L. and Van den Bosch, J. and De Cubber, G. and Janssens, B. and Bruyninckx, H.},
    booktitle = {17th Mechatronics Forum International Conference},
    year      = {2023},
    location  = {Leuven, Belgium},
    unit      = {meca-ras},
    url       = {https://mechatronics2023.eu/wp-content/uploads/2023/09/MX_2023_session_3_paper_3_nguyen.pdf},
    project   = {MarLand}
    }

  • T-T. Nguyen, J. Duverger, G. De Cubber, B. Janssens, and H. Bruyninckx, “Development of Dual-function Adaptive Landing Gear and Gripper for Unmanned Aerial Vehicles," in 17th Mechatronics Forum International Conference., 2023.
    [BibTeX] [Download PDF]
    @inproceedings{mechatronics20203gripper,
    title     = {Development of Dual-function Adaptive Landing Gear and Gripper for Unmanned Aerial Vehicles},
    author    = {Nguyen, T-T. and Duverger, J. and De Cubber, G. and Janssens, B. and Bruyninckx, H.},
    booktitle = {17th Mechatronics Forum International Conference},
    year      = {2023},
    location  = {Leuven, Belgium},
    unit      = {meca-ras},
    url       = {https://mechatronics2023.eu/wp-content/uploads/2023/09/MX_2023_session_3_paper_1_nguyen.pdf},
    project   = {MarLand}
    }

  • G. De Cubber, E. Le Flécher, A. Dominicus, and D. Doroftei, “Human-agent teaming between soldiers and unmanned ground systems in a resupply scenario," in Human Factors in Robots, Drones and Unmanned Systems. AHFE (2023) International Conference., 2023.
    [BibTeX] [Abstract] [Download PDF] [DOI]

    Thanks to advances in embedded computing and robotics, intelligent Unmanned Ground Systems (UGS) are used more and more in our daily lives. Also in the military domain, the use of UGS is highly investigated for applications like force protection of military installations, surveillance, target acquisition, reconnaissance, handling of chemical, biological, radiological, nuclear (CBRN) threats, explosive ordnance disposal, etc. A pivotal research aspect for the integration of these military UGS in the standard operating procedures is the question of how to achieve a seamless collaboration between human and robotic agents in such high-stress and non-structured environments. Indeed, in these kind of operations, it is critical that the human-agent mutual understanding is flawless; hence, the focus on human factors and ergonomic design of the control interfaces.The objective of this paper is to focus on one key military application of UGS, more specifically logistics, and elaborate how efficient human-machine teaming can be achieved in such a scenario. While getting much less attention than other application areas, the domain of logistics is in fact one of the most important for any military operation, as it is an application area that is very well suited for robotic systems. Indeed, military troops are very often burdened by having to haul heavy gear across large distances, which is a problem UGS can solve.The significance of this paper is that it is based on more than two years of field research work on human + multi-agent UGS collaboration in realistic military operating conditions, performed within the scope of the European project iMUGS. In the framework of this project, not less than six large-scale field trial campaigns were organized across Europe. In each field trial campaign, soldiers and UGS had to work together to achieve a set of high-level mission goals that were distributed among them via a planning & scheduling mechanism. 
This paper will focus on the outcomes of the Belgian field trial, which concentrated on a resupply logistics mission.Within this paper, a description of the iMUGS test setup and operational scenarios is provided. The ergonomic design of the tactical planning system is elaborated, together with the high-level swarming and task scheduling methods that divide the work between robotic and human agents in the fieldThe resupply mission, as described in this paper, was executed in summer 2022 in Belgium by a mixed team of soldiers and UGS for an audience of around 200 people from defence actors from European member states. The results of this field trial were evaluated as highly positive, as all high-level requirements were obtained by the robotic fleet.

    @inproceedings{ahfe20203decubber,
    title     = {Human-agent teaming between soldiers and unmanned ground systems in a resupply scenario},
    author    = {De Cubber, G. and Le Flécher, E. and Dominicus, A. and Doroftei, D.},
    booktitle = {Human Factors in Robots, Drones and Unmanned Systems. AHFE (2023) International Conference},
    editor    = {Ahram, Tareq and Karwowski, Waldemar},
    publisher = {AHFE Open Access, AHFE International, USA},
    year      = {2023},
    volume    = {93},
    project   = {iMUGs},
    location  = {San Francisco, USA},
    unit      = {meca-ras},
    doi       = {10.54941/ahfe1003746},
    url       = {https://openaccess.cms-conferences.org/publications/book/978-1-958651-69-8/article/978-1-958651-69-8_5},
    abstract  = {Thanks to advances in embedded computing and robotics, intelligent Unmanned Ground Systems (UGS) are used more and more in our daily lives. Also in the military domain, the use of UGS is highly investigated for applications like force protection of military installations, surveillance, target acquisition, reconnaissance, handling of chemical, biological, radiological, nuclear (CBRN) threats, explosive ordnance disposal, etc. A pivotal research aspect for the integration of these military UGS in the standard operating procedures is the question of how to achieve a seamless collaboration between human and robotic agents in such high-stress and non-structured environments. Indeed, in these kind of operations, it is critical that the human-agent mutual understanding is flawless; hence, the focus on human factors and ergonomic design of the control interfaces.The objective of this paper is to focus on one key military application of UGS, more specifically logistics, and elaborate how efficient human-machine teaming can be achieved in such a scenario. While getting much less attention than other application areas, the domain of logistics is in fact one of the most important for any military operation, as it is an application area that is very well suited for robotic systems. Indeed, military troops are very often burdened by having to haul heavy gear across large distances, which is a problem UGS can solve.The significance of this paper is that it is based on more than two years of field research work on human + multi-agent UGS collaboration in realistic military operating conditions, performed within the scope of the European project iMUGS. In the framework of this project, not less than six large-scale field trial campaigns were organized across Europe. In each field trial campaign, soldiers and UGS had to work together to achieve a set of high-level mission goals that were distributed among them via a planning \& scheduling mechanism. 
This paper will focus on the outcomes of the Belgian field trial, which concentrated on a resupply logistics mission.Within this paper, a description of the iMUGS test setup and operational scenarios is provided. The ergonomic design of the tactical planning system is elaborated, together with the high-level swarming and task scheduling methods that divide the work between robotic and human agents in the fieldThe resupply mission, as described in this paper, was executed in summer 2022 in Belgium by a mixed team of soldiers and UGS for an audience of around 200 people from defence actors from European member states. The results of this field trial were evaluated as highly positive, as all high-level requirements were obtained by the robotic fleet.}
    }

  • D. Doroftei, G. De Cubber, and H. De Smet, “Human factors assessment for drone operations: towards a virtual drone co-pilot," in Human Factors in Robots, Drones and Unmanned Systems. AHFE (2023) International Conference., 2023.
    [BibTeX] [Abstract] [Download PDF] [DOI]

    As the number of drone operations increases, so does the risk of incidents with these novel, yet sometimes dangerous unmanned systems. Research has shown that over 70% of drone incidents are caused by human error, so in order to reduce the risk of incidents, the human factors related to the operation of the drone should be studied. However, this is not a trivial exercise, because on the one hand, a realistic operational environment is required (in order to study the human behaviour in realistic conditions), while on the other hand a standardised environment is required, such that repeatable experiments can be set up in order to ensure statistical relevance. In order to remedy this, within the scope of the ALPHONSE project, a realistic simulation environment was developed that is specifically geared towards the evaluation of human factors for military drone operations. Within the ALPHONSE simulator, military (and other) drone pilots can perform missions in realistic operational conditions. At the same time, they are subjected to a range of factors that can influence operator performance. These constitute both person-induced factors like pressure to achieve the set goals in time or people talking to the pilot and environment-induced stress factors like changing weather conditions. During the flight operation, the ALPHONSE simulator continuously monitors over 65 flight parameters. After the flight, an overall performance score is calculated, based upon the achievement of the mission objectives. Throughout the ALPHONSE trials, a wide range of pilots has flown in the simulator, ranging from beginner to expert pilots. Using all the data recorded during these flights, three actions are performed:-An Artificial Intelligence (AI) – based classifier was trained to automatically recognize in real time good and bad flight behaviour. 
This allows for the development of a virtual co-pilot that can warn the pilot at any given moment when the pilot is starting to exhibit behaviour that is recognized by the classifier to correspond mostly to the behaviour of inexperienced pilots and not to the behaviour of good pilots.-An identification and ranking of the human factors and their impact on the flight performance, by linking the induced stress factors to the performance scores-An update of the training procedures to take into consideration the human factors that impact flight performance, such that newly trained pilots are better aware of these influences.The objective of this paper is to present the complete ALPHONSE simulator system for the evaluation of human factors for drone operations and present the results of the experiments with real military flight operators. The focus of the paper will be on the elaboration of the design choices for the development of the AI – based classifier for real-time flight performance evaluation.The proposed development is highly significant, as it presents a concrete and cost-effective methodology for developing a virtual co-pilot for drone pilots that can render drone operations safer. Indeed, while the initial training of the AI model requires considerable computing resources, the implementation of the classifier can be readily integrated in commodity flight controllers to provide real-time alerts when pilots are manifesting undesired flight behaviours.The paper will present results of tests with drone pilots from Belgian Defence and civilian Belgian Defence researchers that have flown within the ALPHONSE simulator. These pilots have first acted as data subjects to provide flight data to train the model and have later been used to validate the model. The validation shows that the virtual co-pilot achieves a very high accuracy and can in over 80% of the cases correctly identify bad flight profiles in real-time.

    @inproceedings{ahfe20203doroftei,
    title     = {Human factors assessment for drone operations: towards a virtual drone co-pilot},
    author    = {Doroftei, D. and De Cubber, G. and De Smet, H.},
    booktitle = {Human Factors in Robots, Drones and Unmanned Systems. AHFE (2023) International Conference},
    editor    = {Ahram, Tareq and Karwowski, Waldemar},
    publisher = {AHFE Open Access, AHFE International, USA},
    year      = {2023},
    volume    = {93},
    project   = {Alphonse},
    location  = {San Francisco, USA},
    unit      = {meca-ras},
    doi       = {10.54941/ahfe1003747},
    url       = {https://openaccess.cms-conferences.org/publications/book/978-1-958651-69-8/article/978-1-958651-69-8_6},
    abstract  = {As the number of drone operations increases, so does the risk of incidents with these novel, yet sometimes dangerous unmanned systems. Research has shown that over 70\% of drone incidents are caused by human error, so in order to reduce the risk of incidents, the human factors related to the operation of the drone should be studied. However, this is not a trivial exercise, because on the one hand, a realistic operational environment is required (in order to study the human behaviour in realistic conditions), while on the other hand a standardised environment is required, such that repeatable experiments can be set up in order to ensure statistical relevance. In order to remedy this, within the scope of the ALPHONSE project, a realistic simulation environment was developed that is specifically geared towards the evaluation of human factors for military drone operations. Within the ALPHONSE simulator, military (and other) drone pilots can perform missions in realistic operational conditions. At the same time, they are subjected to a range of factors that can influence operator performance. These constitute both person-induced factors like pressure to achieve the set goals in time or people talking to the pilot and environment-induced stress factors like changing weather conditions. During the flight operation, the ALPHONSE simulator continuously monitors over 65 flight parameters. After the flight, an overall performance score is calculated, based upon the achievement of the mission objectives. Throughout the ALPHONSE trials, a wide range of pilots has flown in the simulator, ranging from beginner to expert pilots. Using all the data recorded during these flights, three actions are performed:-An Artificial Intelligence (AI) - based classifier was trained to automatically recognize in real time good and bad flight behaviour. 
This allows for the development of a virtual co-pilot that can warn the pilot at any given moment when the pilot is starting to exhibit behaviour that is recognized by the classifier to correspond mostly to the behaviour of inexperienced pilots and not to the behaviour of good pilots.-An identification and ranking of the human factors and their impact on the flight performance, by linking the induced stress factors to the performance scores-An update of the training procedures to take into consideration the human factors that impact flight performance, such that newly trained pilots are better aware of these influences.The objective of this paper is to present the complete ALPHONSE simulator system for the evaluation of human factors for drone operations and present the results of the experiments with real military flight operators. The focus of the paper will be on the elaboration of the design choices for the development of the AI - based classifier for real-time flight performance evaluation.The proposed development is highly significant, as it presents a concrete and cost-effective methodology for developing a virtual co-pilot for drone pilots that can render drone operations safer. Indeed, while the initial training of the AI model requires considerable computing resources, the implementation of the classifier can be readily integrated in commodity flight controllers to provide real-time alerts when pilots are manifesting undesired flight behaviours.The paper will present results of tests with drone pilots from Belgian Defence and civilian Belgian Defence researchers that have flown within the ALPHONSE simulator. These pilots have first acted as data subjects to provide flight data to train the model and have later been used to validate the model. The validation shows that the virtual co-pilot achieves a very high accuracy and can in over 80\% of the cases correctly identify bad flight profiles in real-time.}
    }

  • E. Ghisoni, S. Govindaraj, A. M. Casado Faulí, G. De Cubber, F. Polisano, N. Aouf, D. Rondao, Z. Chekakta, and B. de Waard, “Multi-agent system and AI for Explosive Ordnance Disposal," in 19th International Symposium Mine Action, 2023, p. 26.
    [BibTeX] [Download PDF]
    @inproceedings{ghisonimulti,
    title     = {Multi-agent system and AI for Explosive Ordnance Disposal},
    author    = {Ghisoni, Enzo and Govindaraj, Shashank and Casado Faul{\'\i}, Ana Mar{\'\i}a and De Cubber, Geert and Polisano, Fabio and Aouf, Nabil and Rondao, Duarte and Chekakta, Zakaria and de Waard, Bob},
    booktitle = {19th International Symposium Mine Action},
    publisher = {CEIA},
    year      = {2023},
    project   = {AIDED},
    location  = {Croatia},
    unit      = {meca-ras},
    url       = {https://www.ctro.hr/userfiles/files/MINE-ACTION-2023_.pdf},
    pages     = {26}
    }

  • E. Ghisoni, S. Govindaraj, A. M. Casado Faulí, G. De Cubber, F. Polisano, N. Aouf, D. Rondao, Z. Chekakta, and B. de Waard, “Multi-agent system and AI for Explosive Ordnance Disposal," in 19th International Symposium Mine Action, 2023, p. 26.
    [BibTeX] [Download PDF]
    Duplicate of the ghisonimulti entry above (same key, same work); disabled by removing the leading @ so BibTeX ignores it instead of raising a repeated-entry error:
    inproceedings{ghisonimulti,
    title={Multi-agent system and AI for Explosive Ordnance Disposal},
    author={Ghisoni, Enzo and Govindaraj, Shashank and Faul{\'\i}, Ana Mar{\'\i}a Casado and De Cubber, Geert and Polisano, Fabio and Aouf, Nabil and Rondao, Duarte and Chekakta, Zakaria and de Waard, Bob},
    booktitle={19th International Symposium Mine Action},
    publisher = {CEIA},
    year = {2023},
    project = {AIDED},
    location = {Croatia},
    unit= {meca-ras},
    url={https://www.ctro.hr/userfiles/files/MINE-ACTION-2023_.pdf},
    pages={26}
    }

2022

  • R. Lahouli., G. De Cubber, B. Pairet., C. Hamesse., T. Fréville., and R. Haelterman., “Deep Learning based Object Detection and Tracking for Maritime Situational Awareness," in Proceedings of the 17th International Joint Conference on Computer Vision, Imaging and Computer Graphics Theory and Applications (VISIGRAPP 2022) – Volume 4: VISAPP, 2022, pp. 643-650.
    [BibTeX] [DOI]
    Duplicate of the visapp22 entry below (same citation key and DOI). This copy is
    the raw SciTePress auto-export with malformed author names (a literal period
    appended to every name token, which breaks BibTeX name parsing and sorting).
    The leading at-sign has been removed so BibTeX ignores this copy; the cleaner
    entry below is kept. Delete this block once confirmed redundant.
    conference{visapp22,
    author={Rihab Lahouli. and Geert {De Cubber}. and Benoît Pairet. and Charles Hamesse. and Timothée Fréville. and Rob Haelterman.},
    title={Deep Learning based Object Detection and Tracking for Maritime Situational Awareness},
    booktitle={Proceedings of the 17th International Joint Conference on Computer Vision, Imaging and Computer Graphics Theory and Applications (VISIGRAPP 2022) - Volume 4: VISAPP},
    year={2022},
    pages={643-650},
    publisher={SciTePress},
    organization={INSTICC},
    doi={10.5220/0010901000003124},
    isbn={978-989-758-555-5},
    }

  • R. Lahouli, G. De Cubber, B. Pairet, C. Hamesse, T. Freville, and R. Haelterman, “Deep Learning based Object Detection and Tracking for Maritime Situational Awareness," in Proceedings of the 17th International Joint Conference on Computer Vision, Imaging and Computer Graphics Theory and Applications – Volume 4: VISAPP, 2022, pp. 643-650.
    [BibTeX] [Download PDF] [DOI]
    @inproceedings{visapp22,
    author={Lahouli, Rihab and De Cubber, Geert and Pairet, Benoit and Hamesse, Charles and Freville, Timothee and Haelterman, Rob},
    title={Deep Learning based Object Detection and Tracking for Maritime Situational Awareness},
    booktitle={Proceedings of the 17th International Joint Conference on Computer Vision, Imaging and Computer Graphics Theory and Applications - Volume 4: VISAPP},
    year={2022},
    pages={643--650},
    publisher={SciTePress},
    organization={INSTICC},
    doi={10.5220/0010901000003124},
    isbn={978-989-758-555-5},
    project={SSAVE},
    url={https://www.scitepress.org/PublicationsDetail.aspx?ID=mJ5eF6o+SbM=&t=1},
    unit= {meca-ras}
    }

  • D. Doroftei, G. De Cubber, and H. De Smet, “A quantitative measure for the evaluation of drone-based video quality on a target," in Eighteenth International Conference on Autonomic and Autonomous Systems (ICAS), Venice, Italy, 2022.
    [BibTeX] [Abstract] [Download PDF] [DOI]

    This paper presents a methodology to assess video quality and based on that automatically calculate drone trajectories that optimize the video quality.

    @InProceedings{doroftei2022alphonse2,
    author = {Doroftei, Daniela and De Cubber, Geert and De Smet, Hans},
    booktitle = {Eighteenth International Conference on Autonomic and Autonomous Systems (ICAS)},
    title = {A quantitative measure for the evaluation of drone-based video quality on a target},
    year = {2022},
    month = jun,
    organization = {IARIA},
    publisher = {ThinkMind},
    address = {Venice, Italy},
    url = {https://www.thinkmind.org/articles/icas_2022_1_40_20018.pdf},
    isbn={978-1-61208-966-9},
    abstract = {This paper presents a methodology to assess video quality and based on that automatically calculate drone trajectories that optimize the video quality.},
    internal-note = {removed former doi field: its value was a ThinkMind article URL, not a DOI, and would have produced broken doi.org links},
    project = {Alphonse},
    unit= {meca-ras}
    }

  • E. Ghisoni and G. De Cubber, “AIDED: Robotics & Artificial Intelligence for Explosive Ordnance Disposal," in International Workshop on Robotics for risky interventions and Environmental Surveillance-Maintenance (VRISE), Les Bons Villers, Belgium, 2022.
    [BibTeX] [Abstract] [Download PDF]

    This paper presents an overview of the AIDED project on AI for IED detection.

    @InProceedings{ghisoni2022a,
    author = {Ghisoni, Enzo and De Cubber, Geert},
    booktitle = {International Workshop on Robotics for risky interventions and Environmental Surveillance-Maintenance (VRISE)},
    title = {AIDED: Robotics \& Artificial Intelligence for Explosive Ordnance Disposal},
    year = {2022},
    month = jun,
    organization = {IMEKO},
    publisher = {IMEKO},
    address = {Les Bons Villers, Belgium},
    url = {https://www.ici-belgium.be/registration-and-program-vrise2022-june-7/},
    abstract = {This paper presents an overview of the AIDED project on AI for IED detection.},
    project = {AIDED},
    unit= {meca-ras}
    }

  • D. Doroftei, G. De Cubber, and H. De Smet, “Assessing Human Factors for Drone Operations in a Simulation Environment," in Human Factors in Robots, Drones and Unmanned Systems – AHFE (2022) International Conference, New York, USA, 2022.
    [BibTeX] [Abstract] [Download PDF] [DOI]

    This paper presents an overview of the Alphonse methodology for Assessing Human Factors for Drone Operations in a Simulation Environment.

    @InProceedings{doroftei2022a,
    author = {Doroftei, Daniela and De Cubber, Geert and De Smet, Hans},
    booktitle = {Human Factors in Robots, Drones and Unmanned Systems - AHFE (2022) International Conference},
    title = {Assessing Human Factors for Drone Operations in a Simulation Environment},
    year = {2022},
    month = jul,
    volume = {57},
    editor = {Ahram, Tareq and Karwowski, Waldemar},
    publisher = {AHFE International},
    address = {New York, USA},
    url = {https://openaccess-api.cms-conferences.org/articles/download/978-1-958651-33-9_16},
    abstract = {This paper presents an overview of the Alphonse methodology for Assessing Human Factors for Drone Operations in a Simulation Environment.},
    doi = {10.54941/ahfe1002319},
    project = {Alphonse},
    unit= {meca-ras}
    }

  • T. Halleux, T. Nguyen, C. Hamesse, G. De Cubber, and B. Janssens, “Visual Drone Detection and Tracking for Autonomous Operation from Maritime Vessel," in Proceedings of TC17-ISMCR2022 – A Topical Event of Technical Committee on Measurement and Control of Robotics (TC17), International Measurement Confederation (IMEKO), Theme: “Robotics and Virtual Tools for a New Era", 2022.
    [BibTeX] [Download PDF] [DOI]
    @inproceedings{ismcr2022_1,
    author={Halleux, Timothy and Nguyen, Tien-Thanh and Hamesse, Charles and De Cubber, Geert and Janssens, Bart},
    booktitle={Proceedings of TC17-ISMCR2022 - A Topical Event of Technical Committee on Measurement and Control of Robotics (TC17), International Measurement Confederation (IMEKO), Theme: ``Robotics and Virtual Tools for a New Era''},
    title={Visual Drone Detection and Tracking for Autonomous Operation from Maritime Vessel},
    year={2022},
    month=sep,
    url={https://mecatron.rma.ac.be/pub/2022/ISMCR-Drone_detection_tracking_FullPaper.pdf},
    project={MarLand, COURAGEOUS},
    publisher={IMEKO},
    doi={10.5281/zenodo.7074445},
    unit= {meca-ras}
    }

  • T. Dutrannois, T. Nguyen, C. Hamesse, G. De Cubber, and B. Janssens, “Visual SLAM for Autonomous Drone Landing on a Maritime Platform," in Proceedings of TC17-ISMCR2022 – A Topical Event of Technical Committee on Measurement and Control of Robotics (TC17), International Measurement Confederation (IMEKO), Theme: “Robotics and Virtual Tools for a New Era", 2022.
    [BibTeX] [Download PDF] [DOI]
    @inproceedings{ismcr2022_2,
    author={Dutrannois, Thomas and Nguyen, Tien-Thanh and Hamesse, Charles and De Cubber, Geert and Janssens, Bart},
    booktitle={Proceedings of TC17-ISMCR2022 - A Topical Event of Technical Committee on Measurement and Control of Robotics (TC17), International Measurement Confederation (IMEKO), Theme: ``Robotics and Virtual Tools for a New Era''},
    title={Visual SLAM for Autonomous Drone Landing on a Maritime Platform},
    year={2022},
    month=sep,
    url={https://mecatron.rma.ac.be/pub/2022/ISMCR-Visual_SLAM_FullPaper.pdf},
    project={MarLand},
    publisher={IMEKO},
    doi={10.5281/zenodo.7074451},
    unit= {meca-ras}
    }

  • A. Borghgraef, F. B. Othmen, and M. Vandewal, “Obtaining ground truth data in C-UAS trials," in Target and Background Signatures VIII, 2022, p. 122700B.
    [BibTeX] [Download PDF] [DOI]
    @inproceedings{10.1117/12.2635723,
    author = {Borghgraef, Alexander and Ben Othmen, Fatma and Vandewal, Marijke},
    title = {Obtaining ground truth data in {C-UAS} trials},
    volume = {12270},
    booktitle = {Target and Background Signatures VIII},
    editor = {Stein, Karin and Schleijpen, Ric},
    organization = {International Society for Optics and Photonics},
    publisher = {SPIE},
    pages = {122700B},
    keywords = {C-UAS, sensor evaluations, ground truth},
    year = {2022},
    doi = {10.1117/12.2635723},
    unit= {ciss}
    }

2021

  • T. Freville, C. Hamesse, B. Pairet, R. Lahouli, and R. Haelterman, “From Floor Plans to Virtual Reality," in 2021 IEEE International Conference on Artificial Intelligence and Virtual Reality (AIVR), Los Alamitos, CA, USA, 2021, pp. 129-133.
    [BibTeX] [Abstract] [Download PDF] [DOI]

    Creating realistic VR environments is a tedious task. For many types of applications, these environments must respect certain constraints or correspond as faithfully as possible to a real place. Floor plans are a simple and abundant format that humans can read and edit. As such, they are a good basis to create VR environments that match real buildings. We propose a method to convert floor plans to VR environments with minimal human intervention. Leveraging traditional computer vision, machine learning and 3D engines, our method is efficient but remains flexible and fast, and creates simple yet realistic environments that can be used for various VR applications. We demonstrate results for our specific use case for Belgian Defence's tactical intervention teams.

    @inproceedings{9644381,
    author = {Freville, T. and Hamesse, C. and Pairet, B. and Lahouli, R. and Haelterman, R.},
    booktitle = {2021 IEEE International Conference on Artificial Intelligence and Virtual Reality (AIVR)},
    title = {From Floor Plans to Virtual Reality},
    year = {2021},
    pages = {129--133},
    abstract = {Creating realistic VR environments is a tedious task. For many types of applications, these environments must respect certain constraints or correspond as faithfully as possible to a real place. Floor plans are a simple and abundant format that humans can read and edit. As such, they are a good basis to create VR environments that match real buildings. We propose a method to convert floor plans to VR environments with minimal human intervention. Leveraging traditional computer vision, machine learning and 3D engines, our method is efficient but remains flexible and fast, and creates simple yet realistic environments that can be used for various VR applications. We demonstrate results for our specific use case for Belgian Defence's tactical intervention teams.},
    keywords = {geometry;computer vision;three-dimensional displays;conferences;buildings;virtual reality;machine learning},
    doi = {10.1109/AIVR52153.2021.00030},
    url = {https://doi.ieeecomputersociety.org/10.1109/AIVR52153.2021.00030},
    publisher = {IEEE Computer Society},
    address = {Los Alamitos, CA, USA},
    month = nov
    }

  • C. Hamesse, B. Pairet, R. Lahouli, T. Fréville, and R. Haelterman, “Simulation of Pan-Tilt-Zoom Tracking for Augmented Reality Air Traffic Control," in 2021 International Conference on 3D Immersion (IC3D), 2021, pp. 1-5.
    [BibTeX] [DOI]
    @inproceedings{9687257,
    author={Hamesse, Charles and Pairet, Benoît and Lahouli, Rihab and Fréville, Timothée and Haelterman, Rob},
    booktitle={2021 International Conference on 3D Immersion (IC3D)},
    title={Simulation of Pan-Tilt-Zoom Tracking for Augmented Reality Air Traffic Control},
    year={2021},
    pages={1--5},
    keywords={Headphones;Visualization;Solid modeling;Three-dimensional displays;Atmospheric modeling;Cameras;Air traffic control;Augmented Reality;Air Traffic Control;Pan Tilt Zoom;Visual Tracker;Aircraft},
    doi={10.1109/IC3D53758.2021.9687257}
    }

  • D. G. Ramos, D. Bozhinoski, G. Francesca, L. Garattoni, K. Hasselmann, M. Kegeleirs, J. Kuckling, A. Ligot, F. J. Mendiburu, F. Pagnozzi, and others, “The automatic off-line design of robot swarms: recent advances and perspectives," in R2T2: Robotics Research for Tomorrow’s Technology, 2021.
    [BibTeX]
    @inproceedings{ramos2021automatic,
    title={The automatic off-line design of robot swarms: recent advances and perspectives},
    author={Ramos, David Garz{\'o}n and Bozhinoski, Darko and Francesca, Gianpiero and Garattoni, Lorenzo and Hasselmann, Ken and Kegeleirs, Miquel and Kuckling, Jonas and Ligot, Antoine and Mendiburu, Fernando J and Pagnozzi, Federico and others},
    booktitle={R2T2: Robotics Research for Tomorrow's Technology},
    year={2021}
    }

  • D. Doroftei, T. De Vleeschauwer, S. L. Bue, M. Dewyn, F. Vanderstraeten, and G. De Cubber, “Human-Agent Trust Evaluation in a Digital Twin Context," in 2021 30th IEEE International Conference on Robot Human Interactive Communication (RO-MAN), Vancouver, BC, Canada, 2021, pp. 203-207.
    [BibTeX] [Download PDF] [DOI]
    @inproceedings{9515445,
    author={Doroftei, Daniela and De Vleeschauwer, Tom and Lo Bue, Salvatore and Dewyn, Michaël and Vanderstraeten, Frik and De Cubber, Geert},
    booktitle={2021 30th IEEE International Conference on Robot Human Interactive Communication (RO-MAN)},
    title={Human-Agent Trust Evaluation in a Digital Twin Context},
    year={2021},
    pages={203--207},
    url={https://www.researchgate.net/profile/Geert-De-Cubber/publication/354078858_Human-Agent_Trust_Evaluation_in_a_Digital_Twin_Context/links/61430bd22bfbd83a46cf2b8c/Human-Agent-Trust-Evaluation-in-a-Digital-Twin-Context.pdf?_sg%5B0%5D=BdEPB9AGDUV3sOwnEQKCr-DgWRA7uDNeMlvyQYNaMPGSO2bhCDbyG4AENXXxH3j323ypYTq9nMftVbDr2fsCSA.ePETOgrc5VHnE0GK_yjBK1XVVfdQ9S6g2UKVfg8Z8miIkGlMPXpzaYKlB0JPDSiroGp9QoFbmcY2egYAXbL1ZQ&_sg%5B1%5D=ykQnQS2LN8fUQXAYx5Fpiy2NXqIwqO1UyVCENkpSUUWZn8Qqgrelh1bb4ry9Q9XPgCts7lVXU1_68YLjqnCPh4seSzWfG5BpKHc3MuFwsK6l.ePETOgrc5VHnE0GK_yjBK1XVVfdQ9S6g2UKVfg8Z8miIkGlMPXpzaYKlB0JPDSiroGp9QoFbmcY2egYAXbL1ZQ&_iepl=},
    project={Alphonse},
    publisher={IEEE},
    address={Vancouver, BC, Canada},
    month=aug,
    doi={10.1109/RO-MAN50785.2021.9515445},
    internal-note={surname normalized from "Bue, Salvatore Lo" to "Lo Bue, Salvatore"; given name "Frik" (Vanderstraeten) looks unusual -- verify both against the paper},
    unit= {meca-ras}}

  • Y. Baudoin, G. De Cubber, and E. Cepolina, “Mobile Robots Supporting Risky Interventions, Humanitarian actions and Demining, in particular the promising DISARMADILLO Tool," in Proceedings of TC17-VRISE2021 – A VIRTUAL Topical Event of Technical Committee on Measurement and Control of Robotics (TC17), International Measurement Confederation (IMEKO), Theme: “Robotics for Risky Interventions and Environmental Surveillance", Houston, TX, USA, 2021, pp. 5-6.
    [BibTeX] [Download PDF]
    @inproceedings{knvrise,
    author={Baudoin, Yvan and De Cubber, Geert and Cepolina, Emanuela},
    booktitle={Proceedings of TC17-VRISE2021 - A VIRTUAL Topical Event of Technical Committee on Measurement and Control of Robotics (TC17), International Measurement Confederation (IMEKO), Theme: ``Robotics for Risky Interventions and Environmental Surveillance''},
    title={Mobile Robots Supporting Risky Interventions, Humanitarian actions and Demining, in particular the promising DISARMADILLO Tool},
    year={2021},
    pages={5--6},
    url={https://mecatron.rma.ac.be/pub/2021/TC17-VRISE2021-Abstract%20Proceedings.pdf},
    project={AIDED, Alphonse, MarSur, SSAVE, MarLand, iMUGs, ICARUS, TIRAMISU},
    publisher={IMEKO},
    address={Houston, TX, USA},
    month=oct,
    unit= {meca-ras}
    }

  • C. A. M. Fauli, R. P. Soria, M. C. Diaz De Espada, M. A. Trujillo, A. Viguria, and A. Ollero, “Assisted Flight Control for Aerial Contact UAVs in Industrial Environments," in 2021 Aerial Robotic Systems Physically Interacting with the Environment (AIRPHARO), 2021, pp. 1-4.
    [BibTeX] [DOI]
    @inproceedings{9571048,
    author={Fauli, A.M. Casado and Soria, P. Ramon and Diaz De Espada, C. Martin and Trujillo, M.A. and Viguria, A. and Ollero, A.},
    booktitle={2021 Aerial Robotic Systems Physically Interacting with the Environment (AIRPHARO)},
    title={Assisted Flight Control for Aerial Contact {UAVs} in Industrial Environments},
    year={2021},
    pages={1--4},
    doi={10.1109/AIRPHARO52252.2021.9571048},
    internal-note={title normalized from "UAV s" (metadata garbling) -- verify against the published title}
    }

2020

  • M. Bellemans, D. Lammens, J. De Sloover, T. De Vleeschauwer, E. Schoofs, W. Jordens, B. Van Steenhuyse, J. Mangelschots, S. Selleri, C. Hamesse, T. Fréville, and R. Haelterman, “Training Firefighters in Virtual Reality," in 2020 International Conference on 3D Immersion (IC3D), 2020, pp. 1-6.
    [BibTeX] [DOI]
    @inproceedings{9376336,
    author={Bellemans, M. and Lammens, D. and De Sloover, J. and De Vleeschauwer, T. and Schoofs, E. and Jordens, W. and Van Steenhuyse, B. and Mangelschots, J. and Selleri, S. and Hamesse, C. and Fréville, T. and Haelterman, R.},
    booktitle={2020 International Conference on 3D Immersion (IC3D)},
    title={Training Firefighters in Virtual Reality},
    year={2020},
    pages={1--6},
    keywords={Training;Industries;Technological innovation;Three-dimensional displays;Collaboration;Virtual reality;Virtual Reality;Firefighting;Innovation},
    doi={10.1109/IC3D51119.2020.9376336},
    internal-note={author spellings "Lamrnens" and "Haeltermani" in the import looked like OCR errors; corrected to "Lammens" and "Haelterman" (the latter matches this file's other entries) -- verify against the paper}}

  • A. Ligot, K. Hasselmann, and M. Birattari, “AutoMoDe-Arlequin: Neural Networks as Behavioral Modules for the Automatic Design of Probabilistic Finite-State Machines," in Swarm Intelligence: 12th International Conference, ANTS 2020, Barcelona, Spain, October 26–28, 2020, Proceedings 12, 2020, p. 271–281.
    [BibTeX]
    @inproceedings{ligot2020automode,
      author       = {Ligot, Antoine and Hasselmann, Ken and Birattari, Mauro},
      title        = {AutoMoDe-Arlequin: Neural Networks as Behavioral Modules for the Automatic Design of Probabilistic Finite-State Machines},
      booktitle    = {Swarm Intelligence: 12th International Conference, ANTS 2020, Barcelona, Spain, October 26--28, 2020, Proceedings 12},
      year         = {2020},
      pages        = {271--281},
      organization = {Springer International Publishing}
    }

  • D. Doroftei, G. De Cubber, and H. De Smet, “Reducing drone incidents by incorporating human factors in the drone and drone pilot accreditation process," in Advances in Human Factors in Robots, Drones and Unmanned Systems, San Diego, USA, 2020, p. 71–77.
    [BibTeX] [Abstract] [Download PDF] [DOI]

    Considering the ever-increasing use of drones in a plentitude of application areas, the risk is that also an ever-increasing number of drone incidents would be ob-served. Research has shown that a large majority of all incidents with drones is due not to technological, but to human error. An advanced risk-reduction meth-odology, focusing on the human element, is thus required in order to allow for the safe use of drones. In this paper, we therefore introduce a novel concept to pro-vide a qualitative and quantitative assessment of the performance of the drone op-erator. The proposed methodology is based on one hand upon the development of standardized test methodologies and on the other hand on human performance modeling of the drone operators in a highly realistic simulation environment.

    @inproceedings{doroftei2020alphonse,
      author       = {Doroftei, Daniela and De Cubber, Geert and De Smet, Hans},
      title        = {Reducing drone incidents by incorporating human factors in the drone and drone pilot accreditation process},
      booktitle    = {Advances in Human Factors in Robots, Drones and Unmanned Systems},
      editor       = {Zallio, Matteo},
      organization = {AHFE},
      publisher    = {Springer International Publishing},
      address      = {San Diego, USA},
      year         = {2020},
      month        = jul,
      pages        = {71--77},
      isbn         = {978-3-030-51758-8},
      doi          = {10.1007/978-3-030-51758-8_10},
      url          = {http://mecatron.rma.ac.be/pub/2020/Reducing%20drone%20incidents%20by%20incorporating%20human%20factors%20in%20the%20drone%20and%20drone%20pilot%20accreditation%20process.pdf},
      abstract     = {Considering the ever-increasing use of drones in a plentitude of application areas, the risk is that also an ever-increasing number of drone incidents would be ob-served. Research has shown that a large majority of all incidents with drones is due not to technological, but to human error. An advanced risk-reduction meth-odology, focusing on the human element, is thus required in order to allow for the safe use of drones. In this paper, we therefore introduce a novel concept to pro-vide a qualitative and quantitative assessment of the performance of the drone op-erator. The proposed methodology is based on one hand upon the development of standardized test methodologies and on the other hand on human performance modeling of the drone operators in a highly realistic simulation environment.},
      project      = {Alphonse},
      unit         = {meca-ras}
    }

  • G. De Cubber, R. Lahouli, D. Doroftei, and R. Haelterman, “Distributed coverage optimization for a fleet of unmanned maritime systems for a maritime patrol and surveillance application," in ISMCR 2020: 23rd International Symposium on Measurement and Control in Robotics, Budapest, Hungary, 2020.
    [BibTeX] [Abstract] [Download PDF] [DOI]

    In order for unmanned maritime systems to provide added value for maritime law enforcement agencies, they have to be able to work together as a coordinated team for tasks such as area surveillance and patrolling. Therefore, this paper proposes a methodology that optimizes the coverage of a fleet of unmanned maritime systems, and thereby maximizes the chances of noticing threats. Unlike traditional approaches for maritime coverage optimization, which are also used for example in search and rescue operations when searching for victims at sea, this approaches takes into consideration the limited seaworthiness of small unmanned systems, as compared to traditional large ships, by incorporating the danger level in the design of the optimizer.

    @inproceedings{decubber2020dco,
      author       = {De Cubber, Geert and Lahouli, Rihab and Doroftei, Daniela and Haelterman, Rob},
      title        = {Distributed coverage optimization for a fleet of unmanned maritime systems for a maritime patrol and surveillance application},
      booktitle    = {ISMCR 2020: 23rd International Symposium on Measurement and Control in Robotics},
      organization = {ISMCR},
      publisher    = {{IEEE}},
      address      = {Budapest, Hungary},
      year         = {2020},
      month        = oct,
      doi          = {10.1109/ISMCR51255.2020.9263740},
      url          = {http://mecatron.rma.ac.be/pub/2020/conference_101719.pdf},
      abstract     = {In order for unmanned maritime systems to provide added value for maritime law enforcement agencies, they have to be able to work together as a coordinated team for tasks such as area surveillance and patrolling. Therefore, this paper proposes a methodology that optimizes the coverage of a fleet of unmanned maritime systems, and thereby maximizes the chances of noticing threats. Unlike traditional approaches for maritime coverage optimization, which are also used for example in search and rescue operations when searching for victims at sea, this approaches takes into consideration the limited seaworthiness of small unmanned systems, as compared to traditional large ships, by incorporating the danger level in the design of the optimizer.},
      project      = {SSAVE,MarSur},
      unit         = {meca-ras}
    }

  • B. Pairet, G. C. Gonzalez, and L. Jacques, “Morphological components analysis for circumstellar disks imaging," in Proc. Int. Traveling Workshop Interact. Between Sparse Models Technol.(iTWIST), 2020.
    [BibTeX]
    @inproceedings{pairet2020itwist,
    title={Morphological components analysis for circumstellar disks imaging},
    author={Pairet, Beno{\^\i}t and Gomez Gonzalez, C. and Jacques, Laurent},
    booktitle={Proc. Int. Traveling Workshop Interact. Between Sparse Models Technol. (iTWIST)},
    year={2020},
    internal-note={second author normalized from "Gonzalez, C Gomez" (mis-split compound surname); believed to be Carlos Gomez Gonzalez -- verify}
    }

2019

  • G. De Cubber, “Opportunities and threats posed by new technologies," in SciFi-IT, Ghent, Belgium, 2019.
    [BibTeX] [Abstract] [Download PDF] [DOI]

    The technological evolution is introducing in a fast pace new technologies in our everyday lives. As always, these new technologies can be applied for good causes and thereby give us the opportunity to do many interesting new things. Think for example about drones transporting blood samples between hospitals. However, like always, new technologies can also be applied for bad causes. Think for example about the same drones, but this time transporting bomb parcels instead of blood. In this paper, we focus on a number of novel technologies and discuss how security actors are currently doing their best to maximize the good use of these tools while minimizing the bad use. We will focus on research actions taken by Belgian Royal Military Academy in the domains of: – Augmented reality, and showcase how this technology can be used to improve surveillance operations. – Unmanned Aerial Systems (Drones), and showcase how the potential security threats posed by these systems can be mitigated by novel drone detection systems. – Unmanned Maritime Systems, and showcase how this technology can be used to increase the safety at sea. – Unmanned Ground Systems, and more specifically the autonomous cars, showcasing how to prevent potential cyber-attacks on these future transportation tools.

    @InProceedings{de2019opportunities,
    author = {De Cubber, Geert},
    booktitle = {SciFi-IT},
    title = {Opportunities and threats posed by new technologies},
    year = {2019},
    abstract = {The technological evolution is introducing in a fast pace new technologies in our everyday lives. As always, these new technologies can be applied for good causes and thereby give us the opportunity to do many interesting new things. Think for example about drones transporting blood samples between hospitals. However, like always, new technologies can also be applied for bad causes. Think for example about the same drones, but this time transporting bomb parcels instead of blood.
    In this paper, we focus on a number of novel technologies and discuss how security actors are currently
    doing their best to maximize the good use of these tools while minimizing the bad use. We will focus on research actions taken by Belgian Royal Military Academy in the domains of:
    - Augmented reality, and showcase how this technology can be used to improve surveillance operations.
    - Unmanned Aerial Systems (Drones), and showcase how the potential security threats posed by these systems can be mitigated by novel drone detection systems.
    - Unmanned Maritime Systems, and showcase how this technology can be used to increase the safety at sea.
    - Unmanned Ground Systems, and more specifically the autonomous cars, showcasing how to prevent potential cyber-attacks on these future transportation tools.},
    doi = {10.5281/zenodo.2628758},
    address = {Ghent, Belgium},
    project = {MarSur,SafeShore},
    url = {http://mecatron.rma.ac.be/pub/2019/Sci-Fi-It-2019-DeCubber%20(2).pdf},
    unit= {meca-ras}
    }

  • G. De Cubber, “Explosive drones: How to deal with this new threat?," in International workshop on Measurement, Prevention, Protection and Management of CBRN Risks (RISE), Les Bon Villers, Belgium, 2019.
    [BibTeX] [Abstract] [Download PDF] [DOI]

    As the commercial and recreative use of small unmanned aerial vehicles or drones is booming, so are the military and criminals starting to use these systems more and more. Due to improvements in flight stability, autonomy and payload capacity it becomes possible to equip these drones with explosive charges, making them threat agents where traditional response mechanisms have few answers against. In this paper, we will discuss this new type of threat in detail, making the difference between the loitering munition, as used by regular armies and the traditional drones equipped with explosive charges, used in guerrilla warfare and by criminals. We will then discuss what research actions are currently being undertaken to provide answers to each of these threats and what countermeasures that are currently already available and which ones will be available in the near future.

    @InProceedings{de2019explosive,
    author = {De Cubber, Geert},
    booktitle = {International workshop on Measurement, Prevention, Protection and Management of CBRN Risks (RISE)},
    title = {Explosive drones: How to deal with this new threat?},
    year = {2019},
    number = {9},
    address = {Les Bon Villers, Belgium},
    abstract = {As the commercial and recreative use of small unmanned aerial vehicles or drones is booming, so are the military and criminals starting to use these systems more and more. Due to improvements in flight stability, autonomy and payload capacity it becomes possible to equip these drones with explosive charges, making them threat agents where traditional response mechanisms have few answers against. In this paper, we will discuss this new type of threat in detail, making the difference between the loitering munition, as used by regular armies and the traditional drones equipped with explosive charges, used in guerrilla warfare and by criminals. We will then discuss what research actions are currently being undertaken to provide answers to each of these threats and what countermeasures that are currently already available and which ones will be available in the near future.},
    doi = {10.5281/ZENODO.2628752},
    project = {SafeShore},
    url = {http://mecatron.rma.ac.be/pub/2019/Explosive%20drones%20-%20How%20to%20deal%20with%20this%20new%20threat.pdf},
    unit= {meca-ras}
    }

  • G. De Cubber and R. Haelterman, “Optimized distributed scheduling for a fleet of heterogeneous unmanned maritime systems," in 2019 IEEE International Symposium on Measurement and Control in Robotics (ISMCR), Houston, USA, 2019.
    [BibTeX] [Abstract] [Download PDF] [DOI]

    Due to the increase in embedded computing power, modern robotic systems are capable of running a wide range of perception and control algorithms simultaneously. This raises the question where to optimally allocate each robotic cognition process. In this paper, we present a concept for a novel load distribution approach. The proposed methodology adopts a decentralised approach towards the allocation of perception and control processes to different agents (unmanned vessels, fog or cloud services) based on an estimation of the communication parameters (bandwidth, latency, cost), the agent capabilities in terms of processing hardware (not only focusing on the CPU, but also taking into consideration the GPU, disk & memory speed and size) and the requirements in terms of timely delivery of quality output data. The presented approach is extensively validated in a simulation environment and shows promising properties.

    @InProceedings{de2019optimized,
    author = {De Cubber, Geert and Haelterman, Rob},
    booktitle = {2019 {IEEE} International Symposium on Measurement and Control in Robotics ({ISMCR})},
    title = {Optimized distributed scheduling for a fleet of heterogeneous unmanned maritime systems},
    year = {2019},
    month = sep,
    number = {23},
    publisher = {{IEEE}},
    address = {Houston, USA},
    abstract = {Due to the increase in embedded computing power, modern robotic systems are capable of running a wide range of perception and control algorithms simultaneously. This raises the question where to optimally allocate each robotic cognition process. In this paper, we present a concept for a novel load distribution approach. The proposed methodology adopts a decentralised approach towards the allocation of perception and control processes to different agents (unmanned vessels, fog or cloud services) based on an estimation of the communication parameters (bandwidth, latency, cost), the agent capabilities in terms of processing hardware (not only focusing on the CPU, but also taking into consideration the GPU, disk \& memory speed and size) and the requirements in terms of timely delivery of quality output data. The presented approach is extensively validated in a simulation environment and shows promising properties.},
    doi = {10.1109/ismcr47492.2019.8955727},
    project = {MarSur},
    url = {http://mecatron.rma.ac.be/pub/2019/ICMCR-DeCubber.pdf},
    unit= {meca-ras}
    }

  • H. Balta, J. Velagic, G. De Cubber, and B. Siciliano, “Semi-Automated 3D Registration for Heterogeneous Unmanned Robots Based on Scale Invariant Method," in 2019 IEEE International Symposium on Safety, Security, and Rescue Robotics (SSRR), Wurzburg, Germany, 2019.
    [BibTeX] [Abstract] [Download PDF] [DOI]

    This paper addresses the problem of 3D registration of outdoor environments combining heterogeneous datasets acquired from unmanned aerial (UAV) and ground (UGV) vehicles. In order to solve this problem, we introduced a novel Scale Invariant Registration Method (SIRM) for semi-automated registration of 3D point clouds. The method is capable of coping with an arbitrary scale difference between the point clouds, without any information about their initial position and orientation. Furthermore, the SIRM does not require having a good initial overlap between two heterogeneous datasets. Our method strikes an elegant balance between the existing fully automated 3D registration systems (which often fail in the case of heterogeneous datasets and harsh outdoor environments) and fully manual registration approaches (which are labour-intensive). The experimental validation of the proposed 3D heterogeneous registration system was performed on large-scale datasets representing unstructured and harsh outdoor environments, demonstrating the potential and benefits of the proposed 3D registration system in real-world environments.

    @inproceedings{balta2019semi,
      author    = {Balta, Haris and Velagic, Jasmin and De Cubber, Geert and Siciliano, Bruno},
      title     = {Semi-Automated {3D} Registration for Heterogeneous Unmanned Robots Based on Scale Invariant Method},
      booktitle = {2019 {IEEE} International Symposium on Safety, Security, and Rescue Robotics ({SSRR})},
      year      = {2019},
      month     = sep,
      volume    = {1},
      publisher = {{IEEE}},
      address   = {Wurzburg, Germany},
      doi       = {10.1109/ssrr.2019.8848951},
      url       = {https://ieeexplore.ieee.org/document/8848951},
      abstract  = {This paper addresses the problem of 3D registration of outdoor environments combining heterogeneous datasets acquired from unmanned aerial (UAV) and ground (UGV) vehicles. In order to solve this problem, we introduced a novel Scale Invariant Registration Method (SIRM) for semi-automated registration of 3D point clouds. The method is capable of coping with an arbitrary scale difference between the point clouds, without any information about their initial position and orientation. Furthermore, the SIRM does not require having a good initial overlap between two heterogeneous datasets. Our method strikes an elegant balance between the existing fully automated 3D registration systems (which often fail in the case of heterogeneous datasets and harsh outdoor environments) and fully manual registration approaches (which are labour-intensive). The experimental validation of the proposed 3D heterogeneous registration system was performed on large-scale datasets representing unstructured and harsh outdoor environments, demonstrating the potential and benefits of the proposed 3D registration system in real-world environments.},
      project   = {NRTP},
      unit      = {meca-ras},
    }

  • D. Doroftei and H. De Smet, “Evaluating Human Factors for Drone Operations using Simulations and Standardized Tests," in 10th International Conference on Applied Human Factors and Ergonomics (AHFE 2019), Washington DC, USA, 2019.
    [BibTeX] [Abstract] [Download PDF] [DOI]

    This poster publication presents an overview of the Alphonse project on the development of new training curricula to reduce the number of drone incidents due to human error.

    @inproceedings{doroftei2019alphonse,
      author       = {Doroftei, Daniela and De Smet, Han},
      title        = {Evaluating Human Factors for Drone Operations using Simulations and Standardized Tests},
      booktitle    = {10th International Conference on Applied Human Factors and Ergonomics (AHFE 2019)},
      year         = {2019},
      month        = jul,
      organization = {AHFE},
      publisher    = {Springer},
      address      = {Washington DC, USA},
      doi          = {10.5281/zenodo.3742199},
      url          = {http://mecatron.rma.ac.be/pub/2019/Poster_Alphonse_Print.pdf},
      abstract     = {This poster publication presents an overview of the Alphonse project on the development of new training curricula to reduce the number of drone incidents due to human error.},
      project      = {Alphonse},
      unit         = {meca-ras},
    }

  • R. Lahouli, M. H. Chaudhary, S. Basak, and B. Scheers, “Tracking of Rescue Workers in Harsh Indoor and Outdoor Environments," in Ad-Hoc, Mobile, and Wireless Networks – 18th International Conference on Ad-Hoc Networks and Wireless, ADHOC-NOW 2019, Luxembourg, Luxembourg, October 1-3, 2019, Proceedings, 2019, p. 48–61.
    [BibTeX] [Abstract] [Download PDF] [DOI]

    Making use of reliable and precise location and tracking systems is essential to save firefighters lives during fire operations and to speed up the rescue intervention. The issue is that Global Navigation Satellite System (GNSS) (e.g., GPS and Galileo) is not always available especially in harsh wireless environments such as inside buildings and in dense forests. This is why GNSS technology needs to be combined with auxiliary sensors like inertial measurement units (IMU) and ultra-wideband (UWB) radios for ranging to enhance the availability and the accuracy of the positioning system. In this paper, we report our work in the scope of the AIOSAT (Autonomous Indoor/Outdoor Safety Tracking System) project, funded under the EU H2020 framework. In this project, the Royal Military Academy (RMA) is responsible for developing a solution to measure inter-distances between firefighters, based on IEEE Std 802.15.4 compliant UWB radios. For these inter-distance measurements, accuracy better than 50 cm is obtained with high availability and robustness. Medium access control based on time division multiple access (TDMA) mechanism is also implemented to solve the conflict to access the UWB channel. As a result, each node in a network can perform range measurements to its neighbors in less than 84 ms. In addition, in this project, we are in charge of developing a long-range narrow-band communication solution based on LoRa and Nb-IoT to report updated positions to the brigade leader and the command center.

    @inproceedings{DBLP:conf/adhoc-now/LahouliCBS19,
      author    = {Lahouli, Rihab and Chaudhary, Muhammad Hafeez and Basak, Sanjoy and Scheers, Bart},
      editor    = {Palattella, Maria Rita and Scanzio, Stefano and Ergen, Sinem Coleri},
      title     = {Tracking of Rescue Workers in Harsh Indoor and Outdoor Environments},
      booktitle = {Ad-Hoc, Mobile, and Wireless Networks - 18th International Conference
      on Ad-Hoc Networks and Wireless, {ADHOC-NOW} 2019, Luxembourg, Luxembourg,
      October 1-3, 2019, Proceedings},
      series    = {Lecture Notes in Computer Science},
      volume    = {11803},
      pages     = {48--61},
      publisher = {Springer},
      year      = {2019},
      doi       = {10.1007/978-3-030-31831-4_4},
      timestamp = {Thu, 26 Sep 2019 14:42:25 +0200},
      biburl    = {https://dblp.org/rec/conf/adhoc-now/LahouliCBS19.bib},
      bibsource = {dblp computer science bibliography, https://dblp.org},
      abstract  = {Making use of reliable and precise location and tracking systems is essential to save firefighters lives during fire operations and to speed up the rescue intervention. The issue is that Global Navigation Satellite System (GNSS) (e.g., GPS and Galileo) is not always available especially in harsh wireless environments such as inside buildings and in dense forests. This is why GNSS technology needs to be combined with auxiliary sensors like inertial measurement units (IMU) and ultra-wideband (UWB) radios for ranging to enhance the availability and the accuracy of the positioning system. In this paper, we report our work in the scope of the AIOSAT (Autonomous Indoor/Outdoor Safety Tracking System) project, funded under the EU H2020 framework. In this project, the Royal Military Academy (RMA) is responsible for developing a solution to measure inter-distances between firefighters, based on IEEE Std 802.15.4 compliant UWB radios. For these inter-distance measurements, accuracy better than 50 cm is obtained with high availability and robustness. Medium access control based on time division multiple access (TDMA) mechanism is also implemented to solve the conflict to access the UWB channel. As a result, each node in a network can perform range measurements to its neighbors in less than 84 ms. In addition, in this project, we are in charge of developing a long-range narrow-band communication solution based on LoRa and Nb-IoT to report updated positions to the brigade leader and the command center.},
      url       = {https://www.researchgate.net/publication/336050955_Tracking_of_Rescue_Workers_in_Harsh_Indoor_and_Outdoor_Environments},
      unit      = {ciss},
    }

  • A. Coluccia, A. Fascista, A. Schumann, L. Sommer, M. Ghenescu, T. Piatrik, G. De Cubber, M. Nalamati, A. Kapoor, M. Saqib, N. Sharma, M. Blumenstein, V. Magoulianitis, D. Ataloglou, A. Dimou, D. Zarpalas, P. Daras, C. Craye, S. Ardjoune, D. De la Iglesia, M. Mández, R. Dosil, and I. González, “Drone-vs-Bird Detection Challenge at IEEE AVSS2019," in 2019 16th IEEE International Conference on Advanced Video and Signal Based Surveillance (AVSS), 2019, pp. 1-7.
    [BibTeX] [Abstract] [Download PDF] [DOI]

    This paper presents the second edition of the “drone-vs-bird” detection challenge, launched within the activities of the 16-th IEEE International Conference on Advanced Video and Signal-based Surveillance (AVSS). The challenge’s goal is to detect one or more drones appearing at some point in video sequences where birds may be also present, together with motion in background or foreground. Submitted algorithms should raise an alarm and provide a position estimate only when a drone is present, while not issuing alarms on birds, nor being confused by the rest of the scene. This paper reports on the challenge results on the 2019 dataset, which extends the first edition dataset provided by the SafeShore project with additional footage under different conditions.

    @inproceedings{8909876,
      author    = {Coluccia, A. and Fascista, A. and Schumann, A. and Sommer, L. and Ghenescu, M. and Piatrik, T. and De Cubber, G. and Nalamati, M. and Kapoor, A. and Saqib, M. and Sharma, N. and Blumenstein, M. and Magoulianitis, V. and Ataloglou, D. and Dimou, A. and Zarpalas, D. and Daras, P. and Craye, C. and Ardjoune, S. and {De la Iglesia}, D. and Mández, M. and Dosil, R. and González, I.},
      title     = {Drone-vs-Bird Detection Challenge at IEEE AVSS2019},
      booktitle = {2019 16th IEEE International Conference on Advanced Video and Signal Based Surveillance (AVSS)},
      year      = {2019},
      pages     = {1--7},
      doi       = {10.1109/AVSS.2019.8909876},
      url       = {https://ieeexplore.ieee.org/abstract/document/8909876},
      abstract  = {This paper presents the second edition of the “drone-vs-bird” detection challenge, launched within the activities of the 16-th IEEE International Conference on Advanced Video and Signal-based Surveillance (AVSS). The challenge's goal is to detect one or more drones appearing at some point in video sequences where birds may be also present, together with motion in background or foreground. Submitted algorithms should raise an alarm and provide a position estimate only when a drone is present, while not issuing alarms on birds, nor being confused by the rest of the scene. This paper reports on the challenge results on the 2019 dataset, which extends the first edition dataset provided by the SafeShore project with additional footage under different conditions.},
      project   = {SafeShore,MarSur},
      unit      = {meca-ras},
    }

  • D. Leca, V. Cadenat, T. Sentenac, A. Durand-Petiteville, F. Gouaisbaut, and E. Le Flécher, “Sensor-based Obstacles Avoidance Using Spiral Controllers For an Aircraft Maintenance Inspection Robot," in Proceeding of European Control Conference, 2019, p. 7.
    [BibTeX]
    @inproceedings{ECC_2019,
      author    = {Leca, D. and Cadenat, V. and Sentenac, T. and Durand-Petiteville, A. and Gouaisbaut, F. and Le Flécher, E.},
      title     = {Sensor-based Obstacles Avoidance Using Spiral Controllers For an Aircraft Maintenance Inspection Robot},
      booktitle = {Proceeding of European Control Conference},
      year      = {2019},
      address   = {Naples, Italy},
      pages     = {7},
    }

  • E. Le Flécher, A. Durand-Petiteville, F. Gouaisbaut, V. Cadenat, S. Vougioukas, and T. Sentenac, “Nonlinear Output Feedback for Autonomous U-turn Maneuvers of a Robot in Orchard Headlands," in Proceedings of the 16th International Conference on Informatics in Control, Automation and Robotics, 2019, p. 355–362.
    [BibTeX] [DOI]
    @inproceedings{ICINCO_2019_1,
      author    = {Le Flécher, E. and Durand-Petiteville, A. and Gouaisbaut, F. and Cadenat, V. and Vougioukas, S. and Sentenac, T.},
      title     = {Nonlinear Output Feedback for Autonomous U-turn Maneuvers of a Robot in Orchard Headlands},
      booktitle = {Proceedings of the 16th International Conference on Informatics in Control, Automation and Robotics},
      year      = {2019},
      pages     = {355--362},
      publisher = {SCITEPRESS - Science and Technology Publications},
      address   = {Prague, Czech Republic},
      isbn      = {978-989-758-380-3},
      doi       = {10.5220/0007918803550362},
    }

  • E. Flécher, A. Durand-Petiteville, V. Cadenat, and T. Sentenac, “Visual Predictive Control of Robotic Arms with Overlapping Workspace," in Proceedings of the 16th International Conference on Informatics in Control, Automation and Robotics, 2019, p. 130–137.
    [BibTeX] [DOI]
    @inproceedings{ICINCO_2019_2,
      author    = {Le Flécher, E. and Durand-Petiteville, A. and Cadenat, V. and Sentenac, T.},
      title     = {Visual Predictive Control of Robotic Arms with Overlapping Workspace},
      booktitle = {Proceedings of the 16th International Conference on Informatics in Control, Automation and Robotics},
      year      = {2019},
      pages     = {130--137},
      publisher = {SCITEPRESS - Science and Technology Publications},
      address   = {Prague, Czech Republic},
      isbn      = {978-989-758-380-3},
      doi       = {10.5220/0008119001300137},
    }

  • S. Papili, Y. Dupont, and V. Van Lancker, “Marine ecosystem engineers: a challenge for mine countermeasure," in Conference Proceedings of 5th Underwater Acoustics Conference and Exhibition, 2019.
    [BibTeX]
    @inproceedings{sonia02,
      author       = {Papili, S. and Dupont, Y. and Van Lancker, V.},
      title        = {Marine ecosystem engineers: a challenge for mine countermeasure},
      booktitle    = {Conference Proceedings of 5th Underwater Acoustics Conference and Exhibition},
      year         = {2019},
      issn         = {2408-0195},
      organization = {UACE},
      project      = {DISCIMBA},
    }

2018

  • K. Hasselmann, F. Robert, and M. Birattari, “Automatic design of communication-based behaviors for robot swarms," in Swarm Intelligence: 11th International Conference, ANTS 2018, Rome, Italy, October 29–31, 2018, Proceedings 11, 2018, p. 16–29.
    [BibTeX]
    @inproceedings{hasselmann2018automatic,
      author       = {Hasselmann, Ken and Robert, Fr{\'e}d{\'e}ric and Birattari, Mauro},
      title        = {Automatic design of communication-based behaviors for robot swarms},
      booktitle    = {Swarm Intelligence: 11th International Conference, ANTS 2018, Rome, Italy, October 29--31, 2018, Proceedings 11},
      year         = {2018},
      pages        = {16--29},
      organization = {Springer International Publishing},
    }

  • I. Lahouli, R. Haelterman, Z. Chtourou, G. De Cubber, and R. Attia, “Pedestrian Detection and Tracking in Thermal Images from Aerial MPEG videos," in International Conference on Computer Vision Theory and Applications, Funchal, Portugal, 2018, p. 487–495.
    [BibTeX] [Abstract] [Download PDF] [DOI]

    Video surveillance for security and intelligence purposes has been a precious tool as long as the technology has been available but is computationally heavy. In this paper, we present a fast and efficient framework for pedestrian detection and tracking using thermal images. It is designed for automatic surveillance applications in an outdoor environment like preventing border intrusions or attacks on sensitive facilities using image and video processing techniques implemented on-board Unmanned Aerial Vehicles (UAV)s. The proposed framework exploits raw H.264 compressed video streams with limited computational overhead. Our work is driven by the fact that Motion Vectors (MV) are an integral part of any video compression technique, by day and night capabilities of thermal sensors and the distinguished thermal signature of humans. Six different scenarios were carried out and filmed using a thermal camera in order to simulate suspicious events. The obtained results show the effectiveness of the proposed framework and its low computational requirements which make it adequate for on-board processing and real-time applications.

    @inproceedings{lahouli2018pedestrian,
      author    = {Lahouli, Ichraf and Haelterman, Robby and Chtourou, Zied and De Cubber, Geert and Attia, Rabah},
      title     = {Pedestrian Detection and Tracking in Thermal Images from Aerial {MPEG} videos},
      booktitle = {International Conference on Computer Vision Theory and Applications},
      year      = {2018},
      pages     = {487--495},
      publisher = {{SCITEPRESS} - Science and Technology Publications},
      volume    = {1},
      address   = {Funchal, Portugal},
      doi       = {10.5220/0006723704870495},
      url       = {https://www.scitepress.org/Papers/2018/67237/67237.pdf},
      abstract  = {Video surveillance for security and intelligence purposes has been a precious tool as long as the technology has been available but is computationally heavy. In this paper, we present a fast and efficient framework for pedestrian detection and tracking using thermal images. It is designed for automatic surveillance applications in an outdoor environment like preventing border intrusions or attacks on sensitive facilities using image and video processing techniques implemented on-board Unmanned Aerial Vehicles (UAV)s. The proposed framework exploits raw H.264 compressed video streams with limited computational overhead. Our work is driven by the fact that Motion Vectors (MV) are an integral part of any video compression technique, by day and night capabilities of thermal sensors and the distinguished thermal signature of humans. Six different scenarios were carried out and filmed using a thermal camera in order to simulate suspicious events. The obtained results show the effectiveness of the proposed framework and its low computational requirements which make it adequate for on-board processing and real-time applications.},
      project   = {SafeShore},
      unit      = {meca-ras},
    }

  • I. Lahouli, R. Haelterman, G. De Cubber, Z. Chtourou, and R. Attia, “A fast and robust approach for human detection in thermal imagery for surveillance using UAVs," in 15th Multi-Conference on Systems, Signals and Devices, Hammamet, Tunisia, 2018.
    [BibTeX] [Abstract] [Download PDF] [DOI]

    The use of Unmanned Aerial Vehicles (UAV)s has spread in various fields such as surveillance and search and rescue purposes. This leads to many research efforts that are focusing on the detection of people using aerial images. However, these platforms have limited resources of power and bandwidth which cause many restrictions and challenges. The use of the thermal sensors offers the possibility to work day and night and the detection of the human bodies because of its distinguished thermal signature. In this paper, we propose a fast and efficient method for the detection of humans in outdoor scenes using thermal images taken from aerial platforms. We start by extracting the bright blobs based on a conjunction between a saliency map and a contrast enhancement techniques. Then, we use the Discrete Chebyshev Moments as a shape descriptor and finally, we classify the blobs into humans and non-humans. The proposed framework is first tested using a well-known thermal database that covers a wide range of lighting and weather conditions and further and then compared to an also well-known blob extractor which is the Maximally Stable Extremal Regions detector (MSER). The results highlight the effectiveness and even the superiority of the proposed method in terms of true positives, false alarms and processing time.

    @inproceedings{lahouli2018fast,
      author    = {Lahouli, Ichraf and Haelterman, Robby and De Cubber, Geert and Chtourou, Zied and Attia, Rabah},
      title     = {A fast and robust approach for human detection in thermal imagery for surveillance using {UAVs}},
      booktitle = {15th Multi-Conference on Systems, Signals and Devices},
      year      = {2018},
      volume    = {1},
      address   = {Hammamet, Tunisia},
      doi       = {10.1109/ssd.2018.8570637},
      url       = {https://ieeexplore.ieee.org/document/8570637},
      abstract  = {The use of Unmanned Aerial Vehicles (UAV)s has spread in various fields such as surveillance and search and rescue purposes. This leads to many research efforts that are focusing on the detection of people using aerial images. However, these platforms have limited resources of power and bandwidth which cause many restrictions and challenges. The use of the thermal sensors offers the possibility to work day and night and the detection of the human bodies because of its distinguished thermal signature. In this paper, we propose a fast and efficient method for the detection of humans in outdoor scenes using thermal images taken from aerial platforms. We start by extracting the bright blobs based on a conjunction between a saliency map and a contrast enhancement techniques. Then, we use the Discrete Chebyshev Moments as a shape descriptor and finally, we classify the blobs into humans and non-humans. The proposed framework is first tested using a well-known thermal database that covers a wide range of lighting and weather conditions and further and then compared to an also well-known blob extractor which is the Maximally Stable Extremal Regions detector (MSER). The results highlight the effectiveness and even the superiority of the proposed method in terms of true positives, false alarms and processing time.},
      file      = {:lahouli2018fast - A Fast and Robust Approach for Human Detection in Thermal Imagery for Surveillance Using UAVs.PDF:PDF},
      project   = {SafeShore},
      unit      = {meca-ras},
    }

  • N. Nauwynck, H. Balta, G. De Cubber, and H. Sahli, “In-flight launch of unmanned aerial vehicles," in International Symposium on Measurement and Control in Robotics ISMCR2018, Mons, Belgium, 2018.
    [BibTeX] [Abstract] [Download PDF] [DOI]

    This paper considers the development of a system to enable the in-flight-launch of one aerial system by another. The paper will discuss how an optimal release mechanism was developed, taking into account the aerodynamics of one specific mother and child UAV. Furthermore, it will discuss the PID-based control concept that was introduced in order to autonomously stabilize the child UAV after being released from the mothership UAV. Finally, the paper will show how the concept of a mothership UAV + child UAV combination could be usefully taken into advantage in the context of a search and rescue operation.

    @inproceedings{nauwynck2018flight,
      author    = {Nauwynck, Niels and Balta, Haris and De Cubber, Geert and Sahli, Hichem},
      title     = {In-flight launch of unmanned aerial vehicles},
      booktitle = {International Symposium on Measurement and Control in Robotics ISMCR2018},
      year      = {2018},
      volume    = {1},
      address   = {Mons, Belgium},
      doi       = {10.5281/zenodo.1462605},
      url       = {http://mecatron.rma.ac.be/pub/2018/Paper_Niels.pdf},
      keywords  = {Unmanned Aerial Vehicles, Control, Autonomous stabilization, Search and Rescue drones, Heterogeneous systems},
      abstract  = {This paper considers the development of a system to enable the in-flight-launch of one aerial system by another. The paper will discuss how an optimal release mechanism was developed, taking into account the aerodynamics of one specific mother and child UAV. Furthermore, it will discuss the PID-based control concept that was introduced in order to autonomously stabilize the child UAV after being released from the mothership UAV. Finally, the paper will show how the concept of a mothership UAV + child UAV combination could be usefully taken into advantage in the context of a search and rescue operation.},
      file      = {:nauwynck2018flight - In Flight Launch of Unmanned Aerial Vehicles.PDF:PDF},
      project   = {NRTP},
      unit      = {meca-ras},
    }

  • D. Doroftei and G. De Cubber, “Qualitative and quantitative validation of drone detection systems," in International Symposium on Measurement and Control in Robotics ISMCR2018, Mons, Belgium, 2018.
    [BibTeX] [Abstract] [Download PDF] [DOI]

    As drones are more and more entering our world, so comes the need to regulate the access to airspace for these systems. A necessary tool in order to do this is a means of detecting these drones. Numerous commercial and non-commercial parties have started the development of such drone detection systems. A big problem with these systems is that the evaluation of the performance of drone detection systems is a difficult operation, which requires the careful consideration of all technical and non-technical aspects of the system under test. Indeed, weather conditions and small variations in the appearance of the targets can have a huge difference on the performance of the systems. In order to provide a fair evaluation and an honest comparison between systems, it is therefore paramount that a stringent validation procedure is followed. Moreover, the validation methodology needs to find a compromise between the often contrasting requirements of end users (who want tests to be performed in operational conditions) and platform developers (who want tests to be performed that are statistically relevant). Therefore, we propose in this paper a qualitative and quantitative validation methodology for drone detection systems. The proposed validation methodology seeks to find this compromise between operationally relevant benchmarking (by providing qualitative benchmarking under varying environmental conditions) and statistically relevant evaluation (by providing quantitative score sheets under strictly described conditions).

    @inproceedings{doroftei2018qualitative,
      author    = {Doroftei, Daniela and De Cubber, Geert},
      title     = {Qualitative and quantitative validation of drone detection systems},
      booktitle = {International Symposium on Measurement and Control in Robotics ISMCR2018},
      year      = {2018},
      volume    = {1},
      address   = {Mons, Belgium},
      doi       = {10.5281/zenodo.1462586},
      url       = {http://mecatron.rma.ac.be/pub/2018/Paper_Daniela.pdf},
      keywords  = {Unmanned Aerial Vehicles, Drones, Detection systems, Drone detection, Test and evaluation methods},
      abstract  = {As drones are more and more entering our world, so comes the need to regulate the access to airspace for these systems. A necessary tool in order to do this is a means of detecting these drones. Numerous commercial and non-commercial parties have started the development of such drone detection systems. A big problem with these systems is that the evaluation of the performance of drone detection systems is a difficult operation, which requires the careful consideration of all technical and non-technical aspects of the system under test. Indeed, weather conditions and small variations in the appearance of the targets can have a huge difference on the performance of the systems. In order to provide a fair evaluation and an honest comparison between systems, it is therefore paramount that a stringent validation procedure is followed. Moreover, the validation methodology needs to find a compromise between the often contrasting requirements of end users (who want tests to be performed in operational conditions) and platform developers (who want tests to be performed that are statistically relevant). Therefore, we propose in this paper a qualitative and quantitative validation methodology for drone detection systems. The proposed validation methodology seeks to find this compromise between operationally relevant benchmarking (by providing qualitative benchmarking under varying environmental conditions) and statistically relevant evaluation (by providing quantitative score sheets under strictly described conditions).},
      file      = {:doroftei2018qualitative - Qualitative and Quantitative Validation of Drone Detection Systems.PDF:PDF},
      project   = {SafeShore},
      unit      = {meca-ras},
    }

  • H. Balta, J. Velagic, G. De Cubber, W. Bosschaerts, and B. Siciliano, “Fast Statistical Outlier Removal Based Method for Large 3D Point Clouds of Outdoor Environments," in 12th IFAC SYMPOSIUM ON ROBOT CONTROL – SYROCO 2018, Budapest, Hungary, 2018, p. 348–353.
    [BibTeX] [Abstract] [Download PDF] [DOI]

    This paper proposes a very effective method for data handling and preparation of the input 3D scans acquired from laser scanner mounted on the Unmanned Ground Vehicle (UGV). The main objectives are to improve and speed up the process of outliers removal for large-scale outdoor environments. This process is necessary in order to filter out the noise and to downsample the input data which will spare computational and memory resources for further processing steps, such as 3D mapping of rough terrain and unstructured environments. It includes the Voxel-subsampling and Fast Cluster Statistical Outlier Removal (FCSOR) subprocesses. The introduced FCSOR represents an extension on the Statistical Outliers Removal (SOR) method which is effective for both homogeneous and heterogeneous point clouds. This method is evaluated on real data obtained in outdoor environment.

    @inproceedings{balta2018fast01,
      author    = {Balta, Haris and Velagic, Jasmin and De Cubber, Geert and Bosschaerts, Walter and Siciliano, Bruno},
      title     = {Fast Statistical Outlier Removal Based Method for Large {3D} Point Clouds of Outdoor Environments},
      booktitle = {12th IFAC SYMPOSIUM ON ROBOT CONTROL - SYROCO 2018},
      year      = {2018},
      series    = {{IFAC}-{PapersOnLine}},
      volume    = {51},
      number    = {22},
      pages     = {348--353},
      publisher = {Elsevier {BV}},
      address   = {Budapest, Hungary},
      doi       = {10.1016/j.ifacol.2018.11.566},
      url       = {https://www.sciencedirect.com/science/article/pii/S2405896318332725},
      abstract  = {This paper proposes a very effective method for data handling and preparation of the input 3D scans acquired from laser scanner mounted on the Unmanned Ground Vehicle (UGV). The main objectives are to improve and speed up the process of outliers removal for large-scale outdoor environments. This process is necessary in order to filter out the noise and to downsample the input data which will spare computational and memory resources for further processing steps, such as 3D mapping of rough terrain and unstructured environments. It includes the Voxel-subsampling and Fast Cluster Statistical Outlier Removal (FCSOR) subprocesses. The introduced FCSOR represents an extension on the Statistical Outliers Removal (SOR) method which is effective for both homogeneous and heterogeneous point clouds. This method is evaluated on real data obtained in outdoor environment.},
      file      = {:balta2018fast - Fast Statistical Outlier Removal Based Method for Large 3D Point Clouds of Outdoor Environments.PDF:PDF},
      project   = {NRTP},
      unit      = {meca-ras},
    }

  • H. Balta, J. Velagic, G. De Cubber, W. Bosschaerts, and B. Siciliano, “Fast Iterative 3D Mapping for Large-Scale Outdoor Environments with Local Minima Escape Mechanism," in 12th IFAC SYMPOSIUM ON ROBOT CONTROL – SYROCO 2018, Budapest, Hungary, 2018, p. 298–305.
    [BibTeX] [Abstract] [Download PDF] [DOI]

    This paper introduces a novel iterative 3D mapping framework for large scale natural terrain and complex environments. The framework is based on an Iterative-Closest-Point (ICP) algorithm and an iterative error minimization mechanism, allowing robust 3D map registration. This was accomplished by performing pairwise scan registrations without any prior known pose estimation information and taking into account the measurement uncertainties due to the 6D coordinates (translation and rotation) deviations in the acquired scans. Since the ICP algorithm does not guarantee to escape from local minima during the mapping, new algorithms for the local minima estimation and local minima escape process were proposed. The proposed framework is validated using large scale field test data sets. The experimental results were compared with those of standard, generalized and non-linear ICP registration methods and the performance evaluation is presented, showing improved performance of the proposed 3D mapping framework.

    @InProceedings{balta2018fast02,
      author    = {Balta, Haris and Velagic, Jasmin and De Cubber, Geert and Bosschaerts, Walter and Siciliano, Bruno},
      title     = {Fast Iterative {3D} Mapping for Large-Scale Outdoor Environments with Local Minima Escape Mechanism},
      booktitle = {12th IFAC SYMPOSIUM ON ROBOT CONTROL - SYROCO 2018},
      journal   = {{IFAC}-{PapersOnLine}},
      volume    = {51},
      number    = {22},
      pages     = {298--305},
      year      = {2018},
      publisher = {Elsevier {BV}},
      address   = {Budapest, Hungary},
      abstract  = {This paper introduces a novel iterative 3D mapping framework for large scale natural terrain and complex environments. The framework is based on an Iterative-Closest-Point (ICP) algorithm and an iterative error minimization mechanism, allowing robust 3D map registration. This was accomplished by performing pairwise scan registrations without any prior known pose estimation information and taking into account the measurement uncertainties due to the 6D coordinates (translation and rotation) deviations in the acquired scans. Since the ICP algorithm does not guarantee to escape from local minima during the mapping, new algorithms for the local minima estimation and local minima escape process were proposed. The proposed framework is validated using large scale field test data sets. The experimental results were compared with those of standard, generalized and non-linear ICP registration methods and the performance evaluation is presented, showing improved performance of the proposed 3D mapping framework.},
      doi       = {10.1016/j.ifacol.2018.11.558},
      url       = {https://www.sciencedirect.com/science/article/pii/S2405896318332646},
      project   = {NRTP},
      unit      = {meca-ras}
    }

  • G. De Cubber, “Legal Issues in Search and Rescue UAV operations," in IROS2018 forum on Legal Issues, Cybersecurity and Policymakers Implication in AI Robotics, Madrid, Spain, 2018.
    [BibTeX]
    @InProceedings{de2018legal,
      author    = {De Cubber, Geert},
      title     = {Legal Issues in Search and Rescue {UAV} operations},
      booktitle = {IROS2018 forum on Legal Issues, Cybersecurity and Policymakers Implication in AI Robotics},
      address   = {Madrid, Spain},
      year      = {2018},
      project   = {ICARUS},
      unit      = {meca-ras}
    }

  • B. Pairet, G. C. Gonzalez, and L. Jacques, “Reference-less algorithm for circumstellar disks imaging," in Proc. Int. Traveling Workshop Interact. Between Sparse Models Technol.(iTWIST), 2018.
    [BibTeX]
    @inproceedings{pairet2018itwist,
      title     = {Reference-less algorithm for circumstellar disks imaging},
      author    = {Pairet, Beno{\^\i}t and Gomez Gonzalez, C. and Jacques, Laurent},
      booktitle = {Proc. Int. Traveling Workshop Interact. Between Sparse Models Technol.(iTWIST)},
      year      = {2018}
    }

  • F. Arrichiello, S. Sarkar, S. Chiaverini, and G. Antonelli, “Localization of an Array of Hydrophones Towed by an Autonomous Underwater Vehicle," in 2018 26th Mediterranean Conference on Control and Automation (MED), 2018, p. 601–606.
    [BibTeX]
    @inproceedings{arrichiello2018localization,
      author       = {Arrichiello, Filippo and Sarkar, Soumic and Chiaverini, Stefano and Antonelli, Gianluca},
      title        = {Localization of an Array of Hydrophones Towed by an Autonomous Underwater Vehicle},
      booktitle    = {2018 26th Mediterranean Conference on Control and Automation (MED)},
      pages        = {601--606},
      organization = {IEEE},
      year         = {2018}
    }

2017

  • G. De Cubber, R. Shalom, A. Coluccia, O. Borcan, R. Chamrád, T. Radulescu, E. Izquierdo, and Z. Gagov, “The SafeShore system for the detection of threat agents in a maritime border environment," in IARP Workshop on Risky Interventions and Environmental Surveillance, Les Bon Villers, Belgium, 2017.
    [BibTeX] [Abstract] [Download PDF] [DOI]

    This paper discusses the goals of the H2020-SafeShore project, which has as a main goal to cover existing gaps in coastal border surveillance, increasing internal security by preventing cross-border crime such as trafficking in human beings and the smuggling of drugs. It is designed to be integrated with existing systems and create a continuous detection line along the border

    @InProceedings{de2017safeshore,
      author       = {De Cubber, Geert and Shalom, Ron and Coluccia, Angelo and Borcan, Octavia and Chamr{\'a}d, Richard and Radulescu, Tudor and Izquierdo, Ebroul and Gagov, Zhelyazko},
      title        = {The {SafeShore} system for the detection of threat agents in a maritime border environment},
      booktitle    = {IARP Workshop on Risky Interventions and Environmental Surveillance},
      year         = {2017},
      organization = {IARP},
      address      = {Les Bon Villers, Belgium},
      abstract     = {This paper discusses the goals of the H2020-SafeShore project, which has as a main goal to cover existing gaps in coastal border surveillance, increasing internal security by preventing cross-border crime such as trafficking in human beings and the smuggling of drugs. It is designed to be integrated with existing systems and create a continuous detection line along the border},
      doi          = {10.5281/zenodo.1115552},
      keywords     = {SafeShore, Counter UAV, Counter RPAS},
      language     = {en},
      project      = {Safeshore},
      url          = {http://mecatron.rma.ac.be/pub/2017/SafeShore%20Abstract%20RISE-2017_.pdf},
      unit         = {meca-ras}
    }

  • A. Coluccia, M. Ghenescu, T. Piatrik, G. D. Cubber, A. Schumann, L. Sommer, J. Klatte, T. Schuchert, J. Beyerer, M. Farhadi, R. Amandi, C. Aker, S. Kalkan, M. Saqib, N. Sharma, S. Daud, K. Makkah, and M. Blumenstein, “Drone-vs-Bird detection challenge at IEEE AVSS2017," in 2017 14th IEEE International Conference on Advanced Video and Signal Based Surveillance (AVSS), Lecce, Italy, 2017, p. 1–6.
    [BibTeX] [Abstract] [Download PDF] [DOI]

    Small drones are a rising threat due to their possible misuse for illegal activities, in particular smuggling and terrorism. The project SafeShore, funded by the European Commission under the Horizon 2020 program, has launched the drone-vs-bird detection challenge to address one of the many technical issues arising in this context. The goal is to detect a drone appearing at some point in a video where birds may be also present: the algorithm should raise an alarm and provide a position estimate only when a drone is present, while not issuing alarms on birds. This paper reports on the challenge proposal, evaluation, and results

    @InProceedings{coluccia2017drone,
      author       = {Coluccia, Angelo and Ghenescu, Marian and Piatrik, Tomas and De Cubber, Geert and Schumann, Arne and Sommer, Lars and Klatte, Johannes and Schuchert, Tobias and Beyerer, Juergen and Farhadi, Mohammad and Amandi, Ruhallah and Aker, Cemal and Kalkan, Sinan and Saqib, Muhammad and Sharma, Nabin and Khan, Sultan Daud and Blumenstein, Michael},
      title        = {Drone-vs-Bird detection challenge at {IEEE} {AVSS}2017},
      booktitle    = {2017 14th {IEEE} International Conference on Advanced Video and Signal Based Surveillance ({AVSS})},
      year         = {2017},
      month        = aug,
      organization = {IEEE},
      publisher    = {{IEEE}},
      pages        = {1--6},
      address      = {Lecce, Italy},
      abstract     = {Small drones are a rising threat due to their possible misuse for illegal activities, in particular smuggling and terrorism. The project SafeShore, funded by the European Commission under the Horizon 2020 program, has launched the drone-vs-bird detection challenge to address one of the many technical issues arising in this context. The goal is to detect a drone appearing at some point in a video where birds may be also present: the algorithm should raise an alarm and provide a position estimate only when a drone is present, while not issuing alarms on birds. This paper reports on the challenge proposal, evaluation, and results},
      doi          = {10.1109/avss.2017.8078464},
      project      = {SafeShore},
      url          = {http://mecatron.rma.ac.be/pub/2017/WOSDETCpaper%20(1).pdf},
      unit         = {meca-ras}
    }

  • I. Lahouli, R. Haelterman, Z. Chtourou, G. De Cubber, and R. Attia, “Pedestrian Tracking in the Compressed Domain Using Thermal Images," in VIIth International Workshop on Representation, analysis and recognition of shape and motion from Image data, Savoie, France, 2017.
    [BibTeX] [Abstract] [Download PDF] [DOI]

    The video surveillance of sensitive facilities or borders poses many challenges like the high bandwidth requirements and the high computational cost. In this paper, we propose a framework for detecting and tracking pedestrians in the compressed domain using thermal images. Firstly, the detection process uses a conjunction between saliency maps and contrast enhancement techniques followed by a global image content descriptor based on Discrete Chebychev Moments (DCM) and a linear Support Vector Machine (SVM) as a classifier. Secondly, the tracking process exploits raw H.264 compressed video streams with limited computational overhead. In addition to two, well-known, public datasets, we have generated our own dataset by carrying six different scenarios of suspicious events using a thermal camera. The obtained results show the effectiveness and the low computational requirements of the proposed framework which make it suitable for real-time applications and on-board implementation.

    @InProceedings{lahouli2017pedestrian,
      author    = {Lahouli, Ichraf and Haelterman, Robby and Chtourou, Zied and De Cubber, Geert and Attia, Rabah},
      title     = {Pedestrian Tracking in the Compressed Domain Using Thermal Images},
      booktitle = {VIIth International Workshop on Representation, analysis and recognition of shape and motion from Image data},
      volume    = {1},
      number    = {1},
      year      = {2017},
      address   = {Savoie, France},
      abstract  = {The video surveillance of sensitive facilities or borders poses many challenges like the high bandwidth requirements and the high computational cost. In this paper, we propose a framework for detecting and tracking pedestrians in the compressed domain using thermal images. Firstly, the detection process uses a conjunction between saliency maps and contrast enhancement techniques followed by a global image content descriptor based on Discrete Chebychev Moments (DCM) and a linear Support Vector Machine (SVM) as a classifier. Secondly, the tracking process exploits raw H.264 compressed video streams with limited computational overhead. In addition to two, well-known, public datasets, we have generated our own dataset by carrying six different scenarios of suspicious events using a thermal camera. The obtained results show the effectiveness and the low computational requirements of the proposed framework which make it suitable for real-time applications and on-board implementation.},
      doi       = {10.1007/978-3-030-19816-9_3},
      project   = {SafeShore},
      url       = {http://mecatron.rma.ac.be/pub/2017/RFMI2017_LAHOULI.pdf},
      unit      = {meca-ras}
    }

  • D. Lapandic, J. Velagic, and H. Balta, “Framework for automated reconstruction of 3D model from multiple 2D aerial images," in 2017 International Symposium ELMAR, Zadar, Croatia, 2017, pp. 173-176.
    [BibTeX] [Abstract] [Download PDF] [DOI]

    The paper considers a problem of 3D environment model reconstruction from a set of 2D images acquired by the Unmanned Aerial Vehicle (UAV) in near real-time. The designed framework combines the FAST (Features from Accelerated Segment Test) algorithm and optical flow approach for detection of interest image points and adjacent images reconstruction. The robust estimation of camera locations is performed using the image points tracking. The coordinates of 3D points and the projection matrix are computed simultaneously using Structure-from-Motion (SfM) algorithm, from which the 3D model of environment is generated. The designed framework is tested using real image data and video sequences captured with camera mounted on the UAV. The effectiveness and quality of the proposed framework are verified through analyses of accuracy of the 3D model reconstruction and its time execution.

    @INPROCEEDINGS{8124461,
      author    = {Lapandic, D. and Velagic, J. and Balta, H.},
      title     = {Framework for automated reconstruction of 3D model from multiple 2D aerial images},
      booktitle = {2017 International Symposium ELMAR},
      year      = {2017},
      month     = sep,
      pages     = {173--176},
      publisher = {IEEE},
      address   = {Zadar, Croatia},
      abstract  = {The paper considers a problem of 3D environment model reconstruction from a set of 2D images acquired by the Unmanned Aerial Vehicle (UAV) in near real-time. The designed framework combines the FAST (Features from Accelerated Segment Test) algorithm and optical flow approach for detection of interest image points and adjacent images reconstruction. The robust estimation of camera locations is performed using the image points tracking. The coordinates of 3D points and the projection matrix are computed simultaneously using Structure-from-Motion (SfM) algorithm, from which the 3D model of environment is generated. The designed framework is tested using real image data and video sequences captured with camera mounted on the UAV. The effectiveness and quality of the proposed framework are verified through analyses of accuracy of the 3D model reconstruction and its time execution.},
      keywords  = {autonomous aerial vehicles;cameras;feature extraction;image reconstruction;image segmentation;image sensors;image sequences;remotely operated vehicles;video signal processing;automated reconstruction;multiple 2D aerial images;3D environment model reconstruction;UAV;optical flow approach;interest image points;robust estimation;camera locations;image data;3D model reconstruction;unmanned aerial vehicle;adjacent image reconstruction;structure-from-motion algorithm;features from accelerated segment test;Three-dimensional displays;Solid modeling;Image reconstruction;Two dimensional displays;Cameras;Feature extraction;Optical imaging;3D Model reconstruction;Aerial images;Structure from motion;Unmanned aerial vehicle},
      doi       = {10.23919/ELMAR.2017.8124461},
      project   = {NRTP,ICARUS},
      url       = {https://ieeexplore.ieee.org/document/8124461},
      unit      = {meca-ras}
    }

  • E. Le Flécher, A. Durand-Petiteville, V. Cadenat, T. Sentenac, and S. Vougioukas, “Implementation on a harvesting robot of a sensor-based controller performing a u-turn," in Proceedings of IEEE International Workshop of Electronics, Control, Measurement, Signals and their application to Mechatronics, 2017, p. 1–6.
    [BibTeX] [DOI]
    @inproceedings{ECMSM_2017,
      title     = {Implementation on a harvesting robot of a sensor-based controller performing a u-turn},
      author    = {Le Fl{\'e}cher, E. and Durand-Petiteville, A. and Cadenat, V. and Sentenac, T. and Vougioukas, S.},
      booktitle = {Proceedings of IEEE International Workshop of Electronics, Control, Measurement, Signals and their application to Mechatronics},
      publisher = {IEEE},
      year      = {2017},
      month     = may,
      pages     = {1--6},
      isbn      = {978-1-5090-5582-1},
      doi       = {10.1109/ECMSM.2017.7945895}
    }

  • A. Durand-Petiteville, L. E. Flécher, V. Cadenat, T. Sentenac, and S. Vougioukas, “Design of a Sensor-based Controller Performing U-turn to Navigate in Orchards," in ICINCO, 2017, p. 172–181.
    [BibTeX] [DOI]
    @inproceedings{ICINCO_2017,
      title     = {Design of a Sensor-based Controller Performing U-turn to Navigate in Orchards},
      author    = {Durand-Petiteville, A. and Le Fl{\'e}cher, E. and Cadenat, V. and Sentenac, T. and Vougioukas, S.},
      booktitle = {ICINCO},
      publisher = {SCITEPRESS - Science and Technology Publications},
      year      = {2017},
      pages     = {172--181},
      isbn      = {978-989-758-263-9},
      doi       = {10.5220/0006478601720181}
    }

  • F. Arrichiello, S. Sarkar, S. Chiaverini, and G. Antonelli, “Dynamic modelling of a streamer of hydrophones towed with an autonomous underwater vehicle," in International Workshop on Modelling and Simulation for Autonomous Systems, 2017, p. 179–192.
    [BibTeX]
    @inproceedings{arrichiello2017dynamic,
      author       = {Arrichiello, Filippo and Sarkar, Soumic and Chiaverini, Stefano and Antonelli, Gianluca},
      title        = {Dynamic modelling of a streamer of hydrophones towed with an autonomous underwater vehicle},
      booktitle    = {International Workshop on Modelling and Simulation for Autonomous Systems},
      pages        = {179--192},
      organization = {Springer},
      year         = {2017}
    }

  • S. Papili and Y. Dupont, “Shells and shell fragments as modifiers of sediments behavior," in International Meeting of Sedimentology – Book of abstracts, 2017, p. 676.
    [BibTeX]
    @inproceedings{sonia03,
      title     = {Shells and shell fragments as modifiers of sediments behavior},
      author    = {Papili, S. and Dupont, Y.},
      booktitle = {International Meeting of Sedimentology - Book of abstracts},
      pages     = {676},
      project   = {DISCIMBA},
      year      = {2017}
    }

2016

  • M. M. Marques, R. Parreira, V. Lobo, A. Martins, A. Matos, N. Cruz, J. M. Almeida, J. C. Alves, E. Silva, J. Bedkowski, K. Majek, M. Pelka, P. Musialik, H. Ferreira, A. Dias, B. Ferreira, G. Amaral, A. Figueiredo, R. Almeida, F. Silva, D. Serrano, G. Moreno, G. De Cubber, H. Balta, and H. Beglerovic, “Use of multi-domain robots in search and rescue operations — Contributions of the ICARUS team to the euRathlon 2015 challenge," in OCEANS 2016, Shanghai, China, 2016, p. 1–7.
    [BibTeX] [Download PDF] [DOI]
    @InProceedings{marques2016use,
      author       = {Marques, Mario Monteiro and Parreira, Rui and Lobo, Victor and Martins, Alfredo and Matos, Anibal and Cruz, Nuno and Almeida, Jose Miguel and Alves, Jose Carlos and Silva, Eduardo and Bedkowski, Janusz and Majek, Karol and Pelka, Michal and Musialik, Pawel and Ferreira, Hugo and Dias, Andre and Ferreira, Bruno and Amaral, Guilherme and Figueiredo, Andre and Almeida, Rui and Silva, Filipe and Serrano, Daniel and Moreno, German and De Cubber, Geert and Balta, Haris and Beglerovic, Halil},
      title        = {Use of multi-domain robots in search and rescue operations {\textemdash} Contributions of the {ICARUS} team to the {euRathlon} 2015 challenge},
      booktitle    = {{OCEANS} 2016},
      year         = {2016},
      month        = apr,
      organization = {IEEE},
      publisher    = {{IEEE}},
      pages        = {1--7},
      address      = {Shanghai, China},
      doi          = {10.1109/oceansap.2016.7485354},
      project      = {ICARUS},
      url          = {http://mecatron.rma.ac.be/pub/2016/euRathlon2015_paper_final.pdf},
      unit         = {meca-ras}
    }

  • B. Pairet, L. Jacques, G. C. Gonzalez, and O. Absil, “Low rank and group-average sparsity driven convex optimization for direct exoplanets imaging," in Proc. Int. Traveling Workshop Interact. Between Sparse Models Technol.(iTWIST), 2016, p. 24–26.
    [BibTeX]
    @inproceedings{pairet2016itwist,
      title     = {Low rank and group-average sparsity driven convex optimization for direct exoplanets imaging},
      author    = {Pairet, Beno{\^\i}t and Jacques, Laurent and Gomez Gonzalez, C. and Absil, Olivier},
      booktitle = {Proc. Int. Traveling Workshop Interact. Between Sparse Models Technol.(iTWIST)},
      pages     = {24--26},
      year      = {2016}
    }

  • S. Papili and O. Lopera, “High frequency response on seafloor signature: structure for an innovative methodology for modern monitoring," in North Sea Open Science Conference, 2016.
    [BibTeX]
    @inproceedings{sonia04,
      title     = {High frequency response on seafloor signature: structure for an innovative methodology for modern monitoring},
      author    = {Papili, S. and Lopera, O.},
      booktitle = {North Sea Open Science Conference},
      project   = {DISCIMBA},
      year      = {2016}
    }

  • O. Lopera and S. Papili, “An introductory study of the impact of environmental parameters in the performances of imaging sonar systems," in OCEANS, 2016.
    [BibTeX]
    @inproceedings{sonia05,
      title     = {An introductory study of the impact of environmental parameters in the performances of imaging sonar systems},
      author    = {Lopera, O. and Papili, S.},
      booktitle = {OCEANS},
      year      = {2016},
      project   = {DISCIMBA},
      publisher = {MTS IEEE}
    }

  • Th. Wever and S. Papili, “New strategy for predictions bedform migration," in Fifth international Conference on Marine and River Dune Dynamics., 2016, pp. 200-204.
    [BibTeX]
    @inproceedings{sonia06,
      title     = {New strategy for predictions bedform migration},
      author    = {Wever, Th. and Papili, S.},
      booktitle = {Fifth international Conference on Marine and River Dune Dynamics.},
      year      = {2016},
      pages     = {200--204},
      editor    = {Van Landeghem, K. and Garlan, R. and Baas, J.},
      project   = {DISCIMBA},
      publisher = {MARIDV}
    }

2015

  • D. Doroftei, A. Matos, E. Silva, V. Lobo, R. Wagemans, and G. De Cubber, “Operational validation of robots for risky environments," in 8th IARP Workshop on Robotics for Risky Environments, Lisbon, Portugal, 2015.
    [BibTeX] [Abstract] [Download PDF]

    This paper presents an operational test and validation approach for the evaluation of the performance of a range of marine, aerial and ground search and rescue robots. The proposed approach seeks to find a compromise between the traditional rigorous standardized approaches and the open-ended robot competitions. Operational scenarios are defined, including a performance assessment of individual robots but also collective operations where heterogeneous robots cooperate together and with manned teams in search and rescue activities. That way, it is possible to perform a more complete validation of the use of robotic tools in challenging real world scenarios.

    @InProceedings{doroftei2015operational,
      author    = {Doroftei, Daniela and Matos, Anibal and Silva, Eduardo and Lobo, Victor and Wagemans, Rene and De Cubber, Geert},
      title     = {Operational validation of robots for risky environments},
      booktitle = {8th IARP Workshop on Robotics for Risky Environments},
      year      = {2015},
      address   = {Lisbon, Portugal},
      abstract  = {This paper presents an operational test and validation approach for the evaluation of the performance of a range of marine, aerial and ground search and rescue robots. The proposed approach seeks to find a compromise between the traditional rigorous standardized approaches and the open-ended robot competitions. Operational scenarios are defined, including a performance assessment of individual robots but also collective operations where heterogeneous robots cooperate together and with manned teams in search and rescue activities. That way, it is possible to perform a more complete validation of the use of robotic tools in challenging real world scenarios.},
      project   = {ICARUS},
      url       = {http://mecatron.rma.ac.be/pub/2015/Operational%20validation%20of%20robots%20for%20risky%20environments.pdf},
      unit      = {meca-ras}
    }

  • D. Serrano, P. Chrobocinski, G. De Cubber, D. Moore, G. Leventakis, and S. Govindaraj, “ICARUS and DARIUS approaches towards interoperability," in 8th IARP Workshop on Robotics for Risky Environments, Lisbon, Portugal, 2015.
    [BibTeX] [Abstract] [Download PDF]

    The two FP7 projects ICARUS and DARIUS share a common objective which is to integrate the unmanned platforms in Search and Rescue operations and assess their added value through the development of an integrated system that will be tested in realistic conditions on the field. This paper describes the concept of both projects towards an optimized interoperability level in the three dimensions: organizational, procedural and technical interoperability, describing the system components and illustrating the results of the trials already performed.

    @InProceedings{serrano2015icarus,
      author    = {Serrano, Daniel and Chrobocinski, Philippe and De Cubber, Geert and Moore, Dave and Leventakis, Georgios and Govindaraj, Shashank},
      title     = {{ICARUS} and {DARIUS} approaches towards interoperability},
      booktitle = {8th IARP Workshop on Robotics for Risky Environments},
      year      = {2015},
      address   = {Lisbon, Portugal},
      abstract  = {The two FP7 projects ICARUS and DARIUS share a common objective which is to integrate the unmanned platforms in Search and Rescue operations and assess their added value through the development of an integrated system that will be tested in realistic conditions on the field. This paper describes the concept of both projects towards an optimized interoperability level in the three dimensions: organizational, procedural and technical interoperability, describing the system components and illustrating the results of the trials already performed.},
      project   = {ICARUS},
      url       = {http://mecatron.rma.ac.be/pub/2015/RISE%20-%202015%20-%20ICARUS%20and%20DARIUS%20approach%20towards%20interoperability%20-%20rev1.3.pdf},
      unit      = {meca-ras}
    }

  • H. Balta, G. De Cubber, Y. Baudoin, and D. Doroftei, “UAS deployment and data processing during the Balkans flooding with the support to Mine Action," in 8th IARP Workshop on Robotics for Risky Environments, Lisbon, Portugal, 2015.
    [BibTeX] [Abstract] [Download PDF]

    In this paper, we provide a report on a real relief operation mission, jointly conducted by two European research projects, in response to the massive flooding in the Balkan in spring 2014. Un Unmanned Aerial System was deployed on-site in collaboration with traditional relief workers, to support them with damage assessment, area mapping, visual inspection and re-localizing the many explosive remnants of war which have been moved due to the flooding and landslides. The destructive impact of landslides, sediment torrents and floods on the mine fields and the change of mine action situation resulted with significant negative environmental and security consequences. Novel robotic technologies and data processing methodologies were brought from the research labs and directly applied onto the terrain in order to support the relief workers and minimize human suffering.

    @InProceedings{balta2015uas,
      author    = {Balta, Haris and De Cubber, Geert and Baudoin, Yvan and Doroftei, Daniela},
      title     = {{UAS} deployment and data processing during the {Balkans} flooding with the support to Mine Action},
      booktitle = {8th IARP Workshop on Robotics for Risky Environments},
      year      = {2015},
      address   = {Lisbon, Portugal},
      abstract  = {In this paper, we provide a report on a real relief operation mission, jointly conducted by two European research projects, in response to the massive flooding in the Balkan in spring 2014. Un Unmanned Aerial System was deployed on-site in collaboration with traditional relief workers, to support them with damage assessment, area mapping, visual inspection and re-localizing the many explosive remnants of war which have been moved due to the flooding and landslides. The destructive impact of landslides, sediment torrents and floods on the mine fields and the change of mine action situation resulted with significant negative environmental and security consequences. Novel robotic technologies and data processing methodologies were brought from the research labs and directly applied onto the terrain in order to support the relief workers and minimize human suffering.},
      project   = {ICARUS},
      url       = {http://mecatron.rma.ac.be/pub/2015/RISE_2015_Haris_Balta_RMA.PDF},
      unit      = {meca-ras}
    }

  • G. De Cubber and H. Balta, “Terrain Traversability Analysis using full-scale 3D Processing," in 8th IARP Workshop on Robotics for Risky Environments, Lisbon, Portugal, 2015.
    [BibTeX] [Abstract] [Download PDF]

    Autonomous robotic systems which aspire to navigate through rough unstructured terrain require the capability to reason about the environmental characteristics of their environment. As a first priority, the robotic systems need to assess the degree of traversability of their immediate environment to ensure their mobility while navigating through these rough environments. This paper presents a novel terrain-traversability analyis methodology which is based on processing the full 3D model of the terrain, not on a projected or downscaled version of this model. The approach is validated using field tests using a time-of-flight camera.

    @InProceedings{de2015terrain,
      author    = {De Cubber, Geert and Balta, Haris},
      title     = {Terrain Traversability Analysis using full-scale {3D} Processing},
      booktitle = {8th IARP Workshop on Robotics for Risky Environments},
      year      = {2015},
      address   = {Lisbon, Portugal},
      abstract  = {Autonomous robotic systems which aspire to navigate through rough unstructured terrain require the capability to reason about the environmental characteristics of their environment. As a first priority, the robotic systems need to assess the degree of traversability of their immediate environment to ensure their mobility while navigating through these rough environments. This paper presents a novel terrain-traversability analyis methodology which is based on processing the full 3D model of the terrain, not on a projected or downscaled version of this model. The approach is validated using field tests using a time-of-flight camera.},
      project   = {ICARUS},
      url       = {http://mecatron.rma.ac.be/pub/2015/Terrain%20Traversability%20Analysis.pdf},
      unit      = {meca-ras}
    }

  • O. De Meyst, T. Goethals, H. Balta, G. De Cubber, and R. Haelterman, “Autonomous guidance for a UAS along a staircase," in International Symposium on Visual Computing, Las Vegas, USA, 2015, p. 466–475.
    [BibTeX] [Abstract] [Download PDF] [DOI]

    In the quest for fully autonomous unmanned aerial systems (UAS), multiple challenges are faced. For enabling autonomous UAS navigation in indoor environments, one of the major bottlenecks is the capability to autonomously traverse narrow 3D – passages, like staircases. This paper presents a novel integrated system that implements a semi-autonomous navigation system for a quadcopter. The navigation system permits the UAS to detect a staircase using only the images provided by an on-board monocular camera. A 3D model of this staircase is then automatically reconstructed and this model is used to guide the UAS to the top of the detected staircase. For validating the methodology, a proof of concept is created, based on the Parrot AR.Drone 2.0 which is a cheap commercial off-the-shelf quadcopter.

    @InProceedings{de2015autonomous,
      author       = {De Meyst, Olivier and Goethals, Thijs and Balta, Haris and De Cubber, Geert and Haelterman, Rob},
      title        = {Autonomous guidance for a {UAS} along a staircase},
      booktitle    = {International Symposium on Visual Computing},
      year         = {2015},
      organization = {Springer, Cham},
      pages        = {466--475},
      address      = {Las Vegas, USA},
      abstract     = {In the quest for fully autonomous unmanned aerial systems (UAS), multiple challenges are faced. For enabling autonomous UAS navigation in indoor environments, one of the major bottlenecks is the capability to autonomously traverse narrow 3D - passages, like staircases. This paper presents a novel integrated system that implements a semi-autonomous navigation system for a quadcopter. The navigation system permits the UAS to detect a staircase using only the images provided by an on-board monocular camera. A 3D model of this staircase is then automatically reconstructed and this model is used to guide the UAS to the top of the detected staircase. For validating the methodology, a proof of concept is created, based on the Parrot AR.Drone 2.0 which is a cheap commercial off-the-shelf quadcopter.},
      doi          = {10.1007/978-3-319-27857-5_42},
      url          = {https://link.springer.com/chapter/10.1007/978-3-319-27857-5_42},
      project      = {ICARUS},
      unit         = {meca-ras}
    }

  • E. Avdic, H. Balta, and T. Ivelja, “UAS deployment and data processing of natural disaster with impact to mine action in B and H, case study: Region Olovo," in International Symposium Mine Action 2015, Biograd, Croatia, 2015, pp. 5-12.
    [BibTeX] [Abstract] [Download PDF]

    In this paper, we present a case study report on how novel robotics technologies like the Unmanned Aerial System (UAS) and data processing methodologies could be used in order to support the traditional mine action procedures and be directly applied onto the terrain while increasing the operational efficiency, supporting mine action workers and minimizing human suffering in case of natural disaster with impact to mine action. Our case study is focusing on the region Olovo (Central Bosnia and Herzegovina) in response to massive flooding, landslides and sediment torrents in spring- summer of 2014. Such destructive impact of the natural disaster on the mine action situation resulted with a re-localizing of many explosive remnants of war which have been moved due to the flooding and landslides with significant negative environmental and security consequences increasing new potentially suspected hazardous areas. What will be elaborated in this paper is the following: problem definition with a statement of needs, data acquisition procedures with UAS, data processing and quality assessment and usability in further mine action procedures.

    @inproceedings{balta2015article,
    author = {Avdic, Esad and Balta, Haris and Ivelja, Tamara},
    title = {{UAS} deployment and data processing of natural disaster with impact to mine action in {B and H}, case study: Region {Olovo}},
    booktitle = {International Symposium Mine Action 2015},
    year = {2015},
    pages = {5--12},
    address = {Biograd, Croatia},
    publisher = {HCR-CTRO d.o.o.},
    keywords = {Mine Action Support, Unmanned Aerial System, Natural Disaster},
    abstract = {In this paper, we present a case study report on how novel robotics technologies like the Unmanned Aerial System (UAS) and data processing methodologies could be used in order to support the traditional mine action procedures and be directly applied onto the terrain while increasing the operational efficiency, supporting mine action workers and minimizing human suffering in case of natural disaster with impact to mine action. Our case study is focusing on the region Olovo (Central Bosnia and Herzegovina) in response to massive flooding, landslides and sediment torrents in spring- summer of 2014. Such destructive impact of the natural disaster on the mine action situation resulted with a re-localizing of many explosive remnants of war which have been moved due to the flooding and landslides with significant negative environmental and security consequences increasing new potentially suspected hazardous areas. What will be elaborated in this paper is the following: problem definition with a statement of needs, data acquisition procedures with UAS, data processing and quality assessment and usability in further mine action procedures.},
    project = {TIRAMISU},
    url = {http://mecatron.rma.ac.be/pub/2015/HUDEM_2015_Avdic_Balta_Ivelja_final_ver.pdf},
    unit = {meca-ras}
    }

  • S. Sarkar and I. N. Kar, “Formation of multiple groups of mobile robots using sliding mode control," in 2015 54th IEEE Conference on Decision and Control (CDC), 2015, p. 2993–2998.
    [BibTeX]
    @inproceedings{sarkar2015formation,
    author = {Sarkar, Soumic and Kar, Indra Narayan},
    title = {Formation of multiple groups of mobile robots using sliding mode control},
    booktitle = {2015 54th IEEE Conference on Decision and Control (CDC)},
    year = {2015},
    pages = {2993--2998},
    organization = {IEEE}
    }

  • S. Sarkar and I. N. Kar, “Three time scale behaviour analysis of the Leader Follower formation of multiple groups of nonholonomic robots," in 2015 American Control Conference (ACC), 2015, p. 44–49.
    [BibTeX]
    @inproceedings{sarkar2015three,
    author = {Sarkar, Soumic and Kar, Indra Narayan},
    title = {Three time scale behaviour analysis of the Leader Follower formation of multiple groups of nonholonomic robots},
    booktitle = {2015 American Control Conference (ACC)},
    year = {2015},
    pages = {44--49},
    organization = {IEEE}
    }

  • S. Papili, C. Jenkins, M. Roche, T. Wever, O. Lopera, and V. Van Lancker, “Influence of Shells and Shell Debris on Backscatter Strength: Investigation using modelling, sonar measurements and sampling on the Belgian Continental Shelf," in Proceedings of the Institute of Acoustics, 2015.
    [BibTeX]
    @inproceedings{sonia07,
    author = {Papili, S. and Jenkins, C. and Roche, M. and Wever, T. and Lopera, O. and Van Lancker, V.},
    title = {Influence of Shells and Shell Debris on Backscatter Strength: Investigation using modelling, sonar measurements and sampling on the {Belgian Continental Shelf}},
    booktitle = {Proceedings of the Institute of Acoustics},
    year = {2015},
    volume = {37},
    part = {1},
    project = {DISCIMBA}
    }

2014

  • D. Doroftei, A. Matos, and G. De Cubber, “Designing Search and Rescue Robots towards Realistic User Requirements," in Advanced Concepts on Mechanical Engineering (ACME), Iasi, Romania, 2014.
    [BibTeX] [Abstract] [Download PDF] [DOI]

    In the event of a large crisis (think about typhoon Haiyan or the Tohoku earthquake and tsunami in Japan), a primordial task of the rescue services is the search for human survivors on the incident site. This is a complex and dangerous task, which often leads to loss of lives among the human crisis managers themselves. The introduction of unmanned search and rescue devices can offer a valuable tool to save human lives and to speed up the search and rescue process. In this context, the EU-FP7-ICARUS project [1] concentrates on the development of unmanned search and rescue technologies for detecting, locating and rescuing humans. The complex nature and difficult operating conditions of search and rescue operations pose heavy constraints on the mechanical design of the unmanned platforms. In this paper, we discuss the different user requirements which have an impact of the design of the mechanical systems (air, ground and marine robots). We show how these user requirements are obtained, how they are validated, how they lead to design specifications for operational prototypes which are tested in realistic operational conditions and we show how the final mechanical design specifications are derived from these different steps. An important aspect of all these design steps which is emphasized in this paper is to always keep the end-users in the loop in order to come to realistic requirements and specifications, ensuring the practical deployability [2] of the developed platforms.

    @inproceedings{doroftei2014designing,
    author = {Doroftei, Daniela and Matos, Anibal and De Cubber, Geert},
    title = {Designing Search and Rescue Robots towards Realistic User Requirements},
    booktitle = {Advanced Concepts on Mechanical Engineering (ACME)},
    year = {2014},
    address = {Iasi, Romania},
    doi = {10.4028/www.scientific.net/amm.658.612},
    abstract = {In the event of a large crisis (think about typhoon Haiyan or the Tohoku earthquake and tsunami in Japan), a primordial task of the rescue services is the search for human survivors on the incident site. This is a complex and dangerous task, which often leads to loss of lives among the human crisis managers themselves. The introduction of unmanned search and rescue devices can offer a valuable tool to save human lives and to speed up the search and rescue process. In this context, the EU-FP7-ICARUS project [1] concentrates on the development of unmanned search and rescue technologies for detecting, locating and rescuing humans. The complex nature and difficult operating conditions of search and rescue operations pose heavy constraints on the mechanical design of the unmanned platforms. In this paper, we discuss the different user requirements which have an impact of the design of the mechanical systems (air, ground and marine robots). We show how these user requirements are obtained, how they are validated, how they lead to design specifications for operational prototypes which are tested in realistic operational conditions and we show how the final mechanical design specifications are derived from these different steps. An important aspect of all these design steps which is emphasized in this paper is to always keep the end-users in the loop in order to come to realistic requirements and specifications, ensuring the practical deployability [2] of the developed platforms.},
    project = {ICARUS},
    url = {http://mecatron.rma.ac.be/pub/2014/Designing%20Search%20and%20Rescue%20robots%20towards%20realistic%20user%20requirements%20-%20full%20article%20-v3.pdf},
    unit = {meca-ras}
    }

  • G. De Cubber, H. Balta, and C. Lietart, “Teodor: A semi-autonomous search and rescue and demining robot," in Advanced Concepts on Mechanical Engineering (ACME), Iasi, Romania, 2014.
    [BibTeX] [Abstract] [Download PDF] [DOI]

    In this paper, we present a ground robotic system which is developed to deal with rough outdoor conditions. The platform is to be used as an environmental monitoring robot for 2 main application areas: 1) Humanitarian demining: The vehicle is equipped with a specialized multichannel metal detector array. An unmanned aerial system supports it for locating suspected locations of mines, which can then be confirmed by the ground vehicle. 2) Search and rescue: The vehicle is equipped with human victim detection sensors and a 3D camera enabling it to assess the traversability of the terrain in front of the robot in order to be able to navigate autonomously. This paper discusses both the mechanical design of these platforms as the autonomous perception capabilities on board of these vehicles.

    @inproceedings{de2014teodor,
    author = {De Cubber, Geert and Balta, Haris and Lietart, Claude},
    title = {Teodor: A semi-autonomous search and rescue and demining robot},
    booktitle = {Advanced Concepts on Mechanical Engineering (ACME)},
    year = {2014},
    address = {Iasi, Romania},
    doi = {10.4028/www.scientific.net/amm.658.599},
    abstract = {In this paper, we present a ground robotic system which is developed to deal with rough outdoor conditions. The platform is to be used as an environmental monitoring robot for 2 main application areas: 1) Humanitarian demining: The vehicle is equipped with a specialized multichannel metal detector array. An unmanned aerial system supports it for locating suspected locations of mines, which can then be confirmed by the ground vehicle. 2) Search and rescue: The vehicle is equipped with human victim detection sensors and a 3D camera enabling it to assess the traversability of the terrain in front of the robot in order to be able to navigate autonomously. This paper discusses both the mechanical design of these platforms as the autonomous perception capabilities on board of these vehicles.},
    project = {ICARUS},
    url = {http://mecatron.rma.ac.be/pub/2014/Teodor%20-%20A%20semi-autonomous%20search%20and%20rescue%20and%20demining%20robot%20-%20full%20article.pdf},
    unit = {meca-ras}
    }

  • G. De Cubber, H. Balta, D. Doroftei, and Y. Baudoin, “UAS deployment and data processing during the Balkans flooding," in 2014 IEEE International Symposium on Safety, Security, and Rescue Robotics (2014), Toyako-cho, Hokkaido, Japan, 2014, p. 1–4.
    [BibTeX] [Abstract] [Download PDF] [DOI]

    This project paper provides a report on a real relief operation mission, jointly conducted by two European research projects, in response to the massive flooding in the Balkan in spring 2014. An Unmanned Aerial System was deployed on-site in collaboration with traditional relief workers, to support them with damage assessment, area mapping, visual inspection and re-localizing the many explosive remnants of war which have been moved due to the flooding and landslides. Novel robotic technologies and data processing methodologies were brought from the research labs and directly applied onto the terrain in order to support the relief workers and minimize human suffering.

    @inproceedings{de2014uas,
    author = {De Cubber, Geert and Balta, Haris and Doroftei, Daniela and Baudoin, Yvan},
    title = {{UAS} deployment and data processing during the Balkans flooding},
    booktitle = {2014 IEEE International Symposium on Safety, Security, and Rescue Robotics (2014)},
    year = {2014},
    pages = {1--4},
    organization = {IEEE},
    address = {Toyako-cho, Hokkaido, Japan},
    doi = {10.1109/ssrr.2014.7017670},
    url = {http://mecatron.rma.ac.be/pub/2014/SSRR2014_proj_037.pdf},
    abstract = {This project paper provides a report on a real relief operation mission, jointly conducted by two European research projects, in response to the massive flooding in the Balkan in spring 2014. Un Unmanned Aerial System was deployed on-site in collaboration with traditional relief workers, to support them with damage assessment, area mapping, visual inspection and re-localizing the many explosive remnants of war which have been moved due to the flooding and landslides. Novel robotic technologies and data processing methodologies were brought from the research labs and directly applied onto the terrain in order to support the relief workers and minimize human suffering.},
    project = {ICARUS},
    unit = {meca-ras}
    }

  • M. Pelka, K. Majek, J. Bedkowski, P. Musialik, A. Maslowski, G. de Cubber, H. Balta, A. Coelho, R. Goncalves, R. Baptista, J. M. Sanchez, and S. Govindaraj, “Training and Support system in the Cloud for improving the situational awareness in Search and Rescue (SAR) operations," in 2014 IEEE International Symposium on Safety, Security, and Rescue Robotics (2014), Toyako-cho, Hokkaido, Japan, 2014, p. 1–6.
    [BibTeX] [Abstract] [Download PDF] [DOI]

    In this paper, a Training and Support system for Search and Rescue operations is described. The system is a component of the ICARUS project (http://www.fp7-icarus.eu) which has a goal to develop sensor, robotic and communication technologies for Human Search And Rescue teams. The support system for planning and managing complex SAR operations is implemented as a command and control component that integrates different sources of spatial information, such as maps of the affected area, satellite images and sensor data coming from the unmanned robots, in order to provide a situation snapshot to the rescue team who will make the necessary decisions. Support issues will include planning of frequency resources needed for given areas, prediction of coverage conditions, location of fixed communication relays, etc. The training system is developed for the ICARUS operators controlling UGVs (Unmanned Ground Vehicles), UAVs (Unmanned Aerial Vehicles) and USVs (Unmanned Surface Vehicles) from a unified Remote Control Station (RC2). The Training and Support system is implemented in SaaS model (Software as a Service). Therefore, its functionality is available over the Ethernet. SAR ICARUS teams from different countries can be trained simultaneously on a shared virtual stage. In this paper we will show the multi-robot 3D mapping component (aerial vehicle and ground vehicles). We will demonstrate that these 3D maps can be used for Training purpose. Finally we demonstrate current approach for ICARUS Urban SAR (USAR) and Marine SAR (MSAR) operation training.

    @inproceedings{pelka2014training,
    author = {Pelka, Michal and Majek, Karol and Bedkowski, Janusz and Musialik, Pawel and Maslowski, Andrzej and De Cubber, Geert and Balta, Haris and Coelho, Antonio and Goncalves, Ricardo and Baptista, Ricardo and Sanchez, Jose Manuel and Govindaraj, Shashank},
    title = {Training and Support system in the Cloud for improving the situational awareness in Search and Rescue ({SAR}) operations},
    booktitle = {2014 {IEEE} International Symposium on Safety, Security, and Rescue Robotics (2014)},
    year = {2014},
    month = oct,
    pages = {1--6},
    organization = {IEEE},
    address = {Toyako-cho, Hokkaido, Japan},
    doi = {10.1109/ssrr.2014.7017644},
    abstract = {In this paper, a Training and Support system for Search and Rescue operations is described. The system is a component of the ICARUS project (http://www.fp7-icarus.eu) which has a goal to develop sensor, robotic and communication technologies for Human Search And Rescue teams. The support system for planning and managing complex SAR operations is implemented as a command and control component that integrates different sources of spatial information, such as maps of the affected area, satellite images and sensor data coming from the unmanned robots, in order to provide a situation snapshot to the rescue team who will make the necessary decisions. Support issues will include planning of frequency resources needed for given areas, prediction of coverage conditions, location of fixed communication relays, etc. The training system is developed for the ICARUS operators controlling UGVs (Unmanned Ground Vehicles), UAVs (Unmanned Aerial Vehicles) and USVs (Unmanned Surface Vehicles) from a unified Remote Control Station (RC2). The Training and Support system is implemented in SaaS model (Software as a Service). Therefore, its functionality is available over the Ethernet. SAR ICARUS teams from different countries can be trained simultaneously on a shared virtual stage. In this paper we will show the multi-robot 3D mapping component (aerial vehicle and ground vehicles). We will demonstrate that these 3D maps can be used for Training purpose. Finally we demonstrate current approach for ICARUS Urban SAR (USAR) and Marine SAR (MSAR) operation training.},
    project = {ICARUS},
    url = {https://ieeexplore.ieee.org/document/7017644},
    unit = {meca-ras}
    }

  • G. De Cubber and H. Balta, “ICARUS RPAS AND THEIR OPERATIONAL USE IN Bosnia," in RPAS 2014, Brussels, Belgium, 2014.
    [BibTeX] [Abstract] [Download PDF]

    This is a report on the field mission with an unmanned aircraft system in Spring 2014 in Bosnia, to help with flood relief and mine clearing operations.

    @inproceedings{de2014icarus,
    author = {De Cubber, Geert and Balta, Haris},
    title = {{ICARUS RPAS} AND THEIR OPERATIONAL USE IN {Bosnia}},
    booktitle = {RPAS 2014},
    year = {2014},
    organization = {UVS International},
    address = {Brussels, Belgium},
    abstract = {This is a report in the field mission with an unmanned aircraft system in Spring 2014 in Bosnia, to help with flood relief and mine clearing operations.},
    project = {ICARUS},
    url = {http://mecatron.rma.ac.be/pub/2014/Icarus%20Project%20-%20RPAS%20in%20Bosnia_.pdf},
    unit = {meca-ras}
    }

  • R. Barth, J. Baur, T. Buschmann, Y. Edan, T. Hellström, and T. Nguyen, “Using ROS for agricultural robotics – Design considerations and experiences," in Proceeding of the International Conference on Robotics and associated High-technologies and Equipment for Agriculture and forestry (RHEA), 2014.
    [BibTeX]
    @inproceedings{proc_tt_004,
    author = {Barth, R. and Baur, J. and Buschmann, T. and Edan, Y. and Hellstr{\"o}m, T. and Nguyen, T. Th.},
    title = {Using {ROS} for agricultural robotics - Design considerations and experiences},
    booktitle = {Proceedings of the International Conference on Robotics and associated High-technologies and Equipment for Agriculture and forestry (RHEA)},
    month = may,
    year = {2014}
    }

  • T. Nguyen, K. Vandevoorde, E. Kayacan, J. De Baerdemaeker, and W. Saeys, “Apple detection algorithm for robotic harvesting using a RGB-D camera," in In International Conference of Agricultural Engineering, CIGR-Ageng, 2014.
    [BibTeX]
    @inproceedings{proc_tt_005,
    author = {Nguyen, T. Th. and Vandevoorde, K. and Kayacan, E. and De Baerdemaeker, J. and Saeys, W.},
    title = {Apple detection algorithm for robotic harvesting using a {RGB-D} camera},
    booktitle = {International Conference of Agricultural Engineering, CIGR-Ageng},
    month = jul,
    year = {2014}
    }

  • T. Nguyen, E. Kayakan, J. De Baerdemaeker, and W. Saeys, “Motion planning algorithm and its real-time implementation in apples harvesting robot," in In International Conference of Agricultural Engineering, CIGR-Ageng, 2014.
    [BibTeX]
    @inproceedings{proc_tt_006,
    author = {Nguyen, T. Th. and Kayacan, E. and De Baerdemaeker, J. and Saeys, W.},
    title = {Motion planning algorithm and its real-time implementation in apples harvesting robot},
    booktitle = {International Conference of Agricultural Engineering, CIGR-Ageng},
    month = jul,
    year = {2014},
    unit = {meca-ras}
    }

2013

  • J. Bedkowski, K. Majek, I. Ostrowski, P. Musialik, A. Mas{l}owski, A. Adamek, A. Coelho, and G. De Cubber, “Methodology of Training and Support for Urban Search and Rescue With Robots," in Proc. Ninth International Conference on Autonomic and Autonomous Systems (ICAS), Lisbon, Portugal, Lisbon, Portugal, 2013, p. 77–82.
    [BibTeX] [Abstract] [Download PDF]

    A primordial task of the fire-fighting and rescue services in the event of a large crisis is the search for human survivors on the incident site. This task, being complex and dangerous, often leads to loss of lives. Unmanned search and rescue devices can provide a valuable tool for saving human lives and speeding up the search and rescue operations. Urban Search and Rescue (USAR) community agrees with the fact that the operator skill is the main factor for successfully using unmanned robotic platforms. The key training concept is “train as you fight" mentality. Intervention troops focalize on “real training", as a crisis is difficult to simulate. For this reason, in this paper a methodology of training and support for USAR with unmanned vehicles is proposed. The methodology integrates the Qualitative Spatio-Temporal Representation and Reasoning (QSTRR) framework with USAR tools to decrease the cognitive load on human operators working with sophisticated robotic platforms. Tools for simplifying and improving virtual training environment generation from life data are shown

    @inproceedings{bedkowski2013methodology,
    author = {Bedkowski, Janusz and Majek, Karol and Ostrowski, Igor and Musialik, Pawe{\l} and Mas{\l}owski, Andrzej and Adamek, Artur and Coelho, Antonio and De Cubber, Geert},
    title = {Methodology of Training and Support for Urban Search and Rescue With Robots},
    booktitle = {Proc. Ninth International Conference on Autonomic and Autonomous Systems (ICAS)},
    year = {2013},
    month = mar,
    pages = {77--82},
    address = {Lisbon, Portugal},
    abstract = {A primordial task of the fire-fighting and rescue services in the event of a large crisis is the search for human survivors on the incident site. This task, being complex and dangerous, often leads to loss of lives. Unmanned search and rescue devices can provide a valuable tool for saving human lives and speeding up the search and rescue operations. Urban Search and Rescue (USAR) community agrees with the fact that the operator skill is the main factor for successfully using unmanned robotic platforms. The key training concept is "train as you fight" mentality. Intervention troops focalize on "real training", as a crisis is difficult to simulate. For this reason, in this paper a methodology of training and support for USAR with unmanned vehicles is proposed. The methodology integrates the Qualitative Spatio-Temporal Representation and Reasoning (QSTRR) framework with USAR tools to decrease the cognitive load on human operators working with sophisticated robotic platforms. Tools for simplifying and improving virtual training environment generation from life data are shown},
    project = {ICARUS},
    url = {https://www.thinkmind.org/download.php?articleid=icas_2013_3_40_20054},
    unit = {meca-ras}
    }

  • H. Balta, G. De Cubber, D. Doroftei, Y. Baudoin, and H. Sahli, “Terrain traversability analysis for off-road robots using time-of-flight 3d sensing," in 7th IARP International Workshop on Robotics for Risky Environment-Extreme Robotics, Saint-Petersburg, Russia, 2013.
    [BibTeX] [Abstract] [Download PDF]

    In this paper we present a terrain traversability analysis methodology which classifies all image pixels in the TOF image as traversable or not, by estimating for each pixel a traversability score which is based upon the analysis of the 3D (depth data) and 2D (IR data) content of the TOF camera data. This classification result is then used for the (semi) – autonomous navigation of two robotic systems, operating in extreme environments: a search and rescue robot and a humanitarian demining robot. Integrated in autonomous robot control architecture, terrain traversability classification increases the environmental situational awareness and enables a mobile robot to navigate (semi) – autonomously in an unstructured dynamical outdoor environment.

    @inproceedings{balta2013terrain,
    author = {Balta, Haris and De Cubber, Geert and Doroftei, Daniela and Baudoin, Yvan and Sahli, Hichem},
    title = {Terrain traversability analysis for off-road robots using time-of-flight {3D} sensing},
    booktitle = {7th IARP International Workshop on Robotics for Risky Environment-Extreme Robotics},
    year = {2013},
    address = {Saint-Petersburg, Russia},
    abstract = {In this paper we present a terrain traversability analysis methodology which classifies all image pixels in the TOF image as traversable or not, by estimating for each pixel a traversability score which is based upon the analysis of the 3D (depth data) and 2D (IR data) content of the TOF camera data. This classification result is then used for the (semi) -- autonomous navigation of two robotic systems, operating in extreme environments: a search and rescue robot and a humanitarian demining robot. Integrated in autonomous robot control architecture, terrain traversability classification increases the environmental situational awareness and enables a mobile robot to navigate (semi) -- autonomously in an unstructured dynamical outdoor environment.},
    project = {ICARUS},
    url = {http://mecatron.rma.ac.be/pub/2013/Terrain%20Traversability%20Analysis%20ver%204-HS.pdf},
    unit = {meca-ras}
    }

  • Y. Baudoin and G. De Cubber, “TIRAMISU-ICARUS: FP7-Projects Challenges for Robotics Systems," in 7th IARP Workshop on Robotics for Risky Environment – Extreme Robotics, Saint-Petersburg, Russia, 2013, p. 55–69.
    [BibTeX] [Abstract] [Download PDF]

    TIRAMISU: Clearing large civilian areas from anti-personnel landmines and cluster munitions is a difficult problem because of the large diversity of hazardous areas and explosive contamination. A single solution does not exist and many Mine Action actors have called for a toolbox from which they could choose the tools best fit to a given situation. Some have built their own toolboxes, usually specific to their activities, such as clearance. The TIRAMISU project aims at providing the foundation for a global toolbox that will cover the main Mine Action activities, from the survey of large areas to the actual disposal of explosive hazards, including Mine Risk Education. The toolbox produced by the project will provide Mine Action actors with a large set of tools, grouped into thematic modules, which will help them to better perform their job. These tools will have been designed with the help of end-users and validated by them in mine affected countries. ICARUS: Recent dramatic events such as the earthquakes in Haiti and L’Aquila or the flooding in Pakistan have shown that local civil authorities and emergency services have difficulties with adequately managing crises. The result is that these crises lead to major disruption of the whole local society. The goal of ICARUS is to decrease the total cost (both in human lives and in euro) of a major crisis. In order to realise this goal, the ICARUS project proposes to equip first responders with a comprehensive and integrated set of unmanned search and rescue tools, to increase the situational awareness of human crisis managers and to assist search and rescue teams for dealing with the difficult and dangerous, but life-saving task of finding human survivors. As every crisis is different, it is impossible to provide one solution which fits all needs. Therefore, the ICARUS project will concentrate on developing components or building blocks that can be directly used by the crisis managers when arriving on the field. 
The ICARUS tools consist of assistive unmanned air, ground and sea vehicles, equipped with human detection sensors. The ICARUS unmanned vehicles are intended as the first explorers of the area, as well as in-situ supporters to act as safeguards to human personnel. The unmanned vehicles collaborate as a coordinated team, communicating via ad hoc cognitive radionetworking. To ensure optimal human-robot collaboration, these ICARUS tools are seamlessly integrated into the C4I equipment of the human crisis managers and a set of training and support tools is provided to the human crisis to learn to use the ICARUS system.

    @inproceedings{baudoin2013tiramisu,
    author = {Baudoin, Yvan and De Cubber, Geert},
    title = {{TIRAMISU-ICARUS}: {FP7}-Projects Challenges for Robotics Systems},
    booktitle = {7th IARP Workshop on Robotics for Risky Environment - Extreme Robotics},
    year = {2013},
    pages = {55--69},
    address = {Saint-Petersburg, Russia},
    abstract = {TIRAMISU: Clearing large civilian areas from anti-personnel landmines and cluster munitions is a difficult problem because of the large diversity of hazardous areas and explosive contamination. A single solution does not exist and many Mine Action actors have called for a toolbox from which they could choose the tools best fit to a given situation. Some have built their own toolboxes, usually specific to their activities, such as clearance. The TIRAMISU project aims at providing the foundation for a global toolbox that will cover the main Mine Action activities, from the survey of large areas to the actual disposal of explosive hazards, including Mine Risk Education. The toolbox produced by the project will provide Mine Action actors with a large set of tools, grouped into thematic modules, which will help them to better perform their job. These tools will have been designed with the help of end-users and validated by them in mine affected countries. ICARUS: Recent dramatic events such as the earthquakes in Haiti and L'Aquila or the flooding in Pakistan have shown that local civil authorities and emergency services have difficulties with adequately managing crises. The result is that these crises lead to major disruption of the whole local society. The goal of ICARUS is to decrease the total cost (both in human lives and in euro) of a major crisis. In order to realise this goal, the ICARUS project proposes to equip first responders with a comprehensive and integrated set of unmanned search and rescue tools, to increase the situational awareness of human crisis managers and to assist search and rescue teams for dealing with the difficult and dangerous, but life-saving task of finding human survivors. As every crisis is different, it is impossible to provide one solution which fits all needs. Therefore, the ICARUS project will concentrate on developing components or building blocks that can be directly used by the crisis managers when arriving on the field. The ICARUS tools consist of assistive unmanned air, ground and sea vehicles, equipped with human detection sensors. The ICARUS unmanned vehicles are intended as the first explorers of the area, as well as in-situ supporters to act as safeguards to human personnel. The unmanned vehicles collaborate as a coordinated team, communicating via ad hoc cognitive radionetworking. To ensure optimal human-robot collaboration, these ICARUS tools are seamlessly integrated into the C4I equipment of the human crisis managers and a set of training and support tools is provided to the human crisis to learn to use the ICARUS system.},
    project = {ICARUS, TIRAMISU},
    url = {http://mecatron.rma.ac.be/pub/2013/KN%20Paper%20YB.pdf},
    unit = {meca-ras}
    }

  • G. De Cubber, D. Doroftei, D. Serrano, K. Chintamani, R. Sabino, and S. Ourevitch, “The EU-ICARUS project: developing assistive robotic tools for search and rescue operations," in 2013 IEEE international symposium on safety, security, and rescue robotics (SSRR), Linkoping, Sweden, 2013, p. 1–4.
    [BibTeX] [Abstract] [Download PDF] [DOI]

    The ICARUS EU-FP7 project deals with the development of a set of integrated components to assist search and rescue teams in dealing with the difficult and dangerous, but lifesaving task of finding human survivors. The ICARUS tools consist of assistive unmanned air, ground and sea vehicles, equipped with victim detection sensors. The unmanned vehicles collaborate as a coordinated team, communicating via ad-hoc cognitive radio networking. To ensure optimal human-robot collaboration, these tools are seamlessly integrated into the C4I (command, control, communications, computers, and intelligence) equipment of the human crisis managers and a set of training and support tools is provided to them to learn to use the ICARUS system.

    @inproceedings{de2013eu,
      author       = {De Cubber, Geert and Doroftei, Daniela and Serrano, Daniel and Chintamani, Keshav and Sabino, Rui and Ourevitch, Stephane},
      title        = {The {EU-ICARUS} project: developing assistive robotic tools for search and rescue operations},
      booktitle    = {2013 IEEE international symposium on safety, security, and rescue robotics (SSRR)},
      year         = {2013},
      pages        = {1--4},
      organization = {IEEE},
      address      = {Linkoping, Sweden},
      doi          = {10.1109/ssrr.2013.6719323},
      url          = {http://mecatron.rma.ac.be/pub/2013/SSRR2013_ICARUS.pdf},
      abstract     = {The ICARUS EU-FP7 project deals with the development of a set of integrated components to assist search and rescue teams in dealing with the difficult and dangerous, but lifesaving task of finding human survivors. The ICARUS tools consist of assistive unmanned air, ground and sea vehicles, equipped with victim detection sensors. The unmanned vehicles collaborate as a coordinated team, communicating via ad-hoc cognitive radio networking. To ensure optimal human-robot collaboration, these tools are seamlessly integrated into the C4I (command, control, communications, computers, and intelligence) equipment of the human crisis managers and a set of training and support tools is provided to them to learn to use the ICARUS system.},
      project      = {ICARUS},
      unit         = {meca-ras}
    }

  • S. Govindaraj, K. Chintamani, J. Gancet, P. Letier, B. van Lierde, Y. Nevatia, G. De Cubber, D. Serrano, M. E. Palomares, J. Bedkowski, C. Armbrust, J. Sanchez, A. Coelho, and I. Orbe, “The ICARUS project – Command, Control and Intelligence (C2I)," in 2013 IEEE International Symposium on Safety, Security, and Rescue Robotics (SSRR), Linkoping, Sweden, 2013, p. 1–4.
    [BibTeX] [Abstract] [Download PDF] [DOI]

    This paper describes the features and concepts behind the Command, Control and Intelligence (C2I) system under development in the ICARUS project, which aims at improving crisis management with the use of unmanned search and rescue robotic appliances embedded and integrated into existing infrastructures. A beneficial C2I system should assist the search and rescue process by enhancing first responder situational awareness, decision making and crisis handling by designing intuitive user interfaces that convey detailed and extensive information about the crisis and its evolution. The different components of C2I, their architectural and functional aspects are described along with the robot platform used for development and field testing.

    @inproceedings{govindaraj2013icarus,
    author = {Govindaraj, Shashank and Chintamani, Keshav and Gancet, Jeremi and Letier, Pierre and van Lierde, Boris and Nevatia, Yashodhan and De Cubber, Geert and Serrano, Daniel and Esbri Palomares, Miguel and Bedkowski, Janusz and Armbrust, Christopher and Sanchez, Jose and Coelho, Antonio and Orbe, Iratxe},
    booktitle = {2013 {IEEE} International Symposium on Safety, Security, and Rescue Robotics ({SSRR})},
    title = {The {ICARUS} project -- Command, Control and Intelligence ({C2I})},
    year = {2013},
    month = oct,
    publisher = {IEEE},
    address = {Linkoping, Sweden},
    pages = {1--4},
    abstract = {This paper describes the features and concepts behind the Command, Control and Intelligence (C2I) system under development in the ICARUS project, which aims at improving crisis management with the use of unmanned search and rescue robotic appliances embedded and integrated into existing infrastructures. A beneficial C2I system should assist the search and rescue process by enhancing first responder situational awareness, decision making and crisis handling by designing intuitive user interfaces that convey detailed and extensive information about the crisis and its evolution. The different components of C2I, their architectural and functional aspects are described along with the robot platform used for development and field testing.},
    doi = {10.1109/ssrr.2013.6719356},
    project = {ICARUS},
    url = {http://mecatron.rma.ac.be/pub/2013/Govindaraj_SSRR_WS_Paper_V2.0.pdf},
    internal-note = {review: names converted to "Last, First" form so compound surnames (De Cubber, van Lierde, Esbri Palomares) parse correctly; dropped organization={IEEE} duplicated by publisher},
    unit= {meca-ras}
    }

  • H. Balta, S. Rossi, S. Iengo, B. Siciliano, A. Finzi, and G. De Cubber, “Adaptive behavior-based control for robot navigation: A multi-robot case study," in 2013 XXIV International Conference on Information, Communication and Automation Technologies (ICAT), Sarajevo, Bosnia and Herzegovina, 2013, p. 1–7.
    [BibTeX] [Abstract] [Download PDF] [DOI]

    The main focus of the work presented in this paper is to investigate the application of certain biologically-inspired control strategies in the field of autonomous mobile robots, with particular emphasis on multi-robot navigation systems. The control architecture used in this work is based on the behavior-based approach. The main argument in favor of this approach is its impressive and rapid practical success. This powerful methodology has demonstrated simplicity, parallelism, perception-action mapping and real implementation. When a group of autonomous mobile robots needs to achieve a goal operating in complex dynamic environments, such a task involves high computational complexity and a large volume of data needed for continuous monitoring of internal states and the external environment. Most autonomous mobile robots have limited capabilities in computation power or energy sources with limited capability, such as batteries. Therefore, it becomes necessary to build additional mechanisms on top of the control architecture able to efficiently allocate resources for enhancing the performance of an autonomous mobile robot. For this purpose, it is necessary to build an adaptive behavior-based control system focused on sensory adaptation. This adaptive property will assure efficient use of robot’s limited sensorial and cognitive resources. The proposed adaptive behavior-based control system is then validated through simulation in a multi-robot environment with a task of prey/predator scenario.

    @inproceedings{balta2013adaptive,
      author       = {Balta, Haris and Rossi, Silvia and Iengo, Salvatore and Siciliano, Bruno and Finzi, Alberto and De Cubber, Geert},
      title        = {Adaptive behavior-based control for robot navigation: A multi-robot case study},
      booktitle    = {2013 XXIV International Conference on Information, Communication and Automation Technologies (ICAT)},
      year         = {2013},
      pages        = {1--7},
      organization = {IEEE},
      address      = {Sarajevo, Bosnia and Herzegovina},
      doi          = {10.1109/icat.2013.6684083},
      url          = {https://ieeexplore.ieee.org/document/6684083?tp=&arnumber=6684083},
      abstract     = {The main focus of the work presented in this paper is to investigate the application of certain biologically-inspired control strategies in the field of autonomous mobile robots, with particular emphasis on multi-robot navigation systems. The control architecture used in this work is based on the behavior-based approach. The main argument in favor of this approach is its impressive and rapid practical success. This powerful methodology has demonstrated simplicity, parallelism, perception-action mapping and real implementation. When a group of autonomous mobile robots needs to achieve a goal operating in complex dynamic environments, such a task involves high computational complexity and a large volume of data needed for continuous monitoring of internal states and the external environment. Most autonomous mobile robots have limited capabilities in computation power or energy sources with limited capability, such as batteries. Therefore, it becomes necessary to build additional mechanisms on top of the control architecture able to efficiently allocate resources for enhancing the performance of an autonomous mobile robot. For this purpose, it is necessary to build an adaptive behavior-based control system focused on sensory adaptation. This adaptive property will assure efficient use of robot's limited sensorial and cognitive resources. The proposed adaptive behavior-based control system is then validated through simulation in a multi-robot environment with a task of prey/predator scenario.},
      project      = {ICARUS},
      unit         = {meca-ras}
    }

  • G. De Cubber, D. Serrano, K. Berns, K. Chintamani, R. Sabino, S. Ourevitch, D. Doroftei, C. Armbrust, T. Flamma, and Y. Baudoin, “Search and rescue robots developed by the European Icarus project," in 7th Int Workshop on Robotics for Risky Environments, Saint – Petersburg, Russia, 2013.
    [BibTeX] [Abstract] [Download PDF]

    This paper discusses the efforts of the European ICARUS project towards the development of unmanned search and rescue (SAR) robots. ICARUS project proposes to equip first responders with a comprehensive and integrated set of remotely operated SAR tools, to increase the situational awareness of human crisis managers. In the event of large crises, a primordial task of the fire and rescue services is the search for human survivors on the incident site, which is a complex and dangerous task. The introduction of remotely operated SAR devices can offer a valuable tool to save human lives and to speed up the SAR process. Therefore, ICARUS concentrates on the development of unmanned SAR technologies for detecting, locating and rescuing humans. The remotely operated SAR devices are foreseen to be the first explorers of the area, along with in-situ supporters to act as safeguards to human personnel. While the ICARUS project also considers the development of marine and aerial robots, this paper will mostly concentrate on the development of the unmanned ground vehicles (UGVs) for SAR. Two main UGV platforms are being developed within the context of the project: a large UGV including a powerful arm for manipulation, which is able to make structural changes in disaster scenarios. The large UGV also serves as a base platform for a small UGV (and possibly also a UAV), which is used for entering small enclosures, while searching for human survivors. In order not to increase the cognitive load of the human crisis managers, the SAR robots will be designed to navigate individually or cooperatively and to follow high-level instructions from the base station, being able to navigate in an autonomous and semi-autonomous manner. The robots connect to the base station and to each other using a wireless self-organizing cognitive network of mobile communication nodes which adapts to the terrain. 
The SAR robots are equipped with sensors that detect the presence of humans and will also be equipped with a wide array of other types of sensors. At the base station, the data is processed and combined with geographical information, thus enhancing the situational awareness of the personnel leading the operation with in-situ processed data that can improve decision-making.

    @inproceedings{de2013search,
    author = {De Cubber, Geert and Serrano, Daniel and Berns, Karsten and Chintamani, Keshav and Sabino, Rui and Ourevitch, Stephane and Doroftei, Daniela and Armbrust, Christopher and Flamma, Tommasso and Baudoin, Yvan},
    booktitle = {7th Int Workshop on Robotics for Risky Environments},
    title = {Search and rescue robots developed by the {European} {Icarus} project},
    year = {2013},
    abstract = {This paper discusses the efforts of the European ICARUS project towards the development of unmanned search and rescue (SAR) robots. ICARUS project proposes to equip first responders with a comprehensive and integrated set of remotely operated SAR tools, to increase the situational awareness of human crisis managers. In the event of large crises, a primordial task of the fire and rescue services is the search for human survivors on the incident site, which is a complex and dangerous task. The introduction of remotely operated SAR devices can offer a valuable tool to save human lives and to speed up the SAR process. Therefore, ICARUS concentrates on the development of unmanned SAR technologies for detecting, locating and rescuing humans. The remotely operated SAR devices are foreseen to be the first explorers of the area, along with in-situ supporters to act as safeguards to human personnel. While the ICARUS project also considers the development of marine and aerial robots, this paper will mostly concentrate on the development of the unmanned ground vehicles (UGVs) for SAR. Two main UGV platforms are being developed within the context of the project: a large UGV including a powerful arm for manipulation, which is able to make structural changes in disaster scenarios. The large UGV also serves as a base platform for a small UGV (and possibly also a UAV), which is used for entering small enclosures, while searching for human survivors. In order not to increase the cognitive load of the human crisis managers, the SAR robots will be designed to navigate individually or cooperatively and to follow high-level instructions from the base station, being able to navigate in an autonomous and semi-autonomous manner. The robots connect to the base station and to each other using a wireless self-organizing cognitive network of mobile communication nodes which adapts to the terrain. The SAR robots are equipped with sensors that detect the presence of humans and will also be equipped with a wide array of other types of sensors. At the base station, the data is processed and combined with geographical information, thus enhancing the situational awareness of the personnel leading the operation with in-situ processed data that can improve decision-making.},
    project = {ICARUS},
    address = {Saint - Petersburg, Russia},
    url = {http://mecatron.rma.ac.be/pub/2013/Search%20and%20Rescue%20robots%20developed%20by%20the%20European%20ICARUS%20project%20-%20Article.pdf},
    internal-note = {review: URL spaces percent-encoded; "Tommasso" spelling unverified -- possibly "Tommaso", confirm against the paper},
    unit= {meca-ras}
    }

  • T. Nguyen, E. Kayacan, J. De Baerdemaeker, and W. Saeys, “Task planning and motion planning for apples harvesting robot," in The 4th IFAC Conference on Modelling and Control in Agriculture, Horticulture and Post-Harvest Industry, 2013.
    [BibTeX]
    @inproceedings{proc_tt_003,
    author = {Nguyen, T. Th. and Kayacan, E. and De Baerdemaeker, J. and Saeys, W.},
    title = {Task planning and motion planning for apples harvesting robot},
    booktitle = {The 4th IFAC Conference on Modelling and Control in Agriculture, Horticulture and Post-Harvest Industry},
    month = aug,
    year = {2013}
    }

  • S. Sarkar and I. N. Kar, “Formation control of multiple groups of robots," in 52nd IEEE Conference on Decision and Control, 2013, p. 1466–1471.
    [BibTeX]
    @inproceedings{sarkar2013formation,
      author       = {Sarkar, Soumic and Kar, Indra Narayan},
      title        = {Formation control of multiple groups of robots},
      booktitle    = {52nd IEEE Conference on Decision and Control},
      year         = {2013},
      pages        = {1466--1471},
      organization = {IEEE}
    }

  • S. Sarkar and I. N. Kar, “Formation control of multiple groups of nonholonomic wheeled mobile robots," in Proceedings of Conference on Advances In Robotics, 2013, p. 1–6.
    [BibTeX]
    @inproceedings{sarkar2013formation2,
      author    = {Sarkar, Soumic and Kar, Indra Narayan},
      title     = {Formation control of multiple groups of nonholonomic wheeled mobile robots},
      booktitle = {Proceedings of Conference on Advances In Robotics},
      year      = {2013},
      pages     = {1--6}
    }

2012

  • Y. Yvinec, Y. Baudoin, G. De Cubber, M. Armada, L. Marques, J. Desaulniers, and M. Bajic, “TIRAMISU: FP7-Project for an integrated toolbox in Humanitarian Demining," in GICHD Technology Workshop, Geneva, Switzerland, 2012.
    [BibTeX] [Abstract] [Download PDF]

    The TIRAMISU project aims at providing the foundation for a global toolbox that will cover the main mine action activities, from the survey of large areas to the actual disposal of explosive hazards, including mine risk education and training tools. After a short description of some tools, particular emphasis will be given to the two topics proposed by the GICHD Technology Workshop, namely the methodology adopted by the explosion of an ammunition storage and the possible use of UAV (or UGV/UAV) in Technical survey and/or Close-in-Detection

    @inproceedings{yvinec2012tiramisu01,
      author    = {Yvinec, Yann and Baudoin, Yvan and De Cubber, Geert and Armada, Manuel and Marques, Lino and Desaulniers, Jean-Marc and Bajic, Milan},
      title     = {{TIRAMISU}: {FP7}-Project for an integrated toolbox in Humanitarian Demining},
      booktitle = {GICHD Technology Workshop},
      year      = {2012},
      address   = {Geneva, Switzerland},
      url       = {http://mecatron.rma.ac.be/pub/2012/TIRAMISU-TWS-GICHD.pdf},
      abstract  = {The TIRAMISU project aims at providing the foundation for a global toolbox that will cover the main mine action activities, from the survey of large areas to the actual disposal of explosive hazards, including mine risk education and training tools. After a short description of some tools, particular emphasis will be given to the two topics proposed by the GICHD Technology Workshop, namely the methodology adopted by the explosion of an ammunition storage and the possible use of UAV (or UGV/UAV) in Technical survey and/or Close-in-Detection},
      project   = {TIRAMISU},
      unit      = {meca-ras,ciss}
    }

  • Y. Yvinec, Y. Baudoin, G. De Cubber, M. Armada, L. Marques, J. Desaulniers, M. Bajic, E. Cepolina, and M. Zoppi, “TIRAMISU: FP7-Project for an integrated toolbox in Humanitarian Demining, focus on UGV, UAV and technical survey," in 6th IARP Workshop on Risky Interventions and Environmental Surveillance (RISE), Warsaw, Poland, 2012.
    [BibTeX] [Abstract] [Download PDF]

    The TIRAMISU project aims at providing the foundation for a global toolbox that will cover the main mine action activities, from the survey of large areas to the actual disposal of explosive hazards, including mine risk education and training tools. After a short description of some tools, particular emphasis will be given to the two topics proposed by the GICHD Technology Workshop, namely the methodology adopted by the explosion of an ammunition storage and the possible use of UAV (or UGV/UAV) in Technical survey and/or Close-in-Detection

    @inproceedings{yvinec2012tiramisu02,
    author = {Yvinec, Yann and Baudoin, Yvan and De Cubber, Geert and Armada, Manuel and Marques, Lino and Desaulniers, Jean-Marc and Bajic, Milan and Cepolina, Emanuela and Zoppi, Marco},
    booktitle = {6th IARP Workshop on Risky Interventions and Environmental Surveillance (RISE)},
    title = {{TIRAMISU}: {FP7}-Project for an integrated toolbox in Humanitarian Demining, focus on {UGV}, {UAV} and technical survey},
    year = {2012},
    abstract = {The TIRAMISU project aims at providing the foundation for a global toolbox that will cover the main mine action activities, from the survey of large areas to the actual disposal of explosive hazards, including mine risk education and training tools. After a short description of some tools, particular emphasis will be given to the two topics proposed by the GICHD Technology Workshop, namely the methodology adopted by the explosion of an ammunition storage and the possible use of UAV (or UGV/UAV) in Technical survey and/or Close-in-Detection},
    address = {Warsaw, Poland},
    project = {TIRAMISU},
    url = {http://mecatron.rma.ac.be/pub/2012/RISE-TIRAMISU.pdf},
    unit= {meca-ras,ciss}
    }

  • G. De Cubber, D. Doroftei, Y. Baudoin, D. Serrano, K. Chintamani, R. Sabino, and S. Ourevitch, “ICARUS: Providing Unmanned Search and Rescue Tools," in 6th IARP Workshop on Risky Interventions and Environmental Surveillance (RISE), Warsaw, Poland, 2012.
    [BibTeX] [Abstract] [Download PDF]

    The ICARUS EU-FP7 project deals with the development of a set of integrated components to assist search and rescue teams in dealing with the difficult and dangerous, but life-saving task of finding human survivors. The ICARUS tools consist of assistive unmanned air, ground and sea vehicles, equipped with victim detection sensors. The unmanned vehicles collaborate as a coordinated team, communicating via ad hoc cognitive radio networking. To ensure optimal human-robot collaboration, these tools are seamlessly integrated into the C4I equipment of the human crisis managers and a set of training and support tools is provided to them to learn to use the ICARUS system.

    @inproceedings{de2012icarus01,
    author = {De Cubber, Geert and Doroftei, Daniela and Baudoin, Yvan and Serrano, Daniel and Chintamani, Keshav and Sabino, Rui and Ourevitch, Stephane},
    booktitle = {6th IARP Workshop on Risky Interventions and Environmental Surveillance (RISE)},
    title = {{ICARUS}: Providing Unmanned Search and Rescue Tools},
    year = {2012},
    abstract = {The ICARUS EU-FP7 project deals with the development of a set of integrated components to assist search and rescue teams in dealing with the difficult and dangerous, but life-saving task of finding human survivors. The ICARUS tools consist of assistive unmanned air, ground and sea vehicles, equipped with victim detection sensors. The unmanned vehicles collaborate as a coordinated team, communicating via ad hoc cognitive radio networking. To ensure optimal human-robot collaboration, these tools are seamlessly integrated into the C4I equipment of the human crisis managers and a set of training and support tools is provided to them to learn to use the ICARUS system.},
    project = {ICARUS},
    address = {Warsaw, Poland},
    url = {http://mecatron.rma.ac.be/pub/2012/RISE2012_ICARUS.pdf},
    unit= {meca-ras}
    }

  • D. Doroftei, G. De Cubber, and K. Chintamani, “Towards collaborative human and robotic rescue workers," in 5th International Workshop on Human-Friendly Robotics (HFR2012), Brussels, Belgium, 2012, p. 18–19.
    [BibTeX] [Abstract] [Download PDF]

    This paper discusses some of the main remaining bottlenecks towards the successful introduction of robotic search and rescue (SAR) tools, collaborating with human rescue workers. It also sketches some of the recent advances which are being made to in the context of the European ICARUS project to get rid of these bottlenecks.

    @inproceedings{doroftei2012towards,
      author    = {Doroftei, Daniela and De Cubber, Geert and Chintamani, Keshav},
      title     = {Towards collaborative human and robotic rescue workers},
      booktitle = {5th International Workshop on Human-Friendly Robotics (HFR2012)},
      year      = {2012},
      pages     = {18--19},
      address   = {Brussels, Belgium},
      url       = {http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.303.6697&rep=rep1&type=pdf},
      abstract  = {This paper discusses some of the main remaining bottlenecks towards the successful introduction of robotic search and rescue (SAR) tools, collaborating with human rescue workers. It also sketches some of the recent advances which are being made to in the context of the European ICARUS project to get rid of these bottlenecks.},
      project   = {ICARUS},
      unit      = {meca-ras}
    }

  • A. Conduraru, I. Conduraru, E. Puscalau, G. De Cubber, D. Doroftei, and H. Balta, “Development of an autonomous rough-terrain robot," in IROS2012 Workshop on Robots and Sensors integration in future rescue INformation system (ROSIN’12), Villamoura, Portugal, 2012.
    [BibTeX] [Abstract] [Download PDF]

    In this paper, we discuss the development process of a mobile robot intended for environmental observation applications. The paper describes how a standard tele-operated Explosive Ordnance Disposal (EOD) robot was upgraded with electronics, sensors, computing power and autonomous capabilities, such that it becomes able to execute semi-autonomous missions, e.g. for search & rescue or humanitarian demining tasks. The aim of this paper is not to discuss the details of the navigation algorithms (as these are often task-dependent), but more to concentrate on the development of the platform and its control architecture as a whole.

    @inproceedings{conduraru2012development,
    author = {Conduraru, Alina and Conduraru, Ionel and Puscalau, Emanuel and De Cubber, Geert and Doroftei, Daniela and Balta, Haris},
    booktitle = {IROS2012 Workshop on Robots and Sensors integration in future rescue INformation system (ROSIN'12)},
    title = {Development of an autonomous rough-terrain robot},
    year = {2012},
    abstract = {In this paper, we discuss the development process of a mobile robot intended for environmental observation applications. The paper describes how a standard tele-operated Explosive Ordnance Disposal (EOD) robot was upgraded with electronics, sensors, computing power and autonomous capabilities, such that it becomes able to execute semi-autonomous missions, e.g. for search \& rescue or humanitarian demining tasks. The aim of this paper is not to discuss the details of the navigation algorithms (as these are often task-dependent), but more to concentrate on the development of the platform and its control architecture as a whole.},
    project = {ICARUS},
    address = {Villamoura, Portugal},
    url = {https://pdfs.semanticscholar.org/884e/6a80c8768044a1fd68ee91f45f17e5125153.pdf},
    unit= {meca-ras}
    }

  • G. De Cubber, D. Doroftei, Y. Baudoin, D. Serrano, K. Chintamani, R. Sabino, and S. Ourevitch, “Operational RPAS scenarios envisaged for search & rescue by the EU FP7 ICARUS project," in Remotely Piloted Aircraft Systems for Civil Operations (RPAS2012), Brussels, Belgium, 2012.
    [BibTeX] [Abstract] [Download PDF]

    The ICARUS EU-FP7 project deals with the development of a set of integrated components to assist search and rescue teams in dealing with the difficult and dangerous, but life-saving task of finding human survivors. The ICARUS tools consist of assistive unmanned air, ground and sea vehicles, equipped with victim detection sensors. The unmanned vehicles collaborate as a coordinated team, communicating via ad hoc cognitive radio networking. To ensure optimal human-robot collaboration, these tools are seamlessly integrated into the C4I equipment of the human crisis managers and a set of training and support tools is provided to them to learn to use the ICARUS system.

    @inproceedings{de2012operational,
      author    = {De Cubber, Geert and Doroftei, Daniela and Baudoin, Yvan and Serrano, Daniel and Chintamani, Keshav and Sabino, Rui and Ourevitch, Stephane},
      title     = {Operational {RPAS} scenarios envisaged for search \& rescue by the {EU FP7 ICARUS} project},
      booktitle = {Remotely Piloted Aircraft Systems for Civil Operations (RPAS2012)},
      year      = {2012},
      address   = {Brussels, Belgium},
      url       = {http://mecatron.rma.ac.be/pub/2012/De-Cubber-Geert_RMA_Belgium_WP.pdf},
      abstract  = {The ICARUS EU-FP7 project deals with the development of a set of integrated components to assist search and rescue teams in dealing with the difficult and dangerous, but life-saving task of finding human survivors. The ICARUS tools consist of assistive unmanned air, ground and sea vehicles, equipped with victim detection sensors. The unmanned vehicles collaborate as a coordinated team, communicating via ad hoc cognitive radio networking. To ensure optimal human-robot collaboration, these tools are seamlessly integrated into the C4I equipment of the human crisis managers and a set of training and support tools is provided to them to learn to use the ICARUS system.},
      project   = {ICARUS},
      unit      = {meca-ras}
    }

  • G. De Cubber, D. Doroftei, Y. Baudoin, D. Serrano, K. Chintamani, R. Sabino, and S. Ourevitch, “ICARUS: AN EU-FP7 PROJECT PROVIDING UNMANNED SEARCH AND RESCUE TOOLS," in IROS2012 Workshop on Robots and Sensors integration in future rescue INformation system (ROSIN’12), Villamoura, Portugal, 2012.
    [BibTeX] [Abstract] [Download PDF]

    Overview of the objectives of the ICARUS project

    @inproceedings{de2012icarus02,
    author = {De Cubber, Geert and Doroftei, Daniela and Baudoin, Yvan and Serrano, Daniel and Chintamani, Keshav and Sabino, Rui and Ourevitch, Stephane},
    booktitle = {IROS2012 Workshop on Robots and Sensors integration in future rescue INformation system (ROSIN'12)},
    title = {{ICARUS}: An {EU-FP7} Project Providing Unmanned Search and Rescue Tools},
    year = {2012},
    abstract = {Overview of the objectives of the ICARUS project},
    project = {ICARUS},
    address = {Villamoura, Portugal},
    url = {http://mecatron.rma.ac.be/pub/2012/Icarus%20-%20ROSIN2012%20Presentation.pdf},
    internal-note = {review: title de-capitalised from all caps; abbreviated author names expanded to match the identical author list of de2012icarus01; URL spaces percent-encoded},
    unit= {meca-ras}
    }

  • T. Nguyen, “Optimum detaching movement for apples harvesting robot," in International Conference of Agricultural Engineering, CIGR-Ageng, 2012.
    [BibTeX]
    @inproceedings{proc_tt_002,
    author = {Nguyen, T. Th.},
    title = {Optimum detaching movement for apples harvesting robot},
    booktitle = {International Conference of Agricultural Engineering, CIGR-Ageng},
    month = jul,
    year = {2012}
    }

2011

  • G. De Cubber, D. Doroftei, H. Sahli, and Y. Baudoin, “Outdoor Terrain Traversability Analysis for Robot Navigation using a Time-Of-Flight Camera," in RGB-D Workshop on 3D Perception in Robotics, Vasteras, Sweden, 2011.
    [BibTeX] [Abstract] [Download PDF]

    Autonomous robotic systems operating in unstructured outdoor environments need to estimate the traversability of the terrain in order to navigate safely. Traversability estimation is a challenging problem, as the traversability is a complex function of both the terrain characteristics, such as slopes, vegetation, rocks, etc and the robot mobility characteristics, i.e. locomotion method, wheels, etc. It is thus required to analyze in real-time the 3D characteristics of the terrain and pair this data to the robot capabilities. In this paper, a method is introduced to estimate the traversability using data from a time-of-flight camera.

    @inproceedings{de2011outdoor,
    author = {De Cubber, Geert and Doroftei, Daniela and Sahli, Hichem and Baudoin, Yvan},
    booktitle = {RGB-D Workshop on 3D Perception in Robotics},
    title = {Outdoor Terrain Traversability Analysis for Robot Navigation using a Time-Of-Flight Camera},
    year = {2011},
    abstract = {Autonomous robotic systems operating in unstructured outdoor environments need to estimate the traversability of the terrain in order to navigate safely. Traversability estimation is a challenging problem, as the traversability is a complex function of both the terrain characteristics, such as slopes, vegetation, rocks, etc and the robot mobility characteristics, i.e. locomotion method, wheels, etc. It is thus required to analyze in real-time the 3D characteristics of the terrain and pair this data to the robot capabilities. In this paper, a method is introduced to estimate the traversability using data from a time-of-flight camera.},
    project = {ViewFinder, Mobiniss},
    address = {Vasteras, Sweden},
    url = {http://mecatron.rma.ac.be/pub/2011/TTA_TOF.pdf},
    unit= {meca-ras}
    }

  • G. De Cubber and D. Doroftei, “Multimodal terrain analysis for an all-terrain crisis Management Robot," in IARP HUDEM 2011, Sibenik, Croatia, 2011.
    [BibTeX] [Abstract] [Download PDF]

    In this paper, a novel stereo-based terrain-traversability estimation methodology is proposed. The novelty is that – in contrary to classic depth-based terrain classification algorithms – all the information of the stereo camera system is used, also the color information. Using this approach, depth and color information are fused in order to obtain a higher classification accuracy than is possible with uni-modal techniques

    @InProceedings{de2011multimodal,
    author = {De Cubber, Geert and Doroftei, Daniela},
    booktitle = {IARP HUDEM 2011},
    title = {Multimodal terrain analysis for an all-terrain crisis Management Robot},
    year = {2011},
    abstract = {In this paper, a novel stereo-based terrain-traversability estimation methodology is proposed. The novelty is that – in contrary to classic depth-based terrain classification algorithms – all the information of the stereo camera system is used, also the color information. Using this approach, depth and color information are fused in order to obtain a higher classification accuracy than is possible with uni-modal techniques},
    project = {Mobiniss},
    address = {Sibenik, Croatia},
    url = {http://mecatron.rma.ac.be/pub/2011/Multimodal%20terrain%20analysis%20for%20an%20all-terrain%20crisis%20management%20robot%20.pdf},
    unit = {meca-ras}
    }

  • G. De Cubber, D. Doroftei, K. Verbiest, and S. A. Berrabah, “Autonomous camp surveillance with the ROBUDEM robot: challenges and results," in IARP Workshop RISE’2011, Belgium, 2011.
    [BibTeX] [Abstract] [Download PDF]

    Autonomous robotic systems can help for risky interventions to reduce the risk to human lives. An example of such a risky intervention is a camp surveillance scenario, where an environment needs to be patrolled and intruders need to be detected and intercepted. This paper describes the development of a mobile outdoor robot which is capable of performing such a camp surveillance task. The key research issues tackled are the robot design, geo-referenced localization and path planning, traversability estimation, the optimization of the terrain coverage strategy and the development of an intuitive human-robot interface.

    @InProceedings{de2011autonomous,
    author = {De Cubber, Geert and Doroftei, Daniela and Verbiest, Kristel and Berrabah, Sid Ahmed},
    booktitle = {IARP Workshop RISE'2011},
    title = {Autonomous camp surveillance with the {ROBUDEM} robot: challenges and results},
    year = {2011},
    abstract = {Autonomous robotic systems can help for risky interventions to reduce the risk to human lives. An example of such a risky intervention is a camp surveillance scenario, where an environment needs to be patrolled and intruders need to be detected and intercepted. This paper describes the development of a mobile outdoor robot which is capable of performing such a camp surveillance task. The key research issues tackled are the robot design, geo-referenced localization and path planning, traversability estimation, the optimization of the terrain coverage strategy and the development of an intuitive human-robot interface.},
    project = {Mobiniss},
    address = {Belgium},
    url = {http://mecatron.rma.ac.be/pub/2011/ELROB-RISE.pdf},
    unit = {meca-ras}
    }

  • D. Doroftei and E. Colon, “Decentralized multi-robot coordination for a risky surveillance application," in Proc. IARP HUDEM 2011, Sibenik, Croatia, 2011.
    [BibTeX] [Abstract] [Download PDF]

    This paper proposes a multi-robot control methodology that is based on a behavior-based control framework. In this behavior-based context, the robotic team members are controlled using one of 2 mutually exclusive behaviors: patrolling or intercepting. In patrol mode the robot seeks to detect enemy forces as rapidly as possible, by balancing 2 constraints: the intervention time should be minimized and the map coverage should be maximized. In interception mode, the robot tries to advance towards an enemy which was detected by one of the robotic team members. Subsequently, the robot tries to neutralize the threat posed by the enemy before enemy is able to reach the camp.

    @InProceedings{doro2011decentralized,
    author    = {Doroftei, Daniela and Colon, Eric},
    title     = {Decentralized multi-robot coordination for a risky surveillance application},
    booktitle = {Proc. {IARP} {HUDEM} 2011},
    publisher = {{IARP}},
    year      = {2011},
    address   = {Sibenik, Croatia},
    abstract  = {This paper proposes a multi-robot control methodology that is based on a behavior-based control framework. In this behavior-based context, the robotic team members are controlled using one of 2 mutually exclusive behaviors: patrolling or intercepting. In patrol mode the robot seeks to detect enemy forces as rapidly as possible, by balancing 2 constraints: the intervention time should be minimized and the map coverage should be maximized. In interception mode, the robot tries to advance towards an enemy which was detected by one of the robotic team members. Subsequently, the robot tries to neutralize the threat posed by the enemy before enemy is able to reach the camp. },
    project   = {NMRS},
    url       = {http://mecatron.rma.ac.be/pub/2011/HUDEM2011_Doroftei_Colon.pdf},
    unit      = {meca-ras}
    }

  • T. Nguyen, E. Laroche, L. Cuvillon, J. Gangloff, and O. Piccin, “Identification d’un robot à câbles," in Journées Identification et Modélisation Expérimentale (JIME), 2011.
    [BibTeX]
    @InProceedings{proc_tt_001,
    author = {Nguyen, T. Th. and Laroche, E. and Cuvillon, L. and Gangloff, J. and Piccin, O.},
    title = {Identification d'un robot à câbles},
    booktitle = {Journées Identification et Modélisation Expérimentale (JIME)},
    month = apr,
    year = {2011}
    }

2010

  • G. De Cubber, “On-line and Off-line 3D Reconstruction for Crisis Management Applications," in Fourth International Workshop on Robotics for risky interventions and Environmental Surveillance-Maintenance, RISE’2010, Sheffield, UK, 2010.
    [BibTeX] [Abstract] [Download PDF]

    We present in this paper a 3D reconstruction methodology. This approach fuses dense stereo and sparse motion data to estimate high quality instantaneous depth maps. This methodology achieves near realtime processing frame rates, such that it can be directly used on-line by the crisis management teams.

    @InProceedings{de2010line,
    author = {De Cubber, Geert},
    booktitle = {Fourth International Workshop on Robotics for risky interventions and Environmental Surveillance-Maintenance, RISE'2010},
    title = {On-line and Off-line {3D} Reconstruction for Crisis Management Applications},
    year = {2010},
    abstract = {We present in this paper a 3D reconstruction methodology. This approach fuses dense stereo and sparse motion data to estimate high quality instantaneous depth maps. This methodology achieves near realtime processing frame rates, such that it can be directly used on-line by the crisis management teams.},
    project = {ViewFinder, Mobiniss},
    address = {Sheffield, UK},
    url = {http://mecatron.rma.ac.be/pub/RISE/RISE%20-%202010/On-line%20and%20Off-line%203D%20Reconstruction_Geert_De_Cubber.pdf},
    unit = {meca-ras}
    }

  • Y. Baudoin, G. De Cubber, E. Colon, D. Doroftei, and S. A. Berrabah, “Robotics Assistance by Risky Interventions: Needs and Realistic Solutions," in Workshop on Robotics for Extreme conditions, Saint-Petersburg, Russia, 2010.
    [BibTeX] [Abstract] [Download PDF]

    This paper discusses the requirements towards robotics systems in the domains of firefighting, CBRN-E and humanitarian demining.

    @InProceedings{baudoin2010robotics,
    author = {Baudoin, Yvan and De Cubber, Geert and Colon, Eric and Doroftei, Daniela and Berrabah, Sid Ahmed},
    booktitle = {Workshop on Robotics for Extreme conditions},
    title = {Robotics Assistance by Risky Interventions: Needs and Realistic Solutions},
    year = {2010},
    abstract = {This paper discusses the requirements towards robotics systems in the domains of firefighting, CBRN-E and humanitarian demining.},
    project = {ViewFinder, Mobiniss},
    address = {Saint-Petersburg, Russia},
    url = {http://mecatron.rma.ac.be/pub/2010/Robotics%20Assistance%20by%20risky%20interventions.pdf},
    unit = {meca-ras}
    }

  • G. De Cubber, D. Doroftei, and S. A. Berrabah, “Using visual perception for controlling an outdoor robot in a crisis management scenario," in ROBOTICS 2010, Clermont-Ferrand, France, 2010.
    [BibTeX] [Abstract] [Download PDF]

    Crisis management teams (e.g. fire and rescue services, anti-terrorist units …) are often confronted with dramatic situations where critical decisions have to be made within hard time constraints. Therefore, they need correct information about what is happening on the crisis site. In this context, the View-Finder projects aims at developing robots which can assist the human crisis managers, by gathering data. This paper gives an overview of the development of such an outdoor robot. The presented robotic system is able to detect human victims at the incident site, by using vision-based human body shape detection. To increase the perceptual awareness of the human crisis managers, the robotic system is capable of reconstructing a 3D model of the environment, based on vision data. Also for navigation, the robot depends mostly on visual perception, as it combines a model-based navigation approach using geo-referenced positioning with stereo-based terrain traversability analysis for obstacle avoidance. The robot control scheme is embedded in a behavior-based robot control architecture, which integrates all the robot capabilities. This paper discusses all the above mentioned technologies.

    @InProceedings{de2010using,
    author = {De Cubber, Geert and Doroftei, Daniela and Berrabah, Sid Ahmed},
    booktitle = {ROBOTICS 2010},
    title = {Using visual perception for controlling an outdoor robot in a crisis management scenario},
    year = {2010},
    abstract = {Crisis management teams (e.g. fire and rescue services, anti-terrorist units ...) are often confronted with dramatic situations where critical decisions have to be made within hard time constraints. Therefore, they need correct information about what is happening on the crisis site. In this context, the View-Finder projects aims at developing robots which can assist the human crisis managers, by gathering data. This paper gives an overview of the development of such an outdoor robot. The presented robotic system is able to detect human victims at the incident site, by using vision-based human body shape detection. To increase the perceptual awareness of the human crisis managers, the robotic system is capable of reconstructing a 3D model of the environment, based on vision data. Also for navigation, the robot depends mostly on visual perception, as it combines a model-based navigation approach using geo-referenced positioning with stereo-based terrain traversability analysis for obstacle avoidance. The robot control scheme is embedded in a behavior-based robot control architecture, which integrates all the robot capabilities. This paper discusses all the above mentioned technologies.},
    project = {ViewFinder, Mobiniss},
    address = {Clermont-Ferrand, France},
    unit = {meca-ras},
    url = {http://mecatron.rma.ac.be/pub/2010/Usingvisualperceptionforcontrollinganoutdoorrobotinacrisismanagementscenario%20(1).pdf}
    }

  • D. Doroftei and E. Colon, “Multi-robot collaboration and coordination in a high-risk transportation scenario," in Proc. IARP HUDEM 2010, Sousse, Tunisia, 2010.
    [BibTeX] [Abstract] [Download PDF]

    This paper discusses a decentralized multi-robot coordination strategy which aims to control and guide a team of robotic agents safely through a hostile area. The ”hostility” of the environment is due to the presence of enemy forces, seeking to intercept the robotic team. In order to avoid detection and ensure global team safety, the robotic agents must carefully plan their trajectory towards a list of goal locations, while holding a defensive formation.

    @InProceedings{doro2001multi,
    author = {Doroftei, Daniela and Colon, Eric},
    booktitle = {Proc. {IARP} {HUDEM} 2010},
    title = {Multi-robot collaboration and coordination in a high-risk transportation scenario},
    year = {2010},
    publisher = {{IARP}},
    abstract = {This paper discusses a decentralized multi-robot coordination strategy which aims to control and guide a team of robotic agents safely through a hostile area. The ``hostility'' of the environment is due to the presence of enemy forces, seeking to intercept the robotic team. In order to avoid detection and ensure global team safety, the robotic agents must carefully plan their trajectory towards a list of goal locations, while holding a defensive formation.},
    project = {NMRS},
    address = {Sousse, Tunisia},
    url = {http://mecatron.rma.ac.be/pub/HUDEM/HUDEM%20-%202010/HUDEM2010_Doroftei.pdf},
    unit = {meca-ras}
    }

  • D. Doroftei and E. Colon, “Decentralized Multi-Robot Coordination for Risky Interventions," in Fourth International Workshop on Robotics for risky interventions and Environmental Surveillance-Maintenance RISE, Sheffield, UK, 2010.
    [BibTeX] [Abstract] [Download PDF]

    The paper describes an approach to design a behavior-based architecture, how each behavior was designed and how the behavior fusion problem was solved.

    @InProceedings{doro2010multibis,
    author    = {Doroftei, Daniela and Colon, Eric},
    title     = {Decentralized Multi-Robot Coordination for Risky Interventions},
    booktitle = {Fourth International Workshop on Robotics for risky interventions and Environmental Surveillance-Maintenance {RISE}},
    year      = {2010},
    address   = {Sheffield, UK},
    abstract  = {The paper describes an approach to design a behavior-based architecture, how each behavior was designed and how the behavior fusion problem was solved.},
    project   = {NMRS, ViewFinder},
    url       = {http://mecatron.rma.ac.be/pub/RISE/RISE%20-%202010/Decentralized%20Multi-Robot%20Coordination%20for%20Risky%20Interventio.pdf},
    unit      = {meca-ras}
    }

  • S. Sarkar, S. N. Shome, and S. Nandy, “An intelligent algorithm for the path planning of autonomous mobile robot for dynamic environment," in FIRA RoboWorld Congress, 2010, p. 202–209.
    [BibTeX]
    @inproceedings{sarkar2010intelligent,
    title={An intelligent algorithm for the path planning of autonomous mobile robot for dynamic environment},
    author={Sarkar, Soumic and Shome, Sankar Nath and Nandy, S.},
    booktitle={FIRA RoboWorld Congress},
    pages={202--209},
    year={2010},
    publisher={Springer}
    }

2009

  • G. De Cubber, D. Doroftei, L. Nalpantidis, G. C. Sirakoulis, and A. Gasteratos, “Stereo-based terrain traversability analysis for robot navigation," in IARP/EURON Workshop on Robotics for Risky Interventions and Environmental Surveillance, Brussels, Belgium, 2009.
    [BibTeX] [Abstract] [Download PDF]

    In this paper, we present an approach where a classification of the terrain in the classes traversable and obstacle is performed using only stereo vision as input data.

    @InProceedings{de2009stereo,
    author = {De Cubber, Geert and Doroftei, Daniela and Nalpantidis, Lazaros and Sirakoulis, Georgios Ch and Gasteratos, Antonios},
    booktitle = {IARP/EURON Workshop on Robotics for Risky Interventions and Environmental Surveillance},
    title = {Stereo-based terrain traversability analysis for robot navigation},
    year = {2009},
    abstract = {In this paper, we present an approach where a classification of the terrain in the classes traversable and obstacle is performed using only stereo vision as input data.},
    project = {ViewFinder, Mobiniss},
    address = {Brussels, Belgium},
    url = {http://mecatron.rma.ac.be/pub/2009/RISE-DECUBBER-DUTH.pdf},
    unit = {meca-ras}
    }

  • G. De Cubber and G. Marton, “Human Victim Detection," in Third International Workshop on Robotics for risky interventions and Environmental Surveillance-Maintenance, RISE, Brussels, Belgium, 2009.
    [BibTeX] [Abstract] [Download PDF]

    This paper presents an approach to achieve robust victim detection from color video images. The applied approach goes out from the Viola-Jones algorithm for Haar-features based template recognition. This algorithm was adapted to recognize persons lying on the ground in difficult outdoor illumination conditions.

    @InProceedings{de2009human,
    author    = {De Cubber, Geert and Marton, Gabor},
    title     = {Human Victim Detection},
    booktitle = {Third International Workshop on Robotics for risky interventions and Environmental Surveillance-Maintenance, RISE},
    year      = {2009},
    address   = {Brussels, Belgium},
    abstract  = {This paper presents an approach to achieve robust victim detection from color video images. The applied approach goes out from the Viola-Jones algorithm for Haar-features based template recognition. This algorithm was adapted to recognize persons lying on the ground in difficult outdoor illumination conditions.},
    project   = {ViewFinder, Mobiniss},
    url       = {http://mecatron.rma.ac.be/pub/2009/RISE-DECUBBER_BUTE.pdf},
    unit      = {meca-ras}
    }

  • D. Doroftei, G. De Cubber, E. Colon, and Y. Baudoin, “Behavior based control for an outdoor crisis management robot," in Proceedings of the IARP International Workshop on Robotics for Risky Interventions and Environmental Surveillance, Brussels, Belgium, 2009, p. 12–14.
    [BibTeX] [Abstract] [Download PDF]

    The design and development of a control architecture for a robotic crisis management agent raises 3 main questions: 1. How can we design the individual behaviors, such that the robot is capable of avoiding obstacles and of navigating semi-autonomously? 2. How can these individual behaviors be combined in an optimal, leading to a rational and coherent global robot behavior? 3. How can all these capabilities be combined in a comprehensive and modular framework, such that the robot can handle a high-level task (searching for human victims) with minimal input from human operators, by navigating in a complex, dynamic and environment, while avoiding potentially hazardous obstacles? In this paper, we present each of these three main aspects of the general robot control architecture more in detail.

    @InProceedings{doroftei2009behavior,
    author    = {Doroftei, Daniela and De Cubber, Geert and Colon, Eric and Baudoin, Yvan},
    title     = {Behavior based control for an outdoor crisis management robot},
    booktitle = {Proceedings of the IARP International Workshop on Robotics for Risky Interventions and Environmental Surveillance},
    year      = {2009},
    pages     = {12--14},
    address   = {Brussels, Belgium},
    abstract  = {The design and development of a control architecture for a robotic crisis management agent raises 3 main questions:
    1. How can we design the individual behaviors, such that the robot is capable of avoiding obstacles and of navigating semi-autonomously?
    2. How can these individual behaviors be combined in an optimal, leading to a rational and coherent global robot behavior?
    3. How can all these capabilities be combined in a comprehensive and modular framework, such that the robot can handle a high-level task (searching for human victims) with minimal input from human operators, by navigating in a complex, dynamic and environment, while avoiding potentially hazardous obstacles?
    In this paper, we present each of these three main aspects of the general robot control architecture more in detail.},
    project   = {ViewFinder, Mobiniss},
    url       = {http://mecatron.rma.ac.be/pub/2009/RISE-DOROFTEI.pdf},
    unit      = {meca-ras}
    }

  • Y. Baudoin, D. Doroftei, D. G. Cubber, S. A. Berrabah, C. Pinzon, F. Warlet, J. Gancet, E. Motard, M. Ilzkovitz, L. Nalpantidis, and A. Gasteratos, “VIEW-FINDER : Robotics assistance to fire-fighting services and Crisis Management," in 2009 IEEE International Workshop on Safety, Security & Rescue Robotics (SSRR 2009), Denver, USA, 2009, p. 1–6.
    [BibTeX] [Abstract] [Download PDF] [DOI]

    In the event of an emergency due to a fire or other crisis, a necessary but time consuming pre-requisite, that could delay the real rescue operation, is to establish whether the ground or area can be entered safely by human emergency workers. The objective of the VIEW-FINDER project is to develop robots which have the primary task of gathering data. The robots are equipped with sensors that detect the presence of chemicals and, in parallel, image data is collected and forwarded to an advanced Control station (COC). The robots will be equipped with a wide array of chemical sensors, on-board cameras, Laser and other sensors to enhance scene understanding and reconstruction. At the Base Station (BS) the data is processed and combined with geographical information originating from a Web of sources; thus providing the personnel leading the operation with in-situ processed data that can improve decision making. This paper will focus on the Crisis Management Information System that has been developed for improving a Disaster Management Action Plan and for linking the Control Station with a out-site Crisis Management Centre, and on the software tools implemented on the mobile robot gathering data in the outdoor area of the crisis.

    @InProceedings{Baudoin2009view01,
    author = {Baudoin, Y. and Doroftei, D. and De Cubber, G. and Berrabah, S. A. and Pinzon, C. and Warlet, F. and Gancet, J. and Motard, E. and Ilzkovitz, M. and Nalpantidis, L. and Gasteratos, A.},
    booktitle = {2009 {IEEE} International Workshop on Safety, Security {\&} Rescue Robotics ({SSRR} 2009)},
    title = {{VIEW}-{FINDER} : Robotics assistance to fire-fighting services and Crisis Management},
    year = {2009},
    month = nov,
    pages = {1--6},
    publisher = {{IEEE}},
    abstract = {In the event of an emergency due to a fire or other crisis, a necessary but time consuming pre-requisite, that could delay the real rescue operation, is to establish whether the ground or area can be entered safely by human emergency workers. The objective of the VIEW-FINDER project is to develop robots which have the primary task of gathering data. The robots are equipped with sensors that detect the presence of chemicals and, in parallel, image data is collected and forwarded to an advanced Control station (COC). The robots will be equipped with a wide array of chemical sensors, on-board cameras, Laser and other sensors to enhance scene understanding and reconstruction. At the Base Station (BS) the data is processed and combined with geographical information originating from a Web of sources; thus providing the personnel leading the operation with in-situ processed data that can improve decision making. This paper will focus on the Crisis Management Information System that has been developed for improving a Disaster Management Action Plan and for linking the Control Station with a out-site Crisis Management Centre, and on the software tools implemented on the mobile robot gathering data in the outdoor area of the crisis.},
    doi = {10.1109/ssrr.2009.5424172},
    project = {ViewFinder},
    address = {Denver, USA},
    url = {https://ieeexplore.ieee.org/document/5424172},
    unit = {meca-ras}
    }

  • Y. Baudoin, D. Doroftei, G. De Cubber, S. A. Berrabah, C. Pinzon, J. Penders, A. Maslowski, and J. Bedkowski, “VIEW-FINDER : Outdoor Robotics Assistance to Fire-Fighting services," in International Symposium Clawar, Istanbul, Turkey, 2009.
    [BibTeX] [Abstract] [Download PDF]

    In the event of an emergency due to a fire or other crisis, a necessary but time consuming pre-requisite, that could delay the real rescue operation, is to establish whether the ground or area can be entered safely by human emergency workers. The objective of the VIEW-FINDER project is to develop robots which have the primary task of gathering data. The robots are equipped with sensors that detect the presence of chemicals and, in parallel, image data is collected and forwarded to an advanced Control station (COC). The robots will be equipped with a wide array of chemical sensors, on-board cameras, Laser and other sensors to enhance scene understanding and reconstruction. At the control station the data is processed and combined with geographical information originating from a web of sources; thus providing the personnel leading the operation with in-situ processed data that can improve decision making. The information may also be forwarded to other forces involved in the operation (e.g. fire fighters, rescue workers, police, etc.). The robots will be designed to navigate individually or cooperatively and to follow high-level instructions from the base station. The robots are off-theshelf units, consisting of wheeled robots. The robots connect wirelessly to the control station. The control station collects in-situ data and combines it with information retrieved from the large-scale GMES-information bases. It will be equipped with a sophisticated human interface to display the processed information to the human operators and operation command.

    @InProceedings{baudoin2009view02,
    author = {Baudoin, Yvan and Doroftei, Daniela and De Cubber, Geert and Berrabah, Sid Ahmed and Pinzon, Carlos and Penders, Jacques and Maslowski, Andrzej and Bedkowski, Janusz},
    booktitle = {International Symposium Clawar},
    title = {{VIEW-FINDER} : Outdoor Robotics Assistance to Fire-Fighting services},
    year = {2009},
    abstract = {In the event of an emergency due to a fire or other crisis, a necessary but time consuming pre-requisite, that could delay the real rescue operation, is to establish whether the ground or area can be entered safely by human emergency workers. The objective of the VIEW-FINDER project is to develop robots which have the primary task of gathering data. The robots are equipped with sensors that detect the presence of chemicals and, in parallel, image data is collected and forwarded to an advanced Control station (COC). The robots will be equipped with a wide array of chemical sensors, on-board cameras, Laser and other sensors to enhance scene understanding and reconstruction. At the control station the data is processed and combined with geographical information originating from a web of sources; thus providing the personnel leading the operation with in-situ processed data that can improve decision making. The information may also be forwarded to other forces involved in the operation (e.g. fire fighters, rescue workers, police, etc.). The robots will be designed to navigate individually or cooperatively and to follow high-level instructions from the base station. The robots are off-the-shelf units, consisting of wheeled robots. The robots connect wirelessly to the control station. The control station collects in-situ data and combines it with information retrieved from the large-scale GMES-information bases. It will be equipped with a sophisticated human interface to display the processed information to the human operators and operation command.},
    project = {ViewFinder, Mobiniss},
    address = {Istanbul, Turkey},
    url = {http://mecatron.rma.ac.be/pub/2009/CLAWAR2009.pdf},
    unit = {meca-ras}
    }

  • Y. Baudoin, D. Doroftei, G. De Cubber, S. A. Berrabah, E. Colon, C. Pinzon, A. Maslowski, and J. Bedkowski, “View-Finder: a European project aiming the Robotics assistance to Fire-fighting services and Crisis Management," in IARP workshop on Service Robotics and Nanorobotics, Bejing, China, 2009.
    [BibTeX] [Abstract] [Download PDF]

    In the event of an emergency due to a fire or other crisis, a necessary but time consuming pre-requisite, that could delay the real rescue operation, is to establish whether the ground or area can be entered safely by human emergency workers. The objective of the VIEW-FINDER project is to develop robots which have the primary task of gathering data. The robots are equipped with sensors that detect the presence of chemicals and, in parallel, image data is collected and forwarded to an advanced Control station (COC). The robots will be equipped with a wide array of chemical sensors, on-board cameras, Laser and other sensors to enhance scene understanding and reconstruction. At the control station the data is processed and combined with geographical information originating from a web of sources; thus providing the personnel leading the operation with in-situ processed data that can improve decision making. The information may also be forwarded to other forces involved in the operation (e.g. fire fighters, rescue workers, police, etc.). The robots connect wirelessly to the control station. The control station collects in-situ data and combines it with information retrieved from the large-scale GMES-information bases. It will be equipped with a sophisticated human interface to display the processed information to the human operators and operation command. We’ll essentially focus in this paper to the steps entrusted to the RMA and PIAP through the work-packages of the project.

    @InProceedings{baudoin2009view03,
    author    = {Baudoin, Yvan and Doroftei, Daniela and De Cubber, Geert and Berrabah, Sid Ahmed and Colon, Eric and Pinzon, Carlos and Maslowski, Andrzej and Bedkowski, Janusz},
    title     = {{View-Finder}: a European project aiming the Robotics assistance to Fire-fighting services and Crisis Management},
    booktitle = {IARP workshop on Service Robotics and Nanorobotics},
    year      = {2009},
    address   = {Bejing, China},
    abstract  = {In the event of an emergency due to a fire or other crisis, a necessary but time consuming pre-requisite, that could delay the real rescue operation, is to establish whether the ground or area can be entered safely by human emergency workers. The objective of the VIEW-FINDER project is to develop robots which have the primary task of gathering data. The robots are equipped with sensors that detect the presence of chemicals and, in parallel, image data is collected and forwarded to an advanced Control station (COC). The robots will be equipped with a wide array of chemical sensors, on-board cameras, Laser and other sensors to enhance scene understanding and reconstruction. At the control station the data is processed and combined with geographical information originating from a web of sources; thus providing the personnel leading the operation with in-situ processed data that can improve decision making. The information may also be forwarded to other forces involved in the operation (e.g. fire fighters, rescue workers, police, etc.). The robots connect wirelessly to the control station. The control station collects in-situ data and combines it with information retrieved from the large-scale GMES-information bases. It will be equipped with a sophisticated human interface to display the processed information to the human operators and operation command.
    We’ll essentially focus in this paper to the steps entrusted to the RMA and PIAP through the work-packages of the project.},
    project   = {ViewFinder},
    url       = {http://mecatron.rma.ac.be/pub/2009/IARP-paper2009.pdf},
    unit      = {meca-ras}
    }

  • Y. Baudoin, G. De Cubber, S. A. Berrabah, D. Doroftei, E. Colon, C. Pinzon, A. Maslowski, and J. Bedkowski, “VIEW-FINDER: European Project Aiming CRISIS MANAGEMENT TOOLS and the Robotics Assistance to Fire-Fighting Services," in IARP WS on service Robotics, Beijing, Bejing, China, 2009.
    [BibTeX] [Abstract] [Download PDF]

    Overview of the View-Finder project

    @InProceedings{baudoin2009view04,
    author = {Baudoin, Yvan and De Cubber, Geert and Berrabah, Sid Ahmed and Doroftei, Daniela and Colon, Eric and Pinzon, C. and Maslowski, A. and Bedkowski, Janusz},
    booktitle = {IARP WS on service Robotics, Beijing},
    title = {{VIEW-FINDER}: European Project Aiming CRISIS MANAGEMENT TOOLS and the Robotics Assistance to Fire-Fighting Services},
    year = {2009},
    abstract = {Overview of the View-Finder project},
    project = {ViewFinder},
    address = {Beijing, China},
    unit= {meca-ras},
    url = {https://www.academia.edu/2879650/VIEW-FINDER_European_Project_Aiming_CRISIS_MANAGEMENT_TOOLS_and_the_Robotics_Assistance_to_Fire-Fighting_Services},
    }

  • A. Borghgraef, F. Lapierre, W. Philips, and M. Acheroy, “Matched filter based detection of floating mines in IR spacetime," in Electro-Optical Remote Sensing, Photonic Technologies, and Applications III, 2009, p. 74820U.
    [BibTeX] [Download PDF] [DOI]
    @inproceedings{borghgraef:74820U,
    author = {Borghgraef, Alexander and Lapierre, Fabian and Philips, Wilfried and Acheroy, Marc},
    editor = {Gary W. Kamerman and Ove K. Steinvall and Keith L. Lewis and Richard C. Hollins and Thomas J. Merlet and Gary J. Bishop and John D. Gonglewski},
    title = {Matched filter based detection of floating mines in {IR} spacetime},
    booktitle = {Electro-Optical Remote Sensing, Photonic Technologies, and Applications III},
    publisher = {SPIE},
    year = {2009},
    volume = {7482},
    number = {1},
    eid = {74820U},
    numpages = {11},
    pages = {74820U},
    location = {Berlin, Germany},
    url = {http://link.aip.org/link/?PSI/7482/74820U/1},
    doi = {10.1117/12.830224},
    unit= {ciss}
    }

2008

  • G. De Cubber, L. Nalpantidis, G. C. Sirakoulis, and A. Gasteratos, “Intelligent robots need intelligent vision: visual 3D perception," in RISE’08: Proceedings of the EURON/IARP International Workshop on Robotics for Risky Interventions and Surveillance of the Environment, Benicassim, Spain, 2008.
    [BibTeX] [Abstract] [Download PDF]

    In this paper, we investigate the possibilities of stereo and structure from motion approaches. It is not the aim to compare both theories of depth reconstruction with the goal of designating a winner and a loser. Both methods are capable of providing sparse as well as dense 3D reconstructions and both approaches have their merits and defects. The thorough, year-long research in the field indicates that accurate depth perception requires a combination of methods rather than a sole one. In fact, cognitive research has shown that the human brain uses no less than 12 different cues to estimate depth. Therefore, we also finally introduce in a following section a methodology to integrate stereo and structure from motion.

    @InProceedings{de2008intelligent,
    author = {De Cubber, Geert and Nalpantidis, Lazaros and Sirakoulis, Georgios Ch. and Gasteratos, Antonios},
    booktitle = {RISE'08: Proceedings of the EURON/IARP International Workshop on Robotics for Risky Interventions and Surveillance of the Environment},
    title = {Intelligent robots need intelligent vision: visual {3D} perception},
    year = {2008},
    abstract = {In this paper, we investigate the possibilities of stereo and structure from motion approaches. It is not the aim to compare both theories of depth reconstruction with the goal of designating a winner and a loser. Both methods are capable of providing sparse as well as dense 3D reconstructions and both approaches have their merits and defects. The thorough, year-long research in the field indicates that accurate depth perception requires a combination of methods rather than a sole one. In fact, cognitive research has shown that the human brain uses no less than 12 different cues to estimate depth. Therefore, we also finally introduce in a following section a methodology to integrate stereo and structure from motion.},
    project = {ViewFinder, Mobiniss},
    address = {Benicassim, Spain},
    url = {http://mecatron.rma.ac.be/pub/2008/DeCubber.pdf},
    unit= {meca-ras}
    }

  • G. De Cubber, D. Doroftei, and G. Marton, “Development of a visually guided mobile robot for environmental observation as an aid for outdoor crisis management operations," in Proceedings of the IARP Workshop on Environmental Maintenance and Protection, Baden Baden, Germany, 2008.
    [BibTeX] [Abstract] [Download PDF]

    To solve these issues, an outdoor mobile robotic platform was equipped with a differential GPS system for accurate geo-registered positioning, and a stereo vision system. This stereo vision systems serves two purposes: 1) victim detection and 2) obstacle detection and avoidance. For semi-autonomous robot control and navigation, we rely on a behavior-based robot motion and path planner. In this paper, we present each of the three main aspects (victim detection, stereo-based obstacle detection and behavior-based navigation) of the general robot control architecture more in detail.

    @InProceedings{de2008development,
    author = {De Cubber, Geert and Doroftei, Daniela and Marton, Gabor},
    booktitle = {Proceedings of the IARP Workshop on Environmental Maintenance and Protection},
    title = {Development of a visually guided mobile robot for environmental observation as an aid for outdoor crisis management operations},
    year = {2008},
    abstract = {To solve these issues, an outdoor mobile robotic platform was equipped with a differential GPS system for accurate geo-registered positioning, and a stereo vision system. This stereo vision systems serves two purposes: 1) victim detection and 2) obstacle detection and avoidance. For semi-autonomous robot control and navigation, we rely on a behavior-based robot motion and path planner. In this paper, we present each of the three main aspects (victim detection, stereo-based obstacle detection and behavior-based navigation) of the general robot control architecture more in detail.},
    project = {ViewFinder, Mobiniss},
    address = {Baden Baden, Germany},
    url = {http://mecatron.rma.ac.be/pub/2008/environmental%20observation%20as%20an%20aid%20for%20outdoor%20crisis%20management%20operations.pdf},
    unit= {meca-ras}
    }

  • D. Doroftei and Y. Baudoin, “Development of a semi-autonomous De-mining vehicle," in 7th IARP Workshop HUDEM2008, Cairo, Egypt, 2008.
    [BibTeX] [Abstract]

    The paper describes the Development of a semi-autonomous De-mining vehicle

    @inproceedings{doro2008development,
      title     = {Development of a semi-autonomous De-mining vehicle},
      author    = {Doroftei, Daniela and Baudoin, Yvan},
      booktitle = {7th {IARP} Workshop {HUDEM}2008},
      year      = {2008},
      address   = {Cairo, Egypt},
      abstract  = {The paper describes the Development of a semi-autonomous De-mining vehicle},
      project   = {Mobiniss},
      unit      = {meca-ras}
    }

  • D. Doroftei and J. Bedkowski, “Towards the autonomous navigation of robots for risky interventions," in Third International Workshop on Robotics for risky interventions and Environmental Surveillance-Maintenance RISE, Benicassim, Spain, 2008.
    [BibTeX] [Abstract] [Download PDF]

    In the course of the ViewFinder project, two robotics teams (RMS and PIAP) are working on the development of an intelligent autonomous mobile robot. This paper reports on the progress of both teams.

    @inproceedings{doro2008towards,
      title     = {Towards the autonomous navigation of robots for risky interventions},
      author    = {Doroftei, Daniela and Bedkowski, Janusz},
      booktitle = {Third International Workshop on Robotics for risky interventions and Environmental Surveillance-Maintenance {RISE}},
      year      = {2008},
      address   = {Benicassim, Spain},
      abstract  = {In the course of the ViewFinder project, two robotics teams (RMS and PIAP) are working on the development of an intelligent autonomous mobile robot. This paper reports on the progress of both teams.},
      project   = {ViewFinder, Mobiniss},
      url       = {http://mecatron.rma.ac.be/pub/2008/Doroftei.pdf},
      unit      = {meca-ras}
    }

2007

  • G. De Cubber, “Dense 3D structure and motion estimation as an aid for robot navigation," in ISMCR 2007, Warsaw, Poland, 2007.
    [BibTeX] [Abstract] [Download PDF]

    Three-dimensional scene reconstruction is an important tool in many applications varying from computer graphics to mobile robot navigation. In this paper, we focus on the robotics application, where the goal is to estimate the 3D rigid motion of a mobile robot and to reconstruct a dense three-dimensional scene representation. The reconstruction problem can be subdivided into a number of subproblems. First, the egomotion has to be estimated. For this, the camera (or robot) motion parameters are iteratively estimated by reconstruction of the epipolar geometry. Secondly, a dense depth map is calculated by fusing sparse depth information from point features and dense motion information from the optical flow in a variational framework. This depth map corresponds to a point cloud in 3D space, which can then be converted into a model to extract information for the robot navigation algorithm. Here, we present an integrated approach for the structure and egomotion estimation problem.

    @InProceedings{de2007dense,
    author = {De Cubber, Geert},
    booktitle = {ISMCR 2007},
    title = {Dense {3D} structure and motion estimation as an aid for robot navigation},
    year = {2007},
    abstract = {Three-dimensional scene reconstruction is an important tool in many applications varying from computer graphics to mobile robot navigation. In this paper, we focus on the robotics application, where the goal is to estimate the 3D rigid motion of a mobile robot and to reconstruct a dense three-dimensional scene representation. The reconstruction problem can be subdivided into a number of subproblems. First, the egomotion has to be estimated. For this, the camera (or robot) motion parameters are iteratively estimated by reconstruction of the epipolar geometry. Secondly, a dense depth map is calculated by fusing sparse depth information from point features and dense motion information from the optical flow in a variational framework. This depth map corresponds to a point cloud in 3D space, which can then be converted into a model to extract information for the robot navigation algorithm. Here, we present an integrated approach for the structure and egomotion estimation problem.},
    project = {ViewFinder,Mobiniss},
    address = {Warsaw, Poland},
    url = {http://mecatron.rma.ac.be/pub/2007/Dense%203D%20Structure%20and%20Motion%20Estimation%20as%20an%20aid%20for%20Robot%20Navigation.pdf},
    unit= {meca-ras,vub-etro}
    }

  • D. Doroftei, E. Colon, and G. De Cubber, “A behaviour-based control and software architecture for the visually guided Robudem outdoor mobile robot," in ISMCR 2007, Warsaw, Poland, 2007.
    [BibTeX] [Abstract] [Download PDF]

    The design of outdoor autonomous robots requires the careful consideration and integration of multiple aspects: sensors and sensor data fusion, design of a control and software architecture, design of a path planning algorithm and robot control. This paper describes partial aspects of this research work, which is aimed at developing an semi‐autonomous outdoor robot for risky interventions. This paper focuses mainly on three main aspects of the design process: visual sensing using stereo and image motion analysis, design of a behaviour‐based control architecture and implementation of a modular software architecture.

    @InProceedings{doroftei2007behaviour,
    author = {Doroftei, Daniela and Colon, Eric and De Cubber, Geert},
    booktitle = {ISMCR 2007},
    title = {A behaviour-based control and software architecture for the visually guided {Robudem} outdoor mobile robot},
    year = {2007},
    address = {Warsaw, Poland},
    abstract = {The design of outdoor autonomous robots requires the careful consideration and integration of multiple aspects: sensors and sensor data fusion, design of a control and software architecture, design of a path planning algorithm and robot control. This paper describes partial aspects of this research work, which is aimed at developing an semi‐autonomous outdoor robot for risky interventions. This paper focuses mainly on three main aspects of the design process: visual sensing using stereo and image motion analysis, design of a behaviour‐based control architecture and implementation of a modular software architecture.},
    project = {ViewFinder,Mobiniss},
    url = {http://mecatron.rma.ac.be/pub/2007/Doroftei_ISMCR07.pdf},
    unit= {meca-ras}
    }

  • D. Doroftei, E. Colon, Y. Baudoin, and H. Sahli, “Development of a semi-autonomous off-road vehicle.," in IEEE HuMan’07’, Timimoun, Algeria, 2007, p. 340–343.
    [BibTeX] [Abstract] [Download PDF]

    Humanitarian demining is still a highly laborintensive and high-risk operation. Advanced sensors and mechanical aids can significantly reduce the demining time. In this context, it is the aim to develop a humanitarian demining mobile robot which is able to scan semi-automatically a minefield. This paper discusses the development of a control scheme for such a semi-autonomous mobile robot for humanitarian demining. This process requires the careful consideration and integration of multiple aspects: sensors and sensor data fusion, design of a control and software architecture, design of a path planning algorithm and robot control.

    @InProceedings{doro2007development,
    author = {Doroftei, Daniela and Colon, Eric and Baudoin, Yvan and Sahli, Hichem},
    booktitle = {{IEEE} {HuMan}'07},
    title = {Development of a semi-autonomous off-road vehicle},
    year = {2007},
    address = {Timimoun, Algeria},
    pages = {340--343},
    abstract = {Humanitarian demining is still a highly laborintensive and high-risk operation. Advanced sensors and mechanical aids can significantly reduce the demining time. In this context, it is the aim to develop a humanitarian demining mobile robot which is able to scan semi-automatically a minefield. This paper discusses the development of a control scheme for such a semi-autonomous mobile robot for humanitarian demining. This process requires the careful consideration and integration of multiple aspects: sensors and sensor data fusion, design of a control and software architecture, design of a path planning algorithm and robot control.},
    project = {Mobiniss, ViewFinder},
    url = {http://mecatron.rma.ac.be/pub/2007/Development_of_a_semi-autonomous_off-road_vehicle.pdf},
    unit= {meca-ras}
    }

2006

  • S. A. Berrabah, G. De Cubber, V. Enescu, and H. Sahli, “MRF-Based Foreground Detection in Image Sequences from a Moving Camera," in 2006 International Conference on Image Processing, Atlanta, USA, 2006, p. 1125–1128.
    [BibTeX] [Abstract] [Download PDF] [DOI]

    This paper presents a Bayesian approach for simultaneously detecting the moving objects (foregrounds) and estimating their motion in image sequences taken with a moving camera mounted on the top of a mobile robot. To model the background, the algorithm uses the GMM approach for its simplicity and capability to adapt to illumination changes and small motions in the scene. To overcome the limitations of the GMM approach with its pixel-wise processing, the background model is combined with the motion cue in a maximum a posteriori probability (MAP)-MRF framework. This enables us to exploit the advantages of spatio-temporal dependencies that moving objects impose on pixels and the interdependence of motion and segmentation fields. As a result, the detected moving objects have visually attractive silhouettes and they are more accurate and less affected by noise than those obtained with simple pixel-wise methods. To enhance the segmentation accuracy, the background model is re-updated using the MAP-MRF results. Experimental results and a qualitative study of the proposed approach are presented on image sequences with a static camera as well as with a moving camera.

    @inproceedings{berrabah2006mrf,
      title        = {{MRF}-Based Foreground Detection in Image Sequences from a Moving Camera},
      author       = {Berrabah, Sid Ahmed and De Cubber, Geert and Enescu, Valentin and Sahli, Hichem},
      booktitle    = {2006 International Conference on Image Processing},
      year         = {2006},
      month        = oct,
      pages        = {1125--1128},
      publisher    = {{IEEE}},
      organization = {IEEE},
      address      = {Atlanta, USA},
      doi          = {10.1109/icip.2006.312754},
      url          = {http://ieeexplore.ieee.org/xpls/abs_all.jsp?arnumber=4106732},
      abstract     = {This paper presents a Bayesian approach for simultaneously detecting the moving objects (foregrounds) and estimating their motion in image sequences taken with a moving camera mounted on the top of a mobile robot. To model the background, the algorithm uses the GMM approach for its simplicity and capability to adapt to illumination changes and small motions in the scene. To overcome the limitations of the GMM approach with its pixel-wise processing, the background model is combined with the motion cue in a maximum a posteriori probability (MAP)-MRF framework. This enables us to exploit the advantages of spatio-temporal dependencies that moving objects impose on pixels and the interdependence of motion and segmentation fields. As a result, the detected moving objects have visually attractive silhouettes and they are more accurate and less affected by noise than those obtained with simple pixel-wise methods. To enhance the segmentation accuracy, the background model is re-updated using the MAP-MRF results. Experimental results and a qualitative study of the proposed approach are presented on image sequences with a static camera as well as with a moving camera.},
      project      = {MOBINISS,ViewFinder},
      unit         = {meca-ras,vub-etro}
    }

  • K. Cauwerts, G. De Cubber, T. Geerinck, W. Mattheyses, I. Ravyse, H. Sahli, M. Shami, P. Soens, W. Verhelst, and P. Verhoeve, “Audio-Visual Signal Processing: Speech and emotion processing for human-machine interaction," in Second annual IEEE BENELUX/DSP Valley Signal Processing Symposium (SPS-DARTS 2006), Brussels, Belgium, 2006.
    [BibTeX] [Download PDF]
    @InProceedings{cauwerts2006audio,
    author = {Cauwerts, Kenny and De Cubber, Geert and Geerinck, Thomas and Mattheyses, W. and Ravyse, Ilse and Sahli, Hichem and Shami, M. and Soens, P. and Verhelst, Werner and Verhoeve, P.},
    booktitle = {Second annual {IEEE} {BENELUX}/{DSP} Valley Signal Processing Symposium ({SPS-DARTS} 2006)},
    title = {Audio-Visual Signal Processing: Speech and emotion processing for human-machine interaction},
    year = {2006},
    address = {Brussels, Belgium},
    unit= {meca-ras},
    url = {https://www.semanticscholar.org/paper/Audio-Visual-Signal-Processing:-Speech-and-emotion-Cauwerts-Cubber/c6cc775bfc9f5528c8c889d32af53566f1ae8415},
    }

  • D. Doroftei, E. Colon, and Y. Baudoin, “A modular control architecture for semi-autonomous navigation," in CLAWAR 2006, Brussels, Belgium, 2006, p. 712–715.
    [BibTeX] [Abstract] [Download PDF]

    Humanitarian demining is still a highly laborintensive and high-risk operation. Advanced sensors and mechanical aids can significantly reduce the demining time. In this context, it is the aim to develop a humanitarian demining mobile robot which is able to scan semi-automatically a minefield. This paper discusses the development of a control scheme for such a semi-autonomous mobile robot for humanitarian demining. This process requires the careful consideration and integration of multiple aspects: sensors and sensor data fusion, design of a control and software architecture, design of a path planning algorithm and robot control.

    @InProceedings{doro2006modular,
    author = {Doroftei, Daniela and Colon, Eric and Baudoin, Yvan},
    booktitle = {{CLAWAR} 2006},
    title = {A modular control architecture for semi-autonomous navigation},
    year = {2006},
    pages = {712--715},
    abstract = {Humanitarian demining is still a highly laborintensive and high-risk operation. Advanced sensors and mechanical aids can significantly reduce the demining time. In this context, it is the aim to develop a humanitarian demining mobile robot which is able to scan semi-automatically a minefield. This paper discusses the development of a control scheme for such a semi-autonomous mobile robot for humanitarian demining. This process requires the careful consideration and integration of multiple aspects: sensors and sensor data fusion, design of a control and software architecture, design of a path planning algorithm and robot control.},
    project = {Mobiniss, ViewFinder},
    address = {Brussels, Belgium},
    url = {http://mecatron.rma.ac.be/pub/2006/Clawar2006_Doroftei_colon.pdf},
    unit= {meca-ras}
    }

  • D. Doroftei, E. Colon, and Y. Baudoin, “Development of a control architecture for the ROBUDEM outdoor mobile robot platform," in IARP Workshop RISE 2006, Brussels, Belgium, 2006.
    [BibTeX] [Abstract] [Download PDF]

    Humanitarian demining still is a highly labor-intensive and high-risk operation. Advanced sensors and mechanical aids can significantly reduce the demining time. In this context, it is the aim to develop a humanitarian demining mobile robot which is able to scan a minefield semi-automatically. This paper discusses the development of a control scheme for such a semi-autonomous mobile robot for humanitarian demining. This process requires the careful consideration and integration of multiple aspects: sensors and sensor data fusion, design of a control and software architecture, design of a path planning algorithm and robot control.

    @InProceedings{doro2006development,
    author = {Doroftei, Daniela and Colon, Eric and Baudoin, Yvan},
    booktitle = {{IARP} Workshop {RISE} 2006},
    title = {Development of a control architecture for the {ROBUDEM} outdoor mobile robot platform},
    year = {2006},
    abstract = {Humanitarian demining still is a highly labor-intensive and high-risk operation. Advanced sensors and mechanical aids can significantly reduce the demining time. In this context, it is the aim to develop a humanitarian demining mobile robot which is able to scan a minefield semi-automatically. This paper discusses the development of a control scheme for such a semi-autonomous mobile robot for humanitarian demining. This process requires the careful consideration and integration of multiple aspects: sensors and sensor data fusion, design of a control and software architecture, design of a path planning algorithm and robot control.},
    project = {Mobiniss, ViewFinder},
    address = {Brussels, Belgium},
    url = {http://mecatron.rma.ac.be/pub/2006/IARPWS2006_Doroftei_Colon.pdf},
    unit= {meca-ras}
    }

  • A. Borghgraef and M. Acheroy, “Using optical flow for the detection of floating mines in IR image sequences.," in Proceedings of SPIE Optics and Photonics in Security and Defence 2006, Stockholm, Sweden, 2006.
    [BibTeX]
    @InProceedings{OF_mines_OPSD_2006,
    author = {Borghgraef, A. and Acheroy, M.},
    title = {Using optical flow for the detection of floating mines in {IR} image sequences},
    booktitle = {Proceedings of SPIE Optics and Photonics in Security and Defence 2006},
    year = {2006},
    month = sep,
    volume = {6395},
    address = {Stockholm, Sweden},
    keywords = {SIC06},
    url = {http://www.sic.rma.ac.be/~aborghgr/articles/SPIE_OPSD_2006.pdf},
    unit= {ciss}
    }

2005

  • V. Enescu, G. De Cubber, H. Sahli, E. Demeester, D. Vanhooydonck, and M. Nuttin, “Active stereo vision-based mobile robot navigation for person tracking," in International Conference on Informatics in Control, Automation and Robotics, Barcelona, Spain, 2005, p. 32–39.
    [BibTeX] [Abstract] [Download PDF] [DOI]

    In this paper, we propose a mobile robot architecture for person tracking, consisting of an active stereo vision module (ASVM) and a navigation module (NM). The first tracks the person in stereo images and controls the pan/tilt unit to keep the target in the visual field. Its output, i.e. the 3D position of the person, is fed to the NM, which drives the robot towards the target while avoiding obstacles. As a peculiarity of the system, there is no feedback from the NM or the robot motion controller (RMC) to the ASVM. While this imparts flexibility in combining the ASVM with a wide range of robot platforms, it puts considerable strain on the ASVM.Indeed, besides the changes in the target dynamics, it has to cope with the robot motion during obstacle avoidance. These disturbances are accommodated by generating target location hypotheses in an efficient manner. Robustness against outliers and occlusions is achieved by employing a multi-hypothesis tracking method – the particle filter – based on a color model of the target. Moreover, to deal with illumination changes, the system adaptively updates the color model of the target. The main contributions of this paper lie in (1) devising a stereo, color-based target tracking method using the stereo geometry constraint and (2) integrating it with a robotic agent in a loosely coupled manner.

    @InProceedings{enescu2005active,
    author = {Enescu, Valentin and De Cubber, Geert and Sahli, Hichem and Demeester, Eric and Vanhooydonck, Dirk and Nuttin, Marnix},
    booktitle = {International Conference on Informatics in Control, Automation and Robotics},
    title = {Active stereo vision-based mobile robot navigation for person tracking},
    year = {2005},
    address = {Barcelona, Spain},
    month = sep,
    pages = {32--39},
    abstract = {In this paper, we propose a mobile robot architecture for person tracking, consisting of an active stereo vision module (ASVM) and a navigation module (NM). The first tracks the person in stereo images and controls the pan/tilt unit to keep the target in the visual field. Its output, i.e. the 3D position of the person, is fed to the NM, which drives the robot towards the target while avoiding obstacles. As a peculiarity of the system, there is no feedback from the NM or the robot motion controller (RMC) to the ASVM. While this imparts flexibility in combining the ASVM with a wide range of robot platforms, it puts considerable strain on the ASVM. Indeed, besides the changes in the target dynamics, it has to cope with the robot motion during obstacle avoidance. These disturbances are accommodated by generating target location hypotheses in an efficient manner. Robustness against outliers and occlusions is achieved by employing a multi-hypothesis tracking method - the particle filter - based on a color model of the target. Moreover, to deal with illumination changes, the system adaptively updates the color model of the target. The main contributions of this paper lie in (1) devising a stereo, color-based target tracking method using the stereo geometry constraint and (2) integrating it with a robotic agent in a loosely coupled manner.},
    project = {Mobiniss, ViewFinder},
    doi = {10.3233/ica-2006-13302},
    url = {http://mecatron.rma.ac.be/pub/2005/f969ee9e1169623340aa409f539fddb9c413.pdf},
    unit= {meca-ras,vub-etro}
    }

2004

  • A. Borghgraef and S. Gautama, “Change detection in SAR images: spatial accuracy analysis," in IEEE Intl. Geoscience and Remote Sensing Symposium, Anchorage, USA, 2004.
    [BibTeX]
    @InProceedings{IGARSS_2004,
    author = {Borghgraef, A. and Gautama, S.},
    title = {Change detection in {SAR} images: spatial accuracy analysis},
    booktitle = {{IEEE} Intl. Geoscience and Remote Sensing Symposium},
    year = {2004},
    month = sep,
    address = {Anchorage, USA},
    unit= {ciss}
    }

2003

  • G. De Cubber, H. Sahli, E. Colon, and Y. Baudoin, “Visual Servoing under Changing Illumination Conditions," in Proc. International Workshop on Attention and Performance in Computer Vision (ICVS03), Graz, Austria, 2003, p. 47–54.
    [BibTeX] [Abstract] [Download PDF]

    Visual servoing, or the control of motion on the basis of image analysis in a closed loop, is more and more recognized as an important tool in modern robotics. In this paper, we present a new model-driven approach to derive a description of the motion of a target object. This method can be subdivided into an illumination invariant target detection stage and a servoing process which uses an adaptive Kalman filter to update the model of the nonlinear system. This technique can be applied to any pan-tilt-zoom camera mounted on a mobile vehicle as well as to a static camera tracking moving environmental features

    @inproceedings{de2003visual,
      title     = {Visual Servoing under Changing Illumination Conditions},
      author    = {De Cubber, Geert and Sahli, Hichem and Colon, Eric and Baudoin, Yvan},
      booktitle = {Proc. International Workshop on Attention and Performance in Computer Vision (ICVS03)},
      year      = {2003},
      pages     = {47--54},
      address   = {Graz, Austria},
      abstract  = {Visual servoing, or the control of motion on the basis of image analysis in a closed loop, is more and more recognized as an important tool in modern robotics. In this paper, we present a new model-driven approach to derive a description of the motion of a target object. This method can be subdivided into an illumination invariant target detection stage and a servoing process which uses an adaptive Kalman filter to update the model of the nonlinear system. This technique can be applied to any pan-tilt-zoom camera mounted on a mobile vehicle as well as to a static camera tracking moving environmental features},
      url       = {http://mecatron.rma.ac.be/pub/2003/ICVS03_Geert.pdf},
      project   = {Mobiniss},
      unit      = {meca-ras,vub-etro}
    }

  • G. De Cubber, S. A. Berrabah, and H. Sahli, “A Bayesian Approach for Color Constancy based Visual Servoing," in 11th International Conference on Advanced Robotics, Coimbra, Portugal, 2003.
    [BibTeX] [Download PDF]
    @InProceedings{de2003bayesian,
    author = {De Cubber, Geert and Berrabah, Sid Ahmed and Sahli, Hichem},
    booktitle = {11th International Conference on Advanced Robotics},
    title = {A {Bayesian} Approach for Color Constancy based Visual Servoing},
    year = {2003},
    address = {Coimbra, Portugal},
    unit= {meca-ras,vub-etro},
    project = {Mobiniss},
    url = {https://www.semanticscholar.org/paper/A-Bayesian-Approach-for-Color-Constancy-based-Cubber-Berrabah/ed5636626e307f2b8d0c5f4fcc79d5d54a9cc639},
    }

  • A. Borghgraef and S. Gautama, “Analysis of spatial quality for change detection using SAR images," in 14th ProRISC workshop on Circuits, Systems and Signal Processing, Veldhoven, Netherlands, 2003.
    [BibTeX]
    @InProceedings{ProRISC_2003,
    author = {Borghgraef, A. and Gautama, S.},
    title = {Analysis of spatial quality for change detection using {SAR} images},
    booktitle = {14th {ProRISC} workshop on Circuits, Systems and Signal Processing},
    year = {2003},
    month = nov,
    address = {Veldhoven, Netherlands},
    unit= {ciss}
    }

  • S. Gautama and A. Borghgraef, “Using graph matching to compare VHR satellite images with GIS data," in Proceedings IEEE International Geoscience and Remote Sensing Symposium 2003, Toulouse, France, 2003.
    [BibTeX]
    @InProceedings{GraphMatching_IGARSS_2003,
    author = {Gautama, S. and Borghgraef, A.},
    title = {Using graph matching to compare {VHR} satellite images with {GIS} data},
    booktitle = {Proceedings {IEEE} International Geoscience and Remote Sensing Symposium 2003},
    year = {2003},
    month = jul,
    address = {Toulouse, France},
    unit= {ciss}
    }

  • “Detecting change in road networks using continuous relaxation labeling," in Proc. Joint Workshop ISPRS/EARSEL High Resolution Mapping from Space 2003, Hannover, Germany, 2003.
    [BibTeX]
    @InProceedings{ISPRS_EARSEL_2003,
    title = {Detecting change in road networks using continuous relaxation labeling},
    booktitle = {Proc. Joint Workshop {ISPRS}/{EARSEL} High Resolution Mapping from Space 2003},
    year = {2003},
    month = oct,
    address = {Hannover, Germany},
    unit= {ciss},
    internal-note = {NOTE(review): required author field is missing -- presumably the same authors as the adjacent 2003/2004 entries (Gautama, Borghgraef); confirm against the original paper and add}
    }

2002

  • G. De Cubber, H. Sahli, and F. Decroos, “Sensor Integration on a Mobile Robot," in ISMCR 2002: Proc. 12th Int’l Symp. Measurement and Control in Robotics, Bourges, France, 2002.
    [BibTeX] [Abstract] [Download PDF]

    The purpose of this paper is to show an application of path planning for a mobile pneumatic robot. The robot is capable of searching for a specific target in the scene and navigating towards it, in an a priori unknown environment. To accomplish this task, the robot uses a colour pan-tilt camera and two ultrasonic sensors. As the camera is only used for target tracking, the robot is left with very incomplete sensor data with a high degree of uncertainty. To counter this, a fuzzy logic – based sensor fusion procedure is set up to aid the map building process in constructing a reliable environmental model. The significance of this work is that it shows that the use of fuzzy logic based fusion and potential field navigation can achieve good results for path planning

    @inproceedings{de2002sensor,
    author = {De Cubber, Geert and Sahli, Hichem and Decroos, Francis},
    title = {Sensor Integration on a Mobile Robot},
    booktitle = {ISMCR 2002: Proc. 12th Int'l Symp. Measurement and Control in Robotics},
    year = {2002},
    address = {Bourges, France},
    abstract = {The purpose of this paper is to show an application of path planning for a mobile pneumatic robot. The robot is capable of searching for a specific target in the scene and navigating towards it, in an a priori unknown environment. To accomplish this task, the robot uses a colour pan-tilt camera and two ultrasonic sensors. As the camera is only used for target tracking, the robot is left with very incomplete sensor data with a high degree of uncertainty. To counter this, a fuzzy logic - based sensor fusion procedure is set up to aid the map building process in constructing a reliable environmental model. The significance of this work is that it shows that the use of fuzzy logic based fusion and potential field navigation can achieve good results for path planning},
    url = {http://mecatron.rma.ac.be/pub/2002/Paper%20ISMCR'02%20-%20Sensor%20Integration%20on%20a%20Mobile%20Robot.pdf},
    project = {Mobiniss},
    unit= {meca-ras,vub-etro}
    }

  • G. De Cubber, H. Sahli, H. Ping, and E. Colon, “A Colour Constancy Approach for Illumination Invariant Colour Target Tracking," in IARP Workshop on Robots for Humanitarian Demining, Vienna, Austria, 2002.
    [BibTeX] [Abstract] [Download PDF]

    Many robotic agents use color vision to retrieve quality information about the environment. In this work, we present a visual servoing technique, where vision is the primary sensing modality and sensing is based upon the analysis of the perceived visual information. We describe how colored targets can be identified and how their position and motion can be estimated quickly and reliably. The visual servoing procedure is essentially a four-stage process, with color target identification, motion parameter estimation, target tracking and target position estimation. These individual parts add up to a global vision system enabling precise positioning for a demining robot.

    @inproceedings{de2002colour,
    author = {De Cubber, Geert and Sahli, Hichem and Ping, Hong and Colon, Eric},
    title = {A Colour Constancy Approach for Illumination Invariant Colour Target Tracking},
    booktitle = {IARP Workshop on Robots for Humanitarian Demining},
    year = {2002},
    address = {Vienna, Austria},
    abstract = {Many robotic agents use color vision to retrieve quality information about the environment. In this work, we present a visual servoing technique, where vision is the primary sensing modality and sensing is based upon the analysis of the perceived visual information. We describe how colored targets can be identified and how their position and motion can be estimated quickly and reliably. The visual servoing procedure is essentially a four-stage process, with color target identification, motion parameter estimation, target tracking and target position estimation. These individual parts add up to a global vision system enabling precise positioning for a demining robot.},
    url = {http://mecatron.rma.ac.be/pub/2002/Paper%20IARP%20-%20Geert%20De%20Cubber.pdf},
    project = {Mobiniss},
    unit= {meca-ras,vub-etro}
    }

  • A. Borghgraef and S. Gautama, “Performance study of genetic algorithms for graph matching problems," in 3rd IEEE Benelux Signal Processing Symposium 2002, Leuven, Belgium, 2002.
    [BibTeX]
    @inproceedings{IEEE_SPS_2002,
    author = {Borghgraef, A. and Gautama, S.},
    title = {Performance study of genetic algorithms for graph matching problems},
    booktitle = {3rd IEEE Benelux Signal Processing Symposium 2002},
    edition = {3},
    year = {2002},
    address = {Leuven, Belgium},
    unit= {ciss}
    }

  • S. Gautama, A. Borghgraef, and I. Bruyland, “Automatic registration of satellite images with GIS databases," in Proceedings Advanced Concepts for Intelligent Vision Systems ACIVS 2002, Gent, Belgium, 2002.
    [BibTeX]
    @inproceedings{ACIVS_2002,
    author = {Gautama, S. and Borghgraef, A. and Bruyland, I.},
    title = {Automatic registration of satellite images with {GIS} databases},
    booktitle = {Proceedings Advanced Concepts for Intelligent Vision Systems ACIVS 2002},
    address = {Gent, Belgium},
    year = {2002},
    unit= {ciss}
    }