{"id":1294,"date":"2022-07-22T11:51:53","date_gmt":"2022-07-22T11:51:53","guid":{"rendered":"https:\/\/www.heron-h2020.eu\/?page_id=1294"},"modified":"2024-07-08T13:07:02","modified_gmt":"2024-07-08T13:07:02","slug":"publications-2","status":"publish","type":"page","link":"https:\/\/www.heron-h2020.eu\/?page_id=1294","title":{"rendered":"Publications"},"content":{"rendered":"<p><div class=\"fusion-fullwidth fullwidth-box fusion-builder-row-1 fusion-flex-container hundred-percent-fullwidth non-hundred-percent-height-scrolling\" style=\"--awb-border-radius-top-left:0px;--awb-border-radius-top-right:0px;--awb-border-radius-bottom-right:0px;--awb-border-radius-bottom-left:0px;--awb-padding-top:100px;--awb-padding-bottom:100px;--awb-background-color:#ffffff;--awb-flex-wrap:wrap;\" ><div class=\"fusion-builder-row fusion-row fusion-flex-align-items-flex-start fusion-flex-justify-content-center fusion-flex-content-wrap\" style=\"width:104% !important;max-width:104% !important;margin-left: calc(-4% \/ 2 );margin-right: calc(-4% \/ 2 );\"><div class=\"fusion-layout-column fusion_builder_column fusion-builder-column-0 fusion_builder_column_2_3 2_3 fusion-flex-column\" style=\"--awb-bg-size:cover;--awb-width-large:66.666666666667%;--awb-margin-top-large:0px;--awb-spacing-right-large:2.88%;--awb-margin-bottom-large:2%;--awb-spacing-left-large:2.88%;--awb-width-medium:100%;--awb-spacing-right-medium:1.92%;--awb-spacing-left-medium:1.92%;--awb-width-small:100%;--awb-spacing-right-small:1.92%;--awb-spacing-left-small:1.92%;\"><div class=\"fusion-column-wrapper fusion-flex-justify-content-flex-start fusion-content-layout-column\"><div class=\"fusion-title title fusion-title-1 fusion-sep-none fusion-title-center fusion-title-text fusion-title-size-two\" style=\"--awb-margin-top-small:0px;--awb-margin-right-small:0px;--awb-margin-bottom-small:20px;--awb-margin-left-small:0px;\"><h2 class=\"fusion-title-heading title-heading-center fusion-responsive-typography-calculated\" style=\"margin:0;--fontSize:40;line-height:1.3;\">Publications<\/h2><\/div><div class=\"fusion-separator\" style=\"align-self: center;margin-left: auto;margin-right: auto;margin-bottom:30px;width:100%;max-width:300px;\"><div class=\"fusion-separator-border sep-single sep-solid\" style=\"--awb-height:20px;--awb-amount:20px;--awb-sep-color:#00b8f2;border-color:#00b8f2;border-top-width:2px;\"><\/div><\/div><\/div><\/div><\/div><\/div><div class=\"fusion-fullwidth fullwidth-box fusion-builder-row-2 fusion-flex-container hundred-percent-fullwidth non-hundred-percent-height-scrolling\" style=\"--awb-border-radius-top-left:0px;--awb-border-radius-top-right:0px;--awb-border-radius-bottom-right:0px;--awb-border-radius-bottom-left:0px;--awb-background-color:#f8f8f8;--awb-flex-wrap:wrap;\" ><div class=\"fusion-builder-row fusion-row fusion-flex-align-items-flex-start fusion-flex-content-wrap\" style=\"width:104% !important;max-width:104% !important;margin-left: calc(-4% \/ 2 );margin-right: calc(-4% \/ 2 );\"><div class=\"fusion-layout-column fusion_builder_column fusion-builder-column-1 fusion_builder_column_1_1 1_1 fusion-flex-column\" style=\"--awb-bg-size:cover;--awb-width-large:100%;--awb-margin-top-large:0px;--awb-spacing-right-large:1.92%;--awb-margin-bottom-large:0px;--awb-spacing-left-large:1.92%;--awb-width-medium:100%;--awb-order-medium:0;--awb-spacing-right-medium:1.92%;--awb-spacing-left-medium:1.92%;--awb-width-small:100%;--awb-order-small:0;--awb-spacing-right-small:1.92%;--awb-spacing-left-small:1.92%;\"><div class=\"fusion-column-wrapper fusion-column-has-shadow fusion-flex-justify-content-flex-start fusion-content-layout-column\"><div class=\"fusion-separator fusion-full-width-sep\" style=\"align-self: center;margin-left: auto;margin-right: auto;margin-top:30px;width:100%;\"><\/div><div class=\"accordian fusion-accordian\" style=\"--awb-border-size:1px;--awb-icon-size:16px;--awb-content-font-size:16px;--awb-icon-alignment:left;--awb-hover-color:#f9f9fb;--awb-border-color:#e2e2e2;--awb-background-color:#ffffff;--awb-divider-color:#e0dede;--awb-divider-hover-color:#e0dede;--awb-icon-color:#ffffff;--awb-title-color:#636468;--awb-content-color:#858a9f;--awb-icon-box-color:#212934;--awb-toggle-hover-accent-color:#00b8f2;--awb-title-font-family:&quot;Roboto Slab&quot;;--awb-title-font-weight:700;--awb-title-font-style:normal;--awb-title-font-size:24px;--awb-title-line-height:1.4;--awb-content-font-family:&quot;Roboto Slab&quot;;--awb-content-font-style:normal;--awb-content-font-weight:300;\"><div class=\"panel-group fusion-toggle-icon-boxed\" id=\"accordion-1294-1\"><div class=\"fusion-panel panel-default panel-0759c0c69f66ccb52 fusion-toggle-has-divider\" style=\"--awb-title-color:#636468;--awb-content-color:#858a9f;\"><div class=\"panel-heading\"><h4 class=\"panel-title toggle\" id=\"toggle_0759c0c69f66ccb52\"><a aria-expanded=\"false\" aria-controls=\"0759c0c69f66ccb52\" role=\"button\" data-toggle=\"collapse\" data-parent=\"#accordion-1294-1\" data-target=\"#0759c0c69f66ccb52\" href=\"#0759c0c69f66ccb52\"><span class=\"fusion-toggle-icon-wrapper\" aria-hidden=\"true\"><i class=\"fa-fusion-box active-icon awb-icon-minus\" aria-hidden=\"true\"><\/i><i class=\"fa-fusion-box inactive-icon awb-icon-plus\" aria-hidden=\"true\"><\/i><\/span><span class=\"fusion-toggle-heading\">Robotic Maintenance of Road Infrastructures: The HERON Project<\/span><\/a><\/h4><\/div><div id=\"0759c0c69f66ccb52\" class=\"panel-collapse collapse \" aria-labelledby=\"toggle_0759c0c69f66ccb52\"><div class=\"panel-body toggle-content fusion-clearfix\">\n<p>Katsamenis I., Bimpas M., Protopapadakis E.,\u00a0 Zafeiropoulos Ch., Kalogeras D.,\u00a0 Doulamis A.,\u00a0 Doulamis N., Montoliu C. M-P.,\u00a0 Handanos Y., Schmidt F.,\u00a0 Ott L.,\u00a0 Cantero M.,\u00a0 Lopez R. (2022).\u00a0 Robotic Maintenance of Road Infrastructures: The HERON Project. <em><span class=\"epub-section__title\">Proceedings of the 15th International Conference on PErvasive Technologies Related to Assistive Environments, <\/span><\/em><span class=\"epub-section__title\">Corfu, Greece.<\/span><\/p>\n<p><a href=\"https:\/\/dl.acm.org\/doi\/10.1145\/3529190.3534746\">https:\/\/dl.acm.org\/doi\/10.1145\/3529190.3534746<\/a><\/p>\n<p><strong><em>Abstract:<\/em><\/strong> Of all public assets, road infrastructure tops the list. Roads are crucial for economic development and growth, providing access to education, health, and employment. The maintenance, repair, and upgrade of roads are therefore vital to road users\u2019 health and safety as well as to a well-functioning and prosperous modern economy. The EU-funded HERON project will develop an integrated automated system to adequately maintain road infrastructure. In turn, this will reduce accidents, lower maintenance costs, and increase road network capacity and efficiency. To coordinate maintenance works, the project will design an autonomous ground robotic vehicle that will be supported by autonomous drones. Sensors and scanners for 3D mapping will be used in addition to artificial intelligence toolkits to help coordinate road maintenance and upgrade workflows.<\/p>\n<div class=\"author-data\"><\/div>\n<\/div><\/div><\/div><\/div><\/div><div class=\"fusion-separator fusion-full-width-sep\" style=\"align-self: center;margin-left: auto;margin-right: auto;margin-bottom:30px;width:100%;\"><div class=\"fusion-separator-border sep-single sep-solid\" style=\"--awb-height:20px;--awb-amount:20px;--awb-sep-color:#00b8f2;border-color:#00b8f2;border-top-width:2px;\"><\/div><\/div><div class=\"fusion-separator fusion-full-width-sep\" style=\"align-self: center;margin-left: auto;margin-right: auto;margin-top:30px;width:100%;\"><\/div><div class=\"accordian fusion-accordian\" style=\"--awb-border-size:1px;--awb-icon-size:16px;--awb-content-font-size:16px;--awb-icon-alignment:left;--awb-hover-color:#f9f9fb;--awb-border-color:#e2e2e2;--awb-background-color:#ffffff;--awb-divider-color:#e0dede;--awb-divider-hover-color:#e0dede;--awb-icon-color:#ffffff;--awb-title-color:#636468;--awb-content-color:#858a9f;--awb-icon-box-color:#212934;--awb-toggle-hover-accent-color:#00b8f2;--awb-title-font-family:&quot;Roboto Slab&quot;;--awb-title-font-weight:700;--awb-title-font-style:normal;--awb-title-font-size:24px;--awb-title-line-height:1.4;--awb-content-font-family:&quot;Roboto Slab&quot;;--awb-content-font-style:normal;--awb-content-font-weight:300;\"><div class=\"panel-group fusion-toggle-icon-boxed\" id=\"accordion-1294-2\"><div class=\"fusion-panel panel-default panel-43505792c6a1112a5 fusion-toggle-has-divider\" style=\"--awb-title-color:#636468;--awb-content-color:#858a9f;\"><div class=\"panel-heading\"><h4 class=\"panel-title toggle\" id=\"toggle_43505792c6a1112a5\"><a aria-expanded=\"false\" aria-controls=\"43505792c6a1112a5\" role=\"button\" data-toggle=\"collapse\" data-parent=\"#accordion-1294-2\" data-target=\"#43505792c6a1112a5\" href=\"#43505792c6a1112a5\"><span class=\"fusion-toggle-icon-wrapper\" aria-hidden=\"true\"><i class=\"fa-fusion-box active-icon awb-icon-minus\" aria-hidden=\"true\"><\/i><i class=\"fa-fusion-box inactive-icon awb-icon-plus\" aria-hidden=\"true\"><\/i><\/span><span class=\"fusion-toggle-heading\">A holistic monitoring scheme for road infrastructures<\/span><\/a><\/h4><\/div><div id=\"43505792c6a1112a5\" class=\"panel-collapse collapse \" aria-labelledby=\"toggle_43505792c6a1112a5\"><div class=\"panel-body toggle-content fusion-clearfix\">\n<p>Zafeiropoulos Ch., Protopapadakis E., Chatzidaki A., Doulamis A., Vamvatsikos D., Zotos N., Bogdos G., Kostaridis A., Schmidt F., Ientile S., Sevilla I., Tilon S., Rallis I. (2022).\u00a0 A holistic monitoring scheme for road infrastructures. <em><span class=\"epub-section__title\">Proceedings of the 15th International Conference on PErvasive Technologies Related to Assistive Environments, <\/span><\/em><span class=\"epub-section__title\">Corfu, Greece.<\/span><\/p>\n<p><a href=\"https:\/\/dl.acm.org\/doi\/abs\/10.1145\/3529190.3534745\">https:\/\/dl.acm.org\/doi\/abs\/10.1145\/3529190.3534745<\/a><\/p>\n<p><strong><em>Abstract:<\/em><\/strong> This monitoring system aims at increasing the resilience of the road infrastructures and ensuring reliable network availability under unfavourable conditions, such as extreme weather, landslides, and earthquakes. The main target is to combine downscaled climate change scenarios (applied to road infrastructures) with simulation tools (structural\/geotechnical) and actual data (from existing and novel sensors), so as to provide the operators with an integrated tool able to support more effective management of their infrastructures at planning, maintenance and operation level. Towards this, the proposed framework aims to use high resolution modelling data for the determination and the assessment of the climatic risk of the selected, transport infrastructures and associated expected damages, use existing SHM data (from accelerometers, strain gauges etc.) with new types of sensor-generated data (computer vision) to feed the structural\/geotechnical simulator, utilize tailored weather forecasts (combining seamlessly all available data sources) for specific hot-spots, providing early warnings with corresponding impact assessment in real time; develop improved multi-temporal, multi-sensor UAV, computer vision and machine learning-based damage diagnostic for diverse transport infrastructures; design and implement a Holistic Resilience Assessment Platform environment as an innovative planning tool that will permit a quantitative resilience assessment through an end-to-end simulation environment, running \u201cwhat-if\u201d impact\/risk\/resilience assessment scenarios. The effects of adaptation measures can be investigated by changing the hazard, exposure and vulnerability input parameters; design and implement a Common Operational Picture, including an enhanced visualisation interface and an Incident Management System. The integrated platform (and its sub-modules) will be validated in two real case studies in Spain and in Greece.<\/p>\n<div class=\"author-data\"><\/div>\n<\/div><\/div><\/div><\/div><\/div><div class=\"fusion-separator fusion-full-width-sep\" style=\"align-self: center;margin-left: auto;margin-right: auto;margin-bottom:30px;width:100%;\"><div class=\"fusion-separator-border sep-single sep-solid\" style=\"--awb-height:20px;--awb-amount:20px;--awb-sep-color:#00b8f2;border-color:#00b8f2;border-top-width:2px;\"><\/div><\/div><div class=\"fusion-separator fusion-full-width-sep\" style=\"align-self: center;margin-left: auto;margin-right: auto;margin-top:30px;width:100%;\"><\/div><div class=\"accordian fusion-accordian\" style=\"--awb-border-size:1px;--awb-icon-size:16px;--awb-content-font-size:16px;--awb-icon-alignment:left;--awb-hover-color:#f9f9fb;--awb-border-color:#e2e2e2;--awb-background-color:#ffffff;--awb-divider-color:#e0dede;--awb-divider-hover-color:#e0dede;--awb-icon-color:#ffffff;--awb-title-color:#636468;--awb-content-color:#858a9f;--awb-icon-box-color:#212934;--awb-toggle-hover-accent-color:#00b8f2;--awb-title-font-family:&quot;Roboto Slab&quot;;--awb-title-font-weight:700;--awb-title-font-style:normal;--awb-title-font-size:24px;--awb-title-line-height:1.4;--awb-content-font-family:&quot;Roboto Slab&quot;;--awb-content-font-style:normal;--awb-content-font-weight:300;\"><div class=\"panel-group fusion-toggle-icon-boxed\" id=\"accordion-1294-3\"><div class=\"fusion-panel panel-default panel-c0af3bc97ab664842 fusion-toggle-has-divider\" style=\"--awb-title-color:#636468;--awb-content-color:#858a9f;\"><div class=\"panel-heading\"><h4 class=\"panel-title toggle\" id=\"toggle_c0af3bc97ab664842\"><a aria-expanded=\"false\" aria-controls=\"c0af3bc97ab664842\" role=\"button\" data-toggle=\"collapse\" data-parent=\"#accordion-1294-3\" data-target=\"#c0af3bc97ab664842\" href=\"#c0af3bc97ab664842\"><span class=\"fusion-toggle-icon-wrapper\" aria-hidden=\"true\"><i class=\"fa-fusion-box active-icon awb-icon-minus\" aria-hidden=\"true\"><\/i><i class=\"fa-fusion-box inactive-icon awb-icon-plus\" aria-hidden=\"true\"><\/i><\/span><span class=\"fusion-toggle-heading\">TraCon: A Novel Dataset for Real-Time Traffic Cones Detection Using Deep Learning<\/span><\/a><\/h4><\/div><div id=\"c0af3bc97ab664842\" class=\"panel-collapse collapse \" aria-labelledby=\"toggle_c0af3bc97ab664842\"><div class=\"panel-body toggle-content fusion-clearfix\">\n<p>Katsamenis I., Karolou E.E., Davradou A., Protopapadakis E., Doulamis A., Doulamis N., Kalogeras D. (2022).\u00a0 TraCon: A Novel Dataset for Real-Time Traffic Cones Detection Using Deep Learning. <em>Proceedings of the 2nd International Conference (NiDS 2022)<span class=\"epub-section__title\">, <\/span><\/em><span class=\"epub-section__title\">Athens, Greece.<\/span><\/p>\n<p><a href=\"https:\/\/doi.org\/10.1007\/978-3-031-17601-2_37\">https:\/\/doi.org\/10.1007\/978-3-031-17601-2_37<\/a><\/p>\n<p><strong><em>Abstract:<\/em><\/strong> Substantial progress has been made in the field of object detection in road scenes. However, it is mainly focused on vehicles and pedestrians. To this end, we investigate traffic cone detection, an object category crucial for road effects and maintenance. In this work, the YOLOv5 algorithm is employed, in order to find a solution for the efficient and fast detection of traffic cones. The YOLOv5 can achieve a high detection accuracy with the score of IoU up to 91.31%. The proposed method is been applied to an RGB roadwork image dataset, collected from various sources.<\/p>\n<div class=\"author-data\"><\/div>\n<\/div><\/div><\/div><\/div><\/div><div class=\"fusion-separator fusion-full-width-sep\" style=\"align-self: center;margin-left: auto;margin-right: auto;margin-bottom:30px;width:100%;\"><div class=\"fusion-separator-border sep-single sep-solid\" style=\"--awb-height:20px;--awb-amount:20px;--awb-sep-color:#00b8f2;border-color:#00b8f2;border-top-width:2px;\"><\/div><\/div><div class=\"fusion-separator fusion-full-width-sep\" style=\"align-self: center;margin-left: auto;margin-right: auto;margin-top:30px;width:100%;\"><\/div><div class=\"accordian fusion-accordian\" style=\"--awb-border-size:1px;--awb-icon-size:16px;--awb-content-font-size:16px;--awb-icon-alignment:left;--awb-hover-color:#f9f9fb;--awb-border-color:#e2e2e2;--awb-background-color:#ffffff;--awb-divider-color:#e0dede;--awb-divider-hover-color:#e0dede;--awb-icon-color:#ffffff;--awb-title-color:#636468;--awb-content-color:#858a9f;--awb-icon-box-color:#212934;--awb-toggle-hover-accent-color:#00b8f2;--awb-title-font-family:&quot;Roboto Slab&quot;;--awb-title-font-weight:700;--awb-title-font-style:normal;--awb-title-font-size:24px;--awb-title-line-height:1.4;--awb-content-font-family:&quot;Roboto Slab&quot;;--awb-content-font-style:normal;--awb-content-font-weight:300;\"><div class=\"panel-group fusion-toggle-icon-boxed\" id=\"accordion-1294-4\"><div class=\"fusion-panel panel-default panel-56bd74a44f0422980 fusion-toggle-has-divider\" style=\"--awb-title-color:#636468;--awb-content-color:#858a9f;\"><div class=\"panel-heading\"><h4 class=\"panel-title toggle\" id=\"toggle_56bd74a44f0422980\"><a aria-expanded=\"false\" aria-controls=\"56bd74a44f0422980\" role=\"button\" data-toggle=\"collapse\" data-parent=\"#accordion-1294-4\" data-target=\"#56bd74a44f0422980\" href=\"#56bd74a44f0422980\"><span class=\"fusion-toggle-icon-wrapper\" aria-hidden=\"true\"><i class=\"fa-fusion-box active-icon awb-icon-minus\" aria-hidden=\"true\"><\/i><i class=\"fa-fusion-box inactive-icon awb-icon-plus\" aria-hidden=\"true\"><\/i><\/span><span class=\"fusion-toggle-heading\">Evaluating YOLO Transferability Limitation for Road Infrastructures Monitoring<\/span><\/a><\/h4><\/div><div id=\"56bd74a44f0422980\" class=\"panel-collapse collapse \" aria-labelledby=\"toggle_56bd74a44f0422980\"><div class=\"panel-body toggle-content fusion-clearfix\">\n<p>Katsamenis I., Davradou A., Karolou E.E., Protopapadakis E., Doulamis A., Doulamis N., Kalogeras D. (2022).\u00a0 Evaluating YOLO Transferability Limitation for Road Infrastructures Monitoring. <em>Proceedings of the 2nd International Conference (NiDS 2022)<span class=\"epub-section__title\">, <\/span><\/em><span class=\"epub-section__title\">Athens, Greece.<\/span><\/p>\n<p><a href=\"https:\/\/doi.org\/10.1007\/978-3-031-17601-2_34\">https:\/\/doi.org\/10.1007\/978-3-031-17601-2_34<\/a><\/p>\n<p><strong><em>Abstract:<\/em><\/strong> Road infrastructure is positively associated with a country\u2019s socio-economic growth and therefore road maintenance is of great importance for every country. One of the critical maintenance steps is road damage detection, which typically requires large amounts of time and high costs. In this work, the YOLOv5 two-stage detector is leveraged, in order to create an image-based solution for road defect detection and classification. The damages are classified into three main categories: cracks, potholes, and blurred markings. The YOLOv5 can achieve a relatively high detection accuracy with a score of Intersection over Union (IoU) up to 88.89% and classification accuracy with an F1 score up to 80.72%. The precision and recall scores are 84.26% and 78.38%, respectively.<\/p>\n<div class=\"author-data\"><\/div>\n<\/div><\/div><\/div><\/div><\/div><div class=\"fusion-separator fusion-full-width-sep\" style=\"align-self: center;margin-left: auto;margin-right: auto;margin-bottom:30px;width:100%;\"><div class=\"fusion-separator-border sep-single sep-solid\" style=\"--awb-height:20px;--awb-amount:20px;--awb-sep-color:#00b8f2;border-color:#00b8f2;border-top-width:2px;\"><\/div><\/div><div class=\"fusion-separator fusion-full-width-sep\" style=\"align-self: center;margin-left: auto;margin-right: auto;margin-top:30px;width:100%;\"><\/div><div class=\"accordian fusion-accordian\" style=\"--awb-border-size:1px;--awb-icon-size:16px;--awb-content-font-size:16px;--awb-icon-alignment:left;--awb-hover-color:#f9f9fb;--awb-border-color:#e2e2e2;--awb-background-color:#ffffff;--awb-divider-color:#e0dede;--awb-divider-hover-color:#e0dede;--awb-icon-color:#ffffff;--awb-title-color:#636468;--awb-content-color:#858a9f;--awb-icon-box-color:#212934;--awb-toggle-hover-accent-color:#00b8f2;--awb-title-font-family:&quot;Roboto Slab&quot;;--awb-title-font-weight:700;--awb-title-font-style:normal;--awb-title-font-size:24px;--awb-title-line-height:1.4;--awb-content-font-family:&quot;Roboto Slab&quot;;--awb-content-font-style:normal;--awb-content-font-weight:300;\"><div class=\"panel-group fusion-toggle-icon-boxed\" id=\"accordion-1294-5\"><div class=\"fusion-panel panel-default panel-fb065ea87e193681d fusion-toggle-has-divider\" style=\"--awb-title-color:#636468;--awb-content-color:#858a9f;\"><div class=\"panel-heading\"><h4 class=\"panel-title toggle\" id=\"toggle_fb065ea87e193681d\"><a aria-expanded=\"false\" aria-controls=\"fb065ea87e193681d\" role=\"button\" data-toggle=\"collapse\" data-parent=\"#accordion-1294-5\" data-target=\"#fb065ea87e193681d\" href=\"#fb065ea87e193681d\"><span class=\"fusion-toggle-icon-wrapper\" aria-hidden=\"true\"><i class=\"fa-fusion-box active-icon awb-icon-minus\" aria-hidden=\"true\"><\/i><i class=\"fa-fusion-box inactive-icon awb-icon-plus\" aria-hidden=\"true\"><\/i><\/span><span class=\"fusion-toggle-heading\">Collaborative Robot Mapping using Spectral Graph Analysis<\/span><\/a><\/h4><\/div><div id=\"fb065ea87e193681d\" class=\"panel-collapse collapse \" aria-labelledby=\"toggle_fb065ea87e193681d\"><div class=\"panel-body toggle-content fusion-clearfix\">\n<p>Burnreiter L., Khattak S., Ott L., Siegwart R., Hutter M., Cadena C.. (2022).\u00a0 Collaborative Robot Mapping using Spectral Graph Analysis. <em>Proceedings of the 2022 International Conference on Robotics and Automation (ICRA)<span class=\"epub-section__title\">, <\/span><\/em><span class=\"epub-section__title\">Philadelphia, USA.<\/span><\/p>\n<p><a href=\"https:\/\/ieeexplore.ieee.org\/document\/9812102\">https:\/\/ieeexplore.ieee.org\/document\/9812102<\/a><\/p>\n<p><strong><em>Abstract:<\/em><\/strong> In this paper, we deal with the problem of creating globally consistent pose graphs in a centralized multi-robot SLAM framework. For each robot to act autonomously, individual onboard pose estimates and maps are maintained, which are then communicated to a central server to build an optimized global map. However, inconsistencies between onboard and server estimates can occur due to onboard odometry drift or failure. Furthermore, robots do not benefit from the collaborative map if the server provides no feedback in a computationally tractable and bandwidth-efficient manner. Motivated by this challenge, this paper proposes a novel collaborative mapping framework to enable accurate global mapping among robots and server. In particular, structural differences between robot and server graphs are exploited at different spatial scales using graph spectral analysis to generate necessary constraints for the individual robot pose graphs. The proposed approach is thoroughly analyzed and validated using several real-world multi-robot field deployments where we show improvements of the onboard system up to 90%.<\/p>\n<div class=\"author-data\"><\/div>\n<\/div><\/div><\/div><\/div><\/div><div class=\"fusion-separator fusion-full-width-sep\" style=\"align-self: center;margin-left: auto;margin-right: auto;margin-bottom:30px;width:100%;\"><div class=\"fusion-separator-border sep-single sep-solid\" style=\"--awb-height:20px;--awb-amount:20px;--awb-sep-color:#00b8f2;border-color:#00b8f2;border-top-width:2px;\"><\/div><\/div><div class=\"fusion-separator fusion-full-width-sep\" style=\"align-self: center;margin-left: auto;margin-right: auto;margin-top:30px;width:100%;\"><\/div><div class=\"accordian fusion-accordian\" style=\"--awb-border-size:1px;--awb-icon-size:16px;--awb-content-font-size:16px;--awb-icon-alignment:left;--awb-hover-color:#f9f9fb;--awb-border-color:#e2e2e2;--awb-background-color:#ffffff;--awb-divider-color:#e0dede;--awb-divider-hover-color:#e0dede;--awb-icon-color:#ffffff;--awb-title-color:#636468;--awb-content-color:#858a9f;--awb-icon-box-color:#212934;--awb-toggle-hover-accent-color:#00b8f2;--awb-title-font-family:&quot;Roboto Slab&quot;;--awb-title-font-weight:700;--awb-title-font-style:normal;--awb-title-font-size:24px;--awb-title-line-height:1.4;--awb-content-font-family:&quot;Roboto Slab&quot;;--awb-content-font-style:normal;--awb-content-font-weight:300;\"><div class=\"panel-group fusion-toggle-icon-boxed\" id=\"accordion-1294-6\"><div class=\"fusion-panel panel-default panel-57b209ed49e400cc8 fusion-toggle-has-divider\" style=\"--awb-title-color:#636468;--awb-content-color:#858a9f;\"><div class=\"panel-heading\"><h4 class=\"panel-title toggle\" id=\"toggle_57b209ed49e400cc8\"><a aria-expanded=\"false\" aria-controls=\"57b209ed49e400cc8\" role=\"button\" data-toggle=\"collapse\" data-parent=\"#accordion-1294-6\" data-target=\"#57b209ed49e400cc8\" href=\"#57b209ed49e400cc8\"><span class=\"fusion-toggle-icon-wrapper\" aria-hidden=\"true\"><i class=\"fa-fusion-box active-icon awb-icon-minus\" aria-hidden=\"true\"><\/i><i class=\"fa-fusion-box inactive-icon awb-icon-plus\" aria-hidden=\"true\"><\/i><\/span><span class=\"fusion-toggle-heading\">Automatic extension of a symbolic mobile manipulation skill set<\/span><\/a><\/h4><\/div><div id=\"57b209ed49e400cc8\" class=\"panel-collapse collapse \" aria-labelledby=\"toggle_57b209ed49e400cc8\"><div class=\"panel-body toggle-content fusion-clearfix\">\n<p>Foster J., Ott L., Nieto J., Lawrance N., Siegwart R. (2023).\u00a0 Automatic Extension of a Symbolic Mobile Manipulation Skill Set. <em>Robotics and Autonomous Systems<\/em><span class=\"epub-section__title\">.<\/span><\/p>\n<p><a href=\"https:\/\/doi.org\/10.1016\/j.robot.2023.104428\">https:\/\/doi.org\/10.1016\/j.robot.2023.104428<\/a><\/p>\n<p><strong><em>Abstract:<\/em><\/strong> Symbolic planning can provide an intuitive interface for non-expert users to operate autonomous robots by abstracting away much of the low-level programming. However, symbolic planners assume that the initially provided abstract domain and problem descriptions are closed and complete. This means that they are fundamentally unable to adapt to changes in the environment or tasks that are not captured by the initial description. We propose a method that allows an agent to automatically extend the abstract description of its skill set upon encountering such a situation. We introduce strategies for generalizing from previous experience, completing sequences of key actions and discovering preconditions to ensure computational efficiency. The resulting system is evaluated on a symbolic planning benchmark task and on object rearrangement tasks in simulation. Compared to a Monte Carlo Tree Search baseline, our strategies for efficient search have on average a 25% higher success rate at a 67% faster runtime.<\/p>\n<div class=\"author-data\"><\/div>\n<\/div><\/div><\/div><\/div><\/div><div class=\"fusion-separator fusion-full-width-sep\" style=\"align-self: center;margin-left: auto;margin-right: auto;margin-bottom:30px;width:100%;\"><div class=\"fusion-separator-border sep-single sep-solid\" style=\"--awb-height:20px;--awb-amount:20px;--awb-sep-color:#00b8f2;border-color:#00b8f2;border-top-width:2px;\"><\/div><\/div><div class=\"fusion-separator fusion-full-width-sep\" style=\"align-self: center;margin-left: auto;margin-right: auto;margin-top:30px;width:100%;\"><\/div><div class=\"accordian fusion-accordian\" style=\"--awb-border-size:1px;--awb-icon-size:16px;--awb-content-font-size:16px;--awb-icon-alignment:left;--awb-hover-color:#f9f9fb;--awb-border-color:#e2e2e2;--awb-background-color:#ffffff;--awb-divider-color:#e0dede;--awb-divider-hover-color:#e0dede;--awb-icon-color:#ffffff;--awb-title-color:#636468;--awb-content-color:#858a9f;--awb-icon-box-color:#212934;--awb-toggle-hover-accent-color:#00b8f2;--awb-title-font-family:&quot;Roboto Slab&quot;;--awb-title-font-weight:700;--awb-title-font-style:normal;--awb-title-font-size:24px;--awb-title-line-height:1.4;--awb-content-font-family:&quot;Roboto Slab&quot;;--awb-content-font-style:normal;--awb-content-font-weight:300;\"><div class=\"panel-group fusion-toggle-icon-boxed\" id=\"accordion-1294-7\"><div class=\"fusion-panel panel-default panel-2ccd74eb31ccecedb fusion-toggle-has-divider\" style=\"--awb-title-color:#636468;--awb-content-color:#858a9f;\"><div class=\"panel-heading\"><h4 class=\"panel-title toggle\" id=\"toggle_2ccd74eb31ccecedb\"><a aria-expanded=\"false\" aria-controls=\"2ccd74eb31ccecedb\" role=\"button\" data-toggle=\"collapse\" data-parent=\"#accordion-1294-7\" data-target=\"#2ccd74eb31ccecedb\" href=\"#2ccd74eb31ccecedb\"><span class=\"fusion-toggle-icon-wrapper\" aria-hidden=\"true\"><i class=\"fa-fusion-box active-icon awb-icon-minus\" aria-hidden=\"true\"><\/i><i class=\"fa-fusion-box inactive-icon awb-icon-plus\" aria-hidden=\"true\"><\/i><\/span><span class=\"fusion-toggle-heading\">Deep transformer networks for precise pothole segmentation tasks<\/span><\/a><\/h4><\/div><div id=\"2ccd74eb31ccecedb\" class=\"panel-collapse collapse \" aria-labelledby=\"toggle_2ccd74eb31ccecedb\"><div class=\"panel-body toggle-content fusion-clearfix\">\n<p>Katsamenis I., Sakelliou A., Bakalos N., Protopapadakis E., Klaridopoulos C., Grangakis N., Bimpas M., Kalogeras D.\u00a0 (2023).\u00a0 Deep transformer networks for precise pothole segmentation tasks. <em>Proceedings of the 16th International Conference on PErvasive Technologies Related to Assistive Environments<\/em><span class=\"epub-section__title\">.<\/span><\/p>\n<p><a href=\"https:\/\/doi.org\/10.1145\/3594806.3596560\">https:\/\/doi.org\/10.1145\/3594806.3596560<\/a><\/p>\n<p><strong><em>Abstract:<\/em><\/strong> Potholes on the road surface are a significant safety hazard and can cause severe damage to vehicles. Identifying and repairing potholes is a challenging task that requires efficient and accurate methods. In recent years, deep learning models, such as U-Nets and transformers, have been used for image segmentation tasks with promising results. This paper proposes a transformer-based model and in particular the SegFormer framework, for pothole segmentation using high-resolution images captured from a road inspection vehicle. The proposed network outperformed the traditional U-Net model that demonstrates state-of-the-art performance in various segmentation tasks, achieving an average F1-score close to 80%. The results show that the proposed method can effectively identify and localize potholes, providing a useful auxiliary tool for road maintenance and safety.<\/p>\n<div class=\"author-data\"><\/div>\n<\/div><\/div><\/div><\/div><\/div><div class=\"fusion-separator fusion-full-width-sep\" style=\"align-self: center;margin-left: auto;margin-right: auto;margin-bottom:30px;width:100%;\"><div class=\"fusion-separator-border sep-single sep-solid\" style=\"--awb-height:20px;--awb-amount:20px;--awb-sep-color:#00b8f2;border-color:#00b8f2;border-top-width:2px;\"><\/div><\/div><div class=\"fusion-separator fusion-full-width-sep\" style=\"align-self: center;margin-left: auto;margin-right: auto;margin-top:30px;width:100%;\"><\/div><div class=\"accordian fusion-accordian\" style=\"--awb-border-size:1px;--awb-icon-size:16px;--awb-content-font-size:16px;--awb-icon-alignment:left;--awb-hover-color:#f9f9fb;--awb-border-color:#e2e2e2;--awb-background-color:#ffffff;--awb-divider-color:#e0dede;--awb-divider-hover-color:#e0dede;--awb-icon-color:#ffffff;--awb-title-color:#636468;--awb-content-color:#858a9f;--awb-icon-box-color:#212934;--awb-toggle-hover-accent-color:#00b8f2;--awb-title-font-family:&quot;Roboto Slab&quot;;--awb-title-font-weight:700;--awb-title-font-style:normal;--awb-title-font-size:24px;--awb-title-line-height:1.4;--awb-content-font-family:&quot;Roboto Slab&quot;;--awb-content-font-style:normal;--awb-content-font-weight:300;\"><div class=\"panel-group fusion-toggle-icon-boxed\" id=\"accordion-1294-8\"><div class=\"fusion-panel panel-default panel-ffc808caae27bcb72 fusion-toggle-has-divider\" style=\"--awb-title-color:#636468;--awb-content-color:#858a9f;\"><div class=\"panel-heading\"><h4 class=\"panel-title toggle\" id=\"toggle_ffc808caae27bcb72\"><a aria-expanded=\"false\" aria-controls=\"ffc808caae27bcb72\" role=\"button\" data-toggle=\"collapse\" data-parent=\"#accordion-1294-8\" data-target=\"#ffc808caae27bcb72\" href=\"#ffc808caae27bcb72\"><span class=\"fusion-toggle-icon-wrapper\" aria-hidden=\"true\"><i class=\"fa-fusion-box active-icon awb-icon-minus\" aria-hidden=\"true\"><\/i><i class=\"fa-fusion-box inactive-icon awb-icon-plus\" aria-hidden=\"true\"><\/i><\/span><span class=\"fusion-toggle-heading\">A Few-Shot Attention Recurrent Residual U-Net for Crack Segmentation<\/span><\/a><\/h4><\/div><div id=\"ffc808caae27bcb72\" class=\"panel-collapse collapse \" aria-labelledby=\"toggle_ffc808caae27bcb72\"><div class=\"panel-body toggle-content fusion-clearfix\">\n<p>Katsamenis I., Protopapadakis E., Bakalos N., Varvarigos A., Doulamis A., Doulamis N., Voulodimos A. (2023).\u00a0 A Few-Shot Attention Recurrent Residual U-Net for Crack Segmentation. In: Bebis, G.,\u00a0<i>et al.<\/i> Advances in Visual Computing. ISVC 2023. Lecture Notes in Computer Science, vol 14361<span class=\"epub-section__title\">.<\/span><\/p>\n<p><a href=\"https:\/\/doi.org\/10.1007\/978-3-031-47969-4_16\">https:\/\/doi.org\/10.1007\/978-3-031-47969-4_16<\/a><\/p>\n<p><strong><em>Abstract:<\/em><\/strong> Recent studies indicate that deep learning plays a crucial role in the automated visual inspection of road infrastructures. However, current learning schemes are static, implying no dynamic adaptation to users\u2019 feedback. To address this drawback, we present a few-shot learning paradigm for the automated segmentation of road cracks, which is based on a U-Net architecture with recurrent residual and attention modules (R2AU-Net). The retraining strategy dynamically fine-tunes the weights of the U-Net as a few new rectified samples are being fed into the classifier. Extensive experiments show that the proposed few-shot R2AU-Net framework outperforms other state-of-the-art networks in terms of Dice and IoU metrics, on a new dataset, named CrackMap, which is made publicly available at\u00a0<a href=\"https:\/\/github.com\/ikatsamenis\/CrackMap\">https:\/\/github.com\/ikatsamenis\/CrackMap<\/a>.<\/p>\n<div class=\"author-data\"><\/div>\n<\/div><\/div><\/div><\/div><\/div><div class=\"fusion-separator fusion-full-width-sep\" style=\"align-self: center;margin-left: auto;margin-right: auto;margin-bottom:30px;width:100%;\"><div class=\"fusion-separator-border sep-single sep-solid\" style=\"--awb-height:20px;--awb-amount:20px;--awb-sep-color:#00b8f2;border-color:#00b8f2;border-top-width:2px;\"><\/div><\/div><div class=\"fusion-separator fusion-full-width-sep\" style=\"align-self: center;margin-left: auto;margin-right: auto;margin-top:30px;width:100%;\"><\/div><div class=\"accordian fusion-accordian\" style=\"--awb-border-size:1px;--awb-icon-size:16px;--awb-content-font-size:16px;--awb-icon-alignment:left;--awb-hover-color:#f9f9fb;--awb-border-color:#e2e2e2;--awb-background-color:#ffffff;--awb-divider-color:#e0dede;--awb-divider-hover-color:#e0dede;--awb-icon-color:#ffffff;--awb-title-color:#636468;--awb-content-color:#858a9f;--awb-icon-box-color:#212934;--awb-toggle-hover-accent-color:#00b8f2;--awb-title-font-family:&quot;Roboto Slab&quot;;--awb-title-font-weight:700;--awb-title-font-style:normal;--awb-title-font-size:24px;--awb-title-line-height:1.4;--awb-content-font-family:&quot;Roboto Slab&quot;;--awb-content-font-style:normal;--awb-content-font-weight:300;\"><div class=\"panel-group fusion-toggle-icon-boxed\" id=\"accordion-1294-9\"><div class=\"fusion-panel panel-default panel-d56156ba84d1dbe6b fusion-toggle-has-divider\" style=\"--awb-title-color:#636468;--awb-content-color:#858a9f;\"><div class=\"panel-heading\"><h4 class=\"panel-title toggle\" id=\"toggle_d56156ba84d1dbe6b\"><a aria-expanded=\"false\" aria-controls=\"d56156ba84d1dbe6b\" role=\"button\" data-toggle=\"collapse\" data-parent=\"#accordion-1294-9\" data-target=\"#d56156ba84d1dbe6b\" href=\"#d56156ba84d1dbe6b\"><span class=\"fusion-toggle-icon-wrapper\" aria-hidden=\"true\"><i class=\"fa-fusion-box active-icon awb-icon-minus\" aria-hidden=\"true\"><\/i><i class=\"fa-fusion-box inactive-icon awb-icon-plus\" aria-hidden=\"true\"><\/i><\/span><span class=\"fusion-toggle-heading\">Robotics-enabled roadwork maintenance and upgrading<\/span><\/a><\/h4><\/div><div id=\"d56156ba84d1dbe6b\" class=\"panel-collapse collapse \" aria-labelledby=\"toggle_d56156ba84d1dbe6b\"><div class=\"panel-body toggle-content fusion-clearfix\">\n<p>Bakalos N., et al. (2024).\u00a0 Robotics-enabled roadwork maintenance and upgrading. In: <em>Robotics and Automation Solutions for Inspection and Maintenance in Critical Infrastructures. <\/em>Now Publishers<\/p>\n<p><a href=\"https:\/\/doi.org\/10.3929\/ethz-b-000666060\">https:\/\/doi.org\/10.3929\/ethz-b-000666060<\/a><\/p>\n<p><strong><em>Abstract:<\/em><\/strong> Closing, this chapter presents recent research efforts and results on a Robotics-enabled Roadwork Maintenance and Upgrading approach and tools. This includes the implementation of a road infrastructure blueprint developed including advanced engineering solutions for interconnecting and facilitating<span class=\"hidden-text\"> seamless transitions between different transportation modes in the event of severe disruptions affecting one mode of transportation.<\/span><\/p>\n<div class=\"author-data\"><\/div>\n<\/div><\/div><\/div><\/div><\/div><div class=\"fusion-separator fusion-full-width-sep\" style=\"align-self: center;margin-left: auto;margin-right: auto;margin-bottom:30px;width:100%;\"><div class=\"fusion-separator-border sep-single sep-solid\" style=\"--awb-height:20px;--awb-amount:20px;--awb-sep-color:#00b8f2;border-color:#00b8f2;border-top-width:2px;\"><\/div><\/div><div class=\"fusion-separator fusion-full-width-sep\" style=\"align-self: center;margin-left: auto;margin-right: auto;margin-top:30px;width:100%;\"><\/div><div class=\"accordian fusion-accordian\" style=\"--awb-border-size:1px;--awb-icon-size:16px;--awb-content-font-size:16px;--awb-icon-alignment:left;--awb-hover-color:#f9f9fb;--awb-border-color:#e2e2e2;--awb-background-color:#ffffff;--awb-divider-color:#e0dede;--awb-divider-hover-color:#e0dede;--awb-icon-color:#ffffff;--awb-title-color:#636468;--awb-content-color:#858a9f;--awb-icon-box-color:#212934;--awb-toggle-hover-accent-color:#00b8f2;--awb-title-font-family:&quot;Roboto Slab&quot;;--awb-title-font-weight:700;--awb-title-font-style:normal;--awb-title-font-size:24px;--awb-title-line-height:1.4;--awb-content-font-family:&quot;Roboto Slab&quot;;--awb-content-font-style:normal;--awb-content-font-weight:300;\"><div class=\"panel-group fusion-toggle-icon-boxed\" id=\"accordion-1294-10\"><div class=\"fusion-panel panel-default panel-cda6cbdcd92d79780 fusion-toggle-has-divider\" style=\"--awb-title-color:#636468;--awb-content-color:#858a9f;\"><div class=\"panel-heading\"><h4 class=\"panel-title toggle\" id=\"toggle_cda6cbdcd92d79780\"><a aria-expanded=\"false\" aria-controls=\"cda6cbdcd92d79780\" role=\"button\" data-toggle=\"collapse\" data-parent=\"#accordion-1294-10\" data-target=\"#cda6cbdcd92d79780\" href=\"#cda6cbdcd92d79780\"><span class=\"fusion-toggle-icon-wrapper\" aria-hidden=\"true\"><i class=\"fa-fusion-box active-icon awb-icon-minus\" aria-hidden=\"true\"><\/i><i class=\"fa-fusion-box inactive-icon awb-icon-plus\" aria-hidden=\"true\"><\/i><\/span><span class=\"fusion-toggle-heading\">H2020 Project HERON<\/span><\/a><\/h4><\/div><div id=\"cda6cbdcd92d79780\" class=\"panel-collapse collapse \" aria-labelledby=\"toggle_cda6cbdcd92d79780\"><div class=\"panel-body toggle-content fusion-clearfix\">\n<p>Andreoli G., Schmidt F., Katsamenis I., Bakalos N., Oleynikova H., et al.. (2024) H2020 Project HERON. <i>10th conference Transport Research Arena (TRA 2024)<\/i>, Dublin, Ireland.<\/p>\n<p><a href=\"https:\/\/univ-eiffel.hal.science\/hal-04575144\">https:\/\/univ-eiffel.hal.science\/hal-04575144<\/a><\/p>\n<p><strong><em>Abstract:<\/em><\/strong> SCOPE : HERON is among the projects funded by the EU under the H2020 program for road infrastructure maintenance. Within HERON&#8217;s scope is the implementation of a road infrastructure plan, CONTEXT : The Forever Open Road (FOR) concept has been developed, with its national declinations (R5G in France for example) by the association of European Road Research Laboratories (FEHRL). HERON answers the three concepts of FOR, namely the resilient, the automated and the adaptable road (www.foreveropenroad.eu\/). As outlined by FOR, HERON integrates existing technical components, including off-the-shelf technologies and outcomes from previous or ongoing research projects. GOAL : The goal of HERON is to develop an integrated automated system capable of conducting various road maintenance and improvement tasks, such as crack sealing, pothole patching, asphalt rejuvenation, autonomous replacement of Removable Urban Pavement (RUP) elements, and road marking\/painting. Thus, HERON is tasked with supporting both pre- and post-intervention phases, which involve automated and controlled visual inspections, as well as the placement and removal of traffic cones.<\/p>\n<div class=\"author-data\"><\/div>\n<\/div><\/div><\/div><\/div><\/div><div class=\"fusion-separator fusion-full-width-sep\" style=\"align-self: center;margin-left: auto;margin-right: auto;margin-bottom:30px;width:100%;\"><div class=\"fusion-separator-border sep-single sep-solid\" style=\"--awb-height:20px;--awb-amount:20px;--awb-sep-color:#00b8f2;border-color:#00b8f2;border-top-width:2px;\"><\/div><\/div><div class=\"fusion-separator fusion-full-width-sep\" style=\"align-self: center;margin-left: auto;margin-right: auto;margin-top:30px;width:100%;\"><\/div><div class=\"accordian fusion-accordian\" style=\"--awb-border-size:1px;--awb-icon-size:16px;--awb-content-font-size:16px;--awb-icon-alignment:left;--awb-hover-color:#f9f9fb;--awb-border-color:#e2e2e2;--awb-background-color:#ffffff;--awb-divider-color:#e0dede;--awb-divider-hover-color:#e0dede;--awb-icon-color:#ffffff;--awb-title-color:#636468;--awb-content-color:#858a9f;--awb-icon-box-color:#212934;--awb-toggle-hover-accent-color:#00b8f2;--awb-title-font-family:&quot;Roboto Slab&quot;;--awb-title-font-weight:700;--awb-title-font-style:normal;--awb-title-font-size:24px;--awb-title-line-height:1.4;--awb-content-font-family:&quot;Roboto Slab&quot;;--awb-content-font-style:normal;--awb-content-font-weight:300;\"><div class=\"panel-group fusion-toggle-icon-boxed\" id=\"accordion-1294-11\"><div class=\"fusion-panel panel-default panel-d32838617b58d217a fusion-toggle-has-divider\" style=\"--awb-title-color:#636468;--awb-content-color:#858a9f;\"><div class=\"panel-heading\"><h4 class=\"panel-title toggle\" id=\"toggle_d32838617b58d217a\"><a aria-expanded=\"false\" aria-controls=\"d32838617b58d217a\" role=\"button\" data-toggle=\"collapse\" data-parent=\"#accordion-1294-11\" data-target=\"#d32838617b58d217a\" href=\"#d32838617b58d217a\"><span class=\"fusion-toggle-icon-wrapper\" aria-hidden=\"true\"><i class=\"fa-fusion-box active-icon awb-icon-minus\" aria-hidden=\"true\"><\/i><i class=\"fa-fusion-box inactive-icon awb-icon-plus\" aria-hidden=\"true\"><\/i><\/span><span class=\"fusion-toggle-heading\">UAV-based Localization of Removable Urban Pavement Elements Through Deep Object Detection Methods<\/span><\/a><\/h4><\/div><div id=\"d32838617b58d217a\" class=\"panel-collapse collapse \" aria-labelledby=\"toggle_d32838617b58d217a\"><div class=\"panel-body toggle-content fusion-clearfix\">\n<p>Katsamenis I., Andreoli G., Skamantzari M.,\u00a0 Bakalos N., Schmidt F., et al.. (2024) UAV-based Localization of Removable Urban Pavement Elements Through Deep Object Detection Methods.\u00a0<em>Proceedings of the 17th International Conference on PErvasive Technologies Related to Assistive Environments.<\/em><\/p>\n<p><a href=\"https:\/\/doi.org\/10.1145\/3652037.3663934\">https:\/\/doi.org\/10.1145\/3652037.3663934<\/a><\/p>\n<p><strong><em>Abstract:<\/em><\/strong> We introduce a deep learning framework leveraging YOLOv8 architecture to automate the localization of Removable Urban Pavements (RUPs) using UAV imagery. The core idea behind RUPs is to provide pavements that can be quickly opened and closed using lightweight on-site equipment. This approach aims to efficiently restore the street\u2019s original appearance and functionalities within a short timeframe, typically just a few hours. Our study explores the feasibility of autonomously localizing RUP elements, paving the way for robotic-driven replacement with prefabricated, fully functional components. Moreover, the integration of UAV data enhances safety and accessibility to challenging areas. Experimental results underscore the efficacy of our approach in achieving precise localization and thereby enabling proactive maintenance efforts.<\/p>\n<div class=\"author-data\"><\/div>\n<\/div><\/div><\/div><\/div><\/div><div class=\"fusion-separator fusion-full-width-sep\" style=\"align-self: center;margin-left: auto;margin-right: auto;margin-bottom:30px;width:100%;\"><div class=\"fusion-separator-border sep-single sep-solid\" style=\"--awb-height:20px;--awb-amount:20px;--awb-sep-color:#00b8f2;border-color:#00b8f2;border-top-width:2px;\"><\/div><\/div><\/div><\/div><\/div><\/div><\/p>\n","protected":false},"excerpt":{"rendered":"","protected":false},"author":3,"featured_media":0,"parent":0,"menu_order":0,"comment_status":"closed","ping_status":"closed","template":"100-width.php","meta":{"footnotes":""},"class_list":["post-1294","page","type-page","status-publish","hentry"],"_links":{"self":[{"href":"https:\/\/www.heron-h2020.eu\/index.php?rest_route=\/wp\/v2\/pages\/1294","targetHints":{"allow":["GET"]}}],"collection":[{"href":"https:\/\/www.heron-h2020.eu\/index.php?rest_route=\/wp\/v2\/pages"}],"about":[{"href":"https:\/\/www.heron-h2020.eu\/index.php?rest_route=\/wp\/v2\/types\/page"}],"author":[{"embeddable":true,"href":"https:\/\/www.heron-h2020.eu\/index.php?rest_route=\/wp\/v2\/users\/3"}],"replies":[{"embeddable":true,"href":"https:\/\/www.heron-h2020.eu\/index.php?rest_route=%2Fwp%2Fv2%2Fcomments&post=1294"}],"version-history":[{"count":17,"href":"https:\/\/www.heron-h2020.eu\/index.php?rest_route=\/wp\/v2\/pages\/1294\/revisions"}],"predecessor-version":[{"id":1527,"href":"https:\/\/www.heron-h2020.eu\/index.php?rest_route=\/wp\/v2\/pages\/1294\/revisions\/1527"}],"wp:attachment":[{"href":"https:\/\/www.heron-h2020.eu\/index.php?rest_route=%2Fwp%2Fv2%2Fmedia&parent=1294"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}