<?xml version='1.0' encoding='UTF-8'?><?xml-stylesheet href='static/style.xsl' type='text/xsl'?><OAI-PMH xmlns="http://www.openarchives.org/OAI/2.0/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.openarchives.org/OAI/2.0/ http://www.openarchives.org/OAI/2.0/OAI-PMH.xsd"><responseDate>2026-04-07T11:25:07Z</responseDate><request verb="GetRecord" identifier="oai:ebiltegia.mondragon.edu:20.500.11984/6385" metadataPrefix="rdf">https://ebiltegia.mondragon.edu/oai/request</request><GetRecord><record><header><identifier>oai:ebiltegia.mondragon.edu:20.500.11984/6385</identifier><datestamp>2024-05-24T11:21:43Z</datestamp><setSpec>com_20.500.11984_1143</setSpec><setSpec>col_20.500.11984_1148</setSpec></header><metadata><rdf:RDF xmlns:rdf="http://www.openarchives.org/OAI/2.0/rdf/" xmlns:ow="http://www.ontoweb.org/ontology/1#" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:ds="http://dspace.org/ds/elements/1.1/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:doc="http://www.lyncode.com/xoai" xsi:schemaLocation="http://www.openarchives.org/OAI/2.0/rdf/ http://www.openarchives.org/OAI/2.0/rdf.xsd">
   <ow:Publication rdf:about="oai:ebiltegia.mondragon.edu:20.500.11984/6385">
      <dc:title>Multi-modal person detection and tracking from a mobile robot in a crowded environment</dc:title>
      <dc:creator>Zuriarrain Arcarazo, Iker</dc:creator>
      <dc:contributor>Mekonnen, Alhayat Ali</dc:contributor>
      <dc:contributor>Lerasle, Frederic</dc:contributor>
      <dc:subject>Multi-person tracking</dc:subject>
      <dc:subject>Multi-modal data fusion</dc:subject>
      <dc:subject>MCMC particle filtering</dc:subject>
      <dc:subject>Interactive robotics</dc:subject>
      <dc:description>This paper addresses multi-modal person detection and tracking using a 2D SICK Laser Range Finder and a visual camera from a mobile robot in a crowded and cluttered environment. A sequential approach in which the laser data is segmented to filter human leg like structures to generate person hypothesis which are further refined by a state of the art parts based visual person detector for final detection, is proposed. Based on this detection routine, a Monte Carlo Markov Chain (MCMC) particle filtering strategy is utilized to track multiple persons around the robot. Integration of the implemented multi-modal person detector and tracker in our robotic platform and associated experiments are presented. Results obtained from all tests carried out have been clearly reported proving the multi-modal approach outperforms its single sensor counterparts taking detection, subsequent use, computation time, and precision into account. The work presented here will be used to define navigational control laws for passer-by avoidance during a service robot’s person following activity.</dc:description>
      <dc:date>2024-04-29T13:53:19Z</dc:date>
      <dc:date>2024-04-29T13:53:19Z</dc:date>
      <dc:date>2011</dc:date>
      <dc:type>http://purl.org/coar/resource_type/c_c94f</dc:type>
      <dc:identifier>9789898425478</dc:identifier>
      <dc:identifier>https://katalogoa.mondragon.edu/janium-bin/janium_login_opac.pl?find&amp;ficha_no=154734</dc:identifier>
      <dc:identifier>https://hdl.handle.net/20.500.11984/6385</dc:identifier>
      <dc:language>eng</dc:language>
      <dc:rights>Attribution-NonCommercial-NoDerivatives 4.0 International</dc:rights>
      <dc:rights>http://creativecommons.org/licenses/by-nc-nd/4.0/</dc:rights>
      <dc:rights>© 2011 SCITEPRESS</dc:rights>
      <dc:publisher>Scitepress</dc:publisher>
   </ow:Publication>
</rdf:RDF></metadata></record></GetRecord></OAI-PMH>