@inproceedings{Schreiter1830088, author = {Schreiter, Tim and Morillo-Mendez, Lucas and Chadalavada, Ravi T. and Rudenko, Andrey and Billing, Erik and Magnusson, Martin and Arras, Kai O. and Lilienthal, Achim J.}, booktitle = {2023 32nd IEEE International Conference on Robot and Human Interactive Communication (RO-MAN) : Proceedings}, institution = {Örebro University, School of Science and Technology}, institution = {Robert Bosch GmbH, Corporate Research, Stuttgart, Germany}, institution = {Interaction Lab, University of Skövde, Skövde, Sweden}, institution = {Robert Bosch GmbH, Corporate Research, Stuttgart, Germany}, institution = {TU Munich, Germany}, pages = {293--300}, title = {Advantages of Multimodal versus Verbal-Only Robot-to-Human Communication with an Anthropomorphic Robotic Mock Driver}, series = {IEEE RO-MAN}, DOI = {10.1109/RO-MAN57019.2023.10309629}, abstract = {Robots are increasingly used in shared environments with humans, making effective communication a necessity for successful human-robot interaction. In our work, we study a crucial component: active communication of robot intent. Here, we present an anthropomorphic solution where a humanoid robot communicates the intent of its host robot acting as an "Anthropomorphic Robotic Mock Driver" (ARMoD). We evaluate this approach in two experiments in which participants work alongside a mobile robot on various tasks, while the ARMoD communicates a need for human attention, when required, or gives instructions to collaborate on a joint task. The experiments feature two interaction styles of the ARMoD: a verbal-only mode using only speech and a multimodal mode, additionally including robotic gaze and pointing gestures to support communication and register intent in space. Our results show that the multimodal interaction style, including head movements and eye gaze as well as pointing gestures, leads to more natural fixation behavior. 
Participants naturally identified and fixated longer on the areas relevant for intent communication, and reacted faster to instructions in collaborative tasks. Our research further indicates that the ARMoD intent communication improves engagement and social interaction with mobile robots in workplace settings. }, ISBN = {9798350336702}, ISBN = {9798350336719}, year = {2023} } @article{Molina1797296, author = {Molina, Sergi and Mannucci, Anna and Magnusson, Martin and Adolfsson, Daniel and Andreasson, Henrik and Hamad, Mazin and Abdolshah, Saeed and Chadalavada, Ravi Teja and Palmieri, Luigi and Linder, Timm and Swaminathan, Chittaranjan Srinivas and Kucner, Tomasz Piotr and Hanheide, Marc and Fernandez-Carmona, Manuel and Cielniak, Grzegorz and Duckett, Tom and Pecora, Federico and Bokesand, Simon and Arras, Kai O. and Haddadin, Sami and Lilienthal, Achim J.}, institution = {Örebro University, School of Science and Technology}, institution = {University of Lincoln, Lincoln, U.K.}, institution = {Robert Bosch GmbH, Renningen, Germany}, institution = {Technical University of Munich, Munich, Germany}, institution = {Technical University of Munich, Munich, Germany}, institution = {Robert Bosch GmbH, Renningen, Germany}, institution = {Robert Bosch GmbH, Renningen, Germany}, institution = {Aalto University, Aalto, Finland}, institution = {University of Lincoln, Lincoln, U.K.}, institution = {University of Lincoln, Lincoln, U.K.}, institution = {University of Lincoln, Lincoln, U.K.}, institution = {University of Lincoln, Lincoln, U.K.}, institution = {Kollmorgen Automation AB, Mölndal, Sweden}, institution = {Robert Bosch GmbH, Renningen, Germany}, institution = {Technical University of Munich, Munich, Germany}, journal = {IEEE Robotics \& Automation Magazine}, title = {The ILIAD Safety Stack : Human-Aware Infrastructure-Free Navigation of Industrial Mobile Robots}, DOI = {10.1109/MRA.2023.3296983}, keywords = {Robots, Safety, Navigation, Mobile robots, Human-robot 
interaction, Hidden Markov models, Trajectory}, abstract = {Current intralogistics services require keeping up with e-commerce demands, reducing delivery times and waste, and increasing overall flexibility. As a consequence, the use of automated guided vehicles (AGVs) and, more recently, autonomous mobile robots (AMRs) for logistics operations is steadily increasing. }, year = {2023} } @inproceedings{Schreiter1720267, author = {Schreiter, Tim and Morillo-Mendez, Lucas and Chadalavada, Ravi Teja and Rudenko, Andrey and Billing, Erik Alexander and Lilienthal, Achim J.}, booktitle = {SCRITA Workshop Proceedings (arXiv:2208.11090) : }, institution = {Örebro University, School of Science and Technology}, institution = {Robert Bosch GmbH, Corporate Research, Stuttgart, Germany}, institution = {Interaction Lab, University of Skövde, Sweden}, title = {The Effect of Anthropomorphism on Trust in an Industrial Human-Robot Interaction}, DOI = {10.48550/arXiv.2208.14637}, abstract = {Robots are increasingly deployed in spaces shared with humans, including home settings and industrial environments. In these environments, the interaction between humans and robots (HRI) is crucial for safety, legibility, and efficiency. A key factor in HRI is trust, which modulates the acceptance of the system. Anthropomorphism has been shown to modulate trust development in a robot, but robots in industrial environments are not usually anthropomorphic. We designed a simple interaction in an industrial environment in which an anthropomorphic mock driver (ARMoD) robot simulates to drive an autonomous guided vehicle (AGV). The task consisted of a human crossing paths with the AGV, with or without the ARMoD mounted on the top, in a narrow corridor. The human and the system needed to negotiate trajectories when crossing paths, meaning that the human had to attend to the trajectory of the robot to avoid a collision with it. 
There was a significant increment in the reported trust scores in the condition where the ARMoD was present, showing that the presence of an anthropomorphic robot is enough to modulate the trust, even in limited interactions as the one we present here.  }, year = {2022} } @inproceedings{Rudenko1524236, author = {Rudenko, Andrey and Kucner, Tomasz Piotr and Swaminathan, Chittaranjan Srinivas and Chadalavada, Ravi Teja and Arras, Kai Oliver and Lilienthal, Achim}, booktitle = { : }, institution = {Örebro University, School of Science and Technology}, institution = {Bosch Corporate Research, Renningen, Germany}, title = {Benchmarking Human Motion Prediction Methods}, keywords = {human motion prediction, benchmarking, datasets}, abstract = {In this extended abstract we present a novel dataset for benchmarking motion prediction algorithms. We describe our approach to data collection which generates diverse and accurate human motion in a controlled weakly-scripted setup. We also give insights for building a universal benchmark for motion prediction. 
}, year = {2020} } @article{Chadalavada1374911, author = {Chadalavada, Ravi Teja and Andreasson, Henrik and Schindler, Maike and Palm, Rainer and Lilienthal, Achim J.}, institution = {Örebro University, School of Science and Technology}, institution = {Faculty of Human Sciences, University of Cologne, Germany}, journal = {Robotics and Computer-Integrated Manufacturing}, note = {Funding Agencies:KKS SIDUS project AIR: "Action and Intention Recognition in Human Interaction with Autonomous Systems"  20140220H2020 project ILIAD: "Intra-Logistics with Integrated Automatic Deployment: Safe and Scalable Fleets in Shared Spaces"  732737}, eid = {101830}, title = {Bi-directional navigation intent communication using spatial augmented reality and eye-tracking glasses for improved safety in human-robot interaction}, volume = {61}, DOI = {10.1016/j.rcim.2019.101830}, keywords = {Human-robot interaction (HRI), Mobile robots, Intention communication, Eye-tracking, Intention recognition, Spatial augmented reality, Stimulated recall interview, Obstacle avoidance, Safety, Logistics}, abstract = {Safety, legibility and efficiency are essential for autonomous mobile robots that interact with humans. A key factor in this respect is bi-directional communication of navigation intent, which we focus on in this article with a particular view on industrial logistic applications. In the direction robot-to-human, we study how a robot can communicate its navigation intent using Spatial Augmented Reality (SAR) such that humans can intuitively understand the robot's intention and feel safe in the vicinity of robots. We conducted experiments with an autonomous forklift that projects various patterns on the shared floor space to convey its navigation intentions. We analyzed trajectories and eye gaze patterns of humans while interacting with an autonomous forklift and carried out stimulated recall interviews (SRI) in order to identify desirable features for projection of robot intentions. 
In the direction human-to-robot, we argue that robots in human co-habited environments need human-aware task and motion planning to support safety and efficiency, ideally responding to people's motion intentions as soon as they can be inferred from human cues. Eye gaze can convey information about intentions beyond what can be inferred from the trajectory and head pose of a person. Hence, we propose eye-tracking glasses as safety equipment in industrial environments shared by humans and robots. In this work, we investigate the possibility of human-to-robot implicit intention transference solely from eye gaze data and evaluate how the observed eye gaze patterns of the participants relate to their navigation decisions. We again analyzed trajectories and eye gaze patterns of humans while interacting with an autonomous forklift for clues that could reveal direction intent. Our analysis shows that people primarily gazed on that side of the robot they ultimately decided to pass by. We discuss implications of these results and relate to a control approach that uses human gaze for early obstacle avoidance. }, year = {2020} } @article{Rudenko1387088, author = {Rudenko, Andrey and Kucner, Tomasz Piotr and Swaminathan, Chittaranjan Srinivas and Chadalavada, Ravi Teja and Arras, Kai O. 
and Lilienthal, Achim J.}, institution = {Örebro University, School of Science and Technology}, institution = {Robotics Research, Bosch Corporate Research, Stuttgart, Germany}, institution = {Robotics Research, Bosch Corporate Research, Stuttgart, Germany}, journal = {IEEE Robotics and Automation Letters}, number = {2}, pages = {676--682}, title = {TH{\"O}R : Human-Robot Navigation Data Collection and Accurate Motion Trajectories Dataset}, volume = {5}, DOI = {10.1109/LRA.2020.2965416}, keywords = {Social Human-Robot Interaction, Motion and Path Planning, Human Detection and Tracking}, abstract = {Understanding human behavior is key for robots and intelligent systems that share a space with people. Accordingly, research that enables such systems to perceive, track, learn and predict human behavior as well as to plan and interact with humans has received increasing attention over the last years. The availability of large human motion datasets that contain relevant levels of difficulty is fundamental to this research. Existing datasets are often limited in terms of information content, annotation quality or variability of human behavior. In this paper, we present THÖR, a new dataset with human motion trajectory and eye gaze data collected in an indoor environment with accurate ground truth for position, head orientation, gaze direction, social grouping, obstacles map and goal coordinates. THÖR also contains sensor data collected by a 3D lidar and involves a mobile robot navigating the space. We propose a set of metrics to quantitatively analyze motion trajectory datasets such as the average tracking duration, ground truth noise, curvature and speed variation of the trajectories. In comparison to prior art, our dataset has a larger variety in human motion behavior, is less noisy, and contains annotations at higher frequencies. 
}, URL = {https://arxiv.org/abs/1909.04403}, year = {2020} } @incollection{Palm1391193, author = {Palm, Rainer and Chadalavada, Ravi Teja and Lilienthal, Achim}, booktitle = {Computational Intelligence : International Joint Conference, IJCCI2016 Porto, Portugal, November 9–11,2016 Revised Selected Papers}, institution = {Örebro University, School of Science and Technology}, pages = {149--177}, title = {Fuzzy Modeling, Control and Prediction in Human-Robot Systems}, series = {Studies in Computational Intelligence}, number = {792}, DOI = {10.1007/978-3-319-99283-9}, keywords = {Fuzzy control, Fuzzy modeling, Prediction, Human-robot interaction, Human intentions, Obstacle avoidance, Velocity obstacles}, abstract = {A safe and synchronized interaction between human agents and robots in shared areas requires both long distance prediction of their motions and an appropriate control policy for short distance reaction. In this connection recognition of mutual intentions in the prediction phase is crucial to improve the performance of short distance control.We suggest an approach for short distance control inwhich the expected human movements relative to the robot are being summarized in a so-called “compass dial” from which fuzzy control rules for the robot’s reactions are derived. To predict possible collisions between robot and human at the earliest possible time, the travel times to predicted human-robot intersections are calculated and fed into a hybrid controller for collision avoidance. By applying the method of velocity obstacles, the relation between a change in robot’s motion direction and its velocity during an interaction is optimized and a combination with fuzzy expert rules is used for a safe obstacle avoidance. For a prediction of human intentions to move to certain goals pedestrian tracks are modeled by fuzzy clustering, and trajectories of human and robot agents are extrapolated to avoid collisions at intersections. 
Examples with both simulated and real data show the applicability of the presented methods and the high performance of the results. }, ISBN = {978-3-319-99282-2}, ISBN = {978-3-319-99283-9}, year = {2019} } @inproceedings{Chadalavada1391172, author = {Chadalavada, Ravi Teja and Andreasson, Henrik and Schindler, Maike and Lilienthal, Achim J.}, booktitle = { : }, institution = {Örebro University, School of Science and Technology}, institution = {Faculty of Human Sciences, University of Cologne, Cologne, Germany}, title = {Implicit intention transference using eye-tracking glasses for improved safety in human-robot interaction}, keywords = {Human-robot interaction, intention communication, eye tracking, spatial augmented reality, electrodermal activity, stress, cognitive load.}, abstract = {Eye gaze can convey information about intentions beyond what can be inferred from the trajectory and head pose of a person. We propose eye-tracking glasses as safety equipment in industrial environments shared by humans and robots. In this work, an implicit intention transference system was developed and implemented. Robot was given access to human eye gaze data, and it responds to the eye gaze data through spatial augmented reality projections on the shared floor space in real-time and the robot could also adapt its path. This allows proactive safety approaches in HRI for example by attempting to get the human's attention when they are in the vicinity of a moving robot. A study was conducted with workers at an industrial warehouse. The time taken to understand the behavior of the system was recorded. Electrodermal activity and pupil diameter were recorded to measure the increase in stress and cognitive load while interacting with an autonomous system, using these measurements as a proxy to quantify trust in autonomous systems. 
}, year = {2019} } @inproceedings{Chadalavada1270176, author = {Chadalavada, Ravi Teja and Andreasson, Henrik and Schindler, Maike and Palm, Rainer and Lilienthal, Achim}, booktitle = {Advances in Manufacturing Technology XXXII : Proceedings of the 16th International Conference on Manufacturing Research, incorporating the 33rd National Conference on Manufacturing Research, September 11–13, 2018, University of Skövde, Sweden}, institution = {Örebro University, School of Science and Technology}, pages = {253--258}, title = {Accessing your navigation plans! Human-Robot Intention Transfer using Eye-Tracking Glasses}, series = {Advances in Transdisciplinary Engineering}, number = {8}, DOI = {10.3233/978-1-61499-902-7-253}, keywords = {Human-Robot Interaction (HRI), Eye-tracking, Eye-Tracking Glasses, Navigation Intent, Implicit Intention Transference, Obstacle avoidance.}, abstract = {Robots in human co-habited environments need human-aware task and motion planning, ideally responding to people’s motion intentions as soon as they can be inferred from human cues. Eye gaze can convey information about intentions beyond trajectory and head pose of a person. Hence, we propose eye-tracking glasses as safety equipment in industrial environments shared by humans and robots. This paper investigates the possibility of human-to-robot implicit intention transference solely from eye gaze data.  We present experiments in which humans wearing eye-tracking glasses encountered a small forklift truck under various conditions. We evaluate how the observed eye gaze patterns of the participants related to their navigation decisions. Our analysis shows that people primarily gazed on that side of the robot they ultimately decided to pass by. We discuss implications of these results and relate to a control approach that uses human eye gaze for early obstacle avoidance. 
}, ISBN = {978-1-61499-901-0}, ISBN = {978-1-61499-902-7}, year = {2018} } @inproceedings{Schindler1070809, author = {Schindler, Maike and Lilienthal, Achim and Chadalavada, Ravi and {\"O}gren, Magnus}, booktitle = {Proceedings of the 40th Conference of the International Group for the Psychology of Mathematics Education (PME) : }, institution = {Örebro University, School of Science and Technology}, title = {Creativity in the eye of the student : Refining investigations of mathematical creativity using eye-tracking goggles}, abstract = {Mathematical creativity is increasingly important for improved innovation and problem-solving. In this paper, we address the question of how to best investigate mathematical creativity and critically discuss dichotomous creativity scoring schemes. In order to gain deeper insights into creative problem-solving processes, we suggest the use of mobile, unobtrusive eye-trackers for evaluating students’ creativity in the context of Multiple Solution Tasks (MSTs). We present first results with inexpensive eye-tracking goggles that reveal the added value of evaluating students’ eye movements when investigating mathematical creativity—compared to an analysis of written/drawn solutions as well as compared to an analysis of simple videos. }, year = {2016} } @inproceedings{Chadalavada1070994, author = {Chadalavada, Ravi Teja and Andreasson, Henrik and Krug, Robert and Lilienthal, Achim}, booktitle = {Proceedings of RSS Workshop "Social Trust in Autonomous Robots 2016" : }, institution = {Örebro University, School of Science and Technology}, title = {Empirical evaluation of human trust in an expressive mobile robot}, keywords = {Human robot interaction, hri, mobile robot, trust, evaluation}, abstract = {A mobile robot communicating its intentions using Spatial Augmented Reality (SAR) on the shared floor space makes humans feel safer and more comfortable around the robot. Our previous work [1] and several other works established this fact. 
We built upon that work by adding an adaptable information and control to the SAR module. An empirical study about how a mobile robot builds trust in humans by communicating its intentions was conducted. A novel way of evaluating that trust is presented and experimentally shown that adaption in SAR module lead to natural interaction and the new evaluation system helped us discover that the comfort levels between human-robot interactions approached those of human-human interactions. }, year = {2016} } @inproceedings{Palm1051090, author = {Palm, Rainer and Chadalavada, Ravi and Lilienthal, Achim}, booktitle = {Proceedings of the 8th International Joint Conference on Computational Intelligence (IJCCI 2016) : }, institution = {Örebro University, School of Science and Technology}, note = {Funding Agency:AIR-project, Action and Intention Recognition in Human Interaction with Autonomous Systems}, pages = {67--74}, title = {Fuzzy Modeling and Control for Intention Recognition in Human-Robot Systems}, volume = {2}, DOI = {10.5220/0006015400670074}, keywords = {Fuzzy control, Fuzzy modeling, Human-Robot interaction, human intentions}, abstract = {The recognition of human intentions from trajectories in the framework of human-robot interaction is a challenging field of research. In this paper some control problems of the human-robot interaction and their intentions to compete or cooperate in shared work spaces are addressed and the time schedule of the information flow is discussed. The expected human movements relative to the robot are summarized in a so-called "compass dial" from which fuzzy control rules for the robot's reactions are derived. To avoid collisions between robot and human very early the computation of collision times at predicted human-robot intersections is discussed and a switching controller for collision avoidance is proposed. 
In the context of the recognition of human intentions to move to certain goals, pedestrian tracks are modeled by fuzzy clustering, lanes preferred by human agents are identified, and the identification of degrees of membership of a pedestrian track to specific lanes are discussed. Computations based on simulated and experimental data show the applicability of the methods presented. }, ISBN = {978-989-758-201-1}, year = {2016} } @mastersthesis{Chadalavada1071029, author = {Chadalavada, Ravi Teja}, institution = {Chalmers University of Technology}, pages = {38}, school = {Chalmers University of Technology}, title = {Human Robot Interaction for Autonomous Systems in Industrial Environments}, keywords = {HRI, Human Robot Interaction, Spatial Augmented Reality, SAR, Logistics, Mobile Robots, Intention Communication}, abstract = {The upcoming new generation of autonomous vehicles for transporting materials in industrial environments will be more versatile, flexible and efficient than traditional Automatic Guided Vehicles (AGV), which simply follow pre-defined paths. However, freely navigating vehicles can appear unpredictable to human workers and thus cause stress and render joint use of the available space inefficient. This work addresses the problem of providing information regarding a service robot’s intention to humans co-populating the environment. The overall goal is to make humans feel safer and more comfortable, even when they are in close vicinity of the robot. A spatial Augmented Reality (AR) system for robot intention communication by means of projecting proxemic information onto shared floor space is developed on a robotic fork-lift by equipping it with a LED projector. This helps in visualizing internal state information and intents on the shared floors spaces. The robot’s ability to communicate its intentions is evaluated in realistic situations where test subjects meet the robotic forklift. 
A Likert scalebased evaluation which also includes comparisons to human-human intention communication was performed. The results show that already adding simple information, such as the trajectory and the space to be occupied by the robot in the near future, is able to effectively improve human response to the robot. This kind of synergistic human-robot interaction in a work environment is expected to increase the robot’s acceptability in the industry. }, URL = {http://publications.lib.chalmers.se/records/fulltext/238513/238513.pdf}, year = {2016} } @inproceedings{Palm1051078, author = {Palm, Rainer and Chadalavada, Ravi and Lilienthal, Achim}, booktitle = {2016 9th International Conference on Human System Interactions, HSI 2016 : Proceedings}, institution = {Örebro University, School of Science and Technology}, note = {Funding Agency:AIR-project Action and Intention Recognition in Human Interaction with Autonomous Systems}, pages = {229--235}, title = {Recognition of Human-Robot Motion Intentions by Trajectory Observation}, series = {Conference on Human System Interaction}, DOI = {10.1109/HSI.2016.7529636}, keywords = {Human robot interaction, human intentions, obstacle avoidance, fuzzy rules}, abstract = {The intention of humans and autonomous robots to interact in shared spatial areas is a challenging field of research regarding human safety, system stability and performance of the system's behavior. In this paper the intention recognition between human and robot from the control point of view are addressed and the time schedule of the exchanged signals is discussed. After a description of the kinematic and geometric relations between human and robot a so-called 'compass dial' with the relative velocities is presented from which suitable fuzzy control rules are derived. The computation of the collision times at intersections and possible avoidance strategies are further discussed. 
Computations based on simulated and experimental data show the applicability of the methods presented. }, ISBN = {9781509017294}, year = {2016} } @inproceedings{Bunz1071024, author = {Bunz, Elsa and Chadalavada, Ravi Teja and Andreasson, Henrik and Krug, Robert and Schindler, Maike and Lilienthal, Achim}, booktitle = {Proceedings of RO-MAN 2016 Workshop : Workshop on Communicating Intentions in Human-Robot Interaction}, institution = {Örebro University, School of Science and Technology}, institution = {Örebro University, Örebro, Sweden}, title = {Spatial Augmented Reality and Eye Tracking for Evaluating Human Robot Interaction}, abstract = {Freely moving autonomous mobile robots may lead to anxiety when operating in workspaces shared with humans. Previous works have given evidence that communicating intentions using Spatial Augmented Reality (SAR) in the shared workspace will make humans more comfortable in the vicinity of robots. In this work, we conducted experiments with the robot projecting various patterns in order to convey its movement intentions during encounters with humans. In these experiments, the trajectories of both humans and robot were recorded with a laser scanner. Human test subjects were also equipped with an eye tracker. We analyzed the eye gaze patterns and the laser scan tracking data in order to understand how the robot’s intention communication affects the human movement behavior. Furthermore, we used retrospective recall interviews to aid in identifying the reasons that lead to behavior changes. }, year = {2016} } @inproceedings{Chadalavada900532, author = {Chadalavada, Ravi Teja and Andreasson, Henrik and Krug, Robert and Lilienthal, Achim}, booktitle = {2015 European Conference on Mobile Robots (ECMR) : }, institution = {Örebro University, School of Science and Technology}, publisher = {IEEE conference proceedings}, title = {That’s on my Mind! 
: Robot to Human Intention Communication through on-board Projection on Shared Floor Space}, DOI = {10.1109/ECMR.2015.7403771}, keywords = {Human Robot Interaction, Intention Communication, Shared spaces}, abstract = {The upcoming new generation of autonomous vehicles for transporting materials in industrial environments will be more versatile, flexible and efficient than traditional AGVs, which simply follow pre-defined paths. However, freely navigating vehicles can appear unpredictable to human workers and thus cause stress and render joint use of the available space inefficient. Here we address this issue and propose on-board intention projection on the shared floor space for communication from robot to human. We present a research prototype of a robotic fork-lift equipped with a LED projector to visualize internal state information and intents. We describe the projector system and discuss calibration issues. The robot’s ability to communicate its intentions is evaluated in realistic situations where test subjects meet the robotic forklift. The results show that already adding simple information, such as the trajectory and the space to be occupied by the robot in the near future, is able to effectively improve human response to the robot. }, ISBN = {978-1-4673-9163-4}, year = {2015} }
@comment{ Exact duplicate entries with repeated citation keys Schreiter1830088, Molina1797296 and Schreiter1720267 were removed here; identical copies of these entries appear earlier in this file. The duplicate of Rudenko1524236 below is disabled by removing its leading at-sign so that BibTeX ignores it. }
inproceedings{Rudenko1524236, author = {Rudenko, Andrey and Kucner, Tomasz Piotr and Swaminathan, Chittaranjan Srinivas and Chadalavada, Ravi Teja and Arras, Kai Oliver and Lilienthal, Achim}, booktitle = { : }, institution = {Örebro University, School of Science and Technology}, institution = {Bosch Corporate Research, Renningen, Germany}, title = {Benchmarking Human Motion Prediction Methods}, keywords = {human motion prediction, benchmarking, datasets}, abstract = {In this extended abstract we present a novel dataset for benchmarking motion prediction algorithms. We describe our approach to data collection which generates diverse and accurate human motion in a controlled weakly-scripted setup. We also give insights for building a universal benchmark for motion prediction. 
}, year = {2020} } @article{Chadalavada1374911, author = {Chadalavada, Ravi Teja and Andreasson, Henrik and Schindler, Maike and Palm, Rainer and Lilienthal, Achim J.}, institution = {Örebro University, School of Science and Technology}, institution = {Faculty of Human Sciences, University of Cologne, Germany}, journal = {Robotics and Computer-Integrated Manufacturing}, note = {Funding Agencies:KKS SIDUS project AIR: "Action and Intention Recognition in Human Interaction with Autonomous Systems"  20140220H2020 project ILIAD: "Intra-Logistics with Integrated Automatic Deployment: Safe and Scalable Fleets in Shared Spaces"  732737}, eid = {101830}, title = {Bi-directional navigation intent communication using spatial augmented reality and eye-tracking glasses for improved safety in human-robot interaction}, volume = {61}, DOI = {10.1016/j.rcim.2019.101830}, keywords = {Human-robot interaction (HRI), Mobile robots, Intention communication, Eye-tracking, Intention recognition, Spatial augmented reality, Stimulated recall interview, Obstacle avoidance, Safety, Logistics}, abstract = {Safety, legibility and efficiency are essential for autonomous mobile robots that interact with humans. A key factor in this respect is bi-directional communication of navigation intent, which we focus on in this article with a particular view on industrial logistic applications. In the direction robot-to-human, we study how a robot can communicate its navigation intent using Spatial Augmented Reality (SAR) such that humans can intuitively understand the robot's intention and feel safe in the vicinity of robots. We conducted experiments with an autonomous forklift that projects various patterns on the shared floor space to convey its navigation intentions. We analyzed trajectories and eye gaze patterns of humans while interacting with an autonomous forklift and carried out stimulated recall interviews (SRI) in order to identify desirable features for projection of robot intentions. 
In the direction human-to-robot, we argue that robots in human co-habited environments need human-aware task and motion planning to support safety and efficiency, ideally responding to people's motion intentions as soon as they can be inferred from human cues. Eye gaze can convey information about intentions beyond what can be inferred from the trajectory and head pose of a person. Hence, we propose eye-tracking glasses as safety equipment in industrial environments shared by humans and robots. In this work, we investigate the possibility of human-to-robot implicit intention transference solely from eye gaze data and evaluate how the observed eye gaze patterns of the participants relate to their navigation decisions. We again analyzed trajectories and eye gaze patterns of humans while interacting with an autonomous forklift for clues that could reveal direction intent. Our analysis shows that people primarily gazed on that side of the robot they ultimately decided to pass by. We discuss implications of these results and relate to a control approach that uses human gaze for early obstacle avoidance. }, year = {2020} } @article{Rudenko1387088, author = {Rudenko, Andrey and Kucner, Tomasz Piotr and Swaminathan, Chittaranjan Srinivas and Chadalavada, Ravi Teja and Arras, Kai O. 
and Lilienthal, Achim J.}, institution = {Örebro University, School of Science and Technology}, institution = {Robotics Research, Bosch Corporate Research, Stuttgart, Germany}, institution = {Robotics Research, Bosch Corporate Research, Stuttgart, Germany}, journal = {IEEE Robotics and Automation Letters}, number = {2}, pages = {676--682}, title = {TH{\"O}R : Human-Robot Navigation Data Collection and Accurate Motion Trajectories Dataset}, volume = {5}, DOI = {10.1109/LRA.2020.2965416}, keywords = {Social Human-Robot Interaction, Motion and Path Planning, Human Detection and Tracking}, abstract = {Understanding human behavior is key for robots and intelligent systems that share a space with people. Accordingly, research that enables such systems to perceive, track, learn and predict human behavior as well as to plan and interact with humans has received increasing attention over the last years. The availability of large human motion datasets that contain relevant levels of difficulty is fundamental to this research. Existing datasets are often limited in terms of information content, annotation quality or variability of human behavior. In this paper, we present THÖR, a new dataset with human motion trajectory and eye gaze data collected in an indoor environment with accurate ground truth for position, head orientation, gaze direction, social grouping, obstacles map and goal coordinates. THÖR also contains sensor data collected by a 3D lidar and involves a mobile robot navigating the space. We propose a set of metrics to quantitatively analyze motion trajectory datasets such as the average tracking duration, ground truth noise, curvature and speed variation of the trajectories. In comparison to prior art, our dataset has a larger variety in human motion behavior, is less noisy, and contains annotations at higher frequencies. 
}, URL = {https://arxiv.org/abs/1909.04403}, year = {2020} } @incollection{Palm1391193, author = {Palm, Rainer and Chadalavada, Ravi Teja and Lilienthal, Achim}, booktitle = {Computational Intelligence : International Joint Conference, IJCCI2016 Porto, Portugal, November 9–11,2016 Revised Selected Papers}, institution = {Örebro University, School of Science and Technology}, pages = {149--177}, title = {Fuzzy Modeling, Control and Prediction in Human-Robot Systems}, series = {Studies in Computational Intelligence}, number = {792}, DOI = {10.1007/978-3-319-99283-9}, keywords = {Fuzzy control, Fuzzy modeling, Prediction, Human-robot interaction, Human intentions, Obstacle avoidance, Velocity obstacles}, abstract = {A safe and synchronized interaction between human agents and robots in shared areas requires both long distance prediction of their motions and an appropriate control policy for short distance reaction. In this connection recognition of mutual intentions in the prediction phase is crucial to improve the performance of short distance control.We suggest an approach for short distance control inwhich the expected human movements relative to the robot are being summarized in a so-called “compass dial” from which fuzzy control rules for the robot’s reactions are derived. To predict possible collisions between robot and human at the earliest possible time, the travel times to predicted human-robot intersections are calculated and fed into a hybrid controller for collision avoidance. By applying the method of velocity obstacles, the relation between a change in robot’s motion direction and its velocity during an interaction is optimized and a combination with fuzzy expert rules is used for a safe obstacle avoidance. For a prediction of human intentions to move to certain goals pedestrian tracks are modeled by fuzzy clustering, and trajectories of human and robot agents are extrapolated to avoid collisions at intersections. 
Examples with both simulated and real data show the applicability of the presented methods and the high performance of the results. }, ISBN = {978-3-319-99282-2}, ISBN = {978-3-319-99283-9}, year = {2019} } @inproceedings{Chadalavada1391172, author = {Chadalavada, Ravi Teja and Andreasson, Henrik and Schindler, Maike and Lilienthal, Achim J.}, booktitle = { : }, institution = {Örebro University, School of Science and Technology}, institution = {Faculty of Human Sciences, University of Cologne, Germany, Cologne, Gemany}, title = {Implicit intention transference using eye-tracking glasses for improved safety in human-robot interaction}, keywords = {Human-robot interaction, intention communication, eye tracking, spatial augmented reality, electrodermal activity, stress, cognitive load.}, abstract = {Eye gaze can convey information about intentions beyond what can beinferred from the trajectory and head pose of a person. We propose eye-trackingglasses as safety equipment in industrial environments shared by humans androbots. In this work, an implicit intention transference system was developed and implemented. Robot was given access to human eye gaze data, and it responds tothe eye gaze data through spatial augmented reality projections on the sharedfloor space in real-time and the robot could also adapt its path. This allows proactivesafety approaches in HRI for example by attempting to get the human'sattention when they are in the vicinity of a moving robot. A study was conductedwith workers at an industrial warehouse. The time taken to understand the behaviorof the system was recorded. Electrodermal activity and pupil diameter wererecorded to measure the increase in stress and cognitive load while interactingwith an autonomous system, using these measurements as a proxy to quantifytrust in autonomous systems. 
}, year = {2019} } @inproceedings{Chadalavada1270176, author = {Chadalavada, Ravi Teja and Andreasson, Henrik and Schindler, Maike and Palm, Rainer and Lilienthal, Achim}, booktitle = {Advances in Manufacturing Technology XXXII : Proceedings of the 16th International Conference on Manufacturing Research, incorporating the 33rd National Conference on Manufacturing Research, September 11–13, 2018, University of Skövde, Sweden}, institution = {Örebro University, School of Science and Technology}, pages = {253--258}, title = {Accessing your navigation plans! Human-Robot Intention Transfer using Eye-Tracking Glasses}, series = {Advances in Transdisciplinary Engineering}, number = {8}, DOI = {10.3233/978-1-61499-902-7-253}, keywords = {Human-Robot Interaction (HRI), Eye-tracking, Eye-Tracking Glasses, Navigation Intent, Implicit Intention Transference, Obstacle avoidance.}, abstract = {Robots in human co-habited environments need human-aware task and motion planning, ideally responding to people’s motion intentions as soon as they can be inferred from human cues. Eye gaze can convey information about intentions beyond trajectory and head pose of a person. Hence, we propose eye-tracking glasses as safety equipment in industrial environments shared by humans and robots. This paper investigates the possibility of human-to-robot implicit intention transference solely from eye gaze data.  We present experiments in which humans wearing eye-tracking glasses encountered a small forklift truck under various conditions. We evaluate how the observed eye gaze patterns of the participants related to their navigation decisions. Our analysis shows that people primarily gazed on that side of the robot they ultimately decided to pass by. We discuss implications of these results and relate to a control approach that uses human eye gaze for early obstacle avoidance. 
}, ISBN = {978-1-61499-901-0}, ISBN = {978-1-61499-902-7}, year = {2018} } @inproceedings{Schindler1070809, author = {Schindler, Maike and Lilienthal, Achim and Chadalavada, Ravi and {\"O}gren, Magnus}, booktitle = {Proceedings of the 40th Conference of the International Group for the Psychology of Mathematics Education (PME) : }, institution = {Örebro University, School of Science and Technology}, title = {Creativity in the eye of the student : Refining investigations of mathematical creativity using eye-tracking goggles}, abstract = {Mathematical creativity is increasingly important for improved innovation and problem-solving. In this paper, we address the question of how to best investigate mathematical creativity and critically discuss dichotomous creativity scoring schemes. In order to gain deeper insights into creative problem-solving processes, we suggest the use of mobile, unobtrusive eye-trackers for evaluating students’ creativity in the context of Multiple Solution Tasks (MSTs). We present first results with inexpensive eye-tracking goggles that reveal the added value of evaluating students’ eye movements when investigating mathematical creativity—compared to an analysis of written/drawn solutions as well as compared to an analysis of simple videos. }, year = {2016} } @inproceedings{Chadalavada1070994, author = {Chadalavada, Ravi Teja and Andreasson, Henrik and Krug, Robert and Lilienthal, Achim}, booktitle = {Proceedings of RSS Workshop "Social Trust in Autonomous Robots 2016" : }, institution = {Örebro University, School of Science and Technology}, title = {Empirical evaluation of human trust in an expressive mobile robot}, keywords = {Human robot interaction, hri, mobile robot, trust, evaluation}, abstract = {A mobile robot communicating its intentions using Spatial Augmented Reality (SAR) on the shared floor space makes humans feel safer and more comfortable around the robot. Our previous work [1] and several other works established this fact. 
We built upon that work by adding an adaptable information and control to the SAR module. An empirical study about how a mobile robot builds trust in humans by communicating its intentions was conducted. A novel way of evaluating that trust is presented and experimentally shown that adaption in SAR module lead to natural interaction and the new evaluation system helped us discover that the comfort levels between human-robot interactions approached those of human-human interactions. }, year = {2016} } @inproceedings{Palm1051090, author = {Palm, Rainer and Chadalavada, Ravi and Lilienthal, Achim}, booktitle = {Proceedings of the 8th International Joint Conference on Computational Intelligence (IJCCI 2016) : }, institution = {Örebro University, School of Science and Technology}, note = {Funding Agency:AIR-project, Action and Intention Recognition in Human Interaction with Autonomous Systems}, pages = {67--74}, title = {Fuzzy Modeling and Control for Intention Recognition in Human-Robot Systems}, volume = {2}, DOI = {10.5220/0006015400670074}, keywords = {Fuzzy control, Fuzzy modeling, Human-Robot interaction, human intentions}, abstract = {The recognition of human intentions from trajectories in the framework of human-robot interaction is a challenging field of research. In this paper some control problems of the human-robot interaction and their intentions to compete or cooperate in shared work spaces are addressed and the time schedule of the information flow is discussed. The expected human movements relative to the robot are summarized in a so-called "compass dial" from which fuzzy control rules for the robot's reactions are derived. To avoid collisions between robot and human very early the computation of collision times at predicted human-robot intersections is discussed and a switching controller for collision avoidance is proposed. 
In the context of the recognition of human intentions to move to certain goals, pedestrian tracks are modeled by fuzzy clustering, lanes preferred by human agents are identified, and the identification of degrees of membership of a pedestrian track to specific lanes are discussed. Computations based on simulated and experimental data show the applicability of the methods presented. }, ISBN = {978-989-758-201-1}, year = {2016} } @mastersthesis{Chadalavada1071029, author = {Chadalavada, Ravi Teja}, institution = {Chalmers University of Technology}, pages = {38}, school = {Chalmers University of Technology}, title = {Human Robot Interaction for Autonomous Systems in Industrial Environments}, keywords = {HRI, Human Robot Interaction, Spatial Augmented Reality, SAR, Logistics, Mobile Robots, Intention Communication}, abstract = {The upcoming new generation of autonomous vehicles for transporting materials in industrial environments will be more versatile, flexible and efficient than traditional Automatic Guided Vehicles (AGV), which simply follow pre-defined paths. However, freely navigating vehicles can appear unpredictable to human workers and thus cause stress and render joint use of the available space inefficient. This work addresses the problem of providing information regarding a service robot’s intention to humans co-populating the environment. The overall goal is to make humans feel safer and more comfortable, even when they are in close vicinity of the robot. A spatial Augmented Reality (AR) system for robot intention communication by means of projecting proxemic information onto shared floor space is developed on a robotic fork-lift by equipping it with a LED projector. This helps in visualizing internal state information and intents on the shared floors spaces. The robot’s ability to communicate its intentions is evaluated in realistic situations where test subjects meet the robotic forklift. 
A Likert scalebased evaluation which also includes comparisons to human-human intention communication was performed. The results show that already adding simple information, such as the trajectory and the space to be occupied by the robot in the near future, is able to effectively improve human response to the robot. This kind of synergistic human-robot interaction in a work environment is expected to increase the robot’s acceptability in the industry. }, URL = {http://publications.lib.chalmers.se/records/fulltext/238513/238513.pdf}, year = {2016} } @inproceedings{Palm1051078, author = {Palm, Rainer and Chadalavada, Ravi and Lilienthal, Achim}, booktitle = {2016 9th International Conference on Human System Interactions, HSI 2016 : Proceedings}, institution = {Örebro University, School of Science and Technology}, note = {Funding Agency:AIR-project Action and Intention Recognition in Human Interaction with Autonomous Systems}, pages = {229--235}, title = {Recognition of Human-Robot Motion Intentions by Trajectory Observation}, series = {Conference on Human System Interaction}, DOI = {10.1109/HSI.2016.7529636}, keywords = {Human robot interaction, human intentions, obstacle avoidance, fuzzy rules}, abstract = {The intention of humans and autonomous robots to interact in shared spatial areas is a challenging field of research regarding human safety, system stability and performance of the system's behavior. In this paper the intention recognition between human and robot from the control point of view are addressed and the time schedule of the exchanged signals is discussed. After a description of the kinematic and geometric relations between human and robot a so-called 'compass dial' with the relative velocities is presented from which suitable fuzzy control rules are derived. The computation of the collision times at intersections and possible avoidance strategies are further discussed. 
Computations based on simulated and experimental data show the applicability of the methods presented. }, ISBN = {9781509017294}, year = {2016} } @inproceedings{Bunz1071024, author = {Bunz, Elsa and Chadalavada, Ravi Teja and Andreasson, Henrik and Krug, Robert and Schindler, Maike and Lilienthal, Achim}, booktitle = {Proceedings of RO-MAN 2016 Workshop : Workshop on Communicating Intentions in Human-Robot Interaction}, institution = {Örebro University, School of Science and Technology}, institution = {Örebro University, Örebro, Sweden}, title = {Spatial Augmented Reality and Eye Tracking for Evaluating Human Robot Interaction}, abstract = {Freely moving autonomous mobile robots may leadto anxiety when operating in workspaces shared with humans.Previous works have given evidence that communicating in-tentions using Spatial Augmented Reality (SAR) in the sharedworkspace will make humans more comfortable in the vicinity ofrobots. In this work, we conducted experiments with the robotprojecting various patterns in order to convey its movementintentions during encounters with humans. In these experiments,the trajectories of both humans and robot were recorded witha laser scanner. Human test subjects were also equipped withan eye tracker. We analyzed the eye gaze patterns and thelaser scan tracking data in order to understand how the robot’sintention communication affects the human movement behavior.Furthermore, we used retrospective recall interviews to aid inidentifying the reasons that lead to behavior changes. }, year = {2016} } @inproceedings{Chadalavada900532, author = {Chadalavada, Ravi Teja and Andreasson, Henrik and Krug, Robert and Lilienthal, Achim}, booktitle = {2015 European Conference on Mobile Robots (ECMR) : }, institution = {Örebro University, School of Science and Technology}, publisher = {IEEE conference proceedings}, title = {That’s on my Mind! 
: Robot to Human Intention Communication through on-board Projection on Shared Floor Space}, DOI = {10.1109/ECMR.2015.7403771}, keywords = {Human Robot Interaction, Intention Communication, Shared spaces}, abstract = {The upcoming new generation of autonomous vehicles for transporting materials in industrial environments will be more versatile, flexible and efficient than traditional AGVs, which simply follow pre-defined paths. However, freely navigating vehicles can appear unpredictable to human workers and thus cause stress and render joint use of the available space inefficient. Here we address this issue and propose on-board intention projection on the shared floor space for communication from robot to human. We present a research prototype of a robotic fork-lift equipped with a LED projector to visualize internal state information and intents. We describe the projector system and discuss calibration issues. The robot’s ability to communicate its intentions is evaluated in realistic situations where test subjects meet the robotic forklift. The results show that already adding simple information, such as the trajectory and the space to be occupied by the robot in the near future, is able to effectively improve human response to the robot. }, ISBN = {978-1-4673-9163-4}, year = {2015} }