@article{9ba44c4e758b49218ad8c061ea645014,
  author    = {Charissis, Vassilis and Falah, Jannat and Lagoo, Ramesh and Alfalah, Salsabeel F. M. and Khan, Soheeb and Wang, Shu and Altarteer, Samar and {Bram Larbi}, Kweku and Drikakis, Dimitris},
  title     = {Employing emerging technologies to develop and evaluate in-vehicle intelligent systems for driver support: infotainment {AR} {HUD} case study},
  abstract  = {The plurality of current infotainment devices within the in-vehicle space produces an unprecedented volume of incoming data that overwhelm the typical driver, leading to higher collision probability. This work presents an investigation to an alternative option which aims to manage the incoming information while offering an uncluttered and timely manner of presenting and interacting with the incoming data safely. The latter is achieved through the use of an augmented reality (AR) head-up display (HUD) system, which projects the information within the driver{\textquoteright}s field of view. An uncluttered gesture recognition interface provides the interaction with the AR visuals. For the assessment of the system{\textquoteright}s effectiveness, we developed a full-scale virtual reality driving simulator which immerses the drivers in challenging, collision-prone, scenarios. The scenarios unfold within a digital twin model of the surrounding motorways of the city of Glasgow. The proposed system was evaluated in contrast to a typical head-down display (HDD) interface system by 30 users, showing promising results that are discussed in detail.},
  keywords  = {augmented reality, human-computer interaction, head-up display, digital twin, artificial intelligence, simulation, driver distraction, gesture recognition, smart city, virtual reality},
  journal   = {Applied Sciences},
  volume    = {11},
  number    = {4},
  pages     = {1--28},
  year      = {2021},
  month     = feb,
  doi       = {10.3390/app11041397},
  issn      = {2076-3417},
  publisher = {MDPI},
  language  = {English},
  note      = {Acceptance in SAN OA journal},
}