@inproceedings{3c083320d62347e7bfbe614ce556d9b0,
  title     = {Audio or tactile feedback: which modality when?},
  abstract  = {When designing interfaces for mobile devices it is important to take into account the variety of contexts of use. We present a study that examines how changing noise and disturbance in the environment affects user performance in a touchscreen typing task with the interface being presented through visual only, visual and tactile, or visual and audio feedback. The aim of the study is to show at what exact environmental levels audio or tactile feedback become ineffective. The results show significant decreases in performance for audio feedback at levels of 94dB and above as well as decreases in performance for tactile feedback at vibration levels of 9.18g/s. These results suggest that at these levels, feedback should be presented by a different modality. These findings will allow designers to take advantage of sensor enabled mobile devices to adapt the provided feedback to the user's current context.},
  keywords  = {Audio, Crossmodal, Mobile interaction, Tactile, Touchscreen},
  author    = {Hoggan, Eve and Crossan, Andrew and Brewster, Stephen and Kaaresoja, Topi},
  year      = {2009},
  month     = apr,
  doi       = {10.1145/1518701.1519045},
  language  = {English},
  isbn      = {9781605582474},
  volume    = {1},
  series    = {Conference on Human Factors in Computing Systems - Proceedings},
  publisher = {Association for Computing Machinery (ACM)},
  pages     = {2253--2256},
  editor    = {Greenberg, Saul and Hudson, Scott E. and Hinckley, Ken and Morris, Meredith Ringel and {Olsen Jr.}, Dan R.},
  booktitle = {CHI '09: Proceedings of the SIGCHI Conference on Human Factors in Computing Systems},
  address   = {New York, NY, USA},
  note      = {27th International Conference on Human Factors in Computing Systems (CHI 2009), CHI 2009 ; Conference date: 04-04-2009 Through 09-04-2009},
}