@inproceedings{7a4af7fc9b2741c29ccbe279653b999b,
title = "Follow that sound: Using sonification and corrective verbal feedback to teach touchscreen gestures",
abstract = "While sighted users may learn to perform touchscreen gestures through observation (e.g., of other users or video tutorials), such mechanisms are inaccessible for users with visual impairments. As a result, learning to perform gestures can be challenging. We propose and evaluate two techniques to teach touchscreen gestures to users with visual impairments: (1) corrective verbal feedback using text-to-speech and automatic analysis of the user's drawn gesture; (2) gesture sonification to generate sound based on finger touches, creating an audio representation of a gesture. To refine and evaluate the techniques, we conducted two controlled lab studies. The first study, with 12 sighted participants, compared parameters for sonifying gestures in an eyes-free scenario and identified pitch + stereo panning as the best combination. In the second study, 6 blind and low-vision participants completed gesture replication tasks with the two feedback techniques. Subjective data and preliminary performance findings indicate that the techniques offer complementary advantages.",
keywords = "Blindness, Gestures, Sonification, Touchscreen, Visual impairments",
author = "Uran Oh and Kane, {Shaun K.} and Leah Findlater",
year = "2013",
doi = "10.1145/2513383.2513455",
language = "English",
isbn = "9781450324052",
series = "Proceedings of the 15th International ACM SIGACCESS Conference on Computers and Accessibility, ASSETS 2013",
booktitle = "Proceedings of the 15th International ACM SIGACCESS Conference on Computers and Accessibility, ASSETS 2013",
note = "15th International ACM SIGACCESS Conference on Computers and Accessibility, ASSETS 2013 ; Conference date: 21-10-2013 Through 23-10-2013",
}