@inproceedings{fba187d4aecb4c78ab5ed89bda5a8157,
  author    = {Kim, Taeyong and Kim, Sanghong and Choi, Joonhee and Lee, Youngsun and Lee, Bowon},
  title     = {Say and Find It: A Multimodal Wearable Interface for People with Visual Impairment},
  booktitle = {{UIST} 2019 Adjunct - Adjunct Publication of the 32nd Annual {ACM} Symposium on User Interface Software and Technology},
  publisher = {Association for Computing Machinery, Inc},
  pages     = {27--29},
  year      = {2019},
  month     = oct,
  day       = {14},
  doi       = {10.1145/3332167.3357104},
  language  = {English},
  keywords  = {Assistive system, Mobile interface, Multimodal wearable interface, Visual impairment},
  abstract  = {Recent advances in computer vision and natural language processing using deep neural networks (DNNs) have enabled rich and intuitive multimodal interfaces. However, research on intelligent assistance systems for persons with visual impairment has not been well explored. In this work, we present an interactive object recognition and guidance interface based on multimodal interaction for blind and partially sighted people using an embedded mobile device. We demonstrate that the proposed solution using DNNs can effectively assist visually impaired people. We believe that this work will provide new and helpful insights for designing intelligent assistance systems in the future.},
  note      = {Funding Information: This work was supported by the Ministry of Education of the Republic of Korea and the National Research Foundation of Korea (NRF-2018S1A5A2A03037308) and by the Industrial Technology Innovation Program funded by the Ministry of Trade, Industry & Energy (10073154). Publisher Copyright: {\textcopyright} 2019 Copyright is held by the owner/author(s).; 32nd Annual ACM Symposium on User Interface Software and Technology, UIST 2019 ; Conference date: 20-10-2019 Through 23-10-2019},
}