@inproceedings{cf77da382c904d04b0968bc45cf9b563,
title = "Virtually zooming-in with sensory substitution for blind users",
abstract = "When perceiving a scene visually, we constantly move our eyes and focus on particular details, which we integrate into a coherent percept. Can blind individuals integrate visual information this way? Can they even conceptualize zooming-in on sub-parts of visual images? We explore this question virtually using the EyeMusic Sensory Substitution Device (SSD). SSDs transfer information usually received by one sense via another, here 'seeing' with sound. This question is especially important for SSD users, since SSDs typically down-sample the visual stimuli into low-resolution images in which zooming-in on sub-parts could significantly improve users' perception. Five blind participants used the EyeMusic with a zoom mechanism in a virtual environment to identify cartoon figures. Using a touchscreen, they could zoom into different parts of the image, identify individual facial features, and integrate them into a full facial representation. These findings show that such integration of visual information is indeed possible even for users who are blind from birth, and demonstrate the approach's potential for practical visual rehabilitation.",
keywords = "action-perception, active sensing, motor control, sensory substitution, vision rehabilitation",
author = "Galit Buchs and Shachar Maidenbaum and Amir Amedi and Shelly Levy-Tzedek",
note = "Publisher Copyright: {\textcopyright} 2015 IEEE; 11th Annual International Conference on Virtual Rehabilitation, ICVR 2015; Conference date: 09-06-2015 through 12-06-2015",
year = "2015",
month = dec,
day = "16",
doi = "10.1109/ICVR.2015.7358613",
language = "English",
series = "International Conference on Virtual Rehabilitation, ICVR",
publisher = "Institute of Electrical and Electronics Engineers",
pages = "133--134",
booktitle = "2015 International Conference on Virtual Rehabilitation, ICVR 2015",
address = "United States",
}