@inproceedings{f34f31ea57f643d79a044d07ad1eff7a,
title = "Conveying the configuration of a virtual human hand using vibrotactile feedback",
abstract = "Upper-limb prostheses users lack proprioception of their artificial arm, and rely heavily on vision to understand its configuration. With the goal of reducing the amount of energy expended on visual cues during upper-limb prosthesis use, this study investigates whether haptic feedback can relay the configuration of a virtual hand in the absence of sight. Two mappings from waistbelt-mounted tactor vibration patterns to hand configuration are explored: (1) Synergy-based hand motions derived from the results of a principal component analysis run on an aggregate of hand motions and (2) Decoupled hand motions, which include experimenter-selected motions such as finger grasp and finger spread. Results show that users can identify complex hand configurations with vibrotactile feedback patterns based on both the Synergies and Decoupled methods, although 30-45 seconds are required to achieve this task. Also, findings demonstrate that users are likely to memorize correspondence between an overall feeling of the tactor pattern to a hand configuration rather than constructing the hand configuration by isolating and considering each tactor individually. Last, results indicate that hand configuration is most accurately conveyed by maximizing information along a synergy-based space.",
keywords = "Hand Synergies, Haptic Feedback, Prosthetics, Sensory Substitution, Vibrotactile Feedback",
author = "Andrew Cheng and Nichols, {Kirk A.} and Weeks, {Heidi M.} and Netta Gurari and Okamura, {Allison M.}",
year = "2012",
doi = "10.1109/HAPTIC.2012.6183784",
language = "English (US)",
isbn = "9781467308090",
series = "Haptics Symposium 2012, HAPTICS 2012 - Proceedings",
pages = "155--162",
booktitle = "Haptics Symposium 2012, HAPTICS 2012 - Proceedings",
note = "2012 IEEE Haptics Symposium, HAPTICS 2012 ; Conference date: 04-03-2012 Through 07-03-2012",
}