Abstract
In-hand object pose estimation is challenging for humans and robots due to occlusion caused by the hand and object. This paper proposes a soft finger that integrates inner vision with kinesthetic sensing to estimate object pose inspired by human fingers. The soft finger has a flexible skeleton and skin that adapts to different objects, and the skeleton deformations during interaction provide contact information obtained by the image from the inner camera. The proposed framework is an end-to-end method that uses raw images from soft fingers to estimate in-hand object pose. It consists of an encoder for kinesthetic information processing and an object pose and category estimator. The framework was tested on seven objects, achieving an impressive error of 2.02 mm and 11.34 degrees for pose error and 99.05% for classification.
Links
BibTeX (Download)
@article{Liu2023BioInspired,
  author    = {Liu, Xiaobo and Han, Xudong and Guo, Ning and Wan, Fang and Song, Chaoyang},
  title     = {Bio-inspired Proprioceptive Touch of a Soft Finger with Inner-Finger Kinesthetic Perception},
  journal   = {Biomimetics},
  year      = {2023},
  date      = {2023-10-21},
  urldate   = {2023-10-21},
  volume    = {8},
  number    = {6},
  pages     = {501},
  doi       = {10.3390/biomimetics8060501},
  abstract  = {In-hand object pose estimation is challenging for humans and robots due to occlusion caused by the hand and object. This paper proposes a soft finger that integrates inner vision with kinesthetic sensing to estimate object pose inspired by human fingers. The soft finger has a flexible skeleton and skin that adapts to different objects, and the skeleton deformations during interaction provide contact information obtained by the image from the inner camera. The proposed framework is an end-to-end method that uses raw images from soft fingers to estimate in-hand object pose. It consists of an encoder for kinesthetic information processing and an object pose and category estimator. The framework was tested on seven objects, achieving an impressive error of 2.02 mm and 11.34 degrees for pose error and 99.05% for classification.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}