Citation

BibTeX format

@inproceedings{Nguyen:2018:10.1109/IROS.2018.8594519,
  author    = {Nguyen, P. and Fischer, T. and Chang, H. J. and Pattacini, U. and Metta, G. and Demiris, Y.},
  title     = {Transferring Visuomotor Learning from Simulation to the Real World for Robotics Manipulation Tasks},
  booktitle = {2018 {IEEE/RSJ} International Conference on Intelligent Robots and Systems ({IROS})},
  publisher = {IEEE},
  pages     = {6667--6674},
  year      = {2018},
  issn      = {2153-0866},
  doi       = {10.1109/IROS.2018.8594519},
  url       = {http://hdl.handle.net/10044/1/62214},
  internal-note = {booktitle inferred from the IROS DOI prefix; full author given names not in source export --- confirm against publisher record},
}

RIS format (EndNote, RefMan)

TY  - CPAPER
AB  - Hand-eye coordination is a requirement for many manipulation tasks including grasping and reaching. However, accurate hand-eye coordination has shown to be especially difficult to achieve in complex robots like the iCub humanoid. In this work, we solve the hand-eye coordination task using a visuomotor deep neural network predictor that estimates the arm's joint configuration given a stereo image pair of the arm and the underlying head configuration. As there are various unavoidable sources of sensing error on the physical robot, we train the predictor on images obtained from simulation. The images from simulation were modified to look realistic using an image-to-image translation approach. In various experiments, we first show that the visuomotor predictor provides accurate joint estimates of the iCub's hand in simulation. We then show that the predictor can be used to obtain the systematic error of the robot's joint measurements on the physical iCub robot. We demonstrate that a calibrator can be designed to automatically compensate this error. Finally, we validate that this enables accurate reaching of objects while circumventing manual fine-calibration of the robot.
AU  - Nguyen, P.
AU  - Fischer, T.
AU  - Chang, H. J.
AU  - Pattacini, U.
AU  - Metta, G.
AU  - Demiris, Y.
DO  - 10.1109/IROS.2018.8594519
EP  - 6674
PB  - IEEE
PY  - 2018///
SN  - 2153-0866
SP  - 6667
TI  - Transferring visuomotor learning from simulation to the real world for robotics manipulation tasks
UR  - http://dx.doi.org/10.1109/IROS.2018.8594519
UR  - http://hdl.handle.net/10044/1/62214
ER  - 