@comment{Scraped publication list (HTML extraction artifact; canonical data is in the entries below):
Cai, Caixia; Liang, Ying Siu; Somani, Nikhil; Wu, Yan. Inferring the Geometric Nullspace of Robot Skills from Human Demonstrations. In: 2020 IEEE International Conference on Robotics and Automation (ICRA), pp. 7668-7675, IEEE, Paris, France, 2020, ISSN: 2577-087X.
Taunyazov, Tasbolat; Koh, Hui Fang; Wu, Yan; Cai, Caixia; Soh, Harold. Towards Effective Tactile Identification of Textures using a Hybrid Touch Approach. In: 2019 International Conference on Robotics and Automation (ICRA), pp. 4269-4275, IEEE, Montreal, Canada, 2019, ISBN: 978-1-5386-6027-0.
You, Yangwei; Cai, Caixia; Wu, Yan. 3D Visibility Graph Based Motion Planning and Control. In: The 2019 5th International Conference on Robotics and Artificial Intelligence (ICRAI), pp. 48-53, ACM, Singapore, 2019, ISBN: 9781450372350.
Year group header: 2020}
@inproceedings{cai2020inferring,
  title     = {Inferring the Geometric Nullspace of Robot Skills from Human Demonstrations},
  author    = {Cai, Caixia and Liang, Ying Siu and Somani, Nikhil and Wu, Yan},
  url       = {https://yan-wu.com/wp-content/uploads/2020/05/cai2020inferring.pdf},
  doi       = {10.1109/ICRA40945.2020.9197174},
  issn      = {2577-087X},
  year      = {2020},
  date      = {2020-05-31},
  booktitle = {2020 {IEEE} International Conference on Robotics and Automation ({ICRA})},
  pages     = {7668--7675},
  publisher = {IEEE},
  address   = {Paris, France},
  abstract  = {In this paper we present a framework to learn skills from human demonstrations in the form of geometric nullspaces, which can be executed using a robot. We collect data of human demonstrations, fit geometric nullspaces to them, and also infer their corresponding geometric constraint models. These geometric constraints provide a powerful mathematical model as well as an intuitive representation of the skill in terms of the involved objects. To execute the skill using a robot, we combine this geometric skill description with the robot's kinematics and other environmental constraints, from which poses can be sampled for the robot's execution. The result of our framework is a system that takes the human demonstrations as input, learns the underlying skill model, and executes the learnt skill with different robots in different dynamic environments. We evaluate our approach on a simulated industrial robot, and execute the final task on the iCub humanoid robot.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
@comment{Year group header: 2019}
@inproceedings{taunyazov2019towards,
  title     = {Towards Effective Tactile Identification of Textures using a Hybrid Touch Approach},
  author    = {Taunyazov, Tasbolat and Koh, Hui Fang and Wu, Yan and Cai, Caixia and Soh, Harold},
  url       = {http://www.yan-wu.com/docs/taunyanov2019towards.pdf},
  doi       = {10.1109/ICRA.2019.8793967},
  isbn      = {978-1-5386-6027-0},
  year      = {2019},
  date      = {2019-05-24},
  booktitle = {2019 International Conference on Robotics and Automation ({ICRA})},
  pages     = {4269--4275},
  publisher = {IEEE},
  address   = {Montreal, Canada},
  abstract  = {The sense of touch is arguably the first human sense to develop. Empowering robots with the sense of touch may augment their understanding of interacted objects and the environment beyond standard sensory modalities (e.g., vision). This paper investigates the effect of hybridizing touch and sliding movements for tactile-based texture classification. We develop three machine-learning methods within a framework to discriminate between surface textures; the first two methods use hand-engineered features, whilst the third leverages convolutional and recurrent neural network layers to learn feature representations from raw data. To compare these methods, we constructed a dataset comprising tactile data from 23 textures gathered using the iCub platform under a loosely constrained setup, i.e., with nonlinear motion. In line with findings from neuroscience, our experiments show that a good initial estimate can be obtained via touch data, which can be further refined via sliding; combining both touch and sliding data results in 98% classification accuracy over unseen test data.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
@inproceedings{you20193d,
  title     = {{3D} Visibility Graph Based Motion Planning and Control},
  author    = {You, Yangwei and Cai, Caixia and Wu, Yan},
  url       = {https://dl.acm.org/doi/pdf/10.1145/3373724.3373735},
  doi       = {10.1145/3373724.3373735},
  isbn      = {9781450372350},
  year      = {2019},
  date      = {2019-01-01},
  booktitle = {The 2019 5th International Conference on Robotics and Artificial Intelligence ({ICRAI})},
  pages     = {48--53},
  publisher = {ACM},
  address   = {Singapore},
  abstract  = {This paper proposed a motion planning and control approach for real-time collision avoidance of high degree-of-freedom (DoF) robots. It constructs a 3D visibility graph by representing external obstacles with discrete polyhedrons and searches for the shortest path on it. To consider the complex internal structure of high DoF robots and take full advantage of redundancy, inverse kinematics is solved as an optimization problem which allows for flexible cost functions and constraints. The distance between the robot and external obstacles is treated as an extra constraint in the optimization which works together with visibility graph for collision avoidance. At the end, a pick-place task was successfully performed among obstacles by a simulated dual-arm quadrupedal robot considering the whole-body kinematics and stability, which demonstrated the feasibility of the proposed method.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
@comment{Publications (duplicate scraped list; HTML extraction artifact):
Inferring the Geometric Nullspace of Robot Skills from Human Demonstrations. In: 2020 IEEE International Conference on Robotics and Automation (ICRA), pp. 7668-7675, IEEE, Paris, France, 2020, ISSN: 2577-087X.
Towards Effective Tactile Identification of Textures using a Hybrid Touch Approach. In: 2019 International Conference on Robotics and Automation (ICRA), pp. 4269-4275, IEEE, Montreal, Canada, 2019, ISBN: 978-1-5386-6027-0.
3D Visibility Graph Based Motion Planning and Control. In: The 2019 5th International Conference on Robotics and Artificial Intelligence (ICRAI), pp. 48-53, ACM, Singapore, 2019, ISBN: 9781450372350.
Year group headers: 2020, 2019}