@inproceedings{e785ef56076144608007d5b314614787,
title = "Attention-based combination of {CNN} and {RNN} for relation classification",
abstract = "Relation classification is an essential task in natural language processing (NLP) in order to extract structured data from sentences. In this paper, we propose a novel model Att-ComNN combining convolutional neural network (CNN) and bidirectional recurrent neural network (RNN) for relation classification. By combining RNN and CNN, we obtain more accurate context representations of words, which benefits classifying relations. Besides, with both shortest dependency path (SDP) attention and pooling attention added, this model captures the most informative context representation for better classification without using other handcrafted features. The results of experiments show that our model improves the relation classification performance on the SemEval-2010 Task 8 and outperforms most of previous state-of-the-art methods, including those depending on much richer forms of handcrafted features and prior knowledge.",
keywords = "Attention mechanism, Deep neural network, Relation classification",
author = "Xiaoyu Guo and Hui Zhang and Rui Liu and Xin Ding and Runqi Tian and Bencheng Wang",
note = "Publisher Copyright: {\textcopyright} Springer Nature Switzerland AG 2018.; 25th International Conference on Neural Information Processing, ICONIP 2018 ; Conference date: 13-12-2018 Through 16-12-2018",
year = "2018",
doi = "10.1007/978-3-030-04212-7_21",
language = "English",
isbn = "9783030042110",
series = "Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics)",
publisher = "Springer Verlag",
pages = "244--255",
editor = "Seiichi Ozawa and Leung, {Andrew Chi Sing} and Long Cheng",
booktitle = "Neural Information Processing - 25th International Conference, ICONIP 2018, Proceedings",
address = "Germany",
}