
@article{jihpp.2019.06357,
  author   = {Sun, Jun and Li, Yan and Shen, Yatian and Ding, Wenke and Shi, Xianjin and Zhang, Lei and Shen, Xiajiong and He, Jing},
  title    = {Joint Self-Attention Based Neural Networks for Semantic Relation Extraction},
  journal  = {Journal of Information Hiding and Privacy Protection},
  volume   = {1},
  number   = {2},
  pages    = {69--75},
  year     = {2019},
  issn     = {2637-4226},
  doi      = {10.32604/jihpp.2019.06357},
  url      = {http://www.techscience.com/jihpp/v1n2/28998},
  abstract = {Relation extraction is an important task in NLP community. However, some models often fail in capturing Long-distance dependence on semantics, and the interaction between semantics of two entities is ignored. In this paper, we propose a novel neural network model for semantic relation classification called joint self-attention bi-LSTM (SA-Bi-LSTM) to model the internal structure of the sentence to obtain the importance of each word of the sentence without relying on additional information, and capture Long-distance dependence on semantics. We conduct experiments using the SemEval-2010 Task 8 dataset. Extensive experiments and the results demonstrated that the proposed method is effective against relation classification, which can obtain state-of-the-art classification accuracy just with minimal feature engineering.},
}



