@Article{iasc.2021.016320,
  AUTHOR   = {Dianqing Liu and Lanqiu Zhang and Yanqiu Shao and Junzhao Sun},
  TITLE    = {Leverage External Knowledge and Self-attention for Chinese Semantic Dependency Graph Parsing},
  JOURNAL  = {Intelligent Automation \& Soft Computing},
  VOLUME   = {28},
  YEAR     = {2021},
  NUMBER   = {2},
  PAGES    = {447--458},
  URL      = {http://www.techscience.com/iasc/v28n2/42059},
  ISSN     = {2326-005X},
  ABSTRACT = {Chinese semantic dependency graph (CSDG) parsing aims to analyze the semantic relationships between words in a sentence. Since it is a deep semantic analysis task, the parser needs considerable prior knowledge about the real world to distinguish different semantic roles and determine the range of the head nodes of each word. Existing CSDG parsers usually use part-of-speech (POS) and lexical features, which provide only linguistic knowledge, not semantic knowledge about the word. To solve this problem, we propose an entity recognition method based on distant supervision and entity classification to recognize entities in sentences, and then integrate the category information of entities as an external knowledge feature into our CSDG parser. Furthermore, many domains contain long sentences, which makes it difficult for the parser to handle long-distance dependencies. In this paper, we combine the self-attention mechanism with Bi-LSTM, which significantly improves the performance of the parser on long texts. We also adopt the BERT model to generate more powerful sentence representations and alleviate the problem of unknown words. Experimental results show that both external knowledge and self-attention are beneficial for improving the accuracy of the CSDG parser, and our parser achieves state-of-the-art performance on the datasets of SemEval-2016 Task 9: Chinese Semantic Dependency Parsing.},
  DOI      = {10.32604/iasc.2021.016320}
}