
@Article{cmes.2025.074349,
AUTHOR = {Gan Zhu and Yongtao Yu and Xiaofan Deng and Yuanchen Dai and Zhenyuan Li},
TITLE = {A Hybrid Split-Attention and Transformer Architecture for High-Performance Network Intrusion Detection},
JOURNAL = {Computer Modeling in Engineering \& Sciences},
VOLUME = {145},
YEAR = {2025},
NUMBER = {3},
PAGES = {4317--4348},
URL = {http://www.techscience.com/CMES/v145n3/65008},
ISSN = {1526-1506},
ABSTRACT = {Existing deep learning Network Intrusion Detection Systems (NIDS) struggle to simultaneously capture fine-grained, multi-scale features and long-range temporal dependencies. To address this gap, this paper introduces TransNeSt, a hybrid architecture integrating a ResNeSt block (using split-attention for multi-scale feature representation) with a Transformer encoder (using self-attention for global temporal modeling). This integration of multi-scale and temporal attention was validated on four benchmarks: NSL-KDD, UNSW-NB15, CIC-IDS2017, and CICIoT2023. TransNeSt consistently outperformed both its individual components and several state-of-the-art models. The model achieved high efficacy across all datasets, with F1-scores of 99.04% (NSL-KDD), 91.92% (UNSW-NB15), 99.18% (CIC-IDS2017), and 97.85% (CICIoT2023), confirming its robustness.},
DOI = {10.32604/cmes.2025.074349}
}
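
The abstract above describes the TransNeSt design only at a high level: a split-attention (ResNeSt-style) block for multi-scale local features feeding a Transformer encoder for global temporal modeling. The paper's own code is not reproduced here; what follows is a minimal PyTorch sketch of that combination, assuming a 1-D convolutional layout over tabular flow features, radix 2, and small illustrative layer sizes. The names SplitAttention1d and TransNeStSketch, and all hyperparameters, are hypothetical and not the authors' implementation.

# Illustrative sketch only -- NOT the code released with the paper.
import torch
import torch.nn as nn
import torch.nn.functional as F


class SplitAttention1d(nn.Module):
    """Simplified ResNeSt-style split-attention over 1-D flow features."""

    def __init__(self, in_ch: int, out_ch: int, radix: int = 2, reduction: int = 4):
        super().__init__()
        self.radix = radix
        self.out_ch = out_ch
        # One convolution produces all radix splits at once.
        self.conv = nn.Conv1d(in_ch, out_ch * radix, kernel_size=3, padding=1)
        self.bn = nn.BatchNorm1d(out_ch * radix)
        inter = max(out_ch // reduction, 8)
        self.fc1 = nn.Conv1d(out_ch, inter, kernel_size=1)
        self.fc2 = nn.Conv1d(inter, out_ch * radix, kernel_size=1)

    def forward(self, x):
        b = x.size(0)
        x = F.relu(self.bn(self.conv(x)))                   # (B, out_ch*radix, L)
        splits = x.view(b, self.radix, self.out_ch, -1)     # (B, radix, out_ch, L)
        gap = splits.sum(dim=1).mean(dim=2, keepdim=True)   # global pooling: (B, out_ch, 1)
        attn = self.fc2(F.relu(self.fc1(gap)))              # (B, out_ch*radix, 1)
        attn = attn.view(b, self.radix, self.out_ch, 1).softmax(dim=1)
        return (splits * attn).sum(dim=1)                   # attention-weighted sum: (B, out_ch, L)


class TransNeStSketch(nn.Module):
    """Split-attention local features -> Transformer encoder -> classifier."""

    def __init__(self, n_features: int = 41, n_classes: int = 5, d_model: int = 64):
        super().__init__()
        self.local = SplitAttention1d(1, d_model)           # multi-scale local features
        layer = nn.TransformerEncoderLayer(
            d_model=d_model, nhead=4, dim_feedforward=128, batch_first=True
        )
        self.temporal = nn.TransformerEncoder(layer, num_layers=2)
        self.head = nn.Linear(d_model, n_classes)

    def forward(self, x):                                   # x: (B, n_features)
        z = self.local(x.unsqueeze(1))                      # (B, d_model, n_features)
        z = self.temporal(z.transpose(1, 2))                # self-attention over positions
        return self.head(z.mean(dim=1))                     # (B, n_classes)


if __name__ == "__main__":
    model = TransNeStSketch()
    logits = model(torch.randn(8, 41))  # 8 flows, 41 NSL-KDD-style features
    print(logits.shape)                 # torch.Size([8, 5])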
