
@article{cmc.2025.066367,
  author   = {Zhang, Jianwei and Zhao, Hongying and Feng, Yuan and Cai, Zengyu and Zhu, Liang},
  title    = {{NetST}: Network Encrypted Traffic Classification Based on {Swin Transformer}},
  journal  = {Computers, Materials \& Continua},
  volume   = {84},
  number   = {3},
  pages    = {5279--5298},
  year     = {2025},
  issn     = {1546-2226},
  doi      = {10.32604/cmc.2025.066367},
  url      = {http://www.techscience.com/cmc/v84n3/63189},
  abstract = {Network traffic classification is a crucial research area aimed at improving quality of service, simplifying network management, and enhancing network security. To address the growing complexity of cryptography, researchers have proposed various machine learning and deep learning approaches to tackle this challenge. However, existing mainstream methods face several general issues. On one hand, the widely used Transformer architecture exhibits high computational complexity, which negatively impacts its efficiency. On the other hand, traditional methods are often unreliable in traffic representation, frequently losing important byte information while retaining unnecessary biases. To address these problems, this paper introduces the Swin Transformer architecture into the domain of network traffic classification and proposes the NetST (Network Swin Transformer) model. This model improves the Swin Transformer to better accommodate the characteristics of network traffic, effectively addressing efficiency issues. Furthermore, this paper presents a traffic representation scheme designed to extract meaningful information from large volumes of traffic while minimizing bias. We integrate four datasets relevant to network traffic classification for our experiments, and the results demonstrate that NetST achieves a high accuracy rate while maintaining low memory usage.},
}



