
@article{cmc.2025.065899,
  author   = {Alazeb, Abdulwahab and Hanzla, Muhammad and Al Mudawi, Naif and Alshehri, Mohammed and Alhasson, Haifa F. and AlHammadi, Dina Abdulaziz and Jalal, Ahmad},
  title    = {Nighttime Intelligent {UAV}-Based Vehicle Detection and Classification Using {YOLOv10} and {Swin Transformer}},
  journal  = {Computers, Materials \& Continua},
  volume   = {84},
  number   = {3},
  pages    = {4677--4697},
  year     = {2025},
  issn     = {1546-2226},
  doi      = {10.32604/cmc.2025.065899},
  url      = {http://www.techscience.com/cmc/v84n3/63174},
  abstract = {Unmanned Aerial Vehicles (UAVs) have become indispensable for intelligent traffic monitoring, particularly in low-light conditions, where traditional surveillance systems struggle. This study presents a novel deep learning-based framework for nighttime aerial vehicle detection and classification that addresses critical challenges of poor illumination, noise, and occlusions. Our pipeline integrates MSRCR enhancement with OPTICS segmentation to overcome low-light challenges, while YOLOv10 enables accurate vehicle localization. The framework employs GLOH and Dense-SIFT for discriminative feature extraction, optimized using the Whale Optimization Algorithm to enhance classification performance. A Swin Transformer-based classifier provides the final categorization, leveraging hierarchical attention mechanisms for robust performance. Extensive experimentation validates our approach, achieving detection mAP@0.5 scores of 91.5\% (UAVDT) and 89.7\% (VisDrone), alongside classification accuracies of 95.50\% and 92.67\%, respectively. These results outperform state-of-the-art methods by up to 5.10\% in accuracy and 4.2\% in mAP, demonstrating the framework's effectiveness for real-time aerial surveillance and intelligent traffic management in challenging nighttime environments.},
}



