
@article{cmc.2025.059797,
  author   = {Wu, Can and Tang, Wenyi and Rao, Yunbo and Chen, Yinjie and Ding, Hui and Zhu, Shuzhen and Wang, Yuanyuan},
  title    = {An Uncertainty Quantization-Based Method for {Anti-UAV} Detection in Infrared Images},
  journal  = {Computers, Materials \& Continua},
  volume   = {83},
  year     = {2025},
  number   = {1},
  pages    = {1415--1434},
  url      = {http://www.techscience.com/cmc/v83n1/60078},
  issn     = {1546-2226},
  abstract = {Infrared unmanned aerial vehicle (UAV) target detection presents significant challenges due to the interplay between small targets and complex backgrounds. Traditional methods, while effective in controlled environments, often fail in scenarios involving long-range targets, high noise levels, or intricate backgrounds, highlighting the need for more robust approaches. To address these challenges, we propose a novel three-stage UAV segmentation framework that leverages uncertainty quantification to enhance target saliency. This framework incorporates a Bayesian convolutional neural network capable of generating both segmentation maps and probabilistic uncertainty maps. By utilizing uncertainty predictions, our method refines segmentation outcomes, achieving superior detection accuracy. Notably, this marks the first application of uncertainty modeling within the context of infrared UAV target detection. Experimental evaluations on three publicly available infrared UAV datasets demonstrate the effectiveness of the proposed framework. The results reveal significant improvements in both detection precision and robustness when compared to state-of-the-art deep learning models. Our approach also extends the capabilities of encoder-decoder convolutional neural networks by introducing uncertainty modeling, enabling the network to better handle the challenges posed by small targets and complex environmental conditions. By bridging the gap between theoretical uncertainty modeling and practical detection tasks, our work offers a new perspective on enhancing model interpretability and performance. The codes of this work are available openly at (accessed on 11 November 2024).},
  doi      = {10.32604/cmc.2025.059797},
}



