
@article{cmc.2025.063109,
  author   = {Li, Gang and Zhou, Zheng and Zhang, Yang and Xu, Chuanyun and Ruan, Zihan and Lv, Pengfei and Wang, Ru and Fan, Xinyu and Tan, Wei},
  title    = {Implicit Feature Contrastive Learning for Few-Shot Object Detection},
  journal  = {Computers, Materials \& Continua},
  volume   = {84},
  number   = {1},
  pages    = {1615--1632},
  year     = {2025},
  issn     = {1546-2226},
  doi      = {10.32604/cmc.2025.063109},
  url      = {http://www.techscience.com/cmc/v84n1/61728},
  abstract = {Although conventional object detection methods achieve high accuracy through extensively annotated datasets, acquiring such large-scale labeled data remains challenging and cost-prohibitive in numerous real-world applications. Few-shot object detection presents a new research idea that aims to localize and classify objects in images using only limited annotated examples. However, the inherent challenge in few-shot object detection lies in the insufficient sample diversity to fully characterize the sample feature distribution, which consequently impacts model performance. Inspired by contrastive learning principles, we propose an Implicit Feature Contrastive Learning (IFCL) module to address this limitation and augment feature diversity for more robust representational learning. This module generates augmented support sample features in a mixed feature space and implicitly contrasts them with query Region of Interest (RoI) features. This approach facilitates more comprehensive learning of both intra-class feature similarity and inter-class feature diversity, thereby enhancing the model's object classification and localization capabilities. Extensive experiments on PASCAL VOC show that our method achieves a respective improvement of 3.2\%, 1.8\%, and 2.3\% on 10-shot of three Novel Sets compared to the baseline model FPD.},
}



