
@article{iasc.2023.039600,
  author   = {Liu, Qi and Li, Jing and Wang, Xianmin and Zhao, Wenpeng},
  title    = {Attentive Neighborhood Feature Augmentation for Semi-supervised Learning},
  journal  = {Intelligent Automation \& Soft Computing},
  year     = {2023},
  volume   = {37},
  number   = {2},
  pages    = {1753--1771},
  issn     = {2326-005X},
  doi      = {10.32604/iasc.2023.039600},
  url      = {http://www.techscience.com/iasc/v37n2/53247},
  abstract = {Recent state-of-the-art semi-supervised learning (SSL) methods usually use data augmentations as core components. Such methods, however, are limited to simple transformations such as the augmentations under the instance's naive representations or the augmentations under the instance's semantic representations. To tackle this problem, we offer a unique insight into data augmentations and propose a novel data-augmentation-based semi-supervised learning method, called Attentive Neighborhood Feature Augmentation (ANFA). The motivation of our method lies in the observation that the relationship between the given feature and its neighborhood may contribute to constructing more reliable transformations for the data, and further facilitating the classifier to distinguish the ambiguous features from the low-dense regions. Specially, we first project the labeled and unlabeled data points into an embedding space and then construct a neighbor graph that serves as a similarity measure based on the similar representations in the embedding space. Then, we employ an attention mechanism to transform the target features into augmented ones based on the neighbor graph. Finally, we formulate a novel semi-supervised loss by encouraging the predictions of the interpolations of augmented features to be consistent with the corresponding interpolations of the predictions of the target features. We carried out experiments on SVHN and CIFAR-10 benchmark datasets and the experimental results demonstrate that our method outperforms the state-of-the-art methods when the number of labeled examples is limited.},
}



