
@Article{cmc.2026.074417,
AUTHOR = {Xu, Chang and Yin, Xianbo and Zhou, Zhiyong and Liu, Bomin},
TITLE = {A {CNN-Transformer} Hybrid Model for Real-Time Recognition of Affective Tactile Biosignals},
JOURNAL = {Computers, Materials \& Continua},
VOLUME = {87},
YEAR = {2026},
NUMBER = {1},
PAGES = {--},
URL = {http://www.techscience.com/cmc/v87n1/66093},
ISSN = {1546-2226},
ABSTRACT = {This study presents a hybrid CNN-Transformer model for real-time recognition of affective tactile biosignals. The proposed framework combines convolutional neural networks (CNNs) to extract spatial and local temporal features with the Transformer encoder that captures long-range dependencies in time-series data through multi-head attention. Model performance was evaluated on two widely used tactile biosignal datasets, HAART and CoST, which contain diverse affective touch gestures recorded from pressure sensor arrays. The CNN-Transformer model achieved recognition rates of 93.33% on HAART and 80.89% on CoST, outperforming existing methods on both benchmarks. By incorporating temporal windowing, the model enables instantaneous prediction, improving generalization across gestures of varying duration. These results highlight the effectiveness of deep learning for tactile biosignal processing and demonstrate the potential of the CNN-Transformer approach for future applications in wearable sensors, affective computing, and biomedical monitoring.},
DOI = {10.32604/cmc.2026.074417}
}



