
@article{cmes.2025.066984,
  author   = {Jadoon, Yasir Khan and Khalid, Yasir Noman and Khan, Muhammad Attique and Shin, Jungpil and Alhayan, Fatimah and Cho, Hee-Chan and Chang, Byoungchol},
  title    = {A Novel Attention-Based Parallel Blocks Deep Architecture for Human Action Recognition},
  journal  = {Computer Modeling in Engineering \& Sciences},
  year     = {2025},
  volume   = {144},
  number   = {1},
  pages    = {1143--1164},
  issn     = {1526-1506},
  doi      = {10.32604/cmes.2025.066984},
  url      = {http://www.techscience.com/CMES/v144n1/63294},
  abstract = {Real-time surveillance is attributed to recognizing the variety of actions performed by humans. Human Action Recognition (HAR) is a technique that recognizes human actions from a video stream. A range of variations in human actions makes it difficult to recognize with considerable accuracy. This paper presents a novel deep neural network architecture called Attention RB-Net for HAR using video frames. The input is provided to the model in the form of video frames. The proposed deep architecture is based on the unique structuring of residual blocks with several filter sizes. Features are extracted from each frame via several operations with specific parameters defined in the presented novel Attention-based Residual Bottleneck (Attention-RB) DCNN architecture. A fully connected layer receives an attention-based features matrix, and final classification is performed. Several hyperparameters of the proposed model are initialized using Bayesian Optimization (BO) and later utilized in the trained model for testing. In testing, features are extracted from the self-attention layer and passed to neural network classifiers for the final action classification. Two highly cited datasets, HMDB51 and UCF101, were used to validate the proposed architecture and obtained an average accuracy of 87.70\% and 97.30\%, respectively. The deep convolutional neural network (DCNN) architecture is compared with state-of-the-art (SOTA) methods, including pre-trained models, inside blocks, and recently published techniques, and performs better.},
}



