@Article{cmc.2023.034540,
  AUTHOR = {Kamalrulnizam Bin Abu Bakar and Fatima Tul Zuhra and Babangida Isyaku and Fuad A. Ghaleb},
  TITLE = {Optimized Identification with Severity Factors of Gastric Cancer for Internet of Medical Things},
  JOURNAL = {Computers, Materials \& Continua},
  VOLUME = {75},
  YEAR = {2023},
  NUMBER = {1},
  PAGES = {785--798},
  URL = {http://www.techscience.com/cmc/v75n1/51465},
  ISSN = {1546-2226},
  ABSTRACT = {The Internet of Medical Things (IoMT) emerges with the vision of the Wireless Body Sensor Network (WBSN) to improve health monitoring systems and has an enormous impact on healthcare by recognizing levels of risk/severity factors (premature diagnosis, treatment, and supervision of chronic diseases, i.e., cancer) via wearable/electronic health sensors, i.e., the wireless endoscopic capsule. AI-assisted endoscopy plays a significant role in the detection of gastric cancer. Convolutional Neural Networks (CNNs) have been widely used to diagnose gastric cancer based on various feature extraction models, which consequently limits identification and categorization performance in terms of the cancerous stages and grades associated with each type of gastric cancer. This paper proposes an optimized AI-based approach to diagnose and assess the risk factor of gastric cancer based on its type, stage, and grade in endoscopic images for smart healthcare applications. The proposed method consists of five phases: image pre-processing, Four-Dimensional (4D) image conversion, image segmentation, K-Nearest Neighbour (K-NN) classification, and multi-grading and staging of image intensities. Moreover, the performance of the proposed method was evaluated on two different datasets consisting of color and black-and-white endoscopic images. The simulation results verified that the proposed approach is capable of perceiving gastric cancer with 88.09% sensitivity, 95.77% specificity, and 96.55% overall accuracy.},
  DOI = {10.32604/cmc.2023.034540}
}