
@article{cmc.2024.049640,
  author   = {Li, Haiyue and Xie, Jing and Ke, Jing and Yuan, Ye and Pan, Xiaoyong and Xin, Hongyi and Shen, Hongbin},
  title    = {{GAN-DIRNet}: A Novel Deformable Image Registration Approach for Multimodal Histological Images},
  journal  = {Computers, Materials \& Continua},
  volume   = {80},
  number   = {1},
  pages    = {487--506},
  year     = {2024},
  issn     = {1546-2226},
  doi      = {10.32604/cmc.2024.049640},
  url      = {http://www.techscience.com/cmc/v80n1/57362},
  abstract = {Multi-modal histological image registration tasks pose significant challenges due to tissue staining operations causing partial loss and folding of tissue. Convolutional neural network (CNN) and generative adversarial network (GAN) are pivotal in medical image registration. However, existing methods often struggle with severe interference and deformation, as seen in histological images of conditions like Cushing's disease. We argue that the failure of current approaches lies in underutilizing the feature extraction capability of the discriminator in GAN. In this study, we propose a novel multi-modal registration approach GAN-DIRNet based on GAN for deformable histological image registration. To begin with, the discriminators of two GANs are embedded as a new dual parallel feature extraction module into the unsupervised registration networks, characterized by implicitly extracting feature descriptors of specific modalities. Additionally, modal feature description layers and registration layers collaborate in unsupervised optimization, facilitating faster convergence and more precise results. Lastly, experiments and evaluations were conducted on the registration of the Mixed National Institute of Standards and Technology database (MNIST), eight publicly available datasets of histological sections and the Clustering-Registration-Classification-Segmentation (CRCS) dataset on the Cushing's disease. Experimental results demonstrate that our proposed GAN-DIRNet method surpasses existing approaches like DIRNet in terms of both registration accuracy and time efficiency, while also exhibiting robustness across different image types.},
}



