
@Article{cmc.2026.076676,
AUTHOR = {Li, Jianbin and Bao, Hang and Tong, Xin},
TITLE = {{SubPFed}: A Personalized Federated Learning Approach with Subgraphs},
JOURNAL = {Computers, Materials \& Continua},
YEAR = {2026},
URL = {http://www.techscience.com/cmc/online/detail/26445},
ISSN = {1546-2226},
ABSTRACT = {The proliferation of large-scale graph data has enabled Graph Neural Networks (GNNs) to achieve significant success in domains such as recommender systems, social network analysis, and biomedicine. However, in practical networked environments, particularly in distributed service infrastructures, graph data is often isolated between multiple edge smart devices and cannot be shared due to privacy, making GNN models weak in generalization. Subgraph Federated Learning (SFL) mitigates this challenge by treating local client data as subgraphs of the global graph to decentralized GNN training. Unfortunately, client-side missing edges make GNN model difficult to capture dependency information between subgraphs, and local heterogeneous data hinders global model convergence, thereby limiting the performance of federated GNN model. To address this, we propose SubPFed, a personalized federated learning approach tailored for subgraph-based training. SubPFed computes the functional embeddings of local GNNs using random graph inputs. It then estimates subgraph similarity by weighting these embeddings and the structural information of the overlapping nodes. Finally, a personalized weighted aggregation strategy is designed based on the similarity to enhance representation consistency across clients and mitigate data heterogeneity. Experiments on three real-world graph datasets show that SubPFed consistently outperforms state-of-the-art baselines, improving node classification accuracy by 4.28\% to 26.50\%. Furthermore, SubPFed demonstrates strong robustness under varying subgraph overlap ratios, underscoring its adaptability and scalability in federated graph learning scenarios.},
DOI = {10.32604/cmc.2026.076676}
}



