
@article{cmc.2026.073657,
  author   = {Sun, Lei and Wang, Jingwen and Hu, Peng and Mao, Xiuqing and Hu, Cuiyun and Wang, Zhihong},
  title    = {{IG-3D}: Integrated-Gradients {3D} Optimization for Private {Transformer} Inference},
  journal  = {Computers, Materials \& Continua},
  volume   = {87},
  number   = {2},
  year     = {2026},
  issn     = {1546-2226},
  url      = {http://www.techscience.com/cmc/v87n2/66569},
  doi      = {10.32604/cmc.2026.073657},
  abstract = {Transformer models face significant computational challenges in private inference (PI). Existing optimization methods often rely on isolated techniques, neglecting joint structural and operational improvements. We propose IG-3D, a unified framework that integrates structured compression and operator approximation through accurate importance assessment. Our approach first evaluates attention head importance using Integrated Gradients (IG), offering greater stability and theoretical soundness than gradient-based methods. We then apply a three-dimensional optimization: (1) structurally pruning redundant attention heads; (2) replacing Softmax with adaptive polynomial approximation to avoid exponential computations; (3) implementing layer-wise GELU substitution to accommodate different layer characteristics. A joint threshold mechanism coordinates compression across dimensions under accuracy constraints. Experimental results on the GLUE benchmark show that our method achieves an average 2.9{$\times$} speedup in inference latency and a 50\% reduction in communication cost, while controlling the accuracy loss within 2.3\%, demonstrating significant synergistic effects and a superior accuracy-efficiency trade-off compared to single-technique optimization strategies.},
}



