
@article{cmc.2020.010522,
  author   = {Liu, Panyu and Ren, Huilin and Shi, Xiaojun and Li, Yangyang
              and Cai, Zhiping and Liu, Fang and Zeng, Huacheng},
  title    = {{MoTransFrame}: Model Transfer Framework for {CNNs} on
              Low-Resource Edge Computing Node},
  journal  = {Computers, Materials \& Continua},
  year     = {2020},
  volume   = {65},
  number   = {3},
  pages    = {2321--2334},
  issn     = {1546-2226},
  doi      = {10.32604/cmc.2020.010522},
  url      = {http://www.techscience.com/cmc/v65n3/40172},
  abstract = {Deep learning technology has been widely used in computer vision, speech
              recognition, natural language processing, and other related fields. The deep learning
              algorithm has high precision and high reliability. However, the lack of resources in the edge
              terminal equipment makes it difficult to run deep learning algorithms that require more
              memory and computing power. In this paper, we propose MoTransFrame, a general model
              processing framework for deep learning models. Instead of designing a model compression
              algorithm with a high compression ratio, MoTransFrame can transplant popular convolutional
              neural networks models to resources-starved edge devices promptly and accurately. By the
              integration method, deep learning models can be converted into portable projects for Arduino,
              a typical edge device with limited resources. Our experiments show that MoTransFrame has
              good adaptability in edge devices with limited memories. It is more flexible than other model
              transplantation methods. It can keep a small loss of model accuracy when the number of
              parameters is compressed by tens of times. At the same time, the computational resources
              needed in the reasoning process are less than what the edge node could handle.},
}



