@article{cmes.2022.019378,
  author   = {Mei, Siyuan and Chen, Yingyi and Qin, Hanxiang and Yu, Huihui
              and Li, Daoliang and Sun, Boyang and Yang, Ling and Liu, Yeqi},
  title    = {A Method Based on Knowledge Distillation for Fish School Stress
              State Recognition in Intensive Aquaculture},
  journal  = {Computer Modeling in Engineering \& Sciences},
  year     = {2022},
  volume   = {131},
  number   = {3},
  pages    = {1315--1335},
  doi      = {10.32604/cmes.2022.019378},
  issn     = {1526-1506},
  url      = {http://www.techscience.com/CMES/v131n3/47389},
  abstract = {Fish behavior analysis for recognizing stress is very important
              for fish welfare and production management in aquaculture. Recent
              advances have been made in fish behavior analysis based on deep
              learning. However, most existing methods with top performance
              rely on considerable memory and computational resources, which is
              impractical in the real-world scenario. In order to overcome the
              limitations of these methods, a new method based on knowledge
              distillation is proposed to identify the stress states of fish
              schools. The knowledge distillation architecture transfers
              additional inter-class information via a mixed relative loss
              function, and it forces a lightweight network (GhostNet) to mimic
              the soft probabilities output of a well-trained fish stress state
              recognition network (ResNeXt101). The fish school stress state
              recognition model's accuracy is improved from 94.17\% to 98.12\%
              benefiting from the method. The proposed model has about 5.18 M
              parameters and requires 0.15 G FLOPs (floating-point operations)
              to process an image of size 224 $\times$ 224. Furthermore, fish
              behavior images are collected in a land-based factory, and a
              dataset is constructed and extended through flip, rotation, and
              color jitter augmentation techniques. The proposed method is also
              compared with other state-of-the-art methods. The experimental
              results show that the proposed model is more suitable for
              deployment on resource-constrained devices or real-time
              applications, and it is conducive for real-time monitoring of
              fish behavior.},
}