
@Article{cmc.2026.080767,
AUTHOR = {Yingying Yu and Jun Yuan and Tong Liu},
TITLE = {LAH-Net: A Low-Light Aware Hybrid Network for Robotic Manipulation},
JOURNAL = {Computers, Materials \& Continua},
VOLUME = {},
YEAR = {2026},
NUMBER = {},
PAGES = {},
URL = {http://www.techscience.com/cmc/online/detail/26711},
ISSN = {1546-2226},
ABSTRACT = {Accurate grasp detection is fundamental to successful robotic manipulation. Existing methods achieve reliable performance under well-lit conditions, but their performance degrades severely in low-light environments because visual features lose their discriminative power. In this paper, a novel low-light aware hybrid network (LAH-Net) is proposed. It comprises an alternating transformer-CNN module (ATCM) between the encoder and decoder, and a knowledge distillation-guided low-light enhancement module (KDLEM) before the encoder, which is activated by an illumination gate under low-light conditions. To generate highly robust and synergistic features, the ATCM facilitates iterative exchange between the local representations from CNNs and the global contexts modeled by transformers. Additionally, a transformer-to-CNN adapter and a CNN-to-transformer adapter are designed for bidirectional feature alignment. Meanwhile, the KDLEM employs a teacher-student framework in which a simplified student network distills knowledge from a powerful teacher to enhance low-light adaptability while maintaining computational efficiency. Moreover, we introduce a real-world low-light grasp detection dataset (RLGD) for algorithm evaluation, which contains over 70 objects captured under four distinct low-light conditions. Our method achieves accuracies of 99.4% and 98.8% on the Cornell dataset and 96.05% on the Jacquard V2 dataset. It also attains an accuracy of 96.5% on the RLGD dataset and generalizes well across various low-light intensity levels. Real-world experiments in low-light scenarios validate the effectiveness of the proposed method.},
DOI = {10.32604/cmc.2026.080767}
}
