
@article{jai.2025.070773,
  author   = {Singh, Amit Prakash and Kaul, Kajal and Chug, Anuradha and Kumar, Ravinder and Shanmugam, Veerubommu},
  title    = {Leveraging Segmentation for Potato Plant Disease Severity Estimation and Classification via {CBAM-EfficientNetB0} Transfer Learning},
  journal  = {Journal on Artificial Intelligence},
  year     = {2025},
  volume   = {7},
  number   = {1},
  pages    = {451--468},
  issn     = {2579-003X},
  doi      = {10.32604/jai.2025.070773},
  url      = {http://www.techscience.com/jai/v7n1/64395},
  abstract = {In agricultural farms in India where the staple diet for most of the households is potato, plant leaf diseases, namely Potato Early Blight (PEB) and Potato Late Blight (PLB), are quite common. The class label Plant Healthy (PH) is also used. If these diseases are not identified early, they can cause massive crop loss and thereby incur huge economic losses to the farmers in the agricultural domain and can impact the gross domestic product of the nation. This paper presents a hybrid approach for potato plant disease severity estimation and classification of diseased and healthy leaves, combining the strengths of classical image processing, computer vision, and deep learning. We propose a pipeline that initially employs OpenCV's cv2 led color-based image segmentation to isolate and highlight diseased brown, yellow-colored lesions or regions and healthy green colored lesion areas associated with various potato leaf diseases. Adaptive Thresholding for illumination and texture feature extraction and U-Net Segmentation for mask refinement for severity estimation. It has a mathematical framework for quantifying the severity based on the spatial area distribution of these regions. This allows for both visual representation of the segmented regions in the form of overlay masks and quantification of distinct disease severity. We detail the implementation of the approach, including color space selection, segmentation strategies, mask creation, area calculation, and a potential mathematical model for severity calculation. Overlay masks generated are then used as input to a CBAM-EfficientNetB0 model, leveraging transfer learning for improved classification accuracy and efficiency. For the Plant Village dataset, the test accuracy achieved is 0.99, whereas the test loss is 0.02, respectively. For the Plant Doc dataset, the test accuracy achieved is 0.97, whereas the test loss is 0.06, respectively. Also, the CBAM attention mechanism model lays emphasis on relevant features within the lesions and overall image context. The results achieved with the Plant Village dataset are slightly better in comparison to the Plant Doc dataset.},
}



