
@article{phyton.2025.068955,
  author   = {Shammi, Sadia Alam and Huang, Yanbo and Xie, Weiwei and Feng, Gary and Tewolde, Haile and Zhang, Xin and Jenkins, Johnie and Shankle, Mark},
  title    = {Modeling and Estimating Soybean Leaf Area Index and Biomass Using Machine Learning Based on Unmanned Aerial Vehicle-Captured Multispectral Images},
  journal  = {Phyton-International Journal of Experimental Botany},
  year     = {2025},
  volume   = {94},
  number   = {9},
  pages    = {2745--2766},
  issn     = {1851-5657},
  doi      = {10.32604/phyton.2025.068955},
  url      = {http://www.techscience.com/phyton/v94n9/63929},
  abstract = {Crop leaf area index (LAI) and biomass are two major biophysical parameters to measure crop growth and health condition. Measuring LAI and biomass in field experiments is a destructive method. Therefore, we focused on the application of unmanned aerial vehicles (UAVs) in agriculture, which is a cost and labor-efficient method. Hence, UAV-captured multispectral images were applied to monitor crop growth, identify plant bio-physical conditions, and so on. In this study, we monitored soybean crops using UAV and field experiments. This experiment was conducted at the MAFES (Mississippi Agricultural and Forestry Experiment Station) Pontotoc Ridge-Flatwoods Branch Experiment Station. It followed a randomized block design with five cover crops: Cereal Rye, Vetch, Wheat, MC: mixed Mustard and Cereal Rye, and native vegetation. Planting was made in the fall, and three fertilizer treatments were applied: Synthetic Fertilizer, Poultry Litter, and none, applied before planting the soybean, in a full factorial combination. We monitored soybean reproductive phases at R3 (initial pod development), R5 (initial seed development), R6 (full seed development), and R7 (initial maturity) and used UAV multispectral remote sensing for soybean LAI and biomass estimations. The major goal of this study was to assess LAI and biomass estimations from UAV multispectral images in the reproductive stages when the development of leaves and biomass was stabilized. We made about fourteen vegetation indices (VIs) from UAV multispectral images at these stages to estimate LAI and biomass. We modeled LAI and biomass based on these remotely sensed VIs and ground-truth measurements using machine learning methods, including linear regression, Random Forest (RF), and support vector regression (SVR). Thereafter, the models were applied to estimate LAI and biomass. According to the model results, LAI was better estimated at the R6 stage and biomass at the R3 stage. Compared to the other models, the RF models showed better estimation, i.e., an $R^2$ of about 0.58--0.68 with an RMSE (root mean square error) of 0.52--0.60 (m$^2$/m$^2$) for the LAI and about 0.44--0.64 for $R^2$ and 21--26 (g dry weight/5 plants) for RMSE of biomass estimation. We performed a leave-one-out cross-validation. Based on cross-validated models with field experiments, we also found that the R6 stage was the best for estimating LAI, and the R3 stage for estimating crop biomass. The cross-validated RF model showed the estimation ability with an $R^2$ about 0.25--0.44 and RMSE of 0.65--0.85 (m$^2$/m$^2$) for LAI estimation; and $R^2$ about 0.1--0.31 and an RMSE of about 28--35 (g dry weight/5 plants) for crop biomass estimation. This result will be helpful to promote the use of non-destructive remote sensing methods to determine the crop LAI and biomass status, which may bring more efficient crop production and management.},
}



