
@article{cmc.2025.059224,
  author   = {Abbas, Yawar and Alarfaj, Aisha Ahmed and Alabdulqader, Ebtisam Abdullah and Algarni, Asaad and Jalal, Ahmad and Liu, Hui},
  title    = {Drone-Based Public Surveillance Using {3D} Point Clouds and {Neuro-Fuzzy} Classifier},
  journal  = {Computers, Materials \& Continua},
  volume   = {82},
  number   = {3},
  pages    = {4759--4776},
  year     = {2025},
  issn     = {1546-2226},
  doi      = {10.32604/cmc.2025.059224},
  url      = {http://www.techscience.com/cmc/v82n3/59891},
  abstract = {Human Activity Recognition (HAR) in drone-captured videos has become popular because of the interest in various fields such as video surveillance, sports analysis, and human-robot interaction. However, recognizing actions from such videos poses the following challenges: variations of human motion, the complexity of backdrops, motion blurs, occlusions, and restricted camera angles. This research presents a human activity recognition system to address these challenges by working with drones' red-green-blue (RGB) videos. The first step in the proposed system involves partitioning videos into frames and then using bilateral filtering to improve the quality of object foregrounds while reducing background interference before converting from RGB to grayscale images. The YOLO (You Only Look Once) algorithm detects and extracts humans from each frame, obtaining their skeletons for further processing. The joint angles, displacement and velocity, histogram of oriented gradients (HOG), 3D points, and geodesic Distance are included. These features are optimized using Quadratic Discriminant Analysis (QDA) and utilized in a Neuro-Fuzzy Classifier (NFC) for activity classification. Real-world evaluations on the Drone-Action, Unmanned Aerial Vehicle (UAV)-Gesture, and Okutama-Action datasets substantiate the proposed system's superiority in accuracy rates over existing methods. In particular, the system obtains recognition rates of 93\% for drone action, 97\% for UAV gestures, and 81\% for Okutama-action, demonstrating the system's reliability and ability to learn human activity from drone videos.},
}



