
@article{jiot.2025.060786,
  author   = {Bahmani, Alireza},
  title    = {Optimization of Machine Learning Methods for Intrusion Detection in {IoT}},
  journal  = {Journal on Internet of Things},
  volume   = {7},
  number   = {1},
  pages    = {1--17},
  year     = {2025},
  issn     = {2579-0080},
  doi      = {10.32604/jiot.2025.060786},
  url      = {http://www.techscience.com/jiot/v7n1/62471},
  abstract = {With the development of the Internet of Things (IoT) technology and its widespread integration in various aspects of life, the risks associated with cyberattacks on these systems have increased significantly. Vulnerabilities in IoT devices, stemming from insecure designs and software weaknesses, have made attacks on them more complex and dangerous compared to traditional networks. Conventional intrusion detection systems are not fully capable of identifying and managing these risks in the IoT environment, making research and evaluation of suitable intrusion detection systems for IoT crucial. In this study, deep learning, multi-layer perceptron (MLP), Random Forest (RF), Extreme Gradient Boosting (XGBoost), and their extensions were implemented to identify the most effective method. The results of the experiments conducted in this research demonstrate that while deep learning methods with regularization and dropout techniques attained an accuracy of 0.88, the optimized RF classifier using HalvingGridSearchCV achieved the highest accuracy of 0.91. The study also emphasized the importance of balancing accuracy and computational efficiency, especially in resource-constrained IoT environments, where the optimized RF classifier with acceptable computational time emerged as a practical solution. These methods were evaluated on the CICIoT2023 dataset. These findings contribute valuable insights for future research in the field of IoT security and underscore the potential of optimized machine learning techniques in enhancing intrusion detection capabilities.},
}



