
Entry checked 2024: author list converted from comma-separated (unparseable by
BibTeX) to "Last, First and ..." form; "ø" encoded as {\o} for classic BibTeX;
"%" escaped in abstract (otherwise comments out the rest of each output line);
"{K}-Nearest" braced against sentence-casing styles. "Decent" is [sic] from the
published title.
@article{cmc.2020.010306,
  author   = {Zhang, Chuqing and Yao, Jiangyuan and Hu, Guangwu and Sch{\o}tt, Thomas},
  title    = {Applying Feature-Weighted Gradient Decent {K-Nearest} Neighbor to Select Promising Projects for Scientific Funding},
  journal  = {Computers, Materials \& Continua},
  volume   = {64},
  number   = {3},
  pages    = {1741--1753},
  year     = {2020},
  issn     = {1546-2226},
  doi      = {10.32604/cmc.2020.010306},
  url      = {http://www.techscience.com/cmc/v64n3/39456},
  abstract = {Due to its outstanding ability in processing large quantity and high-dimensional
data, machine learning models have been used in many cases, such as pattern recognition,
classification, spam filtering, data mining and forecasting. As an outstanding machine
learning algorithm, K-Nearest Neighbor (KNN) has been widely used in different situations,
yet in selecting qualified applicants for winning a funding is almost new. The major problem
lies in how to accurately determine the importance of attributes. In this paper, we propose a
Feature-weighted Gradient Decent K-Nearest Neighbor (FGDKNN) method to classify
funding applicants into two types: approved ones or not approved ones. The FGDKNN is
based on a gradient decent learning algorithm to update weight. It updates the weight of labels
by minimizing error ratio iteratively, so that the importance of attributes can be described
better. We investigate the performance of FGDKNN with Beijing Innofund. The results show
that FGDKNN performs about 23\%, 20\%, 18\%, 15\% better than KNN, SVM, DT and ANN,
respectively. Moreover, the FGDKNN has fast convergence time under different training
scales, and has good performance under different settings.},
}



