@inproceedings{412a7df3c6fc4beaa9fb2dd208e4f1a4,
  title     = {On the learning and convergence of the radial basis networks},
  abstract  = {Although the radial basis networks have been shown to be able to model any ``well behaved'' nonlinear function to any desired accuracy, there is no guarantee that the correct networks weights can be learned using any existing training rule. This paper reports a convergence result for training radial basis networks based on a modified gradient descent training rule, which is the same as the standard gradient descent algorithm except that a deadzone around the origin of the error coordinates is incorporated in the training rule. The result says that, if the deadzone size is large enough to cover the modeling error and if the learning rate is selected within certain range, then the norm of the parameter error will converge to a constant, and the output error between the network and the nonlinear function will converge into a small ball. Simulations are used to verify the theoretical results.},
  author    = {Chen, Fu-Chuang and Lin, Mao Hsing},
  year      = {1993},
  month     = jan,
  day       = {1},
  doi       = {10.1109/ICNN.1993.298691},
  language  = {English},
  series    = {IEEE International Conference on Neural Networks - Conference Proceedings},
  publisher = {Institute of Electrical and Electronics Engineers Inc.},
  pages     = {983--988},
  booktitle = {1993 IEEE International Conference on Neural Networks, ICNN 1993},
  address   = {United States},
  note      = {Conference date: 28-03-1993 Through 01-04-1993},
}