@article{FasterConvergentANNs_2018,
  title={Faster Convergent Artificial Neural Networks},
  volume={17},
  url={https://rajpub.com/index.php/ijct/article/view/7106},
  DOI={10.24297/ijct.v17i1.7106},
  abstractNote={Proposed in this paper is a novel fast-convergence algorithm for artificial neural networks (ANNs) with a learning rate based on the eigenvalues of the Hessian matrix associated with the input data. That is, the learning rate applied to the backpropagation algorithm changes dynamically with the input data used for training. The best choice of learning rate for fast convergence to an accurate value is derived. This fast-convergence algorithm is applied to a traditional multilayer ANN architecture with feed-forward and backpropagation techniques, and the strategy is evaluated on various functions learned by the ANN through training. Learning curves obtained using learning rates calculated according to the proposed method are compared to learning curves obtained using an arbitrary learning rate to demonstrate the usefulness of the technique. The study shows that convergence to accurate values can be achieved much more quickly (a reduction in iterations by a factor of one hundred) using the techniques proposed here. The approach is illustrated with derivations and pertinent examples of the method and the learning curves obtained.},
  number={1},
  journal={International Journal of Computers \& Technology},
  year={2018},
  month={Jan.},
  pages={7126--7132}
}