{ "id": "cond-mat/0310205", "version": "v1", "published": "2003-10-09T10:31:45.000Z", "updated": "2003-10-09T10:31:45.000Z", "title": "Influence of topology on the performance of a neural network", "authors": [ "Joaquin J. Torres", "Miguel A. Munoz", "J. Marro", "P. L. Garrido" ], "comment": "6 eps Figures. 6 pages. To appear in Neurocomputing", "journal": "Neurocomputing, vol. 58-60, pag. 229-234 (2004)", "categories": [ "cond-mat.stat-mech", "cond-mat.dis-nn" ], "abstract": "We studied the computational properties of an attractor neural network (ANN) with different network topologies. Though fully connected neural networks exhibit, in general, a good performance, they are biologically unrealistic, as it is unlikely that natural evolution leads to such a large connectivity. We demonstrate that, at finite temperature, the capacity to store and retrieve binary patterns is higher for ANN with scale-free (SF) topology than for highly random-diluted Hopfield networks with the same number of synapses. We also show that, at zero temperature, the relative performance of the SF network increases with increasing values of the distribution power-law exponent. Some consequences and possible applications of our findings are discussed.", "revisions": [ { "version": "v1", "updated": "2003-10-09T10:31:45.000Z" } ], "analyses": { "keywords": [ "performance", "distribution power-law exponent", "attractor neural network", "sf network increases", "retrieve binary patterns" ], "tags": [ "journal article" ], "note": { "typesetting": "TeX", "pages": 6, "language": "en", "license": "arXiv", "status": "editable", "adsabs": "2003cond.mat.10205T" } } }