{ "id": "1904.10566", "version": "v1", "published": "2019-04-23T23:24:39.000Z", "updated": "2019-04-23T23:24:39.000Z", "title": "Time-Varying Matrix Eigenanalyses via Zhang Neural Networks and Look-Ahead Finite Difference Equations", "authors": [ "Frank Uhlig", "Yunong Zhang" ], "categories": [ "math.NA" ], "abstract": "This paper adapts look-ahead and backward finite difference formulas to compute future eigenvectors and eigenvalues of piecewise smooth time-varying symmetric matrix flows $A(t)$. It is based on the Zhang Neural Network (ZNN) model for time-varying problems and uses the associated error function $E(t) = A(t)V(t) - V(t) D(t)$ or $e_i(t) = A(t)v_i(t) -\\lambda_i(t)v_i(t)$ with the Zhang design stipulation that $\\dot E(t) = - \\eta E(t)$ or $\\dot e_i(t) = - \\eta e_i(t)$ with $\\eta > 0$ so that $E(t)$ and $e(t)$ decrease exponentially over time. This leads to a discrete-time differential equation of the form $P(t_k) \\dot z(t_k) = q(t_k)$ for the eigendata vector $z(t_k)$ of $A(t_k)$. Convergent look-ahead finite difference formulas of varying error orders then allow us to express $z(t_{k+1})$ in terms of earlier $A$ and $z$ data. Numerical tests, comparisons and open questions complete the paper.", "revisions": [ { "version": "v1", "updated": "2019-04-23T23:24:39.000Z" } ], "analyses": { "subjects": [ "65H17", "65L12", "65F15", "65Q10", "92B20" ], "keywords": [ "look-ahead finite difference equations", "zhang neural network", "time-varying matrix eigenanalyses", "look-ahead finite difference formulas", "smooth time-varying symmetric matrix" ], "note": { "typesetting": "TeX", "pages": 0, "language": "en", "license": "arXiv", "status": "editable" } } }