Echo State Networks (ESNs) are stated in literature to use a random structure to project an input sequence into a higher dimensional space where the input becomes linearly separable. However, the linear mathematics used for this projection is incapable of increasing the dimensionality of the input, and the commonly used tanh() activation function tends not to produce much nonlinearity. Therefore, any increase in dimensionality is due to the echoes of the ESN. We introduce Lagged Input Regression Computation to investigate what types of ESN can be replaced with simpler non-randomised structures. We show that tanh()-based ESNs behave as simple linear memory systems, whereas LeakyReLU provides a more effective non-linearity. We also show that the use of certain orthogonal polynomials in defining nonlinear memory capacity benchmarks gives a misleading impression of nonlinearity, due to the relevant high order polynomials nevertheless containing a linear term.
doi:10.1007/978-3-032-15641-9_18
@inproceedings{Griffin:2025-UCNC,
  author   = {Griffin, David and Stovold, James and O'Keefe, Simon and Stepney, Susan},
  title    = {Evaluating {ESN}s Against Lagged Input Regression Computation},
  pages    = {262--276},
  doi      = {10.1007/978-3-032-15641-9_18},
  crossref = {UCNC-2025},
}
@proceedings{UCNC-2025,
  title     = {{UCNC} 2025, Nice, France, September 2025},
  booktitle = {{UCNC} 2025, Nice, France, September 2025},
  series    = {Lecture Notes in Computer Science},
  volume    = {16364},
  publisher = {Springer},
  year      = {2026},
}