@article{eld15,
  author   = {Eld{\'e}n, Lars},
  title    = {Computing {Fr{\'e}chet} Derivatives in Partial Least Squares Regression},
  journal  = {Linear Algebra and its Applications},
  year     = {2015},
  volume   = {473},
  pages    = {316--338},
  doi      = {10.1016/j.laa.2014.09.017},
  url      = {http://www.sciencedirect.com/science/article/pii/S0024379514006028},
  keywords = {Partial Least Squares, PLS, regression, least squares, prediction, Golub-Kahan bidiagonalization, Krylov method, Fr{\'e}chet derivative, recursion, perturbation theory, degrees of freedom},
  abstract = {Partial least squares is a common technique for multivariate regression. The procedure is recursive and in each step basis vectors are computed for the explaining variables and the solution vectors. A linear model is fitted by projection onto the span of the basis vectors. The procedure is mathematically equivalent to Golub-Kahan bidiagonalization, which is a Krylov method, and which is equivalent to a pair of matrix factorizations. The vectors of regression coefficients and prediction are non-linear functions of the right hand side. An algorithm for computing the Fr{\'e}chet derivatives of these functions is derived, based on perturbation theory for the matrix factorizations. From the Fr{\'e}chet derivative of the prediction vector one can compute the number of degrees of freedom, which can be used as a stopping criterion for the recursion. A few numerical examples are given.},
}