@article{f91ee175ca174f75a90fd9ab0bbb1d1e,
title = "OFFO minimization algorithms for second-order optimality and their complexity",
abstract = "An Adagrad-inspired class of algorithms for smooth unconstrained optimization is presented in which the objective function is never evaluated and yet the gradient norms decrease at least as fast as O(1/\sqrt{k+1}) while second-order optimality measures converge to zero at least as fast as O(1/(k+1)^{1/3}). This latter rate of convergence is shown to be essentially sharp and is identical to that known for more standard algorithms (like trust-region or adaptive-regularization methods) using both function and derivatives' evaluations. A related ``divergent stepsize'' method is also described, whose essentially sharp rate of convergence is slightly inferior. It is finally discussed how to obtain weaker second-order optimality guarantees at a (much) reduced computational cost.",
keywords = "Adagrad, Evaluation complexity, Global rate of convergence, Objective-function-free optimization (OFFO), Second-order optimality",
author = "Serge Gratton and Philippe L. Toint",
note = "Funding Information: S. Gratton: Work partially supported by 3IA Artificial and Natural Intelligence Toulouse Institute (ANITI), French {"}Investing for the Future - PIA3{"} program under the Grant agreement ANR-19-PI3A-0004. Ph. L. Toint: Work partially supported by ANITI. Publisher Copyright: {\textcopyright} 2022, The Author(s), under exclusive licence to Springer Science+Business Media, LLC, part of Springer Nature.",
year = "2023",
month = feb,
day = "15",
doi = "10.1007/s10589-022-00435-2",
language = "English",
volume = "84",
pages = "573--607",
journal = "Computational Optimization and Applications",
issn = "0926-6003",
publisher = "Springer Netherlands",
number = "2",
}