aligator 0.10.0
A primal-dual augmented Lagrangian-type solver for nonlinear trajectory optimization.
linesearch-nonmonotone.hpp
#pragma once

#include <proxsuite-nlp/linesearch-base.hpp>
#include <algorithm> // std::max
#include <functional>

namespace aligator { // main package namespace
using proxsuite::nlp::Linesearch;
using proxsuite::nlp::LinesearchStrategy;

/// @brief Nonmonotone linesearch algorithm. Modifies the Armijo condition
/// with a moving average of function values.
template <typename Scalar> struct NonmonotoneLinesearch : Linesearch<Scalar> {
  using typename Linesearch<Scalar>::FunctionSample;
  using typename Linesearch<Scalar>::Options;
  using fun_t = std::function<Scalar(Scalar)>;

  Scalar run(fun_t f, Scalar phi0, Scalar dphi0, Scalar &a_opt);
  NonmonotoneLinesearch(const Options &options);

  void reset() {
    mov_avg = Scalar(0.);
    avg_weight = Scalar(0.);
  }

  /// Weight for moving average.
  Scalar avg_eta = 0.85;
  /// Step-size decrease factor.
  Scalar beta_dec = 0.5;

private:
  Scalar mov_avg;
  Scalar avg_weight;
};

template <typename Scalar>
NonmonotoneLinesearch<Scalar>::NonmonotoneLinesearch(const Options &options)
    : Linesearch<Scalar>(options), mov_avg(0.), avg_weight(0.) {}

template <typename Scalar>
Scalar NonmonotoneLinesearch<Scalar>::run(fun_t f, Scalar phi0, Scalar dphi0,
                                          Scalar &a_opt) {
  const Options &opts = this->options_;
  // Update the moving average of merit values:
  //   C_k = (eta * Q_{k-1} * C_{k-1} + phi(0)) / Q_k, with Q_k = eta * Q_{k-1} + 1.
  mov_avg = avg_eta * avg_weight * mov_avg + phi0;
  avg_weight = avg_eta * avg_weight + 1;
  mov_avg /= avg_weight;

  while (a_opt > opts.alpha_min) {
    try {
      const Scalar phia = f(a_opt);
      // Nonmonotone Armijo condition: sufficient decrease is measured against
      // the moving average rather than phi(0).
      bool suff_decrease = phia <= mov_avg + opts.armijo_c1 * a_opt * dphi0;
      if (suff_decrease)
        return phia;
    } catch (const std::runtime_error &) {
      // Evaluation failed; shrink the step and retry.
    }
    a_opt *= beta_dec;
  }

  // Here a_opt <= opts.alpha_min: clamp to the minimum step and evaluate.
  a_opt = std::max(a_opt, opts.alpha_min);
  return f(a_opt);
}

} // namespace aligator
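To make the acceptance rule concrete, here is a small standalone snippet that replays the moving-average recurrence from run() for a few hypothetical merit values. The numbers are illustrative only; the update rule is the same three lines as in the header, with the default avg_eta = 0.85.

// Standalone replay of the moving-average recurrence used in run() above.
// The merit values below are hypothetical; only the update rule is taken
// from the header.
#include <cstdio>

int main() {
  const double eta = 0.85; // default avg_eta
  double mov_avg = 0.0;
  double avg_weight = 0.0;
  const double merits[] = {1.0, 1.4, 0.9, 1.1}; // hypothetical phi(0) values

  for (double phi0 : merits) {
    mov_avg = eta * avg_weight * mov_avg + phi0;
    avg_weight = eta * avg_weight + 1;
    mov_avg /= avg_weight;
    // run() tests phi(a) <= mov_avg + armijo_c1 * a * dphi0, so a trial step
    // can be accepted even if it does not decrease phi relative to phi(0).
    std::printf("phi0 = %.2f -> acceptance reference C_k = %.4f\n", phi0,
                mov_avg);
  }
  return 0;
}

Because C_k averages over past merit values, occasional increases of the merit function can still pass the test, which is the point of a nonmonotone strategy.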
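For completeness, a minimal usage sketch follows. It is hypothetical and not taken from aligator's tests: it assumes the Options type inherited from proxsuite-nlp is default-constructible and exposes the alpha_min and armijo_c1 fields referenced in run(), and it drives the line search on a toy quadratic merit phi(a) = (a - 1)^2.

// Hypothetical usage sketch; the include path may differ depending on how
// the header is installed.
#include "linesearch-nonmonotone.hpp"
#include <cstdio>

int main() {
  using LS = aligator::NonmonotoneLinesearch<double>;

  LS::Options opts;      // assumed default-constructible (from proxsuite-nlp)
  opts.alpha_min = 1e-6; // smallest admissible step
  opts.armijo_c1 = 1e-4; // Armijo slope parameter used in run()
  LS linesearch(opts);

  // Toy 1-D merit model phi(a) = (a - 1)^2 along the search direction.
  auto phi = [](double a) { return (a - 1.0) * (a - 1.0); };
  double phi0 = phi(0.0); // merit at the current iterate
  double dphi0 = -2.0;    // directional derivative d/da phi at a = 0
  double a_opt = 1.0;     // initial trial step, shrunk by run() if needed

  double phia = linesearch.run(phi, phi0, dphi0, a_opt);
  std::printf("accepted step a = %g with merit %g\n", a_opt, phia);
  return 0;
}

In a full solver loop one would presumably call reset() at the start of each solve so that the moving average does not carry over stale merit values.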