sqpmethod.cpp
1 /*
2  * This file is part of CasADi.
3  *
4  * CasADi -- A symbolic framework for dynamic optimization.
5  * Copyright (C) 2010-2023 Joel Andersson, Joris Gillis, Moritz Diehl, Kobe Bergmans
6  * KU Leuven. All rights reserved.
7  * Copyright (C) 2011-2014 Greg Horn
8  *
9  * CasADi is free software; you can redistribute it and/or
10  * modify it under the terms of the GNU Lesser General Public
11  * License as published by the Free Software Foundation; either
12  * version 3 of the License, or (at your option) any later version.
13  *
14  * CasADi is distributed in the hope that it will be useful,
15  * but WITHOUT ANY WARRANTY; without even the implied warranty of
16  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17  * Lesser General Public License for more details.
18  *
19  * You should have received a copy of the GNU Lesser General Public
20  * License along with CasADi; if not, write to the Free Software
21  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
22  *
23  */
24 
25 #include "sqpmethod.hpp"
26 
27 #include "casadi/core/casadi_misc.hpp"
28 #include "casadi/core/calculus.hpp"
29 #include "casadi/core/conic.hpp"
30 #include "casadi/core/conic_impl.hpp"
31 #include "casadi/core/convexify.hpp"
32 
33 #include <ctime>
34 #include <iomanip>
35 #include <fstream>
36 #include <cmath>
37 #include <cfloat>
38 
39 namespace casadi {
40 
41 extern "C"
42 int CASADI_NLPSOL_SQPMETHOD_EXPORT
43  casadi_register_nlpsol_sqpmethod(Nlpsol::Plugin* plugin) {
44  plugin->creator = Sqpmethod::creator;
45  plugin->name = "sqpmethod";
46  plugin->doc = Sqpmethod::meta_doc.c_str();
47  plugin->version = CASADI_VERSION;
48  plugin->options = &Sqpmethod::options_;
49  plugin->deserialize = &Sqpmethod::deserialize;
50  return 0;
51 }
52 
53 extern "C"
54 void CASADI_NLPSOL_SQPMETHOD_EXPORT casadi_load_nlpsol_sqpmethod() {
56 }
57 
// Constructor: just forwards to the Nlpsol base class.
// All option processing and allocation happens later, in init().
Sqpmethod::Sqpmethod(const std::string& name, const Function& nlp)
  : Nlpsol(name, nlp) {
}
61 
63  clear_mem();
64 }
65 
67 = {{&Nlpsol::options_},
68  {{"qpsol",
69  {OT_STRING,
70  "The QP solver to be used by the SQP method [qpoases]"}},
71  {"qpsol_options",
72  {OT_DICT,
73  "Options to be passed to the QP solver"}},
74  {"hessian_approximation",
75  {OT_STRING,
76  "limited-memory|exact"}},
77  {"max_iter",
78  {OT_INT,
79  "Maximum number of SQP iterations"}},
80  {"min_iter",
81  {OT_INT,
82  "Minimum number of SQP iterations"}},
83  {"max_iter_ls",
84  {OT_INT,
85  "Maximum number of linesearch iterations"}},
86  {"tol_pr",
87  {OT_DOUBLE,
88  "Stopping criterion for primal infeasibility"}},
89  {"tol_du",
90  {OT_DOUBLE,
91  "Stopping criterion for dual infeasability"}},
92  {"c1",
93  {OT_DOUBLE,
94  "Armijo condition, coefficient of decrease in merit"}},
95  {"beta",
96  {OT_DOUBLE,
97  "Line-search parameter, restoration factor of stepsize"}},
98  {"merit_memory",
99  {OT_INT,
100  "Size of memory to store history of merit function values"}},
101  {"lbfgs_memory",
102  {OT_INT,
103  "Size of L-BFGS memory."}},
104  {"print_header",
105  {OT_BOOL,
106  "Print the header with problem statistics"}},
107  {"print_iteration",
108  {OT_BOOL,
109  "Print the iterations"}},
110  {"print_status",
111  {OT_BOOL,
112  "Print a status message after solving"}},
113  {"min_step_size",
114  {OT_DOUBLE,
115  "The size (inf-norm) of the step size should not become smaller than this."}},
116  {"hess_lag",
117  {OT_FUNCTION,
118  "Function for calculating the Hessian of the Lagrangian (autogenerated by default)"}},
119  {"jac_fg",
120  {OT_FUNCTION,
121  "Function for calculating the gradient of the objective and Jacobian of the constraints "
122  "(autogenerated by default)"}},
123  {"convexify_strategy",
124  {OT_STRING,
125  "NONE|regularize|eigen-reflect|eigen-clip. "
126  "Strategy to convexify the Lagrange Hessian before passing it to the solver."}},
127  {"convexify_margin",
128  {OT_DOUBLE,
129  "When using a convexification strategy, make sure that "
130  "the smallest eigenvalue is at least this (default: 1e-7)."}},
131  {"max_iter_eig",
132  {OT_DOUBLE,
133  "Maximum number of iterations to compute an eigenvalue decomposition (default: 50)."}},
134  {"elastic_mode",
135  {OT_BOOL,
136  "Enable the elastic mode which is used when the QP is infeasible (default: false)."}},
137  {"gamma_0",
138  {OT_DOUBLE,
139  "Starting value for the penalty parameter of elastic mode (default: 1)."}},
140  {"gamma_max",
141  {OT_DOUBLE,
142  "Maximum value for the penalty parameter of elastic mode (default: 1e20)."}},
143  {"gamma_1_min",
144  {OT_DOUBLE,
145  "Minimum value for gamma_1 (default: 1e-5)."}},
146  {"second_order_corrections",
147  {OT_BOOL,
148  "Enable second order corrections. "
149  "These are used when a step is considered bad by the merit function and constraint norm "
150  "(default: false)."}},
151  {"init_feasible",
152  {OT_BOOL,
153  "Initialize the QP subproblems with a feasible initial value (default: false)."}}
154  }
155 };
156 
// Initialize the solver: process options, generate/collect the required NLP
// derivative functions, instantiate the QP solver(s) and size the work
// vectors. Called once, before any call to solve().
void Sqpmethod::init(const Dict& opts) {
  // Call the init method of the base class
  Nlpsol::init(opts);

  // Default options
  min_iter_ = 0;
  max_iter_ = 50;
  max_iter_ls_ = 3;
  c1_ = 1e-4;
  beta_ = 0.8;
  merit_memsize_ = 4;
  lbfgs_memory_ = 10;
  tol_pr_ = 1e-6;
  tol_du_ = 1e-6;
  std::string hessian_approximation = "exact";
  min_step_size_ = 1e-10;
  std::string qpsol_plugin = "qpoases";
  Dict qpsol_options;
  print_header_ = true;
  print_iteration_ = true;
  print_status_ = true;
  elastic_mode_ = false;
  gamma_0_ = 1;
  gamma_max_ = 1e20;
  gamma_1_min_ = 1e-5;
  so_corr_ = false;
  init_feasible_ = false;

  std::string convexify_strategy = "none";
  double convexify_margin = 1e-7;
  // NOTE(review): the option doc advertises a default of 50 for
  // max_iter_eig, but the code default is 200 -- confirm which is intended.
  casadi_int max_iter_eig = 200;

  // Read user options
  for (auto&& op : opts) {
    if (op.first=="max_iter") {
      max_iter_ = op.second;
    } else if (op.first=="min_iter") {
      min_iter_ = op.second;
    } else if (op.first=="max_iter_ls") {
      max_iter_ls_ = op.second;
    } else if (op.first=="c1") {
      c1_ = op.second;
    } else if (op.first=="beta") {
      beta_ = op.second;
    } else if (op.first=="merit_memory") {
      merit_memsize_ = op.second;
    } else if (op.first=="lbfgs_memory") {
      lbfgs_memory_ = op.second;
    } else if (op.first=="tol_pr") {
      tol_pr_ = op.second;
    } else if (op.first=="tol_du") {
      tol_du_ = op.second;
    } else if (op.first=="hessian_approximation") {
      hessian_approximation = op.second.to_string();
    } else if (op.first=="min_step_size") {
      min_step_size_ = op.second;
    } else if (op.first=="qpsol") {
      qpsol_plugin = op.second.to_string();
    } else if (op.first=="qpsol_options") {
      qpsol_options = op.second;
    } else if (op.first=="print_header") {
      print_header_ = op.second;
    } else if (op.first=="print_iteration") {
      print_iteration_ = op.second;
    } else if (op.first=="print_status") {
      print_status_ = op.second;
    } else if (op.first=="hess_lag") {
      // User-supplied Hessian of the Lagrangian:
      // 4 inputs (x, p, lam:f, lam:g), 1 output (the Hessian)
      Function f = op.second;
      casadi_assert_dev(f.n_in()==4);
      casadi_assert_dev(f.n_out()==1);
      set_function(f, "nlp_hess_l");
    } else if (op.first=="jac_fg") {
      // User-supplied first order info:
      // 2 inputs (x, p), 4 outputs (f, grad f, g, jac g)
      Function f = op.second;
      casadi_assert_dev(f.n_in()==2);
      casadi_assert_dev(f.n_out()==4);
      set_function(f, "nlp_jac_fg");
    } else if (op.first=="convexify_strategy") {
      convexify_strategy = op.second.to_string();
    } else if (op.first=="convexify_margin") {
      convexify_margin = op.second;
    } else if (op.first=="max_iter_eig") {
      max_iter_eig = op.second;
    } else if (op.first=="elastic_mode") {
      elastic_mode_ = op.second;
    } else if (op.first=="gamma_0") {
      gamma_0_ = op.second;
    } else if (op.first=="gamma_max") {
      gamma_max_ = op.second;
    } else if (op.first=="gamma_1_min") {
      gamma_1_min_ = op.second;
    } else if (op.first=="second_order_corrections") {
      so_corr_ = op.second;
    } else if (op.first=="init_feasible") {
      init_feasible_ = op.second;
    }
  }

  // Elastic mode recovers from failed QPs itself, so the QP solver must
  // report failure through its return status instead of throwing.
  if (elastic_mode_) {
    auto it = qpsol_options.find("error_on_fail");
    if (it==qpsol_options.end()) {
      qpsol_options["error_on_fail"] = false;
    } else {
      casadi_assert(!it->second,
        "QP solver with setting error_on_fail is incompatible with elastic mode sqpmethod.");
    }
  }

  // Use exact Hessian?
  exact_hessian_ = hessian_approximation =="exact";

  convexify_ = false;

  // Get/generate required functions
  // Plain f/g evaluation is only needed by the merit function
  // (line-search) and the second order corrections.
  if (max_iter_ls_ || so_corr_) create_function("nlp_fg", {"x", "p"}, {"f", "g"});
  // First order derivative information

  if (!has_function("nlp_jac_fg")) {
    create_function("nlp_jac_fg", {"x", "p"},
                    {"f", "grad:f:x", "g", "jac:g:x"});
  }
  // Output index 3 is the constraint Jacobian
  Asp_ = get_function("nlp_jac_fg").sparsity_out(3);

  if (exact_hessian_) {
    if (!has_function("nlp_hess_l")) {
      create_function("nlp_hess_l", {"x", "p", "lam:f", "lam:g"},
                      {"hess:gamma:x:x"}, {{"gamma", {"f", "g"}}});
    }
    Hsp_ = get_function("nlp_hess_l").sparsity_out(0);
    casadi_assert(Hsp_.is_symmetric(), "Hessian must be symmetric");
    if (convexify_strategy!="none") {
      convexify_ = true;
      Dict opts;
      opts["strategy"] = convexify_strategy;
      opts["margin"] = convexify_margin;
      opts["max_iter_eig"] = max_iter_eig;
      opts["verbose"] = verbose_;
      // NOTE(review): this listing appears truncated here -- presumably
      // Hsp_ = Convexify::setup(convexify_data_, Hsp_, opts); since
      // convexify_data_.config is consumed in solve(). Confirm upstream.
    }
  } else {
    // NOTE(review): this listing appears truncated here -- the BFGS branch
    // presumably assigns a dense nx_-by-nx_ pattern to Hsp_. Confirm upstream.
  }

  // Allocate the QP solver over the step variables dx
  casadi_assert(!qpsol_plugin.empty(), "'qpsol' option has not been set");
  qpsol_ = conic("qpsol", qpsol_plugin, {{"h", Hsp_}, {"a", Asp_}},
                 qpsol_options);
  alloc(qpsol_);

  if (elastic_mode_) {
    // Generate sparsity patterns for elastic mode
    // The elastic QP adds 2*ng_ slack variables: the Hessian gets empty
    // rows/columns for them, the Jacobian two extra diagonal blocks.
    Sparsity Hsp_ela = Sparsity(Hsp_);
    Sparsity Asp_ela = Sparsity(Asp_);

    std::vector<casadi_int> n_v = range(nx_);
    Hsp_ela.enlarge(2*ng_ + nx_, 2*ng_ + nx_, n_v, n_v);

    Sparsity dsp = Sparsity::diag(ng_, ng_);
    Asp_ela.appendColumns(dsp);
    Asp_ela.appendColumns(dsp);

    // Allocate QP solver for elastic mode
    Dict qpsol_ela_options = Dict(qpsol_options);

    casadi_assert(!qpsol_plugin.empty(), "'qpsol' option has not been set");
    qpsol_ela_ = conic("qpsol_ela", qpsol_plugin, {{"h", Hsp_ela}, {"a", Asp_ela}},
                       qpsol_ela_options);
    alloc(qpsol_ela_);
  }


  // BFGS?
  if (!exact_hessian_) {
    alloc_w(2*nx_); // casadi_bfgs
  }

  // Header
  if (print_header_) {
    print("-------------------------------------------\n");
    print("This is casadi::Sqpmethod.\n");
    if (exact_hessian_) {
      print("Using exact Hessian\n");
    } else {
      print("Using limited memory BFGS Hessian approximation\n");
    }
    print("Number of variables:                       %9d\n", nx_);
    print("Number of constraints:                     %9d\n", ng_);
    print("Number of nonzeros in constraint Jacobian: %9d\n", Asp_.nnz());
    print("Number of nonzeros in Lagrangian Hessian:  %9d\n", Hsp_.nnz());
    print("\n");
  }


  set_sqpmethod_prob();
  // Allocate memory
  casadi_int sz_w, sz_iw;
  casadi_sqpmethod_work(&p_, &sz_iw, &sz_w, elastic_mode_, so_corr_);
  alloc_iw(sz_iw, true);
  alloc_w(sz_w, true);
  if (convexify_) {
    // NOTE(review): this listing appears truncated here -- the extra
    // iw/w sizes required by the convexification (convexify_data_.sz_iw /
    // sz_w) are presumably allocated in this branch. Confirm upstream.
  }
}
359 
360 void Sqpmethod::set_sqpmethod_prob() {
361  p_.sp_h = Hsp_;
362  p_.sp_a = Asp_;
365  p_.nlp = &p_nlp_;
366 }
367 
// Distribute the pre-allocated arg/res/iw/w buffers over this instance's
// work vectors. Called before each evaluation.
void Sqpmethod::set_work(void* mem, const double**& arg, double**& res,
                         casadi_int*& iw, double*& w) const {
  auto m = static_cast<SqpmethodMemory*>(mem);

  // Set work in base classes
  Nlpsol::set_work(mem, arg, res, iw, w);

  // Bind the C data struct to the problem description and carve the
  // sqpmethod work arrays (dx, gf, Jk, Bk, ...) out of the shared buffers.
  m->d.prob = &p_;
  casadi_sqpmethod_init(&m->d, &arg, &res, &iw, &w, elastic_mode_, so_corr_);

  // -1 marks "solve() not yet called" for the statistics.
  m->iter_count = -1;
}
380 
381 int Sqpmethod::init_mem(void* mem) const {
382  if (Nlpsol::init_mem(mem)) return 1;
383  auto m = static_cast<SqpmethodMemory*>(mem);
384 
385  if (convexify_) m->add_stat("convexify");
386  m->add_stat("BFGS");
387  m->add_stat("QP");
388  m->add_stat("linesearch");
389  m->mem_qp = qpsol_->checkout();
390  return 0;
391 }
392 
393 void Sqpmethod::free_mem(void* mem) const {
394  auto m = static_cast<SqpmethodMemory*>(mem);
395  if (m->mem_qp >= 0) qpsol_.release(m->mem_qp);
396  delete static_cast<SqpmethodMemory*>(mem);
397 }
398 
// Main SQP driver. Repeatedly: evaluate f/g and first order derivatives,
// check convergence, build the (convexified exact or L-BFGS) Hessian,
// solve the QP subproblem (falling back to elastic mode on infeasibility),
// optionally apply a second order correction, line-search on an L1 merit
// function, and take the step. Returns 0 on a clean exit, 1 on evaluation
// failure; the qualitative outcome is recorded in m->return_status.
int Sqpmethod::solve(void* mem) const {
  auto m = static_cast<SqpmethodMemory*>(mem);
  auto d_nlp = &m->d_nlp;
  auto d = &m->d;

  // Number of SQP iterations
  m->iter_count = 0;

  // Number of line-search iterations
  casadi_int ls_iter = 0;

  // Last linesearch successful
  bool ls_success = true;

  // Last second order correction successful
  bool so_succes = false;

  // Reset
  m->merit_ind = 0;
  m->sigma = 0.; // NOTE: Move this into the main optimization loop
  m->reg = 0;

  // Default stepsize
  double t = 0;

  // For seeds
  const double one = 1.;

  // Info for printing
  std::string info = "";

  // gamma_1
  double gamma_1 = 0.0; // Fix may be used uninitialized warning

  // Elastic-mode iteration counter; -1 means "not in elastic mode"
  casadi_int ela_it = -1;

  casadi_clear(d->dx, nx_);

  // MAIN OPTIMIZATION LOOP
  while (true) {
    // Evaluate f, g and first order derivative information
    m->arg[0] = d_nlp->z;
    m->arg[1] = d_nlp->p;
    m->res[0] = &d_nlp->objective;
    m->res[1] = d->gf;
    m->res[2] = d_nlp->z + nx_;
    m->res[3] = d->Jk;
    switch (calc_function(m, "nlp_jac_fg")) {
      case -1:
        // NaN/Inf in the derivatives: not recoverable
        m->return_status = "Non_Regular_Sensitivities";
        m->unified_return_status = SOLVER_RET_NAN;
        if (print_status_)
          print("MESSAGE(sqpmethod): No regularity of sensitivities at current point.\n");
        return 1;
      case 0:
        break;
      default:
        return 1;
    }
    // Evaluate the gradient of the Lagrangian:
    // gLag = gf + Jk'*lam_g + lam_x
    casadi_copy(d->gf, nx_, d->gLag);
    casadi_mv(d->Jk, Asp_, d_nlp->lam+nx_, d->gLag, true);
    casadi_axpy(nx_, 1., d_nlp->lam, d->gLag);

    // Primal infeasibility
    double pr_inf = casadi_max_viol(nx_+ng_, d_nlp->z, d_nlp->lbz, d_nlp->ubz);

    // inf-norm of Lagrange gradient (dual infeasibility)
    double du_inf = casadi_norm_inf(nx_, d->gLag);

    // inf-norm of step
    double dx_norminf = casadi_norm_inf(nx_, d->dx);

    // Printing information about the actual iterate
    if (print_iteration_) {
      if (m->iter_count % 10 == 0) print_iteration();
      print_iteration(m->iter_count, d_nlp->objective, pr_inf, du_inf, dx_norminf,
                      m->reg, ls_iter, ls_success, so_succes, info);
      info = "";
      so_succes = false;
    }

    // Callback function
    if (callback(m)) {
      if (print_status_) print("WARNING(sqpmethod): Aborted by callback...\n");
      m->return_status = "User_Requested_Stop";
      break;
    }

    // Checking convergence criteria
    if (m->iter_count >= min_iter_ && pr_inf < tol_pr_ && du_inf < tol_du_) {
      if (print_status_)
        print("MESSAGE(sqpmethod): Convergence achieved after %d iterations\n", m->iter_count);
      m->return_status = "Solve_Succeeded";
      m->success = true;
      break;
    }

    if (m->iter_count >= max_iter_) {
      if (print_status_) print("MESSAGE(sqpmethod): Maximum number of iterations reached.\n");
      m->return_status = "Maximum_Iterations_Exceeded";
      m->unified_return_status = SOLVER_RET_LIMITED;
      break;
    }

    if (m->iter_count >= 1 && m->iter_count >= min_iter_ && dx_norminf <= min_step_size_) {
      if (print_status_) print("MESSAGE(sqpmethod): Search direction becomes too small without "
            "convergence criteria being met.\n");
      m->return_status = "Search_Direction_Becomes_Too_Small";
      break;
    }

    if (exact_hessian_) {
      // Update/reset exact Hessian
      m->arg[0] = d_nlp->z;
      m->arg[1] = d_nlp->p;
      m->arg[2] = &one;
      m->arg[3] = d_nlp->lam + nx_;
      m->res[0] = d->Bk;
      if (calc_function(m, "nlp_hess_l")) return 1;
      if (convexify_) {
        // Make Bk positive (semi)definite according to the chosen strategy
        ScopedTiming tic(m->fstats.at("convexify"));
        if (convexify_eval(&convexify_data_.config, d->Bk, d->Bk, m->iw, m->w)) return 1;
      }
    } else if (m->iter_count==0) {
      ScopedTiming tic(m->fstats.at("BFGS"));
      // Initialize BFGS
      casadi_fill(d->Bk, Hsp_.nnz(), 1.);
      casadi_bfgs_reset(Hsp_, d->Bk);
    } else {
      ScopedTiming tic(m->fstats.at("BFGS"));
      // Update BFGS; periodic reset every lbfgs_memory_ iterations
      if (m->iter_count % lbfgs_memory_ == 0) casadi_bfgs_reset(Hsp_, d->Bk);
      // Update the Hessian approximation
      casadi_bfgs(Hsp_, d->Bk, d->dx, d->gLag, d->gLag_old, m->w);
    }

    // Formulate the QP: bounds become offsets relative to the current iterate
    casadi_copy(d_nlp->lbz, nx_+ng_, d->lbdz);
    casadi_axpy(nx_+ng_, -1., d_nlp->z, d->lbdz);
    casadi_copy(d_nlp->ubz, nx_+ng_, d->ubdz);
    casadi_axpy(nx_+ng_, -1., d_nlp->z, d->ubdz);

    // Initial guess
    casadi_copy(d_nlp->lam, nx_+ng_, d->dlam);
    casadi_clear(d->dx, nx_);

    // Make initial guess feasible
    // NOTE(review): disabled here, but the same clipping is active in the
    // second-order-correction and elastic-mode paths below.
    /*if (init_feasible_) {
      for (casadi_int i = 0; i < nx_; ++i) {
        if (d->lbdz[i] > 0) d->dx[i] = d->lbdz[i];
        else if (d->ubdz[i] < 0) d->dx[i] = d->ubdz[i];
      }
    }*/

    // Increase counter
    m->iter_count++;

    // Solve the QP
    int ret = solve_QP(m, d->Bk, d->gf, d->lbdz, d->ubdz, d->Jk, d->dx, d->dlam, 0);

    // Elastic mode calculations
    if (elastic_mode_) {
      if (ret == 0) {
        // QP feasible again: leave elastic mode
        ela_it = -1;
      } else if (ret == SOLVER_RET_INFEASIBLE) {
        if (ela_it == -1) {
          ela_it = 0;
          gamma_1 = calc_gamma_1(m);
        }
        ret = solve_elastic_mode(m, &ela_it, gamma_1, ls_iter, ls_success, so_succes,
                                 pr_inf, du_inf, dx_norminf, &info, 0);

        if (ret == SOLVER_RET_INFEASIBLE) continue;
      } else if (ela_it == -1) {
        // QP solved, but enter elastic mode anyway if the constraint
        // multipliers exceed the would-be penalty parameter
        double pi_inf = casadi_norm_inf(ng_, d->dlam+nx_);
        gamma_1 = calc_gamma_1(m);

        if (pi_inf > gamma_1) {
          ela_it = 0;
          ret = solve_elastic_mode(m, &ela_it, gamma_1, ls_iter, ls_success, so_succes,
                                   pr_inf, du_inf, dx_norminf, &info, 0);
          if (ret == SOLVER_RET_INFEASIBLE) continue;
        }
      }
    }

    // Detecting indefiniteness via the predicted curvature dx'Bk dx
    double gain = casadi_bilin(d->Bk, Hsp_, d->dx, d->dx);
    if (gain < 0) {
      if (print_status_) print("WARNING(sqpmethod): Indefinite Hessian detected\n");
    }

    // Pre calculations for second order corrections and linesearch
    double l1_infeas, l1;
    if (so_corr_ || (max_iter_ls_>0)) {
      // Calculate penalty parameter of merit function
      m->sigma = std::fmax(m->sigma, 1.01*casadi_norm_inf(nx_+ng_, d->dlam));

      // Calculate L1-merit function in the actual iterate
      l1_infeas = casadi_sum_viol(nx_+ng_, d_nlp->z, d_nlp->lbz, d_nlp->ubz);
      l1 = d_nlp->objective + m->sigma * l1_infeas;
    }

    // Pre calculations for second order corrections
    double l1_infeas_cand, l1_cand, fk_cand;
    l1_infeas_cand = 0;
    if (so_corr_) {
      // Take candidate step
      casadi_copy(d_nlp->z, nx_, d->z_cand);
      casadi_axpy(nx_, 1., d->dx, d->z_cand);

      // Evaluating objective and constraints
      m->arg[0] = d->z_cand;
      m->arg[1] = d_nlp->p;
      m->res[0] = &fk_cand;
      m->res[1] = d->z_cand + nx_;
      if (calc_function(m, "nlp_fg")) {
        l1_cand = -inf; // Make sure the second order corrections are not used!
      } else {
        l1_infeas_cand = casadi_sum_viol(nx_+ng_, d->z_cand, d_nlp->lbz, d_nlp->ubz);
        l1_cand = fk_cand + m->sigma*l1_infeas_cand;
      }
    }

    // Second order correction: only when the full step worsens both the
    // merit function and the constraint violation
    if (so_corr_ && l1_cand > l1 && l1_infeas_cand > l1_infeas) {
      // Copy in case of a fail
      casadi_copy(d->dx, nx_, d->temp_sol);
      casadi_copy(d->dlam, nx_+ng_, d->temp_sol+nx_);

      // Add gradient times proposal step to bounds
      casadi_clear(d->lbdz, nx_+ng_);
      casadi_mv(d->Jk, Asp_, d->dx, d->lbdz+nx_, false);
      casadi_copy(d->lbdz, nx_+ng_, d->ubdz);

      // Add bounds
      casadi_axpy(nx_+ng_, 1., d_nlp->lbz, d->lbdz);
      casadi_axpy(nx_+ng_, 1., d_nlp->ubz, d->ubdz);

      // Subtract constraints in candidate step from bounds
      casadi_axpy(nx_, -1., d_nlp->z, d->lbdz);
      casadi_axpy(ng_, -1., d->z_cand+nx_, d->lbdz+nx_);
      casadi_axpy(nx_, -1., d_nlp->z, d->ubdz);
      casadi_axpy(ng_, -1., d->z_cand+nx_, d->ubdz+nx_);

      int ret = SOLVER_RET_INFEASIBLE;
      // Second order corrections without elastic mode (ela_it is -1 if elastic mode is turned off)
      if (ela_it == -1) {
        // Initial guess
        casadi_copy(d_nlp->lam, nx_+ng_, d->dlam);
        casadi_clear(d->dx, nx_);

        // Make initial guess feasible
        if (init_feasible_) {
          for (casadi_int i = 0; i < nx_; ++i) {
            if (d->lbdz[i] > 0) d->dx[i] = d->lbdz[i];
            else if (d->ubdz[i] < 0) d->dx[i] = d->ubdz[i];
          }
        }

        // Solve the QP
        ret = solve_QP(m, d->Bk, d->gf, d->lbdz, d->ubdz, d->Jk,
                       d->dx, d->dlam, 1);
      }

      if (elastic_mode_ && (ret == SOLVER_RET_INFEASIBLE || ela_it != -1)) {
        // Second order corrections in elastic mode
        if (ela_it == -1) {
          ela_it = 0;
          gamma_1 = calc_gamma_1(m);
        }

        ret = solve_elastic_mode(m, &ela_it, gamma_1, ls_iter, ls_success, so_succes,
                                 pr_inf, du_inf, dx_norminf, &info, 1);

      }

      // Fallback on previous solution if the second order correction failed
      if (ret != 0) {
        casadi_copy(d->temp_sol, nx_, d->dx);
        casadi_copy(d->temp_sol+nx_, nx_+ng_, d->dlam);
        so_succes = false;
      } else {
        // Check if corrected step is better than the original one using the merit function
        double l1_cand_norm = l1_cand;
        double l1_cand_soc;

        // Take candidate step
        casadi_copy(d_nlp->z, nx_, d->z_cand);
        casadi_axpy(nx_, 1., d->dx, d->z_cand);

        // Evaluating objective and constraints
        m->arg[0] = d->z_cand;
        m->arg[1] = d_nlp->p;
        m->res[0] = &fk_cand;
        m->res[1] = d->z_cand + nx_;
        if (calc_function(m, "nlp_fg")) {
          l1_cand_soc = inf; // Make sure the second order corrections are not used!
        } else {
          l1_infeas_cand = casadi_sum_viol(nx_+ng_, d->z_cand, d_nlp->lbz, d_nlp->ubz);
          l1_cand_soc = fk_cand + m->sigma*l1_infeas_cand;
        }

        if (l1_cand_norm < l1_cand_soc) {
          // Copy normal step if merit function increases
          casadi_copy(d->temp_sol, nx_, d->dx);
          casadi_copy(d->temp_sol+nx_, nx_+ng_, d->dlam);
          so_succes = false;
        } else {
          so_succes = true;
        }
      }
    }

    if (max_iter_ls_>0) { // max_iter_ls_== 0 disables line-search
      // Line-search
      if (verbose_) print("Starting line-search\n");
      ScopedTiming tic(m->fstats.at("linesearch"));

      // Reset line-search counter, success marker
      ls_iter = 0;
      ls_success = true;

      // Stepsize
      t = 1.0;

      // Right-hand side of Armijo condition (directional derivative of merit)
      double tl1 = casadi_dot(nx_, d->dx, d->gf) - m->sigma*l1_infeas;

      // Storing the actual merit function value in a list
      d->merit_mem[m->merit_ind] = l1;
      m->merit_ind++;
      m->merit_ind %= merit_memsize_;
      // Calculating maximal merit function value so far
      //double meritmax = casadi_vfmax(d->merit_mem+1,
      //  std::min(merit_memsize_, static_cast<casadi_int>(m->iter_count))-1, d->merit_mem[0]);

      // Line-search loop
      while (true) {
        // Increase counter
        ls_iter++;

        // Candidate step
        casadi_copy(d_nlp->z, nx_, d->z_cand);
        casadi_axpy(nx_, t, d->dx, d->z_cand);

        // Evaluating objective and constraints
        // (skipped when a full-step SOC candidate was already evaluated)
        if (!so_corr_ || !so_succes) {
          m->arg[0] = d->z_cand;
          m->arg[1] = d_nlp->p;
          m->res[0] = &fk_cand;
          m->res[1] = d->z_cand + nx_;
          if (calc_function(m, "nlp_fg")) {
            // Avoid infinite recursion
            if (ls_iter == max_iter_ls_) {
              ls_success = false;
              l1_infeas = nan;
              break;
            }
            // line-search failed, skip iteration
            t = beta_ * t;
            continue;
          }
        }

        // Calculating merit-function in candidate
        l1_cand = fk_cand + m->sigma*casadi_sum_viol(nx_+ng_, d->z_cand, d_nlp->lbz, d_nlp->ubz);
        // Armijo sufficient-decrease test
        if (l1_cand <= l1 + t * c1_ * tl1) {
          break;
        }

        // Line-search not successful, but we accept it.
        if (ls_iter == max_iter_ls_) {
          ls_success = false;
          break;
        }

        // Backtracking
        t = beta_ * t;
      }

      // Candidate accepted, update dual variables (convex combination)
      casadi_scal(nx_ + ng_, 1-t, d_nlp->lam);
      casadi_axpy(nx_ + ng_, t, d->dlam, d_nlp->lam);

      casadi_scal(nx_, t, d->dx);
    } else {
      // Full step
      casadi_copy(d->dlam, nx_ + ng_, d_nlp->lam);
    }

    // Take step
    casadi_axpy(nx_, 1., d->dx, d_nlp->z);

    if (!exact_hessian_) {
      // Evaluate the gradient of the Lagrangian with the old x but new lam (for BFGS)
      casadi_copy(d->gf, nx_, d->gLag_old);
      casadi_mv(d->Jk, Asp_, d_nlp->lam+nx_, d->gLag_old, true);
      casadi_axpy(nx_, 1., d_nlp->lam, d->gLag_old);
    }

    // If linesearch failed enter elastic mode
    if (!ls_success && elastic_mode_ && (max_iter_ls_>0) && ela_it == -1) {
      ela_it = 0;
      gamma_1 = calc_gamma_1(m);;
    }
  }

  return 0;
}
810 
812  print("%4s %14s %9s %9s %9s %7s %2s %7s\n", "iter", "objective", "inf_pr",
813  "inf_du", "||d||", "lg(rg)", "ls", "info");
814 }
815 
816 void Sqpmethod::print_iteration(casadi_int iter, double obj,
817  double pr_inf, double du_inf,
818  double dx_norm, double rg,
819  casadi_int ls_trials, bool ls_success,
820  bool so_succes, std::string info) const {
821  print("%4d %14.6e %9.2e %9.2e %9.2e ", iter, obj, pr_inf, du_inf, dx_norm);
822  if (rg>0) {
823  print("%7.2f ", log10(rg));
824  } else {
825  print("%7s ", "-");
826  }
827 
828  print("%2d", ls_trials);
829  if (!ls_success) {
830  print("F");
831  } else {
832  print(" ");
833  }
834 
835  if (so_succes) {
836  print(" - SOC");
837  }
838 
839  print(" - ");
840  print(info.c_str());
841  print("\n");
842 }
843 
// Solve one QP subproblem: min 1/2 dx'H dx + g'dx subject to the bounds
// lbdz/ubdz on [dx; A*dx]. x_opt and dlam act both as warm start and as
// output; dlam is laid out as [simple-bound multipliers (nx_) |
// constraint multipliers (ng_)]. mode==1 marks the second order correction
// QP, for which infeasibility is always reported back to the caller.
// Returns 0 on success, SOLVER_RET_INFEASIBLE when the caller must recover.
int Sqpmethod::solve_QP(SqpmethodMemory* m, const double* H, const double* g,
                        const double* lbdz, const double* ubdz, const double* A,
                        double* x_opt, double* dlam, int mode) const {
  ScopedTiming tic(m->fstats.at("QP"));
  // Inputs
  std::fill_n(m->arg, qpsol_.n_in(), nullptr);
  m->arg[CONIC_H] = H;
  m->arg[CONIC_G] = g;
  m->arg[CONIC_X0] = x_opt;
  m->arg[CONIC_LAM_X0] = dlam;
  m->arg[CONIC_LAM_A0] = dlam + nx_;
  m->arg[CONIC_LBX] = lbdz;
  m->arg[CONIC_UBX] = ubdz;
  m->arg[CONIC_A] = A;
  m->arg[CONIC_LBA] = lbdz+nx_;
  m->arg[CONIC_UBA] = ubdz+nx_;

  // Outputs
  std::fill_n(m->res, qpsol_.n_out(), nullptr);
  m->res[CONIC_X] = x_opt;
  m->res[CONIC_LAM_X] = dlam;
  m->res[CONIC_LAM_A] = dlam + nx_;
  double cost;
  m->res[CONIC_COST] = &cost;

  // Solve the QP using the memory object checked out in init_mem
  qpsol_(m->arg, m->res, m->iw, m->w, m->mem_qp);
  auto m_qpsol = static_cast<ConicMemory*>(qpsol_->memory(m->mem_qp));

  // Check if the QP was infeasible for elastic mode
  if (!m_qpsol->d_qp.success) {
    if ((elastic_mode_ && m_qpsol->d_qp.unified_return_status == SOLVER_RET_INFEASIBLE)
        || (mode == 1)) {
      return SOLVER_RET_INFEASIBLE;
    }
  }

  if (verbose_) print("QP solved\n");
  return 0;
}
884 
// Solve the enlarged elastic-mode QP, whose variable vector is
// [dx (nx_) | slack_lo (ng_) | slack_up (ng_)]; hence the nx_ + 2*ng_
// offsets into the multiplier and bound vectors.
// Returns 0 on success, SOLVER_RET_INFEASIBLE if the elastic QP also fails.
int Sqpmethod::solve_ela_QP(SqpmethodMemory* m, const double* H, const double* g,
                            const double* lbdz, const double* ubdz, const double* A,
                            double* x_opt, double* dlam) const {
  ScopedTiming tic(m->fstats.at("QP"));
  // Inputs
  std::fill_n(m->arg, qpsol_ela_.n_in(), nullptr);
  m->arg[CONIC_H] = H;
  m->arg[CONIC_G] = g;
  m->arg[CONIC_X0] = x_opt;
  m->arg[CONIC_LAM_X0] = dlam;
  m->arg[CONIC_LAM_A0] = dlam + nx_ + 2*ng_;
  m->arg[CONIC_LBX] = lbdz;
  m->arg[CONIC_UBX] = ubdz;
  m->arg[CONIC_A] = A;
  m->arg[CONIC_LBA] = lbdz + nx_ + 2*ng_;
  m->arg[CONIC_UBA] = ubdz + nx_ + 2*ng_;

  // Outputs
  std::fill_n(m->res, qpsol_ela_.n_out(), nullptr);
  m->res[CONIC_X] = x_opt;
  m->res[CONIC_LAM_X] = dlam;
  m->res[CONIC_LAM_A] = dlam + nx_ + 2*ng_;
  double cost;
  m->res[CONIC_COST] = &cost;

  // Solve the QP
  // NOTE(review): uses memory object 0 rather than a checked-out one
  // (unlike solve_QP, which uses m->mem_qp) -- presumably fine for
  // single-threaded use, but confirm for parallel evaluation.
  qpsol_ela_(m->arg, m->res, m->iw, m->w, 0);
  auto m_qpsol_ela = static_cast<ConicMemory*>(qpsol_ela_->memory(0));

  // Check if the QP was infeasible
  if (!m_qpsol_ela->d_qp.success) {
    if (m_qpsol_ela->d_qp.unified_return_status == SOLVER_RET_INFEASIBLE) {
      return SOLVER_RET_INFEASIBLE;
    }
  }

  if (verbose_) print("Elastic QP solved\n");

  return 0;
}
925 
927  casadi_int* ela_it, double gamma_1,
928  casadi_int ls_iter, bool ls_success, bool so_succes, double pr_inf,
929  double du_inf, double dx_norminf, std::string* info, int mode) const {
930  auto d_nlp = &m->d_nlp;
931  auto d = &m->d;
932 
933  if (mode != 0 && mode != 1) casadi_error("Wrong mode provided to solve_elastic_mode.");
934 
935  double gamma = 0.;
936 
937  if (mode == 0) (*ela_it)++;
938 
939  // Temp datastructs for data copy
940  double *temp_1, *temp_2;
941 
942  // Make larger jacobian (has 2 extra diagonal matrices with -1 and 1 respectively)
943  temp_1 = d->Jk + Asp_.nnz();
944  casadi_fill(temp_1, ng_, -1.);
945  temp_1 += ng_;
946  casadi_fill(temp_1, ng_, 1.);
947 
948  // Initialize bounds
949  temp_1 = d->lbdz + nx_;
950  temp_2 = d->lbdz + nx_+2*ng_;
951  casadi_copy(temp_1, ng_, temp_2);
952  casadi_clear(temp_1, 2*ng_);
953 
954  temp_1 = d->ubdz + nx_;
955  temp_2 = d->ubdz + nx_+2*ng_;
956  casadi_copy(temp_1, ng_, temp_2);
957  casadi_fill(temp_1, 2*ng_, inf);
958 
959  if (*ela_it > 1) {
960  gamma = pow(10, *ela_it * (*ela_it - 1) / 2) * gamma_1;
961  } else {
962  gamma = gamma_1;
963  }
964 
965  if (gamma > gamma_max_) {
966  casadi_error("Error in elastic mode of QP solver."
967  "Gamma became larger than gamma_max.");
968  }
969 
970  if (mode == 0 && print_iteration_) {
971  ls_iter = 0;
972  ls_success = true;
973  print_iteration(m->iter_count, d_nlp->objective, pr_inf, du_inf, dx_norminf,
974  m->reg, ls_iter, ls_success, so_succes, *info);
975  }
976 
977  // Make larger gradient (has gamma for slack variables)
978  temp_1 = d->gf + nx_;
979  casadi_fill(temp_1, 2 * ng_, gamma);
980 
981  // Initial guess
982  casadi_clear(d->dlam, nx_ + 3 * ng_);
983  casadi_copy(d_nlp->lam, nx_, d->dlam);
984  casadi_copy(d_nlp->lam + nx_, ng_, d->dlam + nx_ + 2 * ng_);
985  casadi_clear(d->dx, nx_ + 2 * ng_);
986 
987  // Make initial guess feasible on x values
988  if (init_feasible_) {
989  for (casadi_int i = 0; i < nx_; ++i) {
990  if (d->lbdz[i] > 0) d->dx[i] = d->lbdz[i];
991  else if (d->ubdz[i] < 0) d->dx[i] = d->ubdz[i];
992  }
993 
994  // Make initial guess feasible on constraints by altering slack variables
995  casadi_mv(d->Jk, Asp_, d->dx, d->temp_mem, false);
996  for (casadi_int i = 0; i < ng_; ++i) {
997  if (d->ubdz[nx_+2*ng_+i]-d->temp_mem[i] < 0) {
998  d->dx[nx_+i] = -d->ubdz[nx_+2*ng_+i]+d->temp_mem[i];
999  }
1000 
1001  if (d->lbdz[nx_+2*ng_+i]-d->temp_mem[i] > 0) {
1002  d->dx[nx_+ng_+i] = d->lbdz[nx_+2*ng_+i]-d->temp_mem[i];
1003  }
1004  }
1005  }
1006 
1007  // Solve the QP
1008  int ret = solve_ela_QP(m, d->Bk, d->gf, d->lbdz, d->ubdz, d->Jk, d->dx, d->dlam);
1009 
1010  if (mode == 0) *info = "Elastic mode QP (gamma = " + str(gamma) + ")";
1011 
1012  // Copy constraint dlam to the right place
1013  casadi_copy(d->dlam+nx_+2*ng_, ng_, d->dlam+nx_);
1014 
1015  return ret;
1016 }
1017 
1019  auto d = &m->d;
1020  return std::max(gamma_0_*casadi_norm_inf(nx_, d->gf), gamma_1_min_);
1021 }
1022 
1025 
// Register every oracle function the generated code may call, so the code
// generator emits their definitions. Mirrors the calc_function() calls in solve().
1026  if (max_iter_ls_ || so_corr_) g.add_dependency(get_function("nlp_fg"));
1027  g.add_dependency(get_function("nlp_jac_fg"));
1028  if (exact_hessian_) g.add_dependency(get_function("nlp_hess_l"));
// nlp_grad is only needed when extra outputs are requested by the caller.
1029  if (calc_f_ || calc_g_ || calc_lam_x_ || calc_lam_p_)
1030  g.add_dependency(get_function("nlp_grad"));
1034 }
1035 
1038  codegen_body_enter(g);
1039  // From nlpsol
1040 
1041  g.local("d", "struct casadi_sqpmethod_data*");
1042  g.init_local("d", "&" + codegen_mem(g));
1043  g.local("p", "struct casadi_sqpmethod_prob");
1044 
1045  g << "d->prob = &p;\n";
1046  g << "p.sp_h = " << g.sparsity(Hsp_) << ";\n";
1047  g << "p.sp_a = " << g.sparsity(Asp_) << ";\n";
1048  g << "p.merit_memsize = " << merit_memsize_ << ";\n";
1049  g << "p.max_iter_ls = " << max_iter_ls_ << ";\n";
1050  g << "p.nlp = &p_nlp;\n";
1051  g << "casadi_sqpmethod_init(d, &arg, &res, &iw, &w, "
1052  << elastic_mode_ << ", " << so_corr_ << ");\n";
1053 
1054  if (elastic_mode_) {
1055  g.local("gamma_1", "double");
1056  g.local("ela_it", "casadi_int");
1057  g.init_local("ela_it", "-1");
1058  g.local("temp_norm", "double");
1059  }
1060  g.local("ret", "int");
1061 
1062  g.local("iter_count", "casadi_int");
1063  g.init_local("iter_count", "0");
1064  if (max_iter_ls_ || so_corr_) {
1065  //g.local("merit_ind", "casadi_int");
1066  //g.init_local("merit_ind", "0");
1067  g.local("sigma", "casadi_real");
1068  g.init_local("sigma", "0.0");
1069  }
1070  if (max_iter_ls_) {
1071  g.local("ls_iter", "casadi_int");
1072  g.init_local("ls_iter", "0");
1073  g.local("t", "casadi_real");
1074  g.init_local("t", "0.0");
1075  }
1076 
1077  if (elastic_mode_ && max_iter_ls_) {
1078  g.local("ls_success", "casadi_int");
1079  g.init_local("ls_success", "1");
1080  }
1081 
1082  g << g.clear("d->dx", nx_) << "\n";
1083  g.comment("MAIN OPTIMIZATION LOOP");
1084  g << "while (1) {\n";
1085  g.comment("Evaluate f, g and first order derivative information");
1086  g << "d->arg[0] = d_nlp.z;\n";
1087  g << "d->arg[1] = d_nlp.p;\n";
1088  g << "d->res[0] = &d_nlp.objective;\n";
1089  g << "d->res[1] = d->gf;\n";
1090  g << "d->res[2] = d_nlp.z+" + str(nx_) + ";\n";
1091  g << "d->res[3] = d->Jk;\n";
1092  std::string nlp_jac_fg = g(get_function("nlp_jac_fg"), "d->arg", "d->res", "d->iw", "d->w");
1093  g << "if (" + nlp_jac_fg + ") return 1;\n";
1094  g.comment("Evaluate the gradient of the Lagrangian");
1095  g << g.copy("d->gf", nx_, "d->gLag") << "\n";
1096  g << g.mv("d->Jk", Asp_, "d_nlp.lam+"+str(nx_), "d->gLag", true) << "\n";
1097  g << g.axpy(nx_, "1.0", "d_nlp.lam", "d->gLag") << "\n";
1098  g.comment("Primal infeasability");
1099  g.local("pr_inf", "casadi_real");
1100  g << "pr_inf = " << g.max_viol(nx_+ng_, "d_nlp.z", "d_nlp.lbz", "d_nlp.ubz") << ";\n";
1101  g.comment("inf-norm of lagrange gradient");
1102  g.local("du_inf", "casadi_real");
1103  g << "du_inf = " << g.norm_inf(nx_, "d->gLag") << ";\n";
1104  g.comment("inf-norm of step");
1105  g.local("dx_norminf", "casadi_real");
1106  g << "dx_norminf = " << g.norm_inf(nx_, "d->dx") << ";\n";
1107  g.comment("Checking convergence criteria");
1108  g << "if (iter_count >= " << min_iter_ << " && pr_inf < " << tol_pr_ <<
1109  " && du_inf < " << tol_du_ << ") break;\n";
1110  g << "if (iter_count >= " << max_iter_ << ") break;\n";
1111  g << "if (iter_count >= 1 && iter_count >= " << min_iter_ << " && dx_norminf <= " <<
1112  min_step_size_ << ") break;\n";
1113  if (exact_hessian_) {
1114  g.comment("Update/reset exact Hessian");
1115  g << "d->arg[0] = d_nlp.z;\n";
1116  g << "d->arg[1] = d_nlp.p;\n";
1117  g.local("one", "const casadi_real");
1118  g.init_local("one", "1");
1119  g << "d->arg[2] = &one;\n";
1120  g << "d->arg[3] = d_nlp.lam+" + str(nx_) + ";\n";
1121  g << "d->res[0] = d->Bk;\n";
1122  std::string nlp_hess_l = g(get_function("nlp_hess_l"), "d->arg", "d->res", "d->iw", "d->w");
1123  g << "if (" + nlp_hess_l + ") return 1;\n";
1124 
1125  if (convexify_) {
1126  std::string ret = g.convexify_eval(convexify_data_, "d->Bk", "d->Bk", "d->iw", "d->w");
1127  g << "if (" << ret << ") return 1;\n";
1128  }
1129  } else {
1130  g << "if (iter_count==0) {\n";
1131  g.comment("Initialize BFGS");
1132  g << g.fill("d->Bk", Hsp_.nnz(), "1.") << "\n";
1133  g << "casadi_bfgs_reset(p.sp_h, d->Bk);\n";
1134  g << "} else {\n";
1135  g.comment("Update BFGS");
1136  g << "if (iter_count % " << lbfgs_memory_ << "==0) ";
1137  g << "casadi_bfgs_reset(p.sp_h, d->Bk);\n";
1138  g.comment("Update the Hessian approximation");
1139  g << "casadi_bfgs(p.sp_h, d->Bk, d->dx, d->gLag, d->gLag_old, d->w);\n";
1140  g << "}\n";
1141  }
1142 
1143  g.comment("Formulate the QP");
1144  g << g.copy("d_nlp.lbz", nx_+ng_, "d->lbdz") << "\n";
1145  g << g.axpy(nx_+ng_, "-1.0", "d_nlp.z", "d->lbdz") << "\n";
1146  g << g.copy("d_nlp.ubz", nx_+ng_, "d->ubdz") << "\n";
1147  g << g.axpy(nx_+ng_, "-1.0", "d_nlp.z", "d->ubdz") << "\n";
1148  g.comment("Initial guess");
1149  g << g.copy("d_nlp.lam", nx_+ng_, "d->dlam") << "\n";
1150  g << g.clear("d->dx", nx_) << "\n";
1151 
1152  if (init_feasible_) {
1153  g.comment("Make initial guess feasible");
1154  g << "for (casadi_int i = 0; i < " << nx_ << "; ++i) {\n";
1155  g << "if (d->lbdz[i] > 0) d->dx[i] = d->lbdz[i];\n";
1156  g << "else if (d->ubdz[i] < 0) d->dx[i] = d->ubdz[i];\n";
1157  g << "}\n";
1158  }
1159 
1160  g.comment("Increase counter");
1161  g << "iter_count++;\n";
1162  g.comment("Solve the QP");
1163  codegen_qp_solve(g, "d->Bk", "d->gf", "d->lbdz", "d->ubdz", "d->Jk", "d->dx", "d->dlam", 0);
1164 
1165  if (elastic_mode_) {
1166  g.comment("Elastic mode calculations");
1167  g << "if (ret == " << 0 << ") {\n";
1168  g << "ela_it = -1;\n";
1169  g << "} else if (ret == " << SOLVER_RET_INFEASIBLE << ") {\n";
1170  g << "if (ela_it == -1) {\n";
1171  g << "ela_it = 0;\n";
1173  g << "}\n";
1175  g << "if (ret == " << SOLVER_RET_INFEASIBLE << ") continue;\n";
1176 
1177  g << "} else if (ela_it == -1) {\n";
1178  g << "double pi_inf = " << g.norm_inf(ng_, "d->dlam+" + str(nx_)) << ";\n";
1180  g << "if (pi_inf > gamma_1) {\n";
1181  g << "ela_it = 0;\n";
1183  g << "if (ret == " << SOLVER_RET_INFEASIBLE << ") continue;\n";
1184  g << "}\n";
1185  g << "}\n";
1186  }
1187 
1188  if (max_iter_ls_ > 0 || so_corr_) {
1189  g.comment("Calculate penalty parameter of merit function");
1190  g << "sigma = " << g.fmax("sigma", "(1.01*" + g.norm_inf(nx_+ng_, "d->dlam")+")") << ";\n";
1191  g.comment("Calculate L1-merit function in the actual iterate");
1192  g.local("l1_infeas", "casadi_real");
1193  g << "l1_infeas = " << g.sum_viol(nx_+ng_, "d_nlp.z", "d_nlp.lbz", "d_nlp.ubz") << ";\n";
1194  g.local("l1", "casadi_real");
1195  g << "l1 = d_nlp.objective + sigma * l1_infeas;\n";
1196  }
1197  if (so_corr_) {
1198  g.local("l1_infeas_cand", "casadi_real");
1199  g.init_local("l1_infeas_cand", "0");
1200  g.local("l1_cand", "casadi_real");
1201  g.local("fk_cand", "casadi_real");
1202  g.comment("Take candidate step");
1203  g << g.copy("d_nlp.z", nx_, "d->z_cand") << ";\n";
1204  g << g.axpy(nx_, "1.", "d->dx", "d->z_cand") << ";\n";
1205 
1206  g.comment("Evaluate objective and constraints");
1207  g << "d->arg[0] = d->z_cand;\n;";
1208  g << "d->arg[1] = d_nlp.p;\n;";
1209  g << "d->res[0] = &fk_cand;\n;";
1210  g << "d->res[1] = d->z_cand+" + str(nx_) + ";\n;";
1211  std::string nlp_fg = g(get_function("nlp_fg"), "d->arg", "d->res", "d->iw", "d->w");
1212  g << "if (" << nlp_fg << ") {\n";
1213  g << "l1_cand = -casadi_inf;\n";
1214  g << "} else {\n";
1215  g << "l1_infeas_cand = " << g.sum_viol(nx_+ng_, "d->z_cand", "d_nlp.lbz", "d_nlp.ubz") << ";\n";
1216  g << "l1_cand = fk_cand + sigma*l1_infeas_cand;\n";
1217  g << "}\n";
1218 
1219  g << "if (l1_cand > l1 && l1_infeas_cand > l1_infeas) {\n";
1220  g.comment("Copy in case of fail");
1221  g << g.copy("d->dx", nx_, "d->temp_sol") << "\n";
1222  g << g.copy("d->dlam", nx_+ng_, "d->temp_sol+"+str(nx_)) << "\n";
1223 
1224  g.comment("Add gradient times proposal step to bounds");
1225  g << g.clear("d->lbdz", nx_+ng_) << ";\n";
1226  g << g.mv("d->Jk", Asp_, "d->dx", "d->lbdz+" + str(nx_), false) << ";\n";
1227  g << g.copy("d->lbdz", nx_+ng_, "d->ubdz") << ";\n";
1228 
1229  g.comment("Add bounds");
1230  g << g.axpy(nx_+ng_, "1.", "d_nlp.lbz", "d->lbdz") << ";\n";
1231  g << g.axpy(nx_+ng_, "1.", "d_nlp.ubz", "d->ubdz") << ";\n";
1232 
1233  g.comment("Subtract constraints in candidate step from bounds");
1234  g << g.axpy(nx_, "-1.", "d_nlp.z", "d->lbdz") << ";\n";
1235  g << g.axpy(ng_, "-1", "d->z_cand+" + str(nx_), "d->lbdz+" + str(nx_)) << ";\n";
1236  g << g.axpy(nx_, "-1.", "d_nlp.z", "d->ubdz") << ";\n";
1237  g << g.axpy(ng_, "-1.", "d->z_cand+" + str(nx_), "d->ubdz+" + str(nx_)) << ";\n";
1238 
1239  if (!elastic_mode_) {
1240  g.comment("Initial guess");
1241  g << g.copy("d_nlp.lam", nx_+ng_, "d->dlam") << "\n";
1242  g << g.clear("d->dx", nx_);
1243 
1244  if (init_feasible_) {
1245  g.comment("Make initial guess feasible");
1246  g << "for (casadi_int i = 0; i < " << nx_ << "; ++i) {\n";
1247  g << "if (d->lbdz[i] > 0) d->dx[i] = d->lbdz[i];\n";
1248  g << "else if (d->ubdz[i] < 0) d->dx[i] = d->ubdz[i];\n";
1249  g << "}\n";
1250  }
1251 
1252  codegen_qp_solve(g, "d->Bk", "d->gf", "d->lbdz", "d->ubdz", "d->Jk", "d->dx", "d->dlam", 1);
1253  } else {
1254  g << "ret = " << SOLVER_RET_INFEASIBLE << ";\n";
1255  g.comment("Second order corrections without elastic mode");
1256  g << "if (ela_it == -1) {\n";
1257 
1258  g.comment("Initial guess");
1259  g << g.copy("d_nlp.lam", nx_+ng_, "d->dlam") << "\n";
1260  g << g.clear("d->dx", nx_);
1261 
1262  if (init_feasible_) {
1263  g.comment("Make initial guess feasible");
1264  g << "for (casadi_int i = 0; i < " << nx_ << "; ++i) {\n";
1265  g << "if (d->lbdz[i] > 0) d->dx[i] = d->lbdz[i];\n";
1266  g << "else if (d->ubdz[i] < 0) d->dx[i] = d->ubdz[i];\n";
1267  g << "}\n";
1268  }
1269 
1270  codegen_qp_solve(g, "d->Bk", "d->gf", "d->lbdz", "d->ubdz", "d->Jk", "d->dx", "d->dlam", 1);
1271  g << "}\n";
1272 
1273  g.comment("Second order corrections in elastic mode");
1274  g << "if (ret == " << SOLVER_RET_INFEASIBLE << " || ela_it != -1) {\n";
1275  g << "if (ela_it == -1) {\n";
1276  g << "ela_it = 0;\n";
1278  g << "}\n";
1280  g << "}\n";
1281  }
1282  g << "}\n";
1283  g.comment("Fallback on previous solution if the second order correction failed");
1284  g << "if (ret != " << 0 << ") {\n";
1285  g << g.copy("d->temp_sol", nx_, "d->dx") << "\n";
1286  g << g.copy("d->temp_sol+"+str(nx_), nx_+ng_, "d->dlam") << "\n";
1287  g << "} else {\n";
1288  g.comment("Check if corrected step is better than the original one using the merit function");
1289  g << "double l1_cand_norm = l1_cand;\n";
1290  g << "double l1_cand_soc;\n";
1291 
1292  g.comment("Take candidate step");
1293  g << g.copy("d_nlp.z", nx_, "d->z_cand") << "\n";
1294  g << g.axpy(nx_, "1.", "d->dx", "d->z_cand") << "\n";
1295 
1296  g.comment("Evaluate objective and constraints");
1297  g << "d->arg[0] = d->z_cand;\n;";
1298  g << "d->arg[1] = d_nlp.p;\n;";
1299  g << "d->res[0] = &fk_cand;\n;";
1300  g << "d->res[1] = d->z_cand+" + str(nx_) + ";\n;";
1301  nlp_fg = g(get_function("nlp_fg"), "d->arg", "d->res", "d->iw", "d->w");
1302  g << "if (" << nlp_fg << ") {\n";
1303  g << "l1_cand_soc = casadi_inf;\n";
1304  g << "} else {\n";
1305  g << "l1_infeas_cand = " << g.sum_viol(nx_+ng_, "d->z_cand", "d_nlp.lbz", "d_nlp.ubz") << ";\n";
1306  g << "l1_cand_soc = fk_cand + sigma*l1_infeas_cand;\n";
1307  g << "}\n";
1308 
1309  g << "if (l1_cand_norm < l1_cand_soc) {\n";
1310  g.comment("Copy normal step if merit function increases");
1311  g << g.copy("d->temp_sol", nx_, "d->dx") << "\n";
1312  g << g.copy("d->temp_sol+"+str(nx_), nx_+ng_, "d->dlam") << "\n";
1313  g << "}\n";
1314 
1315  g << "}\n";
1316  }
1317 
1318  if (max_iter_ls_) {
1319  g.comment("Detecting indefiniteness");
1320  g.comment("Right-hand side of Armijo condition");
1321  g.local("F_sens", "casadi_real");
1322  g << "F_sens = " << g.dot(nx_, "d->dx", "d->gf") << ";\n";
1323  g.local("tl1", "casadi_real");
1324  g << "tl1 = F_sens - sigma * l1_infeas;\n";
1325  /*g.comment("Storing the actual merit function value in a list");
1326  g << "d->merit_mem[merit_ind] = l1;\n";
1327  g << "merit_ind++;\n";
1328  g << "merit_ind %= " << merit_memsize_ << ";\n";
1329  g.comment("Calculating maximal merit function value so far");
1330  g.local("meritmax", "casadi_real");
1331  g << "meritmax = " << g.vfmax("d->merit_mem+1", g.min(str(merit_memsize_),
1332  "iter_count")+"-1", "d->merit_mem[0]") << "\n";*/
1333  g.comment("Stepsize");
1334  g << "t = 1.0;\n";
1335  g.local("fk_cand", "casadi_real");
1336  g.comment("Merit function value in candidate");
1337  g.local("l1_cand", "casadi_real");
1338  g << "l1_cand = 0.0;\n";
1339  g.comment("Reset line-search counter, success marker");
1340  g << "ls_iter = 0;\n";
1341  if (elastic_mode_ && max_iter_ls_) g << "ls_success = 1;\n";
1342  g.comment("Line-search loop");
1343  g << "while (1) {\n";
1344  g.comment(" Increase counter");
1345  g << "ls_iter++;\n";
1346 
1347  g.comment("Candidate step");
1348  g << g.copy("d_nlp.z", nx_, "d->z_cand") << "\n";
1349  g << g.axpy(nx_, "t", "d->dx", "d->z_cand") << "\n";
1350  g.comment("Evaluating objective and constraints");
1351  g << "d->arg[0] = d->z_cand;\n";
1352  g << "d->arg[1] = d_nlp.p;\n";
1353  g << "d->res[0] = &fk_cand;\n";
1354  g << "d->res[1] = d->z_cand+" + str(nx_) + ";\n";
1355  std::string nlp_fg = g(get_function("nlp_fg"), "d->arg", "d->res", "d->iw", "d->w");
1356  g << "if (" << nlp_fg << ") {\n";
1357  g.comment("Avoid infinite recursion");
1358  g << "if (ls_iter == " << max_iter_ls_ << ") {\n";
1359  if (elastic_mode_ && max_iter_ls_) g << "ls_success = 0;\n";
1360  g << "break;\n";
1361  g << "}\n";
1362  g.comment("line-search failed, skip iteration");
1363  g << "t = " << beta_ << "* t;\n";
1364  g << "continue;\n";
1365  g << "}\n";
1366 
1367  g.comment("Calculating merit-function in candidate");
1368  g << "l1_cand = fk_cand + sigma * "
1369  << g.sum_viol(nx_+ng_, "d->z_cand", "d_nlp.lbz", "d_nlp.ubz") + ";\n";
1370  g << "if (l1_cand <= l1 + t * " << c1_ << "* tl1) {\n";
1371  g << "break;\n";
1372  g << "}\n";
1373  g.comment("Line-search not successful, but we accept it.");
1374  g << "if (ls_iter == " << max_iter_ls_ << ") {\n";
1375  if (elastic_mode_ && max_iter_ls_) g << "ls_success = 0;\n";
1376  g << "break;\n";
1377  g << "}\n";
1378  g.comment("Backtracking");
1379  g << "t = " << beta_ << "* t;\n";
1380  g << "}\n";
1381  g.comment("Candidate accepted, update dual variables");
1382  g << g.scal(nx_+ng_, "1-t", "d_nlp.lam") << "\n";
1383  g << g.axpy(nx_+ng_, "t", "d->dlam", "d_nlp.lam") << "\n";
1384  g << g.scal(nx_, "t", "d->dx") << "\n";
1385  } else {
1386  g.comment("Full step");
1387  g << g.copy("d->dlam", nx_ + ng_, "d_nlp.lam") << "\n";
1388  }
1389 
1390  g.comment("Take step");
1391  g << g.axpy(nx_, "1.0", "d->dx", "d_nlp.z") << "\n";
1392 
1393  if (!exact_hessian_) {
1394  g.comment("Evaluate the gradient of the Lagrangian with the old x but new lam (for BFGS)");
1395  g << g.copy("d->gf", nx_, "d->gLag_old") << "\n";
1396  g << g.mv("d->Jk", Asp_, "d_nlp.lam+"+str(nx_), "d->gLag_old", true) << "\n";
1397  g << g.axpy(nx_, "1.0", "d_nlp.lam", "d->gLag_old") << "\n";
1398  }
1399 
1400  if (elastic_mode_ && max_iter_ls_ > 0) {
1401  g.comment("If linesearch failed enter elastic mode");
1402  g << "if (ls_success == 0 && ela_it == -1) {\n";
1403  g << "ela_it = 0;\n";
1405  g << "}\n";
1406  }
1407  g << "}\n";
1408  codegen_body_exit(g);
1409 }
1410 void Sqpmethod::codegen_qp_solve(CodeGenerator& cg, const std::string& H, const std::string& g,
1411  const std::string& lbdz, const std::string& ubdz,
1412  const std::string& A, const std::string& x_opt, const std::string& dlam, int mode) const {
1413  for (casadi_int i=0;i<qpsol_.n_in();++i) cg << "d->arg[" << i << "] = 0;\n";
1414  cg << "d->arg[" << CONIC_H << "] = " << H << ";\n";
1415  cg << "d->arg[" << CONIC_G << "] = " << g << ";\n";
1416  cg << "d->arg[" << CONIC_X0 << "] = " << x_opt << ";\n";
1417  cg << "d->arg[" << CONIC_LAM_X0 << "] = " << dlam << ";\n";
1418  cg << "d->arg[" << CONIC_LAM_A0 << "] = " << dlam << "+" << nx_ << ";\n";
1419  cg << "d->arg[" << CONIC_LBX << "] = " << lbdz << ";\n";
1420  cg << "d->arg[" << CONIC_UBX << "] = " << ubdz << ";\n";
1421  cg << "d->arg[" << CONIC_A << "] = " << A << ";\n";
1422  cg << "d->arg[" << CONIC_LBA << "] = " << lbdz << "+" << nx_ << ";\n";
1423  cg << "d->arg[" << CONIC_UBA << "] = " << ubdz << "+" << nx_ << ";\n";
1424  for (casadi_int i=0;i<qpsol_.n_out();++i) cg << "d->res[" << i << "] = 0;\n";
1425  cg << "d->res[" << CONIC_X << "] = " << x_opt << ";\n";
1426  cg << "d->res[" << CONIC_LAM_X << "] = " << dlam << ";\n";
1427  cg << "d->res[" << CONIC_LAM_A << "] = " << dlam << "+" << nx_ << ";\n";
1428  std::string flag = cg(qpsol_, "d->arg", "d->res", "d->iw", "d->w");
1429  cg << "ret = " << flag << ";\n";
1430  cg << "if (ret == -1000) return -1000;\n"; // equivalent to raise Exception
1431 }
1432 
1433 void Sqpmethod::codegen_qp_ela_solve(CodeGenerator& cg, const std::string& H,
1434  const std::string& g, const std::string& lbdz, const std::string& ubdz,
1435  const std::string& A, const std::string& x_opt, const std::string& dlam) const {
1436  for (casadi_int i=0;i<qpsol_ela_.n_in();++i) cg << "d->arg[" << i << "] = 0;\n";
1437  cg << "d->arg[" << CONIC_H << "] = " << H << ";\n";
1438  cg << "d->arg[" << CONIC_G << "] = " << g << ";\n";
1439  cg << "d->arg[" << CONIC_X0 << "] = " << x_opt << ";\n";
1440  cg << "d->arg[" << CONIC_LAM_X0 << "] = " << dlam << ";\n";
1441  cg << "d->arg[" << CONIC_LAM_A0 << "] = " << dlam << "+" << nx_+2*ng_ << ";\n";
1442  cg << "d->arg[" << CONIC_LBX << "] = " << lbdz << ";\n";
1443  cg << "d->arg[" << CONIC_UBX << "] = " << ubdz << ";\n";
1444  cg << "d->arg[" << CONIC_A << "] = " << A << ";\n";
1445  cg << "d->arg[" << CONIC_LBA << "] = " << lbdz << "+" << nx_+2*ng_ << ";\n";
1446  cg << "d->arg[" << CONIC_UBA << "] = " << ubdz << "+" << nx_+2*ng_ << ";\n";
1447  for (casadi_int i=0;i<qpsol_.n_out();++i) cg << "d->res[" << i << "] = 0;\n";
1448  cg << "d->res[" << CONIC_X << "] = " << x_opt << ";\n";
1449  cg << "d->res[" << CONIC_LAM_X << "] = " << dlam << ";\n";
1450  cg << "d->res[" << CONIC_LAM_A << "] = " << dlam << "+" << nx_+2*ng_ << ";\n";
1451  std::string flag = cg(qpsol_ela_, "d->arg", "d->res", "d->iw", "d->w");
1452  cg << "ret = " << flag << ";\n";
1453  cg << "if (ret == -1000) return -1000;\n"; // equivalent to raise Exception
1454 }
1455 
// NOTE(review): body of Sqpmethod::codegen_solve_elastic_mode(CodeGenerator& cg, int mode);
// the signature line was lost in extraction. Emits C code mirroring the runtime
// solve_elastic_mode(): enlarge the QP with slack variables and solve it.
1457  cg << "double gamma = 0.;\n";
1458 
// mode == 0: a fresh elastic iteration, advance the elastic iteration counter.
1459  if (mode == 0) cg << "ela_it++;\n";
1460 
1461  cg.comment("Temp datastructs for data copy");
1462  cg << "double *temp_1, *temp_2;\n";
1463 
1464  cg.comment("Make larger jacobian (has 2 extra diagonal matrices with -1 and 1 respectively)");
1465  cg << "temp_1 = d->Jk + " << Asp_.nnz() << ";\n";
1466  cg << cg.fill("temp_1", ng_, "-1.") << ";\n";
1467  cg << "temp_1 += " << ng_ << ";\n";
1468  cg << cg.fill("temp_1", ng_, "1.") << ";\n";
1469 
// Bounds layout for the elastic QP: [x bounds | 2*ng_ slack bounds | constraint bounds].
// Move the constraint bounds to their slot, slacks get [0, inf).
1470  cg.comment("Initialize bounds");
1471  cg << "temp_1 = d->lbdz + " << nx_ << ";\n";
1472  cg << "temp_2 = d->lbdz + " << nx_ + 2*ng_ << ";\n";
1473  cg << cg.copy("temp_1", ng_, "temp_2") << ";\n";
1474  cg << cg.clear("temp_1", 2*ng_) << ";\n";
1475  cg << "temp_1 = d->ubdz + " << nx_ << ";\n";
1476  cg << "temp_2 = d->ubdz + " << nx_ + 2*ng_ << ";\n";
1477  cg << cg.copy("temp_1", ng_, "temp_2") << ";\n";
1478  cg << cg.fill("temp_1", 2*ng_, cg.constant(inf)) << ";\n";
1479 
// Penalty schedule: gamma = 10^(k*(k-1)/2) * gamma_1 after the first elastic
// iteration, i.e. the penalty grows super-exponentially with ela_it.
1480  cg << "if (ela_it > 1) {\n";
1481  cg << "gamma = pow(10, ela_it*(ela_it-1)/2)*gamma_1;\n";
1482  cg << "} else {\n";
1483  cg << "gamma = gamma_1;\n";
1484  cg << "}\n";
// Generated code bails out with -1 once the penalty exceeds gamma_max_
// (the runtime counterpart raises casadi_error instead).
1485  cg << "if (gamma > " << gamma_max_ << ") " << "return -1" << ";\n";
1486 
1487  cg.comment("Make larger gradient (has gamma for slack variables)");
1488  cg << "temp_1 = d->gf + " << nx_ << ";\n";
1489  cg << cg.fill("temp_1", 2*ng_, "gamma") << ";\n";
1490 
// Warm start: x-multipliers go to the front, constraint multipliers to the
// slot after the slack multipliers; slack multipliers start at zero.
1491  cg.comment("Initial guess");
1492  cg << cg.clear("d->dlam", nx_+3*ng_) << "\n";
1493  cg << cg.copy("d_nlp.lam", nx_, "d->dlam") << "\n";
1494  cg << cg.copy("d_nlp.lam+"+str(nx_), ng_, "d->dlam+"+str(nx_+2*ng_)) << "\n";
1495  cg << cg.clear("d->dx", nx_+2*ng_);
1496 
1497  if (init_feasible_) {
1498  cg.comment("Make initial guess feasible on x values");
1499  cg << "for (casadi_int i = 0; i < " << nx_ << "; ++i) {\n";
1500  cg << "if (d->lbdz[i] > 0) d->dx[i] = d->lbdz[i];\n";
1501  cg << "else if (d->ubdz[i] < 0) d->dx[i] = d->ubdz[i];\n";
1502  cg << "}\n";
1503 
// Slacks absorb any remaining constraint violation of the initial point
// (same logic as the runtime solve_elastic_mode).
1504  cg.comment("Make initial guess feasible on constraints by altering slack variables");
1505  cg << cg.mv("d->Jk", Asp_, "d->dx", "d->temp_mem", false) << "\n";
1506  cg << "for (casadi_int i = 0; i < " << ng_ << "; ++i) {\n";
1507  cg << "if (d->ubdz[" << nx_+2*ng_ << "+i]-d->temp_mem[i] < 0) {\n";
1508  cg << "d->dx[" << nx_ << "+i] = -d->ubdz[" << nx_+2*ng_ << "+i]+d->temp_mem[i];\n";
1509  cg << "}\n";
1510 
1511  cg << "if (d->lbdz[" << nx_+2*ng_ << "+i]-d->temp_mem[i] > 0) {\n";
1512  cg << "d->dx[" << nx_+ng_ << "+i] = d->lbdz[" << nx_+2*ng_ << "+i]-d->temp_mem[i];\n";
1513  cg << "}\n";
1514  cg << "}\n";
1515  }
1516 
1517  cg.comment("Solve the QP");
1518  codegen_qp_ela_solve(cg, "d->Bk", "d->gf", "d->lbdz", "d->ubdz", "d->Jk", "d->dx", "d->dlam");
1519 
// Constraint multipliers come back at offset nx_+2*ng_; callers expect them at nx_.
1520  cg.comment("Copy constraint dlam to the right place");
1521  cg << cg.copy("d->dlam+"+str(nx_+2*ng_), ng_, "d->dlam+"+str(nx_)) << "\n";
1522 
1523 }
1524 
// NOTE(review): body of Sqpmethod::codegen_calc_gamma_1(CodeGenerator& cg);
// the signature line was lost in extraction. Generated counterpart of
// calc_gamma_1(): gamma_1 = max(gamma_0 * ||grad f||_inf, gamma_1_min).
1526  cg << "temp_norm = " << gamma_0_ << "*" << cg.norm_inf(nx_, "d->gf") << ";\n";
1527  cg << "gamma_1 = " << cg.fmax("temp_norm", str(gamma_1_min_)) << ";\n";
1528 }
1529 
1530 Dict Sqpmethod::get_stats(void* mem) const {
1531  Dict stats = Nlpsol::get_stats(mem);
1532  auto m = static_cast<SqpmethodMemory*>(mem);
1533  stats["return_status"] = m->return_status;
1534  stats["iter_count"] = m->iter_count;
1535  return stats;
1536 }
1537 
// NOTE(review): body of the deserializing constructor
// Sqpmethod::Sqpmethod(DeserializingStream& s); the signature line was lost
// in extraction. Reads fields in the exact order serialize_body() writes them.
1539  int version = s.version("Sqpmethod", 1, 3);
1540  s.unpack("Sqpmethod::qpsol", qpsol_);
// The elastic QP solver only exists in serialization format 3 and later.
1541  if (version>=3) {
1542  s.unpack("Sqpmethod::qpsol_ela", qpsol_ela_);
1543  }
1544  s.unpack("Sqpmethod::exact_hessian", exact_hessian_);
1545  s.unpack("Sqpmethod::max_iter", max_iter_);
1546  s.unpack("Sqpmethod::min_iter", min_iter_);
1547  s.unpack("Sqpmethod::lbfgs_memory", lbfgs_memory_);
1548  s.unpack("Sqpmethod::tol_pr_", tol_pr_);
1549  s.unpack("Sqpmethod::tol_du_", tol_du_);
1550  s.unpack("Sqpmethod::min_step_size_", min_step_size_);
1551  s.unpack("Sqpmethod::c1", c1_);
1552  s.unpack("Sqpmethod::beta", beta_);
1553  s.unpack("Sqpmethod::max_iter_ls_", max_iter_ls_);
1554  s.unpack("Sqpmethod::merit_memsize_", merit_memsize_);
// NOTE(review): beta is unpacked a second time here; serialize_body() also
// packs it twice, so the stream stays consistent — do not remove one side only.
1555  s.unpack("Sqpmethod::beta", beta_);
1556  s.unpack("Sqpmethod::print_header", print_header_);
1557  s.unpack("Sqpmethod::print_iteration", print_iteration_);
1558  s.unpack("Sqpmethod::print_status", print_status_);
1559 
// Elastic-mode options (format 3+); older streams get inert defaults.
1560  if (version>=3) {
1561  s.unpack("Sqpmethod::elastic_mode", elastic_mode_);
1562  s.unpack("Sqpmethod::gamma_0", gamma_0_);
1563  s.unpack("Sqpmethod::gamma_max", gamma_max_);
1564  s.unpack("Sqpmethod::gamma_1_min", gamma_1_min_);
1565  s.unpack("Sqpmethod::init_feasible", init_feasible_);
1566  s.unpack("Sqpmethod::so_corr", so_corr_);
1567  } else {
1568  elastic_mode_ = false;
1569  gamma_0_ = 0;
1570  gamma_max_ = 0;
1571  gamma_1_min_ = 0;
1572  init_feasible_ = false;
1573  so_corr_ = false;
1574  }
1575 
1576  s.unpack("Sqpmethod::Hsp", Hsp_);
// Format 1 carried an extra Hessian sparsity; read and discard it.
1577  if (version==1) {
1578  Sparsity Hrsp;
1579  s.unpack("Sqpmethod::Hrsp", Hrsp);
1580  }
1581  s.unpack("Sqpmethod::Asp", Asp_);
// Format 1 stored convexification settings inline; newer formats delegate to
// Convexify. Read-and-discard keeps the stream position correct.
1582  if (version==1) {
1583  double convexify_margin;
1584  s.unpack("Sqpmethod::convexify_margin", convexify_margin);
1585  char convexify_strategy;
1586  s.unpack("Sqpmethod::convexify_strategy", convexify_strategy);
// NOTE(review): "deserializtion" typo in the message below; it is a runtime
// string, left untouched here — fix separately if desired.
1587  casadi_assert(convexify_strategy==0, "deserializtion failed.");
1588  bool Hsp_project;
1589  s.unpack("Sqpmethod::Hsp_project", Hsp_project);
1590  bool scc_transform;
1591  s.unpack("Sqpmethod::scc_transform", scc_transform);
1592  std::vector<casadi_int> scc_offset;
1593  s.unpack("Sqpmethod::scc_offset", scc_offset);
1594  std::vector<casadi_int> scc_mapping;
1595  s.unpack("Sqpmethod::scc_mapping", scc_mapping);
1596  casadi_int max_iter_eig;
1597  s.unpack("Sqpmethod::max_iter_eig", max_iter_eig);
1598  casadi_int block_size;
1599  s.unpack("Sqpmethod::block_size", block_size);
1600  Sparsity scc_sp;
1601  s.unpack("Sqpmethod::scc_sp", scc_sp);
1602  convexify_ = false;
1603  }
1604  if (version>=2) {
1605  s.unpack("Sqpmethod::convexify", convexify_);
1606  if (convexify_) Convexify::deserialize(s, "Sqpmethod::", convexify_data_);
1607  }
// Rebuild the derived problem struct from the freshly-restored members.
1608  set_sqpmethod_prob();
1609 }
1610 
// NOTE(review): body of Sqpmethod::serialize_body(SerializingStream& s) const;
// the signature line was lost in extraction. Writes format version 3; field
// order must match the deserializing constructor exactly.
1613  s.version("Sqpmethod", 3);
1614  s.pack("Sqpmethod::qpsol", qpsol_);
1615  s.pack("Sqpmethod::qpsol_ela", qpsol_ela_);
1616  s.pack("Sqpmethod::exact_hessian", exact_hessian_);
1617  s.pack("Sqpmethod::max_iter", max_iter_);
1618  s.pack("Sqpmethod::min_iter", min_iter_);
1619  s.pack("Sqpmethod::lbfgs_memory", lbfgs_memory_);
1620  s.pack("Sqpmethod::tol_pr_", tol_pr_);
1621  s.pack("Sqpmethod::tol_du_", tol_du_);
1622  s.pack("Sqpmethod::min_step_size_", min_step_size_);
1623  s.pack("Sqpmethod::c1", c1_);
1624  s.pack("Sqpmethod::beta", beta_);
1625  s.pack("Sqpmethod::max_iter_ls_", max_iter_ls_);
1626  s.pack("Sqpmethod::merit_memsize_", merit_memsize_);
// NOTE(review): beta is packed a second time here; the deserializer unpacks it
// twice as well, so this duplicate is part of the stream format — removing
// only one side would corrupt deserialization.
1627  s.pack("Sqpmethod::beta", beta_);
1628  s.pack("Sqpmethod::print_header", print_header_);
1629  s.pack("Sqpmethod::print_iteration", print_iteration_);
1630  s.pack("Sqpmethod::print_status", print_status_);
1631 
// Elastic-mode options (introduced with format 3).
1632  s.pack("Sqpmethod::elastic_mode", elastic_mode_);
1633  s.pack("Sqpmethod::gamma_0", gamma_0_);
1634  s.pack("Sqpmethod::gamma_max", gamma_max_);
1635  s.pack("Sqpmethod::gamma_1_min", gamma_1_min_);
1636 
1637  s.pack("Sqpmethod::init_feasible", init_feasible_);
1638  s.pack("Sqpmethod::so_corr", so_corr_);
1639 
1640  s.pack("Sqpmethod::Hsp", Hsp_);
1641  s.pack("Sqpmethod::Asp", Asp_);
// Convexification settings are delegated to Convexify when enabled.
1642  s.pack("Sqpmethod::convexify", convexify_);
1643  if (convexify_) Convexify::serialize(s, "Sqpmethod::", convexify_data_);
1644 }
1645 
1646 } // namespace casadi
Helper class for C code generation.
std::string fill(const std::string &res, std::size_t n, const std::string &v)
Create a fill operation.
std::string axpy(casadi_int n, const std::string &a, const std::string &x, const std::string &y)
Codegen axpy: y += a*x.
std::string add_dependency(const Function &f)
Add a function dependency.
std::string copy(const std::string &arg, std::size_t n, const std::string &res)
Create a copy operation.
void comment(const std::string &s)
Write a comment line (ignored if not verbose)
std::string constant(const std::vector< casadi_int > &v)
Represent an array constant; adding it when new.
std::string scal(casadi_int n, const std::string &alpha, const std::string &x)
What does scal do??
std::string sum_viol(casadi_int n, const std::string &x, const std::string &lb, const std::string &ub)
sum_viol
std::string mv(const std::string &x, const Sparsity &sp_x, const std::string &y, const std::string &z, bool tr)
Codegen sparse matrix-vector multiplication.
void local(const std::string &name, const std::string &type, const std::string &ref="")
Declare a local variable.
std::string norm_inf(casadi_int n, const std::string &x)
norm_inf
void init_local(const std::string &name, const std::string &def)
Specify the default value for a local variable.
std::string dot(casadi_int n, const std::string &x, const std::string &y)
Codegen inner product.
std::string max_viol(casadi_int n, const std::string &x, const std::string &lb, const std::string &ub)
max_viol
std::string convexify_eval(const ConvexifyData &d, const std::string &Hin, const std::string &Hout, const std::string &iw, const std::string &w)
convexify
std::string sparsity(const Sparsity &sp, bool canonical=true)
std::string fmax(const std::string &x, const std::string &y)
fmax
std::string clear(const std::string &res, std::size_t n)
Create a fill operation.
void add_auxiliary(Auxiliary f, const std::vector< std::string > &inst={"casadi_real"})
Add a built-in auxiliary function.
static void serialize(SerializingStream &s, const std::string &prefix, const ConvexifyData &d)
Definition: convexify.cpp:111
static Sparsity setup(ConvexifyData &d, const Sparsity &H, const Dict &opts=Dict(), bool inplace=true)
Definition: convexify.cpp:166
static MXNode * deserialize(DeserializingStream &s)
Deserialize without type information.
Definition: convexify.hpp:104
Helper class for Serialization.
void unpack(Sparsity &e)
Reconstruct an object from the input stream.
void version(const std::string &name, int v)
void alloc_iw(size_t sz_iw, bool persistent=false)
Ensure required length of iw field.
std::string codegen_mem(CodeGenerator &g, const std::string &index="mem") const
Get thread-local memory object.
size_t sz_w() const
Get required length of w field.
void alloc_w(size_t sz_w, bool persistent=false)
Ensure required length of w field.
void alloc(const Function &f, bool persistent=false, int num_threads=1)
Ensure work vectors long enough to evaluate function.
size_t sz_iw() const
Get required length of iw field.
Function object.
Definition: function.hpp:60
void release(int mem) const
Release a memory object.
Definition: function.cpp:1777
casadi_int n_out() const
Get the number of function outputs.
Definition: function.cpp:823
casadi_int n_in() const
Get the number of function inputs.
Definition: function.cpp:819
NLP solver storage class.
Definition: nlpsol_impl.hpp:59
void codegen_body_exit(CodeGenerator &g) const override
Generate code for the function body.
Definition: nlpsol.cpp:1269
bool calc_lam_p_
Options.
Definition: nlpsol_impl.hpp:97
Dict get_stats(void *mem) const override
Get all statistics.
Definition: nlpsol.cpp:1162
static const Options options_
Options.
void codegen_body_enter(CodeGenerator &g) const override
Generate code for the function body.
Definition: nlpsol.cpp:1179
void codegen_declarations(CodeGenerator &g) const override
Generate code for the declarations of the C function.
Definition: nlpsol.cpp:1250
void init(const Dict &opts) override
Initialize.
Definition: nlpsol.cpp:420
casadi_int ng_
Number of constraints.
Definition: nlpsol_impl.hpp:69
int init_mem(void *mem) const override
Initalize memory block.
Definition: nlpsol.cpp:603
casadi_nlpsol_prob< double > p_nlp_
Definition: nlpsol_impl.hpp:63
void serialize_body(SerializingStream &s) const override
Serialize an object without type information.
Definition: nlpsol.cpp:1306
bool calc_f_
Options.
Definition: nlpsol_impl.hpp:97
bool calc_g_
Options.
Definition: nlpsol_impl.hpp:97
bool calc_lam_x_
Options.
Definition: nlpsol_impl.hpp:97
int callback(NlpsolMemory *m) const
Definition: nlpsol.cpp:1121
casadi_int nx_
Number of variables.
Definition: nlpsol_impl.hpp:66
void set_work(void *mem, const double **&arg, double **&res, casadi_int *&iw, double *&w) const override
Set the (persistent) work vectors.
Definition: nlpsol.cpp:795
void set_function(const Function &fcn, const std::string &fname, bool jit=false)
Function create_function(const Function &oracle, const std::string &fname, const std::vector< std::string > &s_in, const std::vector< std::string > &s_out, const Function::AuxOut &aux=Function::AuxOut(), const Dict &opts=Dict())
int calc_function(OracleMemory *m, const std::string &fcn, const double *const *arg=nullptr, int thread_id=0) const
std::vector< std::string > get_function() const override
Get list of dependency functions.
bool has_function(const std::string &fname) const override
static void registerPlugin(const Plugin &plugin, bool needs_lock=true)
Register an integrator in the factory.
void print(const char *fmt,...) const
C-style formatted printing during evaluation.
int checkout() const
Checkout a memory object.
void * memory(int ind) const
Memory objects.
bool verbose_
Verbose printout.
void clear_mem()
Clear all memory (called from destructor)
Helper class for Serialization.
void version(const std::string &name, int v)
void pack(const Sparsity &e)
Serializes an object to the output stream.
General sparsity class.
Definition: sparsity.hpp:106
void enlarge(casadi_int nrow, casadi_int ncol, const std::vector< casadi_int > &rr, const std::vector< casadi_int > &cc, bool ind1=false)
Enlarge matrix.
Definition: sparsity.cpp:544
static Sparsity diag(casadi_int nrow)
Create diagonal sparsity pattern.
Definition: sparsity.hpp:190
static Sparsity dense(casadi_int nrow, casadi_int ncol=1)
Create a dense rectangular sparsity pattern.
Definition: sparsity.cpp:1012
casadi_int nnz() const
Get the number of (structural) non-zeros.
Definition: sparsity.cpp:148
void appendColumns(const Sparsity &sp)
Append another sparsity pattern horizontally.
Definition: sparsity.cpp:499
bool is_symmetric() const
Is symmetric?
Definition: sparsity.cpp:317
void codegen_qp_ela_solve(CodeGenerator &cg, const std::string &H, const std::string &g, const std::string &lbdz, const std::string &ubdz, const std::string &A, const std::string &x_opt, const std::string &dlam) const
Definition: sqpmethod.cpp:1433
double calc_gamma_1(SqpmethodMemory *m) const
Definition: sqpmethod.cpp:1018
static ProtoFunction * deserialize(DeserializingStream &s)
Deserialize into MX.
Definition: sqpmethod.hpp:239
Function qpsol_
QP solver for the subproblems.
Definition: sqpmethod.hpp:116
bool init_feasible_
Initialize feasible qp's.
Definition: sqpmethod.hpp:146
void print_iteration() const
Print iteration header.
Definition: sqpmethod.cpp:811
casadi_int lbfgs_memory_
Memory size of L-BFGS method.
Definition: sqpmethod.hpp:131
virtual int solve_QP(SqpmethodMemory *m, const double *H, const double *g, const double *lbdz, const double *ubdz, const double *A, double *x_opt, double *dlam, int mode) const
Definition: sqpmethod.cpp:844
static Nlpsol * creator(const std::string &name, const Function &nlp)
Create a new NLP Solver.
Definition: sqpmethod.hpp:80
casadi_int min_iter_
Definition: sqpmethod.hpp:128
virtual int solve_ela_QP(SqpmethodMemory *m, const double *H, const double *g, const double *lbdz, const double *ubdz, const double *A, double *x_opt, double *dlam) const
Definition: sqpmethod.cpp:885
void codegen_qp_solve(CodeGenerator &cg, const std::string &H, const std::string &g, const std::string &lbdz, const std::string &ubdz, const std::string &A, const std::string &x_opt, const std::string &dlam, int mode) const
Definition: sqpmethod.cpp:1410
~Sqpmethod() override
Definition: sqpmethod.cpp:62
double gamma_0_
Initial and maximum penalty parameter for elastic mode.
Definition: sqpmethod.hpp:143
bool elastic_mode_
Elastic mode.
Definition: sqpmethod.hpp:140
int solve(void *mem) const override
Definition: sqpmethod.cpp:399
casadi_sqpmethod_prob< double > p_
Definition: sqpmethod.hpp:113
void set_work(void *mem, const double **&arg, double **&res, casadi_int *&iw, double *&w) const override
Set the (persistent) work vectors.
Definition: sqpmethod.cpp:368
ConvexifyData convexify_data_
Data for convexification.
Definition: sqpmethod.hpp:166
static const Options options_
Options.
Definition: sqpmethod.hpp:86
virtual int solve_elastic_mode(SqpmethodMemory *m, casadi_int *ela_it, double gamma_1, casadi_int ls_iter, bool ls_success, bool so_succes, double pr_inf, double du_inf, double dx_norminf, std::string *info, int mode) const
Definition: sqpmethod.cpp:926
void codegen_solve_elastic_mode(CodeGenerator &cg, int mode) const
Definition: sqpmethod.cpp:1456
Dict get_stats(void *mem) const override
Get all statistics.
Definition: sqpmethod.cpp:1530
int init_mem(void *mem) const override
Initialize memory block.
Definition: sqpmethod.cpp:381
casadi_int merit_memsize_
Definition: sqpmethod.hpp:153
void free_mem(void *mem) const override
Free memory block.
Definition: sqpmethod.cpp:393
double min_step_size_
Minimum step size allowed.
Definition: sqpmethod.hpp:137
casadi_int max_iter_
Maximum, minimum number of SQP iterations.
Definition: sqpmethod.hpp:128
bool convexify_
convexify?
Definition: sqpmethod.hpp:169
casadi_int max_iter_ls_
Definition: sqpmethod.hpp:152
bool exact_hessian_
Exact Hessian?
Definition: sqpmethod.hpp:122
void codegen_body(CodeGenerator &g) const override
Generate code for the function body.
Definition: sqpmethod.cpp:1036
void codegen_calc_gamma_1(CodeGenerator &cg) const
Definition: sqpmethod.cpp:1525
void init(const Dict &opts) override
Initialize.
Definition: sqpmethod.cpp:157
void serialize_body(SerializingStream &s) const override
Serialize an object without type information.
Definition: sqpmethod.cpp:1611
static const std::string meta_doc
A documentation string.
Definition: sqpmethod.hpp:232
void codegen_declarations(CodeGenerator &g) const override
Generate code for the declarations of the C function.
Definition: sqpmethod.cpp:1023
Sqpmethod(const std::string &name, const Function &nlp)
Definition: sqpmethod.cpp:58
Function qpsol_ela_
QP solver for elastic mode subproblems.
Definition: sqpmethod.hpp:119
double tol_pr_
Tolerance of primal and dual infeasibility.
Definition: sqpmethod.hpp:134
Function conic(const std::string &name, const std::string &solver, const SpDict &qp, const Dict &opts)
Definition: conic.cpp:44
The casadi namespace.
Definition: archiver.cpp:28
T1 casadi_max_viol(casadi_int n, const T1 *x, const T1 *lb, const T1 *ub)
Largest bound violation.
std::vector< casadi_int > range(casadi_int start, casadi_int stop, casadi_int step, casadi_int len)
Range function.
@ CONIC_UBA
dense, (nc x 1)
Definition: conic.hpp:181
@ CONIC_X0
dense, (n x 1)
Definition: conic.hpp:187
@ CONIC_A
The matrix A: sparse, (nc x n) - product with x must be dense.
Definition: conic.hpp:177
@ CONIC_G
The vector g: dense, (n x 1)
Definition: conic.hpp:175
@ CONIC_LBA
dense, (nc x 1)
Definition: conic.hpp:179
@ CONIC_UBX
dense, (n x 1)
Definition: conic.hpp:185
@ CONIC_H
Definition: conic.hpp:173
@ CONIC_LAM_A0
dense
Definition: conic.hpp:191
@ CONIC_LBX
dense, (n x 1)
Definition: conic.hpp:183
@ CONIC_LAM_X0
dense
Definition: conic.hpp:189
T1 casadi_bilin(const T1 *A, const casadi_int *sp_A, const T1 *x, const T1 *y)
T1 casadi_sum_viol(casadi_int n, const T1 *x, const T1 *lb, const T1 *ub)
Sum of bound violations.
void casadi_copy(const T1 *x, casadi_int n, T1 *y)
COPY: y <-x.
void casadi_fill(T1 *x, casadi_int n, T1 alpha)
FILL: x <- alpha.
std::string str(const T &v)
String representation, any type.
GenericType::Dict Dict
C++ equivalent of Python's dict or MATLAB's struct.
const double inf
infinity
Definition: calculus.hpp:50
int CASADI_NLPSOL_SQPMETHOD_EXPORT casadi_register_nlpsol_sqpmethod(Nlpsol::Plugin *plugin)
Definition: sqpmethod.cpp:43
T1 casadi_dot(casadi_int n, const T1 *x, const T1 *y)
Inner product.
const double nan
Not a number.
Definition: calculus.hpp:53
void casadi_scal(casadi_int n, T1 alpha, T1 *x)
SCAL: x <- alpha*x.
void casadi_axpy(casadi_int n, T1 alpha, const T1 *x, T1 *y)
AXPY: y <- a*x + y.
void CASADI_NLPSOL_SQPMETHOD_EXPORT casadi_load_nlpsol_sqpmethod()
Definition: sqpmethod.cpp:54
T1 casadi_norm_inf(casadi_int n, const T1 *x)
void casadi_clear(T1 *x, casadi_int n)
CLEAR: x <- 0.
void casadi_mv(const T1 *x, const casadi_int *sp_x, const T1 *y, T1 *z, casadi_int tr)
Sparse matrix-vector multiplication: z <- z + x*y.
@ SOLVER_RET_NAN
@ SOLVER_RET_INFEASIBLE
@ SOLVER_RET_LIMITED
@ CONIC_X
The primal solution.
Definition: conic.hpp:201
@ CONIC_LAM_A
The dual solution corresponding to linear bounds.
Definition: conic.hpp:205
@ CONIC_COST
The optimal cost.
Definition: conic.hpp:203
@ CONIC_LAM_X
The dual solution corresponding to simple bounds.
Definition: conic.hpp:207
casadi_convexify_config< double > config
Definition: mx.hpp:61
casadi_int sz_iw
Definition: mx.hpp:62
casadi_int sz_w
Definition: mx.hpp:63
casadi_nlpsol_data< double > d_nlp
Definition: nlpsol_impl.hpp:42
Options metadata for a class.
Definition: options.hpp:40
std::map< std::string, FStats > fstats
void add_stat(const std::string &s)
int iter_count
Iteration count.
Definition: sqpmethod.hpp:61
const char * return_status
Last return status.
Definition: sqpmethod.hpp:58
casadi_sqpmethod_data< double > d
Definition: sqpmethod.hpp:46
double reg
Hessian regularization.
Definition: sqpmethod.hpp:51
const casadi_nlpsol_prob< T1 > * nlp
const casadi_int * sp_h
const casadi_int * sp_a