nlpsol.cpp
1 /*
2  * This file is part of CasADi.
3  *
4  * CasADi -- A symbolic framework for dynamic optimization.
5  * Copyright (C) 2010-2023 Joel Andersson, Joris Gillis, Moritz Diehl,
6  * KU Leuven. All rights reserved.
7  * Copyright (C) 2011-2014 Greg Horn
8  *
9  * CasADi is free software; you can redistribute it and/or
10  * modify it under the terms of the GNU Lesser General Public
11  * License as published by the Free Software Foundation; either
12  * version 3 of the License, or (at your option) any later version.
13  *
14  * CasADi is distributed in the hope that it will be useful,
15  * but WITHOUT ANY WARRANTY; without even the implied warranty of
16  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17  * Lesser General Public License for more details.
18  *
19  * You should have received a copy of the GNU Lesser General Public
20  * License along with CasADi; if not, write to the Free Software
21  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
22  *
23  */
24 
25 
26 #include "nlpsol_impl.hpp"
27 #include "external.hpp"
28 #include "casadi/core/timing.hpp"
29 #include "nlp_builder.hpp"
30 #include "nlp_tools.hpp"
31 #include <cctype>
32 
33 namespace casadi {
34 
35  bool has_nlpsol(const std::string& name) {
36  return Nlpsol::has_plugin(name);
37  }
38 
39  void load_nlpsol(const std::string& name) {
40  Nlpsol::load_plugin(name);
41  }
42 
43  std::string doc_nlpsol(const std::string& name) {
44  return Nlpsol::getPlugin(name).doc;
45  }
46 
47  bool name_has_g(const std::string& name) {
48  size_t pos = name.find("g");
49 
50  // 'g' does not occur
51  if (pos == std::string::npos) return false;
52 
53  // Check if 'g' has a word boundary on the left
54  bool left = pos==0 || !isalnum(name[pos-1]);
55 
56  // Check if 'g' has a word boundary on the right
57  bool right = pos+1 == name.size() || !isalnum(name[pos+1]);
58 
59  return left && right;
60  }
61 
// Common implementation of the public nlpsol(...) factories for SX and MX.
// When option 'detect_simple_bounds' is set, constraints whose row of the
// Jacobian has a single, linearly-entering nonzero are split off as simple
// variable bounds before the solver is constructed; the remaining
// constraints form the reduced 'g'. X is the expression type (SX or MX).
62  template<class X>
63  Function construct_nlpsol(const std::string& name, const std::string& solver,
64  const std::map<std::string, X>& nlp, const Dict& opts) {
65 
66  if (get_from_dict(opts, "detect_simple_bounds", false)) {
// Missing fields default to empty expressions
67  X x = get_from_dict(nlp, "x", X(0, 1));
68  X p = get_from_dict(nlp, "p", X(0, 1));
69  X f = get_from_dict(nlp, "f", X(0));
70  X g = get_from_dict(nlp, "g", X(0, 1));
71 
72  if (g.size1()>0 || g.size2()>0) {
73  // Dimension checks
74  casadi_assert(g.is_dense() && g.is_vector(),
75  "Expected a dense vector 'g', but got " + g.dim(true) + ".");
76  }
77 
78  // Read dimensions
79  casadi_int ng = g.size1();
80  casadi_int nx = x.size1();
81 
// Transposed, so that each constraint becomes a column of 'sp'
82  // Get constraint Jacobian sparsity
83  Sparsity sp = jacobian_sparsity(g, x).T();
84 
85  // Reset result vector
86  std::vector<bool> is_simple(ng, true);
87 
// which_depends with order 2 marks constraints that depend nonlinearly on x
88  // Check nonlinearity
89  std::vector<bool> is_nonlin = which_depends(g, x, 2, true);
90 
91  const casadi_int* row = sp.colind();
92  for (casadi_int i=0;i<ng;++i) {
93  // Check if each row of jac_g_x only depends on one column
94  bool single_dependency = row[i+1]-row[i]==1;
// Simple = depends on exactly one variable, and only linearly
95  is_simple[i] = single_dependency && !is_nonlin[i];
96  }
97 
98  // Full-indices of all simple constraints
99  std::vector<casadi_int> sgi = boolvec_to_index(is_simple);
// ... and of the remaining (general) constraints
100  std::vector<casadi_int> gi = boolvec_to_index(boolvec_not(is_simple));
101  X g_bounds = g(sgi);
102 
// Each simple constraint is affine in x: g_i = f2_i(p)*x_j + f1_i(p).
// 'gf' evaluates (jacobian-times-ones, value) to recover both parts.
103  // Detect f2(p)x+f1(p)==0
104  Function gf = Function("gf", std::vector<X>{x, p},
105  std::vector<X>{jtimes(g_bounds, x, X::ones(nx, 1)), g_bounds});
106  casadi_assert_dev(!gf.has_free());
107 
// For each simple constraint, the index of the single variable it involves
108  std::vector<casadi_int> target_x;
109  // Loop over all constraints
110  for (casadi_int i=0;i<ng;++i) {
111  // Only treat simple ones
112  if (!is_simple[i]) continue;
113  target_x.push_back(sp.row()[row[i]]);
114  }
115 
// Forward the detection results to the solver instance as internal options
116  Dict nlpsol_opts = opts;
117  nlpsol_opts["detect_simple_bounds_is_simple"] = is_simple;
118  nlpsol_opts["detect_simple_bounds_parts"] = gf;
119  nlpsol_opts["detect_simple_bounds_target_x"] = target_x;
120 
// Cached functions that refer to the full-size 'g' must be re-wrapped so
// that they operate on the reduced (compacted) constraint vector instead.
121  // Check for cache entries
122  Dict cache = get_from_dict(opts, "cache", Dict());
123  for (auto&& e : cache) {
124  const Function& f = e.second;
125 
126  // Check for cached Functions where g appears in in/out
127  bool needs_update = false;
128  for (casadi_int i=0;i<f.n_in();++i) {
129  // Does name contain 'g'?
130  if (name_has_g(f.name_in(i))) {
131  // Already has compacted size
// NOTE(review): size1_in returns casadi_int while gi.size() is size_t;
// a signed/unsigned comparison — benign here since both are nonnegative.
132  if (f.size1_in(i)==gi.size()) continue;
133  // Needs to be compacted
134  if (f.size1_in(i)==ng) needs_update = true;
135  // Structure not understood, fallback to higher-level sanity checking
136  }
137  }
138  for (casadi_int i=0;i<f.n_out();++i) {
139  if (name_has_g(f.name_out(i))) {
140  if (f.size1_out(i)==gi.size()) continue;
141  // Needs to be compacted
142  if (f.size1_out(i)==ng) needs_update = true;
143  // Structure not understood, fallback to higher-level sanity checking
144  }
145  }
146 
147  if (!needs_update) continue;
148 
149  // Arguments to create a wrapper
150  std::vector<X> args = f.sym_in<X>();
151  // Arguments to pass to the cached function
152  std::vector<X> f_args = args;
153 
154  for (casadi_int i=0;i<f.n_in();++i) {
155  if (name_has_g(f.name_in(i))) {
156  // Needs a compacted symbol
157  args[i] = X::sym(f.name_in(i), gi.size());
158 
159  // Scatter the compacted symbol into the full-size constraint vector
160  f_args[i] = X::zeros(ng);
161  f_args[i](gi) = args[i];
162  }
163  }
164 
165  // Perform call
166  std::vector<X> res;
167  f.call(f_args, res, false, false);
168 
169  for (casadi_int i=0;i<f.n_out();++i) {
170  if (name_has_g(f.name_out(i))) {
171  // Select compacted rows out of result
172  res[i] = res[i](gi, casadi::Slice());
173  }
174  }
175 
176  // Create wrapper
177  cache[e.first] = Function(f.name(), args, res, f.name_in(), f.name_out());
178  }
179  // Pass along potentially updated cache
180  nlpsol_opts["cache"] = cache;
181 
// The 'equality' hint refers to full-size g; drop entries for the
// constraints that were converted into simple bounds.
182  if (opts.find("equality")!=opts.end()) {
183  std::vector<bool> equality = opts.find("equality")->second;
184  nlpsol_opts["equality"] = vector_select(equality, is_simple, true);
185  }
186 
// Construct the solver on the reduced NLP (simple constraints removed)
187  std::map<std::string, X> nlpsol_nlp = nlp;
188  nlpsol_nlp["g"] = g(gi);
189  return nlpsol(name, solver, Nlpsol::create_oracle(nlpsol_nlp, opts), nlpsol_opts);
190  } else {
// No bound detection requested: construct the solver on the NLP as-is
191  return nlpsol(name, solver, Nlpsol::create_oracle(nlp, opts), opts);
192  }
193  }
194 
// Create an NLP solver from an SX expression dictionary with fields
// "x", "p", "f", "g" (see construct_nlpsol above).
195  Function nlpsol(const std::string& name, const std::string& solver,
196  const SXDict& nlp, const Dict& opts) {
197  return construct_nlpsol(name, solver, nlp, opts);
198  }
199 
// Create an NLP solver from an MX expression dictionary with fields
// "x", "p", "f", "g" (see construct_nlpsol above).
200  Function nlpsol(const std::string& name, const std::string& solver,
201  const MXDict& nlp, const Dict& opts) {
202  return construct_nlpsol(name, solver, nlp, opts);
203  }
204 
// Build the oracle Function "nlp" : (x, p) -> (f, g) from a dictionary of
// expressions. Unknown dictionary keys are an error; missing outputs get
// defaults (f -> scalar 0, g -> empty column vector).
205  template<typename XType>
206  Function Nlpsol::create_oracle(const std::map<std::string, XType>& d,
207  const Dict& opts) {
208  std::vector<XType> nl_in(NL_NUM_IN), nl_out(NL_NUM_OUT);
// Sort the dictionary entries into the canonical input/output slots
209  for (auto&& i : d) {
210  if (i.first=="x") {
211  nl_in[NL_X]=i.second;
212  } else if (i.first=="p") {
213  nl_in[NL_P]=i.second;
214  } else if (i.first=="f") {
215  nl_out[NL_F]=i.second;
216  } else if (i.first=="g") {
217  nl_out[NL_G]=i.second;
218  } else {
219  casadi_error("No such field: " + i.first);
220  }
221  }
// Default objective is the constant 0; default constraints are empty
222  if (nl_out[NL_F].is_empty()) nl_out[NL_F] = 0;
223  if (nl_out[NL_G].is_empty()) nl_out[NL_G] = XType(0, 1);
224 
225  // Options for the oracle
226  Dict oracle_options;
227  Dict::const_iterator it = opts.find("oracle_options");
228  if (it != opts.end()) {
229  // "oracle_options" has been set
230  oracle_options = it->second;
231  } else {
232  // Propagate selected options from Nlpsol to oracle by default
233  for (const char* op : {"verbose", "regularity_check"})
234  if ((it = opts.find(op)) != opts.end()) {
235  oracle_options[op] = it->second;
236  }
237  }
238 
239  // Create oracle
240  return Function("nlp", nl_in, nl_out, NL_INPUTS, NL_OUTPUTS, oracle_options);
241  }
242 
// Create an NLP solver from an NlpBuilder instance: the builder's variable
// and constraint lists are stacked into single vectors to form an MX NLP.
243  Function nlpsol(const std::string& name, const std::string& solver,
244  const NlpBuilder& nl, const Dict& opts) {
245  MXDict nlp;
246  nlp["x"] = vertcat(nl.x);
247  nlp["f"] = nl.f;
248  nlp["g"] = vertcat(nl.g);
249  return nlpsol(name, solver, nlp, opts);
250  }
251 
252  Function nlpsol(const std::string& name, const std::string& solver,
253  const std::string& fname, const Dict& opts) {
254  // If fname ends with .c, JIT
255  if (fname.size()>2 && fname.compare(fname.size()-2, fname.size(), ".c")==0) {
256  Importer compiler(fname, "clang");
257  return nlpsol(name, solver, compiler, opts);
258  } else {
259  return nlpsol(name, solver, external("nlp", fname), opts);
260  }
261  }
262 
// Create an NLP solver from an already-configured Importer (JIT compiler);
// the compiled code must expose a function named "nlp".
263  Function nlpsol(const std::string& name, const std::string& solver,
264  const Importer& compiler, const Dict& opts) {
265  return nlpsol(name, solver, external("nlp", compiler), opts);
266  }
267 
// Create an NLP solver from an oracle Function (x, p) -> (f, g).
// The oracle may not contain free symbolic variables.
268  Function nlpsol(const std::string& name, const std::string& solver,
269  const Function& nlp, const Dict& opts) {
270  // Make sure that nlp is sound
271  if (nlp.has_free()) {
272  casadi_error("Cannot create '" + name + "' since " + str(nlp.get_free()) + " are free.");
273  }
// Instantiate the requested solver plugin and wrap it in a Function
274  return Function::create(Nlpsol::instantiate(name, solver, nlp), opts);
275  }
276 
277  std::vector<std::string> nlpsol_in() {
278  std::vector<std::string> ret(nlpsol_n_in());
279  for (size_t i=0; i<ret.size(); ++i) ret[i]=nlpsol_in(i);
280  return ret;
281  }
282 
283  std::vector<std::string> nlpsol_out() {
284  std::vector<std::string> ret(nlpsol_n_out());
285  for (size_t i=0; i<ret.size(); ++i) ret[i]=nlpsol_out(i);
286  return ret;
287  }
288 
289  double nlpsol_default_in(casadi_int ind) {
290  switch (ind) {
291  case NLPSOL_LBX:
292  case NLPSOL_LBG:
293  return -std::numeric_limits<double>::infinity();
294  case NLPSOL_UBX:
295  case NLPSOL_UBG:
296  return std::numeric_limits<double>::infinity();
297  default:
298  return 0;
299  }
300  }
301 
302  std::vector<double> nlpsol_default_in() {
303  std::vector<double> ret(nlpsol_n_in());
304  for (size_t i=0; i<ret.size(); ++i) ret[i]=nlpsol_default_in(i);
305  return ret;
306  }
307 
// Name of one NLP solver input; empty string for an out-of-range index.
308  std::string nlpsol_in(casadi_int ind) {
309  switch (static_cast<NlpsolInput>(ind)) {
310  case NLPSOL_X0: return "x0";
311  case NLPSOL_P: return "p";
312  case NLPSOL_LBX: return "lbx";
313  case NLPSOL_UBX: return "ubx";
314  case NLPSOL_LBG: return "lbg";
315  case NLPSOL_UBG: return "ubg";
316  case NLPSOL_LAM_X0: return "lam_x0";
317  case NLPSOL_LAM_G0: return "lam_g0";
318  case NLPSOL_NUM_IN: break;
319  }
// Fall-through for NLPSOL_NUM_IN or an invalid index
320  return std::string();
321  }
322 
// Name of one NLP solver output; empty string for an out-of-range index.
323  std::string nlpsol_out(casadi_int ind) {
324  switch (static_cast<NlpsolOutput>(ind)) {
325  case NLPSOL_X: return "x";
326  case NLPSOL_F: return "f";
327  case NLPSOL_G: return "g";
328  case NLPSOL_LAM_X: return "lam_x";
329  case NLPSOL_LAM_G: return "lam_g";
330  case NLPSOL_LAM_P: return "lam_p";
331  case NLPSOL_NUM_OUT: break;
332  }
// Fall-through for NLPSOL_NUM_OUT or an invalid index
333  return std::string();
334  }
335 
// Number of NLP solver inputs.
336  casadi_int nlpsol_n_in() {
337  return NLPSOL_NUM_IN;
338  }
339 
// Number of NLP solver outputs.
340  casadi_int nlpsol_n_out() {
341  return NLPSOL_NUM_OUT;
342  }
343 
// Constructor: store the oracle in the base class and set the default
// values of all member options (overridden later in init()).
344  Nlpsol::Nlpsol(const std::string& name, const Function& oracle)
345  : OracleFunction(name, oracle) {
346 
347  // Set default options
348  callback_step_ = 1;
349  eval_errors_fatal_ = false;
350  warn_initial_bounds_ = false;
// NOTE(review): original line 351 is missing from this listing — presumably
// another member default (e.g. the callback error flag); confirm upstream.
352  print_time_ = true;
353  calc_multipliers_ = false;
354  bound_consistency_ = false;
355  min_lam_ = 0;
356  calc_lam_x_ = calc_f_ = calc_g_ = false;
357  calc_lam_p_ = true;
358  no_nlp_grad_ = false;
359  error_on_fail_ = false;
360  sens_linsol_ = "qr";
361  }
362 
364  clear_mem();
365  }
366 
367  bool Nlpsol::is_a(const std::string& type, bool recursive) const {
368  return type=="Nlpsol" || (recursive && OracleFunction::is_a(type, recursive));
369  }
370 
372  switch (static_cast<NlpsolInput>(i)) {
373  case NLPSOL_X0:
374  case NLPSOL_LBX:
375  case NLPSOL_UBX:
376  case NLPSOL_LAM_X0:
377  return get_sparsity_out(NLPSOL_X);
378  case NLPSOL_LBG:
379  case NLPSOL_UBG:
380  case NLPSOL_LAM_G0:
381  return get_sparsity_out(NLPSOL_G);
382  case NLPSOL_P:
383  return oracle_.sparsity_in(NL_P);
384  case NLPSOL_NUM_IN: break;
385  }
386  return Sparsity();
387  }
388 
390  switch (static_cast<NlpsolOutput>(i)) {
391  case NLPSOL_F:
392  return oracle_.sparsity_out(NL_F);
393  case NLPSOL_X:
394  case NLPSOL_LAM_X:
395  return oracle_.sparsity_in(NL_X);
396  case NLPSOL_LAM_G:
397  case NLPSOL_G:
398  if (detect_simple_bounds_is_simple_.empty()) {
399  return oracle_.sparsity_out(NL_G);
400  } else {
402  }
403  case NLPSOL_LAM_P:
404  return get_sparsity_in(NLPSOL_P);
405  case NLPSOL_NUM_OUT: break;
406  }
407  return Sparsity();
408  }
409 
412  {{"iteration_callback",
413  {OT_FUNCTION,
414  "A function that will be called at each iteration with the solver as input. "
415  "Check documentation of Callback."}},
416  {"iteration_callback_step",
417  {OT_INT,
418  "Only call the callback function every few iterations."}},
419  {"iteration_callback_ignore_errors",
420  {OT_BOOL,
421  "If set to true, errors thrown by iteration_callback will be ignored."}},
422  {"ignore_check_vec",
423  {OT_BOOL,
424  "If set to true, the input shape of F will not be checked."}},
425  {"warn_initial_bounds",
426  {OT_BOOL,
427  "Warn if the initial guess does not satisfy LBX and UBX"}},
428  {"eval_errors_fatal",
429  {OT_BOOL,
430  "When errors occur during evaluation of f,g,...,"
431  "stop the iterations"}},
432  {"verbose_init",
433  {OT_BOOL,
434  "Print out timing information about "
435  "the different stages of initialization"}},
436  {"discrete",
437  {OT_BOOLVECTOR,
438  "Indicates which of the variables are discrete, i.e. integer-valued"}},
439  {"equality",
440  {OT_BOOLVECTOR,
441  "Indicate an upfront hint which of the constraints are equalities. "
442  "Some solvers may be able to exploit this knowledge. "
443  "When true, the corresponding lower and upper bounds are assumed equal. "
444  "When false, the corresponding bounds may be equal or different."}},
445  {"calc_multipliers",
446  {OT_BOOL,
447  "Calculate Lagrange multipliers in the Nlpsol base class"}},
448  {"calc_lam_x",
449  {OT_BOOL,
450  "Calculate 'lam_x' in the Nlpsol base class"}},
451  {"calc_lam_p",
452  {OT_BOOL,
453  "Calculate 'lam_p' in the Nlpsol base class"}},
454  {"calc_f",
455  {OT_BOOL,
456  "Calculate 'f' in the Nlpsol base class"}},
457  {"calc_g",
458  {OT_BOOL,
459  "Calculate 'g' in the Nlpsol base class"}},
460  {"no_nlp_grad",
461  {OT_BOOL,
462  "Prevent the creation of the 'nlp_grad' function"}},
463  {"bound_consistency",
464  {OT_BOOL,
465  "Ensure that primal-dual solution is consistent with the bounds"}},
466  {"min_lam",
467  {OT_DOUBLE,
468  "Minimum allowed multiplier value"}},
469  {"oracle_options",
470  {OT_DICT,
471  "Options to be passed to the oracle function"}},
472  {"sens_linsol",
473  {OT_STRING,
474  "Linear solver used for parametric sensitivities (default 'qr')."}},
475  {"sens_linsol_options",
476  {OT_DICT,
477  "Linear solver options used for parametric sensitivities."}},
478  {"detect_simple_bounds",
479  {OT_BOOL,
480  "Automatically detect simple bounds (lbx/ubx) (default false). "
481  "This is hopefully beneficial to speed and robustness but may also have adverse affects: "
482  "1) Subtleties in heuristics and stopping criteria may change the solution, "
483  "2) IPOPT may lie about multipliers of simple equality bounds unless "
484  "'fixed_variable_treatment' is set to 'relax_bounds'."}},
485  {"detect_simple_bounds_is_simple",
486  {OT_BOOLVECTOR,
487  "For internal use only."}},
488  {"detect_simple_bounds_parts",
489  {OT_FUNCTION,
490  "For internal use only."}},
491  {"detect_simple_bounds_target_x",
492  {OT_INTVECTOR,
493  "For internal use only."}}
494  }
495  };
496 
497  void Nlpsol::init(const Dict& opts) {
498  // Read options
499  for (auto&& op : opts) {
500  if (op.first=="detect_simple_bounds_is_simple") {
501  assign_vector(op.second.to_bool_vector(), detect_simple_bounds_is_simple_);
502  //detect_simple_bounds_is_simple_ = op.second.to_bool_vector();
503  } else if (op.first=="detect_simple_bounds_parts") {
504  detect_simple_bounds_parts_ = op.second;
505  } else if (op.first=="detect_simple_bounds_target_x") {
506  detect_simple_bounds_target_x_ = op.second;
507  }
508  }
509 
510  for (casadi_int i=0;i<detect_simple_bounds_is_simple_.size();++i) {
512  detect_simple_bounds_target_g_.push_back(i);
513  }
514  }
515 
516  // Call the initialization method of the base class
517  OracleFunction::init(opts);
518 
519  // Read options
520  for (auto&& op : opts) {
521  if (op.first=="iteration_callback") {
522  fcallback_ = op.second;
523  } else if (op.first=="iteration_callback_step") {
524  callback_step_ = op.second;
525  } else if (op.first=="eval_errors_fatal") {
526  eval_errors_fatal_ = op.second;
527  } else if (op.first=="warn_initial_bounds") {
528  warn_initial_bounds_ = op.second;
529  } else if (op.first=="iteration_callback_ignore_errors") {
531  } else if (op.first=="discrete") {
532  discrete_ = op.second;
533  } else if (op.first=="equality") {
534  equality_ = op.second;
535  } else if (op.first=="calc_multipliers") {
536  calc_multipliers_ = op.second;
537  } else if (op.first=="calc_lam_x") {
538  calc_lam_x_ = op.second;
539  } else if (op.first=="calc_lam_p") {
540  calc_lam_p_ = op.second;
541  } else if (op.first=="calc_f") {
542  calc_f_ = op.second;
543  } else if (op.first=="calc_g") {
544  calc_g_ = op.second;
545  } else if (op.first=="no_nlp_grad") {
546  no_nlp_grad_ = op.second;
547  } else if (op.first=="bound_consistency") {
548  bound_consistency_ = op.second;
549  } else if (op.first=="min_lam") {
550  min_lam_ = op.second;
551  } else if (op.first=="sens_linsol") {
552  sens_linsol_ = op.second.to_string();
553  } else if (op.first=="sens_linsol_options") {
554  sens_linsol_options_ = op.second;
555  }
556  }
557 
558  // Deprecated option
559  if (calc_multipliers_) {
560  calc_lam_x_ = true;
561  calc_lam_p_ = true;
562  }
563 
564  // Get dimensions
565  nx_ = nnz_out(NLPSOL_X);
566  np_ = nnz_in(NLPSOL_P);
568 
569  // No need to calculate non-existant quantities
570  if (np_==0) calc_lam_p_ = false;
571  if (ng_==0) calc_g_ = false;
572 
573  // Consistency check
574  if (no_nlp_grad_) {
575  casadi_assert(!calc_lam_p_, "Options 'no_nlp_grad' and 'calc_lam_p' inconsistent");
576  casadi_assert(!calc_lam_x_, "Options 'no_nlp_grad' and 'calc_lam_x' inconsistent");
577  casadi_assert(!calc_f_, "Options 'no_nlp_grad' and 'calc_f' inconsistent");
578  casadi_assert(!calc_g_, "Options 'no_nlp_grad' and 'calc_g' inconsistent");
579  }
580 
581  // Dimension checks
582  casadi_assert(sparsity_out_.at(NLPSOL_G).is_dense()
583  && sparsity_out_.at(NLPSOL_G).is_vector(),
584  "Expected a dense vector 'g', but got " + sparsity_out_.at(NLPSOL_G).dim(true) + ".");
585 
586  casadi_assert(sparsity_out_.at(NLPSOL_F).is_dense(),
587  "Expected a dense 'f', but got " + sparsity_out_.at(NLPSOL_F).dim(true) + ".");
588 
589  casadi_assert(sparsity_out_.at(NLPSOL_X).is_dense()
590  && sparsity_out_.at(NLPSOL_X).is_vector(),
591  "Expected a dense vector 'x', but got " + sparsity_out_.at(NLPSOL_X).dim(true) + ".");
592 
593  // Discrete marker
594  mi_ = false;
595  if (!discrete_.empty()) {
596  casadi_assert(discrete_.size()==nx_, "\"discrete\" option has wrong length");
597  if (std::find(discrete_.begin(), discrete_.end(), true)!=discrete_.end()) {
598  casadi_assert(integer_support(),
599  "Discrete variables require a solver with integer support");
600  mi_ = true;
601  }
602  }
603  if (!equality_.empty()) {
604  casadi_assert(equality_.size()==ng_, "\"equality\" option has wrong length. "
605  "Expected " + str(ng_) + " elements, but got " +
606  str(equality_.size()) + " instead.");
607  }
608 
609  set_nlpsol_prob();
610 
611  // Allocate memory
612  casadi_int sz_arg, sz_res, sz_w, sz_iw;
613  casadi_nlpsol_work(&p_nlp_, &sz_arg, &sz_res, &sz_iw, &sz_w);
614  alloc_arg(sz_arg, true);
615  alloc_res(sz_res, true);
616  alloc_iw(sz_iw, true);
617  alloc_w(sz_w, true);
618 
619  if (!fcallback_.is_null()) {
620  // Consistency checks
621  casadi_assert_dev(!fcallback_.is_null());
622  casadi_assert(fcallback_.n_out()==1 && fcallback_.numel_out()==1,
623  "Callback function must return a scalar.");
624  casadi_assert(fcallback_.n_in()==n_out_,
625  "Callback input signature must match the NLP solver output signature");
626  for (casadi_int i=0; i<n_out_; ++i) {
627  // Ignore empty arguments
628  if (fcallback_.sparsity_in(i).is_empty()) continue;
629  casadi_assert(fcallback_.size_in(i)==size_out(i),
630  "Callback function input size mismatch. For argument '" + nlpsol_out(i) + "', "
631  "callback has shape " + fcallback_.sparsity_in(i).dim() + " while NLP has " +
632  sparsity_out_.at(i).dim() + ".");
633  // TODO(@jaeandersson): Wrap fcallback_ in a function with correct sparsity
634  casadi_assert(fcallback_.sparsity_in(i)==sparsity_out_.at(i),
635  "Callback function input size mismatch. "
636  "For argument " + nlpsol_out(i) + "', callback has shape " +
637  fcallback_.sparsity_in(i).dim() + " while NLP has " +
638  sparsity_out_.at(i).dim() + ".");
639  }
640 
641  // Allocate temporary memory
642  alloc(fcallback_);
643  }
644 
645  // Function calculating f, g and the gradient of the Lagrangian w.r.t. x and p
646  if (!no_nlp_grad_) {
647  create_function("nlp_grad", {"x", "p", "lam:f", "lam:g"},
648  {"f", "g", "grad:gamma:x", "grad:gamma:p"},
649  {{"gamma", {"f", "g"}}});
650  }
651  }
652 
653  int detect_bounds_callback(const double** arg, double** res,
654  casadi_int* iw, double* w, void* callback_data) {
655  Function* f = static_cast<Function*>(callback_data);
656  return f->operator()(arg, res, iw, w);
657  }
658 
659  void Nlpsol::set_nlpsol_prob() {
660  p_nlp_.nx = nx_;
661  p_nlp_.ng = ng_;
662  p_nlp_.np = np_;
663 
667 
668  if (p_nlp_.detect_bounds.ng) {
677  }
678  }
679 
680  int Nlpsol::init_mem(void* mem) const {
681  if (OracleFunction::init_mem(mem)) return 1;
682  auto m = static_cast<NlpsolMemory*>(mem);
683  m->add_stat("callback_fun");
684  m->success = false;
685  m->d_nlp.prob = nullptr;
686  m->unified_return_status = SOLVER_RET_UNKNOWN;
687  return 0;
688  }
689 
// Sanity-check the numerical inputs of a solve: consistent (lb <= ub and
// finite-signed) variable and constraint bounds, optional warning when the
// initial guess violates the variable bounds, and a warning when the number
// of equality constraints exceeds the number of variables.
690  void Nlpsol::check_inputs(void* mem) const {
691  auto m = static_cast<NlpsolMemory*>(mem);
692  auto d_nlp = &m->d_nlp;
693 
694  // Skip check?
695  if (!inputs_check_) return;
696 
697  const double inf = std::numeric_limits<double>::infinity();
698 
699  // Number of equality constraints
700  casadi_int n_eq = 0;
701 
702  // Detect ill-posed problems (simple bounds)
703  for (casadi_int i=0; i<nx_; ++i) {
// Unset inputs fall back to their defaults (-inf/+inf/0)
704  double lb = d_nlp->lbx ? d_nlp->lbx[i] : get_default_in(NLPSOL_LBX);
705  double ub = d_nlp->ubx ? d_nlp->ubx[i] : get_default_in(NLPSOL_UBX);
706  double x0 = d_nlp->x0 ? d_nlp->x0[i] : get_default_in(NLPSOL_X0);
707  casadi_assert(lb <= ub && lb!=inf && ub!=-inf,
708  "Ill-posed problem detected: "
709  "LBX[" + str(i) + "] <= UBX[" + str(i) + "] was violated. "
710  "Got LBX[" + str(i) + "]=" + str(lb) + " and UBX[" + str(i) + "] = " + str(ub) + ".");
711  if (warn_initial_bounds_ && (x0>ub || x0<lb)) {
712  casadi_warning("Nlpsol: The initial guess does not satisfy LBX and UBX. "
713  "Option 'warn_initial_bounds' controls this warning.");
// NOTE(review): this break exits the whole loop after the first violation,
// which also stops counting equality bounds (lb==ub) for the remaining
// variables, making the overconstrained-NLP check below less strict when
// 'warn_initial_bounds' fires — confirm this is intended.
714  break;
715  }
716  if (lb==ub) n_eq++;
717  }
718 
719  // Detect ill-posed problems (nonlinear bounds)
720  for (casadi_int i=0; i<nnz_out(NLPSOL_G); ++i) {
721  double lb = d_nlp->lbg ? d_nlp->lbg[i] : get_default_in(NLPSOL_LBG);
722  double ub = d_nlp->ubg ? d_nlp->ubg[i] : get_default_in(NLPSOL_UBG);
723  casadi_assert(lb <= ub && lb!=inf && ub!=-inf,
724  "Ill-posed problem detected: "
725  "LBG[" + str(i) + "] <= UBG[" + str(i) + "] was violated. "
726  "Got LBG[" + str(i) + "] = " + str(lb) + " and UBG[" + str(i) + "] = " + str(ub) + ".");
727  if (lb==ub) n_eq++;
728  }
729 
730  // Make sure enough degrees of freedom
731  using casadi::str; // Workaround, MingGW bug, cf. CasADi issue #890
732  if (n_eq> nx_) {
733  casadi_warning("NLP is overconstrained: There are " + str(n_eq) +
734  " equality constraints but only " + str(nx_) + " variables.");
735  }
736  }
737 
// Registry of loaded Nlpsol plugins, keyed by plugin name.
738  std::map<std::string, Nlpsol::Plugin> Nlpsol::solvers_;
739 
740 #ifdef CASADI_WITH_THREADSAFE_SYMBOLICS
// Guards solvers_ against concurrent plugin loading.
741  std::mutex Nlpsol::mutex_solvers_;
742 #endif // CASADI_WITH_THREADSAFE_SYMBOLICS
743 
// Infix identifying this plugin family — presumably used when composing
// plugin library/symbol names; confirm against the plugin loader.
744  const std::string Nlpsol::infix_ = "nlpsol";
745 
747  casadi_error("getReducedHessian not defined for class " + class_name());
748  return DM();
749  }
750 
// Read solver options from a file. Not supported by the base class;
// derived solver classes may override this.
751  void Nlpsol::setOptionsFromFile(const std::string & file) {
752  casadi_error("setOptionsFromFile not defined for class " + class_name());
753  }
754 
755  void Nlpsol::bound_consistency(casadi_int n, double* z, double* lam,
756  const double* lbz, const double* ubz) {
757  casadi_assert_dev(z!=nullptr);
758  casadi_assert_dev(lam!=nullptr);
759  casadi_assert_dev(lbz!=nullptr);
760  casadi_assert_dev(ubz!=nullptr);
761  // Local variables
762  casadi_int i;
763  // Loop over variables
764  for (i=0; i<n; ++i) {
765  // Make sure bounds are respected
766  z[i] = std::fmin(std::fmax(z[i], lbz[i]), ubz[i]);
767  // Adjust multipliers
768  if (std::isinf(lbz[i]) && std::isinf(ubz[i])) {
769  // Both multipliers are infinite
770  lam[i] = 0.;
771  } else if (std::isinf(lbz[i]) || z[i] - lbz[i] > ubz[i] - z[i]) {
772  // Infinite lower bound or closer to upper bound than lower bound
773  lam[i] = std::fmax(0., lam[i]);
774  } else if (std::isinf(ubz[i]) || z[i] - lbz[i] < ubz[i] - z[i]) {
775  // Infinite upper bound or closer to lower bound than upper bound
776  lam[i] = std::fmin(0., lam[i]);
777  }
778  }
779  }
780 
// Numerical evaluation entry point: set up the work vectors, copy inputs
// into the internal primal-dual vectors, run the plugin's solve(), then
// optionally recompute f/g/multipliers via 'nlp_grad', enforce bound
// consistency and copy the solution to the output buffers.
// Returns the plugin's solve() flag (nonzero on failure).
781  int Nlpsol::eval(const double** arg, double** res, casadi_int* iw, double* w, void* mem) const {
782  auto m = static_cast<NlpsolMemory*>(mem);
783 
784  auto d_nlp = &m->d_nlp;
785 
786  // Reset the solver, prepare for solution
787  setup(m, arg, res, iw, w);
788  auto p_nlp = d_nlp->prob;
789 
790  // Set initial guess
791  casadi_copy(d_nlp->x0, nx_, d_nlp->z);
792 
793  // Read simple bounds and multiplier guesses
794  casadi_copy(d_nlp->lbx, nx_, d_nlp->lbz);
795  casadi_copy(d_nlp->ubx, nx_, d_nlp->ubz);
796  casadi_copy(d_nlp->lam_x0, nx_, d_nlp->lam);
797 
798  if (p_nlp->detect_bounds.ng==0) {
799  // Read constraint bounds and multiplier guesses
800  casadi_copy(d_nlp->lbg, ng_, d_nlp->lbz+nx_);
801  casadi_copy(d_nlp->ubg, ng_, d_nlp->ubz+nx_);
802  casadi_copy(d_nlp->lam_g0, ng_, d_nlp->lam+nx_);
803  } else {
// Simple-bound detection active: remap the constraint data instead
804  if (casadi_detect_bounds_before(d_nlp)) return 1;
805  }
806 
807  // Set multipliers to nan
808  casadi_fill(d_nlp->lam_p, np_, nan);
809 
810  // Reset f, g
811  d_nlp->objective = nan;
812  casadi_fill(d_nlp->z + nx_, ng_, nan);
813 
814  // Check the provided inputs
815  check_inputs(m);
816 
817  // Solve the NLP
818  int flag = solve(m);
819 
820  // Join statistics (introduced for parallel oracle facilities)
821  join_results(m);
822 
823  // Calculate multipliers
824  if ((calc_f_ || calc_g_ || calc_lam_x_ || calc_lam_p_) && !flag) {
// Evaluate nlp_grad with lam_f = 1; slots left as nullptr are skipped
825  const double lam_f = 1.;
826  m->arg[0] = d_nlp->z;
827  m->arg[1] = d_nlp->p;
828  m->arg[2] = &lam_f;
829  m->arg[3] = d_nlp->lam + nx_;
830  m->res[0] = calc_f_ ? &d_nlp->objective : nullptr;
831  m->res[1] = calc_g_ ? d_nlp->z + nx_ : nullptr;
832  m->res[2] = calc_lam_x_ ? d_nlp->lam : nullptr;
833  m->res[3] = calc_lam_p_ ? d_nlp->lam_p : nullptr;
834  if (calc_function(m, "nlp_grad")) {
835  casadi_warning("Failed to calculate multipliers");
836  }
// nlp_grad returns gradients of the Lagrangian; negate to get multipliers
837  if (calc_lam_x_) casadi_scal(nx_, -1., d_nlp->lam);
838  if (calc_lam_p_) casadi_scal(np_, -1., d_nlp->lam_p);
839  }
840 
841  // Make sure that an optimal solution is consistent with bounds
842  if (bound_consistency_ && !flag) {
843  bound_consistency(nx_+ng_, d_nlp->z, d_nlp->lam, d_nlp->lbz, d_nlp->ubz);
844  }
845 
846  // Get optimal solution
847  casadi_copy(d_nlp->z, nx_, d_nlp->x);
848 
849  if (p_nlp->detect_bounds.ng==0) {
850  casadi_copy(d_nlp->z + nx_, ng_, d_nlp->g);
851  casadi_copy(d_nlp->lam, nx_, d_nlp->lam_x);
852  casadi_copy(d_nlp->lam + nx_, ng_, d_nlp->lam_g);
853  } else {
854  if (casadi_detect_bounds_after(d_nlp)) return 1;
855  }
856 
// NOTE(review): source and destination are the same pointer here, so this
// copy is a no-op — possibly a leftover; confirm the intended destination.
857  casadi_copy(d_nlp->lam_p, np_, d_nlp->lam_p);
858  casadi_copy(&d_nlp->objective, 1, d_nlp->f);
859 
860  if (m->success) m->unified_return_status = SOLVER_RET_SUCCESS;
861 
862  if (error_on_fail_ && !m->success)
863  casadi_error("nlpsol process failed. "
864  "Set 'error_on_fail' option to false to ignore this error.");
865 
866  if (m->unified_return_status==SOLVER_RET_EXCEPTION) {
867  casadi_error("An exception was raised in the solver.");
868  }
869  return flag;
870  }
871 
// Map the raw argument/result buffers onto the named fields of the
// NLP data structure, then advance the pointers past the canonical
// inputs/outputs and hand the remaining work memory to casadi_nlpsol_init.
872  void Nlpsol::set_work(void* mem, const double**& arg, double**& res,
873  casadi_int*& iw, double*& w) const {
874  auto m = static_cast<NlpsolMemory*>(mem);
875 
876  // Problem has not been solved at this point
877  m->success = false;
878  m->unified_return_status = SOLVER_RET_UNKNOWN;
879 
880  m->d_nlp.prob = &p_nlp_;
881  m->d_nlp.oracle = &m->d_oracle;
882 
// Inputs (may be nullptr, meaning "use default")
883  casadi_nlpsol_data<double>& d_nlp = m->d_nlp;
884  d_nlp.p = arg[NLPSOL_P];
885  d_nlp.lbx = arg[NLPSOL_LBX];
886  d_nlp.ubx = arg[NLPSOL_UBX];
887  d_nlp.lbg = arg[NLPSOL_LBG];
888  d_nlp.ubg = arg[NLPSOL_UBG];
889  d_nlp.x0 = arg[NLPSOL_X0];
890  d_nlp.lam_x0 = arg[NLPSOL_LAM_X0];
891  d_nlp.lam_g0 = arg[NLPSOL_LAM_G0];
892 
// Outputs (may be nullptr, meaning "not requested")
893  d_nlp.x = res[NLPSOL_X];
894  d_nlp.f = res[NLPSOL_F];
895  d_nlp.g = res[NLPSOL_G];
896  d_nlp.lam_x = res[NLPSOL_LAM_X];
897  d_nlp.lam_g = res[NLPSOL_LAM_G];
898  d_nlp.lam_p = res[NLPSOL_LAM_P];
899 
900 
// Consume the canonical I/O slots; the rest is scratch space
901  arg += NLPSOL_NUM_IN;
902  res += NLPSOL_NUM_OUT;
903 
904  casadi_nlpsol_init(&m->d_nlp, &arg, &res, &iw, &w);
905  }
906 
907  std::vector<std::string> nlpsol_options(const std::string& name) {
908  return Nlpsol::plugin_options(name).all();
909  }
910 
911  std::string nlpsol_option_type(const std::string& name, const std::string& op) {
912  return Nlpsol::plugin_options(name).type(op);
913  }
914 
915  std::string nlpsol_option_info(const std::string& name, const std::string& op) {
916  return Nlpsol::plugin_options(name).info(op);
917  }
918 
// Print a human-readable description of the NLP being solved, followed by
// the oracle's own (detailed) representation.
919  void Nlpsol::disp_more(std::ostream& stream) const {
920  stream << "minimize f(x;p) subject to lbx<=x<=ubx, lbg<=g(x;p)<=ubg defined by:\n";
921  oracle_.disp(stream, true);
922  }
923 
925 #ifdef CASADI_WITH_THREADSAFE_SYMBOLICS
926  // Safe access to kkt_
927  std::lock_guard<std::mutex> lock(kkt_mtx_);
928 #endif // CASADI_WITH_THREADSAFE_SYMBOLICS
929  // Quick return if cached
930  SharedObject temp;
931  if (kkt_.shared_if_alive(temp)) {
932  return shared_cast<Function>(temp);
933  }
934 
935  // Generate KKT function
936  Function ret = oracle_.factory("kkt", {"x", "p", "lam:f", "lam:g"},
937  {"jac:g:x", "hess:gamma:x:x"}, {{"gamma", {"f", "g"}}});
938 
939  // Cache and return
940  kkt_ = ret;
941  return ret;
942  }
943 
944 
// Nlpsol::get_forward(): build a Function computing nfwd forward-mode
// directional derivatives of the solver outputs with respect to its inputs,
// by implicit differentiation of the KKT conditions at the given solution.
// The active set is assumed fixed and determined by multiplier signs
// (threshold min_lam_). Not compatible with detected simple bounds.
// NOTE(review): this extract omits the "Function Nlpsol::" signature prefix
// and a few interior lines (two loop headers and the linear solve) --
// compare against the upstream source before editing.
946  get_forward(casadi_int nfwd, const std::string& name,
947  const std::vector<std::string>& inames,
948  const std::vector<std::string>& onames,
949  const Dict& opts) const {
950  casadi_assert(detect_simple_bounds_is_simple_.empty(),
951  "Simple bound detection not compatible with get_forward");
952 
953  // Symbolic expression for the input
954  std::vector<MX> arg = mx_in(), res = mx_out();
955 
956  // Initial guesses not used for derivative calculations
  // NOTE(review): loop header over the guess inputs missing from extract
958  std::string name = arg[i].is_symbolic() ? arg[i].name() : "tmp_get_forward";
959  arg[i] = MX::sym(name, Sparsity(arg[i].size()));
960  }
961 
962  // Optimal solution
963  MX x = res[NLPSOL_X];
964  MX lam_g = res[NLPSOL_LAM_G];
965  MX lam_x = res[NLPSOL_LAM_X];
966  MX lam_p = res[NLPSOL_LAM_P];
967  MX f = res[NLPSOL_F];
968  MX g = res[NLPSOL_G];
969 
970  // Inputs used
971  MX lbx = arg[NLPSOL_LBX];
972  MX ubx = arg[NLPSOL_UBX];
973  MX lbg = arg[NLPSOL_LBG];
974  MX ubg = arg[NLPSOL_UBG];
975  MX p = arg[NLPSOL_P];
976 
977  // Get KKT function
978  Function kkt = this->kkt();
979 
980  // Hessian of the Lagrangian, Jacobian of the constraints (lam_f = 1)
981  std::vector<MX> HJ_res = kkt({x, p, 1, lam_g});
982  MX JG = HJ_res.at(0);
983  MX HL = HJ_res.at(1);
984 
985  // Active set (assumed known and given by the multiplier signs)
986  MX ubIx = lam_x > min_lam_;
987  MX lbIx = lam_x < -min_lam_;
988  MX bIx = ubIx + lbIx;
989  MX iIx = 1-bIx;
990  MX ubIg = lam_g > min_lam_;
991  MX lbIg = lam_g < -min_lam_;
992  MX bIg = ubIg + lbIg;
993  MX iIg = 1-bIg;
994 
995  // KKT matrix: rows for free/bound variables and active/inactive constraints
996  MX H_11 = mtimes(diag(iIx), HL) + diag(bIx);
997  MX H_12 = mtimes(diag(iIx), JG.T());
998  MX H_21 = mtimes(diag(bIg), JG);
999  MX H_22 = diag(-iIg);
1000  MX H = MX::blockcat({{H_11, H_12}, {H_21, H_22}});
1001 
1002  // Sensitivity inputs (one column per forward direction)
1003  std::vector<MX> fseed(NLPSOL_NUM_IN);
1004  MX fwd_lbx = fseed[NLPSOL_LBX] = MX::sym("fwd_lbx", repmat(x.sparsity(), 1, nfwd));
1005  MX fwd_ubx = fseed[NLPSOL_UBX] = MX::sym("fwd_ubx", repmat(x.sparsity(), 1, nfwd));
1006  MX fwd_lbg = fseed[NLPSOL_LBG] = MX::sym("fwd_lbg", repmat(g.sparsity(), 1, nfwd));
1007  MX fwd_ubg = fseed[NLPSOL_UBG] = MX::sym("fwd_ubg", repmat(g.sparsity(), 1, nfwd));
1008  MX fwd_p = fseed[NLPSOL_P] = MX::sym("fwd_p", repmat(p.sparsity(), 1, nfwd));
1009 
1010  // Guesses are unused
  // NOTE(review): loop header over the guess inputs missing from extract
1012  fseed[i] = MX(repmat(Sparsity(arg[i].size()), 1, nfwd));
1013  }
1014 
1015  // nlp_grad has the signature
1016  // (x, p, lam_f, lam_g) -> (f, g, grad_x, grad_p)
1017  // with lam_f=1 and lam_g=lam_g, grad_x = -lam_x, grad_p=-lam_p
1018  Function nlp_grad = get_function("nlp_grad");
1019 
1020  // fwd_nlp_grad has the signature
1021  // (x, p, lam_f, lam_g, f, g, grad_x, grad_p,
1022  // fwd_x, fwd_p, fwd_lam_f, fwd_lam_g)
1023  // -> (fwd_f, fwd_g, fwd_grad_x, fwd_grad_p)
1024  Function fwd_nlp_grad = nlp_grad.forward(nfwd);
1025 
1026  // Calculate sensitivities from fwd_p
1027  std::vector<MX> vv = {x, p, 1, lam_g, f, g, -lam_x, -lam_p, 0., fwd_p, 0., 0.};
1028  vv = fwd_nlp_grad(vv);
1029  MX fwd_g_p = vv.at(1);
1030  MX fwd_gL_p = vv.at(2);
1031 
1032  // Propagate forward seeds
1033  MX fwd_alpha_x = (if_else(lbIx, fwd_lbx, 0) + if_else(ubIx, fwd_ubx, 0))
1034  - if_else(iIx, fwd_gL_p, 0);
1035  MX fwd_alpha_g = (if_else(ubIg, fwd_ubg, 0) + if_else(lbIg, fwd_lbg, 0))
1036  - if_else(bIg, fwd_g_p, 0);
1037  MX v = MX::vertcat({fwd_alpha_x, fwd_alpha_g});
1038 
1039  // Solve
  // NOTE(review): the linear solve against H (presumably using sens_linsol_)
  // is missing from this extract -- confirm against upstream source.
1041 
1042  // Extract sensitivities in x, lam_x and lam_g
1043  std::vector<MX> v_split = vertsplit(v, {0, nx_, nx_+ng_});
1044  MX fwd_x = v_split.at(0);
1045  MX fwd_lam_g = v_split.at(1);
1046 
1047  // Calculate sensitivities in lam_x, lam_g
1048  vv = {x, p, 1, lam_g, f, g, -lam_x, -lam_p,
1049  fwd_x, fwd_p, 0, fwd_lam_g};
1050  vv = fwd_nlp_grad(vv);
1051  MX fwd_f = vv.at(0);
1052  MX fwd_g = vv.at(1);
1053  MX fwd_lam_x = -vv.at(2);
1054  MX fwd_lam_p = -vv.at(3);
1055 
1056  // Forward sensitivities
1057  std::vector<MX> fsens(NLPSOL_NUM_OUT);
1058  fsens[NLPSOL_X] = fwd_x;
1059  fsens[NLPSOL_F] = fwd_f;
1060  fsens[NLPSOL_G] = fwd_g;
1061  fsens[NLPSOL_LAM_X] = fwd_lam_x;
1062  fsens[NLPSOL_LAM_G] = fwd_lam_g;
1063  fsens[NLPSOL_LAM_P] = fwd_lam_p;
1064 
1065  // Gather return values: inputs are (nominal in, nominal out, seeds)
1066  arg.insert(arg.end(), res.begin(), res.end());
1067  arg.insert(arg.end(), fseed.begin(), fseed.end());
1068  res = fsens;
1069 
1070  Dict options = opts;
1071  options["allow_duplicate_io_names"] = true;
1072 
1073  return Function(name, arg, res, inames, onames, options);
1074  }
1075 
// Nlpsol::get_reverse(): build a Function computing nadj adjoint (reverse
// mode) directional derivatives of the solver outputs, by implicit
// differentiation of the KKT conditions at the given solution. Mirrors
// get_forward; the active set is fixed by multiplier signs (min_lam_).
// NOTE(review): this extract omits the "Function Nlpsol::" signature prefix
// and a few interior lines (two loop headers and the transposed linear
// solve) -- compare against the upstream source before editing.
1077  get_reverse(casadi_int nadj, const std::string& name,
1078  const std::vector<std::string>& inames,
1079  const std::vector<std::string>& onames,
1080  const Dict& opts) const {
1081  casadi_assert(detect_simple_bounds_is_simple_.empty(),
1082  "Simple bound detection not compatible with get_reverse");
1083 
1084  // Symbolic expression for the input
1085  std::vector<MX> arg = mx_in(), res = mx_out();
1086 
1087  // Initial guesses not used for derivative calculations
  // NOTE(review): loop header over the guess inputs missing from extract
1089  std::string name = arg[i].is_symbolic() ? arg[i].name() : "tmp_get_reverse";
1090  arg[i] = MX::sym(name, Sparsity(arg[i].size()));
1091  }
1092 
1093  // Optimal solution
1094  MX x = res[NLPSOL_X];
1095  MX lam_g = res[NLPSOL_LAM_G];
1096  MX lam_x = res[NLPSOL_LAM_X];
1097  MX lam_p = res[NLPSOL_LAM_P];
1098  MX f = res[NLPSOL_F];
1099  MX g = res[NLPSOL_G];
1100 
1101  // Inputs used
1102  MX lbx = arg[NLPSOL_LBX];
1103  MX ubx = arg[NLPSOL_UBX];
1104  MX lbg = arg[NLPSOL_LBG];
1105  MX ubg = arg[NLPSOL_UBG];
1106  MX p = arg[NLPSOL_P];
1107 
1108  // Get KKT function
1109  Function kkt = this->kkt();
1110 
1111  // Hessian of the Lagrangian, Jacobian of the constraints (lam_f = 1)
1112  std::vector<MX> HJ_res = kkt({x, p, 1, lam_g});
1113  MX JG = HJ_res.at(0);
1114  MX HL = HJ_res.at(1);
1115 
1116  // Active set (assumed known and given by the multiplier signs)
1117  MX ubIx = lam_x > min_lam_;
1118  MX lbIx = lam_x < -min_lam_;
1119  MX bIx = ubIx + lbIx;
1120  MX iIx = 1-bIx;
1121  MX ubIg = lam_g > min_lam_;
1122  MX lbIg = lam_g < -min_lam_;
1123  MX bIg = ubIg + lbIg;
1124  MX iIg = 1-bIg;
1125 
1126  // KKT matrix (same construction as in get_forward)
1127  MX H_11 = mtimes(diag(iIx), HL) + diag(bIx);
1128  MX H_12 = mtimes(diag(iIx), JG.T());
1129  MX H_21 = mtimes(diag(bIg), JG);
1130  MX H_22 = diag(-iIg);
1131  MX H = MX::blockcat({{H_11, H_12}, {H_21, H_22}});
1132 
1133  // Sensitivity inputs (one column per adjoint direction)
1134  std::vector<MX> aseed(NLPSOL_NUM_OUT);
1135  MX adj_x = aseed[NLPSOL_X] = MX::sym("adj_x", repmat(x.sparsity(), 1, nadj));
1136  MX adj_lam_g = aseed[NLPSOL_LAM_G] = MX::sym("adj_lam_g", repmat(g.sparsity(), 1, nadj));
1137  MX adj_lam_x = aseed[NLPSOL_LAM_X] = MX::sym("adj_lam_x", repmat(x.sparsity(), 1, nadj));
1138  MX adj_lam_p = aseed[NLPSOL_LAM_P] = MX::sym("adj_lam_p", repmat(p.sparsity(), 1, nadj));
1139  MX adj_f = aseed[NLPSOL_F] = MX::sym("adj_f", Sparsity::dense(1, nadj));
1140  MX adj_g = aseed[NLPSOL_G] = MX::sym("adj_g", repmat(g.sparsity(), 1, nadj));
1141 
1142  // nlp_grad has the signature
1143  // (x, p, lam_f, lam_g) -> (f, g, grad_x, grad_p)
1144  // with lam_f=1 and lam_g=lam_g, grad_x = -lam_x, grad_p=-lam_p
1145  Function nlp_grad = get_function("nlp_grad");
1146 
1147  // rev_nlp_grad has the signature
1148  // (x, p, lam_f, lam_g, f, g, grad_x, grad_p,
1149  // adj_f, adj_g, adj_grad_x, adj_grad_p)
1150  // -> (adj_x, adj_p, adj_lam_f, adj_lam_g)
1151  Function rev_nlp_grad = nlp_grad.reverse(nadj);
1152 
1153  // Calculate sensitivities from f, g and lam_x
1154  std::vector<MX> vv = {x, p, 1, lam_g, f, g, -lam_x, -lam_p,
1155  adj_f, adj_g, -adj_lam_x, -adj_lam_p};
1156  vv = rev_nlp_grad(vv);
1157  MX adj_x0 = vv.at(0);
1158  MX adj_p0 = vv.at(1);
1159  MX adj_lam_g0 = vv.at(3);
1160 
1161  // Solve to get beta_x_bar, beta_g_bar
1162  MX v = MX::vertcat({adj_x + adj_x0, adj_lam_g + adj_lam_g0});
  // NOTE(review): the (transposed) linear solve against H is missing from
  // this extract -- confirm against upstream source.
1164  std::vector<MX> v_split = vertsplit(v, {0, nx_, nx_+ng_});
1165  MX beta_x_bar = v_split.at(0);
1166  MX beta_g_bar = v_split.at(1);
1167 
1168  // Calculate sensitivities in p
1169  vv = {x, p, 1, lam_g, f, g, -lam_x, -lam_p,
1170  0, bIg*beta_g_bar, iIx*beta_x_bar, 0};
1171  vv = rev_nlp_grad(vv);
1172  MX adj_p = vv.at(1);
1173 
1174  // Reverse sensitivities: route beta back to whichever bound is active
1175  std::vector<MX> asens(NLPSOL_NUM_IN);
1176  asens[NLPSOL_UBX] = if_else(ubIx, beta_x_bar, 0);
1177  asens[NLPSOL_LBX] = if_else(lbIx, beta_x_bar, 0);
1178  asens[NLPSOL_UBG] = if_else(ubIg, beta_g_bar, 0);
1179  asens[NLPSOL_LBG] = if_else(lbIg, beta_g_bar, 0);
1180  asens[NLPSOL_P] = adj_p0 - adj_p;
1181 
1182  // Guesses are unused
  // NOTE(review): loop header over the guess inputs missing from extract
1184  asens[i] = MX(repmat(Sparsity(arg[i].size()), 1, nadj));
1185  }
1186 
1187  // Gather return values: inputs are (nominal in, nominal out, seeds)
1188  arg.insert(arg.end(), res.begin(), res.end());
1189  arg.insert(arg.end(), aseed.begin(), aseed.end());
1190  res = asens;
1191 
1192  Dict options = opts;
1193  options["allow_duplicate_io_names"] = true;
1194 
1195  return Function(name, arg, res, inames, onames, options);
1196  }
1197 
// Nlpsol::callback(): invoke the user-supplied iteration callback
// (fcallback_) with the current primal/dual iterate. Returns nonzero when
// the user requests an abort (callback output cast to nonzero).
// NOTE(review): the function signature line is missing from this extract.
1199  // Quick return if no callback function
1200  if (fcallback_.is_null()) return 0;
1201  // Callback inputs
1202  std::fill_n(m->arg, fcallback_.n_in(), nullptr);
1203 
1204  auto d_nlp = &m->d_nlp;
1205 
1206  m->arg[NLPSOL_X] = d_nlp->z;
1207  m->arg[NLPSOL_F] = &d_nlp->objective;
1208  m->arg[NLPSOL_G] = d_nlp->z + nx_;
1209  m->arg[NLPSOL_LAM_G] = d_nlp->lam + nx_;
1210  m->arg[NLPSOL_LAM_X] = d_nlp->lam;
1211 
1212  // Callback outputs: first output is the abort flag
1213  std::fill_n(m->res, fcallback_.n_out(), nullptr);
1214  double ret = 0;
1215  m->res[0] = &ret;
1216 
1217  // Start timer
1218  m->fstats.at("callback_fun").tic();
1219  try {
1220  // Evaluate
1221  fcallback_(m->arg, m->res, m->iw, m->w, 0);
1222  } catch(KeyboardInterruptException& ex) {
1223  (void)ex; // unused
1224  throw;
1225  } catch(std::exception& ex) {
1226  print("WARNING: intermediate_callback error: %s\n", ex.what());
1228  }
1229 
1230  // User interruption?
  // NOTE(review): this early return skips the toc() below, leaving the
  // "callback_fun" timer running on interrupt -- confirm this is intended.
1231  if (static_cast<casadi_int>(ret)) return 1;
1232 
1233  // Stop timer
1234  m->fstats.at("callback_fun").toc();
1235 
1236  return 0;
1237  }
1238 
// Nlpsol::get_stats(): collect solver statistics (base OracleFunction stats
// plus success flag, unified return status and, when simple-bound detection
// was active, which constraints were classified as simple bounds).
// Precondition: the solver must have been evaluated at least once.
1239  Dict Nlpsol::get_stats(void* mem) const {
1240  Dict stats = OracleFunction::get_stats(mem);
1241  auto m = static_cast<NlpsolMemory*>(mem);
1242  casadi_assert(m->d_nlp.prob,
1243  "No stats available: nlp Solver instance has not yet been called with numerical arguments.");
1244  auto d_nlp = &m->d_nlp;
1245  stats["success"] = m->success;
1246  stats["unified_return_status"] = string_from_UnifiedReturnStatus(m->unified_return_status);
1247  if (d_nlp->prob && d_nlp->prob->detect_bounds.ng) {
1248  std::vector<bool> is_simple;
  // NOTE(review): the line populating is_simple (presumably from
  // detect_simple_bounds_is_simple_) is missing from this extract.
1250  stats["detect_simple_bounds_is_simple"] = is_simple;
1251  stats["detect_simple_bounds_target_x"] = detect_simple_bounds_target_x_;
1252  }
1253  return stats;
1254  }
1255 
// Nlpsol::codegen_body_enter(): emit the C-code prologue of a generated
// solver: declare and populate the casadi_nlpsol_data/prob structs, wire
// the arg/res pointers to the NLPSOL I/O scheme, and copy initial guesses
// and bounds into the work vectors (with +/-inf and 0 defaults).
// NOTE(review): the function signature and the g.constant(...) argument
// lines for target_x/target_g/is_simple are missing from this extract.
1258  g.local("d_nlp", "struct casadi_nlpsol_data");
1259  g.local("p_nlp", "struct casadi_nlpsol_prob");
1260 
1261  g << "d_nlp.oracle = &d_oracle;\n";
1262 
1263  g << "d_nlp.p = arg[" << NLPSOL_P << "];\n";
1264  g << "d_nlp.lbx = arg[" << NLPSOL_LBX << "];\n";
1265  g << "d_nlp.ubx = arg[" << NLPSOL_UBX << "];\n";
1266  g << "d_nlp.lbg = arg[" << NLPSOL_LBG << "];\n";
1267  g << "d_nlp.ubg = arg[" << NLPSOL_UBG << "];\n";
1268  g << "d_nlp.x0 = arg[" << NLPSOL_X0 << "];\n";
1269  g << "d_nlp.lam_x0 = arg[" << NLPSOL_LAM_X0 << "];\n";
1270  g << "d_nlp.lam_g0 = arg[" << NLPSOL_LAM_G0 << "];\n";
1271 
1272  g << "d_nlp.x = res[" << NLPSOL_X << "];\n";
1273  g << "d_nlp.f = res[" << NLPSOL_F << "];\n";
1274  g << "d_nlp.g = res[" << NLPSOL_G << "];\n";
1275  g << "d_nlp.lam_x = res[" << NLPSOL_LAM_X << "];\n";
1276  g << "d_nlp.lam_g = res[" << NLPSOL_LAM_G << "];\n";
1277  g << "d_nlp.lam_p = res[" << NLPSOL_LAM_P << "];\n";
1278 
1279  g << "d_nlp.prob = &p_nlp;\n";
1280  g << "p_nlp.nx = " << nx_ << ";\n";
1281  g << "p_nlp.ng = " << ng_ << ";\n";
1282  g << "p_nlp.np = " << np_ << ";\n";
1283  g << "p_nlp.detect_bounds.ng = " << detect_simple_bounds_is_simple_.size() << ";\n";
1284  if (detect_simple_bounds_is_simple_.size()) {
1285 
1286 
1287  g << "p_nlp.detect_bounds.sz_arg = " << detect_simple_bounds_parts_.sz_arg() << ";\n";
1288  g << "p_nlp.detect_bounds.sz_res = " << detect_simple_bounds_parts_.sz_res() << ";\n";
1289  g << "p_nlp.detect_bounds.sz_iw = " << detect_simple_bounds_parts_.sz_iw() << ";\n";
1290  g << "p_nlp.detect_bounds.sz_w = " << detect_simple_bounds_parts_.sz_w() << ";\n";
1291 
1292  g << "p_nlp.detect_bounds.nb = " << detect_simple_bounds_target_x_.size() << ";\n";
1293  g << "p_nlp.detect_bounds.target_x = "
1295  g << "p_nlp.detect_bounds.target_g = "
1297  g << "p_nlp.detect_bounds.is_simple = "
1299  std::string w =
1300  g.shorthand(g.wrapper(detect_simple_bounds_parts_, "detect_simple_bounds_wrapper"));
1301  g << "p_nlp.detect_bounds.callback = " << w << ";\n";
1302  g << "p_nlp.detect_bounds.callback_data = 0;\n";
1303  }
1304  g << "casadi_nlpsol_init(&d_nlp, &arg, &res, &iw, &w);\n";
1305 
1306  // Set initial guess
1307  g.copy_default("d_nlp.x0", nx_, "d_nlp.z", "0", false);
1308 
1309  // Read simple bounds and multiplier guesses
1310  g.copy_default("d_nlp.lbx", nx_, "d_nlp.lbz", "-casadi_inf", false);
1311  g.copy_default("d_nlp.ubx", nx_, "d_nlp.ubz", "casadi_inf", false);
1312  g.copy_default("d_nlp.lam_x0", nx_, "d_nlp.lam", "0", false);
1313 
1314  if (detect_simple_bounds_is_simple_.empty()) {
1315  // Read constraint bounds and multiplier guesses
1316  g.copy_default("d_nlp.lbg", ng_, "d_nlp.lbz+"+str(nx_),
1317  "-casadi_inf", false);
1318  g.copy_default("d_nlp.ubg", ng_, "d_nlp.ubz+"+str(nx_),
1319  "casadi_inf", false);
1320  g.copy_default("d_nlp.lam_g0", ng_, "d_nlp.lam+"+str(nx_), "0", false);
1321  } else {
1322  g << "if (casadi_detect_bounds_before(&d_nlp)) return 1;\n";
1323  }
1324 
1325  }
1326 
// Nlpsol::codegen_declarations(): register code-generation dependencies:
// the nlp_grad function when any post-solve quantity must be computed, and
// a C wrapper function around the simple-bound-detection parts.
// NOTE(review): the function signature line is missing from this extract.
1329  if (calc_f_ || calc_g_ || calc_lam_x_ || calc_lam_p_)
1330  g.add_dependency(get_function("nlp_grad"));
1331 
1332  if (detect_simple_bounds_is_simple_.size()) {
1334  std::string w =
1335  g.shorthand(g.wrapper(detect_simple_bounds_parts_, "detect_simple_bounds_wrapper"));
1336 
1337  g << "int " << w
1338  << "(const casadi_real** arg, casadi_real** res, "
1339  << "casadi_int* iw, casadi_real* w, void* callback_data) {\n";
1340  std::string flag = g(detect_simple_bounds_parts_, "arg", "res", "iw", "w");
1341  g << "return " + flag + ";\n";
1342  g << "}\n";
1343  }
1344  }
1345 
// Nlpsol::codegen_body_exit(): emit the C-code epilogue of a generated
// solver: optionally recompute f, g and multipliers via nlp_grad (negating
// grad_x/grad_p to recover lam_x/lam_p), enforce bound consistency, and
// copy the work vectors back to the NLPSOL outputs.
// NOTE(review): the function signature line (and a trailing line, likely a
// base-class call) are missing from this extract.
1347  if (calc_f_ || calc_g_ || calc_lam_x_ || calc_lam_p_) {
1348  g.local("one", "const casadi_real");
1349  g.init_local("one", "1");
1350  g << "d->arg[0] = d_nlp.z;\n";
1351  g << "d->arg[1] = d_nlp.p;\n";
1352  g << "d->arg[2] = &one;\n";
1353  g << "d->arg[3] = d_nlp.lam+" + str(nx_) + ";\n";
1354  g << "d->res[0] = " << (calc_f_ ? "&d_nlp.objective" : "0") << ";\n";
1355  g << "d->res[1] = " << (calc_g_ ? "d_nlp.z+" + str(nx_) : "0") << ";\n";
1356  g << "d->res[2] = " << (calc_lam_x_ ? "d_nlp.lam+" + str(nx_) : "0") << ";\n";
1357  g << "d->res[3] = " << (calc_lam_p_ ? "d_nlp.lam_p" : "0") << ";\n";
1358  std::string nlp_grad = g(get_function("nlp_grad"), "d->arg", "d->res", "d->iw", "d->w");
1359  g << "if (" + nlp_grad + ") return 1;\n";
1360  if (calc_lam_x_) g << g.scal(nx_, "-1.0", "d_nlp.lam") << "\n";
1361  if (calc_lam_p_) g << g.scal(np_, "-1.0", "d_nlp.lam_p") << "\n";
1362  }
1363  if (bound_consistency_) {
1364  g << g.bound_consistency(nx_+ng_, "d_nlp.z", "d_nlp.lam", "d_nlp.lbz", "d_nlp.ubz") << ";\n";
1365  }
1366 
1367  g << g.copy("d_nlp.z", nx_, "d_nlp.x") << "\n";
1368 
1369  if (detect_simple_bounds_is_simple_.empty()) {
1370  g << g.copy("d_nlp.z + " + str(nx_), ng_, "d_nlp.g") << "\n";
1371  g << g.copy("d_nlp.lam", nx_, "d_nlp.lam_x") << "\n";
1372  g << g.copy("d_nlp.lam + " + str(nx_), ng_, "d_nlp.lam_g") << "\n";
1373  } else {
1374  g << "if (casadi_detect_bounds_after(&d_nlp)) return 1;\n";
1375  }
1376 
1377  g.copy_check("&d_nlp.objective", 1, "d_nlp.f", false, true);
1378  g.copy_check("d_nlp.lam_p", np_, "d_nlp.lam_p", false, true);
1379 
1381  }
1382 
// Nlpsol::serialize_body(): write all Nlpsol state to the serialization
// stream, under format version 5. Field order must match the unpack order
// in the deserializing constructor below.
// NOTE(review): the function signature line is missing from this extract.
1385 
1386  s.version("Nlpsol", 5);
1387  s.pack("Nlpsol::nx", nx_);
1388  s.pack("Nlpsol::ng", ng_);
1389  s.pack("Nlpsol::np", np_);
1390  s.pack("Nlpsol::fcallback", fcallback_);
1391  s.pack("Nlpsol::callback_step", callback_step_);
1392  s.pack("Nlpsol::eval_errors_fatal", eval_errors_fatal_);
1393  s.pack("Nlpsol::warn_initial_bounds", warn_initial_bounds_);
1394  s.pack("Nlpsol::iteration_callback_ignore_errors", iteration_callback_ignore_errors_);
1395  s.pack("Nlpsol::calc_multipliers", calc_multipliers_);
1396  s.pack("Nlpsol::calc_lam_x", calc_lam_x_);
1397  s.pack("Nlpsol::calc_lam_p", calc_lam_p_);
1398  s.pack("Nlpsol::calc_f", calc_f_);
1399  s.pack("Nlpsol::calc_g", calc_g_);
1400  s.pack("Nlpsol::min_lam", min_lam_);
1401  s.pack("Nlpsol::bound_consistency", bound_consistency_);
1402  s.pack("Nlpsol::no_nlp_grad", no_nlp_grad_);
1403  s.pack("Nlpsol::discrete", discrete_);
1404  s.pack("Nlpsol::equality", equality_);
1405  s.pack("Nlpsol::mi", mi_);
1406  s.pack("Nlpsol::sens_linsol", sens_linsol_);
1407  s.pack("Nlpsol::sens_linsol_options", sens_linsol_options_);
1408  s.pack("Nlpsol::detect_simple_bounds_is_simple", detect_simple_bounds_is_simple_);
1409  s.pack("Nlpsol::detect_simple_bounds_parts", detect_simple_bounds_parts_);
1410  s.pack("Nlpsol::detect_simple_bounds_target_x", detect_simple_bounds_target_x_);
1411  }
1412 
1416  }
1417 
1420  }
1421 
// Deserializing constructor: restore Nlpsol state from the stream, handling
// format versions 1..5 (fields added in later versions get defaults or are
// skipped for older streams). Afterwards the derived target_g indices are
// rebuilt and the numeric problem struct is re-initialized.
// NOTE(review): the constructor signature line is missing from this
// extract, as is the conditional inside the final loop.
1423  int version = s.version("Nlpsol", 1, 5);
1424  s.unpack("Nlpsol::nx", nx_);
1425  s.unpack("Nlpsol::ng", ng_);
1426  s.unpack("Nlpsol::np", np_);
1427  s.unpack("Nlpsol::fcallback", fcallback_);
1428  s.unpack("Nlpsol::callback_step", callback_step_);
1429  if (version<=2) {
1430  s.unpack("Nlpsol::error_on_fail", error_on_fail_);
1431  }
1432  s.unpack("Nlpsol::eval_errors_fatal", eval_errors_fatal_);
1433  s.unpack("Nlpsol::warn_initial_bounds", warn_initial_bounds_);
1434  s.unpack("Nlpsol::iteration_callback_ignore_errors", iteration_callback_ignore_errors_);
1435  s.unpack("Nlpsol::calc_multipliers", calc_multipliers_);
1436  s.unpack("Nlpsol::calc_lam_x", calc_lam_x_);
1437  s.unpack("Nlpsol::calc_lam_p", calc_lam_p_);
1438  s.unpack("Nlpsol::calc_f", calc_f_);
1439  s.unpack("Nlpsol::calc_g", calc_g_);
1440  s.unpack("Nlpsol::min_lam", min_lam_);
1441  s.unpack("Nlpsol::bound_consistency", bound_consistency_);
1442  s.unpack("Nlpsol::no_nlp_grad", no_nlp_grad_);
1443  s.unpack("Nlpsol::discrete", discrete_);
1444  if (version>=4) {
1445  s.unpack("Nlpsol::equality", equality_);
1446  }
1447  s.unpack("Nlpsol::mi", mi_);
1448  if (version>=2) {
1449  s.unpack("Nlpsol::sens_linsol", sens_linsol_);
1450  s.unpack("Nlpsol::sens_linsol_options", sens_linsol_options_);
1451  } else {
1452  sens_linsol_ = "qr";
1453  }
1454 
1455  if (version>=3) {
1456  s.unpack("Nlpsol::detect_simple_bounds_is_simple", detect_simple_bounds_is_simple_);
1457  s.unpack("Nlpsol::detect_simple_bounds_parts", detect_simple_bounds_parts_);
1458  if (version==4) {
1459  casadi_error("Saved detect_simple_bounds_parts changed signature");
1460  }
1461  s.unpack("Nlpsol::detect_simple_bounds_target_x", detect_simple_bounds_target_x_);
1462  }
  // Rebuild derived target_g indices from the unpacked is_simple vector.
  // NOTE(review): a conditional inside this loop (orig line 1464) is
  // missing from this extract -- presumably filtering on is_simple[i].
1463  for (casadi_int i=0;i<detect_simple_bounds_is_simple_.size();++i) {
1465  detect_simple_bounds_target_g_.push_back(i);
1466  }
1467  }
1468  set_nlpsol_prob();
1469  }
1470 
1471 } // namespace casadi
const char * what() const override
Display error.
Definition: exception.hpp:90
Helper class for C code generation.
std::string add_dependency(const Function &f)
Add a function dependency.
std::string wrapper(const Function &base, const std::string &name)
std::string copy(const std::string &arg, std::size_t n, const std::string &res)
Create a copy operation.
std::string constant(const std::vector< casadi_int > &v)
Represent an array constant; adding it when new.
std::string scal(casadi_int n, const std::string &alpha, const std::string &x)
Generate code scaling a length-n vector in place: x <- alpha*x (BLAS-style scal).
std::string bound_consistency(casadi_int n, const std::string &x, const std::string &lam, const std::string &lbx, const std::string &ubx)
bound_consistency
void local(const std::string &name, const std::string &type, const std::string &ref="")
Declare a local variable.
void init_local(const std::string &name, const std::string &def)
Specify the default value for a local variable.
std::string shorthand(const std::string &name) const
Get a shorthand.
void copy_check(const std::string &arg, std::size_t n, const std::string &res, bool check_lhs=true, bool check_rhs=true)
void copy_default(const std::string &arg, std::size_t n, const std::string &res, const std::string &def, bool check_rhs=true)
void add_auxiliary(Auxiliary f, const std::vector< std::string > &inst={"casadi_real"})
Add a built-in auxiliary function.
Helper class for Serialization.
void unpack(Sparsity &e)
Reconstruct an object from the input stream.
void version(const std::string &name, int v)
void alloc_iw(size_t sz_iw, bool persistent=false)
Ensure required length of iw field.
void alloc_res(size_t sz_res, bool persistent=false)
Ensure required length of res field.
virtual const std::vector< MX > mx_in() const
Get function input(s) and output(s)
void alloc_arg(size_t sz_arg, bool persistent=false)
Ensure required length of arg field.
virtual bool is_a(const std::string &type, bool recursive) const
Check if the function is of a particular type.
bool inputs_check_
Errors are thrown if numerical values of inputs look bad.
size_t sz_res() const
Get required length of res field.
std::pair< casadi_int, casadi_int > size_out(casadi_int ind) const
Input/output dimensions.
casadi_int nnz_in() const
Number of input/output nonzeros.
std::vector< Sparsity > sparsity_out_
void serialize_type(SerializingStream &s) const override
Serialize type information.
size_t sz_w() const
Get required length of w field.
virtual const std::vector< MX > mx_out() const
Get function input(s) and output(s)
void alloc_w(size_t sz_w, bool persistent=false)
Ensure required length of w field.
casadi_int nnz_out() const
Number of input/output nonzeros.
size_t sz_arg() const
Get required length of arg field.
void setup(void *mem, const double **arg, double **res, casadi_int *iw, double *w) const
Set the (persistent and temporary) work vectors.
void alloc(const Function &f, bool persistent=false, int num_threads=1)
Ensure work vectors long enough to evaluate function.
size_t sz_iw() const
Get required length of iw field.
static std::string string_from_UnifiedReturnStatus(UnifiedReturnStatus status)
Function object.
Definition: function.hpp:60
Function forward(casadi_int nfwd) const
Get a function that calculates nfwd forward derivatives.
Definition: function.cpp:1135
size_t sz_res() const
Get required length of res field.
Definition: function.cpp:1085
const Sparsity & sparsity_out(casadi_int ind) const
Get sparsity of a given output.
Definition: function.cpp:1031
casadi_int size1_in(casadi_int ind) const
Get input dimension.
Definition: function.cpp:827
const std::vector< std::string > & name_in() const
Get input scheme.
Definition: function.cpp:961
const std::string & name() const
Name of the function.
Definition: function.cpp:1315
casadi_int numel_out() const
Get number of output elements.
Definition: function.cpp:863
Function reverse(casadi_int nadj) const
Get a function that calculates nadj adjoint derivatives.
Definition: function.cpp:1143
const T sym_in(casadi_int iind) const
Get symbolic primitives equivalent to the input expressions.
static Function create(FunctionInternal *node)
Create from node.
Definition: function.cpp:336
const Sparsity & sparsity_in(casadi_int ind) const
Get sparsity of a given input.
Definition: function.cpp:1015
size_t sz_iw() const
Get required length of iw field.
Definition: function.cpp:1087
casadi_int n_out() const
Get the number of function outputs.
Definition: function.cpp:823
casadi_int n_in() const
Get the number of function inputs.
Definition: function.cpp:819
std::vector< std::string > get_free() const
Get free variables as a string.
Definition: function.cpp:1193
size_t sz_w() const
Get required length of w field.
Definition: function.cpp:1089
size_t sz_arg() const
Get required length of arg field.
Definition: function.cpp:1083
bool has_free() const
Does the function have free variables.
Definition: function.cpp:1705
casadi_int size1_out(casadi_int ind) const
Get output dimension.
Definition: function.cpp:835
std::pair< casadi_int, casadi_int > size_in(casadi_int ind) const
Get input dimension.
Definition: function.cpp:843
void call(const std::vector< DM > &arg, std::vector< DM > &res, bool always_inline=false, bool never_inline=false) const
Evaluate the function symbolically or numerically.
Definition: function.cpp:357
Function factory(const std::string &name, const std::vector< std::string > &s_in, const std::vector< std::string > &s_out, const AuxOut &aux=AuxOut(), const Dict &opts=Dict()) const
Definition: function.cpp:1820
const std::vector< std::string > & name_out() const
Get output scheme.
Definition: function.cpp:965
static MX sym(const std::string &name, casadi_int nrow=1, casadi_int ncol=1)
Create an nrow-by-ncol symbolic primitive.
bool is_null() const
Is a null pointer?
bool shared_if_alive(Shared &shared) const
Thread-safe alternative to alive()/shared()
Importer.
Definition: importer.hpp:86
MX - Matrix expression.
Definition: mx.hpp:92
const Sparsity & sparsity() const
Get the sparsity pattern.
Definition: mx.cpp:592
static MX blockcat(const std::vector< std::vector< MX > > &v)
Definition: mx.cpp:1197
MX T() const
Transpose the matrix.
Definition: mx.cpp:1029
static MX solve(const MX &a, const MX &b)
Definition: mx.cpp:2007
static MX vertcat(const std::vector< MX > &x)
Definition: mx.cpp:1099
A symbolic NLP representation.
Definition: nlp_builder.hpp:41
std::vector< MX > x
Variables.
Definition: nlp_builder.hpp:50
std::vector< MX > g
Constraints.
Definition: nlp_builder.hpp:56
MX f
Objective.
Definition: nlp_builder.hpp:53
void serialize_type(SerializingStream &s) const override
Serialize type information.
Definition: nlpsol.cpp:1413
Nlpsol(const std::string &name, const Function &oracle)
Constructor.
Definition: nlpsol.cpp:344
bool iteration_callback_ignore_errors_
Options.
Definition: nlpsol_impl.hpp:95
WeakRef kkt_
Cache for KKT function.
void codegen_body_exit(CodeGenerator &g) const override
Generate code for the function body.
Definition: nlpsol.cpp:1346
bool calc_lam_p_
Options.
Definition: nlpsol_impl.hpp:97
Sparsity get_sparsity_out(casadi_int i) override
Sparsities of function inputs and outputs.
Definition: nlpsol.cpp:389
virtual DM getReducedHessian()
Definition: nlpsol.cpp:746
Dict get_stats(void *mem) const override
Get all statistics.
Definition: nlpsol.cpp:1239
Function get_forward(casadi_int nfwd, const std::string &name, const std::vector< std::string > &inames, const std::vector< std::string > &onames, const Dict &opts) const override
Generate a function that calculates forward mode derivatives.
Definition: nlpsol.cpp:946
static const Options options_
Options.
void codegen_body_enter(CodeGenerator &g) const override
Generate code for the function body.
Definition: nlpsol.cpp:1256
void codegen_declarations(CodeGenerator &g) const override
Generate code for the declarations of the C function.
Definition: nlpsol.cpp:1327
void init(const Dict &opts) override
Initialize.
Definition: nlpsol.cpp:497
casadi_int ng_
Number of constraints.
Definition: nlpsol_impl.hpp:69
int eval(const double **arg, double **res, casadi_int *iw, double *w, void *mem) const final
Evaluate numerically.
Definition: nlpsol.cpp:781
Function kkt() const
Definition: nlpsol.cpp:924
virtual void check_inputs(void *mem) const
Check if the inputs correspond to a well-posed problem.
Definition: nlpsol.cpp:690
bool eval_errors_fatal_
Options.
Definition: nlpsol_impl.hpp:93
int init_mem(void *mem) const override
Initalize memory block.
Definition: nlpsol.cpp:680
static ProtoFunction * deserialize(DeserializingStream &s)
Deserialize into MX.
Definition: nlpsol.cpp:1418
Function detect_simple_bounds_parts_
Definition: nlpsol_impl.hpp:85
bool calc_multipliers_
Options.
Definition: nlpsol_impl.hpp:96
static void bound_consistency(casadi_int n, double *z, double *lam, const double *lbz, const double *ubz)
Definition: nlpsol.cpp:755
std::vector< bool > equality_
Options.
bool warn_initial_bounds_
Options.
Definition: nlpsol_impl.hpp:94
static const std::string infix_
Infix.
Dict sens_linsol_options_
Definition: nlpsol_impl.hpp:82
casadi_nlpsol_prob< double > p_nlp_
Definition: nlpsol_impl.hpp:63
void disp_more(std::ostream &stream) const override
Print description.
Definition: nlpsol.cpp:919
void serialize_body(SerializingStream &s) const override
Serialize an object without type information.
Definition: nlpsol.cpp:1383
static std::map< std::string, Plugin > solvers_
Collection of solvers.
bool calc_f_
Options.
Definition: nlpsol_impl.hpp:97
bool calc_g_
Options.
Definition: nlpsol_impl.hpp:97
Function get_reverse(casadi_int nadj, const std::string &name, const std::vector< std::string > &inames, const std::vector< std::string > &onames, const Dict &opts) const override
Generate a function that calculates reverse mode derivatives.
Definition: nlpsol.cpp:1077
std::string class_name() const override
Get type name.
std::vector< char > detect_simple_bounds_is_simple_
Definition: nlpsol_impl.hpp:84
casadi_int np_
Number of parameters.
Definition: nlpsol_impl.hpp:72
double min_lam_
Options.
Definition: nlpsol_impl.hpp:99
Sparsity get_sparsity_in(casadi_int i) override
Sparsities of function inputs and outputs.
Definition: nlpsol.cpp:371
static Function create_oracle(const std::map< std::string, XType > &d, const Dict &opts)
Convert dictionary to Problem.
Definition: nlpsol.cpp:206
bool calc_lam_x_
Options.
Definition: nlpsol_impl.hpp:97
std::vector< casadi_int > detect_simple_bounds_target_g_
Definition: nlpsol_impl.hpp:87
casadi_int callback_step_
Execute the callback function only after this amount of iterations.
Definition: nlpsol_impl.hpp:78
virtual void setOptionsFromFile(const std::string &file)
Read options from parameter xml.
Definition: nlpsol.cpp:751
std::vector< casadi_int > detect_simple_bounds_target_x_
Definition: nlpsol_impl.hpp:86
int callback(NlpsolMemory *m) const
Definition: nlpsol.cpp:1198
~Nlpsol() override=0
Destructor.
Definition: nlpsol.cpp:363
std::vector< bool > discrete_
Options.
casadi_int nx_
Number of variables.
Definition: nlpsol_impl.hpp:66
virtual bool integer_support() const
Can discrete variables be treated.
void set_work(void *mem, const double **&arg, double **&res, casadi_int *&iw, double *&w) const override
Set the (persistent) work vectors.
Definition: nlpsol.cpp:872
bool bound_consistency_
Options.
Definition: nlpsol_impl.hpp:98
bool no_nlp_grad_
Options.
std::string sens_linsol_
Linear solver and options.
Definition: nlpsol_impl.hpp:81
virtual int solve(void *mem) const =0
Function fcallback_
callback function, executed at each iteration
Definition: nlpsol_impl.hpp:75
bool is_a(const std::string &type, bool recursive) const override
Check if the function is of a particular type.
Definition: nlpsol.cpp:367
double get_default_in(casadi_int ind) const override
Get default input value.
Base class for functions that perform calculation with an oracle.
Function oracle_
Oracle: Used to generate other functions.
Function create_function(const Function &oracle, const std::string &fname, const std::vector< std::string > &s_in, const std::vector< std::string > &s_out, const Function::AuxOut &aux=Function::AuxOut(), const Dict &opts=Dict())
void join_results(OracleMemory *m) const
Combine results from different threads.
void init(const Dict &opts) override
int init_mem(void *mem) const override
Initalize memory block.
virtual void codegen_body_enter(CodeGenerator &g) const
Generate code for the function body.
int calc_function(OracleMemory *m, const std::string &fcn, const double *const *arg=nullptr, int thread_id=0) const
std::vector< std::string > get_function() const override
Get list of dependency functions.
static const Options options_
Options.
Dict get_stats(void *mem) const override
Get all statistics.
void serialize_body(SerializingStream &s) const override
Serialize an object without type information.
virtual void codegen_body_exit(CodeGenerator &g) const
Generate code for the function body.
static bool has_plugin(const std::string &pname, bool verbose=false)
Check if a plugin is available or can be loaded.
static Nlpsol * instantiate(const std::string &fname, const std::string &pname, Problem problem)
void serialize_type(SerializingStream &s) const
Serialize type information.
static const Options & plugin_options(const std::string &pname)
Get the plugin options.
static Plugin & getPlugin(const std::string &pname)
Load and get the creator function.
static ProtoFunction * deserialize(DeserializingStream &s)
Deserialize with type disambiguation.
static Plugin load_plugin(const std::string &pname, bool register_plugin=true, bool needs_lock=true)
Load a plugin dynamically.
Base class for FunctionInternal and LinsolInternal.
bool error_on_fail_
Throw an exception on failure?
void print(const char *fmt,...) const
C-style formatted printing during evaluation.
void clear_mem()
Clear all memory (called from destructor)
Helper class for Serialization.
void version(const std::string &name, int v)
void pack(const Sparsity &e)
Serializes an object to the output stream.
GenericShared implements a reference counting framework for efficient and easily maintained memory management.
void disp(std::ostream &stream, bool more=false) const
Print a description of the object.
Class representing a Slice.
Definition: slice.hpp:48
General sparsity class.
Definition: sparsity.hpp:106
casadi_int numel() const
The total number of elements, including structural zeros, i.e. size2()*size1()
Definition: sparsity.cpp:132
std::string dim(bool with_nz=false) const
Get the dimension as a string.
Definition: sparsity.cpp:587
static Sparsity dense(casadi_int nrow, casadi_int ncol=1)
Create a dense rectangular sparsity pattern.
Definition: sparsity.cpp:1012
Sparsity T() const
Transpose the matrix.
Definition: sparsity.cpp:394
const casadi_int * row() const
Get a reference to the row vector (see class description).
Definition: sparsity.cpp:164
bool is_empty(bool both=false) const
Check if the sparsity is empty.
Definition: sparsity.cpp:144
const casadi_int * colind() const
Get a reference to the column index vector for all columns (see class description).
Definition: sparsity.cpp:168
std::string doc_nlpsol(const std::string &name)
Get the documentation string for a plugin.
Definition: nlpsol.cpp:43
bool has_nlpsol(const std::string &name)
Check if a particular plugin is available.
Definition: nlpsol.cpp:35
void load_nlpsol(const std::string &name)
Explicitly load a plugin dynamically.
Definition: nlpsol.cpp:39
std::string nlpsol_option_info(const std::string &name, const std::string &op)
Get documentation for a particular option.
Definition: nlpsol.cpp:915
casadi_int nlpsol_n_in()
Number of NLP solver inputs.
Definition: nlpsol.cpp:336
std::string nlpsol_option_type(const std::string &name, const std::string &op)
Get type info for a particular option.
Definition: nlpsol.cpp:911
std::vector< std::string > nlpsol_options(const std::string &name)
Get all options for a plugin.
Definition: nlpsol.cpp:907
std::vector< std::string > nlpsol_in()
Get input scheme of NLP solvers.
Definition: nlpsol.cpp:277
Function nlpsol(const std::string &name, const std::string &solver, const SXDict &nlp, const Dict &opts)
Definition: nlpsol.cpp:195
casadi_int nlpsol_n_out()
Number of NLP solver outputs.
Definition: nlpsol.cpp:340
std::vector< std::string > nlpsol_out()
Get NLP solver output scheme of NLP solvers.
Definition: nlpsol.cpp:283
double nlpsol_default_in(casadi_int ind)
Default input for an NLP solver.
Definition: nlpsol.cpp:289
The casadi namespace.
Definition: archiver.cpp:28
NlpsolInput
Input arguments of an NLP Solver.
Definition: nlpsol.hpp:194
@ NLPSOL_P
Value of fixed parameters (np x 1)
Definition: nlpsol.hpp:198
@ NLPSOL_UBX
Decision variables upper bound (nx x 1), default +inf.
Definition: nlpsol.hpp:202
@ NLPSOL_X0
Decision variables, initial guess (nx x 1)
Definition: nlpsol.hpp:196
@ NLPSOL_LAM_G0
Lagrange multipliers for bounds on G, initial guess (ng x 1)
Definition: nlpsol.hpp:210
@ NLPSOL_UBG
Constraints upper bound (ng x 1), default +inf.
Definition: nlpsol.hpp:206
@ NLPSOL_LAM_X0
Lagrange multipliers for bounds on X, initial guess (nx x 1)
Definition: nlpsol.hpp:208
@ NLPSOL_NUM_IN
Definition: nlpsol.hpp:211
@ NLPSOL_LBG
Constraints lower bound (ng x 1), default -inf.
Definition: nlpsol.hpp:204
@ NLPSOL_LBX
Decision variables lower bound (nx x 1), default -inf.
Definition: nlpsol.hpp:200
std::map< std::string, MX > MXDict
Definition: mx.hpp:1009
bool name_has_g(const std::string &name)
Definition: nlpsol.cpp:47
NlpsolOutput
Output arguments of an NLP Solver.
Definition: nlpsol.hpp:215
@ NLPSOL_G
Constraints function at the optimal solution (ng x 1)
Definition: nlpsol.hpp:221
@ NLPSOL_X
Decision variables at the optimal solution (nx x 1)
Definition: nlpsol.hpp:217
@ NLPSOL_NUM_OUT
Definition: nlpsol.hpp:228
@ NLPSOL_LAM_P
Lagrange multipliers for bounds on P at the solution (np x 1)
Definition: nlpsol.hpp:227
@ NLPSOL_F
Cost function value at the optimal solution (1 x 1)
Definition: nlpsol.hpp:219
@ NLPSOL_LAM_G
Lagrange multipliers for bounds on G at the solution (ng x 1)
Definition: nlpsol.hpp:225
@ NLPSOL_LAM_X
Lagrange multipliers for bounds on X at the solution (nx x 1)
Definition: nlpsol.hpp:223
T get_from_dict(const std::map< std::string, T > &d, const std::string &key, const T &default_value)
void assign_vector(const std::vector< S > &s, std::vector< D > &d)
@ NL_X
Decision variable.
Definition: nlpsol.hpp:170
@ NL_P
Fixed parameter.
Definition: nlpsol.hpp:172
@ NL_NUM_IN
Number of NLP inputs.
Definition: nlpsol.hpp:174
double if_else(double x, double y, double z)
Definition: calculus.hpp:290
@ NL_F
Objective function.
Definition: nlpsol.hpp:183
@ NL_G
Constraint function.
Definition: nlpsol.hpp:185
@ NL_NUM_OUT
Number of NLP outputs.
Definition: nlpsol.hpp:187
int detect_bounds_callback(const double **arg, double **res, casadi_int *iw, double *w, void *callback_data)
Definition: nlpsol.cpp:653
void casadi_copy(const T1 *x, casadi_int n, T1 *y)
COPY: y <- x.
void casadi_fill(T1 *x, casadi_int n, T1 alpha)
FILL: x <- alpha.
@ OT_BOOLVECTOR
@ OT_INTVECTOR
std::map< std::string, SX > SXDict
Definition: sx_fwd.hpp:40
std::string str(const T &v)
String representation, any type.
GenericType::Dict Dict
C++ equivalent of Python's dict or MATLAB's struct.
std::vector< bool > boolvec_not(const std::vector< bool > &v)
Invert all entries.
const double inf
infinity
Definition: calculus.hpp:50
const std::vector< std::string > NL_INPUTS
Short name for input arguments of an NLP function.
Definition: nlpsol.hpp:178
const double nan
Not a number.
Definition: calculus.hpp:53
void casadi_scal(casadi_int n, T1 alpha, T1 *x)
SCAL: x <- alpha*x.
std::vector< T > vector_select(const std::vector< T > &v, const std::vector< bool > &s, bool invert=false)
Select subset of vector.
T * get_ptr(std::vector< T > &v)
Get a pointer to the data contained in the vector.
Function construct_nlpsol(const std::string &name, const std::string &solver, const std::map< std::string, X > &nlp, const Dict &opts)
Definition: nlpsol.cpp:63
Matrix< double > DM
Definition: dm_fwd.hpp:33
Function external(const std::string &name, const Importer &li, const Dict &opts)
Load a just-in-time compiled external function.
Definition: external.cpp:42
@ SOLVER_RET_SUCCESS
@ SOLVER_RET_UNKNOWN
@ SOLVER_RET_EXCEPTION
std::vector< casadi_int > boolvec_to_index(const std::vector< bool > &v)
const std::vector< std::string > NL_OUTPUTS
Shortname for output arguments of an NLP function.
Definition: nlpsol.hpp:191
Integrator memory.
Definition: nlpsol_impl.hpp:40
casadi_nlpsol_data< double > d_nlp
Definition: nlpsol_impl.hpp:42
Options metadata for a class.
Definition: options.hpp:40
std::string type(const std::string &name) const
Definition: options.cpp:289
std::vector< std::string > all() const
Definition: options.cpp:283
std::string info(const std::string &name) const
Definition: options.cpp:295
std::map< std::string, FStats > fstats
void add_stat(const std::string &s)
const T1 * lam_g0
Definition: casadi_nlp.hpp:87
const T1 * lam_x0
Definition: casadi_nlp.hpp:87
casadi_nlpsol_detect_bounds_prob< T1 > detect_bounds
Definition: casadi_nlp.hpp:46